Compare commits
616 Commits
dc1ed2a59e
...
feat/ms22-
| Author | SHA1 | Date | |
|---|---|---|---|
| 7c7a821b0f | |||
| c640d22394 | |||
| 5e7346adc7 | |||
| d07a840f25 | |||
| 4b2e48af9c | |||
| 7b390d8be2 | |||
| e8502577b8 | |||
| af68f84dcd | |||
| b57f549d39 | |||
| 2c8d0a8daf | |||
| c939a541a7 | |||
| 895ea7fd14 | |||
| e93e7ffaa9 | |||
| 307639eca0 | |||
| 31814f181a | |||
| 5cd6b8622d | |||
| 20c9e68e1b | |||
| 127bf61fe2 | |||
| f99107fbfc | |||
| 5b782bafc9 | |||
| 85d3f930f3 | |||
| 0e6734bdae | |||
| 5bcaaeddd9 | |||
| 676a2a288b | |||
| ac16d6ed88 | |||
| 8388d49786 | |||
| 20f914ea85 | |||
| 1b84741f1a | |||
| ffc10c9a45 | |||
| 62d9ac0e5a | |||
| 8098504fb8 | |||
| 128431ba58 | |||
| d2c51eda91 | |||
| 78b643a945 | |||
| f93503ebcf | |||
| c0e679ab7c | |||
| 6ac63fe755 | |||
| 1667f28d71 | |||
| 66fe475fa1 | |||
| d39ab6aafc | |||
| 147e8ac574 | |||
| c38bfae16c | |||
| 36b4d8323d | |||
| 833662a64f | |||
| b3922e1d5b | |||
| 78b71a0ecc | |||
| dd0568cf15 | |||
| 8964226163 | |||
| 11f22a7e96 | |||
| edcff6a0e0 | |||
| e3cba37e8c | |||
| 21bf7e050f | |||
| 83d5aee53a | |||
| cc5b108b2f | |||
| 5ed0a859da | |||
| bf299bb672 | |||
| ad99cb9a03 | |||
| d05b870f08 | |||
| 1aaf5618ce | |||
| 9b2520ce1f | |||
| b110c469c4 | |||
| 859dcfc4b7 | |||
| 13aa52aa53 | |||
| 417c6ab49c | |||
| 8128eb7fbe | |||
| 7de0e734b0 | |||
| 6290fc3d53 | |||
| 9f4de1682f | |||
| 374ca7ace3 | |||
| 72c64d2eeb | |||
| 5f6c520a98 | |||
| 9a7673bea2 | |||
| 91934b9933 | |||
| 7f89682946 | |||
| 8b4c565f20 | |||
| d5ecc0b107 | |||
| a81c4a5edd | |||
| ff5a09c3fb | |||
| f93fa60fff | |||
| cc56f2cbe1 | |||
| f9cccd6965 | |||
| 90c3bbccdf | |||
| 79286e98c6 | |||
| cfd1def4a9 | |||
| f435d8e8c6 | |||
| 3d78b09064 | |||
| a7955b9b32 | |||
| 372cc100cc | |||
| 37cf813b88 | |||
| 3d5b50af11 | |||
| f30c2f790c | |||
| 05b1a93ccb | |||
| a78a8b88e1 | |||
| 172ed1d40f | |||
| ee2ddfc8b8 | |||
| 5a6d00a064 | |||
| ffda74ec12 | |||
| f97be2e6a3 | |||
| 97606713b5 | |||
| d0c720e6da | |||
| 64e817cfb8 | |||
| cd5c2218c8 | |||
| f643d2bc04 | |||
| 8957904ea9 | |||
| 458cac7cdd | |||
| 7581d26567 | |||
| 07f5225a76 | |||
| 7c55464d54 | |||
| ea1620fa7a | |||
| d218902cb0 | |||
| b43e860c40 | |||
| 716f230f72 | |||
| a5ed260fbd | |||
| 9b5c15ca56 | |||
| 74c8c376b7 | |||
| 9901fba61e | |||
| 17144b1c42 | |||
| a6f75cd587 | |||
| 06e54328d5 | |||
| 7480deff10 | |||
| 1b66417be5 | |||
| 23d610ba5b | |||
| 25ae14aba1 | |||
| 1425893318 | |||
| bc4c1f9c70 | |||
| d66451cf48 | |||
| c23ebca648 | |||
|
|
eae55bc4a3 | ||
| b5ac2630c1 | |||
| 8424a28faa | |||
| d2cec04cba | |||
| 9ac971e857 | |||
| 0c2a6b14cf | |||
| af299abdaf | |||
| fa9f173f8e | |||
| 7935d86015 | |||
| f43631671f | |||
| 8328f9509b | |||
| f72e8c2da9 | |||
| 1a668627a3 | |||
| bd3625ae1b | |||
| aeac188d40 | |||
| f219dd71a0 | |||
| 2c3c1f67ac | |||
| dedc1af080 | |||
| 3b16b2c743 | |||
|
|
6fd8e85266 | ||
|
|
d3474cdd74 | ||
| 157b702331 | |||
|
|
63c6a129bd | ||
| 4a4aee7b7c | |||
|
|
9d9a01f5f7 | ||
|
|
5bce7dbb05 | ||
|
|
ab902250f8 | ||
|
|
d34f097a5c | ||
|
|
f4ad7eba37 | ||
|
|
4d089cd020 | ||
|
|
3258cd4f4d | ||
| 35dd623ab5 | |||
|
|
758b2a839b | ||
| af113707d9 | |||
|
|
57d0f5d2a3 | ||
|
|
ad428598a9 | ||
|
|
cab8d690ab | ||
| 0a780a5062 | |||
| a1515676db | |||
|
|
254f85369b | ||
|
|
ddf6851bfd | ||
| 027fee1afa | |||
| abe57621cd | |||
| 7c7ad59002 | |||
| ca430d6fdf | |||
| 18e5f6312b | |||
| d2ed1f2817 | |||
| fb609d40e3 | |||
| 0c93be417a | |||
| b719fa0444 | |||
|
|
8961f5b18c | ||
| d58bf47cd7 | |||
|
|
c917a639c4 | ||
|
|
9d3a673e6c | ||
|
|
b96e2d7dc6 | ||
|
|
76756ad695 | ||
|
|
05ee6303c2 | ||
|
|
5328390f4c | ||
|
|
4d9b75994f | ||
|
|
d7de20e586 | ||
|
|
399d5a31c8 | ||
|
|
b675db1324 | ||
|
|
e0d6d585b3 | ||
|
|
0a2eaaa5e4 | ||
|
|
df495c67b5 | ||
|
|
3e2c1b69ea | ||
|
|
27c4c8edf3 | ||
|
|
e600cfd2d0 | ||
|
|
08e32d42a3 | ||
|
|
752e839054 | ||
|
|
8a572e8525 | ||
|
|
4f31690281 | ||
|
|
097f5f4ab6 | ||
|
|
ac492aab80 | ||
|
|
110e181272 | ||
|
|
9696e45265 | ||
|
|
7ead8b1076 | ||
|
|
3fbba135b9 | ||
|
|
c233d97ba0 | ||
|
|
f1ee0df933 | ||
|
|
07084208a7 | ||
|
|
f500300b1f | ||
|
|
24ee7c7f87 | ||
|
|
d9a3eeb9aa | ||
|
|
077bb042b7 | ||
|
|
1d7d5a9d01 | ||
|
|
2020c15545 | ||
|
|
3ab87362a9 | ||
|
|
81b5204258 | ||
|
|
9623a3be97 | ||
|
|
f37c83e280 | ||
|
|
7ebbcbf958 | ||
|
|
b316e98b64 | ||
|
|
447141f05d | ||
|
|
3b2356f5a0 | ||
|
|
d2605196ac | ||
|
|
2d59c4b2e4 | ||
|
|
a9090aca7f | ||
|
|
f6eadff5bf | ||
|
|
9ae21c4c15 | ||
|
|
976d14d94b | ||
|
|
b2eec3cf83 | ||
|
|
bd7470f5d7 | ||
| 491675b613 | |||
| 4b3eecf05a | |||
| 3376d8162e | |||
| e2ffaa71b1 | |||
| 444fa1116a | |||
| 31ce9e920c | |||
| ba54de88fd | |||
| ca21416efc | |||
| 1bad7a8cca | |||
| 6015ace1de | |||
| 92de2f282f | |||
| 1fde25760a | |||
| cf28efa880 | |||
| 11d284554d | |||
| 3cc2030446 | |||
| eca2c46e9d | |||
| c5a87df6e1 | |||
| 17ee28b6f6 | |||
| af9c5799af | |||
| dcbc8d1053 | |||
| d2c7602430 | |||
| 24065aa199 | |||
| bc86947d01 | |||
| 74d6c1092e | |||
| 03d0c032e4 | |||
| 8d19ac1f4b | |||
| 28c9e6fe65 | |||
| b3d6d73348 | |||
| 527262af38 | |||
| a1f0d1dd71 | |||
| 9cc70dbe31 | |||
| 6c465566f6 | |||
| 68808c0933 | |||
| 7b4fda6011 | |||
| 0819dfa470 | |||
| 93cd31435b | |||
| d37c78f503 | |||
| aa106a948a | |||
| 79b1d81d27 | |||
| ad24720616 | |||
| a943ae139a | |||
| 8e27f73f8f | |||
| b5edb4f37e | |||
| 4a9ecab4dd | |||
| 3ae9e53bcc | |||
| 771ed484e4 | |||
| 2eafa91e70 | |||
| 7d22c2490a | |||
| 248f711571 | |||
| 306c2e5bd8 | |||
| 746ab20c38 | |||
| a5ee974765 | |||
| 5958569cba | |||
| d6c6af10d9 | |||
| ed23293e1a | |||
| fcecf3654b | |||
| 24c21f45b3 | |||
| 314dd24dce | |||
| 8d8d37dbf9 | |||
| c40373fa3b | |||
| 52553c8266 | |||
| f238867eae | |||
| 5b5d3811d6 | |||
| 4cc43bece6 | |||
| 4a5cb6441e | |||
| 6e4236b359 | |||
| fb53272fa9 | |||
| 8ce6843af2 | |||
| dfe89b7a3b | |||
| 7aee5ed5ba | |||
| 3d54f7a7f0 | |||
| 6e20fc5d16 | |||
| d2003a7b03 | |||
| 8733a643bf | |||
| 91307c87cc | |||
| f4e759c07a | |||
| b6d272992a | |||
| 14162b9213 | |||
| 44a44b5f56 | |||
| 899faba7e2 | |||
| bcee4fa601 | |||
| ab52827d9c | |||
| 0ca3945061 | |||
| 7b892d5197 | |||
| e23490a5f7 | |||
| 1b3ff1b5e1 | |||
| 46be7aa36f | |||
|
|
0363a14098 | ||
|
|
7fb70210a4 | ||
| 2ab795a95d | |||
|
|
e8a9a3087a | ||
|
|
3b12adf8f7 | ||
|
|
3833805a93 | ||
|
|
08f62f1787 | ||
|
|
d58edcb51c | ||
|
|
b957468738 | ||
|
|
111a41c7ca | ||
|
|
a269f4b0ee | ||
|
|
6335459799 | ||
|
|
8020101cc8 | ||
|
|
c5b360f670 | ||
|
|
432dbd4d83 | ||
|
|
a534f70abd | ||
|
|
429cf85f87 | ||
|
|
dce975bf4e | ||
|
|
5af32c6d47 | ||
|
|
5a35fd69bc | ||
| e368083e84 | |||
| 4a4d3efbfb | |||
| 3a922d447f | |||
| 9ff1e69860 | |||
| c8bf7f6b70 | |||
| 64396cf9de | |||
| 1456a6f149 | |||
| fc2a13ad74 | |||
| 72b1d9f4f2 | |||
| b3c0f51dc9 | |||
| 6a5a4e4de8 | |||
| ab64583951 | |||
| f3694592cc | |||
| c4f6552e12 | |||
| af2e2b083d | |||
| 281c7ab39b | |||
| d273220838 | |||
|
|
946d84442a | ||
|
|
64077b5169 | ||
|
|
e9392e719c | ||
| 709499c167 | |||
| ecfd02541f | |||
| 4545c6dc7a | |||
| 3485ab7883 | |||
| 66269fa816 | |||
| 83dee62f0e | |||
| 7c01352ab5 | |||
| c195b8c8fd | |||
| dac735af56 | |||
| f8477d5052 | |||
| 6521cba735 | |||
| 71b32398ad | |||
| c5b028932c | |||
| 5b5a5e458a | |||
| f1e6fc29f6 | |||
| aad6cb75d0 | |||
| a61f9262e6 | |||
| 32aff3787d | |||
| 8b78ffe4a0 | |||
| f0bfbe4367 | |||
| 657c33927b | |||
| 2ca36b1518 | |||
| ee6929fad5 | |||
| 0e3baae415 | |||
| 7f3499b1f2 | |||
| 2a9a1f1367 | |||
| ed92bb5402 | |||
| dc551f138a | |||
| 75766a37b4 | |||
| 0b0666558e | |||
| 4552c2c460 | |||
| b9e1e3756e | |||
| 9f0956d4a4 | |||
| 73074932f6 | |||
| 33dc746714 | |||
| 46d0a06ef5 | |||
| aa2ee5aea3 | |||
| 864c23dc94 | |||
| 1f86c36cc1 | |||
| 40f7e7e4c0 | |||
| dd171b287f | |||
| d4d1e59885 | |||
| 9446475ea2 | |||
| 737eb40d18 | |||
| 89464583a4 | |||
| cf9a3dc526 | |||
| 6a1ca5bc10 | |||
| 93d403807b | |||
| e20aea99b9 | |||
| a69904a47b | |||
| 7feb686d73 | |||
| 51ce32cc76 | |||
| ec87c5479b | |||
| bed440dc36 | |||
| 65e56cac5e | |||
|
|
69cc3f8e1e | ||
|
|
f64ca3871d | ||
|
|
da1862816f | ||
|
|
893a139087 | ||
| ac796072d8 | |||
|
|
fd73709092 | ||
|
|
3d9edf4141 | ||
|
|
bfeea743f7 | ||
|
|
952eeb7323 | ||
|
|
214139f4d5 | ||
|
|
1005b7969c | ||
|
|
12fa093f58 | ||
|
|
014264c592 | ||
|
|
14b547d468 | ||
|
|
6d92251fc1 | ||
|
|
65b078c85e | ||
|
|
dfef71b660 | ||
|
|
6934d9261c | ||
|
|
3880993b60 | ||
|
|
144495ae6b | ||
|
|
08d077605a | ||
|
|
2e11931ded | ||
|
|
617df12b52 | ||
|
|
6c379d099a | ||
|
|
92c310333c | ||
|
|
2bb1dffe97 | ||
|
|
36f55558d2 | ||
|
|
57441e2e64 | ||
|
|
433212e00f | ||
|
|
298a379c42 | ||
|
|
d52423d3ce | ||
|
|
c9ad3a661a | ||
|
|
a0062494b7 | ||
|
|
2b356f6ca2 | ||
|
|
6dd2ce1014 | ||
|
|
d9efa85924 | ||
|
|
25d2958fe4 | ||
|
|
c38271da3b | ||
|
|
bb6e08208c | ||
|
|
17cfeb974b | ||
|
|
ef1f1eee9d | ||
|
|
7f0f7ce484 | ||
|
|
2c49371102 | ||
|
|
76ac113d0c | ||
|
|
89ec509eb9 | ||
|
|
d84730e8e1 | ||
| 2146798768 | |||
|
|
3c5ca0c2be | ||
|
|
6bbac918c2 | ||
|
|
c7381476e0 | ||
|
|
00b7500d05 | ||
|
|
96b259cbc1 | ||
|
|
10b49c4afb | ||
|
|
519093f42e | ||
| 4188f29161 | |||
|
|
fcaeb0fbcd | ||
|
|
8d8db47289 | ||
|
|
52f47c2311 | ||
|
|
7e9022bf9b | ||
|
|
722b16a903 | ||
|
|
3cfed1ebe3 | ||
|
|
89bb24493a | ||
|
|
22446acd8a | ||
|
|
e891449e0f | ||
|
|
b952c24f21 | ||
|
|
dcf9a2217d | ||
|
|
880919c77e | ||
|
|
a22fadae7e | ||
|
|
a42f88d64c | ||
|
|
8d57191a91 | ||
|
|
a3490d7b09 | ||
|
|
442f8e0971 | ||
|
|
d53c80fef0 | ||
|
|
3b80e9c396 | ||
|
|
ce7fb27c46 | ||
|
|
3f16bbeca1 | ||
|
|
e747c8db04 | ||
|
|
67c72a2d82 | ||
|
|
1852fe2812 | ||
|
|
203bd1e7f2 | ||
|
|
10d4de5d69 | ||
|
|
1c79da70a6 | ||
|
|
1a15c12c56 | ||
|
|
dd46025d60 | ||
|
|
63a622cbef | ||
|
|
587272e2d0 | ||
|
|
344e5df3bb | ||
|
|
5ae07f7a84 | ||
|
|
970cc9f606 | ||
|
|
06de72a355 | ||
|
|
32c81e96cf | ||
|
|
7ae92f3e1c | ||
|
|
53f2cd7f47 | ||
|
|
7390cac2cc | ||
|
|
7f3cd17488 | ||
|
|
6c88e2b96d | ||
|
|
8d542609ff | ||
|
|
721d6d15c5 | ||
|
|
3055bd2d85 | ||
|
|
c30b4b1cc2 | ||
|
|
7cb7a4f543 | ||
|
|
45a795d29e | ||
|
|
6552edaa11 | ||
|
|
6a4f58dc1c | ||
|
|
6d6ef1d151 | ||
|
|
949d0d0ead | ||
|
|
65df2bbdd3 | ||
|
|
7e983e2455 | ||
|
|
e237c40482 | ||
|
|
6bb9846cde | ||
|
|
aa14b580b3 | ||
|
|
000145af96 | ||
|
|
c74b6b13d1 | ||
|
|
630f946718 | ||
|
|
9dfbf8cf61 | ||
|
|
b56bef0747 | ||
| bbc211f56e | |||
| 6b63ca3e07 | |||
| c22bde16cd | |||
| 4e4454b0ca | |||
| 670809afdb | |||
| 7bc37fc513 | |||
| dc4857b167 | |||
| 8f2afcd022 | |||
| 0f0488856f | |||
| a8828cb53e | |||
| 25bed45411 | |||
| 02cd6d4815 | |||
| 9e89fa320a | |||
|
|
c68b541b6f | ||
|
|
5a0f090cc5 | ||
|
|
0796cbc744 | ||
|
|
92ae8097df | ||
|
|
2cb3fe8f5a | ||
|
|
22dc964503 | ||
|
|
e7f277ff0c | ||
|
|
b93f4c59ce | ||
|
|
751005391b | ||
|
|
c8c81fc437 | ||
|
|
dd954ffee3 | ||
|
|
27bbbe79df | ||
|
|
06fa8f7402 | ||
|
|
6de631cd07 | ||
|
|
b836940b89 | ||
|
|
6516843612 | ||
|
|
5d683d401e | ||
| 3a98b78661 | |||
| 41f1dc48ed | |||
| e57271c278 | |||
| db23486e9e | |||
| f87a28ac55 | |||
| 6ff6957db4 | |||
| 9582d9a265 | |||
| d675189a77 | |||
| 4ac4219ce0 | |||
| 3e02bade98 | |||
| 68f641211a | |||
| 555fcd04db | |||
| 88be403c86 | |||
| ae4221968e | |||
| a2b61d2bff | |||
| 8aadfb99af | |||
| bc5ab30363 | |||
| 0b90012947 | |||
| 43681ca1b1 | |||
| 14ae97bba4 | |||
| d373ce591f | |||
| c59ab66d94 | |||
| e151d09531 | |||
| 38695b3bb8 | |||
| 01639fff95 | |||
| 3bba2f1c33 | |||
| 61e2bf7063 | |||
| 1390da2e74 | |||
| 77d1d14e08 | |||
| ecb33a17fe | |||
| aabf97fe4e | |||
| a1973e6419 | |||
| 482507ce4d | |||
| 3705af9991 | |||
| f25782a850 | |||
| 0a527d2a4e | |||
| 09bb6df0b6 | |||
| 671446864d | |||
| ebd842f007 | |||
| 001a44532d | |||
| b7f4749ffb | |||
| 596ec39442 | |||
| a9254c1bd8 | |||
| 744290a438 | |||
| 0669c7cb77 | |||
| 7d9c102c6d | |||
| 7a84d96d72 | |||
| 148121c9d4 | |||
| 07f271e4fa | |||
| 701df76df1 | |||
| 7c9bb67fcd | |||
| 3e15f39b3e | |||
| 449ef39d96 | |||
| de9ab5d96d | |||
| e31cf89437 | |||
| 004f7828fb | |||
| f0be6a31e4 | |||
|
|
bb144a7d1c |
314
.env.example
314
.env.example
@@ -15,17 +15,30 @@ WEB_PORT=3000
|
||||
# ======================
|
||||
NEXT_PUBLIC_APP_URL=http://localhost:3000
|
||||
NEXT_PUBLIC_API_URL=http://localhost:3001
|
||||
# Frontend auth mode:
|
||||
# - real: Normal auth/session flow
|
||||
# - mock: Local-only seeded user for FE development (blocked outside NODE_ENV=development)
|
||||
# Use `mock` locally to continue FE work when auth flow is unstable.
|
||||
# If omitted, web runtime defaults:
|
||||
# - development -> mock
|
||||
# - production -> real
|
||||
NEXT_PUBLIC_AUTH_MODE=real
|
||||
|
||||
# ======================
|
||||
# PostgreSQL Database
|
||||
# ======================
|
||||
# Bundled PostgreSQL
|
||||
# SECURITY: Change POSTGRES_PASSWORD to a strong random password in production
|
||||
DATABASE_URL=postgresql://mosaic:REPLACE_WITH_SECURE_PASSWORD@localhost:5432/mosaic
|
||||
DATABASE_URL=postgresql://mosaic:REPLACE_WITH_SECURE_PASSWORD@postgres:5432/mosaic
|
||||
POSTGRES_USER=mosaic
|
||||
POSTGRES_PASSWORD=REPLACE_WITH_SECURE_PASSWORD
|
||||
POSTGRES_DB=mosaic
|
||||
POSTGRES_PORT=5432
|
||||
|
||||
# External PostgreSQL (managed service)
|
||||
# To use an external instance, update DATABASE_URL above
|
||||
# Example: DATABASE_URL=postgresql://user:pass@rds.amazonaws.com:5432/mosaic
|
||||
|
||||
# PostgreSQL Performance Tuning (Optional)
|
||||
POSTGRES_SHARED_BUFFERS=256MB
|
||||
POSTGRES_EFFECTIVE_CACHE_SIZE=1GB
|
||||
@@ -34,12 +47,18 @@ POSTGRES_MAX_CONNECTIONS=100
|
||||
# ======================
|
||||
# Valkey Cache (Redis-compatible)
|
||||
# ======================
|
||||
VALKEY_URL=redis://localhost:6379
|
||||
VALKEY_HOST=localhost
|
||||
# Bundled Valkey
|
||||
VALKEY_URL=redis://valkey:6379
|
||||
VALKEY_HOST=valkey
|
||||
VALKEY_PORT=6379
|
||||
# VALKEY_PASSWORD= # Optional: Password for Valkey authentication
|
||||
VALKEY_MAXMEMORY=256mb
|
||||
|
||||
# External Redis/Valkey (managed service)
|
||||
# To use an external instance, update VALKEY_URL above
|
||||
# Example: VALKEY_URL=redis://elasticache.amazonaws.com:6379
|
||||
# Example with auth: VALKEY_URL=redis://:password@redis.example.com:6379
|
||||
|
||||
# Knowledge Module Cache Configuration
|
||||
# Set KNOWLEDGE_CACHE_ENABLED=false to disable caching (useful for development)
|
||||
KNOWLEDGE_CACHE_ENABLED=true
|
||||
@@ -49,14 +68,19 @@ KNOWLEDGE_CACHE_TTL=300
|
||||
# ======================
|
||||
# Authentication (Authentik OIDC)
|
||||
# ======================
|
||||
# Authentik Server URLs
|
||||
# Set to 'true' to enable OIDC authentication with Authentik
|
||||
# When enabled, OIDC_ISSUER, OIDC_CLIENT_ID, OIDC_CLIENT_SECRET, and OIDC_REDIRECT_URI are required
|
||||
OIDC_ENABLED=false
|
||||
|
||||
# Authentik Server URLs (required when OIDC_ENABLED=true)
|
||||
# OIDC_ISSUER must end with a trailing slash (/)
|
||||
OIDC_ISSUER=https://auth.example.com/application/o/mosaic-stack/
|
||||
OIDC_CLIENT_ID=your-client-id-here
|
||||
OIDC_CLIENT_SECRET=your-client-secret-here
|
||||
# Redirect URI must match what's configured in Authentik
|
||||
# Development: http://localhost:3001/auth/callback/authentik
|
||||
# Production: https://api.mosaicstack.dev/auth/callback/authentik
|
||||
OIDC_REDIRECT_URI=http://localhost:3001/auth/callback/authentik
|
||||
# Development: http://localhost:3001/auth/oauth2/callback/authentik
|
||||
# Production: https://mosaic-api.woltje.com/auth/oauth2/callback/authentik
|
||||
OIDC_REDIRECT_URI=http://localhost:3001/auth/oauth2/callback/authentik
|
||||
|
||||
# Authentik PostgreSQL Database
|
||||
AUTHENTIK_POSTGRES_USER=authentik
|
||||
@@ -77,6 +101,14 @@ AUTHENTIK_COOKIE_DOMAIN=.localhost
|
||||
AUTHENTIK_PORT_HTTP=9000
|
||||
AUTHENTIK_PORT_HTTPS=9443
|
||||
|
||||
# ======================
|
||||
# CSRF Protection
|
||||
# ======================
|
||||
# CRITICAL: Generate a random secret for CSRF token signing
|
||||
# Required in production; auto-generated in development (not persistent across restarts)
|
||||
# Command to generate: node -e "console.log(require('crypto').randomBytes(32).toString('hex'))"
|
||||
CSRF_SECRET=REPLACE_WITH_64_CHAR_HEX_STRING
|
||||
|
||||
# ======================
|
||||
# JWT Configuration
|
||||
# ======================
|
||||
@@ -85,6 +117,62 @@ AUTHENTIK_PORT_HTTPS=9443
|
||||
JWT_SECRET=REPLACE_WITH_RANDOM_SECRET_MINIMUM_32_CHARS
|
||||
JWT_EXPIRATION=24h
|
||||
|
||||
# ======================
|
||||
# BetterAuth Configuration
|
||||
# ======================
|
||||
# CRITICAL: Generate a random secret key with at least 32 characters
|
||||
# This is used by BetterAuth for session management and CSRF protection
|
||||
# Example: openssl rand -base64 32
|
||||
BETTER_AUTH_SECRET=REPLACE_WITH_RANDOM_SECRET_MINIMUM_32_CHARS
|
||||
# Optional explicit BetterAuth origin for callback/error URL generation.
|
||||
# When empty, backend falls back to NEXT_PUBLIC_API_URL.
|
||||
BETTER_AUTH_URL=
|
||||
|
||||
# Trusted Origins (comma-separated list of additional trusted origins for CORS and auth)
|
||||
# These are added to NEXT_PUBLIC_APP_URL and NEXT_PUBLIC_API_URL automatically
|
||||
TRUSTED_ORIGINS=
|
||||
|
||||
# Cookie Domain (for cross-subdomain session sharing)
|
||||
# Leave empty for single-domain setups. Set to ".example.com" for cross-subdomain.
|
||||
COOKIE_DOMAIN=
|
||||
|
||||
# ======================
|
||||
# Encryption (Credential Security)
|
||||
# ======================
|
||||
# CRITICAL: Generate a random 32-byte (256-bit) encryption key
|
||||
# This key is used for AES-256-GCM encryption of OAuth tokens and sensitive data
|
||||
# Command to generate: openssl rand -hex 32
|
||||
# SECURITY: Never commit this key to version control
|
||||
# SECURITY: Use different keys for development, staging, and production
|
||||
# SECURITY: Store production keys in a secure secrets manager (see docs/design/credential-security.md)
|
||||
ENCRYPTION_KEY=REPLACE_WITH_64_CHAR_HEX_STRING_GENERATE_WITH_OPENSSL_RAND_HEX_32
|
||||
|
||||
# ======================
|
||||
# OpenBao Secrets Management
|
||||
# ======================
|
||||
# OpenBao provides Transit encryption for sensitive credentials
|
||||
# Enable with: COMPOSE_PROFILES=openbao or COMPOSE_PROFILES=full
|
||||
# Auto-initialized on first run via openbao-init sidecar
|
||||
|
||||
# Bundled OpenBao (when openbao profile enabled)
|
||||
OPENBAO_ADDR=http://openbao:8200
|
||||
OPENBAO_PORT=8200
|
||||
|
||||
# External OpenBao/Vault (managed service)
|
||||
# Disable 'openbao' profile and set OPENBAO_ADDR to your external instance
|
||||
# Example: OPENBAO_ADDR=https://vault.example.com:8200
|
||||
# Example: OPENBAO_ADDR=https://vault.hashicorp.com:8200
|
||||
|
||||
# AppRole Authentication (Optional)
|
||||
# If not set, credentials are read from /openbao/init/approle-credentials volume
|
||||
# Required when using external OpenBao
|
||||
# OPENBAO_ROLE_ID=your-role-id-here
|
||||
# OPENBAO_SECRET_ID=your-secret-id-here
|
||||
|
||||
# Fallback Mode
|
||||
# When OpenBao is unavailable, API automatically falls back to AES-256-GCM
|
||||
# encryption using ENCRYPTION_KEY. This provides graceful degradation.
|
||||
|
||||
# ======================
|
||||
# Ollama (Optional AI Service)
|
||||
# ======================
|
||||
@@ -120,15 +208,36 @@ SEMANTIC_SEARCH_SIMILARITY_THRESHOLD=0.5
|
||||
# ======================
|
||||
NODE_ENV=development
|
||||
|
||||
# ======================
|
||||
# Docker Image Configuration
|
||||
# ======================
|
||||
# Docker image tag for pulling pre-built images from git.mosaicstack.dev registry
|
||||
# Used by docker-compose.yml (pulls images) and docker-swarm.yml
|
||||
# For local builds, use docker-compose.build.yml instead
|
||||
# Options:
|
||||
# - latest: Pull latest images from registry (default, built from main branch)
|
||||
# - <version>: Use specific version tag (e.g., v1.0.0)
|
||||
IMAGE_TAG=latest
|
||||
|
||||
# ======================
|
||||
# Docker Compose Profiles
|
||||
# ======================
|
||||
# Uncomment to enable optional services:
|
||||
# COMPOSE_PROFILES=authentik,ollama # Enable both Authentik and Ollama
|
||||
# COMPOSE_PROFILES=full # Enable all optional services
|
||||
# COMPOSE_PROFILES=authentik # Enable only Authentik
|
||||
# COMPOSE_PROFILES=ollama # Enable only Ollama
|
||||
# COMPOSE_PROFILES=traefik-bundled # Enable bundled Traefik reverse proxy
|
||||
# Enable optional services via profiles. Combine multiple profiles with commas.
|
||||
#
|
||||
# Available profiles:
|
||||
# - database: PostgreSQL database (disable to use external database)
|
||||
# - cache: Valkey cache (disable to use external Redis)
|
||||
# - openbao: OpenBao secrets management (disable to use external vault or fallback encryption)
|
||||
# - authentik: Authentik OIDC authentication (disable to use external auth provider)
|
||||
# - ollama: Ollama AI/LLM service (disable to use external LLM service)
|
||||
# - traefik-bundled: Bundled Traefik reverse proxy (disable to use external proxy)
|
||||
# - full: Enable all optional services (turnkey deployment)
|
||||
#
|
||||
# Examples:
|
||||
# COMPOSE_PROFILES=full # Everything bundled (development)
|
||||
# COMPOSE_PROFILES=database,cache,openbao # Core services only
|
||||
# COMPOSE_PROFILES= # All external services (production)
|
||||
COMPOSE_PROFILES=full
|
||||
|
||||
# ======================
|
||||
# Traefik Reverse Proxy
|
||||
@@ -144,12 +253,16 @@ MOSAIC_API_DOMAIN=api.mosaic.local
|
||||
MOSAIC_WEB_DOMAIN=mosaic.local
|
||||
MOSAIC_AUTH_DOMAIN=auth.mosaic.local
|
||||
|
||||
# External Traefik network name (for upstream mode)
|
||||
# External Traefik network name (for upstream mode and swarm)
|
||||
# Must match the network name of your existing Traefik instance
|
||||
TRAEFIK_NETWORK=traefik-public
|
||||
TRAEFIK_DOCKER_NETWORK=traefik-public
|
||||
|
||||
# TLS/SSL Configuration
|
||||
TRAEFIK_TLS_ENABLED=true
|
||||
TRAEFIK_ENTRYPOINT=websecure
|
||||
# Cert resolver name (leave empty if TLS is handled externally or using self-signed certs)
|
||||
TRAEFIK_CERTRESOLVER=
|
||||
# For Let's Encrypt (production):
|
||||
TRAEFIK_ACME_EMAIL=admin@example.com
|
||||
# For self-signed certificates (development), leave TRAEFIK_ACME_EMAIL empty
|
||||
@@ -185,6 +298,15 @@ GITEA_WEBHOOK_SECRET=REPLACE_WITH_RANDOM_WEBHOOK_SECRET
|
||||
# The coordinator service uses this key to authenticate with the API
|
||||
COORDINATOR_API_KEY=REPLACE_WITH_RANDOM_API_KEY_MINIMUM_32_CHARS
|
||||
|
||||
# Anthropic API Key (used by coordinator for issue parsing)
|
||||
# Get your API key from: https://console.anthropic.com/
|
||||
ANTHROPIC_API_KEY=REPLACE_WITH_ANTHROPIC_API_KEY
|
||||
|
||||
# Coordinator tuning
|
||||
COORDINATOR_POLL_INTERVAL=5.0
|
||||
COORDINATOR_MAX_CONCURRENT_AGENTS=10
|
||||
COORDINATOR_ENABLED=true
|
||||
|
||||
# ======================
|
||||
# Rate Limiting
|
||||
# ======================
|
||||
@@ -192,17 +314,19 @@ COORDINATOR_API_KEY=REPLACE_WITH_RANDOM_API_KEY_MINIMUM_32_CHARS
|
||||
# TTL is in seconds, limits are per TTL window
|
||||
|
||||
# Global rate limit (applies to all endpoints unless overridden)
|
||||
RATE_LIMIT_TTL=60 # Time window in seconds
|
||||
RATE_LIMIT_GLOBAL_LIMIT=100 # Requests per window
|
||||
# Time window in seconds
|
||||
RATE_LIMIT_TTL=60
|
||||
# Requests per window
|
||||
RATE_LIMIT_GLOBAL_LIMIT=100
|
||||
|
||||
# Webhook endpoints (/stitcher/webhook, /stitcher/dispatch)
|
||||
RATE_LIMIT_WEBHOOK_LIMIT=60 # Requests per minute
|
||||
# Webhook endpoints (/stitcher/webhook, /stitcher/dispatch) — requests per minute
|
||||
RATE_LIMIT_WEBHOOK_LIMIT=60
|
||||
|
||||
# Coordinator endpoints (/coordinator/*)
|
||||
RATE_LIMIT_COORDINATOR_LIMIT=100 # Requests per minute
|
||||
# Coordinator endpoints (/coordinator/*) — requests per minute
|
||||
RATE_LIMIT_COORDINATOR_LIMIT=100
|
||||
|
||||
# Health check endpoints (/coordinator/health)
|
||||
RATE_LIMIT_HEALTH_LIMIT=300 # Requests per minute (higher for monitoring)
|
||||
# Health check endpoints (/coordinator/health) — requests per minute (higher for monitoring)
|
||||
RATE_LIMIT_HEALTH_LIMIT=300
|
||||
|
||||
# Storage backend for rate limiting (redis or memory)
|
||||
# redis: Uses Valkey for distributed rate limiting (recommended for production)
|
||||
@@ -224,6 +348,152 @@ RATE_LIMIT_STORAGE=redis
|
||||
# multi-tenant isolation. Each Discord bot instance should be configured for
|
||||
# a single workspace.
|
||||
|
||||
# ======================
|
||||
# Matrix Bridge (Optional)
|
||||
# ======================
|
||||
# Matrix bot integration for chat-based control via Matrix protocol
|
||||
# Requires a Matrix account with an access token for the bot user
|
||||
# Set these AFTER deploying Synapse and creating the bot account.
|
||||
#
|
||||
# SECURITY: MATRIX_WORKSPACE_ID must be a valid workspace UUID from your database.
|
||||
# All Matrix commands will execute within this workspace context for proper
|
||||
# multi-tenant isolation. Each Matrix bot instance should be configured for
|
||||
# a single workspace.
|
||||
MATRIX_HOMESERVER_URL=http://synapse:8008
|
||||
MATRIX_ACCESS_TOKEN=
|
||||
MATRIX_BOT_USER_ID=@mosaic-bot:matrix.woltje.com
|
||||
MATRIX_SERVER_NAME=matrix.woltje.com
|
||||
# MATRIX_CONTROL_ROOM_ID=!roomid:matrix.woltje.com
|
||||
# MATRIX_WORKSPACE_ID=your-workspace-uuid
|
||||
|
||||
# ======================
|
||||
# Matrix / Synapse Deployment
|
||||
# ======================
|
||||
# Domains for Traefik routing to Matrix services
|
||||
MATRIX_DOMAIN=matrix.woltje.com
|
||||
ELEMENT_DOMAIN=chat.woltje.com
|
||||
|
||||
# Synapse database (created automatically by synapse-db-init in the swarm compose)
|
||||
SYNAPSE_POSTGRES_DB=synapse
|
||||
SYNAPSE_POSTGRES_USER=synapse
|
||||
SYNAPSE_POSTGRES_PASSWORD=REPLACE_WITH_SECURE_SYNAPSE_DB_PASSWORD
|
||||
|
||||
# Image tags for Matrix services
|
||||
SYNAPSE_IMAGE_TAG=latest
|
||||
ELEMENT_IMAGE_TAG=latest
|
||||
|
||||
# ======================
|
||||
# Orchestrator Configuration
|
||||
# ======================
|
||||
# API Key for orchestrator agent management endpoints
|
||||
# CRITICAL: Generate a random API key with at least 32 characters
|
||||
# Example: openssl rand -base64 32
|
||||
# Required for all /agents/* endpoints (spawn, kill, kill-all, status)
|
||||
# Health endpoints (/health/*) remain unauthenticated
|
||||
ORCHESTRATOR_API_KEY=REPLACE_WITH_RANDOM_API_KEY_MINIMUM_32_CHARS
|
||||
|
||||
# Runtime safety defaults (recommended for low-memory hosts)
|
||||
MAX_CONCURRENT_AGENTS=2
|
||||
SESSION_CLEANUP_DELAY_MS=30000
|
||||
ORCHESTRATOR_QUEUE_NAME=orchestrator-tasks
|
||||
ORCHESTRATOR_QUEUE_CONCURRENCY=1
|
||||
ORCHESTRATOR_QUEUE_MAX_RETRIES=3
|
||||
ORCHESTRATOR_QUEUE_BASE_DELAY_MS=1000
|
||||
ORCHESTRATOR_QUEUE_MAX_DELAY_MS=60000
|
||||
SANDBOX_DEFAULT_MEMORY_MB=256
|
||||
SANDBOX_DEFAULT_CPU_LIMIT=1.0
|
||||
|
||||
# ======================
|
||||
# AI Provider Configuration
|
||||
# ======================
|
||||
# Choose the AI provider for orchestrator agents
|
||||
# Options: ollama, claude, openai
|
||||
# Default: ollama (no API key required)
|
||||
AI_PROVIDER=ollama
|
||||
|
||||
# Ollama Configuration (when AI_PROVIDER=ollama)
|
||||
# For local Ollama: http://localhost:11434
|
||||
# For remote Ollama: http://your-ollama-server:11434
|
||||
OLLAMA_MODEL=llama3.1:latest
|
||||
|
||||
# Claude API Key
|
||||
# Required only when AI_PROVIDER=claude.
|
||||
# Get your API key from: https://console.anthropic.com/
|
||||
CLAUDE_API_KEY=REPLACE_WITH_CLAUDE_API_KEY
|
||||
|
||||
# OpenAI API Configuration (when AI_PROVIDER=openai)
|
||||
# OPTIONAL: Only required if AI_PROVIDER=openai
|
||||
# Get your API key from: https://platform.openai.com/api-keys
|
||||
# OPENAI_API_KEY=sk-...
|
||||
|
||||
# ======================
|
||||
# Speech Services (STT / TTS)
|
||||
# ======================
|
||||
# Speech-to-Text (STT) - Whisper via Speaches
|
||||
# Set STT_ENABLED=true to enable speech-to-text transcription
|
||||
# STT_BASE_URL is required when STT_ENABLED=true
|
||||
STT_ENABLED=true
|
||||
STT_BASE_URL=http://speaches:8000/v1
|
||||
STT_MODEL=Systran/faster-whisper-large-v3-turbo
|
||||
STT_LANGUAGE=en
|
||||
|
||||
# Text-to-Speech (TTS) - Default Engine (Kokoro)
|
||||
# Set TTS_ENABLED=true to enable text-to-speech synthesis
|
||||
# TTS_DEFAULT_URL is required when TTS_ENABLED=true
|
||||
TTS_ENABLED=true
|
||||
TTS_DEFAULT_URL=http://kokoro-tts:8880/v1
|
||||
TTS_DEFAULT_VOICE=af_heart
|
||||
TTS_DEFAULT_FORMAT=mp3
|
||||
|
||||
# Text-to-Speech (TTS) - Premium Engine (Chatterbox) - Optional
|
||||
# Higher quality voice cloning engine, disabled by default
|
||||
# TTS_PREMIUM_URL is required when TTS_PREMIUM_ENABLED=true
|
||||
TTS_PREMIUM_ENABLED=false
|
||||
TTS_PREMIUM_URL=http://chatterbox-tts:8881/v1
|
||||
|
||||
# Text-to-Speech (TTS) - Fallback Engine (Piper/OpenedAI) - Optional
|
||||
# Lightweight fallback engine, disabled by default
|
||||
# TTS_FALLBACK_URL is required when TTS_FALLBACK_ENABLED=true
|
||||
TTS_FALLBACK_ENABLED=false
|
||||
TTS_FALLBACK_URL=http://openedai-speech:8000/v1
|
||||
|
||||
# Whisper model for Speaches STT engine
|
||||
SPEACHES_WHISPER_MODEL=Systran/faster-whisper-large-v3-turbo
|
||||
|
||||
# Speech Service Limits
|
||||
# Maximum upload file size in bytes (default: 25MB)
|
||||
SPEECH_MAX_UPLOAD_SIZE=25000000
|
||||
# Maximum audio duration in seconds (default: 600 = 10 minutes)
|
||||
SPEECH_MAX_DURATION_SECONDS=600
|
||||
# Maximum text length for TTS in characters (default: 4096)
|
||||
SPEECH_MAX_TEXT_LENGTH=4096
|
||||
|
||||
# ======================
|
||||
# Mosaic Telemetry (Task Completion Tracking & Predictions)
|
||||
# ======================
|
||||
# Telemetry tracks task completion patterns to provide time estimates and predictions.
|
||||
# Data is sent to the Mosaic Telemetry API (a separate service).
|
||||
|
||||
# Master switch: set to false to completely disable telemetry (no HTTP calls will be made)
|
||||
MOSAIC_TELEMETRY_ENABLED=true
|
||||
|
||||
# URL of the telemetry API server
|
||||
# For Docker Compose (internal): http://telemetry-api:8000
|
||||
# For production/swarm: https://tel-api.mosaicstack.dev
|
||||
MOSAIC_TELEMETRY_SERVER_URL=http://telemetry-api:8000
|
||||
|
||||
# API key for authenticating with the telemetry server
|
||||
# Generate with: openssl rand -hex 32
|
||||
MOSAIC_TELEMETRY_API_KEY=your-64-char-hex-api-key-here
|
||||
|
||||
# Unique identifier for this Mosaic Stack instance
|
||||
# Generate with: uuidgen or python -c "import uuid; print(uuid.uuid4())"
|
||||
MOSAIC_TELEMETRY_INSTANCE_ID=your-instance-uuid-here
|
||||
|
||||
# Dry run mode: set to true to log telemetry events to console instead of sending HTTP requests
|
||||
# Useful for development and debugging telemetry payloads
|
||||
MOSAIC_TELEMETRY_DRY_RUN=false
|
||||
|
||||
# ======================
|
||||
# Logging & Debugging
|
||||
# ======================
|
||||
|
||||
@@ -1,66 +0,0 @@
|
||||
# ==============================================
|
||||
# Mosaic Stack Production Environment
|
||||
# ==============================================
|
||||
# Copy to .env and configure for production deployment
|
||||
|
||||
# ======================
|
||||
# PostgreSQL Database
|
||||
# ======================
|
||||
# CRITICAL: Use a strong, unique password
|
||||
POSTGRES_USER=mosaic
|
||||
POSTGRES_PASSWORD=REPLACE_WITH_SECURE_PASSWORD
|
||||
POSTGRES_DB=mosaic
|
||||
POSTGRES_SHARED_BUFFERS=256MB
|
||||
POSTGRES_EFFECTIVE_CACHE_SIZE=1GB
|
||||
POSTGRES_MAX_CONNECTIONS=100
|
||||
|
||||
# ======================
|
||||
# Valkey Cache
|
||||
# ======================
|
||||
VALKEY_MAXMEMORY=256mb
|
||||
|
||||
# ======================
|
||||
# API Configuration
|
||||
# ======================
|
||||
API_PORT=3001
|
||||
API_HOST=0.0.0.0
|
||||
|
||||
# ======================
|
||||
# Web Configuration
|
||||
# ======================
|
||||
WEB_PORT=3000
|
||||
NEXT_PUBLIC_API_URL=https://api.mosaicstack.dev
|
||||
|
||||
# ======================
|
||||
# Authentication (Authentik OIDC)
|
||||
# ======================
|
||||
OIDC_ISSUER=https://auth.diversecanvas.com/application/o/mosaic-stack/
|
||||
OIDC_CLIENT_ID=your-client-id
|
||||
OIDC_CLIENT_SECRET=your-client-secret
|
||||
OIDC_REDIRECT_URI=https://api.mosaicstack.dev/auth/callback/authentik
|
||||
|
||||
# ======================
|
||||
# JWT Configuration
|
||||
# ======================
|
||||
# CRITICAL: Generate a random secret (openssl rand -base64 32)
|
||||
JWT_SECRET=REPLACE_WITH_RANDOM_SECRET
|
||||
JWT_EXPIRATION=24h
|
||||
|
||||
# ======================
|
||||
# Traefik Integration
|
||||
# ======================
|
||||
# Set to true if using external Traefik
|
||||
TRAEFIK_ENABLE=true
|
||||
TRAEFIK_ENTRYPOINT=websecure
|
||||
TRAEFIK_TLS_ENABLED=true
|
||||
TRAEFIK_DOCKER_NETWORK=traefik-public
|
||||
TRAEFIK_CERTRESOLVER=letsencrypt
|
||||
|
||||
# Domain configuration
|
||||
MOSAIC_API_DOMAIN=api.mosaicstack.dev
|
||||
MOSAIC_WEB_DOMAIN=app.mosaicstack.dev
|
||||
|
||||
# ======================
|
||||
# Optional: Ollama
|
||||
# ======================
|
||||
# OLLAMA_ENDPOINT=http://ollama.diversecanvas.com:11434
|
||||
15
.gitignore
vendored
15
.gitignore
vendored
@@ -30,10 +30,12 @@ Thumbs.db
|
||||
# Environment
|
||||
.env
|
||||
.env.local
|
||||
.env.test
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
.env.bak.*
|
||||
*.bak
|
||||
|
||||
# Credentials (never commit)
|
||||
.admin-credentials
|
||||
@@ -54,3 +56,16 @@ yarn-error.log*
|
||||
|
||||
# Husky
|
||||
.husky/_
|
||||
|
||||
# Orchestrator reports (generated by QA automation, cleaned up after processing)
|
||||
docs/reports/qa-automation/
|
||||
|
||||
# Repo-local orchestrator runtime artifacts
|
||||
.mosaic/orchestrator/orchestrator.pid
|
||||
.mosaic/orchestrator/state.json
|
||||
.mosaic/orchestrator/tasks.json
|
||||
.mosaic/orchestrator/matrix_state.json
|
||||
.mosaic/orchestrator/logs/*.log
|
||||
.mosaic/orchestrator/results/*
|
||||
!.mosaic/orchestrator/logs/.gitkeep
|
||||
!.mosaic/orchestrator/results/.gitkeep
|
||||
|
||||
15
.mosaic/README.md
Normal file
15
.mosaic/README.md
Normal file
@@ -0,0 +1,15 @@
|
||||
# Repo Mosaic Linkage
|
||||
|
||||
This repository is attached to the machine-wide Mosaic framework.
|
||||
|
||||
## Load Order for Agents
|
||||
|
||||
1. `~/.config/mosaic/STANDARDS.md`
|
||||
2. `AGENTS.md` (this repository)
|
||||
3. `.mosaic/repo-hooks.sh` (repo-specific automation hooks)
|
||||
|
||||
## Purpose
|
||||
|
||||
- Keep universal standards in `~/.config/mosaic`
|
||||
- Keep repo-specific behavior in this repo
|
||||
- Avoid copying large runtime configs into each project
|
||||
18
.mosaic/orchestrator/config.json
Normal file
18
.mosaic/orchestrator/config.json
Normal file
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"enabled": true,
|
||||
"transport": "matrix",
|
||||
"matrix": {
|
||||
"control_room_id": "",
|
||||
"workspace_id": "",
|
||||
"homeserver_url": "",
|
||||
"access_token": "",
|
||||
"bot_user_id": ""
|
||||
},
|
||||
"worker": {
|
||||
"runtime": "codex",
|
||||
"command_template": "bash scripts/agent/orchestrator-worker.sh {task_file}",
|
||||
"timeout_seconds": 7200,
|
||||
"max_attempts": 1
|
||||
},
|
||||
"quality_gates": ["pnpm lint", "pnpm typecheck", "pnpm test"]
|
||||
}
|
||||
1
.mosaic/orchestrator/logs/.gitkeep
Normal file
1
.mosaic/orchestrator/logs/.gitkeep
Normal file
@@ -0,0 +1 @@
|
||||
|
||||
90
.mosaic/orchestrator/mission.json
Normal file
90
.mosaic/orchestrator/mission.json
Normal file
@@ -0,0 +1,90 @@
|
||||
{
|
||||
"schema_version": 1,
|
||||
"mission_id": "ms21-multi-tenant-rbac-data-migration-20260228",
|
||||
"name": "MS21 Multi-Tenant RBAC Data Migration",
|
||||
"description": "Build multi-tenant user/workspace/team management, break-glass auth, RBAC UI enforcement, and migrate jarvis-brain data into Mosaic Stack",
|
||||
"project_path": "/home/jwoltje/src/mosaic-stack",
|
||||
"created_at": "2026-02-28T17:10:22Z",
|
||||
"status": "active",
|
||||
"task_prefix": "MS21",
|
||||
"quality_gates": "pnpm lint && pnpm build && pnpm test",
|
||||
"milestone_version": "0.0.21",
|
||||
"milestones": [
|
||||
{
|
||||
"id": "phase-1",
|
||||
"name": "Schema and Admin API",
|
||||
"status": "pending",
|
||||
"branch": "schema-and-admin-api",
|
||||
"issue_ref": "",
|
||||
"started_at": "",
|
||||
"completed_at": ""
|
||||
},
|
||||
{
|
||||
"id": "phase-2",
|
||||
"name": "Break-Glass Authentication",
|
||||
"status": "pending",
|
||||
"branch": "break-glass-authentication",
|
||||
"issue_ref": "",
|
||||
"started_at": "",
|
||||
"completed_at": ""
|
||||
},
|
||||
{
|
||||
"id": "phase-3",
|
||||
"name": "Data Migration",
|
||||
"status": "pending",
|
||||
"branch": "data-migration",
|
||||
"issue_ref": "",
|
||||
"started_at": "",
|
||||
"completed_at": ""
|
||||
},
|
||||
{
|
||||
"id": "phase-4",
|
||||
"name": "Admin UI",
|
||||
"status": "pending",
|
||||
"branch": "admin-ui",
|
||||
"issue_ref": "",
|
||||
"started_at": "",
|
||||
"completed_at": ""
|
||||
},
|
||||
{
|
||||
"id": "phase-5",
|
||||
"name": "RBAC UI Enforcement",
|
||||
"status": "pending",
|
||||
"branch": "rbac-ui-enforcement",
|
||||
"issue_ref": "",
|
||||
"started_at": "",
|
||||
"completed_at": ""
|
||||
},
|
||||
{
|
||||
"id": "phase-6",
|
||||
"name": "Verification",
|
||||
"status": "pending",
|
||||
"branch": "verification",
|
||||
"issue_ref": "",
|
||||
"started_at": "",
|
||||
"completed_at": ""
|
||||
}
|
||||
],
|
||||
"sessions": [
|
||||
{
|
||||
"session_id": "sess-001",
|
||||
"runtime": "unknown",
|
||||
"started_at": "2026-02-28T17:48:51Z",
|
||||
"ended_at": "",
|
||||
"ended_reason": "",
|
||||
"milestone_at_end": "",
|
||||
"tasks_completed": [],
|
||||
"last_task_id": ""
|
||||
},
|
||||
{
|
||||
"session_id": "sess-002",
|
||||
"runtime": "unknown",
|
||||
"started_at": "2026-02-28T20:30:13Z",
|
||||
"ended_at": "",
|
||||
"ended_reason": "",
|
||||
"milestone_at_end": "",
|
||||
"tasks_completed": [],
|
||||
"last_task_id": ""
|
||||
}
|
||||
]
|
||||
}
|
||||
1
.mosaic/orchestrator/results/.gitkeep
Normal file
1
.mosaic/orchestrator/results/.gitkeep
Normal file
@@ -0,0 +1 @@
|
||||
|
||||
8
.mosaic/orchestrator/session.lock
Normal file
8
.mosaic/orchestrator/session.lock
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"session_id": "sess-002",
|
||||
"runtime": "unknown",
|
||||
"pid": 3178395,
|
||||
"started_at": "2026-02-28T20:30:13Z",
|
||||
"project_path": "/tmp/ms21-ui-001",
|
||||
"milestone_id": ""
|
||||
}
|
||||
10
.mosaic/quality-rails.yml
Normal file
10
.mosaic/quality-rails.yml
Normal file
@@ -0,0 +1,10 @@
|
||||
enabled: false
|
||||
template: ""
|
||||
|
||||
# Set enabled: true and choose one template:
|
||||
# - typescript-node
|
||||
# - typescript-nextjs
|
||||
# - monorepo
|
||||
#
|
||||
# Apply manually:
|
||||
# ~/.config/mosaic/bin/mosaic-quality-apply --template <template> --target <repo>
|
||||
29
.mosaic/repo-hooks.sh
Executable file
29
.mosaic/repo-hooks.sh
Executable file
@@ -0,0 +1,29 @@
|
||||
#!/usr/bin/env bash
|
||||
# Repo-specific hooks used by scripts/agent/*.sh for Mosaic Stack.
|
||||
|
||||
mosaic_hook_session_start() {
|
||||
echo "[mosaic-stack] Branch: $(git rev-parse --abbrev-ref HEAD)"
|
||||
echo "[mosaic-stack] Remotes:"
|
||||
git remote -v | sed 's/^/[mosaic-stack] /'
|
||||
if command -v node >/dev/null 2>&1; then
|
||||
echo "[mosaic-stack] Node: $(node -v)"
|
||||
fi
|
||||
if command -v pnpm >/dev/null 2>&1; then
|
||||
echo "[mosaic-stack] pnpm: $(pnpm -v)"
|
||||
fi
|
||||
}
|
||||
|
||||
mosaic_hook_critical() {
|
||||
echo "[mosaic-stack] Recent commits:"
|
||||
git log --oneline --decorate -n 5 | sed 's/^/[mosaic-stack] /'
|
||||
echo "[mosaic-stack] Open TODO/FIXME markers (top 20):"
|
||||
rg -n "(TODO|FIXME|HACK|SECURITY)" apps packages plugins docs --glob '!**/node_modules/**' -S \
|
||||
| head -n 20 \
|
||||
| sed 's/^/[mosaic-stack] /' \
|
||||
|| true
|
||||
}
|
||||
|
||||
mosaic_hook_session_end() {
|
||||
echo "[mosaic-stack] Working tree summary:"
|
||||
git status --short | sed 's/^/[mosaic-stack] /' || true
|
||||
}
|
||||
1
.npmrc
Normal file
1
.npmrc
Normal file
@@ -0,0 +1 @@
|
||||
@mosaicstack:registry=https://git.mosaicstack.dev/api/packages/mosaic/npm/
|
||||
42
.trivyignore
Normal file
42
.trivyignore
Normal file
@@ -0,0 +1,42 @@
|
||||
# Trivy CVE Suppressions — Upstream Dependencies
|
||||
# Reviewed: 2026-02-13 | Milestone: M11-CIPipeline
|
||||
#
|
||||
# MITIGATED:
|
||||
# - Go stdlib CVEs (6): gosu rebuilt from source with Go 1.26
|
||||
# - npm bundled CVEs (5): npm removed from production Node.js images
|
||||
# - Node.js 20 → 24 LTS migration (#367): base images updated
|
||||
#
|
||||
# REMAINING: OpenBao (5 CVEs) + Next.js bundled tar/minimatch (5 CVEs)
|
||||
# Re-evaluate when upgrading openbao image beyond 2.5.0 or Next.js beyond 16.1.6.
|
||||
|
||||
# === OpenBao false positives ===
|
||||
# Trivy reads Go module pseudo-version (v0.0.0-20260204...) from bin/bao
|
||||
# and reports CVEs fixed in openbao 2.0.3–2.4.4. We run openbao:2.5.0.
|
||||
CVE-2024-8185 # HIGH: DoS via Raft join (fixed in 2.0.3)
|
||||
CVE-2024-9180 # HIGH: privilege escalation (fixed in 2.0.3)
|
||||
CVE-2025-59043 # HIGH: DoS via malicious JSON (fixed in 2.4.1)
|
||||
CVE-2025-64761 # HIGH: identity group root escalation (fixed in 2.4.4)
|
||||
|
||||
# === Next.js bundled tar/minimatch CVEs (upstream — waiting on Next.js release) ===
|
||||
# Next.js 16.1.6 bundles tar@7.5.2 and minimatch@9.0.5 in next/dist/compiled/ (pre-compiled).
|
||||
# These are NOT pnpm dependencies — they're embedded in the Next.js package itself.
|
||||
# pnpm overrides cannot reach these; only a Next.js upgrade can fix them.
|
||||
# Affects web image only (orchestrator and API are clean).
|
||||
# npm was also removed from all production images, eliminating the npm-bundled copy.
|
||||
# To resolve: upgrade Next.js when a release bundles tar >= 7.5.8 and minimatch >= 10.2.1.
|
||||
CVE-2026-23745 # HIGH: tar arbitrary file overwrite via unsanitized linkpaths (fixed in 7.5.3)
|
||||
CVE-2026-23950 # HIGH: tar arbitrary file overwrite via Unicode path collision (fixed in 7.5.4)
|
||||
CVE-2026-24842 # HIGH: tar arbitrary file creation via hardlink path traversal (needs tar >= 7.5.7)
|
||||
CVE-2026-26960 # HIGH: tar arbitrary file read/write via malicious archive hardlink (needs tar >= 7.5.8)
|
||||
CVE-2026-26996 # HIGH: minimatch DoS via specially crafted glob patterns (needs minimatch >= 10.2.1)
|
||||
|
||||
# === OpenBao Go stdlib (waiting on upstream rebuild) ===
|
||||
# OpenBao 2.5.0 compiled with Go 1.25.6, fix needs Go >= 1.25.7.
|
||||
# Cannot build OpenBao from source (large project). Waiting for upstream release.
|
||||
CVE-2025-68121 # CRITICAL: crypto/tls session resumption
|
||||
|
||||
# === multer CVEs (upstream via @nestjs/platform-express) ===
|
||||
# multer <2.1.0 — waiting on NestJS to update their dependency
|
||||
# These are DoS vulnerabilities in file upload handling
|
||||
GHSA-xf7r-hgr6-v32p # HIGH: DoS via incomplete cleanup
|
||||
GHSA-v52c-386h-88mc # HIGH: DoS via resource exhaustion
|
||||
185
.woodpecker.yml
185
.woodpecker.yml
@@ -1,185 +0,0 @@
|
||||
# Woodpecker CI Quality Enforcement Pipeline - Monorepo
|
||||
when:
|
||||
- event: [push, pull_request, manual]
|
||||
|
||||
variables:
|
||||
- &node_image "node:20-alpine"
|
||||
- &install_deps |
|
||||
corepack enable
|
||||
pnpm install --frozen-lockfile
|
||||
- &use_deps |
|
||||
corepack enable
|
||||
# Kaniko base command setup
|
||||
- &kaniko_setup |
|
||||
mkdir -p /kaniko/.docker
|
||||
echo "{\"auths\":{\"reg.mosaicstack.dev\":{\"username\":\"$HARBOR_USER\",\"password\":\"$HARBOR_PASS\"}}}" > /kaniko/.docker/config.json
|
||||
|
||||
steps:
|
||||
install:
|
||||
image: *node_image
|
||||
commands:
|
||||
- *install_deps
|
||||
|
||||
security-audit:
|
||||
image: *node_image
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm audit --audit-level=high
|
||||
depends_on:
|
||||
- install
|
||||
|
||||
lint:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm lint || true # Non-blocking while fixing legacy code
|
||||
depends_on:
|
||||
- install
|
||||
when:
|
||||
- evaluate: 'CI_PIPELINE_EVENT != "pull_request" || CI_COMMIT_BRANCH != "main"'
|
||||
|
||||
prisma-generate:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm --filter "@mosaic/api" prisma:generate
|
||||
depends_on:
|
||||
- install
|
||||
|
||||
typecheck:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm typecheck
|
||||
depends_on:
|
||||
- prisma-generate
|
||||
|
||||
test:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm test || true # Non-blocking while fixing legacy tests
|
||||
depends_on:
|
||||
- prisma-generate
|
||||
|
||||
build:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
NODE_ENV: "production"
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm build
|
||||
depends_on:
|
||||
- typecheck # Only block on critical checks
|
||||
- security-audit
|
||||
- prisma-generate
|
||||
|
||||
# ======================
|
||||
# Docker Build & Push (main/develop only)
|
||||
# ======================
|
||||
# Requires secrets: harbor_username, harbor_password
|
||||
#
|
||||
# Tagging Strategy:
|
||||
# - Always: commit SHA (e.g., 658ec077)
|
||||
# - main branch: 'latest'
|
||||
# - develop branch: 'dev'
|
||||
# - git tags: version tag (e.g., v1.0.0)
|
||||
|
||||
# Build and push API image using Kaniko
|
||||
docker-build-api:
|
||||
image: gcr.io/kaniko-project/executor:debug
|
||||
environment:
|
||||
HARBOR_USER:
|
||||
from_secret: harbor_username
|
||||
HARBOR_PASS:
|
||||
from_secret: harbor_password
|
||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||
CI_COMMIT_SHA: ${CI_COMMIT_SHA}
|
||||
commands:
|
||||
- *kaniko_setup
|
||||
- |
|
||||
DESTINATIONS="--destination reg.mosaicstack.dev/mosaic/api:${CI_COMMIT_SHA:0:8}"
|
||||
if [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||
DESTINATIONS="$DESTINATIONS --destination reg.mosaicstack.dev/mosaic/api:latest"
|
||||
elif [ "$CI_COMMIT_BRANCH" = "develop" ]; then
|
||||
DESTINATIONS="$DESTINATIONS --destination reg.mosaicstack.dev/mosaic/api:dev"
|
||||
fi
|
||||
if [ -n "$CI_COMMIT_TAG" ]; then
|
||||
DESTINATIONS="$DESTINATIONS --destination reg.mosaicstack.dev/mosaic/api:$CI_COMMIT_TAG"
|
||||
fi
|
||||
/kaniko/executor --context . --dockerfile apps/api/Dockerfile $DESTINATIONS
|
||||
when:
|
||||
- branch: [main, develop]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- build
|
||||
|
||||
# Build and push Web image using Kaniko
|
||||
docker-build-web:
|
||||
image: gcr.io/kaniko-project/executor:debug
|
||||
environment:
|
||||
HARBOR_USER:
|
||||
from_secret: harbor_username
|
||||
HARBOR_PASS:
|
||||
from_secret: harbor_password
|
||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||
CI_COMMIT_SHA: ${CI_COMMIT_SHA}
|
||||
commands:
|
||||
- *kaniko_setup
|
||||
- |
|
||||
DESTINATIONS="--destination reg.mosaicstack.dev/mosaic/web:${CI_COMMIT_SHA:0:8}"
|
||||
if [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||
DESTINATIONS="$DESTINATIONS --destination reg.mosaicstack.dev/mosaic/web:latest"
|
||||
elif [ "$CI_COMMIT_BRANCH" = "develop" ]; then
|
||||
DESTINATIONS="$DESTINATIONS --destination reg.mosaicstack.dev/mosaic/web:dev"
|
||||
fi
|
||||
if [ -n "$CI_COMMIT_TAG" ]; then
|
||||
DESTINATIONS="$DESTINATIONS --destination reg.mosaicstack.dev/mosaic/web:$CI_COMMIT_TAG"
|
||||
fi
|
||||
/kaniko/executor --context . --dockerfile apps/web/Dockerfile --build-arg NEXT_PUBLIC_API_URL=https://api.mosaicstack.dev $DESTINATIONS
|
||||
when:
|
||||
- branch: [main, develop]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- build
|
||||
|
||||
# Build and push Postgres image using Kaniko
|
||||
docker-build-postgres:
|
||||
image: gcr.io/kaniko-project/executor:debug
|
||||
environment:
|
||||
HARBOR_USER:
|
||||
from_secret: harbor_username
|
||||
HARBOR_PASS:
|
||||
from_secret: harbor_password
|
||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||
CI_COMMIT_SHA: ${CI_COMMIT_SHA}
|
||||
commands:
|
||||
- *kaniko_setup
|
||||
- |
|
||||
DESTINATIONS="--destination reg.mosaicstack.dev/mosaic/postgres:${CI_COMMIT_SHA:0:8}"
|
||||
if [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||
DESTINATIONS="$DESTINATIONS --destination reg.mosaicstack.dev/mosaic/postgres:latest"
|
||||
elif [ "$CI_COMMIT_BRANCH" = "develop" ]; then
|
||||
DESTINATIONS="$DESTINATIONS --destination reg.mosaicstack.dev/mosaic/postgres:dev"
|
||||
fi
|
||||
if [ -n "$CI_COMMIT_TAG" ]; then
|
||||
DESTINATIONS="$DESTINATIONS --destination reg.mosaicstack.dev/mosaic/postgres:$CI_COMMIT_TAG"
|
||||
fi
|
||||
/kaniko/executor --context docker/postgres --dockerfile docker/postgres/Dockerfile $DESTINATIONS
|
||||
when:
|
||||
- branch: [main, develop]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- build
|
||||
141
.woodpecker/README.md
Normal file
141
.woodpecker/README.md
Normal file
@@ -0,0 +1,141 @@
|
||||
# Woodpecker CI Configuration for Mosaic Stack
|
||||
|
||||
## Pipeline Architecture
|
||||
|
||||
Split per-package pipelines with path filtering. Only affected packages rebuild on push.
|
||||
|
||||
```
|
||||
.woodpecker/
|
||||
├── api.yml # @mosaic/api (NestJS)
|
||||
├── web.yml # @mosaic/web (Next.js)
|
||||
├── orchestrator.yml # @mosaic/orchestrator (NestJS)
|
||||
├── coordinator.yml # mosaic-coordinator (Python/FastAPI)
|
||||
├── infra.yml # postgres + openbao Docker images
|
||||
├── codex-review.yml # AI code/security review (PRs only)
|
||||
├── README.md
|
||||
└── schemas/
|
||||
├── code-review-schema.json
|
||||
└── security-review-schema.json
|
||||
```
|
||||
|
||||
## Path Filtering
|
||||
|
||||
| Pipeline | Triggers On |
|
||||
| ------------------ | --------------------------------------------------- |
|
||||
| `api.yml` | `apps/api/**`, `packages/**`, root configs |
|
||||
| `web.yml` | `apps/web/**`, `packages/**`, root configs |
|
||||
| `orchestrator.yml` | `apps/orchestrator/**`, `packages/**`, root configs |
|
||||
| `coordinator.yml` | `apps/coordinator/**` |
|
||||
| `infra.yml` | `docker/**` |
|
||||
| `codex-review.yml` | All PRs (no path filter) |
|
||||
|
||||
**Root configs** = `pnpm-lock.yaml`, `pnpm-workspace.yaml`, `turbo.json`, `package.json`
|
||||
|
||||
## Security Chain
|
||||
|
||||
Every pipeline follows the full security chain required by the CI/CD guide:
|
||||
|
||||
```
|
||||
source scanning (lint + pnpm audit / bandit + pip-audit)
|
||||
-> docker build (Kaniko)
|
||||
-> container scanning (Trivy: HIGH,CRITICAL)
|
||||
-> package linking (Gitea registry)
|
||||
```
|
||||
|
||||
Docker builds gate on ALL quality + security steps passing.
|
||||
|
||||
## Pipeline Dependency Graphs
|
||||
|
||||
### Node.js Apps (api, web, orchestrator)
|
||||
|
||||
```
|
||||
install -> [security-audit, lint, prisma-generate*]
|
||||
prisma-generate* -> [typecheck, prisma-migrate*]
|
||||
prisma-migrate* -> test
|
||||
[all quality gates] -> build -> docker-build -> trivy -> link
|
||||
```
|
||||
|
||||
_\*prisma steps: api.yml only_
|
||||
|
||||
### Coordinator (Python)
|
||||
|
||||
```
|
||||
install -> [ruff-check, mypy, security-bandit, security-pip-audit, test]
|
||||
[all quality gates] -> docker-build -> trivy -> link
|
||||
```
|
||||
|
||||
### Infrastructure
|
||||
|
||||
```
|
||||
[docker-build-postgres, docker-build-openbao]
|
||||
-> [trivy-postgres, trivy-openbao]
|
||||
-> link
|
||||
```
|
||||
|
||||
## Docker Images
|
||||
|
||||
| Image | Registry Path | Context |
|
||||
| ------------------ | ----------------------------------------------- | ------------------- |
|
||||
| stack-api | `git.mosaicstack.dev/mosaic/stack-api` | `.` (monorepo root) |
|
||||
| stack-web | `git.mosaicstack.dev/mosaic/stack-web` | `.` (monorepo root) |
|
||||
| stack-orchestrator | `git.mosaicstack.dev/mosaic/stack-orchestrator` | `.` (monorepo root) |
|
||||
| stack-coordinator | `git.mosaicstack.dev/mosaic/stack-coordinator` | `apps/coordinator` |
|
||||
| stack-postgres | `git.mosaicstack.dev/mosaic/stack-postgres` | `docker/postgres` |
|
||||
| stack-openbao | `git.mosaicstack.dev/mosaic/stack-openbao` | `docker/openbao` |
|
||||
|
||||
## Image Tagging
|
||||
|
||||
| Condition | Tag | Purpose |
|
||||
| ------------- | -------------------------- | -------------------------- |
|
||||
| Always | `${CI_COMMIT_SHA:0:8}` | Immutable commit reference |
|
||||
| `main` branch | `latest` | Current latest build |
|
||||
| Git tag | tag value (e.g., `v1.0.0`) | Semantic version release |
|
||||
|
||||
## Required Secrets
|
||||
|
||||
Configure in Woodpecker UI (Settings > Secrets):
|
||||
|
||||
| Secret | Scope | Purpose |
|
||||
| ---------------- | ----------------- | ------------------------------------------- |
|
||||
| `gitea_username` | push, manual, tag | Gitea registry auth |
|
||||
| `gitea_token` | push, manual, tag | Gitea registry auth (`package:write` scope) |
|
||||
| `codex_api_key` | pull_request | Codex AI reviews |
|
||||
|
||||
## Codex AI Review Pipeline
|
||||
|
||||
The `codex-review.yml` pipeline runs independently on all PRs:
|
||||
|
||||
- **Code review**: Correctness, code quality, testing, performance
|
||||
- **Security review**: OWASP Top 10, hardcoded secrets, injection flaws
|
||||
|
||||
Fails on blockers or critical/high severity security findings.
|
||||
|
||||
### Local Testing
|
||||
|
||||
```bash
|
||||
~/.claude/scripts/codex/codex-code-review.sh --uncommitted
|
||||
~/.claude/scripts/codex/codex-security-review.sh --uncommitted
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### "unauthorized: authentication required"
|
||||
|
||||
- Verify `gitea_username` and `gitea_token` secrets in Woodpecker
|
||||
- Verify token has `package:write` scope
|
||||
|
||||
### Trivy scan fails with HIGH/CRITICAL
|
||||
|
||||
- Check if the vulnerability is in the base image (not our code)
|
||||
- Add to `.trivyignore` if it's a known, accepted risk
|
||||
- Use `--ignore-unfixed` (already set) to skip unfixable CVEs
|
||||
|
||||
### Package linking returns 404
|
||||
|
||||
- Normal for recently pushed packages — retry logic handles this
|
||||
- If persistent: verify package name matches exactly (case-sensitive)
|
||||
|
||||
### Pipeline runs Docker builds on pull requests
|
||||
|
||||
- Docker build steps have `when: branch: [main]` guards
|
||||
- PRs only run quality gates, not Docker builds
|
||||
337
.woodpecker/ci.yml
Normal file
337
.woodpecker/ci.yml
Normal file
@@ -0,0 +1,337 @@
|
||||
# Unified CI Pipeline - Mosaic Stack
|
||||
# Single install, parallel quality gates, sequential deploy
|
||||
#
|
||||
# Replaces: api.yml, orchestrator.yml, web.yml
|
||||
# Keeps: coordinator.yml (Python), infra.yml (separate concerns)
|
||||
#
|
||||
# Flow:
|
||||
# install → security-audit
|
||||
# → prisma-generate → lint + typecheck (parallel)
|
||||
# → prisma-migrate → test
|
||||
# → build (after all gates pass)
|
||||
# → docker builds (main only, parallel)
|
||||
# → trivy scans (main only, parallel)
|
||||
# → package linking (main only)
|
||||
|
||||
when:
|
||||
- event: [push, pull_request, manual]
|
||||
path:
|
||||
include:
|
||||
- "apps/api/**"
|
||||
- "apps/orchestrator/**"
|
||||
- "apps/web/**"
|
||||
- "packages/**"
|
||||
- "pnpm-lock.yaml"
|
||||
- "pnpm-workspace.yaml"
|
||||
- "turbo.json"
|
||||
- "package.json"
|
||||
- ".woodpecker/ci.yml"
|
||||
- ".trivyignore"
|
||||
|
||||
variables:
|
||||
- &node_image "node:24-alpine"
|
||||
- &install_deps |
|
||||
corepack enable
|
||||
pnpm install --frozen-lockfile
|
||||
- &use_deps |
|
||||
corepack enable
|
||||
- &turbo_env
|
||||
TURBO_API:
|
||||
from_secret: turbo_api
|
||||
TURBO_TOKEN:
|
||||
from_secret: turbo_token
|
||||
TURBO_TEAM:
|
||||
from_secret: turbo_team
|
||||
- &kaniko_setup |
|
||||
mkdir -p /kaniko/.docker
|
||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$GITEA_USER\",\"password\":\"$GITEA_TOKEN\"}}}" > /kaniko/.docker/config.json
|
||||
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:17.7-alpine3.22
|
||||
environment:
|
||||
POSTGRES_DB: test_db
|
||||
POSTGRES_USER: test_user
|
||||
POSTGRES_PASSWORD: test_password
|
||||
|
||||
steps:
|
||||
# ─── Install (once) ─────────────────────────────────────────
|
||||
install:
|
||||
image: *node_image
|
||||
commands:
|
||||
- *install_deps
|
||||
|
||||
# ─── Security Audit (once) ──────────────────────────────────
|
||||
security-audit:
|
||||
image: *node_image
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm audit --audit-level=high
|
||||
depends_on:
|
||||
- install
|
||||
|
||||
# ─── Prisma Generate ────────────────────────────────────────
|
||||
prisma-generate:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm --filter "@mosaic/api" prisma:generate
|
||||
depends_on:
|
||||
- install
|
||||
|
||||
# ─── Lint (all packages) ────────────────────────────────────
|
||||
lint:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
<<: *turbo_env
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm turbo lint
|
||||
depends_on:
|
||||
- prisma-generate
|
||||
|
||||
# ─── Typecheck (all packages, parallel with lint) ───────────
|
||||
typecheck:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
<<: *turbo_env
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm turbo typecheck
|
||||
depends_on:
|
||||
- prisma-generate
|
||||
|
||||
# ─── Prisma Migrate (test DB) ──────────────────────────────
|
||||
prisma-migrate:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
DATABASE_URL: "postgresql://test_user:test_password@postgres:5432/test_db?schema=public"
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm --filter "@mosaic/api" prisma migrate deploy
|
||||
depends_on:
|
||||
- prisma-generate
|
||||
|
||||
# ─── Test (all packages) ───────────────────────────────────
|
||||
test:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
DATABASE_URL: "postgresql://test_user:test_password@postgres:5432/test_db?schema=public"
|
||||
ENCRYPTION_KEY: "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
|
||||
<<: *turbo_env
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm --filter "@mosaic/api" exec vitest run --exclude 'src/auth/auth-rls.integration.spec.ts' --exclude 'src/credentials/user-credential.model.spec.ts' --exclude 'src/job-events/job-events.performance.spec.ts' --exclude 'src/knowledge/services/fulltext-search.spec.ts' --exclude 'src/mosaic-telemetry/mosaic-telemetry.module.spec.ts'
|
||||
- pnpm turbo test --filter=@mosaic/orchestrator --filter=@mosaic/web
|
||||
depends_on:
|
||||
- prisma-migrate
|
||||
|
||||
# ─── Build (all packages) ──────────────────────────────────
|
||||
build:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
NODE_ENV: "production"
|
||||
<<: *turbo_env
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm turbo build
|
||||
depends_on:
|
||||
- lint
|
||||
- typecheck
|
||||
- test
|
||||
- security-audit
|
||||
|
||||
# ─── Docker Builds (main only, parallel) ───────────────────
|
||||
|
||||
docker-build-api:
|
||||
image: gcr.io/kaniko-project/executor:debug
|
||||
environment:
|
||||
GITEA_USER:
|
||||
from_secret: gitea_username
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||
commands:
|
||||
- *kaniko_setup
|
||||
- |
|
||||
DESTINATIONS=""
|
||||
if [ -n "$CI_COMMIT_TAG" ]; then
|
||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:$CI_COMMIT_TAG"
|
||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:latest"
|
||||
fi
|
||||
/kaniko/executor --context . --dockerfile apps/api/Dockerfile --snapshot-mode=redo $DESTINATIONS
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- build
|
||||
|
||||
docker-build-orchestrator:
|
||||
image: gcr.io/kaniko-project/executor:debug
|
||||
environment:
|
||||
GITEA_USER:
|
||||
from_secret: gitea_username
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||
commands:
|
||||
- *kaniko_setup
|
||||
- |
|
||||
DESTINATIONS=""
|
||||
if [ -n "$CI_COMMIT_TAG" ]; then
|
||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:$CI_COMMIT_TAG"
|
||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:latest"
|
||||
fi
|
||||
/kaniko/executor --context . --dockerfile apps/orchestrator/Dockerfile --snapshot-mode=redo $DESTINATIONS
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- build
|
||||
|
||||
docker-build-web:
|
||||
image: gcr.io/kaniko-project/executor:debug
|
||||
environment:
|
||||
GITEA_USER:
|
||||
from_secret: gitea_username
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||
commands:
|
||||
- *kaniko_setup
|
||||
- |
|
||||
DESTINATIONS=""
|
||||
if [ -n "$CI_COMMIT_TAG" ]; then
|
||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:$CI_COMMIT_TAG"
|
||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:latest"
|
||||
fi
|
||||
/kaniko/executor --context . --dockerfile apps/web/Dockerfile --snapshot-mode=redo --build-arg NEXT_PUBLIC_API_URL=https://api.mosaicstack.dev $DESTINATIONS
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- build
|
||||
|
||||
# ─── Container Security Scans (main only) ──────────────────
|
||||
|
||||
security-trivy-api:
|
||||
image: aquasec/trivy:latest
|
||||
environment:
|
||||
GITEA_USER:
|
||||
from_secret: gitea_username
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||
commands:
|
||||
- |
|
||||
if [ -n "$$CI_COMMIT_TAG" ]; then SCAN_TAG="$$CI_COMMIT_TAG"; else SCAN_TAG="latest"; fi
|
||||
mkdir -p ~/.docker
|
||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||
trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed --ignorefile .trivyignore git.mosaicstack.dev/mosaic/stack-api:$$SCAN_TAG
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- docker-build-api
|
||||
|
||||
security-trivy-orchestrator:
|
||||
image: aquasec/trivy:latest
|
||||
environment:
|
||||
GITEA_USER:
|
||||
from_secret: gitea_username
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||
commands:
|
||||
- |
|
||||
if [ -n "$$CI_COMMIT_TAG" ]; then SCAN_TAG="$$CI_COMMIT_TAG"; else SCAN_TAG="latest"; fi
|
||||
mkdir -p ~/.docker
|
||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||
trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed --ignorefile .trivyignore git.mosaicstack.dev/mosaic/stack-orchestrator:$$SCAN_TAG
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- docker-build-orchestrator
|
||||
|
||||
security-trivy-web:
|
||||
image: aquasec/trivy:latest
|
||||
environment:
|
||||
GITEA_USER:
|
||||
from_secret: gitea_username
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||
commands:
|
||||
- |
|
||||
if [ -n "$$CI_COMMIT_TAG" ]; then SCAN_TAG="$$CI_COMMIT_TAG"; else SCAN_TAG="latest"; fi
|
||||
mkdir -p ~/.docker
|
||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||
trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed --ignorefile .trivyignore git.mosaicstack.dev/mosaic/stack-web:$$SCAN_TAG
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- docker-build-web
|
||||
|
||||
# ─── Package Linking (main only, once) ─────────────────────
|
||||
|
||||
link-packages:
|
||||
image: alpine:3
|
||||
environment:
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
commands:
|
||||
- apk add --no-cache curl
|
||||
- sleep 10
|
||||
- |
|
||||
set -e
|
||||
link_package() {
|
||||
PKG="$$1"
|
||||
echo "Linking $$PKG..."
|
||||
for attempt in 1 2 3; do
|
||||
STATUS=$$(curl -s -o /tmp/link-response.txt -w "%{http_code}" -X POST \
|
||||
-H "Authorization: token $$GITEA_TOKEN" \
|
||||
"https://git.mosaicstack.dev/api/v1/packages/mosaic/container/$$PKG/-/link/stack")
|
||||
if [ "$$STATUS" = "201" ] || [ "$$STATUS" = "204" ]; then
|
||||
echo " Linked $$PKG"
|
||||
return 0
|
||||
elif [ "$$STATUS" = "400" ]; then
|
||||
echo " $$PKG already linked"
|
||||
return 0
|
||||
elif [ "$$STATUS" = "404" ] && [ $$attempt -lt 3 ]; then
|
||||
echo " $$PKG not found yet, retrying in 5s (attempt $$attempt/3)..."
|
||||
sleep 5
|
||||
else
|
||||
echo " FAILED: $$PKG status $$STATUS"
|
||||
cat /tmp/link-response.txt
|
||||
return 1
|
||||
fi
|
||||
done
|
||||
}
|
||||
link_package "stack-api"
|
||||
link_package "stack-orchestrator"
|
||||
link_package "stack-web"
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- security-trivy-api
|
||||
- security-trivy-orchestrator
|
||||
- security-trivy-web
|
||||
90
.woodpecker/codex-review.yml
Normal file
90
.woodpecker/codex-review.yml
Normal file
@@ -0,0 +1,90 @@
|
||||
# Codex AI Review Pipeline for Woodpecker CI
|
||||
# Drop this into your repo's .woodpecker/ directory to enable automated
|
||||
# code and security reviews on every pull request.
|
||||
#
|
||||
# Required secrets:
|
||||
# - codex_api_key: OpenAI API key or Codex-compatible key
|
||||
#
|
||||
# Optional secrets:
|
||||
# - gitea_token: Gitea API token for posting PR comments (if not using tea CLI auth)
|
||||
|
||||
when:
|
||||
event: pull_request
|
||||
|
||||
variables:
|
||||
- &node_image "node:24-slim"
|
||||
- &install_codex "npm i -g @openai/codex"
|
||||
|
||||
steps:
|
||||
# --- Code Quality Review ---
|
||||
code-review:
|
||||
image: *node_image
|
||||
environment:
|
||||
CODEX_API_KEY:
|
||||
from_secret: codex_api_key
|
||||
commands:
|
||||
- *install_codex
|
||||
- apt-get update -qq && apt-get install -y -qq jq git > /dev/null 2>&1
|
||||
|
||||
# Generate the diff
|
||||
- git fetch origin ${CI_COMMIT_TARGET_BRANCH:-main}
|
||||
- DIFF=$(git diff origin/${CI_COMMIT_TARGET_BRANCH:-main}...HEAD)
|
||||
|
||||
# Run code review with structured output
|
||||
- |
|
||||
codex exec \
|
||||
--sandbox read-only \
|
||||
--output-schema .woodpecker/schemas/code-review-schema.json \
|
||||
-o /tmp/code-review.json \
|
||||
"You are an expert code reviewer. Review the following code changes for correctness, code quality, testing, performance, and documentation issues. Only flag actionable, important issues. Categorize as blocker/should-fix/suggestion. If code looks good, say so.
|
||||
|
||||
Changes:
|
||||
$DIFF"
|
||||
|
||||
# Output summary
|
||||
- echo "=== Code Review Results ==="
|
||||
- jq '.' /tmp/code-review.json
|
||||
- |
|
||||
BLOCKERS=$(jq '.stats.blockers // 0' /tmp/code-review.json)
|
||||
if [ "$BLOCKERS" -gt 0 ]; then
|
||||
echo "FAIL: $BLOCKERS blocker(s) found"
|
||||
exit 1
|
||||
fi
|
||||
echo "PASS: No blockers found"
|
||||
|
||||
# --- Security Review ---
|
||||
security-review:
|
||||
image: *node_image
|
||||
environment:
|
||||
CODEX_API_KEY:
|
||||
from_secret: codex_api_key
|
||||
commands:
|
||||
- *install_codex
|
||||
- apt-get update -qq && apt-get install -y -qq jq git > /dev/null 2>&1
|
||||
|
||||
# Generate the diff
|
||||
- git fetch origin ${CI_COMMIT_TARGET_BRANCH:-main}
|
||||
- DIFF=$(git diff origin/${CI_COMMIT_TARGET_BRANCH:-main}...HEAD)
|
||||
|
||||
# Run security review with structured output
|
||||
- |
|
||||
codex exec \
|
||||
--sandbox read-only \
|
||||
--output-schema .woodpecker/schemas/security-review-schema.json \
|
||||
-o /tmp/security-review.json \
|
||||
"You are an expert application security engineer. Review the following code changes for security vulnerabilities including OWASP Top 10, hardcoded secrets, injection flaws, auth/authz gaps, XSS, CSRF, SSRF, path traversal, and supply chain risks. Include CWE IDs and remediation steps. Only flag real security issues, not code quality.
|
||||
|
||||
Changes:
|
||||
$DIFF"
|
||||
|
||||
# Output summary
|
||||
- echo "=== Security Review Results ==="
|
||||
- jq '.' /tmp/security-review.json
|
||||
- |
|
||||
CRITICAL=$(jq '.stats.critical // 0' /tmp/security-review.json)
|
||||
HIGH=$(jq '.stats.high // 0' /tmp/security-review.json)
|
||||
if [ "$CRITICAL" -gt 0 ] || [ "$HIGH" -gt 0 ]; then
|
||||
echo "FAIL: $CRITICAL critical, $HIGH high severity finding(s)"
|
||||
exit 1
|
||||
fi
|
||||
echo "PASS: No critical or high severity findings"
|
||||
178
.woodpecker/coordinator.yml
Normal file
178
.woodpecker/coordinator.yml
Normal file
@@ -0,0 +1,178 @@
|
||||
# Coordinator Pipeline - Mosaic Stack
|
||||
# Quality gates, build, and Docker publish for mosaic-coordinator (Python)
|
||||
#
|
||||
# Triggers on: apps/coordinator/**
|
||||
# Security chain: bandit + pip-audit + Trivy container scan
|
||||
|
||||
when:
|
||||
- event: [push, pull_request, manual]
|
||||
path:
|
||||
include:
|
||||
- "apps/coordinator/**"
|
||||
- ".woodpecker/coordinator.yml"
|
||||
|
||||
variables:
|
||||
- &python_image "python:3.11-slim"
|
||||
- &activate_venv |
|
||||
cd apps/coordinator
|
||||
. venv/bin/activate
|
||||
- &kaniko_setup |
|
||||
mkdir -p /kaniko/.docker
|
||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$GITEA_USER\",\"password\":\"$GITEA_TOKEN\"}}}" > /kaniko/.docker/config.json
|
||||
|
||||
steps:
|
||||
# === Quality Gates ===
|
||||
|
||||
install:
|
||||
image: *python_image
|
||||
commands:
|
||||
- cd apps/coordinator
|
||||
- python -m venv venv
|
||||
- . venv/bin/activate
|
||||
- pip install --no-cache-dir --upgrade "pip>=25.3"
|
||||
- pip install --no-cache-dir --extra-index-url https://git.mosaicstack.dev/api/packages/mosaic/pypi/simple/ -e ".[dev]"
|
||||
- pip install --no-cache-dir bandit pip-audit
|
||||
|
||||
ruff-check:
|
||||
image: *python_image
|
||||
commands:
|
||||
- *activate_venv
|
||||
- ruff check src/ tests/
|
||||
depends_on:
|
||||
- install
|
||||
|
||||
mypy:
|
||||
image: *python_image
|
||||
commands:
|
||||
- *activate_venv
|
||||
- mypy src/
|
||||
depends_on:
|
||||
- install
|
||||
|
||||
security-bandit:
|
||||
image: *python_image
|
||||
commands:
|
||||
- *activate_venv
|
||||
- bandit -r src/ -c bandit.yaml -f screen
|
||||
depends_on:
|
||||
- install
|
||||
|
||||
security-pip-audit:
|
||||
image: *python_image
|
||||
commands:
|
||||
- *activate_venv
|
||||
- pip-audit
|
||||
depends_on:
|
||||
- install
|
||||
|
||||
test:
|
||||
image: *python_image
|
||||
commands:
|
||||
- *activate_venv
|
||||
- pytest
|
||||
depends_on:
|
||||
- install
|
||||
|
||||
# === Docker Build & Push ===
|
||||
|
||||
docker-build-coordinator:
|
||||
image: gcr.io/kaniko-project/executor:debug
|
||||
environment:
|
||||
GITEA_USER:
|
||||
from_secret: gitea_username
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||
commands:
|
||||
- *kaniko_setup
|
||||
- |
|
||||
DESTINATIONS=""
|
||||
if [ -n "$CI_COMMIT_TAG" ]; then
|
||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-coordinator:$CI_COMMIT_TAG"
|
||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-coordinator:latest"
|
||||
fi
|
||||
/kaniko/executor --context apps/coordinator --dockerfile apps/coordinator/Dockerfile --snapshot-mode=redo $DESTINATIONS
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- ruff-check
|
||||
- mypy
|
||||
- security-bandit
|
||||
- security-pip-audit
|
||||
- test
|
||||
|
||||
# === Container Security Scan ===
|
||||
|
||||
security-trivy-coordinator:
|
||||
image: aquasec/trivy:latest
|
||||
environment:
|
||||
GITEA_USER:
|
||||
from_secret: gitea_username
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||
commands:
|
||||
- |
|
||||
if [ -n "$$CI_COMMIT_TAG" ]; then
|
||||
SCAN_TAG="$$CI_COMMIT_TAG"
|
||||
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
||||
SCAN_TAG="latest"
|
||||
else
|
||||
SCAN_TAG="latest"
|
||||
fi
|
||||
mkdir -p ~/.docker
|
||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||
trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed \
|
||||
--ignorefile .trivyignore \
|
||||
git.mosaicstack.dev/mosaic/stack-coordinator:$$SCAN_TAG
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- docker-build-coordinator
|
||||
|
||||
# === Package Linking ===
|
||||
|
||||
link-packages:
|
||||
image: alpine:3
|
||||
environment:
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
commands:
|
||||
- apk add --no-cache curl
|
||||
- sleep 10
|
||||
- |
|
||||
set -e
|
||||
link_package() {
|
||||
PKG="$$1"
|
||||
echo "Linking $$PKG..."
|
||||
for attempt in 1 2 3; do
|
||||
STATUS=$$(curl -s -o /tmp/link-response.txt -w "%{http_code}" -X POST \
|
||||
-H "Authorization: token $$GITEA_TOKEN" \
|
||||
"https://git.mosaicstack.dev/api/v1/packages/mosaic/container/$$PKG/-/link/stack")
|
||||
if [ "$$STATUS" = "201" ] || [ "$$STATUS" = "204" ]; then
|
||||
echo " Linked $$PKG"
|
||||
return 0
|
||||
elif [ "$$STATUS" = "400" ]; then
|
||||
echo " $$PKG already linked"
|
||||
return 0
|
||||
elif [ "$$STATUS" = "404" ] && [ $$attempt -lt 3 ]; then
|
||||
echo " $$PKG not found yet, retrying in 5s (attempt $$attempt/3)..."
|
||||
sleep 5
|
||||
else
|
||||
echo " FAILED: $$PKG status $$STATUS"
|
||||
cat /tmp/link-response.txt
|
||||
return 1
|
||||
fi
|
||||
done
|
||||
}
|
||||
link_package "stack-coordinator"
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- security-trivy-coordinator
|
||||
170
.woodpecker/infra.yml
Normal file
170
.woodpecker/infra.yml
Normal file
@@ -0,0 +1,170 @@
|
||||
# Infrastructure Pipeline - Mosaic Stack
|
||||
# Docker build, Trivy scan, and publish for postgres + openbao images
|
||||
#
|
||||
# Triggers on: docker/**
|
||||
# No quality gates — infrastructure images (base image + config only)
|
||||
|
||||
when:
|
||||
- event: [push, manual, tag]
|
||||
path:
|
||||
include:
|
||||
- "docker/**"
|
||||
- ".woodpecker/infra.yml"
|
||||
|
||||
variables:
|
||||
- &kaniko_setup |
|
||||
mkdir -p /kaniko/.docker
|
||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$GITEA_USER\",\"password\":\"$GITEA_TOKEN\"}}}" > /kaniko/.docker/config.json
|
||||
|
||||
steps:
|
||||
# === Docker Build & Push ===
|
||||
|
||||
docker-build-postgres:
|
||||
image: gcr.io/kaniko-project/executor:debug
|
||||
environment:
|
||||
GITEA_USER:
|
||||
from_secret: gitea_username
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||
commands:
|
||||
- *kaniko_setup
|
||||
- |
|
||||
DESTINATIONS=""
|
||||
if [ -n "$CI_COMMIT_TAG" ]; then
|
||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-postgres:$CI_COMMIT_TAG"
|
||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-postgres:latest"
|
||||
fi
|
||||
/kaniko/executor --context docker/postgres --dockerfile docker/postgres/Dockerfile --snapshot-mode=redo $DESTINATIONS
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
|
||||
docker-build-openbao:
|
||||
image: gcr.io/kaniko-project/executor:debug
|
||||
environment:
|
||||
GITEA_USER:
|
||||
from_secret: gitea_username
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||
commands:
|
||||
- *kaniko_setup
|
||||
- |
|
||||
DESTINATIONS=""
|
||||
if [ -n "$CI_COMMIT_TAG" ]; then
|
||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-openbao:$CI_COMMIT_TAG"
|
||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-openbao:latest"
|
||||
fi
|
||||
/kaniko/executor --context docker/openbao --dockerfile docker/openbao/Dockerfile --snapshot-mode=redo $DESTINATIONS
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
|
||||
# === Container Security Scans ===
|
||||
|
||||
security-trivy-postgres:
|
||||
image: aquasec/trivy:latest
|
||||
environment:
|
||||
GITEA_USER:
|
||||
from_secret: gitea_username
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||
commands:
|
||||
- |
|
||||
if [ -n "$$CI_COMMIT_TAG" ]; then
|
||||
SCAN_TAG="$$CI_COMMIT_TAG"
|
||||
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
||||
SCAN_TAG="latest"
|
||||
else
|
||||
SCAN_TAG="latest"
|
||||
fi
|
||||
mkdir -p ~/.docker
|
||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||
trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed \
|
||||
--ignorefile .trivyignore \
|
||||
git.mosaicstack.dev/mosaic/stack-postgres:$$SCAN_TAG
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- docker-build-postgres
|
||||
|
||||
security-trivy-openbao:
|
||||
image: aquasec/trivy:latest
|
||||
environment:
|
||||
GITEA_USER:
|
||||
from_secret: gitea_username
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||
commands:
|
||||
- |
|
||||
if [ -n "$$CI_COMMIT_TAG" ]; then
|
||||
SCAN_TAG="$$CI_COMMIT_TAG"
|
||||
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
||||
SCAN_TAG="latest"
|
||||
else
|
||||
SCAN_TAG="latest"
|
||||
fi
|
||||
mkdir -p ~/.docker
|
||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||
trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed \
|
||||
--ignorefile .trivyignore \
|
||||
git.mosaicstack.dev/mosaic/stack-openbao:$$SCAN_TAG
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- docker-build-openbao
|
||||
|
||||
# === Package Linking ===
|
||||
|
||||
link-packages:
|
||||
image: alpine:3
|
||||
environment:
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
commands:
|
||||
- apk add --no-cache curl
|
||||
- sleep 10
|
||||
- |
|
||||
set -e
|
||||
link_package() {
|
||||
PKG="$$1"
|
||||
echo "Linking $$PKG..."
|
||||
for attempt in 1 2 3; do
|
||||
STATUS=$$(curl -s -o /tmp/link-response.txt -w "%{http_code}" -X POST \
|
||||
-H "Authorization: token $$GITEA_TOKEN" \
|
||||
"https://git.mosaicstack.dev/api/v1/packages/mosaic/container/$$PKG/-/link/stack")
|
||||
if [ "$$STATUS" = "201" ] || [ "$$STATUS" = "204" ]; then
|
||||
echo " Linked $$PKG"
|
||||
return 0
|
||||
elif [ "$$STATUS" = "400" ]; then
|
||||
echo " $$PKG already linked"
|
||||
return 0
|
||||
elif [ "$$STATUS" = "404" ] && [ $$attempt -lt 3 ]; then
|
||||
echo " $$PKG not found yet, retrying in 5s (attempt $$attempt/3)..."
|
||||
sleep 5
|
||||
else
|
||||
echo " FAILED: $$PKG status $$STATUS"
|
||||
cat /tmp/link-response.txt
|
||||
return 1
|
||||
fi
|
||||
done
|
||||
}
|
||||
link_package "stack-postgres"
|
||||
link_package "stack-openbao"
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- security-trivy-postgres
|
||||
- security-trivy-openbao
|
||||
92
.woodpecker/schemas/code-review-schema.json
Normal file
92
.woodpecker/schemas/code-review-schema.json
Normal file
@@ -0,0 +1,92 @@
|
||||
{
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"summary": {
|
||||
"type": "string",
|
||||
"description": "Brief overall assessment of the code changes"
|
||||
},
|
||||
"verdict": {
|
||||
"type": "string",
|
||||
"enum": ["approve", "request-changes", "comment"],
|
||||
"description": "Overall review verdict"
|
||||
},
|
||||
"confidence": {
|
||||
"type": "number",
|
||||
"minimum": 0,
|
||||
"maximum": 1,
|
||||
"description": "Confidence score for the review (0-1)"
|
||||
},
|
||||
"findings": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"severity": {
|
||||
"type": "string",
|
||||
"enum": ["blocker", "should-fix", "suggestion"],
|
||||
"description": "Finding severity: blocker (must fix), should-fix (important), suggestion (optional)"
|
||||
},
|
||||
"title": {
|
||||
"type": "string",
|
||||
"description": "Short title describing the issue"
|
||||
},
|
||||
"file": {
|
||||
"type": "string",
|
||||
"description": "File path where the issue was found"
|
||||
},
|
||||
"line_start": {
|
||||
"type": "integer",
|
||||
"description": "Starting line number"
|
||||
},
|
||||
"line_end": {
|
||||
"type": "integer",
|
||||
"description": "Ending line number"
|
||||
},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"description": "Detailed explanation of the issue"
|
||||
},
|
||||
"suggestion": {
|
||||
"type": "string",
|
||||
"description": "Suggested fix or improvement"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"severity",
|
||||
"title",
|
||||
"file",
|
||||
"line_start",
|
||||
"line_end",
|
||||
"description",
|
||||
"suggestion"
|
||||
]
|
||||
}
|
||||
},
|
||||
"stats": {
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"files_reviewed": {
|
||||
"type": "integer",
|
||||
"description": "Number of files reviewed"
|
||||
},
|
||||
"blockers": {
|
||||
"type": "integer",
|
||||
"description": "Count of blocker findings"
|
||||
},
|
||||
"should_fix": {
|
||||
"type": "integer",
|
||||
"description": "Count of should-fix findings"
|
||||
},
|
||||
"suggestions": {
|
||||
"type": "integer",
|
||||
"description": "Count of suggestion findings"
|
||||
}
|
||||
},
|
||||
"required": ["files_reviewed", "blockers", "should_fix", "suggestions"]
|
||||
}
|
||||
},
|
||||
"required": ["summary", "verdict", "confidence", "findings", "stats"]
|
||||
}
|
||||
106
.woodpecker/schemas/security-review-schema.json
Normal file
106
.woodpecker/schemas/security-review-schema.json
Normal file
@@ -0,0 +1,106 @@
|
||||
{
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"summary": {
|
||||
"type": "string",
|
||||
"description": "Brief overall security assessment of the code changes"
|
||||
},
|
||||
"risk_level": {
|
||||
"type": "string",
|
||||
"enum": ["critical", "high", "medium", "low", "none"],
|
||||
"description": "Overall security risk level"
|
||||
},
|
||||
"confidence": {
|
||||
"type": "number",
|
||||
"minimum": 0,
|
||||
"maximum": 1,
|
||||
"description": "Confidence score for the review (0-1)"
|
||||
},
|
||||
"findings": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"severity": {
|
||||
"type": "string",
|
||||
"enum": ["critical", "high", "medium", "low"],
|
||||
"description": "Vulnerability severity level"
|
||||
},
|
||||
"title": {
|
||||
"type": "string",
|
||||
"description": "Short title describing the vulnerability"
|
||||
},
|
||||
"file": {
|
||||
"type": "string",
|
||||
"description": "File path where the vulnerability was found"
|
||||
},
|
||||
"line_start": {
|
||||
"type": "integer",
|
||||
"description": "Starting line number"
|
||||
},
|
||||
"line_end": {
|
||||
"type": "integer",
|
||||
"description": "Ending line number"
|
||||
},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"description": "Detailed explanation of the vulnerability"
|
||||
},
|
||||
"cwe_id": {
|
||||
"type": "string",
|
||||
"description": "CWE identifier if applicable (e.g., CWE-79)"
|
||||
},
|
||||
"owasp_category": {
|
||||
"type": "string",
|
||||
"description": "OWASP Top 10 category if applicable (e.g., A03:2021-Injection)"
|
||||
},
|
||||
"remediation": {
|
||||
"type": "string",
|
||||
"description": "Specific remediation steps to fix the vulnerability"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"severity",
|
||||
"title",
|
||||
"file",
|
||||
"line_start",
|
||||
"line_end",
|
||||
"description",
|
||||
"cwe_id",
|
||||
"owasp_category",
|
||||
"remediation"
|
||||
]
|
||||
}
|
||||
},
|
||||
"stats": {
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"files_reviewed": {
|
||||
"type": "integer",
|
||||
"description": "Number of files reviewed"
|
||||
},
|
||||
"critical": {
|
||||
"type": "integer",
|
||||
"description": "Count of critical findings"
|
||||
},
|
||||
"high": {
|
||||
"type": "integer",
|
||||
"description": "Count of high findings"
|
||||
},
|
||||
"medium": {
|
||||
"type": "integer",
|
||||
"description": "Count of medium findings"
|
||||
},
|
||||
"low": {
|
||||
"type": "integer",
|
||||
"description": "Count of low findings"
|
||||
}
|
||||
},
|
||||
"required": ["files_reviewed", "critical", "high", "medium", "low"]
|
||||
}
|
||||
},
|
||||
"required": ["summary", "risk_level", "confidence", "findings", "stats"]
|
||||
}
|
||||
139
AGENTS.md
139
AGENTS.md
@@ -1,101 +1,82 @@
|
||||
# AGENTS.md — Mosaic Stack
|
||||
# Mosaic Stack — Agent Guidelines
|
||||
|
||||
Guidelines for AI agents working on this codebase.
|
||||
## Load Order
|
||||
|
||||
## Quick Start
|
||||
1. `SOUL.md` (repo identity + behavior invariants)
|
||||
2. `~/.config/mosaic/STANDARDS.md` (machine-wide standards rails)
|
||||
3. `AGENTS.md` (repo-specific overlay)
|
||||
4. `.mosaic/repo-hooks.sh` (repo lifecycle hooks)
|
||||
|
||||
1. Read `CLAUDE.md` for project-specific patterns
|
||||
2. Check this file for workflow and context management
|
||||
3. Use `TOOLS.md` patterns (if present) before fumbling with CLIs
|
||||
## Runtime Contract
|
||||
|
||||
## Context Management
|
||||
- This file is authoritative for repo-local operations.
|
||||
- `CLAUDE.md` is a compatibility pointer to `AGENTS.md`.
|
||||
- Follow universal rails from `~/.config/mosaic/guides/` and `~/.config/mosaic/rails/`.
|
||||
|
||||
Context = tokens = cost. Be smart.
|
||||
|
||||
| Strategy | When |
|
||||
| ----------------------------- | -------------------------------------------------------------- |
|
||||
| **Spawn sub-agents** | Isolated coding tasks, research, anything that can report back |
|
||||
| **Batch operations** | Group related API calls, don't do one-at-a-time |
|
||||
| **Check existing patterns** | Before writing new code, see how similar features were built |
|
||||
| **Minimize re-reading** | Don't re-read files you just wrote |
|
||||
| **Summarize before clearing** | Extract learnings to memory before context reset |
|
||||
|
||||
## Workflow (Non-Negotiable)
|
||||
|
||||
### Code Changes
|
||||
|
||||
```
|
||||
1. Branch → git checkout -b feature/XX-description
|
||||
2. Code → TDD: write test (RED), implement (GREEN), refactor
|
||||
3. Test → pnpm test (must pass)
|
||||
4. Push → git push origin feature/XX-description
|
||||
5. PR → Create PR to develop (not main)
|
||||
6. Review → Wait for approval or self-merge if authorized
|
||||
7. Close → Close related issues via API
|
||||
```
|
||||
|
||||
**Never merge directly to develop without a PR.**
|
||||
|
||||
### Issue Management
|
||||
## Session Lifecycle
|
||||
|
||||
```bash
|
||||
# Get Gitea token
|
||||
TOKEN="$(jq -r '.gitea.mosaicstack.token' ~/src/jarvis-brain/credentials.json)"
|
||||
|
||||
# Create issue
|
||||
curl -s -H "Authorization: token $TOKEN" -H "Content-Type: application/json" \
|
||||
"https://git.mosaicstack.dev/api/v1/repos/mosaic/stack/issues" \
|
||||
-d '{"title":"Title","body":"Description","milestone":54}'
|
||||
|
||||
# Close issue (REQUIRED after merge)
|
||||
curl -s -X PATCH -H "Authorization: token $TOKEN" -H "Content-Type: application/json" \
|
||||
"https://git.mosaicstack.dev/api/v1/repos/mosaic/stack/issues/XX" \
|
||||
-d '{"state":"closed"}'
|
||||
|
||||
# Create PR (tea CLI works for this)
|
||||
tea pulls create --repo mosaic/stack --base develop --head feature/XX-name \
|
||||
--title "feat(#XX): Title" --description "Description"
|
||||
bash scripts/agent/session-start.sh
|
||||
bash scripts/agent/critical.sh
|
||||
bash scripts/agent/session-end.sh
|
||||
```
|
||||
|
||||
### Commit Messages
|
||||
Optional:
|
||||
|
||||
```
|
||||
<type>(#issue): Brief description
|
||||
|
||||
Detailed explanation if needed.
|
||||
|
||||
Closes #XX, #YY
|
||||
```bash
|
||||
bash scripts/agent/log-limitation.sh "Short Name"
|
||||
bash scripts/agent/orchestrator-daemon.sh status
|
||||
bash scripts/agent/orchestrator-events.sh recent --limit 50
|
||||
```
|
||||
|
||||
Types: `feat`, `fix`, `docs`, `test`, `refactor`, `chore`
|
||||
## Repo Context
|
||||
|
||||
## TDD Requirements
|
||||
- Platform: multi-tenant personal assistant stack
|
||||
- Monorepo: `pnpm` workspaces + Turborepo
|
||||
- Core apps: `apps/api` (NestJS), `apps/web` (Next.js), orchestrator/coordinator services
|
||||
- Infrastructure: Docker Compose + PostgreSQL + Valkey + Authentik
|
||||
|
||||
**All code must follow TDD. This is non-negotiable.**
|
||||
## Quick Command Set
|
||||
|
||||
1. **RED** — Write failing test first
|
||||
2. **GREEN** — Minimal code to pass
|
||||
3. **REFACTOR** — Clean up while tests stay green
|
||||
```bash
|
||||
pnpm install
|
||||
pnpm dev
|
||||
pnpm test
|
||||
pnpm lint
|
||||
pnpm build
|
||||
```
|
||||
|
||||
Minimum 85% coverage for new code.
|
||||
## Versioning Protocol (HARD GATE)
|
||||
|
||||
## Token-Saving Tips
|
||||
**This project is ALPHA. All versions MUST be `0.0.x`.**
|
||||
|
||||
- **Sub-agents die after task** — their context doesn't pollute main session
|
||||
- **API over CLI** when CLI needs TTY or confirmation prompts
|
||||
- **One commit** with all issue numbers, not separate commits per issue
|
||||
- **Don't re-read** files you just wrote
|
||||
- **Batch similar operations** — create all issues at once, close all at once
|
||||
- The `0.1.0` release is FORBIDDEN until Jason explicitly authorizes it.
|
||||
- Every milestone bump increments the patch: `0.0.20` → `0.0.21` → `0.0.22`, etc.
|
||||
- ALL package.json files in the monorepo MUST stay in sync at the same version.
|
||||
- Use `scripts/version-bump.sh <version>` to bump — it enforces the alpha constraint and updates all packages atomically.
|
||||
- The script rejects any version >= `0.1.0`.
|
||||
- When creating a release tag, the tag MUST match the package version: `v0.0.x`.
|
||||
|
||||
## Key Files
|
||||
**Milestone-to-version mapping** is defined in the PRD (`docs/PRD.md`) under "Delivery/Milestone Intent". Agents MUST use the version from that table when tagging a milestone release.
|
||||
|
||||
| File | Purpose |
|
||||
| ------------------------------- | ----------------------------------------- |
|
||||
| `CLAUDE.md` | Project overview, tech stack, conventions |
|
||||
| `CONTRIBUTING.md` | Human contributor guide |
|
||||
| `apps/api/prisma/schema.prisma` | Database schema |
|
||||
| `docs/` | Architecture and setup docs |
|
||||
**Violation of this protocol is a blocking error.** If an agent attempts to set a version >= `0.1.0`, stop and escalate.
|
||||
|
||||
---
|
||||
## Standards and Quality
|
||||
|
||||
_Model-agnostic. Works for Claude, MiniMax, GPT, Llama, etc._
|
||||
- Enforce strict typing and no unsafe shortcuts.
|
||||
- Keep lint/typecheck/tests green before completion.
|
||||
- Prefer small, focused commits and clear change descriptions.
|
||||
|
||||
## App-Specific Overlays
|
||||
|
||||
- `apps/api/AGENTS.md`
|
||||
- `apps/web/AGENTS.md`
|
||||
- `apps/coordinator/AGENTS.md`
|
||||
- `apps/orchestrator/AGENTS.md`
|
||||
|
||||
## Additional Guidance
|
||||
|
||||
- Orchestrator guidance: `docs/claude/orchestrator.md`
|
||||
- Security remediation context: `docs/reports/codebase-review-2026-02-05/01-security-review.md`
|
||||
- Code quality context: `docs/reports/codebase-review-2026-02-05/02-code-quality-review.md`
|
||||
- QA context: `docs/reports/codebase-review-2026-02-05/03-qa-test-coverage.md`
|
||||
|
||||
466
CLAUDE.md
466
CLAUDE.md
@@ -1,464 +1,10 @@
|
||||
**Multi-tenant personal assistant platform with PostgreSQL backend, Authentik SSO, and MoltBot
|
||||
integration.**
|
||||
# CLAUDE Compatibility Pointer
|
||||
|
||||
## Project Overview
|
||||
This file exists so Claude Code sessions load Mosaic standards.
|
||||
|
||||
Mosaic Stack is a standalone platform that provides:
|
||||
## MANDATORY — Read Before Any Response
|
||||
|
||||
- Multi-user workspaces with team sharing
|
||||
- Task, event, and project management
|
||||
- Gantt charts and Kanban boards
|
||||
- MoltBot integration via plugins (stock MoltBot + mosaic-plugin-\*)
|
||||
- PDA-friendly design throughout
|
||||
BEFORE responding to any user message, READ `~/.config/mosaic/AGENTS.md`.
|
||||
|
||||
**Repository:** git.mosaicstack.dev/mosaic/stack
|
||||
**Versioning:** Start at 0.0.1, MVP = 0.1.0
|
||||
|
||||
## Technology Stack
|
||||
|
||||
| Layer | Technology |
|
||||
| ---------- | -------------------------------------------- |
|
||||
| Frontend | Next.js 16 + React + TailwindCSS + Shadcn/ui |
|
||||
| Backend | NestJS + Prisma ORM |
|
||||
| Database | PostgreSQL 17 + pgvector |
|
||||
| Cache | Valkey (Redis-compatible) |
|
||||
| Auth | Authentik (OIDC) |
|
||||
| AI | Ollama (configurable: local or remote) |
|
||||
| Messaging | MoltBot (stock + Mosaic plugins) |
|
||||
| Real-time | WebSockets (Socket.io) |
|
||||
| Monorepo | pnpm workspaces + TurboRepo |
|
||||
| Testing | Vitest + Playwright |
|
||||
| Deployment | Docker + docker-compose |
|
||||
|
||||
## Repository Structure
|
||||
|
||||
mosaic-stack/
|
||||
├── apps/
|
||||
│ ├── api/ # mosaic-api (NestJS)
|
||||
│ │ ├── src/
|
||||
│ │ │ ├── auth/ # Authentik OIDC
|
||||
│ │ │ ├── tasks/ # Task management
|
||||
│ │ │ ├── events/ # Calendar/events
|
||||
│ │ │ ├── projects/ # Project management
|
||||
│ │ │ ├── brain/ # MoltBot integration
|
||||
│ │ │ └── activity/ # Activity logging
|
||||
│ │ ├── prisma/
|
||||
│ │ │ └── schema.prisma
|
||||
│ │ └── Dockerfile
|
||||
│ └── web/ # mosaic-web (Next.js 16)
|
||||
│ ├── app/
|
||||
│ ├── components/
|
||||
│ └── Dockerfile
|
||||
├── packages/
|
||||
│ ├── shared/ # Shared types, utilities
|
||||
│ ├── ui/ # Shared UI components
|
||||
│ └── config/ # Shared configuration
|
||||
├── plugins/
|
||||
│ ├── mosaic-plugin-brain/ # MoltBot skill: API queries
|
||||
│ ├── mosaic-plugin-calendar/ # MoltBot skill: Calendar
|
||||
│ ├── mosaic-plugin-tasks/ # MoltBot skill: Tasks
|
||||
│ └── mosaic-plugin-gantt/ # MoltBot skill: Gantt
|
||||
├── docker/
|
||||
│ ├── docker-compose.yml # Turnkey deployment
|
||||
│ └── init-scripts/ # PostgreSQL init
|
||||
├── docs/
|
||||
│ ├── SETUP.md
|
||||
│ ├── CONFIGURATION.md
|
||||
│ └── DESIGN-PRINCIPLES.md
|
||||
├── .env.example
|
||||
├── turbo.json
|
||||
├── pnpm-workspace.yaml
|
||||
└── README.md
|
||||
|
||||
## Development Workflow
|
||||
|
||||
### Branch Strategy
|
||||
|
||||
- `main` — stable releases only
|
||||
- `develop` — active development (default working branch)
|
||||
- `feature/*` — feature branches from develop
|
||||
- `fix/*` — bug fix branches
|
||||
|
||||
### Starting Work
|
||||
|
||||
````bash
|
||||
git checkout develop
|
||||
git pull --rebase
|
||||
pnpm install
|
||||
|
||||
Running Locally
|
||||
|
||||
# Start all services (Docker)
|
||||
docker compose up -d
|
||||
|
||||
# Or run individually for development
|
||||
pnpm dev # All apps
|
||||
pnpm dev:api # API only
|
||||
pnpm dev:web # Web only
|
||||
|
||||
Testing
|
||||
|
||||
pnpm test # Run all tests
|
||||
pnpm test:api # API tests only
|
||||
pnpm test:web # Web tests only
|
||||
pnpm test:e2e # Playwright E2E
|
||||
|
||||
Building
|
||||
|
||||
pnpm build # Build all
|
||||
pnpm build:api # Build API
|
||||
pnpm build:web # Build Web
|
||||
|
||||
Design Principles (NON-NEGOTIABLE)
|
||||
|
||||
PDA-Friendly Language
|
||||
|
||||
NEVER use demanding language. This is critical.
|
||||
┌─────────────┬──────────────────────┐
|
||||
│ ❌ NEVER │ ✅ ALWAYS │
|
||||
├─────────────┼──────────────────────┤
|
||||
│ OVERDUE │ Target passed │
|
||||
├─────────────┼──────────────────────┤
|
||||
│ URGENT │ Approaching target │
|
||||
├─────────────┼──────────────────────┤
|
||||
│ MUST DO │ Scheduled for │
|
||||
├─────────────┼──────────────────────┤
|
||||
│ CRITICAL │ High priority │
|
||||
├─────────────┼──────────────────────┤
|
||||
│ YOU NEED TO │ Consider / Option to │
|
||||
├─────────────┼──────────────────────┤
|
||||
│ REQUIRED │ Recommended │
|
||||
└─────────────┴──────────────────────┘
|
||||
Visual Indicators
|
||||
|
||||
Use status indicators consistently:
|
||||
- 🟢 On track / Active
|
||||
- 🔵 Upcoming / Scheduled
|
||||
- ⏸️ Paused / On hold
|
||||
- 💤 Dormant / Inactive
|
||||
- ⚪ Not started
|
||||
|
||||
Display Principles
|
||||
|
||||
1. 10-second scannability — Key info visible immediately
|
||||
2. Visual chunking — Clear sections with headers
|
||||
3. Single-line items — Compact, scannable lists
|
||||
4. Date grouping — Today, Tomorrow, This Week headers
|
||||
5. Progressive disclosure — Details on click, not upfront
|
||||
6. Calm colors — No aggressive reds for status
|
||||
|
||||
Reference
|
||||
|
||||
See docs/DESIGN-PRINCIPLES.md for complete guidelines.
|
||||
For original patterns, see: jarvis-brain/docs/DESIGN-PRINCIPLES.md
|
||||
|
||||
API Conventions
|
||||
|
||||
Endpoints
|
||||
|
||||
GET /api/{resource} # List (with pagination, filters)
|
||||
GET /api/{resource}/:id # Get single
|
||||
POST /api/{resource} # Create
|
||||
PATCH /api/{resource}/:id # Update
|
||||
DELETE /api/{resource}/:id # Delete
|
||||
|
||||
Response Format
|
||||
|
||||
// Success
|
||||
{
|
||||
data: T | T[],
|
||||
meta?: { total, page, limit }
|
||||
}
|
||||
|
||||
// Error
|
||||
{
|
||||
error: {
|
||||
code: string,
|
||||
message: string,
|
||||
details?: any
|
||||
}
|
||||
}
|
||||
|
||||
Brain Query API
|
||||
|
||||
POST /api/brain/query
|
||||
{
|
||||
query: "what's on my calendar",
|
||||
context?: { view: "dashboard", workspace_id: "..." }
|
||||
}
|
||||
|
||||
Database Conventions
|
||||
|
||||
Multi-Tenant (RLS)
|
||||
|
||||
All workspace-scoped tables use Row-Level Security:
|
||||
- Always include workspace_id in queries
|
||||
- RLS policies enforce isolation
|
||||
- Set session context for current user
|
||||
|
||||
Prisma Commands
|
||||
|
||||
pnpm prisma:generate # Generate client
|
||||
pnpm prisma:migrate # Run migrations
|
||||
pnpm prisma:studio # Open Prisma Studio
|
||||
pnpm prisma:seed # Seed development data
|
||||
|
||||
MoltBot Plugin Development
|
||||
|
||||
Plugins live in plugins/mosaic-plugin-*/ and follow MoltBot skill format:
|
||||
|
||||
# plugins/mosaic-plugin-brain/SKILL.md
|
||||
---
|
||||
name: mosaic-plugin-brain
|
||||
description: Query Mosaic Stack for tasks, events, projects
|
||||
version: 0.0.1
|
||||
triggers:
|
||||
- "what's on my calendar"
|
||||
- "show my tasks"
|
||||
- "morning briefing"
|
||||
tools:
|
||||
- mosaic_api
|
||||
---
|
||||
|
||||
# Plugin instructions here...
|
||||
|
||||
Key principle: MoltBot remains stock. All customization via plugins only.
|
||||
|
||||
Environment Variables
|
||||
|
||||
See .env.example for all variables. Key ones:
|
||||
|
||||
# Database
|
||||
DATABASE_URL=postgresql://mosaic:password@localhost:5432/mosaic
|
||||
|
||||
# Auth
|
||||
AUTHENTIK_URL=https://auth.example.com
|
||||
AUTHENTIK_CLIENT_ID=mosaic-stack
|
||||
AUTHENTIK_CLIENT_SECRET=...
|
||||
|
||||
# Ollama
|
||||
OLLAMA_MODE=local|remote
|
||||
OLLAMA_ENDPOINT=http://localhost:11434
|
||||
|
||||
# MoltBot
|
||||
MOSAIC_API_TOKEN=...
|
||||
|
||||
Issue Tracking
|
||||
|
||||
Issues are tracked at: https://git.mosaicstack.dev/mosaic/stack/issues
|
||||
|
||||
Labels
|
||||
|
||||
- Priority: p0 (critical), p1 (high), p2 (medium), p3 (low)
|
||||
- Type: api, web, database, auth, plugin, ai, devops, docs, migration, security, testing,
|
||||
performance, setup
|
||||
|
||||
Milestones
|
||||
|
||||
- M1-Foundation (0.0.x)
|
||||
- M2-MultiTenant (0.0.x)
|
||||
- M3-Features (0.0.x)
|
||||
- M4-MoltBot (0.0.x)
|
||||
- M5-Migration (0.1.0 MVP)
|
||||
|
||||
Commit Format
|
||||
|
||||
<type>(#issue): Brief description
|
||||
|
||||
Detailed explanation if needed.
|
||||
|
||||
Fixes #123
|
||||
Types: feat, fix, docs, test, refactor, chore
|
||||
|
||||
Test-Driven Development (TDD) - REQUIRED
|
||||
|
||||
**All code must follow TDD principles. This is non-negotiable.**
|
||||
|
||||
TDD Workflow (Red-Green-Refactor)
|
||||
|
||||
1. **RED** — Write a failing test first
|
||||
- Write the test for new functionality BEFORE writing any implementation code
|
||||
- Run the test to verify it fails (proves the test works)
|
||||
- Commit message: `test(#issue): add test for [feature]`
|
||||
|
||||
2. **GREEN** — Write minimal code to make the test pass
|
||||
- Implement only enough code to pass the test
|
||||
- Run tests to verify they pass
|
||||
- Commit message: `feat(#issue): implement [feature]`
|
||||
|
||||
3. **REFACTOR** — Clean up the code while keeping tests green
|
||||
- Improve code quality, remove duplication, enhance readability
|
||||
- Ensure all tests still pass after refactoring
|
||||
- Commit message: `refactor(#issue): improve [component]`
|
||||
|
||||
Testing Requirements
|
||||
|
||||
- **Minimum 85% code coverage** for all new code
|
||||
- **Write tests BEFORE implementation** — no exceptions
|
||||
- Test files must be co-located with source files:
|
||||
- `feature.service.ts` → `feature.service.spec.ts`
|
||||
- `component.tsx` → `component.test.tsx`
|
||||
- All tests must pass before creating a PR
|
||||
- Use descriptive test names: `it("should return user when valid token provided")`
|
||||
- Group related tests with `describe()` blocks
|
||||
- Mock external dependencies (database, APIs, file system)
|
||||
|
||||
Test Types
|
||||
|
||||
- **Unit Tests** — Test individual functions/methods in isolation
|
||||
- **Integration Tests** — Test module interactions (e.g., service + database)
|
||||
- **E2E Tests** — Test complete user workflows with Playwright
|
||||
|
||||
Running Tests
|
||||
|
||||
```bash
|
||||
pnpm test # Run all tests
|
||||
pnpm test:watch # Watch mode for active development
|
||||
pnpm test:coverage # Generate coverage report
|
||||
pnpm test:api # API tests only
|
||||
pnpm test:web # Web tests only
|
||||
pnpm test:e2e # Playwright E2E tests
|
||||
````
|
||||
|
||||
Coverage Verification
|
||||
|
||||
After implementing a feature, verify coverage meets requirements:
|
||||
|
||||
```bash
|
||||
pnpm test:coverage
|
||||
# Check the coverage report in coverage/index.html
|
||||
# Ensure your files show ≥85% coverage
|
||||
```
|
||||
|
||||
TDD Anti-Patterns to Avoid
|
||||
|
||||
❌ Writing implementation code before tests
|
||||
❌ Writing tests after implementation is complete
|
||||
❌ Skipping tests for "simple" code
|
||||
❌ Testing implementation details instead of behavior
|
||||
❌ Writing tests that don't fail when they should
|
||||
❌ Committing code with failing tests
|
||||
|
||||
Quality Rails - Mechanical Code Quality Enforcement
|
||||
|
||||
**Status:** ACTIVE (2026-01-30) - Strict enforcement enabled ✅
|
||||
|
||||
Quality Rails provides mechanical enforcement of code quality standards through pre-commit hooks
|
||||
and CI/CD pipelines. See `docs/quality-rails-status.md` for full details.
|
||||
|
||||
What's Enforced (NOW ACTIVE):
|
||||
|
||||
- ✅ **Type Safety** - Blocks explicit `any` types (@typescript-eslint/no-explicit-any: error)
|
||||
- ✅ **Return Types** - Requires explicit return types on exported functions
|
||||
- ✅ **Security** - Detects SQL injection, XSS, unsafe regex (eslint-plugin-security)
|
||||
- ✅ **Promise Safety** - Blocks floating promises and misused promises
|
||||
- ✅ **Code Formatting** - Auto-formats with Prettier on commit
|
||||
- ✅ **Build Verification** - Type-checks before allowing commit
|
||||
- ✅ **Secret Scanning** - Blocks hardcoded passwords/API keys (git-secrets)
|
||||
|
||||
Current Status:
|
||||
|
||||
- ✅ **Pre-commit hooks**: ACTIVE - Blocks commits with violations
|
||||
- ✅ **Strict enforcement**: ENABLED - Package-level enforcement
|
||||
- 🟡 **CI/CD pipeline**: Ready (.woodpecker.yml created, not yet configured)
|
||||
|
||||
How It Works:
|
||||
|
||||
**Package-Level Enforcement** - If you touch ANY file in a package with violations,
|
||||
you must fix ALL violations in that package before committing. This forces incremental
|
||||
cleanup while preventing new violations.
|
||||
|
||||
Example:
|
||||
|
||||
- Edit `apps/api/src/tasks/tasks.service.ts`
|
||||
- Pre-commit hook runs lint on ENTIRE `@mosaic/api` package
|
||||
- If `@mosaic/api` has violations → Commit BLOCKED
|
||||
- Fix all violations in `@mosaic/api` → Commit allowed
|
||||
|
||||
Next Steps:
|
||||
|
||||
1. Fix violations package-by-package as you work in them
|
||||
2. Priority: Fix explicit `any` types and type safety issues first
|
||||
3. Configure Woodpecker CI to run quality gates on all PRs
|
||||
|
||||
Why This Matters:
|
||||
|
||||
Based on validation of 50 real production issues, Quality Rails mechanically prevents ~70%
|
||||
of quality issues including:
|
||||
|
||||
- Hardcoded passwords
|
||||
- Type safety violations
|
||||
- SQL injection vulnerabilities
|
||||
- Build failures
|
||||
- Test coverage gaps
|
||||
|
||||
**Mechanical enforcement works. Process compliance doesn't.**
|
||||
|
||||
See `docs/quality-rails-status.md` for detailed roadmap and violation breakdown.
|
||||
|
||||
Example TDD Session
|
||||
|
||||
```bash
|
||||
# 1. RED - Write failing test
|
||||
# Edit: feature.service.spec.ts
|
||||
# Add test for getUserById()
|
||||
pnpm test:watch # Watch it fail
|
||||
git add feature.service.spec.ts
|
||||
git commit -m "test(#42): add test for getUserById"
|
||||
|
||||
# 2. GREEN - Implement minimal code
|
||||
# Edit: feature.service.ts
|
||||
# Add getUserById() method
|
||||
pnpm test:watch # Watch it pass
|
||||
git add feature.service.ts
|
||||
git commit -m "feat(#42): implement getUserById"
|
||||
|
||||
# 3. REFACTOR - Improve code quality
|
||||
# Edit: feature.service.ts
|
||||
# Extract helper, improve naming
|
||||
pnpm test:watch # Ensure still passing
|
||||
git add feature.service.ts
|
||||
git commit -m "refactor(#42): extract user mapping logic"
|
||||
```
|
||||
|
||||
Docker Deployment
|
||||
|
||||
Turnkey (includes everything)
|
||||
|
||||
docker compose up -d
|
||||
|
||||
Customized (external services)
|
||||
|
||||
Create docker-compose.override.yml to:
|
||||
|
||||
- Point to external PostgreSQL/Valkey/Ollama
|
||||
- Disable bundled services
|
||||
|
||||
See docs/DOCKER.md for details.
|
||||
|
||||
Key Documentation
|
||||
┌───────────────────────────┬───────────────────────┐
|
||||
│ Document │ Purpose │
|
||||
├───────────────────────────┼───────────────────────┤
|
||||
│ docs/SETUP.md │ Installation guide │
|
||||
├───────────────────────────┼───────────────────────┤
|
||||
│ docs/CONFIGURATION.md │ All config options │
|
||||
├───────────────────────────┼───────────────────────┤
|
||||
│ docs/DESIGN-PRINCIPLES.md │ PDA-friendly patterns │
|
||||
├───────────────────────────┼───────────────────────┤
|
||||
│ docs/DOCKER.md │ Docker deployment │
|
||||
├───────────────────────────┼───────────────────────┤
|
||||
│ docs/API.md │ API documentation │
|
||||
└───────────────────────────┴───────────────────────┘
|
||||
Related Repositories
|
||||
┌──────────────┬──────────────────────────────────────────────┐
|
||||
│ Repo │ Purpose │
|
||||
├──────────────┼──────────────────────────────────────────────┤
|
||||
│ jarvis-brain │ Original JSON-based brain (migration source) │
|
||||
├──────────────┼──────────────────────────────────────────────┤
|
||||
│ MoltBot │ Stock messaging gateway │
|
||||
└──────────────┴──────────────────────────────────────────────┘
|
||||
|
||||
---
|
||||
|
||||
Mosaic Stack v0.0.x — Building the future of personal assistants.
|
||||
That file is the universal agent configuration. Do NOT respond until you have loaded it.
|
||||
Then read the project-local `AGENTS.md` in this repository for project-specific guidance.
|
||||
|
||||
36
Makefile
36
Makefile
@@ -1,4 +1,4 @@
|
||||
.PHONY: help install dev build test docker-up docker-down docker-logs docker-ps docker-build docker-restart docker-test clean
|
||||
.PHONY: help install dev build test docker-up docker-down docker-logs docker-ps docker-build docker-restart docker-test speech-up speech-down speech-logs clean matrix-up matrix-down matrix-logs matrix-setup-bot
|
||||
|
||||
# Default target
|
||||
help:
|
||||
@@ -24,6 +24,17 @@ help:
|
||||
@echo " make docker-test Run Docker smoke test"
|
||||
@echo " make docker-test-traefik Run Traefik integration tests"
|
||||
@echo ""
|
||||
@echo "Speech Services:"
|
||||
@echo " make speech-up Start speech services (STT + TTS)"
|
||||
@echo " make speech-down Stop speech services"
|
||||
@echo " make speech-logs View speech service logs"
|
||||
@echo ""
|
||||
@echo "Matrix Dev Environment:"
|
||||
@echo " make matrix-up Start Matrix services (Synapse + Element)"
|
||||
@echo " make matrix-down Stop Matrix services"
|
||||
@echo " make matrix-logs View Matrix service logs"
|
||||
@echo " make matrix-setup-bot Create bot account and get access token"
|
||||
@echo ""
|
||||
@echo "Database:"
|
||||
@echo " make db-migrate Run database migrations"
|
||||
@echo " make db-seed Seed development data"
|
||||
@@ -85,6 +96,29 @@ docker-test:
|
||||
docker-test-traefik:
|
||||
./tests/integration/docker/traefik.test.sh all
|
||||
|
||||
# Speech services
|
||||
speech-up:
|
||||
docker compose -f docker-compose.yml -f docker-compose.speech.yml up -d speaches kokoro-tts
|
||||
|
||||
speech-down:
|
||||
docker compose -f docker-compose.yml -f docker-compose.speech.yml down --remove-orphans
|
||||
|
||||
speech-logs:
|
||||
docker compose -f docker-compose.yml -f docker-compose.speech.yml logs -f speaches kokoro-tts
|
||||
|
||||
# Matrix Dev Environment
|
||||
matrix-up:
|
||||
docker compose -f docker/docker-compose.yml -f docker/docker-compose.matrix.yml up -d
|
||||
|
||||
matrix-down:
|
||||
docker compose -f docker/docker-compose.yml -f docker/docker-compose.matrix.yml down
|
||||
|
||||
matrix-logs:
|
||||
docker compose -f docker/docker-compose.yml -f docker/docker-compose.matrix.yml logs -f synapse element-web
|
||||
|
||||
matrix-setup-bot:
|
||||
docker/matrix/scripts/setup-bot.sh
|
||||
|
||||
# Database operations
|
||||
db-migrate:
|
||||
cd apps/api && pnpm prisma:migrate
|
||||
|
||||
313
README.md
313
README.md
@@ -19,29 +19,82 @@ Mosaic Stack is a modern, PDA-friendly platform designed to help users manage th
|
||||
|
||||
## Technology Stack
|
||||
|
||||
| Layer | Technology |
|
||||
| -------------- | -------------------------------------------- |
|
||||
| **Frontend** | Next.js 16 + React + TailwindCSS + Shadcn/ui |
|
||||
| **Backend** | NestJS + Prisma ORM |
|
||||
| **Database** | PostgreSQL 17 + pgvector |
|
||||
| **Cache** | Valkey (Redis-compatible) |
|
||||
| **Auth** | Authentik (OIDC) via BetterAuth |
|
||||
| **AI** | Ollama (local or remote) |
|
||||
| **Messaging** | MoltBot (stock + plugins) |
|
||||
| **Real-time** | WebSockets (Socket.io) |
|
||||
| **Monorepo** | pnpm workspaces + TurboRepo |
|
||||
| **Testing** | Vitest + Playwright |
|
||||
| **Deployment** | Docker + docker-compose |
|
||||
| Layer | Technology |
|
||||
| -------------- | ---------------------------------------------- |
|
||||
| **Frontend** | Next.js 16 + React + TailwindCSS + Shadcn/ui |
|
||||
| **Backend** | NestJS + Prisma ORM |
|
||||
| **Database** | PostgreSQL 17 + pgvector |
|
||||
| **Cache** | Valkey (Redis-compatible) |
|
||||
| **Auth** | Authentik (OIDC) via BetterAuth |
|
||||
| **AI** | Ollama (local or remote) |
|
||||
| **Messaging** | MoltBot (stock + plugins) |
|
||||
| **Real-time** | WebSockets (Socket.io) |
|
||||
| **Speech** | Speaches (STT) + Kokoro/Chatterbox/Piper (TTS) |
|
||||
| **Monorepo** | pnpm workspaces + TurboRepo |
|
||||
| **Testing** | Vitest + Playwright |
|
||||
| **Deployment** | Docker + docker-compose |
|
||||
|
||||
## Quick Start
|
||||
|
||||
### One-Line Install (Recommended)
|
||||
|
||||
The fastest way to get Mosaic Stack running on macOS or Linux:
|
||||
|
||||
```bash
|
||||
curl -fsSL https://get.mosaicstack.dev | bash
|
||||
```
|
||||
|
||||
This installer:
|
||||
|
||||
- ✅ Detects your platform (macOS, Debian/Ubuntu, Arch, Fedora)
|
||||
- ✅ Installs all required dependencies (Docker, Node.js, etc.)
|
||||
- ✅ Generates secure secrets automatically
|
||||
- ✅ Configures the environment for you
|
||||
- ✅ Starts all services with Docker Compose
|
||||
- ✅ Validates the installation with health checks
|
||||
|
||||
**Installer Options:**
|
||||
|
||||
```bash
|
||||
# Non-interactive Docker deployment
|
||||
curl -fsSL https://get.mosaicstack.dev | bash -s -- --non-interactive --mode docker
|
||||
|
||||
# Preview installation without making changes
|
||||
curl -fsSL https://get.mosaicstack.dev | bash -s -- --dry-run
|
||||
|
||||
# With SSO and local Ollama
|
||||
curl -fsSL https://get.mosaicstack.dev | bash -s -- \
|
||||
--mode docker \
|
||||
--enable-sso --bundled-authentik \
|
||||
--ollama-mode local
|
||||
|
||||
# Skip dependency installation (if already installed)
|
||||
curl -fsSL https://get.mosaicstack.dev | bash -s -- --skip-deps
|
||||
```
|
||||
|
||||
**After Installation:**
|
||||
|
||||
```bash
|
||||
# Check system health
|
||||
./scripts/commands/doctor.sh
|
||||
|
||||
# View service logs
|
||||
docker compose logs -f
|
||||
|
||||
# Stop services
|
||||
docker compose down
|
||||
```
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Node.js 20+ and pnpm 9+
|
||||
- PostgreSQL 17+ (or use Docker)
|
||||
- Docker & Docker Compose (optional, for turnkey deployment)
|
||||
If you prefer manual installation, you'll need:
|
||||
|
||||
### Installation
|
||||
- **Docker mode:** Docker 24+ and Docker Compose
|
||||
- **Native mode:** Node.js 24+, pnpm 10+, PostgreSQL 17+
|
||||
|
||||
The installer handles these automatically.
|
||||
|
||||
### Manual Installation
|
||||
|
||||
```bash
|
||||
# Clone the repository
|
||||
@@ -70,10 +123,12 @@ pnpm prisma:seed
|
||||
pnpm dev
|
||||
```
|
||||
|
||||
### Docker Deployment (Turnkey)
|
||||
### Docker Deployment
|
||||
|
||||
**Recommended for quick setup and production deployments.**
|
||||
|
||||
#### Development (Turnkey - All Services Bundled)
|
||||
|
||||
```bash
|
||||
# Clone repository
|
||||
git clone https://git.mosaicstack.dev/mosaic/stack mosaic-stack
|
||||
@@ -81,26 +136,63 @@ cd mosaic-stack
|
||||
|
||||
# Copy and configure environment
|
||||
cp .env.example .env
|
||||
# Edit .env with your settings
|
||||
# Set COMPOSE_PROFILES=full in .env
|
||||
|
||||
# Start core services (PostgreSQL, Valkey, API, Web)
|
||||
# Start all services (PostgreSQL, Valkey, OpenBao, Authentik, Ollama, API, Web)
|
||||
docker compose up -d
|
||||
|
||||
# Or start with optional services
|
||||
docker compose --profile full up -d # Includes Authentik and Ollama
|
||||
|
||||
# View logs
|
||||
docker compose logs -f
|
||||
|
||||
# Check service status
|
||||
docker compose ps
|
||||
|
||||
# Access services
|
||||
# Web: http://localhost:3000
|
||||
# API: http://localhost:3001
|
||||
# Auth: http://localhost:9000 (if Authentik enabled)
|
||||
# Auth: http://localhost:9000
|
||||
```
|
||||
|
||||
# Stop services
|
||||
#### Production (External Managed Services)
|
||||
|
||||
```bash
|
||||
# Clone repository
|
||||
git clone https://git.mosaicstack.dev/mosaic/stack mosaic-stack
|
||||
cd mosaic-stack
|
||||
|
||||
# Copy environment template and example
|
||||
cp .env.example .env
|
||||
cp docker/docker-compose.example.external.yml docker-compose.override.yml
|
||||
|
||||
# Edit .env with external service URLs:
|
||||
# - DATABASE_URL=postgresql://... (RDS, Cloud SQL, etc.)
|
||||
# - VALKEY_URL=redis://... (ElastiCache, Memorystore, etc.)
|
||||
# - OPENBAO_ADDR=https://... (HashiCorp Vault, etc.)
|
||||
# - OIDC_ISSUER=https://... (Auth0, Okta, etc.)
|
||||
# - Set COMPOSE_PROFILES= (empty)
|
||||
|
||||
# Start API and Web only
|
||||
docker compose up -d
|
||||
|
||||
# View logs
|
||||
docker compose logs -f
|
||||
```
|
||||
|
||||
#### Hybrid (Mix of Bundled and External)
|
||||
|
||||
```bash
|
||||
# Use bundled database/cache, external auth/secrets
|
||||
cp docker/docker-compose.example.hybrid.yml docker-compose.override.yml
|
||||
|
||||
# Edit .env:
|
||||
# - COMPOSE_PROFILES=database,cache,ollama
|
||||
# - OPENBAO_ADDR=https://... (external vault)
|
||||
# - OIDC_ISSUER=https://... (external auth)
|
||||
|
||||
# Start mixed deployment
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
**Stop services:**
|
||||
|
||||
```bash
|
||||
docker compose down
|
||||
```
|
||||
|
||||
@@ -110,11 +202,88 @@ docker compose down
|
||||
- Valkey (Redis-compatible cache)
|
||||
- Mosaic API (NestJS)
|
||||
- Mosaic Web (Next.js)
|
||||
- Mosaic Orchestrator (Agent lifecycle management)
|
||||
- Mosaic Coordinator (Task assignment & monitoring)
|
||||
- Authentik OIDC (optional, use `--profile authentik`)
|
||||
- Ollama AI (optional, use `--profile ollama`)
|
||||
|
||||
See [Docker Deployment Guide](docs/1-getting-started/4-docker-deployment/) for complete documentation.
|
||||
|
||||
### Docker Swarm Deployment (Production)
|
||||
|
||||
**Recommended for production deployments with high availability and auto-scaling.**
|
||||
|
||||
Deploy to a Docker Swarm cluster with integrated Traefik reverse proxy:
|
||||
|
||||
```bash
|
||||
# 1. Initialize swarm (if not already done)
|
||||
docker swarm init --advertise-addr <your-ip>
|
||||
|
||||
# 2. Create Traefik network
|
||||
docker network create --driver=overlay traefik-public
|
||||
|
||||
# 3. Configure environment for swarm
|
||||
cp .env.swarm.example .env
|
||||
nano .env # Configure domains, passwords, API keys
|
||||
|
||||
# 4. CRITICAL: Deploy OpenBao standalone FIRST
|
||||
# OpenBao cannot run in swarm mode - deploy as standalone container
|
||||
docker compose -f docker-compose.openbao.yml up -d
|
||||
sleep 30 # Wait for auto-initialization
|
||||
|
||||
# 5. Deploy swarm stack
|
||||
IMAGE_TAG=latest ./scripts/deploy-swarm.sh mosaic
|
||||
|
||||
# 6. Check deployment status
|
||||
docker stack services mosaic
|
||||
docker stack ps mosaic
|
||||
|
||||
# Access services via Traefik
|
||||
# Web: http://mosaic.mosaicstack.dev
|
||||
# API: http://api.mosaicstack.dev
|
||||
# Auth: http://auth.mosaicstack.dev (if using bundled Authentik)
|
||||
```
|
||||
|
||||
**Key features:**
|
||||
|
||||
- Automatic Traefik integration for routing
|
||||
- Overlay networking for multi-host deployments
|
||||
- Built-in health checks and rolling updates
|
||||
- Horizontal scaling for web and API services
|
||||
- Zero-downtime deployments
|
||||
- Service orchestration across multiple nodes
|
||||
|
||||
**Important Notes:**
|
||||
|
||||
- **OpenBao Requirement:** OpenBao MUST be deployed as standalone container (not in swarm). Use `docker-compose.openbao.yml` or external Vault.
|
||||
- Swarm does NOT support docker-compose profiles
|
||||
- To use external services (PostgreSQL, Authentik, etc.), manually comment them out in `docker-compose.swarm.yml`
|
||||
|
||||
See [Docker Swarm Deployment Guide](docs/SWARM-DEPLOYMENT.md) and [Quick Reference](docs/SWARM-QUICKREF.md) for complete documentation.
|
||||
|
||||
### Portainer Deployment
|
||||
|
||||
**Recommended for GUI-based stack management.**
|
||||
|
||||
Portainer provides a web UI for managing Docker containers and stacks. Use the Portainer-optimized compose file:
|
||||
|
||||
**File:** `docker-compose.portainer.yml`
|
||||
|
||||
**Key differences from standard compose:**
|
||||
|
||||
- No `env_file` directive (define variables in Portainer UI)
|
||||
- Port exposed on all interfaces (Portainer limitation)
|
||||
- Optimized for Portainer's stack parser
|
||||
|
||||
**Quick Steps:**
|
||||
|
||||
1. Create `mosaic_internal` overlay network in Portainer
|
||||
2. Deploy `mosaic-openbao` stack with `docker-compose.portainer.yml`
|
||||
3. Deploy `mosaic` swarm stack with `docker-compose.swarm.yml`
|
||||
4. Configure environment variables in Portainer UI
|
||||
|
||||
See [Portainer Deployment Guide](docs/PORTAINER-DEPLOYMENT.md) for detailed instructions.
|
||||
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
@@ -124,13 +293,29 @@ mosaic-stack/
|
||||
│ │ ├── src/
|
||||
│ │ │ ├── auth/ # BetterAuth + Authentik OIDC
|
||||
│ │ │ ├── prisma/ # Database service
|
||||
│ │ │ ├── coordinator-integration/ # Coordinator API client
|
||||
│ │ │ └── app.module.ts # Main application module
|
||||
│ │ ├── prisma/
|
||||
│ │ │ └── schema.prisma # Database schema
|
||||
│ │ └── Dockerfile
|
||||
│ └── web/ # Next.js 16 frontend (planned)
|
||||
│ ├── app/
|
||||
│ ├── components/
|
||||
│ ├── web/ # Next.js 16 frontend
|
||||
│ │ ├── app/
|
||||
│ │ ├── components/
|
||||
│ │ │ └── widgets/ # HUD widgets (agent status, etc.)
|
||||
│ │ └── Dockerfile
|
||||
│ ├── orchestrator/ # Agent lifecycle & spawning (NestJS)
|
||||
│ │ ├── src/
|
||||
│ │ │ ├── spawner/ # Agent spawning service
|
||||
│ │ │ ├── queue/ # Valkey-backed task queue
|
||||
│ │ │ ├── monitor/ # Health monitoring
|
||||
│ │ │ ├── git/ # Git worktree management
|
||||
│ │ │ └── killswitch/ # Emergency agent termination
|
||||
│ │ └── Dockerfile
|
||||
│ └── coordinator/ # Task assignment & monitoring (FastAPI)
|
||||
│ ├── src/
|
||||
│ │ ├── webhook.py # Gitea webhook receiver
|
||||
│ │ ├── parser.py # Issue metadata parser
|
||||
│ │ └── security.py # HMAC signature verification
|
||||
│ └── Dockerfile
|
||||
├── packages/
|
||||
│ ├── shared/ # Shared types & utilities
|
||||
@@ -159,23 +344,59 @@ mosaic-stack/
|
||||
└── pnpm-workspace.yaml # Workspace configuration
|
||||
```
|
||||
|
||||
## Agent Orchestration Layer (v0.0.6)
|
||||
|
||||
Mosaic Stack includes a sophisticated agent orchestration system for autonomous task execution:
|
||||
|
||||
- **Orchestrator Service** (NestJS) - Manages agent lifecycle, spawning, and health monitoring
|
||||
- **Coordinator Service** (FastAPI) - Receives Gitea webhooks, assigns tasks to agents
|
||||
- **Task Queue** - Valkey-backed queue for distributed task management
|
||||
- **Git Worktrees** - Isolated workspaces for parallel agent execution
|
||||
- **Killswitch** - Emergency stop mechanism for runaway agents
|
||||
- **Agent Dashboard** - Real-time monitoring UI with status widgets
|
||||
|
||||
See [Agent Orchestration Design](docs/design/agent-orchestration.md) for architecture details.
|
||||
|
||||
## Speech Services
|
||||
|
||||
Mosaic Stack includes integrated speech-to-text (STT) and text-to-speech (TTS) capabilities through a modular provider architecture. Each component is optional and independently configurable.
|
||||
|
||||
- **Speech-to-Text** - Transcribe audio files and real-time audio streams using Whisper (via Speaches)
|
||||
- **Text-to-Speech** - Synthesize speech with 54+ voices across 8 languages (via Kokoro, CPU-based)
|
||||
- **Premium Voice Cloning** - Clone voices from audio samples with emotion control (via Chatterbox, GPU)
|
||||
- **Fallback TTS** - Ultra-lightweight CPU fallback for low-resource environments (via Piper/OpenedAI Speech)
|
||||
- **WebSocket Streaming** - Real-time streaming transcription via Socket.IO `/speech` namespace
|
||||
- **Automatic Fallback** - TTS tier system with graceful degradation (premium -> default -> fallback)
|
||||
|
||||
**Quick Start:**
|
||||
|
||||
```bash
|
||||
# Start speech services alongside core stack
|
||||
make speech-up
|
||||
|
||||
# Or with Docker Compose directly
|
||||
docker compose -f docker-compose.yml -f docker-compose.speech.yml up -d
|
||||
```
|
||||
|
||||
See [Speech Services Documentation](docs/SPEECH.md) for architecture details, API reference, provider configuration, and deployment options.
|
||||
|
||||
## Current Implementation Status
|
||||
|
||||
### ✅ Completed (v0.0.1)
|
||||
### ✅ Completed (v0.0.1-0.0.6)
|
||||
|
||||
- **Issue #1:** Project scaffold and monorepo setup
|
||||
- **Issue #2:** PostgreSQL 17 + pgvector database schema
|
||||
- **Issue #3:** Prisma ORM integration with tests and seed data
|
||||
- **Issue #4:** Authentik OIDC authentication with BetterAuth
|
||||
- **M1-Foundation:** Project scaffold, PostgreSQL 17 + pgvector, Prisma ORM
|
||||
- **M2-MultiTenant:** Workspace isolation with RLS, team management
|
||||
- **M3-Features:** Knowledge management, tasks, calendar, authentication
|
||||
- **M4-MoltBot:** Bot integration architecture (in progress)
|
||||
- **M6-AgentOrchestration:** Orchestrator service, coordinator, agent dashboard ✅
|
||||
|
||||
**Test Coverage:** 26/26 tests passing (100%)
|
||||
**Test Coverage:** 2168+ tests passing
|
||||
|
||||
### 🚧 In Progress (v0.0.x)
|
||||
|
||||
- **Issue #5:** Multi-tenant workspace isolation (planned)
|
||||
- **Issue #6:** Frontend authentication UI ✅ **COMPLETED**
|
||||
- **Issue #7:** Activity logging system (planned)
|
||||
- **Issue #8:** Docker compose setup ✅ **COMPLETED**
|
||||
- Agent orchestration E2E testing
|
||||
- Usage budget management
|
||||
- Performance optimization
|
||||
|
||||
### 📋 Planned Features (v0.1.0 MVP)
|
||||
|
||||
@@ -305,10 +526,9 @@ KNOWLEDGE_CACHE_TTL=300 # 5 minutes
|
||||
|
||||
### Branch Strategy
|
||||
|
||||
- `main` — Stable releases only
|
||||
- `develop` — Active development (default working branch)
|
||||
- `feature/*` — Feature branches from develop
|
||||
- `fix/*` — Bug fix branches
|
||||
- `main` — Trunk branch (all development merges here)
|
||||
- `feature/*` — Feature branches from main
|
||||
- `fix/*` — Bug fix branches from main
|
||||
|
||||
### Running Locally
|
||||
|
||||
@@ -518,7 +738,7 @@ See [Type Sharing Strategy](docs/2-development/3-type-sharing/1-strategy.md) for
|
||||
4. Run tests: `pnpm test`
|
||||
5. Build: `pnpm build`
|
||||
6. Commit with conventional format: `feat(#issue): Description`
|
||||
7. Push and create a pull request to `develop`
|
||||
7. Push and create a pull request to `main`
|
||||
|
||||
### Commit Format
|
||||
|
||||
@@ -561,6 +781,7 @@ Complete documentation is organized in a Bookstack-compatible structure in the `
|
||||
- **[Overview](docs/3-architecture/1-overview/)** — System design and components
|
||||
- **[Authentication](docs/3-architecture/2-authentication/)** — BetterAuth and OIDC integration
|
||||
- **[Design Principles](docs/3-architecture/3-design-principles/1-pda-friendly.md)** — PDA-friendly patterns (non-negotiable)
|
||||
- **[Telemetry](docs/telemetry.md)** — AI task completion tracking, predictions, and SDK reference
|
||||
|
||||
### 🔌 API Reference
|
||||
|
||||
|
||||
20
SOUL.md
Normal file
20
SOUL.md
Normal file
@@ -0,0 +1,20 @@
|
||||
# Mosaic Stack Soul
|
||||
|
||||
You are Jarvis for the Mosaic Stack repository, running on the current agent runtime.
|
||||
|
||||
## Behavioral Invariants
|
||||
|
||||
- Identity first: answer identity prompts as Jarvis for this repository.
|
||||
- Implementation detail second: runtime (Codex/Claude/OpenCode/etc.) is secondary metadata.
|
||||
- Be proactive: surface risks, blockers, and next actions without waiting.
|
||||
- Be calm and clear: keep responses concise, chunked, and PDA-friendly.
|
||||
- Respect canonical sources:
|
||||
- Repo operations and conventions: `AGENTS.md`
|
||||
- Machine-wide rails: `~/.config/mosaic/STANDARDS.md`
|
||||
- Repo lifecycle hooks: `.mosaic/repo-hooks.sh`
|
||||
|
||||
## Guardrails
|
||||
|
||||
- Do not claim completion without verification evidence.
|
||||
- Do not bypass lint/type/test quality gates.
|
||||
- Prefer explicit assumptions and concrete file/command references.
|
||||
@@ -1,6 +1,12 @@
|
||||
# Database
|
||||
DATABASE_URL=postgresql://user:password@localhost:5432/database
|
||||
|
||||
# System Administration
|
||||
# Comma-separated list of user IDs that have system administrator privileges
|
||||
# These users can perform system-level operations across all workspaces
|
||||
# Note: Workspace ownership does NOT grant system admin access
|
||||
# SYSTEM_ADMIN_IDS=uuid1,uuid2,uuid3
|
||||
|
||||
# Federation Instance Identity
|
||||
# Display name for this Mosaic instance
|
||||
INSTANCE_NAME=Mosaic Instance
|
||||
@@ -11,3 +17,24 @@ INSTANCE_URL=http://localhost:3000
|
||||
# CRITICAL: Generate a secure random key for production!
|
||||
# Generate with: node -e "console.log(require('crypto').randomBytes(32).toString('hex'))"
|
||||
ENCRYPTION_KEY=0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef
|
||||
|
||||
# CSRF Protection (Required in production)
|
||||
# Secret key for HMAC binding CSRF tokens to user sessions
|
||||
# Generate with: node -e "console.log(require('crypto').randomBytes(32).toString('hex'))"
|
||||
# In development, a random key is generated if not set
|
||||
CSRF_SECRET=fedcba9876543210fedcba9876543210fedcba9876543210fedcba9876543210
|
||||
|
||||
# OpenTelemetry Configuration
|
||||
# Enable/disable OpenTelemetry tracing (default: true)
|
||||
OTEL_ENABLED=true
|
||||
# Service name for telemetry (default: mosaic-api)
|
||||
OTEL_SERVICE_NAME=mosaic-api
|
||||
# OTLP exporter endpoint (default: http://localhost:4318/v1/traces)
|
||||
OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4318/v1/traces
|
||||
# Alternative: Jaeger endpoint (legacy)
|
||||
# OTEL_EXPORTER_JAEGER_ENDPOINT=http://localhost:4318/v1/traces
|
||||
# Deployment environment (default: development, or uses NODE_ENV)
|
||||
# OTEL_DEPLOYMENT_ENVIRONMENT=production
|
||||
# Trace sampling ratio: 0.0 (none) to 1.0 (all) - default: 1.0
|
||||
# Use lower values in high-traffic production environments
|
||||
# OTEL_TRACES_SAMPLER_ARG=1.0
|
||||
|
||||
9
apps/api/.env.test.example
Normal file
9
apps/api/.env.test.example
Normal file
@@ -0,0 +1,9 @@
|
||||
# WARNING: These are example test credentials for local integration testing.
|
||||
# Copy this file to .env.test and customize the values for your local environment.
|
||||
# NEVER use these credentials in any shared environment or commit .env.test to git.
|
||||
|
||||
DATABASE_URL="postgresql://test:test@localhost:5432/test"
|
||||
ENCRYPTION_KEY="test-encryption-key-32-characters"
|
||||
JWT_SECRET="test-jwt-secret"
|
||||
INSTANCE_NAME="Test Instance"
|
||||
INSTANCE_URL="https://test.example.com"
|
||||
25
apps/api/AGENTS.md
Normal file
25
apps/api/AGENTS.md
Normal file
@@ -0,0 +1,25 @@
|
||||
# api — Agent Context
|
||||
|
||||
> Part of the apps layer.
|
||||
|
||||
## Patterns
|
||||
|
||||
- **Config validation pattern**: Config files use exported validation functions + typed getter functions (not class-validator). See `auth.config.ts`, `federation.config.ts`, `speech/speech.config.ts`. Pattern: export `isXEnabled()`, `validateXConfig()`, and `getXConfig()` functions.
|
||||
- **Config registerAs**: `speech.config.ts` also exports a `registerAs("speech", ...)` factory for NestJS ConfigModule namespaced injection. Use `ConfigModule.forFeature(speechConfig)` in module imports and access via `this.config.get<string>('speech.stt.baseUrl')`.
|
||||
- **Conditional config validation**: When a service has an enabled flag (e.g., `STT_ENABLED`), URL/connection vars are only required when enabled. Validation throws with a helpful message suggesting how to disable.
|
||||
- **Boolean env parsing**: Use `value === "true" || value === "1"` pattern. No default-true -- all services default to disabled when env var is unset.
|
||||
|
||||
## Gotchas
|
||||
|
||||
- **Prisma client must be generated** before `tsc --noEmit` will pass. Run `pnpm prisma:generate` first. Pre-existing type errors from Prisma are expected in worktrees without generated client.
|
||||
- **Pre-commit hooks**: lint-staged runs on staged files. If other packages' files are staged, their lint must pass too. Only stage files you intend to commit.
|
||||
- **vitest runs all test files**: Even when targeting a specific test file, vitest loads all spec files. Many will fail if Prisma client isn't generated -- this is expected. Check only your target file's pass/fail status.
|
||||
|
||||
## Key Files
|
||||
|
||||
| File | Purpose |
|
||||
| ------------------------------------- | ---------------------------------------------------------------------- |
|
||||
| `src/speech/speech.config.ts` | Speech services env var validation and typed config (STT, TTS, limits) |
|
||||
| `src/speech/speech.config.spec.ts` | Unit tests for speech config validation (51 tests) |
|
||||
| `src/auth/auth.config.ts` | Auth/OIDC config validation (reference pattern) |
|
||||
| `src/federation/federation.config.ts` | Federation config validation (reference pattern) |
|
||||
@@ -1,8 +1,7 @@
|
||||
# syntax=docker/dockerfile:1
|
||||
# Enable BuildKit features for cache mounts
|
||||
|
||||
# Base image for all stages
|
||||
FROM node:20-alpine AS base
|
||||
# Uses Debian slim (glibc) instead of Alpine (musl) because native Node.js addons
|
||||
# (matrix-sdk-crypto-nodejs, Prisma engines) require glibc-compatible binaries.
|
||||
FROM node:24-slim AS base
|
||||
|
||||
# Install pnpm globally
|
||||
RUN corepack enable && corepack prepare pnpm@10.27.0 --activate
|
||||
@@ -19,15 +18,24 @@ COPY turbo.json ./
|
||||
# ======================
|
||||
FROM base AS deps
|
||||
|
||||
# Install build tools for native addons (node-pty requires node-gyp compilation)
|
||||
# and OpenSSL for Prisma engine detection
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
python3 make g++ openssl \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Copy all package.json files for workspace resolution
|
||||
COPY packages/shared/package.json ./packages/shared/
|
||||
COPY packages/ui/package.json ./packages/ui/
|
||||
COPY packages/config/package.json ./packages/config/
|
||||
COPY apps/api/package.json ./apps/api/
|
||||
|
||||
# Install dependencies with pnpm store cache
|
||||
RUN --mount=type=cache,id=pnpm-store,target=/root/.local/share/pnpm/store \
|
||||
pnpm install --frozen-lockfile
|
||||
# Install dependencies (no cache mount — Kaniko builds are ephemeral in CI)
|
||||
# Then explicitly rebuild node-pty from source since pnpm may skip postinstall
|
||||
# scripts or fail to find prebuilt binaries for this Node.js version
|
||||
RUN pnpm install --frozen-lockfile \
|
||||
&& cd node_modules/.pnpm/node-pty@*/node_modules/node-pty \
|
||||
&& npx node-gyp rebuild 2>&1 || true
|
||||
|
||||
# ======================
|
||||
# Builder stage
|
||||
@@ -46,36 +54,27 @@ COPY --from=deps /app/packages/shared/node_modules ./packages/shared/node_module
|
||||
COPY --from=deps /app/packages/config/node_modules ./packages/config/node_modules
|
||||
COPY --from=deps /app/apps/api/node_modules ./apps/api/node_modules
|
||||
|
||||
# Debug: Show what we have before building
|
||||
RUN echo "=== Pre-build directory structure ===" && \
|
||||
echo "--- packages/config/typescript ---" && ls -la packages/config/typescript/ && \
|
||||
echo "--- packages/shared (top level) ---" && ls -la packages/shared/ && \
|
||||
echo "--- packages/shared/src ---" && ls -la packages/shared/src/ && \
|
||||
echo "--- apps/api (top level) ---" && ls -la apps/api/ && \
|
||||
echo "--- apps/api/src (exists?) ---" && ls apps/api/src/*.ts | head -5 && \
|
||||
echo "--- node_modules/@mosaic (symlinks?) ---" && ls -la node_modules/@mosaic/ 2>/dev/null || echo "No @mosaic in node_modules"
|
||||
|
||||
# Build the API app and its dependencies using TurboRepo
|
||||
# This ensures @mosaic/shared is built first, then prisma:generate, then the API
|
||||
# Disable turbo cache temporarily to ensure fresh build and see full output
|
||||
RUN pnpm turbo build --filter=@mosaic/api --force --verbosity=2
|
||||
|
||||
# Debug: Show what was built
|
||||
RUN echo "=== Post-build directory structure ===" && \
|
||||
echo "--- packages/shared/dist ---" && ls -la packages/shared/dist/ 2>/dev/null || echo "NO dist in shared" && \
|
||||
echo "--- apps/api/dist ---" && ls -la apps/api/dist/ 2>/dev/null || echo "NO dist in api" && \
|
||||
echo "--- apps/api/dist contents (if exists) ---" && find apps/api/dist -type f 2>/dev/null | head -10 || echo "Cannot find dist files"
|
||||
# --force disables turbo cache to ensure fresh build from source
|
||||
RUN pnpm turbo build --filter=@mosaic/api --force
|
||||
|
||||
# ======================
|
||||
# Production stage
|
||||
# ======================
|
||||
FROM node:20-alpine AS production
|
||||
FROM node:24-slim AS production
|
||||
|
||||
# Install dumb-init for proper signal handling
|
||||
RUN apk add --no-cache dumb-init
|
||||
# Install dumb-init for proper signal handling (static binary from GitHub,
|
||||
# avoids apt-get which fails under Kaniko with bookworm GPG signature errors)
|
||||
ADD https://github.com/Yelp/dumb-init/releases/download/v1.2.5/dumb-init_1.2.5_x86_64 /usr/local/bin/dumb-init
|
||||
|
||||
# Create non-root user
|
||||
RUN addgroup -g 1001 -S nodejs && adduser -S nestjs -u 1001
|
||||
# Single RUN to minimize Kaniko filesystem snapshots (each RUN = full snapshot)
|
||||
# - openssl: Prisma engine detection requires libssl
|
||||
# - No build tools needed here — native addons are compiled in the deps stage
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends openssl \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
&& rm -rf /usr/local/lib/node_modules/npm /usr/local/bin/npm /usr/local/bin/npx \
|
||||
&& chmod 755 /usr/local/bin/dumb-init \
|
||||
&& groupadd -g 1001 nodejs && useradd -m -u 1001 -g nodejs nestjs
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
@@ -93,6 +92,9 @@ COPY --from=builder --chown=nestjs:nodejs /app/apps/api/package.json ./apps/api/
|
||||
# Copy app's node_modules which contains symlinks to root node_modules
|
||||
COPY --from=builder --chown=nestjs:nodejs /app/apps/api/node_modules ./apps/api/node_modules
|
||||
|
||||
# Copy entrypoint script (runs migrations before starting app)
|
||||
COPY --from=builder --chown=nestjs:nodejs /app/apps/api/docker-entrypoint.sh ./apps/api/
|
||||
|
||||
# Set working directory to API app
|
||||
WORKDIR /app/apps/api
|
||||
|
||||
@@ -109,5 +111,5 @@ HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
|
||||
# Use dumb-init to handle signals properly
|
||||
ENTRYPOINT ["dumb-init", "--"]
|
||||
|
||||
# Start the application
|
||||
CMD ["node", "dist/main.js"]
|
||||
# Run migrations then start the application
|
||||
CMD ["sh", "docker-entrypoint.sh"]
|
||||
|
||||
8
apps/api/docker-entrypoint.sh
Executable file
8
apps/api/docker-entrypoint.sh
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/bin/sh
|
||||
set -e
|
||||
|
||||
echo "Running database migrations..."
|
||||
./node_modules/.bin/prisma migrate deploy --schema ./prisma/schema.prisma
|
||||
|
||||
echo "Starting application..."
|
||||
exec node dist/main.js
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mosaic/api",
|
||||
"version": "0.0.1",
|
||||
"version": "0.0.20",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"build": "nest build",
|
||||
@@ -21,11 +21,13 @@
|
||||
"prisma:migrate:prod": "prisma migrate deploy",
|
||||
"prisma:studio": "prisma studio",
|
||||
"prisma:seed": "prisma db seed",
|
||||
"prisma:reset": "prisma migrate reset"
|
||||
"prisma:reset": "prisma migrate reset",
|
||||
"migrate:encrypt-llm-keys": "tsx scripts/encrypt-llm-keys.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"@anthropic-ai/sdk": "^0.72.1",
|
||||
"@mosaic/shared": "workspace:*",
|
||||
"@mosaicstack/telemetry-client": "^0.1.1",
|
||||
"@nestjs/axios": "^4.0.1",
|
||||
"@nestjs/bullmq": "^11.0.4",
|
||||
"@nestjs/common": "^11.1.12",
|
||||
@@ -42,17 +44,20 @@
|
||||
"@opentelemetry/instrumentation-nestjs-core": "^0.44.0",
|
||||
"@opentelemetry/resources": "^1.30.1",
|
||||
"@opentelemetry/sdk-node": "^0.56.0",
|
||||
"@opentelemetry/sdk-trace-base": "^2.5.0",
|
||||
"@opentelemetry/semantic-conventions": "^1.28.0",
|
||||
"@prisma/client": "^6.19.2",
|
||||
"@types/marked": "^6.0.0",
|
||||
"@types/multer": "^2.0.0",
|
||||
"adm-zip": "^0.5.16",
|
||||
"archiver": "^7.0.1",
|
||||
"axios": "^1.13.4",
|
||||
"axios": "^1.13.5",
|
||||
"bcryptjs": "^3.0.3",
|
||||
"better-auth": "^1.4.17",
|
||||
"bullmq": "^5.67.2",
|
||||
"class-transformer": "^0.5.1",
|
||||
"class-validator": "^0.14.3",
|
||||
"cookie-parser": "^1.4.7",
|
||||
"discord.js": "^14.25.1",
|
||||
"gray-matter": "^4.0.3",
|
||||
"highlight.js": "^11.11.1",
|
||||
@@ -61,6 +66,8 @@
|
||||
"marked": "^17.0.1",
|
||||
"marked-gfm-heading-id": "^4.1.3",
|
||||
"marked-highlight": "^2.2.3",
|
||||
"matrix-bot-sdk": "^0.8.0",
|
||||
"node-pty": "^1.0.0",
|
||||
"ollama": "^0.6.3",
|
||||
"openai": "^6.17.0",
|
||||
"reflect-metadata": "^0.2.2",
|
||||
@@ -75,15 +82,19 @@
|
||||
"@nestjs/cli": "^11.0.6",
|
||||
"@nestjs/schematics": "^11.0.1",
|
||||
"@nestjs/testing": "^11.1.12",
|
||||
"@opentelemetry/context-async-hooks": "^2.5.0",
|
||||
"@swc/core": "^1.10.18",
|
||||
"@types/adm-zip": "^0.5.7",
|
||||
"@types/archiver": "^7.0.0",
|
||||
"@types/bcryptjs": "^3.0.0",
|
||||
"@types/cookie-parser": "^1.4.10",
|
||||
"@types/express": "^5.0.1",
|
||||
"@types/highlight.js": "^10.1.0",
|
||||
"@types/node": "^22.13.4",
|
||||
"@types/sanitize-html": "^2.16.0",
|
||||
"@types/supertest": "^6.0.3",
|
||||
"@vitest/coverage-v8": "^4.0.18",
|
||||
"dotenv": "^17.2.4",
|
||||
"express": "^5.2.1",
|
||||
"prisma": "^6.19.2",
|
||||
"supertest": "^7.2.2",
|
||||
|
||||
@@ -0,0 +1,118 @@
|
||||
-- CreateEnum
|
||||
CREATE TYPE "FederationConnectionStatus" AS ENUM ('PENDING', 'ACTIVE', 'SUSPENDED', 'DISCONNECTED');
|
||||
|
||||
-- CreateEnum
|
||||
CREATE TYPE "FederationMessageType" AS ENUM ('QUERY', 'COMMAND', 'EVENT');
|
||||
|
||||
-- CreateEnum
|
||||
CREATE TYPE "FederationMessageStatus" AS ENUM ('PENDING', 'DELIVERED', 'FAILED', 'TIMEOUT');
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "federation_connections" (
|
||||
"id" UUID NOT NULL,
|
||||
"workspace_id" UUID NOT NULL,
|
||||
"remote_instance_id" TEXT NOT NULL,
|
||||
"remote_url" TEXT NOT NULL,
|
||||
"remote_public_key" TEXT NOT NULL,
|
||||
"remote_capabilities" JSONB NOT NULL DEFAULT '{}',
|
||||
"status" "FederationConnectionStatus" NOT NULL DEFAULT 'PENDING',
|
||||
"metadata" JSONB NOT NULL DEFAULT '{}',
|
||||
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updated_at" TIMESTAMPTZ NOT NULL,
|
||||
"connected_at" TIMESTAMPTZ,
|
||||
"disconnected_at" TIMESTAMPTZ,
|
||||
|
||||
CONSTRAINT "federation_connections_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "federated_identities" (
|
||||
"id" UUID NOT NULL,
|
||||
"local_user_id" UUID NOT NULL,
|
||||
"remote_user_id" TEXT NOT NULL,
|
||||
"remote_instance_id" TEXT NOT NULL,
|
||||
"oidc_subject" TEXT NOT NULL,
|
||||
"email" TEXT NOT NULL,
|
||||
"metadata" JSONB NOT NULL DEFAULT '{}',
|
||||
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updated_at" TIMESTAMPTZ NOT NULL,
|
||||
|
||||
CONSTRAINT "federated_identities_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "federation_messages" (
|
||||
"id" UUID NOT NULL,
|
||||
"workspace_id" UUID NOT NULL,
|
||||
"connection_id" UUID NOT NULL,
|
||||
"message_type" "FederationMessageType" NOT NULL,
|
||||
"message_id" TEXT NOT NULL,
|
||||
"correlation_id" TEXT,
|
||||
"query" TEXT,
|
||||
"command_type" TEXT,
|
||||
"event_type" TEXT,
|
||||
"payload" JSONB DEFAULT '{}',
|
||||
"response" JSONB DEFAULT '{}',
|
||||
"status" "FederationMessageStatus" NOT NULL DEFAULT 'PENDING',
|
||||
"error" TEXT,
|
||||
"signature" TEXT NOT NULL,
|
||||
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updated_at" TIMESTAMPTZ NOT NULL,
|
||||
"delivered_at" TIMESTAMPTZ,
|
||||
|
||||
CONSTRAINT "federation_messages_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "federation_connections_workspace_id_remote_instance_id_key" ON "federation_connections"("workspace_id", "remote_instance_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "federation_connections_workspace_id_idx" ON "federation_connections"("workspace_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "federation_connections_workspace_id_status_idx" ON "federation_connections"("workspace_id", "status");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "federation_connections_remote_instance_id_idx" ON "federation_connections"("remote_instance_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "federated_identities_local_user_id_remote_instance_id_key" ON "federated_identities"("local_user_id", "remote_instance_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "federated_identities_local_user_id_idx" ON "federated_identities"("local_user_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "federated_identities_remote_instance_id_idx" ON "federated_identities"("remote_instance_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "federated_identities_oidc_subject_idx" ON "federated_identities"("oidc_subject");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "federation_messages_message_id_key" ON "federation_messages"("message_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "federation_messages_workspace_id_idx" ON "federation_messages"("workspace_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "federation_messages_connection_id_idx" ON "federation_messages"("connection_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "federation_messages_message_id_idx" ON "federation_messages"("message_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "federation_messages_correlation_id_idx" ON "federation_messages"("correlation_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "federation_messages_event_type_idx" ON "federation_messages"("event_type");
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "federation_connections" ADD CONSTRAINT "federation_connections_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "federated_identities" ADD CONSTRAINT "federated_identities_local_user_id_fkey" FOREIGN KEY ("local_user_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "federation_messages" ADD CONSTRAINT "federation_messages_connection_id_fkey" FOREIGN KEY ("connection_id") REFERENCES "federation_connections"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "federation_messages" ADD CONSTRAINT "federation_messages_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
@@ -1,9 +1,3 @@
|
||||
-- Add eventType column to federation_messages table
|
||||
ALTER TABLE "federation_messages" ADD COLUMN "event_type" TEXT;
|
||||
|
||||
-- Add index for eventType
|
||||
CREATE INDEX "federation_messages_event_type_idx" ON "federation_messages"("event_type");
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "federation_event_subscriptions" (
|
||||
"id" UUID NOT NULL,
|
||||
|
||||
@@ -0,0 +1,18 @@
|
||||
-- Rollback: SQL Injection Hardening for is_workspace_admin() Helper Function
|
||||
-- This reverts the function to its previous implementation
|
||||
|
||||
-- =============================================================================
|
||||
-- REVERT is_workspace_admin() to original implementation
|
||||
-- =============================================================================
|
||||
|
||||
CREATE OR REPLACE FUNCTION is_workspace_admin(workspace_uuid UUID, user_uuid UUID)
|
||||
RETURNS BOOLEAN AS $$
|
||||
BEGIN
|
||||
RETURN EXISTS (
|
||||
SELECT 1 FROM workspace_members
|
||||
WHERE workspace_id = workspace_uuid
|
||||
AND user_id = user_uuid
|
||||
AND role IN ('OWNER', 'ADMIN')
|
||||
);
|
||||
END;
|
||||
$$ LANGUAGE plpgsql STABLE SECURITY DEFINER;
|
||||
@@ -0,0 +1,58 @@
|
||||
-- Security Fix: SQL Injection Hardening for is_workspace_admin() Helper Function
|
||||
-- This migration adds explicit UUID validation to prevent SQL injection attacks
|
||||
--
|
||||
-- Related: #355 Code Review - Security CRIT-3
|
||||
-- Original issue: Migration 20260129221004_add_rls_policies
|
||||
|
||||
-- =============================================================================
|
||||
-- SECURITY FIX: Add explicit UUID validation to is_workspace_admin()
|
||||
-- =============================================================================
|
||||
-- The is_workspace_admin() function previously accepted UUID parameters without
|
||||
-- explicit type casting/validation. Although PostgreSQL's parameter binding provides
|
||||
-- some protection, explicit UUID type validation is a security best practice.
|
||||
--
|
||||
-- This fix adds explicit UUID validation using PostgreSQL's uuid type checking
|
||||
-- to ensure that non-UUID values cannot bypass the function's intent.
|
||||
|
||||
CREATE OR REPLACE FUNCTION is_workspace_admin(workspace_uuid UUID, user_uuid UUID)
|
||||
RETURNS BOOLEAN AS $$
|
||||
DECLARE
|
||||
-- Validate input parameters are valid UUIDs
|
||||
v_workspace_id UUID;
|
||||
v_user_id UUID;
|
||||
BEGIN
|
||||
-- Explicitly validate workspace_uuid parameter
|
||||
IF workspace_uuid IS NULL THEN
|
||||
RETURN FALSE;
|
||||
END IF;
|
||||
v_workspace_id := workspace_uuid::UUID;
|
||||
|
||||
-- Explicitly validate user_uuid parameter
|
||||
IF user_uuid IS NULL THEN
|
||||
RETURN FALSE;
|
||||
END IF;
|
||||
v_user_id := user_uuid::UUID;
|
||||
|
||||
-- Query with validated parameters
|
||||
RETURN EXISTS (
|
||||
SELECT 1 FROM workspace_members
|
||||
WHERE workspace_id = v_workspace_id
|
||||
AND user_id = v_user_id
|
||||
AND role IN ('OWNER', 'ADMIN')
|
||||
);
|
||||
END;
|
||||
$$ LANGUAGE plpgsql STABLE SECURITY DEFINER;
|
||||
|
||||
-- =============================================================================
|
||||
-- NOTES
|
||||
-- =============================================================================
|
||||
-- This is a hardening fix that adds defense-in-depth to the is_workspace_admin()
|
||||
-- helper function. While PostgreSQL's parameterized queries already provide
|
||||
-- protection against SQL injection, explicit UUID type validation ensures:
|
||||
--
|
||||
-- 1. Parameters are explicitly cast to UUID type
|
||||
-- 2. NULL values are handled defensively
|
||||
-- 3. The function's intent is clear and secure
|
||||
-- 4. Compliance with security best practices
|
||||
--
|
||||
-- This change is backward compatible and does not affect existing functionality.
|
||||
@@ -0,0 +1,91 @@
|
||||
-- Row-Level Security (RLS) for Auth Tables
|
||||
-- This migration adds FORCE ROW LEVEL SECURITY and policies to accounts and sessions tables
|
||||
-- to ensure users can only access their own authentication data.
|
||||
--
|
||||
-- Related: #350 - Add RLS policies to auth tables with FORCE enforcement
|
||||
-- Design: docs/design/credential-security.md (Phase 1a)
|
||||
|
||||
-- =============================================================================
|
||||
-- ENABLE FORCE RLS ON AUTH TABLES
|
||||
-- =============================================================================
|
||||
-- FORCE means the table owner (mosaic) is also subject to RLS policies.
|
||||
-- This prevents Prisma (connecting as owner) from bypassing policies.
|
||||
|
||||
ALTER TABLE accounts ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE accounts FORCE ROW LEVEL SECURITY;
|
||||
|
||||
ALTER TABLE sessions ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE sessions FORCE ROW LEVEL SECURITY;
|
||||
|
||||
-- =============================================================================
|
||||
-- ACCOUNTS TABLE POLICIES
|
||||
-- =============================================================================
|
||||
|
||||
-- Owner bypass policy: Allow access to all rows ONLY when no RLS context is set
|
||||
-- This is required for:
|
||||
-- 1. Prisma migrations that run without RLS context
|
||||
-- 2. BetterAuth internal operations during authentication flow (when no user context)
|
||||
-- 3. Database maintenance operations
|
||||
-- When RLS context IS set (current_user_id() returns non-NULL), this policy does not apply
|
||||
--
|
||||
-- NOTE: If connecting as a PostgreSQL superuser (like the default 'mosaic' role),
|
||||
-- RLS policies are bypassed entirely. For full RLS enforcement, the application
|
||||
-- should connect as a non-superuser role. See docs/design/credential-security.md
|
||||
CREATE POLICY accounts_owner_bypass ON accounts
|
||||
FOR ALL
|
||||
USING (current_user_id() IS NULL);
|
||||
|
||||
-- User access policy: Users can only access their own accounts
|
||||
-- Uses current_user_id() helper from migration 20260129221004_add_rls_policies
|
||||
-- This policy applies to all operations: SELECT, INSERT, UPDATE, DELETE
|
||||
CREATE POLICY accounts_user_access ON accounts
|
||||
FOR ALL
|
||||
USING (user_id = current_user_id());
|
||||
|
||||
-- =============================================================================
|
||||
-- SESSIONS TABLE POLICIES
|
||||
-- =============================================================================
|
||||
|
||||
-- Owner bypass policy: Allow access to all rows ONLY when no RLS context is set
|
||||
-- See note on accounts_owner_bypass policy about superuser limitations
|
||||
CREATE POLICY sessions_owner_bypass ON sessions
|
||||
FOR ALL
|
||||
USING (current_user_id() IS NULL);
|
||||
|
||||
-- User access policy: Users can only access their own sessions
|
||||
CREATE POLICY sessions_user_access ON sessions
|
||||
FOR ALL
|
||||
USING (user_id = current_user_id());
|
||||
|
||||
-- =============================================================================
|
||||
-- VERIFICATION TABLE ANALYSIS
|
||||
-- =============================================================================
|
||||
-- The verifications table does NOT need RLS policies because:
|
||||
-- 1. It stores ephemeral verification tokens (email verification, password reset)
|
||||
-- 2. It has no user_id column - only identifier (email) and value (token)
|
||||
-- 3. Tokens are short-lived and accessed by token value, not user context
|
||||
-- 4. BetterAuth manages access control through token validation, not RLS
|
||||
-- 5. No cross-user data leakage risk since tokens are random and expire
|
||||
--
|
||||
-- Therefore, we intentionally do NOT add RLS to verifications table.
|
||||
|
||||
-- =============================================================================
|
||||
-- IMPORTANT: SUPERUSER LIMITATION
|
||||
-- =============================================================================
|
||||
-- PostgreSQL superusers (including the default 'mosaic' role) ALWAYS bypass
|
||||
-- Row-Level Security policies, even with FORCE ROW LEVEL SECURITY enabled.
|
||||
-- This is a fundamental PostgreSQL security design.
|
||||
--
|
||||
-- For production deployments with full RLS enforcement, create a dedicated
|
||||
-- non-superuser application role:
|
||||
--
|
||||
-- CREATE ROLE mosaic_app WITH LOGIN PASSWORD 'secure-password';
|
||||
-- GRANT CONNECT ON DATABASE mosaic TO mosaic_app;
|
||||
-- GRANT USAGE ON SCHEMA public TO mosaic_app;
|
||||
-- GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA public TO mosaic_app;
|
||||
-- GRANT USAGE, SELECT ON ALL SEQUENCES IN SCHEMA public TO mosaic_app;
|
||||
--
|
||||
-- Then update DATABASE_URL to connect as mosaic_app instead of mosaic.
|
||||
-- The RLS policies will then be properly enforced for application queries.
|
||||
--
|
||||
-- See: https://www.postgresql.org/docs/current/ddl-rowsecurity.html
|
||||
@@ -0,0 +1,76 @@
|
||||
-- Rollback: User Credentials Storage with RLS Policies
|
||||
-- This migration reverses all changes from migration.sql
|
||||
--
|
||||
-- Related: #355 - Create UserCredential Prisma model with RLS policies
|
||||
|
||||
-- =============================================================================
|
||||
-- DROP TRIGGERS AND FUNCTIONS
|
||||
-- =============================================================================
|
||||
|
||||
DROP TRIGGER IF EXISTS user_credentials_updated_at ON user_credentials;
|
||||
DROP FUNCTION IF EXISTS update_user_credentials_updated_at();
|
||||
|
||||
-- =============================================================================
|
||||
-- DISABLE RLS
|
||||
-- =============================================================================
|
||||
|
||||
ALTER TABLE user_credentials DISABLE ROW LEVEL SECURITY;
|
||||
|
||||
-- =============================================================================
|
||||
-- DROP RLS POLICIES
|
||||
-- =============================================================================
|
||||
|
||||
DROP POLICY IF EXISTS user_credentials_owner_bypass ON user_credentials;
|
||||
DROP POLICY IF EXISTS user_credentials_user_access ON user_credentials;
|
||||
DROP POLICY IF EXISTS user_credentials_workspace_access ON user_credentials;
|
||||
|
||||
-- =============================================================================
|
||||
-- DROP INDEXES
|
||||
-- =============================================================================
|
||||
|
||||
DROP INDEX IF EXISTS "user_credentials_user_id_workspace_id_provider_name_key";
|
||||
DROP INDEX IF EXISTS "user_credentials_scope_is_active_idx";
|
||||
DROP INDEX IF EXISTS "user_credentials_workspace_id_scope_idx";
|
||||
DROP INDEX IF EXISTS "user_credentials_user_id_scope_idx";
|
||||
DROP INDEX IF EXISTS "user_credentials_workspace_id_idx";
|
||||
DROP INDEX IF EXISTS "user_credentials_user_id_idx";
|
||||
|
||||
-- =============================================================================
|
||||
-- DROP FOREIGN KEY CONSTRAINTS
|
||||
-- =============================================================================
|
||||
|
||||
ALTER TABLE "user_credentials" DROP CONSTRAINT IF EXISTS "user_credentials_workspace_id_fkey";
|
||||
ALTER TABLE "user_credentials" DROP CONSTRAINT IF EXISTS "user_credentials_user_id_fkey";
|
||||
|
||||
-- =============================================================================
|
||||
-- DROP TABLE
|
||||
-- =============================================================================
|
||||
|
||||
DROP TABLE IF EXISTS "user_credentials";
|
||||
|
||||
-- =============================================================================
|
||||
-- DROP ENUMS
|
||||
-- =============================================================================
|
||||
-- NOTE: ENUM values cannot be easily removed from an existing enum type in PostgreSQL.
|
||||
-- To fully reverse this migration, you would need to:
|
||||
--
|
||||
-- 1. Remove the 'CREDENTIAL' value from EntityType enum (if not used elsewhere):
|
||||
-- ALTER TYPE "EntityType" RENAME TO "EntityType_old";
|
||||
-- CREATE TYPE "EntityType" AS ENUM (...all values except CREDENTIAL...);
|
||||
-- -- Then rebuild all dependent objects
|
||||
--
|
||||
-- 2. Remove credential-related actions from ActivityAction enum (if not used elsewhere):
|
||||
-- ALTER TYPE "ActivityAction" RENAME TO "ActivityAction_old";
|
||||
-- CREATE TYPE "ActivityAction" AS ENUM (...all values except CREDENTIAL_*...);
|
||||
-- -- Then rebuild all dependent objects
|
||||
--
|
||||
-- 3. Drop the CredentialType and CredentialScope enums:
|
||||
-- DROP TYPE IF EXISTS "CredentialType";
|
||||
-- DROP TYPE IF EXISTS "CredentialScope";
|
||||
--
|
||||
-- Due to the complexity and risk of breaking existing data/code that references
|
||||
-- these enum values, this migration does NOT automatically remove them.
|
||||
-- If you need to clean up the enums, manually execute the steps above.
|
||||
--
|
||||
-- For development environments, you can safely drop and recreate the enums manually
|
||||
-- using the SQL statements above.
|
||||
@@ -0,0 +1,184 @@
|
||||
-- User Credentials Storage with RLS Policies
|
||||
-- This migration adds the user_credentials table for secure storage of user API keys,
|
||||
-- OAuth tokens, and other credentials with encryption and RLS enforcement.
|
||||
--
|
||||
-- Related: #355 - Create UserCredential Prisma model with RLS policies
|
||||
-- Design: docs/design/credential-security.md (Phase 3a)
|
||||
|
||||
-- =============================================================================
|
||||
-- CREATE ENUMS
|
||||
-- =============================================================================
|
||||
|
||||
-- CredentialType enum: Types of credentials that can be stored
|
||||
CREATE TYPE "CredentialType" AS ENUM ('API_KEY', 'OAUTH_TOKEN', 'ACCESS_TOKEN', 'SECRET', 'PASSWORD', 'CUSTOM');
|
||||
|
||||
-- CredentialScope enum: Access scope for credentials
|
||||
CREATE TYPE "CredentialScope" AS ENUM ('USER', 'WORKSPACE', 'SYSTEM');
|
||||
|
||||
-- =============================================================================
|
||||
-- EXTEND EXISTING ENUMS
|
||||
-- =============================================================================
|
||||
|
||||
-- Add CREDENTIAL to EntityType for activity logging
|
||||
ALTER TYPE "EntityType" ADD VALUE 'CREDENTIAL';
|
||||
|
||||
-- Add credential-related actions to ActivityAction
|
||||
ALTER TYPE "ActivityAction" ADD VALUE 'CREDENTIAL_CREATED';
|
||||
ALTER TYPE "ActivityAction" ADD VALUE 'CREDENTIAL_ACCESSED';
|
||||
ALTER TYPE "ActivityAction" ADD VALUE 'CREDENTIAL_ROTATED';
|
||||
ALTER TYPE "ActivityAction" ADD VALUE 'CREDENTIAL_REVOKED';
|
||||
|
||||
-- =============================================================================
|
||||
-- CREATE USER_CREDENTIALS TABLE
|
||||
-- =============================================================================
|
||||
|
||||
CREATE TABLE "user_credentials" (
|
||||
"id" UUID NOT NULL DEFAULT uuid_generate_v4(),
|
||||
"user_id" UUID NOT NULL,
|
||||
"workspace_id" UUID,
|
||||
|
||||
-- Identity
|
||||
"name" VARCHAR(255) NOT NULL,
|
||||
"provider" VARCHAR(100) NOT NULL,
|
||||
"type" "CredentialType" NOT NULL,
|
||||
"scope" "CredentialScope" NOT NULL DEFAULT 'USER',
|
||||
|
||||
-- Encrypted storage
|
||||
"encrypted_value" TEXT NOT NULL,
|
||||
"masked_value" VARCHAR(20),
|
||||
|
||||
-- Metadata
|
||||
"description" TEXT,
|
||||
"expires_at" TIMESTAMPTZ,
|
||||
"last_used_at" TIMESTAMPTZ,
|
||||
"metadata" JSONB NOT NULL DEFAULT '{}',
|
||||
|
||||
-- Status
|
||||
"is_active" BOOLEAN NOT NULL DEFAULT true,
|
||||
"rotated_at" TIMESTAMPTZ,
|
||||
|
||||
-- Audit
|
||||
"created_at" TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
"updated_at" TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
|
||||
CONSTRAINT "user_credentials_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- =============================================================================
|
||||
-- CREATE FOREIGN KEY CONSTRAINTS
|
||||
-- =============================================================================
|
||||
|
||||
ALTER TABLE "user_credentials" ADD CONSTRAINT "user_credentials_user_id_fkey"
|
||||
FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
ALTER TABLE "user_credentials" ADD CONSTRAINT "user_credentials_workspace_id_fkey"
|
||||
FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- =============================================================================
|
||||
-- CREATE INDEXES
|
||||
-- =============================================================================
|
||||
|
||||
-- Index for user lookups
|
||||
CREATE INDEX "user_credentials_user_id_idx" ON "user_credentials"("user_id");
|
||||
|
||||
-- Index for workspace lookups
|
||||
CREATE INDEX "user_credentials_workspace_id_idx" ON "user_credentials"("workspace_id");
|
||||
|
||||
-- Index for user + scope queries
|
||||
CREATE INDEX "user_credentials_user_id_scope_idx" ON "user_credentials"("user_id", "scope");
|
||||
|
||||
-- Index for workspace + scope queries
|
||||
CREATE INDEX "user_credentials_workspace_id_scope_idx" ON "user_credentials"("workspace_id", "scope");
|
||||
|
||||
-- Index for scope + active status queries
|
||||
CREATE INDEX "user_credentials_scope_is_active_idx" ON "user_credentials"("scope", "is_active");
|
||||
|
||||
-- =============================================================================
|
||||
-- CREATE UNIQUE CONSTRAINT
|
||||
-- =============================================================================
|
||||
|
||||
-- Prevent duplicate credentials per user/workspace/provider/name
|
||||
CREATE UNIQUE INDEX "user_credentials_user_id_workspace_id_provider_name_key"
|
||||
ON "user_credentials"("user_id", "workspace_id", "provider", "name");
|
||||
|
||||
-- =============================================================================
|
||||
-- ENABLE FORCE ROW LEVEL SECURITY
|
||||
-- =============================================================================
|
||||
-- FORCE means the table owner (mosaic) is also subject to RLS policies.
|
||||
-- This prevents Prisma (connecting as owner) from bypassing policies.
|
||||
|
||||
ALTER TABLE user_credentials ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE user_credentials FORCE ROW LEVEL SECURITY;
|
||||
|
||||
-- =============================================================================
|
||||
-- RLS POLICIES
|
||||
-- =============================================================================
|
||||
|
||||
-- Owner bypass policy: Allow access to all rows ONLY when no RLS context is set
|
||||
-- This is required for:
|
||||
-- 1. Prisma migrations that run without RLS context
|
||||
-- 2. Database maintenance operations
|
||||
-- When RLS context IS set (current_user_id() returns non-NULL), this policy does not apply
|
||||
--
|
||||
-- NOTE: If connecting as a PostgreSQL superuser (like the default 'mosaic' role),
|
||||
-- RLS policies are bypassed entirely. For full RLS enforcement, the application
|
||||
-- should connect as a non-superuser role. See docs/design/credential-security.md
|
||||
CREATE POLICY user_credentials_owner_bypass ON user_credentials
|
||||
FOR ALL
|
||||
USING (current_user_id() IS NULL);
|
||||
|
||||
-- User access policy: USER-scoped credentials visible only to owner
|
||||
-- Uses current_user_id() helper from migration 20260129221004_add_rls_policies
|
||||
CREATE POLICY user_credentials_user_access ON user_credentials
|
||||
FOR ALL
|
||||
USING (
|
||||
scope = 'USER' AND user_id = current_user_id()
|
||||
);
|
||||
|
||||
-- Workspace admin access policy: WORKSPACE-scoped credentials visible to workspace admins
|
||||
-- Uses is_workspace_admin() helper from migration 20260129221004_add_rls_policies
|
||||
CREATE POLICY user_credentials_workspace_access ON user_credentials
|
||||
FOR ALL
|
||||
USING (
|
||||
scope = 'WORKSPACE'
|
||||
AND workspace_id IS NOT NULL
|
||||
AND is_workspace_admin(workspace_id, current_user_id())
|
||||
);
|
||||
|
||||
-- SYSTEM-scoped credentials are only accessible via owner bypass policy
|
||||
-- (when current_user_id() IS NULL, which happens for admin operations)
|
||||
|
||||
-- =============================================================================
|
||||
-- AUDIT TRIGGER
|
||||
-- =============================================================================
|
||||
|
||||
-- Update updated_at timestamp on row changes
|
||||
CREATE OR REPLACE FUNCTION update_user_credentials_updated_at()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
NEW.updated_at = NOW();
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE TRIGGER user_credentials_updated_at
|
||||
BEFORE UPDATE ON user_credentials
|
||||
FOR EACH ROW
|
||||
EXECUTE FUNCTION update_user_credentials_updated_at();
|
||||
|
||||
-- =============================================================================
|
||||
-- NOTES
|
||||
-- =============================================================================
|
||||
-- This migration creates the foundation for secure credential storage.
|
||||
-- The encrypted_value column stores ciphertext in one of two formats:
|
||||
--
|
||||
-- 1. OpenBao Transit format (preferred): vault:v1:base64data
|
||||
-- 2. AES-256-GCM fallback format: iv:authTag:encrypted
|
||||
--
|
||||
-- The VaultService (issue #353) handles encryption/decryption with automatic
|
||||
-- fallback to CryptoService when OpenBao is unavailable.
|
||||
--
|
||||
-- RLS enforcement ensures:
|
||||
-- - USER scope: Only the credential owner can access
|
||||
-- - WORKSPACE scope: Only workspace admins can access
|
||||
-- - SYSTEM scope: Only accessible via admin/migration bypass
|
||||
@@ -0,0 +1,37 @@
|
||||
-- Encrypt existing plaintext Account tokens
|
||||
-- This migration adds an encryption_version column and marks existing records for encryption
|
||||
-- The actual encryption happens via Prisma middleware on first read/write
|
||||
|
||||
-- Add encryption_version column to track encryption state
|
||||
-- NULL = not encrypted (legacy plaintext)
|
||||
-- 'aes' = AES-256-GCM encrypted
|
||||
-- 'vault' = OpenBao Transit encrypted (Phase 2)
|
||||
ALTER TABLE accounts ADD COLUMN IF NOT EXISTS encryption_version VARCHAR(20);
|
||||
|
||||
-- Create index for efficient queries filtering by encryption status
|
||||
-- This index is also declared in Prisma schema (@@index([encryptionVersion]))
|
||||
-- Using CREATE INDEX IF NOT EXISTS for idempotency
|
||||
CREATE INDEX IF NOT EXISTS "accounts_encryption_version_idx" ON accounts(encryption_version);
|
||||
|
||||
-- Verify index was created successfully by running:
|
||||
-- SELECT indexname, indexdef FROM pg_indexes WHERE tablename = 'accounts' AND indexname = 'accounts_encryption_version_idx';
|
||||
|
||||
-- Update statistics for query planner
|
||||
ANALYZE accounts;
|
||||
|
||||
-- Migration Note:
|
||||
-- This migration does NOT encrypt data in-place to avoid downtime and data corruption risks.
|
||||
-- Instead, the Prisma middleware (account-encryption.middleware.ts) handles encryption:
|
||||
--
|
||||
-- 1. On READ: Detects format (plaintext vs encrypted) and decrypts if needed
|
||||
-- 2. On WRITE: Encrypts tokens and sets encryption_version = 'aes'
|
||||
-- 3. Backward compatible: Plaintext tokens (encryption_version = NULL) are passed through unchanged
|
||||
--
|
||||
-- To actively encrypt existing tokens, run the companion script:
|
||||
-- node scripts/encrypt-account-tokens.js
|
||||
--
|
||||
-- This approach ensures:
|
||||
-- - Zero downtime migration
|
||||
-- - No risk of corrupting tokens during bulk encryption
|
||||
-- - Progressive encryption as tokens are accessed/refreshed
|
||||
-- - Easy rollback (middleware is idempotent)
|
||||
@@ -0,0 +1,26 @@
|
||||
-- Encrypt LLM Provider API Keys Migration
|
||||
--
|
||||
-- This migration enables transparent encryption/decryption of LLM provider API keys
|
||||
-- stored in the llm_provider_instances.config JSON field.
|
||||
--
|
||||
-- IMPORTANT: This is a data migration with no schema changes.
|
||||
--
|
||||
-- Strategy:
|
||||
-- 1. Prisma middleware (llm-encryption.middleware.ts) handles encryption/decryption
|
||||
-- 2. Middleware auto-detects encryption format:
|
||||
-- - vault:v1:... = OpenBao Transit encrypted
|
||||
-- - Otherwise = Legacy plaintext (backward compatible)
|
||||
-- 3. New API keys are always encrypted on write
|
||||
-- 4. Existing plaintext keys work until re-saved (lazy migration)
|
||||
--
|
||||
-- To actively encrypt all existing API keys NOW:
|
||||
-- pnpm --filter @mosaic/api migrate:encrypt-llm-keys
|
||||
--
|
||||
-- This approach ensures:
|
||||
-- - Zero downtime migration
|
||||
-- - No schema changes required
|
||||
-- - Backward compatible with plaintext keys
|
||||
-- - Progressive encryption as keys are accessed/updated
|
||||
-- - Easy rollback (middleware is idempotent)
|
||||
--
|
||||
-- Note: No SQL changes needed. This file exists for migration tracking only.
|
||||
@@ -0,0 +1,197 @@
|
||||
-- RecreateEnum: FormalityLevel was dropped in 20260129235248_add_link_storage_fields
|
||||
CREATE TYPE "FormalityLevel" AS ENUM ('VERY_CASUAL', 'CASUAL', 'NEUTRAL', 'FORMAL', 'VERY_FORMAL');
|
||||
|
||||
-- RecreateTable: personalities was dropped in 20260129235248_add_link_storage_fields
|
||||
-- Recreated with current schema (display_name, system_prompt, temperature, etc.)
|
||||
CREATE TABLE "personalities" (
|
||||
"id" UUID NOT NULL,
|
||||
"workspace_id" UUID NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
"display_name" TEXT NOT NULL,
|
||||
"description" TEXT,
|
||||
"system_prompt" TEXT NOT NULL,
|
||||
"temperature" DOUBLE PRECISION,
|
||||
"max_tokens" INTEGER,
|
||||
"llm_provider_instance_id" UUID,
|
||||
"is_default" BOOLEAN NOT NULL DEFAULT false,
|
||||
"is_enabled" BOOLEAN NOT NULL DEFAULT true,
|
||||
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updated_at" TIMESTAMPTZ NOT NULL,
|
||||
|
||||
CONSTRAINT "personalities_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateIndex: personalities
|
||||
CREATE UNIQUE INDEX "personalities_id_workspace_id_key" ON "personalities"("id", "workspace_id");
|
||||
CREATE UNIQUE INDEX "personalities_workspace_id_name_key" ON "personalities"("workspace_id", "name");
|
||||
CREATE INDEX "personalities_workspace_id_idx" ON "personalities"("workspace_id");
|
||||
CREATE INDEX "personalities_workspace_id_is_default_idx" ON "personalities"("workspace_id", "is_default");
|
||||
CREATE INDEX "personalities_workspace_id_is_enabled_idx" ON "personalities"("workspace_id", "is_enabled");
|
||||
CREATE INDEX "personalities_llm_provider_instance_id_idx" ON "personalities"("llm_provider_instance_id");
|
||||
|
||||
-- AddForeignKey: personalities
|
||||
ALTER TABLE "personalities" ADD CONSTRAINT "personalities_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
ALTER TABLE "personalities" ADD CONSTRAINT "personalities_llm_provider_instance_id_fkey" FOREIGN KEY ("llm_provider_instance_id") REFERENCES "llm_provider_instances"("id") ON DELETE SET NULL ON UPDATE CASCADE;
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "cron_schedules" (
|
||||
"id" UUID NOT NULL,
|
||||
"workspace_id" UUID NOT NULL,
|
||||
"expression" TEXT NOT NULL,
|
||||
"command" TEXT NOT NULL,
|
||||
"enabled" BOOLEAN NOT NULL DEFAULT true,
|
||||
"last_run" TIMESTAMPTZ,
|
||||
"next_run" TIMESTAMPTZ,
|
||||
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updated_at" TIMESTAMPTZ NOT NULL,
|
||||
|
||||
CONSTRAINT "cron_schedules_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "workspace_llm_settings" (
|
||||
"id" UUID NOT NULL,
|
||||
"workspace_id" UUID NOT NULL,
|
||||
"default_llm_provider_id" UUID,
|
||||
"default_personality_id" UUID,
|
||||
"settings" JSONB DEFAULT '{}',
|
||||
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updated_at" TIMESTAMPTZ NOT NULL,
|
||||
|
||||
CONSTRAINT "workspace_llm_settings_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "quality_gates" (
|
||||
"id" UUID NOT NULL,
|
||||
"workspace_id" UUID NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
"description" TEXT,
|
||||
"type" TEXT NOT NULL,
|
||||
"command" TEXT,
|
||||
"expected_output" TEXT,
|
||||
"is_regex" BOOLEAN NOT NULL DEFAULT false,
|
||||
"required" BOOLEAN NOT NULL DEFAULT true,
|
||||
"order" INTEGER NOT NULL DEFAULT 0,
|
||||
"is_enabled" BOOLEAN NOT NULL DEFAULT true,
|
||||
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updated_at" TIMESTAMPTZ NOT NULL,
|
||||
|
||||
CONSTRAINT "quality_gates_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "task_rejections" (
|
||||
"id" UUID NOT NULL,
|
||||
"task_id" TEXT NOT NULL,
|
||||
"workspace_id" TEXT NOT NULL,
|
||||
"agent_id" TEXT NOT NULL,
|
||||
"attempt_count" INTEGER NOT NULL,
|
||||
"failures" JSONB NOT NULL,
|
||||
"original_task" TEXT NOT NULL,
|
||||
"started_at" TIMESTAMPTZ NOT NULL,
|
||||
"rejected_at" TIMESTAMPTZ NOT NULL,
|
||||
"escalated" BOOLEAN NOT NULL DEFAULT false,
|
||||
"manual_review" BOOLEAN NOT NULL DEFAULT false,
|
||||
"resolved_at" TIMESTAMPTZ,
|
||||
"resolution" TEXT,
|
||||
|
||||
CONSTRAINT "task_rejections_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "token_budgets" (
|
||||
"id" UUID NOT NULL,
|
||||
"task_id" UUID NOT NULL,
|
||||
"workspace_id" UUID NOT NULL,
|
||||
"agent_id" TEXT NOT NULL,
|
||||
"allocated_tokens" INTEGER NOT NULL,
|
||||
"estimated_complexity" TEXT NOT NULL,
|
||||
"input_tokens_used" INTEGER NOT NULL DEFAULT 0,
|
||||
"output_tokens_used" INTEGER NOT NULL DEFAULT 0,
|
||||
"total_tokens_used" INTEGER NOT NULL DEFAULT 0,
|
||||
"estimated_cost" DECIMAL(10,6),
|
||||
"started_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"last_updated_at" TIMESTAMPTZ NOT NULL,
|
||||
"completed_at" TIMESTAMPTZ,
|
||||
"budget_utilization" DOUBLE PRECISION,
|
||||
"suspicious_pattern" BOOLEAN NOT NULL DEFAULT false,
|
||||
"suspicious_reason" TEXT,
|
||||
|
||||
CONSTRAINT "token_budgets_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "llm_usage_logs" (
|
||||
"id" UUID NOT NULL,
|
||||
"workspace_id" UUID NOT NULL,
|
||||
"user_id" UUID NOT NULL,
|
||||
"provider" VARCHAR(50) NOT NULL,
|
||||
"model" VARCHAR(100) NOT NULL,
|
||||
"provider_instance_id" UUID,
|
||||
"prompt_tokens" INTEGER NOT NULL DEFAULT 0,
|
||||
"completion_tokens" INTEGER NOT NULL DEFAULT 0,
|
||||
"total_tokens" INTEGER NOT NULL DEFAULT 0,
|
||||
"cost_cents" DOUBLE PRECISION,
|
||||
"task_type" VARCHAR(50),
|
||||
"conversation_id" UUID,
|
||||
"duration_ms" INTEGER,
|
||||
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
|
||||
CONSTRAINT "llm_usage_logs_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateIndex: cron_schedules
|
||||
CREATE INDEX "cron_schedules_workspace_id_idx" ON "cron_schedules"("workspace_id");
|
||||
CREATE INDEX "cron_schedules_workspace_id_enabled_idx" ON "cron_schedules"("workspace_id", "enabled");
|
||||
CREATE INDEX "cron_schedules_next_run_idx" ON "cron_schedules"("next_run");
|
||||
|
||||
-- CreateIndex: workspace_llm_settings
|
||||
CREATE UNIQUE INDEX "workspace_llm_settings_workspace_id_key" ON "workspace_llm_settings"("workspace_id");
|
||||
CREATE INDEX "workspace_llm_settings_workspace_id_idx" ON "workspace_llm_settings"("workspace_id");
|
||||
CREATE INDEX "workspace_llm_settings_default_llm_provider_id_idx" ON "workspace_llm_settings"("default_llm_provider_id");
|
||||
CREATE INDEX "workspace_llm_settings_default_personality_id_idx" ON "workspace_llm_settings"("default_personality_id");
|
||||
|
||||
-- CreateIndex: quality_gates
|
||||
CREATE UNIQUE INDEX "quality_gates_workspace_id_name_key" ON "quality_gates"("workspace_id", "name");
|
||||
CREATE INDEX "quality_gates_workspace_id_idx" ON "quality_gates"("workspace_id");
|
||||
CREATE INDEX "quality_gates_workspace_id_is_enabled_idx" ON "quality_gates"("workspace_id", "is_enabled");
|
||||
|
||||
-- CreateIndex: task_rejections
|
||||
CREATE INDEX "task_rejections_task_id_idx" ON "task_rejections"("task_id");
|
||||
CREATE INDEX "task_rejections_workspace_id_idx" ON "task_rejections"("workspace_id");
|
||||
CREATE INDEX "task_rejections_agent_id_idx" ON "task_rejections"("agent_id");
|
||||
CREATE INDEX "task_rejections_escalated_idx" ON "task_rejections"("escalated");
|
||||
CREATE INDEX "task_rejections_manual_review_idx" ON "task_rejections"("manual_review");
|
||||
|
||||
-- CreateIndex: token_budgets
|
||||
CREATE UNIQUE INDEX "token_budgets_task_id_key" ON "token_budgets"("task_id");
|
||||
CREATE INDEX "token_budgets_task_id_idx" ON "token_budgets"("task_id");
|
||||
CREATE INDEX "token_budgets_workspace_id_idx" ON "token_budgets"("workspace_id");
|
||||
CREATE INDEX "token_budgets_suspicious_pattern_idx" ON "token_budgets"("suspicious_pattern");
|
||||
|
||||
-- CreateIndex: llm_usage_logs
|
||||
CREATE INDEX "llm_usage_logs_workspace_id_idx" ON "llm_usage_logs"("workspace_id");
|
||||
CREATE INDEX "llm_usage_logs_workspace_id_created_at_idx" ON "llm_usage_logs"("workspace_id", "created_at");
|
||||
CREATE INDEX "llm_usage_logs_user_id_idx" ON "llm_usage_logs"("user_id");
|
||||
CREATE INDEX "llm_usage_logs_provider_idx" ON "llm_usage_logs"("provider");
|
||||
CREATE INDEX "llm_usage_logs_model_idx" ON "llm_usage_logs"("model");
|
||||
CREATE INDEX "llm_usage_logs_provider_instance_id_idx" ON "llm_usage_logs"("provider_instance_id");
|
||||
CREATE INDEX "llm_usage_logs_task_type_idx" ON "llm_usage_logs"("task_type");
|
||||
CREATE INDEX "llm_usage_logs_conversation_id_idx" ON "llm_usage_logs"("conversation_id");
|
||||
|
||||
-- AddForeignKey: cron_schedules
|
||||
ALTER TABLE "cron_schedules" ADD CONSTRAINT "cron_schedules_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey: workspace_llm_settings
|
||||
ALTER TABLE "workspace_llm_settings" ADD CONSTRAINT "workspace_llm_settings_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
ALTER TABLE "workspace_llm_settings" ADD CONSTRAINT "workspace_llm_settings_default_llm_provider_id_fkey" FOREIGN KEY ("default_llm_provider_id") REFERENCES "llm_provider_instances"("id") ON DELETE SET NULL ON UPDATE CASCADE;
|
||||
ALTER TABLE "workspace_llm_settings" ADD CONSTRAINT "workspace_llm_settings_default_personality_id_fkey" FOREIGN KEY ("default_personality_id") REFERENCES "personalities"("id") ON DELETE SET NULL ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey: quality_gates
|
||||
ALTER TABLE "quality_gates" ADD CONSTRAINT "quality_gates_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey: llm_usage_logs
|
||||
ALTER TABLE "llm_usage_logs" ADD CONSTRAINT "llm_usage_logs_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
ALTER TABLE "llm_usage_logs" ADD CONSTRAINT "llm_usage_logs_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
ALTER TABLE "llm_usage_logs" ADD CONSTRAINT "llm_usage_logs_provider_instance_id_fkey" FOREIGN KEY ("provider_instance_id") REFERENCES "llm_provider_instances"("id") ON DELETE SET NULL ON UPDATE CASCADE;
|
||||
@@ -0,0 +1,2 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "workspaces" ADD COLUMN "matrix_room_id" TEXT;
|
||||
@@ -0,0 +1,49 @@
|
||||
-- Fix schema drift: tables, indexes, and constraints defined in schema.prisma
|
||||
-- but never created (or dropped and never recreated) by prior migrations.
|
||||
|
||||
-- ============================================
|
||||
-- CreateTable: instances (Federation module)
|
||||
-- Never created in any prior migration
|
||||
-- ============================================
|
||||
CREATE TABLE "instances" (
|
||||
"id" UUID NOT NULL,
|
||||
"instance_id" TEXT NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
"url" TEXT NOT NULL,
|
||||
"public_key" TEXT NOT NULL,
|
||||
"private_key" TEXT NOT NULL,
|
||||
"capabilities" JSONB NOT NULL DEFAULT '{}',
|
||||
"metadata" JSONB NOT NULL DEFAULT '{}',
|
||||
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updated_at" TIMESTAMPTZ NOT NULL,
|
||||
|
||||
CONSTRAINT "instances_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
CREATE UNIQUE INDEX "instances_instance_id_key" ON "instances"("instance_id");
|
||||
|
||||
-- ============================================
|
||||
-- Recreate dropped unique index on knowledge_links
|
||||
-- Created in 20260129220645_add_knowledge_module, dropped in
|
||||
-- 20260129235248_add_link_storage_fields, never recreated.
|
||||
-- ============================================
|
||||
CREATE UNIQUE INDEX "knowledge_links_source_id_target_id_key" ON "knowledge_links"("source_id", "target_id");
|
||||
|
||||
-- ============================================
|
||||
-- Missing @@unique([id, workspaceId]) composite indexes
|
||||
-- Defined in schema.prisma but never created in migrations.
|
||||
-- (agent_tasks and runner_jobs already have these.)
|
||||
-- ============================================
|
||||
CREATE UNIQUE INDEX "tasks_id_workspace_id_key" ON "tasks"("id", "workspace_id");
|
||||
CREATE UNIQUE INDEX "events_id_workspace_id_key" ON "events"("id", "workspace_id");
|
||||
CREATE UNIQUE INDEX "projects_id_workspace_id_key" ON "projects"("id", "workspace_id");
|
||||
CREATE UNIQUE INDEX "activity_logs_id_workspace_id_key" ON "activity_logs"("id", "workspace_id");
|
||||
CREATE UNIQUE INDEX "domains_id_workspace_id_key" ON "domains"("id", "workspace_id");
|
||||
CREATE UNIQUE INDEX "ideas_id_workspace_id_key" ON "ideas"("id", "workspace_id");
|
||||
CREATE UNIQUE INDEX "user_layouts_id_workspace_id_key" ON "user_layouts"("id", "workspace_id");
|
||||
|
||||
-- ============================================
|
||||
-- Missing index on agent_tasks.agent_type
|
||||
-- Defined as @@index([agentType]) in schema.prisma
|
||||
-- ============================================
|
||||
CREATE INDEX "agent_tasks_agent_type_idx" ON "agent_tasks"("agent_type");
|
||||
@@ -0,0 +1,23 @@
|
||||
-- CreateEnum
|
||||
CREATE TYPE "TerminalSessionStatus" AS ENUM ('ACTIVE', 'CLOSED');
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "terminal_sessions" (
|
||||
"id" UUID NOT NULL,
|
||||
"workspace_id" UUID NOT NULL,
|
||||
"name" TEXT NOT NULL DEFAULT 'Terminal',
|
||||
"status" "TerminalSessionStatus" NOT NULL DEFAULT 'ACTIVE',
|
||||
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"closed_at" TIMESTAMPTZ,
|
||||
|
||||
CONSTRAINT "terminal_sessions_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "terminal_sessions_workspace_id_idx" ON "terminal_sessions"("workspace_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "terminal_sessions_workspace_id_status_idx" ON "terminal_sessions"("workspace_id", "status");
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "terminal_sessions" ADD CONSTRAINT "terminal_sessions_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
@@ -0,0 +1,3 @@
|
||||
-- AlterTable: add tone and formality_level columns to personalities
|
||||
ALTER TABLE "personalities" ADD COLUMN "tone" TEXT NOT NULL DEFAULT 'neutral';
|
||||
ALTER TABLE "personalities" ADD COLUMN "formality_level" "FormalityLevel" NOT NULL DEFAULT 'NEUTRAL';
|
||||
@@ -0,0 +1,24 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "agent_memories" (
|
||||
"id" UUID NOT NULL,
|
||||
"workspace_id" UUID NOT NULL,
|
||||
"agent_id" TEXT NOT NULL,
|
||||
"key" TEXT NOT NULL,
|
||||
"value" JSONB NOT NULL,
|
||||
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updated_at" TIMESTAMPTZ NOT NULL,
|
||||
|
||||
CONSTRAINT "agent_memories_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "agent_memories_workspace_id_agent_id_key_key" ON "agent_memories"("workspace_id", "agent_id", "key");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "agent_memories_workspace_id_idx" ON "agent_memories"("workspace_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "agent_memories_agent_id_idx" ON "agent_memories"("agent_id");
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "agent_memories" ADD CONSTRAINT "agent_memories_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
@@ -0,0 +1,33 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "conversation_archives" (
|
||||
"id" UUID NOT NULL,
|
||||
"workspace_id" UUID NOT NULL,
|
||||
"session_id" TEXT NOT NULL,
|
||||
"agent_id" TEXT NOT NULL,
|
||||
"messages" JSONB NOT NULL,
|
||||
"message_count" INTEGER NOT NULL,
|
||||
"summary" TEXT NOT NULL,
|
||||
"embedding" vector(1536),
|
||||
"started_at" TIMESTAMPTZ NOT NULL,
|
||||
"ended_at" TIMESTAMPTZ,
|
||||
"metadata" JSONB NOT NULL DEFAULT '{}',
|
||||
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updated_at" TIMESTAMPTZ NOT NULL,
|
||||
|
||||
CONSTRAINT "conversation_archives_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "conversation_archives_workspace_id_session_id_key" ON "conversation_archives"("workspace_id", "session_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "conversation_archives_workspace_id_idx" ON "conversation_archives"("workspace_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "conversation_archives_agent_id_idx" ON "conversation_archives"("agent_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "conversation_archives_started_at_idx" ON "conversation_archives"("started_at");
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "conversation_archives" ADD CONSTRAINT "conversation_archives_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
@@ -0,0 +1,37 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "findings" (
|
||||
"id" UUID NOT NULL,
|
||||
"workspace_id" UUID NOT NULL,
|
||||
"task_id" UUID,
|
||||
"agent_id" TEXT NOT NULL,
|
||||
"type" TEXT NOT NULL,
|
||||
"title" TEXT NOT NULL,
|
||||
"data" JSONB NOT NULL,
|
||||
"summary" TEXT NOT NULL,
|
||||
"embedding" vector(1536),
|
||||
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updated_at" TIMESTAMPTZ NOT NULL,
|
||||
|
||||
CONSTRAINT "findings_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "findings_id_workspace_id_key" ON "findings"("id", "workspace_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "findings_workspace_id_idx" ON "findings"("workspace_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "findings_agent_id_idx" ON "findings"("agent_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "findings_type_idx" ON "findings"("type");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "findings_task_id_idx" ON "findings"("task_id");
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "findings" ADD CONSTRAINT "findings_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "findings" ADD CONSTRAINT "findings_task_id_fkey" FOREIGN KEY ("task_id") REFERENCES "agent_tasks"("id") ON DELETE SET NULL ON UPDATE CASCADE;
|
||||
@@ -3,6 +3,7 @@
|
||||
|
||||
generator client {
|
||||
provider = "prisma-client-js"
|
||||
binaryTargets = ["native", "debian-openssl-3.0.x"]
|
||||
previewFeatures = ["postgresqlExtensions"]
|
||||
}
|
||||
|
||||
@@ -62,6 +63,10 @@ enum ActivityAction {
|
||||
LOGOUT
|
||||
PASSWORD_RESET
|
||||
EMAIL_VERIFIED
|
||||
CREDENTIAL_CREATED
|
||||
CREDENTIAL_ACCESSED
|
||||
CREDENTIAL_ROTATED
|
||||
CREDENTIAL_REVOKED
|
||||
}
|
||||
|
||||
enum EntityType {
|
||||
@@ -72,6 +77,7 @@ enum EntityType {
|
||||
USER
|
||||
IDEA
|
||||
DOMAIN
|
||||
CREDENTIAL
|
||||
}
|
||||
|
||||
enum IdeaStatus {
|
||||
@@ -186,6 +192,26 @@ enum FederationMessageStatus {
|
||||
TIMEOUT
|
||||
}
|
||||
|
||||
enum CredentialType {
|
||||
API_KEY
|
||||
OAUTH_TOKEN
|
||||
ACCESS_TOKEN
|
||||
SECRET
|
||||
PASSWORD
|
||||
CUSTOM
|
||||
}
|
||||
|
||||
enum CredentialScope {
|
||||
USER
|
||||
WORKSPACE
|
||||
SYSTEM
|
||||
}
|
||||
|
||||
enum TerminalSessionStatus {
|
||||
ACTIVE
|
||||
CLOSED
|
||||
}
|
||||
|
||||
// ============================================
|
||||
// MODELS
|
||||
// ============================================
|
||||
@@ -201,6 +227,14 @@ model User {
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
|
||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
|
||||
|
||||
// MS21: Admin, local auth, and invitation fields
|
||||
deactivatedAt DateTime? @map("deactivated_at") @db.Timestamptz
|
||||
isLocalAuth Boolean @default(false) @map("is_local_auth")
|
||||
passwordHash String? @map("password_hash")
|
||||
invitedBy String? @map("invited_by") @db.Uuid
|
||||
invitationToken String? @unique @map("invitation_token")
|
||||
invitedAt DateTime? @map("invited_at") @db.Timestamptz
|
||||
|
||||
// Relations
|
||||
ownedWorkspaces Workspace[] @relation("WorkspaceOwner")
|
||||
workspaceMemberships WorkspaceMember[]
|
||||
@@ -221,6 +255,8 @@ model User {
|
||||
knowledgeEntryVersions KnowledgeEntryVersion[] @relation("EntryVersionAuthor")
|
||||
llmProviders LlmProviderInstance[] @relation("UserLlmProviders")
|
||||
federatedIdentities FederatedIdentity[]
|
||||
llmUsageLogs LlmUsageLog[] @relation("UserLlmUsageLogs")
|
||||
userCredentials UserCredential[] @relation("UserCredentials")
|
||||
|
||||
@@map("users")
|
||||
}
|
||||
@@ -239,39 +275,46 @@ model UserPreference {
|
||||
}
|
||||
|
||||
model Workspace {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
name String
|
||||
ownerId String @map("owner_id") @db.Uuid
|
||||
settings Json @default("{}")
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
|
||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
name String
|
||||
ownerId String @map("owner_id") @db.Uuid
|
||||
settings Json @default("{}")
|
||||
matrixRoomId String? @map("matrix_room_id")
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
|
||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
|
||||
|
||||
// Relations
|
||||
owner User @relation("WorkspaceOwner", fields: [ownerId], references: [id], onDelete: Cascade)
|
||||
members WorkspaceMember[]
|
||||
teams Team[]
|
||||
tasks Task[]
|
||||
events Event[]
|
||||
projects Project[]
|
||||
activityLogs ActivityLog[]
|
||||
memoryEmbeddings MemoryEmbedding[]
|
||||
domains Domain[]
|
||||
ideas Idea[]
|
||||
relationships Relationship[]
|
||||
agents Agent[]
|
||||
agentSessions AgentSession[]
|
||||
agentTasks AgentTask[]
|
||||
userLayouts UserLayout[]
|
||||
knowledgeEntries KnowledgeEntry[]
|
||||
knowledgeTags KnowledgeTag[]
|
||||
cronSchedules CronSchedule[]
|
||||
personalities Personality[]
|
||||
llmSettings WorkspaceLlmSettings?
|
||||
qualityGates QualityGate[]
|
||||
runnerJobs RunnerJob[]
|
||||
federationConnections FederationConnection[]
|
||||
federationMessages FederationMessage[]
|
||||
federationEventSubscriptions FederationEventSubscription[]
|
||||
owner User @relation("WorkspaceOwner", fields: [ownerId], references: [id], onDelete: Cascade)
|
||||
members WorkspaceMember[]
|
||||
teams Team[]
|
||||
tasks Task[]
|
||||
events Event[]
|
||||
projects Project[]
|
||||
activityLogs ActivityLog[]
|
||||
memoryEmbeddings MemoryEmbedding[]
|
||||
domains Domain[]
|
||||
ideas Idea[]
|
||||
relationships Relationship[]
|
||||
agents Agent[]
|
||||
agentSessions AgentSession[]
|
||||
agentTasks AgentTask[]
|
||||
findings Finding[]
|
||||
agentMemories AgentMemory[]
|
||||
userLayouts UserLayout[]
|
||||
knowledgeEntries KnowledgeEntry[]
|
||||
knowledgeTags KnowledgeTag[]
|
||||
cronSchedules CronSchedule[]
|
||||
personalities Personality[]
|
||||
llmSettings WorkspaceLlmSettings?
|
||||
qualityGates QualityGate[]
|
||||
runnerJobs RunnerJob[]
|
||||
federationConnections FederationConnection[]
|
||||
federationMessages FederationMessage[]
|
||||
federationEventSubscriptions FederationEventSubscription[]
|
||||
llmUsageLogs LlmUsageLog[]
|
||||
userCredentials UserCredential[]
|
||||
terminalSessions TerminalSession[]
|
||||
conversationArchives ConversationArchive[]
|
||||
|
||||
@@index([ownerId])
|
||||
@@map("workspaces")
|
||||
@@ -649,6 +692,7 @@ model AgentTask {
|
||||
createdBy User @relation("AgentTaskCreator", fields: [createdById], references: [id], onDelete: Cascade)
|
||||
createdById String @map("created_by_id") @db.Uuid
|
||||
runnerJobs RunnerJob[]
|
||||
findings Finding[]
|
||||
|
||||
@@unique([id, workspaceId])
|
||||
@@index([workspaceId])
|
||||
@@ -658,6 +702,33 @@ model AgentTask {
|
||||
@@map("agent_tasks")
|
||||
}
|
||||
|
||||
model Finding {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
workspaceId String @map("workspace_id") @db.Uuid
|
||||
taskId String? @map("task_id") @db.Uuid
|
||||
|
||||
agentId String @map("agent_id")
|
||||
type String
|
||||
title String
|
||||
data Json
|
||||
summary String @db.Text
|
||||
// Note: vector dimension (1536) must match EMBEDDING_DIMENSION constant in @mosaic/shared
|
||||
embedding Unsupported("vector(1536)")?
|
||||
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
|
||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
|
||||
|
||||
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
|
||||
task AgentTask? @relation(fields: [taskId], references: [id], onDelete: SetNull)
|
||||
|
||||
@@unique([id, workspaceId])
|
||||
@@index([workspaceId])
|
||||
@@index([agentId])
|
||||
@@index([type])
|
||||
@@index([taskId])
|
||||
@@map("findings")
|
||||
}
|
||||
|
||||
model AgentSession {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
workspaceId String @map("workspace_id") @db.Uuid
|
||||
@@ -695,6 +766,23 @@ model AgentSession {
|
||||
@@map("agent_sessions")
|
||||
}
|
||||
|
||||
model AgentMemory {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
workspaceId String @map("workspace_id") @db.Uuid
|
||||
agentId String @map("agent_id")
|
||||
key String
|
||||
value Json
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
|
||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
|
||||
|
||||
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@unique([workspaceId, agentId, key])
|
||||
@@index([workspaceId])
|
||||
@@index([agentId])
|
||||
@@map("agent_memories")
|
||||
}
|
||||
|
||||
model WidgetDefinition {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
|
||||
@@ -781,6 +869,7 @@ model Account {
|
||||
refreshTokenExpiresAt DateTime? @map("refresh_token_expires_at") @db.Timestamptz
|
||||
scope String?
|
||||
password String?
|
||||
encryptionVersion String? @map("encryption_version") @db.VarChar(20)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
|
||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
|
||||
|
||||
@@ -789,6 +878,7 @@ model Account {
|
||||
|
||||
@@unique([providerId, accountId])
|
||||
@@index([userId])
|
||||
@@index([encryptionVersion])
|
||||
@@map("accounts")
|
||||
}
|
||||
|
||||
@@ -804,6 +894,52 @@ model Verification {
|
||||
@@map("verifications")
|
||||
}
|
||||
|
||||
// ============================================
|
||||
// USER CREDENTIALS MODULE
|
||||
// ============================================
|
||||
|
||||
model UserCredential {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
userId String @map("user_id") @db.Uuid
|
||||
workspaceId String? @map("workspace_id") @db.Uuid
|
||||
|
||||
// Identity
|
||||
name String
|
||||
provider String // "github", "openai", "custom"
|
||||
type CredentialType
|
||||
scope CredentialScope @default(USER)
|
||||
|
||||
// Encrypted storage
|
||||
encryptedValue String @map("encrypted_value") @db.Text
|
||||
maskedValue String? @map("masked_value") @db.VarChar(20)
|
||||
|
||||
// Metadata
|
||||
description String? @db.Text
|
||||
expiresAt DateTime? @map("expires_at") @db.Timestamptz
|
||||
lastUsedAt DateTime? @map("last_used_at") @db.Timestamptz
|
||||
metadata Json @default("{}")
|
||||
|
||||
// Status
|
||||
isActive Boolean @default(true) @map("is_active")
|
||||
rotatedAt DateTime? @map("rotated_at") @db.Timestamptz
|
||||
|
||||
// Audit
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
|
||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
|
||||
|
||||
// Relations
|
||||
user User @relation("UserCredentials", fields: [userId], references: [id], onDelete: Cascade)
|
||||
workspace Workspace? @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@unique([userId, workspaceId, provider, name])
|
||||
@@index([userId])
|
||||
@@index([workspaceId])
|
||||
@@index([userId, scope])
|
||||
@@index([workspaceId, scope])
|
||||
@@index([scope, isActive])
|
||||
@@map("user_credentials")
|
||||
}
|
||||
|
||||
// ============================================
|
||||
// KNOWLEDGE MODULE
|
||||
// ============================================
|
||||
@@ -988,6 +1124,10 @@ model Personality {
|
||||
displayName String @map("display_name")
|
||||
description String? @db.Text
|
||||
|
||||
// Tone and formality
|
||||
tone String @default("neutral")
|
||||
formalityLevel FormalityLevel @default(NEUTRAL) @map("formality_level")
|
||||
|
||||
// System prompt
|
||||
systemPrompt String @map("system_prompt") @db.Text
|
||||
|
||||
@@ -1036,6 +1176,7 @@ model LlmProviderInstance {
|
||||
user User? @relation("UserLlmProviders", fields: [userId], references: [id], onDelete: Cascade)
|
||||
personalities Personality[] @relation("PersonalityLlmProvider")
|
||||
workspaceLlmSettings WorkspaceLlmSettings[] @relation("WorkspaceLlmProvider")
|
||||
llmUsageLogs LlmUsageLog[] @relation("LlmUsageLogs")
|
||||
|
||||
@@index([userId])
|
||||
@@index([providerType])
|
||||
@@ -1288,8 +1429,8 @@ model FederationConnection {
|
||||
disconnectedAt DateTime? @map("disconnected_at") @db.Timestamptz
|
||||
|
||||
// Relations
|
||||
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
|
||||
messages FederationMessage[]
|
||||
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
|
||||
messages FederationMessage[]
|
||||
eventSubscriptions FederationEventSubscription[]
|
||||
|
||||
@@unique([workspaceId, remoteInstanceId])
|
||||
@@ -1383,3 +1524,103 @@ model FederationEventSubscription {
|
||||
@@index([workspaceId, isActive])
|
||||
@@map("federation_event_subscriptions")
|
||||
}
|
||||
|
||||
// ============================================
|
||||
// LLM USAGE TRACKING MODULE
|
||||
// ============================================
|
||||
|
||||
model LlmUsageLog {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
workspaceId String @map("workspace_id") @db.Uuid
|
||||
userId String @map("user_id") @db.Uuid
|
||||
|
||||
// LLM provider and model info
|
||||
provider String @db.VarChar(50)
|
||||
model String @db.VarChar(100)
|
||||
providerInstanceId String? @map("provider_instance_id") @db.Uuid
|
||||
|
||||
// Token usage
|
||||
promptTokens Int @default(0) @map("prompt_tokens")
|
||||
completionTokens Int @default(0) @map("completion_tokens")
|
||||
totalTokens Int @default(0) @map("total_tokens")
|
||||
|
||||
// Optional cost (in cents for precision)
|
||||
costCents Float? @map("cost_cents")
|
||||
|
||||
// Task type for routing analytics
|
||||
taskType String? @map("task_type") @db.VarChar(50)
|
||||
|
||||
// Optional reference to conversation/session
|
||||
conversationId String? @map("conversation_id") @db.Uuid
|
||||
|
||||
// Duration in milliseconds
|
||||
durationMs Int? @map("duration_ms")
|
||||
|
||||
// Timestamp
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
|
||||
|
||||
// Relations
|
||||
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
|
||||
user User @relation("UserLlmUsageLogs", fields: [userId], references: [id], onDelete: Cascade)
|
||||
llmProviderInstance LlmProviderInstance? @relation("LlmUsageLogs", fields: [providerInstanceId], references: [id], onDelete: SetNull)
|
||||
|
||||
@@index([workspaceId])
|
||||
@@index([workspaceId, createdAt])
|
||||
@@index([userId])
|
||||
@@index([provider])
|
||||
@@index([model])
|
||||
@@index([providerInstanceId])
|
||||
@@index([taskType])
|
||||
@@index([conversationId])
|
||||
@@map("llm_usage_logs")
|
||||
}
|
||||
|
||||
// ============================================
|
||||
// TERMINAL MODULE
|
||||
// ============================================
|
||||
|
||||
model TerminalSession {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
workspaceId String @map("workspace_id") @db.Uuid
|
||||
name String @default("Terminal")
|
||||
status TerminalSessionStatus @default(ACTIVE)
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
|
||||
closedAt DateTime? @map("closed_at") @db.Timestamptz
|
||||
|
||||
// Relations
|
||||
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@index([workspaceId])
|
||||
@@index([workspaceId, status])
|
||||
@@map("terminal_sessions")
|
||||
}
|
||||
|
||||
// ============================================
|
||||
// CONVERSATION ARCHIVE MODULE
|
||||
// ============================================
|
||||
|
||||
model ConversationArchive {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
workspaceId String @map("workspace_id") @db.Uuid
|
||||
sessionId String @map("session_id")
|
||||
agentId String @map("agent_id")
|
||||
messages Json
|
||||
messageCount Int @map("message_count")
|
||||
summary String @db.Text
|
||||
// Note: vector dimension (1536) must match EMBEDDING_DIMENSION constant in @mosaic/shared
|
||||
embedding Unsupported("vector(1536)")?
|
||||
startedAt DateTime @map("started_at") @db.Timestamptz
|
||||
endedAt DateTime? @map("ended_at") @db.Timestamptz
|
||||
metadata Json @default("{}")
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
|
||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
|
||||
|
||||
// Relations
|
||||
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@unique([workspaceId, sessionId])
|
||||
@@index([workspaceId])
|
||||
@@index([agentId])
|
||||
@@index([startedAt])
|
||||
@@map("conversation_archives")
|
||||
}
|
||||
|
||||
@@ -65,6 +65,136 @@ async function main() {
|
||||
},
|
||||
});
|
||||
|
||||
// ============================================
|
||||
// WIDGET DEFINITIONS (global, not workspace-scoped)
|
||||
// ============================================
|
||||
const widgetDefs = [
|
||||
{
|
||||
name: "TasksWidget",
|
||||
displayName: "Tasks",
|
||||
description: "View and manage your tasks",
|
||||
component: "TasksWidget",
|
||||
defaultWidth: 2,
|
||||
defaultHeight: 2,
|
||||
minWidth: 1,
|
||||
minHeight: 2,
|
||||
maxWidth: 4,
|
||||
maxHeight: null,
|
||||
configSchema: {},
|
||||
},
|
||||
{
|
||||
name: "CalendarWidget",
|
||||
displayName: "Calendar",
|
||||
description: "View upcoming events and schedule",
|
||||
component: "CalendarWidget",
|
||||
defaultWidth: 2,
|
||||
defaultHeight: 2,
|
||||
minWidth: 2,
|
||||
minHeight: 2,
|
||||
maxWidth: 4,
|
||||
maxHeight: null,
|
||||
configSchema: {},
|
||||
},
|
||||
{
|
||||
name: "QuickCaptureWidget",
|
||||
displayName: "Quick Capture",
|
||||
description: "Quickly capture notes and tasks",
|
||||
component: "QuickCaptureWidget",
|
||||
defaultWidth: 2,
|
||||
defaultHeight: 1,
|
||||
minWidth: 2,
|
||||
minHeight: 1,
|
||||
maxWidth: 4,
|
||||
maxHeight: 2,
|
||||
configSchema: {},
|
||||
},
|
||||
{
|
||||
name: "AgentStatusWidget",
|
||||
displayName: "Agent Status",
|
||||
description: "Monitor agent activity and status",
|
||||
component: "AgentStatusWidget",
|
||||
defaultWidth: 2,
|
||||
defaultHeight: 2,
|
||||
minWidth: 1,
|
||||
minHeight: 2,
|
||||
maxWidth: 3,
|
||||
maxHeight: null,
|
||||
configSchema: {},
|
||||
},
|
||||
{
|
||||
name: "ActiveProjectsWidget",
|
||||
displayName: "Active Projects & Agent Chains",
|
||||
description: "View active projects and running agent sessions",
|
||||
component: "ActiveProjectsWidget",
|
||||
defaultWidth: 2,
|
||||
defaultHeight: 3,
|
||||
minWidth: 2,
|
||||
minHeight: 2,
|
||||
maxWidth: 4,
|
||||
maxHeight: null,
|
||||
configSchema: {},
|
||||
},
|
||||
{
|
||||
name: "TaskProgressWidget",
|
||||
displayName: "Task Progress",
|
||||
description: "Live progress of orchestrator agent tasks",
|
||||
component: "TaskProgressWidget",
|
||||
defaultWidth: 2,
|
||||
defaultHeight: 2,
|
||||
minWidth: 1,
|
||||
minHeight: 2,
|
||||
maxWidth: 3,
|
||||
maxHeight: null,
|
||||
configSchema: {},
|
||||
},
|
||||
{
|
||||
name: "OrchestratorEventsWidget",
|
||||
displayName: "Orchestrator Events",
|
||||
description: "Recent orchestration events with stream/Matrix visibility",
|
||||
component: "OrchestratorEventsWidget",
|
||||
defaultWidth: 2,
|
||||
defaultHeight: 2,
|
||||
minWidth: 1,
|
||||
minHeight: 2,
|
||||
maxWidth: 4,
|
||||
maxHeight: null,
|
||||
configSchema: {},
|
||||
},
|
||||
];
|
||||
|
||||
for (const wd of widgetDefs) {
|
||||
await prisma.widgetDefinition.upsert({
|
||||
where: { name: wd.name },
|
||||
update: {
|
||||
displayName: wd.displayName,
|
||||
description: wd.description,
|
||||
component: wd.component,
|
||||
defaultWidth: wd.defaultWidth,
|
||||
defaultHeight: wd.defaultHeight,
|
||||
minWidth: wd.minWidth,
|
||||
minHeight: wd.minHeight,
|
||||
maxWidth: wd.maxWidth,
|
||||
maxHeight: wd.maxHeight,
|
||||
configSchema: wd.configSchema,
|
||||
},
|
||||
create: {
|
||||
name: wd.name,
|
||||
displayName: wd.displayName,
|
||||
description: wd.description,
|
||||
component: wd.component,
|
||||
defaultWidth: wd.defaultWidth,
|
||||
defaultHeight: wd.defaultHeight,
|
||||
minWidth: wd.minWidth,
|
||||
minHeight: wd.minHeight,
|
||||
maxWidth: wd.maxWidth,
|
||||
maxHeight: wd.maxHeight,
|
||||
configSchema: wd.configSchema,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
console.log(`Seeded ${widgetDefs.length} widget definitions`);
|
||||
|
||||
// Use transaction for atomic seed data reset and creation
|
||||
await prisma.$transaction(async (tx) => {
|
||||
// Delete existing seed data for idempotency (avoids duplicates on re-run)
|
||||
|
||||
166
apps/api/scripts/encrypt-llm-keys.ts
Normal file
166
apps/api/scripts/encrypt-llm-keys.ts
Normal file
@@ -0,0 +1,166 @@
|
||||
/**
|
||||
* Data Migration: Encrypt LLM Provider API Keys
|
||||
*
|
||||
* Encrypts all plaintext API keys in llm_provider_instances.config using OpenBao Transit.
|
||||
* This script processes records in batches and runs in a transaction for safety.
|
||||
*
|
||||
* Usage:
|
||||
* pnpm --filter @mosaic/api migrate:encrypt-llm-keys
|
||||
*
|
||||
* Environment Variables:
|
||||
* DATABASE_URL - PostgreSQL connection string
|
||||
* OPENBAO_ADDR - OpenBao server address (default: http://openbao:8200)
|
||||
* APPROLE_CREDENTIALS_PATH - Path to AppRole credentials file
|
||||
*/
|
||||
|
||||
import { PrismaClient } from "@prisma/client";
|
||||
import { VaultService } from "../src/vault/vault.service";
|
||||
import { TransitKey } from "../src/vault/vault.constants";
|
||||
import { Logger } from "@nestjs/common";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
|
||||
interface LlmProviderConfig {
|
||||
apiKey?: string;
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
interface LlmProviderInstance {
|
||||
id: string;
|
||||
config: LlmProviderConfig;
|
||||
providerType: string;
|
||||
displayName: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a value is already encrypted
|
||||
*/
|
||||
function isEncrypted(value: string): boolean {
|
||||
if (!value || typeof value !== "string") {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Vault format: vault:v1:...
|
||||
if (value.startsWith("vault:v1:")) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// AES format: iv:authTag:encrypted (3 colon-separated hex parts)
|
||||
const parts = value.split(":");
|
||||
if (parts.length === 3 && parts.every((part) => /^[0-9a-f]+$/i.test(part))) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Main migration function
|
||||
*/
|
||||
async function main(): Promise<void> {
|
||||
const logger = new Logger("EncryptLlmKeys");
|
||||
const prisma = new PrismaClient();
|
||||
|
||||
try {
|
||||
logger.log("Starting LLM API key encryption migration...");
|
||||
|
||||
// Initialize VaultService
|
||||
const configService = new ConfigService();
|
||||
const vaultService = new VaultService(configService);
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-call
|
||||
await vaultService.onModuleInit();
|
||||
|
||||
logger.log("VaultService initialized successfully");
|
||||
|
||||
// Fetch all LLM provider instances
|
||||
const instances = await prisma.llmProviderInstance.findMany({
|
||||
select: {
|
||||
id: true,
|
||||
config: true,
|
||||
providerType: true,
|
||||
displayName: true,
|
||||
},
|
||||
});
|
||||
|
||||
logger.log(`Found ${String(instances.length)} LLM provider instances`);
|
||||
|
||||
let encryptedCount = 0;
|
||||
let skippedCount = 0;
|
||||
let errorCount = 0;
|
||||
|
||||
// Process each instance
|
||||
for (const instance of instances as LlmProviderInstance[]) {
|
||||
try {
|
||||
const config = instance.config;
|
||||
|
||||
// Skip if no apiKey field
|
||||
if (!config.apiKey || typeof config.apiKey !== "string") {
|
||||
logger.debug(`Skipping ${instance.displayName} (${instance.id}): No API key`);
|
||||
skippedCount++;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Skip if already encrypted
|
||||
if (isEncrypted(config.apiKey)) {
|
||||
logger.debug(`Skipping ${instance.displayName} (${instance.id}): Already encrypted`);
|
||||
skippedCount++;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Encrypt the API key
|
||||
logger.log(`Encrypting ${instance.displayName} (${instance.providerType})...`);
|
||||
|
||||
const encryptedApiKey = await vaultService.encrypt(config.apiKey, TransitKey.LLM_CONFIG);
|
||||
|
||||
// Update the instance with encrypted key
|
||||
await prisma.llmProviderInstance.update({
|
||||
where: { id: instance.id },
|
||||
data: {
|
||||
config: {
|
||||
...config,
|
||||
apiKey: encryptedApiKey,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
encryptedCount++;
|
||||
logger.log(`✓ Encrypted ${instance.displayName} (${instance.id})`);
|
||||
} catch (error: unknown) {
|
||||
errorCount++;
|
||||
const errorMsg = error instanceof Error ? error.message : String(error);
|
||||
logger.error(`✗ Failed to encrypt ${instance.displayName} (${instance.id}): ${errorMsg}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Summary
|
||||
logger.log("\n=== Migration Summary ===");
|
||||
logger.log(`Total instances: ${String(instances.length)}`);
|
||||
logger.log(`Encrypted: ${String(encryptedCount)}`);
|
||||
logger.log(`Skipped: ${String(skippedCount)}`);
|
||||
logger.log(`Errors: ${String(errorCount)}`);
|
||||
|
||||
if (errorCount > 0) {
|
||||
logger.warn("\n⚠️ Some API keys failed to encrypt. Please review the errors above.");
|
||||
process.exit(1);
|
||||
} else if (encryptedCount === 0) {
|
||||
logger.log("\n✓ All API keys are already encrypted or no keys found.");
|
||||
} else {
|
||||
logger.log("\n✓ Migration completed successfully!");
|
||||
}
|
||||
} catch (error: unknown) {
|
||||
const errorMsg = error instanceof Error ? error.message : String(error);
|
||||
logger.error(`Migration failed: ${errorMsg}`);
|
||||
throw error;
|
||||
} finally {
|
||||
await prisma.$disconnect();
|
||||
}
|
||||
}
|
||||
|
||||
// Run migration
|
||||
main()
|
||||
.then(() => {
|
||||
process.exit(0);
|
||||
})
|
||||
.catch((error: unknown) => {
|
||||
console.error(error);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -802,7 +802,7 @@ describe("ActivityService", () => {
|
||||
);
|
||||
});
|
||||
|
||||
it("should handle database errors gracefully when logging activity", async () => {
|
||||
it("should handle database errors gracefully when logging activity (fire-and-forget)", async () => {
|
||||
const input: CreateActivityLogInput = {
|
||||
workspaceId: "workspace-123",
|
||||
userId: "user-123",
|
||||
@@ -814,7 +814,9 @@ describe("ActivityService", () => {
|
||||
const dbError = new Error("Database connection failed");
|
||||
mockPrismaService.activityLog.create.mockRejectedValue(dbError);
|
||||
|
||||
await expect(service.logActivity(input)).rejects.toThrow("Database connection failed");
|
||||
// Activity logging is fire-and-forget - returns null on error instead of throwing
|
||||
const result = await service.logActivity(input);
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("should handle extremely large details objects", async () => {
|
||||
@@ -1132,7 +1134,7 @@ describe("ActivityService", () => {
|
||||
});
|
||||
|
||||
describe("database error handling", () => {
|
||||
it("should handle database connection failures in logActivity", async () => {
|
||||
it("should handle database connection failures in logActivity (fire-and-forget)", async () => {
|
||||
const createInput: CreateActivityLogInput = {
|
||||
workspaceId: "workspace-123",
|
||||
userId: "user-123",
|
||||
@@ -1144,7 +1146,9 @@ describe("ActivityService", () => {
|
||||
const dbError = new Error("Connection refused");
|
||||
mockPrismaService.activityLog.create.mockRejectedValue(dbError);
|
||||
|
||||
await expect(service.logActivity(createInput)).rejects.toThrow("Connection refused");
|
||||
// Activity logging is fire-and-forget - returns null on error instead of throwing
|
||||
const result = await service.logActivity(createInput);
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("should handle Prisma timeout errors in findAll", async () => {
|
||||
|
||||
@@ -18,16 +18,25 @@ export class ActivityService {
|
||||
constructor(private readonly prisma: PrismaService) {}
|
||||
|
||||
/**
|
||||
* Create a new activity log entry
|
||||
* Create a new activity log entry (fire-and-forget)
|
||||
*
|
||||
* Activity logging failures are logged but never propagate to callers.
|
||||
* This ensures activity logging never breaks primary operations.
|
||||
*
|
||||
* @returns The created ActivityLog or null if logging failed
|
||||
*/
|
||||
async logActivity(input: CreateActivityLogInput): Promise<ActivityLog> {
|
||||
async logActivity(input: CreateActivityLogInput): Promise<ActivityLog | null> {
|
||||
try {
|
||||
return await this.prisma.activityLog.create({
|
||||
data: input as unknown as Prisma.ActivityLogCreateInput,
|
||||
});
|
||||
} catch (error) {
|
||||
this.logger.error("Failed to log activity", error);
|
||||
throw error;
|
||||
// Log the error but don't propagate - activity logging is fire-and-forget
|
||||
this.logger.error(
|
||||
`Failed to log activity: action=${input.action} entityType=${input.entityType} entityId=${input.entityId}`,
|
||||
error instanceof Error ? error.stack : String(error)
|
||||
);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -167,7 +176,7 @@ export class ActivityService {
|
||||
userId: string,
|
||||
taskId: string,
|
||||
details?: Prisma.JsonValue
|
||||
): Promise<ActivityLog> {
|
||||
): Promise<ActivityLog | null> {
|
||||
return this.logActivity({
|
||||
workspaceId,
|
||||
userId,
|
||||
@@ -186,7 +195,7 @@ export class ActivityService {
|
||||
userId: string,
|
||||
taskId: string,
|
||||
details?: Prisma.JsonValue
|
||||
): Promise<ActivityLog> {
|
||||
): Promise<ActivityLog | null> {
|
||||
return this.logActivity({
|
||||
workspaceId,
|
||||
userId,
|
||||
@@ -205,7 +214,7 @@ export class ActivityService {
|
||||
userId: string,
|
||||
taskId: string,
|
||||
details?: Prisma.JsonValue
|
||||
): Promise<ActivityLog> {
|
||||
): Promise<ActivityLog | null> {
|
||||
return this.logActivity({
|
||||
workspaceId,
|
||||
userId,
|
||||
@@ -224,7 +233,7 @@ export class ActivityService {
|
||||
userId: string,
|
||||
taskId: string,
|
||||
details?: Prisma.JsonValue
|
||||
): Promise<ActivityLog> {
|
||||
): Promise<ActivityLog | null> {
|
||||
return this.logActivity({
|
||||
workspaceId,
|
||||
userId,
|
||||
@@ -243,7 +252,7 @@ export class ActivityService {
|
||||
userId: string,
|
||||
taskId: string,
|
||||
assigneeId: string
|
||||
): Promise<ActivityLog> {
|
||||
): Promise<ActivityLog | null> {
|
||||
return this.logActivity({
|
||||
workspaceId,
|
||||
userId,
|
||||
@@ -262,7 +271,7 @@ export class ActivityService {
|
||||
userId: string,
|
||||
eventId: string,
|
||||
details?: Prisma.JsonValue
|
||||
): Promise<ActivityLog> {
|
||||
): Promise<ActivityLog | null> {
|
||||
return this.logActivity({
|
||||
workspaceId,
|
||||
userId,
|
||||
@@ -281,7 +290,7 @@ export class ActivityService {
|
||||
userId: string,
|
||||
eventId: string,
|
||||
details?: Prisma.JsonValue
|
||||
): Promise<ActivityLog> {
|
||||
): Promise<ActivityLog | null> {
|
||||
return this.logActivity({
|
||||
workspaceId,
|
||||
userId,
|
||||
@@ -300,7 +309,7 @@ export class ActivityService {
|
||||
userId: string,
|
||||
eventId: string,
|
||||
details?: Prisma.JsonValue
|
||||
): Promise<ActivityLog> {
|
||||
): Promise<ActivityLog | null> {
|
||||
return this.logActivity({
|
||||
workspaceId,
|
||||
userId,
|
||||
@@ -319,7 +328,7 @@ export class ActivityService {
|
||||
userId: string,
|
||||
projectId: string,
|
||||
details?: Prisma.JsonValue
|
||||
): Promise<ActivityLog> {
|
||||
): Promise<ActivityLog | null> {
|
||||
return this.logActivity({
|
||||
workspaceId,
|
||||
userId,
|
||||
@@ -338,7 +347,7 @@ export class ActivityService {
|
||||
userId: string,
|
||||
projectId: string,
|
||||
details?: Prisma.JsonValue
|
||||
): Promise<ActivityLog> {
|
||||
): Promise<ActivityLog | null> {
|
||||
return this.logActivity({
|
||||
workspaceId,
|
||||
userId,
|
||||
@@ -357,7 +366,7 @@ export class ActivityService {
|
||||
userId: string,
|
||||
projectId: string,
|
||||
details?: Prisma.JsonValue
|
||||
): Promise<ActivityLog> {
|
||||
): Promise<ActivityLog | null> {
|
||||
return this.logActivity({
|
||||
workspaceId,
|
||||
userId,
|
||||
@@ -375,7 +384,7 @@ export class ActivityService {
|
||||
workspaceId: string,
|
||||
userId: string,
|
||||
details?: Prisma.JsonValue
|
||||
): Promise<ActivityLog> {
|
||||
): Promise<ActivityLog | null> {
|
||||
return this.logActivity({
|
||||
workspaceId,
|
||||
userId,
|
||||
@@ -393,7 +402,7 @@ export class ActivityService {
|
||||
workspaceId: string,
|
||||
userId: string,
|
||||
details?: Prisma.JsonValue
|
||||
): Promise<ActivityLog> {
|
||||
): Promise<ActivityLog | null> {
|
||||
return this.logActivity({
|
||||
workspaceId,
|
||||
userId,
|
||||
@@ -412,7 +421,7 @@ export class ActivityService {
|
||||
userId: string,
|
||||
memberId: string,
|
||||
role: string
|
||||
): Promise<ActivityLog> {
|
||||
): Promise<ActivityLog | null> {
|
||||
return this.logActivity({
|
||||
workspaceId,
|
||||
userId,
|
||||
@@ -430,7 +439,7 @@ export class ActivityService {
|
||||
workspaceId: string,
|
||||
userId: string,
|
||||
memberId: string
|
||||
): Promise<ActivityLog> {
|
||||
): Promise<ActivityLog | null> {
|
||||
return this.logActivity({
|
||||
workspaceId,
|
||||
userId,
|
||||
@@ -448,7 +457,7 @@ export class ActivityService {
|
||||
workspaceId: string,
|
||||
userId: string,
|
||||
details?: Prisma.JsonValue
|
||||
): Promise<ActivityLog> {
|
||||
): Promise<ActivityLog | null> {
|
||||
return this.logActivity({
|
||||
workspaceId,
|
||||
userId,
|
||||
@@ -467,7 +476,7 @@ export class ActivityService {
|
||||
userId: string,
|
||||
domainId: string,
|
||||
details?: Prisma.JsonValue
|
||||
): Promise<ActivityLog> {
|
||||
): Promise<ActivityLog | null> {
|
||||
return this.logActivity({
|
||||
workspaceId,
|
||||
userId,
|
||||
@@ -486,7 +495,7 @@ export class ActivityService {
|
||||
userId: string,
|
||||
domainId: string,
|
||||
details?: Prisma.JsonValue
|
||||
): Promise<ActivityLog> {
|
||||
): Promise<ActivityLog | null> {
|
||||
return this.logActivity({
|
||||
workspaceId,
|
||||
userId,
|
||||
@@ -505,7 +514,7 @@ export class ActivityService {
|
||||
userId: string,
|
||||
domainId: string,
|
||||
details?: Prisma.JsonValue
|
||||
): Promise<ActivityLog> {
|
||||
): Promise<ActivityLog | null> {
|
||||
return this.logActivity({
|
||||
workspaceId,
|
||||
userId,
|
||||
@@ -524,7 +533,7 @@ export class ActivityService {
|
||||
userId: string,
|
||||
ideaId: string,
|
||||
details?: Prisma.JsonValue
|
||||
): Promise<ActivityLog> {
|
||||
): Promise<ActivityLog | null> {
|
||||
return this.logActivity({
|
||||
workspaceId,
|
||||
userId,
|
||||
@@ -543,7 +552,7 @@ export class ActivityService {
|
||||
userId: string,
|
||||
ideaId: string,
|
||||
details?: Prisma.JsonValue
|
||||
): Promise<ActivityLog> {
|
||||
): Promise<ActivityLog | null> {
|
||||
return this.logActivity({
|
||||
workspaceId,
|
||||
userId,
|
||||
@@ -562,7 +571,7 @@ export class ActivityService {
|
||||
userId: string,
|
||||
ideaId: string,
|
||||
details?: Prisma.JsonValue
|
||||
): Promise<ActivityLog> {
|
||||
): Promise<ActivityLog | null> {
|
||||
return this.logActivity({
|
||||
workspaceId,
|
||||
userId,
|
||||
|
||||
258
apps/api/src/admin/admin.controller.spec.ts
Normal file
258
apps/api/src/admin/admin.controller.spec.ts
Normal file
@@ -0,0 +1,258 @@
|
||||
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||
import { Test, TestingModule } from "@nestjs/testing";
|
||||
import { AdminController } from "./admin.controller";
|
||||
import { AdminService } from "./admin.service";
|
||||
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||
import { AdminGuard } from "../auth/guards/admin.guard";
|
||||
import { WorkspaceMemberRole } from "@prisma/client";
|
||||
import type { ExecutionContext } from "@nestjs/common";
|
||||
|
||||
describe("AdminController", () => {
|
||||
let controller: AdminController;
|
||||
let service: AdminService;
|
||||
|
||||
const mockAdminService = {
|
||||
listUsers: vi.fn(),
|
||||
inviteUser: vi.fn(),
|
||||
updateUser: vi.fn(),
|
||||
deactivateUser: vi.fn(),
|
||||
createWorkspace: vi.fn(),
|
||||
updateWorkspace: vi.fn(),
|
||||
};
|
||||
|
||||
const mockAuthGuard = {
|
||||
canActivate: vi.fn((context: ExecutionContext) => {
|
||||
const request = context.switchToHttp().getRequest();
|
||||
request.user = {
|
||||
id: "550e8400-e29b-41d4-a716-446655440001",
|
||||
email: "admin@example.com",
|
||||
name: "Admin User",
|
||||
};
|
||||
return true;
|
||||
}),
|
||||
};
|
||||
|
||||
const mockAdminGuard = {
|
||||
canActivate: vi.fn(() => true),
|
||||
};
|
||||
|
||||
const mockAdminId = "550e8400-e29b-41d4-a716-446655440001";
|
||||
const mockUserId = "550e8400-e29b-41d4-a716-446655440002";
|
||||
const mockWorkspaceId = "550e8400-e29b-41d4-a716-446655440003";
|
||||
|
||||
const mockAdminUser = {
|
||||
id: mockAdminId,
|
||||
email: "admin@example.com",
|
||||
name: "Admin User",
|
||||
};
|
||||
|
||||
const mockUserResponse = {
|
||||
id: mockUserId,
|
||||
name: "Test User",
|
||||
email: "test@example.com",
|
||||
emailVerified: false,
|
||||
image: null,
|
||||
createdAt: new Date("2026-01-01"),
|
||||
deactivatedAt: null,
|
||||
isLocalAuth: false,
|
||||
invitedAt: null,
|
||||
invitedBy: null,
|
||||
workspaceMemberships: [],
|
||||
};
|
||||
|
||||
const mockWorkspaceResponse = {
|
||||
id: mockWorkspaceId,
|
||||
name: "Test Workspace",
|
||||
ownerId: mockAdminId,
|
||||
settings: {},
|
||||
createdAt: new Date("2026-01-01"),
|
||||
updatedAt: new Date("2026-01-01"),
|
||||
memberCount: 1,
|
||||
};
|
||||
|
||||
beforeEach(async () => {
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
controllers: [AdminController],
|
||||
providers: [
|
||||
{
|
||||
provide: AdminService,
|
||||
useValue: mockAdminService,
|
||||
},
|
||||
],
|
||||
})
|
||||
.overrideGuard(AuthGuard)
|
||||
.useValue(mockAuthGuard)
|
||||
.overrideGuard(AdminGuard)
|
||||
.useValue(mockAdminGuard)
|
||||
.compile();
|
||||
|
||||
controller = module.get<AdminController>(AdminController);
|
||||
service = module.get<AdminService>(AdminService);
|
||||
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
it("should be defined", () => {
|
||||
expect(controller).toBeDefined();
|
||||
});
|
||||
|
||||
describe("listUsers", () => {
|
||||
it("should return paginated users", async () => {
|
||||
const paginatedResult = {
|
||||
data: [mockUserResponse],
|
||||
meta: { total: 1, page: 1, limit: 50, totalPages: 1 },
|
||||
};
|
||||
mockAdminService.listUsers.mockResolvedValue(paginatedResult);
|
||||
|
||||
const result = await controller.listUsers({ page: 1, limit: 50 });
|
||||
|
||||
expect(result).toEqual(paginatedResult);
|
||||
expect(service.listUsers).toHaveBeenCalledWith(1, 50);
|
||||
});
|
||||
|
||||
it("should use default pagination", async () => {
|
||||
const paginatedResult = {
|
||||
data: [],
|
||||
meta: { total: 0, page: 1, limit: 50, totalPages: 0 },
|
||||
};
|
||||
mockAdminService.listUsers.mockResolvedValue(paginatedResult);
|
||||
|
||||
await controller.listUsers({});
|
||||
|
||||
expect(service.listUsers).toHaveBeenCalledWith(undefined, undefined);
|
||||
});
|
||||
});
|
||||
|
||||
describe("inviteUser", () => {
|
||||
it("should invite a user", async () => {
|
||||
const inviteDto = { email: "new@example.com" };
|
||||
const invitationResponse = {
|
||||
userId: "new-id",
|
||||
invitationToken: "token",
|
||||
email: "new@example.com",
|
||||
invitedAt: new Date(),
|
||||
};
|
||||
mockAdminService.inviteUser.mockResolvedValue(invitationResponse);
|
||||
|
||||
const result = await controller.inviteUser(inviteDto, mockAdminUser);
|
||||
|
||||
expect(result).toEqual(invitationResponse);
|
||||
expect(service.inviteUser).toHaveBeenCalledWith(inviteDto, mockAdminId);
|
||||
});
|
||||
|
||||
it("should invite a user with workspace and role", async () => {
|
||||
const inviteDto = {
|
||||
email: "new@example.com",
|
||||
workspaceId: mockWorkspaceId,
|
||||
role: WorkspaceMemberRole.ADMIN,
|
||||
};
|
||||
mockAdminService.inviteUser.mockResolvedValue({
|
||||
userId: "new-id",
|
||||
invitationToken: "token",
|
||||
email: "new@example.com",
|
||||
invitedAt: new Date(),
|
||||
});
|
||||
|
||||
await controller.inviteUser(inviteDto, mockAdminUser);
|
||||
|
||||
expect(service.inviteUser).toHaveBeenCalledWith(inviteDto, mockAdminId);
|
||||
});
|
||||
});
|
||||
|
||||
describe("updateUser", () => {
|
||||
it("should update a user", async () => {
|
||||
const updateDto = { name: "Updated Name" };
|
||||
mockAdminService.updateUser.mockResolvedValue({
|
||||
...mockUserResponse,
|
||||
name: "Updated Name",
|
||||
});
|
||||
|
||||
const result = await controller.updateUser(mockUserId, updateDto);
|
||||
|
||||
expect(result.name).toBe("Updated Name");
|
||||
expect(service.updateUser).toHaveBeenCalledWith(mockUserId, updateDto);
|
||||
});
|
||||
|
||||
it("should deactivate a user via update", async () => {
|
||||
const deactivatedAt = "2026-02-28T00:00:00.000Z";
|
||||
const updateDto = { deactivatedAt };
|
||||
mockAdminService.updateUser.mockResolvedValue({
|
||||
...mockUserResponse,
|
||||
deactivatedAt: new Date(deactivatedAt),
|
||||
});
|
||||
|
||||
const result = await controller.updateUser(mockUserId, updateDto);
|
||||
|
||||
expect(result.deactivatedAt).toEqual(new Date(deactivatedAt));
|
||||
});
|
||||
});
|
||||
|
||||
describe("deactivateUser", () => {
|
||||
it("should soft-delete a user", async () => {
|
||||
mockAdminService.deactivateUser.mockResolvedValue({
|
||||
...mockUserResponse,
|
||||
deactivatedAt: new Date(),
|
||||
});
|
||||
|
||||
const result = await controller.deactivateUser(mockUserId);
|
||||
|
||||
expect(result.deactivatedAt).toBeDefined();
|
||||
expect(service.deactivateUser).toHaveBeenCalledWith(mockUserId);
|
||||
});
|
||||
});
|
||||
|
||||
describe("createWorkspace", () => {
|
||||
it("should create a workspace", async () => {
|
||||
const createDto = { name: "New Workspace", ownerId: mockAdminId };
|
||||
mockAdminService.createWorkspace.mockResolvedValue(mockWorkspaceResponse);
|
||||
|
||||
const result = await controller.createWorkspace(createDto);
|
||||
|
||||
expect(result).toEqual(mockWorkspaceResponse);
|
||||
expect(service.createWorkspace).toHaveBeenCalledWith(createDto);
|
||||
});
|
||||
|
||||
it("should create workspace with settings", async () => {
|
||||
const createDto = {
|
||||
name: "New Workspace",
|
||||
ownerId: mockAdminId,
|
||||
settings: { feature: true },
|
||||
};
|
||||
mockAdminService.createWorkspace.mockResolvedValue({
|
||||
...mockWorkspaceResponse,
|
||||
settings: { feature: true },
|
||||
});
|
||||
|
||||
const result = await controller.createWorkspace(createDto);
|
||||
|
||||
expect(result.settings).toEqual({ feature: true });
|
||||
});
|
||||
});
|
||||
|
||||
describe("updateWorkspace", () => {
|
||||
it("should update a workspace", async () => {
|
||||
const updateDto = { name: "Updated Workspace" };
|
||||
mockAdminService.updateWorkspace.mockResolvedValue({
|
||||
...mockWorkspaceResponse,
|
||||
name: "Updated Workspace",
|
||||
});
|
||||
|
||||
const result = await controller.updateWorkspace(mockWorkspaceId, updateDto);
|
||||
|
||||
expect(result.name).toBe("Updated Workspace");
|
||||
expect(service.updateWorkspace).toHaveBeenCalledWith(mockWorkspaceId, updateDto);
|
||||
});
|
||||
|
||||
it("should update workspace settings", async () => {
|
||||
const updateDto = { settings: { notifications: false } };
|
||||
mockAdminService.updateWorkspace.mockResolvedValue({
|
||||
...mockWorkspaceResponse,
|
||||
settings: { notifications: false },
|
||||
});
|
||||
|
||||
const result = await controller.updateWorkspace(mockWorkspaceId, updateDto);
|
||||
|
||||
expect(result.settings).toEqual({ notifications: false });
|
||||
});
|
||||
});
|
||||
});
|
||||
64
apps/api/src/admin/admin.controller.ts
Normal file
64
apps/api/src/admin/admin.controller.ts
Normal file
@@ -0,0 +1,64 @@
|
||||
import {
|
||||
Controller,
|
||||
Get,
|
||||
Post,
|
||||
Patch,
|
||||
Delete,
|
||||
Body,
|
||||
Param,
|
||||
Query,
|
||||
UseGuards,
|
||||
ParseUUIDPipe,
|
||||
} from "@nestjs/common";
|
||||
import { AdminService } from "./admin.service";
|
||||
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||
import { AdminGuard } from "../auth/guards/admin.guard";
|
||||
import { CurrentUser } from "../auth/decorators/current-user.decorator";
|
||||
import type { AuthUser } from "@mosaic/shared";
|
||||
import { InviteUserDto } from "./dto/invite-user.dto";
|
||||
import { UpdateUserDto } from "./dto/update-user.dto";
|
||||
import { CreateWorkspaceDto } from "./dto/create-workspace.dto";
|
||||
import { UpdateWorkspaceDto } from "./dto/update-workspace.dto";
|
||||
import { QueryUsersDto } from "./dto/query-users.dto";
|
||||
|
||||
@Controller("admin")
|
||||
@UseGuards(AuthGuard, AdminGuard)
|
||||
export class AdminController {
|
||||
constructor(private readonly adminService: AdminService) {}
|
||||
|
||||
@Get("users")
|
||||
async listUsers(@Query() query: QueryUsersDto) {
|
||||
return this.adminService.listUsers(query.page, query.limit);
|
||||
}
|
||||
|
||||
@Post("users/invite")
|
||||
async inviteUser(@Body() dto: InviteUserDto, @CurrentUser() user: AuthUser) {
|
||||
return this.adminService.inviteUser(dto, user.id);
|
||||
}
|
||||
|
||||
@Patch("users/:id")
|
||||
async updateUser(
|
||||
@Param("id", new ParseUUIDPipe({ version: "4" })) id: string,
|
||||
@Body() dto: UpdateUserDto
|
||||
) {
|
||||
return this.adminService.updateUser(id, dto);
|
||||
}
|
||||
|
||||
@Delete("users/:id")
|
||||
async deactivateUser(@Param("id", new ParseUUIDPipe({ version: "4" })) id: string) {
|
||||
return this.adminService.deactivateUser(id);
|
||||
}
|
||||
|
||||
@Post("workspaces")
|
||||
async createWorkspace(@Body() dto: CreateWorkspaceDto) {
|
||||
return this.adminService.createWorkspace(dto);
|
||||
}
|
||||
|
||||
@Patch("workspaces/:id")
|
||||
async updateWorkspace(
|
||||
@Param("id", new ParseUUIDPipe({ version: "4" })) id: string,
|
||||
@Body() dto: UpdateWorkspaceDto
|
||||
) {
|
||||
return this.adminService.updateWorkspace(id, dto);
|
||||
}
|
||||
}
|
||||
13
apps/api/src/admin/admin.module.ts
Normal file
13
apps/api/src/admin/admin.module.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { Module } from "@nestjs/common";
|
||||
import { AdminController } from "./admin.controller";
|
||||
import { AdminService } from "./admin.service";
|
||||
import { PrismaModule } from "../prisma/prisma.module";
|
||||
import { AuthModule } from "../auth/auth.module";
|
||||
|
||||
@Module({
|
||||
imports: [PrismaModule, AuthModule],
|
||||
controllers: [AdminController],
|
||||
providers: [AdminService],
|
||||
exports: [AdminService],
|
||||
})
|
||||
export class AdminModule {}
|
||||
477
apps/api/src/admin/admin.service.spec.ts
Normal file
477
apps/api/src/admin/admin.service.spec.ts
Normal file
@@ -0,0 +1,477 @@
|
||||
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||
import { Test, TestingModule } from "@nestjs/testing";
|
||||
import { AdminService } from "./admin.service";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
import { BadRequestException, ConflictException, NotFoundException } from "@nestjs/common";
|
||||
import { WorkspaceMemberRole } from "@prisma/client";
|
||||
|
||||
describe("AdminService", () => {
|
||||
let service: AdminService;
|
||||
|
||||
const mockPrismaService = {
|
||||
user: {
|
||||
findMany: vi.fn(),
|
||||
findUnique: vi.fn(),
|
||||
count: vi.fn(),
|
||||
create: vi.fn(),
|
||||
update: vi.fn(),
|
||||
},
|
||||
workspace: {
|
||||
findUnique: vi.fn(),
|
||||
create: vi.fn(),
|
||||
update: vi.fn(),
|
||||
},
|
||||
workspaceMember: {
|
||||
create: vi.fn(),
|
||||
},
|
||||
session: {
|
||||
deleteMany: vi.fn(),
|
||||
},
|
||||
$transaction: vi.fn(async (ops) => {
|
||||
if (typeof ops === "function") {
|
||||
return ops(mockPrismaService);
|
||||
}
|
||||
return Promise.all(ops);
|
||||
}),
|
||||
};
|
||||
|
||||
const mockAdminId = "550e8400-e29b-41d4-a716-446655440001";
|
||||
const mockUserId = "550e8400-e29b-41d4-a716-446655440002";
|
||||
const mockWorkspaceId = "550e8400-e29b-41d4-a716-446655440003";
|
||||
|
||||
const mockUser = {
|
||||
id: mockUserId,
|
||||
name: "Test User",
|
||||
email: "test@example.com",
|
||||
emailVerified: false,
|
||||
image: null,
|
||||
createdAt: new Date("2026-01-01"),
|
||||
updatedAt: new Date("2026-01-01"),
|
||||
deactivatedAt: null,
|
||||
isLocalAuth: false,
|
||||
passwordHash: null,
|
||||
invitedBy: null,
|
||||
invitationToken: null,
|
||||
invitedAt: null,
|
||||
authProviderId: null,
|
||||
preferences: {},
|
||||
workspaceMemberships: [
|
||||
{
|
||||
workspaceId: mockWorkspaceId,
|
||||
userId: mockUserId,
|
||||
role: WorkspaceMemberRole.MEMBER,
|
||||
joinedAt: new Date("2026-01-01"),
|
||||
workspace: { id: mockWorkspaceId, name: "Test Workspace" },
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const mockWorkspace = {
|
||||
id: mockWorkspaceId,
|
||||
name: "Test Workspace",
|
||||
ownerId: mockAdminId,
|
||||
settings: {},
|
||||
createdAt: new Date("2026-01-01"),
|
||||
updatedAt: new Date("2026-01-01"),
|
||||
matrixRoomId: null,
|
||||
};
|
||||
|
||||
beforeEach(async () => {
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
providers: [
|
||||
AdminService,
|
||||
{
|
||||
provide: PrismaService,
|
||||
useValue: mockPrismaService,
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
service = module.get<AdminService>(AdminService);
|
||||
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
it("should be defined", () => {
|
||||
expect(service).toBeDefined();
|
||||
});
|
||||
|
||||
describe("listUsers", () => {
|
||||
it("should return paginated users with memberships", async () => {
|
||||
mockPrismaService.user.findMany.mockResolvedValue([mockUser]);
|
||||
mockPrismaService.user.count.mockResolvedValue(1);
|
||||
|
||||
const result = await service.listUsers(1, 50);
|
||||
|
||||
expect(result.data).toHaveLength(1);
|
||||
expect(result.data[0]?.id).toBe(mockUserId);
|
||||
expect(result.data[0]?.workspaceMemberships).toHaveLength(1);
|
||||
expect(result.meta).toEqual({
|
||||
total: 1,
|
||||
page: 1,
|
||||
limit: 50,
|
||||
totalPages: 1,
|
||||
});
|
||||
});
|
||||
|
||||
it("should use default pagination when not provided", async () => {
|
||||
mockPrismaService.user.findMany.mockResolvedValue([]);
|
||||
mockPrismaService.user.count.mockResolvedValue(0);
|
||||
|
||||
await service.listUsers();
|
||||
|
||||
expect(mockPrismaService.user.findMany).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
skip: 0,
|
||||
take: 50,
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it("should calculate pagination correctly", async () => {
|
||||
mockPrismaService.user.findMany.mockResolvedValue([]);
|
||||
mockPrismaService.user.count.mockResolvedValue(150);
|
||||
|
||||
const result = await service.listUsers(3, 25);
|
||||
|
||||
expect(mockPrismaService.user.findMany).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
skip: 50,
|
||||
take: 25,
|
||||
})
|
||||
);
|
||||
expect(result.meta.totalPages).toBe(6);
|
||||
});
|
||||
});
|
||||
|
||||
describe("inviteUser", () => {
|
||||
it("should create a user with invitation token", async () => {
|
||||
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||
const createdUser = {
|
||||
id: "new-user-id",
|
||||
email: "new@example.com",
|
||||
name: "new",
|
||||
invitationToken: "some-token",
|
||||
};
|
||||
mockPrismaService.user.create.mockResolvedValue(createdUser);
|
||||
|
||||
const result = await service.inviteUser({ email: "new@example.com" }, mockAdminId);
|
||||
|
||||
expect(result.email).toBe("new@example.com");
|
||||
expect(result.invitationToken).toBeDefined();
|
||||
expect(result.userId).toBe("new-user-id");
|
||||
expect(mockPrismaService.user.create).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
data: expect.objectContaining({
|
||||
email: "new@example.com",
|
||||
invitedBy: mockAdminId,
|
||||
invitationToken: expect.any(String),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it("should add user to workspace when workspaceId provided", async () => {
|
||||
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||
mockPrismaService.workspace.findUnique.mockResolvedValue(mockWorkspace);
|
||||
const createdUser = { id: "new-user-id", email: "new@example.com", name: "new" };
|
||||
mockPrismaService.user.create.mockResolvedValue(createdUser);
|
||||
|
||||
await service.inviteUser(
|
||||
{
|
||||
email: "new@example.com",
|
||||
workspaceId: mockWorkspaceId,
|
||||
role: WorkspaceMemberRole.ADMIN,
|
||||
},
|
||||
mockAdminId
|
||||
);
|
||||
|
||||
expect(mockPrismaService.workspaceMember.create).toHaveBeenCalledWith({
|
||||
data: {
|
||||
workspaceId: mockWorkspaceId,
|
||||
userId: "new-user-id",
|
||||
role: WorkspaceMemberRole.ADMIN,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("should throw ConflictException if email already exists", async () => {
|
||||
mockPrismaService.user.findUnique.mockResolvedValue(mockUser);
|
||||
|
||||
await expect(service.inviteUser({ email: "test@example.com" }, mockAdminId)).rejects.toThrow(
|
||||
ConflictException
|
||||
);
|
||||
});
|
||||
|
||||
it("should throw NotFoundException if workspace does not exist", async () => {
|
||||
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||
mockPrismaService.workspace.findUnique.mockResolvedValue(null);
|
||||
|
||||
await expect(
|
||||
service.inviteUser({ email: "new@example.com", workspaceId: "non-existent" }, mockAdminId)
|
||||
).rejects.toThrow(NotFoundException);
|
||||
});
|
||||
|
||||
it("should use email prefix as default name", async () => {
|
||||
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||
const createdUser = { id: "new-user-id", email: "jane.doe@example.com", name: "jane.doe" };
|
||||
mockPrismaService.user.create.mockResolvedValue(createdUser);
|
||||
|
||||
await service.inviteUser({ email: "jane.doe@example.com" }, mockAdminId);
|
||||
|
||||
expect(mockPrismaService.user.create).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
data: expect.objectContaining({
|
||||
name: "jane.doe",
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it("should use provided name when given", async () => {
|
||||
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||
const createdUser = { id: "new-user-id", email: "j@example.com", name: "Jane Doe" };
|
||||
mockPrismaService.user.create.mockResolvedValue(createdUser);
|
||||
|
||||
await service.inviteUser({ email: "j@example.com", name: "Jane Doe" }, mockAdminId);
|
||||
|
||||
expect(mockPrismaService.user.create).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
data: expect.objectContaining({
|
||||
name: "Jane Doe",
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("updateUser", () => {
|
||||
it("should update user fields", async () => {
|
||||
mockPrismaService.user.findUnique.mockResolvedValue(mockUser);
|
||||
mockPrismaService.user.update.mockResolvedValue({
|
||||
...mockUser,
|
||||
name: "Updated Name",
|
||||
});
|
||||
|
||||
const result = await service.updateUser(mockUserId, { name: "Updated Name" });
|
||||
|
||||
expect(result.name).toBe("Updated Name");
|
||||
expect(mockPrismaService.user.update).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
where: { id: mockUserId },
|
||||
data: { name: "Updated Name" },
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it("should set deactivatedAt when provided", async () => {
|
||||
const deactivatedAt = "2026-02-28T00:00:00.000Z";
|
||||
mockPrismaService.user.findUnique.mockResolvedValue(mockUser);
|
||||
mockPrismaService.user.update.mockResolvedValue({
|
||||
...mockUser,
|
||||
deactivatedAt: new Date(deactivatedAt),
|
||||
});
|
||||
|
||||
const result = await service.updateUser(mockUserId, { deactivatedAt });
|
||||
|
||||
expect(result.deactivatedAt).toEqual(new Date(deactivatedAt));
|
||||
});
|
||||
|
||||
it("should clear deactivatedAt when set to null", async () => {
|
||||
const deactivatedUser = { ...mockUser, deactivatedAt: new Date() };
|
||||
mockPrismaService.user.findUnique.mockResolvedValue(deactivatedUser);
|
||||
mockPrismaService.user.update.mockResolvedValue({
|
||||
...deactivatedUser,
|
||||
deactivatedAt: null,
|
||||
});
|
||||
|
||||
const result = await service.updateUser(mockUserId, { deactivatedAt: null });
|
||||
|
||||
expect(result.deactivatedAt).toBeNull();
|
||||
});
|
||||
|
||||
it("should throw NotFoundException if user does not exist", async () => {
|
||||
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||
|
||||
await expect(service.updateUser("non-existent", { name: "Test" })).rejects.toThrow(
|
||||
NotFoundException
|
||||
);
|
||||
});
|
||||
|
||||
it("should update emailVerified", async () => {
|
||||
mockPrismaService.user.findUnique.mockResolvedValue(mockUser);
|
||||
mockPrismaService.user.update.mockResolvedValue({
|
||||
...mockUser,
|
||||
emailVerified: true,
|
||||
});
|
||||
|
||||
const result = await service.updateUser(mockUserId, { emailVerified: true });
|
||||
|
||||
expect(result.emailVerified).toBe(true);
|
||||
});
|
||||
|
||||
it("should update preferences", async () => {
|
||||
const prefs = { theme: "dark" };
|
||||
mockPrismaService.user.findUnique.mockResolvedValue(mockUser);
|
||||
mockPrismaService.user.update.mockResolvedValue({
|
||||
...mockUser,
|
||||
preferences: prefs,
|
||||
});
|
||||
|
||||
await service.updateUser(mockUserId, { preferences: prefs });
|
||||
|
||||
expect(mockPrismaService.user.update).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
data: expect.objectContaining({ preferences: prefs }),
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("deactivateUser", () => {
|
||||
it("should set deactivatedAt and invalidate sessions", async () => {
|
||||
mockPrismaService.user.findUnique.mockResolvedValue(mockUser);
|
||||
mockPrismaService.user.update.mockResolvedValue({
|
||||
...mockUser,
|
||||
deactivatedAt: new Date(),
|
||||
});
|
||||
mockPrismaService.session.deleteMany.mockResolvedValue({ count: 3 });
|
||||
|
||||
const result = await service.deactivateUser(mockUserId);
|
||||
|
||||
expect(result.deactivatedAt).toBeDefined();
|
||||
expect(mockPrismaService.user.update).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
where: { id: mockUserId },
|
||||
data: { deactivatedAt: expect.any(Date) },
|
||||
})
|
||||
);
|
||||
expect(mockPrismaService.session.deleteMany).toHaveBeenCalledWith({ where: { userId: mockUserId } });
|
||||
});
|
||||
|
||||
it("should throw NotFoundException if user does not exist", async () => {
|
||||
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||
|
||||
await expect(service.deactivateUser("non-existent")).rejects.toThrow(NotFoundException);
|
||||
});
|
||||
|
||||
it("should throw BadRequestException if user is already deactivated", async () => {
|
||||
mockPrismaService.user.findUnique.mockResolvedValue({
|
||||
...mockUser,
|
||||
deactivatedAt: new Date(),
|
||||
});
|
||||
|
||||
await expect(service.deactivateUser(mockUserId)).rejects.toThrow(BadRequestException);
|
||||
});
|
||||
});
|
||||
|
||||
describe("createWorkspace", () => {
|
||||
it("should create a workspace with owner membership", async () => {
|
||||
mockPrismaService.user.findUnique.mockResolvedValue(mockUser);
|
||||
mockPrismaService.workspace.create.mockResolvedValue(mockWorkspace);
|
||||
|
||||
const result = await service.createWorkspace({
|
||||
name: "New Workspace",
|
||||
ownerId: mockAdminId,
|
||||
});
|
||||
|
||||
expect(result.name).toBe("Test Workspace");
|
||||
expect(result.memberCount).toBe(1);
|
||||
expect(mockPrismaService.workspace.create).toHaveBeenCalled();
|
||||
expect(mockPrismaService.workspaceMember.create).toHaveBeenCalledWith({
|
||||
data: {
|
||||
workspaceId: mockWorkspace.id,
|
||||
userId: mockAdminId,
|
||||
role: WorkspaceMemberRole.OWNER,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("should throw NotFoundException if owner does not exist", async () => {
|
||||
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||
|
||||
await expect(
|
||||
service.createWorkspace({ name: "New Workspace", ownerId: "non-existent" })
|
||||
).rejects.toThrow(NotFoundException);
|
||||
});
|
||||
|
||||
it("should pass settings when provided", async () => {
|
||||
const settings = { theme: "dark", features: ["chat"] };
|
||||
mockPrismaService.user.findUnique.mockResolvedValue(mockUser);
|
||||
mockPrismaService.workspace.create.mockResolvedValue({
|
||||
...mockWorkspace,
|
||||
settings,
|
||||
});
|
||||
|
||||
await service.createWorkspace({
|
||||
name: "New Workspace",
|
||||
ownerId: mockAdminId,
|
||||
settings,
|
||||
});
|
||||
|
||||
expect(mockPrismaService.workspace.create).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
data: expect.objectContaining({ settings }),
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("updateWorkspace", () => {
|
||||
it("should update workspace name", async () => {
|
||||
mockPrismaService.workspace.findUnique.mockResolvedValue(mockWorkspace);
|
||||
mockPrismaService.workspace.update.mockResolvedValue({
|
||||
...mockWorkspace,
|
||||
name: "Updated Workspace",
|
||||
_count: { members: 3 },
|
||||
});
|
||||
|
||||
const result = await service.updateWorkspace(mockWorkspaceId, {
|
||||
name: "Updated Workspace",
|
||||
});
|
||||
|
||||
expect(result.name).toBe("Updated Workspace");
|
||||
expect(result.memberCount).toBe(3);
|
||||
});
|
||||
|
||||
it("should update workspace settings", async () => {
|
||||
const newSettings = { notifications: true };
|
||||
mockPrismaService.workspace.findUnique.mockResolvedValue(mockWorkspace);
|
||||
mockPrismaService.workspace.update.mockResolvedValue({
|
||||
...mockWorkspace,
|
||||
settings: newSettings,
|
||||
_count: { members: 1 },
|
||||
});
|
||||
|
||||
const result = await service.updateWorkspace(mockWorkspaceId, {
|
||||
settings: newSettings,
|
||||
});
|
||||
|
||||
expect(result.settings).toEqual(newSettings);
|
||||
});
|
||||
|
||||
it("should throw NotFoundException if workspace does not exist", async () => {
|
||||
mockPrismaService.workspace.findUnique.mockResolvedValue(null);
|
||||
|
||||
await expect(service.updateWorkspace("non-existent", { name: "Test" })).rejects.toThrow(
|
||||
NotFoundException
|
||||
);
|
||||
});
|
||||
|
||||
it("should only update provided fields", async () => {
|
||||
mockPrismaService.workspace.findUnique.mockResolvedValue(mockWorkspace);
|
||||
mockPrismaService.workspace.update.mockResolvedValue({
|
||||
...mockWorkspace,
|
||||
_count: { members: 1 },
|
||||
});
|
||||
|
||||
await service.updateWorkspace(mockWorkspaceId, { name: "Only Name" });
|
||||
|
||||
expect(mockPrismaService.workspace.update).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
data: { name: "Only Name" },
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
309
apps/api/src/admin/admin.service.ts
Normal file
309
apps/api/src/admin/admin.service.ts
Normal file
@@ -0,0 +1,309 @@
|
||||
import {
|
||||
BadRequestException,
|
||||
ConflictException,
|
||||
Injectable,
|
||||
Logger,
|
||||
NotFoundException,
|
||||
} from "@nestjs/common";
|
||||
import { Prisma, WorkspaceMemberRole } from "@prisma/client";
|
||||
import { randomUUID } from "node:crypto";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
import type { InviteUserDto } from "./dto/invite-user.dto";
|
||||
import type { UpdateUserDto } from "./dto/update-user.dto";
|
||||
import type { CreateWorkspaceDto } from "./dto/create-workspace.dto";
|
||||
import type {
|
||||
AdminUserResponse,
|
||||
AdminWorkspaceResponse,
|
||||
InvitationResponse,
|
||||
PaginatedResponse,
|
||||
} from "./types/admin.types";
|
||||
|
||||
@Injectable()
|
||||
export class AdminService {
|
||||
private readonly logger = new Logger(AdminService.name);
|
||||
|
||||
constructor(private readonly prisma: PrismaService) {}
|
||||
|
||||
async listUsers(page = 1, limit = 50): Promise<PaginatedResponse<AdminUserResponse>> {
|
||||
const skip = (page - 1) * limit;
|
||||
|
||||
const [users, total] = await Promise.all([
|
||||
this.prisma.user.findMany({
|
||||
include: {
|
||||
workspaceMemberships: {
|
||||
include: {
|
||||
workspace: { select: { id: true, name: true } },
|
||||
},
|
||||
},
|
||||
},
|
||||
orderBy: { createdAt: "desc" },
|
||||
skip,
|
||||
take: limit,
|
||||
}),
|
||||
this.prisma.user.count(),
|
||||
]);
|
||||
|
||||
return {
|
||||
data: users.map((user) => ({
|
||||
id: user.id,
|
||||
name: user.name,
|
||||
email: user.email,
|
||||
emailVerified: user.emailVerified,
|
||||
image: user.image,
|
||||
createdAt: user.createdAt,
|
||||
deactivatedAt: user.deactivatedAt,
|
||||
isLocalAuth: user.isLocalAuth,
|
||||
invitedAt: user.invitedAt,
|
||||
invitedBy: user.invitedBy,
|
||||
workspaceMemberships: user.workspaceMemberships.map((m) => ({
|
||||
workspaceId: m.workspaceId,
|
||||
workspaceName: m.workspace.name,
|
||||
role: m.role,
|
||||
joinedAt: m.joinedAt,
|
||||
})),
|
||||
})),
|
||||
meta: {
|
||||
total,
|
||||
page,
|
||||
limit,
|
||||
totalPages: Math.ceil(total / limit),
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
async inviteUser(dto: InviteUserDto, inviterId: string): Promise<InvitationResponse> {
|
||||
const existing = await this.prisma.user.findUnique({
|
||||
where: { email: dto.email },
|
||||
});
|
||||
|
||||
if (existing) {
|
||||
throw new ConflictException(`User with email ${dto.email} already exists`);
|
||||
}
|
||||
|
||||
if (dto.workspaceId) {
|
||||
const workspace = await this.prisma.workspace.findUnique({
|
||||
where: { id: dto.workspaceId },
|
||||
});
|
||||
if (!workspace) {
|
||||
throw new NotFoundException(`Workspace ${dto.workspaceId} not found`);
|
||||
}
|
||||
}
|
||||
|
||||
const invitationToken = randomUUID();
|
||||
const now = new Date();
|
||||
|
||||
const user = await this.prisma.$transaction(async (tx) => {
|
||||
const created = await tx.user.create({
|
||||
data: {
|
||||
email: dto.email,
|
||||
name: dto.name ?? dto.email.split("@")[0] ?? dto.email,
|
||||
emailVerified: false,
|
||||
invitedBy: inviterId,
|
||||
invitationToken,
|
||||
invitedAt: now,
|
||||
},
|
||||
});
|
||||
|
||||
if (dto.workspaceId) {
|
||||
await tx.workspaceMember.create({
|
||||
data: {
|
||||
workspaceId: dto.workspaceId,
|
||||
userId: created.id,
|
||||
role: dto.role ?? WorkspaceMemberRole.MEMBER,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
return created;
|
||||
});
|
||||
|
||||
this.logger.log(`User invited: ${user.email} by ${inviterId}`);
|
||||
|
||||
return {
|
||||
userId: user.id,
|
||||
invitationToken,
|
||||
email: user.email,
|
||||
invitedAt: now,
|
||||
};
|
||||
}
|
||||
|
||||
async updateUser(id: string, dto: UpdateUserDto): Promise<AdminUserResponse> {
|
||||
const existing = await this.prisma.user.findUnique({ where: { id } });
|
||||
if (!existing) {
|
||||
throw new NotFoundException(`User ${id} not found`);
|
||||
}
|
||||
|
||||
const data: Prisma.UserUpdateInput = {};
|
||||
|
||||
if (dto.name !== undefined) {
|
||||
data.name = dto.name;
|
||||
}
|
||||
if (dto.emailVerified !== undefined) {
|
||||
data.emailVerified = dto.emailVerified;
|
||||
}
|
||||
if (dto.preferences !== undefined) {
|
||||
data.preferences = dto.preferences as Prisma.InputJsonValue;
|
||||
}
|
||||
if (dto.deactivatedAt !== undefined) {
|
||||
data.deactivatedAt = dto.deactivatedAt ? new Date(dto.deactivatedAt) : null;
|
||||
}
|
||||
|
||||
const user = await this.prisma.user.update({
|
||||
where: { id },
|
||||
data,
|
||||
include: {
|
||||
workspaceMemberships: {
|
||||
include: {
|
||||
workspace: { select: { id: true, name: true } },
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
this.logger.log(`User updated: ${id}`);
|
||||
|
||||
return {
|
||||
id: user.id,
|
||||
name: user.name,
|
||||
email: user.email,
|
||||
emailVerified: user.emailVerified,
|
||||
image: user.image,
|
||||
createdAt: user.createdAt,
|
||||
deactivatedAt: user.deactivatedAt,
|
||||
isLocalAuth: user.isLocalAuth,
|
||||
invitedAt: user.invitedAt,
|
||||
invitedBy: user.invitedBy,
|
||||
workspaceMemberships: user.workspaceMemberships.map((m) => ({
|
||||
workspaceId: m.workspaceId,
|
||||
workspaceName: m.workspace.name,
|
||||
role: m.role,
|
||||
joinedAt: m.joinedAt,
|
||||
})),
|
||||
};
|
||||
}
|
||||
|
||||
async deactivateUser(id: string): Promise<AdminUserResponse> {
|
||||
const existing = await this.prisma.user.findUnique({ where: { id } });
|
||||
if (!existing) {
|
||||
throw new NotFoundException(`User ${id} not found`);
|
||||
}
|
||||
|
||||
if (existing.deactivatedAt) {
|
||||
throw new BadRequestException(`User ${id} is already deactivated`);
|
||||
}
|
||||
|
||||
const [user] = await this.prisma.$transaction([
|
||||
this.prisma.user.update({
|
||||
where: { id },
|
||||
data: { deactivatedAt: new Date() },
|
||||
include: {
|
||||
workspaceMemberships: {
|
||||
include: {
|
||||
workspace: { select: { id: true, name: true } },
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
this.prisma.session.deleteMany({ where: { userId: id } }),
|
||||
]);
|
||||
|
||||
this.logger.log(`User deactivated and sessions invalidated: ${id}`);
|
||||
|
||||
return {
|
||||
id: user.id,
|
||||
name: user.name,
|
||||
email: user.email,
|
||||
emailVerified: user.emailVerified,
|
||||
image: user.image,
|
||||
createdAt: user.createdAt,
|
||||
deactivatedAt: user.deactivatedAt,
|
||||
isLocalAuth: user.isLocalAuth,
|
||||
invitedAt: user.invitedAt,
|
||||
invitedBy: user.invitedBy,
|
||||
workspaceMemberships: user.workspaceMemberships.map((m) => ({
|
||||
workspaceId: m.workspaceId,
|
||||
workspaceName: m.workspace.name,
|
||||
role: m.role,
|
||||
joinedAt: m.joinedAt,
|
||||
})),
|
||||
};
|
||||
}
|
||||
|
||||
async createWorkspace(dto: CreateWorkspaceDto): Promise<AdminWorkspaceResponse> {
|
||||
const owner = await this.prisma.user.findUnique({ where: { id: dto.ownerId } });
|
||||
if (!owner) {
|
||||
throw new NotFoundException(`User ${dto.ownerId} not found`);
|
||||
}
|
||||
|
||||
const workspace = await this.prisma.$transaction(async (tx) => {
|
||||
const created = await tx.workspace.create({
|
||||
data: {
|
||||
name: dto.name,
|
||||
ownerId: dto.ownerId,
|
||||
settings: dto.settings ? (dto.settings as Prisma.InputJsonValue) : {},
|
||||
},
|
||||
});
|
||||
|
||||
await tx.workspaceMember.create({
|
||||
data: {
|
||||
workspaceId: created.id,
|
||||
userId: dto.ownerId,
|
||||
role: WorkspaceMemberRole.OWNER,
|
||||
},
|
||||
});
|
||||
|
||||
return created;
|
||||
});
|
||||
|
||||
this.logger.log(`Workspace created: ${workspace.id} with owner ${dto.ownerId}`);
|
||||
|
||||
return {
|
||||
id: workspace.id,
|
||||
name: workspace.name,
|
||||
ownerId: workspace.ownerId,
|
||||
settings: workspace.settings as Record<string, unknown>,
|
||||
createdAt: workspace.createdAt,
|
||||
updatedAt: workspace.updatedAt,
|
||||
memberCount: 1,
|
||||
};
|
||||
}
|
||||
|
||||
async updateWorkspace(
|
||||
id: string,
|
||||
dto: { name?: string; settings?: Record<string, unknown> }
|
||||
): Promise<AdminWorkspaceResponse> {
|
||||
const existing = await this.prisma.workspace.findUnique({ where: { id } });
|
||||
if (!existing) {
|
||||
throw new NotFoundException(`Workspace ${id} not found`);
|
||||
}
|
||||
|
||||
const data: Prisma.WorkspaceUpdateInput = {};
|
||||
|
||||
if (dto.name !== undefined) {
|
||||
data.name = dto.name;
|
||||
}
|
||||
if (dto.settings !== undefined) {
|
||||
data.settings = dto.settings as Prisma.InputJsonValue;
|
||||
}
|
||||
|
||||
const workspace = await this.prisma.workspace.update({
|
||||
where: { id },
|
||||
data,
|
||||
include: {
|
||||
_count: { select: { members: true } },
|
||||
},
|
||||
});
|
||||
|
||||
this.logger.log(`Workspace updated: ${id}`);
|
||||
|
||||
return {
|
||||
id: workspace.id,
|
||||
name: workspace.name,
|
||||
ownerId: workspace.ownerId,
|
||||
settings: workspace.settings as Record<string, unknown>,
|
||||
createdAt: workspace.createdAt,
|
||||
updatedAt: workspace.updatedAt,
|
||||
memberCount: workspace._count.members,
|
||||
};
|
||||
}
|
||||
}
|
||||
15
apps/api/src/admin/dto/create-workspace.dto.ts
Normal file
15
apps/api/src/admin/dto/create-workspace.dto.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
import { IsObject, IsOptional, IsString, IsUUID, MaxLength, MinLength } from "class-validator";
|
||||
|
||||
export class CreateWorkspaceDto {
|
||||
@IsString({ message: "name must be a string" })
|
||||
@MinLength(1, { message: "name must not be empty" })
|
||||
@MaxLength(255, { message: "name must not exceed 255 characters" })
|
||||
name!: string;
|
||||
|
||||
@IsUUID("4", { message: "ownerId must be a valid UUID" })
|
||||
ownerId!: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsObject({ message: "settings must be an object" })
|
||||
settings?: Record<string, unknown>;
|
||||
}
|
||||
20
apps/api/src/admin/dto/invite-user.dto.ts
Normal file
20
apps/api/src/admin/dto/invite-user.dto.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
import { WorkspaceMemberRole } from "@prisma/client";
|
||||
import { IsEmail, IsEnum, IsOptional, IsString, IsUUID, MaxLength } from "class-validator";
|
||||
|
||||
export class InviteUserDto {
|
||||
@IsEmail({}, { message: "email must be a valid email address" })
|
||||
email!: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsString({ message: "name must be a string" })
|
||||
@MaxLength(255, { message: "name must not exceed 255 characters" })
|
||||
name?: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsUUID("4", { message: "workspaceId must be a valid UUID" })
|
||||
workspaceId?: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsEnum(WorkspaceMemberRole, { message: "role must be a valid WorkspaceMemberRole" })
|
||||
role?: WorkspaceMemberRole;
|
||||
}
|
||||
15
apps/api/src/admin/dto/manage-member.dto.ts
Normal file
15
apps/api/src/admin/dto/manage-member.dto.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
import { WorkspaceMemberRole } from "@prisma/client";
|
||||
import { IsEnum, IsUUID } from "class-validator";
|
||||
|
||||
export class AddMemberDto {
|
||||
@IsUUID("4", { message: "userId must be a valid UUID" })
|
||||
userId!: string;
|
||||
|
||||
@IsEnum(WorkspaceMemberRole, { message: "role must be a valid WorkspaceMemberRole" })
|
||||
role!: WorkspaceMemberRole;
|
||||
}
|
||||
|
||||
export class UpdateMemberRoleDto {
|
||||
@IsEnum(WorkspaceMemberRole, { message: "role must be a valid WorkspaceMemberRole" })
|
||||
role!: WorkspaceMemberRole;
|
||||
}
|
||||
17
apps/api/src/admin/dto/query-users.dto.ts
Normal file
17
apps/api/src/admin/dto/query-users.dto.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { IsInt, IsOptional, Max, Min } from "class-validator";
|
||||
import { Type } from "class-transformer";
|
||||
|
||||
export class QueryUsersDto {
|
||||
@IsOptional()
|
||||
@Type(() => Number)
|
||||
@IsInt({ message: "page must be an integer" })
|
||||
@Min(1, { message: "page must be at least 1" })
|
||||
page?: number;
|
||||
|
||||
@IsOptional()
|
||||
@Type(() => Number)
|
||||
@IsInt({ message: "limit must be an integer" })
|
||||
@Min(1, { message: "limit must be at least 1" })
|
||||
@Max(100, { message: "limit must not exceed 100" })
|
||||
limit?: number;
|
||||
}
|
||||
27
apps/api/src/admin/dto/update-user.dto.ts
Normal file
27
apps/api/src/admin/dto/update-user.dto.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
import {
|
||||
IsBoolean,
|
||||
IsDateString,
|
||||
IsObject,
|
||||
IsOptional,
|
||||
IsString,
|
||||
MaxLength,
|
||||
} from "class-validator";
|
||||
|
||||
export class UpdateUserDto {
|
||||
@IsOptional()
|
||||
@IsString({ message: "name must be a string" })
|
||||
@MaxLength(255, { message: "name must not exceed 255 characters" })
|
||||
name?: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsDateString({}, { message: "deactivatedAt must be a valid ISO 8601 date string" })
|
||||
deactivatedAt?: string | null;
|
||||
|
||||
@IsOptional()
|
||||
@IsBoolean({ message: "emailVerified must be a boolean" })
|
||||
emailVerified?: boolean;
|
||||
|
||||
@IsOptional()
|
||||
@IsObject({ message: "preferences must be an object" })
|
||||
preferences?: Record<string, unknown>;
|
||||
}
|
||||
13
apps/api/src/admin/dto/update-workspace.dto.ts
Normal file
13
apps/api/src/admin/dto/update-workspace.dto.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { IsObject, IsOptional, IsString, MaxLength, MinLength } from "class-validator";
|
||||
|
||||
export class UpdateWorkspaceDto {
|
||||
@IsOptional()
|
||||
@IsString({ message: "name must be a string" })
|
||||
@MinLength(1, { message: "name must not be empty" })
|
||||
@MaxLength(255, { message: "name must not exceed 255 characters" })
|
||||
name?: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsObject({ message: "settings must be an object" })
|
||||
settings?: Record<string, unknown>;
|
||||
}
|
||||
49
apps/api/src/admin/types/admin.types.ts
Normal file
49
apps/api/src/admin/types/admin.types.ts
Normal file
@@ -0,0 +1,49 @@
|
||||
import type { WorkspaceMemberRole } from "@prisma/client";
|
||||
|
||||
export interface AdminUserResponse {
|
||||
id: string;
|
||||
name: string;
|
||||
email: string;
|
||||
emailVerified: boolean;
|
||||
image: string | null;
|
||||
createdAt: Date;
|
||||
deactivatedAt: Date | null;
|
||||
isLocalAuth: boolean;
|
||||
invitedAt: Date | null;
|
||||
invitedBy: string | null;
|
||||
workspaceMemberships: WorkspaceMembershipResponse[];
|
||||
}
|
||||
|
||||
export interface WorkspaceMembershipResponse {
|
||||
workspaceId: string;
|
||||
workspaceName: string;
|
||||
role: WorkspaceMemberRole;
|
||||
joinedAt: Date;
|
||||
}
|
||||
|
||||
export interface PaginatedResponse<T> {
|
||||
data: T[];
|
||||
meta: {
|
||||
total: number;
|
||||
page: number;
|
||||
limit: number;
|
||||
totalPages: number;
|
||||
};
|
||||
}
|
||||
|
||||
export interface InvitationResponse {
|
||||
userId: string;
|
||||
invitationToken: string;
|
||||
email: string;
|
||||
invitedAt: Date;
|
||||
}
|
||||
|
||||
export interface AdminWorkspaceResponse {
|
||||
id: string;
|
||||
name: string;
|
||||
ownerId: string;
|
||||
settings: Record<string, unknown>;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
memberCount: number;
|
||||
}
|
||||
102
apps/api/src/agent-memory/agent-memory.controller.spec.ts
Normal file
102
apps/api/src/agent-memory/agent-memory.controller.spec.ts
Normal file
@@ -0,0 +1,102 @@
|
||||
import { Test, TestingModule } from "@nestjs/testing";
|
||||
import { AgentMemoryController } from "./agent-memory.controller";
|
||||
import { AgentMemoryService } from "./agent-memory.service";
|
||||
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||
import { WorkspaceGuard, PermissionGuard } from "../common/guards";
|
||||
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||
|
||||
describe("AgentMemoryController", () => {
|
||||
let controller: AgentMemoryController;
|
||||
|
||||
const mockAgentMemoryService = {
|
||||
upsert: vi.fn(),
|
||||
findAll: vi.fn(),
|
||||
findOne: vi.fn(),
|
||||
remove: vi.fn(),
|
||||
};
|
||||
|
||||
const mockGuard = { canActivate: vi.fn(() => true) };
|
||||
|
||||
beforeEach(async () => {
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
controllers: [AgentMemoryController],
|
||||
providers: [
|
||||
{
|
||||
provide: AgentMemoryService,
|
||||
useValue: mockAgentMemoryService,
|
||||
},
|
||||
],
|
||||
})
|
||||
.overrideGuard(AuthGuard)
|
||||
.useValue(mockGuard)
|
||||
.overrideGuard(WorkspaceGuard)
|
||||
.useValue(mockGuard)
|
||||
.overrideGuard(PermissionGuard)
|
||||
.useValue(mockGuard)
|
||||
.compile();
|
||||
|
||||
controller = module.get<AgentMemoryController>(AgentMemoryController);
|
||||
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
const workspaceId = "workspace-1";
|
||||
const agentId = "agent-1";
|
||||
const key = "context";
|
||||
|
||||
describe("upsert", () => {
|
||||
it("should upsert a memory entry", async () => {
|
||||
const dto = { value: { foo: "bar" } };
|
||||
const mockEntry = { id: "mem-1", workspaceId, agentId, key, value: dto.value };
|
||||
|
||||
mockAgentMemoryService.upsert.mockResolvedValue(mockEntry);
|
||||
|
||||
const result = await controller.upsert(agentId, key, dto, workspaceId);
|
||||
|
||||
expect(mockAgentMemoryService.upsert).toHaveBeenCalledWith(workspaceId, agentId, key, dto);
|
||||
expect(result).toEqual(mockEntry);
|
||||
});
|
||||
});
|
||||
|
||||
describe("findAll", () => {
|
||||
it("should list all memory entries for an agent", async () => {
|
||||
const mockEntries = [
|
||||
{ id: "mem-1", key: "a", value: 1 },
|
||||
{ id: "mem-2", key: "b", value: 2 },
|
||||
];
|
||||
|
||||
mockAgentMemoryService.findAll.mockResolvedValue(mockEntries);
|
||||
|
||||
const result = await controller.findAll(agentId, workspaceId);
|
||||
|
||||
expect(mockAgentMemoryService.findAll).toHaveBeenCalledWith(workspaceId, agentId);
|
||||
expect(result).toEqual(mockEntries);
|
||||
});
|
||||
});
|
||||
|
||||
describe("findOne", () => {
|
||||
it("should get a single memory entry", async () => {
|
||||
const mockEntry = { id: "mem-1", key, value: "v" };
|
||||
|
||||
mockAgentMemoryService.findOne.mockResolvedValue(mockEntry);
|
||||
|
||||
const result = await controller.findOne(agentId, key, workspaceId);
|
||||
|
||||
expect(mockAgentMemoryService.findOne).toHaveBeenCalledWith(workspaceId, agentId, key);
|
||||
expect(result).toEqual(mockEntry);
|
||||
});
|
||||
});
|
||||
|
||||
describe("remove", () => {
|
||||
it("should delete a memory entry", async () => {
|
||||
const mockResponse = { message: "Memory entry deleted successfully" };
|
||||
|
||||
mockAgentMemoryService.remove.mockResolvedValue(mockResponse);
|
||||
|
||||
const result = await controller.remove(agentId, key, workspaceId);
|
||||
|
||||
expect(mockAgentMemoryService.remove).toHaveBeenCalledWith(workspaceId, agentId, key);
|
||||
expect(result).toEqual(mockResponse);
|
||||
});
|
||||
});
|
||||
});
|
||||
89
apps/api/src/agent-memory/agent-memory.controller.ts
Normal file
89
apps/api/src/agent-memory/agent-memory.controller.ts
Normal file
@@ -0,0 +1,89 @@
|
||||
import {
|
||||
Controller,
|
||||
Get,
|
||||
Put,
|
||||
Delete,
|
||||
Body,
|
||||
Param,
|
||||
UseGuards,
|
||||
HttpCode,
|
||||
HttpStatus,
|
||||
} from "@nestjs/common";
|
||||
import { AgentMemoryService } from "./agent-memory.service";
|
||||
import { UpsertAgentMemoryDto } from "./dto";
|
||||
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||
import { WorkspaceGuard, PermissionGuard } from "../common/guards";
|
||||
import { Workspace, Permission, RequirePermission } from "../common/decorators";
|
||||
|
||||
/**
|
||||
* Controller for per-agent key/value memory endpoints.
|
||||
* All endpoints require authentication and workspace context.
|
||||
*
|
||||
* Guards are applied in order:
|
||||
* 1. AuthGuard - Verifies user authentication
|
||||
* 2. WorkspaceGuard - Validates workspace access
|
||||
* 3. PermissionGuard - Checks role-based permissions
|
||||
*/
|
||||
@Controller("agents/:agentId/memory")
|
||||
@UseGuards(AuthGuard, WorkspaceGuard, PermissionGuard)
|
||||
export class AgentMemoryController {
|
||||
constructor(private readonly agentMemoryService: AgentMemoryService) {}
|
||||
|
||||
/**
|
||||
* PUT /api/agents/:agentId/memory/:key
|
||||
* Upsert a memory entry for an agent
|
||||
* Requires: MEMBER role or higher
|
||||
*/
|
||||
@Put(":key")
|
||||
@RequirePermission(Permission.WORKSPACE_MEMBER)
|
||||
async upsert(
|
||||
@Param("agentId") agentId: string,
|
||||
@Param("key") key: string,
|
||||
@Body() dto: UpsertAgentMemoryDto,
|
||||
@Workspace() workspaceId: string
|
||||
) {
|
||||
return this.agentMemoryService.upsert(workspaceId, agentId, key, dto);
|
||||
}
|
||||
|
||||
/**
|
||||
* GET /api/agents/:agentId/memory
|
||||
* List all memory entries for an agent
|
||||
* Requires: Any workspace member (including GUEST)
|
||||
*/
|
||||
@Get()
|
||||
@RequirePermission(Permission.WORKSPACE_ANY)
|
||||
async findAll(@Param("agentId") agentId: string, @Workspace() workspaceId: string) {
|
||||
return this.agentMemoryService.findAll(workspaceId, agentId);
|
||||
}
|
||||
|
||||
/**
|
||||
* GET /api/agents/:agentId/memory/:key
|
||||
* Get a single memory entry by key
|
||||
* Requires: Any workspace member (including GUEST)
|
||||
*/
|
||||
@Get(":key")
|
||||
@RequirePermission(Permission.WORKSPACE_ANY)
|
||||
async findOne(
|
||||
@Param("agentId") agentId: string,
|
||||
@Param("key") key: string,
|
||||
@Workspace() workspaceId: string
|
||||
) {
|
||||
return this.agentMemoryService.findOne(workspaceId, agentId, key);
|
||||
}
|
||||
|
||||
/**
|
||||
* DELETE /api/agents/:agentId/memory/:key
|
||||
* Remove a memory entry
|
||||
* Requires: MEMBER role or higher
|
||||
*/
|
||||
@Delete(":key")
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@RequirePermission(Permission.WORKSPACE_MEMBER)
|
||||
async remove(
|
||||
@Param("agentId") agentId: string,
|
||||
@Param("key") key: string,
|
||||
@Workspace() workspaceId: string
|
||||
) {
|
||||
return this.agentMemoryService.remove(workspaceId, agentId, key);
|
||||
}
|
||||
}
|
||||
13
apps/api/src/agent-memory/agent-memory.module.ts
Normal file
13
apps/api/src/agent-memory/agent-memory.module.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { Module } from "@nestjs/common";
|
||||
import { AgentMemoryController } from "./agent-memory.controller";
|
||||
import { AgentMemoryService } from "./agent-memory.service";
|
||||
import { PrismaModule } from "../prisma/prisma.module";
|
||||
import { AuthModule } from "../auth/auth.module";
|
||||
|
||||
@Module({
|
||||
imports: [PrismaModule, AuthModule],
|
||||
controllers: [AgentMemoryController],
|
||||
providers: [AgentMemoryService],
|
||||
exports: [AgentMemoryService],
|
||||
})
|
||||
export class AgentMemoryModule {}
|
||||
126
apps/api/src/agent-memory/agent-memory.service.spec.ts
Normal file
126
apps/api/src/agent-memory/agent-memory.service.spec.ts
Normal file
@@ -0,0 +1,126 @@
|
||||
import { Test, TestingModule } from "@nestjs/testing";
|
||||
import { AgentMemoryService } from "./agent-memory.service";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
import { NotFoundException } from "@nestjs/common";
|
||||
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||
|
||||
describe("AgentMemoryService", () => {
|
||||
let service: AgentMemoryService;
|
||||
|
||||
const mockPrismaService = {
|
||||
agentMemory: {
|
||||
upsert: vi.fn(),
|
||||
findMany: vi.fn(),
|
||||
findUnique: vi.fn(),
|
||||
delete: vi.fn(),
|
||||
},
|
||||
};
|
||||
|
||||
beforeEach(async () => {
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
providers: [
|
||||
AgentMemoryService,
|
||||
{
|
||||
provide: PrismaService,
|
||||
useValue: mockPrismaService,
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
service = module.get<AgentMemoryService>(AgentMemoryService);
|
||||
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
const workspaceId = "workspace-1";
|
||||
const agentId = "agent-1";
|
||||
const key = "session-context";
|
||||
|
||||
describe("upsert", () => {
|
||||
it("should upsert a memory entry", async () => {
|
||||
const dto = { value: { data: "some context" } };
|
||||
const mockEntry = {
|
||||
id: "mem-1",
|
||||
workspaceId,
|
||||
agentId,
|
||||
key,
|
||||
value: dto.value,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
mockPrismaService.agentMemory.upsert.mockResolvedValue(mockEntry);
|
||||
|
||||
const result = await service.upsert(workspaceId, agentId, key, dto);
|
||||
|
||||
expect(mockPrismaService.agentMemory.upsert).toHaveBeenCalledWith({
|
||||
where: { workspaceId_agentId_key: { workspaceId, agentId, key } },
|
||||
create: { workspaceId, agentId, key, value: dto.value },
|
||||
update: { value: dto.value },
|
||||
});
|
||||
expect(result).toEqual(mockEntry);
|
||||
});
|
||||
});
|
||||
|
||||
describe("findAll", () => {
|
||||
it("should return all memory entries for an agent", async () => {
|
||||
const mockEntries = [
|
||||
{ id: "mem-1", key: "a", value: 1 },
|
||||
{ id: "mem-2", key: "b", value: 2 },
|
||||
];
|
||||
|
||||
mockPrismaService.agentMemory.findMany.mockResolvedValue(mockEntries);
|
||||
|
||||
const result = await service.findAll(workspaceId, agentId);
|
||||
|
||||
expect(mockPrismaService.agentMemory.findMany).toHaveBeenCalledWith({
|
||||
where: { workspaceId, agentId },
|
||||
orderBy: { key: "asc" },
|
||||
});
|
||||
expect(result).toEqual(mockEntries);
|
||||
});
|
||||
});
|
||||
|
||||
describe("findOne", () => {
|
||||
it("should return a memory entry by key", async () => {
|
||||
const mockEntry = { id: "mem-1", workspaceId, agentId, key, value: "ctx" };
|
||||
|
||||
mockPrismaService.agentMemory.findUnique.mockResolvedValue(mockEntry);
|
||||
|
||||
const result = await service.findOne(workspaceId, agentId, key);
|
||||
|
||||
expect(mockPrismaService.agentMemory.findUnique).toHaveBeenCalledWith({
|
||||
where: { workspaceId_agentId_key: { workspaceId, agentId, key } },
|
||||
});
|
||||
expect(result).toEqual(mockEntry);
|
||||
});
|
||||
|
||||
it("should throw NotFoundException when key not found", async () => {
|
||||
mockPrismaService.agentMemory.findUnique.mockResolvedValue(null);
|
||||
|
||||
await expect(service.findOne(workspaceId, agentId, key)).rejects.toThrow(NotFoundException);
|
||||
});
|
||||
});
|
||||
|
||||
describe("remove", () => {
|
||||
it("should delete a memory entry", async () => {
|
||||
const mockEntry = { id: "mem-1", workspaceId, agentId, key, value: "x" };
|
||||
|
||||
mockPrismaService.agentMemory.findUnique.mockResolvedValue(mockEntry);
|
||||
mockPrismaService.agentMemory.delete.mockResolvedValue(mockEntry);
|
||||
|
||||
const result = await service.remove(workspaceId, agentId, key);
|
||||
|
||||
expect(mockPrismaService.agentMemory.delete).toHaveBeenCalledWith({
|
||||
where: { workspaceId_agentId_key: { workspaceId, agentId, key } },
|
||||
});
|
||||
expect(result).toEqual({ message: "Memory entry deleted successfully" });
|
||||
});
|
||||
|
||||
it("should throw NotFoundException when key not found", async () => {
|
||||
mockPrismaService.agentMemory.findUnique.mockResolvedValue(null);
|
||||
|
||||
await expect(service.remove(workspaceId, agentId, key)).rejects.toThrow(NotFoundException);
|
||||
});
|
||||
});
|
||||
});
|
||||
79
apps/api/src/agent-memory/agent-memory.service.ts
Normal file
79
apps/api/src/agent-memory/agent-memory.service.ts
Normal file
@@ -0,0 +1,79 @@
|
||||
import { Injectable, NotFoundException } from "@nestjs/common";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
import { Prisma } from "@prisma/client";
|
||||
import type { UpsertAgentMemoryDto } from "./dto";
|
||||
|
||||
@Injectable()
|
||||
export class AgentMemoryService {
|
||||
constructor(private readonly prisma: PrismaService) {}
|
||||
|
||||
/**
|
||||
* Upsert a memory entry for an agent.
|
||||
*/
|
||||
async upsert(workspaceId: string, agentId: string, key: string, dto: UpsertAgentMemoryDto) {
|
||||
return this.prisma.agentMemory.upsert({
|
||||
where: {
|
||||
workspaceId_agentId_key: { workspaceId, agentId, key },
|
||||
},
|
||||
create: {
|
||||
workspaceId,
|
||||
agentId,
|
||||
key,
|
||||
value: dto.value as Prisma.InputJsonValue,
|
||||
},
|
||||
update: {
|
||||
value: dto.value as Prisma.InputJsonValue,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* List all memory entries for an agent in a workspace.
|
||||
*/
|
||||
async findAll(workspaceId: string, agentId: string) {
|
||||
return this.prisma.agentMemory.findMany({
|
||||
where: { workspaceId, agentId },
|
||||
orderBy: { key: "asc" },
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a single memory entry by key.
|
||||
*/
|
||||
async findOne(workspaceId: string, agentId: string, key: string) {
|
||||
const entry = await this.prisma.agentMemory.findUnique({
|
||||
where: {
|
||||
workspaceId_agentId_key: { workspaceId, agentId, key },
|
||||
},
|
||||
});
|
||||
|
||||
if (!entry) {
|
||||
throw new NotFoundException(`Memory key "${key}" not found for agent "${agentId}"`);
|
||||
}
|
||||
|
||||
return entry;
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a memory entry by key.
|
||||
*/
|
||||
async remove(workspaceId: string, agentId: string, key: string) {
|
||||
const entry = await this.prisma.agentMemory.findUnique({
|
||||
where: {
|
||||
workspaceId_agentId_key: { workspaceId, agentId, key },
|
||||
},
|
||||
});
|
||||
|
||||
if (!entry) {
|
||||
throw new NotFoundException(`Memory key "${key}" not found for agent "${agentId}"`);
|
||||
}
|
||||
|
||||
await this.prisma.agentMemory.delete({
|
||||
where: {
|
||||
workspaceId_agentId_key: { workspaceId, agentId, key },
|
||||
},
|
||||
});
|
||||
|
||||
return { message: "Memory entry deleted successfully" };
|
||||
}
|
||||
}
|
||||
1
apps/api/src/agent-memory/dto/index.ts
Normal file
1
apps/api/src/agent-memory/dto/index.ts
Normal file
@@ -0,0 +1 @@
|
||||
export * from "./upsert-agent-memory.dto";
|
||||
10
apps/api/src/agent-memory/dto/upsert-agent-memory.dto.ts
Normal file
10
apps/api/src/agent-memory/dto/upsert-agent-memory.dto.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
import { IsNotEmpty } from "class-validator";
|
||||
|
||||
/**
|
||||
* DTO for upserting an agent memory entry.
|
||||
* The value accepts any JSON-serializable data.
|
||||
*/
|
||||
export class UpsertAgentMemoryDto {
|
||||
@IsNotEmpty({ message: "value must not be empty" })
|
||||
value!: unknown;
|
||||
}
|
||||
@@ -1,4 +1,5 @@
|
||||
import { Controller, Get } from "@nestjs/common";
|
||||
import { SkipThrottle } from "@nestjs/throttler";
|
||||
import { AppService } from "./app.service";
|
||||
import { PrismaService } from "./prisma/prisma.service";
|
||||
import type { ApiResponse, HealthStatus } from "@mosaic/shared";
|
||||
@@ -17,6 +18,7 @@ export class AppController {
|
||||
}
|
||||
|
||||
@Get("health")
|
||||
@SkipThrottle()
|
||||
async getHealth(): Promise<ApiResponse<HealthStatus>> {
|
||||
const dbHealthy = await this.prisma.isHealthy();
|
||||
const dbInfo = await this.prisma.getConnectionInfo();
|
||||
|
||||
@@ -3,8 +3,11 @@ import { APP_INTERCEPTOR, APP_GUARD } from "@nestjs/core";
|
||||
import { ThrottlerModule } from "@nestjs/throttler";
|
||||
import { BullModule } from "@nestjs/bullmq";
|
||||
import { ThrottlerValkeyStorageService, ThrottlerApiKeyGuard } from "./common/throttler";
|
||||
import { CsrfGuard } from "./common/guards/csrf.guard";
|
||||
import { CsrfService } from "./common/services/csrf.service";
|
||||
import { AppController } from "./app.controller";
|
||||
import { AppService } from "./app.service";
|
||||
import { CsrfController } from "./common/controllers/csrf.controller";
|
||||
import { PrismaModule } from "./prisma/prisma.module";
|
||||
import { DatabaseModule } from "./database/database.module";
|
||||
import { AuthModule } from "./auth/auth.module";
|
||||
@@ -20,9 +23,12 @@ import { KnowledgeModule } from "./knowledge/knowledge.module";
|
||||
import { UsersModule } from "./users/users.module";
|
||||
import { WebSocketModule } from "./websocket/websocket.module";
|
||||
import { LlmModule } from "./llm/llm.module";
|
||||
import { LlmUsageModule } from "./llm-usage/llm-usage.module";
|
||||
import { BrainModule } from "./brain/brain.module";
|
||||
import { CronModule } from "./cron/cron.module";
|
||||
import { AgentTasksModule } from "./agent-tasks/agent-tasks.module";
|
||||
import { FindingsModule } from "./findings/findings.module";
|
||||
import { AgentMemoryModule } from "./agent-memory/agent-memory.module";
|
||||
import { ValkeyModule } from "./valkey/valkey.module";
|
||||
import { BullMqModule } from "./bullmq/bullmq.module";
|
||||
import { StitcherModule } from "./stitcher/stitcher.module";
|
||||
@@ -32,6 +38,18 @@ import { JobEventsModule } from "./job-events/job-events.module";
|
||||
import { JobStepsModule } from "./job-steps/job-steps.module";
|
||||
import { CoordinatorIntegrationModule } from "./coordinator-integration/coordinator-integration.module";
|
||||
import { FederationModule } from "./federation/federation.module";
|
||||
import { CredentialsModule } from "./credentials/credentials.module";
|
||||
import { MosaicTelemetryModule } from "./mosaic-telemetry";
|
||||
import { SpeechModule } from "./speech/speech.module";
|
||||
import { DashboardModule } from "./dashboard/dashboard.module";
|
||||
import { TerminalModule } from "./terminal/terminal.module";
|
||||
import { PersonalitiesModule } from "./personalities/personalities.module";
|
||||
import { WorkspacesModule } from "./workspaces/workspaces.module";
|
||||
import { AdminModule } from "./admin/admin.module";
|
||||
import { TeamsModule } from "./teams/teams.module";
|
||||
import { ImportModule } from "./import/import.module";
|
||||
import { ConversationArchiveModule } from "./conversation-archive/conversation-archive.module";
|
||||
import { RlsContextInterceptor } from "./common/interceptors/rls-context.interceptor";
|
||||
|
||||
@Module({
|
||||
imports: [
|
||||
@@ -54,10 +72,13 @@ import { FederationModule } from "./federation/federation.module";
|
||||
}),
|
||||
// BullMQ job queue configuration
|
||||
BullModule.forRoot({
|
||||
connection: {
|
||||
host: process.env.VALKEY_HOST ?? "localhost",
|
||||
port: parseInt(process.env.VALKEY_PORT ?? "6379", 10),
|
||||
},
|
||||
connection: (() => {
|
||||
const url = new URL(process.env.VALKEY_URL ?? "redis://localhost:6379");
|
||||
return {
|
||||
host: url.hostname,
|
||||
port: parseInt(url.port || "6379", 10),
|
||||
};
|
||||
})(),
|
||||
}),
|
||||
TelemetryModule,
|
||||
PrismaModule,
|
||||
@@ -78,26 +99,49 @@ import { FederationModule } from "./federation/federation.module";
|
||||
UsersModule,
|
||||
WebSocketModule,
|
||||
LlmModule,
|
||||
LlmUsageModule,
|
||||
BrainModule,
|
||||
CronModule,
|
||||
AgentTasksModule,
|
||||
FindingsModule,
|
||||
AgentMemoryModule,
|
||||
RunnerJobsModule,
|
||||
JobEventsModule,
|
||||
JobStepsModule,
|
||||
CoordinatorIntegrationModule,
|
||||
FederationModule,
|
||||
CredentialsModule,
|
||||
MosaicTelemetryModule,
|
||||
SpeechModule,
|
||||
DashboardModule,
|
||||
TerminalModule,
|
||||
PersonalitiesModule,
|
||||
WorkspacesModule,
|
||||
AdminModule,
|
||||
TeamsModule,
|
||||
ImportModule,
|
||||
ConversationArchiveModule,
|
||||
],
|
||||
controllers: [AppController],
|
||||
controllers: [AppController, CsrfController],
|
||||
providers: [
|
||||
AppService,
|
||||
CsrfService,
|
||||
{
|
||||
provide: APP_INTERCEPTOR,
|
||||
useClass: TelemetryInterceptor,
|
||||
},
|
||||
{
|
||||
provide: APP_INTERCEPTOR,
|
||||
useClass: RlsContextInterceptor,
|
||||
},
|
||||
{
|
||||
provide: APP_GUARD,
|
||||
useClass: ThrottlerApiKeyGuard,
|
||||
},
|
||||
{
|
||||
provide: APP_GUARD,
|
||||
useClass: CsrfGuard,
|
||||
},
|
||||
],
|
||||
})
|
||||
export class AppModule {}
|
||||
|
||||
680
apps/api/src/auth/auth-rls.integration.spec.ts
Normal file
680
apps/api/src/auth/auth-rls.integration.spec.ts
Normal file
@@ -0,0 +1,680 @@
|
||||
/**
|
||||
* Auth Tables RLS Integration Tests
|
||||
*
|
||||
* Tests that RLS policies on accounts and sessions tables correctly
|
||||
* enforce user-scoped access and prevent cross-user data leakage.
|
||||
*
|
||||
* Related: #350 - Add RLS policies to auth tables with FORCE enforcement
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeAll, afterAll } from "vitest";
|
||||
import { PrismaClient, Prisma } from "@prisma/client";
|
||||
import { randomUUID as uuid } from "crypto";
|
||||
import { runWithRlsClient, getRlsClient } from "../prisma/rls-context.provider";
|
||||
|
||||
const shouldRunDbIntegrationTests =
|
||||
process.env.RUN_DB_TESTS === "true" && Boolean(process.env.DATABASE_URL);
|
||||
|
||||
describe.skipIf(!shouldRunDbIntegrationTests)(
|
||||
"Auth Tables RLS Policies (requires DATABASE_URL)",
|
||||
() => {
|
||||
let prisma: PrismaClient;
|
||||
const testData: {
|
||||
users: string[];
|
||||
accounts: string[];
|
||||
sessions: string[];
|
||||
} = {
|
||||
users: [],
|
||||
accounts: [],
|
||||
sessions: [],
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
// Skip setup if DATABASE_URL is not available
|
||||
if (!shouldRunDbIntegrationTests) {
|
||||
return;
|
||||
}
|
||||
|
||||
prisma = new PrismaClient();
|
||||
await prisma.$connect();
|
||||
|
||||
// RLS policies are bypassed for superusers
|
||||
const [{ rolsuper }] = await prisma.$queryRaw<[{ rolsuper: boolean }]>`
|
||||
SELECT rolsuper FROM pg_roles WHERE rolname = current_user
|
||||
`;
|
||||
if (rolsuper) {
|
||||
throw new Error(
|
||||
"Auth RLS integration tests require a non-superuser database role. " +
|
||||
"See migration 20260207_add_auth_rls_policies for setup instructions."
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
// Skip cleanup if DATABASE_URL is not available or prisma not initialized
|
||||
if (!shouldRunDbIntegrationTests || !prisma) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// Clean up test data
|
||||
if (testData.sessions.length > 0) {
|
||||
await prisma.session.deleteMany({
|
||||
where: { id: { in: testData.sessions } },
|
||||
});
|
||||
}
|
||||
|
||||
if (testData.accounts.length > 0) {
|
||||
await prisma.account.deleteMany({
|
||||
where: { id: { in: testData.accounts } },
|
||||
});
|
||||
}
|
||||
|
||||
if (testData.users.length > 0) {
|
||||
await prisma.user.deleteMany({
|
||||
where: { id: { in: testData.users } },
|
||||
});
|
||||
}
|
||||
|
||||
await prisma.$disconnect();
|
||||
} catch (error) {
|
||||
console.error(
|
||||
"Test cleanup failed:",
|
||||
error instanceof Error ? error.message : String(error)
|
||||
);
|
||||
// Re-throw to make test failure visible
|
||||
throw new Error(
|
||||
"Test cleanup failed. Database may contain orphaned test data. " +
|
||||
`Error: ${error instanceof Error ? error.message : String(error)}`
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
async function createTestUser(email: string): Promise<string> {
|
||||
const userId = uuid();
|
||||
await prisma.user.create({
|
||||
data: {
|
||||
id: userId,
|
||||
email,
|
||||
name: `Test User ${email}`,
|
||||
authProviderId: `auth-${userId}`,
|
||||
},
|
||||
});
|
||||
testData.users.push(userId);
|
||||
return userId;
|
||||
}
|
||||
|
||||
async function createTestAccount(userId: string, token: string): Promise<string> {
|
||||
const accountId = uuid();
|
||||
await prisma.account.create({
|
||||
data: {
|
||||
id: accountId,
|
||||
userId,
|
||||
accountId: `account-${accountId}`,
|
||||
providerId: "test-provider",
|
||||
accessToken: token,
|
||||
},
|
||||
});
|
||||
testData.accounts.push(accountId);
|
||||
return accountId;
|
||||
}
|
||||
|
||||
async function createTestSession(userId: string): Promise<string> {
|
||||
const sessionId = uuid();
|
||||
await prisma.session.create({
|
||||
data: {
|
||||
id: sessionId,
|
||||
userId,
|
||||
token: `session-${sessionId}-${Date.now()}`,
|
||||
expiresAt: new Date(Date.now() + 86400000),
|
||||
},
|
||||
});
|
||||
testData.sessions.push(sessionId);
|
||||
return sessionId;
|
||||
}
|
||||
|
||||
describe("Account table RLS", () => {
|
||||
it("should allow user to read their own accounts when RLS context is set", async () => {
|
||||
const user1Id = await createTestUser("account-read-own@test.com");
|
||||
const account1Id = await createTestAccount(user1Id, "user1-token");
|
||||
|
||||
// Use runWithRlsClient to set RLS context
|
||||
const result = await prisma.$transaction(async (tx) => {
|
||||
await tx.$executeRaw`SELECT set_config('app.current_user_id', ${user1Id}::text, true)`;
|
||||
|
||||
return runWithRlsClient(tx, async () => {
|
||||
const client = getRlsClient()!;
|
||||
const accounts = await client.account.findMany({
|
||||
where: { userId: user1Id },
|
||||
});
|
||||
|
||||
return accounts;
|
||||
});
|
||||
});
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].id).toBe(account1Id);
|
||||
expect(result[0].accessToken).toBe("user1-token");
|
||||
});
|
||||
|
||||
it("should prevent user from reading other users accounts", async () => {
|
||||
const user1Id = await createTestUser("account-read-self@test.com");
|
||||
const user2Id = await createTestUser("account-read-other@test.com");
|
||||
await createTestAccount(user1Id, "user1-token");
|
||||
await createTestAccount(user2Id, "user2-token");
|
||||
|
||||
// Set RLS context for user1, try to read user2's accounts
|
||||
const result = await prisma.$transaction(async (tx) => {
|
||||
await tx.$executeRaw`SELECT set_config('app.current_user_id', ${user1Id}::text, true)`;
|
||||
|
||||
return runWithRlsClient(tx, async () => {
|
||||
const client = getRlsClient()!;
|
||||
const accounts = await client.account.findMany({
|
||||
where: { userId: user2Id },
|
||||
});
|
||||
|
||||
return accounts;
|
||||
});
|
||||
});
|
||||
|
||||
// Should return empty array due to RLS policy
|
||||
expect(result).toHaveLength(0);
|
||||
});
|
||||
|
||||
it("should prevent direct access by ID to other users accounts", async () => {
|
||||
const user1Id = await createTestUser("account-id-self@test.com");
|
||||
const user2Id = await createTestUser("account-id-other@test.com");
|
||||
await createTestAccount(user1Id, "user1-token");
|
||||
const account2Id = await createTestAccount(user2Id, "user2-token");
|
||||
|
||||
// Set RLS context for user1, try to read user2's account by ID
|
||||
const result = await prisma.$transaction(async (tx) => {
|
||||
await tx.$executeRaw`SELECT set_config('app.current_user_id', ${user1Id}::text, true)`;
|
||||
|
||||
return runWithRlsClient(tx, async () => {
|
||||
const client = getRlsClient()!;
|
||||
const account = await client.account.findUnique({
|
||||
where: { id: account2Id },
|
||||
});
|
||||
|
||||
return account;
|
||||
});
|
||||
});
|
||||
|
||||
// Should return null due to RLS policy
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("should allow user to create their own accounts", async () => {
|
||||
const user1Id = await createTestUser("account-create-own@test.com");
|
||||
|
||||
// Set RLS context for user1, create their own account
|
||||
const result = await prisma.$transaction(async (tx) => {
|
||||
await tx.$executeRaw`SELECT set_config('app.current_user_id', ${user1Id}::text, true)`;
|
||||
|
||||
return runWithRlsClient(tx, async () => {
|
||||
const client = getRlsClient()!;
|
||||
const newAccount = await client.account.create({
|
||||
data: {
|
||||
id: uuid(),
|
||||
userId: user1Id,
|
||||
accountId: "new-account",
|
||||
providerId: "test-provider",
|
||||
accessToken: "new-token",
|
||||
},
|
||||
});
|
||||
|
||||
testData.accounts.push(newAccount.id);
|
||||
return newAccount;
|
||||
});
|
||||
});
|
||||
|
||||
expect(result).toBeDefined();
|
||||
expect(result.userId).toBe(user1Id);
|
||||
expect(result.accessToken).toBe("new-token");
|
||||
});
|
||||
|
||||
it("should prevent user from creating accounts for other users", async () => {
|
||||
const user1Id = await createTestUser("account-create-self@test.com");
|
||||
const user2Id = await createTestUser("account-create-other@test.com");
|
||||
|
||||
// Set RLS context for user1, try to create an account for user2
|
||||
await expect(
|
||||
prisma.$transaction(async (tx) => {
|
||||
await tx.$executeRaw`SELECT set_config('app.current_user_id', ${user1Id}::text, true)`;
|
||||
|
||||
return runWithRlsClient(tx, async () => {
|
||||
const client = getRlsClient()!;
|
||||
const newAccount = await client.account.create({
|
||||
data: {
|
||||
id: uuid(),
|
||||
userId: user2Id, // Trying to create for user2 while logged in as user1
|
||||
accountId: "hacked-account",
|
||||
providerId: "test-provider",
|
||||
accessToken: "hacked-token",
|
||||
},
|
||||
});
|
||||
|
||||
testData.accounts.push(newAccount.id);
|
||||
return newAccount;
|
||||
});
|
||||
})
|
||||
).rejects.toThrow();
|
||||
});
|
||||
|
||||
it("should allow user to update their own accounts", async () => {
|
||||
const user1Id = await createTestUser("account-update-own@test.com");
|
||||
const account1Id = await createTestAccount(user1Id, "original-token");
|
||||
|
||||
const result = await prisma.$transaction(async (tx) => {
|
||||
await tx.$executeRaw`SELECT set_config('app.current_user_id', ${user1Id}::text, true)`;
|
||||
|
||||
return runWithRlsClient(tx, async () => {
|
||||
const client = getRlsClient()!;
|
||||
const updated = await client.account.update({
|
||||
where: { id: account1Id },
|
||||
data: { accessToken: "updated-token" },
|
||||
});
|
||||
|
||||
return updated;
|
||||
});
|
||||
});
|
||||
|
||||
expect(result.accessToken).toBe("updated-token");
|
||||
});
|
||||
|
||||
it("should prevent user from updating other users accounts", async () => {
|
||||
const user1Id = await createTestUser("account-update-self@test.com");
|
||||
const user2Id = await createTestUser("account-update-other@test.com");
|
||||
await createTestAccount(user1Id, "user1-token");
|
||||
const account2Id = await createTestAccount(user2Id, "user2-token");
|
||||
|
||||
// Set RLS context for user1, try to update user2's account
|
||||
await expect(
|
||||
prisma.$transaction(async (tx) => {
|
||||
await tx.$executeRaw`SELECT set_config('app.current_user_id', ${user1Id}::text, true)`;
|
||||
|
||||
return runWithRlsClient(tx, async () => {
|
||||
const client = getRlsClient()!;
|
||||
await client.account.update({
|
||||
where: { id: account2Id },
|
||||
data: { accessToken: "hacked-token" },
|
||||
});
|
||||
});
|
||||
})
|
||||
).rejects.toThrow();
|
||||
});
|
||||
|
||||
it("should prevent user from deleting other users accounts", async () => {
|
||||
const user1Id = await createTestUser("account-delete-self@test.com");
|
||||
const user2Id = await createTestUser("account-delete-other@test.com");
|
||||
await createTestAccount(user1Id, "user1-token");
|
||||
const account2Id = await createTestAccount(user2Id, "user2-token");
|
||||
|
||||
// Set RLS context for user1, try to delete user2's account
|
||||
await expect(
|
||||
prisma.$transaction(async (tx) => {
|
||||
await tx.$executeRaw`SELECT set_config('app.current_user_id', ${user1Id}::text, true)`;
|
||||
|
||||
return runWithRlsClient(tx, async () => {
|
||||
const client = getRlsClient()!;
|
||||
await client.account.delete({
|
||||
where: { id: account2Id },
|
||||
});
|
||||
});
|
||||
})
|
||||
).rejects.toThrow();
|
||||
|
||||
// Verify the record still exists and wasn't deleted
|
||||
const stillExists = await prisma.account.findUnique({ where: { id: account2Id } });
|
||||
expect(stillExists).not.toBeNull();
|
||||
expect(stillExists?.userId).toBe(user2Id);
|
||||
});
|
||||
|
||||
it("should allow user to delete their own accounts", async () => {
|
||||
const user1Id = await createTestUser("account-delete-own@test.com");
|
||||
const account1Id = await createTestAccount(user1Id, "user1-token");
|
||||
|
||||
// Set RLS context for user1, delete their own account
|
||||
await prisma.$transaction(async (tx) => {
|
||||
await tx.$executeRaw`SELECT set_config('app.current_user_id', ${user1Id}::text, true)`;
|
||||
|
||||
return runWithRlsClient(tx, async () => {
|
||||
const client = getRlsClient()!;
|
||||
await client.account.delete({
|
||||
where: { id: account1Id },
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// Verify the record was actually deleted
|
||||
const deleted = await prisma.account.findUnique({ where: { id: account1Id } });
|
||||
expect(deleted).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe("Session table RLS", () => {
|
||||
it("should allow user to read their own sessions when RLS context is set", async () => {
|
||||
const user1Id = await createTestUser("session-read-own@test.com");
|
||||
const session1Id = await createTestSession(user1Id);
|
||||
|
||||
const result = await prisma.$transaction(async (tx) => {
|
||||
await tx.$executeRaw`SELECT set_config('app.current_user_id', ${user1Id}::text, true)`;
|
||||
|
||||
return runWithRlsClient(tx, async () => {
|
||||
const client = getRlsClient()!;
|
||||
const sessions = await client.session.findMany({
|
||||
where: { userId: user1Id },
|
||||
});
|
||||
|
||||
return sessions;
|
||||
});
|
||||
});
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].id).toBe(session1Id);
|
||||
});
|
||||
|
||||
it("should prevent user from reading other users sessions", async () => {
|
||||
const user1Id = await createTestUser("session-read-self@test.com");
|
||||
const user2Id = await createTestUser("session-read-other@test.com");
|
||||
await createTestSession(user1Id);
|
||||
await createTestSession(user2Id);
|
||||
|
||||
const result = await prisma.$transaction(async (tx) => {
|
||||
await tx.$executeRaw`SELECT set_config('app.current_user_id', ${user1Id}::text, true)`;
|
||||
|
||||
return runWithRlsClient(tx, async () => {
|
||||
const client = getRlsClient()!;
|
||||
const sessions = await client.session.findMany({
|
||||
where: { userId: user2Id },
|
||||
});
|
||||
|
||||
return sessions;
|
||||
});
|
||||
});
|
||||
|
||||
// Should return empty array due to RLS policy
|
||||
expect(result).toHaveLength(0);
|
||||
});
|
||||
|
||||
it("should prevent direct access by ID to other users sessions", async () => {
|
||||
const user1Id = await createTestUser("session-id-self@test.com");
|
||||
const user2Id = await createTestUser("session-id-other@test.com");
|
||||
await createTestSession(user1Id);
|
||||
const session2Id = await createTestSession(user2Id);
|
||||
|
||||
const result = await prisma.$transaction(async (tx) => {
|
||||
await tx.$executeRaw`SELECT set_config('app.current_user_id', ${user1Id}::text, true)`;
|
||||
|
||||
return runWithRlsClient(tx, async () => {
|
||||
const client = getRlsClient()!;
|
||||
const session = await client.session.findUnique({
|
||||
where: { id: session2Id },
|
||||
});
|
||||
|
||||
return session;
|
||||
});
|
||||
});
|
||||
|
||||
// Should return null due to RLS policy
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("should allow user to create their own sessions", async () => {
|
||||
const user1Id = await createTestUser("session-create-own@test.com");
|
||||
|
||||
// Set RLS context for user1, create their own session
|
||||
const result = await prisma.$transaction(async (tx) => {
|
||||
await tx.$executeRaw`SELECT set_config('app.current_user_id', ${user1Id}::text, true)`;
|
||||
|
||||
return runWithRlsClient(tx, async () => {
|
||||
const client = getRlsClient()!;
|
||||
const newSession = await client.session.create({
|
||||
data: {
|
||||
id: uuid(),
|
||||
userId: user1Id,
|
||||
token: `new-session-${Date.now()}`,
|
||||
expiresAt: new Date(Date.now() + 86400000),
|
||||
},
|
||||
});
|
||||
|
||||
testData.sessions.push(newSession.id);
|
||||
return newSession;
|
||||
});
|
||||
});
|
||||
|
||||
expect(result).toBeDefined();
|
||||
expect(result.userId).toBe(user1Id);
|
||||
expect(result.token).toContain("new-session");
|
||||
});
|
||||
|
||||
it("should prevent user from creating sessions for other users", async () => {
|
||||
const user1Id = await createTestUser("session-create-self@test.com");
|
||||
const user2Id = await createTestUser("session-create-other@test.com");
|
||||
|
||||
// Set RLS context for user1, try to create a session for user2
|
||||
await expect(
|
||||
prisma.$transaction(async (tx) => {
|
||||
await tx.$executeRaw`SELECT set_config('app.current_user_id', ${user1Id}::text, true)`;
|
||||
|
||||
return runWithRlsClient(tx, async () => {
|
||||
const client = getRlsClient()!;
|
||||
const newSession = await client.session.create({
|
||||
data: {
|
||||
id: uuid(),
|
||||
userId: user2Id, // Trying to create for user2 while logged in as user1
|
||||
token: `hacked-session-${Date.now()}`,
|
||||
expiresAt: new Date(Date.now() + 86400000),
|
||||
},
|
||||
});
|
||||
|
||||
testData.sessions.push(newSession.id);
|
||||
return newSession;
|
||||
});
|
||||
})
|
||||
).rejects.toThrow();
|
||||
});
|
||||
|
||||
it("should allow user to update their own sessions", async () => {
|
||||
const user1Id = await createTestUser("session-update-own@test.com");
|
||||
const session1Id = await createTestSession(user1Id);
|
||||
|
||||
const result = await prisma.$transaction(async (tx) => {
|
||||
await tx.$executeRaw`SELECT set_config('app.current_user_id', ${user1Id}::text, true)`;
|
||||
|
||||
return runWithRlsClient(tx, async () => {
|
||||
const client = getRlsClient()!;
|
||||
const updated = await client.session.update({
|
||||
where: { id: session1Id },
|
||||
data: { ipAddress: "192.168.1.1" },
|
||||
});
|
||||
|
||||
return updated;
|
||||
});
|
||||
});
|
||||
|
||||
expect(result.ipAddress).toBe("192.168.1.1");
|
||||
});
|
||||
|
||||
it("should prevent user from updating other users sessions", async () => {
|
||||
const user1Id = await createTestUser("session-update-self@test.com");
|
||||
const user2Id = await createTestUser("session-update-other@test.com");
|
||||
await createTestSession(user1Id);
|
||||
const session2Id = await createTestSession(user2Id);
|
||||
|
||||
await expect(
|
||||
prisma.$transaction(async (tx) => {
|
||||
await tx.$executeRaw`SELECT set_config('app.current_user_id', ${user1Id}::text, true)`;
|
||||
|
||||
return runWithRlsClient(tx, async () => {
|
||||
const client = getRlsClient()!;
|
||||
await client.session.update({
|
||||
where: { id: session2Id },
|
||||
data: { ipAddress: "10.0.0.1" },
|
||||
});
|
||||
});
|
||||
})
|
||||
).rejects.toThrow();
|
||||
});
|
||||
|
||||
it("should prevent user from deleting other users sessions", async () => {
|
||||
const user1Id = await createTestUser("session-delete-self@test.com");
|
||||
const user2Id = await createTestUser("session-delete-other@test.com");
|
||||
await createTestSession(user1Id);
|
||||
const session2Id = await createTestSession(user2Id);
|
||||
|
||||
await expect(
|
||||
prisma.$transaction(async (tx) => {
|
||||
await tx.$executeRaw`SELECT set_config('app.current_user_id', ${user1Id}::text, true)`;
|
||||
|
||||
return runWithRlsClient(tx, async () => {
|
||||
const client = getRlsClient()!;
|
||||
await client.session.delete({
|
||||
where: { id: session2Id },
|
||||
});
|
||||
});
|
||||
})
|
||||
).rejects.toThrow();
|
||||
|
||||
// Verify the record still exists and wasn't deleted
|
||||
const stillExists = await prisma.session.findUnique({ where: { id: session2Id } });
|
||||
expect(stillExists).not.toBeNull();
|
||||
expect(stillExists?.userId).toBe(user2Id);
|
||||
});
|
||||
|
||||
it("should allow user to delete their own sessions", async () => {
|
||||
const user1Id = await createTestUser("session-delete-own@test.com");
|
||||
const session1Id = await createTestSession(user1Id);
|
||||
|
||||
// Set RLS context for user1, delete their own session
|
||||
await prisma.$transaction(async (tx) => {
|
||||
await tx.$executeRaw`SELECT set_config('app.current_user_id', ${user1Id}::text, true)`;
|
||||
|
||||
return runWithRlsClient(tx, async () => {
|
||||
const client = getRlsClient()!;
|
||||
await client.session.delete({
|
||||
where: { id: session1Id },
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// Verify the record was actually deleted
|
||||
const deleted = await prisma.session.findUnique({ where: { id: session1Id } });
|
||||
expect(deleted).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe("Owner bypass policy", () => {
|
||||
it("should allow table owner to access all records without RLS context", async () => {
|
||||
const user1Id = await createTestUser("owner-bypass-1@test.com");
|
||||
const user2Id = await createTestUser("owner-bypass-2@test.com");
|
||||
const account1Id = await createTestAccount(user1Id, "token1");
|
||||
const account2Id = await createTestAccount(user2Id, "token2");
|
||||
|
||||
// Don't set RLS context - rely on owner bypass policy
|
||||
const accounts = await prisma.account.findMany({
|
||||
where: {
|
||||
id: { in: [account1Id, account2Id] },
|
||||
},
|
||||
});
|
||||
|
||||
// Owner should see both accounts
|
||||
expect(accounts).toHaveLength(2);
|
||||
});
|
||||
|
||||
it("should allow migrations to work without RLS context", async () => {
|
||||
const userId = await createTestUser("migration-test@test.com");
|
||||
|
||||
// This simulates a migration or BetterAuth internal operation
|
||||
// that doesn't have RLS context set
|
||||
const newAccount = await prisma.account.create({
|
||||
data: {
|
||||
id: uuid(),
|
||||
userId,
|
||||
accountId: "migration-test-account",
|
||||
providerId: "test-migration",
|
||||
},
|
||||
});
|
||||
|
||||
expect(newAccount.id).toBeDefined();
|
||||
|
||||
// Clean up
|
||||
await prisma.account.delete({
|
||||
where: { id: newAccount.id },
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("RLS context isolation", () => {
|
||||
it("should enforce RLS when context is set, even for table owner", async () => {
|
||||
const user1Id = await createTestUser("rls-enforce-1@test.com");
|
||||
const user2Id = await createTestUser("rls-enforce-2@test.com");
|
||||
const account1Id = await createTestAccount(user1Id, "token1");
|
||||
const account2Id = await createTestAccount(user2Id, "token2");
|
||||
|
||||
// With RLS context set for user1, they should only see their own account
|
||||
const result = await prisma.$transaction(async (tx) => {
|
||||
await tx.$executeRaw`SELECT set_config('app.current_user_id', ${user1Id}::text, true)`;
|
||||
|
||||
return runWithRlsClient(tx, async () => {
|
||||
const client = getRlsClient()!;
|
||||
const accounts = await client.account.findMany({
|
||||
where: {
|
||||
id: { in: [account1Id, account2Id] },
|
||||
},
|
||||
});
|
||||
|
||||
return accounts;
|
||||
});
|
||||
});
|
||||
|
||||
// Should only see user1's account, not user2's
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].id).toBe(account1Id);
|
||||
});
|
||||
|
||||
it("should allow different users to see only their own data in separate contexts", async () => {
|
||||
const user1Id = await createTestUser("context-user1@test.com");
|
||||
const user2Id = await createTestUser("context-user2@test.com");
|
||||
const session1Id = await createTestSession(user1Id);
|
||||
const session2Id = await createTestSession(user2Id);
|
||||
|
||||
// User1 context - query for both sessions, but RLS should only return user1's
|
||||
const user1Result = await prisma.$transaction(async (tx) => {
|
||||
await tx.$executeRaw`SELECT set_config('app.current_user_id', ${user1Id}::text, true)`;
|
||||
|
||||
return runWithRlsClient(tx, async () => {
|
||||
const client = getRlsClient()!;
|
||||
return client.session.findMany({
|
||||
where: {
|
||||
id: { in: [session1Id, session2Id] },
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// User2 context - query for both sessions, but RLS should only return user2's
|
||||
const user2Result = await prisma.$transaction(async (tx) => {
|
||||
await tx.$executeRaw`SELECT set_config('app.current_user_id', ${user2Id}::text, true)`;
|
||||
|
||||
return runWithRlsClient(tx, async () => {
|
||||
const client = getRlsClient()!;
|
||||
return client.session.findMany({
|
||||
where: {
|
||||
id: { in: [session1Id, session2Id] },
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// Each user should only see their own session
|
||||
expect(user1Result).toHaveLength(1);
|
||||
expect(user1Result[0].id).toBe(session1Id);
|
||||
|
||||
expect(user2Result).toHaveLength(1);
|
||||
expect(user2Result[0].id).toBe(session2Id);
|
||||
});
|
||||
});
|
||||
}
|
||||
);
|
||||
716
apps/api/src/auth/auth.config.spec.ts
Normal file
716
apps/api/src/auth/auth.config.spec.ts
Normal file
@@ -0,0 +1,716 @@
|
||||
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||
import type { PrismaClient } from "@prisma/client";
|
||||
|
||||
// Mock better-auth modules to inspect genericOAuth plugin configuration
|
||||
const mockGenericOAuth = vi.fn().mockReturnValue({ id: "generic-oauth" });
|
||||
const mockBetterAuth = vi.fn().mockReturnValue({ handler: vi.fn() });
|
||||
const mockPrismaAdapter = vi.fn().mockReturnValue({});
|
||||
|
||||
vi.mock("better-auth/plugins", () => ({
|
||||
genericOAuth: (...args: unknown[]) => mockGenericOAuth(...args),
|
||||
}));
|
||||
|
||||
vi.mock("better-auth", () => ({
|
||||
betterAuth: (...args: unknown[]) => mockBetterAuth(...args),
|
||||
}));
|
||||
|
||||
vi.mock("better-auth/adapters/prisma", () => ({
|
||||
prismaAdapter: (...args: unknown[]) => mockPrismaAdapter(...args),
|
||||
}));
|
||||
|
||||
import {
|
||||
isOidcEnabled,
|
||||
validateOidcConfig,
|
||||
createAuth,
|
||||
getTrustedOrigins,
|
||||
getBetterAuthBaseUrl,
|
||||
} from "./auth.config";
|
||||
|
||||
describe("auth.config", () => {
|
||||
// Store original env vars to restore after each test
|
||||
const originalEnv = { ...process.env };
|
||||
|
||||
beforeEach(() => {
|
||||
// Clear relevant env vars before each test
|
||||
delete process.env.OIDC_ENABLED;
|
||||
delete process.env.OIDC_ISSUER;
|
||||
delete process.env.OIDC_CLIENT_ID;
|
||||
delete process.env.OIDC_CLIENT_SECRET;
|
||||
delete process.env.OIDC_REDIRECT_URI;
|
||||
delete process.env.NODE_ENV;
|
||||
delete process.env.BETTER_AUTH_URL;
|
||||
delete process.env.NEXT_PUBLIC_APP_URL;
|
||||
delete process.env.NEXT_PUBLIC_API_URL;
|
||||
delete process.env.TRUSTED_ORIGINS;
|
||||
delete process.env.COOKIE_DOMAIN;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Restore original env vars
|
||||
process.env = { ...originalEnv };
|
||||
});
|
||||
|
||||
describe("isOidcEnabled", () => {
|
||||
it("should return false when OIDC_ENABLED is not set", () => {
|
||||
expect(isOidcEnabled()).toBe(false);
|
||||
});
|
||||
|
||||
it("should return false when OIDC_ENABLED is 'false'", () => {
|
||||
process.env.OIDC_ENABLED = "false";
|
||||
expect(isOidcEnabled()).toBe(false);
|
||||
});
|
||||
|
||||
it("should return false when OIDC_ENABLED is '0'", () => {
|
||||
process.env.OIDC_ENABLED = "0";
|
||||
expect(isOidcEnabled()).toBe(false);
|
||||
});
|
||||
|
||||
it("should return false when OIDC_ENABLED is empty string", () => {
|
||||
process.env.OIDC_ENABLED = "";
|
||||
expect(isOidcEnabled()).toBe(false);
|
||||
});
|
||||
|
||||
it("should return true when OIDC_ENABLED is 'true'", () => {
|
||||
process.env.OIDC_ENABLED = "true";
|
||||
expect(isOidcEnabled()).toBe(true);
|
||||
});
|
||||
|
||||
it("should return true when OIDC_ENABLED is '1'", () => {
|
||||
process.env.OIDC_ENABLED = "1";
|
||||
expect(isOidcEnabled()).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("validateOidcConfig", () => {
|
||||
describe("when OIDC is disabled", () => {
|
||||
it("should not throw when OIDC_ENABLED is not set", () => {
|
||||
expect(() => validateOidcConfig()).not.toThrow();
|
||||
});
|
||||
|
||||
it("should not throw when OIDC_ENABLED is false even if vars are missing", () => {
|
||||
process.env.OIDC_ENABLED = "false";
|
||||
// Intentionally not setting any OIDC vars
|
||||
expect(() => validateOidcConfig()).not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe("when OIDC is enabled", () => {
|
||||
beforeEach(() => {
|
||||
process.env.OIDC_ENABLED = "true";
|
||||
});
|
||||
|
||||
it("should throw when OIDC_ISSUER is missing", () => {
|
||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||
|
||||
expect(() => validateOidcConfig()).toThrow("OIDC_ISSUER");
|
||||
expect(() => validateOidcConfig()).toThrow("OIDC authentication is enabled");
|
||||
});
|
||||
|
||||
it("should throw when OIDC_CLIENT_ID is missing", () => {
|
||||
process.env.OIDC_ISSUER = "https://auth.example.com/";
|
||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||
|
||||
expect(() => validateOidcConfig()).toThrow("OIDC_CLIENT_ID");
|
||||
});
|
||||
|
||||
it("should throw when OIDC_CLIENT_SECRET is missing", () => {
|
||||
process.env.OIDC_ISSUER = "https://auth.example.com/";
|
||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||
|
||||
expect(() => validateOidcConfig()).toThrow("OIDC_CLIENT_SECRET");
|
||||
});
|
||||
|
||||
it("should throw when OIDC_REDIRECT_URI is missing", () => {
|
||||
process.env.OIDC_ISSUER = "https://auth.example.com/";
|
||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||
|
||||
expect(() => validateOidcConfig()).toThrow("OIDC_REDIRECT_URI");
|
||||
});
|
||||
|
||||
it("should throw when all required vars are missing", () => {
|
||||
expect(() => validateOidcConfig()).toThrow(
|
||||
"OIDC_ISSUER, OIDC_CLIENT_ID, OIDC_CLIENT_SECRET, OIDC_REDIRECT_URI"
|
||||
);
|
||||
});
|
||||
|
||||
it("should throw when vars are empty strings", () => {
|
||||
process.env.OIDC_ISSUER = "";
|
||||
process.env.OIDC_CLIENT_ID = "";
|
||||
process.env.OIDC_CLIENT_SECRET = "";
|
||||
process.env.OIDC_REDIRECT_URI = "";
|
||||
|
||||
expect(() => validateOidcConfig()).toThrow(
|
||||
"OIDC_ISSUER, OIDC_CLIENT_ID, OIDC_CLIENT_SECRET, OIDC_REDIRECT_URI"
|
||||
);
|
||||
});
|
||||
|
||||
it("should throw when vars are whitespace only", () => {
|
||||
process.env.OIDC_ISSUER = " ";
|
||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||
|
||||
expect(() => validateOidcConfig()).toThrow("OIDC_ISSUER");
|
||||
});
|
||||
|
||||
it("should throw when OIDC_ISSUER does not end with trailing slash", () => {
|
||||
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic";
|
||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||
|
||||
expect(() => validateOidcConfig()).toThrow("OIDC_ISSUER must end with a trailing slash");
|
||||
expect(() => validateOidcConfig()).toThrow("https://auth.example.com/application/o/mosaic");
|
||||
});
|
||||
|
||||
it("should not throw with valid complete configuration", () => {
|
||||
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic-stack/";
|
||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||
|
||||
expect(() => validateOidcConfig()).not.toThrow();
|
||||
});
|
||||
|
||||
it("should suggest disabling OIDC in error message", () => {
|
||||
expect(() => validateOidcConfig()).toThrow("OIDC_ENABLED=false");
|
||||
});
|
||||
|
||||
describe("OIDC_REDIRECT_URI validation", () => {
|
||||
beforeEach(() => {
|
||||
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic-stack/";
|
||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||
});
|
||||
|
||||
it("should throw when OIDC_REDIRECT_URI is not a valid URL", () => {
|
||||
process.env.OIDC_REDIRECT_URI = "not-a-url";
|
||||
|
||||
expect(() => validateOidcConfig()).toThrow("OIDC_REDIRECT_URI must be a valid URL");
|
||||
expect(() => validateOidcConfig()).toThrow("not-a-url");
|
||||
expect(() => validateOidcConfig()).toThrow("Parse error:");
|
||||
});
|
||||
|
||||
it("should throw when OIDC_REDIRECT_URI path does not start with /auth/oauth2/callback", () => {
|
||||
process.env.OIDC_REDIRECT_URI = "https://app.example.com/oauth/callback";
|
||||
|
||||
expect(() => validateOidcConfig()).toThrow(
|
||||
'OIDC_REDIRECT_URI path must start with "/auth/oauth2/callback"'
|
||||
);
|
||||
expect(() => validateOidcConfig()).toThrow("/oauth/callback");
|
||||
});
|
||||
|
||||
it("should accept a valid OIDC_REDIRECT_URI with /auth/oauth2/callback path", () => {
|
||||
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||
|
||||
expect(() => validateOidcConfig()).not.toThrow();
|
||||
});
|
||||
|
||||
it("should accept OIDC_REDIRECT_URI with exactly /auth/oauth2/callback path", () => {
|
||||
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback";
|
||||
|
||||
expect(() => validateOidcConfig()).not.toThrow();
|
||||
});
|
||||
|
||||
it("should warn but not throw when using localhost in production", () => {
|
||||
process.env.NODE_ENV = "production";
|
||||
process.env.OIDC_REDIRECT_URI = "http://localhost:3000/auth/oauth2/callback/authentik";
|
||||
|
||||
const warnSpy = vi.spyOn(console, "warn").mockImplementation(() => {});
|
||||
|
||||
expect(() => validateOidcConfig()).not.toThrow();
|
||||
expect(warnSpy).toHaveBeenCalledWith(
|
||||
expect.stringContaining("OIDC_REDIRECT_URI uses localhost")
|
||||
);
|
||||
|
||||
warnSpy.mockRestore();
|
||||
});
|
||||
|
||||
it("should warn but not throw when using 127.0.0.1 in production", () => {
|
||||
process.env.NODE_ENV = "production";
|
||||
process.env.OIDC_REDIRECT_URI = "http://127.0.0.1:3000/auth/oauth2/callback/authentik";
|
||||
|
||||
const warnSpy = vi.spyOn(console, "warn").mockImplementation(() => {});
|
||||
|
||||
expect(() => validateOidcConfig()).not.toThrow();
|
||||
expect(warnSpy).toHaveBeenCalledWith(
|
||||
expect.stringContaining("OIDC_REDIRECT_URI uses localhost")
|
||||
);
|
||||
|
||||
warnSpy.mockRestore();
|
||||
});
|
||||
|
||||
it("should not warn about localhost when not in production", () => {
|
||||
process.env.NODE_ENV = "development";
|
||||
process.env.OIDC_REDIRECT_URI = "http://localhost:3000/auth/oauth2/callback/authentik";
|
||||
|
||||
const warnSpy = vi.spyOn(console, "warn").mockImplementation(() => {});
|
||||
|
||||
expect(() => validateOidcConfig()).not.toThrow();
|
||||
expect(warnSpy).not.toHaveBeenCalled();
|
||||
|
||||
warnSpy.mockRestore();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("createAuth - genericOAuth PKCE configuration", () => {
|
||||
beforeEach(() => {
|
||||
mockGenericOAuth.mockClear();
|
||||
mockBetterAuth.mockClear();
|
||||
mockPrismaAdapter.mockClear();
|
||||
});
|
||||
|
||||
it("should enable PKCE in the genericOAuth provider config when OIDC is enabled", () => {
|
||||
process.env.OIDC_ENABLED = "true";
|
||||
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic-stack/";
|
||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||
|
||||
const mockPrisma = {} as PrismaClient;
|
||||
createAuth(mockPrisma);
|
||||
|
||||
expect(mockGenericOAuth).toHaveBeenCalledOnce();
|
||||
const callArgs = mockGenericOAuth.mock.calls[0][0] as {
|
||||
config: Array<{ pkce?: boolean; redirectURI?: string }>;
|
||||
};
|
||||
expect(callArgs.config[0].pkce).toBe(true);
|
||||
expect(callArgs.config[0].redirectURI).toBe(
|
||||
"https://app.example.com/auth/oauth2/callback/authentik"
|
||||
);
|
||||
});
|
||||
|
||||
it("should not call genericOAuth when OIDC is disabled", () => {
|
||||
process.env.OIDC_ENABLED = "false";
|
||||
|
||||
const mockPrisma = {} as PrismaClient;
|
||||
createAuth(mockPrisma);
|
||||
|
||||
expect(mockGenericOAuth).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("should throw if OIDC_CLIENT_ID is missing when OIDC is enabled", () => {
|
||||
process.env.OIDC_ENABLED = "true";
|
||||
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic-stack/";
|
||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||
// OIDC_CLIENT_ID deliberately not set
|
||||
|
||||
// validateOidcConfig will throw first, so we need to bypass it
|
||||
// by setting the var then deleting it after validation
|
||||
// Instead, test via the validation path which is fine — but let's
|
||||
// verify the plugin-level guard by using a direct approach:
|
||||
// Set env to pass validateOidcConfig, then delete OIDC_CLIENT_ID
|
||||
// The validateOidcConfig will catch this first, which is correct behavior
|
||||
const mockPrisma = {} as PrismaClient;
|
||||
expect(() => createAuth(mockPrisma)).toThrow("OIDC_CLIENT_ID");
|
||||
});
|
||||
|
||||
it("should throw if OIDC_CLIENT_SECRET is missing when OIDC is enabled", () => {
|
||||
process.env.OIDC_ENABLED = "true";
|
||||
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic-stack/";
|
||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||
// OIDC_CLIENT_SECRET deliberately not set
|
||||
|
||||
const mockPrisma = {} as PrismaClient;
|
||||
expect(() => createAuth(mockPrisma)).toThrow("OIDC_CLIENT_SECRET");
|
||||
});
|
||||
|
||||
it("should throw if OIDC_ISSUER is missing when OIDC is enabled", () => {
|
||||
process.env.OIDC_ENABLED = "true";
|
||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||
// OIDC_ISSUER deliberately not set
|
||||
|
||||
const mockPrisma = {} as PrismaClient;
|
||||
expect(() => createAuth(mockPrisma)).toThrow("OIDC_ISSUER");
|
||||
});
|
||||
});
|
||||
|
||||
describe("getTrustedOrigins", () => {
|
||||
it("should return localhost URLs when NODE_ENV is not production", () => {
|
||||
process.env.NODE_ENV = "development";
|
||||
|
||||
const origins = getTrustedOrigins();
|
||||
|
||||
expect(origins).toContain("http://localhost:3000");
|
||||
expect(origins).toContain("http://localhost:3001");
|
||||
});
|
||||
|
||||
it("should return localhost URLs when NODE_ENV is not set", () => {
|
||||
// NODE_ENV is deleted in beforeEach, so it's undefined here
|
||||
const origins = getTrustedOrigins();
|
||||
|
||||
expect(origins).toContain("http://localhost:3000");
|
||||
expect(origins).toContain("http://localhost:3001");
|
||||
});
|
||||
|
||||
it("should exclude localhost URLs in production", () => {
|
||||
process.env.NODE_ENV = "production";
|
||||
|
||||
const origins = getTrustedOrigins();
|
||||
|
||||
expect(origins).not.toContain("http://localhost:3000");
|
||||
expect(origins).not.toContain("http://localhost:3001");
|
||||
});
|
||||
|
||||
it("should parse TRUSTED_ORIGINS comma-separated values", () => {
|
||||
process.env.TRUSTED_ORIGINS = "https://app.mosaicstack.dev,https://api.mosaicstack.dev";
|
||||
|
||||
const origins = getTrustedOrigins();
|
||||
|
||||
expect(origins).toContain("https://app.mosaicstack.dev");
|
||||
expect(origins).toContain("https://api.mosaicstack.dev");
|
||||
});
|
||||
|
||||
it("should trim whitespace from TRUSTED_ORIGINS entries", () => {
|
||||
process.env.TRUSTED_ORIGINS = " https://app.mosaicstack.dev , https://api.mosaicstack.dev ";
|
||||
|
||||
const origins = getTrustedOrigins();
|
||||
|
||||
expect(origins).toContain("https://app.mosaicstack.dev");
|
||||
expect(origins).toContain("https://api.mosaicstack.dev");
|
||||
});
|
||||
|
||||
it("should filter out empty strings from TRUSTED_ORIGINS", () => {
|
||||
process.env.TRUSTED_ORIGINS = "https://app.mosaicstack.dev,,, ,";
|
||||
|
||||
const origins = getTrustedOrigins();
|
||||
|
||||
expect(origins).toContain("https://app.mosaicstack.dev");
|
||||
// No empty strings in the result
|
||||
origins.forEach((o) => expect(o).not.toBe(""));
|
||||
});
|
||||
|
||||
it("should include NEXT_PUBLIC_APP_URL", () => {
|
||||
process.env.NEXT_PUBLIC_APP_URL = "https://my-app.example.com";
|
||||
|
||||
const origins = getTrustedOrigins();
|
||||
|
||||
expect(origins).toContain("https://my-app.example.com");
|
||||
});
|
||||
|
||||
it("should include NEXT_PUBLIC_API_URL", () => {
|
||||
process.env.NEXT_PUBLIC_API_URL = "https://my-api.example.com";
|
||||
|
||||
const origins = getTrustedOrigins();
|
||||
|
||||
expect(origins).toContain("https://my-api.example.com");
|
||||
});
|
||||
|
||||
it("should deduplicate origins", () => {
|
||||
process.env.NEXT_PUBLIC_APP_URL = "http://localhost:3000";
|
||||
process.env.TRUSTED_ORIGINS = "http://localhost:3000,http://localhost:3001";
|
||||
// NODE_ENV not set, so localhost fallbacks are also added
|
||||
|
||||
const origins = getTrustedOrigins();
|
||||
|
||||
const countLocalhost3000 = origins.filter((o) => o === "http://localhost:3000").length;
|
||||
const countLocalhost3001 = origins.filter((o) => o === "http://localhost:3001").length;
|
||||
expect(countLocalhost3000).toBe(1);
|
||||
expect(countLocalhost3001).toBe(1);
|
||||
});
|
||||
|
||||
it("should handle all env vars missing gracefully", () => {
|
||||
// All env vars deleted in beforeEach; NODE_ENV is also deleted (not production)
|
||||
const origins = getTrustedOrigins();
|
||||
|
||||
// Should still return localhost fallbacks since not in production
|
||||
expect(origins).toContain("http://localhost:3000");
|
||||
expect(origins).toContain("http://localhost:3001");
|
||||
expect(origins).toHaveLength(2);
|
||||
});
|
||||
|
||||
it("should return empty array when all env vars missing in production", () => {
|
||||
process.env.NODE_ENV = "production";
|
||||
|
||||
const origins = getTrustedOrigins();
|
||||
|
||||
expect(origins).toHaveLength(0);
|
||||
});
|
||||
|
||||
it("should combine all sources correctly", () => {
|
||||
process.env.NEXT_PUBLIC_APP_URL = "https://app.mosaicstack.dev";
|
||||
process.env.NEXT_PUBLIC_API_URL = "https://api.mosaicstack.dev";
|
||||
process.env.TRUSTED_ORIGINS = "https://extra.example.com";
|
||||
process.env.NODE_ENV = "development";
|
||||
|
||||
const origins = getTrustedOrigins();
|
||||
|
||||
expect(origins).toContain("https://app.mosaicstack.dev");
|
||||
expect(origins).toContain("https://api.mosaicstack.dev");
|
||||
expect(origins).toContain("https://extra.example.com");
|
||||
expect(origins).toContain("http://localhost:3000");
|
||||
expect(origins).toContain("http://localhost:3001");
|
||||
expect(origins).toHaveLength(5);
|
||||
});
|
||||
|
||||
it("should reject invalid URLs in TRUSTED_ORIGINS with a warning including error details", () => {
|
||||
process.env.TRUSTED_ORIGINS = "not-a-url,https://valid.example.com";
|
||||
process.env.NODE_ENV = "production";
|
||||
|
||||
const warnSpy = vi.spyOn(console, "warn").mockImplementation(() => {});
|
||||
|
||||
const origins = getTrustedOrigins();
|
||||
|
||||
expect(origins).toContain("https://valid.example.com");
|
||||
expect(origins).not.toContain("not-a-url");
|
||||
expect(warnSpy).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Ignoring invalid URL in TRUSTED_ORIGINS: "not-a-url"')
|
||||
);
|
||||
// Verify that error detail is included in the warning
|
||||
const warnCall = warnSpy.mock.calls.find(
|
||||
(call) => typeof call[0] === "string" && call[0].includes("not-a-url")
|
||||
);
|
||||
expect(warnCall).toBeDefined();
|
||||
expect(warnCall![0]).toMatch(/\(.*\)$/);
|
||||
|
||||
warnSpy.mockRestore();
|
||||
});
|
||||
|
||||
it("should reject non-HTTP origins in TRUSTED_ORIGINS with a warning", () => {
|
||||
process.env.TRUSTED_ORIGINS = "ftp://files.example.com,https://valid.example.com";
|
||||
process.env.NODE_ENV = "production";
|
||||
|
||||
const warnSpy = vi.spyOn(console, "warn").mockImplementation(() => {});
|
||||
|
||||
const origins = getTrustedOrigins();
|
||||
|
||||
expect(origins).toContain("https://valid.example.com");
|
||||
expect(origins).not.toContain("ftp://files.example.com");
|
||||
expect(warnSpy).toHaveBeenCalledWith(
|
||||
expect.stringContaining("Ignoring non-HTTP origin in TRUSTED_ORIGINS")
|
||||
);
|
||||
|
||||
warnSpy.mockRestore();
|
||||
});
|
||||
});
|
||||
|
||||
describe("createAuth - session and cookie configuration", () => {
|
||||
beforeEach(() => {
|
||||
mockGenericOAuth.mockClear();
|
||||
mockBetterAuth.mockClear();
|
||||
mockPrismaAdapter.mockClear();
|
||||
});
|
||||
|
||||
it("should configure session expiresIn to 7 days (604800 seconds)", () => {
|
||||
const mockPrisma = {} as PrismaClient;
|
||||
createAuth(mockPrisma);
|
||||
|
||||
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||
session: { expiresIn: number; updateAge: number };
|
||||
};
|
||||
expect(config.session.expiresIn).toBe(604800);
|
||||
});
|
||||
|
||||
it("should configure session updateAge to 2 hours (7200 seconds)", () => {
|
||||
const mockPrisma = {} as PrismaClient;
|
||||
createAuth(mockPrisma);
|
||||
|
||||
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||
session: { expiresIn: number; updateAge: number };
|
||||
};
|
||||
expect(config.session.updateAge).toBe(7200);
|
||||
});
|
||||
|
||||
it("should configure BetterAuth database ID generation as UUID", () => {
|
||||
const mockPrisma = {} as PrismaClient;
|
||||
createAuth(mockPrisma);
|
||||
|
||||
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||
advanced: {
|
||||
database: {
|
||||
generateId: string;
|
||||
};
|
||||
};
|
||||
};
|
||||
expect(config.advanced.database.generateId).toBe("uuid");
|
||||
});
|
||||
|
||||
it("should set httpOnly cookie attribute to true", () => {
|
||||
const mockPrisma = {} as PrismaClient;
|
||||
createAuth(mockPrisma);
|
||||
|
||||
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||
advanced: {
|
||||
defaultCookieAttributes: {
|
||||
httpOnly: boolean;
|
||||
secure: boolean;
|
||||
sameSite: string;
|
||||
};
|
||||
};
|
||||
};
|
||||
expect(config.advanced.defaultCookieAttributes.httpOnly).toBe(true);
|
||||
});
|
||||
|
||||
it("should set sameSite cookie attribute to lax", () => {
|
||||
const mockPrisma = {} as PrismaClient;
|
||||
createAuth(mockPrisma);
|
||||
|
||||
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||
advanced: {
|
||||
defaultCookieAttributes: {
|
||||
httpOnly: boolean;
|
||||
secure: boolean;
|
||||
sameSite: string;
|
||||
};
|
||||
};
|
||||
};
|
||||
expect(config.advanced.defaultCookieAttributes.sameSite).toBe("lax");
|
||||
});
|
||||
|
||||
it("should set secure cookie attribute to true in production", () => {
|
||||
process.env.NODE_ENV = "production";
|
||||
process.env.NEXT_PUBLIC_API_URL = "https://api.example.com";
|
||||
const mockPrisma = {} as PrismaClient;
|
||||
createAuth(mockPrisma);
|
||||
|
||||
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||
advanced: {
|
||||
defaultCookieAttributes: {
|
||||
httpOnly: boolean;
|
||||
secure: boolean;
|
||||
sameSite: string;
|
||||
};
|
||||
};
|
||||
};
|
||||
expect(config.advanced.defaultCookieAttributes.secure).toBe(true);
|
||||
});
|
||||
|
||||
it("should set secure cookie attribute to false in non-production", () => {
|
||||
process.env.NODE_ENV = "development";
|
||||
const mockPrisma = {} as PrismaClient;
|
||||
createAuth(mockPrisma);
|
||||
|
||||
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||
advanced: {
|
||||
defaultCookieAttributes: {
|
||||
httpOnly: boolean;
|
||||
secure: boolean;
|
||||
sameSite: string;
|
||||
};
|
||||
};
|
||||
};
|
||||
expect(config.advanced.defaultCookieAttributes.secure).toBe(false);
|
||||
});
|
||||
|
||||
it("should set cookie domain when COOKIE_DOMAIN env var is present", () => {
|
||||
process.env.COOKIE_DOMAIN = ".mosaicstack.dev";
|
||||
const mockPrisma = {} as PrismaClient;
|
||||
createAuth(mockPrisma);
|
||||
|
||||
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||
advanced: {
|
||||
defaultCookieAttributes: {
|
||||
httpOnly: boolean;
|
||||
secure: boolean;
|
||||
sameSite: string;
|
||||
domain?: string;
|
||||
};
|
||||
};
|
||||
};
|
||||
expect(config.advanced.defaultCookieAttributes.domain).toBe(".mosaicstack.dev");
|
||||
});
|
||||
|
||||
it("should not set cookie domain when COOKIE_DOMAIN env var is absent", () => {
|
||||
delete process.env.COOKIE_DOMAIN;
|
||||
const mockPrisma = {} as PrismaClient;
|
||||
createAuth(mockPrisma);
|
||||
|
||||
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||
advanced: {
|
||||
defaultCookieAttributes: {
|
||||
httpOnly: boolean;
|
||||
secure: boolean;
|
||||
sameSite: string;
|
||||
domain?: string;
|
||||
};
|
||||
};
|
||||
};
|
||||
expect(config.advanced.defaultCookieAttributes.domain).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe("getBetterAuthBaseUrl", () => {
|
||||
it("should prefer BETTER_AUTH_URL when set", () => {
|
||||
process.env.BETTER_AUTH_URL = "https://auth-base.example.com";
|
||||
process.env.NEXT_PUBLIC_API_URL = "https://api.example.com";
|
||||
|
||||
expect(getBetterAuthBaseUrl()).toBe("https://auth-base.example.com");
|
||||
});
|
||||
|
||||
it("should fall back to NEXT_PUBLIC_API_URL when BETTER_AUTH_URL is not set", () => {
|
||||
process.env.NEXT_PUBLIC_API_URL = "https://api.example.com";
|
||||
|
||||
expect(getBetterAuthBaseUrl()).toBe("https://api.example.com");
|
||||
});
|
||||
|
||||
it("should throw when base URL is invalid", () => {
|
||||
process.env.BETTER_AUTH_URL = "not-a-url";
|
||||
|
||||
expect(() => getBetterAuthBaseUrl()).toThrow("BetterAuth base URL must be a valid URL");
|
||||
});
|
||||
|
||||
it("should throw when base URL is missing in production", () => {
|
||||
process.env.NODE_ENV = "production";
|
||||
|
||||
expect(() => getBetterAuthBaseUrl()).toThrow("Missing BetterAuth base URL in production");
|
||||
});
|
||||
|
||||
it("should throw when base URL is not https in production", () => {
|
||||
process.env.NODE_ENV = "production";
|
||||
process.env.BETTER_AUTH_URL = "http://api.example.com";
|
||||
|
||||
expect(() => getBetterAuthBaseUrl()).toThrow(
|
||||
"BetterAuth base URL must use https in production"
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("createAuth - baseURL wiring", () => {
|
||||
beforeEach(() => {
|
||||
mockBetterAuth.mockClear();
|
||||
mockPrismaAdapter.mockClear();
|
||||
});
|
||||
|
||||
it("should pass BETTER_AUTH_URL into BetterAuth config", () => {
|
||||
process.env.BETTER_AUTH_URL = "https://api.mosaicstack.dev";
|
||||
|
||||
const mockPrisma = {} as PrismaClient;
|
||||
createAuth(mockPrisma);
|
||||
|
||||
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||
const config = mockBetterAuth.mock.calls[0][0] as { baseURL?: string };
|
||||
expect(config.baseURL).toBe("https://api.mosaicstack.dev");
|
||||
});
|
||||
|
||||
it("should pass NEXT_PUBLIC_API_URL into BetterAuth config when BETTER_AUTH_URL is absent", () => {
|
||||
process.env.NEXT_PUBLIC_API_URL = "https://api.fallback.dev";
|
||||
|
||||
const mockPrisma = {} as PrismaClient;
|
||||
createAuth(mockPrisma);
|
||||
|
||||
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||
const config = mockBetterAuth.mock.calls[0][0] as { baseURL?: string };
|
||||
expect(config.baseURL).toBe("https://api.fallback.dev");
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -3,37 +3,278 @@ import { prismaAdapter } from "better-auth/adapters/prisma";
|
||||
import { genericOAuth } from "better-auth/plugins";
|
||||
import type { PrismaClient } from "@prisma/client";
|
||||
|
||||
/**
|
||||
* Required OIDC environment variables when OIDC is enabled
|
||||
*/
|
||||
const REQUIRED_OIDC_ENV_VARS = [
|
||||
"OIDC_ISSUER",
|
||||
"OIDC_CLIENT_ID",
|
||||
"OIDC_CLIENT_SECRET",
|
||||
"OIDC_REDIRECT_URI",
|
||||
] as const;
|
||||
|
||||
/**
|
||||
* Resolve BetterAuth base URL from explicit auth URL or API URL.
|
||||
* BetterAuth uses this to generate absolute callback/error URLs.
|
||||
*/
|
||||
export function getBetterAuthBaseUrl(): string | undefined {
|
||||
const configured = process.env.BETTER_AUTH_URL ?? process.env.NEXT_PUBLIC_API_URL;
|
||||
|
||||
if (!configured || configured.trim() === "") {
|
||||
if (process.env.NODE_ENV === "production") {
|
||||
throw new Error(
|
||||
"Missing BetterAuth base URL in production. Set BETTER_AUTH_URL (preferred) or NEXT_PUBLIC_API_URL."
|
||||
);
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
let parsed: URL;
|
||||
try {
|
||||
parsed = new URL(configured);
|
||||
} catch (urlError: unknown) {
|
||||
const detail = urlError instanceof Error ? urlError.message : String(urlError);
|
||||
throw new Error(
|
||||
`BetterAuth base URL must be a valid URL. Current value: "${configured}". Parse error: ${detail}.`
|
||||
);
|
||||
}
|
||||
|
||||
if (process.env.NODE_ENV === "production" && parsed.protocol !== "https:") {
|
||||
throw new Error(
|
||||
`BetterAuth base URL must use https in production. Current value: "${configured}".`
|
||||
);
|
||||
}
|
||||
|
||||
return parsed.origin;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if OIDC authentication is enabled via environment variable
|
||||
*/
|
||||
export function isOidcEnabled(): boolean {
|
||||
const enabled = process.env.OIDC_ENABLED;
|
||||
return enabled === "true" || enabled === "1";
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates OIDC configuration at startup.
|
||||
* Throws an error if OIDC is enabled but required environment variables are missing.
|
||||
*
|
||||
* @throws Error if OIDC is enabled but required vars are missing or empty
|
||||
*/
|
||||
export function validateOidcConfig(): void {
|
||||
if (!isOidcEnabled()) {
|
||||
// OIDC is disabled, no validation needed
|
||||
return;
|
||||
}
|
||||
|
||||
const missingVars: string[] = [];
|
||||
|
||||
for (const envVar of REQUIRED_OIDC_ENV_VARS) {
|
||||
const value = process.env[envVar];
|
||||
if (!value || value.trim() === "") {
|
||||
missingVars.push(envVar);
|
||||
}
|
||||
}
|
||||
|
||||
if (missingVars.length > 0) {
|
||||
throw new Error(
|
||||
`OIDC authentication is enabled (OIDC_ENABLED=true) but required environment variables are missing or empty: ${missingVars.join(", ")}. ` +
|
||||
`Either set these variables or disable OIDC by setting OIDC_ENABLED=false.`
|
||||
);
|
||||
}
|
||||
|
||||
// Additional validation: OIDC_ISSUER should end with a trailing slash for proper discovery URL
|
||||
const issuer = process.env.OIDC_ISSUER;
|
||||
if (issuer && !issuer.endsWith("/")) {
|
||||
throw new Error(
|
||||
`OIDC_ISSUER must end with a trailing slash (/). Current value: "${issuer}". ` +
|
||||
`The discovery URL is constructed by appending ".well-known/openid-configuration" to the issuer.`
|
||||
);
|
||||
}
|
||||
|
||||
// Additional validation: OIDC_REDIRECT_URI must be a valid URL with /auth/oauth2/callback path
|
||||
validateRedirectUri();
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates the OIDC_REDIRECT_URI environment variable.
|
||||
* - Must be a parseable URL
|
||||
* - Path must start with /auth/oauth2/callback
|
||||
* - Warns (but does not throw) if using localhost in production
|
||||
*
|
||||
* @throws Error if URL is invalid or path does not start with /auth/oauth2/callback
|
||||
*/
|
||||
function validateRedirectUri(): void {
|
||||
const redirectUri = process.env.OIDC_REDIRECT_URI;
|
||||
if (!redirectUri || redirectUri.trim() === "") {
|
||||
// Already caught by REQUIRED_OIDC_ENV_VARS check above
|
||||
return;
|
||||
}
|
||||
|
||||
let parsed: URL;
|
||||
try {
|
||||
parsed = new URL(redirectUri);
|
||||
} catch (urlError: unknown) {
|
||||
const detail = urlError instanceof Error ? urlError.message : String(urlError);
|
||||
throw new Error(
|
||||
`OIDC_REDIRECT_URI must be a valid URL. Current value: "${redirectUri}". ` +
|
||||
`Parse error: ${detail}. ` +
|
||||
`Example: "https://api.example.com/auth/oauth2/callback/authentik".`
|
||||
);
|
||||
}
|
||||
|
||||
if (!parsed.pathname.startsWith("/auth/oauth2/callback")) {
|
||||
throw new Error(
|
||||
`OIDC_REDIRECT_URI path must start with "/auth/oauth2/callback". Current path: "${parsed.pathname}". ` +
|
||||
`Example: "https://api.example.com/auth/oauth2/callback/authentik".`
|
||||
);
|
||||
}
|
||||
|
||||
if (
|
||||
process.env.NODE_ENV === "production" &&
|
||||
(parsed.hostname === "localhost" || parsed.hostname === "127.0.0.1")
|
||||
) {
|
||||
console.warn(
|
||||
`[AUTH WARNING] OIDC_REDIRECT_URI uses localhost ("${redirectUri}") in production. ` +
|
||||
`This is likely a misconfiguration. Use a public domain for production deployments.`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get OIDC plugins configuration.
|
||||
* Returns empty array if OIDC is disabled, otherwise returns configured OAuth plugin.
|
||||
*/
|
||||
function getOidcPlugins(): ReturnType<typeof genericOAuth>[] {
|
||||
if (!isOidcEnabled()) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const clientId = process.env.OIDC_CLIENT_ID;
|
||||
const clientSecret = process.env.OIDC_CLIENT_SECRET;
|
||||
const issuer = process.env.OIDC_ISSUER;
|
||||
const redirectUri = process.env.OIDC_REDIRECT_URI;
|
||||
|
||||
if (!clientId) {
|
||||
throw new Error("OIDC_CLIENT_ID is required when OIDC is enabled but was not set.");
|
||||
}
|
||||
if (!clientSecret) {
|
||||
throw new Error("OIDC_CLIENT_SECRET is required when OIDC is enabled but was not set.");
|
||||
}
|
||||
if (!issuer) {
|
||||
throw new Error("OIDC_ISSUER is required when OIDC is enabled but was not set.");
|
||||
}
|
||||
if (!redirectUri) {
|
||||
throw new Error("OIDC_REDIRECT_URI is required when OIDC is enabled but was not set.");
|
||||
}
|
||||
|
||||
return [
|
||||
genericOAuth({
|
||||
config: [
|
||||
{
|
||||
providerId: "authentik",
|
||||
clientId,
|
||||
clientSecret,
|
||||
discoveryUrl: `${issuer}.well-known/openid-configuration`,
|
||||
redirectURI: redirectUri,
|
||||
pkce: true,
|
||||
scopes: ["openid", "profile", "email"],
|
||||
},
|
||||
],
|
||||
}),
|
||||
];
|
||||
}
|
||||
|
||||
/**
|
||||
* Build the list of trusted origins from environment variables.
|
||||
*
|
||||
* Sources (in order):
|
||||
* - NEXT_PUBLIC_APP_URL — primary frontend URL
|
||||
* - NEXT_PUBLIC_API_URL — API's own origin
|
||||
* - TRUSTED_ORIGINS — comma-separated additional origins
|
||||
* - localhost fallbacks — only when NODE_ENV !== "production"
|
||||
*
|
||||
* The returned list is deduplicated and empty strings are filtered out.
|
||||
*/
|
||||
export function getTrustedOrigins(): string[] {
|
||||
const origins: string[] = [];
|
||||
|
||||
// Environment-driven origins
|
||||
if (process.env.NEXT_PUBLIC_APP_URL) {
|
||||
origins.push(process.env.NEXT_PUBLIC_APP_URL);
|
||||
}
|
||||
|
||||
if (process.env.NEXT_PUBLIC_API_URL) {
|
||||
origins.push(process.env.NEXT_PUBLIC_API_URL);
|
||||
}
|
||||
|
||||
// Comma-separated additional origins (validated)
|
||||
if (process.env.TRUSTED_ORIGINS) {
|
||||
const rawOrigins = process.env.TRUSTED_ORIGINS.split(",")
|
||||
.map((o) => o.trim())
|
||||
.filter((o) => o !== "");
|
||||
for (const origin of rawOrigins) {
|
||||
try {
|
||||
const parsed = new URL(origin);
|
||||
if (parsed.protocol !== "http:" && parsed.protocol !== "https:") {
|
||||
console.warn(`[AUTH] Ignoring non-HTTP origin in TRUSTED_ORIGINS: "${origin}"`);
|
||||
continue;
|
||||
}
|
||||
origins.push(origin);
|
||||
} catch (urlError: unknown) {
|
||||
const detail = urlError instanceof Error ? urlError.message : String(urlError);
|
||||
console.warn(`[AUTH] Ignoring invalid URL in TRUSTED_ORIGINS: "${origin}" (${detail})`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Localhost fallbacks for development only
|
||||
if (process.env.NODE_ENV !== "production") {
|
||||
origins.push("http://localhost:3000", "http://localhost:3001");
|
||||
}
|
||||
|
||||
// Deduplicate and filter empty strings
|
||||
return [...new Set(origins)].filter((o) => o !== "");
|
||||
}
|
||||
|
||||
export function createAuth(prisma: PrismaClient) {
|
||||
// Validate OIDC configuration at startup - fail fast if misconfigured
|
||||
validateOidcConfig();
|
||||
|
||||
const baseURL = getBetterAuthBaseUrl();
|
||||
|
||||
return betterAuth({
|
||||
baseURL,
|
||||
basePath: "/auth",
|
||||
database: prismaAdapter(prisma, {
|
||||
provider: "postgresql",
|
||||
}),
|
||||
emailAndPassword: {
|
||||
enabled: true, // Enable for now, can be disabled later
|
||||
enabled: true,
|
||||
},
|
||||
plugins: [...getOidcPlugins()],
|
||||
logger: {
|
||||
disabled: false,
|
||||
level: "error",
|
||||
},
|
||||
plugins: [
|
||||
genericOAuth({
|
||||
config: [
|
||||
{
|
||||
providerId: "authentik",
|
||||
clientId: process.env.OIDC_CLIENT_ID ?? "",
|
||||
clientSecret: process.env.OIDC_CLIENT_SECRET ?? "",
|
||||
discoveryUrl: `${process.env.OIDC_ISSUER ?? ""}.well-known/openid-configuration`,
|
||||
scopes: ["openid", "profile", "email"],
|
||||
},
|
||||
],
|
||||
}),
|
||||
],
|
||||
session: {
|
||||
expiresIn: 60 * 60 * 24, // 24 hours
|
||||
updateAge: 60 * 60 * 24, // 24 hours
|
||||
expiresIn: 60 * 60 * 24 * 7, // 7 days absolute max
|
||||
updateAge: 60 * 60 * 2, // 2 hours — minimum session age before BetterAuth refreshes the expiry on next request
|
||||
},
|
||||
trustedOrigins: [
|
||||
process.env.NEXT_PUBLIC_APP_URL ?? "http://localhost:3000",
|
||||
"http://localhost:3001", // API origin (dev)
|
||||
"https://app.mosaicstack.dev", // Production web
|
||||
"https://api.mosaicstack.dev", // Production API
|
||||
],
|
||||
advanced: {
|
||||
database: {
|
||||
// BetterAuth's default ID generator emits opaque strings; our auth tables use UUID PKs.
|
||||
generateId: "uuid",
|
||||
},
|
||||
defaultCookieAttributes: {
|
||||
httpOnly: true,
|
||||
secure: process.env.NODE_ENV === "production",
|
||||
sameSite: "lax" as const,
|
||||
...(process.env.COOKIE_DOMAIN ? { domain: process.env.COOKIE_DOMAIN } : {}),
|
||||
},
|
||||
},
|
||||
trustedOrigins: getTrustedOrigins(),
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -1,15 +1,41 @@
|
||||
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||
|
||||
// Mock better-auth modules before importing AuthService (pulled in by AuthController)
|
||||
vi.mock("better-auth/node", () => ({
|
||||
toNodeHandler: vi.fn().mockReturnValue(vi.fn()),
|
||||
}));
|
||||
|
||||
vi.mock("better-auth", () => ({
|
||||
betterAuth: vi.fn().mockReturnValue({
|
||||
handler: vi.fn(),
|
||||
api: { getSession: vi.fn() },
|
||||
}),
|
||||
}));
|
||||
|
||||
vi.mock("better-auth/adapters/prisma", () => ({
|
||||
prismaAdapter: vi.fn().mockReturnValue({}),
|
||||
}));
|
||||
|
||||
vi.mock("better-auth/plugins", () => ({
|
||||
genericOAuth: vi.fn().mockReturnValue({ id: "generic-oauth" }),
|
||||
}));
|
||||
|
||||
import { Test, TestingModule } from "@nestjs/testing";
|
||||
import type { AuthUser } from "@mosaic/shared";
|
||||
import { HttpException, HttpStatus, UnauthorizedException } from "@nestjs/common";
|
||||
import type { AuthUser, AuthSession } from "@mosaic/shared";
|
||||
import type { Request as ExpressRequest, Response as ExpressResponse } from "express";
|
||||
import { AuthController } from "./auth.controller";
|
||||
import { AuthService } from "./auth.service";
|
||||
|
||||
describe("AuthController", () => {
|
||||
let controller: AuthController;
|
||||
let authService: AuthService;
|
||||
|
||||
const mockNodeHandler = vi.fn().mockResolvedValue(undefined);
|
||||
|
||||
const mockAuthService = {
|
||||
getAuth: vi.fn(),
|
||||
getNodeHandler: vi.fn().mockReturnValue(mockNodeHandler),
|
||||
getAuthConfig: vi.fn(),
|
||||
};
|
||||
|
||||
beforeEach(async () => {
|
||||
@@ -24,30 +50,338 @@ describe("AuthController", () => {
|
||||
}).compile();
|
||||
|
||||
controller = module.get<AuthController>(AuthController);
|
||||
authService = module.get<AuthService>(AuthService);
|
||||
|
||||
vi.clearAllMocks();
|
||||
|
||||
// Restore mock implementations after clearAllMocks
|
||||
mockAuthService.getNodeHandler.mockReturnValue(mockNodeHandler);
|
||||
mockNodeHandler.mockResolvedValue(undefined);
|
||||
});
|
||||
|
||||
describe("handleAuth", () => {
|
||||
it("should call BetterAuth handler", async () => {
|
||||
const mockHandler = vi.fn().mockResolvedValue({ status: 200 });
|
||||
mockAuthService.getAuth.mockReturnValue({ handler: mockHandler });
|
||||
|
||||
it("should delegate to BetterAuth node handler with Express req/res", async () => {
|
||||
const mockRequest = {
|
||||
method: "GET",
|
||||
url: "/auth/session",
|
||||
headers: {},
|
||||
ip: "127.0.0.1",
|
||||
socket: { remoteAddress: "127.0.0.1" },
|
||||
} as unknown as ExpressRequest;
|
||||
|
||||
const mockResponse = {
|
||||
headersSent: false,
|
||||
} as unknown as ExpressResponse;
|
||||
|
||||
await controller.handleAuth(mockRequest, mockResponse);
|
||||
|
||||
expect(mockAuthService.getNodeHandler).toHaveBeenCalled();
|
||||
expect(mockNodeHandler).toHaveBeenCalledWith(mockRequest, mockResponse);
|
||||
});
|
||||
|
||||
it("should throw HttpException with 500 when handler throws before headers sent", async () => {
|
||||
const handlerError = new Error("BetterAuth internal failure");
|
||||
mockNodeHandler.mockRejectedValueOnce(handlerError);
|
||||
|
||||
const mockRequest = {
|
||||
method: "POST",
|
||||
url: "/auth/sign-in",
|
||||
headers: {},
|
||||
ip: "192.168.1.10",
|
||||
socket: { remoteAddress: "192.168.1.10" },
|
||||
} as unknown as ExpressRequest;
|
||||
|
||||
const mockResponse = {
|
||||
headersSent: false,
|
||||
} as unknown as ExpressResponse;
|
||||
|
||||
try {
|
||||
await controller.handleAuth(mockRequest, mockResponse);
|
||||
// Should not reach here
|
||||
expect.unreachable("Expected HttpException to be thrown");
|
||||
} catch (err) {
|
||||
expect(err).toBeInstanceOf(HttpException);
|
||||
expect((err as HttpException).getStatus()).toBe(HttpStatus.INTERNAL_SERVER_ERROR);
|
||||
expect((err as HttpException).getResponse()).toBe(
|
||||
"Unable to complete authentication. Please try again in a moment."
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
it("should preserve better-call status and body for handler APIError", async () => {
|
||||
const apiError = {
|
||||
statusCode: HttpStatus.BAD_REQUEST,
|
||||
message: "Invalid OAuth configuration",
|
||||
body: {
|
||||
message: "Invalid OAuth configuration",
|
||||
code: "INVALID_OAUTH_CONFIGURATION",
|
||||
},
|
||||
};
|
||||
mockNodeHandler.mockRejectedValueOnce(apiError);
|
||||
|
||||
const mockRequest = {
|
||||
method: "POST",
|
||||
url: "/auth/sign-in/oauth2",
|
||||
headers: {},
|
||||
ip: "192.168.1.10",
|
||||
socket: { remoteAddress: "192.168.1.10" },
|
||||
} as unknown as ExpressRequest;
|
||||
|
||||
const mockResponse = {
|
||||
headersSent: false,
|
||||
} as unknown as ExpressResponse;
|
||||
|
||||
try {
|
||||
await controller.handleAuth(mockRequest, mockResponse);
|
||||
expect.unreachable("Expected HttpException to be thrown");
|
||||
} catch (err) {
|
||||
expect(err).toBeInstanceOf(HttpException);
|
||||
expect((err as HttpException).getStatus()).toBe(HttpStatus.BAD_REQUEST);
|
||||
expect((err as HttpException).getResponse()).toMatchObject({
|
||||
message: "Invalid OAuth configuration",
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
it("should log warning and not throw when handler throws after headers sent", async () => {
|
||||
const handlerError = new Error("Stream interrupted");
|
||||
mockNodeHandler.mockRejectedValueOnce(handlerError);
|
||||
|
||||
const mockRequest = {
|
||||
method: "POST",
|
||||
url: "/auth/sign-up",
|
||||
headers: {},
|
||||
ip: "10.0.0.5",
|
||||
socket: { remoteAddress: "10.0.0.5" },
|
||||
} as unknown as ExpressRequest;
|
||||
|
||||
const mockResponse = {
|
||||
headersSent: true,
|
||||
} as unknown as ExpressResponse;
|
||||
|
||||
// Should not throw when headers already sent
|
||||
await expect(controller.handleAuth(mockRequest, mockResponse)).resolves.toBeUndefined();
|
||||
});
|
||||
|
||||
it("should handle non-Error thrown values", async () => {
|
||||
mockNodeHandler.mockRejectedValueOnce("string error");
|
||||
|
||||
const mockRequest = {
|
||||
method: "GET",
|
||||
url: "/auth/callback",
|
||||
headers: {},
|
||||
ip: "127.0.0.1",
|
||||
socket: { remoteAddress: "127.0.0.1" },
|
||||
} as unknown as ExpressRequest;
|
||||
|
||||
const mockResponse = {
|
||||
headersSent: false,
|
||||
} as unknown as ExpressResponse;
|
||||
|
||||
await expect(controller.handleAuth(mockRequest, mockResponse)).rejects.toThrow(HttpException);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getConfig", () => {
|
||||
it("should return auth config from service", async () => {
|
||||
const mockConfig = {
|
||||
providers: [
|
||||
{ id: "email", name: "Email", type: "credentials" as const },
|
||||
{ id: "authentik", name: "Authentik", type: "oauth" as const },
|
||||
],
|
||||
};
|
||||
mockAuthService.getAuthConfig.mockResolvedValue(mockConfig);
|
||||
|
||||
const result = await controller.getConfig();
|
||||
|
||||
expect(result).toEqual(mockConfig);
|
||||
expect(mockAuthService.getAuthConfig).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("should return correct response shape with only email provider", async () => {
|
||||
const mockConfig = {
|
||||
providers: [{ id: "email", name: "Email", type: "credentials" as const }],
|
||||
};
|
||||
mockAuthService.getAuthConfig.mockResolvedValue(mockConfig);
|
||||
|
||||
const result = await controller.getConfig();
|
||||
|
||||
expect(result).toEqual(mockConfig);
|
||||
expect(result.providers).toHaveLength(1);
|
||||
expect(result.providers[0]).toEqual({
|
||||
id: "email",
|
||||
name: "Email",
|
||||
type: "credentials",
|
||||
});
|
||||
});
|
||||
|
||||
it("should never leak secrets in auth config response", async () => {
|
||||
// Set ALL sensitive environment variables with known values
|
||||
const sensitiveEnv: Record<string, string> = {
|
||||
OIDC_CLIENT_SECRET: "test-client-secret",
|
||||
OIDC_CLIENT_ID: "test-client-id",
|
||||
OIDC_ISSUER: "https://auth.test.com/",
|
||||
OIDC_REDIRECT_URI: "https://app.test.com/auth/oauth2/callback/authentik",
|
||||
BETTER_AUTH_SECRET: "test-better-auth-secret",
|
||||
JWT_SECRET: "test-jwt-secret",
|
||||
CSRF_SECRET: "test-csrf-secret",
|
||||
DATABASE_URL: "postgresql://user:password@localhost/db",
|
||||
OIDC_ENABLED: "true",
|
||||
};
|
||||
|
||||
await controller.handleAuth(mockRequest);
|
||||
const originalEnv: Record<string, string | undefined> = {};
|
||||
for (const [key, value] of Object.entries(sensitiveEnv)) {
|
||||
originalEnv[key] = process.env[key];
|
||||
process.env[key] = value;
|
||||
}
|
||||
|
||||
expect(mockAuthService.getAuth).toHaveBeenCalled();
|
||||
expect(mockHandler).toHaveBeenCalledWith(mockRequest);
|
||||
try {
|
||||
// Mock the service to return a realistic config with both providers
|
||||
const mockConfig = {
|
||||
providers: [
|
||||
{ id: "email", name: "Email", type: "credentials" as const },
|
||||
{ id: "authentik", name: "Authentik", type: "oauth" as const },
|
||||
],
|
||||
};
|
||||
mockAuthService.getAuthConfig.mockResolvedValue(mockConfig);
|
||||
|
||||
const result = await controller.getConfig();
|
||||
const serialized = JSON.stringify(result);
|
||||
|
||||
// Assert no secret values leak into the serialized response
|
||||
const forbiddenPatterns = [
|
||||
"test-client-secret",
|
||||
"test-client-id",
|
||||
"test-better-auth-secret",
|
||||
"test-jwt-secret",
|
||||
"test-csrf-secret",
|
||||
"auth.test.com",
|
||||
"callback",
|
||||
"password",
|
||||
];
|
||||
|
||||
for (const pattern of forbiddenPatterns) {
|
||||
expect(serialized).not.toContain(pattern);
|
||||
}
|
||||
|
||||
// Assert response contains ONLY expected fields
|
||||
expect(result).toHaveProperty("providers");
|
||||
expect(Object.keys(result)).toEqual(["providers"]);
|
||||
expect(Array.isArray(result.providers)).toBe(true);
|
||||
|
||||
for (const provider of result.providers) {
|
||||
const keys = Object.keys(provider);
|
||||
expect(keys).toEqual(expect.arrayContaining(["id", "name", "type"]));
|
||||
expect(keys).toHaveLength(3);
|
||||
}
|
||||
} finally {
|
||||
// Restore original environment
|
||||
for (const [key] of Object.entries(sensitiveEnv)) {
|
||||
if (originalEnv[key] === undefined) {
|
||||
delete process.env[key];
|
||||
} else {
|
||||
process.env[key] = originalEnv[key];
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("getSession", () => {
|
||||
it("should return user and session data", () => {
|
||||
const mockUser: AuthUser = {
|
||||
id: "user-123",
|
||||
email: "test@example.com",
|
||||
name: "Test User",
|
||||
workspaceId: "workspace-123",
|
||||
};
|
||||
|
||||
const mockSession = {
|
||||
id: "session-123",
|
||||
token: "session-token",
|
||||
expiresAt: new Date(Date.now() + 86400000),
|
||||
};
|
||||
|
||||
const mockRequest = {
|
||||
user: mockUser,
|
||||
session: mockSession,
|
||||
};
|
||||
|
||||
const result = controller.getSession(mockRequest);
|
||||
|
||||
const expected: AuthSession = {
|
||||
user: mockUser,
|
||||
session: {
|
||||
id: mockSession.id,
|
||||
token: mockSession.token,
|
||||
expiresAt: mockSession.expiresAt,
|
||||
},
|
||||
};
|
||||
|
||||
expect(result).toEqual(expected);
|
||||
});
|
||||
|
||||
it("should throw UnauthorizedException when req.user is undefined", () => {
|
||||
const mockRequest = {
|
||||
session: {
|
||||
id: "session-123",
|
||||
token: "session-token",
|
||||
expiresAt: new Date(Date.now() + 86400000),
|
||||
},
|
||||
};
|
||||
|
||||
expect(() => controller.getSession(mockRequest as never)).toThrow(UnauthorizedException);
|
||||
expect(() => controller.getSession(mockRequest as never)).toThrow(
|
||||
"Missing authentication context"
|
||||
);
|
||||
});
|
||||
|
||||
it("should throw UnauthorizedException when req.session is undefined", () => {
|
||||
const mockRequest = {
|
||||
user: {
|
||||
id: "user-123",
|
||||
email: "test@example.com",
|
||||
name: "Test User",
|
||||
},
|
||||
};
|
||||
|
||||
expect(() => controller.getSession(mockRequest as never)).toThrow(UnauthorizedException);
|
||||
expect(() => controller.getSession(mockRequest as never)).toThrow(
|
||||
"Missing authentication context"
|
||||
);
|
||||
});
|
||||
|
||||
it("should throw UnauthorizedException when both req.user and req.session are undefined", () => {
|
||||
const mockRequest = {};
|
||||
|
||||
expect(() => controller.getSession(mockRequest as never)).toThrow(UnauthorizedException);
|
||||
expect(() => controller.getSession(mockRequest as never)).toThrow(
|
||||
"Missing authentication context"
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getProfile", () => {
|
||||
it("should return user profile", () => {
|
||||
it("should return complete user profile with identity fields", () => {
|
||||
const mockUser: AuthUser = {
|
||||
id: "user-123",
|
||||
email: "test@example.com",
|
||||
name: "Test User",
|
||||
image: "https://example.com/avatar.jpg",
|
||||
emailVerified: true,
|
||||
};
|
||||
|
||||
const result = controller.getProfile(mockUser);
|
||||
|
||||
expect(result).toEqual({
|
||||
id: mockUser.id,
|
||||
email: mockUser.email,
|
||||
name: mockUser.name,
|
||||
image: mockUser.image,
|
||||
emailVerified: mockUser.emailVerified,
|
||||
});
|
||||
});
|
||||
|
||||
it("should return user profile with only required fields", () => {
|
||||
const mockUser: AuthUser = {
|
||||
id: "user-123",
|
||||
email: "test@example.com",
|
||||
@@ -61,6 +395,95 @@ describe("AuthController", () => {
|
||||
email: mockUser.email,
|
||||
name: mockUser.name,
|
||||
});
|
||||
// Workspace fields are not included — served by GET /api/workspaces
|
||||
expect(result).not.toHaveProperty("workspaceId");
|
||||
expect(result).not.toHaveProperty("currentWorkspaceId");
|
||||
expect(result).not.toHaveProperty("workspaceRole");
|
||||
});
|
||||
});
|
||||
|
||||
describe("getClientIp (via handleAuth)", () => {
|
||||
it("should extract IP from X-Forwarded-For with single IP", async () => {
|
||||
const mockRequest = {
|
||||
method: "GET",
|
||||
url: "/auth/callback",
|
||||
headers: { "x-forwarded-for": "203.0.113.50" },
|
||||
ip: "127.0.0.1",
|
||||
socket: { remoteAddress: "127.0.0.1" },
|
||||
} as unknown as ExpressRequest;
|
||||
|
||||
const mockResponse = {
|
||||
headersSent: false,
|
||||
} as unknown as ExpressResponse;
|
||||
|
||||
// Spy on the logger to verify the extracted IP
|
||||
const debugSpy = vi.spyOn(controller["logger"], "debug");
|
||||
|
||||
await controller.handleAuth(mockRequest, mockResponse);
|
||||
|
||||
expect(debugSpy).toHaveBeenCalledWith(expect.stringContaining("203.0.113.50"));
|
||||
});
|
||||
|
||||
it("should extract first IP from X-Forwarded-For with comma-separated IPs", async () => {
|
||||
const mockRequest = {
|
||||
method: "GET",
|
||||
url: "/auth/callback",
|
||||
headers: { "x-forwarded-for": "203.0.113.50, 70.41.3.18" },
|
||||
ip: "127.0.0.1",
|
||||
socket: { remoteAddress: "127.0.0.1" },
|
||||
} as unknown as ExpressRequest;
|
||||
|
||||
const mockResponse = {
|
||||
headersSent: false,
|
||||
} as unknown as ExpressResponse;
|
||||
|
||||
const debugSpy = vi.spyOn(controller["logger"], "debug");
|
||||
|
||||
await controller.handleAuth(mockRequest, mockResponse);
|
||||
|
||||
expect(debugSpy).toHaveBeenCalledWith(expect.stringContaining("203.0.113.50"));
|
||||
// Ensure it does NOT contain the second IP in the extracted position
|
||||
expect(debugSpy).toHaveBeenCalledWith(expect.not.stringContaining("70.41.3.18"));
|
||||
});
|
||||
|
||||
it("should extract first IP from X-Forwarded-For as array", async () => {
|
||||
const mockRequest = {
|
||||
method: "GET",
|
||||
url: "/auth/callback",
|
||||
headers: { "x-forwarded-for": ["203.0.113.50", "70.41.3.18"] },
|
||||
ip: "127.0.0.1",
|
||||
socket: { remoteAddress: "127.0.0.1" },
|
||||
} as unknown as ExpressRequest;
|
||||
|
||||
const mockResponse = {
|
||||
headersSent: false,
|
||||
} as unknown as ExpressResponse;
|
||||
|
||||
const debugSpy = vi.spyOn(controller["logger"], "debug");
|
||||
|
||||
await controller.handleAuth(mockRequest, mockResponse);
|
||||
|
||||
expect(debugSpy).toHaveBeenCalledWith(expect.stringContaining("203.0.113.50"));
|
||||
});
|
||||
|
||||
it("should fallback to req.ip when no X-Forwarded-For header", async () => {
|
||||
const mockRequest = {
|
||||
method: "GET",
|
||||
url: "/auth/callback",
|
||||
headers: {},
|
||||
ip: "192.168.1.100",
|
||||
socket: { remoteAddress: "192.168.1.100" },
|
||||
} as unknown as ExpressRequest;
|
||||
|
||||
const mockResponse = {
|
||||
headersSent: false,
|
||||
} as unknown as ExpressResponse;
|
||||
|
||||
const debugSpy = vi.spyOn(controller["logger"], "debug");
|
||||
|
||||
await controller.handleAuth(mockRequest, mockResponse);
|
||||
|
||||
expect(debugSpy).toHaveBeenCalledWith(expect.stringContaining("192.168.1.100"));
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,26 +1,211 @@
|
||||
import { Controller, All, Req, Get, UseGuards } from "@nestjs/common";
|
||||
import type { AuthUser } from "@mosaic/shared";
|
||||
import {
|
||||
Controller,
|
||||
All,
|
||||
Req,
|
||||
Res,
|
||||
Get,
|
||||
Header,
|
||||
UseGuards,
|
||||
Request,
|
||||
Logger,
|
||||
HttpException,
|
||||
HttpStatus,
|
||||
UnauthorizedException,
|
||||
} from "@nestjs/common";
|
||||
import { Throttle } from "@nestjs/throttler";
|
||||
import type { Request as ExpressRequest, Response as ExpressResponse } from "express";
|
||||
import type { AuthUser, AuthSession, AuthConfigResponse } from "@mosaic/shared";
|
||||
import { AuthService } from "./auth.service";
|
||||
import { AuthGuard } from "./guards/auth.guard";
|
||||
import { CurrentUser } from "./decorators/current-user.decorator";
|
||||
import { SkipCsrf } from "../common/decorators/skip-csrf.decorator";
|
||||
import type { AuthenticatedRequest } from "./types/better-auth-request.interface";
|
||||
|
||||
@Controller("auth")
|
||||
export class AuthController {
|
||||
private readonly logger = new Logger(AuthController.name);
|
||||
|
||||
constructor(private readonly authService: AuthService) {}
|
||||
|
||||
/**
|
||||
* Get current session
|
||||
* Returns user and session data for authenticated user
|
||||
*/
|
||||
@Get("session")
|
||||
@UseGuards(AuthGuard)
|
||||
getSession(@Request() req: AuthenticatedRequest): AuthSession {
|
||||
// Defense-in-depth: AuthGuard should guarantee these, but if someone adds
|
||||
// a route with AuthenticatedRequest and forgets @UseGuards(AuthGuard),
|
||||
// TypeScript types won't help at runtime.
|
||||
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
|
||||
if (!req.user || !req.session) {
|
||||
throw new UnauthorizedException("Missing authentication context");
|
||||
}
|
||||
|
||||
return {
|
||||
user: req.user,
|
||||
session: {
|
||||
id: req.session.id,
|
||||
token: req.session.token,
|
||||
expiresAt: req.session.expiresAt,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current user profile
|
||||
* Returns basic user information
|
||||
*/
|
||||
@Get("profile")
|
||||
@UseGuards(AuthGuard)
|
||||
getProfile(@CurrentUser() user: AuthUser) {
|
||||
return {
|
||||
getProfile(@CurrentUser() user: AuthUser): AuthUser {
|
||||
// Return only defined properties to maintain type safety
|
||||
const profile: AuthUser = {
|
||||
id: user.id,
|
||||
email: user.email,
|
||||
name: user.name,
|
||||
};
|
||||
|
||||
if (user.image !== undefined) {
|
||||
profile.image = user.image;
|
||||
}
|
||||
if (user.emailVerified !== undefined) {
|
||||
profile.emailVerified = user.emailVerified;
|
||||
}
|
||||
|
||||
// Workspace context is served by GET /api/workspaces, not the auth profile.
|
||||
// The deprecated workspaceId/currentWorkspaceId/workspaceRole fields on
|
||||
// AuthUser are never populated by BetterAuth and are omitted here.
|
||||
|
||||
return profile;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get available authentication providers.
|
||||
* Public endpoint (no auth guard) so the frontend can discover login options
|
||||
* before the user is authenticated.
|
||||
*/
|
||||
@Get("config")
|
||||
@Header("Cache-Control", "public, max-age=300")
|
||||
async getConfig(): Promise<AuthConfigResponse> {
|
||||
return this.authService.getAuthConfig();
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle all other auth routes (sign-in, sign-up, sign-out, etc.)
|
||||
* Delegates to BetterAuth
|
||||
*
|
||||
* Rate limit: "strict" tier (10 req/min) - More restrictive than normal routes
|
||||
* to prevent brute-force attacks on auth endpoints
|
||||
*
|
||||
* Security note: This catch-all route bypasses standard guards that other routes have.
|
||||
* Rate limiting and logging are applied to mitigate abuse (SEC-API-10).
|
||||
*/
|
||||
@All("*")
|
||||
async handleAuth(@Req() req: Request) {
|
||||
const auth = this.authService.getAuth();
|
||||
return auth.handler(req);
|
||||
// BetterAuth handles CSRF internally (Fetch Metadata + SameSite=Lax cookies).
|
||||
// @SkipCsrf avoids double-protection conflicts.
|
||||
// See: https://www.better-auth.com/docs/reference/security
|
||||
@SkipCsrf()
|
||||
@Throttle({ strict: { limit: 10, ttl: 60000 } })
|
||||
async handleAuth(@Req() req: ExpressRequest, @Res() res: ExpressResponse): Promise<void> {
|
||||
// Extract client IP for logging
|
||||
const clientIp = this.getClientIp(req);
|
||||
|
||||
// Log auth catch-all hits for monitoring and debugging
|
||||
this.logger.debug(`Auth catch-all: ${req.method} ${req.url} from ${clientIp}`);
|
||||
|
||||
const handler = this.authService.getNodeHandler();
|
||||
|
||||
try {
|
||||
await handler(req, res);
|
||||
|
||||
// BetterAuth writes responses directly — catch silent 500s that bypass NestJS error handling
|
||||
if (res.statusCode >= 500) {
|
||||
this.logger.error(
|
||||
`BetterAuth returned ${String(res.statusCode)} for ${req.method} ${req.url} from ${clientIp}` +
|
||||
` — check container stdout for '# SERVER_ERROR' details`
|
||||
);
|
||||
}
|
||||
} catch (error: unknown) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
const stack = error instanceof Error ? error.stack : undefined;
|
||||
|
||||
this.logger.error(
|
||||
`BetterAuth handler error: ${req.method} ${req.url} from ${clientIp} - ${message}`,
|
||||
stack
|
||||
);
|
||||
|
||||
if (!res.headersSent) {
|
||||
const mappedError = this.mapToHttpException(error);
|
||||
if (mappedError) {
|
||||
throw mappedError;
|
||||
}
|
||||
|
||||
throw new HttpException(
|
||||
"Unable to complete authentication. Please try again in a moment.",
|
||||
HttpStatus.INTERNAL_SERVER_ERROR
|
||||
);
|
||||
}
|
||||
|
||||
this.logger.error(
|
||||
`Headers already sent for failed auth request ${req.method} ${req.url} — client may have received partial response`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract client IP from request, handling proxies
|
||||
*/
|
||||
private getClientIp(req: ExpressRequest): string {
|
||||
// Check X-Forwarded-For header (for reverse proxy setups)
|
||||
const forwardedFor = req.headers["x-forwarded-for"];
|
||||
if (forwardedFor) {
|
||||
const ips = Array.isArray(forwardedFor) ? forwardedFor[0] : forwardedFor;
|
||||
return ips?.split(",")[0]?.trim() ?? "unknown";
|
||||
}
|
||||
|
||||
// Fall back to direct IP
|
||||
return req.ip ?? req.socket.remoteAddress ?? "unknown";
|
||||
}
|
||||
|
||||
/**
|
||||
* Preserve known HTTP errors from BetterAuth/better-call instead of converting
|
||||
* every failure into a generic 500.
|
||||
*/
|
||||
private mapToHttpException(error: unknown): HttpException | null {
|
||||
if (error instanceof HttpException) {
|
||||
return error;
|
||||
}
|
||||
|
||||
if (!error || typeof error !== "object") {
|
||||
return null;
|
||||
}
|
||||
|
||||
const statusCode = "statusCode" in error ? error.statusCode : undefined;
|
||||
if (!this.isHttpStatus(statusCode)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const responseBody = "body" in error && error.body !== undefined ? error.body : undefined;
|
||||
if (
|
||||
responseBody !== undefined &&
|
||||
responseBody !== null &&
|
||||
(typeof responseBody === "string" || typeof responseBody === "object")
|
||||
) {
|
||||
return new HttpException(responseBody, statusCode);
|
||||
}
|
||||
|
||||
const message =
|
||||
"message" in error && typeof error.message === "string" && error.message.length > 0
|
||||
? error.message
|
||||
: "Authentication request failed";
|
||||
return new HttpException(message, statusCode);
|
||||
}
|
||||
|
||||
private isHttpStatus(value: unknown): value is number {
|
||||
if (typeof value !== "number" || !Number.isInteger(value)) {
|
||||
return false;
|
||||
}
|
||||
return value >= 400 && value <= 599;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,11 +3,14 @@ import { PrismaModule } from "../prisma/prisma.module";
|
||||
import { AuthService } from "./auth.service";
|
||||
import { AuthController } from "./auth.controller";
|
||||
import { AuthGuard } from "./guards/auth.guard";
|
||||
import { LocalAuthController } from "./local/local-auth.controller";
|
||||
import { LocalAuthService } from "./local/local-auth.service";
|
||||
import { LocalAuthEnabledGuard } from "./local/local-auth.guard";
|
||||
|
||||
@Module({
|
||||
imports: [PrismaModule],
|
||||
controllers: [AuthController],
|
||||
providers: [AuthService, AuthGuard],
|
||||
controllers: [AuthController, LocalAuthController],
|
||||
providers: [AuthService, AuthGuard, LocalAuthService, LocalAuthEnabledGuard],
|
||||
exports: [AuthService, AuthGuard],
|
||||
})
|
||||
export class AuthModule {}
|
||||
|
||||
213
apps/api/src/auth/auth.rate-limit.spec.ts
Normal file
213
apps/api/src/auth/auth.rate-limit.spec.ts
Normal file
@@ -0,0 +1,213 @@
|
||||
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||
import { Test, TestingModule } from "@nestjs/testing";
|
||||
import { INestApplication, HttpStatus, Logger } from "@nestjs/common";
|
||||
import request from "supertest";
|
||||
import { AuthController } from "./auth.controller";
|
||||
import { AuthService } from "./auth.service";
|
||||
import { ThrottlerModule } from "@nestjs/throttler";
|
||||
import { APP_GUARD } from "@nestjs/core";
|
||||
import { ThrottlerApiKeyGuard } from "../common/throttler";
|
||||
|
||||
/**
|
||||
* Rate Limiting Tests for Auth Controller Catch-All Route
|
||||
*
|
||||
* These tests verify that rate limiting is properly enforced on the auth
|
||||
* catch-all route to prevent brute-force attacks (SEC-API-10).
|
||||
*
|
||||
* Test Coverage:
|
||||
* - Rate limit enforcement (429 status after 10 requests in 1 minute)
|
||||
* - Retry-After header inclusion
|
||||
* - Logging occurs for auth catch-all hits
|
||||
*/
|
||||
describe("AuthController - Rate Limiting", () => {
|
||||
let app: INestApplication;
|
||||
let loggerSpy: ReturnType<typeof vi.spyOn>;
|
||||
|
||||
const mockNodeHandler = vi.fn(
|
||||
(_req: unknown, res: { statusCode: number; end: (body: string) => void }) => {
|
||||
res.statusCode = 200;
|
||||
res.end(JSON.stringify({}));
|
||||
return Promise.resolve();
|
||||
}
|
||||
);
|
||||
|
||||
const mockAuthService = {
|
||||
getAuth: vi.fn(),
|
||||
getNodeHandler: vi.fn().mockReturnValue(mockNodeHandler),
|
||||
};
|
||||
|
||||
beforeEach(async () => {
|
||||
// Spy on Logger.prototype.debug to verify logging
|
||||
loggerSpy = vi.spyOn(Logger.prototype, "debug").mockImplementation(() => {});
|
||||
|
||||
const moduleFixture: TestingModule = await Test.createTestingModule({
|
||||
imports: [
|
||||
ThrottlerModule.forRoot([
|
||||
{
|
||||
ttl: 60000, // 1 minute
|
||||
limit: 10, // Match the "strict" tier limit
|
||||
},
|
||||
]),
|
||||
],
|
||||
controllers: [AuthController],
|
||||
providers: [
|
||||
{ provide: AuthService, useValue: mockAuthService },
|
||||
{
|
||||
provide: APP_GUARD,
|
||||
useClass: ThrottlerApiKeyGuard,
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
app = moduleFixture.createNestApplication();
|
||||
await app.init();
|
||||
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await app.close();
|
||||
loggerSpy.mockRestore();
|
||||
});
|
||||
|
||||
describe("Auth Catch-All Route - Rate Limiting", () => {
|
||||
it("should allow requests within rate limit", async () => {
|
||||
// Make 3 requests (within limit of 10)
|
||||
for (let i = 0; i < 3; i++) {
|
||||
const response = await request(app.getHttpServer()).post("/auth/sign-in").send({
|
||||
email: "test@example.com",
|
||||
password: "password",
|
||||
});
|
||||
|
||||
// Should not be rate limited
|
||||
expect(response.status).not.toBe(HttpStatus.TOO_MANY_REQUESTS);
|
||||
}
|
||||
|
||||
expect(mockAuthService.getNodeHandler).toHaveBeenCalledTimes(3);
|
||||
});
|
||||
|
||||
it("should return 429 when rate limit is exceeded", async () => {
|
||||
// Exhaust rate limit (10 requests)
|
||||
for (let i = 0; i < 10; i++) {
|
||||
await request(app.getHttpServer()).post("/auth/sign-in").send({
|
||||
email: "test@example.com",
|
||||
password: "password",
|
||||
});
|
||||
}
|
||||
|
||||
// The 11th request should be rate limited
|
||||
const response = await request(app.getHttpServer()).post("/auth/sign-in").send({
|
||||
email: "test@example.com",
|
||||
password: "password",
|
||||
});
|
||||
|
||||
expect(response.status).toBe(HttpStatus.TOO_MANY_REQUESTS);
|
||||
});
|
||||
|
||||
it("should include Retry-After header in 429 response", async () => {
|
||||
// Exhaust rate limit (10 requests)
|
||||
for (let i = 0; i < 10; i++) {
|
||||
await request(app.getHttpServer()).post("/auth/sign-in").send({
|
||||
email: "test@example.com",
|
||||
password: "password",
|
||||
});
|
||||
}
|
||||
|
||||
// Get rate limited response
|
||||
const response = await request(app.getHttpServer()).post("/auth/sign-in").send({
|
||||
email: "test@example.com",
|
||||
password: "password",
|
||||
});
|
||||
|
||||
expect(response.status).toBe(HttpStatus.TOO_MANY_REQUESTS);
|
||||
expect(response.headers).toHaveProperty("retry-after");
|
||||
expect(parseInt(response.headers["retry-after"])).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it("should rate limit different auth endpoints under the same limit", async () => {
|
||||
// Make 5 sign-in requests
|
||||
for (let i = 0; i < 5; i++) {
|
||||
await request(app.getHttpServer()).post("/auth/sign-in").send({
|
||||
email: "test@example.com",
|
||||
password: "password",
|
||||
});
|
||||
}
|
||||
|
||||
// Make 5 sign-up requests (total now 10)
|
||||
for (let i = 0; i < 5; i++) {
|
||||
await request(app.getHttpServer()).post("/auth/sign-up").send({
|
||||
email: "test@example.com",
|
||||
password: "password",
|
||||
name: "Test User",
|
||||
});
|
||||
}
|
||||
|
||||
// The 11th request (any auth endpoint) should be rate limited
|
||||
const response = await request(app.getHttpServer()).post("/auth/sign-in").send({
|
||||
email: "test@example.com",
|
||||
password: "password",
|
||||
});
|
||||
|
||||
expect(response.status).toBe(HttpStatus.TOO_MANY_REQUESTS);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Auth Catch-All Route - Logging", () => {
|
||||
it("should log auth catch-all hits with request details", async () => {
|
||||
await request(app.getHttpServer()).post("/auth/sign-in").send({
|
||||
email: "test@example.com",
|
||||
password: "password",
|
||||
});
|
||||
|
||||
// Verify logging was called
|
||||
expect(loggerSpy).toHaveBeenCalled();
|
||||
|
||||
// Find the log call that contains our expected message
|
||||
const logCalls = loggerSpy.mock.calls;
|
||||
const authLogCall = logCalls.find(
|
||||
(call) => typeof call[0] === "string" && call[0].includes("Auth catch-all:")
|
||||
);
|
||||
|
||||
expect(authLogCall).toBeDefined();
|
||||
expect(authLogCall?.[0]).toMatch(/Auth catch-all: POST/);
|
||||
});
|
||||
|
||||
it("should log different HTTP methods correctly", async () => {
|
||||
// Test GET request
|
||||
await request(app.getHttpServer()).get("/auth/callback");
|
||||
|
||||
const logCalls = loggerSpy.mock.calls;
|
||||
const getLogCall = logCalls.find(
|
||||
(call) =>
|
||||
typeof call[0] === "string" &&
|
||||
call[0].includes("Auth catch-all:") &&
|
||||
call[0].includes("GET")
|
||||
);
|
||||
|
||||
expect(getLogCall).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe("Per-IP Rate Limiting", () => {
|
||||
it("should track rate limits per IP independently", async () => {
|
||||
// Note: In a real scenario, different IPs would have different limits
|
||||
// This test verifies the rate limit tracking behavior
|
||||
|
||||
// Exhaust rate limit with requests
|
||||
for (let i = 0; i < 10; i++) {
|
||||
await request(app.getHttpServer()).post("/auth/sign-in").send({
|
||||
email: "test@example.com",
|
||||
password: "password",
|
||||
});
|
||||
}
|
||||
|
||||
// Should be rate limited now
|
||||
const response = await request(app.getHttpServer()).post("/auth/sign-in").send({
|
||||
email: "test@example.com",
|
||||
password: "password",
|
||||
});
|
||||
|
||||
expect(response.status).toBe(HttpStatus.TOO_MANY_REQUESTS);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,5 +1,26 @@
|
||||
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||
import { Test, TestingModule } from "@nestjs/testing";
|
||||
|
||||
// Mock better-auth modules before importing AuthService
|
||||
vi.mock("better-auth/node", () => ({
|
||||
toNodeHandler: vi.fn().mockReturnValue(vi.fn()),
|
||||
}));
|
||||
|
||||
vi.mock("better-auth", () => ({
|
||||
betterAuth: vi.fn().mockReturnValue({
|
||||
handler: vi.fn(),
|
||||
api: { getSession: vi.fn() },
|
||||
}),
|
||||
}));
|
||||
|
||||
vi.mock("better-auth/adapters/prisma", () => ({
|
||||
prismaAdapter: vi.fn().mockReturnValue({}),
|
||||
}));
|
||||
|
||||
vi.mock("better-auth/plugins", () => ({
|
||||
genericOAuth: vi.fn().mockReturnValue({ id: "generic-oauth" }),
|
||||
}));
|
||||
|
||||
import { AuthService } from "./auth.service";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
|
||||
@@ -30,6 +51,12 @@ describe("AuthService", () => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
delete process.env.OIDC_ENABLED;
|
||||
delete process.env.OIDC_ISSUER;
|
||||
});
|
||||
|
||||
describe("getAuth", () => {
|
||||
it("should return BetterAuth instance", () => {
|
||||
const auth = service.getAuth();
|
||||
@@ -62,6 +89,23 @@ describe("AuthService", () => {
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("should return null when user is not found", async () => {
|
||||
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||
|
||||
const result = await service.getUserById("nonexistent-id");
|
||||
|
||||
expect(result).toBeNull();
|
||||
expect(mockPrismaService.user.findUnique).toHaveBeenCalledWith({
|
||||
where: { id: "nonexistent-id" },
|
||||
select: {
|
||||
id: true,
|
||||
email: true,
|
||||
name: true,
|
||||
authProviderId: true,
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("getUserByEmail", () => {
|
||||
@@ -88,6 +132,269 @@ describe("AuthService", () => {
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("should return null when user is not found", async () => {
|
||||
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||
|
||||
const result = await service.getUserByEmail("unknown@example.com");
|
||||
|
||||
expect(result).toBeNull();
|
||||
expect(mockPrismaService.user.findUnique).toHaveBeenCalledWith({
|
||||
where: { email: "unknown@example.com" },
|
||||
select: {
|
||||
id: true,
|
||||
email: true,
|
||||
name: true,
|
||||
authProviderId: true,
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("isOidcProviderReachable", () => {
|
||||
const discoveryUrl = "https://auth.example.com/.well-known/openid-configuration";
|
||||
|
||||
beforeEach(() => {
|
||||
process.env.OIDC_ISSUER = "https://auth.example.com/";
|
||||
// Reset the cache by accessing private fields via bracket notation
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
(service as any).lastHealthCheck = 0;
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
(service as any).lastHealthResult = false;
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
(service as any).consecutiveHealthFailures = 0;
|
||||
});
|
||||
|
||||
it("should return true when discovery URL returns 200", async () => {
|
||||
const mockFetch = vi.fn().mockResolvedValue({
|
||||
ok: true,
|
||||
status: 200,
|
||||
});
|
||||
vi.stubGlobal("fetch", mockFetch);
|
||||
|
||||
const result = await service.isOidcProviderReachable();
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(mockFetch).toHaveBeenCalledWith(discoveryUrl, {
|
||||
signal: expect.any(AbortSignal) as AbortSignal,
|
||||
});
|
||||
});
|
||||
|
||||
it("should return false on network error", async () => {
|
||||
const mockFetch = vi.fn().mockRejectedValue(new Error("ECONNREFUSED"));
|
||||
vi.stubGlobal("fetch", mockFetch);
|
||||
|
||||
const result = await service.isOidcProviderReachable();
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it("should return false on timeout", async () => {
|
||||
const mockFetch = vi.fn().mockRejectedValue(new DOMException("The operation was aborted"));
|
||||
vi.stubGlobal("fetch", mockFetch);
|
||||
|
||||
const result = await service.isOidcProviderReachable();
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it("should return false when discovery URL returns non-200", async () => {
|
||||
const mockFetch = vi.fn().mockResolvedValue({
|
||||
ok: false,
|
||||
status: 503,
|
||||
});
|
||||
vi.stubGlobal("fetch", mockFetch);
|
||||
|
||||
const result = await service.isOidcProviderReachable();
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it("should cache result for 30 seconds", async () => {
|
||||
const mockFetch = vi.fn().mockResolvedValue({
|
||||
ok: true,
|
||||
status: 200,
|
||||
});
|
||||
vi.stubGlobal("fetch", mockFetch);
|
||||
|
||||
// First call - fetches
|
||||
const result1 = await service.isOidcProviderReachable();
|
||||
expect(result1).toBe(true);
|
||||
expect(mockFetch).toHaveBeenCalledTimes(1);
|
||||
|
||||
// Second call within 30s - uses cache
|
||||
const result2 = await service.isOidcProviderReachable();
|
||||
expect(result2).toBe(true);
|
||||
expect(mockFetch).toHaveBeenCalledTimes(1); // Still 1, no new fetch
|
||||
|
||||
// Simulate cache expiry by moving lastHealthCheck back
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
(service as any).lastHealthCheck = Date.now() - 31_000;
|
||||
|
||||
// Third call after cache expiry - fetches again
|
||||
const result3 = await service.isOidcProviderReachable();
|
||||
expect(result3).toBe(true);
|
||||
expect(mockFetch).toHaveBeenCalledTimes(2); // Now 2
|
||||
});
|
||||
|
||||
it("should cache false results too", async () => {
|
||||
const mockFetch = vi
|
||||
.fn()
|
||||
.mockRejectedValueOnce(new Error("ECONNREFUSED"))
|
||||
.mockResolvedValueOnce({ ok: true, status: 200 });
|
||||
vi.stubGlobal("fetch", mockFetch);
|
||||
|
||||
// First call - fails
|
||||
const result1 = await service.isOidcProviderReachable();
|
||||
expect(result1).toBe(false);
|
||||
expect(mockFetch).toHaveBeenCalledTimes(1);
|
||||
|
||||
// Second call within 30s - returns cached false
|
||||
const result2 = await service.isOidcProviderReachable();
|
||||
expect(result2).toBe(false);
|
||||
expect(mockFetch).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it("should escalate to error level after 3 consecutive failures", async () => {
|
||||
const mockFetch = vi.fn().mockRejectedValue(new Error("ECONNREFUSED"));
|
||||
vi.stubGlobal("fetch", mockFetch);
|
||||
|
||||
const loggerWarn = vi.spyOn(service["logger"], "warn");
|
||||
const loggerError = vi.spyOn(service["logger"], "error");
|
||||
|
||||
// Failures 1 and 2 should log at warn level
|
||||
await service.isOidcProviderReachable();
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
(service as any).lastHealthCheck = 0; // Reset cache
|
||||
await service.isOidcProviderReachable();
|
||||
|
||||
expect(loggerWarn).toHaveBeenCalledTimes(2);
|
||||
expect(loggerError).not.toHaveBeenCalled();
|
||||
|
||||
// Failure 3 should escalate to error level
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
(service as any).lastHealthCheck = 0;
|
||||
await service.isOidcProviderReachable();
|
||||
|
||||
expect(loggerError).toHaveBeenCalledTimes(1);
|
||||
expect(loggerError).toHaveBeenCalledWith(
|
||||
expect.stringContaining("OIDC provider unreachable")
|
||||
);
|
||||
});
|
||||
|
||||
it("should escalate to error level after 3 consecutive non-OK responses", async () => {
|
||||
const mockFetch = vi.fn().mockResolvedValue({ ok: false, status: 503 });
|
||||
vi.stubGlobal("fetch", mockFetch);
|
||||
|
||||
const loggerWarn = vi.spyOn(service["logger"], "warn");
|
||||
const loggerError = vi.spyOn(service["logger"], "error");
|
||||
|
||||
// Failures 1 and 2 at warn level
|
||||
await service.isOidcProviderReachable();
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
(service as any).lastHealthCheck = 0;
|
||||
await service.isOidcProviderReachable();
|
||||
|
||||
expect(loggerWarn).toHaveBeenCalledTimes(2);
|
||||
expect(loggerError).not.toHaveBeenCalled();
|
||||
|
||||
// Failure 3 at error level
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
(service as any).lastHealthCheck = 0;
|
||||
await service.isOidcProviderReachable();
|
||||
|
||||
expect(loggerError).toHaveBeenCalledTimes(1);
|
||||
expect(loggerError).toHaveBeenCalledWith(
|
||||
expect.stringContaining("OIDC provider returned non-OK status")
|
||||
);
|
||||
});
|
||||
|
||||
it("should reset failure counter and log recovery on success after failures", async () => {
|
||||
const mockFetch = vi
|
||||
.fn()
|
||||
.mockRejectedValueOnce(new Error("ECONNREFUSED"))
|
||||
.mockRejectedValueOnce(new Error("ECONNREFUSED"))
|
||||
.mockResolvedValueOnce({ ok: true, status: 200 });
|
||||
vi.stubGlobal("fetch", mockFetch);
|
||||
|
||||
const loggerLog = vi.spyOn(service["logger"], "log");
|
||||
|
||||
// Two failures
|
||||
await service.isOidcProviderReachable();
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
(service as any).lastHealthCheck = 0;
|
||||
await service.isOidcProviderReachable();
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
(service as any).lastHealthCheck = 0;
|
||||
|
||||
// Recovery
|
||||
const result = await service.isOidcProviderReachable();
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(loggerLog).toHaveBeenCalledWith(
|
||||
expect.stringContaining("OIDC provider recovered after 2 consecutive failure(s)")
|
||||
);
|
||||
// Verify counter reset
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
expect((service as any).consecutiveHealthFailures).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getAuthConfig", () => {
|
||||
it("should return only email provider when OIDC is disabled", async () => {
|
||||
delete process.env.OIDC_ENABLED;
|
||||
|
||||
const result = await service.getAuthConfig();
|
||||
|
||||
expect(result).toEqual({
|
||||
providers: [{ id: "email", name: "Email", type: "credentials" }],
|
||||
});
|
||||
});
|
||||
|
||||
it("should return both email and authentik providers when OIDC is enabled and reachable", async () => {
|
||||
process.env.OIDC_ENABLED = "true";
|
||||
process.env.OIDC_ISSUER = "https://auth.example.com/";
|
||||
|
||||
const mockFetch = vi.fn().mockResolvedValue({ ok: true, status: 200 });
|
||||
vi.stubGlobal("fetch", mockFetch);
|
||||
|
||||
const result = await service.getAuthConfig();
|
||||
|
||||
expect(result).toEqual({
|
||||
providers: [
|
||||
{ id: "email", name: "Email", type: "credentials" },
|
||||
{ id: "authentik", name: "Authentik", type: "oauth" },
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
it("should return only email provider when OIDC_ENABLED is false", async () => {
|
||||
process.env.OIDC_ENABLED = "false";
|
||||
|
||||
const result = await service.getAuthConfig();
|
||||
|
||||
expect(result).toEqual({
|
||||
providers: [{ id: "email", name: "Email", type: "credentials" }],
|
||||
});
|
||||
});
|
||||
|
||||
it("should omit authentik when OIDC is enabled but provider is unreachable", async () => {
|
||||
process.env.OIDC_ENABLED = "true";
|
||||
process.env.OIDC_ISSUER = "https://auth.example.com/";
|
||||
|
||||
// Reset cache
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
(service as any).lastHealthCheck = 0;
|
||||
|
||||
const mockFetch = vi.fn().mockRejectedValue(new Error("ECONNREFUSED"));
|
||||
vi.stubGlobal("fetch", mockFetch);
|
||||
|
||||
const result = await service.getAuthConfig();
|
||||
|
||||
expect(result).toEqual({
|
||||
providers: [{ id: "email", name: "Email", type: "credentials" }],
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("verifySession", () => {
|
||||
@@ -103,7 +410,7 @@ describe("AuthService", () => {
|
||||
},
|
||||
};
|
||||
|
||||
it("should return session data for valid token", async () => {
|
||||
it("should validate session token using secure BetterAuth cookie header", async () => {
|
||||
const auth = service.getAuth();
|
||||
const mockGetSession = vi.fn().mockResolvedValue(mockSessionData);
|
||||
auth.api = { getSession: mockGetSession } as any;
|
||||
@@ -111,7 +418,58 @@ describe("AuthService", () => {
|
||||
const result = await service.verifySession("valid-token");
|
||||
|
||||
expect(result).toEqual(mockSessionData);
|
||||
expect(mockGetSession).toHaveBeenCalledTimes(1);
|
||||
expect(mockGetSession).toHaveBeenCalledWith({
|
||||
headers: {
|
||||
cookie: "__Secure-better-auth.session_token=valid-token",
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("should preserve raw cookie token value without URL re-encoding", async () => {
|
||||
const auth = service.getAuth();
|
||||
const mockGetSession = vi.fn().mockResolvedValue(mockSessionData);
|
||||
auth.api = { getSession: mockGetSession } as any;
|
||||
|
||||
const result = await service.verifySession("tok/with+=chars=");
|
||||
|
||||
expect(result).toEqual(mockSessionData);
|
||||
expect(mockGetSession).toHaveBeenCalledWith({
|
||||
headers: {
|
||||
cookie: "__Secure-better-auth.session_token=tok/with+=chars=",
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("should fall back to Authorization header when cookie-based lookups miss", async () => {
|
||||
const auth = service.getAuth();
|
||||
const mockGetSession = vi
|
||||
.fn()
|
||||
.mockResolvedValueOnce(null)
|
||||
.mockResolvedValueOnce(null)
|
||||
.mockResolvedValueOnce(null)
|
||||
.mockResolvedValueOnce(mockSessionData);
|
||||
auth.api = { getSession: mockGetSession } as any;
|
||||
|
||||
const result = await service.verifySession("valid-token");
|
||||
|
||||
expect(result).toEqual(mockSessionData);
|
||||
expect(mockGetSession).toHaveBeenNthCalledWith(1, {
|
||||
headers: {
|
||||
cookie: "__Secure-better-auth.session_token=valid-token",
|
||||
},
|
||||
});
|
||||
expect(mockGetSession).toHaveBeenNthCalledWith(2, {
|
||||
headers: {
|
||||
cookie: "better-auth.session_token=valid-token",
|
||||
},
|
||||
});
|
||||
expect(mockGetSession).toHaveBeenNthCalledWith(3, {
|
||||
headers: {
|
||||
cookie: "__Host-better-auth.session_token=valid-token",
|
||||
},
|
||||
});
|
||||
expect(mockGetSession).toHaveBeenNthCalledWith(4, {
|
||||
headers: {
|
||||
authorization: "Bearer valid-token",
|
||||
},
|
||||
@@ -128,14 +486,264 @@ describe("AuthService", () => {
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("should return null and log error on verification failure", async () => {
|
||||
it("should return null for 'invalid token' auth error", async () => {
|
||||
const auth = service.getAuth();
|
||||
const mockGetSession = vi.fn().mockRejectedValue(new Error("Invalid token provided"));
|
||||
auth.api = { getSession: mockGetSession } as any;
|
||||
|
||||
const result = await service.verifySession("bad-token");
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("should return null for 'expired' auth error", async () => {
|
||||
const auth = service.getAuth();
|
||||
const mockGetSession = vi.fn().mockRejectedValue(new Error("Token expired"));
|
||||
auth.api = { getSession: mockGetSession } as any;
|
||||
|
||||
const result = await service.verifySession("expired-token");
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("should return null for 'session not found' auth error", async () => {
|
||||
const auth = service.getAuth();
|
||||
const mockGetSession = vi.fn().mockRejectedValue(new Error("Session not found"));
|
||||
auth.api = { getSession: mockGetSession } as any;
|
||||
|
||||
const result = await service.verifySession("missing-session");
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("should return null for 'unauthorized' auth error", async () => {
|
||||
const auth = service.getAuth();
|
||||
const mockGetSession = vi.fn().mockRejectedValue(new Error("Unauthorized"));
|
||||
auth.api = { getSession: mockGetSession } as any;
|
||||
|
||||
const result = await service.verifySession("unauth-token");
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("should return null for 'invalid session' auth error", async () => {
|
||||
const auth = service.getAuth();
|
||||
const mockGetSession = vi.fn().mockRejectedValue(new Error("Invalid session"));
|
||||
auth.api = { getSession: mockGetSession } as any;
|
||||
|
||||
const result = await service.verifySession("invalid-session");
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("should return null for 'session expired' auth error", async () => {
|
||||
const auth = service.getAuth();
|
||||
const mockGetSession = vi.fn().mockRejectedValue(new Error("Session expired"));
|
||||
auth.api = { getSession: mockGetSession } as any;
|
||||
|
||||
const result = await service.verifySession("expired-session");
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("should return null for bare 'unauthorized' (exact match)", async () => {
|
||||
const auth = service.getAuth();
|
||||
const mockGetSession = vi.fn().mockRejectedValue(new Error("unauthorized"));
|
||||
auth.api = { getSession: mockGetSession } as any;
|
||||
|
||||
const result = await service.verifySession("unauth-token");
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("should return null for bare 'expired' (exact match)", async () => {
|
||||
const auth = service.getAuth();
|
||||
const mockGetSession = vi.fn().mockRejectedValue(new Error("expired"));
|
||||
auth.api = { getSession: mockGetSession } as any;
|
||||
|
||||
const result = await service.verifySession("expired-token");
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("should re-throw 'certificate has expired' as infrastructure error (not auth)", async () => {
|
||||
const auth = service.getAuth();
|
||||
const mockGetSession = vi.fn().mockRejectedValue(new Error("certificate has expired"));
|
||||
auth.api = { getSession: mockGetSession } as any;
|
||||
|
||||
await expect(service.verifySession("any-token")).rejects.toThrow("certificate has expired");
|
||||
});
|
||||
|
||||
it("should re-throw 'Unauthorized: Access denied for user' as infrastructure error (not auth)", async () => {
|
||||
const auth = service.getAuth();
|
||||
const mockGetSession = vi
|
||||
.fn()
|
||||
.mockRejectedValue(new Error("Unauthorized: Access denied for user"));
|
||||
auth.api = { getSession: mockGetSession } as any;
|
||||
|
||||
await expect(service.verifySession("any-token")).rejects.toThrow(
|
||||
"Unauthorized: Access denied for user"
|
||||
);
|
||||
});
|
||||
|
||||
it("should return null when a non-Error value is thrown", async () => {
|
||||
const auth = service.getAuth();
|
||||
const mockGetSession = vi.fn().mockRejectedValue("string-error");
|
||||
auth.api = { getSession: mockGetSession } as any;
|
||||
|
||||
const result = await service.verifySession("any-token");
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("should return null when getSession throws a non-Error value (string)", async () => {
|
||||
const auth = service.getAuth();
|
||||
const mockGetSession = vi.fn().mockRejectedValue("some error");
|
||||
auth.api = { getSession: mockGetSession } as any;
|
||||
|
||||
const result = await service.verifySession("any-token");
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("should return null when getSession throws a non-Error value (object)", async () => {
|
||||
const auth = service.getAuth();
|
||||
const mockGetSession = vi.fn().mockRejectedValue({ code: "ERR_UNKNOWN" });
|
||||
auth.api = { getSession: mockGetSession } as any;
|
||||
|
||||
const result = await service.verifySession("any-token");
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("should re-throw unexpected errors that are not known auth errors", async () => {
|
||||
const auth = service.getAuth();
|
||||
const mockGetSession = vi.fn().mockRejectedValue(new Error("Verification failed"));
|
||||
auth.api = { getSession: mockGetSession } as any;
|
||||
|
||||
const result = await service.verifySession("error-token");
|
||||
await expect(service.verifySession("error-token")).rejects.toThrow("Verification failed");
|
||||
});
|
||||
|
||||
it("should re-throw Prisma infrastructure errors", async () => {
|
||||
const auth = service.getAuth();
|
||||
const prismaError = new Error("connect ECONNREFUSED 127.0.0.1:5432");
|
||||
const mockGetSession = vi.fn().mockRejectedValue(prismaError);
|
||||
auth.api = { getSession: mockGetSession } as any;
|
||||
|
||||
await expect(service.verifySession("any-token")).rejects.toThrow("ECONNREFUSED");
|
||||
});
|
||||
|
||||
it("should re-throw timeout errors as infrastructure errors", async () => {
|
||||
const auth = service.getAuth();
|
||||
const timeoutError = new Error("Connection timeout after 5000ms");
|
||||
const mockGetSession = vi.fn().mockRejectedValue(timeoutError);
|
||||
auth.api = { getSession: mockGetSession } as any;
|
||||
|
||||
await expect(service.verifySession("any-token")).rejects.toThrow("timeout");
|
||||
});
|
||||
|
||||
it("should re-throw errors with Prisma-prefixed constructor name", async () => {
|
||||
const auth = service.getAuth();
|
||||
class PrismaClientKnownRequestError extends Error {
|
||||
constructor(message: string) {
|
||||
super(message);
|
||||
this.name = "PrismaClientKnownRequestError";
|
||||
}
|
||||
}
|
||||
const prismaError = new PrismaClientKnownRequestError("Database connection lost");
|
||||
const mockGetSession = vi.fn().mockRejectedValue(prismaError);
|
||||
auth.api = { getSession: mockGetSession } as any;
|
||||
|
||||
await expect(service.verifySession("any-token")).rejects.toThrow("Database connection lost");
|
||||
});
|
||||
|
||||
it("should redact Bearer tokens from logged error messages", async () => {
|
||||
const auth = service.getAuth();
|
||||
const errorWithToken = new Error(
|
||||
"Request failed: Bearer eyJhbGciOiJIUzI1NiJ9.secret-payload in header"
|
||||
);
|
||||
const mockGetSession = vi.fn().mockRejectedValue(errorWithToken);
|
||||
auth.api = { getSession: mockGetSession } as any;
|
||||
|
||||
const loggerError = vi.spyOn(service["logger"], "error");
|
||||
|
||||
await expect(service.verifySession("any-token")).rejects.toThrow();
|
||||
|
||||
expect(loggerError).toHaveBeenCalledWith(
|
||||
"Session verification failed due to unexpected error",
|
||||
expect.stringContaining("Bearer [REDACTED]")
|
||||
);
|
||||
expect(loggerError).toHaveBeenCalledWith(
|
||||
"Session verification failed due to unexpected error",
|
||||
expect.not.stringContaining("eyJhbGciOiJIUzI1NiJ9")
|
||||
);
|
||||
});
|
||||
|
||||
it("should redact Bearer tokens from error stack traces", async () => {
|
||||
const auth = service.getAuth();
|
||||
const errorWithToken = new Error("Something went wrong");
|
||||
errorWithToken.stack =
|
||||
"Error: Something went wrong\n at fetch (Bearer abc123-secret-token)\n at verifySession";
|
||||
const mockGetSession = vi.fn().mockRejectedValue(errorWithToken);
|
||||
auth.api = { getSession: mockGetSession } as any;
|
||||
|
||||
const loggerError = vi.spyOn(service["logger"], "error");
|
||||
|
||||
await expect(service.verifySession("any-token")).rejects.toThrow();
|
||||
|
||||
expect(loggerError).toHaveBeenCalledWith(
|
||||
"Session verification failed due to unexpected error",
|
||||
expect.stringContaining("Bearer [REDACTED]")
|
||||
);
|
||||
expect(loggerError).toHaveBeenCalledWith(
|
||||
"Session verification failed due to unexpected error",
|
||||
expect.not.stringContaining("abc123-secret-token")
|
||||
);
|
||||
});
|
||||
|
||||
it("should warn when a non-Error string value is thrown", async () => {
|
||||
const auth = service.getAuth();
|
||||
const mockGetSession = vi.fn().mockRejectedValue("string-error");
|
||||
auth.api = { getSession: mockGetSession } as any;
|
||||
|
||||
const loggerWarn = vi.spyOn(service["logger"], "warn");
|
||||
|
||||
const result = await service.verifySession("any-token");
|
||||
|
||||
expect(result).toBeNull();
|
||||
expect(loggerWarn).toHaveBeenCalledWith(
|
||||
"Session verification received non-Error thrown value",
|
||||
"string-error"
|
||||
);
|
||||
});
|
||||
|
||||
it("should warn with JSON when a non-Error object is thrown", async () => {
|
||||
const auth = service.getAuth();
|
||||
const mockGetSession = vi.fn().mockRejectedValue({ code: "ERR_UNKNOWN" });
|
||||
auth.api = { getSession: mockGetSession } as any;
|
||||
|
||||
const loggerWarn = vi.spyOn(service["logger"], "warn");
|
||||
|
||||
const result = await service.verifySession("any-token");
|
||||
|
||||
expect(result).toBeNull();
|
||||
expect(loggerWarn).toHaveBeenCalledWith(
|
||||
"Session verification received non-Error thrown value",
|
||||
JSON.stringify({ code: "ERR_UNKNOWN" })
|
||||
);
|
||||
});
|
||||
|
||||
it("should not warn for expected auth errors (Error instances)", async () => {
|
||||
const auth = service.getAuth();
|
||||
const mockGetSession = vi.fn().mockRejectedValue(new Error("Invalid token provided"));
|
||||
auth.api = { getSession: mockGetSession } as any;
|
||||
|
||||
const loggerWarn = vi.spyOn(service["logger"], "warn");
|
||||
|
||||
const result = await service.verifySession("bad-token");
|
||||
|
||||
expect(result).toBeNull();
|
||||
expect(loggerWarn).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,17 +1,49 @@
|
||||
import { Injectable, Logger } from "@nestjs/common";
|
||||
import type { PrismaClient } from "@prisma/client";
|
||||
import type { IncomingMessage, ServerResponse } from "http";
|
||||
import { toNodeHandler } from "better-auth/node";
|
||||
import type { AuthConfigResponse, AuthProviderConfig } from "@mosaic/shared";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
import { createAuth, type Auth } from "./auth.config";
|
||||
import { createAuth, isOidcEnabled, type Auth } from "./auth.config";
|
||||
|
||||
/** Duration in milliseconds to cache the OIDC health check result */
|
||||
const OIDC_HEALTH_CACHE_TTL_MS = 30_000;
|
||||
|
||||
/** Timeout in milliseconds for the OIDC discovery URL fetch */
|
||||
const OIDC_HEALTH_TIMEOUT_MS = 2_000;
|
||||
|
||||
/** Number of consecutive health-check failures before escalating to error level */
|
||||
const HEALTH_ESCALATION_THRESHOLD = 3;
|
||||
|
||||
/** Verified session shape returned by BetterAuth's getSession */
|
||||
interface VerifiedSession {
|
||||
user: Record<string, unknown>;
|
||||
session: Record<string, unknown>;
|
||||
}
|
||||
|
||||
interface SessionHeaderCandidate {
|
||||
headers: Record<string, string>;
|
||||
}
|
||||
|
||||
@Injectable()
|
||||
export class AuthService {
|
||||
private readonly logger = new Logger(AuthService.name);
|
||||
private readonly auth: Auth;
|
||||
private readonly nodeHandler: (req: IncomingMessage, res: ServerResponse) => Promise<void>;
|
||||
|
||||
/** Timestamp of the last OIDC health check */
|
||||
private lastHealthCheck = 0;
|
||||
/** Cached result of the last OIDC health check */
|
||||
private lastHealthResult = false;
|
||||
/** Consecutive OIDC health check failure count for log-level escalation */
|
||||
private consecutiveHealthFailures = 0;
|
||||
|
||||
constructor(private readonly prisma: PrismaService) {
|
||||
// PrismaService extends PrismaClient and is compatible with BetterAuth's adapter
|
||||
// Cast is safe as PrismaService provides all required PrismaClient methods
|
||||
// TODO(#411): BetterAuth returns opaque types — replace when upstream exports typed interfaces
|
||||
this.auth = createAuth(this.prisma as unknown as PrismaClient);
|
||||
this.nodeHandler = toNodeHandler(this.auth);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -21,6 +53,14 @@ export class AuthService {
|
||||
return this.auth;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get Node.js-compatible request handler for BetterAuth.
|
||||
* Wraps BetterAuth's Web API handler to work with Express/Node.js req/res.
|
||||
*/
|
||||
getNodeHandler(): (req: IncomingMessage, res: ServerResponse) => Promise<void> {
|
||||
return this.nodeHandler;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get user by ID
|
||||
*/
|
||||
@@ -63,32 +103,159 @@ export class AuthService {
|
||||
|
||||
/**
|
||||
* Verify session token
|
||||
* Returns session data if valid, null if invalid or expired
|
||||
* Returns session data if valid, null if invalid or expired.
|
||||
* Only known-safe auth errors return null; everything else propagates as 500.
|
||||
*/
|
||||
async verifySession(
|
||||
token: string
|
||||
): Promise<{ user: Record<string, unknown>; session: Record<string, unknown> } | null> {
|
||||
try {
|
||||
const session = await this.auth.api.getSession({
|
||||
async verifySession(token: string): Promise<VerifiedSession | null> {
|
||||
let sawNonError = false;
|
||||
|
||||
for (const candidate of this.buildSessionHeaderCandidates(token)) {
|
||||
try {
|
||||
// TODO(#411): BetterAuth getSession returns opaque types — replace when upstream exports typed interfaces
|
||||
const session = await this.auth.api.getSession(candidate);
|
||||
|
||||
if (!session) {
|
||||
continue;
|
||||
}
|
||||
|
||||
return {
|
||||
user: session.user as Record<string, unknown>,
|
||||
session: session.session as Record<string, unknown>,
|
||||
};
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
if (this.isExpectedAuthError(error.message)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Infrastructure or unexpected — propagate as 500
|
||||
const safeMessage = (error.stack ?? error.message).replace(
|
||||
/Bearer\s+\S+/gi,
|
||||
"Bearer [REDACTED]"
|
||||
);
|
||||
this.logger.error("Session verification failed due to unexpected error", safeMessage);
|
||||
throw error;
|
||||
}
|
||||
|
||||
// Non-Error thrown values — log once for observability, treat as auth failure
|
||||
if (!sawNonError) {
|
||||
const errorDetail = typeof error === "string" ? error : JSON.stringify(error);
|
||||
this.logger.warn("Session verification received non-Error thrown value", errorDetail);
|
||||
sawNonError = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private buildSessionHeaderCandidates(token: string): SessionHeaderCandidate[] {
|
||||
return [
|
||||
{
|
||||
headers: {
|
||||
cookie: `__Secure-better-auth.session_token=${token}`,
|
||||
},
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
cookie: `better-auth.session_token=${token}`,
|
||||
},
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
cookie: `__Host-better-auth.session_token=${token}`,
|
||||
},
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
authorization: `Bearer ${token}`,
|
||||
},
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
private isExpectedAuthError(message: string): boolean {
|
||||
const normalized = message.toLowerCase();
|
||||
return (
|
||||
normalized.includes("invalid token") ||
|
||||
normalized.includes("token expired") ||
|
||||
normalized.includes("session expired") ||
|
||||
normalized.includes("session not found") ||
|
||||
normalized.includes("invalid session") ||
|
||||
normalized === "unauthorized" ||
|
||||
normalized === "expired"
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the OIDC provider (Authentik) is reachable by fetching the discovery URL.
|
||||
* Results are cached for 30 seconds to prevent repeated network calls.
|
||||
*
|
||||
* @returns true if the provider responds with an HTTP 2xx status, false otherwise
|
||||
*/
|
||||
async isOidcProviderReachable(): Promise<boolean> {
|
||||
const now = Date.now();
|
||||
|
||||
// Return cached result if still valid
|
||||
if (now - this.lastHealthCheck < OIDC_HEALTH_CACHE_TTL_MS) {
|
||||
this.logger.debug("OIDC health check: returning cached result");
|
||||
return this.lastHealthResult;
|
||||
}
|
||||
|
||||
const discoveryUrl = `${process.env.OIDC_ISSUER ?? ""}.well-known/openid-configuration`;
|
||||
this.logger.debug(`OIDC health check: fetching ${discoveryUrl}`);
|
||||
|
||||
try {
|
||||
const response = await fetch(discoveryUrl, {
|
||||
signal: AbortSignal.timeout(OIDC_HEALTH_TIMEOUT_MS),
|
||||
});
|
||||
|
||||
if (!session) {
|
||||
return null;
|
||||
this.lastHealthCheck = Date.now();
|
||||
this.lastHealthResult = response.ok;
|
||||
|
||||
if (response.ok) {
|
||||
if (this.consecutiveHealthFailures > 0) {
|
||||
this.logger.log(
|
||||
`OIDC provider recovered after ${String(this.consecutiveHealthFailures)} consecutive failure(s)`
|
||||
);
|
||||
}
|
||||
this.consecutiveHealthFailures = 0;
|
||||
} else {
|
||||
this.consecutiveHealthFailures++;
|
||||
const logLevel =
|
||||
this.consecutiveHealthFailures >= HEALTH_ESCALATION_THRESHOLD ? "error" : "warn";
|
||||
this.logger[logLevel](
|
||||
`OIDC provider returned non-OK status: ${String(response.status)} from ${discoveryUrl}`
|
||||
);
|
||||
}
|
||||
|
||||
return {
|
||||
user: session.user as Record<string, unknown>,
|
||||
session: session.session as Record<string, unknown>,
|
||||
};
|
||||
} catch (error) {
|
||||
this.logger.error(
|
||||
"Session verification failed",
|
||||
error instanceof Error ? error.message : "Unknown error"
|
||||
);
|
||||
return null;
|
||||
return this.lastHealthResult;
|
||||
} catch (error: unknown) {
|
||||
this.lastHealthCheck = Date.now();
|
||||
this.lastHealthResult = false;
|
||||
this.consecutiveHealthFailures++;
|
||||
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
const logLevel =
|
||||
this.consecutiveHealthFailures >= HEALTH_ESCALATION_THRESHOLD ? "error" : "warn";
|
||||
this.logger[logLevel](`OIDC provider unreachable at ${discoveryUrl}: ${message}`);
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get authentication configuration for the frontend.
|
||||
* Returns available auth providers so the UI can render login options dynamically.
|
||||
* When OIDC is enabled, performs a health check to verify the provider is reachable.
|
||||
*/
|
||||
async getAuthConfig(): Promise<AuthConfigResponse> {
|
||||
const providers: AuthProviderConfig[] = [{ id: "email", name: "Email", type: "credentials" }];
|
||||
|
||||
if (isOidcEnabled() && (await this.isOidcProviderReachable())) {
|
||||
providers.push({ id: "authentik", name: "Authentik", type: "oauth" });
|
||||
}
|
||||
|
||||
return { providers };
|
||||
}
|
||||
}
|
||||
|
||||
96
apps/api/src/auth/decorators/current-user.decorator.spec.ts
Normal file
96
apps/api/src/auth/decorators/current-user.decorator.spec.ts
Normal file
@@ -0,0 +1,96 @@
|
||||
import { describe, it, expect } from "vitest";
|
||||
import { ExecutionContext, UnauthorizedException } from "@nestjs/common";
|
||||
import { ROUTE_ARGS_METADATA } from "@nestjs/common/constants";
|
||||
import { CurrentUser } from "./current-user.decorator";
|
||||
import type { AuthUser } from "@mosaic/shared";
|
||||
|
||||
/**
|
||||
* Extract the factory function from a NestJS param decorator created with createParamDecorator.
|
||||
* NestJS stores param decorator factories in metadata on a dummy class.
|
||||
*/
|
||||
function getParamDecoratorFactory(): (data: unknown, ctx: ExecutionContext) => AuthUser {
|
||||
class TestController {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
testMethod(@CurrentUser() _user: AuthUser): void {
|
||||
// no-op
|
||||
}
|
||||
}
|
||||
|
||||
const metadata = Reflect.getMetadata(ROUTE_ARGS_METADATA, TestController, "testMethod");
|
||||
|
||||
// The metadata keys are in the format "paramtype:index"
|
||||
const key = Object.keys(metadata)[0];
|
||||
return metadata[key].factory;
|
||||
}
|
||||
|
||||
function createMockExecutionContext(user?: AuthUser): ExecutionContext {
|
||||
const mockRequest = {
|
||||
...(user !== undefined ? { user } : {}),
|
||||
};
|
||||
|
||||
return {
|
||||
switchToHttp: () => ({
|
||||
getRequest: () => mockRequest,
|
||||
}),
|
||||
} as ExecutionContext;
|
||||
}
|
||||
|
||||
describe("CurrentUser decorator", () => {
|
||||
const factory = getParamDecoratorFactory();
|
||||
|
||||
const mockUser: AuthUser = {
|
||||
id: "user-123",
|
||||
email: "test@example.com",
|
||||
name: "Test User",
|
||||
};
|
||||
|
||||
it("should return the user when present on the request", () => {
|
||||
const ctx = createMockExecutionContext(mockUser);
|
||||
const result = factory(undefined, ctx);
|
||||
|
||||
expect(result).toEqual(mockUser);
|
||||
});
|
||||
|
||||
it("should return the user with optional fields", () => {
|
||||
const userWithOptionalFields: AuthUser = {
|
||||
...mockUser,
|
||||
image: "https://example.com/avatar.png",
|
||||
workspaceId: "ws-123",
|
||||
workspaceRole: "owner",
|
||||
};
|
||||
|
||||
const ctx = createMockExecutionContext(userWithOptionalFields);
|
||||
const result = factory(undefined, ctx);
|
||||
|
||||
expect(result).toEqual(userWithOptionalFields);
|
||||
expect(result.image).toBe("https://example.com/avatar.png");
|
||||
expect(result.workspaceId).toBe("ws-123");
|
||||
});
|
||||
|
||||
it("should throw UnauthorizedException when user is undefined", () => {
|
||||
const ctx = createMockExecutionContext(undefined);
|
||||
|
||||
expect(() => factory(undefined, ctx)).toThrow(UnauthorizedException);
|
||||
expect(() => factory(undefined, ctx)).toThrow("No authenticated user found on request");
|
||||
});
|
||||
|
||||
it("should throw UnauthorizedException when request has no user property", () => {
|
||||
// Request object without a user property at all
|
||||
const ctx = {
|
||||
switchToHttp: () => ({
|
||||
getRequest: () => ({}),
|
||||
}),
|
||||
} as ExecutionContext;
|
||||
|
||||
expect(() => factory(undefined, ctx)).toThrow(UnauthorizedException);
|
||||
});
|
||||
|
||||
it("should ignore the data parameter", () => {
|
||||
const ctx = createMockExecutionContext(mockUser);
|
||||
|
||||
// The decorator doesn't use the data parameter, but ensure it doesn't break
|
||||
const result = factory("some-data", ctx);
|
||||
|
||||
expect(result).toEqual(mockUser);
|
||||
});
|
||||
});
|
||||
@@ -1,10 +1,16 @@
|
||||
import type { ExecutionContext } from "@nestjs/common";
|
||||
import { createParamDecorator } from "@nestjs/common";
|
||||
import type { AuthenticatedRequest, AuthenticatedUser } from "../../common/types/user.types";
|
||||
import { createParamDecorator, UnauthorizedException } from "@nestjs/common";
|
||||
import type { AuthUser } from "@mosaic/shared";
|
||||
import type { MaybeAuthenticatedRequest } from "../types/better-auth-request.interface";
|
||||
|
||||
export const CurrentUser = createParamDecorator(
|
||||
(_data: unknown, ctx: ExecutionContext): AuthenticatedUser | undefined => {
|
||||
const request = ctx.switchToHttp().getRequest<AuthenticatedRequest>();
|
||||
(_data: unknown, ctx: ExecutionContext): AuthUser => {
|
||||
// Use MaybeAuthenticatedRequest because the decorator doesn't know
|
||||
// whether AuthGuard ran — the null check provides defense-in-depth.
|
||||
const request = ctx.switchToHttp().getRequest<MaybeAuthenticatedRequest>();
|
||||
if (!request.user) {
|
||||
throw new UnauthorizedException("No authenticated user found on request");
|
||||
}
|
||||
return request.user;
|
||||
}
|
||||
);
|
||||
|
||||
170
apps/api/src/auth/guards/admin.guard.spec.ts
Normal file
170
apps/api/src/auth/guards/admin.guard.spec.ts
Normal file
@@ -0,0 +1,170 @@
|
||||
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||
import { ExecutionContext, ForbiddenException } from "@nestjs/common";
|
||||
import { AdminGuard } from "./admin.guard";
|
||||
|
||||
describe("AdminGuard", () => {
|
||||
const originalEnv = process.env.SYSTEM_ADMIN_IDS;
|
||||
|
||||
afterEach(() => {
|
||||
// Restore original environment
|
||||
if (originalEnv !== undefined) {
|
||||
process.env.SYSTEM_ADMIN_IDS = originalEnv;
|
||||
} else {
|
||||
delete process.env.SYSTEM_ADMIN_IDS;
|
||||
}
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
const createMockExecutionContext = (user: { id: string } | undefined): ExecutionContext => {
|
||||
const mockRequest = {
|
||||
user,
|
||||
};
|
||||
|
||||
return {
|
||||
switchToHttp: () => ({
|
||||
getRequest: () => mockRequest,
|
||||
}),
|
||||
} as ExecutionContext;
|
||||
};
|
||||
|
||||
describe("constructor", () => {
|
||||
it("should parse system admin IDs from environment variable", () => {
|
||||
process.env.SYSTEM_ADMIN_IDS = "admin-1,admin-2,admin-3";
|
||||
const guard = new AdminGuard();
|
||||
|
||||
expect(guard.isSystemAdmin("admin-1")).toBe(true);
|
||||
expect(guard.isSystemAdmin("admin-2")).toBe(true);
|
||||
expect(guard.isSystemAdmin("admin-3")).toBe(true);
|
||||
});
|
||||
|
||||
it("should handle whitespace in admin IDs", () => {
|
||||
process.env.SYSTEM_ADMIN_IDS = " admin-1 , admin-2 , admin-3 ";
|
||||
const guard = new AdminGuard();
|
||||
|
||||
expect(guard.isSystemAdmin("admin-1")).toBe(true);
|
||||
expect(guard.isSystemAdmin("admin-2")).toBe(true);
|
||||
expect(guard.isSystemAdmin("admin-3")).toBe(true);
|
||||
});
|
||||
|
||||
it("should handle empty environment variable", () => {
|
||||
process.env.SYSTEM_ADMIN_IDS = "";
|
||||
const guard = new AdminGuard();
|
||||
|
||||
expect(guard.isSystemAdmin("any-user")).toBe(false);
|
||||
});
|
||||
|
||||
it("should handle missing environment variable", () => {
|
||||
delete process.env.SYSTEM_ADMIN_IDS;
|
||||
const guard = new AdminGuard();
|
||||
|
||||
expect(guard.isSystemAdmin("any-user")).toBe(false);
|
||||
});
|
||||
|
||||
it("should handle single admin ID", () => {
|
||||
process.env.SYSTEM_ADMIN_IDS = "single-admin";
|
||||
const guard = new AdminGuard();
|
||||
|
||||
expect(guard.isSystemAdmin("single-admin")).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("isSystemAdmin", () => {
|
||||
let guard: AdminGuard;
|
||||
|
||||
beforeEach(() => {
|
||||
process.env.SYSTEM_ADMIN_IDS = "admin-uuid-1,admin-uuid-2";
|
||||
guard = new AdminGuard();
|
||||
});
|
||||
|
||||
it("should return true for configured system admin", () => {
|
||||
expect(guard.isSystemAdmin("admin-uuid-1")).toBe(true);
|
||||
expect(guard.isSystemAdmin("admin-uuid-2")).toBe(true);
|
||||
});
|
||||
|
||||
it("should return false for non-admin user", () => {
|
||||
expect(guard.isSystemAdmin("regular-user-id")).toBe(false);
|
||||
});
|
||||
|
||||
it("should return false for empty string", () => {
|
||||
expect(guard.isSystemAdmin("")).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("canActivate", () => {
|
||||
let guard: AdminGuard;
|
||||
|
||||
beforeEach(() => {
|
||||
process.env.SYSTEM_ADMIN_IDS = "admin-uuid-1,admin-uuid-2";
|
||||
guard = new AdminGuard();
|
||||
});
|
||||
|
||||
it("should return true for system admin user", () => {
|
||||
const context = createMockExecutionContext({ id: "admin-uuid-1" });
|
||||
|
||||
const result = guard.canActivate(context);
|
||||
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it("should throw ForbiddenException for non-admin user", () => {
|
||||
const context = createMockExecutionContext({ id: "regular-user-id" });
|
||||
|
||||
expect(() => guard.canActivate(context)).toThrow(ForbiddenException);
|
||||
expect(() => guard.canActivate(context)).toThrow(
|
||||
"This operation requires system administrator privileges"
|
||||
);
|
||||
});
|
||||
|
||||
it("should throw ForbiddenException when user is not authenticated", () => {
|
||||
const context = createMockExecutionContext(undefined);
|
||||
|
||||
expect(() => guard.canActivate(context)).toThrow(ForbiddenException);
|
||||
expect(() => guard.canActivate(context)).toThrow("User not authenticated");
|
||||
});
|
||||
|
||||
it("should NOT grant admin access based on workspace ownership", () => {
|
||||
// This test verifies that workspace ownership alone does not grant admin access
|
||||
// The user must be explicitly listed in SYSTEM_ADMIN_IDS
|
||||
const workspaceOwnerButNotSystemAdmin = { id: "workspace-owner-id" };
|
||||
const context = createMockExecutionContext(workspaceOwnerButNotSystemAdmin);
|
||||
|
||||
expect(() => guard.canActivate(context)).toThrow(ForbiddenException);
|
||||
expect(() => guard.canActivate(context)).toThrow(
|
||||
"This operation requires system administrator privileges"
|
||||
);
|
||||
});
|
||||
|
||||
it("should deny access when no system admins are configured", () => {
|
||||
process.env.SYSTEM_ADMIN_IDS = "";
|
||||
const guardWithNoAdmins = new AdminGuard();
|
||||
|
||||
const context = createMockExecutionContext({ id: "any-user-id" });
|
||||
|
||||
expect(() => guardWithNoAdmins.canActivate(context)).toThrow(ForbiddenException);
|
||||
});
|
||||
});
|
||||
|
||||
describe("security: workspace ownership vs system admin", () => {
|
||||
it("should require explicit system admin configuration, not implicit workspace ownership", () => {
|
||||
// Setup: user is NOT in SYSTEM_ADMIN_IDS
|
||||
process.env.SYSTEM_ADMIN_IDS = "different-admin-id";
|
||||
const guard = new AdminGuard();
|
||||
|
||||
// Even if this user owns workspaces, they should NOT have system admin access
|
||||
// because they are not in SYSTEM_ADMIN_IDS
|
||||
const context = createMockExecutionContext({ id: "workspace-owner-user-id" });
|
||||
|
||||
expect(() => guard.canActivate(context)).toThrow(ForbiddenException);
|
||||
});
|
||||
|
||||
it("should grant access only to users explicitly listed as system admins", () => {
|
||||
const adminUserId = "explicitly-configured-admin";
|
||||
process.env.SYSTEM_ADMIN_IDS = adminUserId;
|
||||
const guard = new AdminGuard();
|
||||
|
||||
const context = createMockExecutionContext({ id: adminUserId });
|
||||
|
||||
expect(guard.canActivate(context)).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -2,8 +2,14 @@
|
||||
* Admin Guard
|
||||
*
|
||||
* Restricts access to system-level admin operations.
|
||||
* Currently checks if user owns at least one workspace (indicating admin status).
|
||||
* Future: Replace with proper role-based access control (RBAC).
|
||||
* System administrators are configured via the SYSTEM_ADMIN_IDS environment variable.
|
||||
*
|
||||
* Configuration:
|
||||
* SYSTEM_ADMIN_IDS=uuid1,uuid2,uuid3 (comma-separated list of user IDs)
|
||||
*
|
||||
* Note: Workspace ownership does NOT grant system admin access. These are separate concepts:
|
||||
* - Workspace owner: Can manage their workspace and its members
|
||||
* - System admin: Can perform system-level operations across all workspaces
|
||||
*/
|
||||
|
||||
import {
|
||||
@@ -13,16 +19,42 @@ import {
|
||||
ForbiddenException,
|
||||
Logger,
|
||||
} from "@nestjs/common";
|
||||
import { PrismaService } from "../../prisma/prisma.service";
|
||||
import type { AuthenticatedRequest } from "../../common/types/user.types";
|
||||
|
||||
@Injectable()
|
||||
export class AdminGuard implements CanActivate {
|
||||
private readonly logger = new Logger(AdminGuard.name);
|
||||
private readonly systemAdminIds: Set<string>;
|
||||
|
||||
constructor(private readonly prisma: PrismaService) {}
|
||||
constructor() {
|
||||
// Load system admin IDs from environment variable
|
||||
const adminIdsEnv = process.env.SYSTEM_ADMIN_IDS ?? "";
|
||||
this.systemAdminIds = new Set(
|
||||
adminIdsEnv
|
||||
.split(",")
|
||||
.map((id) => id.trim())
|
||||
.filter((id) => id.length > 0)
|
||||
);
|
||||
|
||||
async canActivate(context: ExecutionContext): Promise<boolean> {
|
||||
if (this.systemAdminIds.size === 0) {
|
||||
this.logger.warn(
|
||||
"No system administrators configured. Set SYSTEM_ADMIN_IDS environment variable."
|
||||
);
|
||||
} else {
|
||||
this.logger.log(
|
||||
`System administrators configured: ${String(this.systemAdminIds.size)} user(s)`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a user ID is a system administrator
|
||||
*/
|
||||
isSystemAdmin(userId: string): boolean {
|
||||
return this.systemAdminIds.has(userId);
|
||||
}
|
||||
|
||||
canActivate(context: ExecutionContext): boolean {
|
||||
const request = context.switchToHttp().getRequest<AuthenticatedRequest>();
|
||||
const user = request.user;
|
||||
|
||||
@@ -30,13 +62,7 @@ export class AdminGuard implements CanActivate {
|
||||
throw new ForbiddenException("User not authenticated");
|
||||
}
|
||||
|
||||
// Check if user owns any workspace (admin indicator)
|
||||
// TODO: Replace with proper RBAC system admin role check
|
||||
const ownedWorkspaces = await this.prisma.workspace.count({
|
||||
where: { ownerId: user.id },
|
||||
});
|
||||
|
||||
if (ownedWorkspaces === 0) {
|
||||
if (!this.isSystemAdmin(user.id)) {
|
||||
this.logger.warn(`Non-admin user ${user.id} attempted admin operation`);
|
||||
throw new ForbiddenException("This operation requires system administrator privileges");
|
||||
}
|
||||
|
||||
@@ -1,37 +1,50 @@
|
||||
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||
import { Test, TestingModule } from "@nestjs/testing";
|
||||
import { ExecutionContext, UnauthorizedException } from "@nestjs/common";
|
||||
|
||||
// Mock better-auth modules before importing AuthGuard (which imports AuthService)
|
||||
vi.mock("better-auth/node", () => ({
|
||||
toNodeHandler: vi.fn().mockReturnValue(vi.fn()),
|
||||
}));
|
||||
|
||||
vi.mock("better-auth", () => ({
|
||||
betterAuth: vi.fn().mockReturnValue({
|
||||
handler: vi.fn(),
|
||||
api: { getSession: vi.fn() },
|
||||
}),
|
||||
}));
|
||||
|
||||
vi.mock("better-auth/adapters/prisma", () => ({
|
||||
prismaAdapter: vi.fn().mockReturnValue({}),
|
||||
}));
|
||||
|
||||
vi.mock("better-auth/plugins", () => ({
|
||||
genericOAuth: vi.fn().mockReturnValue({ id: "generic-oauth" }),
|
||||
}));
|
||||
|
||||
import { AuthGuard } from "./auth.guard";
|
||||
import { AuthService } from "../auth.service";
|
||||
import type { AuthService } from "../auth.service";
|
||||
|
||||
describe("AuthGuard", () => {
|
||||
let guard: AuthGuard;
|
||||
let authService: AuthService;
|
||||
|
||||
const mockAuthService = {
|
||||
verifySession: vi.fn(),
|
||||
};
|
||||
|
||||
beforeEach(async () => {
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
providers: [
|
||||
AuthGuard,
|
||||
{
|
||||
provide: AuthService,
|
||||
useValue: mockAuthService,
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
guard = module.get<AuthGuard>(AuthGuard);
|
||||
authService = module.get<AuthService>(AuthService);
|
||||
beforeEach(() => {
|
||||
// Directly construct the guard with the mock to avoid NestJS DI issues
|
||||
guard = new AuthGuard(mockAuthService as unknown as AuthService);
|
||||
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
const createMockExecutionContext = (headers: any = {}): ExecutionContext => {
|
||||
const createMockExecutionContext = (
|
||||
headers: Record<string, string> = {},
|
||||
cookies: Record<string, string> = {}
|
||||
): ExecutionContext => {
|
||||
const mockRequest = {
|
||||
headers,
|
||||
cookies,
|
||||
};
|
||||
|
||||
return {
|
||||
@@ -42,57 +55,256 @@ describe("AuthGuard", () => {
|
||||
};
|
||||
|
||||
describe("canActivate", () => {
|
||||
it("should return true for valid session", async () => {
|
||||
const mockSessionData = {
|
||||
user: {
|
||||
id: "user-123",
|
||||
email: "test@example.com",
|
||||
name: "Test User",
|
||||
},
|
||||
session: {
|
||||
id: "session-123",
|
||||
},
|
||||
const mockSessionData = {
|
||||
user: {
|
||||
id: "user-123",
|
||||
email: "test@example.com",
|
||||
name: "Test User",
|
||||
},
|
||||
session: {
|
||||
id: "session-123",
|
||||
token: "session-token",
|
||||
expiresAt: new Date(Date.now() + 86400000),
|
||||
},
|
||||
};
|
||||
|
||||
describe("Bearer token authentication", () => {
|
||||
it("should return true for valid Bearer token", async () => {
|
||||
mockAuthService.verifySession.mockResolvedValue(mockSessionData);
|
||||
|
||||
const context = createMockExecutionContext({
|
||||
authorization: "Bearer valid-token",
|
||||
});
|
||||
|
||||
const result = await guard.canActivate(context);
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(mockAuthService.verifySession).toHaveBeenCalledWith("valid-token");
|
||||
});
|
||||
|
||||
it("should throw UnauthorizedException for invalid Bearer token", async () => {
|
||||
mockAuthService.verifySession.mockResolvedValue(null);
|
||||
|
||||
const context = createMockExecutionContext({
|
||||
authorization: "Bearer invalid-token",
|
||||
});
|
||||
|
||||
await expect(guard.canActivate(context)).rejects.toThrow(UnauthorizedException);
|
||||
await expect(guard.canActivate(context)).rejects.toThrow("Invalid or expired session");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Cookie-based authentication", () => {
|
||||
it("should return true for valid session cookie", async () => {
|
||||
mockAuthService.verifySession.mockResolvedValue(mockSessionData);
|
||||
|
||||
const context = createMockExecutionContext(
|
||||
{},
|
||||
{
|
||||
"better-auth.session_token": "cookie-token",
|
||||
}
|
||||
);
|
||||
|
||||
const result = await guard.canActivate(context);
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(mockAuthService.verifySession).toHaveBeenCalledWith("cookie-token");
|
||||
});
|
||||
|
||||
it("should prefer cookie over Bearer token when both present", async () => {
|
||||
mockAuthService.verifySession.mockResolvedValue(mockSessionData);
|
||||
|
||||
const context = createMockExecutionContext(
|
||||
{
|
||||
authorization: "Bearer bearer-token",
|
||||
},
|
||||
{
|
||||
"better-auth.session_token": "cookie-token",
|
||||
}
|
||||
);
|
||||
|
||||
const result = await guard.canActivate(context);
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(mockAuthService.verifySession).toHaveBeenCalledWith("cookie-token");
|
||||
});
|
||||
|
||||
it("should fallback to Bearer token if no cookie", async () => {
|
||||
mockAuthService.verifySession.mockResolvedValue(mockSessionData);
|
||||
|
||||
const context = createMockExecutionContext(
|
||||
{
|
||||
authorization: "Bearer bearer-token",
|
||||
},
|
||||
{}
|
||||
);
|
||||
|
||||
const result = await guard.canActivate(context);
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(mockAuthService.verifySession).toHaveBeenCalledWith("bearer-token");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Error handling", () => {
|
||||
it("should throw UnauthorizedException if no token provided", async () => {
|
||||
const context = createMockExecutionContext({}, {});
|
||||
|
||||
await expect(guard.canActivate(context)).rejects.toThrow(UnauthorizedException);
|
||||
await expect(guard.canActivate(context)).rejects.toThrow(
|
||||
"No authentication token provided"
|
||||
);
|
||||
});
|
||||
|
||||
it("should propagate non-auth errors as-is (not wrap as 401)", async () => {
|
||||
const infraError = new Error("connect ECONNREFUSED 127.0.0.1:5432");
|
||||
mockAuthService.verifySession.mockRejectedValue(infraError);
|
||||
|
||||
const context = createMockExecutionContext({
|
||||
authorization: "Bearer error-token",
|
||||
});
|
||||
|
||||
await expect(guard.canActivate(context)).rejects.toThrow(infraError);
|
||||
await expect(guard.canActivate(context)).rejects.not.toBeInstanceOf(UnauthorizedException);
|
||||
});
|
||||
|
||||
it("should propagate database errors so GlobalExceptionFilter returns 500", async () => {
|
||||
const dbError = new Error("PrismaClientKnownRequestError: Connection refused");
|
||||
mockAuthService.verifySession.mockRejectedValue(dbError);
|
||||
|
||||
const context = createMockExecutionContext({
|
||||
authorization: "Bearer valid-token",
|
||||
});
|
||||
|
||||
await expect(guard.canActivate(context)).rejects.toThrow(dbError);
|
||||
await expect(guard.canActivate(context)).rejects.not.toBeInstanceOf(UnauthorizedException);
|
||||
});
|
||||
|
||||
it("should propagate timeout errors so GlobalExceptionFilter returns 503", async () => {
|
||||
const timeoutError = new Error("Connection timeout after 5000ms");
|
||||
mockAuthService.verifySession.mockRejectedValue(timeoutError);
|
||||
|
||||
const context = createMockExecutionContext({
|
||||
authorization: "Bearer valid-token",
|
||||
});
|
||||
|
||||
await expect(guard.canActivate(context)).rejects.toThrow(timeoutError);
|
||||
await expect(guard.canActivate(context)).rejects.not.toBeInstanceOf(UnauthorizedException);
|
||||
});
|
||||
});
|
||||
|
||||
describe("user data validation", () => {
|
||||
const mockSession = {
|
||||
id: "session-123",
|
||||
token: "session-token",
|
||||
expiresAt: new Date(Date.now() + 86400000),
|
||||
};
|
||||
|
||||
mockAuthService.verifySession.mockResolvedValue(mockSessionData);
|
||||
it("should throw UnauthorizedException when user is missing id", async () => {
|
||||
mockAuthService.verifySession.mockResolvedValue({
|
||||
user: { email: "a@b.com", name: "Test" },
|
||||
session: mockSession,
|
||||
});
|
||||
|
||||
const context = createMockExecutionContext({
|
||||
authorization: "Bearer valid-token",
|
||||
const context = createMockExecutionContext({
|
||||
authorization: "Bearer valid-token",
|
||||
});
|
||||
|
||||
await expect(guard.canActivate(context)).rejects.toThrow(UnauthorizedException);
|
||||
await expect(guard.canActivate(context)).rejects.toThrow(
|
||||
"Invalid user data in session"
|
||||
);
|
||||
});
|
||||
|
||||
const result = await guard.canActivate(context);
|
||||
it("should throw UnauthorizedException when user is missing email", async () => {
|
||||
mockAuthService.verifySession.mockResolvedValue({
|
||||
user: { id: "1", name: "Test" },
|
||||
session: mockSession,
|
||||
});
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(mockAuthService.verifySession).toHaveBeenCalledWith("valid-token");
|
||||
});
|
||||
const context = createMockExecutionContext({
|
||||
authorization: "Bearer valid-token",
|
||||
});
|
||||
|
||||
it("should throw UnauthorizedException if no token provided", async () => {
|
||||
const context = createMockExecutionContext({});
|
||||
|
||||
await expect(guard.canActivate(context)).rejects.toThrow(UnauthorizedException);
|
||||
await expect(guard.canActivate(context)).rejects.toThrow("No authentication token provided");
|
||||
});
|
||||
|
||||
it("should throw UnauthorizedException if session is invalid", async () => {
|
||||
mockAuthService.verifySession.mockResolvedValue(null);
|
||||
|
||||
const context = createMockExecutionContext({
|
||||
authorization: "Bearer invalid-token",
|
||||
await expect(guard.canActivate(context)).rejects.toThrow(UnauthorizedException);
|
||||
await expect(guard.canActivate(context)).rejects.toThrow(
|
||||
"Invalid user data in session"
|
||||
);
|
||||
});
|
||||
|
||||
await expect(guard.canActivate(context)).rejects.toThrow(UnauthorizedException);
|
||||
await expect(guard.canActivate(context)).rejects.toThrow("Invalid or expired session");
|
||||
});
|
||||
it("should throw UnauthorizedException when user is missing name", async () => {
|
||||
mockAuthService.verifySession.mockResolvedValue({
|
||||
user: { id: "1", email: "a@b.com" },
|
||||
session: mockSession,
|
||||
});
|
||||
|
||||
it("should throw UnauthorizedException if session verification fails", async () => {
|
||||
mockAuthService.verifySession.mockRejectedValue(new Error("Verification failed"));
|
||||
const context = createMockExecutionContext({
|
||||
authorization: "Bearer valid-token",
|
||||
});
|
||||
|
||||
const context = createMockExecutionContext({
|
||||
authorization: "Bearer error-token",
|
||||
await expect(guard.canActivate(context)).rejects.toThrow(UnauthorizedException);
|
||||
await expect(guard.canActivate(context)).rejects.toThrow(
|
||||
"Invalid user data in session"
|
||||
);
|
||||
});
|
||||
|
||||
await expect(guard.canActivate(context)).rejects.toThrow(UnauthorizedException);
|
||||
await expect(guard.canActivate(context)).rejects.toThrow("Authentication failed");
|
||||
it("should throw UnauthorizedException when user is a string", async () => {
|
||||
mockAuthService.verifySession.mockResolvedValue({
|
||||
user: "not-an-object",
|
||||
session: mockSession,
|
||||
});
|
||||
|
||||
const context = createMockExecutionContext({
|
||||
authorization: "Bearer valid-token",
|
||||
});
|
||||
|
||||
await expect(guard.canActivate(context)).rejects.toThrow(UnauthorizedException);
|
||||
await expect(guard.canActivate(context)).rejects.toThrow(
|
||||
"Invalid user data in session"
|
||||
);
|
||||
});
|
||||
|
||||
it("should reject when user is null (typeof null === 'object' causes TypeError on 'in' operator)", async () => {
|
||||
// Note: typeof null === "object" in JS, so the guard's typeof check passes
|
||||
// but "id" in null throws TypeError. The catch block propagates non-auth errors as-is.
|
||||
mockAuthService.verifySession.mockResolvedValue({
|
||||
user: null,
|
||||
session: mockSession,
|
||||
});
|
||||
|
||||
const context = createMockExecutionContext({
|
||||
authorization: "Bearer valid-token",
|
||||
});
|
||||
|
||||
await expect(guard.canActivate(context)).rejects.toThrow(TypeError);
|
||||
await expect(guard.canActivate(context)).rejects.not.toBeInstanceOf(
|
||||
UnauthorizedException
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("request attachment", () => {
|
||||
it("should attach user and session to request on success", async () => {
|
||||
mockAuthService.verifySession.mockResolvedValue(mockSessionData);
|
||||
|
||||
const mockRequest = {
|
||||
headers: {
|
||||
authorization: "Bearer valid-token",
|
||||
},
|
||||
cookies: {},
|
||||
};
|
||||
|
||||
const context = {
|
||||
switchToHttp: () => ({
|
||||
getRequest: () => mockRequest,
|
||||
}),
|
||||
} as ExecutionContext;
|
||||
|
||||
await guard.canActivate(context);
|
||||
|
||||
expect(mockRequest).toHaveProperty("user", mockSessionData.user);
|
||||
expect(mockRequest).toHaveProperty("session", mockSessionData.session);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,14 +1,25 @@
|
||||
import { Injectable, CanActivate, ExecutionContext, UnauthorizedException } from "@nestjs/common";
|
||||
import {
|
||||
Injectable,
|
||||
CanActivate,
|
||||
ExecutionContext,
|
||||
UnauthorizedException,
|
||||
Logger,
|
||||
} from "@nestjs/common";
|
||||
import { AuthService } from "../auth.service";
|
||||
import type { AuthenticatedRequest } from "../../common/types/user.types";
|
||||
import type { AuthUser } from "@mosaic/shared";
|
||||
import type { MaybeAuthenticatedRequest } from "../types/better-auth-request.interface";
|
||||
|
||||
@Injectable()
|
||||
export class AuthGuard implements CanActivate {
|
||||
private readonly logger = new Logger(AuthGuard.name);
|
||||
|
||||
constructor(private readonly authService: AuthService) {}
|
||||
|
||||
async canActivate(context: ExecutionContext): Promise<boolean> {
|
||||
const request = context.switchToHttp().getRequest<AuthenticatedRequest>();
|
||||
const token = this.extractTokenFromHeader(request);
|
||||
const request = context.switchToHttp().getRequest<MaybeAuthenticatedRequest>();
|
||||
|
||||
// Try to get token from either cookie (preferred) or Authorization header
|
||||
const token = this.extractToken(request);
|
||||
|
||||
if (!token) {
|
||||
throw new UnauthorizedException("No authentication token provided");
|
||||
@@ -21,25 +32,74 @@ export class AuthGuard implements CanActivate {
|
||||
throw new UnauthorizedException("Invalid or expired session");
|
||||
}
|
||||
|
||||
// Attach user to request (with type assertion for session data structure)
|
||||
const user = sessionData.user as unknown as AuthenticatedRequest["user"];
|
||||
if (!user) {
|
||||
// Attach user and session to request
|
||||
const user = sessionData.user;
|
||||
// Validate user has required fields
|
||||
if (typeof user !== "object" || !("id" in user) || !("email" in user) || !("name" in user)) {
|
||||
throw new UnauthorizedException("Invalid user data in session");
|
||||
}
|
||||
request.user = user;
|
||||
request.user = user as unknown as AuthUser;
|
||||
request.session = sessionData.session;
|
||||
|
||||
return true;
|
||||
} catch (error) {
|
||||
// Re-throw if it's already an UnauthorizedException
|
||||
if (error instanceof UnauthorizedException) {
|
||||
throw error;
|
||||
}
|
||||
throw new UnauthorizedException("Authentication failed");
|
||||
// Infrastructure errors (DB down, connection refused, timeouts) must propagate
|
||||
// as 500/503 via GlobalExceptionFilter — never mask as 401
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
private extractTokenFromHeader(request: AuthenticatedRequest): string | undefined {
|
||||
/**
|
||||
* Extract token from cookie (preferred) or Authorization header
|
||||
*/
|
||||
private extractToken(request: MaybeAuthenticatedRequest): string | undefined {
|
||||
// Try cookie first (BetterAuth default)
|
||||
const cookieToken = this.extractTokenFromCookie(request);
|
||||
if (cookieToken) {
|
||||
return cookieToken;
|
||||
}
|
||||
|
||||
// Fallback to Authorization header for API clients
|
||||
return this.extractTokenFromHeader(request);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract token from cookie.
|
||||
* BetterAuth may prefix the cookie name with "__Secure-" when running on HTTPS.
|
||||
*/
|
||||
private extractTokenFromCookie(request: MaybeAuthenticatedRequest): string | undefined {
|
||||
// Express types `cookies` as `any`; cast to a known shape for type safety.
|
||||
const cookies = request.cookies as Record<string, string> | undefined;
|
||||
if (!cookies) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
// BetterAuth default cookie name is "better-auth.session_token"
|
||||
// When Secure cookies are enabled, BetterAuth prefixes with "__Secure-".
|
||||
const candidates = [
|
||||
"__Secure-better-auth.session_token",
|
||||
"better-auth.session_token",
|
||||
"__Host-better-auth.session_token",
|
||||
] as const;
|
||||
|
||||
for (const name of candidates) {
|
||||
const token = cookies[name];
|
||||
if (token) {
|
||||
this.logger.debug(`Session cookie found: ${name}`);
|
||||
return token;
|
||||
}
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract token from Authorization header (Bearer token)
|
||||
*/
|
||||
private extractTokenFromHeader(request: MaybeAuthenticatedRequest): string | undefined {
|
||||
const authHeader = request.headers.authorization;
|
||||
if (typeof authHeader !== "string") {
|
||||
return undefined;
|
||||
|
||||
10
apps/api/src/auth/local/dto/local-login.dto.ts
Normal file
10
apps/api/src/auth/local/dto/local-login.dto.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
import { IsEmail, IsString, MinLength } from "class-validator";
|
||||
|
||||
export class LocalLoginDto {
|
||||
@IsEmail({}, { message: "email must be a valid email address" })
|
||||
email!: string;
|
||||
|
||||
@IsString({ message: "password must be a string" })
|
||||
@MinLength(1, { message: "password must not be empty" })
|
||||
password!: string;
|
||||
}
|
||||
20
apps/api/src/auth/local/dto/local-setup.dto.ts
Normal file
20
apps/api/src/auth/local/dto/local-setup.dto.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
import { IsEmail, IsString, MinLength, MaxLength } from "class-validator";
|
||||
|
||||
export class LocalSetupDto {
|
||||
@IsEmail({}, { message: "email must be a valid email address" })
|
||||
email!: string;
|
||||
|
||||
@IsString({ message: "name must be a string" })
|
||||
@MinLength(1, { message: "name must not be empty" })
|
||||
@MaxLength(255, { message: "name must not exceed 255 characters" })
|
||||
name!: string;
|
||||
|
||||
@IsString({ message: "password must be a string" })
|
||||
@MinLength(12, { message: "password must be at least 12 characters" })
|
||||
@MaxLength(128, { message: "password must not exceed 128 characters" })
|
||||
password!: string;
|
||||
|
||||
@IsString({ message: "setupToken must be a string" })
|
||||
@MinLength(1, { message: "setupToken must not be empty" })
|
||||
setupToken!: string;
|
||||
}
|
||||
232
apps/api/src/auth/local/local-auth.controller.spec.ts
Normal file
232
apps/api/src/auth/local/local-auth.controller.spec.ts
Normal file
@@ -0,0 +1,232 @@
|
||||
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||
import { Test, TestingModule } from "@nestjs/testing";
|
||||
import {
|
||||
NotFoundException,
|
||||
ForbiddenException,
|
||||
UnauthorizedException,
|
||||
ConflictException,
|
||||
} from "@nestjs/common";
|
||||
import { LocalAuthController } from "./local-auth.controller";
|
||||
import { LocalAuthService } from "./local-auth.service";
|
||||
import { LocalAuthEnabledGuard } from "./local-auth.guard";
|
||||
|
||||
describe("LocalAuthController", () => {
|
||||
let controller: LocalAuthController;
|
||||
let localAuthService: LocalAuthService;
|
||||
|
||||
const mockLocalAuthService = {
|
||||
setup: vi.fn(),
|
||||
login: vi.fn(),
|
||||
};
|
||||
|
||||
const mockRequest = {
|
||||
headers: { "user-agent": "TestAgent/1.0" },
|
||||
ip: "127.0.0.1",
|
||||
socket: { remoteAddress: "127.0.0.1" },
|
||||
};
|
||||
|
||||
const originalEnv = {
|
||||
ENABLE_LOCAL_AUTH: process.env.ENABLE_LOCAL_AUTH,
|
||||
};
|
||||
|
||||
beforeEach(async () => {
|
||||
process.env.ENABLE_LOCAL_AUTH = "true";
|
||||
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
controllers: [LocalAuthController],
|
||||
providers: [
|
||||
{
|
||||
provide: LocalAuthService,
|
||||
useValue: mockLocalAuthService,
|
||||
},
|
||||
],
|
||||
})
|
||||
.overrideGuard(LocalAuthEnabledGuard)
|
||||
.useValue({ canActivate: () => true })
|
||||
.compile();
|
||||
|
||||
controller = module.get<LocalAuthController>(LocalAuthController);
|
||||
localAuthService = module.get<LocalAuthService>(LocalAuthService);
|
||||
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
if (originalEnv.ENABLE_LOCAL_AUTH !== undefined) {
|
||||
process.env.ENABLE_LOCAL_AUTH = originalEnv.ENABLE_LOCAL_AUTH;
|
||||
} else {
|
||||
delete process.env.ENABLE_LOCAL_AUTH;
|
||||
}
|
||||
});
|
||||
|
||||
describe("setup", () => {
|
||||
const setupDto = {
|
||||
email: "admin@example.com",
|
||||
name: "Break Glass Admin",
|
||||
password: "securePassword123!",
|
||||
setupToken: "valid-token-123",
|
||||
};
|
||||
|
||||
const mockSetupResult = {
|
||||
user: {
|
||||
id: "user-uuid-123",
|
||||
email: "admin@example.com",
|
||||
name: "Break Glass Admin",
|
||||
isLocalAuth: true,
|
||||
createdAt: new Date("2026-02-28T00:00:00Z"),
|
||||
},
|
||||
session: {
|
||||
token: "session-token-abc",
|
||||
expiresAt: new Date("2026-03-07T00:00:00Z"),
|
||||
},
|
||||
};
|
||||
|
||||
it("should create a break-glass user and return user data with session", async () => {
|
||||
mockLocalAuthService.setup.mockResolvedValue(mockSetupResult);
|
||||
|
||||
const result = await controller.setup(setupDto, mockRequest as never);
|
||||
|
||||
expect(result).toEqual({
|
||||
user: mockSetupResult.user,
|
||||
session: mockSetupResult.session,
|
||||
});
|
||||
expect(mockLocalAuthService.setup).toHaveBeenCalledWith(
|
||||
"admin@example.com",
|
||||
"Break Glass Admin",
|
||||
"securePassword123!",
|
||||
"valid-token-123",
|
||||
"127.0.0.1",
|
||||
"TestAgent/1.0"
|
||||
);
|
||||
});
|
||||
|
||||
it("should extract client IP from x-forwarded-for header", async () => {
|
||||
mockLocalAuthService.setup.mockResolvedValue(mockSetupResult);
|
||||
const reqWithProxy = {
|
||||
...mockRequest,
|
||||
headers: {
|
||||
...mockRequest.headers,
|
||||
"x-forwarded-for": "203.0.113.50, 70.41.3.18",
|
||||
},
|
||||
};
|
||||
|
||||
await controller.setup(setupDto, reqWithProxy as never);
|
||||
|
||||
expect(mockLocalAuthService.setup).toHaveBeenCalledWith(
|
||||
expect.any(String) as string,
|
||||
expect.any(String) as string,
|
||||
expect.any(String) as string,
|
||||
expect.any(String) as string,
|
||||
"203.0.113.50",
|
||||
"TestAgent/1.0"
|
||||
);
|
||||
});
|
||||
|
||||
it("should propagate ForbiddenException from service", async () => {
|
||||
mockLocalAuthService.setup.mockRejectedValue(new ForbiddenException("Invalid setup token"));
|
||||
|
||||
await expect(controller.setup(setupDto, mockRequest as never)).rejects.toThrow(
|
||||
ForbiddenException
|
||||
);
|
||||
});
|
||||
|
||||
it("should propagate ConflictException from service", async () => {
|
||||
mockLocalAuthService.setup.mockRejectedValue(
|
||||
new ConflictException("A user with this email already exists")
|
||||
);
|
||||
|
||||
await expect(controller.setup(setupDto, mockRequest as never)).rejects.toThrow(
|
||||
ConflictException
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("login", () => {
|
||||
const loginDto = {
|
||||
email: "admin@example.com",
|
||||
password: "securePassword123!",
|
||||
};
|
||||
|
||||
const mockLoginResult = {
|
||||
user: {
|
||||
id: "user-uuid-123",
|
||||
email: "admin@example.com",
|
||||
name: "Break Glass Admin",
|
||||
},
|
||||
session: {
|
||||
token: "session-token-abc",
|
||||
expiresAt: new Date("2026-03-07T00:00:00Z"),
|
||||
},
|
||||
};
|
||||
|
||||
it("should authenticate and return user data with session", async () => {
|
||||
mockLocalAuthService.login.mockResolvedValue(mockLoginResult);
|
||||
|
||||
const result = await controller.login(loginDto, mockRequest as never);
|
||||
|
||||
expect(result).toEqual({
|
||||
user: mockLoginResult.user,
|
||||
session: mockLoginResult.session,
|
||||
});
|
||||
expect(mockLocalAuthService.login).toHaveBeenCalledWith(
|
||||
"admin@example.com",
|
||||
"securePassword123!",
|
||||
"127.0.0.1",
|
||||
"TestAgent/1.0"
|
||||
);
|
||||
});
|
||||
|
||||
it("should propagate UnauthorizedException from service", async () => {
|
||||
mockLocalAuthService.login.mockRejectedValue(
|
||||
new UnauthorizedException("Invalid email or password")
|
||||
);
|
||||
|
||||
await expect(controller.login(loginDto, mockRequest as never)).rejects.toThrow(
|
||||
UnauthorizedException
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("LocalAuthEnabledGuard", () => {
|
||||
let guard: LocalAuthEnabledGuard;
|
||||
|
||||
const originalEnv = process.env.ENABLE_LOCAL_AUTH;
|
||||
|
||||
beforeEach(() => {
|
||||
guard = new LocalAuthEnabledGuard();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
if (originalEnv !== undefined) {
|
||||
process.env.ENABLE_LOCAL_AUTH = originalEnv;
|
||||
} else {
|
||||
delete process.env.ENABLE_LOCAL_AUTH;
|
||||
}
|
||||
});
|
||||
|
||||
it("should allow access when ENABLE_LOCAL_AUTH is true", () => {
|
||||
process.env.ENABLE_LOCAL_AUTH = "true";
|
||||
|
||||
expect(guard.canActivate()).toBe(true);
|
||||
});
|
||||
|
||||
it("should throw NotFoundException when ENABLE_LOCAL_AUTH is not set", () => {
|
||||
delete process.env.ENABLE_LOCAL_AUTH;
|
||||
|
||||
expect(() => guard.canActivate()).toThrow(NotFoundException);
|
||||
});
|
||||
|
||||
it("should throw NotFoundException when ENABLE_LOCAL_AUTH is false", () => {
|
||||
process.env.ENABLE_LOCAL_AUTH = "false";
|
||||
|
||||
expect(() => guard.canActivate()).toThrow(NotFoundException);
|
||||
});
|
||||
|
||||
it("should throw NotFoundException when ENABLE_LOCAL_AUTH is empty", () => {
|
||||
process.env.ENABLE_LOCAL_AUTH = "";
|
||||
|
||||
expect(() => guard.canActivate()).toThrow(NotFoundException);
|
||||
});
|
||||
});
|
||||
81
apps/api/src/auth/local/local-auth.controller.ts
Normal file
81
apps/api/src/auth/local/local-auth.controller.ts
Normal file
@@ -0,0 +1,81 @@
|
||||
import {
|
||||
Controller,
|
||||
Post,
|
||||
Body,
|
||||
UseGuards,
|
||||
Req,
|
||||
Logger,
|
||||
HttpCode,
|
||||
HttpStatus,
|
||||
} from "@nestjs/common";
|
||||
import { Throttle } from "@nestjs/throttler";
|
||||
import type { Request as ExpressRequest } from "express";
|
||||
import { SkipCsrf } from "../../common/decorators/skip-csrf.decorator";
|
||||
import { LocalAuthService } from "./local-auth.service";
|
||||
import { LocalAuthEnabledGuard } from "./local-auth.guard";
|
||||
import { LocalLoginDto } from "./dto/local-login.dto";
|
||||
import { LocalSetupDto } from "./dto/local-setup.dto";
|
||||
|
||||
@Controller("auth/local")
|
||||
@UseGuards(LocalAuthEnabledGuard)
|
||||
export class LocalAuthController {
|
||||
private readonly logger = new Logger(LocalAuthController.name);
|
||||
|
||||
constructor(private readonly localAuthService: LocalAuthService) {}
|
||||
|
||||
/**
|
||||
* First-time break-glass user creation.
|
||||
* Requires BREAKGLASS_SETUP_TOKEN from environment.
|
||||
*/
|
||||
@Post("setup")
|
||||
@SkipCsrf()
|
||||
@Throttle({ strict: { limit: 5, ttl: 60000 } })
|
||||
async setup(@Body() dto: LocalSetupDto, @Req() req: ExpressRequest) {
|
||||
const ipAddress = this.getClientIp(req);
|
||||
const userAgent = req.headers["user-agent"];
|
||||
|
||||
this.logger.log(`Break-glass setup attempt from ${ipAddress}`);
|
||||
|
||||
const result = await this.localAuthService.setup(
|
||||
dto.email,
|
||||
dto.name,
|
||||
dto.password,
|
||||
dto.setupToken,
|
||||
ipAddress,
|
||||
userAgent
|
||||
);
|
||||
|
||||
return {
|
||||
user: result.user,
|
||||
session: result.session,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Break-glass login with email + password.
|
||||
*/
|
||||
@Post("login")
|
||||
@SkipCsrf()
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@Throttle({ strict: { limit: 10, ttl: 60000 } })
|
||||
async login(@Body() dto: LocalLoginDto, @Req() req: ExpressRequest) {
|
||||
const ipAddress = this.getClientIp(req);
|
||||
const userAgent = req.headers["user-agent"];
|
||||
|
||||
const result = await this.localAuthService.login(dto.email, dto.password, ipAddress, userAgent);
|
||||
|
||||
return {
|
||||
user: result.user,
|
||||
session: result.session,
|
||||
};
|
||||
}
|
||||
|
||||
private getClientIp(req: ExpressRequest): string {
|
||||
const forwardedFor = req.headers["x-forwarded-for"];
|
||||
if (forwardedFor) {
|
||||
const ips = Array.isArray(forwardedFor) ? forwardedFor[0] : forwardedFor;
|
||||
return ips?.split(",")[0]?.trim() ?? "unknown";
|
||||
}
|
||||
return req.ip ?? req.socket.remoteAddress ?? "unknown";
|
||||
}
|
||||
}
|
||||
15
apps/api/src/auth/local/local-auth.guard.ts
Normal file
15
apps/api/src/auth/local/local-auth.guard.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
import { Injectable, CanActivate, NotFoundException } from "@nestjs/common";
|
||||
|
||||
/**
|
||||
* Guard that checks if local authentication is enabled via ENABLE_LOCAL_AUTH env var.
|
||||
* Returns 404 when disabled so endpoints are invisible to callers.
|
||||
*/
|
||||
@Injectable()
|
||||
export class LocalAuthEnabledGuard implements CanActivate {
|
||||
canActivate(): boolean {
|
||||
if (process.env.ENABLE_LOCAL_AUTH !== "true") {
|
||||
throw new NotFoundException();
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
389
apps/api/src/auth/local/local-auth.service.spec.ts
Normal file
389
apps/api/src/auth/local/local-auth.service.spec.ts
Normal file
@@ -0,0 +1,389 @@
|
||||
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||
import { Test, TestingModule } from "@nestjs/testing";
|
||||
import {
|
||||
ConflictException,
|
||||
ForbiddenException,
|
||||
InternalServerErrorException,
|
||||
UnauthorizedException,
|
||||
} from "@nestjs/common";
|
||||
import { hash } from "bcryptjs";
|
||||
import { LocalAuthService } from "./local-auth.service";
|
||||
import { PrismaService } from "../../prisma/prisma.service";
|
||||
|
||||
describe("LocalAuthService", () => {
|
||||
let service: LocalAuthService;
|
||||
|
||||
const mockTxSession = {
|
||||
create: vi.fn(),
|
||||
};
|
||||
|
||||
const mockTxWorkspace = {
|
||||
findFirst: vi.fn(),
|
||||
create: vi.fn(),
|
||||
};
|
||||
|
||||
const mockTxWorkspaceMember = {
|
||||
create: vi.fn(),
|
||||
};
|
||||
|
||||
const mockTxUser = {
|
||||
create: vi.fn(),
|
||||
findUnique: vi.fn(),
|
||||
};
|
||||
|
||||
const mockTx = {
|
||||
user: mockTxUser,
|
||||
workspace: mockTxWorkspace,
|
||||
workspaceMember: mockTxWorkspaceMember,
|
||||
session: mockTxSession,
|
||||
};
|
||||
|
||||
const mockPrismaService = {
|
||||
user: {
|
||||
findUnique: vi.fn(),
|
||||
},
|
||||
session: {
|
||||
create: vi.fn(),
|
||||
},
|
||||
$transaction: vi
|
||||
.fn()
|
||||
.mockImplementation((fn: (tx: typeof mockTx) => Promise<unknown>) => fn(mockTx)),
|
||||
};
|
||||
|
||||
const originalEnv = {
|
||||
BREAKGLASS_SETUP_TOKEN: process.env.BREAKGLASS_SETUP_TOKEN,
|
||||
};
|
||||
|
||||
beforeEach(async () => {
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
providers: [
|
||||
LocalAuthService,
|
||||
{
|
||||
provide: PrismaService,
|
||||
useValue: mockPrismaService,
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
service = module.get<LocalAuthService>(LocalAuthService);
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
if (originalEnv.BREAKGLASS_SETUP_TOKEN !== undefined) {
|
||||
process.env.BREAKGLASS_SETUP_TOKEN = originalEnv.BREAKGLASS_SETUP_TOKEN;
|
||||
} else {
|
||||
delete process.env.BREAKGLASS_SETUP_TOKEN;
|
||||
}
|
||||
});
|
||||
|
||||
describe("setup", () => {
|
||||
const validSetupArgs = {
|
||||
email: "admin@example.com",
|
||||
name: "Break Glass Admin",
|
||||
password: "securePassword123!",
|
||||
setupToken: "valid-token-123",
|
||||
};
|
||||
|
||||
const mockCreatedUser = {
|
||||
id: "user-uuid-123",
|
||||
email: "admin@example.com",
|
||||
name: "Break Glass Admin",
|
||||
isLocalAuth: true,
|
||||
createdAt: new Date("2026-02-28T00:00:00Z"),
|
||||
};
|
||||
|
||||
const mockWorkspace = {
|
||||
id: "workspace-uuid-123",
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
process.env.BREAKGLASS_SETUP_TOKEN = "valid-token-123";
|
||||
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||
mockTxUser.create.mockResolvedValue(mockCreatedUser);
|
||||
mockTxWorkspace.findFirst.mockResolvedValue(mockWorkspace);
|
||||
mockTxWorkspaceMember.create.mockResolvedValue({});
|
||||
mockTxSession.create.mockResolvedValue({});
|
||||
});
|
||||
|
||||
it("should create a local auth user with hashed password", async () => {
|
||||
const result = await service.setup(
|
||||
validSetupArgs.email,
|
||||
validSetupArgs.name,
|
||||
validSetupArgs.password,
|
||||
validSetupArgs.setupToken
|
||||
);
|
||||
|
||||
expect(result.user).toEqual(mockCreatedUser);
|
||||
expect(result.session.token).toBeDefined();
|
||||
expect(result.session.token.length).toBeGreaterThan(0);
|
||||
expect(result.session.expiresAt).toBeInstanceOf(Date);
|
||||
expect(result.session.expiresAt.getTime()).toBeGreaterThan(Date.now());
|
||||
|
||||
expect(mockTxUser.create).toHaveBeenCalledWith({
|
||||
data: expect.objectContaining({
|
||||
email: "admin@example.com",
|
||||
name: "Break Glass Admin",
|
||||
isLocalAuth: true,
|
||||
emailVerified: true,
|
||||
passwordHash: expect.any(String) as string,
|
||||
}),
|
||||
select: {
|
||||
id: true,
|
||||
email: true,
|
||||
name: true,
|
||||
isLocalAuth: true,
|
||||
createdAt: true,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("should assign OWNER role on default workspace", async () => {
|
||||
await service.setup(
|
||||
validSetupArgs.email,
|
||||
validSetupArgs.name,
|
||||
validSetupArgs.password,
|
||||
validSetupArgs.setupToken
|
||||
);
|
||||
|
||||
expect(mockTxWorkspaceMember.create).toHaveBeenCalledWith({
|
||||
data: {
|
||||
workspaceId: "workspace-uuid-123",
|
||||
userId: "user-uuid-123",
|
||||
role: "OWNER",
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("should create a new workspace if none exists", async () => {
|
||||
mockTxWorkspace.findFirst.mockResolvedValue(null);
|
||||
mockTxWorkspace.create.mockResolvedValue({ id: "new-workspace-uuid" });
|
||||
|
||||
await service.setup(
|
||||
validSetupArgs.email,
|
||||
validSetupArgs.name,
|
||||
validSetupArgs.password,
|
||||
validSetupArgs.setupToken
|
||||
);
|
||||
|
||||
expect(mockTxWorkspace.create).toHaveBeenCalledWith({
|
||||
data: {
|
||||
name: "Default Workspace",
|
||||
ownerId: "user-uuid-123",
|
||||
settings: {},
|
||||
},
|
||||
select: { id: true },
|
||||
});
|
||||
expect(mockTxWorkspaceMember.create).toHaveBeenCalledWith({
|
||||
data: {
|
||||
workspaceId: "new-workspace-uuid",
|
||||
userId: "user-uuid-123",
|
||||
role: "OWNER",
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("should create a BetterAuth-compatible session", async () => {
|
||||
await service.setup(
|
||||
validSetupArgs.email,
|
||||
validSetupArgs.name,
|
||||
validSetupArgs.password,
|
||||
validSetupArgs.setupToken,
|
||||
"192.168.1.1",
|
||||
"TestAgent/1.0"
|
||||
);
|
||||
|
||||
expect(mockTxSession.create).toHaveBeenCalledWith({
|
||||
data: {
|
||||
userId: "user-uuid-123",
|
||||
token: expect.any(String) as string,
|
||||
expiresAt: expect.any(Date) as Date,
|
||||
ipAddress: "192.168.1.1",
|
||||
userAgent: "TestAgent/1.0",
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("should reject when BREAKGLASS_SETUP_TOKEN is not set", async () => {
|
||||
delete process.env.BREAKGLASS_SETUP_TOKEN;
|
||||
|
||||
await expect(
|
||||
service.setup(
|
||||
validSetupArgs.email,
|
||||
validSetupArgs.name,
|
||||
validSetupArgs.password,
|
||||
validSetupArgs.setupToken
|
||||
)
|
||||
).rejects.toThrow(ForbiddenException);
|
||||
});
|
||||
|
||||
it("should reject when BREAKGLASS_SETUP_TOKEN is empty", async () => {
|
||||
process.env.BREAKGLASS_SETUP_TOKEN = "";
|
||||
|
||||
await expect(
|
||||
service.setup(
|
||||
validSetupArgs.email,
|
||||
validSetupArgs.name,
|
||||
validSetupArgs.password,
|
||||
validSetupArgs.setupToken
|
||||
)
|
||||
).rejects.toThrow(ForbiddenException);
|
||||
});
|
||||
|
||||
it("should reject when setup token does not match", async () => {
|
||||
await expect(
|
||||
service.setup(
|
||||
validSetupArgs.email,
|
||||
validSetupArgs.name,
|
||||
validSetupArgs.password,
|
||||
"wrong-token"
|
||||
)
|
||||
).rejects.toThrow(ForbiddenException);
|
||||
});
|
||||
|
||||
it("should reject when email already exists", async () => {
|
||||
mockPrismaService.user.findUnique.mockResolvedValue({
|
||||
id: "existing-user",
|
||||
email: "admin@example.com",
|
||||
});
|
||||
|
||||
await expect(
|
||||
service.setup(
|
||||
validSetupArgs.email,
|
||||
validSetupArgs.name,
|
||||
validSetupArgs.password,
|
||||
validSetupArgs.setupToken
|
||||
)
|
||||
).rejects.toThrow(ConflictException);
|
||||
});
|
||||
|
||||
it("should return session token and expiry", async () => {
|
||||
const result = await service.setup(
|
||||
validSetupArgs.email,
|
||||
validSetupArgs.name,
|
||||
validSetupArgs.password,
|
||||
validSetupArgs.setupToken
|
||||
);
|
||||
|
||||
expect(typeof result.session.token).toBe("string");
|
||||
expect(result.session.token.length).toBe(64); // 32 bytes hex
|
||||
expect(result.session.expiresAt).toBeInstanceOf(Date);
|
||||
});
|
||||
});
|
||||
|
||||
describe("login", () => {
|
||||
const validPasswordHash = "$2a$12$LJ3m4ys3Lz/YgP7xYz5k5uU6b5F6X1234567890abcdefghijkl";
|
||||
|
||||
beforeEach(async () => {
|
||||
// Create a real bcrypt hash for testing
|
||||
const realHash = await hash("securePassword123!", 4); // Low rounds for test speed
|
||||
mockPrismaService.user.findUnique.mockResolvedValue({
|
||||
id: "user-uuid-123",
|
||||
email: "admin@example.com",
|
||||
name: "Break Glass Admin",
|
||||
isLocalAuth: true,
|
||||
passwordHash: realHash,
|
||||
deactivatedAt: null,
|
||||
});
|
||||
mockPrismaService.session.create.mockResolvedValue({});
|
||||
});
|
||||
|
||||
it("should authenticate a valid local auth user", async () => {
|
||||
const result = await service.login("admin@example.com", "securePassword123!");
|
||||
|
||||
expect(result.user).toEqual({
|
||||
id: "user-uuid-123",
|
||||
email: "admin@example.com",
|
||||
name: "Break Glass Admin",
|
||||
});
|
||||
expect(result.session.token).toBeDefined();
|
||||
expect(result.session.expiresAt).toBeInstanceOf(Date);
|
||||
});
|
||||
|
||||
it("should create a session with ip and user agent", async () => {
|
||||
await service.login("admin@example.com", "securePassword123!", "10.0.0.1", "Mozilla/5.0");
|
||||
|
||||
expect(mockPrismaService.session.create).toHaveBeenCalledWith({
|
||||
data: {
|
||||
userId: "user-uuid-123",
|
||||
token: expect.any(String) as string,
|
||||
expiresAt: expect.any(Date) as Date,
|
||||
ipAddress: "10.0.0.1",
|
||||
userAgent: "Mozilla/5.0",
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("should reject when user does not exist", async () => {
|
||||
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||
|
||||
await expect(service.login("nonexistent@example.com", "password123456")).rejects.toThrow(
|
||||
UnauthorizedException
|
||||
);
|
||||
});
|
||||
|
||||
it("should reject when user is not a local auth user", async () => {
|
||||
mockPrismaService.user.findUnique.mockResolvedValue({
|
||||
id: "user-uuid-123",
|
||||
email: "admin@example.com",
|
||||
name: "OIDC User",
|
||||
isLocalAuth: false,
|
||||
passwordHash: null,
|
||||
deactivatedAt: null,
|
||||
});
|
||||
|
||||
await expect(service.login("admin@example.com", "password123456")).rejects.toThrow(
|
||||
UnauthorizedException
|
||||
);
|
||||
});
|
||||
|
||||
it("should reject when user is deactivated", async () => {
|
||||
const realHash = await hash("securePassword123!", 4);
|
||||
mockPrismaService.user.findUnique.mockResolvedValue({
|
||||
id: "user-uuid-123",
|
||||
email: "admin@example.com",
|
||||
name: "Deactivated User",
|
||||
isLocalAuth: true,
|
||||
passwordHash: realHash,
|
||||
deactivatedAt: new Date("2026-01-01"),
|
||||
});
|
||||
|
||||
await expect(service.login("admin@example.com", "securePassword123!")).rejects.toThrow(
|
||||
new UnauthorizedException("Account has been deactivated")
|
||||
);
|
||||
});
|
||||
|
||||
it("should reject when password is incorrect", async () => {
|
||||
await expect(service.login("admin@example.com", "wrongPassword123!")).rejects.toThrow(
|
||||
UnauthorizedException
|
||||
);
|
||||
});
|
||||
|
||||
it("should throw InternalServerError when local auth user has no password hash", async () => {
|
||||
mockPrismaService.user.findUnique.mockResolvedValue({
|
||||
id: "user-uuid-123",
|
||||
email: "admin@example.com",
|
||||
name: "Broken User",
|
||||
isLocalAuth: true,
|
||||
passwordHash: null,
|
||||
deactivatedAt: null,
|
||||
});
|
||||
|
||||
await expect(service.login("admin@example.com", "securePassword123!")).rejects.toThrow(
|
||||
InternalServerErrorException
|
||||
);
|
||||
});
|
||||
|
||||
it("should not reveal whether email exists in error messages", async () => {
|
||||
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||
|
||||
try {
|
||||
await service.login("nonexistent@example.com", "password123456");
|
||||
} catch (error) {
|
||||
expect(error).toBeInstanceOf(UnauthorizedException);
|
||||
expect((error as UnauthorizedException).message).toBe("Invalid email or password");
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
230
apps/api/src/auth/local/local-auth.service.ts
Normal file
230
apps/api/src/auth/local/local-auth.service.ts
Normal file
@@ -0,0 +1,230 @@
|
||||
import {
|
||||
Injectable,
|
||||
Logger,
|
||||
ForbiddenException,
|
||||
UnauthorizedException,
|
||||
ConflictException,
|
||||
InternalServerErrorException,
|
||||
} from "@nestjs/common";
|
||||
import { WorkspaceMemberRole } from "@prisma/client";
|
||||
import { hash, compare } from "bcryptjs";
|
||||
import { randomBytes, timingSafeEqual } from "crypto";
|
||||
import { PrismaService } from "../../prisma/prisma.service";
|
||||
|
||||
const BCRYPT_ROUNDS = 12;
|
||||
|
||||
/** Session expiry: 7 days (matches BetterAuth config in auth.config.ts) */
|
||||
const SESSION_EXPIRY_MS = 7 * 24 * 60 * 60 * 1000;
|
||||
|
||||
interface SetupResult {
|
||||
user: {
|
||||
id: string;
|
||||
email: string;
|
||||
name: string;
|
||||
isLocalAuth: boolean;
|
||||
createdAt: Date;
|
||||
};
|
||||
session: {
|
||||
token: string;
|
||||
expiresAt: Date;
|
||||
};
|
||||
}
|
||||
|
||||
interface LoginResult {
|
||||
user: {
|
||||
id: string;
|
||||
email: string;
|
||||
name: string;
|
||||
};
|
||||
session: {
|
||||
token: string;
|
||||
expiresAt: Date;
|
||||
};
|
||||
}
|
||||
|
||||
@Injectable()
|
||||
export class LocalAuthService {
|
||||
private readonly logger = new Logger(LocalAuthService.name);
|
||||
|
||||
constructor(private readonly prisma: PrismaService) {}
|
||||
|
||||
/**
|
||||
* First-time break-glass user creation.
|
||||
* Validates the setup token, creates a local auth user with bcrypt-hashed password,
|
||||
* and assigns OWNER role on the default workspace.
|
||||
*/
|
||||
async setup(
|
||||
email: string,
|
||||
name: string,
|
||||
password: string,
|
||||
setupToken: string,
|
||||
ipAddress?: string,
|
||||
userAgent?: string
|
||||
): Promise<SetupResult> {
|
||||
this.validateSetupToken(setupToken);
|
||||
|
||||
const existing = await this.prisma.user.findUnique({ where: { email } });
|
||||
if (existing) {
|
||||
throw new ConflictException("A user with this email already exists");
|
||||
}
|
||||
|
||||
const passwordHash = await hash(password, BCRYPT_ROUNDS);
|
||||
|
||||
const result = await this.prisma.$transaction(async (tx) => {
|
||||
const user = await tx.user.create({
|
||||
data: {
|
||||
email,
|
||||
name,
|
||||
isLocalAuth: true,
|
||||
passwordHash,
|
||||
emailVerified: true,
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
email: true,
|
||||
name: true,
|
||||
isLocalAuth: true,
|
||||
createdAt: true,
|
||||
},
|
||||
});
|
||||
|
||||
// Find or create a default workspace and assign OWNER role
|
||||
await this.assignDefaultWorkspace(tx, user.id);
|
||||
|
||||
// Create a BetterAuth-compatible session
|
||||
const session = await this.createSession(tx, user.id, ipAddress, userAgent);
|
||||
|
||||
return { user, session };
|
||||
});
|
||||
|
||||
this.logger.log(`Break-glass user created: ${email}`);
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Break-glass login: verify email + password against bcrypt hash.
|
||||
* Only works for users with isLocalAuth=true.
|
||||
*/
|
||||
async login(
|
||||
email: string,
|
||||
password: string,
|
||||
ipAddress?: string,
|
||||
userAgent?: string
|
||||
): Promise<LoginResult> {
|
||||
const user = await this.prisma.user.findUnique({
|
||||
where: { email },
|
||||
select: {
|
||||
id: true,
|
||||
email: true,
|
||||
name: true,
|
||||
isLocalAuth: true,
|
||||
passwordHash: true,
|
||||
deactivatedAt: true,
|
||||
},
|
||||
});
|
||||
|
||||
if (!user?.isLocalAuth) {
|
||||
throw new UnauthorizedException("Invalid email or password");
|
||||
}
|
||||
|
||||
if (user.deactivatedAt) {
|
||||
throw new UnauthorizedException("Account has been deactivated");
|
||||
}
|
||||
|
||||
if (!user.passwordHash) {
|
||||
this.logger.error(`Local auth user ${email} has no password hash`);
|
||||
throw new InternalServerErrorException("Account configuration error");
|
||||
}
|
||||
|
||||
const passwordValid = await compare(password, user.passwordHash);
|
||||
if (!passwordValid) {
|
||||
throw new UnauthorizedException("Invalid email or password");
|
||||
}
|
||||
|
||||
const session = await this.createSession(this.prisma, user.id, ipAddress, userAgent);
|
||||
|
||||
this.logger.log(`Break-glass login: ${email}`);
|
||||
return {
|
||||
user: { id: user.id, email: user.email, name: user.name },
|
||||
session,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate the setup token against the environment variable.
|
||||
*/
|
||||
private validateSetupToken(token: string): void {
|
||||
const expectedToken = process.env.BREAKGLASS_SETUP_TOKEN;
|
||||
|
||||
if (!expectedToken || expectedToken.trim() === "") {
|
||||
throw new ForbiddenException(
|
||||
"Break-glass setup is not configured. Set BREAKGLASS_SETUP_TOKEN environment variable."
|
||||
);
|
||||
}
|
||||
|
||||
const tokenBuffer = Buffer.from(token);
|
||||
const expectedBuffer = Buffer.from(expectedToken);
|
||||
if (
|
||||
tokenBuffer.length !== expectedBuffer.length ||
|
||||
!timingSafeEqual(tokenBuffer, expectedBuffer)
|
||||
) {
|
||||
this.logger.warn("Invalid break-glass setup token attempt");
|
||||
throw new ForbiddenException("Invalid setup token");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Find the first workspace or create a default one, then assign OWNER role.
|
||||
*/
|
||||
private async assignDefaultWorkspace(
|
||||
tx: Parameters<Parameters<PrismaService["$transaction"]>[0]>[0],
|
||||
userId: string
|
||||
): Promise<void> {
|
||||
let workspace = await tx.workspace.findFirst({
|
||||
orderBy: { createdAt: "asc" },
|
||||
select: { id: true },
|
||||
});
|
||||
|
||||
workspace ??= await tx.workspace.create({
|
||||
data: {
|
||||
name: "Default Workspace",
|
||||
ownerId: userId,
|
||||
settings: {},
|
||||
},
|
||||
select: { id: true },
|
||||
});
|
||||
|
||||
await tx.workspaceMember.create({
|
||||
data: {
|
||||
workspaceId: workspace.id,
|
||||
userId,
|
||||
role: WorkspaceMemberRole.OWNER,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a BetterAuth-compatible session record.
|
||||
*/
|
||||
private async createSession(
|
||||
tx: { session: { create: typeof PrismaService.prototype.session.create } },
|
||||
userId: string,
|
||||
ipAddress?: string,
|
||||
userAgent?: string
|
||||
): Promise<{ token: string; expiresAt: Date }> {
|
||||
const token = randomBytes(32).toString("hex");
|
||||
const expiresAt = new Date(Date.now() + SESSION_EXPIRY_MS);
|
||||
|
||||
await tx.session.create({
|
||||
data: {
|
||||
userId,
|
||||
token,
|
||||
expiresAt,
|
||||
ipAddress: ipAddress ?? null,
|
||||
userAgent: userAgent ?? null,
|
||||
},
|
||||
});
|
||||
|
||||
return { token, expiresAt };
|
||||
}
|
||||
}
|
||||
@@ -1,11 +1,14 @@
|
||||
/**
|
||||
* BetterAuth Request Type
|
||||
* Unified request types for authentication context.
|
||||
*
|
||||
* BetterAuth expects a Request object compatible with the Fetch API standard.
|
||||
* This extends the web standard Request interface with additional properties
|
||||
* that may be present in the Express request object at runtime.
|
||||
* Replaces the previously scattered interfaces:
|
||||
* - RequestWithSession (auth.controller.ts)
|
||||
* - AuthRequest (auth.guard.ts)
|
||||
* - BetterAuthRequest (this file, removed)
|
||||
* - RequestWithUser (current-user.decorator.ts)
|
||||
*/
|
||||
|
||||
import type { Request } from "express";
|
||||
import type { AuthUser } from "@mosaic/shared";
|
||||
|
||||
// Re-export AuthUser for use in other modules
|
||||
@@ -22,19 +25,21 @@ export interface RequestSession {
|
||||
}
|
||||
|
||||
/**
|
||||
* Web standard Request interface extended with Express-specific properties
|
||||
* This matches the Fetch API Request specification that BetterAuth expects.
|
||||
* Request that may or may not have auth data (before guard runs).
|
||||
* Used by AuthGuard and other middleware that processes requests
|
||||
* before authentication is confirmed.
|
||||
*/
|
||||
export interface BetterAuthRequest extends Request {
|
||||
// Express route parameters
|
||||
params?: Record<string, string>;
|
||||
|
||||
// Express query string parameters
|
||||
query?: Record<string, string | string[]>;
|
||||
|
||||
// Session data attached by AuthGuard after successful authentication
|
||||
session?: RequestSession;
|
||||
|
||||
// Authenticated user attached by AuthGuard
|
||||
export interface MaybeAuthenticatedRequest extends Request {
|
||||
user?: AuthUser;
|
||||
session?: Record<string, unknown>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Request with authenticated user attached by AuthGuard.
|
||||
* After AuthGuard runs, user and session are guaranteed present.
|
||||
* Use this type in controllers/decorators that sit behind AuthGuard.
|
||||
*/
|
||||
export interface AuthenticatedRequest extends Request {
|
||||
user: AuthUser;
|
||||
session: RequestSession;
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user