diff --git a/.ghjk/lock.json b/.ghjk/lock.json index b597a41..410bbd9 100644 --- a/.ghjk/lock.json +++ b/.ghjk/lock.json @@ -308,13 +308,13 @@ "lock-sed": { "ty": "denoFile@v1", "key": "lock-sed", - "envKey": "bciqa3gn6bjbmgjf7mvk7cv6wfjo23wj3vyppgtpqt4ovvxkpzjrcvua" + "envKey": "bciqlneazfye2nq2cb4jsgigjqisrzm25gdkyitmotyl3qmq5nf3nb7y" }, "cache-v8": { "ty": "denoFile@v1", "key": "cache-v8", "desc": "Install the V8 builds to a local cache.", - "envKey": "bciqjzvefargmop4vyqujvr7igrjunekxn5dmfqezjd2rgrdji5xpdai" + "envKey": "bciqe5yq32fnpsioc326yny37wl3jszc6sgq3imo6544ez7iswpu3yny" } }, "tasksNamed": [ @@ -327,33 +327,53 @@ "id": "envs", "config": { "envs": { - "bciqfzekhtsrjd72noxifmici3ssck4jgvbjwhxwhhwtirzm7yomhxya": { + "bciqazjf2gsxfy4wt2gd6qsou4pciedy7dyhdch5l7gsmoq62boo2s7a": { "desc": "the default default environment.", "provides": [ + { + "ty": "posix.envVar", + "key": "RUST_LOG", + "val": "trace,deno=info,denort=trace,swc_ecma_transforms_base=info" + }, { "ty": "ghjk.ports.InstallSetRef", "setId": "ghjkEnvProvInstSet___main" } ] }, - "bciqa3gn6bjbmgjf7mvk7cv6wfjo23wj3vyppgtpqt4ovvxkpzjrcvua": { + "bciqlneazfye2nq2cb4jsgigjqisrzm25gdkyitmotyl3qmq5nf3nb7y": { "provides": [ + { + "ty": "posix.envVar", + "key": "RUST_LOG", + "val": "trace,deno=info,denort=trace,swc_ecma_transforms_base=info" + }, { "ty": "ghjk.ports.InstallSetRef", "setId": "ghjkEnvProvInstSet___main" } ] }, - "bciqjzvefargmop4vyqujvr7igrjunekxn5dmfqezjd2rgrdji5xpdai": { + "bciqe5yq32fnpsioc326yny37wl3jszc6sgq3imo6544ez7iswpu3yny": { "provides": [ + { + "ty": "posix.envVar", + "key": "RUST_LOG", + "val": "trace,deno=info,denort=trace,swc_ecma_transforms_base=info" + }, { "ty": "ghjk.ports.InstallSetRef", "setId": "ghjkEnvProvInstSet____rust" } ] }, - "bciqjiedgfyl4fsp2j64t4xk2sc7ei4eri75x2wqyczcmnthtgupg33a": { + "bciqe26fbb6fkfbou7w3rf3t2uqw4neldvakclhcecktgtpcuxwnzgja": { "provides": [ + { + "ty": "posix.envVar", + "key": "RUST_LOG", + "val": "trace,deno=info,denort=trace,swc_ecma_transforms_base=info" + }, { "ty": "posix.envVar", "key": "RUSTY_V8_MIRROR", @@ -368,9 +388,9 @@ }, "defaultEnv": "dev", "envsNamed": { - "main": "bciqfzekhtsrjd72noxifmici3ssck4jgvbjwhxwhhwtirzm7yomhxya", - "_rust": "bciqjzvefargmop4vyqujvr7igrjunekxn5dmfqezjd2rgrdji5xpdai", - "dev": "bciqjiedgfyl4fsp2j64t4xk2sc7ei4eri75x2wqyczcmnthtgupg33a" + "main": "bciqazjf2gsxfy4wt2gd6qsou4pciedy7dyhdch5l7gsmoq62boo2s7a", + "_rust": "bciqe5yq32fnpsioc326yny37wl3jszc6sgq3imo6544ez7iswpu3yny", + "dev": "bciqe26fbb6fkfbou7w3rf3t2uqw4neldvakclhcecktgtpcuxwnzgja" } } } diff --git a/Cargo.lock b/Cargo.lock index e54e388..421172d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -309,9 +309,9 @@ dependencies = [ [[package]] name = "async-trait" -version = "0.1.80" +version = "0.1.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6fa2087f2753a7da8cc1c0dbfcf89579dd57458e36769de5ac750b4671737ca" +checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2", "quote", @@ -3419,6 +3419,8 @@ name = "ghjk" version = "0.3.0" dependencies = [ "ahash", + "anyhow", + "async-trait", "bitflags 2.6.0", "clap", "color-eyre", diff --git a/Cargo.toml b/Cargo.toml index 4464176..2372000 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,5 +1,6 @@ [workspace] members = ["src/*"] +exclude = ["src/deno_systems"] resolver = "2" [workspace.package] diff --git a/deno.lock b/deno.lock index 77a1293..6d7581e 100644 --- a/deno.lock +++ b/deno.lock @@ -1,381 +1,347 @@ { - "version": "3", - "packages": { - 
"specifiers": { - "jsr:@david/dax@0.40.1": "jsr:@david/dax@0.40.1", - "jsr:@david/dax@0.41.0": "jsr:@david/dax@0.41.0", - "jsr:@david/which@0.3": "jsr:@david/which@0.3.0", - "jsr:@david/which@^0.4.1": "jsr:@david/which@0.4.1", - "jsr:@ghjk/dax@0.40.2-alpha-ghjk": "jsr:@ghjk/dax@0.40.2-alpha-ghjk", - "jsr:@std/assert@^0.221.0": "jsr:@std/assert@0.221.0", - "jsr:@std/bytes@^0.221.0": "jsr:@std/bytes@0.221.0", - "jsr:@std/fmt@^0.221.0": "jsr:@std/fmt@0.221.0", - "jsr:@std/fs@0.221.0": "jsr:@std/fs@0.221.0", - "jsr:@std/io@0.221.0": "jsr:@std/io@0.221.0", - "jsr:@std/io@^0.221.0": "jsr:@std/io@0.221.0", - "jsr:@std/path@0.221.0": "jsr:@std/path@0.221.0", - "jsr:@std/path@^0.221.0": "jsr:@std/path@0.221.0", - "jsr:@std/streams@0.221.0": "jsr:@std/streams@0.221.0", - "npm:@livekit/rtc-node@0.11.1": "npm:@livekit/rtc-node@0.11.1", - "npm:@noble/hashes@1.4.0": "npm:@noble/hashes@1.4.0", - "npm:@types/node": "npm:@types/node@18.16.19", - "npm:livekit-server-sdk@2.7.2": "npm:livekit-server-sdk@2.7.2", - "npm:lodash": "npm:lodash@4.17.21", - "npm:mathjs@11.11.1": "npm:mathjs@11.11.1", - "npm:multiformats@13.1.0": "npm:multiformats@13.1.0", - "npm:pg": "npm:pg@8.12.0", - "npm:validator": "npm:validator@13.12.0", - "npm:zod-validation-error": "npm:zod-validation-error@3.1.0_zod@3.23.3", - "npm:zod-validation-error@3.2.0": "npm:zod-validation-error@3.2.0_zod@3.23.3", - "npm:zod-validation-error@3.3.0": "npm:zod-validation-error@3.3.0_zod@3.23.3", - "npm:zod@3.23.8": "npm:zod@3.23.8" - }, - "jsr": { - "@david/dax@0.40.1": { - "integrity": "0c71d32a0484d3904f586417995f8ec26d45144f0eba95d3e5bb03b640b6df59", - "dependencies": [ - "jsr:@david/which@0.3", - "jsr:@std/fmt@^0.221.0", - "jsr:@std/fs@0.221.0", - "jsr:@std/io@0.221.0", - "jsr:@std/path@0.221.0", - "jsr:@std/streams@0.221.0" - ] - }, - "@david/dax@0.41.0": { - "integrity": "9e1ecf66a0415962cc8ad3ba4e3fa93ce0f1a1cc797dd95c36fdfb6977dc7fc8", - "dependencies": [ - "jsr:@david/which@^0.4.1", - "jsr:@std/fmt@^0.221.0", - "jsr:@std/fs@0.221.0", - "jsr:@std/io@0.221.0", - "jsr:@std/path@0.221.0", - "jsr:@std/streams@0.221.0" - ] - }, - "@david/which@0.3.0": { - "integrity": "6bdb62c40ac90edcf328e854fa8103a8db21e7c326089cbe3c3a1cf7887d3204" - }, - "@david/which@0.4.1": { - "integrity": "896a682b111f92ab866cc70c5b4afab2f5899d2f9bde31ed00203b9c250f225e" - }, - "@ghjk/dax@0.40.2-alpha-ghjk": { - "integrity": "87bc93e9947779cb2f3922fe277e21ea8c716de804b2627f80ba9e7bc3d0d019", - "dependencies": [ - "jsr:@david/which@0.3", - "jsr:@std/fmt@^0.221.0", - "jsr:@std/fs@0.221.0", - "jsr:@std/io@0.221.0", - "jsr:@std/path@0.221.0", - "jsr:@std/streams@0.221.0" - ] - }, - "@std/assert@0.221.0": { - "integrity": "a5f1aa6e7909dbea271754fd4ab3f4e687aeff4873b4cef9a320af813adb489a" - }, - "@std/bytes@0.221.0": { - "integrity": "64a047011cf833890a4a2ab7293ac55a1b4f5a050624ebc6a0159c357de91966" - }, - "@std/fmt@0.221.0": { - "integrity": "379fed69bdd9731110f26b9085aeb740606b20428ce6af31ef6bd45ef8efa62a" - }, - "@std/fs@0.221.0": { - "integrity": "028044450299de8ed5a716ade4e6d524399f035513b85913794f4e81f07da286", - "dependencies": [ - "jsr:@std/assert@^0.221.0", - "jsr:@std/path@^0.221.0" - ] - }, - "@std/io@0.221.0": { - "integrity": "faf7f8700d46ab527fa05cc6167f4b97701a06c413024431c6b4d207caa010da", - "dependencies": [ - "jsr:@std/assert@^0.221.0", - "jsr:@std/bytes@^0.221.0" - ] - }, - "@std/path@0.221.0": { - "integrity": "0a36f6b17314ef653a3a1649740cc8db51b25a133ecfe838f20b79a56ebe0095", - "dependencies": [ - "jsr:@std/assert@^0.221.0" - ] - }, - "@std/streams@0.221.0": { 
- "integrity": "47f2f74634b47449277c0ee79fe878da4424b66bd8975c032e3afdca88986e61", - "dependencies": [ - "jsr:@std/io@^0.221.0" - ] - } - }, - "npm": { - "@babel/runtime@7.24.7": { - "integrity": "sha512-UwgBRMjJP+xv857DCngvqXI3Iq6J4v0wXmwc6sapg+zyhbwmQX67LUEFrkK5tbyJ30jGuG3ZvWpBiB9LCy1kWw==", - "dependencies": { - "regenerator-runtime": "regenerator-runtime@0.14.1" - } - }, - "@bufbuild/protobuf@1.10.0": { - "integrity": "sha512-QDdVFLoN93Zjg36NoQPZfsVH9tZew7wKDKyV5qRdj8ntT4wQCOradQjRaTdwMhWUYsgKsvCINKKm87FdEk96Ag==", - "dependencies": {} - }, - "@bufbuild/protobuf@2.2.2": { - "integrity": "sha512-UNtPCbrwrenpmrXuRwn9jYpPoweNXj8X5sMvYgsqYyaH8jQ6LfUJSk3dJLnBK+6sfYPrF4iAIo5sd5HQ+tg75A==", - "dependencies": {} - }, - "@livekit/mutex@1.1.0": { - "integrity": "sha512-XRLG+z/0uoyDioupjUiskjI06Y51U/IXVPJn7qJ+R3J75XX01irYVBM9MpxeJahpVoe9QhU4moIEolX+HO9U9g==", - "dependencies": {} - }, - "@livekit/protocol@1.27.1": { - "integrity": "sha512-ISEp7uWdV82mtCR1eyHFTzdRZTVbe2+ZztjmjiMPzR/KPrI1Ma/u5kLh87NNuY3Rn8wv1VlEvGHHsFjQ+dKVUw==", - "dependencies": { - "@bufbuild/protobuf": "@bufbuild/protobuf@1.10.0" - } - }, - "@livekit/rtc-node-darwin-arm64@0.11.1": { - "integrity": "sha512-M+Ui87H06ae19GGI7r937dS6hI84MBBTQAkkNlL7qd+pvdCAk25u0FYa8r4SOElKJ0VR3AbzeDoXTihLgpvjMg==", - "dependencies": {} - }, - "@livekit/rtc-node-darwin-x64@0.11.1": { - "integrity": "sha512-7G92fyuK2p+jdTH2cUJTNeAmtknTGsXuy0xbI727V7VzQvHFDXULCExRlgwn4t9TxvNlIjUpiltiQ6RCSai6zw==", - "dependencies": {} - }, - "@livekit/rtc-node-linux-arm64-gnu@0.11.1": { - "integrity": "sha512-vqZN9+87Pvxit7auYVW69M+GvUPnf+EwipIJ92GgCJA3Ir1Tcceu5ud5/Ic+0FzSoV0cotVVlQNm74F0tQvyCg==", - "dependencies": {} - }, - "@livekit/rtc-node-linux-x64-gnu@0.11.1": { - "integrity": "sha512-smHZUMfgILQh6/eoauYNe/VlKwQCp4/4jWxiIADHY+mtDtVSvQ9zB6y4GP8FrpohRwFWesKCUpvPBypU0Icrng==", - "dependencies": {} - }, - "@livekit/rtc-node-win32-x64-msvc@0.11.1": { - "integrity": "sha512-bTWVtb+UiRPFjiuhrqq40gt5vs5mMPTa1e+kd2jGQPTOlKZPLArQ0WgFcep2TAy1zmcpOgfeM1XRPVFhZl7G1A==", - "dependencies": {} - }, - "@livekit/rtc-node@0.11.1": { - "integrity": "sha512-EFw+giPll12fcXATZpN2zKkE3umYJAdHvfjW+Yu0aBjwfxbUBXu8rz6le2CzDNvGmRwR888DSZXFZfYikwZgiw==", - "dependencies": { - "@bufbuild/protobuf": "@bufbuild/protobuf@2.2.2", - "@livekit/mutex": "@livekit/mutex@1.1.0", - "@livekit/rtc-node-darwin-arm64": "@livekit/rtc-node-darwin-arm64@0.11.1", - "@livekit/rtc-node-darwin-x64": "@livekit/rtc-node-darwin-x64@0.11.1", - "@livekit/rtc-node-linux-arm64-gnu": "@livekit/rtc-node-linux-arm64-gnu@0.11.1", - "@livekit/rtc-node-linux-x64-gnu": "@livekit/rtc-node-linux-x64-gnu@0.11.1", - "@livekit/rtc-node-win32-x64-msvc": "@livekit/rtc-node-win32-x64-msvc@0.11.1", - "@livekit/typed-emitter": "@livekit/typed-emitter@3.0.0" - } - }, - "@livekit/typed-emitter@3.0.0": { - "integrity": "sha512-9bl0k4MgBPZu3Qu3R3xy12rmbW17e3bE9yf4YY85gJIQ3ezLEj/uzpKHWBsLaDoL5Mozz8QCgggwIBudYQWeQg==", - "dependencies": {} - }, - "@noble/hashes@1.4.0": { - "integrity": "sha512-V1JJ1WTRUqHHrOSh597hURcMqVKVGL/ea3kv0gSnEdsEZ0/+VyPghM1lMNGc00z7CIQorSvbKpuJkxvuHbvdbg==", - "dependencies": {} - }, - "@types/node@18.16.19": { - "integrity": "sha512-IXl7o+R9iti9eBW4Wg2hx1xQDig183jj7YLn8F7udNceyfkbn1ZxmzZXuak20gR40D7pIkIY1kYGx5VIGbaHKA==", - "dependencies": {} - }, - "camelcase-keys@9.1.3": { - "integrity": "sha512-Rircqi9ch8AnZscQcsA1C47NFdaO3wukpmIRzYcDOrmvgt78hM/sj5pZhZNec2NM12uk5vTwRHZ4anGcrC4ZTg==", - "dependencies": { - "camelcase": "camelcase@8.0.0", - "map-obj": "map-obj@5.0.0", - "quick-lru": "quick-lru@6.1.2", - 
"type-fest": "type-fest@4.26.1" - } - }, - "camelcase@8.0.0": { - "integrity": "sha512-8WB3Jcas3swSvjIeA2yvCJ+Miyz5l1ZmB6HFb9R1317dt9LCQoswg/BGrmAmkWVEszSrrg4RwmO46qIm2OEnSA==", - "dependencies": {} - }, - "complex.js@2.1.1": { - "integrity": "sha512-8njCHOTtFFLtegk6zQo0kkVX1rngygb/KQI6z1qZxlFI3scluC+LVTCFbrkWjBv4vvLlbQ9t88IPMC6k95VTTg==", - "dependencies": {} - }, - "decimal.js@10.4.3": { - "integrity": "sha512-VBBaLc1MgL5XpzgIP7ny5Z6Nx3UrRkIViUkPUdtl9aya5amy3De1gsUUSB1g3+3sExYNjCAsAznmukyxCb1GRA==", - "dependencies": {} - }, - "escape-latex@1.2.0": { - "integrity": "sha512-nV5aVWW1K0wEiUIEdZ4erkGGH8mDxGyxSeqPzRNtWP7ataw+/olFObw7hujFWlVjNsaDFw5VZ5NzVSIqRgfTiw==", - "dependencies": {} - }, - "fraction.js@4.3.4": { - "integrity": "sha512-pwiTgt0Q7t+GHZA4yaLjObx4vXmmdcS0iSJ19o8d/goUGgItX9UZWKWNnLHehxviD8wU2IWRsnR8cD5+yOJP2Q==", - "dependencies": {} - }, - "javascript-natural-sort@0.7.1": { - "integrity": "sha512-nO6jcEfZWQXDhOiBtG2KvKyEptz7RVbpGP4vTD2hLBdmNQSsCiicO2Ioinv6UI4y9ukqnBpy+XZ9H6uLNgJTlw==", - "dependencies": {} - }, - "jose@5.9.6": { - "integrity": "sha512-AMlnetc9+CV9asI19zHmrgS/WYsWUwCn2R7RzlbJWD7F9eWYUTGyBmU9o6PxngtLGOiDGPRu+Uc4fhKzbpteZQ==", - "dependencies": {} - }, - "livekit-server-sdk@2.7.2": { - "integrity": "sha512-qDNRXeo+WMnY5nKSug7KHJ9er9JIuKi+r7H9ZaSBbmbaOt62i0b4BrHBMFSMr8pAuWzuSxihCFa29q5QvFc5fw==", - "dependencies": { - "@livekit/protocol": "@livekit/protocol@1.27.1", - "camelcase-keys": "camelcase-keys@9.1.3", - "jose": "jose@5.9.6" - } - }, - "lodash@4.17.21": { - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dependencies": {} - }, - "map-obj@5.0.0": { - "integrity": "sha512-2L3MIgJynYrZ3TYMriLDLWocz15okFakV6J12HXvMXDHui2x/zgChzg1u9mFFGbbGWE+GsLpQByt4POb9Or+uA==", - "dependencies": {} - }, - "mathjs@11.11.1": { - "integrity": "sha512-uWrwMrhU31TCqHKmm1yFz0C352njGUVr/I1UnpMOxI/VBTTbCktx/mREUXx5Vyg11xrFdg/F3wnMM7Ql/csVsQ==", - "dependencies": { - "@babel/runtime": "@babel/runtime@7.24.7", - "complex.js": "complex.js@2.1.1", - "decimal.js": "decimal.js@10.4.3", - "escape-latex": "escape-latex@1.2.0", - "fraction.js": "fraction.js@4.3.4", - "javascript-natural-sort": "javascript-natural-sort@0.7.1", - "seedrandom": "seedrandom@3.0.5", - "tiny-emitter": "tiny-emitter@2.1.0", - "typed-function": "typed-function@4.2.1" - } - }, - "multiformats@13.1.0": { - "integrity": "sha512-HzdtdBwxsIkzpeXzhQ5mAhhuxcHbjEHH+JQoxt7hG/2HGFjjwyolLo7hbaexcnhoEuV4e0TNJ8kkpMjiEYY4VQ==", - "dependencies": {} - }, - "pg-cloudflare@1.1.1": { - "integrity": "sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==", - "dependencies": {} - }, - "pg-connection-string@2.6.4": { - "integrity": "sha512-v+Z7W/0EO707aNMaAEfiGnGL9sxxumwLl2fJvCQtMn9Fxsg+lPpPkdcyBSv/KFgpGdYkMfn+EI1Or2EHjpgLCA==", - "dependencies": {} - }, - "pg-int8@1.0.1": { - "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", - "dependencies": {} - }, - "pg-pool@3.6.2_pg@8.12.0": { - "integrity": "sha512-Htjbg8BlwXqSBQ9V8Vjtc+vzf/6fVUuak/3/XXKA9oxZprwW3IMDQTGHP+KDmVL7rtd+R1QjbnCFPuTHm3G4hg==", - "dependencies": { - "pg": "pg@8.12.0" - } - }, - "pg-protocol@1.6.1": { - "integrity": "sha512-jPIlvgoD63hrEuihvIg+tJhoGjUsLPn6poJY9N5CnlPd91c2T18T/9zBtLxZSb1EhYxBRoZJtzScCaWlYLtktg==", - "dependencies": {} - }, - "pg-types@2.2.0": { - "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", - "dependencies": { - "pg-int8": 
"pg-int8@1.0.1", - "postgres-array": "postgres-array@2.0.0", - "postgres-bytea": "postgres-bytea@1.0.0", - "postgres-date": "postgres-date@1.0.7", - "postgres-interval": "postgres-interval@1.2.0" - } - }, - "pg@8.12.0": { - "integrity": "sha512-A+LHUSnwnxrnL/tZ+OLfqR1SxLN3c/pgDztZ47Rpbsd4jUytsTtwQo/TLPRzPJMp/1pbhYVhH9cuSZLAajNfjQ==", - "dependencies": { - "pg-cloudflare": "pg-cloudflare@1.1.1", - "pg-connection-string": "pg-connection-string@2.6.4", - "pg-pool": "pg-pool@3.6.2_pg@8.12.0", - "pg-protocol": "pg-protocol@1.6.1", - "pg-types": "pg-types@2.2.0", - "pgpass": "pgpass@1.0.5" - } - }, - "pgpass@1.0.5": { - "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", - "dependencies": { - "split2": "split2@4.2.0" - } - }, - "postgres-array@2.0.0": { - "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", - "dependencies": {} - }, - "postgres-bytea@1.0.0": { - "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==", - "dependencies": {} - }, - "postgres-date@1.0.7": { - "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", - "dependencies": {} - }, - "postgres-interval@1.2.0": { - "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", - "dependencies": { - "xtend": "xtend@4.0.2" - } - }, - "quick-lru@6.1.2": { - "integrity": "sha512-AAFUA5O1d83pIHEhJwWCq/RQcRukCkn/NSm2QsTEMle5f2hP0ChI2+3Xb051PZCkLryI/Ir1MVKviT2FIloaTQ==", - "dependencies": {} - }, - "regenerator-runtime@0.14.1": { - "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==", - "dependencies": {} - }, - "seedrandom@3.0.5": { - "integrity": "sha512-8OwmbklUNzwezjGInmZ+2clQmExQPvomqjL7LFqOYqtmuxRgQYqOD3mHaU+MvZn5FLUeVxVfQjwLZW/n/JFuqg==", - "dependencies": {} - }, - "split2@4.2.0": { - "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", - "dependencies": {} - }, - "tiny-emitter@2.1.0": { - "integrity": "sha512-NB6Dk1A9xgQPMoGqC5CVXn123gWyte215ONT5Pp5a0yt4nlEoO1ZWeCwpncaekPHXO60i47ihFnZPiRPjRMq4Q==", - "dependencies": {} - }, - "type-fest@4.26.1": { - "integrity": "sha512-yOGpmOAL7CkKe/91I5O3gPICmJNLJ1G4zFYVAsRHg7M64biSnPtRj0WNQt++bRkjYOqjWXrhnUw1utzmVErAdg==", - "dependencies": {} - }, - "typed-function@4.2.1": { - "integrity": "sha512-EGjWssW7Tsk4DGfE+5yluuljS1OGYWiI1J6e8puZz9nTMM51Oug8CD5Zo4gWMsOhq5BI+1bF+rWTm4Vbj3ivRA==", - "dependencies": {} - }, - "validator@13.12.0": { - "integrity": "sha512-c1Q0mCiPlgdTVVVIJIrBuxNicYE+t/7oKeI9MWLj3fh/uq2Pxh/3eeWbVZ4OcGW1TUf53At0njHw5SMdA3tmMg==", - "dependencies": {} - }, - "xtend@4.0.2": { - "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", - "dependencies": {} - }, - "zod-validation-error@3.1.0_zod@3.23.3": { - "integrity": "sha512-zujS6HqJjMZCsvjfbnRs7WI3PXN39ovTcY1n8a+KTm4kOH0ZXYsNiJkH1odZf4xZKMkBDL7M2rmQ913FCS1p9w==", - "dependencies": { - "zod": "zod@3.23.3" - } - }, - "zod-validation-error@3.2.0_zod@3.23.3": { - "integrity": "sha512-cYlPR6zuyrgmu2wRTdumEAJGuwI7eHVHGT+VyneAQxmRAKtGRL1/7pjz4wfLhz4J05f5qoSZc3rGacswgyTjjw==", - "dependencies": { - "zod": "zod@3.23.3" - } - }, - "zod-validation-error@3.3.0_zod@3.23.3": { - "integrity": "sha512-Syib9oumw1NTqEv4LT0e6U83Td9aVRk9iTXPUQr1otyV1PuXQKOvOwhMNqZIq5hluzHP2pMgnOmHEo7kPdI2mw==", 
- "dependencies": { - "zod": "zod@3.23.3" - } - }, - "zod@3.23.3": { - "integrity": "sha512-tPvq1B/2Yu/dh2uAIH2/BhUlUeLIUvAjr6dpL/75I0pCYefHgjhXk1o1Kob3kTU8C7yU1j396jFHlsVWFi9ogg==", - "dependencies": {} - }, - "zod@3.23.8": { - "integrity": "sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==", - "dependencies": {} - } + "version": "4", + "specifiers": { + "jsr:@david/dax@0.40.1": "0.40.1", + "jsr:@david/dax@0.41.0": "0.41.0", + "jsr:@david/which@0.3": "0.3.0", + "jsr:@david/which@~0.4.1": "0.4.1", + "jsr:@ghjk/dax@0.40.2-alpha-ghjk": "0.40.2-alpha-ghjk", + "jsr:@std/assert@0.221": "0.221.0", + "jsr:@std/bytes@0.221": "0.221.0", + "jsr:@std/fmt@0.221": "0.221.0", + "jsr:@std/fs@0.221.0": "0.221.0", + "jsr:@std/io@0.221": "0.221.0", + "jsr:@std/io@0.221.0": "0.221.0", + "jsr:@std/path@0.221": "0.221.0", + "jsr:@std/path@0.221.0": "0.221.0", + "jsr:@std/streams@0.221.0": "0.221.0", + "npm:@livekit/rtc-node@0.11.1": "0.11.1", + "npm:@noble/hashes@1.4.0": "1.4.0", + "npm:@types/node@*": "18.16.19", + "npm:livekit-server-sdk@2.7.2": "2.7.2", + "npm:lodash@*": "4.17.21", + "npm:mathjs@11.11.1": "11.11.1", + "npm:multiformats@13.1.0": "13.1.0", + "npm:pg@*": "8.12.0", + "npm:validator@*": "13.12.0", + "npm:zod-validation-error@*": "3.1.0_zod@3.23.3", + "npm:zod-validation-error@3.2.0": "3.2.0_zod@3.23.3", + "npm:zod-validation-error@3.3.0": "3.3.0_zod@3.23.3", + "npm:zod-validation-error@3.4.0": "3.4.0_zod@3.23.8", + "npm:zod@3.23.8": "3.23.8" + }, + "jsr": { + "@david/dax@0.40.1": { + "integrity": "0c71d32a0484d3904f586417995f8ec26d45144f0eba95d3e5bb03b640b6df59", + "dependencies": [ + "jsr:@david/which@0.3", + "jsr:@std/fmt", + "jsr:@std/fs", + "jsr:@std/io@0.221.0", + "jsr:@std/path@0.221.0", + "jsr:@std/streams" + ] + }, + "@david/dax@0.41.0": { + "integrity": "9e1ecf66a0415962cc8ad3ba4e3fa93ce0f1a1cc797dd95c36fdfb6977dc7fc8", + "dependencies": [ + "jsr:@david/which@~0.4.1", + "jsr:@std/fmt", + "jsr:@std/fs", + "jsr:@std/io@0.221.0", + "jsr:@std/path@0.221.0", + "jsr:@std/streams" + ] + }, + "@david/which@0.3.0": { + "integrity": "6bdb62c40ac90edcf328e854fa8103a8db21e7c326089cbe3c3a1cf7887d3204" + }, + "@david/which@0.4.1": { + "integrity": "896a682b111f92ab866cc70c5b4afab2f5899d2f9bde31ed00203b9c250f225e" + }, + "@ghjk/dax@0.40.2-alpha-ghjk": { + "integrity": "87bc93e9947779cb2f3922fe277e21ea8c716de804b2627f80ba9e7bc3d0d019", + "dependencies": [ + "jsr:@david/which@0.3", + "jsr:@std/fmt", + "jsr:@std/fs", + "jsr:@std/io@0.221.0", + "jsr:@std/path@0.221.0", + "jsr:@std/streams" + ] + }, + "@std/assert@0.221.0": { + "integrity": "a5f1aa6e7909dbea271754fd4ab3f4e687aeff4873b4cef9a320af813adb489a" + }, + "@std/bytes@0.221.0": { + "integrity": "64a047011cf833890a4a2ab7293ac55a1b4f5a050624ebc6a0159c357de91966" + }, + "@std/fmt@0.221.0": { + "integrity": "379fed69bdd9731110f26b9085aeb740606b20428ce6af31ef6bd45ef8efa62a" + }, + "@std/fs@0.221.0": { + "integrity": "028044450299de8ed5a716ade4e6d524399f035513b85913794f4e81f07da286", + "dependencies": [ + "jsr:@std/assert", + "jsr:@std/path@0.221" + ] + }, + "@std/io@0.221.0": { + "integrity": "faf7f8700d46ab527fa05cc6167f4b97701a06c413024431c6b4d207caa010da", + "dependencies": [ + "jsr:@std/assert", + "jsr:@std/bytes" + ] + }, + "@std/path@0.221.0": { + "integrity": "0a36f6b17314ef653a3a1649740cc8db51b25a133ecfe838f20b79a56ebe0095", + "dependencies": [ + "jsr:@std/assert" + ] + }, + "@std/streams@0.221.0": { + "integrity": "47f2f74634b47449277c0ee79fe878da4424b66bd8975c032e3afdca88986e61", + "dependencies": [ 
+ "jsr:@std/io@0.221" + ] + } + }, + "npm": { + "@babel/runtime@7.24.7": { + "integrity": "sha512-UwgBRMjJP+xv857DCngvqXI3Iq6J4v0wXmwc6sapg+zyhbwmQX67LUEFrkK5tbyJ30jGuG3ZvWpBiB9LCy1kWw==", + "dependencies": [ + "regenerator-runtime" + ] + }, + "@bufbuild/protobuf@1.10.0": { + "integrity": "sha512-QDdVFLoN93Zjg36NoQPZfsVH9tZew7wKDKyV5qRdj8ntT4wQCOradQjRaTdwMhWUYsgKsvCINKKm87FdEk96Ag==" + }, + "@bufbuild/protobuf@2.2.2": { + "integrity": "sha512-UNtPCbrwrenpmrXuRwn9jYpPoweNXj8X5sMvYgsqYyaH8jQ6LfUJSk3dJLnBK+6sfYPrF4iAIo5sd5HQ+tg75A==" + }, + "@livekit/mutex@1.1.0": { + "integrity": "sha512-XRLG+z/0uoyDioupjUiskjI06Y51U/IXVPJn7qJ+R3J75XX01irYVBM9MpxeJahpVoe9QhU4moIEolX+HO9U9g==" + }, + "@livekit/protocol@1.27.1": { + "integrity": "sha512-ISEp7uWdV82mtCR1eyHFTzdRZTVbe2+ZztjmjiMPzR/KPrI1Ma/u5kLh87NNuY3Rn8wv1VlEvGHHsFjQ+dKVUw==", + "dependencies": [ + "@bufbuild/protobuf@1.10.0" + ] + }, + "@livekit/rtc-node-darwin-arm64@0.11.1": { + "integrity": "sha512-M+Ui87H06ae19GGI7r937dS6hI84MBBTQAkkNlL7qd+pvdCAk25u0FYa8r4SOElKJ0VR3AbzeDoXTihLgpvjMg==" + }, + "@livekit/rtc-node-darwin-x64@0.11.1": { + "integrity": "sha512-7G92fyuK2p+jdTH2cUJTNeAmtknTGsXuy0xbI727V7VzQvHFDXULCExRlgwn4t9TxvNlIjUpiltiQ6RCSai6zw==" + }, + "@livekit/rtc-node-linux-arm64-gnu@0.11.1": { + "integrity": "sha512-vqZN9+87Pvxit7auYVW69M+GvUPnf+EwipIJ92GgCJA3Ir1Tcceu5ud5/Ic+0FzSoV0cotVVlQNm74F0tQvyCg==" + }, + "@livekit/rtc-node-linux-x64-gnu@0.11.1": { + "integrity": "sha512-smHZUMfgILQh6/eoauYNe/VlKwQCp4/4jWxiIADHY+mtDtVSvQ9zB6y4GP8FrpohRwFWesKCUpvPBypU0Icrng==" + }, + "@livekit/rtc-node-win32-x64-msvc@0.11.1": { + "integrity": "sha512-bTWVtb+UiRPFjiuhrqq40gt5vs5mMPTa1e+kd2jGQPTOlKZPLArQ0WgFcep2TAy1zmcpOgfeM1XRPVFhZl7G1A==" + }, + "@livekit/rtc-node@0.11.1": { + "integrity": "sha512-EFw+giPll12fcXATZpN2zKkE3umYJAdHvfjW+Yu0aBjwfxbUBXu8rz6le2CzDNvGmRwR888DSZXFZfYikwZgiw==", + "dependencies": [ + "@bufbuild/protobuf@2.2.2", + "@livekit/mutex", + "@livekit/rtc-node-darwin-arm64", + "@livekit/rtc-node-darwin-x64", + "@livekit/rtc-node-linux-arm64-gnu", + "@livekit/rtc-node-linux-x64-gnu", + "@livekit/rtc-node-win32-x64-msvc", + "@livekit/typed-emitter" + ] + }, + "@livekit/typed-emitter@3.0.0": { + "integrity": "sha512-9bl0k4MgBPZu3Qu3R3xy12rmbW17e3bE9yf4YY85gJIQ3ezLEj/uzpKHWBsLaDoL5Mozz8QCgggwIBudYQWeQg==" + }, + "@noble/hashes@1.4.0": { + "integrity": "sha512-V1JJ1WTRUqHHrOSh597hURcMqVKVGL/ea3kv0gSnEdsEZ0/+VyPghM1lMNGc00z7CIQorSvbKpuJkxvuHbvdbg==" + }, + "@types/node@18.16.19": { + "integrity": "sha512-IXl7o+R9iti9eBW4Wg2hx1xQDig183jj7YLn8F7udNceyfkbn1ZxmzZXuak20gR40D7pIkIY1kYGx5VIGbaHKA==" + }, + "camelcase-keys@9.1.3": { + "integrity": "sha512-Rircqi9ch8AnZscQcsA1C47NFdaO3wukpmIRzYcDOrmvgt78hM/sj5pZhZNec2NM12uk5vTwRHZ4anGcrC4ZTg==", + "dependencies": [ + "camelcase", + "map-obj", + "quick-lru", + "type-fest" + ] + }, + "camelcase@8.0.0": { + "integrity": "sha512-8WB3Jcas3swSvjIeA2yvCJ+Miyz5l1ZmB6HFb9R1317dt9LCQoswg/BGrmAmkWVEszSrrg4RwmO46qIm2OEnSA==" + }, + "complex.js@2.1.1": { + "integrity": "sha512-8njCHOTtFFLtegk6zQo0kkVX1rngygb/KQI6z1qZxlFI3scluC+LVTCFbrkWjBv4vvLlbQ9t88IPMC6k95VTTg==" + }, + "decimal.js@10.4.3": { + "integrity": "sha512-VBBaLc1MgL5XpzgIP7ny5Z6Nx3UrRkIViUkPUdtl9aya5amy3De1gsUUSB1g3+3sExYNjCAsAznmukyxCb1GRA==" + }, + "escape-latex@1.2.0": { + "integrity": "sha512-nV5aVWW1K0wEiUIEdZ4erkGGH8mDxGyxSeqPzRNtWP7ataw+/olFObw7hujFWlVjNsaDFw5VZ5NzVSIqRgfTiw==" + }, + "fraction.js@4.3.4": { + "integrity": "sha512-pwiTgt0Q7t+GHZA4yaLjObx4vXmmdcS0iSJ19o8d/goUGgItX9UZWKWNnLHehxviD8wU2IWRsnR8cD5+yOJP2Q==" + }, + 
"javascript-natural-sort@0.7.1": { + "integrity": "sha512-nO6jcEfZWQXDhOiBtG2KvKyEptz7RVbpGP4vTD2hLBdmNQSsCiicO2Ioinv6UI4y9ukqnBpy+XZ9H6uLNgJTlw==" + }, + "jose@5.9.6": { + "integrity": "sha512-AMlnetc9+CV9asI19zHmrgS/WYsWUwCn2R7RzlbJWD7F9eWYUTGyBmU9o6PxngtLGOiDGPRu+Uc4fhKzbpteZQ==" + }, + "livekit-server-sdk@2.7.2": { + "integrity": "sha512-qDNRXeo+WMnY5nKSug7KHJ9er9JIuKi+r7H9ZaSBbmbaOt62i0b4BrHBMFSMr8pAuWzuSxihCFa29q5QvFc5fw==", + "dependencies": [ + "@livekit/protocol", + "camelcase-keys", + "jose" + ] + }, + "lodash@4.17.21": { + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + }, + "map-obj@5.0.0": { + "integrity": "sha512-2L3MIgJynYrZ3TYMriLDLWocz15okFakV6J12HXvMXDHui2x/zgChzg1u9mFFGbbGWE+GsLpQByt4POb9Or+uA==" + }, + "mathjs@11.11.1": { + "integrity": "sha512-uWrwMrhU31TCqHKmm1yFz0C352njGUVr/I1UnpMOxI/VBTTbCktx/mREUXx5Vyg11xrFdg/F3wnMM7Ql/csVsQ==", + "dependencies": [ + "@babel/runtime", + "complex.js", + "decimal.js", + "escape-latex", + "fraction.js", + "javascript-natural-sort", + "seedrandom", + "tiny-emitter", + "typed-function" + ] + }, + "multiformats@13.1.0": { + "integrity": "sha512-HzdtdBwxsIkzpeXzhQ5mAhhuxcHbjEHH+JQoxt7hG/2HGFjjwyolLo7hbaexcnhoEuV4e0TNJ8kkpMjiEYY4VQ==" + }, + "pg-cloudflare@1.1.1": { + "integrity": "sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==" + }, + "pg-connection-string@2.6.4": { + "integrity": "sha512-v+Z7W/0EO707aNMaAEfiGnGL9sxxumwLl2fJvCQtMn9Fxsg+lPpPkdcyBSv/KFgpGdYkMfn+EI1Or2EHjpgLCA==" + }, + "pg-int8@1.0.1": { + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==" + }, + "pg-pool@3.6.2_pg@8.12.0": { + "integrity": "sha512-Htjbg8BlwXqSBQ9V8Vjtc+vzf/6fVUuak/3/XXKA9oxZprwW3IMDQTGHP+KDmVL7rtd+R1QjbnCFPuTHm3G4hg==", + "dependencies": [ + "pg" + ] + }, + "pg-protocol@1.6.1": { + "integrity": "sha512-jPIlvgoD63hrEuihvIg+tJhoGjUsLPn6poJY9N5CnlPd91c2T18T/9zBtLxZSb1EhYxBRoZJtzScCaWlYLtktg==" + }, + "pg-types@2.2.0": { + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "dependencies": [ + "pg-int8", + "postgres-array", + "postgres-bytea", + "postgres-date", + "postgres-interval" + ] + }, + "pg@8.12.0": { + "integrity": "sha512-A+LHUSnwnxrnL/tZ+OLfqR1SxLN3c/pgDztZ47Rpbsd4jUytsTtwQo/TLPRzPJMp/1pbhYVhH9cuSZLAajNfjQ==", + "dependencies": [ + "pg-cloudflare", + "pg-connection-string", + "pg-pool", + "pg-protocol", + "pg-types", + "pgpass" + ] + }, + "pgpass@1.0.5": { + "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", + "dependencies": [ + "split2" + ] + }, + "postgres-array@2.0.0": { + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==" + }, + "postgres-bytea@1.0.0": { + "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==" + }, + "postgres-date@1.0.7": { + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==" + }, + "postgres-interval@1.2.0": { + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "dependencies": [ + "xtend" + ] + }, + "quick-lru@6.1.2": { + "integrity": "sha512-AAFUA5O1d83pIHEhJwWCq/RQcRukCkn/NSm2QsTEMle5f2hP0ChI2+3Xb051PZCkLryI/Ir1MVKviT2FIloaTQ==" + }, + "regenerator-runtime@0.14.1": { + "integrity": 
"sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==" + }, + "seedrandom@3.0.5": { + "integrity": "sha512-8OwmbklUNzwezjGInmZ+2clQmExQPvomqjL7LFqOYqtmuxRgQYqOD3mHaU+MvZn5FLUeVxVfQjwLZW/n/JFuqg==" + }, + "split2@4.2.0": { + "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==" + }, + "tiny-emitter@2.1.0": { + "integrity": "sha512-NB6Dk1A9xgQPMoGqC5CVXn123gWyte215ONT5Pp5a0yt4nlEoO1ZWeCwpncaekPHXO60i47ihFnZPiRPjRMq4Q==" + }, + "type-fest@4.26.1": { + "integrity": "sha512-yOGpmOAL7CkKe/91I5O3gPICmJNLJ1G4zFYVAsRHg7M64biSnPtRj0WNQt++bRkjYOqjWXrhnUw1utzmVErAdg==" + }, + "typed-function@4.2.1": { + "integrity": "sha512-EGjWssW7Tsk4DGfE+5yluuljS1OGYWiI1J6e8puZz9nTMM51Oug8CD5Zo4gWMsOhq5BI+1bF+rWTm4Vbj3ivRA==" + }, + "validator@13.12.0": { + "integrity": "sha512-c1Q0mCiPlgdTVVVIJIrBuxNicYE+t/7oKeI9MWLj3fh/uq2Pxh/3eeWbVZ4OcGW1TUf53At0njHw5SMdA3tmMg==" + }, + "xtend@4.0.2": { + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" + }, + "zod-validation-error@3.1.0_zod@3.23.3": { + "integrity": "sha512-zujS6HqJjMZCsvjfbnRs7WI3PXN39ovTcY1n8a+KTm4kOH0ZXYsNiJkH1odZf4xZKMkBDL7M2rmQ913FCS1p9w==", + "dependencies": [ + "zod@3.23.3" + ] + }, + "zod-validation-error@3.2.0_zod@3.23.3": { + "integrity": "sha512-cYlPR6zuyrgmu2wRTdumEAJGuwI7eHVHGT+VyneAQxmRAKtGRL1/7pjz4wfLhz4J05f5qoSZc3rGacswgyTjjw==", + "dependencies": [ + "zod@3.23.3" + ] + }, + "zod-validation-error@3.3.0_zod@3.23.3": { + "integrity": "sha512-Syib9oumw1NTqEv4LT0e6U83Td9aVRk9iTXPUQr1otyV1PuXQKOvOwhMNqZIq5hluzHP2pMgnOmHEo7kPdI2mw==", + "dependencies": [ + "zod@3.23.3" + ] + }, + "zod-validation-error@3.4.0_zod@3.23.8": { + "integrity": "sha512-ZOPR9SVY6Pb2qqO5XHt+MkkTRxGXb4EVtnjc9JpXUOtUB1T9Ru7mZOT361AN3MsetVe7R0a1KZshJDZdgp9miQ==", + "dependencies": [ + "zod@3.23.8" + ] + }, + "zod@3.23.3": { + "integrity": "sha512-tPvq1B/2Yu/dh2uAIH2/BhUlUeLIUvAjr6dpL/75I0pCYefHgjhXk1o1Kob3kTU8C7yU1j396jFHlsVWFi9ogg==" + }, + "zod@3.23.8": { + "integrity": "sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==" } }, "remote": { diff --git a/deps/common.ts b/deps/common.ts index d81d085..8843e4b 100644 --- a/deps/common.ts +++ b/deps/common.ts @@ -10,7 +10,7 @@ export * as std_fmt_colors from "https://deno.land/std@0.213.0/fmt/colors.ts"; export * as std_url from "https://deno.land/std@0.213.0/url/mod.ts"; export * as std_path from "https://deno.land/std@0.213.0/path/mod.ts"; export * as std_fs from "https://deno.land/std@0.213.0/fs/mod.ts"; -export * as zod_val_err from "npm:zod-validation-error@3.3.0"; +export * as zod_val_err from "npm:zod-validation-error@3.4.0"; // avoid using the following directly and go through the // wrappers in ./utils/mod.ts diff --git a/files/deno/mod2.ts b/files/deno/bindings.ts similarity index 95% rename from files/deno/mod2.ts rename to files/deno/bindings.ts index ccb1da5..8bf47f1 100644 --- a/files/deno/mod2.ts +++ b/files/deno/bindings.ts @@ -9,6 +9,7 @@ // NOTE: only import types import { shimDenoNamespace } from "../../utils/worker.ts"; import { zod } from "../../deps/common.ts"; +import { Ghjk } from "../../src/ghjk/js/runtime.js"; const serializeArgs = zod.object({ uri: zod.string(), diff --git a/ghjk.ts b/ghjk.ts index 9cf3a48..3b0c45f 100644 --- a/ghjk.ts +++ b/ghjk.ts @@ -14,6 +14,10 @@ config({ allowedBuildDeps: [ports.cpy_bs({ version: "3.12.7" })], }); +env("main").vars({ + RUST_LOG: 
"trace,deno=info,denort=trace,swc_ecma_transforms_base=info", +}); + env("_rust") .install( ports.protoc(), diff --git a/host/mod.ts b/host/mod.ts index 811ed0d..651a9b8 100644 --- a/host/mod.ts +++ b/host/mod.ts @@ -268,9 +268,8 @@ async function commandsFromConfig(hcx: HostCtx, gcx: GhjkCtx) { `no lock entry found for module specified by lockfile config: ${man.id}`, ); } - const instance: ModuleBase = new mod.ctor(); + const instance: ModuleBase = new mod.ctor(gcx); lockEntries[man.id] = await instance.loadLockEntry( - gcx, entry as Json, ); } @@ -313,22 +312,21 @@ async function commandsFromConfig(hcx: HostCtx, gcx: GhjkCtx) { }; // command name to [cmd, source module id] const subCommands = {} as Record; - const instances = [] as [string, ModuleBase, unknown][]; + const instances = [] as [string, ModuleBase][]; for (const man of configExt.config.modules) { const mod = std_modules.map[man.id]; if (!mod) { throw new Error(`unrecognized module specified by ghjk.ts: ${man.id}`); } - const instance: ModuleBase = new mod.ctor(); - const pMan = await instance.processManifest( - gcx, + const instance: ModuleBase = new mod.ctor(gcx); + await instance.loadConfig( man, configExt.config.blackboard, lockEntries[man.id], ); - instances.push([man.id, instance, pMan] as const); - for (const [cmdName, cmd] of Object.entries(instance.commands(gcx, pMan))) { + instances.push([man.id, instance] as const); + for (const [cmdName, cmd] of Object.entries(instance.commands())) { const conflict = subCommands[cmdName]; if (conflict) { throw new Error( @@ -368,8 +366,8 @@ async function commandsFromConfig(hcx: HostCtx, gcx: GhjkCtx) { await Array.fromAsync( instances.map( async ( - [id, instance, pMan], - ) => [id, await instance.genLockEntry(gcx, pMan)], + [id, instance], + ) => [id, await instance.genLockEntry()], ), ), ); diff --git a/modules/envs/mod.ts b/modules/envs/mod.ts index 516173b..d15d706 100644 --- a/modules/envs/mod.ts +++ b/modules/envs/mod.ts @@ -33,9 +33,8 @@ const lockValidator = zod.object({ type EnvsLockEnt = zod.infer; -export class EnvsModule extends ModuleBase { - processManifest( - gcx: GhjkCtx, +export class EnvsModule extends ModuleBase { + loadConfig( manifest: ModuleManifest, _bb: Blackboard, _lockEnt: EnvsLockEnt | undefined, @@ -52,20 +51,18 @@ export class EnvsModule extends ModuleBase { const setEnv = Deno.env.get("GHJK_ENV"); const activeEnv = setEnv && setEnv != "" ? setEnv : config.defaultEnv; - const envsCtx = getEnvsCtx(gcx); - envsCtx.activeEnv = activeEnv; - envsCtx.config = config; + const ecx = getEnvsCtx(this.gcx); + ecx.activeEnv = activeEnv; + ecx.config = config; for (const [name, key] of Object.entries(config.envsNamed)) { - envsCtx.keyToName[key] = [name, ...(envsCtx.keyToName[key] ?? [])]; + ecx.keyToName[key] = [name, ...(ecx.keyToName[key] ?? [])]; } - - return Promise.resolve(envsCtx); } - commands( - gcx: GhjkCtx, - ecx: EnvsCtx, - ) { + commands() { + const gcx = this.gcx; + const ecx = getEnvsCtx(this.gcx); + function envKeyArgs( args: { taskKeyMaybe?: string; @@ -215,10 +212,7 @@ Cooks and activates an environment. }; } - loadLockEntry( - _gcx: GhjkCtx, - raw: Json, - ) { + loadLockEntry(raw: Json) { const entry = lockValidator.parse(raw); if (entry.version != "0") { @@ -227,10 +221,7 @@ Cooks and activates an environment. 
return entry; } - genLockEntry( - _gcx: GhjkCtx, - _tcx: EnvsCtx, - ) { + genLockEntry() { return { version: "0", }; diff --git a/modules/mod.ts b/modules/mod.ts index daa35fa..9ec8165 100644 --- a/modules/mod.ts +++ b/modules/mod.ts @@ -3,27 +3,20 @@ import { Blackboard } from "../host/types.ts"; import type { Json } from "../utils/mod.ts"; import type { GhjkCtx, ModuleManifest } from "./types.ts"; -export abstract class ModuleBase { +export abstract class ModuleBase { + constructor(protected gcx: GhjkCtx) {} /* init( _gcx: GhjkCtx, ): Promise | void {} */ - abstract processManifest( - gcx: GhjkCtx, + abstract loadConfig( manifest: ModuleManifest, bb: Blackboard, lockEnt: LockEnt | undefined, - ): Promise | Ctx; + ): Promise | void; // returns undefined if previous lock entry is no longer valid abstract loadLockEntry( - gcx: GhjkCtx, raw: Json, ): Promise | LockEnt | undefined; - abstract genLockEntry( - gcx: GhjkCtx, - mcx: Ctx, - ): Promise | Json; - abstract commands( - gcx: GhjkCtx, - mcx: Ctx, - ): Record>; + abstract genLockEntry(): Promise | Json; + abstract commands(): Record>; } diff --git a/modules/ports/mod.ts b/modules/ports/mod.ts index 1dcad40..9211346 100644 --- a/modules/ports/mod.ts +++ b/modules/ports/mod.ts @@ -49,9 +49,8 @@ const lockValidator = zod.object({ }); type PortsLockEnt = zod.infer; -export class PortsModule extends ModuleBase { - processManifest( - gcx: GhjkCtx, +export class PortsModule extends ModuleBase { + loadConfig( manifest: ModuleManifest, bb: Blackboard, _lockEnt: PortsLockEnt | undefined, @@ -67,7 +66,8 @@ export class PortsModule extends ModuleBase { validators.portsModuleConfigHashed.safeParse(manifest.config), ); - const pcx: PortsCtx = getPortsCtx(gcx); + const gcx = this.gcx; + const pcx = getPortsCtx(gcx); // pre-process the install sets found in the config for (const [id, hashedSet] of Object.entries(hashedModConf.sets)) { @@ -108,13 +108,12 @@ export class PortsModule extends ModuleBase { installSetProvisionTy, installSetReducer(gcx) as ProvisionReducer, ); - return pcx; } - commands( - gcx: GhjkCtx, - pcx: PortsCtx, - ) { + commands() { + const gcx = this.gcx; + const pcx = getPortsCtx(gcx); + return { ports: new cliffy_cmd.Command() .alias("p") @@ -268,16 +267,13 @@ export class PortsModule extends ModuleBase { ), }; } - loadLockEntry( - gcx: GhjkCtx, - raw: Json, - ) { + loadLockEntry(raw: Json) { const entry = lockValidator.parse(raw); if (entry.version != "0") { throw new Error(`unexepected version tag deserializing lockEntry`); } - const memoStore = getResolutionMemo(gcx); + const memoStore = getResolutionMemo(this.gcx); for (const [hash, config] of Object.entries(entry.configResolutions)) { logger().debug( "restoring resolution from lockfile", @@ -290,11 +286,8 @@ export class PortsModule extends ModuleBase { return entry; } - async genLockEntry( - gcx: GhjkCtx, - _pcx: PortsCtx, - ) { - const memo = getResolutionMemo(gcx); + async genLockEntry() { + const memo = getResolutionMemo(this.gcx); const configResolutions = Object.fromEntries( await Array.fromAsync( [...memo.entries()].map(async ([key, prom]) => [key, await prom]), diff --git a/modules/tasks/mod.ts b/modules/tasks/mod.ts index 222bda7..7bdb2a2 100644 --- a/modules/tasks/mod.ts +++ b/modules/tasks/mod.ts @@ -5,7 +5,7 @@ import { Json, unwrapZodRes } from "../../utils/mod.ts"; import validators from "./types.ts"; import type { TasksModuleConfigX } from "./types.ts"; -import { type GhjkCtx, type ModuleManifest } from "../types.ts"; +import { type ModuleManifest } from 
"../types.ts"; import { ModuleBase } from "../mod.ts"; import { buildTaskGraph, execTask, type TaskGraph } from "./exec.ts"; @@ -21,9 +21,8 @@ const lockValidator = zod.object({ }); type TasksLockEnt = zod.infer; -export class TasksModule extends ModuleBase { - processManifest( - gcx: GhjkCtx, +export class TasksModule extends ModuleBase { + loadConfig( manifest: ModuleManifest, bb: Blackboard, _lockEnt: TasksLockEnt | undefined, @@ -40,19 +39,17 @@ export class TasksModule extends ModuleBase { validators.tasksModuleConfig.safeParse(manifest.config), ); - const taskGraph = buildTaskGraph(gcx, config); + const taskGraph = buildTaskGraph(this.gcx, config); - const tasksCtx = getTasksCtx(gcx); - tasksCtx.config = config; - tasksCtx.taskGraph = taskGraph; - - return tasksCtx; + const tcx = getTasksCtx(this.gcx); + tcx.config = config; + tcx.taskGraph = taskGraph; } - commands( - gcx: GhjkCtx, - tcx: TasksCtx, - ) { + commands() { + const gcx = this.gcx; + const tcx = getTasksCtx(this.gcx); + const namedSet = new Set(tcx.config.tasksNamed); const commands = Object.keys(tcx.config.tasks) .sort() @@ -96,10 +93,7 @@ The named tasks in your ghjkfile will be listed here.`); }; } - loadLockEntry( - _gcx: GhjkCtx, - raw: Json, - ) { + loadLockEntry(raw: Json) { const entry = lockValidator.parse(raw); if (entry.version != "0") { @@ -108,10 +102,7 @@ The named tasks in your ghjkfile will be listed here.`); return entry; } - genLockEntry( - _gcx: GhjkCtx, - _tcx: TasksCtx, - ) { + genLockEntry() { return { version: "0", }; diff --git a/src/deno_systems/bindings.ts b/src/deno_systems/bindings.ts new file mode 100644 index 0000000..bbfd0e3 --- /dev/null +++ b/src/deno_systems/bindings.ts @@ -0,0 +1,126 @@ +// import "../../src/ghjk/js/mock.sfx.ts"; +import { zod } from "../../deps/common.ts"; +import { $, Json, unwrapZodRes } from "../../utils/mod.ts"; +import type { GhjkCtx, ModuleManifest } from "../../modules/types.ts"; +import type { ModuleBase } from "../../modules/mod.ts"; +import type { Blackboard } from "../../host/types.ts"; +import { Ghjk } from "../ghjk/js/runtime.js"; + +const prepareArgs = zod.object({ + uri: zod.string(), + gcx: zod.object({ + ghjkfile_path: zod.string().optional(), + ghjk_dir_path: zod.string(), + share_dir_path: zod.string(), + }), +}); + +const denoSystemsRoot = zod.object({ + systems: zod.record(zod.function()), +}); + +type DenoSystemCtor = (gcx: GhjkCtx) => ModuleBase; + +export type DenoSystemsRoot = { + systems: Record; +}; + +type ManifestDesc = { + id: string; + ctor_cb_key: string; +}; +type InstanceDesc = { + load_lock_entry_cb_key: string; + gen_lock_entry_cb_key: string; + load_config_cb_key: string; +}; + +async function prepareSystems(args: zod.infer) { + const gcx = { + ghjkDir: $.path(args.gcx.ghjk_dir_path), + ghjkShareDir: $.path(args.gcx.share_dir_path), + ghjkfilePath: args.gcx.ghjkfile_path + ? 
$.path(args.gcx.ghjkfile_path) + : undefined, + blackboard: new Map(), + } satisfies GhjkCtx; + + const { default: mod } = await import(args.uri); + const { systems } = unwrapZodRes( + denoSystemsRoot.safeParse(mod), + ) as DenoSystemsRoot; + + const manifests = [] as ManifestDesc[]; + + for (const [id, ctorFn] of Object.entries(systems)) { + manifests.push({ + id, + ctor_cb_key: Ghjk.callbacks.set( + `sys_ctor_${id}_${crypto.randomUUID()}`, + () => { + const instance = ctorFn(gcx); + return instanceBinding(gcx, id, instance); + }, + ), + }); + } + await Ghjk.hostcall("register_systems", manifests); +} + +function instanceBinding( + gcx: GhjkCtx, + sys_id: string, + instance: ModuleBase, +) { + const instanceId = crypto.randomUUID(); + type State = { + stateKey: string; + }; + return { + load_config_cb_key: Ghjk.callbacks.set( + `sys_load_config_${instanceId}`, + async (args: Json) => { + const { config, bb, state: stateRaw } = args as { + config: ModuleManifest; + bb: Blackboard; + state?: State; + }; + const state = stateRaw?.stateKey + ? gcx.blackboard.get(stateRaw?.stateKey) + : undefined; + await instance.loadConfig({ id: sys_id, config }, bb, state); + return null; + }, + ), + load_lock_entry_cb_key: Ghjk.callbacks.set( + `sys_load_lock_entry_${instanceId}`, + async (args: Json) => { + const { raw } = args as any; + const state = await instance.loadLockEntry(raw); + const stateKey = `sys_state_${instanceId}`; + gcx.blackboard.set(stateKey, state); + return { + stateKey, + } satisfies State; + }, + ), + gen_lock_entry_cb_key: Ghjk.callbacks.set( + `sys_gen_lock_entry_${instanceId}`, + () => { + return instance.genLockEntry(); + }, + ), + } satisfies InstanceDesc; +} + +// start an interval to prevent the event loop exiting +// after loading systems +setInterval(() => {/* beat */}, 1000); +// FIXME: better means of exit detection, keep alive as long +// as callbacks are registered? 
+// globalThis.onbeforeunload = (evt) => { +// evt.preventDefault(); +// }; + +const args = prepareArgs.parse(Ghjk.blackboard.get("args")); +await prepareSystems(args); diff --git a/src/deno_systems/mod.ts b/src/deno_systems/mod.ts new file mode 100644 index 0000000..ddfcea4 --- /dev/null +++ b/src/deno_systems/mod.ts @@ -0,0 +1,10 @@ +import { map } from "../../modules/std.ts"; +import type { DenoSystemsRoot } from "./bindings.ts"; + +export default { + systems: Object.fromEntries( + Object.entries(map).map( + ([id, sys]) => [id, (gcx) => new sys.ctor(gcx)], + ), + ), +} satisfies DenoSystemsRoot; diff --git a/src/denort/lib.rs b/src/denort/lib.rs index 4769097..e7ab513 100644 --- a/src/denort/lib.rs +++ b/src/denort/lib.rs @@ -1,7 +1,9 @@ -#![allow(dead_code, clippy::let_and_return)] +#![allow(clippy::let_and_return)] pub use deno; +pub mod promises; + #[allow(unused)] mod interlude { pub use std::future::Future; @@ -9,11 +11,16 @@ mod interlude { pub use std::sync::Arc; pub use color_eyre::eyre; + pub use deno::deno_runtime::{ + self, + deno_core::{self, v8}, + }; pub use eyre::{format_err as ferr, Context, Result as Res, WrapErr}; - pub use tracing::{debug, error, info, trace, warn}; + pub use tracing::{debug, error, info, trace, warn, Instrument}; pub use tracing_unwrap::*; } use crate::interlude::*; + use deno::{ deno_runtime::{ deno_core::{futures::FutureExt, unsync::JoinHandle, ModuleSpecifier}, @@ -22,6 +29,7 @@ use deno::{ }, *, }; +use std::sync::atomic::AtomicBool; #[rustfmt::skip] use deno_runtime::deno_core as deno_core; // necessary for re-exported macros to work @@ -42,6 +50,9 @@ pub fn init() { }; } +// thread tag used for basic sanity checks +pub const WORKER_THREAD_NAME: &str = "denort-worker-thread"; + /// This starts a new thread and uses it to run all the tasks /// that'll need to touch deno internals. Deno is single threaded. 
/// @@ -52,79 +63,112 @@ pub async fn worker( ) -> Res { let cx = WorkerContext::from_config(flags, custom_extensions_cb).await?; - let (tx, mut rx) = tokio::sync::mpsc::unbounded_channel::(); + let (tx, mut rx) = tokio::sync::mpsc::unbounded_channel::(); let rt = tokio::runtime::Handle::current(); + let term_signal = AtomicBool::new(false); + let term_signal = Arc::new(term_signal); + + let global_term_signal = term_signal.clone(); let join_handle = new_thread_builder() + .name(WORKER_THREAD_NAME.into()) .spawn(move || { let local = tokio::task::LocalSet::new(); - local.spawn_local(async move { - while let Some(req) = rx.recv().await { - match req { - DenoWorkerReq::PrepareModule { - response_channel, - main_module, - permissions, - mode, - stdio, - custom_extensions_cb, - } => { - let mut module_cx = match cx - .prepare_module( - main_module, - &permissions, - mode, - stdio, - custom_extensions_cb, - ) - .await - { - Ok(val) => val, - Err(err) => { - response_channel - .send(Err(err)) - .expect_or_log("channel error"); - continue; - } - }; - - let (module_tx, mut module_rx) = - tokio::sync::mpsc::unbounded_channel::(); - tokio::task::spawn_local(async move { - while let Some(req) = module_rx.recv().await { - match req { - ModuleWorkerReq::Run { response_channel } => { - response_channel - .send(module_cx.run().await) - .expect_or_log("channel error") - } - ModuleWorkerReq::GetLoadedModules { response_channel } => { - response_channel - .send(module_cx.get_loaded_modules()) - .expect_or_log("channel error") - } - } - } - }); - - response_channel - .send(Ok(ModuleWorkerHandle { sender: module_tx })) - .expect_or_log("channel error"); + local.spawn_local( + async move { + debug!("starting deno worker"); + while let Some(msg) = rx.recv().await { + debug!(?msg, "deno worker msg"); + match msg { + DenoWorkerMsg::PrepareModule { + response_channel, + inner, + } => { + response_channel + .send( + module_worker(&cx, global_term_signal.clone(), inner).await, + ) + .expect_or_log("channel error"); + } } } + debug!("deno worker done"); } - }); + .instrument(tracing::debug_span!("deno-worker")), + ); rt.block_on(local); }) .unwrap(); - let join_handle = Arc::new(join_handle); + let join_handle = Arc::new(std::sync::Mutex::new(Some(join_handle))); Ok(DenoWorkerHandle { sender: tx, + term_signal, join_handle, }) } +async fn module_worker( + cx: &WorkerContext, + global_term_signal: Arc, + msg: PrepareModuleMsg, +) -> Res { + let mut module_cx = cx + .prepare_module( + msg.main_module, + &msg.permissions, + msg.mode, + msg.stdio, + msg.custom_extensions_cb, + ) + .await?; + + let (module_tx, mut module_rx) = tokio::sync::mpsc::channel::(1); + tokio::task::spawn_local( + async move { + debug!("starting module worker"); + while let Some(msg) = module_rx.recv().await { + debug!(?msg, "module worker msg"); + match msg { + ModuleWorkerReq::Run { response_channel } => response_channel + .send( + module_cx + .run(&global_term_signal) + .await + .map_err(|err| ferr!(Box::new(err))), + ) + .expect_or_log("channel error"), + ModuleWorkerReq::DriveTillExit { + term_signal, + response_channel, + } => response_channel + .send( + module_cx + .drive_till_exit(&global_term_signal, &term_signal) + .await + .map_err(|err| ferr!(Box::new(err))), + ) + .expect_or_log("channel error"), + ModuleWorkerReq::Execute { response_channel } => response_channel + .send( + module_cx + .execute_main_module() + .await + .map_err(|err| ferr!(Box::new(err))), + ) + .expect_or_log("channel error"), + ModuleWorkerReq::GetLoadedModules { 
response_channel } => response_channel + .send(module_cx.get_loaded_modules()) + .expect_or_log("channel error"), + } + } + debug!("module worker done"); + } + .instrument(tracing::debug_span!("deno-module-worker")), + ); + Ok(ModuleWorkerHandle { sender: module_tx }) +} + #[derive(educe::Educe)] #[educe(Debug)] struct WorkerContext { @@ -187,7 +231,7 @@ impl WorkerContext { let permissions = deno_permissions::Permissions::from_options(desc_parser.as_ref(), permissions)?; let permissions = deno_permissions::PermissionsContainer::new(desc_parser, permissions); - let worker = self + let mut worker = self .worker_factory .create_custom_worker( mode, @@ -198,30 +242,72 @@ impl WorkerContext { ) .await .map_err(|err| ferr!(Box::new(err)))?; + let maybe_coverage_collector = worker + .maybe_setup_coverage_collector() + .await + .map_err(|err| ferr!(Box::new(err)))?; + + // TODO: hot module support, expose shared worker contet from deno/cli/worker + // let maybe_hmr_runner = worker + // .maybe_setup_hmr_runner() + // .await + // .map_err(|err| ferr!(Box::new(err)))?; + + let worker = worker.into_main_worker(); Ok(ModuleWorkerContext { main_module, worker, graph: self.graph.clone(), + maybe_coverage_collector, + // maybe_hmr_runner, }) } } -enum DenoWorkerReq { +#[derive(educe::Educe)] +#[educe(Debug)] +struct PrepareModuleMsg { + main_module: ModuleSpecifier, + permissions: deno_permissions::PermissionsOptions, + #[educe(Debug(ignore))] + mode: deno_runtime::WorkerExecutionMode, + #[educe(Debug(ignore))] + stdio: deno_runtime::deno_io::Stdio, + #[educe(Debug(ignore))] + custom_extensions_cb: Option>, +} + +#[derive(educe::Educe)] +#[educe(Debug)] +enum DenoWorkerMsg { PrepareModule { + #[educe(Debug(ignore))] response_channel: tokio::sync::oneshot::Sender>, - main_module: ModuleSpecifier, - permissions: deno_permissions::PermissionsOptions, - mode: deno_runtime::WorkerExecutionMode, - stdio: deno_runtime::deno_io::Stdio, - custom_extensions_cb: Option>, + inner: PrepareModuleMsg, }, } #[derive(Clone, Debug)] pub struct DenoWorkerHandle { - sender: tokio::sync::mpsc::UnboundedSender, - join_handle: Arc>, + sender: tokio::sync::mpsc::UnboundedSender, + join_handle: Arc>>>, + term_signal: Arc, +} + +impl DenoWorkerHandle { + pub fn terminate(self) { + self.term_signal + .store(true, std::sync::atomic::Ordering::Relaxed); + let join_handle = { + let mut opt = self.join_handle.lock().expect_or_log("mutex error"); + opt.take() + }; + let Some(join_handle) = join_handle else { + return; + }; + join_handle.join().expect_or_log("join error") + } } impl DenoWorkerHandle { @@ -235,13 +321,15 @@ impl DenoWorkerHandle { ) -> Res { let (tx, rx) = tokio::sync::oneshot::channel(); self.sender - .send(DenoWorkerReq::PrepareModule { + .send(DenoWorkerMsg::PrepareModule { response_channel: tx, - main_module, - permissions, - mode, - stdio, - custom_extensions_cb, + inner: PrepareModuleMsg { + main_module, + permissions, + mode, + stdio, + custom_extensions_cb, + }, }) .expect_or_log("channel error"); rx.await.expect_or_log("channel error") @@ -253,9 +341,12 @@ impl DenoWorkerHandle { struct ModuleWorkerContext { main_module: deno_core::ModuleSpecifier, #[educe(Debug(ignore))] - worker: deno::worker::CliMainWorker, + worker: deno_runtime::worker::MainWorker, #[educe(Debug(ignore))] graph: Arc, + #[educe(Debug(ignore))] + maybe_coverage_collector: Option>, + // maybe_hmr_runner: Option>, } impl ModuleWorkerContext { @@ -276,43 +367,205 @@ impl ModuleWorkerContext { .collect() } - async fn run(&mut self) -> Res { - 
self.worker.run().await.map_err(|err| ferr!(Box::new(err))) + async fn run(&mut self, global_term_signal: &AtomicBool) -> anyhow::Result { + debug!("main_module {}", self.main_module); + self.execute_main_module().await?; + self.drive_till_exit(global_term_signal, &AtomicBool::new(false)) + .await + } + + async fn drive_till_exit( + &mut self, + global_term_signal: &AtomicBool, + term_signal: &AtomicBool, + ) -> anyhow::Result { + self.worker.dispatch_load_event()?; + loop { + /* if let Some(hmr_runner) = self.maybe_hmr_runner.as_mut() { + let watcher_communicator = + self.shared.maybe_file_watcher_communicator.clone().unwrap(); + + let hmr_future = hmr_runner.run().boxed_local(); + let event_loop_future = self.worker.run_event_loop(false).boxed_local(); + + let result; + tokio::select! { + hmr_result = hmr_future => { + result = hmr_result; + }, + event_loop_result = event_loop_future => { + result = event_loop_result; + } + } + if let Err(e) = result { + watcher_communicator.change_restart_mode(WatcherRestartMode::Automatic); + return Err(e); + } + } else { + self.worker + .run_event_loop(self.maybe_coverage_collector.is_none()) + .await?; + } */ + self.worker + .run_event_loop(self.maybe_coverage_collector.is_none()) + .await?; + + if term_signal.load(std::sync::atomic::Ordering::Relaxed) { + trace!("worker term signal lit, shutting down event loop"); + break; + } + + if global_term_signal.load(std::sync::atomic::Ordering::Relaxed) { + trace!("globalterm signal lit, shutting down event loop"); + break; + } + + let web_continue = self.worker.dispatch_beforeunload_event()?; + if !web_continue { + let node_continue = self.worker.dispatch_process_beforeexit_event()?; + if !node_continue { + trace!("beforeunload and beforeexit success, shutting down loop"); + break; + } + } + } + self.worker.dispatch_unload_event()?; + self.worker.dispatch_process_exit_event()?; + if let Some(coverage_collector) = self.maybe_coverage_collector.as_mut() { + self.worker + .js_runtime + .with_event_loop_future( + coverage_collector.stop_collecting().boxed_local(), + deno_core::PollEventLoopOptions::default(), + ) + .await?; + } + /* if let Some(hmr_runner) = self.maybe_hmr_runner.as_mut() { + self.worker + .js_runtime + .with_event_loop_future( + hmr_runner.stop().boxed_local(), + deno_core::PollEventLoopOptions::default(), + ) + .await?; + } */ + Ok(self.worker.exit_code()) + //.map_err(|err| ferr!(Box::new(err))) + } + + async fn execute_main_module(&mut self) -> anyhow::Result<()> { + let id = self.worker.preload_main_module(&self.main_module).await?; + self.worker.evaluate_module(id).await } } +#[derive(educe::Educe)] +#[educe(Debug)] enum ModuleWorkerReq { Run { + #[educe(Debug(ignore))] + response_channel: tokio::sync::oneshot::Sender>, + }, + DriveTillExit { + #[educe(Debug(ignore))] + term_signal: Arc, + #[educe(Debug(ignore))] response_channel: tokio::sync::oneshot::Sender>, }, + Execute { + #[educe(Debug(ignore))] + response_channel: tokio::sync::oneshot::Sender>, + }, GetLoadedModules { + #[educe(Debug(ignore))] response_channel: tokio::sync::oneshot::Sender>, }, } #[derive(Clone, Debug)] pub struct ModuleWorkerHandle { - sender: tokio::sync::mpsc::UnboundedSender, + sender: tokio::sync::mpsc::Sender, +} + +#[derive(Clone, Debug)] +pub struct FinishedWorkerHandle { + sender: tokio::sync::mpsc::Sender, } + impl ModuleWorkerHandle { - pub async fn get_loaded_modules(&mut self) -> Vec { + /// Load and execute the main module + /// and drive the main loop until the program + /// exits. 
+ pub async fn run(self) -> Res<(i32, FinishedWorkerHandle)> { let (tx, rx) = tokio::sync::oneshot::channel(); self.sender - .send(ModuleWorkerReq::GetLoadedModules { + .send(ModuleWorkerReq::Run { response_channel: tx, }) + .await + .expect_or_log("channel error"); + Ok(( + rx.await.expect_or_log("channel error")?, + FinishedWorkerHandle { + sender: self.sender, + }, + )) + } + + /// Load and execute the main module + /// but doesn't progress the main event + /// loop. + pub async fn execute(&mut self) -> Res<()> { + let (tx, rx) = tokio::sync::oneshot::channel(); + self.sender + .send(ModuleWorkerReq::Execute { + response_channel: tx, + }) + .await .expect_or_log("channel error"); - // FIXME: can use sync oneshot here rx.await.expect_or_log("channel error") } - pub async fn run(&mut self) -> Res { + /// Drive the event loop until exit and return + /// result in returned channel. + /// Expects that [`execute`] was called first on the worker. + pub async fn drive_till_exit( + self, + ) -> Res<( + tokio::sync::oneshot::Receiver>, + Arc, + FinishedWorkerHandle, + )> { + let term_signal = AtomicBool::new(false); + let term_signal = Arc::new(term_signal); let (tx, rx) = tokio::sync::oneshot::channel(); self.sender - .send(ModuleWorkerReq::Run { + .send(ModuleWorkerReq::DriveTillExit { + term_signal: term_signal.clone(), + response_channel: tx, + }) + .await + .expect_or_log("channel error"); + Ok(( + rx, + term_signal, + FinishedWorkerHandle { + sender: self.sender, + }, + )) + } +} + +impl FinishedWorkerHandle { + pub async fn get_loaded_modules(&mut self) -> Vec { + let (tx, rx) = tokio::sync::oneshot::channel(); + self.sender + .send(ModuleWorkerReq::GetLoadedModules { response_channel: tx, }) + .await .expect_or_log("channel error"); + // FIXME: can use sync oneshot here? rx.await.expect_or_log("channel error") } } diff --git a/src/denort/promises.rs b/src/denort/promises.rs new file mode 100644 index 0000000..7692575 --- /dev/null +++ b/src/denort/promises.rs @@ -0,0 +1,70 @@ +use crate::interlude::*; + +// Lifted from deno_core 0.318.0 +/* +MIT License + +Copyright 2018-2024 the Deno authors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + */ + +/// Wrap a promise with `then` handlers allowing us to watch the resolution progress from a Rust closure. +/// This has a side-effect of preventing unhandled rejection handlers from triggering. If that is +/// desired, the final handler may choose to rethrow the exception. 
+pub fn watch_promise<'s, F>( + scope: &mut v8::HandleScope<'s>, + promise: v8::Local<'s, v8::Promise>, + f: F, +) -> Option> +where + F: FnOnce( + &mut v8::HandleScope, + v8::ReturnValue, + Result, v8::Local>, + ) + 'static, +{ + let external = v8::External::new(scope, Box::into_raw(Box::new(Some(f))) as _); + + fn get_handler(external: v8::Local) -> F { + unsafe { Box::>::from_raw(external.value() as _) } + .take() + .unwrap() + } + let on_fulfilled = v8::Function::builder( + |scope: &mut v8::HandleScope, args: v8::FunctionCallbackArguments, rv: v8::ReturnValue| { + let data = v8::Local::::try_from(args.data()).unwrap(); + let f = get_handler::(data); + f(scope, rv, Ok(args.get(0))); + }, + ) + .data(external.into()) + .build(scope); + let on_rejected = v8::Function::builder( + |scope: &mut v8::HandleScope, args: v8::FunctionCallbackArguments, rv: v8::ReturnValue| { + let data = v8::Local::::try_from(args.data()).unwrap(); + let f = get_handler::(data); + f(scope, rv, Err(args.get(0))); + }, + ) + .data(external.into()) + .build(scope); + // function builders will return None if the runtime is shutting down + let (Some(on_fulfilled), Some(on_rejected)) = (on_fulfilled, on_rejected) else { + _ = get_handler::(external); + return None; + }; + + // then2 will return None if the runtime is shutting down + let Some(promise) = promise.then2(scope, on_fulfilled, on_rejected) else { + _ = get_handler::(external); + return None; + }; + + Some(promise) +} diff --git a/src/ghjk/Cargo.toml b/src/ghjk/Cargo.toml index 89bb4b7..5335c17 100644 --- a/src/ghjk/Cargo.toml +++ b/src/ghjk/Cargo.toml @@ -39,6 +39,8 @@ tracing-error = "0.2" tracing-unwrap.workspace = true color-eyre.workspace = true +anyhow.workspace = true + tracing.workspace = true tracing-subscriber.workspace = true tracing-appender = "0.2" @@ -61,6 +63,7 @@ data-encoding = "2.6.0" sha2 = "0.10.8" pathdiff = "0.2.2" directories = "5.0.1" +async-trait = "0.1.83" [build-dependencies] shadow-rs.workspace = true diff --git a/src/ghjk/deno.rs b/src/ghjk/deno.rs deleted file mode 100644 index c8506f4..0000000 --- a/src/ghjk/deno.rs +++ /dev/null @@ -1,82 +0,0 @@ -use crate::interlude::*; - -#[rustfmt::skip] -use deno_core as deno_core; // necessary for re-exported macros to work - -pub fn extensions(config: ExtConfig) -> Arc { - // let atom = std::sync::atomic::AtomicBool::new(false); - Arc::new(move || { - // if atom.load(std::sync::atomic::Ordering::SeqCst) { - // return vec![]; - // } - // atom.store(true, std::sync::atomic::Ordering::SeqCst); - vec![ghjk_deno_ext::init_ops_and_esm(config.clone())] - }) -} - -// This is used to populate the deno_core::OpState with dependencies -// used by the different ops -#[derive(Clone)] -pub struct ExtConfig { - pub blackboard: Arc>, -} - -impl ExtConfig { - pub fn new(blackboard: Arc>) -> Self { - Self { blackboard } - } - - fn inject(self, state: &mut deno_core::OpState) { - state.put(ExtContext { - blackboard: self.blackboard.clone(), - }); - } -} - -deno_core::extension!( - ghjk_deno_ext, - ops = [op_get_blackboard, op_set_blackboard], - options = { config: ExtConfig }, - state = |state, opt| { - opt.config.inject(state); - }, - customizer = |ext: &mut deno_core::Extension| { - customizer(ext); - }, - docs = "Kitchen sink extension for all ghjk needs.", -); - -fn customizer(ext: &mut deno_core::Extension) { - ext.esm_files - .to_mut() - .push(deno_core::ExtensionFileSource::new( - "ext:ghjk_deno_ext/00_runtime.js", - deno_core::ascii_str_include!("deno/00_runtime.js"), - )); - ext.esm_entry_point = 
Some("ext:ghjk_deno_ext/00_runtime.js"); -} - -struct ExtContext { - blackboard: Arc>, -} - -#[deno_core::op2] -#[serde] -pub fn op_get_blackboard( - #[state] ctx: &ExtContext, - #[string] key: &str, -) -> Option { - ctx.blackboard.get(key).map(|val| val.clone()) -} - -#[deno_core::op2] -#[serde] -pub fn op_set_blackboard( - #[state] ctx: &ExtContext, - #[string] key: String, - #[serde] val: serde_json::Value, -) -> Option { - ctx.blackboard - .insert(key.into(), val) - .map(|val| val.clone()) -} diff --git a/src/ghjk/ext.rs b/src/ghjk/ext.rs new file mode 100644 index 0000000..c515700 --- /dev/null +++ b/src/ghjk/ext.rs @@ -0,0 +1,125 @@ +use std::{cell::RefCell, rc::Rc}; + +use crate::interlude::*; + +use deno_core::OpState; +#[rustfmt::skip] +use deno_core as deno_core; // necessary for re-exported macros to work + +mod callbacks; +pub use callbacks::CallbacksHandle; + +/// This extension assumes that deno was launched on top of a tokio::LocalSet +pub fn extensions(config: ExtConfig) -> Arc { + // let atom = std::sync::atomic::AtomicBool::new(false); + Arc::new(move || { + // if atom.load(std::sync::atomic::Ordering::SeqCst) { + // return vec![]; + // } + // atom.store(true, std::sync::atomic::Ordering::SeqCst); + vec![ghjk_deno_ext::init_ops_and_esm(config.clone())] + }) +} +// This is used to populate the deno_core::OpState with dependencies +// used by the different ops +#[derive(Clone, Default)] +pub struct ExtConfig { + pub blackboard: Arc>, + callbacks_rx: Arc>, + pub hostcalls: Hostcalls, +} + +impl ExtConfig { + pub fn new() -> Self { + Self::default() + } + + pub fn callbacks_handle(&mut self) -> callbacks::CallbacksHandle { + let (line, handle) = callbacks::CallbackLine::new(); + self.callbacks_rx = Arc::new(std::sync::Mutex::new(line)); + + handle + } + + fn inject(self, state: &mut deno_core::OpState) { + let ctx = ExtContext { + config: self, + callbacks: default(), + }; + callbacks::worker(ctx.clone()); + state.put(ctx); + } +} + +deno_core::extension!( + ghjk_deno_ext, + ops = [op_blackboard_get, op_blackboard_set, callbacks::op_callbacks_set, op_hostcall], + options = { config: ExtConfig }, + state = |state, opt| { + opt.config.inject(state); + }, + customizer = customizer, + docs = "Kitchen sink extension for all ghjk needs.", +); + +fn customizer(ext: &mut deno_core::Extension) { + ext.esm_files + .to_mut() + .push(deno_core::ExtensionFileSource::new( + "ext:ghjk_deno_ext/00_runtime.js", + deno_core::ascii_str_include!("js/00_runtime.js"), + )); + ext.esm_entry_point = Some("ext:ghjk_deno_ext/00_runtime.js"); +} + +#[derive(Clone)] +struct ExtContext { + callbacks: callbacks::Callbacks, + config: ExtConfig, +} + +#[deno_core::op2] +#[serde] +pub fn op_blackboard_get( + #[state] ctx: &ExtContext, + #[string] key: &str, +) -> Option { + ctx.config.blackboard.get(key).map(|val| val.clone()) +} + +#[deno_core::op2] +#[serde] +pub fn op_blackboard_set( + #[state] ctx: &ExtContext, + #[string] key: String, + #[serde] val: serde_json::Value, +) -> Option { + ctx.config.blackboard.insert(key.into(), val) +} + +#[derive(Clone, Default)] +pub struct Hostcalls { + pub funcs: Arc>, +} + +pub type HostcallFn = Box< + dyn Fn(serde_json::Value) -> BoxFuture<'static, Res> + 'static + Send + Sync, +>; + +#[deno_core::op2(async)] +#[serde] +pub async fn op_hostcall( + state: Rc>, + #[string] name: String, + #[serde] args: serde_json::Value, +) -> anyhow::Result { + let ctx = { + let state = state.borrow(); + let ctx = state.borrow::(); + ctx.clone() + }; + let Some(func) = 
ctx.config.hostcalls.funcs.get(&name[..]) else { + anyhow::bail!("no hostcall found under {name}"); + }; + func(args).await.map_err(|err| anyhow::anyhow!(err)) +} diff --git a/src/ghjk/ext/callbacks.rs b/src/ghjk/ext/callbacks.rs new file mode 100644 index 0000000..2f63aa3 --- /dev/null +++ b/src/ghjk/ext/callbacks.rs @@ -0,0 +1,293 @@ +use std::{cell::RefCell, rc::Rc}; + +use crate::interlude::*; + +use deno_core::serde_v8; +use deno_core::v8; +use deno_core::OpState; +#[rustfmt::skip] +use deno_core as deno_core; // necessary for re-exported macros to work +use tokio::sync::{mpsc, oneshot}; + +use super::ExtContext; + +#[derive(Debug, thiserror::Error)] +pub enum CallbackError { + #[error("no callback found under {key}")] + NotFound { key: String }, + #[error("callback protocol error {0:?}")] + ProtocolError(eyre::Report), + #[error("error executing callback {0:?}")] + JsError(eyre::Report), + #[error("v8 error {0:?}")] + V8Error(eyre::Report), +} + +/// Line used by the callback_worker to receive +/// invocations. +#[derive(Default)] +pub struct CallbackLine { + line: Option>, + was_set: bool, +} + +impl CallbackLine { + pub fn new() -> (Self, CallbacksHandle) { + let (tx, rx) = tokio::sync::mpsc::channel(1); + ( + Self { + was_set: true, + line: Some(rx), + }, + CallbacksHandle { sender: tx }, + ) + } + + fn take(&mut self) -> Option> { + if !self.was_set { + warn!("callback line was not set"); + return None; + } + match self.line.take() { + Some(val) => Some(val), + None => { + panic!("extensions were injected twice") + } + } + } +} + +/// Line used to invoke callbacks registered by js code. +#[derive(Clone)] +pub struct CallbacksHandle { + sender: mpsc::Sender, +} + +impl CallbacksHandle { + pub async fn exec( + &self, + key: CHeapStr, + args: serde_json::Value, + ) -> Result { + let (tx, rx) = tokio::sync::oneshot::channel(); + self.sender + .send(CallbacksMsg::Exec { + response_channel: tx, + key, + args, + }) + .await + .expect_or_log("channel error"); + rx.await.expect_or_log("channel error") + } +} + +/// Internal used to communicate between callback worker +#[derive(educe::Educe)] +#[educe(Debug)] +enum CallbacksMsg { + Exec { + #[educe(Debug(ignore))] + response_channel: oneshot::Sender>, + key: CHeapStr, + #[educe(Debug(ignore))] + args: serde_json::Value, + }, +} + +#[derive(Clone, Default)] +pub struct Callbacks { + store: Arc>, +} + +/// Start a worker task to execute callbacks on. +/// +/// Stored callbacks are not Sync so this expects to be started +/// on the same thread as deno. 
+pub fn worker(ctx: ExtContext) { + let mut line = { + let mut line = ctx.config.callbacks_rx.lock().expect_or_log("mutex err"); + let Some(line) = line.take() else { + return; + }; + line + }; + assert_eq!( + std::thread::current().name(), + Some(denort::WORKER_THREAD_NAME), + "callback worker must be launched on deno worker started with a LocalSet" + ); + // assumes local set + tokio::task::spawn_local( + async move { + debug!("callback worker starting"); + while let Some(msg) = line.recv().await { + debug!(?msg, "callback worker msg"); + match msg { + CallbacksMsg::Exec { + key: name, + args, + response_channel, + } => response_channel + .send(ctx.exec_callback(name, args).await) + .expect_or_log("channel error"), + } + } + debug!("callback worker done"); + } + .instrument(tracing::debug_span!("callback-worker")), + ); +} + +impl ExtContext { + pub async fn exec_callback( + &self, + key: CHeapStr, + args: serde_json::Value, + ) -> Result { + let Some(cb) = self.callbacks.store.get(&key[..]).map(|cb| cb.clone()) else { + return Err(CallbackError::NotFound { + key: key.to_string(), + }); + }; + + let (tx, rx) = oneshot::channel::>(); + + // we use the sender to spawn work on the v8 thread + let join_handle = tokio::task::spawn_blocking(move || { + cb.async_work_sender.spawn_blocking(move |scope| { + let args = serde_v8::to_v8(scope, args).map_err(|err| { + CallbackError::V8Error(ferr!(err).wrap_err("error serializing args to v8")) + })?; + + let recv = v8::undefined(scope); + + let res = { + let tc_scope = &mut v8::TryCatch::new(scope); + // FIXME(@yohe): the original pointer was made from a global + // and yet we're transmuting it to a Local here. + // This is observed from the deno codebase + // and I can't explain it + let func = unsafe { + std::mem::transmute::, v8::Local>( + cb.js_fn, + ) + }; + + let res = func + .call(tc_scope, recv.into(), &[args]) + // FIXME: under what circumstances can this be None? + .expect_or_log("got None from callback call"); + if tc_scope.has_caught() { + let exception = tc_scope.exception().unwrap(); + let exception = exception.to_rust_string_lossy(tc_scope); + /* let exception = serde_v8::from_v8(tc_scope, exception).map_err(|err| { + CallbackError::ProtocolError( + ferr!(err).wrap_err("error deserializing exception from v8"), + ) + })?; */ + return Err(CallbackError::JsError(ferr!( + "callback exception: {exception}" + ))); + } + res + }; + if res.is_promise() { + let promise = v8::Local::::try_from(res).unwrap(); + + denort::promises::watch_promise(scope, promise, move |scope, _rf, res| { + let res = match res { + Ok(val) => serde_v8::from_v8(scope, val).map_err(|err| { + CallbackError::ProtocolError( + ferr!(err) + .wrap_err("error deserializing promise result from v8"), + ) + }), + Err(err) => Err(CallbackError::JsError(ferr!( + "callback promise rejection: {}", + err.to_rust_string_lossy(scope) + ))), /* Err(err) => match serde_v8::from_v8(scope, err) { + Ok(json) => Err(CallbackError::JsError(json)), + Err(err) => Err(CallbackError::ProtocolError( + ferr!(err) + .wrap_err("error deserializing promise rejection from v8"), + )), + }, */ + }; + tx.send(res).expect_or_log("channel error") + }); + Ok(None) + } else { + let res = serde_v8::from_v8(scope, res).map_err(|err| { + CallbackError::ProtocolError( + ferr!(err).wrap_err("error deserializing result from v8"), + ) + })?; + Ok(Some(res)) + } + }) + }); + + let res = match join_handle.await.expect_or_log("tokio error")?
{ + Some(res) => res, + None => { + debug!("waiting for callback promise"); + rx.await.expect_or_log("channel error")? + } + }; + + Ok(res) + } +} + +struct Callback { + js_fn: SendPtr, + async_work_sender: deno_core::V8CrossThreadTaskSpawner, +} + +impl Clone for Callback { + fn clone(&self) -> Self { + Self { + js_fn: SendPtr(self.js_fn.0), + async_work_sender: self.async_work_sender.clone(), + } + } +} + +#[derive(Clone, Copy)] +#[repr(transparent)] +struct SendPtr(std::ptr::NonNull); +// SAFETY: we only ever access this value from within the same thread +// as deno +unsafe impl Send for SendPtr {} + +/* impl Callback { + fn drop(self, scope: &mut v8::HandleScope) { + unsafe { + _ = v8::Global::from_raw(scope, self.js_fn.0); + } + } +} */ + +#[deno_core::op2] +pub fn op_callbacks_set( + state: Rc>, + #[string] name: String, + #[global] cb: v8::Global, +) -> anyhow::Result<()> { + let (ctx, async_work_sender) = { + let state = state.borrow(); + let ctx = state.borrow::(); + let sender = state.borrow::(); + + (ctx.clone(), sender.clone()) + }; + ctx.callbacks.store.insert( + name.into(), + Callback { + js_fn: SendPtr(cb.into_raw()), + async_work_sender, + }, + ); + Ok(()) +} diff --git a/src/ghjk/host.rs b/src/ghjk/host.rs index df01517..4ded93f 100644 --- a/src/ghjk/host.rs +++ b/src/ghjk/host.rs @@ -1,59 +1,17 @@ use crate::interlude::*; +use crate::systems::*; + mod deno; mod hashfile; use hashfile::*; -#[derive(educe::Educe)] -#[educe(Debug)] -enum ModuleManifest { - Todo, -} - -impl ModuleManifest { - pub fn init(&self) -> ModuleInstance { - ModuleInstance::Todo - } -} -enum ModuleInstance { - Todo, -} - -type ModuleLockEntry = Box; -type ModuleContext = Box; - -impl ModuleInstance { - pub async fn load_lock_entry( - &mut self, - gcx: &GhjkCtx, - raw: serde_json::Value, - ) -> Res { - Ok(Box::new("todo")) - } - - pub async fn gen_lock_entry( - &mut self, - gcx: &GhjkCtx, - mcx: &ModuleContext, - ) -> Res { - Ok(serde_json::json!("todo")) - } - - pub async fn load_config( - &mut self, - gcx: &GhjkCtx, - bb: &ConfigBlackboard, - lock_entry: Option, - ) -> Res { - Ok(Box::new("todo")) - } -} - #[derive(Debug)] pub struct Config { /// Discard serialization cache. pub re_serialize: bool, /// Discard any resolved values in lockfile. + #[allow(unused)] pub re_resolve: bool, /// Force use serialization cache.
pub locked: bool, @@ -61,32 +19,33 @@ pub struct Config { pub cwd: PathBuf, } -#[derive(Debug)] +#[derive(educe::Educe)] +#[educe(Debug)] pub struct HostCtx { pub gcx: Arc, - config: Config, - pub modules: HashMap, + pub config: Config, + #[educe(Debug(ignore))] + pub systems: HashMap, pub file_hash_memo: DHashMap, } impl HostCtx { - pub fn new(gcx: Arc, config: Config) -> Self { + pub fn new( + gcx: Arc, + config: Config, + systems: HashMap, + ) -> Self { Self { gcx, config, - modules: [ - ("envs".into(), ModuleManifest::Todo), - ("ports".into(), ModuleManifest::Todo), - ("tasks".into(), ModuleManifest::Todo), - ] - .into_iter() - .collect(), + systems, file_hash_memo: default(), } } } -pub async fn modules_from_ghjkfile(hcx: Arc) -> Res> { +#[tracing::instrument(skip(hcx))] +pub async fn systems_from_ghjkfile(hcx: Arc) -> Res> { let (hashfile_path, lockfile_path) = ( hcx.gcx.ghjk_dir_path.join("hash.json"), hcx.gcx.ghjk_dir_path.join("lock.json"), @@ -155,23 +114,23 @@ pub async fn modules_from_ghjkfile(hcx: Arc) -> Res) -> Res) -> Res, pub config: Arc, hash_obj: HashObj, - mod_instances: IndexMap, + sys_instances: IndexMap, old_lock_obj: Option, lockfile_path: PathBuf, hashfile_path: PathBuf, @@ -251,25 +210,23 @@ pub struct GhjkfileModules { hashfile_written: bool, } -impl GhjkfileModules { +impl GhjkfileSystems { + #[tracing::instrument(skip(self))] pub async fn write_lockfile(&mut self) -> Res<()> { let mut lock_obj = LockObj { version: "0".into(), config: self.config.clone(), - module_entries: default(), + sys_entries: default(), }; - // generate the lock entries after *all* the modules + // generate the lock entries after *all* the systems // are done processing their config to allow // any shared stores to be properly populated // e.g. the resolution memo store - for (mod_id, (mod_inst, mcx)) in &mut self.mod_instances { - let lock_entry = mod_inst - .gen_lock_entry(&self.hcx.gcx, mcx) - .await - .wrap_err_with(|| { - format!("error generating lock entry for module: {:?}", mod_id) - })?; - lock_obj.module_entries.insert(mod_id.clone(), lock_entry); + for (sys_id, sys_inst) in &mut self.sys_instances { + let lock_entry = sys_inst.gen_lock_entry().await.wrap_err_with(|| { + format!("error generating lock entry for system: {:?}", sys_id) + })?; + lock_obj.sys_entries.insert(sys_id.clone(), lock_entry); } if self.old_lock_obj.is_none() @@ -278,12 +235,13 @@ impl GhjkfileModules { if self.hcx.config.locked { warn!("locked flag set, changes to lockfile discarded"); } else { - tokio::fs::write( + debug!(lockfile_path = ?self.lockfile_path, ?lock_obj, "writing lock.json"); + /* tokio::fs::write( &self.lockfile_path, serde_json::to_vec_pretty(&lock_obj).expect_or_log("error jsonifying lockfile"), ) .await - .wrap_err("error writing to lockfile")?; + .wrap_err("error writing to lockfile")?; */ self.old_lock_obj.replace(lock_obj); } } @@ -294,13 +252,14 @@ impl GhjkfileModules { if self.hcx.config.locked { unreachable!("code should have early exited"); } - tokio::fs::write( - &self.lockfile_path, + debug!(hashfile_path = ?self.hashfile_path, hash_obj= ?self.hash_obj, "writing hash.json"); + /* tokio::fs::write( + &self.hashfile_path, serde_json::to_vec_pretty(&self.hash_obj) .expect_or_log("error jsonifying hashfile"), ) .await - .wrap_err("error writing to lockfile")?; + .wrap_err("error writing to lockfile")?; */ self.hashfile_written = true; } Ok(()) @@ -310,7 +269,6 @@ impl GhjkfileModules { async fn serialize_ghjkfile(hcx: &HostCtx, path: &Path) -> Res<(Arc, HashObj)> { let ext = 
path.extension(); let res = if ext.map(|ext| ext == "ts" || ext == "js") == Some(true) { - debug!("serializing deno ghjkfile"); deno::serialize_deno_ghjkfile(hcx, path).await? } else { eyre::bail!("unrecognized ghjkfile extension: {path:?}") @@ -323,7 +281,9 @@ async fn serialize_ghjkfile(hcx: &HostCtx, path: &Path) -> Res<(Arc Res<(Arc, read_file_paths: Vec, listed_file_paths: Vec, - loaded_modules: Vec, -} - -type ModuleId = CHeapStr; - -#[derive(Debug, Serialize, Deserialize, PartialEq)] -struct ModuleConfig { - pub id: ModuleId, - pub config: serde_json::Value, } -type ConfigBlackboard = serde_json::Map; - #[derive(Debug, Serialize, Deserialize, PartialEq)] pub struct SerializedConfig { - modules: Vec, + modules: Vec, blackboard: ConfigBlackboard, } #[derive(Debug, Serialize, Deserialize, PartialEq)] pub struct LockObj { pub version: String, - pub module_entries: indexmap::IndexMap, + pub sys_entries: indexmap::IndexMap, pub config: Arc, } impl LockObj { /// The lock.json file stores the serialized config and some entries - /// from modules. It's primary purpose is as a memo store to avoid + /// from systems. It's primary purpose is as a memo store to avoid /// re-serialization on each CLI invocation. pub async fn from_file(path: &Path) -> Res { let raw = tokio::fs::read(path) diff --git a/src/ghjk/host/deno.rs b/src/ghjk/host/deno.rs index c78168b..9703c6d 100644 --- a/src/ghjk/host/deno.rs +++ b/src/ghjk/host/deno.rs @@ -11,17 +11,20 @@ struct InternalSerializationResult { listed_file_paths: Vec, } +#[tracing::instrument(skip(hcx))] pub async fn serialize_deno_ghjkfile( hcx: &super::HostCtx, path: &Path, ) -> Res { - let main_module = deno_runtime::deno_core::resolve_path( - hcx.gcx.repo_root.join("./files/deno/mod2.ts"), - &hcx.config.cwd, - ) - .wrap_err("error resolving main module")?; + let main_module = hcx + .gcx + .repo_root + .join("files/deno/bindings.ts") + .wrap_err("repo url error")?; + + let mut ext_conf = crate::ext::ExtConfig::new(); - let blackboard = [ + ext_conf.blackboard = [ // blackboard is used as communication means // with the deno side of the code ( @@ -32,11 +35,12 @@ pub async fn serialize_deno_ghjkfile( ), ] .into_iter() - .collect::>(); + .collect::>() + .into(); - let blackboard = Arc::new(blackboard); + let bb = ext_conf.blackboard.clone(); - let mut worker = hcx + let worker = hcx .gcx .deno .prepare_module( @@ -50,27 +54,38 @@ pub async fn serialize_deno_ghjkfile( }, deno_runtime::WorkerExecutionMode::Run, default(), - Some(crate::deno::extensions(crate::deno::ExtConfig { - blackboard: blackboard.clone(), - })), + Some(crate::ext::extensions(ext_conf)), ) .await?; - let exit_code = worker.run().await?; + let (exit_code, mut worker) = worker.run().await?; if exit_code != 0 { eyre::bail!("non-zero exit code running deno module"); } let loaded_modules = worker.get_loaded_modules().await; - let (_, resp) = blackboard.remove("resp").expect_or_log("resp missing"); + let (_, resp) = bb.remove("resp").expect_or_log("resp missing"); let resp: InternalSerializationResult = serde_json::from_value(resp).expect_or_log("error deserializing resp"); + let mut loaded_modules = loaded_modules + .into_iter() + .filter(|url| url.scheme() == "file") + .map(|url| { + url.to_file_path() + .map_err(|()| ferr!("url to path error: {url}")) + }) + .collect::>>()?; + + let mut read_file_paths = resp.read_file_paths; + read_file_paths.append(&mut loaded_modules); + + debug!("ghjk.ts serialized"); + Ok(super::SerializationResult { config: resp.config, accessed_env_keys: 
resp.accessed_env_keys, listed_file_paths: resp.listed_file_paths, - read_file_paths: resp.read_file_paths, - loaded_modules, + read_file_paths, }) } diff --git a/src/ghjk/host/hashfile.rs b/src/ghjk/host/hashfile.rs index 97d4c8d..7c67d7f 100644 --- a/src/ghjk/host/hashfile.rs +++ b/src/ghjk/host/hashfile.rs @@ -51,7 +51,7 @@ impl HashObj { { if self.read_file_hashes != file_digests( - &hcx, + hcx, self.read_file_hashes .keys() .map(|path| path.as_ref()) @@ -74,10 +74,7 @@ pub fn env_var_digests<'a>( .map(|key| { ( key.to_owned(), - match all.get(key) { - Some(val) => Some(crate::utils::hash_str(val)), - None => None, - }, + all.get(key).map(|val| crate::utils::hash_str(val)), ) }) .collect() @@ -162,7 +159,7 @@ impl From for StatMeta { ts.duration_since(std::time::SystemTime::UNIX_EPOCH) .map_err(|_| ()) }) - .and_then(|dur| Ok(dur.as_secs())) + .map(|dur| dur.as_secs()) .ok() } #[cfg(unix)] diff --git a/src/ghjk/deno/00_runtime.js b/src/ghjk/js/00_runtime.js similarity index 74% rename from src/ghjk/deno/00_runtime.js rename to src/ghjk/js/00_runtime.js index e4ce282..70e342e 100644 --- a/src/ghjk/deno/00_runtime.js +++ b/src/ghjk/js/00_runtime.js @@ -21,14 +21,23 @@ function getOp(name) { return op; } +const op_callbacks_set = getOp("op_callbacks_set"); + /** * @type {import('./runtime.d.ts').GhjkNs} */ const Ghjk = { blackboard: { - get: getOp("op_get_blackboard"), - set: getOp("op_set_blackboard"), + get: getOp("op_blackboard_get"), + set: getOp("op_blackboard_set"), + }, + callbacks: { + set: (key, fn) => { + op_callbacks_set(key, fn); + return key; + }, }, + hostcall: getOp("op_hostcall"), }; globalThis.Ghjk = Ghjk; diff --git a/src/ghjk/js/mock.sfx.ts b/src/ghjk/js/mock.sfx.ts new file mode 100644 index 0000000..76de814 --- /dev/null +++ b/src/ghjk/js/mock.sfx.ts @@ -0,0 +1,20 @@ +//! Import this as a side effect for a mock of the Ghjk object. +//! Useful to sanity check code that relies on the Ghjk extension.
+ +import { Ghjk } from "./runtime.js"; + +const bb = new Map(); +Object.assign(Ghjk, { + callbacks: { + set: (key: string) => key, + }, + hostcall: () => Promise.resolve({}), + blackboard: { + set: (key: string, value: any) => { + const old = bb.get(key); + bb.set(key, value); + return old; + }, + get: (key: string) => bb.get(key), + }, +}); diff --git a/src/ghjk/deno/runtime.d.ts b/src/ghjk/js/runtime.d.ts similarity index 59% rename from src/ghjk/deno/runtime.d.ts rename to src/ghjk/js/runtime.d.ts index 1e2a931..8d4e664 100644 --- a/src/ghjk/deno/runtime.d.ts +++ b/src/ghjk/js/runtime.d.ts @@ -1,5 +1,4 @@ -type JsonLiteral = string | number | boolean | null; - +export type JsonLiteral = string | number | boolean | null; export type JsonObject = { [key: string]: Json }; export type JsonArray = Json[]; export type Json = JsonLiteral | JsonObject | JsonArray; @@ -9,5 +8,9 @@ type GhjkNs = { get: (key: string) => Json | undefined; set: (key: string, value: Json) => Json | undefined; }; + callbacks: { + set: (key: string, fn: (arg: Json) => Json | Promise) => string; + }; + hostcall: (key: string, args: Json) => Promise; }; export const Ghjk: GhjkNs; diff --git a/src/ghjk/deno/runtime.js b/src/ghjk/js/runtime.js similarity index 100% rename from src/ghjk/deno/runtime.js rename to src/ghjk/js/runtime.js diff --git a/src/ghjk/log.rs b/src/ghjk/log.rs new file mode 100644 index 0000000..76ade7e --- /dev/null +++ b/src/ghjk/log.rs @@ -0,0 +1,95 @@ +use crate::interlude::*; + +pub fn init() { + static INIT: std::sync::Once = std::sync::Once::new(); + INIT.call_once(|| { + let eyre_panic_hook = color_eyre::config::HookBuilder::default().display_location_section( + std::env::var("RUST_ERR_LOCATION") + .map(|var| var != "0") + .unwrap_or(true), + ); + + #[cfg(not(debug_assertions))] + let eyre_panic_hook = eyre_panic_hook.panic_section(format!( + r#"Ghjk has panicked. This is a bug, please report this +at https://github.com/metatypedev/ghjk/issues/new. +If you can reliably reproduce this panic, try to include the +following items in your report: +- Reproduction steps +- Output of meta-cli doctor and +- A panic backtrace. Set the following environment variables as shown to enable full backtraces. + - RUST_BACKTRACE=1 + - RUST_LIB_BACKTRACE=full + - RUST_SPANTRACE=1 +Platform: {platform} +Version: {version} +Args: {args:?} +"#, + platform = crate::build::BUILD_TARGET, + // TODO: include commit sha + version = crate::build::PKG_VERSION, + args = std::env::args().collect::>() + )); + + let (eyre_panic_hook, _eyre_hook) = eyre_panic_hook.try_into_hooks().unwrap(); + let eyre_panic_hook = eyre_panic_hook.into_panic_hook(); + + std::panic::set_hook(Box::new(move |panic_info| { + if let Some(msg) = panic_info.payload().downcast_ref::<&str>() { + if msg.contains("A Tokio 1.x context was found, but it is being shutdown.") { + warn!("improper shutdown, make sure to terminate all workers first"); + return; + } + } else if let Some(msg) = panic_info.payload().downcast_ref::() { + if msg.contains("A Tokio 1.x context was found, but it is being shutdown.") { + warn!("improper shutdown, make sure to terminate all workers first"); + return; + } + } + eyre_panic_hook(panic_info); + // - Tokio does not exit the process when a task panics, so we define a custom + // panic hook to implement this behaviour. 
+ // std::process::exit(1); + })); + + // FIXME: for some reason, the tests already have + // an eyre_hook + #[cfg(not(test))] + _eyre_hook.install().unwrap(); + + if std::env::var("RUST_LOG").is_err() { + std::env::set_var("RUST_LOG", "info,actix_server=warn"); + } + #[cfg(not(debug_assertions))] + if std::env::var("RUST_SPANTRACE").is_err() { + std::env::set_var("RUST_SPANTRACE", "0"); + } + + use tracing_subscriber::prelude::*; + + let fmt = tracing_subscriber::fmt::layer() + .without_time() + .with_writer(std::io::stderr) + // .pretty() + // .with_file(true) + // .with_line_number(true) + .with_target(false); + + #[cfg(test)] + let fmt = fmt.with_test_writer(); + + #[cfg(debug_assertions)] + let fmt = fmt.with_target(true); + + let filter = tracing_subscriber::EnvFilter::from_default_env(); + + tracing_subscriber::registry() + // filter on values from RUST_LOG + .with(filter) + // subscriber that emits to stderr + .with(fmt) + // instrument errors with SpanTraces, used by color-eyre + .with(tracing_error::ErrorLayer::default()) + .init(); + }); +} diff --git a/src/ghjk/main.rs b/src/ghjk/main.rs index 25f172f..7427988 100644 --- a/src/ghjk/main.rs +++ b/src/ghjk/main.rs @@ -25,14 +25,16 @@ mod interlude { pub use serde_json::json; pub use smallvec::smallvec as svec; pub use smallvec::SmallVec as Svec; - pub use tracing::{debug, error, info, trace, warn}; + pub use tracing::{debug, error, info, trace, warn, Instrument}; pub use tracing_unwrap::*; } mod host; mod cli {} -mod deno; +mod ext; +mod log; +mod systems; mod utils; use crate::interlude::*; @@ -44,7 +46,7 @@ fn main() -> Res<()> { use nix::sys::signal::*; signal(Signal::SIGPIPE, SigHandler::SigDfl)?; } - utils::setup_tracing()?; + log::init(); denort::init(); tokio::runtime::Builder::new_current_thread() @@ -60,78 +62,104 @@ shadow!(build); const DENO_UNSTABLE_FLAGS: &[&str] = &["worker-options", "kv"]; async fn cli() -> Res<()> { - debug!(version = build::VERSION, "ghjk CLI"); + debug!(version = build::PKG_VERSION, "ghjk CLI"); let cwd = std::env::current_dir()?; - let ghjk_dir_path = match std::env::var("GHJK_DIR") { - Ok(path) => Some(PathBuf::from(path)), - Err(std::env::VarError::NotUnicode(os_str)) => Some(PathBuf::from(os_str)), - Err(std::env::VarError::NotPresent) => utils::find_entry_recursive(&cwd, ".ghjk").await?, - }; + let config = { + let ghjk_dir_path = match std::env::var("GHJK_DIR") { + Ok(path) => Some(PathBuf::from(path)), + Err(std::env::VarError::NotUnicode(os_str)) => Some(PathBuf::from(os_str)), + Err(std::env::VarError::NotPresent) => { + utils::find_entry_recursive(&cwd, ".ghjk").await? + } + }; - let ghjk_dir_path = if let Some(path) = ghjk_dir_path { - Some(tokio::fs::canonicalize(path).await?) - } else { - None - }; + let ghjk_dir_path = if let Some(path) = ghjk_dir_path { + Some(tokio::fs::canonicalize(path).await?) + } else { + None + }; - let ghjkfile_path = match &ghjk_dir_path { - Some(ghjkfile_path) => { - utils::find_entry_recursive( - ghjkfile_path - .parent() - .expect_or_log("invalid GHJK_DIR path"), - "ghjk.ts", - ) - .await? - } - None => utils::find_entry_recursive(&cwd, "ghjk.ts").await?, - }; + let ghjkfile_path = match &ghjk_dir_path { + Some(ghjkfile_path) => { + utils::find_entry_recursive( + ghjkfile_path + .parent() + .expect_or_log("invalid GHJK_DIR path"), + "ghjk.ts", + ) + .await? + } + None => utils::find_entry_recursive(&cwd, "ghjk.ts").await?, + }; - let ghjkfile_path = if let Some(path) = ghjkfile_path { - Some(tokio::fs::canonicalize(path).await?) 
- } else { - None - }; + let ghjkfile_path = if let Some(path) = ghjkfile_path { + Some(tokio::fs::canonicalize(path).await?) + } else { + None + }; - if ghjk_dir_path.is_none() && ghjkfile_path.is_none() { - warn!("ghjk could not find any ghjkfiles or ghjkdirs, try creating a `ghjk.ts` script.",); - } + if ghjk_dir_path.is_none() && ghjkfile_path.is_none() { + warn!( + "ghjk could not find any ghjkfiles or ghjkdirs, try creating a `ghjk.ts` script.", + ); + } - let share_dir_path = match std::env::var("GHJK_SHARE_DIR") { - Ok(path) => PathBuf::from(path), - Err(std::env::VarError::NotUnicode(os_str)) => PathBuf::from(os_str), - Err(std::env::VarError::NotPresent) => directories::BaseDirs::new() - .expect_or_log("unable to resolve home dir") - .data_local_dir() - .join("ghjk"), + let share_dir_path = match std::env::var("GHJK_SHARE_DIR") { + Ok(path) => PathBuf::from(path), + Err(std::env::VarError::NotUnicode(os_str)) => PathBuf::from(os_str), + Err(std::env::VarError::NotPresent) => directories::BaseDirs::new() + .expect_or_log("unable to resolve home dir") + .data_local_dir() + .join("ghjk"), + }; + Config { + ghjkfile_path, + ghjk_dir_path, + share_dir_path, + } }; - if let Some(ghjk_dir_path) = ghjk_dir_path { + let deno_cx = denort::worker( + denort::deno::args::Flags { + unstable_config: denort::deno::args::UnstableConfig { + features: DENO_UNSTABLE_FLAGS + .iter() + .copied() + .map(String::from) + .collect(), + ..default() + }, + ..default() + }, + Some(Arc::new(Vec::new)), + ) + .await?; + + if let Some(ghjk_dir_path) = config.ghjk_dir_path { let gcx = GhjkCtx { ghjk_dir_path, - ghjkfile_path, - share_dir_path, - repo_root: cwd.clone(), - deno: denort::worker( - denort::deno::args::Flags { - unstable_config: denort::deno::args::UnstableConfig { - features: DENO_UNSTABLE_FLAGS - .iter() - .copied() - .map(String::from) - .collect(), - ..default() - }, - ..default() - }, - Some(Arc::new(Vec::new)), - ) - .await?, + ghjkfile_path: config.ghjkfile_path, + share_dir_path: config.share_dir_path, + repo_root: url::Url::from_file_path(&cwd) + .expect_or_log("cwd error") + // repo root url must end in slash due to + // how Url::join works + .join(&format!("{}/", cwd.file_name().unwrap().to_string_lossy())) + .wrap_err("repo url error")?, + deno: deno_cx.clone(), }; let gcx = Arc::new(gcx); + let systems_deno = systems::deno::systems_from_deno( + &gcx, + &gcx.repo_root + .join("src/deno_systems/mod.ts") + .wrap_err("repo url error")?, + ) + .await?; + let hcx = host::HostCtx::new( gcx, host::Config { @@ -141,25 +169,29 @@ async fn cli() -> Res<()> { env_vars: std::env::vars().collect(), cwd, }, + systems_deno, ); let hcx = Arc::new(hcx); - if let Some(modules) = host::modules_from_ghjkfile(hcx).await? { - let conf_json = serde_json::to_string_pretty(&modules.config)?; + if let Some(mut systems) = host::systems_from_ghjkfile(hcx).await? { + let conf_json = serde_json::to_string_pretty(&systems.config)?; info!(%conf_json); + systems.write_lockfile().await?; } else { warn!("no ghjkfile found"); } } + // tokio::task::spawn_blocking(|| deno_cx.terminate()); + Ok(()) } #[derive(Debug)] pub struct GhjkCtx { deno: denort::DenoWorkerHandle, - repo_root: PathBuf, + repo_root: url::Url, ghjkfile_path: Option, ghjk_dir_path: PathBuf, share_dir_path: PathBuf, diff --git a/src/ghjk/systems.rs b/src/ghjk/systems.rs new file mode 100644 index 0000000..0795e70 --- /dev/null +++ b/src/ghjk/systems.rs @@ -0,0 +1,112 @@ +//! Systems (formerly modules) are units of implementation that bundle together +//! 
related functionality. + +use std::any::Any; + +use crate::interlude::*; + +pub mod deno; + +pub enum SystemManifest { + Deno(deno::DenoSystemManifest), +} + +impl SystemManifest { + pub async fn init(&self) -> Res { + match self { + SystemManifest::Deno(man) => Ok(ErasedSystemInstance::new(Arc::new(man.ctor().await?))), + } + } +} + +#[async_trait::async_trait] +pub trait SystemInstance { + type LockState; + + async fn load_config( + &self, + config: serde_json::Value, + bb: ConfigBlackboard, + state: Option, + ) -> Res<()>; + + async fn load_lock_entry(&self, raw: serde_json::Value) -> Res; + + async fn gen_lock_entry(&self) -> Res; +} + +type BoxAny = Box; + +#[allow(clippy::type_complexity)] +pub struct ErasedSystemInstance { + load_lock_entry_fn: Box BoxFuture<'static, Res>>, + gen_lock_entry_fn: Box BoxFuture<'static, Res>>, + load_config_fn: Box< + dyn Fn(serde_json::Value, ConfigBlackboard, Option) -> BoxFuture<'static, Res<()>>, + >, +} + +impl ErasedSystemInstance { + pub fn new(instance: Arc) -> Self + where + S: SystemInstance + Send + Sync + 'static, + L: std::any::Any + Send + Sync, + { + Self { + load_lock_entry_fn: { + let instance = instance.clone(); + Box::new(move |raw| { + let instance = instance.clone(); + async move { + let res: BoxAny = Box::new(instance.load_lock_entry(raw).await?); + Ok(res) + } + .boxed() + }) + }, + gen_lock_entry_fn: { + let instance = instance.clone(); + Box::new(move || { + let instance = instance.clone(); + async move { instance.gen_lock_entry().await }.boxed() + }) + }, + load_config_fn: Box::new(move |config, bb, state| { + let instance = instance.clone(); + async move { + let state: Option> = + state.map(|st| st.downcast().expect_or_log("downcast error")); + instance.load_config(config, bb, state.map(|bx| *bx)).await + } + .boxed() + }), + } + } + + pub async fn load_config( + &self, + config: serde_json::Value, + bb: ConfigBlackboard, + state: Option, + ) -> Res<()> { + (self.load_config_fn)(config, bb, state).await + } + + pub async fn load_lock_entry(&self, raw: serde_json::Value) -> Res { + (self.load_lock_entry_fn)(raw).await + } + + pub async fn gen_lock_entry(&self) -> Res { + (self.gen_lock_entry_fn)().await + } +} + +pub type SystemId = CHeapStr; + +#[derive(Debug, Serialize, Deserialize, PartialEq)] +pub struct SystemConfig { + pub id: SystemId, + pub config: serde_json::Value, +} + +pub type ConfigBlackboard = Arc>; diff --git a/src/ghjk/systems/deno.rs b/src/ghjk/systems/deno.rs new file mode 100644 index 0000000..15ef2e3 --- /dev/null +++ b/src/ghjk/systems/deno.rs @@ -0,0 +1,256 @@ +//! This module implements support for systems written in typescript +//! running on top of deno. 
+ +use crate::interlude::*; + +use super::{SystemId, SystemInstance, SystemManifest}; + +#[derive(Clone)] +pub struct DenoSystemsContext { + callbacks: crate::ext::CallbacksHandle, + exit_code_channel: Arc>>>, + term_signal: Arc, + #[allow(unused)] + hostcalls: crate::ext::Hostcalls, +} + +impl DenoSystemsContext { + #[allow(unused)] + pub async fn terminate(&mut self) -> Res { + let channel = { + let mut opt = self.exit_code_channel.lock().expect_or_log("mutex error"); + opt.take() + }; + let Some(channel) = channel else { + eyre::bail!("already terminated") + }; + self.term_signal + .store(true, std::sync::atomic::Ordering::Relaxed); + Ok(channel.await.expect_or_log("channel error")) + } +} + +#[tracing::instrument(skip(gcx))] +pub async fn systems_from_deno( + gcx: &GhjkCtx, + source_uri: &url::Url, +) -> Res> { + let main_module = gcx + .repo_root + .join("src/deno_systems/bindings.ts") + .wrap_err("repo url error")?; + + let mut ext_conf = crate::ext::ExtConfig::new(); + + let bb = ext_conf.blackboard.clone(); + bb.insert("args".into(), { + #[derive(Serialize)] + struct GhjkCtxBean<'a> { + ghjkfile_path: Option<&'a Path>, + ghjk_dir_path: &'a Path, + share_dir_path: &'a Path, + } + + #[derive(Serialize)] + struct BindingArgs<'a> { + uri: url::Url, + gcx: GhjkCtxBean<'a>, + } + let GhjkCtx { + deno, + repo_root, + ghjkfile_path, + ghjk_dir_path, + share_dir_path, + } = gcx; + _ = (deno, repo_root); + + serde_json::json!(BindingArgs { + uri: source_uri.clone(), + gcx: GhjkCtxBean { + ghjkfile_path: ghjkfile_path.as_ref().map(|path| path.as_path()), + ghjk_dir_path, + share_dir_path, + }, + }) + }); + let hostcalls = ext_conf.hostcalls.clone(); + + let (manifests_tx, mut manifests_rx) = tokio::sync::mpsc::channel(1); + hostcalls.funcs.insert( + "register_systems".into(), + Box::new(move |args| { + let tx = manifests_tx.clone(); + async move { + tx.send(args).await.expect_or_log("channel error"); + Ok(serde_json::Value::Null) + } + .boxed() + }), + ); + let cb_line = ext_conf.callbacks_handle(); + + let mut worker = gcx + .deno + .prepare_module( + main_module, + deno_runtime::deno_permissions::PermissionsOptions { + allow_env: Some(vec![]), + allow_import: Some(vec![]), + allow_read: Some(vec![]), + allow_net: Some(vec![]), + allow_ffi: Some(vec![]), + allow_run: Some(vec![]), + allow_sys: Some(vec![]), + allow_write: Some(vec![]), + allow_all: true, + prompt: false, + ..default() + }, + deno_runtime::WorkerExecutionMode::Run, + default(), + Some(crate::ext::extensions(ext_conf)), + ) + .await?; + worker.execute().await?; + let (exit_code_channel, term_signal, _) = worker.drive_till_exit().await?; + + let join_exit_code_watcher = tokio::spawn(async { + let exit_code = exit_code_channel + .await + .expect_or_log("channel error") + .wrap_err("error on event loop for deno systems") + .unwrap_or_log(); + if exit_code != 0 { + // TODO: exit signals + error!(%exit_code, "deno systems died with non-zero exit code"); + } else { + info!(%exit_code, "deno systems exit") + } + exit_code + }); + let exit_code_channel = Arc::new(std::sync::Mutex::new(Some(join_exit_code_watcher))); + + let scx = DenoSystemsContext { + callbacks: cb_line, + hostcalls, + term_signal, + exit_code_channel, + }; + let scx = Arc::new(scx); + + let manifests = manifests_rx.recv().await.expect_or_log("channel error"); + let manifests: Vec = + serde_json::from_value(manifests).wrap_err("protocol error")?; + let manifests = manifests + .into_iter() + .map(|desc| { + ( + desc.id.clone(), + 
SystemManifest::Deno(DenoSystemManifest { + desc, + scx: scx.clone(), + }), + ) + }) + .collect(); + + Ok(manifests) +} + +#[derive(Debug, Deserialize)] +struct ManifestDesc { + id: SystemId, + ctor_cb_key: CHeapStr, +} + +#[derive(educe::Educe)] +#[educe(Debug)] +pub struct DenoSystemManifest { + desc: ManifestDesc, + #[educe(Debug(ignore))] + scx: Arc, +} + +impl DenoSystemManifest { + pub async fn ctor(&self) -> Res { + debug!(id = %self.desc.id, "initializing deno system"); + let desc = self + .scx + .callbacks + .exec(self.desc.ctor_cb_key.clone(), serde_json::Value::Null) + .await?; + + let desc = serde_json::from_value(desc).wrap_err("protocol error")?; + + debug!(id = %self.desc.id, "deno system initialized"); + + Ok(DenoSystemInstance { + desc, + scx: self.scx.clone(), + }) + } +} + +#[derive(Debug, Deserialize)] +/// This is the description sent from the typescript side for a registered manifest. +struct InstanceDesc { + load_lock_entry_cb_key: CHeapStr, + gen_lock_entry_cb_key: CHeapStr, + load_config_cb_key: CHeapStr, +} + +pub struct DenoSystemInstance { + desc: InstanceDesc, + scx: Arc, +} + +#[async_trait::async_trait] +impl SystemInstance for DenoSystemInstance { + type LockState = serde_json::Value; + + async fn load_config( + &self, + config: serde_json::Value, + bb: Arc>, + state: Option, + ) -> Res<()> { + self.scx + .callbacks + .exec( + self.desc.load_config_cb_key.clone(), + serde_json::json!({ + "config": config, + "bb": bb, + "state": state + }), + ) + .await + .wrap_err("callback error")?; + Ok(()) + } + + async fn load_lock_entry(&self, raw: serde_json::Value) -> Res { + self.scx + .callbacks + .exec( + self.desc.load_lock_entry_cb_key.clone(), + serde_json::json!({ + "raw": raw + }), + ) + .await + .wrap_err("callback error") + } + + async fn gen_lock_entry(&self) -> Res { + self.scx + .callbacks + .exec( + self.desc.gen_lock_entry_cb_key.clone(), + serde_json::json!({}), + ) + .await + .wrap_err("callback error") + } +} diff --git a/src/ghjk/utils.rs b/src/ghjk/utils.rs index d71783f..30e1201 100644 --- a/src/ghjk/utils.rs +++ b/src/ghjk/utils.rs @@ -1,33 +1,6 @@ -use std::io::Write; - use crate::interlude::*; -// Ensure that the `tracing` stack is only initialised once using `once_cell` -// isn't required in cargo-nextest since each test runs in a new process -pub fn _setup_tracing_once() { - use once_cell::sync::Lazy; - static TRACING: Lazy<()> = Lazy::new(|| { - setup_tracing().expect("failed to init tracing"); - }); - Lazy::force(&TRACING); -} - -pub fn setup_tracing() -> eyre::Result<()> { - color_eyre::install()?; - if std::env::var("RUST_LOG").is_err() { - std::env::set_var("RUST_LOG", "info"); - } - - // tracing_log::LogTracer::init()?; - tracing_subscriber::fmt() - .compact() - .with_env_filter(tracing_subscriber::EnvFilter::from_default_env()) - .with_timer(tracing_subscriber::fmt::time::uptime()) - .try_init() - .map_err(|err| eyre::eyre!(err))?; - - Ok(()) -} +use std::io::Write; #[inline] pub fn default() -> T { @@ -75,10 +48,13 @@ mod cheapstr { } } - impl From<&str> for CHeapStr { + impl From for CHeapStr + where + T: Into>, + { #[inline(always)] - fn from(string: &str) -> Self { - Self::new(string.to_owned()) + fn from(string: T) -> Self { + Self::new(string) } } @@ -127,14 +103,6 @@ mod cheapstr { } } - impl From for CHeapStr { - fn from(string: String) -> Self { - /* let byte_arc: Arc<[u8]> = string.into_bytes().into(); - let str_arc = unsafe { Arc::from_raw(Arc::into_raw(byte_arc) as *const str) }; */ - Self::new(string) - } - } - impl From 
for String { fn from(value: CHeapStr) -> String { // FIXME: optmize this @@ -167,10 +135,11 @@ const SHA2_256: u64 = 0x12; pub fn hash_obj(obj: &T) -> String { use sha2::Digest; let mut hash = sha2::Sha256::new(); - json_canon::to_writer(&mut hash, obj).expect("error serializing manifest"); + json_canon::to_writer(&mut hash, obj).expect_or_log("error serializing manifest"); let hash = hash.finalize(); - let hash = multihash::Multihash::<32>::wrap(SHA2_256, &hash[..]).expect("error multihashing"); + let hash = + multihash::Multihash::<32>::wrap(SHA2_256, &hash[..]).expect_or_log("error multihashing"); encode_base32_multibase(hash.digest()) } @@ -181,14 +150,21 @@ pub fn hash_str(string: &str) -> String { .expect_or_log("error writing to hasher"); let hash = hash.finalize(); - let hash = multihash::Multihash::<32>::wrap(SHA2_256, &hash[..]).expect("error multihashing"); + let hash = + multihash::Multihash::<32>::wrap(SHA2_256, &hash[..]).expect_or_log("error multihashing"); encode_base32_multibase(hash.digest()) } pub fn encode_base32_multibase>(source: T) -> String { - format!("b{}", data_encoding::BASE32_NOPAD.encode(source.as_ref())) + format!( + "b{}", + data_encoding::BASE32_NOPAD + .encode(source.as_ref()) + .to_lowercase() + ) } +#[allow(unused)] // Consider z-base32 https://en.wikipedia.org/wiki/Base32#z-base-32 pub fn decode_base32_multibase(source: &str) -> eyre::Result> { match ( @@ -203,6 +179,7 @@ pub fn decode_base32_multibase(source: &str) -> eyre::Result> { } } +#[allow(unused)] pub fn encode_hex_multibase>(source: T) -> String { format!( "f{}", @@ -210,6 +187,7 @@ pub fn encode_hex_multibase>(source: T) -> String { ) } +#[allow(unused)] pub fn decode_hex_multibase(source: &str) -> eyre::Result> { match ( &source[0..1], diff --git a/utils/mod.ts b/utils/mod.ts index 8ec530f..2e77256 100644 --- a/utils/mod.ts +++ b/utils/mod.ts @@ -603,13 +603,12 @@ export function unwrapZodRes( const zodErr = zod_val_err.fromZodError(res.error, { includePath: true, maxIssuesInMessage: 3, + prefix: errMessage, }); - throw new Error(`${errMessage}: ${zodErr}`, { - cause: { - issues: res.error.issues, - ...cause, - }, - }); + zodErr.cause = { + ...cause, + }; + throw zodErr; } return res.data; }
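
Aside (editor's sketch, not part of the patch): the reworked ModuleWorkerHandle in src/denort splits module execution into run, execute, and drive_till_exit. Below is a minimal sketch of how the two flows appear intended to be driven, assuming a worker obtained from DenoWorkerHandle::prepare_module and the crate's interlude (Res, eyre) in scope; function names and error handling here are illustrative only.

async fn run_to_completion(worker: ModuleWorkerHandle) -> Res<()> {
    // one-shot flow: load + evaluate the main module, then pump the event
    // loop until the program exits on its own
    let (exit_code, mut finished) = worker.run().await?;
    if exit_code != 0 {
        eyre::bail!("non-zero exit code: {exit_code}");
    }
    // the FinishedWorkerHandle can still answer queries afterwards
    let _modules = finished.get_loaded_modules().await;
    Ok(())
}

async fn run_in_background(mut worker: ModuleWorkerHandle) -> Res<()> {
    // long-lived flow (what systems_from_deno uses): evaluate the module
    // first, then keep the event loop alive until the term signal is set
    worker.execute().await?;
    let (exit_code_rx, term_signal, _finished) = worker.drive_till_exit().await?;
    // ... interact with the running module (callbacks, hostcalls) here ...
    term_signal.store(true, std::sync::atomic::Ordering::Relaxed);
    // the receiver resolves with the worker's exit-code result once the loop stops
    let _exit_code = exit_code_rx.await.expect("channel error");
    Ok(())
}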
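Aside (editor's sketch, not part of the patch): ext.rs and ext/callbacks.rs give the embedder two bridges: hostcalls (JS invoking registered Rust functions through Ghjk.hostcall) and callbacks (Rust invoking functions the module registered through Ghjk.callbacks.set). A rough sketch of the wiring, mirroring the register_systems hostcall above; the "hello" and "my_cb" keys and the function name are made up, and prepare_module arguments are elided.

async fn wire_bridges(_gcx: &GhjkCtx) -> Res<()> {
    let mut ext_conf = crate::ext::ExtConfig::new();

    // JS -> Rust: a hostcall the module can reach as `Ghjk.hostcall("hello", args)`
    ext_conf.hostcalls.funcs.insert(
        "hello".into(),
        Box::new(|args| async move { Ok(serde_json::json!({ "echo": args })) }.boxed()),
    );

    // Rust -> JS: handle used later to invoke `Ghjk.callbacks.set`-registered fns
    let callbacks = ext_conf.callbacks_handle();

    // hand the config to the worker; permissions/mode elided here
    // let worker = gcx.deno.prepare_module(.., Some(crate::ext::extensions(ext_conf))).await?;
    // worker.execute().await?; worker.drive_till_exit().await?;

    // once the module has run `Ghjk.callbacks.set("my_cb", fn)`:
    let _out = callbacks
        .exec("my_cb".into(), serde_json::json!({ "x": 1 }))
        .await
        .wrap_err("callback error")?;
    Ok(())
}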
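Aside (editor's sketch, not part of the patch): SystemManifest and ErasedSystemInstance type-erase each system's LockState so the host can treat Deno-backed systems and any future Rust-native ones uniformly. A toy impl to illustrate the shape; NoopSystem is invented and the exact generic bounds on ErasedSystemInstance::new are assumed from the surrounding code.

struct NoopSystem;

#[async_trait::async_trait]
impl SystemInstance for NoopSystem {
    type LockState = serde_json::Value;

    async fn load_config(
        &self,
        _config: serde_json::Value,
        _bb: ConfigBlackboard,
        _state: Option<Self::LockState>,
    ) -> Res<()> {
        Ok(())
    }

    async fn load_lock_entry(&self, raw: serde_json::Value) -> Res<Self::LockState> {
        Ok(raw)
    }

    async fn gen_lock_entry(&self) -> Res<serde_json::Value> {
        Ok(serde_json::json!({ "version": "0" }))
    }
}

fn erase() -> ErasedSystemInstance {
    // the host only ever works with the erased form, keyed by SystemId
    ErasedSystemInstance::new(Arc::new(NoopSystem))
}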