diff --git a/.ghjk/deno.lock b/.ghjk/deno.lock index 93382d85..934a1934 100644 --- a/.ghjk/deno.lock +++ b/.ghjk/deno.lock @@ -2,15 +2,110 @@ "version": "3", "packages": { "specifiers": { - "npm:zod-validation-error": "npm:zod-validation-error@3.1.0_zod@3.22.4" + "jsr:@david/dax@0.41.0": "jsr:@david/dax@0.41.0", + "jsr:@david/which@0.3": "jsr:@david/which@0.3.0", + "jsr:@david/which@^0.4.1": "jsr:@david/which@0.4.1", + "jsr:@ghjk/dax@0.40.2-alpha-ghjk": "jsr:@ghjk/dax@0.40.2-alpha-ghjk", + "jsr:@std/assert@^0.221.0": "jsr:@std/assert@0.221.0", + "jsr:@std/bytes@^0.221.0": "jsr:@std/bytes@0.221.0", + "jsr:@std/fmt@^0.221.0": "jsr:@std/fmt@0.221.0", + "jsr:@std/fs@0.221.0": "jsr:@std/fs@0.221.0", + "jsr:@std/io@0.221.0": "jsr:@std/io@0.221.0", + "jsr:@std/io@^0.221.0": "jsr:@std/io@0.221.0", + "jsr:@std/path@0.221.0": "jsr:@std/path@0.221.0", + "jsr:@std/path@^0.221.0": "jsr:@std/path@0.221.0", + "jsr:@std/streams@0.221.0": "jsr:@std/streams@0.221.0", + "npm:@noble/hashes@1.4.0": "npm:@noble/hashes@1.4.0", + "npm:multiformats@13.1.0": "npm:multiformats@13.1.0", + "npm:zod-validation-error": "npm:zod-validation-error@3.1.0_zod@3.22.4", + "npm:zod-validation-error@3.2.0": "npm:zod-validation-error@3.2.0_zod@3.22.4" + }, + "jsr": { + "@david/dax@0.41.0": { + "integrity": "9e1ecf66a0415962cc8ad3ba4e3fa93ce0f1a1cc797dd95c36fdfb6977dc7fc8", + "dependencies": [ + "jsr:@david/which@^0.4.1", + "jsr:@std/fmt@^0.221.0", + "jsr:@std/fs@0.221.0", + "jsr:@std/io@0.221.0", + "jsr:@std/path@0.221.0", + "jsr:@std/streams@0.221.0" + ] + }, + "@david/which@0.3.0": { + "integrity": "6bdb62c40ac90edcf328e854fa8103a8db21e7c326089cbe3c3a1cf7887d3204" + }, + "@david/which@0.4.1": { + "integrity": "896a682b111f92ab866cc70c5b4afab2f5899d2f9bde31ed00203b9c250f225e" + }, + "@ghjk/dax@0.40.2-alpha-ghjk": { + "integrity": "87bc93e9947779cb2f3922fe277e21ea8c716de804b2627f80ba9e7bc3d0d019", + "dependencies": [ + "jsr:@david/which@0.3", + "jsr:@std/fmt@^0.221.0", + "jsr:@std/fs@0.221.0", + "jsr:@std/io@0.221.0", + "jsr:@std/path@0.221.0", + "jsr:@std/streams@0.221.0" + ] + }, + "@std/assert@0.221.0": { + "integrity": "a5f1aa6e7909dbea271754fd4ab3f4e687aeff4873b4cef9a320af813adb489a" + }, + "@std/bytes@0.221.0": { + "integrity": "64a047011cf833890a4a2ab7293ac55a1b4f5a050624ebc6a0159c357de91966" + }, + "@std/fmt@0.221.0": { + "integrity": "379fed69bdd9731110f26b9085aeb740606b20428ce6af31ef6bd45ef8efa62a" + }, + "@std/fs@0.221.0": { + "integrity": "028044450299de8ed5a716ade4e6d524399f035513b85913794f4e81f07da286", + "dependencies": [ + "jsr:@std/assert@^0.221.0", + "jsr:@std/path@^0.221.0" + ] + }, + "@std/io@0.221.0": { + "integrity": "faf7f8700d46ab527fa05cc6167f4b97701a06c413024431c6b4d207caa010da", + "dependencies": [ + "jsr:@std/assert@^0.221.0", + "jsr:@std/bytes@^0.221.0" + ] + }, + "@std/path@0.221.0": { + "integrity": "0a36f6b17314ef653a3a1649740cc8db51b25a133ecfe838f20b79a56ebe0095", + "dependencies": [ + "jsr:@std/assert@^0.221.0" + ] + }, + "@std/streams@0.221.0": { + "integrity": "47f2f74634b47449277c0ee79fe878da4424b66bd8975c032e3afdca88986e61", + "dependencies": [ + "jsr:@std/io@^0.221.0" + ] + } }, "npm": { + "@noble/hashes@1.4.0": { + "integrity": "sha512-V1JJ1WTRUqHHrOSh597hURcMqVKVGL/ea3kv0gSnEdsEZ0/+VyPghM1lMNGc00z7CIQorSvbKpuJkxvuHbvdbg==", + "dependencies": {} + }, + "multiformats@13.1.0": { + "integrity": "sha512-HzdtdBwxsIkzpeXzhQ5mAhhuxcHbjEHH+JQoxt7hG/2HGFjjwyolLo7hbaexcnhoEuV4e0TNJ8kkpMjiEYY4VQ==", + "dependencies": {} + }, "zod-validation-error@3.1.0_zod@3.22.4": { "integrity": 
"sha512-zujS6HqJjMZCsvjfbnRs7WI3PXN39ovTcY1n8a+KTm4kOH0ZXYsNiJkH1odZf4xZKMkBDL7M2rmQ913FCS1p9w==", "dependencies": { "zod": "zod@3.22.4" } }, + "zod-validation-error@3.2.0_zod@3.22.4": { + "integrity": "sha512-cYlPR6zuyrgmu2wRTdumEAJGuwI7eHVHGT+VyneAQxmRAKtGRL1/7pjz4wfLhz4J05f5qoSZc3rGacswgyTjjw==", + "dependencies": { + "zod": "zod@3.22.4" + } + }, "zod@3.22.4": { "integrity": "sha512-iC+8Io04lddc+mVqQ9AZ7OQ2MrUKGN+oIQyq1vemgt46jwCwLfhq7/pwnBnNXXXZb8VTVLKwp9EDkx+ryxIWmg==", "dependencies": {} @@ -459,6 +554,19 @@ "https://deno.land/x/zod@v3.22.4/locales/en.ts": "a7a25cd23563ccb5e0eed214d9b31846305ddbcdb9c5c8f508b108943366ab4c", "https://deno.land/x/zod@v3.22.4/mod.ts": "64e55237cb4410e17d968cd08975566059f27638ebb0b86048031b987ba251c4", "https://deno.land/x/zod@v3.22.4/types.ts": "724185522fafe43ee56a52333958764c8c8cd6ad4effa27b42651df873fc151e", + "https://deno.land/x/zod@v3.23.5/ZodError.ts": "528da200fbe995157b9ae91498b103c4ef482217a5c086249507ac850bd78f52", + "https://deno.land/x/zod@v3.23.5/errors.ts": "5285922d2be9700cc0c70c95e4858952b07ae193aa0224be3cbd5cd5567eabef", + "https://deno.land/x/zod@v3.23.5/external.ts": "a6cfbd61e9e097d5f42f8a7ed6f92f93f51ff927d29c9fbaec04f03cbce130fe", + "https://deno.land/x/zod@v3.23.5/helpers/enumUtil.ts": "54efc393cc9860e687d8b81ff52e980def00fa67377ad0bf8b3104f8a5bf698c", + "https://deno.land/x/zod@v3.23.5/helpers/errorUtil.ts": "7a77328240be7b847af6de9189963bd9f79cab32bbc61502a9db4fe6683e2ea7", + "https://deno.land/x/zod@v3.23.5/helpers/parseUtil.ts": "c14814d167cc286972b6e094df88d7d982572a08424b7cd50f862036b6fcaa77", + "https://deno.land/x/zod@v3.23.5/helpers/partialUtil.ts": "998c2fe79795257d4d1cf10361e74492f3b7d852f61057c7c08ac0a46488b7e7", + "https://deno.land/x/zod@v3.23.5/helpers/typeAliases.ts": "0fda31a063c6736fc3cf9090dd94865c811dfff4f3cb8707b932bf937c6f2c3e", + "https://deno.land/x/zod@v3.23.5/helpers/util.ts": "3301a69867c9e589ac5b3bc4d7a518b5212858cd6a25e8b02d635c9c32ba331c", + "https://deno.land/x/zod@v3.23.5/index.ts": "d27aabd973613985574bc31f39e45cb5d856aa122ef094a9f38a463b8ef1a268", + "https://deno.land/x/zod@v3.23.5/locales/en.ts": "a7a25cd23563ccb5e0eed214d9b31846305ddbcdb9c5c8f508b108943366ab4c", + "https://deno.land/x/zod@v3.23.5/mod.ts": "ec6e2b1255c1a350b80188f97bd0a6bac45801bb46fc48f50b9763aa66046039", + "https://deno.land/x/zod@v3.23.5/types.ts": "78d3f06eb313ea754fad0ee389d3c0fa55bc01cf708e6ce0ea7fddd41f31eca2", "https://esm.sh/jszip@3.7.1": "f3872a819b015715edb05f81d973b5cd05d3d213d8eb28293ca5471fe7a71773", "https://esm.sh/v135/jszip@3.7.1/denonext/jszip.mjs": "d31d7f9e0de9c6db3c07ca93f7301b756273d4dccb41b600461978fc313504c9", "https://raw.githubusercontent.com/metatypedev/ghjk/423d38e/deps/cli.ts": "4eacc555cf80686b487e7502db63a4cfbc2060a7b847d15b14cf1cc008a3b65c", diff --git a/.ghjk/lock.json b/.ghjk/lock.json index 509afa1c..e5e4dc9d 100644 --- a/.ghjk/lock.json +++ b/.ghjk/lock.json @@ -5,12 +5,12 @@ "ports": { "version": "0", "configResolutions": { - "95dbc2b8c604a5996b88c5b1b4fb0c10b3e0d9cac68f57eb915b012c44288e93": { - "version": "v0.2.61", + "bciqjlw6cxddajjmznoemlmnu7mgbbm7a3hfmnd2x5oivwajmiqui5ey": { + "version": "v0.2.62", "buildDepConfigs": {}, "portRef": "act_ghrel@0.1.0" }, - "076a5b8ee3bdc68ebf20a696378458465042bb7dc1e49ac2dc98e5fa0dab3e25": { + "bciqao2s3r3r33ruox4qknfrxqrmemuccxn64dze2ylojrzp2bwvt4ji": { "version": "3.7.0", "buildDepConfigs": { "cpy_bs_ghrel": { @@ -33,7 +33,7 @@ "portRef": "pipi_pypi@0.1.0", "packageName": "pre-commit" }, - 
"84ecde630296f01e7cb8443c58d1596d668c357a0d9837c0a678b8a541ed0a39": { + "bciqij3g6mmbjn4a6ps4eipcy2fmw2zumgv5a3gbxycthroffihwquoi": { "version": "3.12.3", "buildDepConfigs": { "tar_aa": { @@ -49,17 +49,17 @@ }, "portRef": "cpy_bs_ghrel@0.1.0" }, - "9e3fa7742c431c34ae7ba8d1e907e50c937ccfb631fb4dcfb7a1773742abe267": { + "bciqj4p5hoqweghbuvz52rupja7sqze34z63dd62nz632c5zxikv6ezy": { "version": "1.34", "buildDepConfigs": {}, "portRef": "tar_aa@0.1.0" }, - "4f16c72030e922711abf15474d30e3cb232b18144beb73322b297edecfcdb86f": { + "bciqe6fwheayositrdk7rkr2ngdr4wizldakex23tgivss7w6z7g3q3y": { "version": "v1.5.5,", "buildDepConfigs": {}, "portRef": "zstd_aa@0.1.0" }, - "a79698808eea53aedd8e83387b2f44e90a1a48d76193c5ccf0fc6efe29bd70f6": { + "bciqkpfuyqchouu5o3whigod3f5coscq2jdlwde6fztypy3x6fg6xb5q": { "version": "v26.1", "buildDepConfigs": {}, "portRef": "protoc_ghrel@0.1.0" @@ -81,20 +81,20 @@ "sets": { "ghjkEnvProvInstSet___main": { "installs": [ - "c4cf06e095dadfbdd5e26070bc2b7baffc5ff45f", - "2a0176fec803325cc31d4a9b15f77f4e07938cc4", - "b6c49b375643a285e20b6ec0f7a692214bd0f392" + "bciqe72molvtvcuj3tuh47ziue2oqd6t4qetxn3rsoa764ofup6uwjmi", + "bciqe4zlekl4uqqbhxunac7br24mrf6cdpfrfblahqa4vrgaqjujcl4i", + "bciqjyl5um6634zwpw6cewv22chzlrsvhedbjahyghhy2zraqqgyiv2q" ], - "allowedDeps": "3c71ccb92f3785a685b27d7b897fef4b80ad6b24" + "allowedDeps": "bciqjx7llw7t6pfczypzmhbwv7sxaicruj5pdbuac47m4c5qyildiowi" }, "ghjkEnvProvInstSet___test": { "installs": [ - "aa103d26454710ca5d7f43358123341380389864", - "c4cf06e095dadfbdd5e26070bc2b7baffc5ff45f", - "2a0176fec803325cc31d4a9b15f77f4e07938cc4", - "b6c49b375643a285e20b6ec0f7a692214bd0f392" + "bciqikjfnbntvagpghawbzlfp2es6lnqzhba3qx5de7tdrmvhuzhsjqa", + "bciqe72molvtvcuj3tuh47ziue2oqd6t4qetxn3rsoa764ofup6uwjmi", + "bciqe4zlekl4uqqbhxunac7br24mrf6cdpfrfblahqa4vrgaqjujcl4i", + "bciqjyl5um6634zwpw6cewv22chzlrsvhedbjahyghhy2zraqqgyiv2q" ], - "allowedDeps": "3c71ccb92f3785a685b27d7b897fef4b80ad6b24" + "allowedDeps": "bciqjx7llw7t6pfczypzmhbwv7sxaicruj5pdbuac47m4c5qyildiowi" } } } @@ -102,8 +102,24 @@ { "id": "tasks", "config": { - "envs": {}, - "tasks": {} + "envs": { + "bciqmhz5op4n2p2xhzgtqdjjho6dafxi5xsx4qx5kxkbhqss3mza3mja": { + "provides": [] + } + }, + "tasks": { + "bciqe2qc66fi4voc5zoaujvysa3yffxgokfpsuxpebchmflgjaceeqry": { + "ty": "denoFile@v1", + "key": "UEiB15QTt_KnJPsbHJIOCnssrKFfjKyZxq8UqIFTCsXb3SA==", + "envHash": "bciqmhz5op4n2p2xhzgtqdjjho6dafxi5xsx4qx5kxkbhqss3mza3mja" + }, + "bciqezzz3obs4torm2uxhgwloj6meas2wvmpnxobmwib4ey6x226qpza": { + "ty": "denoFile@v1", + "key": "UEiAGQuHMWAC4VRQJE9YCMI99mgodAeTV86EAv8ROiTRRHA==", + "envHash": "bciqmhz5op4n2p2xhzgtqdjjho6dafxi5xsx4qx5kxkbhqss3mza3mja" + } + }, + "tasksNamed": [] } }, { @@ -113,6 +129,22 @@ "main": { "desc": "the default default environment.", "provides": [ + { + "ty": "hook.onEnter.posixExec", + "program": "ghjk", + "arguments": [ + "x", + "bciqezzz3obs4torm2uxhgwloj6meas2wvmpnxobmwib4ey6x226qpza" + ] + }, + { + "ty": "hook.onExit.posixExec", + "program": "ghjk", + "arguments": [ + "x", + "bciqe2qc66fi4voc5zoaujvysa3yffxgokfpsuxpebchmflgjaceeqry" + ] + }, { "ty": "ghjk.ports.InstallSetRef", "setId": "ghjkEnvProvInstSet___main" @@ -133,7 +165,7 @@ } ], "blackboard": { - "c4cf06e095dadfbdd5e26070bc2b7baffc5ff45f": { + "bciqe72molvtvcuj3tuh47ziue2oqd6t4qetxn3rsoa764ofup6uwjmi": { "port": { "ty": "denoWorker@v1", "name": "act_ghrel", @@ -149,7 +181,7 @@ "moduleSpecifier": "file:///ports/act.ts" } }, - "2a0176fec803325cc31d4a9b15f77f4e07938cc4": { + 
"bciqe4zlekl4uqqbhxunac7br24mrf6cdpfrfblahqa4vrgaqjujcl4i": { "port": { "ty": "denoWorker@v1", "name": "pipi_pypi", @@ -183,7 +215,7 @@ }, "packageName": "pre-commit" }, - "b6c49b375643a285e20b6ec0f7a692214bd0f392": { + "bciqjyl5um6634zwpw6cewv22chzlrsvhedbjahyghhy2zraqqgyiv2q": { "port": { "ty": "denoWorker@v1", "name": "cpy_bs_ghrel", @@ -207,7 +239,7 @@ "moduleSpecifier": "file:///ports/cpy_bs.ts" } }, - "e0d1f160d2d7755765f6f01a27a0c33a02ff98d2": { + "bciqb6ua63xodzwxngnbjq35hfikiwzb3dclbqkc7e6xgjdt5jin4pia": { "manifest": { "ty": "ambientAccess@v1", "name": "tar_aa", @@ -227,7 +259,7 @@ "portRef": "tar_aa@0.1.0" } }, - "9d26d0d90f6ecdd69d0705a042b01a344aa626ee": { + "bciqfl5s36w335ducrb6f6gwb3vuwup7vzqwwg67pq42xtkngsnxqobi": { "manifest": { "ty": "ambientAccess@v1", "name": "git_aa", @@ -261,7 +293,7 @@ "portRef": "git_aa@0.1.0" } }, - "3c447f912abf18883bd05314f946740975ee0dd3": { + "bciqcfe7qyxmokpn6pgtaj35r5qg74jkehuu6cvyrtcsnegvwlm64oqy": { "manifest": { "ty": "ambientAccess@v1", "name": "curl_aa", @@ -295,7 +327,7 @@ "portRef": "curl_aa@0.1.0" } }, - "dfb0f5e74666817e6ab8cbceca0c9da271142bca": { + "bciqgkpwxjmo5phw5se4ugyiz4xua3xrd54quzmk7wdwpq3vghglogjy": { "manifest": { "ty": "ambientAccess@v1", "name": "unzip_aa", @@ -317,7 +349,7 @@ "portRef": "unzip_aa@0.1.0" } }, - "d9122eff1fe3ef56872e53dae725ff3ccb37472e": { + "bciqmcvyepuficjj3mwshsbfecwdmzch5gwxqo557icnq4zujtdllh4a": { "manifest": { "ty": "ambientAccess@v1", "name": "zstd_aa", @@ -337,7 +369,7 @@ "portRef": "zstd_aa@0.1.0" } }, - "5314c90de340dfd1ef21421dcbdcba726b4d03b9": { + "bciqk4ivbyqvpxwcaj5reufmveqldiizo6xmqiqq7njtaczgappydoka": { "manifest": { "ty": "denoWorker@v1", "name": "rustup_rustlang", @@ -368,7 +400,7 @@ "portRef": "rustup_rustlang@0.1.0" } }, - "ebba9b42698f7f065a359575f195153ca1adba7b": { + "bciqjcmf46h2h6teenwbsda35igg4hea6ro5vh6nfieehk4jkuiqaj2a": { "manifest": { "ty": "denoWorker@v1", "name": "rust_rustup", @@ -404,7 +436,7 @@ "portRef": "rust_rustup@0.1.0" } }, - "45999e7561d7f6a661191f58ee35e67755d375e0": { + "bciqpgt5wsiw4y7qzovqbt2yrdgq5mvhhjpcg6cxzt4w4taudyen44ca": { "manifest": { "ty": "denoWorker@v1", "name": "cargo_binstall_ghrel", @@ -421,7 +453,7 @@ "portRef": "cargo_binstall_ghrel@0.1.0" } }, - "b80f4de14adc81c11569bf5f3a2d10b92ad5f1a7": { + "bciqo7cq7igschrhers3wiibbqpaavdf33fdfdalr4cu7gxr7cblifby": { "manifest": { "ty": "denoWorker@v1", "name": "pnpm_ghrel", @@ -440,7 +472,7 @@ "portRef": "pnpm_ghrel@0.1.0" } }, - "16e0e281e0f961fcc805896fc146d2c011c8d694": { + "bciqoxx4uhfhw77sux6kzqhy6bvxhxkk4cqigrxdrmggillzkfjgjnli": { "manifest": { "ty": "denoWorker@v1", "name": "asdf_plugin_git", @@ -469,7 +501,7 @@ "portRef": "asdf_plugin_git@0.1.0" } }, - "65ca6fb1b829a92d6423b3ea701d9602d84cf6f8": { + "bciqboouqnp54fnumgxvl7uay2k6ho4vhlbibvgoyyt5yt3rkwqaohzi": { "manifest": { "ty": "denoWorker@v1", "name": "node_org", @@ -493,7 +525,7 @@ "portRef": "node_org@0.1.0" } }, - "d82c92542f0ed9c49a0383922c1d968ba88f0c4b": { + "bciqctvtiscapp6cmlaxuaxnyac664hs3y3xsa5kqh4ctmhbsiehusly": { "manifest": { "ty": "denoWorker@v1", "name": "cpy_bs_ghrel", @@ -520,21 +552,21 @@ "portRef": "cpy_bs_ghrel@0.1.0" } }, - "3c71ccb92f3785a685b27d7b897fef4b80ad6b24": { - "tar_aa": "e0d1f160d2d7755765f6f01a27a0c33a02ff98d2", - "git_aa": "9d26d0d90f6ecdd69d0705a042b01a344aa626ee", - "curl_aa": "3c447f912abf18883bd05314f946740975ee0dd3", - "unzip_aa": "dfb0f5e74666817e6ab8cbceca0c9da271142bca", - "zstd_aa": "d9122eff1fe3ef56872e53dae725ff3ccb37472e", - "rustup_rustlang": "5314c90de340dfd1ef21421dcbdcba726b4d03b9", - 
"rust_rustup": "ebba9b42698f7f065a359575f195153ca1adba7b", - "cargo_binstall_ghrel": "45999e7561d7f6a661191f58ee35e67755d375e0", - "pnpm_ghrel": "b80f4de14adc81c11569bf5f3a2d10b92ad5f1a7", - "asdf_plugin_git": "16e0e281e0f961fcc805896fc146d2c011c8d694", - "node_org": "65ca6fb1b829a92d6423b3ea701d9602d84cf6f8", - "cpy_bs_ghrel": "d82c92542f0ed9c49a0383922c1d968ba88f0c4b" + "bciqjx7llw7t6pfczypzmhbwv7sxaicruj5pdbuac47m4c5qyildiowi": { + "tar_aa": "bciqb6ua63xodzwxngnbjq35hfikiwzb3dclbqkc7e6xgjdt5jin4pia", + "git_aa": "bciqfl5s36w335ducrb6f6gwb3vuwup7vzqwwg67pq42xtkngsnxqobi", + "curl_aa": "bciqcfe7qyxmokpn6pgtaj35r5qg74jkehuu6cvyrtcsnegvwlm64oqy", + "unzip_aa": "bciqgkpwxjmo5phw5se4ugyiz4xua3xrd54quzmk7wdwpq3vghglogjy", + "zstd_aa": "bciqmcvyepuficjj3mwshsbfecwdmzch5gwxqo557icnq4zujtdllh4a", + "rustup_rustlang": "bciqk4ivbyqvpxwcaj5reufmveqldiizo6xmqiqq7njtaczgappydoka", + "rust_rustup": "bciqjcmf46h2h6teenwbsda35igg4hea6ro5vh6nfieehk4jkuiqaj2a", + "cargo_binstall_ghrel": "bciqpgt5wsiw4y7qzovqbt2yrdgq5mvhhjpcg6cxzt4w4taudyen44ca", + "pnpm_ghrel": "bciqo7cq7igschrhers3wiibbqpaavdf33fdfdalr4cu7gxr7cblifby", + "asdf_plugin_git": "bciqoxx4uhfhw77sux6kzqhy6bvxhxkk4cqigrxdrmggillzkfjgjnli", + "node_org": "bciqboouqnp54fnumgxvl7uay2k6ho4vhlbibvgoyyt5yt3rkwqaohzi", + "cpy_bs_ghrel": "bciqctvtiscapp6cmlaxuaxnyac664hs3y3xsa5kqh4ctmhbsiehusly" }, - "aa103d26454710ca5d7f43358123341380389864": { + "bciqikjfnbntvagpghawbzlfp2es6lnqzhba3qx5de7tdrmvhuzhsjqa": { "port": { "ty": "denoWorker@v1", "name": "protoc_ghrel", diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index f26d0499..02efe78d 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -4,7 +4,7 @@ on: workflow_dispatch: env: - DENO_VERSION: "1.42.1" + DENO_VERSION: "1.43.1" GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GHJK_LOG_PANIC_LEVEL: error DENO_DIR: .deno-dir @@ -55,7 +55,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - uses: metatypedev/setup-ghjk@32fe7ad4eab41d5e62189208afa6fe17112a5563 + - uses: metatypedev/setup-ghjk@318209a9d215f70716a4ac89dbeb9653a2deb8bc with: installer-url: ./install.ts env: diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index fbeda562..d05d849a 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -9,11 +9,13 @@ on: - ready_for_review env: - DENO_VERSION: "1.42.1" + DENO_VERSION: "1.43.1" GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GHJK_LOG: debug GHJK_LOG_PANIC_LEVEL: error DENO_DIR: .deno-dir + # removing the images after every test is unncessary + DOCKER_NO_RMI: 1 jobs: changes: @@ -77,7 +79,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - uses: metatypedev/setup-ghjk@32fe7ad4eab41d5e62189208afa6fe17112a5563 + - uses: metatypedev/setup-ghjk@318209a9d215f70716a4ac89dbeb9653a2deb8bc with: installer-url: ./install.ts env: diff --git a/.gitignore b/.gitignore index dc99f6cd..3f14b39f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,3 @@ .DS_Store play.* +examples/**/.ghjk diff --git a/README.md b/README.md index c15ecf6e..a36dc4cb 100644 --- a/README.md +++ b/README.md @@ -137,6 +137,11 @@ TBD: this feature is in development. TBD: this feature is still in development. +#### Anonymous tasks + +Tasks that aren't give names can not be invoked from the CLI. They can be useful +for tasks that are meant to be common dependencies of other tasks. + ### Secure configs Certain options are configured through the `secureConfig` object. 
diff --git a/check.ts b/check.ts index f82503a3..77a9dfd7 100755 --- a/check.ts +++ b/check.ts @@ -1,5 +1,4 @@ #!/bin/env -S ghjk deno run --allow-env --allow-run --allow-read --allow-write=. -// # FIXME: find a way to resolve !DENO_EXEC_PATH in shebangs import "./setup_logger.ts"; import { $ } from "./utils/mod.ts"; diff --git a/deno.jsonc b/deno.jsonc index dafb6678..a2187dd4 100644 --- a/deno.jsonc +++ b/deno.jsonc @@ -1,8 +1,8 @@ { "tasks": { - "test": "GHJK_LOG=info deno test --parallel --unstable-worker-options --unstable-kv -A tests/*", + "test": "deno test --parallel --unstable-worker-options --unstable-kv -A tests/*", "cache": "deno cache deps/*", - "check": "deno run -A check.ts" + "check": "deno run -A ./check.ts" }, "fmt": { "exclude": [ @@ -22,7 +22,14 @@ "rules": { "include": [ "no-console", - "no-sync-fn-in-async-fn" + "no-sync-fn-in-async-fn", + "no-external-import", + "no-inferrable-types", + "no-self-compare", + "no-throw-literal" + // "verbatim-module-syntax" + // "no-await-in-loop" + // "ban-untagged-todo" ], "exclude": [ "no-explicit-any" diff --git a/deno.lock b/deno.lock index 60d8a39e..08ec8ec5 100644 --- a/deno.lock +++ b/deno.lock @@ -3,7 +3,9 @@ "packages": { "specifiers": { "jsr:@david/dax@0.40.1": "jsr:@david/dax@0.40.1", + "jsr:@david/dax@0.41.0": "jsr:@david/dax@0.41.0", "jsr:@david/which@0.3": "jsr:@david/which@0.3.0", + "jsr:@david/which@^0.4.1": "jsr:@david/which@0.4.1", "jsr:@ghjk/dax@0.40.2-alpha-ghjk": "jsr:@ghjk/dax@0.40.2-alpha-ghjk", "jsr:@std/assert@^0.221.0": "jsr:@std/assert@0.221.0", "jsr:@std/bytes@^0.221.0": "jsr:@std/bytes@0.221.0", @@ -14,7 +16,9 @@ "jsr:@std/path@0.221.0": "jsr:@std/path@0.221.0", "jsr:@std/path@^0.221.0": "jsr:@std/path@0.221.0", "jsr:@std/streams@0.221.0": "jsr:@std/streams@0.221.0", + "npm:@noble/hashes@1.4.0": "npm:@noble/hashes@1.4.0", "npm:@types/node": "npm:@types/node@18.16.19", + "npm:multiformats@13.1.0": "npm:multiformats@13.1.0", "npm:zod-validation-error": "npm:zod-validation-error@3.1.0_zod@3.23.3", "npm:zod-validation-error@3.2.0": "npm:zod-validation-error@3.2.0_zod@3.23.3" }, @@ -30,9 +34,23 @@ "jsr:@std/streams@0.221.0" ] }, + "@david/dax@0.41.0": { + "integrity": "9e1ecf66a0415962cc8ad3ba4e3fa93ce0f1a1cc797dd95c36fdfb6977dc7fc8", + "dependencies": [ + "jsr:@david/which@^0.4.1", + "jsr:@std/fmt@^0.221.0", + "jsr:@std/fs@0.221.0", + "jsr:@std/io@0.221.0", + "jsr:@std/path@0.221.0", + "jsr:@std/streams@0.221.0" + ] + }, "@david/which@0.3.0": { "integrity": "6bdb62c40ac90edcf328e854fa8103a8db21e7c326089cbe3c3a1cf7887d3204" }, + "@david/which@0.4.1": { + "integrity": "896a682b111f92ab866cc70c5b4afab2f5899d2f9bde31ed00203b9c250f225e" + }, "@ghjk/dax@0.40.2-alpha-ghjk": { "integrity": "87bc93e9947779cb2f3922fe277e21ea8c716de804b2627f80ba9e7bc3d0d019", "dependencies": [ @@ -81,10 +99,18 @@ } }, "npm": { + "@noble/hashes@1.4.0": { + "integrity": "sha512-V1JJ1WTRUqHHrOSh597hURcMqVKVGL/ea3kv0gSnEdsEZ0/+VyPghM1lMNGc00z7CIQorSvbKpuJkxvuHbvdbg==", + "dependencies": {} + }, "@types/node@18.16.19": { "integrity": "sha512-IXl7o+R9iti9eBW4Wg2hx1xQDig183jj7YLn8F7udNceyfkbn1ZxmzZXuak20gR40D7pIkIY1kYGx5VIGbaHKA==", "dependencies": {} }, + "multiformats@13.1.0": { + "integrity": "sha512-HzdtdBwxsIkzpeXzhQ5mAhhuxcHbjEHH+JQoxt7hG/2HGFjjwyolLo7hbaexcnhoEuV4e0TNJ8kkpMjiEYY4VQ==", + "dependencies": {} + }, "zod-validation-error@3.1.0_zod@3.23.3": { "integrity": "sha512-zujS6HqJjMZCsvjfbnRs7WI3PXN39ovTcY1n8a+KTm4kOH0ZXYsNiJkH1odZf4xZKMkBDL7M2rmQ913FCS1p9w==", "dependencies": { diff --git a/deps/common.ts b/deps/common.ts index 
13627211..56e6f7ed 100644 --- a/deps/common.ts +++ b/deps/common.ts @@ -11,9 +11,19 @@ export * as std_fmt_colors from "https://deno.land/std@0.213.0/fmt/colors.ts"; export * as std_url from "https://deno.land/std@0.213.0/url/mod.ts"; export * as std_path from "https://deno.land/std@0.213.0/path/mod.ts"; export * as std_fs from "https://deno.land/std@0.213.0/fs/mod.ts"; -// export * as dax from "jsr:@david/dax@0.40.1"; -export * as dax from "jsr:@ghjk/dax@0.40.2-alpha-ghjk"; -export * as jsonHash from "https://deno.land/x/json_hash@0.2.0/mod.ts"; -export { default as objectHash } from "https://deno.land/x/object_hash@2.0.3/mod.ts"; +// avoid using the following directly and go through the +// wrappers in ./utils/mod.ts +export * as dax from "jsr:@david/dax@0.41.0"; +// class re-exports are tricky. +export { Path as _DaxPath } from "jsr:@david/dax@0.41.0"; +// export * as dax from "jsr:@ghjk/dax@0.40.2-alpha-ghjk"; + +export { canonicalize as json_canonicalize } from "https://deno.land/x/json_hash@0.2.0/canon.ts"; export { default as deep_eql } from "https://deno.land/x/deep_eql@v5.0.1/index.js"; +// export * as multibase16 from "npm:multiformats@13.1.0/bases/base16"; +export * as multibase32 from "npm:multiformats@13.1.0/bases/base32"; +export * as multibase64 from "npm:multiformats@13.1.0/bases/base64"; +export * as multisha2 from "npm:multiformats@13.1.0/hashes/sha2"; +export * as multihasher from "npm:multiformats@13.1.0/hashes/hasher"; +export { sha256 as syncSha256 } from "npm:@noble/hashes@1.4.0/sha256"; diff --git a/docs/architecture.md b/docs/architecture.md index 81519ee9..90d607f8 100644 --- a/docs/architecture.md +++ b/docs/architecture.md @@ -62,7 +62,7 @@ the game in ghjk so we prefer to err on the side of modularity. argument to `getConfig`. - `ghjk/mod.ts` exposes a bunch of helpers for authoring conventional `ghjk.ts` but as far as the host is concerned, it's only aware of the - `getConfig(secureConfig?): SerializedConfig` interface. + `getConfig(ghjkfileUrl, secureConfig?): SerializedConfig` interface. 
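For context on the `getConfig` signature change above, a hedged sketch of how the host side consumes that interface after this diff, mirroring the updated call in `files/deno/worker.ts` further below; the file URL here is hypothetical:

```ts
// sketch: host-side use of the ghjkfile interface
const uri = "file:///project/ghjk.ts"; // hypothetical ghjkfile URL
const mod = await import(uri);
// the ghjkfile URL is now passed to getConfig as the first argument
const rawConfig = await mod.ghjk.getConfig(uri, mod.secureConfig);
const config = JSON.parse(JSON.stringify(rawConfig)); // plain SerializedConfig JSON
```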
### Ghjkdir diff --git a/examples/tasks/ghjk.ts b/examples/tasks/ghjk.ts index 4869b9d4..225ed3a7 100644 --- a/examples/tasks/ghjk.ts +++ b/examples/tasks/ghjk.ts @@ -2,17 +2,15 @@ export { ghjk } from "../../mod.ts"; import { logger, task } from "../../mod.ts"; import * as ports from "../../ports/mod.ts"; -task("greet", async ({ $, argv: [name] }) => { +task("greet", async ($, { argv: [name] }) => { await $`echo Hello ${name}!`; }); const ha = task({ name: "ha", - installs: [ - ports.protoc(), - ], + installs: [ports.protoc()], envVars: { STUFF: "stuffier" }, - async fn({ $ }) { + async fn($) { await $`echo $STUFF; protoc --version; `; @@ -35,7 +33,11 @@ task("hum", { fn: () => logger().info(`hum`), }); +// not all tasks need to be named +// but anon tasks can't be accessed from the CLI +const anon = task(() => logger().info("anon")); + task("hey", { - dependsOn: ["hii", "ho"], + dependsOn: ["hii", "ho", anon], fn: () => logger().info(`hey`), }); diff --git a/ghjkfiles/deno/mod.ts b/files/deno/mod.ts similarity index 100% rename from ghjkfiles/deno/mod.ts rename to files/deno/mod.ts diff --git a/ghjkfiles/deno/worker.ts b/files/deno/worker.ts similarity index 98% rename from ghjkfiles/deno/worker.ts rename to files/deno/worker.ts index c53be849..57c8201d 100644 --- a/ghjkfiles/deno/worker.ts +++ b/files/deno/worker.ts @@ -37,7 +37,7 @@ async function serializeConfig(uri: string, envVars: Record) { const { setup: setupLogger } = await import("../../utils/logger.ts"); setupLogger(); const mod = await import(uri); - const rawConfig = await mod.ghjk.getConfig(mod.secureConfig); + const rawConfig = await mod.ghjk.getConfig(uri, mod.secureConfig); const config = JSON.parse(JSON.stringify(rawConfig)); return { config, diff --git a/ghjkfiles/mod.ts b/files/mod.ts similarity index 55% rename from ghjkfiles/mod.ts rename to files/mod.ts index 1885cd3f..2a476e56 100644 --- a/ghjkfiles/mod.ts +++ b/files/mod.ts @@ -1,7 +1,11 @@ +//! This provides the backing implementation of the Ghjkfile frontends. + // NOTE: avoid adding sources of randomness // here to make the resulting config reasonably stable // across serializaiton. No random identifiers. +import { multibase32, multibase64 } from "../deps/common.ts"; + // ports specific imports import portsValidators from "../modules/ports/types.ts"; import type { @@ -10,12 +14,12 @@ import type { InstallSet, InstallSetRefProvision, PortsModuleConfigHashed, - PortsModuleSecureConfig, } from "../modules/ports/types.ts"; import logger from "../utils/logger.ts"; import { $, defaultCommandBuilder, + objectHash, Path, thinInstallConfig, unwrapParseRes, @@ -27,15 +31,15 @@ import * as node from "../ports/node.ts"; import type { SerializedConfig } from "../host/types.ts"; import * as std_modules from "../modules/std.ts"; // tasks -import { dax, jsonHash, objectHash } from "../deps/common.ts"; // WARN: this module has side-effects and only ever import // types from it import type { ExecTaskArgs } from "../modules/tasks/deno.ts"; -import { TasksModuleConfig } from "../modules/tasks/types.ts"; +import { TaskDefHashed, TasksModuleConfig } from "../modules/tasks/types.ts"; // envs -import { +import type { EnvRecipe, EnvsModuleConfig, + Provision, WellKnownProvision, } from "../modules/envs/types.ts"; @@ -43,7 +47,7 @@ export type EnvDefArgs = { name: string; installs?: InstallConfigFat[]; allowedPortDeps?: AllowedPortDep[]; - /* + /** * If true or not set, will base the task's env on top * of the default env (usually `main`). If false, will build on * top of a new env. 
If given a string, will use the identified env as a base @@ -52,22 +56,33 @@ export type EnvDefArgs = { base?: string | boolean; desc?: string; vars?: Record; + /** + * Task to execute when environment is activated. + */ + onEnter?: string | string[]; + /** + * Task to execute when environment is deactivated. + */ + onExit?: string | string[]; }; export type TaskFnArgs = { - $: dax.$Type; + $: ReturnType; argv: string[]; env: Record; + workingDir: string; }; -export type TaskFn = (args: TaskFnArgs) => Promise | any; +export type TaskFn = ( + $: ReturnType, + args: TaskFnArgs, +) => Promise | any; -/* - * Configuration for a task. +/** + * Configure a task under the given name or key. */ export type TaskDefArgs = { - name: string; - fn: TaskFn; + name?: string; desc?: string; dependsOn?: string[]; workingDir?: string | Path; @@ -77,12 +92,60 @@ export type TaskDefArgs = { base?: string | boolean; }; -export class GhjkfileBuilder { +export type DenoTaskDefArgs = TaskDefArgs & { + /** + * The logic to run when the task is invoked. + * + * Note: functions are optional for tasks. If none is set, + * it'll be a no-op. The task it depends on will still be run. + */ + fn?: TaskFn; + /** + * In order to key the right task when ghjk is requesting + * execution of a specific task, we identify each using a hash. + * The {@field fn} is `toString`ed in the hash input. + * If a ghjkfile is produing identical anonymous tasks for + * instance, it can provide a none to disambiguate beteween each + * through hash differences. + * + * NOTE: the nonce must be stable across serialization. + * NOTE: closing over values is generally ill-advised on tasks + * fns. If you want to close over values, make sure they're stable + * across re-serializations. + */ + nonce?: string; +}; + +type TaskDefTyped = DenoTaskDefArgs & { ty: "denoFile@v1" }; + +export class Ghjkfile { #installSets = new Map(); - #tasks = {} as Record; + #tasks = new Map(); #bb = new Map(); #seenEnvs: Record = {}; + /* dump() { + return { + installSets: Object.fromEntries(this.#installSets), + bb: Object.fromEntries(this.#bb), + seenEnvs: Object.fromEntries( + Object.entries(this.#seenEnvs).map(( + [key, [_builder, finalizer]], + ) => [key, finalizer()]), + ), + tasks: Object.fromEntries( + Object.entries(this.#tasks).map(([key, task]) => [key, { + ...task, + ...(task.ty === "denoFile@v1" + ? { + fn: task.fn.toString(), + } + : {}), + }]), + ), + }; + } */ + addInstall(setId: string, configUnclean: InstallConfigFat) { const config = unwrapParseRes( portsValidators.installConfigFat.safeParse(configUnclean), @@ -94,7 +157,7 @@ export class GhjkfileBuilder { const set = this.#getSet(setId); set.installs.push(config); - logger().debug("install added", config); + logger(import.meta).debug("install added", config); } setAllowedPortDeps(setId: string, deps: AllowedPortDep[]) { @@ -106,7 +169,7 @@ export class GhjkfileBuilder { ); } - addTask(args: TaskDefArgs) { + addTask(args: TaskDefTyped) { // NOTE: we make sure the env base declared here exists // this call is necessary to make sure that a `task` can // be declared before the `env` but still depend on it. @@ -115,12 +178,37 @@ export class GhjkfileBuilder { if (typeof args.base == "string") { this.addEnv({ name: args.base }); } - - this.#tasks[args.name] = { + let key = args.name; + if (!key) { + switch (args.ty) { + case "denoFile@v1": { + const { fn, workingDir, ...argsRest } = args; + key = objectHash(JSON.parse(JSON.stringify({ + ...argsRest, + workingDir: workingDir instanceof Path + ? 
workingDir.toString() + : workingDir, + ...(fn + ? { + // NOTE: we serialize the function to a string before + // hashing. + fn: fn.toString(), + } + : {}), + }))); + key = multibase64.base64urlpad.encode( + multibase32.base32.decode(key), + ); + break; + } + default: + throw new Error(`unexpected task type: ${args.ty}`); + } + } + this.#tasks.set(key, { ...args, - name, - }; - return args.name; + }); + return key; } addEnv(args: EnvDefArgs) { @@ -145,40 +233,51 @@ export class GhjkfileBuilder { if (args.vars) { env.vars(args.vars); } + if (args.onEnter) { + env.onEnter(...args.onEnter); + } + if (args.onExit) { + env.onEnter(...args.onExit); + } return env; } async execTask( - { name, workingDir, envVars, argv }: ExecTaskArgs, + { key, workingDir, envVars, argv }: ExecTaskArgs, ) { - const task = this.#tasks[name]; + const task = this.#tasks.get(key); if (!task) { - throw new Error(`no task defined under "${name}"`); + throw new Error(`no task defined under "${key}"`); + } + if (task.ty != "denoFile@v1") { + throw new Error(`task under "${key}" has unexpected type ${task.ty}`); + } + if (task.fn) { + const custom$ = task$(argv, envVars, workingDir); + await task.fn(custom$, { argv, env: envVars, $: custom$, workingDir }); } - const custom$ = $.build$({ - commandBuilder: defaultCommandBuilder().env(envVars).cwd(workingDir), - }); - await task.fn({ argv, env: envVars, $: custom$ }); } toConfig( - { defaultEnv, defaultBaseEnv, secureConfig }: { + { defaultEnv, defaultBaseEnv, masterPortDepAllowList }: { defaultEnv: string; defaultBaseEnv: string; - secureConfig: PortsModuleSecureConfig | undefined; + ghjkfileUrl: string; + masterPortDepAllowList: AllowedPortDep[]; }, ) { try { - const envsConfig = this.#processEnvs( - defaultEnv, + const envsConfig = this.#processEnvs(defaultEnv, defaultBaseEnv); + const tasksConfig = this.#processTasks( + envsConfig, defaultBaseEnv, ); - const tasksConfig = this.#processTasks(envsConfig, defaultBaseEnv); const portsConfig = this.#processInstalls( - secureConfig?.masterPortDepAllowList ?? stdDeps(), + masterPortDepAllowList ?? stdDeps(), ); const config: SerializedConfig = { + blackboard: Object.fromEntries(this.#bb.entries()), modules: [{ id: std_modules.ports, config: portsConfig, @@ -189,7 +288,6 @@ export class GhjkfileBuilder { id: std_modules.envs, config: envsConfig, }], - blackboard: Object.fromEntries(this.#bb.entries()), }; return config; } catch (cause) { @@ -208,7 +306,7 @@ export class GhjkfileBuilder { #addToBlackboard(inp: unknown) { // jsonHash.digest is async - const hash = objectHash(jsonHash.canonicalize(inp as jsonHash.Tree)); + const hash = objectHash(JSON.parse(JSON.stringify(inp))); if (!this.#bb.has(hash)) { this.#bb.set(hash, inp); @@ -216,8 +314,9 @@ export class GhjkfileBuilder { return hash; } - // this processes the defined envs, normalizing dependency (i.e. "envBase") - // relationships to produce the standard EnvsModuleConfig + /** this processes the defined envs, normalizing dependency (i.e. "envBase") + * relationships to produce the standard EnvsModuleConfig + */ #processEnvs( defaultEnv: string, defaultBaseEnv: string, @@ -232,30 +331,30 @@ export class GhjkfileBuilder { const [_name, [_builder, finalizer]] of Object.entries(this.#seenEnvs) ) { const final = finalizer(); - const { name, base } = final; - const envBaseResolved = typeof base === "string" - ? base - : base + const envBaseResolved = typeof final.base === "string" + ? final.base + : final.base ? 
defaultBaseEnv : null; - all[name] = { ...final, envBaseResolved }; + all[final.name] = { ...final, envBaseResolved }; if (envBaseResolved) { - let parentRevDeps = revDeps.get(envBaseResolved); - if (!parentRevDeps) { - parentRevDeps = []; - revDeps.set(envBaseResolved, parentRevDeps); + const parentRevDeps = revDeps.get(envBaseResolved); + if (parentRevDeps) { + parentRevDeps.push(final.name); + } else { + revDeps.set(envBaseResolved, [final.name]); } - parentRevDeps.push(final.name); } else { - indie.push(name); + indie.push(final.name); } } + const processed = {} as Record< string, { installSetId?: string; vars: Record } >; - const out: EnvsModuleConfig = { envs: {}, defaultEnv }; - const workingSet = [...indie]; + const moduleConfig: EnvsModuleConfig = { envs: {}, defaultEnv }; + const workingSet = indie; while (workingSet.length > 0) { const item = workingSet.pop()!; const final = all[item]; @@ -307,7 +406,42 @@ export class GhjkfileBuilder { installSetId: processedInstallSetId, vars: processedVars, }; - out.envs[final.name] = { + const hooks = [ + ...final.onEnterHookTasks.map( + (key) => [key, "hook.onEnter.posixExec"] as const, + ), + ...final.onExitHookTasks.map( + (key) => [key, "hook.onExit.posixExec"] as const, + ), + ].map(([taskKey, ty]) => { + const task = this.#tasks.get(taskKey); + if (!task) { + throw new Error("unable to find task for onEnterHook", { + cause: { + env: final.name, + taskKey, + }, + }); + } + if (task.ty == "denoFile@v1") { + const prov: InlineTaskHookProvision = { + ty: "inline.hook.ghjkTask", + finalTy: ty, + taskKey, + }; + return prov; + } + throw new Error( + `unsupported task type "${task.ty}" used for environment hook`, + { + cause: { + taskKey, + task, + }, + }, + ); + }); + moduleConfig.envs[final.name] = { desc: final.desc, provides: [ ...Object.entries(processedVars).map(( @@ -316,6 +450,8 @@ export class GhjkfileBuilder { const prov: WellKnownProvision = { ty: "posix.envVar", key, val }; return prov; }), + // env hooks + ...hooks, ], }; if (processedInstallSetId) { @@ -323,7 +459,7 @@ export class GhjkfileBuilder { ty: "ghjk.ports.InstallSetRef", setId: processedInstallSetId, }; - out.envs[final.name].provides.push(prov); + moduleConfig.envs[final.name].provides.push(prov); } const curRevDeps = revDeps.get(final.name); @@ -332,21 +468,55 @@ export class GhjkfileBuilder { revDeps.delete(final.name); } } - return out; + // sanity checks + if (revDeps.size > 0) { + throw new Error("working set empty but pending items found"); + } + return moduleConfig; } - #processTasks(envsConfig: EnvsModuleConfig, defaultBaseEnv: string) { - const out: TasksModuleConfig = { + #processTasks( + envsConfig: EnvsModuleConfig, + defaultBaseEnv: string, + ) { + const indie = [] as string[]; + const deps = new Map(); + const revDeps = new Map(); + const nameToKey = Object.fromEntries( + Object.entries(this.#tasks) + .filter(([_, { name }]) => !!name) + .map(([hash, { name }]) => [name, hash] as const), + ); + for (const [key, args] of this.#tasks) { + if (args.dependsOn && args.dependsOn.length > 0) { + const depKeys = args.dependsOn.map((nameOrKey) => + nameToKey[nameOrKey] ?? 
nameOrKey + ); + deps.set(key, depKeys); + for (const depKey of depKeys) { + const depRevDeps = revDeps.get(depKey); + if (depRevDeps) { + depRevDeps.push(key); + } else { + revDeps.set(depKey, [key]); + } + } + } else { + indie.push(key); + } + } + const workingSet = indie; + const localToFinalKey = {} as Record; + const moduleConfig: TasksModuleConfig = { envs: {}, tasks: {}, + tasksNamed: [], }; - for ( - const [name, args] of Object - .entries( - this.#tasks, - ) - ) { + while (workingSet.length > 0) { + const key = workingSet.pop()!; + const args = this.#tasks.get(key)!; const { workingDir, desc, dependsOn, base } = args; + const envBaseResolved = typeof base === "string" ? base : base @@ -403,7 +573,7 @@ export class GhjkfileBuilder { } } if (taskInstallSet.installs.length > 0) { - const setId = `ghjkTaskInstSet___${name}`; + const setId = `ghjkTaskInstSet___${key}`; this.#installSets.set(setId, taskInstallSet); const prov: InstallSetRefProvision = { ty: "ghjk.ports.InstallSetRef", @@ -421,32 +591,97 @@ export class GhjkfileBuilder { }), ); - const envHash = objectHash( - jsonHash.canonicalize(taskEnvRecipe as jsonHash.Tree), - ); - out.envs[envHash] = taskEnvRecipe; + const envHash = objectHash(JSON.parse(JSON.stringify(taskEnvRecipe))); + moduleConfig.envs[envHash] = taskEnvRecipe; - out.tasks[name] = { - name, + const def: TaskDefHashed = { + ty: args.ty, + key, workingDir: typeof workingDir == "object" ? workingDir.toString() : workingDir, desc, - dependsOn, + dependsOn: dependsOn?.map((keyOrHash) => + localToFinalKey[nameToKey[keyOrHash] ?? keyOrHash] + ), envHash, }; + const taskHash = objectHash(def); + // we prefer the name as a key if present + const finalKey = args.name ?? taskHash; + moduleConfig.tasks[finalKey] = def; + localToFinalKey[key] = finalKey; + + if (args.name) { + moduleConfig.tasksNamed.push(args.name); + } + for (const revDepKey of revDeps.get(key) ?? []) { + const revDepDeps = deps.get(revDepKey)!; + // swap remove + const idx = revDepDeps.indexOf(key); + const last = revDepDeps.pop()!; + if (revDepDeps.length > idx) { + revDepDeps[idx] = last; + } + + if (revDepDeps.length == 0) { + deps.delete(revDepKey); + workingSet.push(revDepKey); + } + } } - for (const [name, { dependsOn }] of Object.entries(out.tasks)) { + + // do some sanity checks + for (const [key, { dependsOn }] of Object.entries(moduleConfig.tasks)) { for (const depName of dependsOn ?? 
[]) { - if (!out.tasks[depName]) { + if (!moduleConfig.tasks[depName]) { throw new Error( - `task "${name}" depend on non-existent task "${depName}"`, + `task "${key}" depend on non-existent task "${depName}"`, + { + cause: { + workingSet, + revDeps, + moduleConfig, + tasks: this.#tasks, + nameToKey, + }, + }, ); } } } + if (deps.size > 0) { + throw new Error("working set empty but pending items found", { + cause: { + workingSet, + revDeps, + moduleConfig, + tasks: this.#tasks, + }, + }); + } - return out; + for (const [_name, env] of Object.entries(envsConfig.envs)) { + env.provides = env.provides.map( + (prov) => { + if ( + prov.ty == "inline.hook.ghjkTask" + ) { + const inlineProv = prov as InlineTaskHookProvision; + const taskKey = localToFinalKey[inlineProv.taskKey]; + const out: WellKnownProvision = { + ty: inlineProv.finalTy, + program: "ghjk", + arguments: ["x", taskKey], + }; + return out; + } + return prov; + }, + ); + } + + return moduleConfig; } #processInstalls(masterAllowList: AllowedPortDep[]) { @@ -490,6 +725,8 @@ type EnvFinalizer = () => { base: string | boolean; vars: Record; desc?: string; + onEnterHookTasks: string[]; + onExitHookTasks: string[]; }; // this class will be exposed to users and thus features @@ -497,13 +734,15 @@ type EnvFinalizer = () => { // all to avoid exposing the function in the public api export class EnvBuilder { #installSetId: string; - #file: GhjkfileBuilder; + #file: Ghjkfile; #base: string | boolean = true; #vars: Record = {}; #desc?: string; + #onEnterHookTasks: string[] = []; + #onExitHookTasks: string[] = []; constructor( - file: GhjkfileBuilder, + file: Ghjkfile, setFinalizer: (fin: EnvFinalizer) => void, public name: string, ) { @@ -515,6 +754,8 @@ export class EnvBuilder { base: this.#base, vars: this.#vars, desc: this.#desc, + onExitHookTasks: this.#onExitHookTasks, + onEnterHookTasks: this.#onEnterHookTasks, })); } @@ -523,7 +764,7 @@ export class EnvBuilder { return this; } - /* + /** * Provision a port install in the environment. */ install(...configs: InstallConfigFat[]) { @@ -533,7 +774,7 @@ export class EnvBuilder { return this; } - /* + /** * This is treated as a single set and will replace previously any configured set. */ allowedPortDeps(deps: AllowedPortDep[]) { @@ -541,7 +782,7 @@ export class EnvBuilder { return this; } - /* + /** * Add an environment variable. */ var(key: string, val: string) { @@ -549,7 +790,7 @@ export class EnvBuilder { return this; } - /* + /** * Add multiple environment variable. */ vars(envVars: Record) { @@ -557,29 +798,29 @@ export class EnvBuilder { return this; } - /* + /** * Description of the environment. */ desc(str: string) { this.#desc = str; return this; } -} -export function stdSecureConfig( - args: { - additionalAllowedPorts?: PortsModuleSecureConfig["masterPortDepAllowList"]; - enableRuntimes?: boolean; - } & Pick, -): PortsModuleSecureConfig { - const { additionalAllowedPorts, enableRuntimes = false } = args; - const out: PortsModuleSecureConfig = { - masterPortDepAllowList: [ - ...stdDeps({ enableRuntimes }), - ...additionalAllowedPorts ?? [], - ], - }; - return out; + /** + * Tasks to execute on enter. + */ + onEnter(...taskKey: string[]) { + this.#onEnterHookTasks.push(...taskKey); + return this; + } + + /** + * Tasks to execute on enter. 
+ */ + onExit(...taskKey: string[]) { + this.#onExitHookTasks.push(...taskKey); + return this; + } } export function stdDeps(args = { enableRuntimes: false }) { @@ -592,12 +833,41 @@ export function stdDeps(args = { enableRuntimes: false }) { node.default(), cpy.default(), ].map((fatInst) => { - return portsValidators.allowedPortDep.parse({ + const out: AllowedPortDep = { manifest: fatInst.port, defaultInst: thinInstallConfig(fatInst), - }); + }; + return portsValidators.allowedPortDep.parse(out); }), ); } return out; } + +function task$( + argv: string[], + env: Record, + workingDir: string, +) { + const custom$ = Object.assign( + // NOTE: order is important on who assigns to who + // here + $.build$({ + commandBuilder: defaultCommandBuilder().env(env).cwd(workingDir), + }), + { + argv, + env, + workingDir, + }, + ); + return custom$; +} + +type InlineTaskHookProvision = Provision & { + ty: "inline.hook.ghjkTask"; + finalTy: + | "hook.onEnter.posixExec" + | "hook.onExit.posixExec"; + taskKey: string; +}; diff --git a/ghjk.ts b/ghjk.ts index cc7acabf..33925ad7 100644 --- a/ghjk.ts +++ b/ghjk.ts @@ -1,5 +1,5 @@ export { ghjk } from "./mod.ts"; -import { env, install, stdSecureConfig } from "./mod.ts"; +import { env, install, stdSecureConfig, task } from "./mod.ts"; import * as ports from "./ports/mod.ts"; // these are just for quick testing @@ -9,9 +9,13 @@ install(); install( ports.act(), ports.pipi({ packageName: "pre-commit" })[0], - ports.cpy_bs({}), + ports.cpy_bs(), ); +env("main") + .onEnter(task(($) => $`echo enter`)) + .onExit(task(($) => $`echo exit`)); + env("test", { installs: [ports.protoc()], }); diff --git a/host/mod.ts b/host/mod.ts index d56afa84..e5b2cd2b 100644 --- a/host/mod.ts +++ b/host/mod.ts @@ -1,23 +1,16 @@ -import { - cliffy_cmd, - deep_eql, - jsonHash, - zod, - zod_val_err, -} from "../deps/cli.ts"; -import logger, { isColorfulTty } from "../utils/logger.ts"; - +import { cliffy_cmd, deep_eql, zod, zod_val_err } from "../deps/cli.ts"; +import logger from "../utils/logger.ts"; import { $, - bufferHashHex, + bufferHashAsync, Json, - objectHashHex, + objectHash, Path, - stringHashHex, + stringHash, } from "../utils/mod.ts"; import validators, { SerializedConfig } from "./types.ts"; import * as std_modules from "../modules/std.ts"; -import * as denoFile from "../ghjkfiles/deno/mod.ts"; +import * as denoFile from "../files/deno/mod.ts"; import type { ModuleBase } from "../modules/mod.ts"; import { GhjkCtx } from "../modules/types.ts"; import { serializePlatform } from "../modules/ports/types/platform.ts"; @@ -26,57 +19,68 @@ import { DePromisify } from "../port.ts"; export interface CliArgs { ghjkShareDir: string; ghjkfilePath?: string; + ghjkDirPath?: string; } type HostCtx = { fileHashMemoStore: Map>; + curEnvVars: Record; }; export async function cli(args: CliArgs) { - const ghjkShareDir = $.path(args.ghjkShareDir).resolve().normalize() - .toString(); // items to run at end of function const defer = [] as (() => Promise)[]; - const subcmds: Record = {}; - + const ghjkShareDir = $.path(args.ghjkShareDir).resolve().normalize(); let serializedConfig: object | undefined; - let ghjkDir: string | undefined; - let ghjkfilePath: string | undefined; + let gcx: GhjkCtx | undefined; - // most of the CLI is only avail if there's a - // ghjkfile detected - if (args.ghjkfilePath) { - ghjkfilePath = $.path(args.ghjkfilePath).resolve().normalize() - .toString(); - ghjkDir = $.path(ghjkfilePath).parentOrThrow().join(".ghjk") + if (!args.ghjkDirPath && args.ghjkfilePath) { + 
args.ghjkDirPath = $.path(args.ghjkfilePath).parentOrThrow().join(".ghjk") .toString(); - logger().debug({ ghjkfilePath, ghjkDir }); + } - const gcx = { ghjkShareDir, ghjkfilePath, ghjkDir, blackboard: new Map() }; - const hcx = { fileHashMemoStore: new Map() }; + const subcmds: Record = {}; - const { - subCommands: configCommands, - serializedConfig: config, - writeLockFile, - } = await readConfig( - gcx, - hcx, - ); - serializedConfig = config; - // lock entries are generated across program usage - // so we defer writing it out until the end - defer.push(writeLockFile); + // most of the CLI is only avail if there's a + // ghjkfile detected + if (args.ghjkDirPath) { + gcx = { + ghjkShareDir, + ghjkDir: $.path(args.ghjkDirPath).resolve().normalize(), + ghjkfilePath: args.ghjkfilePath + ? $.path(args.ghjkfilePath).resolve().normalize() + : undefined, + blackboard: new Map(), + }; + logger().debug({ ghjkfilePath: gcx.ghjkfilePath, ghjkDir: gcx?.ghjkDir }); - for (const [cmdName, [cmd, src]] of Object.entries(configCommands)) { - const conflict = subcmds[cmdName]; - if (conflict) { - throw new Error( - `CLI command conflict under name "${cmdName}" from host and module "${src}"`, - ); + if (!await gcx.ghjkDir.join(".gitignore").exists()) { + gcx.ghjkDir.join(".gitignore").writeText($.dedent` + envs + hash.json`); + } + + // this returns nothing if no valid lockifle or ghjkfile + // is found + const commands = await commandsFromConfig(gcx); + if (commands) { + serializedConfig = commands.config; + // lock entries are generated across program usage + // so we defer writing it out until the end + defer.push(commands.writeLockFile); + + for ( + const [cmdName, [cmd, src]] of Object.entries(commands.subCommands) + ) { + const conflict = subcmds[cmdName]; + if (conflict) { + throw new Error( + `CLI command conflict under name "${cmdName}" from host and module "${src}"`, + ); + } + subcmds[cmdName] = cmd; } - subcmds[cmdName] = cmd; } } @@ -118,7 +122,7 @@ export async function cli(args: CliArgs) { throw new Error("no ghjkfile found."); } // deno-lint-ignore no-console - console.log(ghjkShareDir); + console.log(ghjkShareDir.toString()); }), ) .command( @@ -126,11 +130,11 @@ export async function cli(args: CliArgs) { new cliffy_cmd.Command() .description("Print the path where ghjk is installed in.") .action(function () { - if (!ghjkDir) { + if (!gcx) { throw new Error("no ghjkfile found."); } // deno-lint-ignore no-console - console.log(ghjkDir); + console.log(gcx.ghjkDir.toString()); }), ) .command( @@ -138,11 +142,11 @@ export async function cli(args: CliArgs) { new cliffy_cmd.Command() .description("Print the path of the ghjk.ts used") .action(function () { - if (!ghjkfilePath) { + if (!gcx?.ghjkfilePath) { throw new Error("no ghjkfile found."); } // deno-lint-ignore no-console - console.log(ghjkfilePath); + console.log(gcx.ghjkfilePath.toString()); }), ) .command( @@ -151,15 +155,20 @@ export async function cli(args: CliArgs) { .description( "Print the extracted ans serialized config from the ghjkfile", ) - .action(function () { + .option( + "--json", + `Use json format when printing config.`, + ) + .action(function ({ json }) { if (!serializedConfig) { throw new Error("no ghjkfile found."); } // deno-lint-ignore no-console - console.log(Deno.inspect(serializedConfig, { - depth: 10, - colors: isColorfulTty(), - })); + console.log( + json + ? 
JSON.stringify(serializedConfig) + : $.inspect(serializedConfig), + ); }), ), ); @@ -170,42 +179,41 @@ export async function cli(args: CliArgs) { await Promise.all(defer.map((fn) => fn())); } -async function readConfig(gcx: GhjkCtx, hcx: HostCtx) { - const configPath = $.path(gcx.ghjkfilePath); - const configFileStat = await configPath.stat(); - // FIXME: subset of ghjk commands should be functional - // even if config file not found - if (!configFileStat) { - throw new Error("unable to locate config file", { - cause: gcx, - }); - } - const ghjkDirPath = $.path(gcx.ghjkDir); - if (!await ghjkDirPath.join(".gitignore").exists()) { - ghjkDirPath.join(".gitignore").writeText($.dedent` - envs - hash.json`); - } - const lockFilePath = ghjkDirPath.join("lock.json"); - const hashFilePath = ghjkDirPath.join("hash.json"); +async function commandsFromConfig(gcx: GhjkCtx) { + const hcx: HostCtx = { + fileHashMemoStore: new Map(), + curEnvVars: Deno.env.toObject(), + }; - // command name to [cmd, source module id] - const subCommands = {} as Record; - const lockEntries = {} as Record; + const lockFilePath = gcx.ghjkDir.join("lock.json"); + const hashFilePath = gcx.ghjkDir.join("hash.json"); const foundLockObj = await readLockFile(lockFilePath); const foundHashObj = await readHashFile(hashFilePath); - const ghjkfileHash = await fileDigestHex(hcx, configPath); + const lockEntries = {} as Record; - const curEnvVars = Deno.env.toObject(); + const ghjkfileHash = await gcx.ghjkfilePath?.exists() + ? await fileDigestHex(hcx, gcx.ghjkfilePath!) + : undefined; let configExt: SerializedConfigExt | null = null; // TODO: figure out cross platform lockfiles :O if ( foundLockObj && // lockfile found - foundLockObj.version == "0" + foundHashObj && + foundLockObj.version == "0" && + // avoid reserializing the config if + // the ghjkfile and environment is _satisfcatorily_ + // similar. "cache validation" + await isHashFileValid(hcx, foundLockObj, foundHashObj, ghjkfileHash) ) { + configExt = { + config: foundLockObj.config, + envVarHashes: foundHashObj.envVarHashes, + readFileHashes: foundHashObj.readFileHashes, + listedFiles: foundHashObj.listedFiles, + }; logger().debug("loading lockfile", lockFilePath); for (const man of foundLockObj.config.modules) { const mod = std_modules.map[man.id]; @@ -226,65 +234,13 @@ async function readConfig(gcx: GhjkCtx, hcx: HostCtx) { entry as Json, ); } - - const platformMatch = () => - serializePlatform(Deno.build) == foundLockObj.platform; - - const envHashesMatch = async () => { - const oldHashes = foundHashObj!.envVarHashes; - const newHashes = await envVarDigests(curEnvVars, [ - ...Object.keys(oldHashes), - ]); - return deep_eql(oldHashes, newHashes); - }; - - const cwd = $.path(Deno.cwd()); - const fileHashesMatch = async () => { - const oldHashes = foundHashObj!.readFileHashes; - const newHashes = await fileDigests(hcx, [ - ...Object.keys(oldHashes), - ], cwd); - return deep_eql(oldHashes, newHashes); - }; - - const fileListingsMatch = async () => { - const oldListed = foundHashObj!.listedFiles; - for (const path of oldListed) { - if (!await cwd.resolve(path).exists()) { - return false; - } - } - return true; - }; - // avoid reserializing the config if - // the ghjkfile and environment is _satisfcatorily_ - // similar. 
"cache validation" - if ( - // NOTE: these are ordered by the amount effort it takes - // to check each - foundHashObj && - foundHashObj.ghjkfileHash == ghjkfileHash && - platformMatch() && - await envHashesMatch() && - await fileListingsMatch() && - await fileHashesMatch() - ) { - configExt = { - config: foundLockObj.config, - envVarHashes: foundHashObj.envVarHashes, - readFileHashes: foundHashObj.readFileHashes, - listedFiles: foundHashObj.listedFiles, - }; - } - } - - // configExt will be falsy if no lockfile was found - // or if it failed cache validation - if (!configExt) { - logger().info("serializing ghjkfile", configPath); - configExt = await readAndSerializeConfig(hcx, configPath, curEnvVars); + } else if (gcx.ghjkfilePath) { + logger().info("serializing ghjkfile", gcx.ghjkfilePath); + configExt = await readGhjkfile(hcx, gcx.ghjkfilePath); + } else { + // nothing to get the commands from + return; } - const newHashObj: zod.infer = { version: "0", ghjkfileHash, @@ -292,7 +248,10 @@ async function readConfig(gcx: GhjkCtx, hcx: HostCtx) { readFileHashes: configExt.readFileHashes, listedFiles: configExt.listedFiles, }; + // command name to [cmd, source module id] + const subCommands = {} as Record; const instances = [] as [string, ModuleBase, unknown][]; + for (const man of configExt.config.modules) { const mod = std_modules.map[man.id]; if (!mod) { @@ -324,7 +283,7 @@ async function readConfig(gcx: GhjkCtx, hcx: HostCtx) { return { subCommands, - serializedConfig: configExt.config, + config: configExt.config, async writeLockFile() { const newLockObj: zod.infer = { version: "0", @@ -354,16 +313,60 @@ async function readConfig(gcx: GhjkCtx, hcx: HostCtx) { }; } +async function isHashFileValid( + hcx: HostCtx, + foundLockFile: zod.infer, + foundHashFile: zod.infer, + ghjkfileHash?: string, +) { + const platformMatch = () => + serializePlatform(Deno.build) == foundLockFile.platform; + + const envHashesMatch = () => { + const oldHashes = foundHashFile!.envVarHashes; + const newHashes = envVarDigests(hcx.curEnvVars, [ + ...Object.keys(oldHashes), + ]); + return deep_eql(oldHashes, newHashes); + }; + + const cwd = $.path(Deno.cwd()); + const fileHashesMatch = async () => { + const oldHashes = foundHashFile!.readFileHashes; + const newHashes = await fileDigests(hcx, [ + ...Object.keys(oldHashes), + ], cwd); + return deep_eql(oldHashes, newHashes); + }; + + const fileListingsMatch = async () => { + const oldListed = foundHashFile!.listedFiles; + for (const path of oldListed) { + if (!await cwd.resolve(path).exists()) { + return false; + } + } + return true; + }; + // NOTE: these are ordered by the amount effort it takes + // to check each + // we only check file hash of the ghjk file if it's present + return (ghjkfileHash ? 
foundHashFile.ghjkfileHash == ghjkfileHash : true) && + platformMatch() && + envHashesMatch() && + await fileListingsMatch() && + await fileHashesMatch(); +} + type DigestsMap = Record; type SerializedConfigExt = DePromisify< - ReturnType + ReturnType >; -async function readAndSerializeConfig( +async function readGhjkfile( hcx: HostCtx, configPath: Path, - envVars: Record, ) { switch (configPath.extname()) { case "": @@ -373,9 +376,9 @@ async function readAndSerializeConfig( logger().debug("serializing ts config", configPath); const res = await denoFile.getSerializedConfig( configPath.toFileUrl().href, - envVars, + hcx.curEnvVars, ); - const envVarHashes = await envVarDigests(envVars, res.accessedEnvKeys); + const envVarHashes = envVarDigests(hcx.curEnvVars, res.accessedEnvKeys); const cwd = $.path(Deno.cwd()); const cwdStr = cwd.toString(); const listedFiles = res.listedFiles @@ -456,7 +459,7 @@ async function readLockFile(lockFilePath: Path) { const hashObjValidator = zod.object({ version: zod.string(), - ghjkfileHash: zod.string(), + ghjkfileHash: zod.string().nullish(), envVarHashes: zod.record(zod.string(), zod.string().nullish()), readFileHashes: zod.record(zod.string(), zod.string().nullish()), listedFiles: zod.string().array(), @@ -488,7 +491,7 @@ async function readHashFile(hashFilePath: Path) { } } -async function envVarDigests(all: Record, accessed: string[]) { +function envVarDigests(all: Record, accessed: string[]) { const hashes = {} as DigestsMap; for (const key of accessed) { const val = all[key]; @@ -496,7 +499,7 @@ async function envVarDigests(all: Record, accessed: string[]) { // use null if the serializer accessed hashes[key] = null; } else { - hashes[key] = await stringHashHex(val); + hashes[key] = stringHash(val); } } return hashes; @@ -505,21 +508,21 @@ async function envVarDigests(all: Record, accessed: string[]) { async function fileDigests(hcx: HostCtx, readFiles: string[], cwd: Path) { const cwdStr = cwd.toString(); const readFileHashes = {} as DigestsMap; - await Promise.all(readFiles.map(async (path) => { - const pathRef = cwd.resolve(path); - const relativePath = pathRef + await Promise.all(readFiles.map(async (pathStr) => { + const path = cwd.resolve(pathStr); + const relativePath = path .toString() .replace(cwdStr, "."); // FIXME: stream read into hash to improve mem usage - const stat = await pathRef.lstat(); + const stat = await path.lstat(); if (stat) { const contentHash = (stat.isFile || stat.isSymlink) - ? await fileDigestHex(hcx, pathRef) + ? 
await fileDigestHex(hcx, path) : null; - readFileHashes[relativePath] = await objectHashHex({ - ...stat, + readFileHashes[relativePath] = objectHash({ + ...JSON.parse(JSON.stringify(stat)), contentHash, - } as jsonHash.Tree); + }); } else { readFileHashes[relativePath] = null; } @@ -540,7 +543,7 @@ function fileDigestHex(hcx: HostCtx, path: Path) { } return promise; async function inner() { - return await bufferHashHex( + return await bufferHashAsync( await path.readBytes(), ); } diff --git a/install.sh b/install.sh index d10d6aca..89de7e54 100755 --- a/install.sh +++ b/install.sh @@ -5,7 +5,7 @@ set -e -u GHJK_VERSION="${GHJK_VERSION:-v0.1.0-alpha}" GHJK_INSTALLER_URL="${GHJK_INSTALLER_URL:-https://raw.github.com/metatypedev/ghjk/$GHJK_VERSION/install.ts}" GHJK_SHARE_DIR="${GHJK_SHARE_DIR:-$HOME/.local/share/ghjk}" -DENO_VERSION="${DENO_VERSION:-v1.42.1}" +DENO_VERSION="${DENO_VERSION:-v1.43.1}" # make sure the version is prepended with v if [ "${DENO_VERSION#"v"}" = "$DENO_VERSION" ]; then diff --git a/install/ghjk.sh b/install/ghjk.sh index 4ea0977c..f711d3ac 100644 --- a/install/ghjk.sh +++ b/install/ghjk.sh @@ -2,9 +2,13 @@ export GHJK_SHARE_DIR="${GHJK_SHARE_DIR:-__GHJK_SHARE_DIR__}" export DENO_DIR="${GHJK_DENO_DIR:-__DENO_CACHE_DIR}" export DENO_NO_UPDATE_CHECK=1 +GHJK_MAIN_URL="${GHJK_MAIN_URL:-__MAIN_TS_URL__}" -# NOTE: avoid putting too much in here as the ghjk bin is meant -# to be optional. +# NOTE: avoid putting too much in here as this is only one +# method of getting the ghjk bin which is all utlimately optional +# anyways. + +# NOTE: keep this in sync with impls in install/exec.ts # if ghjkfile var is set, set the GHJK_DIR overriding # any set by the user @@ -39,4 +43,4 @@ fi # we don't want to quote $lock_flag as it's not exactly a single # string param to deno # shellcheck disable=SC2086 -exec __DENO_EXEC__ run --unstable-kv --unstable-worker-options -A $lock_flag __MAIN_TS_URL__ "$@" +exec __DENO_EXEC__ run __UNSTABLE_FLAGS__ -A $lock_flag $GHJK_MAIN_URL "$@" diff --git a/install/mod.ts b/install/mod.ts index 219b497d..7af7f4fb 100644 --- a/install/mod.ts +++ b/install/mod.ts @@ -1,10 +1,26 @@ //! this installs the different shell ghjk hooks in ~/.local/share/ghjk //! and a `ghjk` bin at ~/.local/share/bin -import logger from "../utils/logger.ts"; -import { std_fs, std_path } from "../deps/cli.ts"; +// TODO: explore installing deno.lock from ghjk repo and +// relying on --frozen-lockfile + +import getLogger from "../utils/logger.ts"; import { $, dirs, importRaw } from "../utils/mod.ts"; +import type { Path } from "../utils/mod.ts"; + +const logger = getLogger(import.meta); +/** + * Deno unstable flags needed for ghjk host. 
+ */ +export const unstableFlags = [ + "--unstable-kv", + "--unstable-worker-options", +]; + +// TODO: calculate and add integrity hashes to these raw imports +// as they won't be covered by deno.lock +// - use pre-commit-hook plus ghjk tasks to do find+replace // null means it should be removed (for cleaning up old versions) const getHooksVfs = async () => ({ "env.sh": ( @@ -35,38 +51,39 @@ const getHooksVfs = async () => ({ async function unpackVFS( vfs: Record, - baseDir: string, + baseDirRaw: Path, replacements: [RegExp, string][], ): Promise { - await $.path(baseDir).ensureDir(); + const baseDir = await $.path(baseDirRaw).ensureDir(); for (const [subpath, content] of Object.entries(vfs)) { - const path = std_path.resolve(baseDir, subpath); + const path = baseDir.join(subpath); if (content === null) { - await $.path(baseDir).remove({ recursive: true }); + await path.remove({ recursive: true }); } else { let text = content.trim(); for (const [re, repl] of replacements) { text = text.replace(re, repl); } - await $.path(std_path.dirname(path)).ensureDir(); - await $.path(path).writeText(text); + await path.parentOrThrow().ensureDir(); + await path.writeText(text); } } } async function filterAddContent( - path: string, + path: Path, marker: RegExp, content: string | null, ) { - const file = await Deno.readTextFile(path).catch(async (err) => { - if (err instanceof Deno.errors.NotFound) { - await Deno.mkdir(std_path.dirname(path), { recursive: true }); - return ""; - } - throw err; - }); + const file = await path.readText() + .catch(async (err) => { + if (err instanceof Deno.errors.NotFound) { + await $.path(path).parentOrThrow().ensureDir(); + return ""; + } + throw err; + }); const lines = file.split("\n"); let i = 0; @@ -82,7 +99,7 @@ async function filterAddContent( lines.push(content); } - await Deno.writeTextFile(path, lines.join("\n")); + await path.writeText(lines.join("\n")); } interface InstallArgs { @@ -117,17 +134,15 @@ interface InstallArgs { noLockfile: boolean; } -/** - * @field: - */ export const defaultInstallArgs: InstallArgs = { - ghjkShareDir: std_path.resolve(dirs().shareDir, "ghjk"), + ghjkShareDir: $.path(dirs().shareDir).resolve("ghjk").toString(), homeDir: dirs().homeDir, shellsToHook: [], shellHookMarker: "ghjk-hook-default", skipExecInstall: true, // TODO: respect xdg dirs - ghjkExecInstallDir: std_path.resolve(dirs().homeDir, ".local", "bin"), + ghjkExecInstallDir: $.path(dirs().homeDir).resolve(".local", "bin") + .toString(), ghjkExecDenoExec: Deno.execPath(), /** * the default behvaior kicks in with ghjkDenoCacheDir is falsy @@ -145,21 +160,19 @@ const shellConfig: Record = { export async function install( args: InstallArgs = defaultInstallArgs, ) { - logger().debug("installing", args); + logger.debug("installing", args); if (Deno.build.os == "windows") { throw new Error("windows is not yet supported, please use wsl"); } - const ghjkShareDir = std_path.resolve( - Deno.cwd(), - std_path.normalize(args.ghjkShareDir), - ); + const ghjkShareDir = $.path(Deno.cwd()) + .resolve(args.ghjkShareDir); - logger().debug("unpacking vfs", { ghjkShareDir }); + logger.debug("unpacking vfs", { ghjkShareDir }); await unpackVFS( await getHooksVfs(), ghjkShareDir, - [[/__GHJK_SHARE_DIR__/g, ghjkShareDir]], + [[/__GHJK_SHARE_DIR__/g, ghjkShareDir.toString()]], ); for (const shell of args.shellsToHook) { @@ -169,8 +182,8 @@ export async function install( throw new Error(`unsupported shell: ${shell}`); } - const rcPath = std_path.resolve(homeDir, shellConfig[shell]); - 
logger().debug("installing hook", { + const rcPath = $.path(homeDir).join(shellConfig[shell]); + logger.debug("installing hook", { ghjkShareDir, shell, marker: args.shellHookMarker, @@ -190,28 +203,32 @@ export async function install( case "solaris": case "illumos": case "darwin": { - await std_fs.ensureDir(args.ghjkExecInstallDir); - const exePath = std_path.resolve(args.ghjkExecInstallDir, `ghjk`); - logger().debug("installing executable", { exePath }); + const installDir = await $.path(args.ghjkExecInstallDir).ensureDir(); + const exePath = installDir.resolve(`ghjk`); + logger.debug("installing executable", { exePath }); // use an isolated cache by default - const denoCacheDir = args.ghjkDenoCacheDir ?? - std_path.resolve(ghjkShareDir, "deno"); - await Deno.writeTextFile( - exePath, + const denoCacheDir = args.ghjkDenoCacheDir + ? $.path(args.ghjkDenoCacheDir) + : ghjkShareDir.resolve("deno"); + await exePath.writeText( (await importRaw(import.meta.resolve("./ghjk.sh"))) .replaceAll( "__GHJK_SHARE_DIR__", - ghjkShareDir, + ghjkShareDir.toString(), ) .replaceAll( "__DENO_CACHE_DIR", - denoCacheDir, + denoCacheDir.toString(), ) .replaceAll( "__DENO_EXEC__", args.ghjkExecDenoExec, ) + .replaceAll( + "__UNSTABLE_FLAGS__", + unstableFlags.join(" "), + ) .replaceAll( "__MAIN_TS_URL__", import.meta.resolve("../main.ts"), @@ -224,5 +241,5 @@ export async function install( throw new Error(`${Deno.build.os} is not yet supported`); } } - logger().info("install success"); + logger.info("install success"); } diff --git a/install/utils.ts b/install/utils.ts new file mode 100644 index 00000000..1f5bb9b5 --- /dev/null +++ b/install/utils.ts @@ -0,0 +1,44 @@ +//! Please keep these in sync with `./ghjk.ts` + +import type { GhjkCtx } from "../modules/types.ts"; +import { unstableFlags } from "./mod.ts"; + +/** + * Returns a simple posix function to invoke the ghjk CLI. + */ +export function ghjk_sh( + gcx: GhjkCtx, + denoDir: string, + functionName = "__ghjk_shim", +) { + return `${functionName} () { + GHJK_SHARE_DIR="${gcx.ghjkShareDir}" \\ + DENO_DIR="${denoDir}" \\ + DENO_NO_UPDATE_CHECK=1 \\ + GHJK_DIR="${gcx.ghjkDir}" \\ + ${Deno.execPath()} run ${ + unstableFlags.join(" ") + } -A --lock ${gcx.ghjkDir}/deno.lock ${import.meta.resolve("../main.ts")} "$@" +}`; +} + +/** + * Returns a simple fish function to invoke the ghjk CLI. + */ +export function ghjk_fish( + gcx: GhjkCtx, + denoDir: string, + functionName = "__ghjk_shim", +) { + return `function ${functionName} + GHJK_SHARE_DIR="${gcx.ghjkShareDir}" \\ + DENO_DIR="${denoDir}" \\ + DENO_NO_UPDATE_CHECK=1 \\ + GHJK_DIR="${gcx.ghjkDir}" \\ + ${Deno.execPath()} run ${ + unstableFlags.join(" ") + } -A --lock ${gcx.ghjkDir}/deno.lock ${ + import.meta.resolve("../main.ts") + } $argv +end`; +} diff --git a/main.ts b/main.ts index 81571da7..42aed1fd 100755 --- a/main.ts +++ b/main.ts @@ -4,21 +4,29 @@ import "./setup_logger.ts"; import { cli } from "./host/mod.ts"; import { std_path } from "./deps/common.ts"; import logger from "./utils/logger.ts"; -import { dirs, findConfig } from "./utils/mod.ts"; +import { dirs, findEntryRecursive } from "./utils/mod.ts"; if (import.meta.main) { - const ghjkfile = Deno.env.get("GHJKFILE") ?? - await findConfig(Deno.cwd()); - if (!ghjkfile) { + // look for ghjkdir + let ghjkdir = Deno.env.get("GHJK_DIR") ?? + await findEntryRecursive(Deno.cwd(), ".ghjk"); + const ghjkfile = ghjkdir + ? 
await findEntryRecursive(std_path.dirname(ghjkdir), "ghjk.ts") + : await findEntryRecursive(Deno.cwd(), "ghjk.ts"); + if (!ghjkdir && !ghjkfile) { logger().warn( - "ghjk could not find any ghjkfiles, try creating a `ghjk.ts` script.", + "ghjk could not find any ghjkfiles or ghjkdirs, try creating a `ghjk.ts` script.", ); // Deno.exit(2); } + if (ghjkfile && !ghjkdir) { + ghjkdir = std_path.resolve(std_path.dirname(ghjkfile), ".ghjk"); + } await cli({ ghjkShareDir: Deno.env.get("GHJK_SHARE_DIR") ?? - std_path.resolve(dirs().shareDir, "ghjk"), + dirs().shareDir.resolve("ghjk").toString(), ghjkfilePath: ghjkfile ? std_path.resolve(Deno.cwd(), ghjkfile) : undefined, + ghjkDirPath: ghjkdir ? std_path.resolve(Deno.cwd(), ghjkdir) : undefined, }); } else { throw new Error( diff --git a/mod.ts b/mod.ts index 65fd9bff..4bb39ad0 100644 --- a/mod.ts +++ b/mod.ts @@ -5,27 +5,24 @@ import "./setup_logger.ts"; +import { zod } from "./deps/common.ts"; // ports specific imports +import portsValidators from "./modules/ports/types.ts"; import type { + AllowedPortDep, InstallConfigFat, - PortsModuleSecureConfig, } from "./modules/ports/types.ts"; import logger from "./utils/logger.ts"; -import { $ } from "./utils/mod.ts"; -import { - EnvBuilder, - GhjkfileBuilder, - stdDeps, - stdSecureConfig, -} from "./ghjkfiles/mod.ts"; -import type { EnvDefArgs, TaskDefArgs, TaskFn } from "./ghjkfiles/mod.ts"; +import { $, thinInstallConfig } from "./utils/mod.ts"; +import { EnvBuilder, Ghjkfile, stdDeps } from "./files/mod.ts"; +import type { DenoTaskDefArgs, EnvDefArgs, TaskFn } from "./files/mod.ts"; // WARN: this module has side-effects and only ever import // types from it import type { ExecTaskArgs } from "./modules/tasks/deno.ts"; const DEFAULT_BASE_ENV_NAME = "main"; -const file = new GhjkfileBuilder(); +const file = new Ghjkfile(); const mainEnv = file.addEnv({ name: DEFAULT_BASE_ENV_NAME, base: false, @@ -33,21 +30,32 @@ const mainEnv = file.addEnv({ desc: "the default default environment.", }); -export type { EnvDefArgs, TaskDefArgs, TaskFn } from "./ghjkfiles/mod.ts"; +export type { DenoTaskDefArgs, EnvDefArgs, TaskFn } from "./files/mod.ts"; export { $, logger, stdDeps, stdSecureConfig }; // FIXME: ses.lockdown to freeze primoridials // freeze the object to prevent malicious tampering of the secureConfig export const ghjk = Object.freeze({ getConfig: Object.freeze( - (secureConfig: PortsModuleSecureConfig | undefined) => { + ( + ghjkfileUrl: string, + secureConfig: DenoFileSecureConfig | undefined, + ) => { const defaultEnv = secureConfig?.defaultEnv ?? DEFAULT_BASE_ENV_NAME; const defaultBaseEnv = secureConfig?.defaultBaseEnv ?? DEFAULT_BASE_ENV_NAME; - return file.toConfig({ defaultEnv, defaultBaseEnv, secureConfig }); + return file.toConfig({ + defaultEnv, + defaultBaseEnv, + ghjkfileUrl, + masterPortDepAllowList: secureConfig?.masterPortDepAllowList ?? + stdDeps(), + }); }, ), execTask: Object.freeze( + // TODO: do we need to source the default base env from + // the secure config here? (args: ExecTaskArgs) => file.execTask(args), ), }); @@ -59,27 +67,44 @@ export function install(...configs: InstallConfigFat[]) { mainEnv.install(...configs); } -export function task(args: TaskDefArgs): string; -export function task(name: string, args: Omit): string; -export function task(name: string, fn: TaskFn): string; +/** + * Define and register a task. 
+ */ +export function task(args: DenoTaskDefArgs): string; +export function task(name: string, args: Omit): string; +export function task( + name: string, + fn: TaskFn, + args?: Omit, +): string; +export function task(fn: TaskFn, args?: Omit): string; export function task( - nameOrArgs: string | TaskDefArgs, - argsOrFn?: Omit | TaskFn, + nameOrArgsOrFn: string | DenoTaskDefArgs | TaskFn, + argsOrFn?: Omit | TaskFn, + argsMaybe?: Omit, ): string { - let args: TaskDefArgs; - if (typeof nameOrArgs == "object") { - args = nameOrArgs; + let args: DenoTaskDefArgs; + if (typeof nameOrArgsOrFn == "object") { + args = nameOrArgsOrFn; + } else if (typeof nameOrArgsOrFn == "function") { + args = { + ...(argsOrFn ?? {}), + fn: nameOrArgsOrFn, + }; } else if (typeof argsOrFn == "object") { - args = { ...argsOrFn, name: nameOrArgs }; + args = { ...argsOrFn, name: nameOrArgsOrFn }; } else if (argsOrFn) { args = { - name: nameOrArgs, + ...(argsMaybe ?? {}), + name: nameOrArgsOrFn, fn: argsOrFn, }; } else { - throw new Error("no function provided when defining task"); + args = { + name: nameOrArgsOrFn, + }; } - return file.addTask(args); + return file.addTask({ ...args, ty: "denoFile@v1" }); } export function env(args: EnvDefArgs): EnvBuilder; @@ -93,3 +118,48 @@ export function env( : { ...argsMaybe, name: nameOrArgs }; return file.addEnv(args); } + +const denoFileSecureConfig = zod.object({ + masterPortDepAllowList: zod.array(portsValidators.allowedPortDep).nullish(), + // TODO: move into envs/types + defaultEnv: zod.string().nullish(), + defaultBaseEnv: zod.string().nullish(), +}); +/* + * This is a secure sections of the config intended to be direct exports + * from the config script instead of the global variable approach the + * main [`GhjkConfig`] can take. + */ +export type DenoFileSecureConfig = zod.input< + typeof denoFileSecureConfig +>; +export type DenoFileSecureConfigX = zod.input< + typeof denoFileSecureConfig +>; + +function stdSecureConfig( + args: { + additionalAllowedPorts?: (InstallConfigFat | AllowedPortDep)[]; + enableRuntimes?: boolean; + } & Pick, +) { + const { additionalAllowedPorts, enableRuntimes = false } = args; + const out: DenoFileSecureConfig = { + ...args, + masterPortDepAllowList: [ + ...stdDeps({ enableRuntimes }), + ...additionalAllowedPorts?.map( + (dep: any) => { + const res = portsValidators.allowedPortDep.safeParse(dep); + if (res.success) return res.data; + const out: AllowedPortDep = { + manifest: dep.port, + defaultInst: thinInstallConfig(dep), + }; + return portsValidators.allowedPortDep.parse(out); + }, + ) ?? [], + ], + }; + return out; +} diff --git a/modules/envs/mod.ts b/modules/envs/mod.ts index 0ecc97c8..b5c96899 100644 --- a/modules/envs/mod.ts +++ b/modules/envs/mod.ts @@ -17,7 +17,6 @@ import type { InstallSetProvision, InstallSetRefProvision, } from "../ports/types.ts"; -import { isColorfulTty } from "../../utils/logger.ts"; import { buildInstallGraph, syncCtxFromGhjk } from "../ports/sync.ts"; export type EnvsCtx = { @@ -48,8 +47,8 @@ export class EnvsModule extends ModuleBase { const config = unwrapParseCurry( validators.envsModuleConfig.safeParse(manifest.config), ); - - const activeEnv = Deno.env.get("GHJK_ENV") ?? config.defaultEnv; + const setEnv = Deno.env.get("GHJK_ENV"); + const activeEnv = setEnv && setEnv != "" ? 
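// Hypothetical ghjkfile snippets showing the call shapes the widened task()
// overloads accept (the anonymous forms mirror the new "anon" test case in
// tests/tasks.ts further down):
task({ name: "greet", fn: ($) => $`echo hello` });           // full args object
task("greet2", ($) => $`echo hello again`);                  // name + function
task(($) => $`echo anon`, { desc: "name is now optional" }); // anonymous task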
setEnv : config.defaultEnv; return Promise.resolve({ activeEnv, @@ -139,18 +138,13 @@ export class EnvsModule extends ModuleBase { throw new Error(`No env found under given name "${envName}"`); } // deno-lint-ignore no-console - console.log(Deno.inspect( - await showableEnv(gcx, env, envName), - { - depth: 10, - colors: isColorfulTty(), - }, - )); + console.log($.inspect(await showableEnv(gcx, env, envName))); }), ), sync: new cliffy_cmd.Command() - .description(`Cooks and activates an environment. + .description(`Synchronize your shell to what's in your config. +Just simply cooks and activates an environment. - If no [envName] is specified and no env is currently active, this syncs the configured default env [${ecx.config.defaultEnv}]. - If the environment is already active, this doesn't launch a new shell.`) .arguments("[envName:string]") @@ -250,7 +244,7 @@ async function reduceAndCookEnv( if (envName == ecx.config.defaultEnv) { const defaultEnvDir = $.path(gcx.ghjkDir).join("envs", "default"); await $.removeIfExists(defaultEnvDir); - await defaultEnvDir.createSymlinkTo(envDir, { kind: "relative" }); + await defaultEnvDir.symlinkTo(envDir, { kind: "relative" }); } } diff --git a/modules/envs/posix.ts b/modules/envs/posix.ts index 14d9a250..833fe091 100644 --- a/modules/envs/posix.ts +++ b/modules/envs/posix.ts @@ -1,9 +1,10 @@ import { std_fs, std_path } from "../../deps/cli.ts"; import type { EnvRecipeX } from "./types.ts"; -import getLogger from "../../utils/logger.ts"; import { $, Path } from "../../utils/mod.ts"; import type { GhjkCtx } from "../types.ts"; import { reduceStrangeProvisions } from "./reducer.ts"; +import { ghjk_fish, ghjk_sh } from "../../install/utils.ts"; +import getLogger from "../../utils/logger.ts"; const logger = getLogger(import.meta); @@ -16,6 +17,7 @@ export async function cookPosixEnv( createShellLoaders?: boolean; }, ) { + logger.debug("cooking env", envName, { envDir }); const reducedRecipe = await reduceStrangeProvisions(gcx, recipe); await $.removeIfExists(envDir); // create the shims for the user's environment @@ -35,6 +37,8 @@ export async function cookPosixEnv( const vars = { GHJK_ENV: envName, } as Record; + const onEnterHooks = [] as [string, string[]][]; + const onExitHooks = [] as [string, string[]][]; // FIXME: detect shim conflicts // FIXME: better support for multi installs @@ -59,6 +63,12 @@ export async function cookPosixEnv( } vars[item.key] = item.val; break; + case "hook.onEnter.posixExec": + onEnterHooks.push([item.program, item.arguments]); + break; + case "hook.onExit.posixExec": + onExitHooks.push([item.program, item.arguments]); + break; default: throw Error(`unsupported provision type: ${(item as any).provision}`); } @@ -81,8 +91,6 @@ export async function cookPosixEnv( ), $.path(envDir).join("recipe.json").writeJsonPretty(reducedRecipe), ]); - // write loader for the env vars mandated by the installs - logger.debug("adding vars to loader", vars); // FIXME: prevent malicious env manipulations let LD_LIBRARY_ENV: string; switch (Deno.build.os) { @@ -103,10 +111,14 @@ export async function cookPosixEnv( CPLUS_INCLUDE_PATH: `${envDir}/shims/include`, }; if (createShellLoaders) { - await writeLoader( + // write loader for the env vars mandated by the installs + await writeActivators( + gcx, envDir, vars, pathVars, + onEnterHooks, + onExitHooks, ); } return { @@ -151,56 +163,104 @@ async function shimLinkPaths( throw error; } } - await shimPath.createSymlinkTo(filePath, { kind: "absolute" }); + await shimPath.symlinkTo(filePath, { kind: 
"absolute" }); shims[fileName] = shimPath.toString(); } return shims; } -// create the loader scripts -// loader scripts are responsible for exporting -// different environment variables from the ports -// and mainpulating the path strings -async function writeLoader( +/** + * Create the activate scripts. + * + * Activate scripts are responsible for: + * - exporting different environment variables from the ports + * - mainpulating the path strings + * - running the environment hooks + */ +async function writeActivators( + gcx: GhjkCtx, envDir: string, env: Record, pathVars: Record, + onEnterHooks: [string, string[]][], + onExitHooks: [string, string[]][], ) { + // ghjk.sh sets the DENO_DIR so we can usually + // assume it's set + const denoDir = Deno.env.get("DENO_DIR") ?? ""; + const ghjkShimName = "__ghjk_shim"; + const onEnterHooksEscaped = onEnterHooks.map( + ([cmd, args]) => + [cmd == "ghjk" ? ghjkShimName : cmd, ...args] + .join(" ").replaceAll("'", "'\\''"), + ); + const onExitHooksEscaped = onExitHooks.map( + ([cmd, args]) => + [cmd == "ghjk" ? ghjkShimName : cmd, ...args] + .join(" ").replaceAll("'", "'\\''"), + ); const activate = { + // + // posix shell version posix: [ `if [ -n "$\{GHJK_CLEANUP_POSIX+x}" ]; then eval "$GHJK_CLEANUP_POSIX" fi`, `export GHJK_CLEANUP_POSIX="";`, - ...Object.entries(env).map(([k, v]) => + "\n# env vars", + ...Object.entries(env).map(([key, val]) => // NOTE: single quote the port supplied envs to avoid any embedded expansion/execution - `GHJK_CLEANUP_POSIX=$GHJK_CLEANUP_POSIX"export ${k}='$${k}';"; -export ${k}='${v}';` + `GHJK_CLEANUP_POSIX=$GHJK_CLEANUP_POSIX"export ${key}='$${key}';"; +export ${key}='${val}';` ), - ...Object.entries(pathVars).map(([k, v]) => + "\n# path vars", + ...Object.entries(pathVars).map(([key, val]) => // NOTE: double quote the path vars for expansion // single quote GHJK_CLEANUP additions to avoid expansion/exec before eval - `GHJK_CLEANUP_POSIX=$GHJK_CLEANUP_POSIX'${k}=$(echo "$${k}" | tr ":" "\\n" | grep -vE "^${envDir}" | tr "\\n" ":");${k}="\${${k}%:}";'; -export ${k}="${v}:$${k}"; + `GHJK_CLEANUP_POSIX=$GHJK_CLEANUP_POSIX'${key}=$(echo "$${key}" | tr ":" "\\n" | grep -vE "^${val}" | tr "\\n" ":");${key}="\${${key}%:}";'; +export ${key}="${val}:$${key}"; ` ), + "\n# hooks that want to invoke ghjk are made to rely", + "# on this shim instead improving latency", + ghjk_sh(gcx, denoDir, ghjkShimName), + "\n# on enter hooks", + ...onEnterHooksEscaped, + "\n# on exit hooks", + ...onExitHooksEscaped.map( + (command) => `GHJK_CLEANUP_POSIX=$GHJK_CLEANUP_POSIX'${command};';`, + ), ].join("\n"), + // + // fish version fish: [ `if set --query GHJK_CLEANUP_FISH eval $GHJK_CLEANUP_FISH set --erase GHJK_CLEANUP_FISH end`, - ...Object.entries(env).map(([k, v]) => - `set --global --append GHJK_CLEANUP_FISH "set --global --export ${k} '$${k}';"; -set --global --export ${k} '${v}';` + "\n# env vars", + ...Object.entries(env).map(([key, val]) => + `set --global --append GHJK_CLEANUP_FISH "set --global --export ${key} '$${key}';"; +set --global --export ${key} '${val}';` ), - ...Object.entries(pathVars).map(([k, v]) => - `set --global --append GHJK_CLEANUP_FISH 'set --global --export --path ${k} (string match --invert --regex "^${envDir}" $${k});'; -set --global --export --prepend ${k} ${v}; + "\n# path vars", + ...Object.entries(pathVars).map(([key, val]) => + `set --global --append GHJK_CLEANUP_FISH 'set --global --export --path ${key} (string match --invert --regex "^${val}" $${key});'; +set --global --export --prepend ${key} ${val}; 
` ), + "\n# hooks that want to invoke ghjk are made to rely", + "# on this shim instead improving latency", + ghjk_fish(gcx, denoDir, ghjkShimName), + "\n# on enter hooks", + ...onEnterHooksEscaped, + "\n# on exit hooks", + ...onExitHooksEscaped.map( + (command) => `set --global --append GHJK_CLEANUP_FISH '${command};';`, + ), ].join("\n"), }; + const envPathR = await $.path(envDir).ensureDir(); await Promise.all([ envPathR.join(`activate.fish`).writeText(activate.fish), diff --git a/modules/envs/reducer.ts b/modules/envs/reducer.ts index 64fe228d..864e9ef1 100644 --- a/modules/envs/reducer.ts +++ b/modules/envs/reducer.ts @@ -31,6 +31,11 @@ export function getProvisionReducerStore( return store; } +/** + * Looks at each provision in the recipe and if it's not a type of + * {@link WellKnownProvision}, looks for reducers in + * {@link ProvisionReducer} to convert it to one. + */ export async function reduceStrangeProvisions( gcx: GhjkCtx, env: EnvRecipeX, diff --git a/modules/envs/types.ts b/modules/envs/types.ts index 5850483d..90194fd0 100644 --- a/modules/envs/types.ts +++ b/modules/envs/types.ts @@ -10,11 +10,17 @@ const posixFileProvisionTypes = [ "posix.headerFile", ] as const; +export const hookProvisionTypes = [ + "hook.onEnter.posixExec", + "hook.onExit.posixExec", +] as const; + // we separate the posix file types in a separate // array in the interest of type inference export const wellKnownProvisionTypes = [ "posix.envVar", ...posixFileProvisionTypes, + ...hookProvisionTypes, ] as const; const wellKnownProvision = zod.discriminatedUnion( @@ -25,6 +31,13 @@ const wellKnownProvision = zod.discriminatedUnion( key: zod.string(), val: zod.string(), }), + ...hookProvisionTypes.map((ty) => + zod.object({ + ty: zod.literal(ty), + program: zod.string(), + arguments: zod.string().array(), + }) + ), ...posixFileProvisionTypes.map((ty) => zod.object({ ty: zod.literal(ty), absolutePath }) ), diff --git a/modules/ports/db.ts b/modules/ports/db.ts index 54934e68..1a9678a5 100644 --- a/modules/ports/db.ts +++ b/modules/ports/db.ts @@ -2,12 +2,16 @@ /// import { zod } from "../../deps/common.ts"; +// import type { PathRef } from "../../utils/mod.ts"; +// import { $ } from "../../utils/mod.ts"; import validators from "./types.ts"; +// import getLogger from "../../utils/logger.ts"; // const logger = getLogger(import.meta); // NOTE: make sure any changes to here are backwards compatible const installRowValidator = zod.object({ + // version: zod.string(), installId: zod.string(), conf: validators.installConfigLite, manifest: validators.portManifest, @@ -16,7 +20,10 @@ const installRowValidator = zod.object({ progress: zod.enum(["downloaded", "installed"]), }).passthrough(); -export type InstallRow = zod.infer; +type InstallRowVersioned = zod.infer; +// FIXME: this breaks typescript +// export type InstallRow = Omit; +export type InstallRow = InstallRowVersioned; export abstract class InstallsDb { abstract all(): Promise; @@ -63,10 +70,10 @@ class DenoKvInstallsDb extends InstallsDb { } } -// TODO: implement me -/* +/* // TODO: implement me + class InlineInstallsDb extends InstallsDb { - #map = new Map(); + #map = new Map(); #dbDir: PathRef; constructor( dbDir: string, @@ -80,17 +87,26 @@ class InlineInstallsDb extends InstallsDb { async get(id: string): Promise { let row = this.#map.get(id); if (!row) { - const res = installRowValidator.safeParse( - await this.#dbDir.join(`${id}.meta`).readMaybeJson(), - ); - if (!res.success) { - logger.warn() + const raw = await 
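// A hypothetical provision of the new hook flavour as the envs schema above
// accepts it; a `program` of "ghjk" is later swapped for the `__ghjk_shim`
// function by the activate-script writer to keep hook latency down:
const onEnterHook = {
  ty: "hook.onEnter.posixExec",
  program: "ghjk",
  arguments: ["x", "build"], // "build" is a made-up task name
};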
this.#dbDir.join(`${id}.meta`).readMaybeText(); + if (raw) { + try { + const rawParsed = installRowValidator.parse(JSON.parse(raw)); + if (rowParsed.version != "0") { + throw new Error(`unexpected version string: ${rowParsed.version}`); + } + row = rowParsed; + this.#map.set(id, row); + } catch (err) { + logger.warn(`error parsing install meta for "${id}"`, err); + } } } return row; } set(id: string, row: InstallRow): Promise { - this.#map.set(id, row); + const versioned = { ...row, version: "0" }; + await this.#dbDir.join(`${id}.meta`).writeJsonPretty(versioned); + this.#map.set(id, versioned); throw new Error("Method not implemented."); } delete(id: string): Promise { @@ -100,4 +116,4 @@ class InlineInstallsDb extends InstallsDb { [Symbol.dispose](): void { throw new Error("Method not implemented."); } -}*/ +} */ diff --git a/modules/ports/sync.ts b/modules/ports/sync.ts index 3a7dda6c..a54ad42d 100644 --- a/modules/ports/sync.ts +++ b/modules/ports/sync.ts @@ -1,4 +1,4 @@ -import { deep_eql, jsonHash, std_fs, std_path, zod } from "../../deps/cli.ts"; +import { deep_eql, std_fs, std_path, zod } from "../../deps/cli.ts"; import getLogger from "../../utils/logger.ts"; import validators from "./types.ts"; import type { @@ -22,7 +22,7 @@ import { DePromisify, getInstallHash, getPortRef, - objectHashHex, + objectHash, type Rc, rc, sameFsTmpRoot, @@ -119,9 +119,9 @@ export async function installFromGraph( dir: tmpPath, prefix: `shims_${installId}_`, }); - for ( - const [depInstallId, depPortName] of graph.depEdges[installId] ?? [] - ) { + await Promise.all((graph.depEdges[installId] ?? []).map(async ( + [depInstallId, depPortName], + ) => { const depArts = installCtx.artifacts.get(depInstallId); if (!depArts) { throw new Error( @@ -153,7 +153,7 @@ export async function installFromGraph( ), env: depArts.env, }; - } + })); return { totalDepArts, depShimsRootPath }; }, @@ -341,7 +341,7 @@ export async function buildInstallGraph( manifest, instLite, ); - const instId = await getInstallHash(resolvedConfig); + const instId = getInstallHash(resolvedConfig); // no dupes allowed in user specified insts if (graph.user.includes(instId)) { @@ -399,7 +399,7 @@ export async function buildInstallGraph( const depInstall = validators.installConfigResolved.parse( inst.config.buildDepConfigs![depId.name], ); - const depInstallId = await getInstallHash(depInstall); + const depInstallId = getInstallHash(depInstall); // only add the install configuration for this dep port // if specific hash hasn't seen before @@ -458,13 +458,13 @@ export async function buildInstallGraph( // This takes user specified InstallConfigs and resolves // their versions to a known, installable version // It also resolves any dependencies that the config specifies -async function resolveConfig( +function resolveConfig( scx: SyncCtx, set: InstallSetX, manifest: PortManifestX, config: InstallConfigLiteX, ) { - const hash = await objectHashHex(config as jsonHash.Tree); + const hash = objectHash(JSON.parse(JSON.stringify(config))); let promise = scx.memoStore.get(hash); if (!promise) { promise = inner(); @@ -620,7 +620,7 @@ async function resolveAndInstall( configLite: InstallConfigLiteX, ) { const config = await resolveConfig(scx, set, manifest, configLite); - const installId = await getInstallHash(config); + const installId = getInstallHash(config); const cached = await scx.db.val.get(installId); // we skip it if it's already installed @@ -717,45 +717,48 @@ async function getShimmedDepArts( installs: [string, string][], ) { const 
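// Aside (not from the patch): the JSON round-trip paired with the synchronous
// objectHash presumably normalizes non-JSON values first — e.g. the Date
// fields on a Deno.FileInfo — so the hash input is a plain tree, much like
// fileDigests does for lstat results:
const stat = await $.path("ghjk.ts").lstat();
const plain = JSON.parse(JSON.stringify(stat)); // Dates become ISO strings, undefined is dropped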
totalDepArts: DepArts = {}; - for ( - const [installId, portName] of installs - ) { - const installRow = await scx.db.val.get(installId); - if (!installRow || !installRow.installArts) { - throw new Error( - `artifacts not found for "${installId}" not found in db when shimming totalDepArts`, - { - cause: { installs }, + await Promise.all( + installs + .map( + async ([installId, portName]) => { + const installRow = await scx.db.val.get(installId); + if (!installRow || !installRow.installArts) { + throw new Error( + `artifacts not found for "${installId}" not found in db when shimming totalDepArts`, + { + cause: { installs }, + }, + ); + } + const installArts = installRow.installArts; + const shimDir = $.path(shimsRootPath).resolve(installId); + const [binShimDir, libShimDir, includeShimDir] = (await Promise.all([ + shimDir.join("bin").ensureDir(), + shimDir.join("lib").ensureDir(), + shimDir.join("include").ensureDir(), + ])).map($.pathToString); + + totalDepArts[portName] = { + execs: await shimLinkPaths( + installArts.binPaths, + installArts.installPath, + binShimDir, + ), + libs: await shimLinkPaths( + installArts.libPaths, + installArts.installPath, + libShimDir, + ), + includes: await shimLinkPaths( + installArts.includePaths, + installArts.installPath, + includeShimDir, + ), + env: installArts.env, + }; }, - ); - } - const installArts = installRow.installArts; - const shimDir = $.path(shimsRootPath).resolve(installId); - const [binShimDir, libShimDir, includeShimDir] = (await Promise.all([ - shimDir.join("bin").ensureDir(), - shimDir.join("lib").ensureDir(), - shimDir.join("include").ensureDir(), - ])).map($.pathToString); - - totalDepArts[portName] = { - execs: await shimLinkPaths( - installArts.binPaths, - installArts.installPath, - binShimDir, ), - libs: await shimLinkPaths( - installArts.libPaths, - installArts.installPath, - libShimDir, - ), - includes: await shimLinkPaths( - installArts.includePaths, - installArts.installPath, - includeShimDir, - ), - env: installArts.env, - }; - } + ); return totalDepArts; } @@ -797,7 +800,7 @@ async function shimLinkPaths( throw error; } } - await $.path(shimPath).createSymlinkTo(filePath, { type: "file" }); + await $.path(shimPath).symlinkTo(filePath, { type: "file" }); shims[fileName] = shimPath; } return shims; diff --git a/modules/ports/types.ts b/modules/ports/types.ts index 69f2c0dc..368cae6a 100644 --- a/modules/ports/types.ts +++ b/modules/ports/types.ts @@ -144,12 +144,6 @@ const allowedPortDep = zod.object({ defaultInst: installConfigLite, }); -const portsModuleSecureConfig = zod.object({ - masterPortDepAllowList: zod.array(allowedPortDep).nullish(), - defaultEnv: zod.string().nullish(), - defaultBaseEnv: zod.string().nullish(), -}); - const allowDepSet = zod.record(zod.string(), allowedPortDep); const allowDepSetHashed = zod.record(zod.string(), zod.string()); @@ -219,7 +213,6 @@ const validators = { installConfig, installConfigResolved, portManifest, - portsModuleSecureConfig, portsModuleConfig, portsModuleConfigHashed, allowedPortDep, @@ -342,18 +335,6 @@ export type InstallSetRefProvisionX = zod.infer< export type AllowedPortDep = zod.input; export type AllowedPortDepX = zod.infer; -/* - * This is a secure sections of the config intended to be direct exports - * from the config script instead of the global variable approach the - * main [`GhjkConfig`] can take. 
- */ -export type PortsModuleSecureConfig = zod.input< - typeof validators.portsModuleSecureConfig ->; -export type PortsModuleSecureConfigX = zod.input< - typeof validators.portsModuleSecureConfig ->; - export type InstallSet = zod.input; export type InstallSetX = zod.infer< typeof validators.installSet diff --git a/modules/tasks/deno.ts b/modules/tasks/deno.ts index 8a828804..e3c0003d 100644 --- a/modules/tasks/deno.ts +++ b/modules/tasks/deno.ts @@ -31,7 +31,7 @@ export type DriverResponse = { }; export type ExecTaskArgs = { - name: string; + key: string; argv: string[]; workingDir: string; envVars: Record; @@ -106,12 +106,12 @@ async function rpc(moduleUri: string, req: DriverRequests) { } export async function execTaskDeno( - configUri: string, + moduleUri: string, args: ExecTaskArgs, ) { - const resp = await rpc(configUri, { + const resp = await rpc(moduleUri, { ty: "exec", - uri: configUri, + uri: moduleUri, args, }); if (resp.ty != "exec") { diff --git a/modules/tasks/exec.ts b/modules/tasks/exec.ts index 5f7ada62..17374495 100644 --- a/modules/tasks/exec.ts +++ b/modules/tasks/exec.ts @@ -1,4 +1,3 @@ -import { std_path } from "../../deps/cli.ts"; import { $, DePromisify } from "../../utils/mod.ts"; import type { TaskDefHashedX, TasksModuleConfigX } from "./types.ts"; @@ -14,7 +13,7 @@ export type TaskGraph = DePromisify>; export function buildTaskGraph( _gcx: GhjkCtx, - portsConfig: TasksModuleConfigX, + tasksConfig: TasksModuleConfigX, // env: Blackboard, ) { const graph = { @@ -24,25 +23,25 @@ export function buildTaskGraph( // edges from dependent to dependency depEdges: {} as Record, }; - for (const [name, task] of Object.entries(portsConfig.tasks)) { - if (!portsConfig.envs[task.envHash]) { + for (const [hash, task] of Object.entries(tasksConfig.tasks)) { + if (!tasksConfig.envs[task.envHash]) { throw new Error( - `unable to find env referenced by task "${name}" under hash "${task.envHash}"`, + `unable to find env referenced by task "${hash}" under hash "${task.envHash}"`, ); } if (!task.dependsOn || task.dependsOn.length == 0) { - graph.indie.push(name); + graph.indie.push(hash); } else { - for (const depTaskName of task.dependsOn) { + for (const depTaskHash of task.dependsOn) { const testCycle = ( name: string, - depName: string, + depHash: string, ): TaskDefHashedX | undefined => { - const depTask = portsConfig.tasks[depName]; + const depTask = tasksConfig.tasks[depHash]; if (!depTask) { throw new Error(`specified dependency task doesn't exist`, { cause: { - depTaskName, + depHash, task, }, }); @@ -55,7 +54,7 @@ export function buildTaskGraph( } }; - const cycleSource = testCycle(name, depTaskName); + const cycleSource = testCycle(hash, depTaskHash); if ( cycleSource ) { @@ -69,12 +68,14 @@ export function buildTaskGraph( }, ); } - graph.revDepEdges[depTaskName] = [ - ...graph.revDepEdges[depTaskName] ?? 
[], - name, - ]; + const revDepSet = graph.revDepEdges[depTaskHash]; + if (revDepSet) { + revDepSet.push(hash); + } else { + graph.revDepEdges[depTaskHash] = [hash]; + } } - graph.depEdges[name] = task.dependsOn; + graph.depEdges[hash] = task.dependsOn; } } return graph; @@ -84,17 +85,17 @@ export async function execTask( gcx: GhjkCtx, tasksConfig: TasksModuleConfigX, taskGraph: TaskGraph, - targetName: string, + targetKey: string, args: string[], // taskEnv: TaskEnvX, // installGraph: InstallGraph, ): Promise { - let workSet = new Set([targetName]); + let workSet = new Set([targetKey]); { - const stack = [targetName]; + const stack = [targetKey]; while (stack.length > 0) { - const taskName = stack.pop()!; - const taskDef = tasksConfig.tasks[taskName]; + const taskHash = stack.pop()!; + const taskDef = tasksConfig.tasks[taskHash]; stack.push(...taskDef.dependsOn ?? []); workSet = new Set([...workSet.keys(), ...taskDef.dependsOn ?? []]); } @@ -102,58 +103,80 @@ export async function execTask( const pendingDepEdges = new Map( Object.entries(taskGraph.depEdges).map(([key, val]) => [key, val!]), ); - const pendingTasks = taskGraph.indie.filter((name) => workSet.has(name)); + const pendingTasks = taskGraph.indie.filter((hash) => workSet.has(hash)); if (pendingTasks.length == 0) { throw new Error("something went wrong, task graph starting set is empty"); } while (pendingTasks.length > 0) { - const taskName = pendingTasks.pop()!; - const taskDef = tasksConfig.tasks[taskName]; + const taskKey = pendingTasks.pop()!; + const taskDef = tasksConfig.tasks[taskKey]; const taskEnvDir = await Deno.makeTempDir({ - prefix: `ghjkTaskEnv_${taskName}_`, + prefix: `ghjkTaskEnv_${taskKey}_`, }); const { env: installEnvs } = await cookPosixEnv( { gcx, recipe: tasksConfig.envs[taskDef.envHash], - envName: `taskEnv_${taskName}`, + envName: `taskEnv_${taskKey}`, envDir: taskEnvDir, }, ); - logger.info("executing", taskName, args); - await execTaskDeno( - std_path.toFileUrl(gcx.ghjkfilePath).href, - { - name: taskName, - argv: args, - envVars: { - ...Deno.env.toObject(), - ...Object.fromEntries( - Object.entries(installEnvs).map( - ( - [key, val], - ) => [ - key, - key.match(/PATH/i) ? `${val}:${Deno.env.get(key) ?? ""}` : val, - ], - ), - ), - }, - workingDir: std_path.dirname(gcx.ghjkfilePath), - }, + logger.info( + "executing", + taskKey, + args, ); + + const envVars = { + ...Deno.env.toObject(), + ...Object.fromEntries( + Object.entries(installEnvs).map( + ( + [key, val], + ) => [ + key, + key.match(/PATH/i) ? `${val}:${Deno.env.get(key) ?? ""}` : val, + ], + ), + ), + }; + if (taskDef.ty == "denoFile@v1") { + if (!gcx.ghjkfilePath) { + throw new Error( + "denoFile task found but no ghjkfile. This occurs when ghjk is working just on a lockfile alone", + ); + } + await execTaskDeno( + $.path(gcx.ghjkfilePath).toFileUrl().toString(), + { + key: taskDef.key, + argv: args, + envVars, + workingDir: gcx.ghjkfilePath.parentOrThrow().toString(), + }, + ); + } else { + throw new Error( + `unsupported task type "${taskDef.ty}"`, + { + cause: { + taskDef, + }, + }, + ); + } $.removeIfExists(taskEnvDir); - workSet.delete(taskName); - const dependentTasks = (taskGraph.revDepEdges[taskName] ?? []) + workSet.delete(taskKey); + const dependentTasks = (taskGraph.revDepEdges[taskKey] ?? 
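// Illustrative aside: the pending-deps bookkeeping a few lines below uses a
// "swap remove" — pop the last element and write it over the removed slot —
// so deleting a finished dependency stays O(1):
const deps = ["ed", "edd", "eddy"];
const idx = deps.indexOf("ed");
const last = deps.pop()!;
if (deps.length > idx) deps[idx] = last; // → ["eddy", "edd"]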
[]) .filter((name) => workSet.has(name)); const readyTasks = []; for (const parentId of dependentTasks) { const parentDeps = pendingDepEdges.get(parentId)!; // swap remove from parent pending deps list - const idx = parentDeps.indexOf(taskName); + const idx = parentDeps.indexOf(taskKey); const last = parentDeps.pop()!; if (parentDeps.length > idx) { parentDeps[idx] = last; diff --git a/modules/tasks/mod.ts b/modules/tasks/mod.ts index b34ffdda..fd642bd5 100644 --- a/modules/tasks/mod.ts +++ b/modules/tasks/mod.ts @@ -50,24 +50,28 @@ export class TasksModule extends ModuleBase { gcx: GhjkCtx, tcx: TasksCtx, ) { + const namedSet = new Set(tcx.config.tasksNamed); const commands = Object.entries(tcx.config.tasks).map( - ([name, task]) => { - const cliffyCmd = new cliffy_cmd.Command() - .name(name) - .useRawArgs() + ([key, def]) => { + const cmd = new cliffy_cmd.Command() + .name(key) + .arguments("[argv...]") .action(async (_, ...args) => { await execTask( gcx, tcx.config, tcx.taskGraph, - name, + key, args, ); }); - if (task.desc) { - cliffyCmd.description(task.desc); + if (def.desc) { + cmd.description(def.desc); } - return cliffyCmd; + if (!namedSet.has(key)) { + cmd.hidden(); + } + return cmd; }, ); const root = new cliffy_cmd.Command() diff --git a/modules/tasks/types.ts b/modules/tasks/types.ts index 914e8bc6..eefd0519 100644 --- a/modules/tasks/types.ts +++ b/modules/tasks/types.ts @@ -6,28 +6,65 @@ import envsValidators from "../envs/types.ts"; const taskName = zod.string().regex(/[^\s]/); const taskDefBase = zod.object({ - name: zod.string(), - dependsOn: taskName.array().nullish(), + ty: zod.string(), desc: zod.string().nullish(), workingDir: zod.string().nullish(), + dependsOn: zod.string().array().nullish(), }); -const taskDef = taskDefBase.merge(zod.object({ +const taskDefFullBase = taskDefBase.merge(zod.object({ env: envsValidators.envRecipe, })); -const taskDefHashed = taskDefBase.merge(zod.object({ +const taskDefHashedBase = taskDefBase.merge(zod.object({ envHash: zod.string(), })); +const denoWorkerTaskDefBase = zod.object({ + ty: zod.literal("denoFile@v1"), + /** + * A single module might host multiple tasks so we need keys to identify + * each with. Names aren't enough since some tasks are anonymous. + */ + // This field primarily exists as an optimization actually. + // The tasksModuleConfig keys the tasks by their hash + // but we use a separate key when asking for exec from the denoFile. + // This is because the denoFile only constructs the hashes for the config + // laziliy but uses separate task keys internally due to different hashing concerns. + // This key will correspond to the internal keys used by the denoFile + // and not the config. + key: zod.string(), +}); + +const denoWorkerTaskDef = taskDefFullBase.merge(denoWorkerTaskDefBase); +const denoWorkerTaskDefHashed = taskDefHashedBase.merge(denoWorkerTaskDefBase); + +const taskDef = + // zod.discriminatedUnion("ty", [ + denoWorkerTaskDef; +// ]); + +const taskDefHashed = + // zod.discriminatedUnion("ty", [ + denoWorkerTaskDefHashed; +// ]); + const tasksModuleConfig = zod.object({ envs: zod.record(zod.string(), envsValidators.envRecipe), - tasks: zod.record(taskName, taskDefHashed), + /** + * Tasks can be keyed with any old string. The keys + * that also appear in {@field tasksNamed} will shown + * in the CLI. 
+ */ + tasks: zod.record(zod.string(), taskDefHashed), + tasksNamed: taskName.array(), }); const validators = { taskDef, taskDefHashed, + denoWorkerTaskDefHashed, + denoWorkerTaskDef, tasksModuleConfig, }; export default validators; diff --git a/modules/types.ts b/modules/types.ts index e35b0606..03a6752a 100644 --- a/modules/types.ts +++ b/modules/types.ts @@ -1,4 +1,5 @@ import { zod } from "../deps/common.ts"; +import type { Path } from "../utils/mod.ts"; // TODO: better module ident/versioning const moduleId = zod.string().regex(/[^ @]*/); @@ -11,9 +12,9 @@ const moduleManifest = zod.object({ export type ModuleId = zod.infer; export type ModuleManifest = zod.infer; export type GhjkCtx = { - ghjkfilePath: string; - ghjkDir: string; - ghjkShareDir: string; + ghjkfilePath?: Path; + ghjkDir: Path; + ghjkShareDir: Path; blackboard: Map; }; diff --git a/ports/mold.ts b/ports/mold.ts index cc32e3da..3d0f7ce9 100644 --- a/ports/mold.ts +++ b/ports/mold.ts @@ -92,7 +92,7 @@ export class Port extends GithubReleasePort { ); if ((args.config as unknown as MoldInstallConfig).replaceLd) { await installPath.join("bin", "ld") - .createSymlinkTo(installPath.join("bin", "mold").toString(), { + .symlinkTo(installPath.join("bin", "mold").toString(), { kind: "relative", }); } diff --git a/ports/npmi.ts b/ports/npmi.ts index 835f191a..b05ba128 100644 --- a/ports/npmi.ts +++ b/ports/npmi.ts @@ -141,7 +141,7 @@ export class Port extends PortBase { await tmpDirPath.join("bin").ensureDir(); for (const [name] of bins) { await tmpDirPath.join("bin", name) - .createSymlinkTo( + .symlinkTo( installPath .join("node_modules", ".bin", name) .toString(), diff --git a/ports/pipi.ts b/ports/pipi.ts index 1e3c05bc..3079aa09 100644 --- a/ports/pipi.ts +++ b/ports/pipi.ts @@ -146,7 +146,7 @@ export class Port extends PortBase { // the cpy_bs port smuggles out the real path of it's python executable const realPyExecPath = args.depArts[std_ports.cpy_bs_ghrel.name].env.REAL_PYTHON_EXEC_PATH; - (await venvPath.join("bin", "python3").remove()).createSymlinkTo( + (await venvPath.join("bin", "python3").remove()).symlinkTo( realPyExecPath, ); diff --git a/tests/envHooks.ts b/tests/envHooks.ts new file mode 100644 index 00000000..860790cd --- /dev/null +++ b/tests/envHooks.ts @@ -0,0 +1,157 @@ +import "../setup_logger.ts"; +import { E2eTestCase, harness } from "./utils.ts"; + +const posixInteractiveScript = ` +set -eux +export GHJK_WD=$PWD + +# hook creates a marker file +[ $(cat "$GHJK_WD/marker") = 'remark' ] || exit 101 + +pushd ../ +# marker should be gone by now +[ ! 
-e "$GHJK_WD/marker" ] || exit 102 + +# cd back in +popd + +# marker should be avail now +[ $(cat $GHJK_WD/marker) = 'remark' ] || exit 103 +`; + +const bashInteractiveScript = [ + // simulate interactive mode by evaluating the prompt + // before each line + ` +eval_PROMPT_COMMAND() { + local prompt_command + for prompt_command in "\${PROMPT_COMMAND[@]}"; do + eval "$prompt_command" + done +} +`, + ...posixInteractiveScript + .split("\n").map((line) => + `eval_PROMPT_COMMAND +${line} +` + ), +] + .join("\n"); + +const zshInteractiveScript = [ + // simulate interactive mode by evaluating precmd + // before each line + ...posixInteractiveScript + .split("\n").map((line) => + `precmd +${line} +` + ), +] + .join("\n"); + +const posixNonInteractiveScript = ` +set -eux + +export GHJK_WD=$PWD + +# test that ghjk_reload is avail because BASH_ENV exposed by the suite +ghjk_reload + +# hook creates a marker file +[ $(cat "$GHJK_WD/marker") = 'remark' ] || exit 101 + +pushd ../ +# no reload so it's stil avail +[ $(cat "$GHJK_WD/marker") = 'remark' ] || exit 102 + +ghjk_reload +# marker should be gone by now +[ ! -e "$GHJK_WD/marker" ] || exit 103 + +# cd back in +popd + +# not avail yet +[ ! -e "$GHJK_WD/marker" ] || exit 104 + +ghjk_reload +# now it should be avail +[ $(cat "$GHJK_WD/marker") = 'remark' ] || exit 105 +`; + +const fishScript = ` +set fish_trace 1 +export GHJK_WD=$PWD + +# hook creates a marker file +test (cat "$GHJK_WD/marker") = 'remark'; or exit 101 + +pushd ../ +# marker should be gone by now +not test -e "$GHJK_WD/marker"; or exit 102 + +# cd back in +popd + +# marker should be avail now +test (cat $GHJK_WD/marker) = 'remark'; or exit 103 +`; + +type CustomE2eTestCase = Omit & { + ePoint: string; + stdin: string; +}; +const cases: CustomE2eTestCase[] = [ + { + name: "bash_interactive", + // -s: read from stdin + // -l: login/interactive mode + ePoint: `bash -sl`, + stdin: bashInteractiveScript, + }, + { + name: "bash_scripting", + ePoint: `bash -s`, + stdin: posixNonInteractiveScript, + }, + { + name: "zsh_interactive", + ePoint: `zsh -sl`, + stdin: zshInteractiveScript, + }, + { + name: "zsh_scripting", + ePoint: `zsh -s`, + stdin: posixNonInteractiveScript, + }, + { + name: "fish_interactive", + ePoint: `fish -l`, + stdin: fishScript, + }, + { + name: "fish_scripting", + ePoint: `fish`, + // the fish implementation triggers changes + // on any pwd changes so it's identical to + // interactive usage + stdin: fishScript, + }, +]; + +harness(cases.map((testCase) => ({ + ...testCase, + tsGhjkfileStr: ` +export { ghjk } from "$ghjk/mod.ts"; +import { task, env } from "$ghjk/mod.ts"; + +env("main") + .onEnter(task($ => $\`/bin/sh -c 'echo remark > marker'\`)) + .onExit(task($ => $\`/bin/sh -c 'rm marker'\`)) +`, + ePoints: [{ cmd: testCase.ePoint, stdin: testCase.stdin }], + name: `envHooks/${testCase.name}`, + timeout_ms: 5 * 60 * 1000, +}))); diff --git a/tests/envs.ts b/tests/envs.ts index f4736b34..9c7882a9 100644 --- a/tests/envs.ts +++ b/tests/envs.ts @@ -1,18 +1,29 @@ import "../setup_logger.ts"; import { - dockerE2eTest, E2eTestCase, type EnvDefArgs, genTsGhjkFile, - localE2eTest, + harness, } from "./utils.ts"; +import { stdSecureConfig } from "../mod.ts"; import dummy from "../ports/dummy.ts"; +import type { DenoFileSecureConfig } from "../mod.ts"; -type CustomE2eTestCase = Omit & { - ePoint: string; - stdin: string; - envs: EnvDefArgs[]; -}; +type CustomE2eTestCase = + & Omit + & { + ePoint: string; + stdin: string; + } + & ( + | { + envs: EnvDefArgs[]; + secureConfig?: 
DenoFileSecureConfig; + } + | { + ghjkTs: string; + } + ); const envVarTestEnvs: EnvDefArgs[] = [ { @@ -39,6 +50,7 @@ const envVarTestsPosix = ` set -ex # by default, we should be in main [ "$SONG" = "ditto" ] || exit 101 +[ "$GHJK_ENV" = "main" ] || exit 1011 ghjk envs cook sss . .ghjk/envs/sss/activate.sh @@ -46,21 +58,26 @@ ghjk envs cook sss # so they should inherit it's env vars [ "$SONG" = "ditto" ] || exit 102 [ "$SING" = "Seoul Sonyo Sound" ] || exit 103 +[ "$GHJK_ENV" = "sss" ] || exit 1012 # go back to main and "sss" variables shouldn't be around . .ghjk/envs/main/activate.sh [ "$SONG" = "ditto" ] || exit 104 [ "$SING" = "Seoul Sonyo Sound" ] && exit 105 +[ "$GHJK_ENV" = "main" ] || exit 1013 # env base is false for "yuki" and thus no vars from "main" ghjk envs cook yuki . .ghjk/envs/yuki/activate.sh [ "$SONG" = "ditto" ] && exit 102 [ "$HUMM" = "Soul Lady" ] || exit 103 +[ "$GHJK_ENV" = "yuki" ] || exit 1014 `; const envVarTestsFish = ` +set fish_trace 1 # by default, we should be in main test "$SONG" = "ditto"; or exit 101; +test "$GHJK_ENV" = "main"; or exit 1010; ghjk envs cook sss . .ghjk/envs/sss/activate.fish @@ -68,17 +85,20 @@ ghjk envs cook sss # so they should inherit it's env vars test "$SONG" = "ditto"; or exit 103 test "$SING" = "Seoul Sonyo Sound"; or exit 104 +test "$GHJK_ENV" = "sss"; or exit 1011; # go back to main and "sss" variables shouldn't be around . .ghjk/envs/main/activate.fish test $SONG" = "ditto"; or exit 105 test $SING" = "Seoul Sonyo Sound"; and exit 106 +test "$GHJK_ENV" = "main"; or exit 1012; # env base is false for "yuki" and thus no vars from "main" ghjk envs cook yuki . .ghjk/envs/yuki/activate.fish test "$SONG" = "ditto"; and exit 107 test "$HUMM" = "Soul Lady"; or exit 108 +test "$GHJK_ENV" = "yuki"; or exit 1013; `; const installTestEnvs: EnvDefArgs[] = [ @@ -111,6 +131,7 @@ ghjk envs cook foo `; const installTestsFish = ` +set fish_trace 1 # by default, we should be in main test (dummy) = "main"; or exit 101; @@ -159,46 +180,27 @@ const cases: CustomE2eTestCase[] = [ envs: installTestEnvs, stdin: installTestsFish, }, + { + name: "default_env_loader", + ePoint: "fish", + envs: envVarTestEnvs, + secureConfig: stdSecureConfig({ defaultEnv: "yuki" }), + stdin: ` +set fish_trace 1 +# env base is false for "yuki" and thus no vars from "main" +test "$GHJK_ENV" = "yuki"; or exit 106 +test "$SONG" = "ditto"; and exit 107 +test "$HUMM" = "Soul Lady"; or exit 108 +`, + }, ]; -function testMany( - testGroup: string, - cases: CustomE2eTestCase[], - testFn: (inp: E2eTestCase) => Promise, - defaultEnvs: Record = {}, -) { - for (const testCase of cases) { - Deno.test( - `${testGroup} - ${testCase.name}`, - () => - testFn({ - ...testCase, - tsGhjkfileStr: genTsGhjkFile( - { envDefs: testCase.envs }, - ), - ePoints: [{ cmd: testCase.ePoint, stdin: testCase.stdin }], - envVars: { - ...defaultEnvs, - ...testCase.envVars, - }, - }), - ); - } -} - -const e2eType = Deno.env.get("GHJK_TEST_E2E_TYPE"); -if (e2eType == "both") { - testMany("envsDockerE2eTest", cases, dockerE2eTest); - testMany(`envsLocalE2eTest`, cases, localE2eTest); -} else if (e2eType == "local") { - testMany("envsLocalE2eTest", cases, localE2eTest); -} else if ( - e2eType == "docker" || - !e2eType -) { - testMany("envsDockerE2eTest", cases, dockerE2eTest); -} else { - throw new Error( - `unexpected GHJK_TEST_E2E_TYPE: ${e2eType}`, - ); -} +harness(cases.map((testCase) => ({ + ...testCase, + tsGhjkfileStr: "ghjkTs" in testCase ? 
testCase.ghjkTs : genTsGhjkFile( + { envDefs: testCase.envs, secureConf: testCase.secureConfig }, + ), + ePoints: [{ cmd: testCase.ePoint, stdin: testCase.stdin }], + name: `envs/${testCase.name}`, + timeout_ms: 5 * 60 * 1000, +}))); diff --git a/tests/ports.ts b/tests/ports.ts index 40cc5b42..b94f440f 100644 --- a/tests/ports.ts +++ b/tests/ports.ts @@ -1,24 +1,14 @@ import "../setup_logger.ts"; -import { std_async } from "../deps/dev.ts"; -import { stdSecureConfig } from "../mod.ts"; -import { - dockerE2eTest, - E2eTestCase, - genTsGhjkFile, - localE2eTest, -} from "./utils.ts"; +import { DenoFileSecureConfig, stdSecureConfig } from "../mod.ts"; +import { E2eTestCase, genTsGhjkFile, harness } from "./utils.ts"; import * as ports from "../ports/mod.ts"; import dummy from "../ports/dummy.ts"; -import type { - InstallConfigFat, - PortsModuleSecureConfig, -} from "../modules/ports/types.ts"; +import type { InstallConfigFat } from "../modules/ports/types.ts"; type CustomE2eTestCase = Omit & { ePoint: string; installConf: InstallConfigFat | InstallConfigFat[]; - secureConf?: PortsModuleSecureConfig; - ignore?: boolean; + secureConf?: DenoFileSecureConfig; }; // order tests by download size to make failed runs less expensive const cases: CustomE2eTestCase[] = [ @@ -206,33 +196,20 @@ const cases: CustomE2eTestCase[] = [ }, ]; -function testMany( - testGroup: string, - cases: CustomE2eTestCase[], - testFn: (inp: E2eTestCase) => Promise, - defaultEnvs: Record = {}, -) { - for (const testCase of cases) { - Deno.test( - { - name: `${testGroup} - ${testCase.name}`, - ignore: testCase.ignore, - fn: () => - std_async.deadline( - testFn({ - ...testCase, - tsGhjkfileStr: genTsGhjkFile( - { - installConf: testCase.installConf, - secureConf: testCase.secureConf, - taskDefs: [], - }, - ), - ePoints: [ - ...["bash -c", "fish -c", "zsh -c"].map((sh) => ({ - cmd: [...`env ${sh}`.split(" "), `"${testCase.ePoint}"`], - })), - /* // FIXME: better tests for the `InstallDb` +harness(cases.map((testCase) => ({ + ...testCase, + tsGhjkfileStr: genTsGhjkFile( + { + installConf: testCase.installConf, + secureConf: testCase.secureConf, + taskDefs: [], + }, + ), + ePoints: [ + ...["bash -c", "fish -c", "zsh -c"].map((sh) => ({ + cmd: [...`env ${sh}`.split(" "), `"${testCase.ePoint}"`], + })), + /* // FIXME: better tests for the `InstallDb` // installs db means this shouldn't take too long // as it's the second sync { @@ -241,35 +218,10 @@ function testMany( "bash -c 'timeout 1 ghjk envs cook'", ], }, */ - ], - envVars: { - ...defaultEnvs, - ...testCase.envVars, - }, - }), - // building the test docker image might taka a while - // but we don't want some bug spinlocking the ci for - // an hour - 5 * 60 * 1000, - ), - }, - ); - } -} - -const e2eType = Deno.env.get("GHJK_TEST_E2E_TYPE"); -if (e2eType == "both") { - testMany("portsDockerE2eTest", cases, dockerE2eTest); - testMany(`portsLocalE2eTest`, cases, localE2eTest); -} else if (e2eType == "local") { - testMany("portsLocalE2eTest", cases, localE2eTest); -} else if ( - e2eType == "docker" || - !e2eType -) { - testMany("portsDockerE2eTest", cases, dockerE2eTest); -} else { - throw new Error( - `unexpected GHJK_TEST_E2E_TYPE: ${e2eType}`, - ); -} + ], + // building the test docker image might taka a while + // but we don't want some bug spinlocking the ci for + // an hour + timeout_ms: 5 * 60 * 1000, + name: `ports/${testCase.name}`, +}))); diff --git a/tests/hooks.ts b/tests/reloadHooks.ts similarity index 66% rename from tests/hooks.ts rename to tests/reloadHooks.ts index 
120153da..68d22036 100644 --- a/tests/hooks.ts +++ b/tests/reloadHooks.ts @@ -1,10 +1,5 @@ import "../setup_logger.ts"; -import { - dockerE2eTest, - E2eTestCase, - genTsGhjkFile, - localE2eTest, -} from "./utils.ts"; +import { E2eTestCase, genTsGhjkFile, harness } from "./utils.ts"; import dummy from "../ports/dummy.ts"; import type { InstallConfigFat } from "../port.ts"; @@ -97,6 +92,7 @@ dummy `; const fishScript = ` +set fish_trace 1 dummy; or exit 101 test $DUMMY_ENV = "dummy"; or exit 102 @@ -123,34 +119,34 @@ type CustomE2eTestCase = Omit & { }; const cases: CustomE2eTestCase[] = [ { - name: "hook_test_bash_interactive", + name: "bash_interactive", // -s: read from stdin // -l: login/interactive mode ePoint: `bash -sl`, stdin: bashInteractiveScript, }, { - name: "hook_test_bash_scripting", + name: "bash_scripting", ePoint: `bash -s`, stdin: posixNonInteractiveScript, }, { - name: "hook_test_zsh_interactive", + name: "zsh_interactive", ePoint: `zsh -sl`, stdin: zshInteractiveScript, }, { - name: "hook_test_zsh_scripting", + name: "zsh_scripting", ePoint: `zsh -s`, stdin: posixNonInteractiveScript, }, { - name: "hook_test_fish_interactive", + name: "fish_interactive", ePoint: `fish -l`, stdin: fishScript, }, { - name: "hook_test_fish_scripting", + name: "fish_scripting", ePoint: `fish`, // the fish implementation triggers changes // on any pwd changes so it's identical to @@ -159,44 +155,12 @@ const cases: CustomE2eTestCase[] = [ }, ]; -function testMany( - testGroup: string, - cases: CustomE2eTestCase[], - testFn: (inp: E2eTestCase) => Promise, - defaultEnvs: Record = {}, -) { - for (const testCase of cases) { - Deno.test( - `${testGroup} - ${testCase.name}`, - () => - testFn({ - ...testCase, - tsGhjkfileStr: genTsGhjkFile( - { installConf: testCase.installConf ?? dummy(), taskDefs: [] }, - ), - ePoints: [{ cmd: testCase.ePoint, stdin: testCase.stdin }], - envVars: { - ...defaultEnvs, - ...testCase.envVars, - }, - }), - ); - } -} - -const e2eType = Deno.env.get("GHJK_TEST_E2E_TYPE"); -if (e2eType == "both") { - testMany("hooksDockerE2eTest", cases, dockerE2eTest); - testMany(`hooksLocalE2eTest`, cases, localE2eTest); -} else if (e2eType == "local") { - testMany("hooksLocalE2eTest", cases, localE2eTest); -} else if ( - e2eType == "docker" || - !e2eType -) { - testMany("hooksDockerE2eTest", cases, dockerE2eTest); -} else { - throw new Error( - `unexpected GHJK_TEST_E2E_TYPE: ${e2eType}`, - ); -} +harness(cases.map((testCase) => ({ + ...testCase, + tsGhjkfileStr: genTsGhjkFile( + { installConf: testCase.installConf ?? 
dummy(), taskDefs: [] }, + ), + ePoints: [{ cmd: testCase.ePoint, stdin: testCase.stdin }], + name: `reloadHooks/${testCase.name}`, + timeout_ms: 5 * 60 * 1000, +}))); diff --git a/tests/tasks.ts b/tests/tasks.ts index 82ec3a91..cb8cf352 100644 --- a/tests/tasks.ts +++ b/tests/tasks.ts @@ -1,57 +1,59 @@ import "../setup_logger.ts"; -import { - dockerE2eTest, - E2eTestCase, - genTsGhjkFile, - localE2eTest, - type TaskDefArgs, -} from "./utils.ts"; +import { E2eTestCase, genTsGhjkFile, harness, type TaskDef } from "./utils.ts"; import * as ghjk from "../mod.ts"; import * as ports from "../ports/mod.ts"; -import { stdSecureConfig } from "../ghjkfiles/mod.ts"; +import { stdSecureConfig } from "../mod.ts"; -type CustomE2eTestCase = Omit & { - ePoint: string; - stdin: string; - tasks: TaskDefArgs[]; - enableRuntimesOnMasterPDAL?: boolean; -}; +type CustomE2eTestCase = + & Omit + & { + ePoint: string; + stdin: string; + enableRuntimesOnMasterPDAL?: boolean; + } + & ( + | { + tasks: TaskDef[]; + } + | { + ghjkTs: string; + } + ); const cases: CustomE2eTestCase[] = [ { name: "base", tasks: [{ name: "greet", - fn: async ({ $, argv: [name] }) => { - await $`echo Hello ${name}!`; + fn: async ($, { argv: [name], workingDir }) => { + await $`echo Hello ${name} from ${workingDir}!`; }, }], ePoint: `fish`, stdin: ` -cat ghjk.ts -test (ghjk x greet world) = 'Hello world!'`, +test (ghjk x greet world) = "Hello world from $PWD!"`, }, { name: "env_vars", tasks: [{ name: "greet", envVars: { - NAME: "moon", + LUNA: "moon", + SOL: "sun", }, - fn: async ({ $ }) => { - await $`echo Hello $NAME!`; + fn: async ($) => { + await $`echo "Hello $SOL & ${$.env["LUNA"]!}"!`; }, }], ePoint: `fish`, stdin: ` -cat ghjk.ts -test (ghjk x greet world) = 'Hello moon!'`, +test (ghjk x greet world) = 'Hello sun & moon!'`, }, { name: "ports", tasks: [{ name: "protoc", installs: [ports.protoc()], - fn: async ({ $ }) => { + fn: async ($) => { await $`protoc --version`; }, }], @@ -63,10 +65,10 @@ ghjk x protoc`, name: "port_deps", tasks: [{ name: "test", - // node depends on tar_aa + // pipi depends on cpy_bs installs: [...ports.pipi({ packageName: "pre-commit" })], allowedPortDeps: ghjk.stdDeps({ enableRuntimes: true }), - fn: async ({ $ }) => { + fn: async ($) => { await $`pre-commit --version`; }, }], @@ -80,7 +82,7 @@ ghjk x protoc`, name: "test", // node depends on tar_aa installs: [ports.node()], - fn: async ({ $ }) => { + fn: async ($) => { await $`node --version`; }, }], @@ -93,21 +95,21 @@ ghjk x protoc`, { name: "ed", dependsOn: [], - fn: async ({ $ }) => { + fn: async ($) => { await $`/bin/sh -c 'echo ed > ed'`; }, }, { name: "edd", dependsOn: ["ed"], - fn: async ({ $ }) => { + fn: async ($) => { await $`/bin/sh -c 'echo $(/bin/cat ed) edd > edd'`; }, }, { name: "eddy", dependsOn: ["edd"], - fn: async ({ $ }) => { + fn: async ($) => { await $`/bin/sh -c 'echo $(/bin/cat edd) eddy > eddy'`; }, }, @@ -118,51 +120,43 @@ ghjk x eddy test (cat eddy) = 'ed edd eddy' `, }, -]; + { + name: "anon", + ghjkTs: ` +export { ghjk } from "$ghjk/mod.ts"; +import { task } from "$ghjk/mod.ts"; -function testMany( - testGroup: string, - cases: CustomE2eTestCase[], - testFn: (inp: E2eTestCase) => Promise<void>, - defaultEnvs: Record<string, string> = {}, -) { - for (const testCase of cases) { - Deno.test( - `${testGroup} - ${testCase.name}`, - () => - testFn({ - ...testCase, - tsGhjkfileStr: genTsGhjkFile( - { - taskDefs: testCase.tasks, - secureConf: stdSecureConfig({ - enableRuntimes: testCase.enableRuntimesOnMasterPDAL, - }), - }, - ), - ePoints: [{ cmd:
testCase.ePoint, stdin: testCase.stdin }], - envVars: { - ...defaultEnvs, - ...testCase.envVars, - }, - }), - ); - } -} +task({ + dependsOn: [ + task({ + dependsOn: [ + task(($) => $\`/bin/sh -c 'echo ed > ed'\`), + ], + fn: ($) => $\`/bin/sh -c 'echo $(/bin/cat ed) edd > edd'\`, + }), + ], + name: "eddy", + fn: ($) => $\`/bin/sh -c 'echo $(/bin/cat edd) eddy > eddy'\` +}); +`, + ePoint: `fish`, + stdin: ` +ghjk x eddy +test (cat eddy) = 'ed edd eddy' +`, + }, +]; -const e2eType = Deno.env.get("GHJK_TEST_E2E_TYPE"); -if (e2eType == "both") { - testMany("tasksDockerE2eTest", cases, dockerE2eTest); - testMany(`tasksLocalE2eTest`, cases, localE2eTest); -} else if (e2eType == "local") { - testMany("tasksLocalE2eTest", cases, localE2eTest); -} else if ( - e2eType == "docker" || - !e2eType -) { - testMany("tasksDockerE2eTest", cases, dockerE2eTest); -} else { - throw new Error( - `unexpected GHJK_TEST_E2E_TYPE: ${e2eType}`, - ); -} +harness(cases.map((testCase) => ({ + ...testCase, + tsGhjkfileStr: "ghjkTs" in testCase ? testCase.ghjkTs : genTsGhjkFile( + { + taskDefs: testCase.tasks, + secureConf: stdSecureConfig({ + enableRuntimes: testCase.enableRuntimesOnMasterPDAL, + }), + }, + ), + ePoints: [{ cmd: testCase.ePoint, stdin: testCase.stdin }], + name: `tasks/${testCase.name}`, +}))); diff --git a/tests/test.Dockerfile b/tests/test.Dockerfile index 7bec733d..83bef2fe 100644 --- a/tests/test.Dockerfile +++ b/tests/test.Dockerfile @@ -1,4 +1,4 @@ -ARG DENO_VERSION=1.42.1 +ARG DENO_VERSION=1.43.1 FROM denoland/deno:bin-$DENO_VERSION AS deno diff --git a/tests/utils.ts b/tests/utils.ts index 707c32e6..e893dbe3 100644 --- a/tests/utils.ts +++ b/tests/utils.ts @@ -1,19 +1,24 @@ import { defaultInstallArgs, install } from "../install/mod.ts"; import { std_url } from "../deps/dev.ts"; +import { std_async } from "../deps/dev.ts"; import { $, dbg, importRaw } from "../utils/mod.ts"; -import type { - InstallConfigFat, - PortsModuleSecureConfig, -} from "../modules/ports/types.ts"; -import type { EnvDefArgs, TaskDefArgs } from "../mod.ts"; +import type { InstallConfigFat } from "../modules/ports/types.ts"; import logger from "../utils/logger.ts"; -export type { EnvDefArgs, TaskDefArgs } from "../mod.ts"; +import type { + DenoFileSecureConfig, + DenoTaskDefArgs, + EnvDefArgs, +} from "../mod.ts"; +export type { EnvDefArgs } from "../mod.ts"; export type E2eTestCase = { name: string; tsGhjkfileStr: string; envVars?: Record<string, string>; ePoints: { cmd: string | string[]; stdin?: string }[]; + timeout_ms?: number; + ignore?: boolean; + only?: boolean; }; const dockerCmd = (Deno.env.get("DOCKER_CMD") ??
"docker").split(/\s/); @@ -21,10 +26,11 @@ const dFileTemplate = await importRaw(import.meta.resolve("./test.Dockerfile")); const templateStrings = { addConfig: `#{{CMD_ADD_CONFIG}}`, }; +const noRmi = Deno.env.get("DOCKER_NO_RMI"); export async function dockerE2eTest(testCase: E2eTestCase) { const { name, envVars: testEnvs, ePoints, tsGhjkfileStr } = testCase; - const tag = `ghjk_e2e_${name}`; + const tag = `ghjk_e2e_${name}`.toLowerCase(); const env = { ...testEnvs, }; @@ -71,9 +77,11 @@ export async function dockerE2eTest(testCase: E2eTestCase) { throw err; } } - await $ - .raw`${dockerCmd} rmi '${tag}'` - .env(env); + if (!noRmi) { + await $ + .raw`${dockerCmd} rmi '${tag}'` + .env(env); + } } export async function localE2eTest(testCase: E2eTestCase) { @@ -97,8 +105,6 @@ export async function localE2eTest(testCase: E2eTestCase) { ZDOTDIR: ghjkShareDir.toString(), GHJK_SHARE_DIR: ghjkShareDir.toString(), PATH: `${ghjkShareDir.toString()}:${Deno.env.get("PATH")}`, - // shield tests from external envs - GHJK_ENV: "main", HOME: tmpDir.toString(), }; // install ghjk @@ -116,9 +122,11 @@ export async function localE2eTest(testCase: E2eTestCase) { await $`${ghjkShareDir.join("ghjk").toString()} print config` .cwd(tmpDir.toString()) + .clearEnv() .env(env); await $`${ghjkShareDir.join("ghjk").toString()} envs cook` .cwd(tmpDir.toString()) + .clearEnv() .env(env); /* // print the contents of the ghjk dir for debugging purposes @@ -131,7 +139,7 @@ export async function localE2eTest(testCase: E2eTestCase) { { const confHome = await ghjkShareDir.join(".config").ensureDir(); const fishConfDir = await confHome.join("fish").ensureDir(); - await fishConfDir.join("config.fish").createSymlinkTo( + await fishConfDir.join("config.fish").symlinkTo( ghjkShareDir.join("env.fish").toString(), ); env["XDG_CONFIG_HOME"] = confHome.toString(); @@ -149,11 +157,14 @@ export async function localE2eTest(testCase: E2eTestCase) { await tmpDir.remove({ recursive: true }); } +export type TaskDef = + & Omit + & Required>; export function genTsGhjkFile( { installConf, secureConf, taskDefs, envDefs }: { installConf?: InstallConfigFat | InstallConfigFat[]; - secureConf?: PortsModuleSecureConfig; - taskDefs?: TaskDefArgs[]; + secureConf?: DenoFileSecureConfig; + taskDefs?: TaskDef[]; envDefs?: EnvDefArgs[]; }, ) { @@ -221,3 +232,38 @@ ${tasks} ${envs} `; } + +export function harness( + cases: E2eTestCase[], +) { + const e2eType = Deno.env.get("GHJK_TEST_E2E_TYPE"); + let runners = [[dockerE2eTest, "e2eDocker" as string] as const]; + if (e2eType == "both") { + runners.push([localE2eTest, "e2eLocal"]); + } else if (e2eType == "local") { + runners = [[localE2eTest, "e2eLocal"]]; + } else if ( + e2eType && e2eType != "docker" + ) { + throw new Error( + `unexpected GHJK_TEST_E2E_TYPE: ${e2eType}`, + ); + } + for (const [runner, group] of runners) { + for (const testCase of cases) { + Deno.test( + `${group}/${testCase.name}`, + { + ignore: testCase.ignore, + }, + () => + std_async.deadline( + runner({ + ...testCase, + }), + testCase.timeout_ms ?? 
1 * 60 * 1000, + ), + ); + } + } +} diff --git a/utils/logger.ts b/utils/logger.ts index c65b0f4b..e6382011 100644 --- a/utils/logger.ts +++ b/utils/logger.ts @@ -164,7 +164,7 @@ Deno.permissions.query({ } }); -export function isColorfulTty(outFile = Deno.stdout) { +export function isColorfulTty(outFile = Deno.stderr) { if (colorEnvFlagSet) { return true; } diff --git a/utils/mod.ts b/utils/mod.ts index 77fdf4bf..62d782a1 100644 --- a/utils/mod.ts +++ b/utils/mod.ts @@ -1,11 +1,18 @@ import { + _DaxPath as Path, dax, - jsonHash, + json_canonicalize, + multibase32, + multihasher, + multisha2, std_fs, std_path, - std_url, + syncSha256, zod, } from "../deps/common.ts"; +// class re-exports are tricky. We want all importers +// of path to get it from here so we rename in common.ts +export { _DaxPath as Path } from "../deps/common.ts"; import logger, { isColorfulTty } from "./logger.ts"; // NOTE: only use type imports only when getting stuff from "./modules" import type { @@ -18,6 +25,7 @@ import type { } from "../modules/ports/types.ts"; export type DePromisify<T> = T extends Promise<infer Inner> ? Inner : T; +export type DeArrayify<T> = T extends Array<infer Inner> ? Inner : T; const literalSchema = zod.union([ zod.string(), zod.number(), @@ -46,13 +54,13 @@ export function pathsWithDepArts( const includesSet = new Set(); for (const [_, { execs, libs, includes }] of Object.entries(depArts)) { for (const [_, binPath] of Object.entries(execs)) { - pathSet.add(std_path.dirname(binPath)); + pathSet.add($.path(binPath).parentOrThrow()); } for (const [_, libPath] of Object.entries(libs)) { - libSet.add(std_path.dirname(libPath)); + libSet.add($.path(libPath).parentOrThrow()); } for (const [_, incPath] of Object.entries(includes)) { - includesSet.add(std_path.dirname(incPath)); + includesSet.add($.path(incPath).parentOrThrow()); } } @@ -122,55 +130,51 @@ export function tryDepExecShimPath( return path; } -/** - * Lifted from https://deno.land/x/hextools@v1.0.0 - * MIT License - * Copyright (c) 2020 Santiago Aguilar Hernández - */ -export function bufferToHex(buffer: ArrayBuffer): string { - return Array.prototype.map.call( - new Uint8Array(buffer), - (b) => b.toString(16).padStart(2, "0"), - ).join(""); +const syncSha256Hasher = multihasher.from({ + code: multisha2.sha256.code, + name: multisha2.sha256.name, + encode: (input) => syncSha256(input), +}); + +export async function bufferHashAsync( + buf: Uint8Array, +) { + const hashBuf = await multisha2.sha256.digest(buf); + const hashStr = multibase32.base32.encode(hashBuf.bytes); + return hashStr; } -export async function bufferHashHex( - buf: ArrayBuffer, - algo: AlgorithmIdentifier = "SHA-256", +export function bufferHash( + buf: Uint8Array, ) { - const hashBuf = await crypto.subtle.digest(algo, buf); - return bufferToHex(hashBuf); + const hashBuf = syncSha256Hasher.digest(buf); + if (hashBuf instanceof Promise) throw new Error("impossible"); + const hashStr = multibase32.base32.encode(hashBuf.bytes); + return hashStr; } -export async function stringHashHex( +export function stringHash( val: string, - algo: AlgorithmIdentifier = "SHA-256", ) { const arr = new TextEncoder().encode(val); - return await bufferHashHex(arr, algo); + return bufferHash(arr); } -export async function objectHashHex( - object: jsonHash.Tree, - algo: jsonHash.DigestAlgorithmType = "SHA-256", +export function objectHash( + object: Json, ) { - const hashBuf = await jsonHash.digest(algo, object); - const hashHex = bufferToHex(hashBuf); - return hashHex; + return stringHash(json_canonicalize(object)); } export
function getPortRef(manifest: PortManifest) { return `${manifest.name}@${manifest.version}`; } -export async function getInstallHash(install: InstallConfigResolvedX) { - const fullHashHex = await objectHashHex(install as jsonHash.Tree); - const hashHex = fullHashHex.slice(0, 8); - return `${install.portRef}!${hashHex}`; +export function getInstallHash(install: InstallConfigResolvedX) { + const fullHashHex = objectHash(JSON.parse(JSON.stringify(install))); + return `${install.portRef}!${fullHashHex}`; } -export type Path = dax.Path; - export function defaultCommandBuilder() { const builder = new dax.CommandBuilder() .printCommand(true); @@ -189,11 +193,14 @@ export function defaultCommandBuilder() { export const $ = dax.build$( { commandBuilder: defaultCommandBuilder(), + requestBuilder: new dax.RequestBuilder() + .showProgress(Deno.stderr.isTerminal()), extras: { inspect(val: unknown) { return Deno.inspect(val, { colors: isColorfulTty(), iterableLimit: 500, + depth: 10, }); }, pathToString(path: Path) { @@ -214,20 +221,19 @@ export function inWorker() { self instanceof WorkerGlobalScope; } -export async function findConfig(path: string) { - let current = path; +export async function findEntryRecursive(path: string, name: string) { + let current = $.path(path); while (true) { - const location = `${current}/ghjk.ts`; - if (await std_fs.exists(location)) { + const location = `${current}/${name}`; + if (await $.path(location).exists()) { return location; } - const nextCurrent = std_path.dirname(current); - if (nextCurrent == "/" && current == "/") { + const nextCurrent = $.path(current).parent(); + if (!nextCurrent) { break; } current = nextCurrent; } - return null; } export function home_dir(): string | null { @@ -249,7 +255,7 @@ export function dirs() { } return { homeDir: home, - shareDir: std_path.resolve(home, ".local", "share"), + shareDir: $.path(home).resolve(".local", "share"), }; } @@ -261,19 +267,18 @@ if (Number.isNaN(AVAIL_CONCURRENCY)) { throw new Error(`Value of DENO_JOBS is NAN: ${Deno.env.get("DENO_JOBS")}`); } -export async function importRaw(spec: string) { +export async function importRaw(spec: string, timeout: dax.Delay = "1m") { const url = new URL(spec); if (url.protocol == "file:") { - return await Deno.readTextFile(url.pathname); + return await $.path(url.pathname).readText(); } if (url.protocol.match(/^http/)) { - const resp = await fetch(url); - if (!resp.ok) { - throw new Error( - `error importing raw using fetch from ${spec}: ${resp.status} - ${resp.statusText}`, - ); + let request = $.request(url).timeout(timeout); + const integrity = url.searchParams.get("integrity"); + if (integrity) { + request = request.integrity(integrity); } - return await resp.text(); + return await request.text(); } throw new Error( `error importing raw from ${spec}: unrecognized protocol ${url.protocol}`, @@ -351,7 +356,7 @@ export async function downloadFile( args: DownloadFileArgs, ) { const { name, mode, url, downloadPath, tmpDirPath, headers } = { - name: std_url.basename(args.url), + name: $.path(args.url).basename(), mode: 0o666, headers: {}, ...args, @@ -366,7 +371,6 @@ export async function downloadFile( await $.request(url) .header(headers) - .showProgress() .pipeToPath(tmpFilePath, { create: true, mode }); await $.path(downloadPath).ensureDir();