diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index f77d8a28..6178832b 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -19,33 +19,12 @@ repos:
     hooks:
       - id: check-dependabot
       - id: check-github-workflows
-  - repo: https://github.com/charliermarsh/ruff-pre-commit
-    rev: "v0.0.292"
-    hooks:
-      - id: ruff
-  - repo: https://github.com/psf/black
-    rev: 23.9.1
-    hooks:
-      - id: black
   - repo: https://github.com/commitizen-tools/commitizen
     rev: 3.10.0
     hooks:
       - id: commitizen
         stages:
           - commit-msg
-  - repo: https://github.com/doublify/pre-commit-rust
-    rev: v1.0
-    hooks:
-      - id: fmt
-      - id: cargo-check
-        args:
-          - "--locked"
-      - id: clippy
-        args:
-          - "--locked"
-          - "--"
-          - "--deny"
-          - "warnings"
   - repo: local
     hooks:
       - id: deno-fmt
@@ -73,67 +52,3 @@ repos:
           - ts
           - tsx
         files: ^website/
-  - repo: https://github.com/Lucas-C/pre-commit-hooks
-    rev: v1.5.4
-    hooks:
-      - id: insert-license
-        name: "License MPL-2.0 python"
-        args:
-          #- --remove-header
-          - --license-filepath=dev/license-header-MPL-2.0.txt
-          - "--comment-style=#"
-          - "--skip-license-insertion-comment=no-auto-license-header"
-        types_or:
-          - python
-        files: ^typegraph/
-      - id: insert-license
-        name: "License Elastic-2.0 rust"
-        args:
-          #- --remove-header
-          - --license-filepath=dev/license-header-Elastic-2.0.txt
-          - "--comment-style=//"
-          - "--skip-license-insertion-comment=no-auto-license-header"
-        types_or:
-          - rust
-        files: ^(typegate|libs)/
-      - id: insert-license
-        name: "License MPL-2.0 rust"
-        args:
-          #- --remove-header
-          - --license-filepath=dev/license-header-MPL-2.0.txt
-          - "--comment-style=//"
-          - "--skip-license-insertion-comment=no-auto-license-header"
-        types_or:
-          - rust
-        files: ^(meta-cli|typegraph)/
-      - id: insert-license
-        name: "License Elastic-2.0 deno"
-        args:
-          #- --remove-header
-          - --license-filepath=dev/license-header-Elastic-2.0.txt
-          - "--comment-style=//"
-          - "--skip-license-insertion-comment=no-auto-license-header"
-        types_or:
-          - ts
-        files: ^(typegate|dev)/
-      - id: insert-license
-        name: "License MPL-2.0 deno"
-        args:
-          #- --remove-header
-          - --license-filepath=dev/license-header-MPL-2.0.txt
-          - "--comment-style=//"
-          - "--skip-license-insertion-comment=no-auto-license-header"
-        types_or:
-          - ts
-        files: ^typegraph/
-      - id: insert-license
-        name: "License Elastic-2.0 typescript"
-        args:
-          #- --remove-header
-          - --license-filepath=dev/license-header-Elastic-2.0.txt
-          - "--comment-style=//"
-          - "--skip-license-insertion-comment=no-auto-license-header"
-        types_or:
-          - ts
-          - tsx
-        files: ^website/
diff --git a/cli/hooks.ts b/cli/hooks.ts
index 5f8a402b..e6f825ac 100644
--- a/cli/hooks.ts
+++ b/cli/hooks.ts
@@ -14,6 +14,17 @@ switch (Deno.build.os) {
     throw new Error(`unsupported os ${Deno.build.os}`);
 }
 
+const BASH_PREXEC = await (
+  async () => {
+    const resp = await fetch(
+      "https://raw.githubusercontent.com/rcaloras/bash-preexec/0.5.0/bash-preexec.sh",
+    );
+    if (!resp.ok) {
+      throw new Error("error fetching bash-preexec");
+    }
+    return await resp.text();
+  }
+)();
 // null means it should be removed (for cleaning up old versions)
 const vfs = {
   // the script executed when users use the ghjk command
@@ -27,29 +38,38 @@ console.log = log;
 
 mod.ghjk.runCli(Deno.args.slice(1), mod.options);
 `,
-  "hooks/bash-preexec.sh": await (
-    await fetch(
-      "https://raw.githubusercontent.com/rcaloras/bash-preexec/master/bash-preexec.sh",
-    )
-  ).text(),
+  "hooks/bash-preexec.sh": BASH_PREXEC,
+
+  "hooks/.zshenv": `
+if [ -e ~/.zshenv ]; then . ~/.zshenv; fi
+hooksDir=$(dirname -- "$(readlink -f -- "\${(%):-%x}")")
+. $hooksDir/hook.sh
+`,
   // the hook run before every prompt draw in bash
   "hooks/hook.sh": `
 __ghjk_clean_up_paths() {
   PATH=$(echo "$PATH" | tr ':' '\\n' | grep -vE "^$HOME/\\.local/share/ghjk/envs" | tr '\\n' ':')
-  PATH="$\{PATH%:\}"
+  PATH="\${PATH%:}"
   LIBRARY_PATH=$(echo "$LIBRARY_PATH" | tr ':' '\\n' | grep -vE "^$HOME/\\.local/share/ghjk/envs" | tr '\\n' ':')
-  LIBRARY_PATH="$\{LIBRARY_PATH%:\}"
+  LIBRARY_PATH="\${LIBRARY_PATH%:}"
   ${LD_LIBRARY_ENV}=$(echo "$${LD_LIBRARY_ENV}" | tr ':' '\\n' | grep -vE "^$HOME/\\.local/share/ghjk/envs" | tr '\\n' ':')
-  ${LD_LIBRARY_ENV}="$\{${LD_LIBRARY_ENV}%:\}"
+  ${LD_LIBRARY_ENV}="\${${LD_LIBRARY_ENV}%:}"
   C_INCLUDE_PATH=$(echo "$C_INCLUDE_PATH" | tr ':' '\\n' | grep -vE "^$HOME/\\.local/share/ghjk/envs" | tr '\\n' ':')
-  C_INCLUDE_PATH="$\{C_INCLUDE_PATH%:\}"
+  C_INCLUDE_PATH="\${C_INCLUDE_PATH%:}"
   CPLUS_INCLUDE_PATH=$(echo "$CPLUS_INCLUDE_PATH" | tr ':' '\\n' | grep -vE "^$HOME/\\.local/share/ghjk/envs" | tr '\\n' ':')
-  CPLUS_INCLUDE_PATH="$\{CPLUS_INCLUDE_PATH%:\}"
+  CPLUS_INCLUDE_PATH="\${CPLUS_INCLUDE_PATH%:}"
 }
 
+# Define color variables
+ansi_red='\\033[0;31m'
+# GREEN='\\033[0;32m'
+ansi_yel='\\033[0;33m'
+# BLUE='\\033[0;34m'
+ansi_nc='\\033[0m' # No Color
+
 init_ghjk() {
-  if [[ -v GHJK_CLEANUP ]]; then
-    eval $GHJK_CLEANUP
+  if [ -n "\${GHJK_CLEANUP+x}" ]; then
+    eval "$GHJK_CLEANUP"
     unset GHJK_CLEANUP
   fi
   cur_dir=$PWD
@@ -61,16 +81,18 @@ init_ghjk() {
         PATH="$envDir/shims/bin:$PATH"
         LIBRARY_PATH="$envDir/shims/lib:$LIBRARY_PATH"
-        ${LD_LIBRARY_ENV}="$envDir/shims/lib:$${LD_LIBRARY_ENV}"
+        LD_LIBRARY_PATH="$envDir/shims/lib:$LD_LIBRARY_PATH"
         C_INCLUDE_PATH="$envDir/shims/include:$C_INCLUDE_PATH"
         CPLUS_INCLUDE_PATH="$envDir/shims/include:$CPLUS_INCLUDE_PATH"
-        source "$envDir/loader.sh"
+        . "$envDir/loader.sh"
+        # FIXME: -ot not valid in POSIX
+        # shellcheck disable=SC3000-SC4000
         if [ "$envDir/loader.sh" -ot "$cur_dir/ghjk.ts" ]; then
-          echo -e "\e[38;2;255;69;0m[ghjk] Detected changes, please sync...\e[0m"
+          echo "\${ansi_yel}[ghjk] Detected changes, please sync...\${ansi_nc}"
         fi
       else
-        echo -e "\e[38;2;255;69;0m[ghjk] Uninstalled runtime found, please sync...\e[0m"
+        echo "\${ansi_red}[ghjk] Uninstalled runtime found, please sync...\${ansi_nc}"
         echo "$envDir"
       fi
       export ghjk_alias="deno run -A $HOME/.local/share/ghjk/hooks/entrypoint.ts $cur_dir/ghjk.ts"
@@ -79,12 +101,12 @@ init_ghjk() {
     cur_dir="$(dirname "$cur_dir")"
   done
   __ghjk_clean_up_paths
-  export ghjk_alias="echo 'No ghjk.ts config found.'"
+  export ghjk_alias="echo '\${ansi_red}No ghjk.ts config found.\${ansi_nc}'"
 }
 
 ghjk_alias="echo 'No ghjk.ts config found.'"
 ghjk () {
-  eval "$ghjk_alias" $*;
+  eval "$ghjk_alias" "$*";
 }
 
 # export function for non-interactive use
@@ -93,8 +115,8 @@ export -f init_ghjk
 export -f __ghjk_clean_up_paths
 
 # use precmd to check for ghjk.ts before every prompt draw
-hooksDir=$(dirname -- "$(readlink -f -- "$BASH_SOURCE")")
-source "$hooksDir/bash-preexec.sh"
+hooksDir=$(dirname -- "$(readlink -f -- "\${BASH_SOURCE}")")
+. "$hooksDir/bash-preexec.sh"
 precmd() {
   init_ghjk
 }
@@ -236,6 +258,12 @@ export async function install() {
       /\.local\/share\/ghjk\/hooks\/hook.sh/,
       "source $HOME/.local/share/ghjk/hooks/hook.sh",
     );
+  } else if (shell === "zsh") {
+    await filterAddFile(
+      std_path.resolve(homeDir, ".zshrc"),
+      /\.local\/share\/ghjk\/hooks\/hook.sh/,
+      "source $HOME/.local/share/ghjk/hooks/hook.sh",
+    );
   } else {
     throw new Error(`unsupported shell: ${shell}`);
   }
diff --git a/core/utils.ts b/core/utils.ts
index 624362c3..bc99de97 100644
--- a/core/utils.ts
+++ b/core/utils.ts
@@ -28,7 +28,7 @@ export type SpawnOptions = {
   // pipeErr?: WritableStream;
 };
 
-// FIXME: pita function responsible for test flakiness
+// FIXME: replace with deidcated ergonomic library
 export async function spawn(
   cmd: string[],
   options: SpawnOptions = {},
@@ -40,8 +40,6 @@ export async function spawn(
   const child = new Deno.Command(cmd[0], {
     args: cmd.slice(1),
     cwd,
-    // stdout: "piped",
-    // stderr: "piped",
     ...(pipeInput
       ? {
         stdin: "piped",
@@ -50,14 +48,6 @@ export async function spawn(
     env,
   }).spawn();
 
-  // keep pipe asynchronous till the command exists
-  // void child.stdout.pipeTo(options.pipeOut ?? Deno.stdout.writable, {
-  //   preventClose: true,
-  // });
-  // void child.stderr.pipeTo(options.pipeErr ?? Deno.stderr.writable, {
-  //   preventClose: true,
-  // });
-
   if (pipeInput) {
     const writer = child.stdin.getWriter();
     await writer.write(new TextEncoder().encode(pipeInput));
@@ -83,20 +73,9 @@ export async function spawnOutput(
     cwd,
     stdout: "piped",
     stderr: "piped",
-    // ...(pipeInput
-    //   ? {
-    //     stdin: "piped",
-    //   }
-    //   : {}),
     env,
   }).spawn();
 
-  // if (pipeInput) {
-  //   const writer = child.stdin.getWriter();
-  //   await writer.write(new TextEncoder().encode(pipeInput));
-  //   writer.releaseLock();
-  //   await child.stdin.close();
-  // }
   const { code, success, stdout, stderr } = await child.output();
   if (!success) {
     throw new Error(
diff --git a/ghjk.ts b/ghjk.ts
index 4987faaa..3773d60b 100644
--- a/ghjk.ts
+++ b/ghjk.ts
@@ -26,10 +26,10 @@ wasmedge({});
 // jco({});
 // mold({});
 // act({});
-// asdf({
-//   plugRepo: "https://github.com/asdf-community/asdf-zig",
-//   installType: "version",
-// });
+asdf({
+  plugRepo: "https://github.com/asdf-community/asdf-python",
+  installType: "version",
+});
 // protoc({});
 // earthly({});
 // ruff({});
diff --git a/plug.ts b/plug.ts
index 10f3319e..e6b38489 100644
--- a/plug.ts
+++ b/plug.ts
@@ -11,17 +11,7 @@ import {
   registerPlug,
   validators,
 } from "./core/mod.ts";
-import {
-  Foras,
-  log,
-  std_fs,
-  std_io,
-  std_path,
-  std_streams,
-  std_tar,
-  std_url,
-  zipjs,
-} from "./deps/plug.ts";
+import { log, std_fs, std_path, std_url } from "./deps/plug.ts";
 import { initDenoWorkerPlug, isWorker } from "./core/worker.ts";
 import * as asdf from "./core/asdf.ts";
 import logger from "./core/logger.ts";
@@ -33,6 +23,7 @@ export { default as logger } from "./core/logger.ts";
 export { initDenoWorkerPlug, isWorker } from "./core/worker.ts";
 export * as asdf from "./core/asdf.ts";
 export type * from "./core/mod.ts";
+export * from "./unarchive.ts";
 
 if (isWorker()) {
   log.setup({
@@ -169,122 +160,4 @@ export async function downloadFile(
   );
 }
 
-/// Uses file extension to determine type
-/// Does not support symlinks
-export async function unarchive(
-  path: string,
-  dest = "./",
-  ext = std_path.extname(path),
-) {
-  switch (ext) {
-    case ".gz":
-    case ".tar.gz":
-    case ".tgz":
-      await untgz(path, dest);
-      break;
-    case ".tar":
-      await untar(path, dest);
-      break;
-    case ".zip":
-      await unzip(path, dest);
-      break;
-    default:
-      throw Error("unsupported archive extension: ${ext}");
-  }
-}
-
-export async function untgz(
-  path: string,
-  dest = "./",
-) {
-  // FIXME: replace Foras with zip.js from below if possible
-  // this unzips the whole thing into memory first
-  // but I was not able to figure out the
-  await Foras.initBundledOnce();
-  const tgzFile = await Deno.open(path, { read: true });
-  const gzDec = new Foras.GzDecoder();
-  await std_streams.copy(tgzFile, {
-    write(buf) {
-      const mem = new Foras.Memory(buf);
-      gzDec.write(mem);
-      mem.freeNextTick();
-      return Promise.resolve(buf.length);
-    },
-  });
-  const buf = gzDec.finish().copyAndDispose();
-  await Deno.writeFile("/tmp/my.tar", buf);
-  await untarReader(new std_io.Buffer(buf), dest);
-}
-export async function untar(
-  path: string,
-  dest = "./",
-) {
-  const tarFile = await Deno.open(path, {
-    read: true,
-  });
-
-  try {
-    await untarReader(tarFile, dest);
-  } catch (err) {
-    throw err;
-  } finally {
-    tarFile.close();
-  }
-}
-
-/// This does not close the reader
-export async function untarReader(
-  reader: Deno.Reader,
-  dest = "./",
-) {
-  for await (const entry of new std_tar.Untar(reader)) {
-    const filePath = std_path.resolve(dest, entry.fileName);
-    if (entry.type === "directory") {
-      await std_fs.ensureDir(filePath);
-      continue;
-    }
-    await std_fs.ensureDir(std_path.dirname(filePath));
-    const file = await Deno.open(filePath, {
-      create: true,
-      truncate: true,
-      write: true,
-      mode: entry.fileMode,
-    });
-    await std_streams.copy(entry, file);
-    file.close();
-  }
-}
-
-export async function unzip(
-  path: string,
-  dest = "./",
-) {
-  const zipFile = await Deno.open(path, { read: true });
-  const zipReader = new zipjs.ZipReader(zipFile.readable);
-  try {
-    await Promise.allSettled(
-      (await zipReader.getEntries()).map(async (entry) => {
-        const filePath = std_path.resolve(dest, entry.filename);
-        if (entry.directory) {
-          await std_fs.ensureDir(filePath);
-          return;
-        }
-        await std_fs.ensureDir(std_path.dirname(filePath));
-        const file = await Deno.open(filePath, {
-          create: true,
-          truncate: true,
-          write: true,
-          mode: entry.externalFileAttribute >> 16,
-        });
-        if (!entry.getData) throw Error("impossible");
-        await entry.getData(file.writable);
-      }),
-    );
-  } catch (err) {
-    throw err;
-  } finally {
-    zipReader.close();
-  }
-}
-
 export const removeFile = Deno.remove;
diff --git a/tests/e2e.ts b/tests/e2e.ts
index 3f284bf4..655a7a7c 100644
--- a/tests/e2e.ts
+++ b/tests/e2e.ts
@@ -50,21 +50,19 @@ await (${confFn.toString()})()`;
     "-f-",
     ".",
   ], { env, pipeInput: dFile });
-  await spawn([
-    ...dockerCmd,
-    "run",
-    "--rm",
-    ...Object.entries(env).map(([key, val]) => ["-e", `${key}=${val}`])
-      .flat(),
-    tag,
-    "bash",
-    "-c",
-    `
-    source ~/.bashrc
-    init_ghjk
-    ${ePoint}
-    `,
-  ], { env });
+  for (const shell of ["bash", "fish"]) {
+    await spawn([
+      ...dockerCmd,
+      "run",
+      "--rm",
+      ...Object.entries(env).map(([key, val]) => ["-e", `${key}=${val}`])
+        .flat(),
+      tag,
+      shell,
+      "-c",
+      ePoint,
+    ], { env });
+  }
   await spawn([
     ...dockerCmd,
     "rmi",
diff --git a/tests/test.Dockerfile b/tests/test.Dockerfile
index d095baa9..79f8c2ab 100644
--- a/tests/test.Dockerfile
+++ b/tests/test.Dockerfile
@@ -4,16 +4,17 @@ RUN set -eux; \
     export DEBIAN_FRONTEND=noninteractive; \
     apt update; \
     apt install --yes \
+    # test deps
+    fish zsh \
     # asdf deps
     git curl xz-utils unzip \
     ;\
     apt clean autoclean; apt autoremove --yes; rm -rf /var/lib/{apt,dpkg,cache,log}/;
 
-# activate ghjk for each bash shell
+# activate ghjk non-interactive shells execs
 ENV BASH_ENV=/root/.local/share/ghjk/hooks/hook.sh
-# explicitly set the shell var as detection fails otherwise
-# because ps program is not present in this image
-ENV SHELL=/bin/bash
+ENV ZDOTDIR=/root/.local/share/ghjk/hooks/
+
 # BASH_ENV behavior is only avail in bash, not sh
 SHELL [ "/bin/bash", "-c"]
@@ -23,7 +24,12 @@
 COPY deno.lock ./
 COPY deps/* ./deps/
 RUN deno cache deps/*
 COPY . ./
-RUN deno run -A /ghjk/install.ts
+
+# explicitly set the shell var as detection fails otherwise
+# because ps program is not present in this image
+RUN SHELL=/bin/bash deno run -A /ghjk/install.ts
+RUN SHELL=/bin/fish deno run -A /ghjk/install.ts
+RUN SHELL=/bin/zsh deno run -A /ghjk/install.ts
 
 WORKDIR /app
diff --git a/unarchive.ts b/unarchive.ts
new file mode 100644
index 00000000..6eba2f3b
--- /dev/null
+++ b/unarchive.ts
@@ -0,0 +1,127 @@
+import {
+  Foras,
+  std_fs,
+  std_io,
+  std_path,
+  std_streams,
+  std_tar,
+  zipjs,
+} from "./deps/plug.ts";
+
+/// Uses file extension to determine type
+/// Does not support symlinks
+export async function unarchive(
+  path: string,
+  dest = "./",
+  ext = std_path.extname(path),
+) {
+  switch (ext) {
+    case ".gz":
+    case ".tar.gz":
+    case ".tgz":
+      await untgz(path, dest);
+      break;
+    case ".tar":
+      await untar(path, dest);
+      break;
+    case ".zip":
+      await unzip(path, dest);
+      break;
+    default:
+      throw Error("unsupported archive extension: ${ext}");
+  }
+}
+
+export async function untgz(
+  path: string,
+  dest = "./",
+) {
+  // FIXME: replace Foras with zip.js from below if possible
+  // this unzips the whole thing into memory first
+  // but I was not able to figure out the
+  await Foras.initBundledOnce();
+  const tgzFile = await Deno.open(path, { read: true });
+  const gzDec = new Foras.GzDecoder();
+  await std_streams.copy(tgzFile, {
+    write(buf) {
+      const mem = new Foras.Memory(buf);
+      gzDec.write(mem);
+      mem.freeNextTick();
+      return Promise.resolve(buf.length);
+    },
+  });
+  const buf = gzDec.finish().copyAndDispose();
+  await Deno.writeFile("/tmp/my.tar", buf);
+  await untarReader(new std_io.Buffer(buf), dest);
+}
+export async function untar(
+  path: string,
+  dest = "./",
+) {
+  const tarFile = await Deno.open(path, {
+    read: true,
+  });
+
+  try {
+    await untarReader(tarFile, dest);
+  } catch (err) {
+    throw err;
+  } finally {
+    tarFile.close();
+  }
+}
+
+/// This does not close the reader
+export async function untarReader(
+  reader: Deno.Reader,
+  dest = "./",
+) {
+  for await (const entry of new std_tar.Untar(reader)) {
+    const filePath = std_path.resolve(dest, entry.fileName);
+    if (entry.type === "directory") {
+      await std_fs.ensureDir(filePath);
+      continue;
+    }
+    await std_fs.ensureDir(std_path.dirname(filePath));
+    const file = await Deno.open(filePath, {
+      create: true,
+      truncate: true,
+      write: true,
+      mode: entry.fileMode,
+    });
+    await std_streams.copy(entry, file);
+    file.close();
+  }
+}
+
+export async function unzip(
+  path: string,
+  dest = "./",
+) {
+  const zipFile = await Deno.open(path, { read: true });
+  const zipReader = new zipjs.ZipReader(zipFile.readable);
+  try {
+    await Promise.allSettled(
+      (await zipReader.getEntries()).map(async (entry) => {
+        const filePath = std_path.resolve(dest, entry.filename);
+        if (entry.directory) {
+          await std_fs.ensureDir(filePath);
+          return;
+        }
+        await std_fs.ensureDir(std_path.dirname(filePath));
+        const file = await Deno.open(filePath, {
+          create: true,
+          truncate: true,
+          write: true,
+          mode: entry.externalFileAttribute >> 16,
+        });
+        if (!entry.getData) throw Error("impossible");
+        await entry.getData(file.writable);
+      }),
+    );
+  } catch (err) {
+    throw err;
+  } finally {
+    zipReader.close();
+  }
+}