diff --git a/llama-tokenizer.js b/llama-tokenizer.js
index dd4165a..64ec3fe 100644
--- a/llama-tokenizer.js
+++ b/llama-tokenizer.js
@@ -11,10 +11,10 @@
  *
  */
 
-const llamaTokenizer = {}
+const llamaTokenizer = {} as any;
 
 if (typeof window !== 'undefined') {
-    window.llamaTokenizer = llamaTokenizer
+    (window as any).llamaTokenizer = llamaTokenizer
 }
 
 const base64decode = function(encodedString) {
@@ -104,6 +104,11 @@ const utf8Encoder = new TextEncoder();
 const utf8Decoder = new TextDecoder('utf-8')
 
 class PriorityQueue {
+    // for ts
+    _heap: any[]
+    _comparator: any
+
+
     // PriorityQueue implementation is copied from https://stackoverflow.com/a/42919752 with minor refactoring
     constructor(comparator = (a, b) => a > b) {
         this._heap = [];
@@ -192,7 +197,7 @@ const mapCharactersToTokenIds = (prompt, add_bos_token, add_preceding_space) =>
     const charArray = Array.from(promptAltered)
     // Transform each character to its corresponding token
     for (let i=0; i
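
The `as any` casts and `any`-typed fields in the patch are the quickest way to satisfy the TypeScript compiler. As a sketch of a stricter alternative (not part of this patch, assuming a module file compiled with the DOM lib and reusing the llamaTokenizer / PriorityQueue names from llama-tokenizer.js), the global Window type can be augmented and the PriorityQueue fields given concrete types:

// Sketch only, not part of the patch above: a stricter alternative to the
// `as any` casts, assuming a module file with the DOM lib enabled.

// Augmenting the global Window type lets the browser export type-check
// without `(window as any)`.
declare global {
    interface Window {
        llamaTokenizer?: object
    }
}

const llamaTokenizer = {}

if (typeof window !== 'undefined') {
    window.llamaTokenizer = llamaTokenizer // no cast needed
}

// Concrete field types instead of `_heap: any[]` / `_comparator: any`.
type Comparator<T> = (a: T, b: T) => boolean

class PriorityQueue<T> {
    _heap: T[] = []
    _comparator: Comparator<T>

    // The comparator is required here; the original default `(a, b) => a > b`
    // only type-checks for comparable element types.
    constructor(comparator: Comparator<T>) {
        this._comparator = comparator
    }

    size(): number {
        return this._heap.length
    }
}

export { llamaTokenizer, PriorityQueue }

Passing the comparator explicitly, e.g. new PriorityQueue<number>((a, b) => a > b), preserves the call shape of the original class while dropping the untyped fields.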