diff --git a/README.md b/README.md index 19acff2..0d786d6 100644 --- a/README.md +++ b/README.md @@ -3,6 +3,8 @@ WebpeerJS enables browser to browser connectivity without a central server. +[Demo](https://nuzulul.github.io/webpeerjs/demo/) + ## Example ``` @@ -16,14 +18,13 @@ void async function main() { const [send,listen,members] = node.joinRoom('myroom') - send('hello') - listen((message,id) => { console.log(`Message from ${id} : ${message}`) }) members((data) => { console.log(`Members : ${data}`) + send('hello') }) }() @@ -31,10 +32,18 @@ void async function main() { ## Install +NPM : + ``` npm i webpeerjs ``` +CDN : + +``` + +``` + ## API - `createWebpeer()` Create a new local node. diff --git a/config/rollup.config.build.js b/config/rollup.config.build.js index 9a3a9eb..28781fb 100644 --- a/config/rollup.config.build.js +++ b/config/rollup.config.build.js @@ -9,8 +9,7 @@ export default [ file: 'dist/esm/webpeerjs.js', format: 'es', } - ], - plugins: [nodeResolve({browser: true}), commonjs()] + ] }, { input: 'src/umd.js', diff --git a/config/tsconfig-esm.json b/config/tsconfig-esm.json index 887beea..41ca196 100644 --- a/config/tsconfig-esm.json +++ b/config/tsconfig-esm.json @@ -2,11 +2,11 @@ "extends": "./tsconfig-base.json", "compilerOptions": { "module": "esnext", - "outDir": "./../src", + "outDir": "./../dist/esm", "target": "esnext", "emitDeclarationOnly": true, - "rootDir": "./../src" + "rootDir": "./../dist/esm" }, - "include": ["./../src/webpeerjs.js"], + "include": ["./../dist/esm/webpeerjs.js"], "exclude": ["./../node_modules"], } \ No newline at end of file diff --git a/demo/index.html b/demo/index.html new file mode 100644 index 0000000..7f03566 --- /dev/null +++ b/demo/index.html @@ -0,0 +1,44 @@ + + + + + + + WebpeerJS + + +
+ + + + diff --git a/demo/webpeerjs.js b/demo/webpeerjs.js new file mode 100644 index 0000000..2bcb0f0 --- /dev/null +++ b/demo/webpeerjs.js @@ -0,0 +1,44254 @@ +(function (global, factory) { + typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() : + typeof define === 'function' && define.amd ? define(factory) : + (global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.webpeerjs = factory()); +})(this, (function () { 'use strict'; + + const prefix$1 = 'webpeerjs'; + const CONFIG_PREFIX = prefix$1; + const CONFIG_DBSTORE_PATH = prefix$1+'-dbstore'; + const CONFIG_MAX_CONNECTIONS = 50; + const CONFIG_MIN_CONNECTIONS = 0; + const CONFIG_DISCOVER_RELAYS = 2; + const CONFIG_PEER_DISCOVERY_UNIVERSAL_CONNECTIVITY = 'universal-connectivity-browser-peer-discovery'; + const CONFIG_PUBSUB_PEER_DISCOVERY = ['_peer-discovery._p2p._pubsub',CONFIG_PEER_DISCOVERY_UNIVERSAL_CONNECTIVITY,prefix$1+'-peer-discovery']; + + const CONFIG_KNOWN_BOOTSTRAP_PEERS_ADDRS = [ + { + "Peers": [ + { + "Addrs": [ + "/ip4/147.28.186.157/udp/9091/quic-v1", + "/ip4/147.28.186.157/udp/9090/webrtc-direct/certhash/uEiBbC9bbdvraVWDvcvCEdJAWDymmUqiJQ964FuyEq0hELw" + ], + "ID": "12D3KooWGahRw3ZnM4gAyd9FK75v4Bp5keFYTvkcAwhpEm28wbV3", + "Schema": "peer" + } + ] + }, + { + "Peers": [ + { + "Addrs": [ + "/ip6/2604:1380:4642:6600::3/udp/9095/quic-v1/webtransport/certhash/uEiCU6MjDlUhqtik_vc8Ps5_MJtGhJKn-XSqvuzn8SJGL9A/certhash/uEiDlk15VyYoXwTaB608Y80ch3OptpMiKFblYdduSVLy2sQ", + "/ip4/147.28.186.157/udp/9095/quic-v1", + "/ip6/2604:1380:4642:6600::3/udp/9095/quic-v1", + "/ip4/147.28.186.157/udp/9095/quic-v1/webtransport/certhash/uEiCU6MjDlUhqtik_vc8Ps5_MJtGhJKn-XSqvuzn8SJGL9A/certhash/uEiDlk15VyYoXwTaB608Y80ch3OptpMiKFblYdduSVLy2sQ" + ], + "ID": "12D3KooWFhXabKDwALpzqMbto94sB7rvmZ6M28hs9Y9xSopDKwQr", + "Schema": "peer" + } + ] + }, + { + "Peers": [ + { + "Addrs": [ + 
"/ip6/2607:f2f8:a880:0:5054:ff:fe9f:4913/udp/4001/quic-v1/webtransport/certhash/uEiD84ZvAnGWTQx_WlOAqreebO036a5RRB5zfJBo9QJfDBA/certhash/uEiAOMkIVxeNye76-f5ADnLCSNqNlnhOYdkwyRlqLFbhIkQ", + "/ip6/2607:f2f8:a880::70/udp/4001/quic-v1/webtransport/certhash/uEiD84ZvAnGWTQx_WlOAqreebO036a5RRB5zfJBo9QJfDBA/certhash/uEiAOMkIVxeNye76-f5ADnLCSNqNlnhOYdkwyRlqLFbhIkQ", + "/ip6/2607:f2f8:a880:0:5054:ff:fe9f:4913/tcp/4001", + "/ip6/2607:f2f8:a880:0:5054:ff:fe9f:4913/udp/4001/quic-v1", + "/ip4/174.136.97.180/udp/4001/quic-v1/webtransport/certhash/uEiD84ZvAnGWTQx_WlOAqreebO036a5RRB5zfJBo9QJfDBA/certhash/uEiAOMkIVxeNye76-f5ADnLCSNqNlnhOYdkwyRlqLFbhIkQ", + "/ip4/174.136.97.180/udp/4001/quic-v1", + "/ip6/2607:f2f8:a880::70/udp/4001/quic-v1", + "/ip4/174.136.97.180/tcp/4001" + ], + "ID": "12D3KooWPEDBmt7vm6FNNYuqaA4n2qMUZ6wPK5NcRc8t6KpqgRkV", + "Schema": "peer" + } + ] + }, + { + "Peers": [ + { + "Addrs": [ + "/ip4/174.136.97.179/udp/4001/quic-v1", + "/ip6/2607:f2f8:a880::50/udp/4002/quic-v1/webtransport/certhash/uEiBF7KOka9hhb2IjhUd-OkfgTFOf-VpgV7fwvMkKOkMrdQ/certhash/uEiDxVhDwzFlnorujU_rjTnO_TTLRMlXxzPLEgaG-1xOPkA", + "/ip4/174.136.97.179/udp/4001/quic-v1/webtransport/certhash/uEiBF7KOka9hhb2IjhUd-OkfgTFOf-VpgV7fwvMkKOkMrdQ/certhash/uEiDxVhDwzFlnorujU_rjTnO_TTLRMlXxzPLEgaG-1xOPkA", + "/ip6/2607:f2f8:a880::50/udp/4001/quic-v1", + "/ip4/174.136.97.179/udp/4002/quic-v1/webtransport/certhash/uEiBF7KOka9hhb2IjhUd-OkfgTFOf-VpgV7fwvMkKOkMrdQ/certhash/uEiDxVhDwzFlnorujU_rjTnO_TTLRMlXxzPLEgaG-1xOPkA", + "/ip6/2607:f2f8:a880::50/tcp/4001", + "/ip6/2607:f2f8:a880::50/udp/4001/quic-v1/webtransport/certhash/uEiBF7KOka9hhb2IjhUd-OkfgTFOf-VpgV7fwvMkKOkMrdQ/certhash/uEiDxVhDwzFlnorujU_rjTnO_TTLRMlXxzPLEgaG-1xOPkA", + "/ip4/174.136.97.179/tcp/4001" + ], + "ID": "12D3KooWSHbugDEQeWm2LjtRRMpNgLu6oZ8zkX8XcTwYMAewVekP", + "Schema": "peer" + } + ] + }, + { + "Peers": [ + { + "Addrs": [ + 
"/ip6/2a03:4000:46:26e::c17/udp/443/quic-v1/webtransport/certhash/uEiAortGu7HNi8-pV9onPFkTwykrnWuJEqYf7zbQVE1FEtg/certhash/uEiB5Z8j3pdTJU_TDYoJ-GgUQSaXOmvKIGmASL9s-p3VHQA", + "/ip4/45.83.104.156/udp/443/quic-v1", + "/ip4/45.83.104.156/udp/443/quic-v1/webtransport/certhash/uEiAortGu7HNi8-pV9onPFkTwykrnWuJEqYf7zbQVE1FEtg/certhash/uEiB5Z8j3pdTJU_TDYoJ-GgUQSaXOmvKIGmASL9s-p3VHQA", + "/ip6/2a03:4000:46:26e::c17/tcp/443", + "/ip6/2a03:4000:46:26e::c17/udp/443/quic-v1", + "/ip4/45.83.104.156/tcp/443" + ], + "ID": "12D3KooWASoxFpwwy8JDdu4Tm57mhESsnbFPogam9VVmhR95FGXr", + "Schema": "peer" + } + ] + }, + { + "Peers": [ + { + "Addrs": [ + "/ip4/139.178.91.71/udp/4001/quic-v1", + "/ip6/2604:1380:45e3:6e00::1/tcp/4001", + "/ip4/139.178.91.71/tcp/4001", + "/ip4/139.178.91.71/udp/4001/quic-v1/webtransport/certhash/uEiDYGZMqjz8wsz59DHA4iJin4nqTUfuJhq9AeAZlHBrmvg/certhash/uEiBXLv0dkEqbhmcinRbwj8b_3vWs0kWwf1-fiaz5wS-tew", + "/ip6/2604:1380:45e3:6e00::1/udp/4001/quic-v1/webtransport/certhash/uEiDYGZMqjz8wsz59DHA4iJin4nqTUfuJhq9AeAZlHBrmvg/certhash/uEiBXLv0dkEqbhmcinRbwj8b_3vWs0kWwf1-fiaz5wS-tew", + "/dnsaddr/bootstrap.libp2p.io", + "/dns6/sv15.bootstrap.libp2p.io/tcp/443/wss", + "/ip6/2604:1380:45e3:6e00::1/udp/4001/quic-v1", + "/ip4/139.178.91.71/tcp/443/tls/sni/sv15.bootstrap.libp2p.io/ws", + "/ip6/2604:1380:45e3:6e00::1/tcp/443/tls/sni/sv15.bootstrap.libp2p.io/ws", + "/dns4/sv15.bootstrap.libp2p.io/tcp/443/wss" + ], + "ID": "QmNnooDu7bfjPFoTZYxMNLWUQJyrVwtbZg5gBMjTezGAJN", + "Schema": "peer" + } + ] + }, + { + "Peers": [ + { + "Addrs": [ + "/ip4/139.178.65.157/udp/4001/quic-v1", + "/dns6/ny5.bootstrap.libp2p.io/tcp/443/wss", + "/ip6/2604:1380:45d2:8100::1/tcp/4001", + "/dns4/ny5.bootstrap.libp2p.io/tcp/443/wss", + "/ip4/139.178.65.157/tcp/4001", + "/ip6/2604:1380:45d2:8100::1/udp/4001/quic-v1" + ], + "ID": "QmQCU2EcMqAqQPR2i9bChDtGNJchTbq5TbXJJ16u19uLTa", + "Schema": "peer" + } + ] + }, + { + "Peers": [ + { + "Addrs": [ + "/dns4/am6.bootstrap.libp2p.io/tcp/443/wss", + 
"/ip6/2604:1380:4602:5c00::3/udp/4001/quic-v1/webtransport/certhash/uEiD3Q2mfd5EYt6Y2M9rsge_nna4hVyCgUVlfSz3IjAK8ew/certhash/uEiAHt8JR08mlUCCGnN8VpqG9G4sfvdjAd0v5ZM5W1lqntw", + "/ip6/2604:1380:4602:5c00::3/tcp/4001", + "/ip4/147.75.87.27/udp/4001/quic-v1", + "/ip6/2604:1380:4602:5c00::3/udp/4001/quic-v1", + "/ip4/147.75.87.27/tcp/4001", + "/dns6/am6.bootstrap.libp2p.io/tcp/443/wss", + "/ip4/147.75.87.27/udp/4001/quic-v1/webtransport/certhash/uEiD3Q2mfd5EYt6Y2M9rsge_nna4hVyCgUVlfSz3IjAK8ew/certhash/uEiAHt8JR08mlUCCGnN8VpqG9G4sfvdjAd0v5ZM5W1lqntw" + ], + "ID": "QmbLHAnMoJPWSCR5Zhtx6BHJX9KiKNN6tpvbUcqanj75Nb", + "Schema": "peer" + } + ] + }, + { + "Peers": [ + { + "Addrs": [ + "/ip6/2604:1380:40e1:9c00::1/udp/4001/quic-v1/webtransport/certhash/uEiBENNd9IIPnU1cTGoVATo6cly-O2fQAjKpIyFi0msoJew/certhash/uEiD8N0sSnHOUIsKxz4pDuVqx4szt-huTZWafW_EY19H7MQ", + "/ip6/2604:1380:40e1:9c00::1/tcp/4001", + "/dnsaddr/bootstrap.libp2p.io", + "/ip4/145.40.118.135/udp/4001/quic-v1/webtransport/certhash/uEiBENNd9IIPnU1cTGoVATo6cly-O2fQAjKpIyFi0msoJew/certhash/uEiD8N0sSnHOUIsKxz4pDuVqx4szt-huTZWafW_EY19H7MQ", + "/ip4/145.40.118.135/udp/4001/quic-v1", + "/dns4/sg1.bootstrap.libp2p.io/tcp/443/wss", + "/ip6/2604:1380:40e1:9c00::1/udp/4001/quic-v1", + "/ip4/145.40.118.135/tcp/4001", + "/dns6/sg1.bootstrap.libp2p.io/tcp/443/wss", + "/ip4/145.40.118.135/tcp/443/tls/sni/sg1.bootstrap.libp2p.io/ws", + "/ip6/2604:1380:40e1:9c00::1/tcp/443/tls/sni/sg1.bootstrap.libp2p.io/ws" + ], + "ID": "QmcZf59bWwK5XFi76CZX8cbJ4BhTzzA3gU1ZjYZcYW3dwt", + "Schema": "peer" + } + ] + }, + { + "Peers": [ + { + "Addrs": [ + "/ip4/104.131.131.82/udp/4001/quic-v1/webtransport/certhash/uEiCIMEw_vvBwFLEqWbOj_wx7I90HmfMabSyVZ9Vn5srjPA/certhash/uEiBy22YtYUPU8T3aJ4rL3jC0lLR8MFZZNkFP-rWRxMrqQA", + "/ip4/104.131.131.82/udp/4001/quic-v1", + "/ip4/104.131.131.82/tcp/4001", + "/ip4/104.131.131.82/udp/4001/quic" + ], + "ID": "QmaCpDMGvV2BGHeYERUEnRQAwe3N8SzbUtfsmvsqQLuvuJ", + "Schema": "peer" + } + ] + }, + { + "Peers": [ + { + 
"Addrs": [ + "/ip4/16.170.214.173/udp/4001/quic-v1/webtransport/certhash/uEiAKAeeOxU7ExDc81y7d53D96nFaRmmXwSFj0429Ij9T9A/certhash/uEiB4ttGGuaUzqF84q5RFhUTVArF4mb9t_UN_kcsfd4qYEQ", + "/ip4/16.170.214.173/tcp/4001", + "/ip4/16.170.214.173/udp/4001/quic-v1" + ], + "ID": "12D3KooWHh98YpAkJsn3ULjMjK1n9QVkXmi8Sb3gTDMatHxCmDP5", + "Schema": "peer" + } + ] + }, + { + "Peers": [ + { + "Addrs": [ + "/ip4/18.119.248.24/udp/4001/quic-v1", + "/ip4/18.119.248.24/tcp/4001", + "/ip4/18.119.248.24/udp/4001/quic-v1/webtransport/certhash/uEiDwG2YnjoUpoKqmUMX-aYJeLi1UNEsqa8EFhpHFUVs0AQ/certhash/uEiCqy6-Pk3S2iYS0puQ0UhtTZ1s_nw-sIsoB-bX0Le6lFA" + ], + "ID": "12D3KooWS79EhkPU7ESUwgG4vyHHzW9FDNZLoWVth9b5N5NSrvaj", + "Schema": "peer" + } + ] + }, + { + "Peers": [ + { + "Addrs": [ + "/ip4/83.173.236.99/udp/4001/quic-v1/webtransport/certhash/uEiD8mhYDW_6kgJAb07RlZnA-YSScG38df5wb9IxFtZmQwQ/certhash/uEiACG9e-sG_ild6y5t-jvb_dfsNi_gPgZ7nn7Qa5ctT-Wg", + "/ip6/2a02:121e:21e:1:546f:18ff:fea8:8e/tcp/4001", + "/ip4/83.173.236.99/tcp/10201", + "/dns4/ipfs-ws.neaweb.ch/tcp/443/wss", + "/ip4/89.251.251.195/udp/4001/quic-v1", + "/ip4/89.251.251.195/udp/4001/quic-v1/webtransport/certhash/uEiD8mhYDW_6kgJAb07RlZnA-YSScG38df5wb9IxFtZmQwQ/certhash/uEiACG9e-sG_ild6y5t-jvb_dfsNi_gPgZ7nn7Qa5ctT-Wg", + "/ip6/2a02:121e:21e:1:546f:18ff:fea8:8e/udp/4001/quic-v1", + "/ip4/89.251.251.195/tcp/4001/quic-v1/webtransport", + "/ip6/2a02:121e:21e:1:546f:18ff:fea8:8e/tcp/4011/ws", + "/ip4/89.251.251.195/tcp/4001", + "/dns4/ipfs-ws.neaweb.tech/tcp/443/wss", + "/ip4/89.251.251.195/tcp/4001/quic-v1", + "/ip4/83.173.236.99/tcp/4001", + "/ip6/2a02:121e:21e:1:546f:18ff:fea8:8e/udp/4001/quic-v1/webtransport/certhash/uEiD8mhYDW_6kgJAb07RlZnA-YSScG38df5wb9IxFtZmQwQ/certhash/uEiACG9e-sG_ild6y5t-jvb_dfsNi_gPgZ7nn7Qa5ctT-Wg", + "/ip4/83.173.236.99/udp/4001/quic-v1", + "/ip4/83.173.236.99/udp/19707/quic-v1", + 
"/ip4/83.173.236.99/udp/19707/quic-v1/webtransport/certhash/uEiD8mhYDW_6kgJAb07RlZnA-YSScG38df5wb9IxFtZmQwQ/certhash/uEiACG9e-sG_ild6y5t-jvb_dfsNi_gPgZ7nn7Qa5ctT-Wg" + ], + "ID": "12D3KooWBbkCD5MpJhMc1mfPAVGEyVkQnyxPKGS7AHwDqQM2JUsk", + "Schema": "peer" + } + ] + }, + { + "Peers": [ + { + "Addrs": [ + "/ip4/89.58.11.155/udp/4001/quic-v1/webtransport/certhash/uEiDDgTBtcIstrvU4MRSfcD7tYDIsQrnGAxW6Oh5AoLm4Ig/certhash/uEiAXHXsg1wBb-sPvpBw8BVpRFxso1milPa801TxjPNYQ3w", + "/ip4/89.58.11.155/udp/4001/quic", + "/ip6/64:ff9b::593a:b9b/udp/4001/quic-v1/webtransport/certhash/uEiDDgTBtcIstrvU4MRSfcD7tYDIsQrnGAxW6Oh5AoLm4Ig/certhash/uEiAXHXsg1wBb-sPvpBw8BVpRFxso1milPa801TxjPNYQ3w", + "/ip6/64:ff9b::593a:b9b/udp/4001/quic-v1", + "/ip4/89.58.11.155/udp/4001/quic-v1", + "/ip4/89.58.11.155/tcp/4001" + ], + "ID": "12D3KooWKLdecs31Zmo2pLBjR9HY2vWo3VwM4eBm21Czeucbe6FL", + "Schema": "peer" + } + ] + }, + { + "Peers": [ + { + "Addrs": [ + "/ip4/205.198.64.76/tcp/4001", + "/ip4/205.198.64.76/udp/4001/quic-v1", + "/ip4/205.198.64.76/udp/4001/quic-v1/webtransport/certhash/uEiCT4khDdgvF2NHCw-fu3qUk2qRqx0AgTEt0PHZct8jq5g/certhash/uEiA2RYkSCyOOtJmrpm_Dn8jJr-LLwS0S7K3HX3dbJJlN_w" + ], + "ID": "12D3KooWBdF3g6vSJFRPoZQo7BNnkNzaWb59gpyaVzsgtNTVeu8H", + "Schema": "peer" + } + ] + }, + ]; + + const CONFIG_KNOWN_BOOTSTRAP_PEER_IDS = [ + '12D3KooWFhXabKDwALpzqMbto94sB7rvmZ6M28hs9Y9xSopDKwQr', + '12D3KooWGahRw3ZnM4gAyd9FK75v4Bp5keFYTvkcAwhpEm28wbV3', + '12D3KooWPEDBmt7vm6FNNYuqaA4n2qMUZ6wPK5NcRc8t6KpqgRkV', + '12D3KooWSHbugDEQeWm2LjtRRMpNgLu6oZ8zkX8XcTwYMAewVekP', + '12D3KooWASoxFpwwy8JDdu4Tm57mhESsnbFPogam9VVmhR95FGXr', + 'QmNnooDu7bfjPFoTZYxMNLWUQJyrVwtbZg5gBMjTezGAJN', + 'QmQCU2EcMqAqQPR2i9bChDtGNJchTbq5TbXJJ16u19uLTa', + 'QmbLHAnMoJPWSCR5Zhtx6BHJX9KiKNN6tpvbUcqanj75Nb', + 'QmcZf59bWwK5XFi76CZX8cbJ4BhTzzA3gU1ZjYZcYW3dwt', + 'QmaCpDMGvV2BGHeYERUEnRQAwe3N8SzbUtfsmvsqQLuvuJ', + '12D3KooWHh98YpAkJsn3ULjMjK1n9QVkXmi8Sb3gTDMatHxCmDP5', + '12D3KooWS79EhkPU7ESUwgG4vyHHzW9FDNZLoWVth9b5N5NSrvaj', + 
'12D3KooWBbkCD5MpJhMc1mfPAVGEyVkQnyxPKGS7AHwDqQM2JUsk', + '12D3KooWKLdecs31Zmo2pLBjR9HY2vWo3VwM4eBm21Czeucbe6FL', + '12D3KooWBdF3g6vSJFRPoZQo7BNnkNzaWb59gpyaVzsgtNTVeu8H' + ]; + + /** + * Returns a `Uint8Array` of the requested size. Referenced memory will + * be initialized to 0. + */ + function alloc$2(size = 0) { + return new Uint8Array(size); + } + /** + * Where possible returns a Uint8Array of the requested size that references + * uninitialized memory. Only use if you are certain you will immediately + * overwrite every value in the returned `Uint8Array`. + */ + function allocUnsafe(size = 0) { + return new Uint8Array(size); + } + + /* eslint-disable no-fallthrough */ + const N1$1 = Math.pow(2, 7); + const N2$1 = Math.pow(2, 14); + const N3$1 = Math.pow(2, 21); + const N4$1 = Math.pow(2, 28); + const N5$1 = Math.pow(2, 35); + const N6$1 = Math.pow(2, 42); + const N7$1 = Math.pow(2, 49); + /** Most significant bit of a byte */ + const MSB$2 = 0x80; + /** Rest of the bits in a byte */ + const REST$2 = 0x7f; + function encodingLength$1(value) { + if (value < N1$1) { + return 1; + } + if (value < N2$1) { + return 2; + } + if (value < N3$1) { + return 3; + } + if (value < N4$1) { + return 4; + } + if (value < N5$1) { + return 5; + } + if (value < N6$1) { + return 6; + } + if (value < N7$1) { + return 7; + } + if (Number.MAX_SAFE_INTEGER != null && value > Number.MAX_SAFE_INTEGER) { + throw new RangeError('Could not encode varint'); + } + return 8; + } + function encodeUint8Array(value, buf, offset = 0) { + switch (encodingLength$1(value)) { + case 8: { + buf[offset++] = (value & 0xFF) | MSB$2; + value /= 128; + } + case 7: { + buf[offset++] = (value & 0xFF) | MSB$2; + value /= 128; + } + case 6: { + buf[offset++] = (value & 0xFF) | MSB$2; + value /= 128; + } + case 5: { + buf[offset++] = (value & 0xFF) | MSB$2; + value /= 128; + } + case 4: { + buf[offset++] = (value & 0xFF) | MSB$2; + value >>>= 7; + } + case 3: { + buf[offset++] = (value & 0xFF) | MSB$2; + value 
>>>= 7; + } + case 2: { + buf[offset++] = (value & 0xFF) | MSB$2; + value >>>= 7; + } + case 1: { + buf[offset++] = (value & 0xFF); + value >>>= 7; + break; + } + default: throw new Error('unreachable'); + } + return buf; + } + function encodeUint8ArrayList(value, buf, offset = 0) { + switch (encodingLength$1(value)) { + case 8: { + buf.set(offset++, (value & 0xFF) | MSB$2); + value /= 128; + } + case 7: { + buf.set(offset++, (value & 0xFF) | MSB$2); + value /= 128; + } + case 6: { + buf.set(offset++, (value & 0xFF) | MSB$2); + value /= 128; + } + case 5: { + buf.set(offset++, (value & 0xFF) | MSB$2); + value /= 128; + } + case 4: { + buf.set(offset++, (value & 0xFF) | MSB$2); + value >>>= 7; + } + case 3: { + buf.set(offset++, (value & 0xFF) | MSB$2); + value >>>= 7; + } + case 2: { + buf.set(offset++, (value & 0xFF) | MSB$2); + value >>>= 7; + } + case 1: { + buf.set(offset++, (value & 0xFF)); + value >>>= 7; + break; + } + default: throw new Error('unreachable'); + } + return buf; + } + function decodeUint8Array(buf, offset) { + let b = buf[offset]; + let res = 0; + res += b & REST$2; + if (b < MSB$2) { + return res; + } + b = buf[offset + 1]; + res += (b & REST$2) << 7; + if (b < MSB$2) { + return res; + } + b = buf[offset + 2]; + res += (b & REST$2) << 14; + if (b < MSB$2) { + return res; + } + b = buf[offset + 3]; + res += (b & REST$2) << 21; + if (b < MSB$2) { + return res; + } + b = buf[offset + 4]; + res += (b & REST$2) * N4$1; + if (b < MSB$2) { + return res; + } + b = buf[offset + 5]; + res += (b & REST$2) * N5$1; + if (b < MSB$2) { + return res; + } + b = buf[offset + 6]; + res += (b & REST$2) * N6$1; + if (b < MSB$2) { + return res; + } + b = buf[offset + 7]; + res += (b & REST$2) * N7$1; + if (b < MSB$2) { + return res; + } + throw new RangeError('Could not decode varint'); + } + function decodeUint8ArrayList(buf, offset) { + let b = buf.get(offset); + let res = 0; + res += b & REST$2; + if (b < MSB$2) { + return res; + } + b = buf.get(offset + 1); + 
res += (b & REST$2) << 7; + if (b < MSB$2) { + return res; + } + b = buf.get(offset + 2); + res += (b & REST$2) << 14; + if (b < MSB$2) { + return res; + } + b = buf.get(offset + 3); + res += (b & REST$2) << 21; + if (b < MSB$2) { + return res; + } + b = buf.get(offset + 4); + res += (b & REST$2) * N4$1; + if (b < MSB$2) { + return res; + } + b = buf.get(offset + 5); + res += (b & REST$2) * N5$1; + if (b < MSB$2) { + return res; + } + b = buf.get(offset + 6); + res += (b & REST$2) * N6$1; + if (b < MSB$2) { + return res; + } + b = buf.get(offset + 7); + res += (b & REST$2) * N7$1; + if (b < MSB$2) { + return res; + } + throw new RangeError('Could not decode varint'); + } + function encode$5(value, buf, offset = 0) { + if (buf == null) { + buf = allocUnsafe(encodingLength$1(value)); + } + if (buf instanceof Uint8Array) { + return encodeUint8Array(value, buf, offset); + } + else { + return encodeUint8ArrayList(value, buf, offset); + } + } + function decode$6(buf, offset = 0) { + if (buf instanceof Uint8Array) { + return decodeUint8Array(buf, offset); + } + else { + return decodeUint8ArrayList(buf, offset); + } + } + + const f32 = new Float32Array([-0]); + const f8b = new Uint8Array(f32.buffer); + /** + * Writes a 32 bit float to a buffer using little endian byte order + */ + function writeFloatLE(val, buf, pos) { + f32[0] = val; + buf[pos] = f8b[0]; + buf[pos + 1] = f8b[1]; + buf[pos + 2] = f8b[2]; + buf[pos + 3] = f8b[3]; + } + /** + * Reads a 32 bit float from a buffer using little endian byte order + */ + function readFloatLE(buf, pos) { + f8b[0] = buf[pos]; + f8b[1] = buf[pos + 1]; + f8b[2] = buf[pos + 2]; + f8b[3] = buf[pos + 3]; + return f32[0]; + } + const f64 = new Float64Array([-0]); + const d8b = new Uint8Array(f64.buffer); + /** + * Writes a 64 bit double to a buffer using little endian byte order + */ + function writeDoubleLE(val, buf, pos) { + f64[0] = val; + buf[pos] = d8b[0]; + buf[pos + 1] = d8b[1]; + buf[pos + 2] = d8b[2]; + buf[pos + 3] = d8b[3]; + 
buf[pos + 4] = d8b[4]; + buf[pos + 5] = d8b[5]; + buf[pos + 6] = d8b[6]; + buf[pos + 7] = d8b[7]; + } + /** + * Reads a 64 bit double from a buffer using little endian byte order + */ + function readDoubleLE(buf, pos) { + d8b[0] = buf[pos]; + d8b[1] = buf[pos + 1]; + d8b[2] = buf[pos + 2]; + d8b[3] = buf[pos + 3]; + d8b[4] = buf[pos + 4]; + d8b[5] = buf[pos + 5]; + d8b[6] = buf[pos + 6]; + d8b[7] = buf[pos + 7]; + return f64[0]; + } + + // the largest BigInt we can safely downcast to a Number + const MAX_SAFE_NUMBER_INTEGER = BigInt(Number.MAX_SAFE_INTEGER); + const MIN_SAFE_NUMBER_INTEGER = BigInt(Number.MIN_SAFE_INTEGER); + /** + * Constructs new long bits. + * + * @classdesc Helper class for working with the low and high bits of a 64 bit value. + * @memberof util + * @function Object() { [native code] } + * @param {number} lo - Low 32 bits, unsigned + * @param {number} hi - High 32 bits, unsigned + */ + class LongBits { + lo; + hi; + constructor(lo, hi) { + // note that the casts below are theoretically unnecessary as of today, but older statically + // generated converter code might still call the ctor with signed 32bits. kept for compat. 
+ /** + * Low bits + */ + this.lo = lo | 0; + /** + * High bits + */ + this.hi = hi | 0; + } + /** + * Converts this long bits to a possibly unsafe JavaScript number + */ + toNumber(unsigned = false) { + if (!unsigned && (this.hi >>> 31) > 0) { + const lo = ~this.lo + 1 >>> 0; + let hi = ~this.hi >>> 0; + if (lo === 0) { + hi = hi + 1 >>> 0; + } + return -(lo + hi * 4294967296); + } + return this.lo + this.hi * 4294967296; + } + /** + * Converts this long bits to a bigint + */ + toBigInt(unsigned = false) { + if (unsigned) { + return BigInt(this.lo >>> 0) + (BigInt(this.hi >>> 0) << 32n); + } + if ((this.hi >>> 31) !== 0) { + const lo = ~this.lo + 1 >>> 0; + let hi = ~this.hi >>> 0; + if (lo === 0) { + hi = hi + 1 >>> 0; + } + return -(BigInt(lo) + (BigInt(hi) << 32n)); + } + return BigInt(this.lo >>> 0) + (BigInt(this.hi >>> 0) << 32n); + } + /** + * Converts this long bits to a string + */ + toString(unsigned = false) { + return this.toBigInt(unsigned).toString(); + } + /** + * Zig-zag encodes this long bits + */ + zzEncode() { + const mask = this.hi >> 31; + this.hi = ((this.hi << 1 | this.lo >>> 31) ^ mask) >>> 0; + this.lo = (this.lo << 1 ^ mask) >>> 0; + return this; + } + /** + * Zig-zag decodes this long bits + */ + zzDecode() { + const mask = -(this.lo & 1); + this.lo = ((this.lo >>> 1 | this.hi << 31) ^ mask) >>> 0; + this.hi = (this.hi >>> 1 ^ mask) >>> 0; + return this; + } + /** + * Calculates the length of this longbits when encoded as a varint. + */ + length() { + const part0 = this.lo; + const part1 = (this.lo >>> 28 | this.hi << 4) >>> 0; + const part2 = this.hi >>> 24; + return part2 === 0 + ? part1 === 0 + ? part0 < 16384 + ? part0 < 128 ? 1 : 2 + : part0 < 2097152 ? 3 : 4 + : part1 < 16384 + ? part1 < 128 ? 5 : 6 + : part1 < 2097152 ? 7 : 8 + : part2 < 128 ? 
9 : 10; + } + /** + * Constructs new long bits from the specified number + */ + static fromBigInt(value) { + if (value === 0n) { + return zero; + } + if (value < MAX_SAFE_NUMBER_INTEGER && value > MIN_SAFE_NUMBER_INTEGER) { + return this.fromNumber(Number(value)); + } + const negative = value < 0n; + if (negative) { + value = -value; + } + let hi = value >> 32n; + let lo = value - (hi << 32n); + if (negative) { + hi = ~hi | 0n; + lo = ~lo | 0n; + if (++lo > TWO_32) { + lo = 0n; + if (++hi > TWO_32) { + hi = 0n; + } + } + } + return new LongBits(Number(lo), Number(hi)); + } + /** + * Constructs new long bits from the specified number + */ + static fromNumber(value) { + if (value === 0) { + return zero; + } + const sign = value < 0; + if (sign) { + value = -value; + } + let lo = value >>> 0; + let hi = (value - lo) / 4294967296 >>> 0; + if (sign) { + hi = ~hi >>> 0; + lo = ~lo >>> 0; + if (++lo > 4294967295) { + lo = 0; + if (++hi > 4294967295) { + hi = 0; + } + } + } + return new LongBits(lo, hi); + } + /** + * Constructs new long bits from a number, long or string + */ + static from(value) { + if (typeof value === 'number') { + return LongBits.fromNumber(value); + } + if (typeof value === 'bigint') { + return LongBits.fromBigInt(value); + } + if (typeof value === 'string') { + return LongBits.fromBigInt(BigInt(value)); + } + return value.low != null || value.high != null ? 
new LongBits(value.low >>> 0, value.high >>> 0) : zero; + } + } + const zero = new LongBits(0, 0); + zero.toBigInt = function () { return 0n; }; + zero.zzEncode = zero.zzDecode = function () { return this; }; + zero.length = function () { return 1; }; + const TWO_32 = 4294967296n; + + /** + * Calculates the UTF8 byte length of a string + */ + function length$2(string) { + let len = 0; + let c = 0; + for (let i = 0; i < string.length; ++i) { + c = string.charCodeAt(i); + if (c < 128) { + len += 1; + } + else if (c < 2048) { + len += 2; + } + else if ((c & 0xFC00) === 0xD800 && (string.charCodeAt(i + 1) & 0xFC00) === 0xDC00) { + ++i; + len += 4; + } + else { + len += 3; + } + } + return len; + } + /** + * Reads UTF8 bytes as a string + */ + function read$2(buffer, start, end) { + const len = end - start; + if (len < 1) { + return ''; + } + let parts; + const chunk = []; + let i = 0; // char offset + let t; // temporary + while (start < end) { + t = buffer[start++]; + if (t < 128) { + chunk[i++] = t; + } + else if (t > 191 && t < 224) { + chunk[i++] = (t & 31) << 6 | buffer[start++] & 63; + } + else if (t > 239 && t < 365) { + t = ((t & 7) << 18 | (buffer[start++] & 63) << 12 | (buffer[start++] & 63) << 6 | buffer[start++] & 63) - 0x10000; + chunk[i++] = 0xD800 + (t >> 10); + chunk[i++] = 0xDC00 + (t & 1023); + } + else { + chunk[i++] = (t & 15) << 12 | (buffer[start++] & 63) << 6 | buffer[start++] & 63; + } + if (i > 8191) { + (parts ?? 
(parts = [])).push(String.fromCharCode.apply(String, chunk)); + i = 0; + } + } + if (parts != null) { + if (i > 0) { + parts.push(String.fromCharCode.apply(String, chunk.slice(0, i))); + } + return parts.join(''); + } + return String.fromCharCode.apply(String, chunk.slice(0, i)); + } + /** + * Writes a string as UTF8 bytes + */ + function write$1(string, buffer, offset) { + const start = offset; + let c1; // character 1 + let c2; // character 2 + for (let i = 0; i < string.length; ++i) { + c1 = string.charCodeAt(i); + if (c1 < 128) { + buffer[offset++] = c1; + } + else if (c1 < 2048) { + buffer[offset++] = c1 >> 6 | 192; + buffer[offset++] = c1 & 63 | 128; + } + else if ((c1 & 0xFC00) === 0xD800 && ((c2 = string.charCodeAt(i + 1)) & 0xFC00) === 0xDC00) { + c1 = 0x10000 + ((c1 & 0x03FF) << 10) + (c2 & 0x03FF); + ++i; + buffer[offset++] = c1 >> 18 | 240; + buffer[offset++] = c1 >> 12 & 63 | 128; + buffer[offset++] = c1 >> 6 & 63 | 128; + buffer[offset++] = c1 & 63 | 128; + } + else { + buffer[offset++] = c1 >> 12 | 224; + buffer[offset++] = c1 >> 6 & 63 | 128; + buffer[offset++] = c1 & 63 | 128; + } + } + return offset - start; + } + + /* istanbul ignore next */ + function indexOutOfRange(reader, writeLength) { + return RangeError(`index out of range: ${reader.pos} + ${writeLength ?? 1} > ${reader.len}`); + } + function readFixed32End(buf, end) { + return (buf[end - 4] | + buf[end - 3] << 8 | + buf[end - 2] << 16 | + buf[end - 1] << 24) >>> 0; + } + /** + * Constructs a new reader instance using the specified buffer. 
+ */ + class Uint8ArrayReader { + buf; + pos; + len; + _slice = Uint8Array.prototype.subarray; + constructor(buffer) { + /** + * Read buffer + */ + this.buf = buffer; + /** + * Read buffer position + */ + this.pos = 0; + /** + * Read buffer length + */ + this.len = buffer.length; + } + /** + * Reads a varint as an unsigned 32 bit value + */ + uint32() { + let value = 4294967295; + value = (this.buf[this.pos] & 127) >>> 0; + if (this.buf[this.pos++] < 128) + return value; + value = (value | (this.buf[this.pos] & 127) << 7) >>> 0; + if (this.buf[this.pos++] < 128) + return value; + value = (value | (this.buf[this.pos] & 127) << 14) >>> 0; + if (this.buf[this.pos++] < 128) + return value; + value = (value | (this.buf[this.pos] & 127) << 21) >>> 0; + if (this.buf[this.pos++] < 128) + return value; + value = (value | (this.buf[this.pos] & 15) << 28) >>> 0; + if (this.buf[this.pos++] < 128) + return value; + if ((this.pos += 5) > this.len) { + this.pos = this.len; + throw indexOutOfRange(this, 10); + } + return value; + } + /** + * Reads a varint as a signed 32 bit value + */ + int32() { + return this.uint32() | 0; + } + /** + * Reads a zig-zag encoded varint as a signed 32 bit value + */ + sint32() { + const value = this.uint32(); + return value >>> 1 ^ -(value & 1) | 0; + } + /** + * Reads a varint as a boolean + */ + bool() { + return this.uint32() !== 0; + } + /** + * Reads fixed 32 bits as an unsigned 32 bit integer + */ + fixed32() { + if (this.pos + 4 > this.len) { + throw indexOutOfRange(this, 4); + } + const res = readFixed32End(this.buf, this.pos += 4); + return res; + } + /** + * Reads fixed 32 bits as a signed 32 bit integer + */ + sfixed32() { + if (this.pos + 4 > this.len) { + throw indexOutOfRange(this, 4); + } + const res = readFixed32End(this.buf, this.pos += 4) | 0; + return res; + } + /** + * Reads a float (32 bit) as a number + */ + float() { + if (this.pos + 4 > this.len) { + throw indexOutOfRange(this, 4); + } + const value = readFloatLE(this.buf, 
this.pos); + this.pos += 4; + return value; + } + /** + * Reads a double (64 bit float) as a number + */ + double() { + /* istanbul ignore if */ + if (this.pos + 8 > this.len) { + throw indexOutOfRange(this, 4); + } + const value = readDoubleLE(this.buf, this.pos); + this.pos += 8; + return value; + } + /** + * Reads a sequence of bytes preceded by its length as a varint + */ + bytes() { + const length = this.uint32(); + const start = this.pos; + const end = this.pos + length; + /* istanbul ignore if */ + if (end > this.len) { + throw indexOutOfRange(this, length); + } + this.pos += length; + return start === end // fix for IE 10/Win8 and others' subarray returning array of size 1 + ? new Uint8Array(0) + : this.buf.subarray(start, end); + } + /** + * Reads a string preceded by its byte length as a varint + */ + string() { + const bytes = this.bytes(); + return read$2(bytes, 0, bytes.length); + } + /** + * Skips the specified number of bytes if specified, otherwise skips a varint + */ + skip(length) { + if (typeof length === 'number') { + /* istanbul ignore if */ + if (this.pos + length > this.len) { + throw indexOutOfRange(this, length); + } + this.pos += length; + } + else { + do { + /* istanbul ignore if */ + if (this.pos >= this.len) { + throw indexOutOfRange(this); + } + } while ((this.buf[this.pos++] & 128) !== 0); + } + return this; + } + /** + * Skips the next element of the specified wire type + */ + skipType(wireType) { + switch (wireType) { + case 0: + this.skip(); + break; + case 1: + this.skip(8); + break; + case 2: + this.skip(this.uint32()); + break; + case 3: + while ((wireType = this.uint32() & 7) !== 4) { + this.skipType(wireType); + } + break; + case 5: + this.skip(4); + break; + /* istanbul ignore next */ + default: + throw Error(`invalid wire type ${wireType} at offset ${this.pos}`); + } + return this; + } + readLongVarint() { + // tends to deopt with local vars for octet etc. 
+ const bits = new LongBits(0, 0); + let i = 0; + if (this.len - this.pos > 4) { // fast route (lo) + for (; i < 4; ++i) { + // 1st..4th + bits.lo = (bits.lo | (this.buf[this.pos] & 127) << i * 7) >>> 0; + if (this.buf[this.pos++] < 128) { + return bits; + } + } + // 5th + bits.lo = (bits.lo | (this.buf[this.pos] & 127) << 28) >>> 0; + bits.hi = (bits.hi | (this.buf[this.pos] & 127) >> 4) >>> 0; + if (this.buf[this.pos++] < 128) { + return bits; + } + i = 0; + } + else { + for (; i < 3; ++i) { + /* istanbul ignore if */ + if (this.pos >= this.len) { + throw indexOutOfRange(this); + } + // 1st..3th + bits.lo = (bits.lo | (this.buf[this.pos] & 127) << i * 7) >>> 0; + if (this.buf[this.pos++] < 128) { + return bits; + } + } + // 4th + bits.lo = (bits.lo | (this.buf[this.pos++] & 127) << i * 7) >>> 0; + return bits; + } + if (this.len - this.pos > 4) { // fast route (hi) + for (; i < 5; ++i) { + // 6th..10th + bits.hi = (bits.hi | (this.buf[this.pos] & 127) << i * 7 + 3) >>> 0; + if (this.buf[this.pos++] < 128) { + return bits; + } + } + } + else { + for (; i < 5; ++i) { + if (this.pos >= this.len) { + throw indexOutOfRange(this); + } + // 6th..10th + bits.hi = (bits.hi | (this.buf[this.pos] & 127) << i * 7 + 3) >>> 0; + if (this.buf[this.pos++] < 128) { + return bits; + } + } + } + throw Error('invalid varint encoding'); + } + readFixed64() { + if (this.pos + 8 > this.len) { + throw indexOutOfRange(this, 8); + } + const lo = readFixed32End(this.buf, this.pos += 4); + const hi = readFixed32End(this.buf, this.pos += 4); + return new LongBits(lo, hi); + } + /** + * Reads a varint as a signed 64 bit value + */ + int64() { + return this.readLongVarint().toBigInt(); + } + /** + * Reads a varint as a signed 64 bit value returned as a possibly unsafe + * JavaScript number + */ + int64Number() { + return this.readLongVarint().toNumber(); + } + /** + * Reads a varint as a signed 64 bit value returned as a string + */ + int64String() { + return this.readLongVarint().toString(); 
+ } + /** + * Reads a varint as an unsigned 64 bit value + */ + uint64() { + return this.readLongVarint().toBigInt(true); + } + /** + * Reads a varint as an unsigned 64 bit value returned as a possibly unsafe + * JavaScript number + */ + uint64Number() { + const value = decodeUint8Array(this.buf, this.pos); + this.pos += encodingLength$1(value); + return value; + } + /** + * Reads a varint as an unsigned 64 bit value returned as a string + */ + uint64String() { + return this.readLongVarint().toString(true); + } + /** + * Reads a zig-zag encoded varint as a signed 64 bit value + */ + sint64() { + return this.readLongVarint().zzDecode().toBigInt(); + } + /** + * Reads a zig-zag encoded varint as a signed 64 bit value returned as a + * possibly unsafe JavaScript number + */ + sint64Number() { + return this.readLongVarint().zzDecode().toNumber(); + } + /** + * Reads a zig-zag encoded varint as a signed 64 bit value returned as a + * string + */ + sint64String() { + return this.readLongVarint().zzDecode().toString(); + } + /** + * Reads fixed 64 bits + */ + fixed64() { + return this.readFixed64().toBigInt(); + } + /** + * Reads fixed 64 bits returned as a possibly unsafe JavaScript number + */ + fixed64Number() { + return this.readFixed64().toNumber(); + } + /** + * Reads fixed 64 bits returned as a string + */ + fixed64String() { + return this.readFixed64().toString(); + } + /** + * Reads zig-zag encoded fixed 64 bits + */ + sfixed64() { + return this.readFixed64().toBigInt(); + } + /** + * Reads zig-zag encoded fixed 64 bits returned as a possibly unsafe + * JavaScript number + */ + sfixed64Number() { + return this.readFixed64().toNumber(); + } + /** + * Reads zig-zag encoded fixed 64 bits returned as a string + */ + sfixed64String() { + return this.readFixed64().toString(); + } + } + function createReader(buf) { + return new Uint8ArrayReader(buf instanceof Uint8Array ? 
buf : buf.subarray()); + } + + function decodeMessage(buf, codec, opts) { + const reader = createReader(buf); + return codec.decode(reader, undefined, opts); + } + + function equals$2(aa, bb) { + if (aa === bb) + return true; + if (aa.byteLength !== bb.byteLength) { + return false; + } + for (let ii = 0; ii < aa.byteLength; ii++) { + if (aa[ii] !== bb[ii]) { + return false; + } + } + return true; + } + function coerce(o) { + if (o instanceof Uint8Array && o.constructor.name === 'Uint8Array') + return o; + if (o instanceof ArrayBuffer) + return new Uint8Array(o); + if (ArrayBuffer.isView(o)) { + return new Uint8Array(o.buffer, o.byteOffset, o.byteLength); + } + throw new Error('Unknown type, must be binary type'); + } + function fromString$1(str) { + return new TextEncoder().encode(str); + } + function toString$2(b) { + return new TextDecoder().decode(b); + } + + /* eslint-disable */ + // base-x encoding / decoding + // Copyright (c) 2018 base-x contributors + // Copyright (c) 2014-2018 The Bitcoin Core developers (base58.cpp) + // Distributed under the MIT software license, see the accompanying + // file LICENSE or http://www.opensource.org/licenses/mit-license.php. 
+ /** + * @param {string} ALPHABET + * @param {any} name + */ + function base$1(ALPHABET, name) { + if (ALPHABET.length >= 255) { + throw new TypeError('Alphabet too long'); + } + var BASE_MAP = new Uint8Array(256); + for (var j = 0; j < BASE_MAP.length; j++) { + BASE_MAP[j] = 255; + } + for (var i = 0; i < ALPHABET.length; i++) { + var x = ALPHABET.charAt(i); + var xc = x.charCodeAt(0); + if (BASE_MAP[xc] !== 255) { + throw new TypeError(x + ' is ambiguous'); + } + BASE_MAP[xc] = i; + } + var BASE = ALPHABET.length; + var LEADER = ALPHABET.charAt(0); + var FACTOR = Math.log(BASE) / Math.log(256); // log(BASE) / log(256), rounded up + var iFACTOR = Math.log(256) / Math.log(BASE); // log(256) / log(BASE), rounded up + /** + * @param {any[] | Iterable} source + */ + function encode(source) { + // @ts-ignore + if (source instanceof Uint8Array) + ; + else if (ArrayBuffer.isView(source)) { + source = new Uint8Array(source.buffer, source.byteOffset, source.byteLength); + } + else if (Array.isArray(source)) { + source = Uint8Array.from(source); + } + if (!(source instanceof Uint8Array)) { + throw new TypeError('Expected Uint8Array'); + } + if (source.length === 0) { + return ''; + } + // Skip & count leading zeroes. + var zeroes = 0; + var length = 0; + var pbegin = 0; + var pend = source.length; + while (pbegin !== pend && source[pbegin] === 0) { + pbegin++; + zeroes++; + } + // Allocate enough space in big-endian base58 representation. + var size = ((pend - pbegin) * iFACTOR + 1) >>> 0; + var b58 = new Uint8Array(size); + // Process the bytes. + while (pbegin !== pend) { + var carry = source[pbegin]; + // Apply "b58 = b58 * 256 + ch". + var i = 0; + for (var it1 = size - 1; (carry !== 0 || i < length) && (it1 !== -1); it1--, i++) { + carry += (256 * b58[it1]) >>> 0; + b58[it1] = (carry % BASE) >>> 0; + carry = (carry / BASE) >>> 0; + } + if (carry !== 0) { + throw new Error('Non-zero carry'); + } + length = i; + pbegin++; + } + // Skip leading zeroes in base58 result. 
+ var it2 = size - length; + while (it2 !== size && b58[it2] === 0) { + it2++; + } + // Translate the result into a string. + var str = LEADER.repeat(zeroes); + for (; it2 < size; ++it2) { + str += ALPHABET.charAt(b58[it2]); + } + return str; + } + /** + * @param {string | string[]} source + */ + function decodeUnsafe(source) { + if (typeof source !== 'string') { + throw new TypeError('Expected String'); + } + if (source.length === 0) { + return new Uint8Array(); + } + var psz = 0; + // Skip leading spaces. + if (source[psz] === ' ') { + return; + } + // Skip and count leading '1's. + var zeroes = 0; + var length = 0; + while (source[psz] === LEADER) { + zeroes++; + psz++; + } + // Allocate enough space in big-endian base256 representation. + var size = (((source.length - psz) * FACTOR) + 1) >>> 0; // log(58) / log(256), rounded up. + var b256 = new Uint8Array(size); + // Process the characters. + while (source[psz]) { + // Decode character + var carry = BASE_MAP[source.charCodeAt(psz)]; + // Invalid character + if (carry === 255) { + return; + } + var i = 0; + for (var it3 = size - 1; (carry !== 0 || i < length) && (it3 !== -1); it3--, i++) { + carry += (BASE * b256[it3]) >>> 0; + b256[it3] = (carry % 256) >>> 0; + carry = (carry / 256) >>> 0; + } + if (carry !== 0) { + throw new Error('Non-zero carry'); + } + length = i; + psz++; + } + // Skip trailing spaces. + if (source[psz] === ' ') { + return; + } + // Skip leading zeroes in b256. 
+ var it4 = size - length; + while (it4 !== size && b256[it4] === 0) { + it4++; + } + var vch = new Uint8Array(zeroes + (size - it4)); + var j = zeroes; + while (it4 !== size) { + vch[j++] = b256[it4++]; + } + return vch; + } + /** + * @param {string | string[]} string + */ + function decode(string) { + var buffer = decodeUnsafe(string); + if (buffer) { + return buffer; + } + throw new Error(`Non-${name} character`); + } + return { + encode: encode, + decodeUnsafe: decodeUnsafe, + decode: decode + }; + } + var src = base$1; + var _brrp__multiformats_scope_baseX = src; + + /** + * Class represents both BaseEncoder and MultibaseEncoder meaning it + * can be used to encode to multibase or base encode without multibase + * prefix. + */ + class Encoder { + name; + prefix; + baseEncode; + constructor(name, prefix, baseEncode) { + this.name = name; + this.prefix = prefix; + this.baseEncode = baseEncode; + } + encode(bytes) { + if (bytes instanceof Uint8Array) { + return `${this.prefix}${this.baseEncode(bytes)}`; + } + else { + throw Error('Unknown type, must be binary type'); + } + } + } + /** + * Class represents both BaseDecoder and MultibaseDecoder so it could be used + * to decode multibases (with matching prefix) or just base decode strings + * with corresponding base encoding. 
+ */ + let Decoder$1 = class Decoder { + name; + prefix; + baseDecode; + prefixCodePoint; + constructor(name, prefix, baseDecode) { + this.name = name; + this.prefix = prefix; + /* c8 ignore next 3 */ + if (prefix.codePointAt(0) === undefined) { + throw new Error('Invalid prefix character'); + } + this.prefixCodePoint = prefix.codePointAt(0); + this.baseDecode = baseDecode; + } + decode(text) { + if (typeof text === 'string') { + if (text.codePointAt(0) !== this.prefixCodePoint) { + throw Error(`Unable to decode multibase string ${JSON.stringify(text)}, ${this.name} decoder only supports inputs prefixed with ${this.prefix}`); + } + return this.baseDecode(text.slice(this.prefix.length)); + } + else { + throw Error('Can only multibase decode strings'); + } + } + or(decoder) { + return or$2(this, decoder); + } + }; + class ComposedDecoder { + decoders; + constructor(decoders) { + this.decoders = decoders; + } + or(decoder) { + return or$2(this, decoder); + } + decode(input) { + const prefix = input[0]; + const decoder = this.decoders[prefix]; + if (decoder != null) { + return decoder.decode(input); + } + else { + throw RangeError(`Unable to decode multibase string ${JSON.stringify(input)}, only inputs prefixed with ${Object.keys(this.decoders)} are supported`); + } + } + } + function or$2(left, right) { + // eslint-disable-next-line @typescript-eslint/consistent-type-assertions + return new ComposedDecoder({ + ...(left.decoders ?? { [left.prefix]: left }), + ...(right.decoders ?? 
{ [right.prefix]: right }) + }); + } + class Codec { + name; + prefix; + baseEncode; + baseDecode; + encoder; + decoder; + constructor(name, prefix, baseEncode, baseDecode) { + this.name = name; + this.prefix = prefix; + this.baseEncode = baseEncode; + this.baseDecode = baseDecode; + this.encoder = new Encoder(name, prefix, baseEncode); + this.decoder = new Decoder$1(name, prefix, baseDecode); + } + encode(input) { + return this.encoder.encode(input); + } + decode(input) { + return this.decoder.decode(input); + } + } + function from$1({ name, prefix, encode, decode }) { + return new Codec(name, prefix, encode, decode); + } + function baseX({ name, prefix, alphabet }) { + const { encode, decode } = _brrp__multiformats_scope_baseX(alphabet, name); + return from$1({ + prefix, + name, + encode, + decode: (text) => coerce(decode(text)) + }); + } + function decode$5(string, alphabet, bitsPerChar, name) { + // Build the character lookup table: + const codes = {}; + for (let i = 0; i < alphabet.length; ++i) { + codes[alphabet[i]] = i; + } + // Count the padding bytes: + let end = string.length; + while (string[end - 1] === '=') { + --end; + } + // Allocate the output: + const out = new Uint8Array((end * bitsPerChar / 8) | 0); + // Parse the data: + let bits = 0; // Number of bits currently in the buffer + let buffer = 0; // Bits waiting to be written out, MSB first + let written = 0; // Next byte to write + for (let i = 0; i < end; ++i) { + // Read one character from the string: + const value = codes[string[i]]; + if (value === undefined) { + throw new SyntaxError(`Non-${name} character`); + } + // Append the bits to the buffer: + buffer = (buffer << bitsPerChar) | value; + bits += bitsPerChar; + // Write out some bits if the buffer has a byte's worth: + if (bits >= 8) { + bits -= 8; + out[written++] = 0xff & (buffer >> bits); + } + } + // Verify that we have received just enough bits: + if (bits >= bitsPerChar || (0xff & (buffer << (8 - bits))) !== 0) { + throw new 
SyntaxError('Unexpected end of data'); + } + return out; + } + function encode$4(data, alphabet, bitsPerChar) { + const pad = alphabet[alphabet.length - 1] === '='; + const mask = (1 << bitsPerChar) - 1; + let out = ''; + let bits = 0; // Number of bits currently in the buffer + let buffer = 0; // Bits waiting to be written out, MSB first + for (let i = 0; i < data.length; ++i) { + // Slurp data into the buffer: + buffer = (buffer << 8) | data[i]; + bits += 8; + // Write out as much as we can: + while (bits > bitsPerChar) { + bits -= bitsPerChar; + out += alphabet[mask & (buffer >> bits)]; + } + } + // Partial character: + if (bits !== 0) { + out += alphabet[mask & (buffer << (bitsPerChar - bits))]; + } + // Add padding characters until we hit a byte boundary: + if (pad) { + while (((out.length * bitsPerChar) & 7) !== 0) { + out += '='; + } + } + return out; + } + /** + * RFC4648 Factory + */ + function rfc4648({ name, prefix, bitsPerChar, alphabet }) { + return from$1({ + prefix, + name, + encode(input) { + return encode$4(input, alphabet, bitsPerChar); + }, + decode(input) { + return decode$5(input, alphabet, bitsPerChar, name); + } + }); + } + + const base10 = baseX({ + prefix: '9', + name: 'base10', + alphabet: '0123456789' + }); + + var base10$1 = /*#__PURE__*/Object.freeze({ + __proto__: null, + base10: base10 + }); + + const base16 = rfc4648({ + prefix: 'f', + name: 'base16', + alphabet: '0123456789abcdef', + bitsPerChar: 4 + }); + const base16upper = rfc4648({ + prefix: 'F', + name: 'base16upper', + alphabet: '0123456789ABCDEF', + bitsPerChar: 4 + }); + + var base16$1 = /*#__PURE__*/Object.freeze({ + __proto__: null, + base16: base16, + base16upper: base16upper + }); + + const base2 = rfc4648({ + prefix: '0', + name: 'base2', + alphabet: '01', + bitsPerChar: 1 + }); + + var base2$1 = /*#__PURE__*/Object.freeze({ + __proto__: null, + base2: base2 + }); + + const alphabet = 
Array.from('๐Ÿš€๐Ÿชโ˜„๐Ÿ›ฐ๐ŸŒŒ๐ŸŒ‘๐ŸŒ’๐ŸŒ“๐ŸŒ”๐ŸŒ•๐ŸŒ–๐ŸŒ—๐ŸŒ˜๐ŸŒ๐ŸŒ๐ŸŒŽ๐Ÿ‰โ˜€๐Ÿ’ป๐Ÿ–ฅ๐Ÿ’พ๐Ÿ’ฟ๐Ÿ˜‚โค๐Ÿ˜๐Ÿคฃ๐Ÿ˜Š๐Ÿ™๐Ÿ’•๐Ÿ˜ญ๐Ÿ˜˜๐Ÿ‘๐Ÿ˜…๐Ÿ‘๐Ÿ˜๐Ÿ”ฅ๐Ÿฅฐ๐Ÿ’”๐Ÿ’–๐Ÿ’™๐Ÿ˜ข๐Ÿค”๐Ÿ˜†๐Ÿ™„๐Ÿ’ช๐Ÿ˜‰โ˜บ๐Ÿ‘Œ๐Ÿค—๐Ÿ’œ๐Ÿ˜”๐Ÿ˜Ž๐Ÿ˜‡๐ŸŒน๐Ÿคฆ๐ŸŽ‰๐Ÿ’žโœŒโœจ๐Ÿคท๐Ÿ˜ฑ๐Ÿ˜Œ๐ŸŒธ๐Ÿ™Œ๐Ÿ˜‹๐Ÿ’—๐Ÿ’š๐Ÿ˜๐Ÿ’›๐Ÿ™‚๐Ÿ’“๐Ÿคฉ๐Ÿ˜„๐Ÿ˜€๐Ÿ–ค๐Ÿ˜ƒ๐Ÿ’ฏ๐Ÿ™ˆ๐Ÿ‘‡๐ŸŽถ๐Ÿ˜’๐Ÿคญโฃ๐Ÿ˜œ๐Ÿ’‹๐Ÿ‘€๐Ÿ˜ช๐Ÿ˜‘๐Ÿ’ฅ๐Ÿ™‹๐Ÿ˜ž๐Ÿ˜ฉ๐Ÿ˜ก๐Ÿคช๐Ÿ‘Š๐Ÿฅณ๐Ÿ˜ฅ๐Ÿคค๐Ÿ‘‰๐Ÿ’ƒ๐Ÿ˜ณโœ‹๐Ÿ˜š๐Ÿ˜๐Ÿ˜ด๐ŸŒŸ๐Ÿ˜ฌ๐Ÿ™ƒ๐Ÿ€๐ŸŒท๐Ÿ˜ป๐Ÿ˜“โญโœ…๐Ÿฅบ๐ŸŒˆ๐Ÿ˜ˆ๐Ÿค˜๐Ÿ’ฆโœ”๐Ÿ˜ฃ๐Ÿƒ๐Ÿ’โ˜น๐ŸŽŠ๐Ÿ’˜๐Ÿ˜ โ˜๐Ÿ˜•๐ŸŒบ๐ŸŽ‚๐ŸŒป๐Ÿ˜๐Ÿ–•๐Ÿ’๐Ÿ™Š๐Ÿ˜น๐Ÿ—ฃ๐Ÿ’ซ๐Ÿ’€๐Ÿ‘‘๐ŸŽต๐Ÿคž๐Ÿ˜›๐Ÿ”ด๐Ÿ˜ค๐ŸŒผ๐Ÿ˜ซโšฝ๐Ÿค™โ˜•๐Ÿ†๐Ÿคซ๐Ÿ‘ˆ๐Ÿ˜ฎ๐Ÿ™†๐Ÿป๐Ÿƒ๐Ÿถ๐Ÿ’๐Ÿ˜ฒ๐ŸŒฟ๐Ÿงก๐ŸŽโšก๐ŸŒž๐ŸŽˆโŒโœŠ๐Ÿ‘‹๐Ÿ˜ฐ๐Ÿคจ๐Ÿ˜ถ๐Ÿค๐Ÿšถ๐Ÿ’ฐ๐Ÿ“๐Ÿ’ข๐ŸคŸ๐Ÿ™๐Ÿšจ๐Ÿ’จ๐Ÿคฌโœˆ๐ŸŽ€๐Ÿบ๐Ÿค“๐Ÿ˜™๐Ÿ’Ÿ๐ŸŒฑ๐Ÿ˜–๐Ÿ‘ถ๐Ÿฅดโ–ถโžกโ“๐Ÿ’Ž๐Ÿ’ธโฌ‡๐Ÿ˜จ๐ŸŒš๐Ÿฆ‹๐Ÿ˜ท๐Ÿ•บโš ๐Ÿ™…๐Ÿ˜Ÿ๐Ÿ˜ต๐Ÿ‘Ž๐Ÿคฒ๐Ÿค ๐Ÿคง๐Ÿ“Œ๐Ÿ”ต๐Ÿ’…๐Ÿง๐Ÿพ๐Ÿ’๐Ÿ˜—๐Ÿค‘๐ŸŒŠ๐Ÿคฏ๐Ÿทโ˜Ž๐Ÿ’ง๐Ÿ˜ฏ๐Ÿ’†๐Ÿ‘†๐ŸŽค๐Ÿ™‡๐Ÿ‘โ„๐ŸŒด๐Ÿ’ฃ๐Ÿธ๐Ÿ’Œ๐Ÿ“๐Ÿฅ€๐Ÿคข๐Ÿ‘…๐Ÿ’ก๐Ÿ’ฉ๐Ÿ‘๐Ÿ“ธ๐Ÿ‘ป๐Ÿค๐Ÿคฎ๐ŸŽผ๐Ÿฅต๐Ÿšฉ๐ŸŽ๐ŸŠ๐Ÿ‘ผ๐Ÿ’๐Ÿ“ฃ๐Ÿฅ‚'); + const alphabetBytesToChars = (alphabet.reduce((p, c, i) => { p[i] = c; return p; }, ([]))); + const alphabetCharsToBytes = (alphabet.reduce((p, c, i) => { p[c.codePointAt(0)] = i; return p; }, ([]))); + function encode$3(data) { + return data.reduce((p, c) => { + p += alphabetBytesToChars[c]; + return p; + }, ''); + } + function decode$4(str) { + const byts = []; + for (const char of str) { + const byt = alphabetCharsToBytes[char.codePointAt(0)]; + if (byt === undefined) { + throw new Error(`Non-base256emoji character: ${char}`); + } + byts.push(byt); + } + return new Uint8Array(byts); + } + const base256emoji = from$1({ + prefix: '๐Ÿš€', + name: 'base256emoji', + encode: encode$3, + decode: decode$4 + }); + + var base256emoji$1 = /*#__PURE__*/Object.freeze({ + __proto__: null, + base256emoji: base256emoji + }); + + const base32 = rfc4648({ + prefix: 'b', + name: 'base32', + alphabet: 'abcdefghijklmnopqrstuvwxyz234567', + bitsPerChar: 5 + }); + const base32upper = 
rfc4648({ + prefix: 'B', + name: 'base32upper', + alphabet: 'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567', + bitsPerChar: 5 + }); + const base32pad = rfc4648({ + prefix: 'c', + name: 'base32pad', + alphabet: 'abcdefghijklmnopqrstuvwxyz234567=', + bitsPerChar: 5 + }); + const base32padupper = rfc4648({ + prefix: 'C', + name: 'base32padupper', + alphabet: 'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567=', + bitsPerChar: 5 + }); + const base32hex = rfc4648({ + prefix: 'v', + name: 'base32hex', + alphabet: '0123456789abcdefghijklmnopqrstuv', + bitsPerChar: 5 + }); + const base32hexupper = rfc4648({ + prefix: 'V', + name: 'base32hexupper', + alphabet: '0123456789ABCDEFGHIJKLMNOPQRSTUV', + bitsPerChar: 5 + }); + const base32hexpad = rfc4648({ + prefix: 't', + name: 'base32hexpad', + alphabet: '0123456789abcdefghijklmnopqrstuv=', + bitsPerChar: 5 + }); + const base32hexpadupper = rfc4648({ + prefix: 'T', + name: 'base32hexpadupper', + alphabet: '0123456789ABCDEFGHIJKLMNOPQRSTUV=', + bitsPerChar: 5 + }); + const base32z = rfc4648({ + prefix: 'h', + name: 'base32z', + alphabet: 'ybndrfg8ejkmcpqxot1uwisza345h769', + bitsPerChar: 5 + }); + + var base32$1 = /*#__PURE__*/Object.freeze({ + __proto__: null, + base32: base32, + base32hex: base32hex, + base32hexpad: base32hexpad, + base32hexpadupper: base32hexpadupper, + base32hexupper: base32hexupper, + base32pad: base32pad, + base32padupper: base32padupper, + base32upper: base32upper, + base32z: base32z + }); + + const base36 = baseX({ + prefix: 'k', + name: 'base36', + alphabet: '0123456789abcdefghijklmnopqrstuvwxyz' + }); + const base36upper = baseX({ + prefix: 'K', + name: 'base36upper', + alphabet: '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ' + }); + + var base36$1 = /*#__PURE__*/Object.freeze({ + __proto__: null, + base36: base36, + base36upper: base36upper + }); + + const base58btc = baseX({ + name: 'base58btc', + prefix: 'z', + alphabet: '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz' + }); + const base58flickr = baseX({ + name: 
'base58flickr', + prefix: 'Z', + alphabet: '123456789abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ' + }); + + var base58 = /*#__PURE__*/Object.freeze({ + __proto__: null, + base58btc: base58btc, + base58flickr: base58flickr + }); + + const base64 = rfc4648({ + prefix: 'm', + name: 'base64', + alphabet: 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/', + bitsPerChar: 6 + }); + const base64pad = rfc4648({ + prefix: 'M', + name: 'base64pad', + alphabet: 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=', + bitsPerChar: 6 + }); + const base64url = rfc4648({ + prefix: 'u', + name: 'base64url', + alphabet: 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_', + bitsPerChar: 6 + }); + const base64urlpad = rfc4648({ + prefix: 'U', + name: 'base64urlpad', + alphabet: 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_=', + bitsPerChar: 6 + }); + + var base64$1 = /*#__PURE__*/Object.freeze({ + __proto__: null, + base64: base64, + base64pad: base64pad, + base64url: base64url, + base64urlpad: base64urlpad + }); + + const base8 = rfc4648({ + prefix: '7', + name: 'base8', + alphabet: '01234567', + bitsPerChar: 3 + }); + + var base8$1 = /*#__PURE__*/Object.freeze({ + __proto__: null, + base8: base8 + }); + + const identity$1 = from$1({ + prefix: '\x00', + name: 'identity', + encode: (buf) => toString$2(buf), + decode: (str) => fromString$1(str) + }); + + var identityBase = /*#__PURE__*/Object.freeze({ + __proto__: null, + identity: identity$1 + }); + + new TextEncoder(); + new TextDecoder(); + + /* eslint-disable */ + var encode_1 = encode$2; + var MSB = 0x80, REST = 0x7F, MSBALL = ~REST, INT = Math.pow(2, 31); + /** + * @param {number} num + * @param {number[]} out + * @param {number} offset + */ + function encode$2(num, out, offset) { + out = out || []; + offset = offset || 0; + var oldOffset = offset; + while (num >= INT) { + out[offset++] = (num & 0xFF) | MSB; + num /= 128; + } + while (num & MSBALL) { + 
out[offset++] = (num & 0xFF) | MSB; + num >>>= 7; + } + out[offset] = num | 0; + // @ts-ignore + encode$2.bytes = offset - oldOffset + 1; + return out; + } + var decode$3 = read$1; + var MSB$1 = 0x80, REST$1 = 0x7F; + /** + * @param {string | any[]} buf + * @param {number} offset + */ + function read$1(buf, offset) { + var res = 0, offset = offset || 0, shift = 0, counter = offset, b, l = buf.length; + do { + if (counter >= l) { + // @ts-ignore + read$1.bytes = 0; + throw new RangeError('Could not decode varint'); + } + b = buf[counter++]; + res += shift < 28 + ? (b & REST$1) << shift + : (b & REST$1) * Math.pow(2, shift); + shift += 7; + } while (b >= MSB$1); + // @ts-ignore + read$1.bytes = counter - offset; + return res; + } + var N1 = Math.pow(2, 7); + var N2 = Math.pow(2, 14); + var N3 = Math.pow(2, 21); + var N4 = Math.pow(2, 28); + var N5 = Math.pow(2, 35); + var N6 = Math.pow(2, 42); + var N7 = Math.pow(2, 49); + var N8 = Math.pow(2, 56); + var N9 = Math.pow(2, 63); + var length$1 = function (/** @type {number} */ value) { + return (value < N1 ? 1 + : value < N2 ? 2 + : value < N3 ? 3 + : value < N4 ? 4 + : value < N5 ? 5 + : value < N6 ? 6 + : value < N7 ? 7 + : value < N8 ? 8 + : value < N9 ? 9 + : 10); + }; + var varint = { + encode: encode_1, + decode: decode$3, + encodingLength: length$1 + }; + var _brrp_varint = varint; + + function decode$2(data, offset = 0) { + const code = _brrp_varint.decode(data, offset); + return [code, _brrp_varint.decode.bytes]; + } + function encodeTo(int, target, offset = 0) { + _brrp_varint.encode(int, target, offset); + return target; + } + function encodingLength(int) { + return _brrp_varint.encodingLength(int); + } + + /** + * Creates a multihash digest. 
+ */ + function create$1(code, digest) { + const size = digest.byteLength; + const sizeOffset = encodingLength(code); + const digestOffset = sizeOffset + encodingLength(size); + const bytes = new Uint8Array(digestOffset + size); + encodeTo(code, bytes, 0); + encodeTo(size, bytes, sizeOffset); + bytes.set(digest, digestOffset); + return new Digest(code, size, digest, bytes); + } + /** + * Turns bytes representation of multihash digest into an instance. + */ + function decode$1(multihash) { + const bytes = coerce(multihash); + const [code, sizeOffset] = decode$2(bytes); + const [size, digestOffset] = decode$2(bytes.subarray(sizeOffset)); + const digest = bytes.subarray(sizeOffset + digestOffset); + if (digest.byteLength !== size) { + throw new Error('Incorrect length'); + } + return new Digest(code, size, digest, bytes); + } + function equals$1(a, b) { + if (a === b) { + return true; + } + else { + const data = b; + return (a.code === data.code && + a.size === data.size && + data.bytes instanceof Uint8Array && + equals$2(a.bytes, data.bytes)); + } + } + /** + * Represents a multihash digest which carries information about the + * hashing algorithm and an actual hash digest. + */ + class Digest { + code; + size; + digest; + bytes; + /** + * Creates a multihash digest. + */ + constructor(code, size, digest, bytes) { + this.code = code; + this.size = size; + this.digest = digest; + this.bytes = bytes; + } + } + + const code = 0x0; + const name$1 = 'identity'; + const encode$1 = coerce; + function digest(input) { + return create$1(code, encode$1(input)); + } + const identity = { code, name: name$1, encode: encode$1, digest }; + + function from({ name, code, encode }) { + return new Hasher(name, code, encode); + } + /** + * Hasher represents a hashing algorithm implementation that produces as + * `MultihashDigest`. 
+ */ + class Hasher { + name; + code; + encode; + constructor(name, code, encode) { + this.name = name; + this.code = code; + this.encode = encode; + } + digest(input) { + if (input instanceof Uint8Array) { + const result = this.encode(input); + return result instanceof Uint8Array + ? create$1(this.code, result) + /* c8 ignore next 1 */ + : result.then(digest => create$1(this.code, digest)); + } + else { + throw Error('Unknown type, must be binary type'); + /* c8 ignore next 1 */ + } + } + } + + /* global crypto */ + function sha(name) { + return async (data) => new Uint8Array(await crypto.subtle.digest(name, data)); + } + const sha256$1 = from({ + name: 'sha2-256', + code: 0x12, + encode: sha('SHA-256') + }); + + function format(link, base) { + const { bytes, version } = link; + switch (version) { + case 0: + return toStringV0(bytes, baseCache(link), base ?? base58btc.encoder); + default: + return toStringV1(bytes, baseCache(link), (base ?? base32.encoder)); + } + } + const cache$2 = new WeakMap(); + function baseCache(cid) { + const baseCache = cache$2.get(cid); + if (baseCache == null) { + const baseCache = new Map(); + cache$2.set(cid, baseCache); + return baseCache; + } + return baseCache; + } + class CID { + code; + version; + multihash; + bytes; + '/'; + /** + * @param version - Version of the CID + * @param code - Code of the codec content is encoded in, see https://github.com/multiformats/multicodec/blob/master/table.csv + * @param multihash - (Multi)hash of the of the content. 
+ */ + constructor(version, code, multihash, bytes) { + this.code = code; + this.version = version; + this.multihash = multihash; + this.bytes = bytes; + // flag to serializers that this is a CID and + // should be treated specially + this['/'] = bytes; + } + /** + * Signalling `cid.asCID === cid` has been replaced with `cid['/'] === cid.bytes` + * please either use `CID.asCID(cid)` or switch to new signalling mechanism + * + * @deprecated + */ + get asCID() { + return this; + } + // ArrayBufferView + get byteOffset() { + return this.bytes.byteOffset; + } + // ArrayBufferView + get byteLength() { + return this.bytes.byteLength; + } + toV0() { + switch (this.version) { + case 0: { + return this; + } + case 1: { + const { code, multihash } = this; + if (code !== DAG_PB_CODE) { + throw new Error('Cannot convert a non dag-pb CID to CIDv0'); + } + // sha2-256 + if (multihash.code !== SHA_256_CODE) { + throw new Error('Cannot convert non sha2-256 multihash CID to CIDv0'); + } + return (CID.createV0(multihash)); + } + default: { + throw Error(`Can not convert CID version ${this.version} to version 0. This is a bug please report`); + } + } + } + toV1() { + switch (this.version) { + case 0: { + const { code, digest } = this.multihash; + const multihash = create$1(code, digest); + return (CID.createV1(this.code, multihash)); + } + case 1: { + return this; + } + default: { + throw Error(`Can not convert CID version ${this.version} to version 1. 
This is a bug please report`); + } + } + } + equals(other) { + return CID.equals(this, other); + } + static equals(self, other) { + const unknown = other; + return (unknown != null && + self.code === unknown.code && + self.version === unknown.version && + equals$1(self.multihash, unknown.multihash)); + } + toString(base) { + return format(this, base); + } + toJSON() { + return { '/': format(this) }; + } + link() { + return this; + } + [Symbol.toStringTag] = 'CID'; + // Legacy + [Symbol.for('nodejs.util.inspect.custom')]() { + return `CID(${this.toString()})`; + } + /** + * Takes any input `value` and returns a `CID` instance if it was + * a `CID` otherwise returns `null`. If `value` is instanceof `CID` + * it will return value back. If `value` is not instance of this CID + * class, but is compatible CID it will return new instance of this + * `CID` class. Otherwise returns null. + * + * This allows two different incompatible versions of CID library to + * co-exist and interop as long as binary interface is compatible. + */ + static asCID(input) { + if (input == null) { + return null; + } + const value = input; + if (value instanceof CID) { + // If value is instance of CID then we're all set. + return value; + } + else if ((value['/'] != null && value['/'] === value.bytes) || value.asCID === value) { + // If value isn't instance of this CID class but `this.asCID === this` or + // `value['/'] === value.bytes` is true it is CID instance coming from a + // different implementation (diff version or duplicate). In that case we + // rebase it to this `CID` implementation so caller is guaranteed to get + // instance with expected API. + const { version, code, multihash, bytes } = value; + return new CID(version, code, multihash, bytes ?? 
encodeCID(version, code, multihash.bytes)); + } + else if (value[cidSymbol] === true) { + // If value is a CID from older implementation that used to be tagged via + // symbol we still rebase it to the this `CID` implementation by + // delegating that to a constructor. + const { version, multihash, code } = value; + const digest = decode$1(multihash); + return CID.create(version, code, digest); + } + else { + // Otherwise value is not a CID (or an incompatible version of it) in + // which case we return `null`. + return null; + } + } + /** + * @param version - Version of the CID + * @param code - Code of the codec content is encoded in, see https://github.com/multiformats/multicodec/blob/master/table.csv + * @param digest - (Multi)hash of the of the content. + */ + static create(version, code, digest) { + if (typeof code !== 'number') { + throw new Error('String codecs are no longer supported'); + } + if (!(digest.bytes instanceof Uint8Array)) { + throw new Error('Invalid digest'); + } + switch (version) { + case 0: { + if (code !== DAG_PB_CODE) { + throw new Error(`Version 0 CID must use dag-pb (code: ${DAG_PB_CODE}) block encoding`); + } + else { + return new CID(version, code, digest, digest.bytes); + } + } + case 1: { + const bytes = encodeCID(version, code, digest.bytes); + return new CID(version, code, digest, bytes); + } + default: { + throw new Error('Invalid version'); + } + } + } + /** + * Simplified version of `create` for CIDv0. + */ + static createV0(digest) { + return CID.create(0, DAG_PB_CODE, digest); + } + /** + * Simplified version of `create` for CIDv1. + * + * @param code - Content encoding format code. + * @param digest - Multihash of the content. + */ + static createV1(code, digest) { + return CID.create(1, code, digest); + } + /** + * Decoded a CID from its binary representation. The byte array must contain + * only the CID with no additional bytes. 
+ * + * An error will be thrown if the bytes provided do not contain a valid + * binary representation of a CID. + */ + static decode(bytes) { + const [cid, remainder] = CID.decodeFirst(bytes); + if (remainder.length !== 0) { + throw new Error('Incorrect length'); + } + return cid; + } + /** + * Decoded a CID from its binary representation at the beginning of a byte + * array. + * + * Returns an array with the first element containing the CID and the second + * element containing the remainder of the original byte array. The remainder + * will be a zero-length byte array if the provided bytes only contained a + * binary CID representation. + */ + static decodeFirst(bytes) { + const specs = CID.inspectBytes(bytes); + const prefixSize = specs.size - specs.multihashSize; + const multihashBytes = coerce(bytes.subarray(prefixSize, prefixSize + specs.multihashSize)); + if (multihashBytes.byteLength !== specs.multihashSize) { + throw new Error('Incorrect length'); + } + const digestBytes = multihashBytes.subarray(specs.multihashSize - specs.digestSize); + const digest = new Digest(specs.multihashCode, specs.digestSize, digestBytes, multihashBytes); + const cid = specs.version === 0 + ? CID.createV0(digest) + : CID.createV1(specs.codec, digest); + return [cid, bytes.subarray(specs.size)]; + } + /** + * Inspect the initial bytes of a CID to determine its properties. + * + * Involves decoding up to 4 varints. Typically this will require only 4 to 6 + * bytes but for larger multicodec code values and larger multihash digest + * lengths these varints can be quite large. It is recommended that at least + * 10 bytes be made available in the `initialBytes` argument for a complete + * inspection. 
+ */ + static inspectBytes(initialBytes) { + let offset = 0; + const next = () => { + const [i, length] = decode$2(initialBytes.subarray(offset)); + offset += length; + return i; + }; + let version = next(); + let codec = DAG_PB_CODE; + if (version === 18) { + // CIDv0 + version = 0; + offset = 0; + } + else { + codec = next(); + } + if (version !== 0 && version !== 1) { + throw new RangeError(`Invalid CID version ${version}`); + } + const prefixSize = offset; + const multihashCode = next(); // multihash code + const digestSize = next(); // multihash length + const size = offset + digestSize; + const multihashSize = size - prefixSize; + return { version, codec, multihashCode, digestSize, multihashSize, size }; + } + /** + * Takes cid in a string representation and creates an instance. If `base` + * decoder is not provided will use a default from the configuration. It will + * throw an error if encoding of the CID is not compatible with supplied (or + * a default decoder). + */ + static parse(source, base) { + const [prefix, bytes] = parseCIDtoBytes(source, base); + const cid = CID.decode(bytes); + if (cid.version === 0 && source[0] !== 'Q') { + throw Error('Version 0 CID string must not include multibase prefix'); + } + // Cache string representation to avoid computing it on `this.toString()` + baseCache(cid).set(prefix, source); + return cid; + } + } + function parseCIDtoBytes(source, base) { + switch (source[0]) { + // CIDv0 is parsed differently + case 'Q': { + const decoder = base ?? base58btc; + return [ + base58btc.prefix, + decoder.decode(`${base58btc.prefix}${source}`) + ]; + } + case base58btc.prefix: { + const decoder = base ?? base58btc; + return [base58btc.prefix, decoder.decode(source)]; + } + case base32.prefix: { + const decoder = base ?? 
base32; + return [base32.prefix, decoder.decode(source)]; + } + default: { + if (base == null) { + throw Error('To parse non base32 or base58btc encoded CID multibase decoder must be provided'); + } + return [source[0], base.decode(source)]; + } + } + } + function toStringV0(bytes, cache, base) { + const { prefix } = base; + if (prefix !== base58btc.prefix) { + throw Error(`Cannot string encode V0 in ${base.name} encoding`); + } + const cid = cache.get(prefix); + if (cid == null) { + const cid = base.encode(bytes).slice(1); + cache.set(prefix, cid); + return cid; + } + else { + return cid; + } + } + function toStringV1(bytes, cache, base) { + const { prefix } = base; + const cid = cache.get(prefix); + if (cid == null) { + const cid = base.encode(bytes); + cache.set(prefix, cid); + return cid; + } + else { + return cid; + } + } + const DAG_PB_CODE = 0x70; + const SHA_256_CODE = 0x12; + function encodeCID(version, code, multihash) { + const codeOffset = encodingLength(version); + const hashOffset = codeOffset + encodingLength(code); + const bytes = new Uint8Array(hashOffset + multihash.byteLength); + encodeTo(version, bytes, 0); + encodeTo(code, bytes, codeOffset); + bytes.set(multihash, hashOffset); + return bytes; + } + const cidSymbol = Symbol.for('@ipld/js-cid/CID'); + + const bases = { ...identityBase, ...base2$1, ...base8$1, ...base10$1, ...base16$1, ...base32$1, ...base36$1, ...base58, ...base64$1, ...base256emoji$1 }; + + function createCodec$1(name, prefix, encode, decode) { + return { + name, + prefix, + encoder: { + name, + prefix, + encode + }, + decoder: { + decode + } + }; + } + const string$1 = createCodec$1('utf8', 'u', (buf) => { + const decoder = new TextDecoder('utf8'); + return 'u' + decoder.decode(buf); + }, (str) => { + const encoder = new TextEncoder(); + return encoder.encode(str.substring(1)); + }); + const ascii = createCodec$1('ascii', 'a', (buf) => { + let string = 'a'; + for (let i = 0; i < buf.length; i++) { + string += 
String.fromCharCode(buf[i]); + } + return string; + }, (str) => { + str = str.substring(1); + const buf = allocUnsafe(str.length); + for (let i = 0; i < str.length; i++) { + buf[i] = str.charCodeAt(i); + } + return buf; + }); + const BASES = { + utf8: string$1, + 'utf-8': string$1, + hex: bases.base16, + latin1: ascii, + ascii, + binary: ascii, + ...bases + }; + + /** + * Create a `Uint8Array` from the passed string + * + * Supports `utf8`, `utf-8`, `hex`, and any encoding supported by the multiformats module. + * + * Also `ascii` which is similar to node's 'binary' encoding. + */ + function fromString(string, encoding = 'utf8') { + const base = BASES[encoding]; + if (base == null) { + throw new Error(`Unsupported encoding "${encoding}"`); + } + // add multibase prefix + return base.decoder.decode(`${base.prefix}${string}`); // eslint-disable-line @typescript-eslint/restrict-template-expressions + } + + /** + * A general purpose buffer pool + */ + function pool(size) { + const SIZE = 8192; + const MAX = SIZE >>> 1; + let slab; + let offset = SIZE; + return function poolAlloc(size) { + if (size < 1 || size > MAX) { + return allocUnsafe(size); + } + if (offset + size > SIZE) { + slab = allocUnsafe(SIZE); + offset = 0; + } + const buf = slab.subarray(offset, offset += size); + if ((offset & 7) !== 0) { + // align to 32 bit + offset = (offset | 7) + 1; + } + return buf; + }; + } + + /** + * Constructs a new writer operation instance. 
+ * + * @classdesc Scheduled writer operation + */ + class Op { + /** + * Function to call + */ + fn; + /** + * Value byte length + */ + len; + /** + * Next operation + */ + next; + /** + * Value to write + */ + val; + constructor(fn, len, val) { + this.fn = fn; + this.len = len; + this.next = undefined; + this.val = val; // type varies + } + } + /* istanbul ignore next */ + function noop() { } // eslint-disable-line no-empty-function + /** + * Constructs a new writer state instance + */ + class State { + /** + * Current head + */ + head; + /** + * Current tail + */ + tail; + /** + * Current buffer length + */ + len; + /** + * Next state + */ + next; + constructor(writer) { + this.head = writer.head; + this.tail = writer.tail; + this.len = writer.len; + this.next = writer.states; + } + } + const bufferPool = pool(); + /** + * Allocates a buffer of the specified size + */ + function alloc$1(size) { + if (globalThis.Buffer != null) { + return allocUnsafe(size); + } + return bufferPool(size); + } + /** + * When a value is written, the writer calculates its byte length and puts it into a linked + * list of operations to perform when finish() is called. This both allows us to allocate + * buffers of the exact required size and reduces the amount of work we have to do compared + * to first calculating over objects and then encoding over objects. In our case, the encoding + * part is just a linked list walk calling operations with already prepared values. 
+ */ + class Uint8ArrayWriter { + /** + * Current length + */ + len; + /** + * Operations head + */ + head; + /** + * Operations tail + */ + tail; + /** + * Linked forked states + */ + states; + constructor() { + this.len = 0; + this.head = new Op(noop, 0, 0); + this.tail = this.head; + this.states = null; + } + /** + * Pushes a new operation to the queue + */ + _push(fn, len, val) { + this.tail = this.tail.next = new Op(fn, len, val); + this.len += len; + return this; + } + /** + * Writes an unsigned 32 bit value as a varint + */ + uint32(value) { + // here, the call to this.push has been inlined and a varint specific Op subclass is used. + // uint32 is by far the most frequently used operation and benefits significantly from this. + this.len += (this.tail = this.tail.next = new VarintOp((value = value >>> 0) < + 128 + ? 1 + : value < 16384 + ? 2 + : value < 2097152 + ? 3 + : value < 268435456 + ? 4 + : 5, value)).len; + return this; + } + /** + * Writes a signed 32 bit value as a varint` + */ + int32(value) { + return value < 0 + ? 
this._push(writeVarint64, 10, LongBits.fromNumber(value)) // 10 bytes per spec + : this.uint32(value); + } + /** + * Writes a 32 bit value as a varint, zig-zag encoded + */ + sint32(value) { + return this.uint32((value << 1 ^ value >> 31) >>> 0); + } + /** + * Writes an unsigned 64 bit value as a varint + */ + uint64(value) { + const bits = LongBits.fromBigInt(value); + return this._push(writeVarint64, bits.length(), bits); + } + /** + * Writes an unsigned 64 bit value as a varint + */ + uint64Number(value) { + return this._push(encodeUint8Array, encodingLength$1(value), value); + } + /** + * Writes an unsigned 64 bit value as a varint + */ + uint64String(value) { + return this.uint64(BigInt(value)); + } + /** + * Writes a signed 64 bit value as a varint + */ + int64(value) { + return this.uint64(value); + } + /** + * Writes a signed 64 bit value as a varint + */ + int64Number(value) { + return this.uint64Number(value); + } + /** + * Writes a signed 64 bit value as a varint + */ + int64String(value) { + return this.uint64String(value); + } + /** + * Writes a signed 64 bit value as a varint, zig-zag encoded + */ + sint64(value) { + const bits = LongBits.fromBigInt(value).zzEncode(); + return this._push(writeVarint64, bits.length(), bits); + } + /** + * Writes a signed 64 bit value as a varint, zig-zag encoded + */ + sint64Number(value) { + const bits = LongBits.fromNumber(value).zzEncode(); + return this._push(writeVarint64, bits.length(), bits); + } + /** + * Writes a signed 64 bit value as a varint, zig-zag encoded + */ + sint64String(value) { + return this.sint64(BigInt(value)); + } + /** + * Writes a boolish value as a varint + */ + bool(value) { + return this._push(writeByte, 1, value ? 
1 : 0); + } + /** + * Writes an unsigned 32 bit value as fixed 32 bits + */ + fixed32(value) { + return this._push(writeFixed32, 4, value >>> 0); + } + /** + * Writes a signed 32 bit value as fixed 32 bits + */ + sfixed32(value) { + return this.fixed32(value); + } + /** + * Writes an unsigned 64 bit value as fixed 64 bits + */ + fixed64(value) { + const bits = LongBits.fromBigInt(value); + return this._push(writeFixed32, 4, bits.lo)._push(writeFixed32, 4, bits.hi); + } + /** + * Writes an unsigned 64 bit value as fixed 64 bits + */ + fixed64Number(value) { + const bits = LongBits.fromNumber(value); + return this._push(writeFixed32, 4, bits.lo)._push(writeFixed32, 4, bits.hi); + } + /** + * Writes an unsigned 64 bit value as fixed 64 bits + */ + fixed64String(value) { + return this.fixed64(BigInt(value)); + } + /** + * Writes a signed 64 bit value as fixed 64 bits + */ + sfixed64(value) { + return this.fixed64(value); + } + /** + * Writes a signed 64 bit value as fixed 64 bits + */ + sfixed64Number(value) { + return this.fixed64Number(value); + } + /** + * Writes a signed 64 bit value as fixed 64 bits + */ + sfixed64String(value) { + return this.fixed64String(value); + } + /** + * Writes a float (32 bit) + */ + float(value) { + return this._push(writeFloatLE, 4, value); + } + /** + * Writes a double (64 bit float). + * + * @function + * @param {number} value - Value to write + * @returns {Writer} `this` + */ + double(value) { + return this._push(writeDoubleLE, 8, value); + } + /** + * Writes a sequence of bytes + */ + bytes(value) { + const len = value.length >>> 0; + if (len === 0) { + return this._push(writeByte, 1, 0); + } + return this.uint32(len)._push(writeBytes, len, value); + } + /** + * Writes a string + */ + string(value) { + const len = length$2(value); + return len !== 0 + ? this.uint32(len)._push(write$1, len, value) + : this._push(writeByte, 1, 0); + } + /** + * Forks this writer's state by pushing it to a stack. 
+ * Calling {@link Writer#reset|reset} or {@link Writer#ldelim|ldelim} resets the writer to the previous state. + */ + fork() { + this.states = new State(this); + this.head = this.tail = new Op(noop, 0, 0); + this.len = 0; + return this; + } + /** + * Resets this instance to the last state + */ + reset() { + if (this.states != null) { + this.head = this.states.head; + this.tail = this.states.tail; + this.len = this.states.len; + this.states = this.states.next; + } + else { + this.head = this.tail = new Op(noop, 0, 0); + this.len = 0; + } + return this; + } + /** + * Resets to the last state and appends the fork state's current write length as a varint followed by its operations. + */ + ldelim() { + const head = this.head; + const tail = this.tail; + const len = this.len; + this.reset().uint32(len); + if (len !== 0) { + this.tail.next = head.next; // skip noop + this.tail = tail; + this.len += len; + } + return this; + } + /** + * Finishes the write operation + */ + finish() { + let head = this.head.next; // skip noop + const buf = alloc$1(this.len); + let pos = 0; + while (head != null) { + head.fn(head.val, buf, pos); + pos += head.len; + head = head.next; + } + // this.head = this.tail = null; + return buf; + } + } + function writeByte(val, buf, pos) { + buf[pos] = val & 255; + } + function writeVarint32(val, buf, pos) { + while (val > 127) { + buf[pos++] = val & 127 | 128; + val >>>= 7; + } + buf[pos] = val; + } + /** + * Constructs a new varint writer operation instance. 
+ * + * @classdesc Scheduled varint writer operation + */ + class VarintOp extends Op { + next; + constructor(len, val) { + super(writeVarint32, len, val); + this.next = undefined; + } + } + function writeVarint64(val, buf, pos) { + while (val.hi !== 0) { + buf[pos++] = val.lo & 127 | 128; + val.lo = (val.lo >>> 7 | val.hi << 25) >>> 0; + val.hi >>>= 7; + } + while (val.lo > 127) { + buf[pos++] = val.lo & 127 | 128; + val.lo = val.lo >>> 7; + } + buf[pos++] = val.lo; + } + function writeFixed32(val, buf, pos) { + buf[pos] = val & 255; + buf[pos + 1] = val >>> 8 & 255; + buf[pos + 2] = val >>> 16 & 255; + buf[pos + 3] = val >>> 24; + } + function writeBytes(val, buf, pos) { + buf.set(val, pos); + } + if (globalThis.Buffer != null) { + Uint8ArrayWriter.prototype.bytes = function (value) { + const len = value.length >>> 0; + this.uint32(len); + if (len > 0) { + this._push(writeBytesBuffer, len, value); + } + return this; + }; + Uint8ArrayWriter.prototype.string = function (value) { + const len = globalThis.Buffer.byteLength(value); + this.uint32(len); + if (len > 0) { + this._push(writeStringBuffer, len, value); + } + return this; + }; + } + function writeBytesBuffer(val, buf, pos) { + buf.set(val, pos); // faster than copy (requires node >= 4 where Buffers extend Uint8Array and set is properly inherited) + // also works for plain array values + } + function writeStringBuffer(val, buf, pos) { + if (val.length < 40) { + // plain js is faster for short strings (probably due to redundant assertions) + write$1(val, buf, pos); + // @ts-expect-error buf isn't a Uint8Array? + } + else if (buf.utf8Write != null) { + // @ts-expect-error buf isn't a Uint8Array? 
+ buf.utf8Write(val, pos); + } + else { + buf.set(fromString(val), pos); + } + } + /** + * Creates a new writer + */ + function createWriter() { + return new Uint8ArrayWriter(); + } + + function encodeMessage(message, codec) { + const w = createWriter(); + codec.encode(message, w, { + lengthDelimited: false + }); + return w.finish(); + } + + // https://developers.google.com/protocol-buffers/docs/encoding#structure + var CODEC_TYPES; + (function (CODEC_TYPES) { + CODEC_TYPES[CODEC_TYPES["VARINT"] = 0] = "VARINT"; + CODEC_TYPES[CODEC_TYPES["BIT64"] = 1] = "BIT64"; + CODEC_TYPES[CODEC_TYPES["LENGTH_DELIMITED"] = 2] = "LENGTH_DELIMITED"; + CODEC_TYPES[CODEC_TYPES["START_GROUP"] = 3] = "START_GROUP"; + CODEC_TYPES[CODEC_TYPES["END_GROUP"] = 4] = "END_GROUP"; + CODEC_TYPES[CODEC_TYPES["BIT32"] = 5] = "BIT32"; + })(CODEC_TYPES || (CODEC_TYPES = {})); + function createCodec(name, type, encode, decode) { + return { + name, + type, + encode, + decode + }; + } + + function enumeration(v) { + function findValue(val) { + // Use the reverse mapping to look up the enum key for the stored value + // https://www.typescriptlang.org/docs/handbook/enums.html#reverse-mappings + if (v[val.toString()] == null) { + throw new Error('Invalid enum value'); + } + return v[val]; + } + const encode = function enumEncode(val, writer) { + const enumValue = findValue(val); + writer.int32(enumValue); + }; + const decode = function enumDecode(reader) { + const val = reader.int32(); + return findValue(val); + }; + // @ts-expect-error yeah yeah + return createCodec('enum', CODEC_TYPES.VARINT, encode, decode); + } + + function message(encode, decode) { + return createCodec('message', CODEC_TYPES.LENGTH_DELIMITED, encode, decode); + } + + /** + * @packageDocumentation + * + * This module contains serialization/deserialization code used when encoding/decoding protobufs. 
+ * + * It should be declared as a dependency of your project: + * + * ```console + * npm i protons-runtime + * ``` + */ + let CodeError$3 = class CodeError extends Error { + code; + constructor(message, code, options) { + super(message, options); + this.code = code; + } + }; + + /* eslint-disable import/export */ + /* eslint-disable complexity */ + /* eslint-disable @typescript-eslint/no-namespace */ + /* eslint-disable @typescript-eslint/no-unnecessary-boolean-literal-compare */ + /* eslint-disable @typescript-eslint/no-empty-interface */ + var Peer$3; + (function (Peer) { + let _codec; + Peer.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if ((obj.publicKey != null && obj.publicKey.byteLength > 0)) { + w.uint32(10); + w.bytes(obj.publicKey); + } + if (obj.addrs != null) { + for (const value of obj.addrs) { + w.uint32(18); + w.bytes(value); + } + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length) => { + const obj = { + publicKey: alloc$2(0), + addrs: [] + }; + const end = length == null ? reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + obj.publicKey = reader.bytes(); + break; + } + case 2: { + obj.addrs.push(reader.bytes()); + break; + } + default: { + reader.skipType(tag & 7); + break; + } + } + } + return obj; + }); + } + return _codec; + }; + Peer.encode = (obj) => { + return encodeMessage(obj, Peer.codec()); + }; + Peer.decode = (buf) => { + return decodeMessage(buf, Peer.codec()); + }; + })(Peer$3 || (Peer$3 = {})); + + /** + * Turns a `Uint8Array` into a string. + * + * Supports `utf8`, `utf-8` and any encoding supported by the multibase module. + * + * Also `ascii` which is similar to node's 'binary' encoding. 
+ */ + function toString$1(array, encoding = 'utf8') { + const base = BASES[encoding]; + if (base == null) { + throw new Error(`Unsupported encoding "${encoding}"`); + } + // strip multibase prefix + return base.encoder.encode(array).substring(1); + } + + const pathSepS = '/'; + const pathSepB = new TextEncoder().encode(pathSepS); + const pathSep = pathSepB[0]; + /** + * A Key represents the unique identifier of an object. + * Our Key scheme is inspired by file systems and Google App Engine key model. + * Keys are meant to be unique across a system. Keys are hierarchical, + * incorporating more and more specific namespaces. Thus keys can be deemed + * 'children' or 'ancestors' of other keys: + * - `new Key('/Comedy')` + * - `new Key('/Comedy/MontyPython')` + * Also, every namespace can be parametrized to embed relevant object + * information. For example, the Key `name` (most specific namespace) could + * include the object type: + * - `new Key('/Comedy/MontyPython/Actor:JohnCleese')` + * - `new Key('/Comedy/MontyPython/Sketch:CheeseShop')` + * - `new Key('/Comedy/MontyPython/Sketch:CheeseShop/Character:Mousebender')` + * + */ + class Key { + _buf; + /** + * @param {string | Uint8Array} s + * @param {boolean} [clean] + */ + constructor(s, clean) { + if (typeof s === 'string') { + this._buf = fromString(s); + } + else if (s instanceof Uint8Array) { + this._buf = s; + } + else { + throw new Error('Invalid key, should be String of Uint8Array'); + } + if (clean == null) { + clean = true; + } + if (clean) { + this.clean(); + } + if (this._buf.byteLength === 0 || this._buf[0] !== pathSep) { + throw new Error('Invalid key'); + } + } + /** + * Convert to the string representation + * + * @param {import('uint8arrays/to-string').SupportedEncodings} [encoding='utf8'] - The encoding to use. 
+ * @returns {string} + */ + toString(encoding = 'utf8') { + return toString$1(this._buf, encoding); + } + /** + * Return the Uint8Array representation of the key + * + * @returns {Uint8Array} + */ + uint8Array() { + return this._buf; + } + /** + * Return string representation of the key + * + * @returns {string} + */ + get [Symbol.toStringTag]() { + return `Key(${this.toString()})`; + } + /** + * Constructs a key out of a namespace array. + * + * @param {Array} list - The array of namespaces + * @returns {Key} + * + * @example + * ```js + * Key.withNamespaces(['one', 'two']) + * // => Key('/one/two') + * ``` + */ + static withNamespaces(list) { + return new Key(list.join(pathSepS)); + } + /** + * Returns a randomly (uuid) generated key. + * + * @returns {Key} + * + * @example + * ```js + * Key.random() + * // => Key('/344502982398') + * ``` + */ + static random() { + return new Key(Math.random().toString().substring(2)); + } + /** + * @param {*} other + */ + static asKey(other) { + if (other instanceof Uint8Array || typeof other === 'string') { + // we can create a key from this + return new Key(other); + } + if (typeof other.uint8Array === 'function') { + // this is an older version or may have crossed the esm/cjs boundary + return new Key(other.uint8Array()); + } + return null; + } + /** + * Cleanup the current key + * + * @returns {void} + */ + clean() { + if (this._buf == null || this._buf.byteLength === 0) { + this._buf = pathSepB; + } + if (this._buf[0] !== pathSep) { + const bytes = new Uint8Array(this._buf.byteLength + 1); + bytes.fill(pathSep, 0, 1); + bytes.set(this._buf, 1); + this._buf = bytes; + } + // normalize does not remove trailing slashes + while (this._buf.byteLength > 1 && this._buf[this._buf.byteLength - 1] === pathSep) { + this._buf = this._buf.subarray(0, -1); + } + } + /** + * Check if the given key is sorted lower than ourself. 
+ * + * @param {Key} key - The other Key to check against + * @returns {boolean} + */ + less(key) { + const list1 = this.list(); + const list2 = key.list(); + for (let i = 0; i < list1.length; i++) { + if (list2.length < i + 1) { + return false; + } + const c1 = list1[i]; + const c2 = list2[i]; + if (c1 < c2) { + return true; + } + else if (c1 > c2) { + return false; + } + } + return list1.length < list2.length; + } + /** + * Returns the key with all parts in reversed order. + * + * @returns {Key} + * + * @example + * ```js + * new Key('/Comedy/MontyPython/Actor:JohnCleese').reverse() + * // => Key('/Actor:JohnCleese/MontyPython/Comedy') + * ``` + */ + reverse() { + return Key.withNamespaces(this.list().slice().reverse()); + } + /** + * Returns the `namespaces` making up this Key. + * + * @returns {Array} + */ + namespaces() { + return this.list(); + } + /** Returns the "base" namespace of this key. + * + * @returns {string} + * + * @example + * ```js + * new Key('/Comedy/MontyPython/Actor:JohnCleese').baseNamespace() + * // => 'Actor:JohnCleese' + * ``` + */ + baseNamespace() { + const ns = this.namespaces(); + return ns[ns.length - 1]; + } + /** + * Returns the `list` representation of this key. + * + * @returns {Array} + * + * @example + * ```js + * new Key('/Comedy/MontyPython/Actor:JohnCleese').list() + * // => ['Comedy', 'MontyPythong', 'Actor:JohnCleese'] + * ``` + */ + list() { + return this.toString().split(pathSepS).slice(1); + } + /** + * Returns the "type" of this key (value of last namespace). + * + * @returns {string} + * + * @example + * ```js + * new Key('/Comedy/MontyPython/Actor:JohnCleese').type() + * // => 'Actor' + * ``` + */ + type() { + return namespaceType(this.baseNamespace()); + } + /** + * Returns the "name" of this key (field of last namespace). 
+ * + * @returns {string} + * + * @example + * ```js + * new Key('/Comedy/MontyPython/Actor:JohnCleese').name() + * // => 'JohnCleese' + * ``` + */ + name() { + return namespaceValue(this.baseNamespace()); + } + /** + * Returns an "instance" of this type key (appends value to namespace). + * + * @param {string} s - The string to append. + * @returns {Key} + * + * @example + * ```js + * new Key('/Comedy/MontyPython/Actor').instance('JohnClesse') + * // => Key('/Comedy/MontyPython/Actor:JohnCleese') + * ``` + */ + instance(s) { + return new Key(this.toString() + ':' + s); + } + /** + * Returns the "path" of this key (parent + type). + * + * @returns {Key} + * + * @example + * ```js + * new Key('/Comedy/MontyPython/Actor:JohnCleese').path() + * // => Key('/Comedy/MontyPython/Actor') + * ``` + */ + path() { + let p = this.parent().toString(); + if (!p.endsWith(pathSepS)) { + p += pathSepS; + } + p += this.type(); + return new Key(p); + } + /** + * Returns the `parent` Key of this Key. + * + * @returns {Key} + * + * @example + * ```js + * new Key("/Comedy/MontyPython/Actor:JohnCleese").parent() + * // => Key("/Comedy/MontyPython") + * ``` + */ + parent() { + const list = this.list(); + if (list.length === 1) { + return new Key(pathSepS); + } + return new Key(list.slice(0, -1).join(pathSepS)); + } + /** + * Returns the `child` Key of this Key. 
+ * + * @param {Key} key - The child Key to add + * @returns {Key} + * + * @example + * ```js + * new Key('/Comedy/MontyPython').child(new Key('Actor:JohnCleese')) + * // => Key('/Comedy/MontyPython/Actor:JohnCleese') + * ``` + */ + child(key) { + if (this.toString() === pathSepS) { + return key; + } + else if (key.toString() === pathSepS) { + return this; + } + return new Key(this.toString() + key.toString(), false); + } + /** + * Returns whether this key is a prefix of `other` + * + * @param {Key} other - The other key to test against + * @returns {boolean} + * + * @example + * ```js + * new Key('/Comedy').isAncestorOf('/Comedy/MontyPython') + * // => true + * ``` + */ + isAncestorOf(other) { + if (other.toString() === this.toString()) { + return false; + } + return other.toString().startsWith(this.toString()); + } + /** + * Returns whether this key is a contains another as prefix. + * + * @param {Key} other - The other Key to test against + * @returns {boolean} + * + * @example + * ```js + * new Key('/Comedy/MontyPython').isDecendantOf('/Comedy') + * // => true + * ``` + */ + isDecendantOf(other) { + if (other.toString() === this.toString()) { + return false; + } + return this.toString().startsWith(other.toString()); + } + /** + * Checks if this key has only one namespace. + * + * @returns {boolean} + */ + isTopLevel() { + return this.list().length === 1; + } + /** + * Concats one or more Keys into one new Key. + * + * @param {Array} keys - The array of keys to concatenate + * @returns {Key} + */ + concat(...keys) { + return Key.withNamespaces([...this.namespaces(), ...flatten(keys.map(key => key.namespaces()))]); + } + } + /** + * The first component of a namespace. `foo` in `foo:bar` + * + * @param {string} ns + * @returns {string} + */ + function namespaceType(ns) { + const parts = ns.split(':'); + if (parts.length < 2) { + return ''; + } + return parts.slice(0, -1).join(':'); + } + /** + * The last component of a namespace, `baz` in `foo:bar:baz`. 
+ * + * @param {string} ns + * @returns {string} + */ + function namespaceValue(ns) { + const parts = ns.split(':'); + return parts[parts.length - 1]; + } + /** + * Flatten array of arrays (only one level) + * + * @template T + * @param {Array} arr + * @returns {T[]} + */ + function flatten(arr) { + return ([]).concat(...arr); + } + + const prefix = CONFIG_PREFIX; + + const mkErr = msg => new Error(`${prefix}: ${msg}`); + + function uint8ArrayToString(uint8Array){ + const string = new TextDecoder().decode(uint8Array); + return string + } + + function uint8ArrayFromString(string){ + const uint8Array = new TextEncoder().encode(string); + return uint8Array + } + + //Add id to pupsub message + async function msgIdFnStrictNoSign$1(msg){ + var enc = new TextEncoder(); + const signedMessage = msg; + const encodedSeqNum = enc.encode(signedMessage.sequenceNumber.toString()); + return await sha256$1.encode(encodedSeqNum) + } + + let totals = { + readyErrored: 0, + noiseErrored: 0, + upgradeErrored: 0, + readyTimedout: 0, + noiseTimedout: 0, + success: 0 + }; + + let stats = { + pending: 0, + open: 0, + + ready_error: 0, + noise_error: 0, + upgrade_error: 0, + + ready_timeout: 0, + noise_timeout: 0, + + close: 0, + abort: 0, + remote_close: 0, + }; + + let lastStats = { + pending: 0, + ready_error: 0, + noise_error: 0, + upgrade_error: 0, + close: 0, + remote_close: 0, + ready: 0, + abort: 0, + ready_timeout: 0, + noise_timeout: 0, + open: 0 + }; + + let isDialEnabled = true; + let lastfailtreshold = 0; + + function metrics(data){ + try{ + const webTransportEvents = data.libp2p_webtransport_dialer_events_total; + + const newPending = (webTransportEvents.pending ?? 0) - (lastStats.pending ?? 0); + const newReadyError = (webTransportEvents.ready_error ?? 0) - (lastStats.ready_error ?? 0); + const newNoiseError = (webTransportEvents.noise_error ?? 0) - (lastStats.noise_error ?? 0); + const newUpgradeError = (webTransportEvents.upgrade_error ?? 0) - (lastStats.upgrade_error ?? 
0); + const newClose = (webTransportEvents.close ?? 0) - (lastStats.close ?? 0); + const newReady = (webTransportEvents.ready ?? 0) - (lastStats.ready ?? 0); + const newAbort = (webTransportEvents.abort ?? 0) - (lastStats.abort ?? 0); + const newReadyTimeout = (webTransportEvents.ready_timeout ?? 0) - (lastStats.ready_timeout ?? 0); + const newNoiseTimeout = (webTransportEvents.noise_timeout ?? 0) - (lastStats.noise_timeout ?? 0); + const newOpen = (webTransportEvents.open ?? 0) - (lastStats.open ?? 0); + const newRemoteClose = (webTransportEvents.remote_close ?? 0) - (lastStats.remote_close ?? 0); + + stats.pending += newPending; + stats.pending -= newReadyTimeout; + stats.pending -= newNoiseTimeout; + stats.pending -= newReadyError; + stats.pending -= newNoiseError; + stats.pending -= newUpgradeError; + stats.pending -= newOpen; + + stats.open += newOpen; + stats.open -= newClose; + stats.open -= newRemoteClose; + stats.open -= newAbort; + + stats.ready_error = newReadyError; + stats.noise_error = newNoiseError; + stats.upgrade_error = newUpgradeError; + stats.ready_timeout = newReadyTimeout; + stats.noise_timeout = newNoiseTimeout; + stats.close = newClose; + stats.abort = newAbort; + stats.remote_close = newRemoteClose; + + totals.success += newReady; + totals.readyErrored += newReadyError; + totals.noiseErrored += newNoiseError; + totals.upgradeErrored += newUpgradeError; + totals.readyTimedout += newReadyTimeout; + totals.noiseTimedout += newNoiseTimeout; + + const errors = totals.readyErrored + totals.noiseErrored + totals.upgradeErrored; + const timeouts = totals.readyTimedout + totals.noiseTimedout; + //const failureRate = ((errors + timeouts) / (errors + timeouts + totals.success) * 100).toFixed(2) + + lastStats = webTransportEvents; + + const fail = errors+timeouts; + const treshold = errors+timeouts+stats.open+stats.pending; + + if(treshold>50){ + //console.log(`Treeshold hit : ${treshold}`) + } + + if(fail>50){ + //console.log(`Open : ${stats.open} , 
Pending : ${stats.pending} , Succes : ${totals.success} , Fail : ${fail} `) + + } + + if ((fail-lastfailtreshold)>50){ + if(isDialEnabled){ + isDialEnabled = false; + //const str = JSON.stringify({isDialEnabled,fail,lastfailtreshold}) + //console.warn(str) + setTimeout(()=>{ + if(!isDialEnabled){ + isDialEnabled = true; + lastfailtreshold = fail; + //const str = JSON.stringify({isDialEnabled,fail,lastfailtreshold}) + //console.warn(str) + } + },6*60*1000); + } + } + + return isDialEnabled + + } + catch{ + console.debug('Metrics error'); + } + } + + /* + onunhandledrejection = function(evt) { + console.warn(evt.reason); + return + } + */ + + const connectionSymbol = Symbol.for('@libp2p/connection'); + + /** + * Any object that implements this Symbol as a property should return a + * ContentRouting instance as the property value, similar to how + * `Symbol.Iterable` can be used to return an `Iterable` from an `Iterator`. + * + * @example + * + * ```TypeScript + * import { contentRoutingSymbol, ContentRouting } from '@libp2p/content-routing' + * + * class MyContentRouter implements ContentRouting { + * get [contentRoutingSymbol] () { + * return this + * } + * + * // ...other methods + * } + * ``` + */ + const contentRoutingSymbol = Symbol.for('@libp2p/content-routing'); + + /** + * Any object that implements this Symbol as a property should return a + * PeerDiscovery instance as the property value, similar to how + * `Symbol.Iterable` can be used to return an `Iterable` from an `Iterator`. 
+ * + * @example + * + * ```TypeScript + * import { peerDiscovery, PeerDiscovery } from '@libp2p/peer-discovery' + * + * class MyPeerDiscoverer implements PeerDiscovery { + * get [peerDiscovery] () { + * return this + * } + * + * // ...other methods + * } + * ``` + */ + const peerDiscoverySymbol = Symbol.for('@libp2p/peer-discovery'); + + const peerIdSymbol = Symbol.for('@libp2p/peer-id'); + function isPeerId(other) { + return other != null && Boolean(other[peerIdSymbol]); + } + + /** + * Any object that implements this Symbol as a property should return a + * PeerRouting instance as the property value, similar to how + * `Symbol.Iterable` can be used to return an `Iterable` from an `Iterator`. + * + * @example + * + * ```TypeScript + * import { peerRouting, PeerRouting } from '@libp2p/peer-routing' + * + * class MyPeerRouter implements PeerRouting { + * get [peerRouting] () { + * return this + * } + * + * // ...other methods + * } + * ``` + */ + const peerRoutingSymbol = Symbol.for('@libp2p/peer-routing'); + + const KEEP_ALIVE = 'keep-alive'; + + /** + * On the producing side: + * * Build messages with the signature, key (from may be enough for certain inlineable public key types), from and seqno fields. + * + * On the consuming side: + * * Enforce the fields to be present, reject otherwise. + * * Propagate only if the fields are valid and signature can be verified, reject otherwise. + */ + const StrictSign = 'StrictSign'; + /** + * On the producing side: + * * Build messages without the signature, key, from and seqno fields. + * * The corresponding protobuf key-value pairs are absent from the marshalled message, not just empty. + * + * On the consuming side: + * * Enforce the fields to be absent, reject otherwise. + * * Propagate only if the fields are absent, reject otherwise. + * * A message_id function will not be able to use the above fields, and should instead rely on the data field. A commonplace strategy is to calculate a hash. 
+ */ + const StrictNoSign = 'StrictNoSign'; + var TopicValidatorResult; + (function (TopicValidatorResult) { + /** + * The message is considered valid, and it should be delivered and forwarded to the network + */ + TopicValidatorResult["Accept"] = "accept"; + /** + * The message is neither delivered nor forwarded to the network + */ + TopicValidatorResult["Ignore"] = "ignore"; + /** + * The message is considered invalid, and it should be rejected + */ + TopicValidatorResult["Reject"] = "reject"; + })(TopicValidatorResult || (TopicValidatorResult = {})); + + const transportSymbol = Symbol.for('@libp2p/transport'); + /** + * Enum Transport Manager Fault Tolerance values + */ + var FaultTolerance; + (function (FaultTolerance) { + /** + * should be used for failing in any listen circumstance + */ + FaultTolerance[FaultTolerance["FATAL_ALL"] = 0] = "FATAL_ALL"; + /** + * should be used for not failing when not listening + */ + FaultTolerance[FaultTolerance["NO_FATAL"] = 1] = "NO_FATAL"; + })(FaultTolerance || (FaultTolerance = {})); + + /** + * When this error is thrown it means an operation was aborted, + * usually in response to the `abort` event being emitted by an + * AbortSignal. + */ + let AbortError$5 = class AbortError extends Error { + code; + type; + constructor(message = 'The operation was aborted') { + super(message); + this.name = 'AbortError'; + this.code = AbortError.code; + this.type = AbortError.type; + } + static code = 'ABORT_ERR'; + static type = 'aborted'; + }; + let CodeError$2 = class CodeError extends Error { + code; + props; + constructor(message, code, props) { + super(message); + this.code = code; + this.name = props?.name ?? 'CodeError'; + this.props = props ?? {}; // eslint-disable-line @typescript-eslint/consistent-type-assertions + } + }; + class AggregateCodeError extends AggregateError { + code; + props; + constructor(errors, message, code, props) { + super(errors, message); + this.code = code; + this.name = props?.name ?? 
'AggregateCodeError'; + this.props = props ?? {}; // eslint-disable-line @typescript-eslint/consistent-type-assertions + } + } + // Error codes + const ERR_TIMEOUT = 'ERR_TIMEOUT'; + + /** Noop for browser compatibility */ + function setMaxListeners$1() { } + + // create a setMaxListeners that doesn't break browser usage + const setMaxListeners = (n, ...eventTargets) => { + try { + setMaxListeners$1(n, ...eventTargets); + } + catch { + // swallow error, gulp + } + }; + + /** + * An implementation of a typed event target + * etc + */ + class TypedEventEmitter extends EventTarget { + #listeners = new Map(); + constructor() { + super(); + // silence MaxListenersExceededWarning warning on Node.js, this is a red + // herring almost all of the time + setMaxListeners(Infinity, this); + } + listenerCount(type) { + const listeners = this.#listeners.get(type); + if (listeners == null) { + return 0; + } + return listeners.length; + } + addEventListener(type, listener, options) { + super.addEventListener(type, listener, options); + let list = this.#listeners.get(type); + if (list == null) { + list = []; + this.#listeners.set(type, list); + } + list.push({ + callback: listener, + once: (options !== true && options !== false && options?.once) ?? false + }); + } + removeEventListener(type, listener, options) { + super.removeEventListener(type.toString(), listener ?? 
null, options); + let list = this.#listeners.get(type); + if (list == null) { + return; + } + list = list.filter(({ callback }) => callback !== listener); + this.#listeners.set(type, list); + } + dispatchEvent(event) { + const result = super.dispatchEvent(event); + let list = this.#listeners.get(event.type); + if (list == null) { + return result; + } + list = list.filter(({ once }) => !once); + this.#listeners.set(event.type, list); + return result; + } + safeDispatchEvent(type, detail = {}) { + return this.dispatchEvent(new CustomEvent$1(type, detail)); + } + } + /** + * CustomEvent is a standard event but it's not supported by node. + * + * Remove this when https://github.com/nodejs/node/issues/40678 is closed. + * + * Ref: https://developer.mozilla.org/en-US/docs/Web/API/CustomEvent + */ + class CustomEventPolyfill extends Event { + /** Returns any custom data event was created with. Typically used for synthetic events. */ + detail; + constructor(message, data) { + super(message, data); + // @ts-expect-error could be undefined + this.detail = data?.detail; + } + } + const CustomEvent$1 = globalThis.CustomEvent ?? 
CustomEventPolyfill; + + function isStartable(obj) { + return obj != null && typeof obj.start === 'function' && typeof obj.stop === 'function'; + } + async function start(...objs) { + const startables = []; + for (const obj of objs) { + if (isStartable(obj)) { + startables.push(obj); + } + } + await Promise.all(startables.map(async (s) => { + if (s.beforeStart != null) { + await s.beforeStart(); + } + })); + await Promise.all(startables.map(async (s) => { + await s.start(); + })); + await Promise.all(startables.map(async (s) => { + if (s.afterStart != null) { + await s.afterStart(); + } + })); + } + async function stop(...objs) { + const startables = []; + for (const obj of objs) { + if (isStartable(obj)) { + startables.push(obj); + } + } + await Promise.all(startables.map(async (s) => { + if (s.beforeStop != null) { + await s.beforeStop(); + } + })); + await Promise.all(startables.map(async (s) => { + await s.stop(); + })); + await Promise.all(startables.map(async (s) => { + if (s.afterStop != null) { + await s.afterStop(); + } + })); + } + + /** + * Returns true if the two passed Uint8Arrays have the same content + */ + function equals(a, b) { + if (a === b) { + return true; + } + if (a.byteLength !== b.byteLength) { + return false; + } + for (let i = 0; i < a.byteLength; i++) { + if (a[i] !== b[i]) { + return false; + } + } + return true; + } + + /** + * To guarantee Uint8Array semantics, convert nodejs Buffers + * into vanilla Uint8Arrays + */ + function asUint8Array$1(buf) { + return buf; + } + + /** + * Returns a new Uint8Array created by concatenating the passed Uint8Arrays + */ + function concat$1(arrays, length) { + if (length == null) { + length = arrays.reduce((acc, curr) => acc + curr.length, 0); + } + const output = allocUnsafe(length); + let offset = 0; + for (const arr of arrays) { + output.set(arr, offset); + offset += arr.length; + } + return asUint8Array$1(output); + } + + function isPromise$4(thing) { + if (thing == null) { + return false; + } + 
return typeof thing.then === 'function' && + typeof thing.catch === 'function' && + typeof thing.finally === 'function'; + } + + function number$2(n) { + if (!Number.isSafeInteger(n) || n < 0) + throw new Error(`positive integer expected, not ${n}`); + } + // copied from utils + function isBytes$3(a) { + return (a instanceof Uint8Array || + (a != null && typeof a === 'object' && a.constructor.name === 'Uint8Array')); + } + function bytes$1(b, ...lengths) { + if (!isBytes$3(b)) + throw new Error('Uint8Array expected'); + if (lengths.length > 0 && !lengths.includes(b.length)) + throw new Error(`Uint8Array expected of length ${lengths}, not of length=${b.length}`); + } + function hash(h) { + if (typeof h !== 'function' || typeof h.create !== 'function') + throw new Error('Hash should be wrapped by utils.wrapConstructor'); + number$2(h.outputLen); + number$2(h.blockLen); + } + function exists$1(instance, checkFinished = true) { + if (instance.destroyed) + throw new Error('Hash instance has been destroyed'); + if (checkFinished && instance.finished) + throw new Error('Hash#digest() has already been called'); + } + function output$1(out, instance) { + bytes$1(out); + const min = instance.outputLen; + if (out.length < min) { + throw new Error(`digestInto() expects output buffer of length at least ${min}`); + } + } + + const crypto$1 = typeof globalThis === 'object' && 'crypto' in globalThis ? globalThis.crypto : undefined; + + /*! noble-hashes - MIT License (c) 2022 Paul Miller (paulmillr.com) */ + // We use WebCrypto aka globalThis.crypto, which exists in browsers and node.js 16+. + // node.js versions earlier than v19 don't declare it in global scope. + // For node.js, package.json#exports field mapping rewrites import + // from `crypto` to `cryptoNode`, which imports native module. + // Makes the utils un-importable in browsers without a bundler. + // Once node.js 18 is deprecated (2025-04-30), we can just drop the import. 
+ // Cast array to view + const createView$1 = (arr) => new DataView(arr.buffer, arr.byteOffset, arr.byteLength); + // The rotate right (circular right shift) operation for uint32 + const rotr = (word, shift) => (word << (32 - shift)) | (word >>> shift); + new Uint8Array(new Uint32Array([0x11223344]).buffer)[0] === 0x44; + // There is no setImmediate in browser and setTimeout is slow. + // call of async fn will return Promise, which will be fullfiled only on + // next scheduler queue processing step and this is exactly what we need. + const nextTick = async () => { }; + // Returns control to thread each 'tick' ms to avoid blocking + async function asyncLoop(iters, tick, cb) { + let ts = Date.now(); + for (let i = 0; i < iters; i++) { + cb(i); + // Date.now() is not monotonic, so in case if clock goes backwards we return return control too + const diff = Date.now() - ts; + if (diff >= 0 && diff < tick) + continue; + await nextTick(); + ts += diff; + } + } + /** + * @example utf8ToBytes('abc') // new Uint8Array([97, 98, 99]) + */ + function utf8ToBytes$2(str) { + if (typeof str !== 'string') + throw new Error(`utf8ToBytes expected string, got ${typeof str}`); + return new Uint8Array(new TextEncoder().encode(str)); // https://bugzil.la/1681809 + } + /** + * Normalizes (non-hex) string or Uint8Array to Uint8Array. + * Warning: when Uint8Array is passed, it would NOT get copied. + * Keep in mind for future mutable operations. + */ + function toBytes$2(data) { + if (typeof data === 'string') + data = utf8ToBytes$2(data); + bytes$1(data); + return data; + } + /** + * Copies several Uint8Arrays into one. 
+ */ + function concatBytes$1(...arrays) { + let sum = 0; + for (let i = 0; i < arrays.length; i++) { + const a = arrays[i]; + bytes$1(a); + sum += a.length; + } + const res = new Uint8Array(sum); + for (let i = 0, pad = 0; i < arrays.length; i++) { + const a = arrays[i]; + res.set(a, pad); + pad += a.length; + } + return res; + } + // For runtime check if class implements interface + class Hash { + // Safe version that clones internal state + clone() { + return this._cloneInto(); + } + } + const toStr = {}.toString; + function checkOpts$1(defaults, opts) { + if (opts !== undefined && toStr.call(opts) !== '[object Object]') + throw new Error('Options should be object or undefined'); + const merged = Object.assign(defaults, opts); + return merged; + } + function wrapConstructor(hashCons) { + const hashC = (msg) => hashCons().update(toBytes$2(msg)).digest(); + const tmp = hashCons(); + hashC.outputLen = tmp.outputLen; + hashC.blockLen = tmp.blockLen; + hashC.create = () => hashCons(); + return hashC; + } + /** + * Secure PRNG. Uses `crypto.getRandomValues`, which defers to OS. + */ + function randomBytes$1(bytesLength = 32) { + if (crypto$1 && typeof crypto$1.getRandomValues === 'function') { + return crypto$1.getRandomValues(new Uint8Array(bytesLength)); + } + throw new Error('crypto.getRandomValues must be defined'); + } + + // Polyfill for Safari 14 + function setBigUint64$1(view, byteOffset, value, isLE) { + if (typeof view.setBigUint64 === 'function') + return view.setBigUint64(byteOffset, value, isLE); + const _32n = BigInt(32); + const _u32_max = BigInt(0xffffffff); + const wh = Number((value >> _32n) & _u32_max); + const wl = Number(value & _u32_max); + const h = isLE ? 4 : 0; + const l = isLE ? 0 : 4; + view.setUint32(byteOffset + h, wh, isLE); + view.setUint32(byteOffset + l, wl, isLE); + } + // Choice: a ? 
b : c + const Chi = (a, b, c) => (a & b) ^ (~a & c); + // Majority function, true if any two inpust is true + const Maj = (a, b, c) => (a & b) ^ (a & c) ^ (b & c); + /** + * Merkle-Damgard hash construction base class. + * Could be used to create MD5, RIPEMD, SHA1, SHA2. + */ + class HashMD extends Hash { + constructor(blockLen, outputLen, padOffset, isLE) { + super(); + this.blockLen = blockLen; + this.outputLen = outputLen; + this.padOffset = padOffset; + this.isLE = isLE; + this.finished = false; + this.length = 0; + this.pos = 0; + this.destroyed = false; + this.buffer = new Uint8Array(blockLen); + this.view = createView$1(this.buffer); + } + update(data) { + exists$1(this); + const { view, buffer, blockLen } = this; + data = toBytes$2(data); + const len = data.length; + for (let pos = 0; pos < len;) { + const take = Math.min(blockLen - this.pos, len - pos); + // Fast path: we have at least one block in input, cast it to view and process + if (take === blockLen) { + const dataView = createView$1(data); + for (; blockLen <= len - pos; pos += blockLen) + this.process(dataView, pos); + continue; + } + buffer.set(data.subarray(pos, pos + take), this.pos); + this.pos += take; + pos += take; + if (this.pos === blockLen) { + this.process(view, 0); + this.pos = 0; + } + } + this.length += data.length; + this.roundClean(); + return this; + } + digestInto(out) { + exists$1(this); + output$1(out, this); + this.finished = true; + // Padding + // We can avoid allocation of buffer for padding completely if it + // was previously not allocated here. But it won't change performance. 
+ const { buffer, view, blockLen, isLE } = this; + let { pos } = this; + // append the bit '1' to the message + buffer[pos++] = 0b10000000; + this.buffer.subarray(pos).fill(0); + // we have less than padOffset left in buffer, so we cannot put length in + // current block, need process it and pad again + if (this.padOffset > blockLen - pos) { + this.process(view, 0); + pos = 0; + } + // Pad until full block byte with zeros + for (let i = pos; i < blockLen; i++) + buffer[i] = 0; + // Note: sha512 requires length to be 128bit integer, but length in JS will overflow before that + // You need to write around 2 exabytes (u64_max / 8 / (1024**6)) for this to happen. + // So we just write lowest 64 bits of that value. + setBigUint64$1(view, blockLen - 8, BigInt(this.length * 8), isLE); + this.process(view, 0); + const oview = createView$1(out); + const len = this.outputLen; + // NOTE: we do division by 4 later, which should be fused in single op with modulo by JIT + if (len % 4) + throw new Error('_sha2: outputLen should be aligned to 32bit'); + const outLen = len / 4; + const state = this.get(); + if (outLen > state.length) + throw new Error('_sha2: outputLen bigger than state'); + for (let i = 0; i < outLen; i++) + oview.setUint32(4 * i, state[i], isLE); + } + digest() { + const { buffer, outputLen } = this; + this.digestInto(buffer); + const res = buffer.slice(0, outputLen); + this.destroy(); + return res; + } + _cloneInto(to) { + to || (to = new this.constructor()); + to.set(...this.get()); + const { blockLen, buffer, length, finished, destroyed, pos } = this; + to.length = length; + to.pos = pos; + to.finished = finished; + to.destroyed = destroyed; + if (length % blockLen) + to.buffer.set(buffer); + return to; + } + } + + const U32_MASK64 = /* @__PURE__ */ BigInt(2 ** 32 - 1); + const _32n = /* @__PURE__ */ BigInt(32); + // We are not using BigUint64Array, because they are extremely slow as per 2022 + function fromBig(n, le = false) { + if (le) + return { h: Number(n 
& U32_MASK64), l: Number((n >> _32n) & U32_MASK64) }; + return { h: Number((n >> _32n) & U32_MASK64) | 0, l: Number(n & U32_MASK64) | 0 }; + } + function split(lst, le = false) { + let Ah = new Uint32Array(lst.length); + let Al = new Uint32Array(lst.length); + for (let i = 0; i < lst.length; i++) { + const { h, l } = fromBig(lst[i], le); + [Ah[i], Al[i]] = [h, l]; + } + return [Ah, Al]; + } + const toBig = (h, l) => (BigInt(h >>> 0) << _32n) | BigInt(l >>> 0); + // for Shift in [0, 32) + const shrSH = (h, _l, s) => h >>> s; + const shrSL = (h, l, s) => (h << (32 - s)) | (l >>> s); + // Right rotate for Shift in [1, 32) + const rotrSH = (h, l, s) => (h >>> s) | (l << (32 - s)); + const rotrSL = (h, l, s) => (h << (32 - s)) | (l >>> s); + // Right rotate for Shift in (32, 64), NOTE: 32 is special case. + const rotrBH = (h, l, s) => (h << (64 - s)) | (l >>> (s - 32)); + const rotrBL = (h, l, s) => (h >>> (s - 32)) | (l << (64 - s)); + // Right rotate for shift===32 (just swaps l&h) + const rotr32H = (_h, l) => l; + const rotr32L = (h, _l) => h; + // Left rotate for Shift in [1, 32) + const rotlSH = (h, l, s) => (h << s) | (l >>> (32 - s)); + const rotlSL = (h, l, s) => (l << s) | (h >>> (32 - s)); + // Left rotate for Shift in (32, 64), NOTE: 32 is special case. + const rotlBH = (h, l, s) => (l << (s - 32)) | (h >>> (64 - s)); + const rotlBL = (h, l, s) => (h << (s - 32)) | (l >>> (64 - s)); + // JS uses 32-bit signed integers for bitwise operations which means we cannot + // simple take carry out of low bit sum by shift, we need to use division. 
+ function add(Ah, Al, Bh, Bl) { + const l = (Al >>> 0) + (Bl >>> 0); + return { h: (Ah + Bh + ((l / 2 ** 32) | 0)) | 0, l: l | 0 }; + } + // Addition with more than 2 elements + const add3L = (Al, Bl, Cl) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0); + const add3H = (low, Ah, Bh, Ch) => (Ah + Bh + Ch + ((low / 2 ** 32) | 0)) | 0; + const add4L = (Al, Bl, Cl, Dl) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0) + (Dl >>> 0); + const add4H = (low, Ah, Bh, Ch, Dh) => (Ah + Bh + Ch + Dh + ((low / 2 ** 32) | 0)) | 0; + const add5L = (Al, Bl, Cl, Dl, El) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0) + (Dl >>> 0) + (El >>> 0); + const add5H = (low, Ah, Bh, Ch, Dh, Eh) => (Ah + Bh + Ch + Dh + Eh + ((low / 2 ** 32) | 0)) | 0; + // prettier-ignore + const u64 = { + fromBig, split, toBig, + shrSH, shrSL, + rotrSH, rotrSL, rotrBH, rotrBL, + rotr32H, rotr32L, + rotlSH, rotlSL, rotlBH, rotlBL, + add, add3L, add3H, add4L, add4H, add5H, add5L, + }; + + // Round contants (first 32 bits of the fractional parts of the cube roots of the first 80 primes 2..409): + // prettier-ignore + const [SHA512_Kh, SHA512_Kl] = /* @__PURE__ */ (() => u64.split([ + '0x428a2f98d728ae22', '0x7137449123ef65cd', '0xb5c0fbcfec4d3b2f', '0xe9b5dba58189dbbc', + '0x3956c25bf348b538', '0x59f111f1b605d019', '0x923f82a4af194f9b', '0xab1c5ed5da6d8118', + '0xd807aa98a3030242', '0x12835b0145706fbe', '0x243185be4ee4b28c', '0x550c7dc3d5ffb4e2', + '0x72be5d74f27b896f', '0x80deb1fe3b1696b1', '0x9bdc06a725c71235', '0xc19bf174cf692694', + '0xe49b69c19ef14ad2', '0xefbe4786384f25e3', '0x0fc19dc68b8cd5b5', '0x240ca1cc77ac9c65', + '0x2de92c6f592b0275', '0x4a7484aa6ea6e483', '0x5cb0a9dcbd41fbd4', '0x76f988da831153b5', + '0x983e5152ee66dfab', '0xa831c66d2db43210', '0xb00327c898fb213f', '0xbf597fc7beef0ee4', + '0xc6e00bf33da88fc2', '0xd5a79147930aa725', '0x06ca6351e003826f', '0x142929670a0e6e70', + '0x27b70a8546d22ffc', '0x2e1b21385c26c926', '0x4d2c6dfc5ac42aed', '0x53380d139d95b3df', + '0x650a73548baf63de', '0x766a0abb3c77b2a8', 
'0x81c2c92e47edaee6', '0x92722c851482353b', + '0xa2bfe8a14cf10364', '0xa81a664bbc423001', '0xc24b8b70d0f89791', '0xc76c51a30654be30', + '0xd192e819d6ef5218', '0xd69906245565a910', '0xf40e35855771202a', '0x106aa07032bbd1b8', + '0x19a4c116b8d2d0c8', '0x1e376c085141ab53', '0x2748774cdf8eeb99', '0x34b0bcb5e19b48a8', + '0x391c0cb3c5c95a63', '0x4ed8aa4ae3418acb', '0x5b9cca4f7763e373', '0x682e6ff3d6b2b8a3', + '0x748f82ee5defb2fc', '0x78a5636f43172f60', '0x84c87814a1f0ab72', '0x8cc702081a6439ec', + '0x90befffa23631e28', '0xa4506cebde82bde9', '0xbef9a3f7b2c67915', '0xc67178f2e372532b', + '0xca273eceea26619c', '0xd186b8c721c0c207', '0xeada7dd6cde0eb1e', '0xf57d4f7fee6ed178', + '0x06f067aa72176fba', '0x0a637dc5a2c898a6', '0x113f9804bef90dae', '0x1b710b35131c471b', + '0x28db77f523047d84', '0x32caab7b40c72493', '0x3c9ebe0a15c9bebc', '0x431d67c49c100d4c', + '0x4cc5d4becb3e42b6', '0x597f299cfc657e2a', '0x5fcb6fab3ad6faec', '0x6c44198c4a475817' + ].map(n => BigInt(n))))(); + // Temporary buffer, not used to store anything between runs + const SHA512_W_H = /* @__PURE__ */ new Uint32Array(80); + const SHA512_W_L = /* @__PURE__ */ new Uint32Array(80); + class SHA512 extends HashMD { + constructor() { + super(128, 64, 16, false); + // We cannot use array here since array allows indexing by variable which means optimizer/compiler cannot use registers. + // Also looks cleaner and easier to verify with spec. 
+ // Initial state (first 32 bits of the fractional parts of the square roots of the first 8 primes 2..19): + // h -- high 32 bits, l -- low 32 bits + this.Ah = 0x6a09e667 | 0; + this.Al = 0xf3bcc908 | 0; + this.Bh = 0xbb67ae85 | 0; + this.Bl = 0x84caa73b | 0; + this.Ch = 0x3c6ef372 | 0; + this.Cl = 0xfe94f82b | 0; + this.Dh = 0xa54ff53a | 0; + this.Dl = 0x5f1d36f1 | 0; + this.Eh = 0x510e527f | 0; + this.El = 0xade682d1 | 0; + this.Fh = 0x9b05688c | 0; + this.Fl = 0x2b3e6c1f | 0; + this.Gh = 0x1f83d9ab | 0; + this.Gl = 0xfb41bd6b | 0; + this.Hh = 0x5be0cd19 | 0; + this.Hl = 0x137e2179 | 0; + } + // prettier-ignore + get() { + const { Ah, Al, Bh, Bl, Ch, Cl, Dh, Dl, Eh, El, Fh, Fl, Gh, Gl, Hh, Hl } = this; + return [Ah, Al, Bh, Bl, Ch, Cl, Dh, Dl, Eh, El, Fh, Fl, Gh, Gl, Hh, Hl]; + } + // prettier-ignore + set(Ah, Al, Bh, Bl, Ch, Cl, Dh, Dl, Eh, El, Fh, Fl, Gh, Gl, Hh, Hl) { + this.Ah = Ah | 0; + this.Al = Al | 0; + this.Bh = Bh | 0; + this.Bl = Bl | 0; + this.Ch = Ch | 0; + this.Cl = Cl | 0; + this.Dh = Dh | 0; + this.Dl = Dl | 0; + this.Eh = Eh | 0; + this.El = El | 0; + this.Fh = Fh | 0; + this.Fl = Fl | 0; + this.Gh = Gh | 0; + this.Gl = Gl | 0; + this.Hh = Hh | 0; + this.Hl = Hl | 0; + } + process(view, offset) { + // Extend the first 16 words into the remaining 64 words w[16..79] of the message schedule array + for (let i = 0; i < 16; i++, offset += 4) { + SHA512_W_H[i] = view.getUint32(offset); + SHA512_W_L[i] = view.getUint32((offset += 4)); + } + for (let i = 16; i < 80; i++) { + // s0 := (w[i-15] rightrotate 1) xor (w[i-15] rightrotate 8) xor (w[i-15] rightshift 7) + const W15h = SHA512_W_H[i - 15] | 0; + const W15l = SHA512_W_L[i - 15] | 0; + const s0h = u64.rotrSH(W15h, W15l, 1) ^ u64.rotrSH(W15h, W15l, 8) ^ u64.shrSH(W15h, W15l, 7); + const s0l = u64.rotrSL(W15h, W15l, 1) ^ u64.rotrSL(W15h, W15l, 8) ^ u64.shrSL(W15h, W15l, 7); + // s1 := (w[i-2] rightrotate 19) xor (w[i-2] rightrotate 61) xor (w[i-2] rightshift 6) + const W2h = SHA512_W_H[i - 2] | 0; + 
const W2l = SHA512_W_L[i - 2] | 0; + const s1h = u64.rotrSH(W2h, W2l, 19) ^ u64.rotrBH(W2h, W2l, 61) ^ u64.shrSH(W2h, W2l, 6); + const s1l = u64.rotrSL(W2h, W2l, 19) ^ u64.rotrBL(W2h, W2l, 61) ^ u64.shrSL(W2h, W2l, 6); + // SHA256_W[i] = s0 + s1 + SHA256_W[i - 7] + SHA256_W[i - 16]; + const SUMl = u64.add4L(s0l, s1l, SHA512_W_L[i - 7], SHA512_W_L[i - 16]); + const SUMh = u64.add4H(SUMl, s0h, s1h, SHA512_W_H[i - 7], SHA512_W_H[i - 16]); + SHA512_W_H[i] = SUMh | 0; + SHA512_W_L[i] = SUMl | 0; + } + let { Ah, Al, Bh, Bl, Ch, Cl, Dh, Dl, Eh, El, Fh, Fl, Gh, Gl, Hh, Hl } = this; + // Compression function main loop, 80 rounds + for (let i = 0; i < 80; i++) { + // S1 := (e rightrotate 14) xor (e rightrotate 18) xor (e rightrotate 41) + const sigma1h = u64.rotrSH(Eh, El, 14) ^ u64.rotrSH(Eh, El, 18) ^ u64.rotrBH(Eh, El, 41); + const sigma1l = u64.rotrSL(Eh, El, 14) ^ u64.rotrSL(Eh, El, 18) ^ u64.rotrBL(Eh, El, 41); + //const T1 = (H + sigma1 + Chi(E, F, G) + SHA256_K[i] + SHA256_W[i]) | 0; + const CHIh = (Eh & Fh) ^ (~Eh & Gh); + const CHIl = (El & Fl) ^ (~El & Gl); + // T1 = H + sigma1 + Chi(E, F, G) + SHA512_K[i] + SHA512_W[i] + // prettier-ignore + const T1ll = u64.add5L(Hl, sigma1l, CHIl, SHA512_Kl[i], SHA512_W_L[i]); + const T1h = u64.add5H(T1ll, Hh, sigma1h, CHIh, SHA512_Kh[i], SHA512_W_H[i]); + const T1l = T1ll | 0; + // S0 := (a rightrotate 28) xor (a rightrotate 34) xor (a rightrotate 39) + const sigma0h = u64.rotrSH(Ah, Al, 28) ^ u64.rotrBH(Ah, Al, 34) ^ u64.rotrBH(Ah, Al, 39); + const sigma0l = u64.rotrSL(Ah, Al, 28) ^ u64.rotrBL(Ah, Al, 34) ^ u64.rotrBL(Ah, Al, 39); + const MAJh = (Ah & Bh) ^ (Ah & Ch) ^ (Bh & Ch); + const MAJl = (Al & Bl) ^ (Al & Cl) ^ (Bl & Cl); + Hh = Gh | 0; + Hl = Gl | 0; + Gh = Fh | 0; + Gl = Fl | 0; + Fh = Eh | 0; + Fl = El | 0; + ({ h: Eh, l: El } = u64.add(Dh | 0, Dl | 0, T1h | 0, T1l | 0)); + Dh = Ch | 0; + Dl = Cl | 0; + Ch = Bh | 0; + Cl = Bl | 0; + Bh = Ah | 0; + Bl = Al | 0; + const All = u64.add3L(T1l, sigma0l, MAJl); + Ah = 
u64.add3H(All, T1h, sigma0h, MAJh); + Al = All | 0; + } + // Add the compressed chunk to the current hash value + ({ h: Ah, l: Al } = u64.add(this.Ah | 0, this.Al | 0, Ah | 0, Al | 0)); + ({ h: Bh, l: Bl } = u64.add(this.Bh | 0, this.Bl | 0, Bh | 0, Bl | 0)); + ({ h: Ch, l: Cl } = u64.add(this.Ch | 0, this.Cl | 0, Ch | 0, Cl | 0)); + ({ h: Dh, l: Dl } = u64.add(this.Dh | 0, this.Dl | 0, Dh | 0, Dl | 0)); + ({ h: Eh, l: El } = u64.add(this.Eh | 0, this.El | 0, Eh | 0, El | 0)); + ({ h: Fh, l: Fl } = u64.add(this.Fh | 0, this.Fl | 0, Fh | 0, Fl | 0)); + ({ h: Gh, l: Gl } = u64.add(this.Gh | 0, this.Gl | 0, Gh | 0, Gl | 0)); + ({ h: Hh, l: Hl } = u64.add(this.Hh | 0, this.Hl | 0, Hh | 0, Hl | 0)); + this.set(Ah, Al, Bh, Bl, Ch, Cl, Dh, Dl, Eh, El, Fh, Fl, Gh, Gl, Hh, Hl); + } + roundClean() { + SHA512_W_H.fill(0); + SHA512_W_L.fill(0); + } + destroy() { + this.buffer.fill(0); + this.set(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0); + } + } + const sha512 = /* @__PURE__ */ wrapConstructor(() => new SHA512()); + + /*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ + // 100 lines of code in the file are duplicated from noble-hashes (utils). + // This is OK: `abstract` directory does not use noble-hashes. + // User may opt-in into using different hashing library. This way, noble-hashes + // won't be included into their bundle. 
+ const _0n$5 = BigInt(0); + const _1n$7 = BigInt(1); + const _2n$4 = BigInt(2); + function isBytes$2(a) { + return (a instanceof Uint8Array || + (a != null && typeof a === 'object' && a.constructor.name === 'Uint8Array')); + } + function abytes(item) { + if (!isBytes$2(item)) + throw new Error('Uint8Array expected'); + } + // Array where index 0xf0 (240) is mapped to string 'f0' + const hexes = /* @__PURE__ */ Array.from({ length: 256 }, (_, i) => i.toString(16).padStart(2, '0')); + /** + * @example bytesToHex(Uint8Array.from([0xca, 0xfe, 0x01, 0x23])) // 'cafe0123' + */ + function bytesToHex(bytes) { + abytes(bytes); + // pre-caching improves the speed 6x + let hex = ''; + for (let i = 0; i < bytes.length; i++) { + hex += hexes[bytes[i]]; + } + return hex; + } + function numberToHexUnpadded(num) { + const hex = num.toString(16); + return hex.length & 1 ? `0${hex}` : hex; + } + function hexToNumber(hex) { + if (typeof hex !== 'string') + throw new Error('hex string expected, got ' + typeof hex); + // Big Endian + return BigInt(hex === '' ? 
'0' : `0x${hex}`); + } + // We use optimized technique to convert hex string to byte array + const asciis = { _0: 48, _9: 57, _A: 65, _F: 70, _a: 97, _f: 102 }; + function asciiToBase16(char) { + if (char >= asciis._0 && char <= asciis._9) + return char - asciis._0; + if (char >= asciis._A && char <= asciis._F) + return char - (asciis._A - 10); + if (char >= asciis._a && char <= asciis._f) + return char - (asciis._a - 10); + return; + } + /** + * @example hexToBytes('cafe0123') // Uint8Array.from([0xca, 0xfe, 0x01, 0x23]) + */ + function hexToBytes(hex) { + if (typeof hex !== 'string') + throw new Error('hex string expected, got ' + typeof hex); + const hl = hex.length; + const al = hl / 2; + if (hl % 2) + throw new Error('padded hex string expected, got unpadded hex of length ' + hl); + const array = new Uint8Array(al); + for (let ai = 0, hi = 0; ai < al; ai++, hi += 2) { + const n1 = asciiToBase16(hex.charCodeAt(hi)); + const n2 = asciiToBase16(hex.charCodeAt(hi + 1)); + if (n1 === undefined || n2 === undefined) { + const char = hex[hi] + hex[hi + 1]; + throw new Error('hex string expected, got non-hex character "' + char + '" at index ' + hi); + } + array[ai] = n1 * 16 + n2; + } + return array; + } + // BE: Big Endian, LE: Little Endian + function bytesToNumberBE(bytes) { + return hexToNumber(bytesToHex(bytes)); + } + function bytesToNumberLE(bytes) { + abytes(bytes); + return hexToNumber(bytesToHex(Uint8Array.from(bytes).reverse())); + } + function numberToBytesBE(n, len) { + return hexToBytes(n.toString(16).padStart(len * 2, '0')); + } + function numberToBytesLE(n, len) { + return numberToBytesBE(n, len).reverse(); + } + // Unpadded, rarely used + function numberToVarBytesBE(n) { + return hexToBytes(numberToHexUnpadded(n)); + } + /** + * Takes hex string or Uint8Array, converts to Uint8Array. + * Validates output length. + * Will throw error for other types. + * @param title descriptive title for an error e.g. 
'private key' + * @param hex hex string or Uint8Array + * @param expectedLength optional, will compare to result array's length + * @returns + */ + function ensureBytes$1(title, hex, expectedLength) { + let res; + if (typeof hex === 'string') { + try { + res = hexToBytes(hex); + } + catch (e) { + throw new Error(`${title} must be valid hex string, got "${hex}". Cause: ${e}`); + } + } + else if (isBytes$2(hex)) { + // Uint8Array.from() instead of hash.slice() because node.js Buffer + // is instance of Uint8Array, and its slice() creates **mutable** copy + res = Uint8Array.from(hex); + } + else { + throw new Error(`${title} must be hex string or Uint8Array`); + } + const len = res.length; + if (typeof expectedLength === 'number' && len !== expectedLength) + throw new Error(`${title} expected ${expectedLength} bytes, got ${len}`); + return res; + } + /** + * Copies several Uint8Arrays into one. + */ + function concatBytes(...arrays) { + let sum = 0; + for (let i = 0; i < arrays.length; i++) { + const a = arrays[i]; + abytes(a); + sum += a.length; + } + const res = new Uint8Array(sum); + for (let i = 0, pad = 0; i < arrays.length; i++) { + const a = arrays[i]; + res.set(a, pad); + pad += a.length; + } + return res; + } + // Compares 2 u8a-s in kinda constant time + function equalBytes$1(a, b) { + if (a.length !== b.length) + return false; + let diff = 0; + for (let i = 0; i < a.length; i++) + diff |= a[i] ^ b[i]; + return diff === 0; + } + /** + * @example utf8ToBytes('abc') // new Uint8Array([97, 98, 99]) + */ + function utf8ToBytes$1(str) { + if (typeof str !== 'string') + throw new Error(`utf8ToBytes expected string, got ${typeof str}`); + return new Uint8Array(new TextEncoder().encode(str)); // https://bugzil.la/1681809 + } + // Bit operations + /** + * Calculates amount of bits in a bigint. 
+ * Same as `n.toString(2).length` + */ + function bitLen(n) { + let len; + for (len = 0; n > _0n$5; n >>= _1n$7, len += 1) + ; + return len; + } + /** + * Gets single bit at position. + * NOTE: first bit position is 0 (same as arrays) + * Same as `!!+Array.from(n.toString(2)).reverse()[pos]` + */ + function bitGet(n, pos) { + return (n >> BigInt(pos)) & _1n$7; + } + /** + * Sets single bit at position. + */ + function bitSet(n, pos, value) { + return n | ((value ? _1n$7 : _0n$5) << BigInt(pos)); + } + /** + * Calculate mask for N bits. Not using ** operator with bigints because of old engines. + * Same as BigInt(`0b${Array(i).fill('1').join('')}`) + */ + const bitMask = (n) => (_2n$4 << BigInt(n - 1)) - _1n$7; + // DRBG + const u8n = (data) => new Uint8Array(data); // creates Uint8Array + const u8fr = (arr) => Uint8Array.from(arr); // another shortcut + /** + * Minimal HMAC-DRBG from NIST 800-90 for RFC6979 sigs. + * @returns function that will call DRBG until 2nd arg returns something meaningful + * @example + * const drbg = createHmacDRBG(32, 32, hmac); + * drbg(seed, bytesToKey); // bytesToKey must return Key or undefined + */ + function createHmacDrbg(hashLen, qByteLen, hmacFn) { + if (typeof hashLen !== 'number' || hashLen < 2) + throw new Error('hashLen must be a number'); + if (typeof qByteLen !== 'number' || qByteLen < 2) + throw new Error('qByteLen must be a number'); + if (typeof hmacFn !== 'function') + throw new Error('hmacFn must be a function'); + // Step B, Step C: set hashLen to 8*ceil(hlen/8) + let v = u8n(hashLen); // Minimal non-full-spec HMAC-DRBG from NIST 800-90 for RFC6979 sigs. + let k = u8n(hashLen); // Steps B and C of RFC6979 3.2: set hashLen, in our case always same + let i = 0; // Iterations counter, will throw when over 1000 + const reset = () => { + v.fill(1); + k.fill(0); + i = 0; + }; + const h = (...b) => hmacFn(k, v, ...b); // hmac(k)(v, ...values) + const reseed = (seed = u8n()) => { + // HMAC-DRBG reseed() function. 
Steps D-G + k = h(u8fr([0x00]), seed); // k = hmac(k || v || 0x00 || seed) + v = h(); // v = hmac(k || v) + if (seed.length === 0) + return; + k = h(u8fr([0x01]), seed); // k = hmac(k || v || 0x01 || seed) + v = h(); // v = hmac(k || v) + }; + const gen = () => { + // HMAC-DRBG generate() function + if (i++ >= 1000) + throw new Error('drbg: tried 1000 values'); + let len = 0; + const out = []; + while (len < qByteLen) { + v = h(); + const sl = v.slice(); + out.push(sl); + len += v.length; + } + return concatBytes(...out); + }; + const genUntil = (seed, pred) => { + reset(); + reseed(seed); // Steps D-G + let res = undefined; // Step H: grind until k is in [1..n-1] + while (!(res = pred(gen()))) + reseed(); + reset(); + return res; + }; + return genUntil; + } + // Validating curves and fields + const validatorFns = { + bigint: (val) => typeof val === 'bigint', + function: (val) => typeof val === 'function', + boolean: (val) => typeof val === 'boolean', + string: (val) => typeof val === 'string', + stringOrUint8Array: (val) => typeof val === 'string' || isBytes$2(val), + isSafeInteger: (val) => Number.isSafeInteger(val), + array: (val) => Array.isArray(val), + field: (val, object) => object.Fp.isValid(val), + hash: (val) => typeof val === 'function' && Number.isSafeInteger(val.outputLen), + }; + // type Record = { [P in K]: T; } + function validateObject(object, validators, optValidators = {}) { + const checkField = (fieldName, type, isOptional) => { + const checkVal = validatorFns[type]; + if (typeof checkVal !== 'function') + throw new Error(`Invalid validator "${type}", expected function`); + const val = object[fieldName]; + if (isOptional && val === undefined) + return; + if (!checkVal(val, object)) { + throw new Error(`Invalid param ${String(fieldName)}=${val} (${typeof val}), expected ${type}`); + } + }; + for (const [fieldName, type] of Object.entries(validators)) + checkField(fieldName, type, false); + for (const [fieldName, type] of 
Object.entries(optValidators)) + checkField(fieldName, type, true); + return object; + } + // validate type tests + // const o: { a: number; b: number; c: number } = { a: 1, b: 5, c: 6 }; + // const z0 = validateObject(o, { a: 'isSafeInteger' }, { c: 'bigint' }); // Ok! + // // Should fail type-check + // const z1 = validateObject(o, { a: 'tmp' }, { c: 'zz' }); + // const z2 = validateObject(o, { a: 'isSafeInteger' }, { c: 'zz' }); + // const z3 = validateObject(o, { test: 'boolean', z: 'bug' }); + // const z4 = validateObject(o, { a: 'boolean', z: 'bug' }); + + var ut = /*#__PURE__*/Object.freeze({ + __proto__: null, + abytes: abytes, + bitGet: bitGet, + bitLen: bitLen, + bitMask: bitMask, + bitSet: bitSet, + bytesToHex: bytesToHex, + bytesToNumberBE: bytesToNumberBE, + bytesToNumberLE: bytesToNumberLE, + concatBytes: concatBytes, + createHmacDrbg: createHmacDrbg, + ensureBytes: ensureBytes$1, + equalBytes: equalBytes$1, + hexToBytes: hexToBytes, + hexToNumber: hexToNumber, + isBytes: isBytes$2, + numberToBytesBE: numberToBytesBE, + numberToBytesLE: numberToBytesLE, + numberToHexUnpadded: numberToHexUnpadded, + numberToVarBytesBE: numberToVarBytesBE, + utf8ToBytes: utf8ToBytes$1, + validateObject: validateObject + }); + + /*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ + // Utilities for modular arithmetics and finite fields + // prettier-ignore + const _0n$4 = BigInt(0), _1n$6 = BigInt(1), _2n$3 = BigInt(2), _3n$1 = BigInt(3); + // prettier-ignore + const _4n = BigInt(4), _5n$1 = BigInt(5), _8n$1 = BigInt(8); + // prettier-ignore + BigInt(9); BigInt(16); + // Calculates a modulo b + function mod(a, b) { + const result = a % b; + return result >= _0n$4 ? result : b + result; + } + /** + * Efficiently raise num to power and do modular division. + * Unsafe in some contexts: uses ladder, so can expose bigint bits. 
+ * @example + * pow(2n, 6n, 11n) // 64n % 11n == 9n + */ + // TODO: use field version && remove + function pow(num, power, modulo) { + if (modulo <= _0n$4 || power < _0n$4) + throw new Error('Expected power/modulo > 0'); + if (modulo === _1n$6) + return _0n$4; + let res = _1n$6; + while (power > _0n$4) { + if (power & _1n$6) + res = (res * num) % modulo; + num = (num * num) % modulo; + power >>= _1n$6; + } + return res; + } + // Does x ^ (2 ^ power) mod p. pow2(30, 4) == 30 ^ (2 ^ 4) + function pow2(x, power, modulo) { + let res = x; + while (power-- > _0n$4) { + res *= res; + res %= modulo; + } + return res; + } + // Inverses number over modulo + function invert(number, modulo) { + if (number === _0n$4 || modulo <= _0n$4) { + throw new Error(`invert: expected positive integers, got n=${number} mod=${modulo}`); + } + // Euclidean GCD https://brilliant.org/wiki/extended-euclidean-algorithm/ + // Fermat's little theorem "CT-like" version inv(n) = n^(m-2) mod m is 30x slower. + let a = mod(number, modulo); + let b = modulo; + // prettier-ignore + let x = _0n$4, u = _1n$6; + while (a !== _0n$4) { + // JIT applies optimization if those two lines follow each other + const q = b / a; + const r = b % a; + const m = x - u * q; + // prettier-ignore + b = a, a = r, x = u, u = m; + } + const gcd = b; + if (gcd !== _1n$6) + throw new Error('invert: does not exist'); + return mod(x, modulo); + } + /** + * Tonelli-Shanks square root search algorithm. + * 1. https://eprint.iacr.org/2012/685.pdf (page 12) + * 2. Square Roots from 1; 24, 51, 10 to Dan Shanks + * Will start an infinite loop if field order P is not prime. + * @param P field order + * @returns function that takes field Fp (created from P) and number n + */ + function tonelliShanks(P) { + // Legendre constant: used to calculate Legendre symbol (a | p), + // which denotes the value of a^((p-1)/2) (mod p). 
+ // (a | p) โ‰ก 1 if a is a square (mod p) + // (a | p) โ‰ก -1 if a is not a square (mod p) + // (a | p) โ‰ก 0 if a โ‰ก 0 (mod p) + const legendreC = (P - _1n$6) / _2n$3; + let Q, S, Z; + // Step 1: By factoring out powers of 2 from p - 1, + // find q and s such that p - 1 = q*(2^s) with q odd + for (Q = P - _1n$6, S = 0; Q % _2n$3 === _0n$4; Q /= _2n$3, S++) + ; + // Step 2: Select a non-square z such that (z | p) โ‰ก -1 and set c โ‰ก zq + for (Z = _2n$3; Z < P && pow(Z, legendreC, P) !== P - _1n$6; Z++) + ; + // Fast-path + if (S === 1) { + const p1div4 = (P + _1n$6) / _4n; + return function tonelliFast(Fp, n) { + const root = Fp.pow(n, p1div4); + if (!Fp.eql(Fp.sqr(root), n)) + throw new Error('Cannot find square root'); + return root; + }; + } + // Slow-path + const Q1div2 = (Q + _1n$6) / _2n$3; + return function tonelliSlow(Fp, n) { + // Step 0: Check that n is indeed a square: (n | p) should not be โ‰ก -1 + if (Fp.pow(n, legendreC) === Fp.neg(Fp.ONE)) + throw new Error('Cannot find square root'); + let r = S; + // TODO: will fail at Fp2/etc + let g = Fp.pow(Fp.mul(Fp.ONE, Z), Q); // will update both x and b + let x = Fp.pow(n, Q1div2); // first guess at the square root + let b = Fp.pow(n, Q); // first guess at the fudge factor + while (!Fp.eql(b, Fp.ONE)) { + if (Fp.eql(b, Fp.ZERO)) + return Fp.ZERO; // https://en.wikipedia.org/wiki/Tonelli%E2%80%93Shanks_algorithm (4. 
If t = 0, return r = 0) + // Find m such b^(2^m)==1 + let m = 1; + for (let t2 = Fp.sqr(b); m < r; m++) { + if (Fp.eql(t2, Fp.ONE)) + break; + t2 = Fp.sqr(t2); // t2 *= t2 + } + // NOTE: r-m-1 can be bigger than 32, need to convert to bigint before shift, otherwise there will be overflow + const ge = Fp.pow(g, _1n$6 << BigInt(r - m - 1)); // ge = 2^(r-m-1) + g = Fp.sqr(ge); // g = ge * ge + x = Fp.mul(x, ge); // x *= ge + b = Fp.mul(b, g); // b *= g + r = m; + } + return x; + }; + } + function FpSqrt(P) { + // NOTE: different algorithms can give different roots, it is up to user to decide which one they want. + // For example there is FpSqrtOdd/FpSqrtEven to choice root based on oddness (used for hash-to-curve). + // P โ‰ก 3 (mod 4) + // โˆšn = n^((P+1)/4) + if (P % _4n === _3n$1) { + // Not all roots possible! + // const ORDER = + // 0x1a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaabn; + // const NUM = 72057594037927816n; + const p1div4 = (P + _1n$6) / _4n; + return function sqrt3mod4(Fp, n) { + const root = Fp.pow(n, p1div4); + // Throw if root**2 != n + if (!Fp.eql(Fp.sqr(root), n)) + throw new Error('Cannot find square root'); + return root; + }; + } + // Atkin algorithm for q โ‰ก 5 (mod 8), https://eprint.iacr.org/2012/685.pdf (page 10) + if (P % _8n$1 === _5n$1) { + const c1 = (P - _5n$1) / _8n$1; + return function sqrt5mod8(Fp, n) { + const n2 = Fp.mul(n, _2n$3); + const v = Fp.pow(n2, c1); + const nv = Fp.mul(n, v); + const i = Fp.mul(Fp.mul(nv, _2n$3), v); + const root = Fp.mul(nv, Fp.sub(i, Fp.ONE)); + if (!Fp.eql(Fp.sqr(root), n)) + throw new Error('Cannot find square root'); + return root; + }; + } + // Other cases: Tonelli-Shanks algorithm + return tonelliShanks(P); + } + // Little-endian check for first LE bit (last BE bit); + const isNegativeLE = (num, modulo) => (mod(num, modulo) & _1n$6) === _1n$6; + // prettier-ignore + const FIELD_FIELDS = [ + 'create', 'isValid', 'is0', 'neg', 'inv', 'sqrt', 'sqr', 
+ 'eql', 'add', 'sub', 'mul', 'pow', 'div', + 'addN', 'subN', 'mulN', 'sqrN' + ]; + function validateField(field) { + const initial = { + ORDER: 'bigint', + MASK: 'bigint', + BYTES: 'isSafeInteger', + BITS: 'isSafeInteger', + }; + const opts = FIELD_FIELDS.reduce((map, val) => { + map[val] = 'function'; + return map; + }, initial); + return validateObject(field, opts); + } + // Generic field functions + /** + * Same as `pow` but for Fp: non-constant-time. + * Unsafe in some contexts: uses ladder, so can expose bigint bits. + */ + function FpPow(f, num, power) { + // Should have same speed as pow for bigints + // TODO: benchmark! + if (power < _0n$4) + throw new Error('Expected power > 0'); + if (power === _0n$4) + return f.ONE; + if (power === _1n$6) + return num; + let p = f.ONE; + let d = num; + while (power > _0n$4) { + if (power & _1n$6) + p = f.mul(p, d); + d = f.sqr(d); + power >>= _1n$6; + } + return p; + } + /** + * Efficiently invert an array of Field elements. + * `inv(0)` will return `undefined` here: make sure to throw an error. + */ + function FpInvertBatch(f, nums) { + const tmp = new Array(nums.length); + // Walk from first to last, multiply them by each other MOD p + const lastMultiplied = nums.reduce((acc, num, i) => { + if (f.is0(num)) + return acc; + tmp[i] = acc; + return f.mul(acc, num); + }, f.ONE); + // Invert last element + const inverted = f.inv(lastMultiplied); + // Walk from last to first, multiply them by inverted each other MOD p + nums.reduceRight((acc, num, i) => { + if (f.is0(num)) + return acc; + tmp[i] = f.mul(acc, tmp[i]); + return f.mul(acc, num); + }, inverted); + return tmp; + } + // CURVE.n lengths + function nLength(n, nBitLength) { + // Bit size, byte size of CURVE.n + const _nBitLength = nBitLength !== undefined ? nBitLength : n.toString(2).length; + const nByteLength = Math.ceil(_nBitLength / 8); + return { nBitLength: _nBitLength, nByteLength }; + } + /** + * Initializes a finite field over prime. 
**Non-primes are not supported.** + * Do not init in loop: slow. Very fragile: always run a benchmark on a change. + * Major performance optimizations: + * * a) denormalized operations like mulN instead of mul + * * b) same object shape: never add or remove keys + * * c) Object.freeze + * @param ORDER prime positive bigint + * @param bitLen how many bits the field consumes + * @param isLE (def: false) if encoding / decoding should be in little-endian + * @param redef optional faster redefinitions of sqrt and other methods + */ + function Field(ORDER, bitLen, isLE = false, redef = {}) { + if (ORDER <= _0n$4) + throw new Error(`Expected Field ORDER > 0, got ${ORDER}`); + const { nBitLength: BITS, nByteLength: BYTES } = nLength(ORDER, bitLen); + if (BYTES > 2048) + throw new Error('Field lengths over 2048 bytes are not supported'); + const sqrtP = FpSqrt(ORDER); + const f = Object.freeze({ + ORDER, + BITS, + BYTES, + MASK: bitMask(BITS), + ZERO: _0n$4, + ONE: _1n$6, + create: (num) => mod(num, ORDER), + isValid: (num) => { + if (typeof num !== 'bigint') + throw new Error(`Invalid field element: expected bigint, got ${typeof num}`); + return _0n$4 <= num && num < ORDER; // 0 is valid element, but it's not invertible + }, + is0: (num) => num === _0n$4, + isOdd: (num) => (num & _1n$6) === _1n$6, + neg: (num) => mod(-num, ORDER), + eql: (lhs, rhs) => lhs === rhs, + sqr: (num) => mod(num * num, ORDER), + add: (lhs, rhs) => mod(lhs + rhs, ORDER), + sub: (lhs, rhs) => mod(lhs - rhs, ORDER), + mul: (lhs, rhs) => mod(lhs * rhs, ORDER), + pow: (num, power) => FpPow(f, num, power), + div: (lhs, rhs) => mod(lhs * invert(rhs, ORDER), ORDER), + // Same as above, but doesn't normalize + sqrN: (num) => num * num, + addN: (lhs, rhs) => lhs + rhs, + subN: (lhs, rhs) => lhs - rhs, + mulN: (lhs, rhs) => lhs * rhs, + inv: (num) => invert(num, ORDER), + sqrt: redef.sqrt || ((n) => sqrtP(f, n)), + invertBatch: (lst) => FpInvertBatch(f, lst), + // TODO: do we really need constant cmov? 
+ // We don't have const-time bigints anyway, so probably will be not very useful + cmov: (a, b, c) => (c ? b : a), + toBytes: (num) => (isLE ? numberToBytesLE(num, BYTES) : numberToBytesBE(num, BYTES)), + fromBytes: (bytes) => { + if (bytes.length !== BYTES) + throw new Error(`Fp.fromBytes: expected ${BYTES}, got ${bytes.length}`); + return isLE ? bytesToNumberLE(bytes) : bytesToNumberBE(bytes); + }, + }); + return Object.freeze(f); + } + function FpSqrtEven(Fp, elm) { + if (!Fp.isOdd) + throw new Error(`Field doesn't have isOdd`); + const root = Fp.sqrt(elm); + return Fp.isOdd(root) ? Fp.neg(root) : root; + } + /** + * Returns total number of bytes consumed by the field element. + * For example, 32 bytes for usual 256-bit weierstrass curve. + * @param fieldOrder number of field elements, usually CURVE.n + * @returns byte length of field + */ + function getFieldBytesLength(fieldOrder) { + if (typeof fieldOrder !== 'bigint') + throw new Error('field order must be bigint'); + const bitLength = fieldOrder.toString(2).length; + return Math.ceil(bitLength / 8); + } + /** + * Returns minimal amount of bytes that can be safely reduced + * by field order. + * Should be 2^-128 for 128-bit curve such as P256. + * @param fieldOrder number of field elements, usually CURVE.n + * @returns byte length of target hash + */ + function getMinHashLength(fieldOrder) { + const length = getFieldBytesLength(fieldOrder); + return length + Math.ceil(length / 2); + } + /** + * "Constant-time" private key generation utility. + * Can take (n + n/2) or more bytes of uniform input e.g. from CSPRNG or KDF + * and convert them into private scalar, with the modulo bias being negligible. + * Needs at least 48 bytes of input for 32-byte private key. 
+ * https://research.kudelskisecurity.com/2020/07/28/the-definitive-guide-to-modulo-bias-and-how-to-avoid-it/ + * FIPS 186-5, A.2 https://csrc.nist.gov/publications/detail/fips/186/5/final + * RFC 9380, https://www.rfc-editor.org/rfc/rfc9380#section-5 + * @param hash hash output from SHA3 or a similar function + * @param groupOrder size of subgroup - (e.g. secp256k1.CURVE.n) + * @param isLE interpret hash bytes as LE num + * @returns valid private scalar + */ + function mapHashToField(key, fieldOrder, isLE = false) { + const len = key.length; + const fieldLen = getFieldBytesLength(fieldOrder); + const minLen = getMinHashLength(fieldOrder); + // No small numbers: need to understand bias story. No huge numbers: easier to detect JS timings. + if (len < 16 || len < minLen || len > 1024) + throw new Error(`expected ${minLen}-1024 bytes of input, got ${len}`); + const num = isLE ? bytesToNumberBE(key) : bytesToNumberLE(key); + // `mod(x, 11)` can sometimes produce 0. `mod(x, 10) + 1` is the same, but no 0 + const reduced = mod(num, fieldOrder - _1n$6) + _1n$6; + return isLE ? numberToBytesLE(reduced, fieldLen) : numberToBytesBE(reduced, fieldLen); + } + + /*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ + // Abelian group utilities + const _0n$3 = BigInt(0); + const _1n$5 = BigInt(1); + // Elliptic curve multiplication of Point by scalar. Fragile. + // Scalars should always be less than curve order: this should be checked inside of a curve itself. 
+ // Creates precomputation tables for fast multiplication: + // - private scalar is split by fixed size windows of W bits + // - every window point is collected from window's table & added to accumulator + // - since windows are different, same point inside tables won't be accessed more than once per calc + // - each multiplication is 'Math.ceil(CURVE_ORDER / ๐‘Š) + 1' point additions (fixed for any scalar) + // - +1 window is neccessary for wNAF + // - wNAF reduces table size: 2x less memory + 2x faster generation, but 10% slower multiplication + // TODO: Research returning 2d JS array of windows, instead of a single window. This would allow + // windows to be in different memory locations + function wNAF(c, bits) { + const constTimeNegate = (condition, item) => { + const neg = item.negate(); + return condition ? neg : item; + }; + const opts = (W) => { + const windows = Math.ceil(bits / W) + 1; // +1, because + const windowSize = 2 ** (W - 1); // -1 because we skip zero + return { windows, windowSize }; + }; + return { + constTimeNegate, + // non-const time multiplication ladder + unsafeLadder(elm, n) { + let p = c.ZERO; + let d = elm; + while (n > _0n$3) { + if (n & _1n$5) + p = p.add(d); + d = d.double(); + n >>= _1n$5; + } + return p; + }, + /** + * Creates a wNAF precomputation window. Used for caching. + * Default window size is set by `utils.precompute()` and is equal to 8. + * Number of precomputed points depends on the curve size: + * 2^(๐‘Šโˆ’1) * (Math.ceil(๐‘› / ๐‘Š) + 1), where: + * - ๐‘Š is the window size + * - ๐‘› is the bitlength of the curve order. + * For a 256-bit curve and window size 8, the number of precomputed points is 128 * 33 = 4224. 
+ * @returns precomputed point tables flattened to a single array + */ + precomputeWindow(elm, W) { + const { windows, windowSize } = opts(W); + const points = []; + let p = elm; + let base = p; + for (let window = 0; window < windows; window++) { + base = p; + points.push(base); + // =1, because we skip zero + for (let i = 1; i < windowSize; i++) { + base = base.add(p); + points.push(base); + } + p = base.double(); + } + return points; + }, + /** + * Implements ec multiplication using precomputed tables and w-ary non-adjacent form. + * @param W window size + * @param precomputes precomputed tables + * @param n scalar (we don't check here, but should be less than curve order) + * @returns real and fake (for const-time) points + */ + wNAF(W, precomputes, n) { + // TODO: maybe check that scalar is less than group order? wNAF behavious is undefined otherwise + // But need to carefully remove other checks before wNAF. ORDER == bits here + const { windows, windowSize } = opts(W); + let p = c.ZERO; + let f = c.BASE; + const mask = BigInt(2 ** W - 1); // Create mask with W ones: 0b1111 for W=4 etc. + const maxNumber = 2 ** W; + const shiftBy = BigInt(W); + for (let window = 0; window < windows; window++) { + const offset = window * windowSize; + // Extract W bits. + let wbits = Number(n & mask); + // Shift number by W bits. + n >>= shiftBy; + // If the bits are bigger than max size, we'll split those. + // +224 => 256 - 32 + if (wbits > windowSize) { + wbits -= maxNumber; + n += _1n$5; + } + // This code was first written with assumption that 'f' and 'p' will never be infinity point: + // since each addition is multiplied by 2 ** W, it cannot cancel each other. However, + // there is negate now: it is possible that negated element from low value + // would be the same as high element, which will create carry into next window. + // It's not obvious how this can fail, but still worth investigating later. + // Check if we're onto Zero point. 
+ // Add random point inside current window to f. + const offset1 = offset; + const offset2 = offset + Math.abs(wbits) - 1; // -1 because we skip zero + const cond1 = window % 2 !== 0; + const cond2 = wbits < 0; + if (wbits === 0) { + // The most important part for const-time getPublicKey + f = f.add(constTimeNegate(cond1, precomputes[offset1])); + } + else { + p = p.add(constTimeNegate(cond2, precomputes[offset2])); + } + } + // JIT-compiler should not eliminate f here, since it will later be used in normalizeZ() + // Even if the variable is still unused, there are some checks which will + // throw an exception, so compiler needs to prove they won't happen, which is hard. + // At this point there is a way to F be infinity-point even if p is not, + // which makes it less const-time: around 1 bigint multiply. + return { p, f }; + }, + wNAFCached(P, precomputesMap, n, transform) { + // @ts-ignore + const W = P._WINDOW_SIZE || 1; + // Calculate precomputes on a first run, reuse them after + let comp = precomputesMap.get(P); + if (!comp) { + comp = this.precomputeWindow(P, W); + if (W !== 1) { + precomputesMap.set(P, transform(comp)); + } + } + return this.wNAF(W, comp, n); + }, + }; + } + function validateBasic(curve) { + validateField(curve.Fp); + validateObject(curve, { + n: 'bigint', + h: 'bigint', + Gx: 'field', + Gy: 'field', + }, { + nBitLength: 'isSafeInteger', + nByteLength: 'isSafeInteger', + }); + // Set defaults + return Object.freeze({ + ...nLength(curve.n, curve.nBitLength), + ...curve, + ...{ p: curve.Fp.ORDER }, + }); + } + + /*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ + // Twisted Edwards curve. The formula is: axยฒ + yยฒ = 1 + dxยฒyยฒ + // Be friendly to bad ECMAScript parsers by not using bigint literals + // prettier-ignore + const _0n$2 = BigInt(0), _1n$4 = BigInt(1), _2n$2 = BigInt(2), _8n = BigInt(8); + // verification rule is either zip215 or rfc8032 / nist186-5. 
Consult fromHex: + const VERIFY_DEFAULT = { zip215: true }; + function validateOpts$2(curve) { + const opts = validateBasic(curve); + validateObject(curve, { + hash: 'function', + a: 'bigint', + d: 'bigint', + randomBytes: 'function', + }, { + adjustScalarBytes: 'function', + domain: 'function', + uvRatio: 'function', + mapToCurve: 'function', + }); + // Set defaults + return Object.freeze({ ...opts }); + } + // It is not generic twisted curve for now, but ed25519/ed448 generic implementation + function twistedEdwards(curveDef) { + const CURVE = validateOpts$2(curveDef); + const { Fp, n: CURVE_ORDER, prehash: prehash, hash: cHash, randomBytes, nByteLength, h: cofactor, } = CURVE; + const MASK = _2n$2 << (BigInt(nByteLength * 8) - _1n$4); + const modP = Fp.create; // Function overrides + // sqrt(u/v) + const uvRatio = CURVE.uvRatio || + ((u, v) => { + try { + return { isValid: true, value: Fp.sqrt(u * Fp.inv(v)) }; + } + catch (e) { + return { isValid: false, value: _0n$2 }; + } + }); + const adjustScalarBytes = CURVE.adjustScalarBytes || ((bytes) => bytes); // NOOP + const domain = CURVE.domain || + ((data, ctx, phflag) => { + if (ctx.length || phflag) + throw new Error('Contexts/pre-hash are not supported'); + return data; + }); // NOOP + const inBig = (n) => typeof n === 'bigint' && _0n$2 < n; // n in [1..] + const inRange = (n, max) => inBig(n) && inBig(max) && n < max; // n in [1..max-1] + const in0MaskRange = (n) => n === _0n$2 || inRange(n, MASK); // n in [0..MASK-1] + function assertInRange(n, max) { + // n in [1..max-1] + if (inRange(n, max)) + return n; + throw new Error(`Expected valid scalar < ${max}, got ${typeof n} ${n}`); + } + function assertGE0(n) { + // n in [0..CURVE_ORDER-1] + return n === _0n$2 ? 
n : assertInRange(n, CURVE_ORDER); // GE = prime subgroup, not full group + } + const pointPrecomputes = new Map(); + function isPoint(other) { + if (!(other instanceof Point)) + throw new Error('ExtendedPoint expected'); + } + // Extended Point works in extended coordinates: (x, y, z, t) โˆ‹ (x=x/z, y=y/z, t=xy). + // https://en.wikipedia.org/wiki/Twisted_Edwards_curve#Extended_coordinates + class Point { + constructor(ex, ey, ez, et) { + this.ex = ex; + this.ey = ey; + this.ez = ez; + this.et = et; + if (!in0MaskRange(ex)) + throw new Error('x required'); + if (!in0MaskRange(ey)) + throw new Error('y required'); + if (!in0MaskRange(ez)) + throw new Error('z required'); + if (!in0MaskRange(et)) + throw new Error('t required'); + } + get x() { + return this.toAffine().x; + } + get y() { + return this.toAffine().y; + } + static fromAffine(p) { + if (p instanceof Point) + throw new Error('extended point not allowed'); + const { x, y } = p || {}; + if (!in0MaskRange(x) || !in0MaskRange(y)) + throw new Error('invalid affine point'); + return new Point(x, y, _1n$4, modP(x * y)); + } + static normalizeZ(points) { + const toInv = Fp.invertBatch(points.map((p) => p.ez)); + return points.map((p, i) => p.toAffine(toInv[i])).map(Point.fromAffine); + } + // "Private method", don't use it directly + _setWindowSize(windowSize) { + this._WINDOW_SIZE = windowSize; + pointPrecomputes.delete(this); + } + // Not required for fromHex(), which always creates valid points. + // Could be useful for fromAffine(). + assertValidity() { + const { a, d } = CURVE; + if (this.is0()) + throw new Error('bad point: ZERO'); // TODO: optimize, with vars below? 
+ // Equation in affine coordinates: axยฒ + yยฒ = 1 + dxยฒyยฒ + // Equation in projective coordinates (X/Z, Y/Z, Z): (aXยฒ + Yยฒ)Zยฒ = Zโด + dXยฒYยฒ + const { ex: X, ey: Y, ez: Z, et: T } = this; + const X2 = modP(X * X); // Xยฒ + const Y2 = modP(Y * Y); // Yยฒ + const Z2 = modP(Z * Z); // Zยฒ + const Z4 = modP(Z2 * Z2); // Zโด + const aX2 = modP(X2 * a); // aXยฒ + const left = modP(Z2 * modP(aX2 + Y2)); // (aXยฒ + Yยฒ)Zยฒ + const right = modP(Z4 + modP(d * modP(X2 * Y2))); // Zโด + dXยฒYยฒ + if (left !== right) + throw new Error('bad point: equation left != right (1)'); + // In Extended coordinates we also have T, which is x*y=T/Z: check X*Y == Z*T + const XY = modP(X * Y); + const ZT = modP(Z * T); + if (XY !== ZT) + throw new Error('bad point: equation left != right (2)'); + } + // Compare one point to another. + equals(other) { + isPoint(other); + const { ex: X1, ey: Y1, ez: Z1 } = this; + const { ex: X2, ey: Y2, ez: Z2 } = other; + const X1Z2 = modP(X1 * Z2); + const X2Z1 = modP(X2 * Z1); + const Y1Z2 = modP(Y1 * Z2); + const Y2Z1 = modP(Y2 * Z1); + return X1Z2 === X2Z1 && Y1Z2 === Y2Z1; + } + is0() { + return this.equals(Point.ZERO); + } + negate() { + // Flips point sign to a negative one (-x, y in affine coords) + return new Point(modP(-this.ex), this.ey, this.ez, modP(-this.et)); + } + // Fast algo for doubling Extended Point. + // https://hyperelliptic.org/EFD/g1p/auto-twisted-extended.html#doubling-dbl-2008-hwcd + // Cost: 4M + 4S + 1*a + 6add + 1*2. 
+ double() { + const { a } = CURVE; + const { ex: X1, ey: Y1, ez: Z1 } = this; + const A = modP(X1 * X1); // A = X12 + const B = modP(Y1 * Y1); // B = Y12 + const C = modP(_2n$2 * modP(Z1 * Z1)); // C = 2*Z12 + const D = modP(a * A); // D = a*A + const x1y1 = X1 + Y1; + const E = modP(modP(x1y1 * x1y1) - A - B); // E = (X1+Y1)2-A-B + const G = D + B; // G = D+B + const F = G - C; // F = G-C + const H = D - B; // H = D-B + const X3 = modP(E * F); // X3 = E*F + const Y3 = modP(G * H); // Y3 = G*H + const T3 = modP(E * H); // T3 = E*H + const Z3 = modP(F * G); // Z3 = F*G + return new Point(X3, Y3, Z3, T3); + } + // Fast algo for adding 2 Extended Points. + // https://hyperelliptic.org/EFD/g1p/auto-twisted-extended.html#addition-add-2008-hwcd + // Cost: 9M + 1*a + 1*d + 7add. + add(other) { + isPoint(other); + const { a, d } = CURVE; + const { ex: X1, ey: Y1, ez: Z1, et: T1 } = this; + const { ex: X2, ey: Y2, ez: Z2, et: T2 } = other; + // Faster algo for adding 2 Extended Points when curve's a=-1. + // http://hyperelliptic.org/EFD/g1p/auto-twisted-extended-1.html#addition-add-2008-hwcd-4 + // Cost: 8M + 8add + 2*2. + // Note: It does not check whether the `other` point is valid. + if (a === BigInt(-1)) { + const A = modP((Y1 - X1) * (Y2 + X2)); + const B = modP((Y1 + X1) * (Y2 - X2)); + const F = modP(B - A); + if (F === _0n$2) + return this.double(); // Same point. 
Tests say it doesn't affect timing + const C = modP(Z1 * _2n$2 * T2); + const D = modP(T1 * _2n$2 * Z2); + const E = D + C; + const G = B + A; + const H = D - C; + const X3 = modP(E * F); + const Y3 = modP(G * H); + const T3 = modP(E * H); + const Z3 = modP(F * G); + return new Point(X3, Y3, Z3, T3); + } + const A = modP(X1 * X2); // A = X1*X2 + const B = modP(Y1 * Y2); // B = Y1*Y2 + const C = modP(T1 * d * T2); // C = T1*d*T2 + const D = modP(Z1 * Z2); // D = Z1*Z2 + const E = modP((X1 + Y1) * (X2 + Y2) - A - B); // E = (X1+Y1)*(X2+Y2)-A-B + const F = D - C; // F = D-C + const G = D + C; // G = D+C + const H = modP(B - a * A); // H = B-a*A + const X3 = modP(E * F); // X3 = E*F + const Y3 = modP(G * H); // Y3 = G*H + const T3 = modP(E * H); // T3 = E*H + const Z3 = modP(F * G); // Z3 = F*G + return new Point(X3, Y3, Z3, T3); + } + subtract(other) { + return this.add(other.negate()); + } + wNAF(n) { + return wnaf.wNAFCached(this, pointPrecomputes, n, Point.normalizeZ); + } + // Constant-time multiplication. + multiply(scalar) { + const { p, f } = this.wNAF(assertInRange(scalar, CURVE_ORDER)); + return Point.normalizeZ([p, f])[0]; + } + // Non-constant-time multiplication. Uses double-and-add algorithm. + // It's faster, but should only be used when you don't care about + // an exposed private key e.g. sig verification. + // Does NOT allow scalars higher than CURVE.n. + multiplyUnsafe(scalar) { + let n = assertGE0(scalar); // 0 <= scalar < CURVE.n + if (n === _0n$2) + return I; + if (this.equals(I) || n === _1n$4) + return this; + if (this.equals(G)) + return this.wNAF(n).p; + return wnaf.unsafeLadder(this, n); + } + // Checks if point is of small order. + // If you add something to small order point, you will have "dirty" + // point with torsion component. + // Multiplies point by cofactor and checks if the result is 0. + isSmallOrder() { + return this.multiplyUnsafe(cofactor).is0(); + } + // Multiplies point by curve order and checks if the result is 0. 
+ // Returns `false` is the point is dirty. + isTorsionFree() { + return wnaf.unsafeLadder(this, CURVE_ORDER).is0(); + } + // Converts Extended point to default (x, y) coordinates. + // Can accept precomputed Z^-1 - for example, from invertBatch. + toAffine(iz) { + const { ex: x, ey: y, ez: z } = this; + const is0 = this.is0(); + if (iz == null) + iz = is0 ? _8n : Fp.inv(z); // 8 was chosen arbitrarily + const ax = modP(x * iz); + const ay = modP(y * iz); + const zz = modP(z * iz); + if (is0) + return { x: _0n$2, y: _1n$4 }; + if (zz !== _1n$4) + throw new Error('invZ was invalid'); + return { x: ax, y: ay }; + } + clearCofactor() { + const { h: cofactor } = CURVE; + if (cofactor === _1n$4) + return this; + return this.multiplyUnsafe(cofactor); + } + // Converts hash string or Uint8Array to Point. + // Uses algo from RFC8032 5.1.3. + static fromHex(hex, zip215 = false) { + const { d, a } = CURVE; + const len = Fp.BYTES; + hex = ensureBytes$1('pointHex', hex, len); // copy hex to a new array + const normed = hex.slice(); // copy again, we'll manipulate it + const lastByte = hex[len - 1]; // select last byte + normed[len - 1] = lastByte & ~0x80; // clear last bit + const y = bytesToNumberLE(normed); + if (y === _0n$2) ; + else { + // RFC8032 prohibits >= p, but ZIP215 doesn't + if (zip215) + assertInRange(y, MASK); // zip215=true [1..P-1] (2^255-19-1 for ed25519) + else + assertInRange(y, Fp.ORDER); // zip215=false [1..MASK-1] (2^256-1 for ed25519) + } + // Ed25519: xยฒ = (yยฒ-1)/(dyยฒ+1) mod p. Ed448: xยฒ = (yยฒ-1)/(dyยฒ-1) mod p. Generic case: + // axยฒ+yยฒ=1+dxยฒyยฒ => yยฒ-1=dxยฒyยฒ-axยฒ => yยฒ-1=xยฒ(dyยฒ-a) => xยฒ=(yยฒ-1)/(dyยฒ-a) + const y2 = modP(y * y); // denominator is always non-0 mod p. + const u = modP(y2 - _1n$4); // u = yยฒ - 1 + const v = modP(d * y2 - a); // v = d yยฒ + 1. 
+ let { isValid, value: x } = uvRatio(u, v); // โˆš(u/v) + if (!isValid) + throw new Error('Point.fromHex: invalid y coordinate'); + const isXOdd = (x & _1n$4) === _1n$4; // There are 2 square roots. Use x_0 bit to select proper + const isLastByteOdd = (lastByte & 0x80) !== 0; // x_0, last bit + if (!zip215 && x === _0n$2 && isLastByteOdd) + // if x=0 and x_0 = 1, fail + throw new Error('Point.fromHex: x=0 and x_0=1'); + if (isLastByteOdd !== isXOdd) + x = modP(-x); // if x_0 != x mod 2, set x = p-x + return Point.fromAffine({ x, y }); + } + static fromPrivateKey(privKey) { + return getExtendedPublicKey(privKey).point; + } + toRawBytes() { + const { x, y } = this.toAffine(); + const bytes = numberToBytesLE(y, Fp.BYTES); // each y has 2 x values (x, -y) + bytes[bytes.length - 1] |= x & _1n$4 ? 0x80 : 0; // when compressing, it's enough to store y + return bytes; // and use the last byte to encode sign of x + } + toHex() { + return bytesToHex(this.toRawBytes()); // Same as toRawBytes, but returns string. + } + } + Point.BASE = new Point(CURVE.Gx, CURVE.Gy, _1n$4, modP(CURVE.Gx * CURVE.Gy)); + Point.ZERO = new Point(_0n$2, _1n$4, _1n$4, _0n$2); // 0, 1, 1, 0 + const { BASE: G, ZERO: I } = Point; + const wnaf = wNAF(Point, nByteLength * 8); + function modN(a) { + return mod(a, CURVE_ORDER); + } + // Little-endian SHA512 with modulo n + function modN_LE(hash) { + return modN(bytesToNumberLE(hash)); + } + /** Convenience method that creates public key and other stuff. 
RFC8032 5.1.5 */ + function getExtendedPublicKey(key) { + const len = nByteLength; + key = ensureBytes$1('private key', key, len); + // Hash private key with curve's hash function to produce uniformingly random input + // Check byte lengths: ensure(64, h(ensure(32, key))) + const hashed = ensureBytes$1('hashed private key', cHash(key), 2 * len); + const head = adjustScalarBytes(hashed.slice(0, len)); // clear first half bits, produce FE + const prefix = hashed.slice(len, 2 * len); // second half is called key prefix (5.1.6) + const scalar = modN_LE(head); // The actual private scalar + const point = G.multiply(scalar); // Point on Edwards curve aka public key + const pointBytes = point.toRawBytes(); // Uint8Array representation + return { head, prefix, scalar, point, pointBytes }; + } + // Calculates EdDSA pub key. RFC8032 5.1.5. Privkey is hashed. Use first half with 3 bits cleared + function getPublicKey(privKey) { + return getExtendedPublicKey(privKey).pointBytes; + } + // int('LE', SHA512(dom2(F, C) || msgs)) mod N + function hashDomainToScalar(context = new Uint8Array(), ...msgs) { + const msg = concatBytes(...msgs); + return modN_LE(cHash(domain(msg, ensureBytes$1('context', context), !!prehash))); + } + /** Signs message with privateKey. RFC8032 5.1.6 */ + function sign(msg, privKey, options = {}) { + msg = ensureBytes$1('message', msg); + if (prehash) + msg = prehash(msg); // for ed25519ph etc. 
+ const { prefix, scalar, pointBytes } = getExtendedPublicKey(privKey); + const r = hashDomainToScalar(options.context, prefix, msg); // r = dom2(F, C) || prefix || PH(M) + const R = G.multiply(r).toRawBytes(); // R = rG + const k = hashDomainToScalar(options.context, R, pointBytes, msg); // R || A || PH(M) + const s = modN(r + k * scalar); // S = (r + k * s) mod L + assertGE0(s); // 0 <= s < l + const res = concatBytes(R, numberToBytesLE(s, Fp.BYTES)); + return ensureBytes$1('result', res, nByteLength * 2); // 64-byte signature + } + const verifyOpts = VERIFY_DEFAULT; + function verify(sig, msg, publicKey, options = verifyOpts) { + const { context, zip215 } = options; + const len = Fp.BYTES; // Verifies EdDSA signature against message and public key. RFC8032 5.1.7. + sig = ensureBytes$1('signature', sig, 2 * len); // An extended group equation is checked. + msg = ensureBytes$1('message', msg); + if (prehash) + msg = prehash(msg); // for ed25519ph, etc + const s = bytesToNumberLE(sig.slice(len, 2 * len)); + // zip215: true is good for consensus-critical apps and allows points < 2^256 + // zip215: false follows RFC8032 / NIST186-5 and restricts points to CURVE.p + let A, R, SB; + try { + A = Point.fromHex(publicKey, zip215); + R = Point.fromHex(sig.slice(0, len), zip215); + SB = G.multiplyUnsafe(s); // 0 <= s < l is done inside + } + catch (error) { + return false; + } + if (!zip215 && A.isSmallOrder()) + return false; + const k = hashDomainToScalar(context, R.toRawBytes(), A.toRawBytes(), msg); + const RkA = R.add(A.multiplyUnsafe(k)); + // [8][S]B = [8]R + [8][k]A' + return RkA.subtract(SB).clearCofactor().equals(Point.ZERO); + } + G._setWindowSize(8); // Enable precomputes. Slows down first publicKey computation by 20ms. + const utils = { + getExtendedPublicKey, + // ed25519 private keys are uniform 32b. No need to check for modulo bias, like in secp256k1. 
+ randomPrivateKey: () => randomBytes(Fp.BYTES), + /** + * We're doing scalar multiplication (used in getPublicKey etc) with precomputed BASE_POINT + * values. This slows down first getPublicKey() by milliseconds (see Speed section), + * but allows to speed-up subsequent getPublicKey() calls up to 20x. + * @param windowSize 2, 4, 8, 16 + */ + precompute(windowSize = 8, point = Point.BASE) { + point._setWindowSize(windowSize); + point.multiply(BigInt(3)); + return point; + }, + }; + return { + CURVE, + getPublicKey, + sign, + verify, + ExtendedPoint: Point, + utils, + }; + } + + /*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ + const _0n$1 = BigInt(0); + const _1n$3 = BigInt(1); + function validateOpts$1(curve) { + validateObject(curve, { + a: 'bigint', + }, { + montgomeryBits: 'isSafeInteger', + nByteLength: 'isSafeInteger', + adjustScalarBytes: 'function', + domain: 'function', + powPminus2: 'function', + Gu: 'bigint', + }); + // Set defaults + return Object.freeze({ ...curve }); + } + // NOTE: not really montgomery curve, just bunch of very specific methods for X25519/X448 (RFC 7748, https://www.rfc-editor.org/rfc/rfc7748) + // Uses only one coordinate instead of two + function montgomery(curveDef) { + const CURVE = validateOpts$1(curveDef); + const { P } = CURVE; + const modP = (n) => mod(n, P); + const montgomeryBits = CURVE.montgomeryBits; + const montgomeryBytes = Math.ceil(montgomeryBits / 8); + const fieldLen = CURVE.nByteLength; + const adjustScalarBytes = CURVE.adjustScalarBytes || ((bytes) => bytes); + const powPminus2 = CURVE.powPminus2 || ((x) => pow(x, P - BigInt(2), P)); + // cswap from RFC7748. But it is not from RFC7748! + /* + cswap(swap, x_2, x_3): + dummy = mask(swap) AND (x_2 XOR x_3) + x_2 = x_2 XOR dummy + x_3 = x_3 XOR dummy + Return (x_2, x_3) + Where mask(swap) is the all-1 or all-0 word of the same length as x_2 + and x_3, computed, e.g., as mask(swap) = 0 - swap. 
+ */ + function cswap(swap, x_2, x_3) { + const dummy = modP(swap * (x_2 - x_3)); + x_2 = modP(x_2 - dummy); + x_3 = modP(x_3 + dummy); + return [x_2, x_3]; + } + // Accepts 0 as well + function assertFieldElement(n) { + if (typeof n === 'bigint' && _0n$1 <= n && n < P) + return n; + throw new Error('Expected valid scalar 0 < scalar < CURVE.P'); + } + // x25519 from 4 + // The constant a24 is (486662 - 2) / 4 = 121665 for curve25519/X25519 + const a24 = (CURVE.a - BigInt(2)) / BigInt(4); + /** + * + * @param pointU u coordinate (x) on Montgomery Curve 25519 + * @param scalar by which the point would be multiplied + * @returns new Point on Montgomery curve + */ + function montgomeryLadder(pointU, scalar) { + const u = assertFieldElement(pointU); + // Section 5: Implementations MUST accept non-canonical values and process them as + // if they had been reduced modulo the field prime. + const k = assertFieldElement(scalar); + const x_1 = u; + let x_2 = _1n$3; + let z_2 = _0n$1; + let x_3 = u; + let z_3 = _1n$3; + let swap = _0n$1; + let sw; + for (let t = BigInt(montgomeryBits - 1); t >= _0n$1; t--) { + const k_t = (k >> t) & _1n$3; + swap ^= k_t; + sw = cswap(swap, x_2, x_3); + x_2 = sw[0]; + x_3 = sw[1]; + sw = cswap(swap, z_2, z_3); + z_2 = sw[0]; + z_3 = sw[1]; + swap = k_t; + const A = x_2 + z_2; + const AA = modP(A * A); + const B = x_2 - z_2; + const BB = modP(B * B); + const E = AA - BB; + const C = x_3 + z_3; + const D = x_3 - z_3; + const DA = modP(D * A); + const CB = modP(C * B); + const dacb = DA + CB; + const da_cb = DA - CB; + x_3 = modP(dacb * dacb); + z_3 = modP(x_1 * modP(da_cb * da_cb)); + x_2 = modP(AA * BB); + z_2 = modP(E * (AA + modP(a24 * E))); + } + // (x_2, x_3) = cswap(swap, x_2, x_3) + sw = cswap(swap, x_2, x_3); + x_2 = sw[0]; + x_3 = sw[1]; + // (z_2, z_3) = cswap(swap, z_2, z_3) + sw = cswap(swap, z_2, z_3); + z_2 = sw[0]; + z_3 = sw[1]; + // z_2^(p - 2) + const z2 = powPminus2(z_2); + // Return x_2 * (z_2^(p - 2)) + return modP(x_2 * 
z2); + } + function encodeUCoordinate(u) { + return numberToBytesLE(modP(u), montgomeryBytes); + } + function decodeUCoordinate(uEnc) { + // Section 5: When receiving such an array, implementations of X25519 + // MUST mask the most significant bit in the final byte. + const u = ensureBytes$1('u coordinate', uEnc, montgomeryBytes); + if (fieldLen === 32) + u[31] &= 127; // 0b0111_1111 + return bytesToNumberLE(u); + } + function decodeScalar(n) { + const bytes = ensureBytes$1('scalar', n); + const len = bytes.length; + if (len !== montgomeryBytes && len !== fieldLen) + throw new Error(`Expected ${montgomeryBytes} or ${fieldLen} bytes, got ${len}`); + return bytesToNumberLE(adjustScalarBytes(bytes)); + } + function scalarMult(scalar, u) { + const pointU = decodeUCoordinate(u); + const _scalar = decodeScalar(scalar); + const pu = montgomeryLadder(pointU, _scalar); + // The result was not contributory + // https://cr.yp.to/ecdh.html#validate + if (pu === _0n$1) + throw new Error('Invalid private or public key received'); + return encodeUCoordinate(pu); + } + // Computes public key from private. By doing scalar multiplication of base point. + const GuBytes = encodeUCoordinate(CURVE.Gu); + function scalarMultBase(scalar) { + return scalarMult(scalar, GuBytes); + } + return { + scalarMult, + scalarMultBase, + getSharedSecret: (privateKey, publicKey) => scalarMult(privateKey, publicKey), + getPublicKey: (privateKey) => scalarMultBase(privateKey), + utils: { randomPrivateKey: () => CURVE.randomBytes(CURVE.nByteLength) }, + GuBytes: GuBytes, + }; + } + + /*! 
noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ + /** + * ed25519 Twisted Edwards curve with following addons: + * - X25519 ECDH + * - Ristretto cofactor elimination + * - Elligator hash-to-group / point indistinguishability + */ + const ED25519_P = BigInt('57896044618658097711785492504343953926634992332820282019728792003956564819949'); + // โˆš(-1) aka โˆš(a) aka 2^((p-1)/4) + const ED25519_SQRT_M1 = BigInt('19681161376707505956807079304988542015446066515923890162744021073123829784752'); + // prettier-ignore + BigInt(0); const _1n$2 = BigInt(1), _2n$1 = BigInt(2), _5n = BigInt(5); + // prettier-ignore + const _10n = BigInt(10), _20n = BigInt(20), _40n = BigInt(40), _80n = BigInt(80); + function ed25519_pow_2_252_3(x) { + const P = ED25519_P; + const x2 = (x * x) % P; + const b2 = (x2 * x) % P; // x^3, 11 + const b4 = (pow2(b2, _2n$1, P) * b2) % P; // x^15, 1111 + const b5 = (pow2(b4, _1n$2, P) * x) % P; // x^31 + const b10 = (pow2(b5, _5n, P) * b5) % P; + const b20 = (pow2(b10, _10n, P) * b10) % P; + const b40 = (pow2(b20, _20n, P) * b20) % P; + const b80 = (pow2(b40, _40n, P) * b40) % P; + const b160 = (pow2(b80, _80n, P) * b80) % P; + const b240 = (pow2(b160, _80n, P) * b80) % P; + const b250 = (pow2(b240, _10n, P) * b10) % P; + const pow_p_5_8 = (pow2(b250, _2n$1, P) * x) % P; + // ^ To pow to (p+3)/8, multiply it by x. 
+ return { pow_p_5_8, b2 }; + } + function adjustScalarBytes(bytes) { + // Section 5: For X25519, in order to decode 32 random bytes as an integer scalar, + // set the three least significant bits of the first byte + bytes[0] &= 248; // 0b1111_1000 + // and the most significant bit of the last to zero, + bytes[31] &= 127; // 0b0111_1111 + // set the second most significant bit of the last byte to 1 + bytes[31] |= 64; // 0b0100_0000 + return bytes; + } + // sqrt(u/v) + function uvRatio(u, v) { + const P = ED25519_P; + const v3 = mod(v * v * v, P); // vยณ + const v7 = mod(v3 * v3 * v, P); // vโท + // (p+3)/8 and (p-5)/8 + const pow = ed25519_pow_2_252_3(u * v7).pow_p_5_8; + let x = mod(u * v3 * pow, P); // (uvยณ)(uvโท)^(p-5)/8 + const vx2 = mod(v * x * x, P); // vxยฒ + const root1 = x; // First root candidate + const root2 = mod(x * ED25519_SQRT_M1, P); // Second root candidate + const useRoot1 = vx2 === u; // If vxยฒ = u (mod p), x is a square root + const useRoot2 = vx2 === mod(-u, P); // If vxยฒ = -u, set x <-- x * 2^((p-1)/4) + const noRoot = vx2 === mod(-u * ED25519_SQRT_M1, P); // There is no valid root, vxยฒ = -uโˆš(-1) + if (useRoot1) + x = root1; + if (useRoot2 || noRoot) + x = root2; // We return root2 anyway, for const-time + if (isNegativeLE(x, P)) + x = mod(-x, P); + return { isValid: useRoot1 || useRoot2, value: x }; + } + const Fp$1 = Field(ED25519_P, undefined, true); + const ed25519Defaults = { + // Param: a + a: BigInt(-1), // Fp.create(-1) is proper; our way still works and is faster + // d is equal to -121665/121666 over finite field. 
+ // Negative number is P - number, and division is invert(number, P) + d: BigInt('37095705934669439343138083508754565189542113879843219016388785533085940283555'), + // Finite field ๐”ฝp over which we'll do calculations; 2n**255n - 19n + Fp: Fp$1, + // Subgroup order: how many points curve has + // 2n**252n + 27742317777372353535851937790883648493n; + n: BigInt('7237005577332262213973186563042994240857116359379907606001950938285454250989'), + // Cofactor + h: BigInt(8), + // Base point (x, y) aka generator point + Gx: BigInt('15112221349535400772501151409588531511454012693041857206046113283949847762202'), + Gy: BigInt('46316835694926478169428394003475163141307993866256225615783033603165251855960'), + hash: sha512, + randomBytes: randomBytes$1, + adjustScalarBytes, + // dom2 + // Ratio of u to v. Allows us to combine inversion and square root. Uses algo from RFC8032 5.1.3. + // Constant-time, u/โˆšv + uvRatio, + }; + const ed25519 = /* @__PURE__ */ twistedEdwards(ed25519Defaults); + function ed25519_domain(data, ctx, phflag) { + if (ctx.length > 255) + throw new Error('Context is too big'); + return concatBytes$1(utf8ToBytes$2('SigEd25519 no Ed25519 collisions'), new Uint8Array([phflag ? 
1 : 0, ctx.length]), ctx, data); + } + /* @__PURE__ */ twistedEdwards({ + ...ed25519Defaults, + domain: ed25519_domain, + }); + /* @__PURE__ */ twistedEdwards({ + ...ed25519Defaults, + domain: ed25519_domain, + prehash: sha512, + }); + const x25519 = /* @__PURE__ */ (() => montgomery({ + P: ED25519_P, + a: BigInt(486662), + montgomeryBits: 255, // n is 253 bits + nByteLength: 32, + Gu: BigInt(9), + powPminus2: (x) => { + const P = ED25519_P; + // x^(p-2) aka x^(2^255-21) + const { pow_p_5_8, b2 } = ed25519_pow_2_252_3(x); + return mod(pow2(pow_p_5_8, BigInt(3), P) * b2, P); + }, + adjustScalarBytes, + randomBytes: randomBytes$1, + }))(); + // Hash To Curve Elligator2 Map (NOTE: different from ristretto255 elligator) + // NOTE: very important part is usage of FpSqrtEven for ELL2_C1_EDWARDS, since + // SageMath returns different root first and everything falls apart + const ELL2_C1 = (Fp$1.ORDER + BigInt(3)) / BigInt(8); // 1. c1 = (q + 3) / 8 # Integer arithmetic + Fp$1.pow(_2n$1, ELL2_C1); // 2. c2 = 2^c1 + Fp$1.sqrt(Fp$1.neg(Fp$1.ONE)); // 3. c3 = sqrt(-1) + (Fp$1.ORDER - BigInt(5)) / BigInt(8); // 4. 
c4 = (q - 5) / 8 # Integer arithmetic + BigInt(486662); + FpSqrtEven(Fp$1, Fp$1.neg(BigInt(486664))); // sgn0(c1) MUST equal 0 + // โˆš(ad - 1) + BigInt('25063068953384623474111414158702152701244531502492656460079210482610430750235'); + // 1 / โˆš(a-d) + BigInt('54469307008909316920995813868745141605393597292927456921205312896311721017578'); + // 1-dยฒ + BigInt('1159843021668779879193775521855586647937357759715417654439879720876111806838'); + // (d-1)ยฒ + BigInt('40440834346308536858101042469323190826248399146238708352240133220865137265952'); + BigInt('0x7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'); + + const PUBLIC_KEY_BYTE_LENGTH = 32; + const PRIVATE_KEY_BYTE_LENGTH = 64; // private key is actually 32 bytes but for historical reasons we concat private and public keys + const KEYS_BYTE_LENGTH = 32; + function generateKey$2() { + // the actual private key (32 bytes) + const privateKeyRaw = ed25519.utils.randomPrivateKey(); + const publicKey = ed25519.getPublicKey(privateKeyRaw); + // concatenated the public key to the private key + const privateKey = concatKeys(privateKeyRaw, publicKey); + return { + privateKey, + publicKey + }; + } + /** + * Generate keypair from a 32 byte uint8array + */ + function generateKeyFromSeed(seed) { + if (seed.length !== KEYS_BYTE_LENGTH) { + throw new TypeError('"seed" must be 32 bytes in length.'); + } + else if (!(seed instanceof Uint8Array)) { + throw new TypeError('"seed" must be a node.js Buffer, or Uint8Array.'); + } + // based on node forges algorithm, the seed is used directly as private key + const privateKeyRaw = seed; + const publicKey = ed25519.getPublicKey(privateKeyRaw); + const privateKey = concatKeys(privateKeyRaw, publicKey); + return { + privateKey, + publicKey + }; + } + function hashAndSign$2(privateKey, msg) { + const privateKeyRaw = privateKey.subarray(0, KEYS_BYTE_LENGTH); + return ed25519.sign(msg instanceof Uint8Array ? 
msg : msg.subarray(), privateKeyRaw); + } + function hashAndVerify$2(publicKey, sig, msg) { + return ed25519.verify(sig, msg instanceof Uint8Array ? msg : msg.subarray(), publicKey); + } + function concatKeys(privateKeyRaw, publicKey) { + const privateKey = new Uint8Array(PRIVATE_KEY_BYTE_LENGTH); + for (let i = 0; i < KEYS_BYTE_LENGTH; i++) { + privateKey[i] = privateKeyRaw[i]; + privateKey[KEYS_BYTE_LENGTH + i] = publicKey[i]; + } + return privateKey; + } + + /* eslint-env browser */ + // Check native crypto exists and is enabled (In insecure context `self.crypto` + // exists but `self.crypto.subtle` does not). + var webcrypto = { + get(win = globalThis) { + const nativeCrypto = win.crypto; + if (nativeCrypto == null || nativeCrypto.subtle == null) { + throw Object.assign(new Error('Missing Web Crypto API. ' + + 'The most likely cause of this error is that this page is being accessed ' + + 'from an insecure context (i.e. not HTTPS). For more information and ' + + 'possible resolutions see ' + + 'https://github.com/libp2p/js-libp2p/blob/main/packages/crypto/README.md#web-crypto-api'), { code: 'ERR_MISSING_WEB_CRYPTO' }); + } + return nativeCrypto; + } + }; + + // WebKit on Linux does not support deriving a key from an empty PBKDF2 key. + // So, as a workaround, we provide the generated key as a constant. 
We test that + // this generated key is accurate in test/workaround.spec.ts + // Generated via: + // await crypto.subtle.exportKey('jwk', + // await crypto.subtle.deriveKey( + // { name: 'PBKDF2', salt: new Uint8Array(16), iterations: 32767, hash: { name: 'SHA-256' } }, + // await crypto.subtle.importKey('raw', new Uint8Array(0), { name: 'PBKDF2' }, false, ['deriveKey']), + // { name: 'AES-GCM', length: 128 }, true, ['encrypt', 'decrypt']) + // ) + const derivedEmptyPasswordKey = { alg: 'A128GCM', ext: true, k: 'scm9jmO_4BJAgdwWGVulLg', key_ops: ['encrypt', 'decrypt'], kty: 'oct' }; + // Based off of code from https://github.com/luke-park/SecureCompatibleEncryptionExamples + function create(opts) { + const algorithm = 'AES-GCM'; + let keyLength = 16; + const nonceLength = 12; + const digest = 'SHA-256'; + const saltLength = 16; + const iterations = 32767; + const crypto = webcrypto.get(); + keyLength *= 8; // Browser crypto uses bits instead of bytes + /** + * Uses the provided password to derive a pbkdf2 key. The key + * will then be used to encrypt the data. 
+ */ + async function encrypt(data, password) { + const salt = crypto.getRandomValues(new Uint8Array(saltLength)); + const nonce = crypto.getRandomValues(new Uint8Array(nonceLength)); + const aesGcm = { name: algorithm, iv: nonce }; + if (typeof password === 'string') { + password = fromString(password); + } + let cryptoKey; + if (password.length === 0) { + cryptoKey = await crypto.subtle.importKey('jwk', derivedEmptyPasswordKey, { name: 'AES-GCM' }, true, ['encrypt']); + try { + const deriveParams = { name: 'PBKDF2', salt, iterations, hash: { name: digest } }; + const runtimeDerivedEmptyPassword = await crypto.subtle.importKey('raw', password, { name: 'PBKDF2' }, false, ['deriveKey']); + cryptoKey = await crypto.subtle.deriveKey(deriveParams, runtimeDerivedEmptyPassword, { name: algorithm, length: keyLength }, true, ['encrypt']); + } + catch { + cryptoKey = await crypto.subtle.importKey('jwk', derivedEmptyPasswordKey, { name: 'AES-GCM' }, true, ['encrypt']); + } + } + else { + // Derive a key using PBKDF2. + const deriveParams = { name: 'PBKDF2', salt, iterations, hash: { name: digest } }; + const rawKey = await crypto.subtle.importKey('raw', password, { name: 'PBKDF2' }, false, ['deriveKey']); + cryptoKey = await crypto.subtle.deriveKey(deriveParams, rawKey, { name: algorithm, length: keyLength }, true, ['encrypt']); + } + // Encrypt the string. + const ciphertext = await crypto.subtle.encrypt(aesGcm, cryptoKey, data); + return concat$1([salt, aesGcm.iv, new Uint8Array(ciphertext)]); + } + /** + * Uses the provided password to derive a pbkdf2 key. The key + * will then be used to decrypt the data. The options used to create + * this decryption cipher must be the same as those used to create + * the encryption cipher. 
+ */ + async function decrypt(data, password) { + const salt = data.subarray(0, saltLength); + const nonce = data.subarray(saltLength, saltLength + nonceLength); + const ciphertext = data.subarray(saltLength + nonceLength); + const aesGcm = { name: algorithm, iv: nonce }; + if (typeof password === 'string') { + password = fromString(password); + } + let cryptoKey; + if (password.length === 0) { + try { + const deriveParams = { name: 'PBKDF2', salt, iterations, hash: { name: digest } }; + const runtimeDerivedEmptyPassword = await crypto.subtle.importKey('raw', password, { name: 'PBKDF2' }, false, ['deriveKey']); + cryptoKey = await crypto.subtle.deriveKey(deriveParams, runtimeDerivedEmptyPassword, { name: algorithm, length: keyLength }, true, ['decrypt']); + } + catch { + cryptoKey = await crypto.subtle.importKey('jwk', derivedEmptyPasswordKey, { name: 'AES-GCM' }, true, ['decrypt']); + } + } + else { + // Derive the key using PBKDF2. + const deriveParams = { name: 'PBKDF2', salt, iterations, hash: { name: digest } }; + const rawKey = await crypto.subtle.importKey('raw', password, { name: 'PBKDF2' }, false, ['deriveKey']); + cryptoKey = await crypto.subtle.deriveKey(deriveParams, rawKey, { name: algorithm, length: keyLength }, true, ['decrypt']); + } + // Decrypt the string. + const plaintext = await crypto.subtle.decrypt(aesGcm, cryptoKey, ciphertext); + return new Uint8Array(plaintext); + } + const cipher = { + encrypt, + decrypt + }; + return cipher; + } + + /** + * Exports the given PrivateKey as a base64 encoded string. + * The PrivateKey is encrypted via a password derived PBKDF2 key + * leveraging the aes-gcm cipher algorithm. 
+ */ + async function exporter(privateKey, password) { + const cipher = create(); + const encryptedKey = await cipher.encrypt(privateKey, password); + return base64.encode(encryptedKey); + } + + /* eslint-disable import/export */ + /* eslint-disable complexity */ + /* eslint-disable @typescript-eslint/no-namespace */ + /* eslint-disable @typescript-eslint/no-unnecessary-boolean-literal-compare */ + /* eslint-disable @typescript-eslint/no-empty-interface */ + var KeyType; + (function (KeyType) { + KeyType["RSA"] = "RSA"; + KeyType["Ed25519"] = "Ed25519"; + KeyType["Secp256k1"] = "Secp256k1"; + })(KeyType || (KeyType = {})); + var __KeyTypeValues; + (function (__KeyTypeValues) { + __KeyTypeValues[__KeyTypeValues["RSA"] = 0] = "RSA"; + __KeyTypeValues[__KeyTypeValues["Ed25519"] = 1] = "Ed25519"; + __KeyTypeValues[__KeyTypeValues["Secp256k1"] = 2] = "Secp256k1"; + })(__KeyTypeValues || (__KeyTypeValues = {})); + (function (KeyType) { + KeyType.codec = () => { + return enumeration(__KeyTypeValues); + }; + })(KeyType || (KeyType = {})); + var PublicKey; + (function (PublicKey) { + let _codec; + PublicKey.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if (obj.Type != null) { + w.uint32(8); + KeyType.codec().encode(obj.Type, w); + } + if (obj.Data != null) { + w.uint32(18); + w.bytes(obj.Data); + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length) => { + const obj = {}; + const end = length == null ? 
reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + obj.Type = KeyType.codec().decode(reader); + break; + case 2: + obj.Data = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return obj; + }); + } + return _codec; + }; + PublicKey.encode = (obj) => { + return encodeMessage(obj, PublicKey.codec()); + }; + PublicKey.decode = (buf) => { + return decodeMessage(buf, PublicKey.codec()); + }; + })(PublicKey || (PublicKey = {})); + var PrivateKey; + (function (PrivateKey) { + let _codec; + PrivateKey.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if (obj.Type != null) { + w.uint32(8); + KeyType.codec().encode(obj.Type, w); + } + if (obj.Data != null) { + w.uint32(18); + w.bytes(obj.Data); + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length) => { + const obj = {}; + const end = length == null ? 
reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + obj.Type = KeyType.codec().decode(reader); + break; + case 2: + obj.Data = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return obj; + }); + } + return _codec; + }; + PrivateKey.encode = (obj) => { + return encodeMessage(obj, PrivateKey.codec()); + }; + PrivateKey.decode = (buf) => { + return decodeMessage(buf, PrivateKey.codec()); + }; + })(PrivateKey || (PrivateKey = {})); + + class Ed25519PublicKey { + _key; + constructor(key) { + this._key = ensureKey(key, PUBLIC_KEY_BYTE_LENGTH); + } + verify(data, sig) { + return hashAndVerify$2(this._key, sig, data); + } + marshal() { + return this._key; + } + get bytes() { + return PublicKey.encode({ + Type: KeyType.Ed25519, + Data: this.marshal() + }).subarray(); + } + equals(key) { + return equals(this.bytes, key.bytes); + } + hash() { + const p = sha256$1.digest(this.bytes); + if (isPromise$4(p)) { + return p.then(({ bytes }) => bytes); + } + return p.bytes; + } + } + class Ed25519PrivateKey { + _key; + _publicKey; + // key - 64 byte Uint8Array containing private key + // publicKey - 32 byte Uint8Array containing public key + constructor(key, publicKey) { + this._key = ensureKey(key, PRIVATE_KEY_BYTE_LENGTH); + this._publicKey = ensureKey(publicKey, PUBLIC_KEY_BYTE_LENGTH); + } + sign(message) { + return hashAndSign$2(this._key, message); + } + get public() { + return new Ed25519PublicKey(this._publicKey); + } + marshal() { + return this._key; + } + get bytes() { + return PrivateKey.encode({ + Type: KeyType.Ed25519, + Data: this.marshal() + }).subarray(); + } + equals(key) { + return equals(this.bytes, key.bytes); + } + async hash() { + const p = sha256$1.digest(this.bytes); + let bytes; + if (isPromise$4(p)) { + ({ bytes } = await p); + } + else { + bytes = p.bytes; + } + return bytes; + } + /** + * Gets the ID of the key. 
+ * + * The key id is the base58 encoding of the identity multihash containing its public key. + * The public key is a protobuf encoding containing a type and the DER encoding + * of the PKCS SubjectPublicKeyInfo. + * + * @returns {Promise} + */ + async id() { + const encoding = identity.digest(this.public.bytes); + return base58btc.encode(encoding.bytes).substring(1); + } + /** + * Exports the key into a password protected `format` + */ + async export(password, format = 'libp2p-key') { + if (format === 'libp2p-key') { + return exporter(this.bytes, password); + } + else { + throw new CodeError$2(`export format '${format}' is not supported`, 'ERR_INVALID_EXPORT_FORMAT'); + } + } + } + function unmarshalEd25519PrivateKey(bytes) { + // Try the old, redundant public key version + if (bytes.length > PRIVATE_KEY_BYTE_LENGTH) { + bytes = ensureKey(bytes, PRIVATE_KEY_BYTE_LENGTH + PUBLIC_KEY_BYTE_LENGTH); + const privateKeyBytes = bytes.subarray(0, PRIVATE_KEY_BYTE_LENGTH); + const publicKeyBytes = bytes.subarray(PRIVATE_KEY_BYTE_LENGTH, bytes.length); + return new Ed25519PrivateKey(privateKeyBytes, publicKeyBytes); + } + bytes = ensureKey(bytes, PRIVATE_KEY_BYTE_LENGTH); + const privateKeyBytes = bytes.subarray(0, PRIVATE_KEY_BYTE_LENGTH); + const publicKeyBytes = bytes.subarray(PUBLIC_KEY_BYTE_LENGTH); + return new Ed25519PrivateKey(privateKeyBytes, publicKeyBytes); + } + function unmarshalEd25519PublicKey(bytes) { + bytes = ensureKey(bytes, PUBLIC_KEY_BYTE_LENGTH); + return new Ed25519PublicKey(bytes); + } + async function generateKeyPair$3() { + const { privateKey, publicKey } = generateKey$2(); + return new Ed25519PrivateKey(privateKey, publicKey); + } + async function generateKeyPairFromSeed(seed) { + const { privateKey, publicKey } = generateKeyFromSeed(seed); + return new Ed25519PrivateKey(privateKey, publicKey); + } + function ensureKey(key, length) { + key = Uint8Array.from(key ?? 
[]); + if (key.length !== length) { + throw new CodeError$2(`Key must be a Uint8Array of length ${length}, got ${key.length}`, 'ERR_INVALID_KEY_TYPE'); + } + return key; + } + + var Ed25519 = /*#__PURE__*/Object.freeze({ + __proto__: null, + Ed25519PrivateKey: Ed25519PrivateKey, + Ed25519PublicKey: Ed25519PublicKey, + generateKeyPair: generateKeyPair$3, + generateKeyPairFromSeed: generateKeyPairFromSeed, + unmarshalEd25519PrivateKey: unmarshalEd25519PrivateKey, + unmarshalEd25519PublicKey: unmarshalEd25519PublicKey + }); + + /** + * Generates a Uint8Array with length `number` populated by random bytes + */ + function randomBytes(length) { + if (isNaN(length) || length <= 0) { + throw new CodeError$2('random bytes length must be a Number bigger than 0', 'ERR_INVALID_LENGTH'); + } + return randomBytes$1(length); + } + + // HMAC (RFC 2104) + class HMAC extends Hash { + constructor(hash$1, _key) { + super(); + this.finished = false; + this.destroyed = false; + hash(hash$1); + const key = toBytes$2(_key); + this.iHash = hash$1.create(); + if (typeof this.iHash.update !== 'function') + throw new Error('Expected instance of class which extends utils.Hash'); + this.blockLen = this.iHash.blockLen; + this.outputLen = this.iHash.outputLen; + const blockLen = this.blockLen; + const pad = new Uint8Array(blockLen); + // blockLen can be bigger than outputLen + pad.set(key.length > blockLen ? 
hash$1.create().update(key).digest() : key); + for (let i = 0; i < pad.length; i++) + pad[i] ^= 0x36; + this.iHash.update(pad); + // By doing update (processing of first block) of outer hash here we can re-use it between multiple calls via clone + this.oHash = hash$1.create(); + // Undo internal XOR && apply outer XOR + for (let i = 0; i < pad.length; i++) + pad[i] ^= 0x36 ^ 0x5c; + this.oHash.update(pad); + pad.fill(0); + } + update(buf) { + exists$1(this); + this.iHash.update(buf); + return this; + } + digestInto(out) { + exists$1(this); + bytes$1(out, this.outputLen); + this.finished = true; + this.iHash.digestInto(out); + this.oHash.update(out); + this.oHash.digestInto(out); + this.destroy(); + } + digest() { + const out = new Uint8Array(this.oHash.outputLen); + this.digestInto(out); + return out; + } + _cloneInto(to) { + // Create new instance without calling constructor since key already in state and we don't know it. + to || (to = Object.create(Object.getPrototypeOf(this), {})); + const { oHash, iHash, finished, destroyed, blockLen, outputLen } = this; + to = to; + to.finished = finished; + to.destroyed = destroyed; + to.blockLen = blockLen; + to.outputLen = outputLen; + to.oHash = oHash._cloneInto(to.oHash); + to.iHash = iHash._cloneInto(to.iHash); + return to; + } + destroy() { + this.destroyed = true; + this.oHash.destroy(); + this.iHash.destroy(); + } + } + /** + * HMAC: RFC2104 message authentication code. + * @param hash - function that would be used e.g. 
sha256 + * @param key - message key + * @param message - message data + */ + const hmac = (hash, key, message) => new HMAC(hash, key).update(message).digest(); + hmac.create = (hash, key) => new HMAC(hash, key); + + // Common prologue and epilogue for sync/async functions + function pbkdf2Init(hash$1, _password, _salt, _opts) { + hash(hash$1); + const opts = checkOpts$1({ dkLen: 32, asyncTick: 10 }, _opts); + const { c, dkLen, asyncTick } = opts; + number$2(c); + number$2(dkLen); + number$2(asyncTick); + if (c < 1) + throw new Error('PBKDF2: iterations (c) should be >= 1'); + const password = toBytes$2(_password); + const salt = toBytes$2(_salt); + // DK = PBKDF2(PRF, Password, Salt, c, dkLen); + const DK = new Uint8Array(dkLen); + // U1 = PRF(Password, Salt + INT_32_BE(i)) + const PRF = hmac.create(hash$1, password); + const PRFSalt = PRF._cloneInto().update(salt); + return { c, dkLen, asyncTick, DK, PRF, PRFSalt }; + } + function pbkdf2Output(PRF, PRFSalt, DK, prfW, u) { + PRF.destroy(); + PRFSalt.destroy(); + if (prfW) + prfW.destroy(); + u.fill(0); + return DK; + } + async function pbkdf2Async(hash, password, salt, opts) { + const { c, dkLen, asyncTick, DK, PRF, PRFSalt } = pbkdf2Init(hash, password, salt, opts); + let prfW; // Working copy + const arr = new Uint8Array(4); + const view = createView$1(arr); + const u = new Uint8Array(PRF.outputLen); + // DK = T1 + T2 + โ‹ฏ + Tdklen/hlen + for (let ti = 1, pos = 0; pos < dkLen; ti++, pos += PRF.outputLen) { + // Ti = F(Password, Salt, c, i) + const Ti = DK.subarray(pos, pos + PRF.outputLen); + view.setInt32(0, ti, false); + // F(Password, Salt, c, i) = U1 ^ U2 ^ โ‹ฏ ^ Uc + // U1 = PRF(Password, Salt + INT_32_BE(i)) + (prfW = PRFSalt._cloneInto(prfW)).update(arr).digestInto(u); + Ti.set(u.subarray(0, Ti.length)); + await asyncLoop(c - 1, asyncTick, () => { + // Uc = PRF(Password, Ucโˆ’1) + PRF._cloneInto(prfW).update(u).digestInto(u); + for (let i = 0; i < Ti.length; i++) + Ti[i] ^= u[i]; + }); + } + return 
pbkdf2Output(PRF, PRFSalt, DK, prfW, u); + } + + var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {}; + + function getDefaultExportFromCjs (x) { + return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default') ? x['default'] : x; + } + + /*! + * MIT License + * + * Copyright (c) 2017-2022 Peculiar Ventures, LLC + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ * + */ + + const ARRAY_BUFFER_NAME = "[object ArrayBuffer]"; + class BufferSourceConverter { + static isArrayBuffer(data) { + return Object.prototype.toString.call(data) === ARRAY_BUFFER_NAME; + } + static toArrayBuffer(data) { + if (this.isArrayBuffer(data)) { + return data; + } + if (data.byteLength === data.buffer.byteLength) { + return data.buffer; + } + if (data.byteOffset === 0 && data.byteLength === data.buffer.byteLength) { + return data.buffer; + } + return this.toUint8Array(data.buffer) + .slice(data.byteOffset, data.byteOffset + data.byteLength) + .buffer; + } + static toUint8Array(data) { + return this.toView(data, Uint8Array); + } + static toView(data, type) { + if (data.constructor === type) { + return data; + } + if (this.isArrayBuffer(data)) { + return new type(data); + } + if (this.isArrayBufferView(data)) { + return new type(data.buffer, data.byteOffset, data.byteLength); + } + throw new TypeError("The provided value is not of type '(ArrayBuffer or ArrayBufferView)'"); + } + static isBufferSource(data) { + return this.isArrayBufferView(data) + || this.isArrayBuffer(data); + } + static isArrayBufferView(data) { + return ArrayBuffer.isView(data) + || (data && this.isArrayBuffer(data.buffer)); + } + static isEqual(a, b) { + const aView = BufferSourceConverter.toUint8Array(a); + const bView = BufferSourceConverter.toUint8Array(b); + if (aView.length !== bView.byteLength) { + return false; + } + for (let i = 0; i < aView.length; i++) { + if (aView[i] !== bView[i]) { + return false; + } + } + return true; + } + static concat(...args) { + let buffers; + if (Array.isArray(args[0]) && !(args[1] instanceof Function)) { + buffers = args[0]; + } + else if (Array.isArray(args[0]) && args[1] instanceof Function) { + buffers = args[0]; + } + else { + if (args[args.length - 1] instanceof Function) { + buffers = args.slice(0, args.length - 1); + } + else { + buffers = args; + } + } + let size = 0; + for (const buffer of buffers) { + size += buffer.byteLength; + 
} + const res = new Uint8Array(size); + let offset = 0; + for (const buffer of buffers) { + const view = this.toUint8Array(buffer); + res.set(view, offset); + offset += view.length; + } + if (args[args.length - 1] instanceof Function) { + return this.toView(res, args[args.length - 1]); + } + return res.buffer; + } + } + + const STRING_TYPE = "string"; + const HEX_REGEX = /^[0-9a-f]+$/i; + const BASE64_REGEX = /^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/; + const BASE64URL_REGEX = /^[a-zA-Z0-9-_]+$/; + class Utf8Converter { + static fromString(text) { + const s = unescape(encodeURIComponent(text)); + const uintArray = new Uint8Array(s.length); + for (let i = 0; i < s.length; i++) { + uintArray[i] = s.charCodeAt(i); + } + return uintArray.buffer; + } + static toString(buffer) { + const buf = BufferSourceConverter.toUint8Array(buffer); + let encodedString = ""; + for (let i = 0; i < buf.length; i++) { + encodedString += String.fromCharCode(buf[i]); + } + const decodedString = decodeURIComponent(escape(encodedString)); + return decodedString; + } + } + class Utf16Converter { + static toString(buffer, littleEndian = false) { + const arrayBuffer = BufferSourceConverter.toArrayBuffer(buffer); + const dataView = new DataView(arrayBuffer); + let res = ""; + for (let i = 0; i < arrayBuffer.byteLength; i += 2) { + const code = dataView.getUint16(i, littleEndian); + res += String.fromCharCode(code); + } + return res; + } + static fromString(text, littleEndian = false) { + const res = new ArrayBuffer(text.length * 2); + const dataView = new DataView(res); + for (let i = 0; i < text.length; i++) { + dataView.setUint16(i * 2, text.charCodeAt(i), littleEndian); + } + return res; + } + } + class Convert { + static isHex(data) { + return typeof data === STRING_TYPE + && HEX_REGEX.test(data); + } + static isBase64(data) { + return typeof data === STRING_TYPE + && BASE64_REGEX.test(data); + } + static isBase64Url(data) { + return typeof data === STRING_TYPE + && 
BASE64URL_REGEX.test(data); + } + static ToString(buffer, enc = "utf8") { + const buf = BufferSourceConverter.toUint8Array(buffer); + switch (enc.toLowerCase()) { + case "utf8": + return this.ToUtf8String(buf); + case "binary": + return this.ToBinary(buf); + case "hex": + return this.ToHex(buf); + case "base64": + return this.ToBase64(buf); + case "base64url": + return this.ToBase64Url(buf); + case "utf16le": + return Utf16Converter.toString(buf, true); + case "utf16": + case "utf16be": + return Utf16Converter.toString(buf); + default: + throw new Error(`Unknown type of encoding '${enc}'`); + } + } + static FromString(str, enc = "utf8") { + if (!str) { + return new ArrayBuffer(0); + } + switch (enc.toLowerCase()) { + case "utf8": + return this.FromUtf8String(str); + case "binary": + return this.FromBinary(str); + case "hex": + return this.FromHex(str); + case "base64": + return this.FromBase64(str); + case "base64url": + return this.FromBase64Url(str); + case "utf16le": + return Utf16Converter.fromString(str, true); + case "utf16": + case "utf16be": + return Utf16Converter.fromString(str); + default: + throw new Error(`Unknown type of encoding '${enc}'`); + } + } + static ToBase64(buffer) { + const buf = BufferSourceConverter.toUint8Array(buffer); + if (typeof btoa !== "undefined") { + const binary = this.ToString(buf, "binary"); + return btoa(binary); + } + else { + return Buffer.from(buf).toString("base64"); + } + } + static FromBase64(base64) { + const formatted = this.formatString(base64); + if (!formatted) { + return new ArrayBuffer(0); + } + if (!Convert.isBase64(formatted)) { + throw new TypeError("Argument 'base64Text' is not Base64 encoded"); + } + if (typeof atob !== "undefined") { + return this.FromBinary(atob(formatted)); + } + else { + return new Uint8Array(Buffer.from(formatted, "base64")).buffer; + } + } + static FromBase64Url(base64url) { + const formatted = this.formatString(base64url); + if (!formatted) { + return new ArrayBuffer(0); + } + if 
(!Convert.isBase64Url(formatted)) { + throw new TypeError("Argument 'base64url' is not Base64Url encoded"); + } + return this.FromBase64(this.Base64Padding(formatted.replace(/\-/g, "+").replace(/\_/g, "/"))); + } + static ToBase64Url(data) { + return this.ToBase64(data).replace(/\+/g, "-").replace(/\//g, "_").replace(/\=/g, ""); + } + static FromUtf8String(text, encoding = Convert.DEFAULT_UTF8_ENCODING) { + switch (encoding) { + case "ascii": + return this.FromBinary(text); + case "utf8": + return Utf8Converter.fromString(text); + case "utf16": + case "utf16be": + return Utf16Converter.fromString(text); + case "utf16le": + case "usc2": + return Utf16Converter.fromString(text, true); + default: + throw new Error(`Unknown type of encoding '${encoding}'`); + } + } + static ToUtf8String(buffer, encoding = Convert.DEFAULT_UTF8_ENCODING) { + switch (encoding) { + case "ascii": + return this.ToBinary(buffer); + case "utf8": + return Utf8Converter.toString(buffer); + case "utf16": + case "utf16be": + return Utf16Converter.toString(buffer); + case "utf16le": + case "usc2": + return Utf16Converter.toString(buffer, true); + default: + throw new Error(`Unknown type of encoding '${encoding}'`); + } + } + static FromBinary(text) { + const stringLength = text.length; + const resultView = new Uint8Array(stringLength); + for (let i = 0; i < stringLength; i++) { + resultView[i] = text.charCodeAt(i); + } + return resultView.buffer; + } + static ToBinary(buffer) { + const buf = BufferSourceConverter.toUint8Array(buffer); + let res = ""; + for (let i = 0; i < buf.length; i++) { + res += String.fromCharCode(buf[i]); + } + return res; + } + static ToHex(buffer) { + const buf = BufferSourceConverter.toUint8Array(buffer); + let result = ""; + const len = buf.length; + for (let i = 0; i < len; i++) { + const byte = buf[i]; + if (byte < 16) { + result += "0"; + } + result += byte.toString(16); + } + return result; + } + static FromHex(hexString) { + let formatted = 
this.formatString(hexString); + if (!formatted) { + return new ArrayBuffer(0); + } + if (!Convert.isHex(formatted)) { + throw new TypeError("Argument 'hexString' is not HEX encoded"); + } + if (formatted.length % 2) { + formatted = `0${formatted}`; + } + const res = new Uint8Array(formatted.length / 2); + for (let i = 0; i < formatted.length; i = i + 2) { + const c = formatted.slice(i, i + 2); + res[i / 2] = parseInt(c, 16); + } + return res.buffer; + } + static ToUtf16String(buffer, littleEndian = false) { + return Utf16Converter.toString(buffer, littleEndian); + } + static FromUtf16String(text, littleEndian = false) { + return Utf16Converter.fromString(text, littleEndian); + } + static Base64Padding(base64) { + const padCount = 4 - (base64.length % 4); + if (padCount < 4) { + for (let i = 0; i < padCount; i++) { + base64 += "="; + } + } + return base64; + } + static formatString(data) { + return (data === null || data === void 0 ? void 0 : data.replace(/[\n\r\t ]/g, "")) || ""; + } + } + Convert.DEFAULT_UTF8_ENCODING = "utf8"; + + var BufferSourceConverter_1 = BufferSourceConverter; + var Convert_1 = Convert; + + /*! 
+ Copyright (c) Peculiar Ventures, LLC + */ + + function utilFromBase(inputBuffer, inputBase) { + let result = 0; + if (inputBuffer.length === 1) { + return inputBuffer[0]; + } + for (let i = (inputBuffer.length - 1); i >= 0; i--) { + result += inputBuffer[(inputBuffer.length - 1) - i] * Math.pow(2, inputBase * i); + } + return result; + } + function utilToBase(value, base, reserved = (-1)) { + const internalReserved = reserved; + let internalValue = value; + let result = 0; + let biggest = Math.pow(2, base); + for (let i = 1; i < 8; i++) { + if (value < biggest) { + let retBuf; + if (internalReserved < 0) { + retBuf = new ArrayBuffer(i); + result = i; + } + else { + if (internalReserved < i) { + return (new ArrayBuffer(0)); + } + retBuf = new ArrayBuffer(internalReserved); + result = internalReserved; + } + const retView = new Uint8Array(retBuf); + for (let j = (i - 1); j >= 0; j--) { + const basis = Math.pow(2, j * base); + retView[result - j - 1] = Math.floor(internalValue / basis); + internalValue -= (retView[result - j - 1]) * basis; + } + return retBuf; + } + biggest *= Math.pow(2, base); + } + return new ArrayBuffer(0); + } + function utilConcatView(...views) { + let outputLength = 0; + let prevLength = 0; + for (const view of views) { + outputLength += view.length; + } + const retBuf = new ArrayBuffer(outputLength); + const retView = new Uint8Array(retBuf); + for (const view of views) { + retView.set(view, prevLength); + prevLength += view.length; + } + return retView; + } + function utilDecodeTC() { + const buf = new Uint8Array(this.valueHex); + if (this.valueHex.byteLength >= 2) { + const condition1 = (buf[0] === 0xFF) && (buf[1] & 0x80); + const condition2 = (buf[0] === 0x00) && ((buf[1] & 0x80) === 0x00); + if (condition1 || condition2) { + this.warnings.push("Needlessly long format"); + } + } + const bigIntBuffer = new ArrayBuffer(this.valueHex.byteLength); + const bigIntView = new Uint8Array(bigIntBuffer); + for (let i = 0; i < 
this.valueHex.byteLength; i++) { + bigIntView[i] = 0; + } + bigIntView[0] = (buf[0] & 0x80); + const bigInt = utilFromBase(bigIntView, 8); + const smallIntBuffer = new ArrayBuffer(this.valueHex.byteLength); + const smallIntView = new Uint8Array(smallIntBuffer); + for (let j = 0; j < this.valueHex.byteLength; j++) { + smallIntView[j] = buf[j]; + } + smallIntView[0] &= 0x7F; + const smallInt = utilFromBase(smallIntView, 8); + return (smallInt - bigInt); + } + function utilEncodeTC(value) { + const modValue = (value < 0) ? (value * (-1)) : value; + let bigInt = 128; + for (let i = 1; i < 8; i++) { + if (modValue <= bigInt) { + if (value < 0) { + const smallInt = bigInt - modValue; + const retBuf = utilToBase(smallInt, 8, i); + const retView = new Uint8Array(retBuf); + retView[0] |= 0x80; + return retBuf; + } + let retBuf = utilToBase(modValue, 8, i); + let retView = new Uint8Array(retBuf); + if (retView[0] & 0x80) { + const tempBuf = retBuf.slice(0); + const tempView = new Uint8Array(tempBuf); + retBuf = new ArrayBuffer(retBuf.byteLength + 1); + retView = new Uint8Array(retBuf); + for (let k = 0; k < tempBuf.byteLength; k++) { + retView[k + 1] = tempView[k]; + } + retView[0] = 0x00; + } + return retBuf; + } + bigInt *= Math.pow(2, 8); + } + return (new ArrayBuffer(0)); + } + function isEqualBuffer(inputBuffer1, inputBuffer2) { + if (inputBuffer1.byteLength !== inputBuffer2.byteLength) { + return false; + } + const view1 = new Uint8Array(inputBuffer1); + const view2 = new Uint8Array(inputBuffer2); + for (let i = 0; i < view1.length; i++) { + if (view1[i] !== view2[i]) { + return false; + } + } + return true; + } + function padNumber(inputNumber, fullLength) { + const str = inputNumber.toString(10); + if (fullLength < str.length) { + return ""; + } + const dif = fullLength - str.length; + const padding = new Array(dif); + for (let i = 0; i < dif; i++) { + padding[i] = "0"; + } + const paddingString = padding.join(""); + return paddingString.concat(str); + } + + /*! 
+ * Copyright (c) 2014, GMO GlobalSign + * Copyright (c) 2015-2022, Peculiar Ventures + * All rights reserved. + * + * Author 2014-2019, Yury Strozhevsky + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * + * * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * * Neither the name of the copyright holder nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND + * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR + * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + */ + + + function assertBigInt() { + if (typeof BigInt === "undefined") { + throw new Error("BigInt is not defined. 
Your environment doesn't implement BigInt."); + } + } + function concat(buffers) { + let outputLength = 0; + let prevLength = 0; + for (let i = 0; i < buffers.length; i++) { + const buffer = buffers[i]; + outputLength += buffer.byteLength; + } + const retView = new Uint8Array(outputLength); + for (let i = 0; i < buffers.length; i++) { + const buffer = buffers[i]; + retView.set(new Uint8Array(buffer), prevLength); + prevLength += buffer.byteLength; + } + return retView.buffer; + } + function checkBufferParams(baseBlock, inputBuffer, inputOffset, inputLength) { + if (!(inputBuffer instanceof Uint8Array)) { + baseBlock.error = "Wrong parameter: inputBuffer must be 'Uint8Array'"; + return false; + } + if (!inputBuffer.byteLength) { + baseBlock.error = "Wrong parameter: inputBuffer has zero length"; + return false; + } + if (inputOffset < 0) { + baseBlock.error = "Wrong parameter: inputOffset less than zero"; + return false; + } + if (inputLength < 0) { + baseBlock.error = "Wrong parameter: inputLength less than zero"; + return false; + } + if ((inputBuffer.byteLength - inputOffset - inputLength) < 0) { + baseBlock.error = "End of input reached before message was fully decoded (inconsistent offset and length values)"; + return false; + } + return true; + } + + class ViewWriter { + constructor() { + this.items = []; + } + write(buf) { + this.items.push(buf); + } + final() { + return concat(this.items); + } + } + + const powers2 = [new Uint8Array([1])]; + const digitsString = "0123456789"; + const EMPTY_STRING = ""; + const EMPTY_BUFFER$1 = new ArrayBuffer(0); + const EMPTY_VIEW = new Uint8Array(0); + const END_OF_CONTENT_NAME = "EndOfContent"; + const OCTET_STRING_NAME = "OCTET STRING"; + const BIT_STRING_NAME = "BIT STRING"; + + function HexBlock(BaseClass) { + var _a; + return _a = class Some extends BaseClass { + constructor(...args) { + var _a; + super(...args); + const params = args[0] || {}; + this.isHexOnly = (_a = params.isHexOnly) !== null && _a !== void 0 ? 
_a : false; + this.valueHexView = params.valueHex ? BufferSourceConverter_1.toUint8Array(params.valueHex) : EMPTY_VIEW; + } + get valueHex() { + return this.valueHexView.slice().buffer; + } + set valueHex(value) { + this.valueHexView = new Uint8Array(value); + } + fromBER(inputBuffer, inputOffset, inputLength) { + const view = inputBuffer instanceof ArrayBuffer ? new Uint8Array(inputBuffer) : inputBuffer; + if (!checkBufferParams(this, view, inputOffset, inputLength)) { + return -1; + } + const endLength = inputOffset + inputLength; + this.valueHexView = view.subarray(inputOffset, endLength); + if (!this.valueHexView.length) { + this.warnings.push("Zero buffer length"); + return inputOffset; + } + this.blockLength = inputLength; + return endLength; + } + toBER(sizeOnly = false) { + if (!this.isHexOnly) { + this.error = "Flag 'isHexOnly' is not set, abort"; + return EMPTY_BUFFER$1; + } + if (sizeOnly) { + return new ArrayBuffer(this.valueHexView.byteLength); + } + return (this.valueHexView.byteLength === this.valueHexView.buffer.byteLength) + ? 
this.valueHexView.buffer + : this.valueHexView.slice().buffer; + } + toJSON() { + return { + ...super.toJSON(), + isHexOnly: this.isHexOnly, + valueHex: Convert_1.ToHex(this.valueHexView), + }; + } + }, + _a.NAME = "hexBlock", + _a; + } + + class LocalBaseBlock { + constructor({ blockLength = 0, error = EMPTY_STRING, warnings = [], valueBeforeDecode = EMPTY_VIEW, } = {}) { + this.blockLength = blockLength; + this.error = error; + this.warnings = warnings; + this.valueBeforeDecodeView = BufferSourceConverter_1.toUint8Array(valueBeforeDecode); + } + static blockName() { + return this.NAME; + } + get valueBeforeDecode() { + return this.valueBeforeDecodeView.slice().buffer; + } + set valueBeforeDecode(value) { + this.valueBeforeDecodeView = new Uint8Array(value); + } + toJSON() { + return { + blockName: this.constructor.NAME, + blockLength: this.blockLength, + error: this.error, + warnings: this.warnings, + valueBeforeDecode: Convert_1.ToHex(this.valueBeforeDecodeView), + }; + } + } + LocalBaseBlock.NAME = "baseBlock"; + + class ValueBlock extends LocalBaseBlock { + fromBER(inputBuffer, inputOffset, inputLength) { + throw TypeError("User need to make a specific function in a class which extends 'ValueBlock'"); + } + toBER(sizeOnly, writer) { + throw TypeError("User need to make a specific function in a class which extends 'ValueBlock'"); + } + } + ValueBlock.NAME = "valueBlock"; + + class LocalIdentificationBlock extends HexBlock(LocalBaseBlock) { + constructor({ idBlock = {}, } = {}) { + var _a, _b, _c, _d; + super(); + if (idBlock) { + this.isHexOnly = (_a = idBlock.isHexOnly) !== null && _a !== void 0 ? _a : false; + this.valueHexView = idBlock.valueHex ? BufferSourceConverter_1.toUint8Array(idBlock.valueHex) : EMPTY_VIEW; + this.tagClass = (_b = idBlock.tagClass) !== null && _b !== void 0 ? _b : -1; + this.tagNumber = (_c = idBlock.tagNumber) !== null && _c !== void 0 ? _c : -1; + this.isConstructed = (_d = idBlock.isConstructed) !== null && _d !== void 0 ? 
_d : false; + } + else { + this.tagClass = -1; + this.tagNumber = -1; + this.isConstructed = false; + } + } + toBER(sizeOnly = false) { + let firstOctet = 0; + switch (this.tagClass) { + case 1: + firstOctet |= 0x00; + break; + case 2: + firstOctet |= 0x40; + break; + case 3: + firstOctet |= 0x80; + break; + case 4: + firstOctet |= 0xC0; + break; + default: + this.error = "Unknown tag class"; + return EMPTY_BUFFER$1; + } + if (this.isConstructed) + firstOctet |= 0x20; + if (this.tagNumber < 31 && !this.isHexOnly) { + const retView = new Uint8Array(1); + if (!sizeOnly) { + let number = this.tagNumber; + number &= 0x1F; + firstOctet |= number; + retView[0] = firstOctet; + } + return retView.buffer; + } + if (!this.isHexOnly) { + const encodedBuf = utilToBase(this.tagNumber, 7); + const encodedView = new Uint8Array(encodedBuf); + const size = encodedBuf.byteLength; + const retView = new Uint8Array(size + 1); + retView[0] = (firstOctet | 0x1F); + if (!sizeOnly) { + for (let i = 0; i < (size - 1); i++) + retView[i + 1] = encodedView[i] | 0x80; + retView[size] = encodedView[size - 1]; + } + return retView.buffer; + } + const retView = new Uint8Array(this.valueHexView.byteLength + 1); + retView[0] = (firstOctet | 0x1F); + if (!sizeOnly) { + const curView = this.valueHexView; + for (let i = 0; i < (curView.length - 1); i++) + retView[i + 1] = curView[i] | 0x80; + retView[this.valueHexView.byteLength] = curView[curView.length - 1]; + } + return retView.buffer; + } + fromBER(inputBuffer, inputOffset, inputLength) { + const inputView = BufferSourceConverter_1.toUint8Array(inputBuffer); + if (!checkBufferParams(this, inputView, inputOffset, inputLength)) { + return -1; + } + const intBuffer = inputView.subarray(inputOffset, inputOffset + inputLength); + if (intBuffer.length === 0) { + this.error = "Zero buffer length"; + return -1; + } + const tagClassMask = intBuffer[0] & 0xC0; + switch (tagClassMask) { + case 0x00: + this.tagClass = (1); + break; + case 0x40: + this.tagClass 
= (2); + break; + case 0x80: + this.tagClass = (3); + break; + case 0xC0: + this.tagClass = (4); + break; + default: + this.error = "Unknown tag class"; + return -1; + } + this.isConstructed = (intBuffer[0] & 0x20) === 0x20; + this.isHexOnly = false; + const tagNumberMask = intBuffer[0] & 0x1F; + if (tagNumberMask !== 0x1F) { + this.tagNumber = (tagNumberMask); + this.blockLength = 1; + } + else { + let count = 1; + let intTagNumberBuffer = this.valueHexView = new Uint8Array(255); + let tagNumberBufferMaxLength = 255; + while (intBuffer[count] & 0x80) { + intTagNumberBuffer[count - 1] = intBuffer[count] & 0x7F; + count++; + if (count >= intBuffer.length) { + this.error = "End of input reached before message was fully decoded"; + return -1; + } + if (count === tagNumberBufferMaxLength) { + tagNumberBufferMaxLength += 255; + const tempBufferView = new Uint8Array(tagNumberBufferMaxLength); + for (let i = 0; i < intTagNumberBuffer.length; i++) + tempBufferView[i] = intTagNumberBuffer[i]; + intTagNumberBuffer = this.valueHexView = new Uint8Array(tagNumberBufferMaxLength); + } + } + this.blockLength = (count + 1); + intTagNumberBuffer[count - 1] = intBuffer[count] & 0x7F; + const tempBufferView = new Uint8Array(count); + for (let i = 0; i < count; i++) + tempBufferView[i] = intTagNumberBuffer[i]; + intTagNumberBuffer = this.valueHexView = new Uint8Array(count); + intTagNumberBuffer.set(tempBufferView); + if (this.blockLength <= 9) + this.tagNumber = utilFromBase(intTagNumberBuffer, 7); + else { + this.isHexOnly = true; + this.warnings.push("Tag too long, represented as hex-coded"); + } + } + if (((this.tagClass === 1)) && + (this.isConstructed)) { + switch (this.tagNumber) { + case 1: + case 2: + case 5: + case 6: + case 9: + case 13: + case 14: + case 23: + case 24: + case 31: + case 32: + case 33: + case 34: + this.error = "Constructed encoding used for primitive type"; + return -1; + } + } + return (inputOffset + this.blockLength); + } + toJSON() { + return { + 
...super.toJSON(), + tagClass: this.tagClass, + tagNumber: this.tagNumber, + isConstructed: this.isConstructed, + }; + } + } + LocalIdentificationBlock.NAME = "identificationBlock"; + + class LocalLengthBlock extends LocalBaseBlock { + constructor({ lenBlock = {}, } = {}) { + var _a, _b, _c; + super(); + this.isIndefiniteForm = (_a = lenBlock.isIndefiniteForm) !== null && _a !== void 0 ? _a : false; + this.longFormUsed = (_b = lenBlock.longFormUsed) !== null && _b !== void 0 ? _b : false; + this.length = (_c = lenBlock.length) !== null && _c !== void 0 ? _c : 0; + } + fromBER(inputBuffer, inputOffset, inputLength) { + const view = BufferSourceConverter_1.toUint8Array(inputBuffer); + if (!checkBufferParams(this, view, inputOffset, inputLength)) { + return -1; + } + const intBuffer = view.subarray(inputOffset, inputOffset + inputLength); + if (intBuffer.length === 0) { + this.error = "Zero buffer length"; + return -1; + } + if (intBuffer[0] === 0xFF) { + this.error = "Length block 0xFF is reserved by standard"; + return -1; + } + this.isIndefiniteForm = intBuffer[0] === 0x80; + if (this.isIndefiniteForm) { + this.blockLength = 1; + return (inputOffset + this.blockLength); + } + this.longFormUsed = !!(intBuffer[0] & 0x80); + if (this.longFormUsed === false) { + this.length = (intBuffer[0]); + this.blockLength = 1; + return (inputOffset + this.blockLength); + } + const count = intBuffer[0] & 0x7F; + if (count > 8) { + this.error = "Too big integer"; + return -1; + } + if ((count + 1) > intBuffer.length) { + this.error = "End of input reached before message was fully decoded"; + return -1; + } + const lenOffset = inputOffset + 1; + const lengthBufferView = view.subarray(lenOffset, lenOffset + count); + if (lengthBufferView[count - 1] === 0x00) + this.warnings.push("Needlessly long encoded length"); + this.length = utilFromBase(lengthBufferView, 8); + if (this.longFormUsed && (this.length <= 127)) + this.warnings.push("Unnecessary usage of long length form"); + 
this.blockLength = count + 1; + return (inputOffset + this.blockLength); + } + toBER(sizeOnly = false) { + let retBuf; + let retView; + if (this.length > 127) + this.longFormUsed = true; + if (this.isIndefiniteForm) { + retBuf = new ArrayBuffer(1); + if (sizeOnly === false) { + retView = new Uint8Array(retBuf); + retView[0] = 0x80; + } + return retBuf; + } + if (this.longFormUsed) { + const encodedBuf = utilToBase(this.length, 8); + if (encodedBuf.byteLength > 127) { + this.error = "Too big length"; + return (EMPTY_BUFFER$1); + } + retBuf = new ArrayBuffer(encodedBuf.byteLength + 1); + if (sizeOnly) + return retBuf; + const encodedView = new Uint8Array(encodedBuf); + retView = new Uint8Array(retBuf); + retView[0] = encodedBuf.byteLength | 0x80; + for (let i = 0; i < encodedBuf.byteLength; i++) + retView[i + 1] = encodedView[i]; + return retBuf; + } + retBuf = new ArrayBuffer(1); + if (sizeOnly === false) { + retView = new Uint8Array(retBuf); + retView[0] = this.length; + } + return retBuf; + } + toJSON() { + return { + ...super.toJSON(), + isIndefiniteForm: this.isIndefiniteForm, + longFormUsed: this.longFormUsed, + length: this.length, + }; + } + } + LocalLengthBlock.NAME = "lengthBlock"; + + const typeStore = {}; + + class BaseBlock extends LocalBaseBlock { + constructor({ name = EMPTY_STRING, optional = false, primitiveSchema, ...parameters } = {}, valueBlockType) { + super(parameters); + this.name = name; + this.optional = optional; + if (primitiveSchema) { + this.primitiveSchema = primitiveSchema; + } + this.idBlock = new LocalIdentificationBlock(parameters); + this.lenBlock = new LocalLengthBlock(parameters); + this.valueBlock = valueBlockType ? new valueBlockType(parameters) : new ValueBlock(parameters); + } + fromBER(inputBuffer, inputOffset, inputLength) { + const resultOffset = this.valueBlock.fromBER(inputBuffer, inputOffset, (this.lenBlock.isIndefiniteForm) ? 
inputLength : this.lenBlock.length); + if (resultOffset === -1) { + this.error = this.valueBlock.error; + return resultOffset; + } + if (!this.idBlock.error.length) + this.blockLength += this.idBlock.blockLength; + if (!this.lenBlock.error.length) + this.blockLength += this.lenBlock.blockLength; + if (!this.valueBlock.error.length) + this.blockLength += this.valueBlock.blockLength; + return resultOffset; + } + toBER(sizeOnly, writer) { + const _writer = writer || new ViewWriter(); + if (!writer) { + prepareIndefiniteForm(this); + } + const idBlockBuf = this.idBlock.toBER(sizeOnly); + _writer.write(idBlockBuf); + if (this.lenBlock.isIndefiniteForm) { + _writer.write(new Uint8Array([0x80]).buffer); + this.valueBlock.toBER(sizeOnly, _writer); + _writer.write(new ArrayBuffer(2)); + } + else { + const valueBlockBuf = this.valueBlock.toBER(sizeOnly); + this.lenBlock.length = valueBlockBuf.byteLength; + const lenBlockBuf = this.lenBlock.toBER(sizeOnly); + _writer.write(lenBlockBuf); + _writer.write(valueBlockBuf); + } + if (!writer) { + return _writer.final(); + } + return EMPTY_BUFFER$1; + } + toJSON() { + const object = { + ...super.toJSON(), + idBlock: this.idBlock.toJSON(), + lenBlock: this.lenBlock.toJSON(), + valueBlock: this.valueBlock.toJSON(), + name: this.name, + optional: this.optional, + }; + if (this.primitiveSchema) + object.primitiveSchema = this.primitiveSchema.toJSON(); + return object; + } + toString(encoding = "ascii") { + if (encoding === "ascii") { + return this.onAsciiEncoding(); + } + return Convert_1.ToHex(this.toBER()); + } + onAsciiEncoding() { + return `${this.constructor.NAME} : ${Convert_1.ToHex(this.valueBlock.valueBeforeDecodeView)}`; + } + isEqual(other) { + if (this === other) { + return true; + } + if (!(other instanceof this.constructor)) { + return false; + } + const thisRaw = this.toBER(); + const otherRaw = other.toBER(); + return isEqualBuffer(thisRaw, otherRaw); + } + } + BaseBlock.NAME = "BaseBlock"; + function 
prepareIndefiniteForm(baseBlock) { + if (baseBlock instanceof typeStore.Constructed) { + for (const value of baseBlock.valueBlock.value) { + if (prepareIndefiniteForm(value)) { + baseBlock.lenBlock.isIndefiniteForm = true; + } + } + } + return !!baseBlock.lenBlock.isIndefiniteForm; + } + + class BaseStringBlock extends BaseBlock { + constructor({ value = EMPTY_STRING, ...parameters } = {}, stringValueBlockType) { + super(parameters, stringValueBlockType); + if (value) { + this.fromString(value); + } + } + getValue() { + return this.valueBlock.value; + } + setValue(value) { + this.valueBlock.value = value; + } + fromBER(inputBuffer, inputOffset, inputLength) { + const resultOffset = this.valueBlock.fromBER(inputBuffer, inputOffset, (this.lenBlock.isIndefiniteForm) ? inputLength : this.lenBlock.length); + if (resultOffset === -1) { + this.error = this.valueBlock.error; + return resultOffset; + } + this.fromBuffer(this.valueBlock.valueHexView); + if (!this.idBlock.error.length) + this.blockLength += this.idBlock.blockLength; + if (!this.lenBlock.error.length) + this.blockLength += this.lenBlock.blockLength; + if (!this.valueBlock.error.length) + this.blockLength += this.valueBlock.blockLength; + return resultOffset; + } + onAsciiEncoding() { + return `${this.constructor.NAME} : '${this.valueBlock.value}'`; + } + } + BaseStringBlock.NAME = "BaseStringBlock"; + + class LocalPrimitiveValueBlock extends HexBlock(ValueBlock) { + constructor({ isHexOnly = true, ...parameters } = {}) { + super(parameters); + this.isHexOnly = isHexOnly; + } + } + LocalPrimitiveValueBlock.NAME = "PrimitiveValueBlock"; + + var _a$w; + class Primitive extends BaseBlock { + constructor(parameters = {}) { + super(parameters, LocalPrimitiveValueBlock); + this.idBlock.isConstructed = false; + } + } + _a$w = Primitive; + (() => { + typeStore.Primitive = _a$w; + })(); + Primitive.NAME = "PRIMITIVE"; + + function localChangeType(inputObject, newType) { + if (inputObject instanceof newType) { + return 
inputObject; + } + const newObject = new newType(); + newObject.idBlock = inputObject.idBlock; + newObject.lenBlock = inputObject.lenBlock; + newObject.warnings = inputObject.warnings; + newObject.valueBeforeDecodeView = inputObject.valueBeforeDecodeView; + return newObject; + } + function localFromBER(inputBuffer, inputOffset = 0, inputLength = inputBuffer.length) { + const incomingOffset = inputOffset; + let returnObject = new BaseBlock({}, ValueBlock); + const baseBlock = new LocalBaseBlock(); + if (!checkBufferParams(baseBlock, inputBuffer, inputOffset, inputLength)) { + returnObject.error = baseBlock.error; + return { + offset: -1, + result: returnObject + }; + } + const intBuffer = inputBuffer.subarray(inputOffset, inputOffset + inputLength); + if (!intBuffer.length) { + returnObject.error = "Zero buffer length"; + return { + offset: -1, + result: returnObject + }; + } + let resultOffset = returnObject.idBlock.fromBER(inputBuffer, inputOffset, inputLength); + if (returnObject.idBlock.warnings.length) { + returnObject.warnings.concat(returnObject.idBlock.warnings); + } + if (resultOffset === -1) { + returnObject.error = returnObject.idBlock.error; + return { + offset: -1, + result: returnObject + }; + } + inputOffset = resultOffset; + inputLength -= returnObject.idBlock.blockLength; + resultOffset = returnObject.lenBlock.fromBER(inputBuffer, inputOffset, inputLength); + if (returnObject.lenBlock.warnings.length) { + returnObject.warnings.concat(returnObject.lenBlock.warnings); + } + if (resultOffset === -1) { + returnObject.error = returnObject.lenBlock.error; + return { + offset: -1, + result: returnObject + }; + } + inputOffset = resultOffset; + inputLength -= returnObject.lenBlock.blockLength; + if (!returnObject.idBlock.isConstructed && + returnObject.lenBlock.isIndefiniteForm) { + returnObject.error = "Indefinite length form used for primitive encoding form"; + return { + offset: -1, + result: returnObject + }; + } + let newASN1Type = BaseBlock; + switch 
(returnObject.idBlock.tagClass) { + case 1: + if ((returnObject.idBlock.tagNumber >= 37) && + (returnObject.idBlock.isHexOnly === false)) { + returnObject.error = "UNIVERSAL 37 and upper tags are reserved by ASN.1 standard"; + return { + offset: -1, + result: returnObject + }; + } + switch (returnObject.idBlock.tagNumber) { + case 0: + if ((returnObject.idBlock.isConstructed) && + (returnObject.lenBlock.length > 0)) { + returnObject.error = "Type [UNIVERSAL 0] is reserved"; + return { + offset: -1, + result: returnObject + }; + } + newASN1Type = typeStore.EndOfContent; + break; + case 1: + newASN1Type = typeStore.Boolean; + break; + case 2: + newASN1Type = typeStore.Integer; + break; + case 3: + newASN1Type = typeStore.BitString; + break; + case 4: + newASN1Type = typeStore.OctetString; + break; + case 5: + newASN1Type = typeStore.Null; + break; + case 6: + newASN1Type = typeStore.ObjectIdentifier; + break; + case 10: + newASN1Type = typeStore.Enumerated; + break; + case 12: + newASN1Type = typeStore.Utf8String; + break; + case 13: + newASN1Type = typeStore.RelativeObjectIdentifier; + break; + case 14: + newASN1Type = typeStore.TIME; + break; + case 15: + returnObject.error = "[UNIVERSAL 15] is reserved by ASN.1 standard"; + return { + offset: -1, + result: returnObject + }; + case 16: + newASN1Type = typeStore.Sequence; + break; + case 17: + newASN1Type = typeStore.Set; + break; + case 18: + newASN1Type = typeStore.NumericString; + break; + case 19: + newASN1Type = typeStore.PrintableString; + break; + case 20: + newASN1Type = typeStore.TeletexString; + break; + case 21: + newASN1Type = typeStore.VideotexString; + break; + case 22: + newASN1Type = typeStore.IA5String; + break; + case 23: + newASN1Type = typeStore.UTCTime; + break; + case 24: + newASN1Type = typeStore.GeneralizedTime; + break; + case 25: + newASN1Type = typeStore.GraphicString; + break; + case 26: + newASN1Type = typeStore.VisibleString; + break; + case 27: + newASN1Type = typeStore.GeneralString; 
+ break; + case 28: + newASN1Type = typeStore.UniversalString; + break; + case 29: + newASN1Type = typeStore.CharacterString; + break; + case 30: + newASN1Type = typeStore.BmpString; + break; + case 31: + newASN1Type = typeStore.DATE; + break; + case 32: + newASN1Type = typeStore.TimeOfDay; + break; + case 33: + newASN1Type = typeStore.DateTime; + break; + case 34: + newASN1Type = typeStore.Duration; + break; + default: { + const newObject = returnObject.idBlock.isConstructed + ? new typeStore.Constructed() + : new typeStore.Primitive(); + newObject.idBlock = returnObject.idBlock; + newObject.lenBlock = returnObject.lenBlock; + newObject.warnings = returnObject.warnings; + returnObject = newObject; + } + } + break; + case 2: + case 3: + case 4: + default: { + newASN1Type = returnObject.idBlock.isConstructed + ? typeStore.Constructed + : typeStore.Primitive; + } + } + returnObject = localChangeType(returnObject, newASN1Type); + resultOffset = returnObject.fromBER(inputBuffer, inputOffset, returnObject.lenBlock.isIndefiniteForm ? 
inputLength : returnObject.lenBlock.length); + returnObject.valueBeforeDecodeView = inputBuffer.subarray(incomingOffset, incomingOffset + returnObject.blockLength); + return { + offset: resultOffset, + result: returnObject + }; + } + function fromBER(inputBuffer) { + if (!inputBuffer.byteLength) { + const result = new BaseBlock({}, ValueBlock); + result.error = "Input buffer has zero length"; + return { + offset: -1, + result + }; + } + return localFromBER(BufferSourceConverter_1.toUint8Array(inputBuffer).slice(), 0, inputBuffer.byteLength); + } + + function checkLen(indefiniteLength, length) { + if (indefiniteLength) { + return 1; + } + return length; + } + class LocalConstructedValueBlock extends ValueBlock { + constructor({ value = [], isIndefiniteForm = false, ...parameters } = {}) { + super(parameters); + this.value = value; + this.isIndefiniteForm = isIndefiniteForm; + } + fromBER(inputBuffer, inputOffset, inputLength) { + const view = BufferSourceConverter_1.toUint8Array(inputBuffer); + if (!checkBufferParams(this, view, inputOffset, inputLength)) { + return -1; + } + this.valueBeforeDecodeView = view.subarray(inputOffset, inputOffset + inputLength); + if (this.valueBeforeDecodeView.length === 0) { + this.warnings.push("Zero buffer length"); + return inputOffset; + } + let currentOffset = inputOffset; + while (checkLen(this.isIndefiniteForm, inputLength) > 0) { + const returnObject = localFromBER(view, currentOffset, inputLength); + if (returnObject.offset === -1) { + this.error = returnObject.result.error; + this.warnings.concat(returnObject.result.warnings); + return -1; + } + currentOffset = returnObject.offset; + this.blockLength += returnObject.result.blockLength; + inputLength -= returnObject.result.blockLength; + this.value.push(returnObject.result); + if (this.isIndefiniteForm && returnObject.result.constructor.NAME === END_OF_CONTENT_NAME) { + break; + } + } + if (this.isIndefiniteForm) { + if (this.value[this.value.length - 1].constructor.NAME === 
END_OF_CONTENT_NAME) { + this.value.pop(); + } + else { + this.warnings.push("No EndOfContent block encoded"); + } + } + return currentOffset; + } + toBER(sizeOnly, writer) { + const _writer = writer || new ViewWriter(); + for (let i = 0; i < this.value.length; i++) { + this.value[i].toBER(sizeOnly, _writer); + } + if (!writer) { + return _writer.final(); + } + return EMPTY_BUFFER$1; + } + toJSON() { + const object = { + ...super.toJSON(), + isIndefiniteForm: this.isIndefiniteForm, + value: [], + }; + for (const value of this.value) { + object.value.push(value.toJSON()); + } + return object; + } + } + LocalConstructedValueBlock.NAME = "ConstructedValueBlock"; + + var _a$v; + class Constructed extends BaseBlock { + constructor(parameters = {}) { + super(parameters, LocalConstructedValueBlock); + this.idBlock.isConstructed = true; + } + fromBER(inputBuffer, inputOffset, inputLength) { + this.valueBlock.isIndefiniteForm = this.lenBlock.isIndefiniteForm; + const resultOffset = this.valueBlock.fromBER(inputBuffer, inputOffset, (this.lenBlock.isIndefiniteForm) ? inputLength : this.lenBlock.length); + if (resultOffset === -1) { + this.error = this.valueBlock.error; + return resultOffset; + } + if (!this.idBlock.error.length) + this.blockLength += this.idBlock.blockLength; + if (!this.lenBlock.error.length) + this.blockLength += this.lenBlock.blockLength; + if (!this.valueBlock.error.length) + this.blockLength += this.valueBlock.blockLength; + return resultOffset; + } + onAsciiEncoding() { + const values = []; + for (const value of this.valueBlock.value) { + values.push(value.toString("ascii").split("\n").map(o => ` ${o}`).join("\n")); + } + const blockName = this.idBlock.tagClass === 3 + ? `[${this.idBlock.tagNumber}]` + : this.constructor.NAME; + return values.length + ? 
`${blockName} :\n${values.join("\n")}` + : `${blockName} :`; + } + } + _a$v = Constructed; + (() => { + typeStore.Constructed = _a$v; + })(); + Constructed.NAME = "CONSTRUCTED"; + + class LocalEndOfContentValueBlock extends ValueBlock { + fromBER(inputBuffer, inputOffset, inputLength) { + return inputOffset; + } + toBER(sizeOnly) { + return EMPTY_BUFFER$1; + } + } + LocalEndOfContentValueBlock.override = "EndOfContentValueBlock"; + + var _a$u; + class EndOfContent extends BaseBlock { + constructor(parameters = {}) { + super(parameters, LocalEndOfContentValueBlock); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 0; + } + } + _a$u = EndOfContent; + (() => { + typeStore.EndOfContent = _a$u; + })(); + EndOfContent.NAME = END_OF_CONTENT_NAME; + + var _a$t; + class Null extends BaseBlock { + constructor(parameters = {}) { + super(parameters, ValueBlock); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 5; + } + fromBER(inputBuffer, inputOffset, inputLength) { + if (this.lenBlock.length > 0) + this.warnings.push("Non-zero length of value block for Null type"); + if (!this.idBlock.error.length) + this.blockLength += this.idBlock.blockLength; + if (!this.lenBlock.error.length) + this.blockLength += this.lenBlock.blockLength; + this.blockLength += inputLength; + if ((inputOffset + inputLength) > inputBuffer.byteLength) { + this.error = "End of input reached before message was fully decoded (inconsistent offset and length values)"; + return -1; + } + return (inputOffset + inputLength); + } + toBER(sizeOnly, writer) { + const retBuf = new ArrayBuffer(2); + if (!sizeOnly) { + const retView = new Uint8Array(retBuf); + retView[0] = 0x05; + retView[1] = 0x00; + } + if (writer) { + writer.write(retBuf); + } + return retBuf; + } + onAsciiEncoding() { + return `${this.constructor.NAME}`; + } + } + _a$t = Null; + (() => { + typeStore.Null = _a$t; + })(); + Null.NAME = "NULL"; + + class LocalBooleanValueBlock extends HexBlock(ValueBlock) { + constructor({ value, 
...parameters } = {}) { + super(parameters); + if (parameters.valueHex) { + this.valueHexView = BufferSourceConverter_1.toUint8Array(parameters.valueHex); + } + else { + this.valueHexView = new Uint8Array(1); + } + if (value) { + this.value = value; + } + } + get value() { + for (const octet of this.valueHexView) { + if (octet > 0) { + return true; + } + } + return false; + } + set value(value) { + this.valueHexView[0] = value ? 0xFF : 0x00; + } + fromBER(inputBuffer, inputOffset, inputLength) { + const inputView = BufferSourceConverter_1.toUint8Array(inputBuffer); + if (!checkBufferParams(this, inputView, inputOffset, inputLength)) { + return -1; + } + this.valueHexView = inputView.subarray(inputOffset, inputOffset + inputLength); + if (inputLength > 1) + this.warnings.push("Boolean value encoded in more then 1 octet"); + this.isHexOnly = true; + utilDecodeTC.call(this); + this.blockLength = inputLength; + return (inputOffset + inputLength); + } + toBER() { + return this.valueHexView.slice(); + } + toJSON() { + return { + ...super.toJSON(), + value: this.value, + }; + } + } + LocalBooleanValueBlock.NAME = "BooleanValueBlock"; + + var _a$s; + let Boolean$1 = class Boolean extends BaseBlock { + constructor(parameters = {}) { + super(parameters, LocalBooleanValueBlock); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 1; + } + getValue() { + return this.valueBlock.value; + } + setValue(value) { + this.valueBlock.value = value; + } + onAsciiEncoding() { + return `${this.constructor.NAME} : ${this.getValue}`; + } + }; + _a$s = Boolean$1; + (() => { + typeStore.Boolean = _a$s; + })(); + Boolean$1.NAME = "BOOLEAN"; + + class LocalOctetStringValueBlock extends HexBlock(LocalConstructedValueBlock) { + constructor({ isConstructed = false, ...parameters } = {}) { + super(parameters); + this.isConstructed = isConstructed; + } + fromBER(inputBuffer, inputOffset, inputLength) { + let resultOffset = 0; + if (this.isConstructed) { + this.isHexOnly = false; + resultOffset = 
LocalConstructedValueBlock.prototype.fromBER.call(this, inputBuffer, inputOffset, inputLength); + if (resultOffset === -1) + return resultOffset; + for (let i = 0; i < this.value.length; i++) { + const currentBlockName = this.value[i].constructor.NAME; + if (currentBlockName === END_OF_CONTENT_NAME) { + if (this.isIndefiniteForm) + break; + else { + this.error = "EndOfContent is unexpected, OCTET STRING may consists of OCTET STRINGs only"; + return -1; + } + } + if (currentBlockName !== OCTET_STRING_NAME) { + this.error = "OCTET STRING may consists of OCTET STRINGs only"; + return -1; + } + } + } + else { + this.isHexOnly = true; + resultOffset = super.fromBER(inputBuffer, inputOffset, inputLength); + this.blockLength = inputLength; + } + return resultOffset; + } + toBER(sizeOnly, writer) { + if (this.isConstructed) + return LocalConstructedValueBlock.prototype.toBER.call(this, sizeOnly, writer); + return sizeOnly + ? new ArrayBuffer(this.valueHexView.byteLength) + : this.valueHexView.slice().buffer; + } + toJSON() { + return { + ...super.toJSON(), + isConstructed: this.isConstructed, + }; + } + } + LocalOctetStringValueBlock.NAME = "OctetStringValueBlock"; + + var _a$r; + class OctetString extends BaseBlock { + constructor({ idBlock = {}, lenBlock = {}, ...parameters } = {}) { + var _b, _c; + (_b = parameters.isConstructed) !== null && _b !== void 0 ? _b : (parameters.isConstructed = !!((_c = parameters.value) === null || _c === void 0 ? 
void 0 : _c.length)); + super({ + idBlock: { + isConstructed: parameters.isConstructed, + ...idBlock, + }, + lenBlock: { + ...lenBlock, + isIndefiniteForm: !!parameters.isIndefiniteForm, + }, + ...parameters, + }, LocalOctetStringValueBlock); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 4; + } + fromBER(inputBuffer, inputOffset, inputLength) { + this.valueBlock.isConstructed = this.idBlock.isConstructed; + this.valueBlock.isIndefiniteForm = this.lenBlock.isIndefiniteForm; + if (inputLength === 0) { + if (this.idBlock.error.length === 0) + this.blockLength += this.idBlock.blockLength; + if (this.lenBlock.error.length === 0) + this.blockLength += this.lenBlock.blockLength; + return inputOffset; + } + if (!this.valueBlock.isConstructed) { + const view = inputBuffer instanceof ArrayBuffer ? new Uint8Array(inputBuffer) : inputBuffer; + const buf = view.subarray(inputOffset, inputOffset + inputLength); + try { + if (buf.byteLength) { + const asn = localFromBER(buf, 0, buf.byteLength); + if (asn.offset !== -1 && asn.offset === inputLength) { + this.valueBlock.value = [asn.result]; + } + } + } + catch (e) { + } + } + return super.fromBER(inputBuffer, inputOffset, inputLength); + } + onAsciiEncoding() { + if (this.valueBlock.isConstructed || (this.valueBlock.value && this.valueBlock.value.length)) { + return Constructed.prototype.onAsciiEncoding.call(this); + } + return `${this.constructor.NAME} : ${Convert_1.ToHex(this.valueBlock.valueHexView)}`; + } + getValue() { + if (!this.idBlock.isConstructed) { + return this.valueBlock.valueHexView.slice().buffer; + } + const array = []; + for (const content of this.valueBlock.value) { + if (content instanceof OctetString) { + array.push(content.valueBlock.valueHexView); + } + } + return BufferSourceConverter_1.concat(array); + } + } + _a$r = OctetString; + (() => { + typeStore.OctetString = _a$r; + })(); + OctetString.NAME = OCTET_STRING_NAME; + + class LocalBitStringValueBlock extends 
HexBlock(LocalConstructedValueBlock) { + constructor({ unusedBits = 0, isConstructed = false, ...parameters } = {}) { + super(parameters); + this.unusedBits = unusedBits; + this.isConstructed = isConstructed; + this.blockLength = this.valueHexView.byteLength; + } + fromBER(inputBuffer, inputOffset, inputLength) { + if (!inputLength) { + return inputOffset; + } + let resultOffset = -1; + if (this.isConstructed) { + resultOffset = LocalConstructedValueBlock.prototype.fromBER.call(this, inputBuffer, inputOffset, inputLength); + if (resultOffset === -1) + return resultOffset; + for (const value of this.value) { + const currentBlockName = value.constructor.NAME; + if (currentBlockName === END_OF_CONTENT_NAME) { + if (this.isIndefiniteForm) + break; + else { + this.error = "EndOfContent is unexpected, BIT STRING may consists of BIT STRINGs only"; + return -1; + } + } + if (currentBlockName !== BIT_STRING_NAME) { + this.error = "BIT STRING may consists of BIT STRINGs only"; + return -1; + } + const valueBlock = value.valueBlock; + if ((this.unusedBits > 0) && (valueBlock.unusedBits > 0)) { + this.error = "Using of \"unused bits\" inside constructive BIT STRING allowed for least one only"; + return -1; + } + this.unusedBits = valueBlock.unusedBits; + } + return resultOffset; + } + const inputView = BufferSourceConverter_1.toUint8Array(inputBuffer); + if (!checkBufferParams(this, inputView, inputOffset, inputLength)) { + return -1; + } + const intBuffer = inputView.subarray(inputOffset, inputOffset + inputLength); + this.unusedBits = intBuffer[0]; + if (this.unusedBits > 7) { + this.error = "Unused bits for BitString must be in range 0-7"; + return -1; + } + if (!this.unusedBits) { + const buf = intBuffer.subarray(1); + try { + if (buf.byteLength) { + const asn = localFromBER(buf, 0, buf.byteLength); + if (asn.offset !== -1 && asn.offset === (inputLength - 1)) { + this.value = [asn.result]; + } + } + } + catch (e) { + } + } + this.valueHexView = intBuffer.subarray(1); + 
this.blockLength = intBuffer.length; + return (inputOffset + inputLength); + } + toBER(sizeOnly, writer) { + if (this.isConstructed) { + return LocalConstructedValueBlock.prototype.toBER.call(this, sizeOnly, writer); + } + if (sizeOnly) { + return new ArrayBuffer(this.valueHexView.byteLength + 1); + } + if (!this.valueHexView.byteLength) { + return EMPTY_BUFFER$1; + } + const retView = new Uint8Array(this.valueHexView.length + 1); + retView[0] = this.unusedBits; + retView.set(this.valueHexView, 1); + return retView.buffer; + } + toJSON() { + return { + ...super.toJSON(), + unusedBits: this.unusedBits, + isConstructed: this.isConstructed, + }; + } + } + LocalBitStringValueBlock.NAME = "BitStringValueBlock"; + + var _a$q; + class BitString extends BaseBlock { + constructor({ idBlock = {}, lenBlock = {}, ...parameters } = {}) { + var _b, _c; + (_b = parameters.isConstructed) !== null && _b !== void 0 ? _b : (parameters.isConstructed = !!((_c = parameters.value) === null || _c === void 0 ? void 0 : _c.length)); + super({ + idBlock: { + isConstructed: parameters.isConstructed, + ...idBlock, + }, + lenBlock: { + ...lenBlock, + isIndefiniteForm: !!parameters.isIndefiniteForm, + }, + ...parameters, + }, LocalBitStringValueBlock); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 3; + } + fromBER(inputBuffer, inputOffset, inputLength) { + this.valueBlock.isConstructed = this.idBlock.isConstructed; + this.valueBlock.isIndefiniteForm = this.lenBlock.isIndefiniteForm; + return super.fromBER(inputBuffer, inputOffset, inputLength); + } + onAsciiEncoding() { + if (this.valueBlock.isConstructed || (this.valueBlock.value && this.valueBlock.value.length)) { + return Constructed.prototype.onAsciiEncoding.call(this); + } + else { + const bits = []; + const valueHex = this.valueBlock.valueHexView; + for (const byte of valueHex) { + bits.push(byte.toString(2).padStart(8, "0")); + } + const bitsStr = bits.join(""); + return `${this.constructor.NAME} : ${bitsStr.substring(0, 
bitsStr.length - this.valueBlock.unusedBits)}`; + } + } + } + _a$q = BitString; + (() => { + typeStore.BitString = _a$q; + })(); + BitString.NAME = BIT_STRING_NAME; + + var _a$p; + function viewAdd(first, second) { + const c = new Uint8Array([0]); + const firstView = new Uint8Array(first); + const secondView = new Uint8Array(second); + let firstViewCopy = firstView.slice(0); + const firstViewCopyLength = firstViewCopy.length - 1; + const secondViewCopy = secondView.slice(0); + const secondViewCopyLength = secondViewCopy.length - 1; + let value = 0; + const max = (secondViewCopyLength < firstViewCopyLength) ? firstViewCopyLength : secondViewCopyLength; + let counter = 0; + for (let i = max; i >= 0; i--, counter++) { + switch (true) { + case (counter < secondViewCopy.length): + value = firstViewCopy[firstViewCopyLength - counter] + secondViewCopy[secondViewCopyLength - counter] + c[0]; + break; + default: + value = firstViewCopy[firstViewCopyLength - counter] + c[0]; + } + c[0] = value / 10; + switch (true) { + case (counter >= firstViewCopy.length): + firstViewCopy = utilConcatView(new Uint8Array([value % 10]), firstViewCopy); + break; + default: + firstViewCopy[firstViewCopyLength - counter] = value % 10; + } + } + if (c[0] > 0) + firstViewCopy = utilConcatView(c, firstViewCopy); + return firstViewCopy; + } + function power2(n) { + if (n >= powers2.length) { + for (let p = powers2.length; p <= n; p++) { + const c = new Uint8Array([0]); + let digits = (powers2[p - 1]).slice(0); + for (let i = (digits.length - 1); i >= 0; i--) { + const newValue = new Uint8Array([(digits[i] << 1) + c[0]]); + c[0] = newValue[0] / 10; + digits[i] = newValue[0] % 10; + } + if (c[0] > 0) + digits = utilConcatView(c, digits); + powers2.push(digits); + } + } + return powers2[n]; + } + function viewSub(first, second) { + let b = 0; + const firstView = new Uint8Array(first); + const secondView = new Uint8Array(second); + const firstViewCopy = firstView.slice(0); + const firstViewCopyLength = 
firstViewCopy.length - 1; + const secondViewCopy = secondView.slice(0); + const secondViewCopyLength = secondViewCopy.length - 1; + let value; + let counter = 0; + for (let i = secondViewCopyLength; i >= 0; i--, counter++) { + value = firstViewCopy[firstViewCopyLength - counter] - secondViewCopy[secondViewCopyLength - counter] - b; + switch (true) { + case (value < 0): + b = 1; + firstViewCopy[firstViewCopyLength - counter] = value + 10; + break; + default: + b = 0; + firstViewCopy[firstViewCopyLength - counter] = value; + } + } + if (b > 0) { + for (let i = (firstViewCopyLength - secondViewCopyLength + 1); i >= 0; i--, counter++) { + value = firstViewCopy[firstViewCopyLength - counter] - b; + if (value < 0) { + b = 1; + firstViewCopy[firstViewCopyLength - counter] = value + 10; + } + else { + b = 0; + firstViewCopy[firstViewCopyLength - counter] = value; + break; + } + } + } + return firstViewCopy.slice(); + } + class LocalIntegerValueBlock extends HexBlock(ValueBlock) { + constructor({ value, ...parameters } = {}) { + super(parameters); + this._valueDec = 0; + if (parameters.valueHex) { + this.setValueHex(); + } + if (value !== undefined) { + this.valueDec = value; + } + } + setValueHex() { + if (this.valueHexView.length >= 4) { + this.warnings.push("Too big Integer for decoding, hex only"); + this.isHexOnly = true; + this._valueDec = 0; + } + else { + this.isHexOnly = false; + if (this.valueHexView.length > 0) { + this._valueDec = utilDecodeTC.call(this); + } + } + } + set valueDec(v) { + this._valueDec = v; + this.isHexOnly = false; + this.valueHexView = new Uint8Array(utilEncodeTC(v)); + } + get valueDec() { + return this._valueDec; + } + fromDER(inputBuffer, inputOffset, inputLength, expectedLength = 0) { + const offset = this.fromBER(inputBuffer, inputOffset, inputLength); + if (offset === -1) + return offset; + const view = this.valueHexView; + if ((view[0] === 0x00) && ((view[1] & 0x80) !== 0)) { + this.valueHexView = view.subarray(1); + } + else { + if 
(expectedLength !== 0) { + if (view.length < expectedLength) { + if ((expectedLength - view.length) > 1) + expectedLength = view.length + 1; + this.valueHexView = view.subarray(expectedLength - view.length); + } + } + } + return offset; + } + toDER(sizeOnly = false) { + const view = this.valueHexView; + switch (true) { + case ((view[0] & 0x80) !== 0): + { + const updatedView = new Uint8Array(this.valueHexView.length + 1); + updatedView[0] = 0x00; + updatedView.set(view, 1); + this.valueHexView = updatedView; + } + break; + case ((view[0] === 0x00) && ((view[1] & 0x80) === 0)): + { + this.valueHexView = this.valueHexView.subarray(1); + } + break; + } + return this.toBER(sizeOnly); + } + fromBER(inputBuffer, inputOffset, inputLength) { + const resultOffset = super.fromBER(inputBuffer, inputOffset, inputLength); + if (resultOffset === -1) { + return resultOffset; + } + this.setValueHex(); + return resultOffset; + } + toBER(sizeOnly) { + return sizeOnly + ? new ArrayBuffer(this.valueHexView.length) + : this.valueHexView.slice().buffer; + } + toJSON() { + return { + ...super.toJSON(), + valueDec: this.valueDec, + }; + } + toString() { + const firstBit = (this.valueHexView.length * 8) - 1; + let digits = new Uint8Array((this.valueHexView.length * 8) / 3); + let bitNumber = 0; + let currentByte; + const asn1View = this.valueHexView; + let result = ""; + let flag = false; + for (let byteNumber = (asn1View.byteLength - 1); byteNumber >= 0; byteNumber--) { + currentByte = asn1View[byteNumber]; + for (let i = 0; i < 8; i++) { + if ((currentByte & 1) === 1) { + switch (bitNumber) { + case firstBit: + digits = viewSub(power2(bitNumber), digits); + result = "-"; + break; + default: + digits = viewAdd(digits, power2(bitNumber)); + } + } + bitNumber++; + currentByte >>= 1; + } + } + for (let i = 0; i < digits.length; i++) { + if (digits[i]) + flag = true; + if (flag) + result += digitsString.charAt(digits[i]); + } + if (flag === false) + result += digitsString.charAt(0); + return 
result; + } + } + _a$p = LocalIntegerValueBlock; + LocalIntegerValueBlock.NAME = "IntegerValueBlock"; + (() => { + Object.defineProperty(_a$p.prototype, "valueHex", { + set: function (v) { + this.valueHexView = new Uint8Array(v); + this.setValueHex(); + }, + get: function () { + return this.valueHexView.slice().buffer; + }, + }); + })(); + + var _a$o; + class Integer extends BaseBlock { + constructor(parameters = {}) { + super(parameters, LocalIntegerValueBlock); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 2; + } + toBigInt() { + assertBigInt(); + return BigInt(this.valueBlock.toString()); + } + static fromBigInt(value) { + assertBigInt(); + const bigIntValue = BigInt(value); + const writer = new ViewWriter(); + const hex = bigIntValue.toString(16).replace(/^-/, ""); + const view = new Uint8Array(Convert_1.FromHex(hex)); + if (bigIntValue < 0) { + const first = new Uint8Array(view.length + (view[0] & 0x80 ? 1 : 0)); + first[0] |= 0x80; + const firstInt = BigInt(`0x${Convert_1.ToHex(first)}`); + const secondInt = firstInt + bigIntValue; + const second = BufferSourceConverter_1.toUint8Array(Convert_1.FromHex(secondInt.toString(16))); + second[0] |= 0x80; + writer.write(second); + } + else { + if (view[0] & 0x80) { + writer.write(new Uint8Array([0])); + } + writer.write(view); + } + const res = new Integer({ + valueHex: writer.final(), + }); + return res; + } + convertToDER() { + const integer = new Integer({ valueHex: this.valueBlock.valueHexView }); + integer.valueBlock.toDER(); + return integer; + } + convertFromDER() { + return new Integer({ + valueHex: this.valueBlock.valueHexView[0] === 0 + ? 
this.valueBlock.valueHexView.subarray(1) + : this.valueBlock.valueHexView, + }); + } + onAsciiEncoding() { + return `${this.constructor.NAME} : ${this.valueBlock.toString()}`; + } + } + _a$o = Integer; + (() => { + typeStore.Integer = _a$o; + })(); + Integer.NAME = "INTEGER"; + + var _a$n; + class Enumerated extends Integer { + constructor(parameters = {}) { + super(parameters); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 10; + } + } + _a$n = Enumerated; + (() => { + typeStore.Enumerated = _a$n; + })(); + Enumerated.NAME = "ENUMERATED"; + + class LocalSidValueBlock extends HexBlock(ValueBlock) { + constructor({ valueDec = -1, isFirstSid = false, ...parameters } = {}) { + super(parameters); + this.valueDec = valueDec; + this.isFirstSid = isFirstSid; + } + fromBER(inputBuffer, inputOffset, inputLength) { + if (!inputLength) { + return inputOffset; + } + const inputView = BufferSourceConverter_1.toUint8Array(inputBuffer); + if (!checkBufferParams(this, inputView, inputOffset, inputLength)) { + return -1; + } + const intBuffer = inputView.subarray(inputOffset, inputOffset + inputLength); + this.valueHexView = new Uint8Array(inputLength); + for (let i = 0; i < inputLength; i++) { + this.valueHexView[i] = intBuffer[i] & 0x7F; + this.blockLength++; + if ((intBuffer[i] & 0x80) === 0x00) + break; + } + const tempView = new Uint8Array(this.blockLength); + for (let i = 0; i < this.blockLength; i++) { + tempView[i] = this.valueHexView[i]; + } + this.valueHexView = tempView; + if ((intBuffer[this.blockLength - 1] & 0x80) !== 0x00) { + this.error = "End of input reached before message was fully decoded"; + return -1; + } + if (this.valueHexView[0] === 0x00) + this.warnings.push("Needlessly long format of SID encoding"); + if (this.blockLength <= 8) + this.valueDec = utilFromBase(this.valueHexView, 7); + else { + this.isHexOnly = true; + this.warnings.push("Too big SID for decoding, hex only"); + } + return (inputOffset + this.blockLength); + } + set 
valueBigInt(value) { + assertBigInt(); + let bits = BigInt(value).toString(2); + while (bits.length % 7) { + bits = "0" + bits; + } + const bytes = new Uint8Array(bits.length / 7); + for (let i = 0; i < bytes.length; i++) { + bytes[i] = parseInt(bits.slice(i * 7, i * 7 + 7), 2) + (i + 1 < bytes.length ? 0x80 : 0); + } + this.fromBER(bytes.buffer, 0, bytes.length); + } + toBER(sizeOnly) { + if (this.isHexOnly) { + if (sizeOnly) + return (new ArrayBuffer(this.valueHexView.byteLength)); + const curView = this.valueHexView; + const retView = new Uint8Array(this.blockLength); + for (let i = 0; i < (this.blockLength - 1); i++) + retView[i] = curView[i] | 0x80; + retView[this.blockLength - 1] = curView[this.blockLength - 1]; + return retView.buffer; + } + const encodedBuf = utilToBase(this.valueDec, 7); + if (encodedBuf.byteLength === 0) { + this.error = "Error during encoding SID value"; + return EMPTY_BUFFER$1; + } + const retView = new Uint8Array(encodedBuf.byteLength); + if (!sizeOnly) { + const encodedView = new Uint8Array(encodedBuf); + const len = encodedBuf.byteLength - 1; + for (let i = 0; i < len; i++) + retView[i] = encodedView[i] | 0x80; + retView[len] = encodedView[len]; + } + return retView; + } + toString() { + let result = ""; + if (this.isHexOnly) + result = Convert_1.ToHex(this.valueHexView); + else { + if (this.isFirstSid) { + let sidValue = this.valueDec; + if (this.valueDec <= 39) + result = "0."; + else { + if (this.valueDec <= 79) { + result = "1."; + sidValue -= 40; + } + else { + result = "2."; + sidValue -= 80; + } + } + result += sidValue.toString(); + } + else + result = this.valueDec.toString(); + } + return result; + } + toJSON() { + return { + ...super.toJSON(), + valueDec: this.valueDec, + isFirstSid: this.isFirstSid, + }; + } + } + LocalSidValueBlock.NAME = "sidBlock"; + + class LocalObjectIdentifierValueBlock extends ValueBlock { + constructor({ value = EMPTY_STRING, ...parameters } = {}) { + super(parameters); + this.value = []; + if 
(value) { + this.fromString(value); + } + } + fromBER(inputBuffer, inputOffset, inputLength) { + let resultOffset = inputOffset; + while (inputLength > 0) { + const sidBlock = new LocalSidValueBlock(); + resultOffset = sidBlock.fromBER(inputBuffer, resultOffset, inputLength); + if (resultOffset === -1) { + this.blockLength = 0; + this.error = sidBlock.error; + return resultOffset; + } + if (this.value.length === 0) + sidBlock.isFirstSid = true; + this.blockLength += sidBlock.blockLength; + inputLength -= sidBlock.blockLength; + this.value.push(sidBlock); + } + return resultOffset; + } + toBER(sizeOnly) { + const retBuffers = []; + for (let i = 0; i < this.value.length; i++) { + const valueBuf = this.value[i].toBER(sizeOnly); + if (valueBuf.byteLength === 0) { + this.error = this.value[i].error; + return EMPTY_BUFFER$1; + } + retBuffers.push(valueBuf); + } + return concat(retBuffers); + } + fromString(string) { + this.value = []; + let pos1 = 0; + let pos2 = 0; + let sid = ""; + let flag = false; + do { + pos2 = string.indexOf(".", pos1); + if (pos2 === -1) + sid = string.substring(pos1); + else + sid = string.substring(pos1, pos2); + pos1 = pos2 + 1; + if (flag) { + const sidBlock = this.value[0]; + let plus = 0; + switch (sidBlock.valueDec) { + case 0: + break; + case 1: + plus = 40; + break; + case 2: + plus = 80; + break; + default: + this.value = []; + return; + } + const parsedSID = parseInt(sid, 10); + if (isNaN(parsedSID)) + return; + sidBlock.valueDec = parsedSID + plus; + flag = false; + } + else { + const sidBlock = new LocalSidValueBlock(); + if (sid > Number.MAX_SAFE_INTEGER) { + assertBigInt(); + const sidValue = BigInt(sid); + sidBlock.valueBigInt = sidValue; + } + else { + sidBlock.valueDec = parseInt(sid, 10); + if (isNaN(sidBlock.valueDec)) + return; + } + if (!this.value.length) { + sidBlock.isFirstSid = true; + flag = true; + } + this.value.push(sidBlock); + } + } while (pos2 !== -1); + } + toString() { + let result = ""; + let isHexOnly = false; 
+ for (let i = 0; i < this.value.length; i++) { + isHexOnly = this.value[i].isHexOnly; + let sidStr = this.value[i].toString(); + if (i !== 0) + result = `${result}.`; + if (isHexOnly) { + sidStr = `{${sidStr}}`; + if (this.value[i].isFirstSid) + result = `2.{${sidStr} - 80}`; + else + result += sidStr; + } + else + result += sidStr; + } + return result; + } + toJSON() { + const object = { + ...super.toJSON(), + value: this.toString(), + sidArray: [], + }; + for (let i = 0; i < this.value.length; i++) { + object.sidArray.push(this.value[i].toJSON()); + } + return object; + } + } + LocalObjectIdentifierValueBlock.NAME = "ObjectIdentifierValueBlock"; + + var _a$m; + class ObjectIdentifier extends BaseBlock { + constructor(parameters = {}) { + super(parameters, LocalObjectIdentifierValueBlock); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 6; + } + getValue() { + return this.valueBlock.toString(); + } + setValue(value) { + this.valueBlock.fromString(value); + } + onAsciiEncoding() { + return `${this.constructor.NAME} : ${this.valueBlock.toString() || "empty"}`; + } + toJSON() { + return { + ...super.toJSON(), + value: this.getValue(), + }; + } + } + _a$m = ObjectIdentifier; + (() => { + typeStore.ObjectIdentifier = _a$m; + })(); + ObjectIdentifier.NAME = "OBJECT IDENTIFIER"; + + class LocalRelativeSidValueBlock extends HexBlock(LocalBaseBlock) { + constructor({ valueDec = 0, ...parameters } = {}) { + super(parameters); + this.valueDec = valueDec; + } + fromBER(inputBuffer, inputOffset, inputLength) { + if (inputLength === 0) + return inputOffset; + const inputView = BufferSourceConverter_1.toUint8Array(inputBuffer); + if (!checkBufferParams(this, inputView, inputOffset, inputLength)) + return -1; + const intBuffer = inputView.subarray(inputOffset, inputOffset + inputLength); + this.valueHexView = new Uint8Array(inputLength); + for (let i = 0; i < inputLength; i++) { + this.valueHexView[i] = intBuffer[i] & 0x7F; + this.blockLength++; + if ((intBuffer[i] & 
0x80) === 0x00) + break; + } + const tempView = new Uint8Array(this.blockLength); + for (let i = 0; i < this.blockLength; i++) + tempView[i] = this.valueHexView[i]; + this.valueHexView = tempView; + if ((intBuffer[this.blockLength - 1] & 0x80) !== 0x00) { + this.error = "End of input reached before message was fully decoded"; + return -1; + } + if (this.valueHexView[0] === 0x00) + this.warnings.push("Needlessly long format of SID encoding"); + if (this.blockLength <= 8) + this.valueDec = utilFromBase(this.valueHexView, 7); + else { + this.isHexOnly = true; + this.warnings.push("Too big SID for decoding, hex only"); + } + return (inputOffset + this.blockLength); + } + toBER(sizeOnly) { + if (this.isHexOnly) { + if (sizeOnly) + return (new ArrayBuffer(this.valueHexView.byteLength)); + const curView = this.valueHexView; + const retView = new Uint8Array(this.blockLength); + for (let i = 0; i < (this.blockLength - 1); i++) + retView[i] = curView[i] | 0x80; + retView[this.blockLength - 1] = curView[this.blockLength - 1]; + return retView.buffer; + } + const encodedBuf = utilToBase(this.valueDec, 7); + if (encodedBuf.byteLength === 0) { + this.error = "Error during encoding SID value"; + return EMPTY_BUFFER$1; + } + const retView = new Uint8Array(encodedBuf.byteLength); + if (!sizeOnly) { + const encodedView = new Uint8Array(encodedBuf); + const len = encodedBuf.byteLength - 1; + for (let i = 0; i < len; i++) + retView[i] = encodedView[i] | 0x80; + retView[len] = encodedView[len]; + } + return retView.buffer; + } + toString() { + let result = ""; + if (this.isHexOnly) + result = Convert_1.ToHex(this.valueHexView); + else { + result = this.valueDec.toString(); + } + return result; + } + toJSON() { + return { + ...super.toJSON(), + valueDec: this.valueDec, + }; + } + } + LocalRelativeSidValueBlock.NAME = "relativeSidBlock"; + + class LocalRelativeObjectIdentifierValueBlock extends ValueBlock { + constructor({ value = EMPTY_STRING, ...parameters } = {}) { + 
super(parameters); + this.value = []; + if (value) { + this.fromString(value); + } + } + fromBER(inputBuffer, inputOffset, inputLength) { + let resultOffset = inputOffset; + while (inputLength > 0) { + const sidBlock = new LocalRelativeSidValueBlock(); + resultOffset = sidBlock.fromBER(inputBuffer, resultOffset, inputLength); + if (resultOffset === -1) { + this.blockLength = 0; + this.error = sidBlock.error; + return resultOffset; + } + this.blockLength += sidBlock.blockLength; + inputLength -= sidBlock.blockLength; + this.value.push(sidBlock); + } + return resultOffset; + } + toBER(sizeOnly, writer) { + const retBuffers = []; + for (let i = 0; i < this.value.length; i++) { + const valueBuf = this.value[i].toBER(sizeOnly); + if (valueBuf.byteLength === 0) { + this.error = this.value[i].error; + return EMPTY_BUFFER$1; + } + retBuffers.push(valueBuf); + } + return concat(retBuffers); + } + fromString(string) { + this.value = []; + let pos1 = 0; + let pos2 = 0; + let sid = ""; + do { + pos2 = string.indexOf(".", pos1); + if (pos2 === -1) + sid = string.substring(pos1); + else + sid = string.substring(pos1, pos2); + pos1 = pos2 + 1; + const sidBlock = new LocalRelativeSidValueBlock(); + sidBlock.valueDec = parseInt(sid, 10); + if (isNaN(sidBlock.valueDec)) + return true; + this.value.push(sidBlock); + } while (pos2 !== -1); + return true; + } + toString() { + let result = ""; + let isHexOnly = false; + for (let i = 0; i < this.value.length; i++) { + isHexOnly = this.value[i].isHexOnly; + let sidStr = this.value[i].toString(); + if (i !== 0) + result = `${result}.`; + if (isHexOnly) { + sidStr = `{${sidStr}}`; + result += sidStr; + } + else + result += sidStr; + } + return result; + } + toJSON() { + const object = { + ...super.toJSON(), + value: this.toString(), + sidArray: [], + }; + for (let i = 0; i < this.value.length; i++) + object.sidArray.push(this.value[i].toJSON()); + return object; + } + } + LocalRelativeObjectIdentifierValueBlock.NAME = 
"RelativeObjectIdentifierValueBlock"; + + var _a$l; + class RelativeObjectIdentifier extends BaseBlock { + constructor(parameters = {}) { + super(parameters, LocalRelativeObjectIdentifierValueBlock); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 13; + } + getValue() { + return this.valueBlock.toString(); + } + setValue(value) { + this.valueBlock.fromString(value); + } + onAsciiEncoding() { + return `${this.constructor.NAME} : ${this.valueBlock.toString() || "empty"}`; + } + toJSON() { + return { + ...super.toJSON(), + value: this.getValue(), + }; + } + } + _a$l = RelativeObjectIdentifier; + (() => { + typeStore.RelativeObjectIdentifier = _a$l; + })(); + RelativeObjectIdentifier.NAME = "RelativeObjectIdentifier"; + + var _a$k; + class Sequence extends Constructed { + constructor(parameters = {}) { + super(parameters); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 16; + } + } + _a$k = Sequence; + (() => { + typeStore.Sequence = _a$k; + })(); + Sequence.NAME = "SEQUENCE"; + + var _a$j; + let Set$1 = class Set extends Constructed { + constructor(parameters = {}) { + super(parameters); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 17; + } + }; + _a$j = Set$1; + (() => { + typeStore.Set = _a$j; + })(); + Set$1.NAME = "SET"; + + class LocalStringValueBlock extends HexBlock(ValueBlock) { + constructor({ ...parameters } = {}) { + super(parameters); + this.isHexOnly = true; + this.value = EMPTY_STRING; + } + toJSON() { + return { + ...super.toJSON(), + value: this.value, + }; + } + } + LocalStringValueBlock.NAME = "StringValueBlock"; + + class LocalSimpleStringValueBlock extends LocalStringValueBlock { + } + LocalSimpleStringValueBlock.NAME = "SimpleStringValueBlock"; + + class LocalSimpleStringBlock extends BaseStringBlock { + constructor({ ...parameters } = {}) { + super(parameters, LocalSimpleStringValueBlock); + } + fromBuffer(inputBuffer) { + this.valueBlock.value = String.fromCharCode.apply(null, 
BufferSourceConverter_1.toUint8Array(inputBuffer)); + } + fromString(inputString) { + const strLen = inputString.length; + const view = this.valueBlock.valueHexView = new Uint8Array(strLen); + for (let i = 0; i < strLen; i++) + view[i] = inputString.charCodeAt(i); + this.valueBlock.value = inputString; + } + } + LocalSimpleStringBlock.NAME = "SIMPLE STRING"; + + class LocalUtf8StringValueBlock extends LocalSimpleStringBlock { + fromBuffer(inputBuffer) { + this.valueBlock.valueHexView = BufferSourceConverter_1.toUint8Array(inputBuffer); + try { + this.valueBlock.value = Convert_1.ToUtf8String(inputBuffer); + } + catch (ex) { + this.warnings.push(`Error during "decodeURIComponent": ${ex}, using raw string`); + this.valueBlock.value = Convert_1.ToBinary(inputBuffer); + } + } + fromString(inputString) { + this.valueBlock.valueHexView = new Uint8Array(Convert_1.FromUtf8String(inputString)); + this.valueBlock.value = inputString; + } + } + LocalUtf8StringValueBlock.NAME = "Utf8StringValueBlock"; + + var _a$i; + class Utf8String extends LocalUtf8StringValueBlock { + constructor(parameters = {}) { + super(parameters); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 12; + } + } + _a$i = Utf8String; + (() => { + typeStore.Utf8String = _a$i; + })(); + Utf8String.NAME = "UTF8String"; + + class LocalBmpStringValueBlock extends LocalSimpleStringBlock { + fromBuffer(inputBuffer) { + this.valueBlock.value = Convert_1.ToUtf16String(inputBuffer); + this.valueBlock.valueHexView = BufferSourceConverter_1.toUint8Array(inputBuffer); + } + fromString(inputString) { + this.valueBlock.value = inputString; + this.valueBlock.valueHexView = new Uint8Array(Convert_1.FromUtf16String(inputString)); + } + } + LocalBmpStringValueBlock.NAME = "BmpStringValueBlock"; + + var _a$h; + class BmpString extends LocalBmpStringValueBlock { + constructor({ ...parameters } = {}) { + super(parameters); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 30; + } + } + _a$h = BmpString; + (() => { + 
typeStore.BmpString = _a$h; + })(); + BmpString.NAME = "BMPString"; + + class LocalUniversalStringValueBlock extends LocalSimpleStringBlock { + fromBuffer(inputBuffer) { + const copyBuffer = ArrayBuffer.isView(inputBuffer) ? inputBuffer.slice().buffer : inputBuffer.slice(0); + const valueView = new Uint8Array(copyBuffer); + for (let i = 0; i < valueView.length; i += 4) { + valueView[i] = valueView[i + 3]; + valueView[i + 1] = valueView[i + 2]; + valueView[i + 2] = 0x00; + valueView[i + 3] = 0x00; + } + this.valueBlock.value = String.fromCharCode.apply(null, new Uint32Array(copyBuffer)); + } + fromString(inputString) { + const strLength = inputString.length; + const valueHexView = this.valueBlock.valueHexView = new Uint8Array(strLength * 4); + for (let i = 0; i < strLength; i++) { + const codeBuf = utilToBase(inputString.charCodeAt(i), 8); + const codeView = new Uint8Array(codeBuf); + if (codeView.length > 4) + continue; + const dif = 4 - codeView.length; + for (let j = (codeView.length - 1); j >= 0; j--) + valueHexView[i * 4 + j + dif] = codeView[j]; + } + this.valueBlock.value = inputString; + } + } + LocalUniversalStringValueBlock.NAME = "UniversalStringValueBlock"; + + var _a$g; + class UniversalString extends LocalUniversalStringValueBlock { + constructor({ ...parameters } = {}) { + super(parameters); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 28; + } + } + _a$g = UniversalString; + (() => { + typeStore.UniversalString = _a$g; + })(); + UniversalString.NAME = "UniversalString"; + + var _a$f; + class NumericString extends LocalSimpleStringBlock { + constructor(parameters = {}) { + super(parameters); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 18; + } + } + _a$f = NumericString; + (() => { + typeStore.NumericString = _a$f; + })(); + NumericString.NAME = "NumericString"; + + var _a$e; + class PrintableString extends LocalSimpleStringBlock { + constructor(parameters = {}) { + super(parameters); + this.idBlock.tagClass = 1; + 
this.idBlock.tagNumber = 19; + } + } + _a$e = PrintableString; + (() => { + typeStore.PrintableString = _a$e; + })(); + PrintableString.NAME = "PrintableString"; + + var _a$d; + class TeletexString extends LocalSimpleStringBlock { + constructor(parameters = {}) { + super(parameters); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 20; + } + } + _a$d = TeletexString; + (() => { + typeStore.TeletexString = _a$d; + })(); + TeletexString.NAME = "TeletexString"; + + var _a$c; + class VideotexString extends LocalSimpleStringBlock { + constructor(parameters = {}) { + super(parameters); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 21; + } + } + _a$c = VideotexString; + (() => { + typeStore.VideotexString = _a$c; + })(); + VideotexString.NAME = "VideotexString"; + + var _a$b; + class IA5String extends LocalSimpleStringBlock { + constructor(parameters = {}) { + super(parameters); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 22; + } + } + _a$b = IA5String; + (() => { + typeStore.IA5String = _a$b; + })(); + IA5String.NAME = "IA5String"; + + var _a$a; + class GraphicString extends LocalSimpleStringBlock { + constructor(parameters = {}) { + super(parameters); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 25; + } + } + _a$a = GraphicString; + (() => { + typeStore.GraphicString = _a$a; + })(); + GraphicString.NAME = "GraphicString"; + + var _a$9; + class VisibleString extends LocalSimpleStringBlock { + constructor(parameters = {}) { + super(parameters); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 26; + } + } + _a$9 = VisibleString; + (() => { + typeStore.VisibleString = _a$9; + })(); + VisibleString.NAME = "VisibleString"; + + var _a$8; + class GeneralString extends LocalSimpleStringBlock { + constructor(parameters = {}) { + super(parameters); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 27; + } + } + _a$8 = GeneralString; + (() => { + typeStore.GeneralString = _a$8; + })(); + GeneralString.NAME = "GeneralString"; + + 
var _a$7; + class CharacterString extends LocalSimpleStringBlock { + constructor(parameters = {}) { + super(parameters); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 29; + } + } + _a$7 = CharacterString; + (() => { + typeStore.CharacterString = _a$7; + })(); + CharacterString.NAME = "CharacterString"; + + var _a$6; + class UTCTime extends VisibleString { + constructor({ value, valueDate, ...parameters } = {}) { + super(parameters); + this.year = 0; + this.month = 0; + this.day = 0; + this.hour = 0; + this.minute = 0; + this.second = 0; + if (value) { + this.fromString(value); + this.valueBlock.valueHexView = new Uint8Array(value.length); + for (let i = 0; i < value.length; i++) + this.valueBlock.valueHexView[i] = value.charCodeAt(i); + } + if (valueDate) { + this.fromDate(valueDate); + this.valueBlock.valueHexView = new Uint8Array(this.toBuffer()); + } + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 23; + } + fromBuffer(inputBuffer) { + this.fromString(String.fromCharCode.apply(null, BufferSourceConverter_1.toUint8Array(inputBuffer))); + } + toBuffer() { + const str = this.toString(); + const buffer = new ArrayBuffer(str.length); + const view = new Uint8Array(buffer); + for (let i = 0; i < str.length; i++) + view[i] = str.charCodeAt(i); + return buffer; + } + fromDate(inputDate) { + this.year = inputDate.getUTCFullYear(); + this.month = inputDate.getUTCMonth() + 1; + this.day = inputDate.getUTCDate(); + this.hour = inputDate.getUTCHours(); + this.minute = inputDate.getUTCMinutes(); + this.second = inputDate.getUTCSeconds(); + } + toDate() { + return (new Date(Date.UTC(this.year, this.month - 1, this.day, this.hour, this.minute, this.second))); + } + fromString(inputString) { + const parser = /(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})Z/ig; + const parserArray = parser.exec(inputString); + if (parserArray === null) { + this.error = "Wrong input string for conversion"; + return; + } + const year = parseInt(parserArray[1], 10); + if (year >= 50) + 
this.year = 1900 + year; + else + this.year = 2000 + year; + this.month = parseInt(parserArray[2], 10); + this.day = parseInt(parserArray[3], 10); + this.hour = parseInt(parserArray[4], 10); + this.minute = parseInt(parserArray[5], 10); + this.second = parseInt(parserArray[6], 10); + } + toString(encoding = "iso") { + if (encoding === "iso") { + const outputArray = new Array(7); + outputArray[0] = padNumber(((this.year < 2000) ? (this.year - 1900) : (this.year - 2000)), 2); + outputArray[1] = padNumber(this.month, 2); + outputArray[2] = padNumber(this.day, 2); + outputArray[3] = padNumber(this.hour, 2); + outputArray[4] = padNumber(this.minute, 2); + outputArray[5] = padNumber(this.second, 2); + outputArray[6] = "Z"; + return outputArray.join(""); + } + return super.toString(encoding); + } + onAsciiEncoding() { + return `${this.constructor.NAME} : ${this.toDate().toISOString()}`; + } + toJSON() { + return { + ...super.toJSON(), + year: this.year, + month: this.month, + day: this.day, + hour: this.hour, + minute: this.minute, + second: this.second, + }; + } + } + _a$6 = UTCTime; + (() => { + typeStore.UTCTime = _a$6; + })(); + UTCTime.NAME = "UTCTime"; + + var _a$5; + class GeneralizedTime extends UTCTime { + constructor(parameters = {}) { + var _b; + super(parameters); + (_b = this.millisecond) !== null && _b !== void 0 ? 
_b : (this.millisecond = 0); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 24; + } + fromDate(inputDate) { + super.fromDate(inputDate); + this.millisecond = inputDate.getUTCMilliseconds(); + } + toDate() { + return (new Date(Date.UTC(this.year, this.month - 1, this.day, this.hour, this.minute, this.second, this.millisecond))); + } + fromString(inputString) { + let isUTC = false; + let timeString = ""; + let dateTimeString = ""; + let fractionPart = 0; + let parser; + let hourDifference = 0; + let minuteDifference = 0; + if (inputString[inputString.length - 1] === "Z") { + timeString = inputString.substring(0, inputString.length - 1); + isUTC = true; + } + else { + const number = new Number(inputString[inputString.length - 1]); + if (isNaN(number.valueOf())) + throw new Error("Wrong input string for conversion"); + timeString = inputString; + } + if (isUTC) { + if (timeString.indexOf("+") !== -1) + throw new Error("Wrong input string for conversion"); + if (timeString.indexOf("-") !== -1) + throw new Error("Wrong input string for conversion"); + } + else { + let multiplier = 1; + let differencePosition = timeString.indexOf("+"); + let differenceString = ""; + if (differencePosition === -1) { + differencePosition = timeString.indexOf("-"); + multiplier = -1; + } + if (differencePosition !== -1) { + differenceString = timeString.substring(differencePosition + 1); + timeString = timeString.substring(0, differencePosition); + if ((differenceString.length !== 2) && (differenceString.length !== 4)) + throw new Error("Wrong input string for conversion"); + let number = parseInt(differenceString.substring(0, 2), 10); + if (isNaN(number.valueOf())) + throw new Error("Wrong input string for conversion"); + hourDifference = multiplier * number; + if (differenceString.length === 4) { + number = parseInt(differenceString.substring(2, 4), 10); + if (isNaN(number.valueOf())) + throw new Error("Wrong input string for conversion"); + minuteDifference = multiplier * number; 
+ } + } + } + let fractionPointPosition = timeString.indexOf("."); + if (fractionPointPosition === -1) + fractionPointPosition = timeString.indexOf(","); + if (fractionPointPosition !== -1) { + const fractionPartCheck = new Number(`0${timeString.substring(fractionPointPosition)}`); + if (isNaN(fractionPartCheck.valueOf())) + throw new Error("Wrong input string for conversion"); + fractionPart = fractionPartCheck.valueOf(); + dateTimeString = timeString.substring(0, fractionPointPosition); + } + else + dateTimeString = timeString; + switch (true) { + case (dateTimeString.length === 8): + parser = /(\d{4})(\d{2})(\d{2})/ig; + if (fractionPointPosition !== -1) + throw new Error("Wrong input string for conversion"); + break; + case (dateTimeString.length === 10): + parser = /(\d{4})(\d{2})(\d{2})(\d{2})/ig; + if (fractionPointPosition !== -1) { + let fractionResult = 60 * fractionPart; + this.minute = Math.floor(fractionResult); + fractionResult = 60 * (fractionResult - this.minute); + this.second = Math.floor(fractionResult); + fractionResult = 1000 * (fractionResult - this.second); + this.millisecond = Math.floor(fractionResult); + } + break; + case (dateTimeString.length === 12): + parser = /(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})/ig; + if (fractionPointPosition !== -1) { + let fractionResult = 60 * fractionPart; + this.second = Math.floor(fractionResult); + fractionResult = 1000 * (fractionResult - this.second); + this.millisecond = Math.floor(fractionResult); + } + break; + case (dateTimeString.length === 14): + parser = /(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})/ig; + if (fractionPointPosition !== -1) { + const fractionResult = 1000 * fractionPart; + this.millisecond = Math.floor(fractionResult); + } + break; + default: + throw new Error("Wrong input string for conversion"); + } + const parserArray = parser.exec(dateTimeString); + if (parserArray === null) + throw new Error("Wrong input string for conversion"); + for (let j = 1; j < parserArray.length; j++) { + 
switch (j) { + case 1: + this.year = parseInt(parserArray[j], 10); + break; + case 2: + this.month = parseInt(parserArray[j], 10); + break; + case 3: + this.day = parseInt(parserArray[j], 10); + break; + case 4: + this.hour = parseInt(parserArray[j], 10) + hourDifference; + break; + case 5: + this.minute = parseInt(parserArray[j], 10) + minuteDifference; + break; + case 6: + this.second = parseInt(parserArray[j], 10); + break; + default: + throw new Error("Wrong input string for conversion"); + } + } + if (isUTC === false) { + const tempDate = new Date(this.year, this.month, this.day, this.hour, this.minute, this.second, this.millisecond); + this.year = tempDate.getUTCFullYear(); + this.month = tempDate.getUTCMonth(); + this.day = tempDate.getUTCDay(); + this.hour = tempDate.getUTCHours(); + this.minute = tempDate.getUTCMinutes(); + this.second = tempDate.getUTCSeconds(); + this.millisecond = tempDate.getUTCMilliseconds(); + } + } + toString(encoding = "iso") { + if (encoding === "iso") { + const outputArray = []; + outputArray.push(padNumber(this.year, 4)); + outputArray.push(padNumber(this.month, 2)); + outputArray.push(padNumber(this.day, 2)); + outputArray.push(padNumber(this.hour, 2)); + outputArray.push(padNumber(this.minute, 2)); + outputArray.push(padNumber(this.second, 2)); + if (this.millisecond !== 0) { + outputArray.push("."); + outputArray.push(padNumber(this.millisecond, 3)); + } + outputArray.push("Z"); + return outputArray.join(""); + } + return super.toString(encoding); + } + toJSON() { + return { + ...super.toJSON(), + millisecond: this.millisecond, + }; + } + } + _a$5 = GeneralizedTime; + (() => { + typeStore.GeneralizedTime = _a$5; + })(); + GeneralizedTime.NAME = "GeneralizedTime"; + + var _a$4; + class DATE extends Utf8String { + constructor(parameters = {}) { + super(parameters); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 31; + } + } + _a$4 = DATE; + (() => { + typeStore.DATE = _a$4; + })(); + DATE.NAME = "DATE"; + + var _a$3; + 
class TimeOfDay extends Utf8String { + constructor(parameters = {}) { + super(parameters); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 32; + } + } + _a$3 = TimeOfDay; + (() => { + typeStore.TimeOfDay = _a$3; + })(); + TimeOfDay.NAME = "TimeOfDay"; + + var _a$2; + class DateTime extends Utf8String { + constructor(parameters = {}) { + super(parameters); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 33; + } + } + _a$2 = DateTime; + (() => { + typeStore.DateTime = _a$2; + })(); + DateTime.NAME = "DateTime"; + + var _a$1; + class Duration extends Utf8String { + constructor(parameters = {}) { + super(parameters); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 34; + } + } + _a$1 = Duration; + (() => { + typeStore.Duration = _a$1; + })(); + Duration.NAME = "Duration"; + + var _a; + class TIME extends Utf8String { + constructor(parameters = {}) { + super(parameters); + this.idBlock.tagClass = 1; + this.idBlock.tagNumber = 14; + } + } + _a = TIME; + (() => { + typeStore.TIME = _a; + })(); + TIME.NAME = "TIME"; + + /** + * Convert a PKCS#1 in ASN1 DER format to a JWK key + */ + function pkcs1ToJwk(bytes) { + const { result } = fromBER(bytes); + // @ts-expect-error this looks fragile but DER is a canonical format so we are + // safe to have deeply property chains like this + const values = result.valueBlock.value; + const key = { + n: toString$1(bnToBuf(values[1].toBigInt()), 'base64url'), + e: toString$1(bnToBuf(values[2].toBigInt()), 'base64url'), + d: toString$1(bnToBuf(values[3].toBigInt()), 'base64url'), + p: toString$1(bnToBuf(values[4].toBigInt()), 'base64url'), + q: toString$1(bnToBuf(values[5].toBigInt()), 'base64url'), + dp: toString$1(bnToBuf(values[6].toBigInt()), 'base64url'), + dq: toString$1(bnToBuf(values[7].toBigInt()), 'base64url'), + qi: toString$1(bnToBuf(values[8].toBigInt()), 'base64url'), + kty: 'RSA', + alg: 'RS256' + }; + return key; + } + /** + * Convert a JWK key into PKCS#1 in ASN1 DER format + */ + function 
jwkToPkcs1(jwk) { + if (jwk.n == null || jwk.e == null || jwk.d == null || jwk.p == null || jwk.q == null || jwk.dp == null || jwk.dq == null || jwk.qi == null) { + throw new CodeError$2('JWK was missing components', 'ERR_INVALID_PARAMETERS'); + } + const root = new Sequence({ + value: [ + new Integer({ value: 0 }), + Integer.fromBigInt(bufToBn(fromString(jwk.n, 'base64url'))), + Integer.fromBigInt(bufToBn(fromString(jwk.e, 'base64url'))), + Integer.fromBigInt(bufToBn(fromString(jwk.d, 'base64url'))), + Integer.fromBigInt(bufToBn(fromString(jwk.p, 'base64url'))), + Integer.fromBigInt(bufToBn(fromString(jwk.q, 'base64url'))), + Integer.fromBigInt(bufToBn(fromString(jwk.dp, 'base64url'))), + Integer.fromBigInt(bufToBn(fromString(jwk.dq, 'base64url'))), + Integer.fromBigInt(bufToBn(fromString(jwk.qi, 'base64url'))) + ] + }); + const der = root.toBER(); + return new Uint8Array(der, 0, der.byteLength); + } + /** + * Convert a PKCIX in ASN1 DER format to a JWK key + */ + function pkixToJwk(bytes) { + const { result } = fromBER(bytes); + // @ts-expect-error this looks fragile but DER is a canonical format so we are + // safe to have deeply property chains like this + const values = result.valueBlock.value[1].valueBlock.value[0].valueBlock.value; + return { + kty: 'RSA', + n: toString$1(bnToBuf(values[0].toBigInt()), 'base64url'), + e: toString$1(bnToBuf(values[1].toBigInt()), 'base64url') + }; + } + /** + * Convert a JWK key to PKCIX in ASN1 DER format + */ + function jwkToPkix(jwk) { + if (jwk.n == null || jwk.e == null) { + throw new CodeError$2('JWK was missing components', 'ERR_INVALID_PARAMETERS'); + } + const root = new Sequence({ + value: [ + new Sequence({ + value: [ + // rsaEncryption + new ObjectIdentifier({ + value: '1.2.840.113549.1.1.1' + }), + new Null() + ] + }), + // this appears to be a bug in asn1js.js - this should really be a Sequence + // and not a BitString but it generates the same bytes as node-forge so ๐Ÿคทโ€โ™‚๏ธ + new BitString({ + valueHex: 
new Sequence({ + value: [ + Integer.fromBigInt(bufToBn(fromString(jwk.n, 'base64url'))), + Integer.fromBigInt(bufToBn(fromString(jwk.e, 'base64url'))) + ] + }).toBER() + }) + ] + }); + const der = root.toBER(); + return new Uint8Array(der, 0, der.byteLength); + } + function bnToBuf(bn) { + let hex = bn.toString(16); + if (hex.length % 2 > 0) { + hex = `0${hex}`; + } + const len = hex.length / 2; + const u8 = new Uint8Array(len); + let i = 0; + let j = 0; + while (i < len) { + u8[i] = parseInt(hex.slice(j, j + 2), 16); + i += 1; + j += 2; + } + return u8; + } + function bufToBn(u8) { + const hex = []; + u8.forEach(function (i) { + let h = i.toString(16); + if (h.length % 2 > 0) { + h = `0${h}`; + } + hex.push(h); + }); + return BigInt('0x' + hex.join('')); + } + const SALT_LENGTH = 16; + const KEY_SIZE = 32; + const ITERATIONS = 10000; + async function exportToPem(privateKey, password) { + const crypto = webcrypto.get(); + // PrivateKeyInfo + const keyWrapper = new Sequence({ + value: [ + // version (0) + new Integer({ value: 0 }), + // privateKeyAlgorithm + new Sequence({ + value: [ + // rsaEncryption OID + new ObjectIdentifier({ + value: '1.2.840.113549.1.1.1' + }), + new Null() + ] + }), + // PrivateKey + new OctetString({ + valueHex: privateKey.marshal() + }) + ] + }); + const keyBuf = keyWrapper.toBER(); + const keyArr = new Uint8Array(keyBuf, 0, keyBuf.byteLength); + const salt = randomBytes(SALT_LENGTH); + const encryptionKey = await pbkdf2Async(sha512, password, salt, { + c: ITERATIONS, + dkLen: KEY_SIZE + }); + const iv = randomBytes(16); + const cryptoKey = await crypto.subtle.importKey('raw', encryptionKey, 'AES-CBC', false, ['encrypt']); + const encrypted = await crypto.subtle.encrypt({ + name: 'AES-CBC', + iv + }, cryptoKey, keyArr); + const pbkdf2Params = new Sequence({ + value: [ + // salt + new OctetString({ valueHex: salt }), + // iteration count + new Integer({ value: ITERATIONS }), + // key length + new Integer({ value: KEY_SIZE }), + // 
AlgorithmIdentifier + new Sequence({ + value: [ + // hmacWithSHA512 + new ObjectIdentifier({ value: '1.2.840.113549.2.11' }), + new Null() + ] + }) + ] + }); + const encryptionAlgorithm = new Sequence({ + value: [ + // pkcs5PBES2 + new ObjectIdentifier({ + value: '1.2.840.113549.1.5.13' + }), + new Sequence({ + value: [ + // keyDerivationFunc + new Sequence({ + value: [ + // pkcs5PBKDF2 + new ObjectIdentifier({ + value: '1.2.840.113549.1.5.12' + }), + // PBKDF2-params + pbkdf2Params + ] + }), + // encryptionScheme + new Sequence({ + value: [ + // aes256-CBC + new ObjectIdentifier({ + value: '2.16.840.1.101.3.4.1.42' + }), + // iv + new OctetString({ + valueHex: iv + }) + ] + }) + ] + }) + ] + }); + const finalWrapper = new Sequence({ + value: [ + encryptionAlgorithm, + new OctetString({ valueHex: encrypted }) + ] + }); + const finalWrapperBuf = finalWrapper.toBER(); + const finalWrapperArr = new Uint8Array(finalWrapperBuf, 0, finalWrapperBuf.byteLength); + return [ + '-----BEGIN ENCRYPTED PRIVATE KEY-----', + ...toString$1(finalWrapperArr, 'base64pad').split(/(.{64})/).filter(Boolean), + '-----END ENCRYPTED PRIVATE KEY-----' + ].join('\n'); + } + + async function generateKey$1(bits) { + const pair = await webcrypto.get().subtle.generateKey({ + name: 'RSASSA-PKCS1-v1_5', + modulusLength: bits, + publicExponent: new Uint8Array([0x01, 0x00, 0x01]), + hash: { name: 'SHA-256' } + }, true, ['sign', 'verify']); + const keys = await exportKey(pair); + return { + privateKey: keys[0], + publicKey: keys[1] + }; + } + // Takes a jwk key + async function unmarshalPrivateKey$1(key) { + const privateKey = await webcrypto.get().subtle.importKey('jwk', key, { + name: 'RSASSA-PKCS1-v1_5', + hash: { name: 'SHA-256' } + }, true, ['sign']); + const pair = [ + privateKey, + await derivePublicFromPrivate(key) + ]; + const keys = await exportKey({ + privateKey: pair[0], + publicKey: pair[1] + }); + return { + privateKey: keys[0], + publicKey: keys[1] + }; + } + async function 
hashAndSign$1(key, msg) { + const privateKey = await webcrypto.get().subtle.importKey('jwk', key, { + name: 'RSASSA-PKCS1-v1_5', + hash: { name: 'SHA-256' } + }, false, ['sign']); + const sig = await webcrypto.get().subtle.sign({ name: 'RSASSA-PKCS1-v1_5' }, privateKey, msg instanceof Uint8Array ? msg : msg.subarray()); + return new Uint8Array(sig, 0, sig.byteLength); + } + async function hashAndVerify$1(key, sig, msg) { + const publicKey = await webcrypto.get().subtle.importKey('jwk', key, { + name: 'RSASSA-PKCS1-v1_5', + hash: { name: 'SHA-256' } + }, false, ['verify']); + return webcrypto.get().subtle.verify({ name: 'RSASSA-PKCS1-v1_5' }, publicKey, sig, msg instanceof Uint8Array ? msg : msg.subarray()); + } + async function exportKey(pair) { + if (pair.privateKey == null || pair.publicKey == null) { + throw new CodeError$2('Private and public key are required', 'ERR_INVALID_PARAMETERS'); + } + return Promise.all([ + webcrypto.get().subtle.exportKey('jwk', pair.privateKey), + webcrypto.get().subtle.exportKey('jwk', pair.publicKey) + ]); + } + async function derivePublicFromPrivate(jwKey) { + return webcrypto.get().subtle.importKey('jwk', { + kty: jwKey.kty, + n: jwKey.n, + e: jwKey.e + }, { + name: 'RSASSA-PKCS1-v1_5', + hash: { name: 'SHA-256' } + }, true, ['verify']); + } + function keySize(jwk) { + if (jwk.kty !== 'RSA') { + throw new CodeError$2('invalid key type', 'ERR_INVALID_KEY_TYPE'); + } + else if (jwk.n == null) { + throw new CodeError$2('invalid key modulus', 'ERR_INVALID_KEY_MODULUS'); + } + const bytes = fromString(jwk.n, 'base64url'); + return bytes.length * 8; + } + + const MAX_RSA_KEY_SIZE = 8192; + class RsaPublicKey { + _key; + constructor(key) { + this._key = key; + } + verify(data, sig) { + return hashAndVerify$1(this._key, sig, data); + } + marshal() { + return jwkToPkix(this._key); + } + get bytes() { + return PublicKey.encode({ + Type: KeyType.RSA, + Data: this.marshal() + }).subarray(); + } + equals(key) { + return equals(this.bytes, 
key.bytes); + } + hash() { + const p = sha256$1.digest(this.bytes); + if (isPromise$4(p)) { + return p.then(({ bytes }) => bytes); + } + return p.bytes; + } + } + class RsaPrivateKey { + _key; + _publicKey; + constructor(key, publicKey) { + this._key = key; + this._publicKey = publicKey; + } + genSecret() { + return randomBytes(16); + } + sign(message) { + return hashAndSign$1(this._key, message); + } + get public() { + if (this._publicKey == null) { + throw new CodeError$2('public key not provided', 'ERR_PUBKEY_NOT_PROVIDED'); + } + return new RsaPublicKey(this._publicKey); + } + marshal() { + return jwkToPkcs1(this._key); + } + get bytes() { + return PrivateKey.encode({ + Type: KeyType.RSA, + Data: this.marshal() + }).subarray(); + } + equals(key) { + return equals(this.bytes, key.bytes); + } + hash() { + const p = sha256$1.digest(this.bytes); + if (isPromise$4(p)) { + return p.then(({ bytes }) => bytes); + } + return p.bytes; + } + /** + * Gets the ID of the key. + * + * The key id is the base58 encoding of the SHA-256 multihash of its public key. + * The public key is a protobuf encoding containing a type and the DER encoding + * of the PKCS SubjectPublicKeyInfo. + */ + async id() { + const hash = await this.public.hash(); + return toString$1(hash, 'base58btc'); + } + /** + * Exports the key as libp2p-key - a aes-gcm encrypted value with the key + * derived from the password. + * + * To export it as a password protected PEM file, please use the `exportPEM` + * function from `@libp2p/rsa`. 
+ */ + async export(password, format = 'pkcs-8') { + if (format === 'pkcs-8') { + return exportToPem(this, password); + } + else if (format === 'libp2p-key') { + return exporter(this.bytes, password); + } + else { + throw new CodeError$2(`export format '${format}' is not supported`, 'ERR_INVALID_EXPORT_FORMAT'); + } + } + } + async function unmarshalRsaPrivateKey(bytes) { + const jwk = pkcs1ToJwk(bytes); + if (keySize(jwk) > MAX_RSA_KEY_SIZE) { + throw new CodeError$2('key size is too large', 'ERR_KEY_SIZE_TOO_LARGE'); + } + const keys = await unmarshalPrivateKey$1(jwk); + return new RsaPrivateKey(keys.privateKey, keys.publicKey); + } + function unmarshalRsaPublicKey(bytes) { + const jwk = pkixToJwk(bytes); + if (keySize(jwk) > MAX_RSA_KEY_SIZE) { + throw new CodeError$2('key size is too large', 'ERR_KEY_SIZE_TOO_LARGE'); + } + return new RsaPublicKey(jwk); + } + async function fromJwk(jwk) { + if (keySize(jwk) > MAX_RSA_KEY_SIZE) { + throw new CodeError$2('key size is too large', 'ERR_KEY_SIZE_TOO_LARGE'); + } + const keys = await unmarshalPrivateKey$1(jwk); + return new RsaPrivateKey(keys.privateKey, keys.publicKey); + } + async function generateKeyPair$2(bits) { + if (bits > MAX_RSA_KEY_SIZE) { + throw new CodeError$2('key size is too large', 'ERR_KEY_SIZE_TOO_LARGE'); + } + const keys = await generateKey$1(bits); + return new RsaPrivateKey(keys.privateKey, keys.publicKey); + } + + var RSA = /*#__PURE__*/Object.freeze({ + __proto__: null, + MAX_RSA_KEY_SIZE: MAX_RSA_KEY_SIZE, + RsaPrivateKey: RsaPrivateKey, + RsaPublicKey: RsaPublicKey, + fromJwk: fromJwk, + generateKeyPair: generateKeyPair$2, + unmarshalRsaPrivateKey: unmarshalRsaPrivateKey, + unmarshalRsaPublicKey: unmarshalRsaPublicKey + }); + + // SHA2-256 need to try 2^128 hashes to execute birthday attack. + // BTC network is doing 2^67 hashes/sec as per early 2023. 
+ // Round constants: + // first 32 bits of the fractional parts of the cube roots of the first 64 primes 2..311) + // prettier-ignore + const SHA256_K = /* @__PURE__ */ new Uint32Array([ + 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5, + 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, + 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da, + 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967, + 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, + 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070, + 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3, + 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2 + ]); + // Initial state: + // first 32 bits of the fractional parts of the square roots of the first 8 primes 2..19 + // prettier-ignore + const SHA256_IV = /* @__PURE__ */ new Uint32Array([ + 0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19 + ]); + // Temporary buffer, not used to store anything between runs + // Named this way because it matches specification. + const SHA256_W = /* @__PURE__ */ new Uint32Array(64); + class SHA256 extends HashMD { + constructor() { + super(64, 32, 8, false); + // We cannot use array here since array allows indexing by variable + // which means optimizer/compiler cannot use registers. 
+ this.A = SHA256_IV[0] | 0; + this.B = SHA256_IV[1] | 0; + this.C = SHA256_IV[2] | 0; + this.D = SHA256_IV[3] | 0; + this.E = SHA256_IV[4] | 0; + this.F = SHA256_IV[5] | 0; + this.G = SHA256_IV[6] | 0; + this.H = SHA256_IV[7] | 0; + } + get() { + const { A, B, C, D, E, F, G, H } = this; + return [A, B, C, D, E, F, G, H]; + } + // prettier-ignore + set(A, B, C, D, E, F, G, H) { + this.A = A | 0; + this.B = B | 0; + this.C = C | 0; + this.D = D | 0; + this.E = E | 0; + this.F = F | 0; + this.G = G | 0; + this.H = H | 0; + } + process(view, offset) { + // Extend the first 16 words into the remaining 48 words w[16..63] of the message schedule array + for (let i = 0; i < 16; i++, offset += 4) + SHA256_W[i] = view.getUint32(offset, false); + for (let i = 16; i < 64; i++) { + const W15 = SHA256_W[i - 15]; + const W2 = SHA256_W[i - 2]; + const s0 = rotr(W15, 7) ^ rotr(W15, 18) ^ (W15 >>> 3); + const s1 = rotr(W2, 17) ^ rotr(W2, 19) ^ (W2 >>> 10); + SHA256_W[i] = (s1 + SHA256_W[i - 7] + s0 + SHA256_W[i - 16]) | 0; + } + // Compression function main loop, 64 rounds + let { A, B, C, D, E, F, G, H } = this; + for (let i = 0; i < 64; i++) { + const sigma1 = rotr(E, 6) ^ rotr(E, 11) ^ rotr(E, 25); + const T1 = (H + sigma1 + Chi(E, F, G) + SHA256_K[i] + SHA256_W[i]) | 0; + const sigma0 = rotr(A, 2) ^ rotr(A, 13) ^ rotr(A, 22); + const T2 = (sigma0 + Maj(A, B, C)) | 0; + H = G; + G = F; + F = E; + E = (D + T1) | 0; + D = C; + C = B; + B = A; + A = (T1 + T2) | 0; + } + // Add the compressed chunk to the current hash value + A = (A + this.A) | 0; + B = (B + this.B) | 0; + C = (C + this.C) | 0; + D = (D + this.D) | 0; + E = (E + this.E) | 0; + F = (F + this.F) | 0; + G = (G + this.G) | 0; + H = (H + this.H) | 0; + this.set(A, B, C, D, E, F, G, H); + } + roundClean() { + SHA256_W.fill(0); + } + destroy() { + this.set(0, 0, 0, 0, 0, 0, 0, 0); + this.buffer.fill(0); + } + } + /** + * SHA2-256 hash function + * @param message - data that would be hashed + */ + const sha256 = /* 
@__PURE__ */ wrapConstructor(() => new SHA256()); + + /*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ + // Short Weierstrass curve. The formula is: yยฒ = xยณ + ax + b + function validatePointOpts(curve) { + const opts = validateBasic(curve); + validateObject(opts, { + a: 'field', + b: 'field', + }, { + allowedPrivateKeyLengths: 'array', + wrapPrivateKey: 'boolean', + isTorsionFree: 'function', + clearCofactor: 'function', + allowInfinityPoint: 'boolean', + fromBytes: 'function', + toBytes: 'function', + }); + const { endo, Fp, a } = opts; + if (endo) { + if (!Fp.eql(a, Fp.ZERO)) { + throw new Error('Endomorphism can only be defined for Koblitz curves that have a=0'); + } + if (typeof endo !== 'object' || + typeof endo.beta !== 'bigint' || + typeof endo.splitScalar !== 'function') { + throw new Error('Expected endomorphism with beta: bigint and splitScalar: function'); + } + } + return Object.freeze({ ...opts }); + } + // ASN.1 DER encoding utilities + const { bytesToNumberBE: b2n, hexToBytes: h2b } = ut; + const DER = { + // asn.1 DER encoding utils + Err: class DERErr extends Error { + constructor(m = '') { + super(m); + } + }, + _parseInt(data) { + const { Err: E } = DER; + if (data.length < 2 || data[0] !== 0x02) + throw new E('Invalid signature integer tag'); + const len = data[1]; + const res = data.subarray(2, len + 2); + if (!len || res.length !== len) + throw new E('Invalid signature integer: wrong length'); + // https://crypto.stackexchange.com/a/57734 Leftmost bit of first byte is 'negative' flag, + // since we always use positive integers here. 
It must always be empty: + // - add zero byte if exists + // - if next byte doesn't have a flag, leading zero is not allowed (minimal encoding) + if (res[0] & 0b10000000) + throw new E('Invalid signature integer: negative'); + if (res[0] === 0x00 && !(res[1] & 0b10000000)) + throw new E('Invalid signature integer: unnecessary leading zero'); + return { d: b2n(res), l: data.subarray(len + 2) }; // d is data, l is left + }, + toSig(hex) { + // parse DER signature + const { Err: E } = DER; + const data = typeof hex === 'string' ? h2b(hex) : hex; + abytes(data); + let l = data.length; + if (l < 2 || data[0] != 0x30) + throw new E('Invalid signature tag'); + if (data[1] !== l - 2) + throw new E('Invalid signature: incorrect length'); + const { d: r, l: sBytes } = DER._parseInt(data.subarray(2)); + const { d: s, l: rBytesLeft } = DER._parseInt(sBytes); + if (rBytesLeft.length) + throw new E('Invalid signature: left bytes after parsing'); + return { r, s }; + }, + hexFromSig(sig) { + // Add leading zero if first byte has negative bit enabled. More details in '_parseInt' + const slice = (s) => (Number.parseInt(s[0], 16) & 0b1000 ? '00' + s : s); + const h = (num) => { + const hex = num.toString(16); + return hex.length & 1 ? 
`0${hex}` : hex; + }; + const s = slice(h(sig.s)); + const r = slice(h(sig.r)); + const shl = s.length / 2; + const rhl = r.length / 2; + const sl = h(shl); + const rl = h(rhl); + return `30${h(rhl + shl + 4)}02${rl}${r}02${sl}${s}`; + }, + }; + // Be friendly to bad ECMAScript parsers by not using bigint literals + // prettier-ignore + const _0n = BigInt(0), _1n$1 = BigInt(1); BigInt(2); const _3n = BigInt(3); BigInt(4); + function weierstrassPoints(opts) { + const CURVE = validatePointOpts(opts); + const { Fp } = CURVE; // All curves has same field / group length as for now, but they can differ + const toBytes = CURVE.toBytes || + ((_c, point, _isCompressed) => { + const a = point.toAffine(); + return concatBytes(Uint8Array.from([0x04]), Fp.toBytes(a.x), Fp.toBytes(a.y)); + }); + const fromBytes = CURVE.fromBytes || + ((bytes) => { + // const head = bytes[0]; + const tail = bytes.subarray(1); + // if (head !== 0x04) throw new Error('Only non-compressed encoding is supported'); + const x = Fp.fromBytes(tail.subarray(0, Fp.BYTES)); + const y = Fp.fromBytes(tail.subarray(Fp.BYTES, 2 * Fp.BYTES)); + return { x, y }; + }); + /** + * yยฒ = xยณ + ax + b: Short weierstrass curve formula + * @returns yยฒ + */ + function weierstrassEquation(x) { + const { a, b } = CURVE; + const x2 = Fp.sqr(x); // x * x + const x3 = Fp.mul(x2, x); // x2 * x + return Fp.add(Fp.add(x3, Fp.mul(x, a)), b); // x3 + a * x + b + } + // Validate whether the passed curve params are valid. + // We check if curve equation works for generator point. + // `assertValidity()` won't work: `isTorsionFree()` is not available at this point in bls12-381. + // ProjectivePoint class has not been initialized yet. 
+ if (!Fp.eql(Fp.sqr(CURVE.Gy), weierstrassEquation(CURVE.Gx))) + throw new Error('bad generator point: equation left != right'); + // Valid group elements reside in range 1..n-1 + function isWithinCurveOrder(num) { + return typeof num === 'bigint' && _0n < num && num < CURVE.n; + } + function assertGE(num) { + if (!isWithinCurveOrder(num)) + throw new Error('Expected valid bigint: 0 < bigint < curve.n'); + } + // Validates if priv key is valid and converts it to bigint. + // Supports options allowedPrivateKeyLengths and wrapPrivateKey. + function normPrivateKeyToScalar(key) { + const { allowedPrivateKeyLengths: lengths, nByteLength, wrapPrivateKey, n } = CURVE; + if (lengths && typeof key !== 'bigint') { + if (isBytes$2(key)) + key = bytesToHex(key); + // Normalize to hex string, pad. E.g. P521 would norm 130-132 char hex to 132-char bytes + if (typeof key !== 'string' || !lengths.includes(key.length)) + throw new Error('Invalid key'); + key = key.padStart(nByteLength * 2, '0'); + } + let num; + try { + num = + typeof key === 'bigint' + ? key + : bytesToNumberBE(ensureBytes$1('private key', key, nByteLength)); + } + catch (error) { + throw new Error(`private key must be ${nByteLength} bytes, hex or bigint, not ${typeof key}`); + } + if (wrapPrivateKey) + num = mod(num, n); // disabled by default, enabled for BLS + assertGE(num); // num in range [1..N-1] + return num; + } + const pointPrecomputes = new Map(); + function assertPrjPoint(other) { + if (!(other instanceof Point)) + throw new Error('ProjectivePoint expected'); + } + /** + * Projective Point works in 3d / projective (homogeneous) coordinates: (x, y, z) โˆ‹ (x=x/z, y=y/z) + * Default Point works in 2d / affine coordinates: (x, y) + * We're doing calculations in projective, because its operations don't require costly inversion. 
+ */ + class Point { + constructor(px, py, pz) { + this.px = px; + this.py = py; + this.pz = pz; + if (px == null || !Fp.isValid(px)) + throw new Error('x required'); + if (py == null || !Fp.isValid(py)) + throw new Error('y required'); + if (pz == null || !Fp.isValid(pz)) + throw new Error('z required'); + } + // Does not validate if the point is on-curve. + // Use fromHex instead, or call assertValidity() later. + static fromAffine(p) { + const { x, y } = p || {}; + if (!p || !Fp.isValid(x) || !Fp.isValid(y)) + throw new Error('invalid affine point'); + if (p instanceof Point) + throw new Error('projective point not allowed'); + const is0 = (i) => Fp.eql(i, Fp.ZERO); + // fromAffine(x:0, y:0) would produce (x:0, y:0, z:1), but we need (x:0, y:1, z:0) + if (is0(x) && is0(y)) + return Point.ZERO; + return new Point(x, y, Fp.ONE); + } + get x() { + return this.toAffine().x; + } + get y() { + return this.toAffine().y; + } + /** + * Takes a bunch of Projective Points but executes only one + * inversion on all of them. Inversion is very slow operation, + * so this improves performance massively. + * Optimization: converts a list of projective points to a list of identical points with Z=1. + */ + static normalizeZ(points) { + const toInv = Fp.invertBatch(points.map((p) => p.pz)); + return points.map((p, i) => p.toAffine(toInv[i])).map(Point.fromAffine); + } + /** + * Converts hash string or Uint8Array to Point. + * @param hex short/long ECDSA hex + */ + static fromHex(hex) { + const P = Point.fromAffine(fromBytes(ensureBytes$1('pointHex', hex))); + P.assertValidity(); + return P; + } + // Multiplies generator point by privateKey. + static fromPrivateKey(privateKey) { + return Point.BASE.multiply(normPrivateKeyToScalar(privateKey)); + } + // "Private method", don't use it directly + _setWindowSize(windowSize) { + this._WINDOW_SIZE = windowSize; + pointPrecomputes.delete(this); + } + // A point on curve is valid if it conforms to equation. 
+ assertValidity() { + if (this.is0()) { + // (0, 1, 0) aka ZERO is invalid in most contexts. + // In BLS, ZERO can be serialized, so we allow it. + // (0, 0, 0) is wrong representation of ZERO and is always invalid. + if (CURVE.allowInfinityPoint && !Fp.is0(this.py)) + return; + throw new Error('bad point: ZERO'); + } + // Some 3rd-party test vectors require different wording between here & `fromCompressedHex` + const { x, y } = this.toAffine(); + // Check if x, y are valid field elements + if (!Fp.isValid(x) || !Fp.isValid(y)) + throw new Error('bad point: x or y not FE'); + const left = Fp.sqr(y); // yยฒ + const right = weierstrassEquation(x); // xยณ + ax + b + if (!Fp.eql(left, right)) + throw new Error('bad point: equation left != right'); + if (!this.isTorsionFree()) + throw new Error('bad point: not in prime-order subgroup'); + } + hasEvenY() { + const { y } = this.toAffine(); + if (Fp.isOdd) + return !Fp.isOdd(y); + throw new Error("Field doesn't support isOdd"); + } + /** + * Compare one point to another. + */ + equals(other) { + assertPrjPoint(other); + const { px: X1, py: Y1, pz: Z1 } = this; + const { px: X2, py: Y2, pz: Z2 } = other; + const U1 = Fp.eql(Fp.mul(X1, Z2), Fp.mul(X2, Z1)); + const U2 = Fp.eql(Fp.mul(Y1, Z2), Fp.mul(Y2, Z1)); + return U1 && U2; + } + /** + * Flips point to one corresponding to (x, -y) in Affine coordinates. + */ + negate() { + return new Point(this.px, Fp.neg(this.py), this.pz); + } + // Renes-Costello-Batina exception-free doubling formula. + // There is 30% faster Jacobian formula, but it is not complete. + // https://eprint.iacr.org/2015/1060, algorithm 3 + // Cost: 8M + 3S + 3*a + 2*b3 + 15add. 
+ double() { + const { a, b } = CURVE; + const b3 = Fp.mul(b, _3n); + const { px: X1, py: Y1, pz: Z1 } = this; + let X3 = Fp.ZERO, Y3 = Fp.ZERO, Z3 = Fp.ZERO; // prettier-ignore + let t0 = Fp.mul(X1, X1); // step 1 + let t1 = Fp.mul(Y1, Y1); + let t2 = Fp.mul(Z1, Z1); + let t3 = Fp.mul(X1, Y1); + t3 = Fp.add(t3, t3); // step 5 + Z3 = Fp.mul(X1, Z1); + Z3 = Fp.add(Z3, Z3); + X3 = Fp.mul(a, Z3); + Y3 = Fp.mul(b3, t2); + Y3 = Fp.add(X3, Y3); // step 10 + X3 = Fp.sub(t1, Y3); + Y3 = Fp.add(t1, Y3); + Y3 = Fp.mul(X3, Y3); + X3 = Fp.mul(t3, X3); + Z3 = Fp.mul(b3, Z3); // step 15 + t2 = Fp.mul(a, t2); + t3 = Fp.sub(t0, t2); + t3 = Fp.mul(a, t3); + t3 = Fp.add(t3, Z3); + Z3 = Fp.add(t0, t0); // step 20 + t0 = Fp.add(Z3, t0); + t0 = Fp.add(t0, t2); + t0 = Fp.mul(t0, t3); + Y3 = Fp.add(Y3, t0); + t2 = Fp.mul(Y1, Z1); // step 25 + t2 = Fp.add(t2, t2); + t0 = Fp.mul(t2, t3); + X3 = Fp.sub(X3, t0); + Z3 = Fp.mul(t2, t1); + Z3 = Fp.add(Z3, Z3); // step 30 + Z3 = Fp.add(Z3, Z3); + return new Point(X3, Y3, Z3); + } + // Renes-Costello-Batina exception-free addition formula. + // There is 30% faster Jacobian formula, but it is not complete. + // https://eprint.iacr.org/2015/1060, algorithm 1 + // Cost: 12M + 0S + 3*a + 3*b3 + 23add. 
+ add(other) { + assertPrjPoint(other); + const { px: X1, py: Y1, pz: Z1 } = this; + const { px: X2, py: Y2, pz: Z2 } = other; + let X3 = Fp.ZERO, Y3 = Fp.ZERO, Z3 = Fp.ZERO; // prettier-ignore + const a = CURVE.a; + const b3 = Fp.mul(CURVE.b, _3n); + let t0 = Fp.mul(X1, X2); // step 1 + let t1 = Fp.mul(Y1, Y2); + let t2 = Fp.mul(Z1, Z2); + let t3 = Fp.add(X1, Y1); + let t4 = Fp.add(X2, Y2); // step 5 + t3 = Fp.mul(t3, t4); + t4 = Fp.add(t0, t1); + t3 = Fp.sub(t3, t4); + t4 = Fp.add(X1, Z1); + let t5 = Fp.add(X2, Z2); // step 10 + t4 = Fp.mul(t4, t5); + t5 = Fp.add(t0, t2); + t4 = Fp.sub(t4, t5); + t5 = Fp.add(Y1, Z1); + X3 = Fp.add(Y2, Z2); // step 15 + t5 = Fp.mul(t5, X3); + X3 = Fp.add(t1, t2); + t5 = Fp.sub(t5, X3); + Z3 = Fp.mul(a, t4); + X3 = Fp.mul(b3, t2); // step 20 + Z3 = Fp.add(X3, Z3); + X3 = Fp.sub(t1, Z3); + Z3 = Fp.add(t1, Z3); + Y3 = Fp.mul(X3, Z3); + t1 = Fp.add(t0, t0); // step 25 + t1 = Fp.add(t1, t0); + t2 = Fp.mul(a, t2); + t4 = Fp.mul(b3, t4); + t1 = Fp.add(t1, t2); + t2 = Fp.sub(t0, t2); // step 30 + t2 = Fp.mul(a, t2); + t4 = Fp.add(t4, t2); + t0 = Fp.mul(t1, t4); + Y3 = Fp.add(Y3, t0); + t0 = Fp.mul(t5, t4); // step 35 + X3 = Fp.mul(t3, X3); + X3 = Fp.sub(X3, t0); + t0 = Fp.mul(t3, t1); + Z3 = Fp.mul(t5, Z3); + Z3 = Fp.add(Z3, t0); // step 40 + return new Point(X3, Y3, Z3); + } + subtract(other) { + return this.add(other.negate()); + } + is0() { + return this.equals(Point.ZERO); + } + wNAF(n) { + return wnaf.wNAFCached(this, pointPrecomputes, n, (comp) => { + const toInv = Fp.invertBatch(comp.map((p) => p.pz)); + return comp.map((p, i) => p.toAffine(toInv[i])).map(Point.fromAffine); + }); + } + /** + * Non-constant-time multiplication. Uses double-and-add algorithm. + * It's faster, but should only be used when you don't care about + * an exposed private key e.g. sig verification, which works over *public* keys. 
+ */ + multiplyUnsafe(n) { + const I = Point.ZERO; + if (n === _0n) + return I; + assertGE(n); // Will throw on 0 + if (n === _1n$1) + return this; + const { endo } = CURVE; + if (!endo) + return wnaf.unsafeLadder(this, n); + // Apply endomorphism + let { k1neg, k1, k2neg, k2 } = endo.splitScalar(n); + let k1p = I; + let k2p = I; + let d = this; + while (k1 > _0n || k2 > _0n) { + if (k1 & _1n$1) + k1p = k1p.add(d); + if (k2 & _1n$1) + k2p = k2p.add(d); + d = d.double(); + k1 >>= _1n$1; + k2 >>= _1n$1; + } + if (k1neg) + k1p = k1p.negate(); + if (k2neg) + k2p = k2p.negate(); + k2p = new Point(Fp.mul(k2p.px, endo.beta), k2p.py, k2p.pz); + return k1p.add(k2p); + } + /** + * Constant time multiplication. + * Uses wNAF method. Windowed method may be 10% faster, + * but takes 2x longer to generate and consumes 2x memory. + * Uses precomputes when available. + * Uses endomorphism for Koblitz curves. + * @param scalar by which the point would be multiplied + * @returns New point + */ + multiply(scalar) { + assertGE(scalar); + let n = scalar; + let point, fake; // Fake point is used to const-time mult + const { endo } = CURVE; + if (endo) { + const { k1neg, k1, k2neg, k2 } = endo.splitScalar(n); + let { p: k1p, f: f1p } = this.wNAF(k1); + let { p: k2p, f: f2p } = this.wNAF(k2); + k1p = wnaf.constTimeNegate(k1neg, k1p); + k2p = wnaf.constTimeNegate(k2neg, k2p); + k2p = new Point(Fp.mul(k2p.px, endo.beta), k2p.py, k2p.pz); + point = k1p.add(k2p); + fake = f1p.add(f2p); + } + else { + const { p, f } = this.wNAF(n); + point = p; + fake = f; + } + // Normalize `z` for both points, but return only real one + return Point.normalizeZ([point, fake])[0]; + } + /** + * Efficiently calculate `aP + bQ`. Unsafe, can expose private key, if used incorrectly. + * Not using Strauss-Shamir trick: precomputation tables are faster. + * The trick could be useful if both P and Q are not G (not in our case). 
+ * @returns non-zero affine point + */ + multiplyAndAddUnsafe(Q, a, b) { + const G = Point.BASE; // No Strauss-Shamir trick: we have 10% faster G precomputes + const mul = (P, a // Select faster multiply() method + ) => (a === _0n || a === _1n$1 || !P.equals(G) ? P.multiplyUnsafe(a) : P.multiply(a)); + const sum = mul(this, a).add(mul(Q, b)); + return sum.is0() ? undefined : sum; + } + // Converts Projective point to affine (x, y) coordinates. + // Can accept precomputed Z^-1 - for example, from invertBatch. + // (x, y, z) โˆ‹ (x=x/z, y=y/z) + toAffine(iz) { + const { px: x, py: y, pz: z } = this; + const is0 = this.is0(); + // If invZ was 0, we return zero point. However we still want to execute + // all operations, so we replace invZ with a random number, 1. + if (iz == null) + iz = is0 ? Fp.ONE : Fp.inv(z); + const ax = Fp.mul(x, iz); + const ay = Fp.mul(y, iz); + const zz = Fp.mul(z, iz); + if (is0) + return { x: Fp.ZERO, y: Fp.ZERO }; + if (!Fp.eql(zz, Fp.ONE)) + throw new Error('invZ was invalid'); + return { x: ax, y: ay }; + } + isTorsionFree() { + const { h: cofactor, isTorsionFree } = CURVE; + if (cofactor === _1n$1) + return true; // No subgroups, always torsion-free + if (isTorsionFree) + return isTorsionFree(Point, this); + throw new Error('isTorsionFree() has not been declared for the elliptic curve'); + } + clearCofactor() { + const { h: cofactor, clearCofactor } = CURVE; + if (cofactor === _1n$1) + return this; // Fast-path + if (clearCofactor) + return clearCofactor(Point, this); + return this.multiplyUnsafe(CURVE.h); + } + toRawBytes(isCompressed = true) { + this.assertValidity(); + return toBytes(Point, this, isCompressed); + } + toHex(isCompressed = true) { + return bytesToHex(this.toRawBytes(isCompressed)); + } + } + Point.BASE = new Point(CURVE.Gx, CURVE.Gy, Fp.ONE); + Point.ZERO = new Point(Fp.ZERO, Fp.ONE, Fp.ZERO); + const _bits = CURVE.nBitLength; + const wnaf = wNAF(Point, CURVE.endo ? 
Math.ceil(_bits / 2) : _bits); + // Validate if generator point is on curve + return { + CURVE, + ProjectivePoint: Point, + normPrivateKeyToScalar, + weierstrassEquation, + isWithinCurveOrder, + }; + } + function validateOpts(curve) { + const opts = validateBasic(curve); + validateObject(opts, { + hash: 'hash', + hmac: 'function', + randomBytes: 'function', + }, { + bits2int: 'function', + bits2int_modN: 'function', + lowS: 'boolean', + }); + return Object.freeze({ lowS: true, ...opts }); + } + function weierstrass(curveDef) { + const CURVE = validateOpts(curveDef); + const { Fp, n: CURVE_ORDER } = CURVE; + const compressedLen = Fp.BYTES + 1; // e.g. 33 for 32 + const uncompressedLen = 2 * Fp.BYTES + 1; // e.g. 65 for 32 + function isValidFieldElement(num) { + return _0n < num && num < Fp.ORDER; // 0 is banned since it's not invertible FE + } + function modN(a) { + return mod(a, CURVE_ORDER); + } + function invN(a) { + return invert(a, CURVE_ORDER); + } + const { ProjectivePoint: Point, normPrivateKeyToScalar, weierstrassEquation, isWithinCurveOrder, } = weierstrassPoints({ + ...CURVE, + toBytes(_c, point, isCompressed) { + const a = point.toAffine(); + const x = Fp.toBytes(a.x); + const cat = concatBytes; + if (isCompressed) { + return cat(Uint8Array.from([point.hasEvenY() ? 0x02 : 0x03]), x); + } + else { + return cat(Uint8Array.from([0x04]), x, Fp.toBytes(a.y)); + } + }, + fromBytes(bytes) { + const len = bytes.length; + const head = bytes[0]; + const tail = bytes.subarray(1); + // this.assertValidity() is done inside of fromHex + if (len === compressedLen && (head === 0x02 || head === 0x03)) { + const x = bytesToNumberBE(tail); + if (!isValidFieldElement(x)) + throw new Error('Point is not on curve'); + const y2 = weierstrassEquation(x); // yยฒ = xยณ + ax + b + let y; + try { + y = Fp.sqrt(y2); // y = yยฒ ^ (p+1)/4 + } + catch (sqrtError) { + const suffix = sqrtError instanceof Error ? 
': ' + sqrtError.message : ''; + throw new Error('Point is not on curve' + suffix); + } + const isYOdd = (y & _1n$1) === _1n$1; + // ECDSA + const isHeadOdd = (head & 1) === 1; + if (isHeadOdd !== isYOdd) + y = Fp.neg(y); + return { x, y }; + } + else if (len === uncompressedLen && head === 0x04) { + const x = Fp.fromBytes(tail.subarray(0, Fp.BYTES)); + const y = Fp.fromBytes(tail.subarray(Fp.BYTES, 2 * Fp.BYTES)); + return { x, y }; + } + else { + throw new Error(`Point of length ${len} was invalid. Expected ${compressedLen} compressed bytes or ${uncompressedLen} uncompressed bytes`); + } + }, + }); + const numToNByteStr = (num) => bytesToHex(numberToBytesBE(num, CURVE.nByteLength)); + function isBiggerThanHalfOrder(number) { + const HALF = CURVE_ORDER >> _1n$1; + return number > HALF; + } + function normalizeS(s) { + return isBiggerThanHalfOrder(s) ? modN(-s) : s; + } + // slice bytes num + const slcNum = (b, from, to) => bytesToNumberBE(b.slice(from, to)); + /** + * ECDSA signature with its (r, s) properties. Supports DER & compact representations. 
+ */ + class Signature { + constructor(r, s, recovery) { + this.r = r; + this.s = s; + this.recovery = recovery; + this.assertValidity(); + } + // pair (bytes of r, bytes of s) + static fromCompact(hex) { + const l = CURVE.nByteLength; + hex = ensureBytes$1('compactSignature', hex, l * 2); + return new Signature(slcNum(hex, 0, l), slcNum(hex, l, 2 * l)); + } + // DER encoded ECDSA signature + // https://bitcoin.stackexchange.com/questions/57644/what-are-the-parts-of-a-bitcoin-transaction-input-script + static fromDER(hex) { + const { r, s } = DER.toSig(ensureBytes$1('DER', hex)); + return new Signature(r, s); + } + assertValidity() { + // can use assertGE here + if (!isWithinCurveOrder(this.r)) + throw new Error('r must be 0 < r < CURVE.n'); + if (!isWithinCurveOrder(this.s)) + throw new Error('s must be 0 < s < CURVE.n'); + } + addRecoveryBit(recovery) { + return new Signature(this.r, this.s, recovery); + } + recoverPublicKey(msgHash) { + const { r, s, recovery: rec } = this; + const h = bits2int_modN(ensureBytes$1('msgHash', msgHash)); // Truncate hash + if (rec == null || ![0, 1, 2, 3].includes(rec)) + throw new Error('recovery id invalid'); + const radj = rec === 2 || rec === 3 ? r + CURVE.n : r; + if (radj >= Fp.ORDER) + throw new Error('recovery id 2 or 3 invalid'); + const prefix = (rec & 1) === 0 ? '02' : '03'; + const R = Point.fromHex(prefix + numToNByteStr(radj)); + const ir = invN(radj); // r^-1 + const u1 = modN(-h * ir); // -hr^-1 + const u2 = modN(s * ir); // sr^-1 + const Q = Point.BASE.multiplyAndAddUnsafe(R, u1, u2); // (sr^-1)R-(hr^-1)G = -(hr^-1)G + (sr^-1) + if (!Q) + throw new Error('point at infinify'); // unsafe is fine: no priv data leaked + Q.assertValidity(); + return Q; + } + // Signatures should be low-s, to prevent malleability. + hasHighS() { + return isBiggerThanHalfOrder(this.s); + } + normalizeS() { + return this.hasHighS() ? 
new Signature(this.r, modN(-this.s), this.recovery) : this; + } + // DER-encoded + toDERRawBytes() { + return hexToBytes(this.toDERHex()); + } + toDERHex() { + return DER.hexFromSig({ r: this.r, s: this.s }); + } + // padded bytes of r, then padded bytes of s + toCompactRawBytes() { + return hexToBytes(this.toCompactHex()); + } + toCompactHex() { + return numToNByteStr(this.r) + numToNByteStr(this.s); + } + } + const utils = { + isValidPrivateKey(privateKey) { + try { + normPrivateKeyToScalar(privateKey); + return true; + } + catch (error) { + return false; + } + }, + normPrivateKeyToScalar: normPrivateKeyToScalar, + /** + * Produces cryptographically secure private key from random of size + * (groupLen + ceil(groupLen / 2)) with modulo bias being negligible. + */ + randomPrivateKey: () => { + const length = getMinHashLength(CURVE.n); + return mapHashToField(CURVE.randomBytes(length), CURVE.n); + }, + /** + * Creates precompute table for an arbitrary EC point. Makes point "cached". + * Allows to massively speed-up `point.multiply(scalar)`. + * @returns cached point + * @example + * const fast = utils.precompute(8, ProjectivePoint.fromHex(someonesPubKey)); + * fast.multiply(privKey); // much faster ECDH now + */ + precompute(windowSize = 8, point = Point.BASE) { + point._setWindowSize(windowSize); + point.multiply(BigInt(3)); // 3 is arbitrary, just need any number here + return point; + }, + }; + /** + * Computes public key for a private key. Checks for validity of the private key. + * @param privateKey private key + * @param isCompressed whether to return compact (default), or full key + * @returns Public key, full when isCompressed=false; short when isCompressed=true + */ + function getPublicKey(privateKey, isCompressed = true) { + return Point.fromPrivateKey(privateKey).toRawBytes(isCompressed); + } + /** + * Quick and dirty check for item being public key. Does not validate hex, or being on-curve. 
+ */ + function isProbPub(item) { + const arr = isBytes$2(item); + const str = typeof item === 'string'; + const len = (arr || str) && item.length; + if (arr) + return len === compressedLen || len === uncompressedLen; + if (str) + return len === 2 * compressedLen || len === 2 * uncompressedLen; + if (item instanceof Point) + return true; + return false; + } + /** + * ECDH (Elliptic Curve Diffie Hellman). + * Computes shared public key from private key and public key. + * Checks: 1) private key validity 2) shared key is on-curve. + * Does NOT hash the result. + * @param privateA private key + * @param publicB different public key + * @param isCompressed whether to return compact (default), or full key + * @returns shared public key + */ + function getSharedSecret(privateA, publicB, isCompressed = true) { + if (isProbPub(privateA)) + throw new Error('first arg must be private key'); + if (!isProbPub(publicB)) + throw new Error('second arg must be public key'); + const b = Point.fromHex(publicB); // check for being on-curve + return b.multiply(normPrivateKeyToScalar(privateA)).toRawBytes(isCompressed); + } + // RFC6979: ensure ECDSA msg is X bytes and < N. RFC suggests optional truncating via bits2octets. + // FIPS 186-4 4.6 suggests the leftmost min(nBitLen, outLen) bits, which matches bits2int. + // bits2int can produce res>N, we can do mod(res, N) since the bitLen is the same. + // int2octets can't be used; pads small msgs with 0: unacceptatble for trunc as per RFC vectors + const bits2int = CURVE.bits2int || + function (bytes) { + // For curves with nBitLength % 8 !== 0: bits2octets(bits2octets(m)) !== bits2octets(m) + // for some cases, since bytes.length * 8 is not actual bitLength. + const num = bytesToNumberBE(bytes); // check for == u8 done here + const delta = bytes.length * 8 - CURVE.nBitLength; // truncate to nBitLength leftmost bits + return delta > 0 ? 
num >> BigInt(delta) : num; + }; + const bits2int_modN = CURVE.bits2int_modN || + function (bytes) { + return modN(bits2int(bytes)); // can't use bytesToNumberBE here + }; + // NOTE: pads output with zero as per spec + const ORDER_MASK = bitMask(CURVE.nBitLength); + /** + * Converts to bytes. Checks if num in `[0..ORDER_MASK-1]` e.g.: `[0..2^256-1]`. + */ + function int2octets(num) { + if (typeof num !== 'bigint') + throw new Error('bigint expected'); + if (!(_0n <= num && num < ORDER_MASK)) + throw new Error(`bigint expected < 2^${CURVE.nBitLength}`); + // works with order, can have different size than numToField! + return numberToBytesBE(num, CURVE.nByteLength); + } + // Steps A, D of RFC6979 3.2 + // Creates RFC6979 seed; converts msg/privKey to numbers. + // Used only in sign, not in verify. + // NOTE: we cannot assume here that msgHash has same amount of bytes as curve order, this will be wrong at least for P521. + // Also it can be bigger for P224 + SHA256 + function prepSig(msgHash, privateKey, opts = defaultSigOpts) { + if (['recovered', 'canonical'].some((k) => k in opts)) + throw new Error('sign() legacy options not supported'); + const { hash, randomBytes } = CURVE; + let { lowS, prehash, extraEntropy: ent } = opts; // generates low-s sigs by default + if (lowS == null) + lowS = true; // RFC6979 3.2: we skip step A, because we already provide hash + msgHash = ensureBytes$1('msgHash', msgHash); + if (prehash) + msgHash = ensureBytes$1('prehashed msgHash', hash(msgHash)); + // We can't later call bits2octets, since nested bits2int is broken for curves + // with nBitLength % 8 !== 0. Because of that, we unwrap it here as int2octets call. + // const bits2octets = (bits) => int2octets(bits2int_modN(bits)) + const h1int = bits2int_modN(msgHash); + const d = normPrivateKeyToScalar(privateKey); // validate private key, convert to bigint + const seedArgs = [int2octets(d), int2octets(h1int)]; + // extraEntropy. RFC6979 3.6: additional k' (optional). 
+ if (ent != null && ent !== false) { + // K = HMAC_K(V || 0x00 || int2octets(x) || bits2octets(h1) || k') + const e = ent === true ? randomBytes(Fp.BYTES) : ent; // generate random bytes OR pass as-is + seedArgs.push(ensureBytes$1('extraEntropy', e)); // check for being bytes + } + const seed = concatBytes(...seedArgs); // Step D of RFC6979 3.2 + const m = h1int; // NOTE: no need to call bits2int second time here, it is inside truncateHash! + // Converts signature params into point w r/s, checks result for validity. + function k2sig(kBytes) { + // RFC 6979 Section 3.2, step 3: k = bits2int(T) + const k = bits2int(kBytes); // Cannot use fields methods, since it is group element + if (!isWithinCurveOrder(k)) + return; // Important: all mod() calls here must be done over N + const ik = invN(k); // k^-1 mod n + const q = Point.BASE.multiply(k).toAffine(); // q = Gk + const r = modN(q.x); // r = q.x mod n + if (r === _0n) + return; + // Can use scalar blinding b^-1(bm + bdr) where b โˆˆ [1,qโˆ’1] according to + // https://tches.iacr.org/index.php/TCHES/article/view/7337/6509. We've decided against it: + // a) dependency on CSPRNG b) 15% slowdown c) doesn't really help since bigints are not CT + const s = modN(ik * modN(m + r * d)); // Not using blinding here + if (s === _0n) + return; + let recovery = (q.x === r ? 0 : 2) | Number(q.y & _1n$1); // recovery bit (2 or 3, when q.x > n) + let normS = s; + if (lowS && isBiggerThanHalfOrder(s)) { + normS = normalizeS(s); // if lowS was passed, ensure s is always + recovery ^= 1; // // in the bottom half of N + } + return new Signature(r, normS, recovery); // use normS, not s + } + return { seed, k2sig }; + } + const defaultSigOpts = { lowS: CURVE.lowS, prehash: false }; + const defaultVerOpts = { lowS: CURVE.lowS, prehash: false }; + /** + * Signs message hash with a private key. + * ``` + * sign(m, d, k) where + * (x, y) = G ร— k + * r = x mod n + * s = (m + dr)/k mod n + * ``` + * @param msgHash NOT message. 
msg needs to be hashed to `msgHash`, or use `prehash`. + * @param privKey private key + * @param opts lowS for non-malleable sigs. extraEntropy for mixing randomness into k. prehash will hash first arg. + * @returns signature with recovery param + */ + function sign(msgHash, privKey, opts = defaultSigOpts) { + const { seed, k2sig } = prepSig(msgHash, privKey, opts); // Steps A, D of RFC6979 3.2. + const C = CURVE; + const drbg = createHmacDrbg(C.hash.outputLen, C.nByteLength, C.hmac); + return drbg(seed, k2sig); // Steps B, C, D, E, F, G + } + // Enable precomputes. Slows down first publicKey computation by 20ms. + Point.BASE._setWindowSize(8); + // utils.precompute(8, ProjectivePoint.BASE) + /** + * Verifies a signature against message hash and public key. + * Rejects lowS signatures by default: to override, + * specify option `{lowS: false}`. Implements section 4.1.4 from https://www.secg.org/sec1-v2.pdf: + * + * ``` + * verify(r, s, h, P) where + * U1 = hs^-1 mod n + * U2 = rs^-1 mod n + * R = U1โ‹…G - U2โ‹…P + * mod(R.x, n) == r + * ``` + */ + function verify(signature, msgHash, publicKey, opts = defaultVerOpts) { + const sg = signature; + msgHash = ensureBytes$1('msgHash', msgHash); + publicKey = ensureBytes$1('publicKey', publicKey); + if ('strict' in opts) + throw new Error('options.strict was renamed to lowS'); + const { lowS, prehash } = opts; + let _sig = undefined; + let P; + try { + if (typeof sg === 'string' || isBytes$2(sg)) { + // Signature can be represented in 2 ways: compact (2*nByteLength) & DER (variable-length). + // Since DER can also be 2*nByteLength bytes, we check for it first. 
+ try { + _sig = Signature.fromDER(sg); + } + catch (derError) { + if (!(derError instanceof DER.Err)) + throw derError; + _sig = Signature.fromCompact(sg); + } + } + else if (typeof sg === 'object' && typeof sg.r === 'bigint' && typeof sg.s === 'bigint') { + const { r, s } = sg; + _sig = new Signature(r, s); + } + else { + throw new Error('PARSE'); + } + P = Point.fromHex(publicKey); + } + catch (error) { + if (error.message === 'PARSE') + throw new Error(`signature must be Signature instance, Uint8Array or hex string`); + return false; + } + if (lowS && _sig.hasHighS()) + return false; + if (prehash) + msgHash = CURVE.hash(msgHash); + const { r, s } = _sig; + const h = bits2int_modN(msgHash); // Cannot use fields methods, since it is group element + const is = invN(s); // s^-1 + const u1 = modN(h * is); // u1 = hs^-1 mod n + const u2 = modN(r * is); // u2 = rs^-1 mod n + const R = Point.BASE.multiplyAndAddUnsafe(P, u1, u2)?.toAffine(); // R = u1โ‹…G + u2โ‹…P + if (!R) + return false; + const v = modN(R.x); + return v === r; + } + return { + CURVE, + getPublicKey, + getSharedSecret, + sign, + verify, + ProjectivePoint: Point, + Signature, + utils, + }; + } + + /*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ + // connects noble-curves to noble-hashes + function getHash(hash) { + return { + hash, + hmac: (key, ...msgs) => hmac(hash, key, concatBytes$1(...msgs)), + randomBytes: randomBytes$1, + }; + } + function createCurve(curveDef, defHash) { + const create = (hash) => weierstrass({ ...curveDef, ...getHash(hash) }); + return Object.freeze({ ...create(defHash), create }); + } + + /*! 
noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ + const secp256k1P = BigInt('0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f'); + const secp256k1N = BigInt('0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141'); + const _1n = BigInt(1); + const _2n = BigInt(2); + const divNearest = (a, b) => (a + b / _2n) / b; + /** + * โˆšn = n^((p+1)/4) for fields p = 3 mod 4. We unwrap the loop and multiply bit-by-bit. + * (P+1n/4n).toString(2) would produce bits [223x 1, 0, 22x 1, 4x 0, 11, 00] + */ + function sqrtMod(y) { + const P = secp256k1P; + // prettier-ignore + const _3n = BigInt(3), _6n = BigInt(6), _11n = BigInt(11), _22n = BigInt(22); + // prettier-ignore + const _23n = BigInt(23), _44n = BigInt(44), _88n = BigInt(88); + const b2 = (y * y * y) % P; // x^3, 11 + const b3 = (b2 * b2 * y) % P; // x^7 + const b6 = (pow2(b3, _3n, P) * b3) % P; + const b9 = (pow2(b6, _3n, P) * b3) % P; + const b11 = (pow2(b9, _2n, P) * b2) % P; + const b22 = (pow2(b11, _11n, P) * b11) % P; + const b44 = (pow2(b22, _22n, P) * b22) % P; + const b88 = (pow2(b44, _44n, P) * b44) % P; + const b176 = (pow2(b88, _88n, P) * b88) % P; + const b220 = (pow2(b176, _44n, P) * b44) % P; + const b223 = (pow2(b220, _3n, P) * b3) % P; + const t1 = (pow2(b223, _23n, P) * b22) % P; + const t2 = (pow2(t1, _6n, P) * b2) % P; + const root = pow2(t2, _2n, P); + if (!Fp.eql(Fp.sqr(root), y)) + throw new Error('Cannot find square root'); + return root; + } + const Fp = Field(secp256k1P, undefined, undefined, { sqrt: sqrtMod }); + const secp256k1 = createCurve({ + a: BigInt(0), // equation params: a, b + b: BigInt(7), // Seem to be rigid: bitcointalk.org/index.php?topic=289795.msg3183975#msg3183975 + Fp, // Field's prime: 2n**256n - 2n**32n - 2n**9n - 2n**8n - 2n**7n - 2n**6n - 2n**4n - 1n + n: secp256k1N, // Curve order, total count of valid points in the field + // Base point (x, y) aka generator point + Gx: 
BigInt('55066263022277343669578718895168534326250603453777594175500187360389116729240'), + Gy: BigInt('32670510020758816978083085130507043184471273380659243275938904335757337482424'), + h: BigInt(1), // Cofactor + lowS: true, // Allow only low-S signatures by default in sign() and verify() + /** + * secp256k1 belongs to Koblitz curves: it has efficiently computable endomorphism. + * Endomorphism uses 2x less RAM, speeds up precomputation by 2x and ECDH / key recovery by 20%. + * For precomputed wNAF it trades off 1/2 init time & 1/3 ram for 20% perf hit. + * Explanation: https://gist.github.com/paulmillr/eb670806793e84df628a7c434a873066 + */ + endo: { + beta: BigInt('0x7ae96a2b657c07106e64479eac3434e99cf0497512f58995c1396c28719501ee'), + splitScalar: (k) => { + const n = secp256k1N; + const a1 = BigInt('0x3086d221a7d46bcde86c90e49284eb15'); + const b1 = -_1n * BigInt('0xe4437ed6010e88286f547fa90abfe4c3'); + const a2 = BigInt('0x114ca50f7a8e2f3f657c1108d9d44cfd8'); + const b2 = a1; + const POW_2_128 = BigInt('0x100000000000000000000000000000000'); // (2n**128n).toString(16) + const c1 = divNearest(b2 * k, n); + const c2 = divNearest(-b1 * k, n); + let k1 = mod(k - c1 * a1 - c2 * a2, n); + let k2 = mod(-c1 * b1 - c2 * b2, n); + const k1neg = k1 > POW_2_128; + const k2neg = k2 > POW_2_128; + if (k1neg) + k1 = n - k1; + if (k2neg) + k2 = n - k2; + if (k1 > POW_2_128 || k2 > POW_2_128) { + throw new Error('splitScalar: Endomorphism failed, k=' + k); + } + return { k1neg, k1, k2neg, k2 }; + }, + }, + }, sha256); + // Schnorr signatures are superior to ECDSA from above. Below is Schnorr-specific BIP0340 code. + // https://github.com/bitcoin/bips/blob/master/bip-0340.mediawiki + BigInt(0); + secp256k1.ProjectivePoint; + + function generateKey() { + return secp256k1.utils.randomPrivateKey(); + } + /** + * Hash and sign message with private key + */ + function hashAndSign(key, msg) { + const p = sha256$1.digest(msg instanceof Uint8Array ? 
msg : msg.subarray()); + if (isPromise$4(p)) { + return p.then(({ digest }) => secp256k1.sign(digest, key).toDERRawBytes()) + .catch(err => { + throw new CodeError$2(String(err), 'ERR_INVALID_INPUT'); + }); + } + try { + return secp256k1.sign(p.digest, key).toDERRawBytes(); + } + catch (err) { + throw new CodeError$2(String(err), 'ERR_INVALID_INPUT'); + } + } + /** + * Hash message and verify signature with public key + */ + function hashAndVerify(key, sig, msg) { + const p = sha256$1.digest(msg instanceof Uint8Array ? msg : msg.subarray()); + if (isPromise$4(p)) { + return p.then(({ digest }) => secp256k1.verify(sig, digest, key)) + .catch(err => { + throw new CodeError$2(String(err), 'ERR_INVALID_INPUT'); + }); + } + try { + return secp256k1.verify(sig, p.digest, key); + } + catch (err) { + throw new CodeError$2(String(err), 'ERR_INVALID_INPUT'); + } + } + function compressPublicKey(key) { + const point = secp256k1.ProjectivePoint.fromHex(key).toRawBytes(true); + return point; + } + function validatePrivateKey(key) { + try { + secp256k1.getPublicKey(key, true); + } + catch (err) { + throw new CodeError$2(String(err), 'ERR_INVALID_PRIVATE_KEY'); + } + } + function validatePublicKey(key) { + try { + secp256k1.ProjectivePoint.fromHex(key); + } + catch (err) { + throw new CodeError$2(String(err), 'ERR_INVALID_PUBLIC_KEY'); + } + } + function computePublicKey(privateKey) { + try { + return secp256k1.getPublicKey(privateKey, true); + } + catch (err) { + throw new CodeError$2(String(err), 'ERR_INVALID_PRIVATE_KEY'); + } + } + + class Secp256k1PublicKey { + _key; + constructor(key) { + validatePublicKey(key); + this._key = key; + } + verify(data, sig) { + return hashAndVerify(this._key, sig, data); + } + marshal() { + return compressPublicKey(this._key); + } + get bytes() { + return PublicKey.encode({ + Type: KeyType.Secp256k1, + Data: this.marshal() + }).subarray(); + } + equals(key) { + return equals(this.bytes, key.bytes); + } + async hash() { + const p = 
sha256$1.digest(this.bytes); + let bytes; + if (isPromise$4(p)) { + ({ bytes } = await p); + } + else { + bytes = p.bytes; + } + return bytes; + } + } + class Secp256k1PrivateKey { + _key; + _publicKey; + constructor(key, publicKey) { + this._key = key; + this._publicKey = publicKey ?? computePublicKey(key); + validatePrivateKey(this._key); + validatePublicKey(this._publicKey); + } + sign(message) { + return hashAndSign(this._key, message); + } + get public() { + return new Secp256k1PublicKey(this._publicKey); + } + marshal() { + return this._key; + } + get bytes() { + return PrivateKey.encode({ + Type: KeyType.Secp256k1, + Data: this.marshal() + }).subarray(); + } + equals(key) { + return equals(this.bytes, key.bytes); + } + hash() { + const p = sha256$1.digest(this.bytes); + if (isPromise$4(p)) { + return p.then(({ bytes }) => bytes); + } + return p.bytes; + } + /** + * Gets the ID of the key. + * + * The key id is the base58 encoding of the SHA-256 multihash of its public key. + * The public key is a protobuf encoding containing a type and the DER encoding + * of the PKCS SubjectPublicKeyInfo. 
+ */ + async id() { + const hash = await this.public.hash(); + return toString$1(hash, 'base58btc'); + } + /** + * Exports the key into a password protected `format` + */ + async export(password, format = 'libp2p-key') { + if (format === 'libp2p-key') { + return exporter(this.bytes, password); + } + else { + throw new CodeError$2(`export format '${format}' is not supported`, 'ERR_INVALID_EXPORT_FORMAT'); + } + } + } + function unmarshalSecp256k1PrivateKey(bytes) { + return new Secp256k1PrivateKey(bytes); + } + function unmarshalSecp256k1PublicKey(bytes) { + return new Secp256k1PublicKey(bytes); + } + async function generateKeyPair$1() { + const privateKeyBytes = generateKey(); + return new Secp256k1PrivateKey(privateKeyBytes); + } + + var Secp256k1 = /*#__PURE__*/Object.freeze({ + __proto__: null, + Secp256k1PrivateKey: Secp256k1PrivateKey, + Secp256k1PublicKey: Secp256k1PublicKey, + generateKeyPair: generateKeyPair$1, + unmarshalSecp256k1PrivateKey: unmarshalSecp256k1PrivateKey, + unmarshalSecp256k1PublicKey: unmarshalSecp256k1PublicKey + }); + + /** + * @packageDocumentation + * + * **Supported Key Types** + * + * The {@link generateKeyPair}, {@link marshalPublicKey}, and {@link marshalPrivateKey} functions accept a string `type` argument. + * + * Currently the `'RSA'`, `'ed25519'`, and `secp256k1` types are supported, although ed25519 and secp256k1 keys support only signing and verification of messages. + * + * For encryption / decryption support, RSA keys should be used. + */ + const supportedKeys = { + rsa: RSA, + ed25519: Ed25519, + secp256k1: Secp256k1 + }; + function unsupportedKey(type) { + const supported = Object.keys(supportedKeys).join(' / '); + return new CodeError$2(`invalid or unsupported key type ${type}. 
Must be ${supported}`, 'ERR_UNSUPPORTED_KEY_TYPE'); + } + function typeToKey(type) { + type = type.toLowerCase(); + if (type === 'rsa' || type === 'ed25519' || type === 'secp256k1') { + return supportedKeys[type]; + } + throw unsupportedKey(type); + } + /** + * Generates a keypair of the given type and bitsize + */ + async function generateKeyPair(type, bits) { + return typeToKey(type).generateKeyPair(2048); + } + /** + * Converts a protobuf serialized public key into its representative object + */ + function unmarshalPublicKey(buf) { + const decoded = PublicKey.decode(buf); + const data = decoded.Data ?? new Uint8Array(); + switch (decoded.Type) { + case KeyType.RSA: + return supportedKeys.rsa.unmarshalRsaPublicKey(data); + case KeyType.Ed25519: + return supportedKeys.ed25519.unmarshalEd25519PublicKey(data); + case KeyType.Secp256k1: + return supportedKeys.secp256k1.unmarshalSecp256k1PublicKey(data); + default: + throw unsupportedKey(decoded.Type ?? 'unknown'); + } + } + /** + * Converts a public key object into a protobuf serialized public key + */ + function marshalPublicKey(key, type) { + type = (type ?? 'rsa').toLowerCase(); + typeToKey(type); // check type + return key.bytes; + } + /** + * Converts a protobuf serialized private key into its representative object + */ + async function unmarshalPrivateKey(buf) { + const decoded = PrivateKey.decode(buf); + const data = decoded.Data ?? new Uint8Array(); + switch (decoded.Type) { + case KeyType.RSA: + return supportedKeys.rsa.unmarshalRsaPrivateKey(data); + case KeyType.Ed25519: + return supportedKeys.ed25519.unmarshalEd25519PrivateKey(data); + case KeyType.Secp256k1: + return supportedKeys.secp256k1.unmarshalSecp256k1PrivateKey(data); + default: + throw unsupportedKey(decoded.Type ?? 'RSA'); + } + } + /** + * Converts a private key object into a protobuf serialized private key + */ + function marshalPrivateKey(key, type) { + type = (type ?? 
'rsa').toLowerCase(); + typeToKey(type); // check type + return key.bytes; + } + + var browser = {exports: {}}; + + /** + * Helpers. + */ + + var ms; + var hasRequiredMs; + + function requireMs () { + if (hasRequiredMs) return ms; + hasRequiredMs = 1; + var s = 1000; + var m = s * 60; + var h = m * 60; + var d = h * 24; + var w = d * 7; + var y = d * 365.25; + + /** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + + ms = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); + }; + + /** + * Parse the given `str` and return milliseconds. 
+ * + * @param {String} str + * @return {Number} + * @api private + */ + + function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'weeks': + case 'week': + case 'w': + return n * w; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } + } + + /** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + + function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; + } + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; + } + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; + } + if (msAbs >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; + } + + /** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + + function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); + } + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); + } + return ms + ' ms'; + } + + /** + * Pluralization helper. 
+ */ + + function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); + } + return ms; + } + + /** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. + */ + + function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = requireMs(); + createDebug.destroy = destroy; + + Object.keys(env).forEach(key => { + createDebug[key] = env[key]; + }); + + /** + * The currently active debug mode names, and names to skip. + */ + + createDebug.names = []; + createDebug.skips = []; + + /** + * Map of special "%n" handling functions, for the debug "format" argument. + * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". + */ + createDebug.formatters = {}; + + /** + * Selects a color for a debug namespace + * @param {String} namespace The namespace string for the debug instance to be colored + * @return {Number|String} An ANSI color code for the given namespace + * @api private + */ + function selectColor(namespace) { + let hash = 0; + + for (let i = 0; i < namespace.length; i++) { + hash = ((hash << 5) - hash) + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } + + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + createDebug.selectColor = selectColor; + + /** + * Create a debugger with the given `namespace`. + * + * @param {String} namespace + * @return {Function} + * @api public + */ + function createDebug(namespace) { + let prevTime; + let enableOverride = null; + let namespacesCache; + let enabledCache; + + function debug(...args) { + // Disabled? 
+ if (!debug.enabled) { + return; + } + + const self = debug; + + // Set `diff` timestamp + const curr = Number(new Date()); + const ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + + args[0] = createDebug.coerce(args[0]); + + if (typeof args[0] !== 'string') { + // Anything else let's inspect with %O + args.unshift('%O'); + } + + // Apply any `formatters` transformations + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { + // If we encounter an escaped % then don't increase the array index + if (match === '%%') { + return '%'; + } + index++; + const formatter = createDebug.formatters[format]; + if (typeof formatter === 'function') { + const val = args[index]; + match = formatter.call(self, val); + + // Now we need to remove `args[index]` since it's inlined in the `format` + args.splice(index, 1); + index--; + } + return match; + }); + + // Apply env-specific formatting (colors, etc.) + createDebug.formatArgs.call(self, args); + + const logFn = self.log || createDebug.log; + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.useColors = createDebug.useColors(); + debug.color = createDebug.selectColor(namespace); + debug.extend = extend; + debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. 
+ + Object.defineProperty(debug, 'enabled', { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } + + return enabledCache; + }, + set: v => { + enableOverride = v; + } + }); + + // Env-specific initialization logic for debug instances + if (typeof createDebug.init === 'function') { + createDebug.init(debug); + } + + return debug; + } + + function extend(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; + } + + /** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. + * + * @param {String} namespaces + * @api public + */ + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; + + createDebug.names = []; + createDebug.skips = []; + + let i; + const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/); + const len = split.length; + + for (i = 0; i < len; i++) { + if (!split[i]) { + // ignore empty strings + continue; + } + + namespaces = split[i].replace(/\*/g, '.*?'); + + if (namespaces[0] === '-') { + createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); + } else { + createDebug.names.push(new RegExp('^' + namespaces + '$')); + } + } + } + + /** + * Disable debug output. + * + * @return {String} namespaces + * @api public + */ + function disable() { + const namespaces = [ + ...createDebug.names.map(toNamespace), + ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) + ].join(','); + createDebug.enable(''); + return namespaces; + } + + /** + * Returns true if the given mode name is enabled, false otherwise. 
+ * + * @param {String} name + * @return {Boolean} + * @api public + */ + function enabled(name) { + if (name[name.length - 1] === '*') { + return true; + } + + let i; + let len; + + for (i = 0, len = createDebug.skips.length; i < len; i++) { + if (createDebug.skips[i].test(name)) { + return false; + } + } + + for (i = 0, len = createDebug.names.length; i < len; i++) { + if (createDebug.names[i].test(name)) { + return true; + } + } + + return false; + } + + /** + * Convert regexp to namespace + * + * @param {RegExp} regxep + * @return {String} namespace + * @api private + */ + function toNamespace(regexp) { + return regexp.toString() + .substring(2, regexp.toString().length - 2) + .replace(/\.\*\?$/, '*'); + } + + /** + * Coerce `val`. + * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + return val; + } + + /** + * XXX DO NOT USE. This is a temporary stub function. + * XXX It WILL be removed in the next major release. + */ + function destroy() { + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + + createDebug.enable(createDebug.load()); + + return createDebug; + } + + var common = setup; + + /* eslint-env browser */ + + (function (module, exports) { + /** + * This is the web browser implementation of `debug()`. + */ + + exports.formatArgs = formatArgs; + exports.save = save; + exports.load = load; + exports.useColors = useColors; + exports.storage = localstorage(); + exports.destroy = (() => { + let warned = false; + + return () => { + if (!warned) { + warned = true; + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + }; + })(); + + /** + * Colors. 
+ */ + + exports.colors = [ + '#0000CC', + '#0000FF', + '#0033CC', + '#0033FF', + '#0066CC', + '#0066FF', + '#0099CC', + '#0099FF', + '#00CC00', + '#00CC33', + '#00CC66', + '#00CC99', + '#00CCCC', + '#00CCFF', + '#3300CC', + '#3300FF', + '#3333CC', + '#3333FF', + '#3366CC', + '#3366FF', + '#3399CC', + '#3399FF', + '#33CC00', + '#33CC33', + '#33CC66', + '#33CC99', + '#33CCCC', + '#33CCFF', + '#6600CC', + '#6600FF', + '#6633CC', + '#6633FF', + '#66CC00', + '#66CC33', + '#9900CC', + '#9900FF', + '#9933CC', + '#9933FF', + '#99CC00', + '#99CC33', + '#CC0000', + '#CC0033', + '#CC0066', + '#CC0099', + '#CC00CC', + '#CC00FF', + '#CC3300', + '#CC3333', + '#CC3366', + '#CC3399', + '#CC33CC', + '#CC33FF', + '#CC6600', + '#CC6633', + '#CC9900', + '#CC9933', + '#CCCC00', + '#CCCC33', + '#FF0000', + '#FF0033', + '#FF0066', + '#FF0099', + '#FF00CC', + '#FF00FF', + '#FF3300', + '#FF3333', + '#FF3366', + '#FF3399', + '#FF33CC', + '#FF33FF', + '#FF6600', + '#FF6633', + '#FF9900', + '#FF9933', + '#FFCC00', + '#FFCC33' + ]; + + /** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ + + // eslint-disable-next-line complexity + function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { + return true; + } + + // Internet Explorer and Edge do not support colors. + if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } + + // Is webkit? 
http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || + // Is firebug? http://stackoverflow.com/a/398120/376773 + (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || + // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || + // Double check webkit in userAgent just in case we are in a worker + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); + } + + /** + * Colorize log arguments if enabled. + * + * @api public + */ + + function formatArgs(args) { + args[0] = (this.useColors ? '%c' : '') + + this.namespace + + (this.useColors ? ' %c' : ' ') + + args[0] + + (this.useColors ? '%c ' : ' ') + + '+' + module.exports.humanize(this.diff); + + if (!this.useColors) { + return; + } + + const c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit'); + + // The final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, match => { + if (match === '%%') { + return; + } + index++; + if (match === '%c') { + // We only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + + args.splice(lastC, 0, c); + } + + /** + * Invokes `console.debug()` when available. + * No-op when `console.debug` is not a "function". 
+ * If `console.debug` is not available, falls back + * to `console.log`. + * + * @api public + */ + exports.log = console.debug || console.log || (() => {}); + + /** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ + function save(namespaces) { + try { + if (namespaces) { + exports.storage.setItem('debug', namespaces); + } else { + exports.storage.removeItem('debug'); + } + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } + } + + /** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ + function load() { + let r; + try { + r = exports.storage.getItem('debug'); + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } + + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } + + return r; + } + + /** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. + * + * @return {LocalStorage} + * @api private + */ + + function localstorage() { + try { + // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context + // The Browser also has localStorage in the global context. + return localStorage; + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } + } + + module.exports = common(exports); + + const {formatters} = module.exports; + + /** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. 
+ */ + + formatters.j = function (v) { + try { + return JSON.stringify(v); + } catch (error) { + return '[UnexpectedJSONParseError]: ' + error.message; + } + }; + } (browser, browser.exports)); + + var browserExports = browser.exports; + var debug = /*@__PURE__*/getDefaultExportFromCjs(browserExports); + + /** + * @packageDocumentation + * + * A logger for libp2p based on the venerable [debug](https://www.npmjs.com/package/debug) module. + * + * @example + * + * ```TypeScript + * import { logger } from '@libp2p/logger' + * + * const log = logger('libp2p:my:component:name') + * + * try { + * // an operation + * log('something happened: %s', 'it was ok') + * } catch (err) { + * log.error('something bad happened: %o', err) + * } + * + * log('with this peer: %p', {}) + * log('and this base58btc: %b', Uint8Array.from([0, 1, 2, 3])) + * log('and this base32: %t', Uint8Array.from([4, 5, 6, 7])) + * ``` + * + * ```console + * $ DEBUG=libp2p:* node index.js + * something happened: it was ok + * something bad happened: + * with this peer: 12D3Foo + * with this base58btc: Qmfoo + * with this base32: bafyfoo + * ``` + */ + // Add a formatter for converting to a base58 string + debug.formatters.b = (v) => { + return v == null ? 'undefined' : base58btc.baseEncode(v); + }; + // Add a formatter for converting to a base32 string + debug.formatters.t = (v) => { + return v == null ? 'undefined' : base32.baseEncode(v); + }; + // Add a formatter for converting to a base64 string + debug.formatters.m = (v) => { + return v == null ? 'undefined' : base64.baseEncode(v); + }; + // Add a formatter for stringifying peer ids + debug.formatters.p = (v) => { + return v == null ? 'undefined' : v.toString(); + }; + // Add a formatter for stringifying CIDs + debug.formatters.c = (v) => { + return v == null ? 'undefined' : v.toString(); + }; + // Add a formatter for stringifying Datastore keys + debug.formatters.k = (v) => { + return v == null ? 
'undefined' : v.toString(); + }; + // Add a formatter for stringifying Multiaddrs + debug.formatters.a = (v) => { + return v == null ? 'undefined' : v.toString(); + }; + function createDisabledLogger(namespace) { + const logger = () => { }; + logger.enabled = false; + logger.color = ''; + logger.diff = 0; + logger.log = () => { }; + logger.namespace = namespace; + logger.destroy = () => true; + logger.extend = () => logger; + return logger; + } + /** + * Create a component logger + * + * @example + * + * ```TypeScript + * import { defaultLogger } from '@libp2p/logger' + * import { peerIdFromString } from '@libp2p/peer-id' + * + * const logger = defaultLogger() + * + * const log = logger.forComponent('my-component') + * log.info('hello world') + * // logs "my-component hello world" + * ``` + */ + function defaultLogger() { + return { + forComponent(name) { + return logger(name); + } + }; + } + /** + * Creates a logger for the passed component name. + * + * @example + * + * ```TypeScript + * import { logger } from '@libp2p/logger' + * + * const log = logger('my-component') + * log.info('hello world') + * // logs "my-component hello world" + * ``` + */ + function logger(name) { + // trace logging is a no-op by default + let trace = createDisabledLogger(`${name}:trace`); + // look at all the debug names and see if trace logging has explicitly been enabled + if (debug.enabled(`${name}:trace`) && debug.names.map(r => r.toString()).find(n => n.includes(':trace')) != null) { + trace = debug(`${name}:trace`); + } + return Object.assign(debug(name), { + error: debug(`${name}:error`), + trace + }); + } + + /** + * @packageDocumentation + * + * An implementation of a peer id + * + * @example + * + * ```TypeScript + * import { peerIdFromString } from '@libp2p/peer-id' + * const peer = peerIdFromString('k51qzi5uqu5dkwkqm42v9j9kqcam2jiuvloi16g72i4i4amoo2m8u3ol3mqu6s') + * + * console.log(peer.toCID()) // CID(bafzaa...) + * console.log(peer.toString()) // "12D3K..." 
+ * ``` + */ + const inspect$1 = Symbol.for('nodejs.util.inspect.custom'); + const baseDecoder = Object + .values(bases) + .map(codec => codec.decoder) + // @ts-expect-error https://github.com/multiformats/js-multiformats/issues/141 + .reduce((acc, curr) => acc.or(curr), bases.identity.decoder); + // these values are from https://github.com/multiformats/multicodec/blob/master/table.csv + const LIBP2P_KEY_CODE = 0x72; + const MARSHALLED_ED225519_PUBLIC_KEY_LENGTH = 36; + const MARSHALLED_SECP256K1_PUBLIC_KEY_LENGTH = 37; + class PeerIdImpl { + type; + multihash; + privateKey; + publicKey; + string; + constructor(init) { + this.type = init.type; + this.multihash = init.multihash; + this.privateKey = init.privateKey; + // mark string cache as non-enumerable + Object.defineProperty(this, 'string', { + enumerable: false, + writable: true + }); + } + get [Symbol.toStringTag]() { + return `PeerId(${this.toString()})`; + } + [peerIdSymbol] = true; + toString() { + if (this.string == null) { + this.string = base58btc.encode(this.multihash.bytes).slice(1); + } + return this.string; + } + // return self-describing String representation + // in default format from RFC 0001: https://github.com/libp2p/specs/pull/209 + toCID() { + return CID.createV1(LIBP2P_KEY_CODE, this.multihash); + } + toBytes() { + return this.multihash.bytes; + } + /** + * Returns Multiaddr as a JSON string + */ + toJSON() { + return this.toString(); + } + /** + * Checks the equality of `this` peer against a given PeerId + */ + equals(id) { + if (id == null) { + return false; + } + if (id instanceof Uint8Array) { + return equals(this.multihash.bytes, id); + } + else if (typeof id === 'string') { + return peerIdFromString(id).equals(this); + } + else if (id?.multihash?.bytes != null) { + return equals(this.multihash.bytes, id.multihash.bytes); + } + else { + throw new Error('not valid Id'); + } + } + /** + * Returns PeerId as a human-readable string + * https://nodejs.org/api/util.html#utilinspectcustom + * 
+ * @example + * ```TypeScript + * import { peerIdFromString } from '@libp2p/peer-id' + * + * console.info(peerIdFromString('QmFoo')) + * // 'PeerId(QmFoo)' + * ``` + */ + [inspect$1]() { + return `PeerId(${this.toString()})`; + } + } + class RSAPeerIdImpl extends PeerIdImpl { + type = 'RSA'; + publicKey; + constructor(init) { + super({ ...init, type: 'RSA' }); + this.publicKey = init.publicKey; + } + } + class Ed25519PeerIdImpl extends PeerIdImpl { + type = 'Ed25519'; + publicKey; + constructor(init) { + super({ ...init, type: 'Ed25519' }); + this.publicKey = init.multihash.digest; + } + } + class Secp256k1PeerIdImpl extends PeerIdImpl { + type = 'secp256k1'; + publicKey; + constructor(init) { + super({ ...init, type: 'secp256k1' }); + this.publicKey = init.multihash.digest; + } + } + function peerIdFromPeerId(other) { + if (other.type === 'RSA') { + return new RSAPeerIdImpl(other); + } + if (other.type === 'Ed25519') { + return new Ed25519PeerIdImpl(other); + } + if (other.type === 'secp256k1') { + return new Secp256k1PeerIdImpl(other); + } + throw new CodeError$2('Not a PeerId', 'ERR_INVALID_PARAMETERS'); + } + function peerIdFromString(str, decoder) { + decoder = decoder ?? 
baseDecoder; + if (str.charAt(0) === '1' || str.charAt(0) === 'Q') { + // identity hash ed25519/secp256k1 key or sha2-256 hash of + // rsa public key - base58btc encoded either way + const multihash = decode$1(base58btc.decode(`z${str}`)); + if (str.startsWith('12D')) { + return new Ed25519PeerIdImpl({ multihash }); + } + else if (str.startsWith('16U')) { + return new Secp256k1PeerIdImpl({ multihash }); + } + else { + return new RSAPeerIdImpl({ multihash }); + } + } + return peerIdFromBytes(baseDecoder.decode(str)); + } + function peerIdFromBytes(buf) { + try { + const multihash = decode$1(buf); + if (multihash.code === identity.code) { + if (multihash.digest.length === MARSHALLED_ED225519_PUBLIC_KEY_LENGTH) { + return new Ed25519PeerIdImpl({ multihash }); + } + else if (multihash.digest.length === MARSHALLED_SECP256K1_PUBLIC_KEY_LENGTH) { + return new Secp256k1PeerIdImpl({ multihash }); + } + } + if (multihash.code === sha256$1.code) { + return new RSAPeerIdImpl({ multihash }); + } + } + catch { + return peerIdFromCID(CID.decode(buf)); + } + throw new Error('Supplied PeerID CID is invalid'); + } + function peerIdFromCID(cid) { + if (cid == null || cid.multihash == null || cid.version == null || (cid.version === 1 && cid.code !== LIBP2P_KEY_CODE)) { + throw new Error('Supplied PeerID CID is invalid'); + } + const multihash = cid.multihash; + if (multihash.code === sha256$1.code) { + return new RSAPeerIdImpl({ multihash: cid.multihash }); + } + else if (multihash.code === identity.code) { + if (multihash.digest.length === MARSHALLED_ED225519_PUBLIC_KEY_LENGTH) { + return new Ed25519PeerIdImpl({ multihash: cid.multihash }); + } + else if (multihash.digest.length === MARSHALLED_SECP256K1_PUBLIC_KEY_LENGTH) { + return new Secp256k1PeerIdImpl({ multihash: cid.multihash }); + } + } + throw new Error('Supplied PeerID CID is invalid'); + } + /** + * @param publicKey - A marshalled public key + * @param privateKey - A marshalled private key + */ + async function 
peerIdFromKeys(publicKey, privateKey) { + if (publicKey.length === MARSHALLED_ED225519_PUBLIC_KEY_LENGTH) { + return new Ed25519PeerIdImpl({ multihash: create$1(identity.code, publicKey), privateKey }); + } + if (publicKey.length === MARSHALLED_SECP256K1_PUBLIC_KEY_LENGTH) { + return new Secp256k1PeerIdImpl({ multihash: create$1(identity.code, publicKey), privateKey }); + } + return new RSAPeerIdImpl({ multihash: await sha256$1.digest(publicKey), publicKey, privateKey }); + } + + /** + * Calls the passed map function on every entry of the passed iterable iterator + */ + function mapIterable(iter, map) { + const iterator = { + [Symbol.iterator]: () => { + return iterator; + }, + next: () => { + const next = iter.next(); + const val = next.value; + if (next.done === true || val == null) { + const result = { + done: true, + value: undefined + }; + return result; + } + return { + done: false, + value: map(val) + }; + } + }; + return iterator; + } + + /** + * We can't use PeerIds as map keys because map keys are + * compared using same-value-zero equality, so this is just + * a map that stringifies the PeerIds before storing them. + * + * PeerIds cache stringified versions of themselves so this + * should be a cheap operation. 
+ * + * @example + * + * ```TypeScript + * import { peerMap } from '@libp2p/peer-collections' + * + * const map = peerMap() + * map.set(peerId, 'value') + * ``` + */ + class PeerMap { + map; + constructor(map) { + this.map = new Map(); + if (map != null) { + for (const [key, value] of map.entries()) { + this.map.set(key.toString(), value); + } + } + } + [Symbol.iterator]() { + return this.entries(); + } + clear() { + this.map.clear(); + } + delete(peer) { + return this.map.delete(peer.toString()); + } + entries() { + return mapIterable(this.map.entries(), (val) => { + return [peerIdFromString(val[0]), val[1]]; + }); + } + forEach(fn) { + this.map.forEach((value, key) => { + fn(value, peerIdFromString(key), this); + }); + } + get(peer) { + return this.map.get(peer.toString()); + } + has(peer) { + return this.map.has(peer.toString()); + } + set(peer, value) { + this.map.set(peer.toString(), value); + } + keys() { + return mapIterable(this.map.keys(), (val) => { + return peerIdFromString(val); + }); + } + values() { + return this.map.values(); + } + get size() { + return this.map.size; + } + } + + /** + * We can't use PeerIds as set entries because set entries are + * compared using same-value-zero equality, so this is just + * a map that stringifies the PeerIds before storing them. + * + * PeerIds cache stringified versions of themselves so this + * should be a cheap operation. 
+ * + * @example + * + * ```TypeScript + * import { peerSet } from '@libp2p/peer-collections' + * + * const set = peerSet() + * set.add(peerId) + * ``` + */ + class PeerSet { + set; + constructor(set) { + this.set = new Set(); + if (set != null) { + for (const key of set) { + this.set.add(key.toString()); + } + } + } + get size() { + return this.set.size; + } + [Symbol.iterator]() { + return this.values(); + } + add(peer) { + this.set.add(peer.toString()); + } + clear() { + this.set.clear(); + } + delete(peer) { + this.set.delete(peer.toString()); + } + entries() { + return mapIterable(this.set.entries(), (val) => { + const peerId = peerIdFromString(val[0]); + return [peerId, peerId]; + }); + } + forEach(predicate) { + this.set.forEach((str) => { + const id = peerIdFromString(str); + predicate(id, id, this); + }); + } + has(peer) { + return this.set.has(peer.toString()); + } + values() { + return mapIterable(this.set.values(), (val) => { + return peerIdFromString(val); + }); + } + intersection(other) { + const output = new PeerSet(); + for (const peerId of other) { + if (this.has(peerId)) { + output.add(peerId); + } + } + return output; + } + difference(other) { + const output = new PeerSet(); + for (const peerId of this) { + if (!other.has(peerId)) { + output.add(peerId); + } + } + return output; + } + union(other) { + const output = new PeerSet(); + for (const peerId of other) { + output.add(peerId); + } + for (const peerId of this) { + output.add(peerId); + } + return output; + } + } + + /** + * @packageDocumentation + * + * A class that lets you do operations over a list of Uint8Arrays without + * copying them. 
+ * + * ```js + * import { Uint8ArrayList } from 'uint8arraylist' + * + * const list = new Uint8ArrayList() + * list.append(Uint8Array.from([0, 1, 2])) + * list.append(Uint8Array.from([3, 4, 5])) + * + * list.subarray() + * // -> Uint8Array([0, 1, 2, 3, 4, 5]) + * + * list.consume(3) + * list.subarray() + * // -> Uint8Array([3, 4, 5]) + * + * // you can also iterate over the list + * for (const buf of list) { + * // ..do something with `buf` + * } + * + * list.subarray(0, 1) + * // -> Uint8Array([0]) + * ``` + * + * ## Converting Uint8ArrayLists to Uint8Arrays + * + * There are two ways to turn a `Uint8ArrayList` into a `Uint8Array` - `.slice` and `.subarray` and one way to turn a `Uint8ArrayList` into a `Uint8ArrayList` with different contents - `.sublist`. + * + * ### slice + * + * Slice follows the same semantics as [Uint8Array.slice](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/slice) in that it creates a new `Uint8Array` and copies bytes into it using an optional offset & length. + * + * ```js + * const list = new Uint8ArrayList() + * list.append(Uint8Array.from([0, 1, 2])) + * list.append(Uint8Array.from([3, 4, 5])) + * + * list.slice(0, 1) + * // -> Uint8Array([0]) + * ``` + * + * ### subarray + * + * Subarray attempts to follow the same semantics as [Uint8Array.subarray](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/subarray) with one important different - this is a no-copy operation, unless the requested bytes span two internal buffers in which case it is a copy operation. 
+ * + * ```js + * const list = new Uint8ArrayList() + * list.append(Uint8Array.from([0, 1, 2])) + * list.append(Uint8Array.from([3, 4, 5])) + * + * list.subarray(0, 1) + * // -> Uint8Array([0]) - no-copy + * + * list.subarray(2, 5) + * // -> Uint8Array([2, 3, 4]) - copy + * ``` + * + * ### sublist + * + * Sublist creates and returns a new `Uint8ArrayList` that shares the underlying buffers with the original so is always a no-copy operation. + * + * ```js + * const list = new Uint8ArrayList() + * list.append(Uint8Array.from([0, 1, 2])) + * list.append(Uint8Array.from([3, 4, 5])) + * + * list.sublist(0, 1) + * // -> Uint8ArrayList([0]) - no-copy + * + * list.sublist(2, 5) + * // -> Uint8ArrayList([2], [3, 4]) - no-copy + * ``` + * + * ## Inspiration + * + * Borrows liberally from [bl](https://www.npmjs.com/package/bl) but only uses native JS types. + */ + const symbol$1 = Symbol.for('@achingbrain/uint8arraylist'); + function findBufAndOffset(bufs, index) { + if (index == null || index < 0) { + throw new RangeError('index is out of bounds'); + } + let offset = 0; + for (const buf of bufs) { + const bufEnd = offset + buf.byteLength; + if (index < bufEnd) { + return { + buf, + index: index - offset + }; + } + offset = bufEnd; + } + throw new RangeError('index is out of bounds'); + } + /** + * Check if object is a CID instance + * + * @example + * + * ```js + * import { isUint8ArrayList, Uint8ArrayList } from 'uint8arraylist' + * + * isUint8ArrayList(true) // false + * isUint8ArrayList([]) // false + * isUint8ArrayList(new Uint8ArrayList()) // true + * ``` + */ + function isUint8ArrayList(value) { + return Boolean(value?.[symbol$1]); + } + class Uint8ArrayList { + bufs; + length; + [symbol$1] = true; + constructor(...data) { + this.bufs = []; + this.length = 0; + if (data.length > 0) { + this.appendAll(data); + } + } + *[Symbol.iterator]() { + yield* this.bufs; + } + get byteLength() { + return this.length; + } + /** + * Add one or more `bufs` to the end of this 
Uint8ArrayList + */ + append(...bufs) { + this.appendAll(bufs); + } + /** + * Add all `bufs` to the end of this Uint8ArrayList + */ + appendAll(bufs) { + let length = 0; + for (const buf of bufs) { + if (buf instanceof Uint8Array) { + length += buf.byteLength; + this.bufs.push(buf); + } + else if (isUint8ArrayList(buf)) { + length += buf.byteLength; + this.bufs.push(...buf.bufs); + } + else { + throw new Error('Could not append value, must be an Uint8Array or a Uint8ArrayList'); + } + } + this.length += length; + } + /** + * Add one or more `bufs` to the start of this Uint8ArrayList + */ + prepend(...bufs) { + this.prependAll(bufs); + } + /** + * Add all `bufs` to the start of this Uint8ArrayList + */ + prependAll(bufs) { + let length = 0; + for (const buf of bufs.reverse()) { + if (buf instanceof Uint8Array) { + length += buf.byteLength; + this.bufs.unshift(buf); + } + else if (isUint8ArrayList(buf)) { + length += buf.byteLength; + this.bufs.unshift(...buf.bufs); + } + else { + throw new Error('Could not prepend value, must be an Uint8Array or a Uint8ArrayList'); + } + } + this.length += length; + } + /** + * Read the value at `index` + */ + get(index) { + const res = findBufAndOffset(this.bufs, index); + return res.buf[res.index]; + } + /** + * Set the value at `index` to `value` + */ + set(index, value) { + const res = findBufAndOffset(this.bufs, index); + res.buf[res.index] = value; + } + /** + * Copy bytes from `buf` to the index specified by `offset` + */ + write(buf, offset = 0) { + if (buf instanceof Uint8Array) { + for (let i = 0; i < buf.length; i++) { + this.set(offset + i, buf[i]); + } + } + else if (isUint8ArrayList(buf)) { + for (let i = 0; i < buf.length; i++) { + this.set(offset + i, buf.get(i)); + } + } + else { + throw new Error('Could not write value, must be an Uint8Array or a Uint8ArrayList'); + } + } + /** + * Remove bytes from the front of the pool + */ + consume(bytes) { + // first, normalize the argument, in accordance with how Buffer does 
it + bytes = Math.trunc(bytes); + // do nothing if not a positive number + if (Number.isNaN(bytes) || bytes <= 0) { + return; + } + // if consuming all bytes, skip iterating + if (bytes === this.byteLength) { + this.bufs = []; + this.length = 0; + return; + } + while (this.bufs.length > 0) { + if (bytes >= this.bufs[0].byteLength) { + bytes -= this.bufs[0].byteLength; + this.length -= this.bufs[0].byteLength; + this.bufs.shift(); + } + else { + this.bufs[0] = this.bufs[0].subarray(bytes); + this.length -= bytes; + break; + } + } + } + /** + * Extracts a section of an array and returns a new array. + * + * This is a copy operation as it is with Uint8Arrays and Arrays + * - note this is different to the behaviour of Node Buffers. + */ + slice(beginInclusive, endExclusive) { + const { bufs, length } = this._subList(beginInclusive, endExclusive); + return concat$1(bufs, length); + } + /** + * Returns a alloc from the given start and end element index. + * + * In the best case where the data extracted comes from a single Uint8Array + * internally this is a no-copy operation otherwise it is a copy operation. + */ + subarray(beginInclusive, endExclusive) { + const { bufs, length } = this._subList(beginInclusive, endExclusive); + if (bufs.length === 1) { + return bufs[0]; + } + return concat$1(bufs, length); + } + /** + * Returns a allocList from the given start and end element index. + * + * This is a no-copy operation. + */ + sublist(beginInclusive, endExclusive) { + const { bufs, length } = this._subList(beginInclusive, endExclusive); + const list = new Uint8ArrayList(); + list.length = length; + // don't loop, just set the bufs + list.bufs = [...bufs]; + return list; + } + _subList(beginInclusive, endExclusive) { + beginInclusive = beginInclusive ?? 0; + endExclusive = endExclusive ?? 
this.length; + if (beginInclusive < 0) { + beginInclusive = this.length + beginInclusive; + } + if (endExclusive < 0) { + endExclusive = this.length + endExclusive; + } + if (beginInclusive < 0 || endExclusive > this.length) { + throw new RangeError('index is out of bounds'); + } + if (beginInclusive === endExclusive) { + return { bufs: [], length: 0 }; + } + if (beginInclusive === 0 && endExclusive === this.length) { + return { bufs: this.bufs, length: this.length }; + } + const bufs = []; + let offset = 0; + for (let i = 0; i < this.bufs.length; i++) { + const buf = this.bufs[i]; + const bufStart = offset; + const bufEnd = bufStart + buf.byteLength; + // for next loop + offset = bufEnd; + if (beginInclusive >= bufEnd) { + // start after this buf + continue; + } + const sliceStartInBuf = beginInclusive >= bufStart && beginInclusive < bufEnd; + const sliceEndsInBuf = endExclusive > bufStart && endExclusive <= bufEnd; + if (sliceStartInBuf && sliceEndsInBuf) { + // slice is wholly contained within this buffer + if (beginInclusive === bufStart && endExclusive === bufEnd) { + // requested whole buffer + bufs.push(buf); + break; + } + // requested part of buffer + const start = beginInclusive - bufStart; + bufs.push(buf.subarray(start, start + (endExclusive - beginInclusive))); + break; + } + if (sliceStartInBuf) { + // slice starts in this buffer + if (beginInclusive === 0) { + // requested whole buffer + bufs.push(buf); + continue; + } + // requested part of buffer + bufs.push(buf.subarray(beginInclusive - bufStart)); + continue; + } + if (sliceEndsInBuf) { + if (endExclusive === bufEnd) { + // requested whole buffer + bufs.push(buf); + break; + } + // requested part of buffer + bufs.push(buf.subarray(0, endExclusive - bufStart)); + break; + } + // slice started before this buffer and ends after it + bufs.push(buf); + } + return { bufs, length: endExclusive - beginInclusive }; + } + indexOf(search, offset = 0) { + if (!isUint8ArrayList(search) && !(search instanceof 
Uint8Array)) { + throw new TypeError('The "value" argument must be a Uint8ArrayList or Uint8Array'); + } + const needle = search instanceof Uint8Array ? search : search.subarray(); + offset = Number(offset ?? 0); + if (isNaN(offset)) { + offset = 0; + } + if (offset < 0) { + offset = this.length + offset; + } + if (offset < 0) { + offset = 0; + } + if (search.length === 0) { + return offset > this.length ? this.length : offset; + } + // https://en.wikipedia.org/wiki/Boyer%E2%80%93Moore_string-search_algorithm + const M = needle.byteLength; + if (M === 0) { + throw new TypeError('search must be at least 1 byte long'); + } + // radix + const radix = 256; + const rightmostPositions = new Int32Array(radix); + // position of the rightmost occurrence of the byte c in the pattern + for (let c = 0; c < radix; c++) { + // -1 for bytes not in pattern + rightmostPositions[c] = -1; + } + for (let j = 0; j < M; j++) { + // rightmost position for bytes in pattern + rightmostPositions[needle[j]] = j; + } + // Return offset of first match, -1 if no match + const right = rightmostPositions; + const lastIndex = this.byteLength - needle.byteLength; + const lastPatIndex = needle.byteLength - 1; + let skip; + for (let i = offset; i <= lastIndex; i += skip) { + skip = 0; + for (let j = lastPatIndex; j >= 0; j--) { + const char = this.get(i + j); + if (needle[j] !== char) { + skip = Math.max(1, j - right[char]); + break; + } + } + if (skip === 0) { + return i; + } + } + return -1; + } + getInt8(byteOffset) { + const buf = this.subarray(byteOffset, byteOffset + 1); + const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); + return view.getInt8(0); + } + setInt8(byteOffset, value) { + const buf = allocUnsafe(1); + const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); + view.setInt8(0, value); + this.write(buf, byteOffset); + } + getInt16(byteOffset, littleEndian) { + const buf = this.subarray(byteOffset, byteOffset + 2); + const view = new 
DataView(buf.buffer, buf.byteOffset, buf.byteLength); + return view.getInt16(0, littleEndian); + } + setInt16(byteOffset, value, littleEndian) { + const buf = alloc$2(2); + const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); + view.setInt16(0, value, littleEndian); + this.write(buf, byteOffset); + } + getInt32(byteOffset, littleEndian) { + const buf = this.subarray(byteOffset, byteOffset + 4); + const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); + return view.getInt32(0, littleEndian); + } + setInt32(byteOffset, value, littleEndian) { + const buf = alloc$2(4); + const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); + view.setInt32(0, value, littleEndian); + this.write(buf, byteOffset); + } + getBigInt64(byteOffset, littleEndian) { + const buf = this.subarray(byteOffset, byteOffset + 8); + const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); + return view.getBigInt64(0, littleEndian); + } + setBigInt64(byteOffset, value, littleEndian) { + const buf = alloc$2(8); + const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); + view.setBigInt64(0, value, littleEndian); + this.write(buf, byteOffset); + } + getUint8(byteOffset) { + const buf = this.subarray(byteOffset, byteOffset + 1); + const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); + return view.getUint8(0); + } + setUint8(byteOffset, value) { + const buf = allocUnsafe(1); + const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); + view.setUint8(0, value); + this.write(buf, byteOffset); + } + getUint16(byteOffset, littleEndian) { + const buf = this.subarray(byteOffset, byteOffset + 2); + const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); + return view.getUint16(0, littleEndian); + } + setUint16(byteOffset, value, littleEndian) { + const buf = alloc$2(2); + const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); + view.setUint16(0, value, littleEndian); + 
this.write(buf, byteOffset); + } + getUint32(byteOffset, littleEndian) { + const buf = this.subarray(byteOffset, byteOffset + 4); + const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); + return view.getUint32(0, littleEndian); + } + setUint32(byteOffset, value, littleEndian) { + const buf = alloc$2(4); + const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); + view.setUint32(0, value, littleEndian); + this.write(buf, byteOffset); + } + getBigUint64(byteOffset, littleEndian) { + const buf = this.subarray(byteOffset, byteOffset + 8); + const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); + return view.getBigUint64(0, littleEndian); + } + setBigUint64(byteOffset, value, littleEndian) { + const buf = alloc$2(8); + const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); + view.setBigUint64(0, value, littleEndian); + this.write(buf, byteOffset); + } + getFloat32(byteOffset, littleEndian) { + const buf = this.subarray(byteOffset, byteOffset + 4); + const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); + return view.getFloat32(0, littleEndian); + } + setFloat32(byteOffset, value, littleEndian) { + const buf = alloc$2(4); + const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); + view.setFloat32(0, value, littleEndian); + this.write(buf, byteOffset); + } + getFloat64(byteOffset, littleEndian) { + const buf = this.subarray(byteOffset, byteOffset + 8); + const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); + return view.getFloat64(0, littleEndian); + } + setFloat64(byteOffset, value, littleEndian) { + const buf = alloc$2(8); + const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); + view.setFloat64(0, value, littleEndian); + this.write(buf, byteOffset); + } + equals(other) { + if (other == null) { + return false; + } + if (!(other instanceof Uint8ArrayList)) { + return false; + } + if (other.bufs.length !== this.bufs.length) { + return false; + 
} + for (let i = 0; i < this.bufs.length; i++) { + if (!equals(this.bufs[i], other.bufs[i])) { + return false; + } + } + return true; + } + /** + * Create a Uint8ArrayList from a pre-existing list of Uint8Arrays. Use this + * method if you know the total size of all the Uint8Arrays ahead of time. + */ + static fromUint8Arrays(bufs, length) { + const list = new Uint8ArrayList(); + list.bufs = bufs; + if (length == null) { + length = bufs.reduce((acc, curr) => acc + curr.byteLength, 0); + } + list.length = length; + return list; + } + } + /* + function indexOf (needle: Uint8Array, haystack: Uint8Array, offset = 0) { + for (let i = offset; i < haystack.byteLength; i++) { + for (let j = 0; j < needle.length; j++) { + if (haystack[i + j] !== needle[j]) { + break + } + + if (j === needle.byteLength -1) { + return i + } + } + + if (haystack.byteLength - i < needle.byteLength) { + break + } + } + + return -1 + } + */ + + /** + * @packageDocumentation + * + * Generate, import, and export PeerIDs. + * + * A Peer ID is the SHA-256 [multihash](https://github.com/multiformats/multihash) of a public key. + * + * The public key is a base64 encoded string of a protobuf containing an RSA DER buffer. This uses a node buffer to pass the base64 encoded public key protobuf to the multihash for ID generation. 
+ * + * @example + * + * ```TypeScript + * import { createEd25519PeerId } from '@libp2p/peer-id-factory' + * + * const peerId = await createEd25519PeerId() + * console.log(peerId.toString()) + * ``` + * + * ```bash + * 12D3KooWRm8J3iL796zPFi2EtGGtUJn58AG67gcqzMFHZnnsTzqD + * ``` + */ + const createEd25519PeerId = async () => { + const key = await generateKeyPair('Ed25519'); + const id = await createFromPrivKey(key); + if (id.type === 'Ed25519') { + return id; + } + throw new Error(`Generated unexpected PeerId type "${id.type}"`); + }; + async function createFromPrivKey(privateKey) { + return peerIdFromKeys(marshalPublicKey(privateKey.public), marshalPrivateKey(privateKey)); + } + + const codes$3 = { + ERR_SIGNATURE_NOT_VALID: 'ERR_SIGNATURE_NOT_VALID' + }; + + /* eslint-disable import/export */ + /* eslint-disable complexity */ + /* eslint-disable @typescript-eslint/no-namespace */ + /* eslint-disable @typescript-eslint/no-unnecessary-boolean-literal-compare */ + /* eslint-disable @typescript-eslint/no-empty-interface */ + var Envelope; + (function (Envelope) { + let _codec; + Envelope.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if ((obj.publicKey != null && obj.publicKey.byteLength > 0)) { + w.uint32(10); + w.bytes(obj.publicKey); + } + if ((obj.payloadType != null && obj.payloadType.byteLength > 0)) { + w.uint32(18); + w.bytes(obj.payloadType); + } + if ((obj.payload != null && obj.payload.byteLength > 0)) { + w.uint32(26); + w.bytes(obj.payload); + } + if ((obj.signature != null && obj.signature.byteLength > 0)) { + w.uint32(42); + w.bytes(obj.signature); + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length) => { + const obj = { + publicKey: new Uint8Array(0), + payloadType: new Uint8Array(0), + payload: new Uint8Array(0), + signature: new Uint8Array(0) + }; + const end = length == null ? 
reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + obj.publicKey = reader.bytes(); + break; + case 2: + obj.payloadType = reader.bytes(); + break; + case 3: + obj.payload = reader.bytes(); + break; + case 5: + obj.signature = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return obj; + }); + } + return _codec; + }; + Envelope.encode = (obj) => { + return encodeMessage(obj, Envelope.codec()); + }; + Envelope.decode = (buf) => { + return decodeMessage(buf, Envelope.codec()); + }; + })(Envelope || (Envelope = {})); + + class RecordEnvelope { + /** + * Unmarshal a serialized Envelope protobuf message + */ + static createFromProtobuf = async (data) => { + const envelopeData = Envelope.decode(data); + const peerId = await peerIdFromKeys(envelopeData.publicKey); + return new RecordEnvelope({ + peerId, + payloadType: envelopeData.payloadType, + payload: envelopeData.payload, + signature: envelopeData.signature + }); + }; + /** + * Seal marshals the given Record, places the marshaled bytes inside an Envelope + * and signs it with the given peerId's private key + */ + static seal = async (record, peerId) => { + if (peerId.privateKey == null) { + throw new Error('Missing private key'); + } + const domain = record.domain; + const payloadType = record.codec; + const payload = record.marshal(); + const signData = formatSignaturePayload(domain, payloadType, payload); + const key = await unmarshalPrivateKey(peerId.privateKey); + const signature = await key.sign(signData.subarray()); + return new RecordEnvelope({ + peerId, + payloadType, + payload, + signature + }); + }; + /** + * Open and certify a given marshalled envelope. + * Data is unmarshalled and the signature validated for the given domain. 
+ */ + static openAndCertify = async (data, domain) => { + const envelope = await RecordEnvelope.createFromProtobuf(data); + const valid = await envelope.validate(domain); + if (!valid) { + throw new CodeError$2('envelope signature is not valid for the given domain', codes$3.ERR_SIGNATURE_NOT_VALID); + } + return envelope; + }; + peerId; + payloadType; + payload; + signature; + marshaled; + /** + * The Envelope is responsible for keeping an arbitrary signed record + * by a libp2p peer. + */ + constructor(init) { + const { peerId, payloadType, payload, signature } = init; + this.peerId = peerId; + this.payloadType = payloadType; + this.payload = payload; + this.signature = signature; + } + /** + * Marshal the envelope content + */ + marshal() { + if (this.peerId.publicKey == null) { + throw new Error('Missing public key'); + } + if (this.marshaled == null) { + this.marshaled = Envelope.encode({ + publicKey: this.peerId.publicKey, + payloadType: this.payloadType, + payload: this.payload.subarray(), + signature: this.signature + }); + } + return this.marshaled; + } + /** + * Verifies if the other Envelope is identical to this one + */ + equals(other) { + return equals(this.marshal(), other.marshal()); + } + /** + * Validate envelope data signature for the given domain + */ + async validate(domain) { + const signData = formatSignaturePayload(domain, this.payloadType, this.payload); + if (this.peerId.publicKey == null) { + throw new Error('Missing public key'); + } + const key = unmarshalPublicKey(this.peerId.publicKey); + return key.verify(signData.subarray(), this.signature); + } + } + /** + * Helper function that prepares a Uint8Array to sign or verify a signature + */ + const formatSignaturePayload = (domain, payloadType, payload) => { + // When signing, a peer will prepare a Uint8Array by concatenating the following: + // - The length of the domain separation string string in bytes + // - The domain separation string, encoded as UTF-8 + // - The length of the 
payload_type field in bytes + // - The value of the payload_type field + // - The length of the payload field in bytes + // - The value of the payload field + const domainUint8Array = fromString(domain); + const domainLength = encode$5(domainUint8Array.byteLength); + const payloadTypeLength = encode$5(payloadType.length); + const payloadLength = encode$5(payload.length); + return new Uint8ArrayList(domainLength, domainUint8Array, payloadTypeLength, payloadType, payloadLength, payload); + }; + + /** + * @packageDocumentation + * + * Provides strategies ensure arrays are equivalent. + * + * @example + * + * ```typescript + * import { arrayEquals } from '@libp2p/utils/array-equals' + * import { multiaddr } from '@multformats/multiaddr' + * + * const ma1 = multiaddr('/ip4/127.0.0.1/tcp/9000'), + * const ma2 = multiaddr('/ip4/82.41.53.1/tcp/9000') + * + * console.info(arrayEquals([ma1], [ma1])) // true + * console.info(arrayEquals([ma1], [ma2])) // false + * ``` + */ + /** + * Verify if two arrays of non primitive types with the "equals" function are equal. + * Compatible with multiaddr, peer-id and others. + */ + function arrayEquals$1(a, b) { + const sort = (a, b) => a.toString().localeCompare(b.toString()); + if (a.length !== b.length) { + return false; + } + b.sort(sort); + return a.sort(sort).every((item, index) => b[index].equals(item)); + } + + /* eslint-disable @typescript-eslint/no-unsafe-return */ + class Parser { + index = 0; + input = ""; + new(input) { + this.index = 0; + this.input = input; + return this; + } + /** Run a parser, and restore the pre-parse state if it fails. */ + readAtomically(fn) { + const index = this.index; + const result = fn(); + if (result === undefined) { + this.index = index; + } + return result; + } + /** Run a parser, but fail if the entire input wasn't consumed. Doesn't run atomically. 
*/ + parseWith(fn) { + const result = fn(); + if (this.index !== this.input.length) { + return undefined; + } + return result; + } + /** Peek the next character from the input */ + peekChar() { + if (this.index >= this.input.length) { + return undefined; + } + return this.input[this.index]; + } + /** Read the next character from the input */ + readChar() { + if (this.index >= this.input.length) { + return undefined; + } + return this.input[this.index++]; + } + /** Read the next character from the input if it matches the target. */ + readGivenChar(target) { + return this.readAtomically(() => { + const char = this.readChar(); + if (char !== target) { + return undefined; + } + return char; + }); + } + /** + * Helper for reading separators in an indexed loop. Reads the separator + * character iff index > 0, then runs the parser. When used in a loop, + * the separator character will only be read on index > 0 (see + * readIPv4Addr for an example) + */ + readSeparator(sep, index, inner) { + return this.readAtomically(() => { + if (index > 0) { + if (this.readGivenChar(sep) === undefined) { + return undefined; + } + } + return inner(); + }); + } + /** + * Read a number off the front of the input in the given radix, stopping + * at the first non-digit character or eof. Fails if the number has more + * digits than max_digits or if there is no number. 
+ */ + readNumber(radix, maxDigits, allowZeroPrefix, maxBytes) { + return this.readAtomically(() => { + let result = 0; + let digitCount = 0; + const leadingChar = this.peekChar(); + if (leadingChar === undefined) { + return undefined; + } + const hasLeadingZero = leadingChar === "0"; + const maxValue = 2 ** (8 * maxBytes) - 1; + // eslint-disable-next-line no-constant-condition + while (true) { + const digit = this.readAtomically(() => { + const char = this.readChar(); + if (char === undefined) { + return undefined; + } + const num = Number.parseInt(char, radix); + if (Number.isNaN(num)) { + return undefined; + } + return num; + }); + if (digit === undefined) { + break; + } + result *= radix; + result += digit; + if (result > maxValue) { + return undefined; + } + digitCount += 1; + if (maxDigits !== undefined) { + if (digitCount > maxDigits) { + return undefined; + } + } + } + if (digitCount === 0) { + return undefined; + } + else if (!allowZeroPrefix && hasLeadingZero && digitCount > 1) { + return undefined; + } + else { + return result; + } + }); + } + /** Read an IPv4 address. */ + readIPv4Addr() { + return this.readAtomically(() => { + const out = new Uint8Array(4); + for (let i = 0; i < out.length; i++) { + const ix = this.readSeparator(".", i, () => this.readNumber(10, 3, false, 1)); + if (ix === undefined) { + return undefined; + } + out[i] = ix; + } + return out; + }); + } + /** Read an IPv6 Address. */ + readIPv6Addr() { + /** + * Read a chunk of an IPv6 address into `groups`. Returns the number + * of groups read, along with a bool indicating if an embedded + * trailing IPv4 address was read. Specifically, read a series of + * colon-separated IPv6 groups (0x0000 - 0xFFFF), with an optional + * trailing embedded IPv4 address. + */ + const readGroups = (groups) => { + for (let i = 0; i < groups.length / 2; i++) { + const ix = i * 2; + // Try to read a trailing embedded IPv4 address. There must be at least 4 groups left. 
+ if (i < groups.length - 3) { + const ipv4 = this.readSeparator(":", i, () => this.readIPv4Addr()); + if (ipv4 !== undefined) { + groups[ix] = ipv4[0]; + groups[ix + 1] = ipv4[1]; + groups[ix + 2] = ipv4[2]; + groups[ix + 3] = ipv4[3]; + return [ix + 4, true]; + } + } + const group = this.readSeparator(":", i, () => this.readNumber(16, 4, true, 2)); + if (group === undefined) { + return [ix, false]; + } + groups[ix] = group >> 8; + groups[ix + 1] = group & 255; + } + return [groups.length, false]; + }; + return this.readAtomically(() => { + // Read the front part of the address; either the whole thing, or up to the first :: + const head = new Uint8Array(16); + const [headSize, headIp4] = readGroups(head); + if (headSize === 16) { + return head; + } + // IPv4 part is not allowed before `::` + if (headIp4) { + return undefined; + } + // Read `::` if previous code parsed less than 8 groups. + // `::` indicates one or more groups of 16 bits of zeros. + if (this.readGivenChar(":") === undefined) { + return undefined; + } + if (this.readGivenChar(":") === undefined) { + return undefined; + } + // Read the back part of the address. The :: must contain at least one + // set of zeroes, so our max length is 7. + const tail = new Uint8Array(14); + const limit = 16 - (headSize + 2); + const [tailSize] = readGroups(tail.subarray(0, limit)); + // Concat the head and tail of the IP address + head.set(tail.subarray(0, tailSize), 16 - tailSize); + return head; + }); + } + /** Read an IP Address, either IPv4 or IPv6. */ + readIPAddr() { + return this.readIPv4Addr() ?? this.readIPv6Addr(); + } + } + + // See https://stackoverflow.com/questions/166132/maximum-length-of-the-textual-representation-of-an-ipv6-address + const MAX_IPV6_LENGTH = 45; + const MAX_IPV4_LENGTH = 15; + const parser = new Parser(); + /** Parse `input` into IPv4 bytes. 
*/ + function parseIPv4(input) { + if (input.length > MAX_IPV4_LENGTH) { + return undefined; + } + return parser.new(input).parseWith(() => parser.readIPv4Addr()); + } + /** Parse `input` into IPv6 bytes. */ + function parseIPv6(input) { + // strip zone index if it is present + if (input.includes("%")) { + input = input.split("%")[0]; + } + if (input.length > MAX_IPV6_LENGTH) { + return undefined; + } + return parser.new(input).parseWith(() => parser.readIPv6Addr()); + } + /** Parse `input` into IPv4 or IPv6 bytes. */ + function parseIP(input) { + // strip zone index if it is present + if (input.includes("%")) { + input = input.split("%")[0]; + } + if (input.length > MAX_IPV6_LENGTH) { + return undefined; + } + return parser.new(input).parseWith(() => parser.readIPAddr()); + } + + /** Check if `input` is IPv4. */ + function isIPv4(input) { + return Boolean(parseIPv4(input)); + } + /** Check if `input` is IPv6. */ + function isIPv6(input) { + return Boolean(parseIPv6(input)); + } + /** Check if `input` is IPv4 or IPv6. 
*/ + function isIP(input) { + return Boolean(parseIP(input)); + } + + const isV4 = isIPv4; + const isV6 = isIPv6; + // Copied from https://github.com/indutny/node-ip/blob/master/lib/ip.js#L7 + // but with buf/offset args removed because we don't use them + const toBytes$1 = function (ip) { + let offset = 0; + ip = ip.toString().trim(); + if (isV4(ip)) { + const bytes = new Uint8Array(offset + 4); + ip.split(/\./g).forEach((byte) => { + bytes[offset++] = parseInt(byte, 10) & 0xff; + }); + return bytes; + } + if (isV6(ip)) { + const sections = ip.split(':', 8); + let i; + for (i = 0; i < sections.length; i++) { + const isv4 = isV4(sections[i]); + let v4Buffer; + if (isv4) { + v4Buffer = toBytes$1(sections[i]); + sections[i] = toString$1(v4Buffer.slice(0, 2), 'base16'); + } + if (v4Buffer != null && ++i < 8) { + sections.splice(i, 0, toString$1(v4Buffer.slice(2, 4), 'base16')); + } + } + if (sections[0] === '') { + while (sections.length < 8) + sections.unshift('0'); + } + else if (sections[sections.length - 1] === '') { + while (sections.length < 8) + sections.push('0'); + } + else if (sections.length < 8) { + for (i = 0; i < sections.length && sections[i] !== ''; i++) + ; + const argv = [i, 1]; + for (i = 9 - sections.length; i > 0; i--) { + argv.push('0'); + } + sections.splice.apply(sections, argv); + } + const bytes = new Uint8Array(offset + 16); + for (i = 0; i < sections.length; i++) { + const word = parseInt(sections[i], 16); + bytes[offset++] = (word >> 8) & 0xff; + bytes[offset++] = word & 0xff; + } + return bytes; + } + throw new Error('invalid ip address'); + }; + // Copied from https://github.com/indutny/node-ip/blob/master/lib/ip.js#L63 + const toString = function (buf, offset = 0, length) { + offset = ~~offset; + length = length ?? 
(buf.length - offset); + const view = new DataView(buf.buffer); + if (length === 4) { + const result = []; + // IPv4 + for (let i = 0; i < length; i++) { + result.push(buf[offset + i]); + } + return result.join('.'); + } + if (length === 16) { + const result = []; + // IPv6 + for (let i = 0; i < length; i += 2) { + result.push(view.getUint16(offset + i).toString(16)); + } + return result.join(':') + .replace(/(^|:)0(:0)*:0(:|$)/, '$1::$3') + .replace(/:{3,4}/, '::'); + } + return ''; + }; + + const V = -1; + const names = {}; + const codes$2 = {}; + const table = [ + [4, 32, 'ip4'], + [6, 16, 'tcp'], + [33, 16, 'dccp'], + [41, 128, 'ip6'], + [42, V, 'ip6zone'], + [43, 8, 'ipcidr'], + [53, V, 'dns', true], + [54, V, 'dns4', true], + [55, V, 'dns6', true], + [56, V, 'dnsaddr', true], + [132, 16, 'sctp'], + [273, 16, 'udp'], + [275, 0, 'p2p-webrtc-star'], + [276, 0, 'p2p-webrtc-direct'], + [277, 0, 'p2p-stardust'], + [280, 0, 'webrtc-direct'], + [281, 0, 'webrtc'], + [290, 0, 'p2p-circuit'], + [301, 0, 'udt'], + [302, 0, 'utp'], + [400, V, 'unix', false, true], + // `ipfs` is added before `p2p` for legacy support. 
+ // All text representations will default to `p2p`, but `ipfs` will + // still be supported + [421, V, 'ipfs'], + // `p2p` is the preferred name for 421, and is now the default + [421, V, 'p2p'], + [443, 0, 'https'], + [444, 96, 'onion'], + [445, 296, 'onion3'], + [446, V, 'garlic64'], + [448, 0, 'tls'], + [449, V, 'sni'], + [460, 0, 'quic'], + [461, 0, 'quic-v1'], + [465, 0, 'webtransport'], + [466, V, 'certhash'], + [477, 0, 'ws'], + [478, 0, 'wss'], + [479, 0, 'p2p-websocket-star'], + [480, 0, 'http'], + [777, V, 'memory'] + ]; + // populate tables + table.forEach(row => { + const proto = createProtocol(...row); + codes$2[proto.code] = proto; + names[proto.name] = proto; + }); + function createProtocol(code, size, name, resolvable, path) { + return { + code, + size, + name, + resolvable: Boolean(resolvable), + path: Boolean(path) + }; + } + /** + * For the passed proto string or number, return a {@link Protocol} + * + * @example + * + * ```js + * import { protocol } from '@multiformats/multiaddr' + * + * console.info(protocol(4)) + * // { code: 4, size: 32, name: 'ip4', resolvable: false, path: false } + * ``` + */ + function getProtocol(proto) { + if (typeof proto === 'number') { + if (codes$2[proto] != null) { + return codes$2[proto]; + } + throw new Error(`no protocol with code: ${proto}`); + } + else if (typeof proto === 'string') { + if (names[proto] != null) { + return names[proto]; + } + throw new Error(`no protocol with name: ${proto}`); + } + throw new Error(`invalid protocol id type: ${typeof proto}`); + } + + /** + * @packageDocumentation + * + * Provides methods for converting + */ + getProtocol('ip4'); + getProtocol('ip6'); + getProtocol('ipcidr'); + /** + * Convert [code,Uint8Array] to string + */ + function convertToString(proto, buf) { + const protocol = getProtocol(proto); + switch (protocol.code) { + case 4: // ipv4 + case 41: // ipv6 + return bytes2ip(buf); + case 42: // ipv6zone + return bytes2str(buf); + case 6: // tcp + case 273: // udp + 
case 33: // dccp + case 132: // sctp + return bytes2port(buf).toString(); + case 53: // dns + case 54: // dns4 + case 55: // dns6 + case 56: // dnsaddr + case 400: // unix + case 449: // sni + case 777: // memory + return bytes2str(buf); + case 421: // ipfs + return bytes2mh(buf); + case 444: // onion + return bytes2onion(buf); + case 445: // onion3 + return bytes2onion(buf); + case 466: // certhash + return bytes2mb(buf); + default: + return toString$1(buf, 'base16'); // no clue. convert to hex + } + } + function convertToBytes(proto, str) { + const protocol = getProtocol(proto); + switch (protocol.code) { + case 4: // ipv4 + return ip2bytes(str); + case 41: // ipv6 + return ip2bytes(str); + case 42: // ipv6zone + return str2bytes(str); + case 6: // tcp + case 273: // udp + case 33: // dccp + case 132: // sctp + return port2bytes(parseInt(str, 10)); + case 53: // dns + case 54: // dns4 + case 55: // dns6 + case 56: // dnsaddr + case 400: // unix + case 449: // sni + case 777: // memory + return str2bytes(str); + case 421: // ipfs + return mh2bytes(str); + case 444: // onion + return onion2bytes(str); + case 445: // onion3 + return onion32bytes(str); + case 466: // certhash + return mb2bytes(str); + default: + return fromString(str, 'base16'); // no clue. 
convert from hex + } + } + const decoders = Object.values(bases).map((c) => c.decoder); + const anybaseDecoder = (function () { + let acc = decoders[0].or(decoders[1]); + decoders.slice(2).forEach((d) => (acc = acc.or(d))); + return acc; + })(); + function ip2bytes(ipString) { + if (!isIP(ipString)) { + throw new Error('invalid ip address'); + } + return toBytes$1(ipString); + } + function bytes2ip(ipBuff) { + const ipString = toString(ipBuff, 0, ipBuff.length); + if (ipString == null) { + throw new Error('ipBuff is required'); + } + if (!isIP(ipString)) { + throw new Error('invalid ip address'); + } + return ipString; + } + function port2bytes(port) { + const buf = new ArrayBuffer(2); + const view = new DataView(buf); + view.setUint16(0, port); + return new Uint8Array(buf); + } + function bytes2port(buf) { + const view = new DataView(buf.buffer); + return view.getUint16(buf.byteOffset); + } + function str2bytes(str) { + const buf = fromString(str); + const size = Uint8Array.from(encode$5(buf.length)); + return concat$1([size, buf], size.length + buf.length); + } + function bytes2str(buf) { + const size = decode$6(buf); + buf = buf.slice(encodingLength$1(size)); + if (buf.length !== size) { + throw new Error('inconsistent lengths'); + } + return toString$1(buf); + } + function mh2bytes(hash) { + let mh; + if (hash[0] === 'Q' || hash[0] === '1') { + mh = decode$1(base58btc.decode(`z${hash}`)).bytes; + } + else { + mh = CID.parse(hash).multihash.bytes; + } + // the address is a varint prefixed multihash string representation + const size = Uint8Array.from(encode$5(mh.length)); + return concat$1([size, mh], size.length + mh.length); + } + function mb2bytes(mbstr) { + const mb = anybaseDecoder.decode(mbstr); + const size = Uint8Array.from(encode$5(mb.length)); + return concat$1([size, mb], size.length + mb.length); + } + function bytes2mb(buf) { + const size = decode$6(buf); + const hash = buf.slice(encodingLength$1(size)); + if (hash.length !== size) { + throw new 
Error('inconsistent lengths'); + } + return 'u' + toString$1(hash, 'base64url'); + } + /** + * Converts bytes to bas58btc string + */ + function bytes2mh(buf) { + const size = decode$6(buf); + const address = buf.slice(encodingLength$1(size)); + if (address.length !== size) { + throw new Error('inconsistent lengths'); + } + return toString$1(address, 'base58btc'); + } + function onion2bytes(str) { + const addr = str.split(':'); + if (addr.length !== 2) { + throw new Error(`failed to parse onion addr: ["'${addr.join('", "')}'"]' does not contain a port number`); + } + if (addr[0].length !== 16) { + throw new Error(`failed to parse onion addr: ${addr[0]} not a Tor onion address.`); + } + // onion addresses do not include the multibase prefix, add it before decoding + const buf = base32.decode('b' + addr[0]); + // onion port number + const port = parseInt(addr[1], 10); + if (port < 1 || port > 65536) { + throw new Error('Port number is not in range(1, 65536)'); + } + const portBuf = port2bytes(port); + return concat$1([buf, portBuf], buf.length + portBuf.length); + } + function onion32bytes(str) { + const addr = str.split(':'); + if (addr.length !== 2) { + throw new Error(`failed to parse onion addr: ["'${addr.join('", "')}'"]' does not contain a port number`); + } + if (addr[0].length !== 56) { + throw new Error(`failed to parse onion addr: ${addr[0]} not a Tor onion3 address.`); + } + // onion addresses do not include the multibase prefix, add it before decoding + const buf = base32.decode(`b${addr[0]}`); + // onion port number + const port = parseInt(addr[1], 10); + if (port < 1 || port > 65536) { + throw new Error('Port number is not in range(1, 65536)'); + } + const portBuf = port2bytes(port); + return concat$1([buf, portBuf], buf.length + portBuf.length); + } + function bytes2onion(buf) { + const addrBytes = buf.slice(0, buf.length - 2); + const portBytes = buf.slice(buf.length - 2); + const addr = toString$1(addrBytes, 'base32'); + const port = 
bytes2port(portBytes); + return `${addr}:${port}`; + } + + function stringToMultiaddrParts(str) { + str = cleanPath(str); + const tuples = []; + const stringTuples = []; + let path = null; + const parts = str.split('/').slice(1); + if (parts.length === 1 && parts[0] === '') { + return { + bytes: new Uint8Array(), + string: '/', + tuples: [], + stringTuples: [], + path: null + }; + } + for (let p = 0; p < parts.length; p++) { + const part = parts[p]; + const proto = getProtocol(part); + if (proto.size === 0) { + tuples.push([proto.code]); + stringTuples.push([proto.code]); + // eslint-disable-next-line no-continue + continue; + } + p++; // advance addr part + if (p >= parts.length) { + throw ParseError('invalid address: ' + str); + } + // if it's a path proto, take the rest + if (proto.path === true) { + // should we need to check each path part to see if it's a proto? + // This would allow for other protocols to be added after a unix path, + // however it would have issues if the path had a protocol name in the path + path = cleanPath(parts.slice(p).join('/')); + tuples.push([proto.code, convertToBytes(proto.code, path)]); + stringTuples.push([proto.code, path]); + break; + } + const bytes = convertToBytes(proto.code, parts[p]); + tuples.push([proto.code, bytes]); + stringTuples.push([proto.code, convertToString(proto.code, bytes)]); + } + return { + string: stringTuplesToString(stringTuples), + bytes: tuplesToBytes(tuples), + tuples, + stringTuples, + path + }; + } + function bytesToMultiaddrParts(bytes) { + const tuples = []; + const stringTuples = []; + let path = null; + let i = 0; + while (i < bytes.length) { + const code = decode$6(bytes, i); + const n = encodingLength$1(code); + const p = getProtocol(code); + const size = sizeForAddr(p, bytes.slice(i + n)); + if (size === 0) { + tuples.push([code]); + stringTuples.push([code]); + i += n; + // eslint-disable-next-line no-continue + continue; + } + const addr = bytes.slice(i + n, i + n + size); + i += (size + 
n); + if (i > bytes.length) { // did not end _exactly_ at buffer.length + throw ParseError('Invalid address Uint8Array: ' + toString$1(bytes, 'base16')); + } + // ok, tuple seems good. + tuples.push([code, addr]); + const stringAddr = convertToString(code, addr); + stringTuples.push([code, stringAddr]); + if (p.path === true) { + // should we need to check each path part to see if it's a proto? + // This would allow for other protocols to be added after a unix path, + // however it would have issues if the path had a protocol name in the path + path = stringAddr; + break; + } + } + return { + bytes: Uint8Array.from(bytes), + string: stringTuplesToString(stringTuples), + tuples, + stringTuples, + path + }; + } + /** + * [[str name, str addr]... ] -> string + */ + function stringTuplesToString(tuples) { + const parts = []; + tuples.map((tup) => { + const proto = getProtocol(tup[0]); + parts.push(proto.name); + if (tup.length > 1 && tup[1] != null) { + parts.push(tup[1]); + } + return null; + }); + return cleanPath(parts.join('/')); + } + /** + * [[int code, Uint8Array ]... ] -> Uint8Array + */ + function tuplesToBytes(tuples) { + return concat$1(tuples.map((tup) => { + const proto = getProtocol(tup[0]); + let buf = Uint8Array.from(encode$5(proto.code)); + if (tup.length > 1 && tup[1] != null) { + buf = concat$1([buf, tup[1]]); // add address buffer + } + return buf; + })); + } + /** + * For the passed address, return the serialized size + */ + function sizeForAddr(p, addr) { + if (p.size > 0) { + return p.size / 8; + } + else if (p.size === 0) { + return 0; + } + else { + const size = decode$6(addr instanceof Uint8Array ? 
addr : Uint8Array.from(addr)); + return size + encodingLength$1(size); + } + } + function cleanPath(str) { + return '/' + str.trim().split('/').filter((a) => a).join('/'); + } + function ParseError(str) { + return new Error('Error parsing address: ' + str); + } + + /** + * @packageDocumentation + * + * An implementation of a Multiaddr in JavaScript + * + * @example + * + * ```js + * import { multiaddr } from '@multiformats/multiaddr' + * + * const ma = multiaddr('/ip4/127.0.0.1/tcp/1234') + * ``` + */ + const inspect = Symbol.for('nodejs.util.inspect.custom'); + const symbol = Symbol.for('@multiformats/js-multiaddr/multiaddr'); + const DNS_CODES = [ + getProtocol('dns').code, + getProtocol('dns4').code, + getProtocol('dns6').code, + getProtocol('dnsaddr').code + ]; + /** + * Creates a {@link Multiaddr} from a {@link MultiaddrInput} + */ + class Multiaddr { + bytes; + #string; + #tuples; + #stringTuples; + #path; + [symbol] = true; + constructor(addr) { + // default + if (addr == null) { + addr = ''; + } + let parts; + if (addr instanceof Uint8Array) { + parts = bytesToMultiaddrParts(addr); + } + else if (typeof addr === 'string') { + if (addr.length > 0 && addr.charAt(0) !== '/') { + throw new Error(`multiaddr "${addr}" must start with a "/"`); + } + parts = stringToMultiaddrParts(addr); + } + else if (isMultiaddr(addr)) { // Multiaddr + parts = bytesToMultiaddrParts(addr.bytes); + } + else { + throw new Error('addr must be a string, Buffer, or another Multiaddr'); + } + this.bytes = parts.bytes; + this.#string = parts.string; + this.#tuples = parts.tuples; + this.#stringTuples = parts.stringTuples; + this.#path = parts.path; + } + toString() { + return this.#string; + } + toJSON() { + return this.toString(); + } + toOptions() { + let family; + let transport; + let host; + let port; + let zone = ''; + const tcp = getProtocol('tcp'); + const udp = getProtocol('udp'); + const ip4 = getProtocol('ip4'); + const ip6 = getProtocol('ip6'); + const dns6 = 
getProtocol('dns6'); + const ip6zone = getProtocol('ip6zone'); + for (const [code, value] of this.stringTuples()) { + if (code === ip6zone.code) { + zone = `%${value ?? ''}`; + } + // default to https when protocol & port are omitted from DNS addrs + if (DNS_CODES.includes(code)) { + transport = tcp.name; + port = 443; + host = `${value ?? ''}${zone}`; + family = code === dns6.code ? 6 : 4; + } + if (code === tcp.code || code === udp.code) { + transport = getProtocol(code).name; + port = parseInt(value ?? ''); + } + if (code === ip4.code || code === ip6.code) { + transport = getProtocol(code).name; + host = `${value ?? ''}${zone}`; + family = code === ip6.code ? 6 : 4; + } + } + if (family == null || transport == null || host == null || port == null) { + throw new Error('multiaddr must have a valid format: "/{ip4, ip6, dns4, dns6, dnsaddr}/{address}/{tcp, udp}/{port}".'); + } + const opts = { + family, + host, + transport, + port + }; + return opts; + } + protos() { + return this.#tuples.map(([code]) => Object.assign({}, getProtocol(code))); + } + protoCodes() { + return this.#tuples.map(([code]) => code); + } + protoNames() { + return this.#tuples.map(([code]) => getProtocol(code).name); + } + tuples() { + return this.#tuples; + } + stringTuples() { + return this.#stringTuples; + } + encapsulate(addr) { + addr = new Multiaddr(addr); + return new Multiaddr(this.toString() + addr.toString()); + } + decapsulate(addr) { + const addrString = addr.toString(); + const s = this.toString(); + const i = s.lastIndexOf(addrString); + if (i < 0) { + throw new Error(`Address ${this.toString()} does not contain subaddress: ${addr.toString()}`); + } + return new Multiaddr(s.slice(0, i)); + } + decapsulateCode(code) { + const tuples = this.tuples(); + for (let i = tuples.length - 1; i >= 0; i--) { + if (tuples[i][0] === code) { + return new Multiaddr(tuplesToBytes(tuples.slice(0, i))); + } + } + return this; + } + getPeerId() { + try { + let tuples = []; + 
this.stringTuples().forEach(([code, name]) => { + if (code === names.p2p.code) { + tuples.push([code, name]); + } + // if this is a p2p-circuit address, return the target peer id if present + // not the peer id of the relay + if (code === names['p2p-circuit'].code) { + tuples = []; + } + }); + // Get the last ipfs tuple ['p2p', 'peerid string'] + const tuple = tuples.pop(); + if (tuple?.[1] != null) { + const peerIdStr = tuple[1]; + // peer id is base58btc encoded string but not multibase encoded so add the `z` + // prefix so we can validate that it is correctly encoded + if (peerIdStr[0] === 'Q' || peerIdStr[0] === '1') { + return toString$1(base58btc.decode(`z${peerIdStr}`), 'base58btc'); + } + // try to parse peer id as CID + return toString$1(CID.parse(peerIdStr).multihash.bytes, 'base58btc'); + } + return null; + } + catch (e) { + return null; + } + } + getPath() { + return this.#path; + } + equals(addr) { + return equals(this.bytes, addr.bytes); + } + async resolve(options) { + const resolvableProto = this.protos().find((p) => p.resolvable); + // Multiaddr is not resolvable? + if (resolvableProto == null) { + return [this]; + } + const resolver = resolvers.get(resolvableProto.name); + if (resolver == null) { + throw new CodeError$2(`no available resolver for ${resolvableProto.name}`, 'ERR_NO_AVAILABLE_RESOLVER'); + } + const result = await resolver(this, options); + return result.map(str => multiaddr(str)); + } + nodeAddress() { + const options = this.toOptions(); + if (options.transport !== 'tcp' && options.transport !== 'udp') { + throw new Error(`multiaddr must have a valid format - no protocol with name: "${options.transport}". Must have a valid transport protocol: "{tcp, udp}"`); + } + return { + family: options.family, + address: options.host, + port: options.port + }; + } + isThinWaistAddress(addr) { + const protos = (addr ?? 
this).protos(); + if (protos.length !== 2) { + return false; + } + if (protos[0].code !== 4 && protos[0].code !== 41) { + return false; + } + if (protos[1].code !== 6 && protos[1].code !== 273) { + return false; + } + return true; + } + /** + * Returns Multiaddr as a human-readable string + * https://nodejs.org/api/util.html#utilinspectcustom + * + * @example + * ```js + * import { multiaddr } from '@multiformats/multiaddr' + * + * console.info(multiaddr('/ip4/127.0.0.1/tcp/4001')) + * // 'Multiaddr(/ip4/127.0.0.1/tcp/4001)' + * ``` + */ + [inspect]() { + return `Multiaddr(${this.#string})`; + } + } + + /** + * @packageDocumentation + * + * A standard way to represent addresses that + * + * - support any standard network protocol + * - are self-describing + * - have a binary packed format + * - have a nice string representation + * - encapsulate well + * + * @example + * + * ```TypeScript + * import { multiaddr } from '@multiformats/multiaddr' + * const addr = multiaddr("/ip4/127.0.0.1/udp/1234") + * // Multiaddr(/ip4/127.0.0.1/udp/1234) + * + * const addr = multiaddr("/ip4/127.0.0.1/udp/1234") + * // Multiaddr(/ip4/127.0.0.1/udp/1234) + * + * addr.bytes + * // + * + * addr.toString() + * // '/ip4/127.0.0.1/udp/1234' + * + * addr.protos() + * // [ + * // {code: 4, name: 'ip4', size: 32}, + * // {code: 273, name: 'udp', size: 16} + * // ] + * + * // gives you an object that is friendly with what Node.js core modules expect for addresses + * addr.nodeAddress() + * // { + * // family: 4, + * // port: 1234, + * // address: "127.0.0.1" + * // } + * + * addr.encapsulate('/sctp/5678') + * // Multiaddr(/ip4/127.0.0.1/udp/1234/sctp/5678) + * ``` + * + * ## Resolving DNSADDR addresses + * + * [DNSADDR](https://github.com/multiformats/multiaddr/blob/master/protocols/DNSADDR.md) is a spec that allows storing a TXT DNS record that contains a Multiaddr. + * + * To resolve DNSADDR addresses, call the `.resolve()` function the multiaddr, optionally passing a `DNS` resolver. 
+ * + * DNSADDR addresses can resolve to multiple multiaddrs, since there is no limit to the number of TXT records that can be stored. + * + * @example Resolving DNSADDR Multiaddrs + * + * ```TypeScript + * import { multiaddr, resolvers } from '@multiformats/multiaddr' + * import { dnsaddr } from '@multiformats/multiaddr/resolvers' + * + * resolvers.set('dnsaddr', dnsaddr) + * + * const ma = multiaddr('/dnsaddr/bootstrap.libp2p.io') + * + * // resolve with a 5s timeout + * const resolved = await ma.resolve({ + * signal: AbortSignal.timeout(5000) + * }) + * + * console.info(await ma.resolve(resolved) + * // [Multiaddr('/ip4/147.75...'), Multiaddr('/ip4/147.75...'), Multiaddr('/ip4/147.75...')...] + * ``` + * + * @example Using a custom DNS resolver to resolve DNSADDR Multiaddrs + * + * See the docs for [@multiformats/dns](https://www.npmjs.com/package/@multiformats/dns) for a full breakdown of how to specify multiple resolvers or resolvers that can be used for specific TLDs. + * + * ```TypeScript + * import { multiaddr } from '@multiformats/multiaddr' + * import { dns } from '@multiformats/dns' + * import { dnsJsonOverHttps } from '@multiformats/dns/resolvers' + * + * const resolver = dns({ + * '.': dnsJsonOverHttps('https://cloudflare-dns.com/dns-query') + * }) + * + * const ma = multiaddr('/dnsaddr/bootstrap.libp2p.io') + * const resolved = await ma.resolve({ + * dns: resolver + * }) + * + * console.info(resolved) + * // [Multiaddr('/ip4/147.75...'), Multiaddr('/ip4/147.75...'), Multiaddr('/ip4/147.75...')...] 
+ * ``` + */ + /** + * All configured {@link Resolver}s + */ + const resolvers = new Map(); + /** + * Check if object is a {@link Multiaddr} instance + * + * @example + * + * ```js + * import { isMultiaddr, multiaddr } from '@multiformats/multiaddr' + * + * isMultiaddr(5) + * // false + * isMultiaddr(multiaddr('/ip4/127.0.0.1')) + * // true + * ``` + */ + function isMultiaddr(value) { + return Boolean(value?.[symbol]); + } + /** + * A function that takes a {@link MultiaddrInput} and returns a {@link Multiaddr} + * + * @example + * ```js + * import { multiaddr } from '@libp2p/multiaddr' + * + * multiaddr('/ip4/127.0.0.1/tcp/4001') + * // Multiaddr(/ip4/127.0.0.1/tcp/4001) + * ``` + * + * @param {MultiaddrInput} [addr] - If String or Uint8Array, needs to adhere to the address format of a [multiaddr](https://github.com/multiformats/multiaddr#string-format) + */ + function multiaddr(addr) { + return new Multiaddr(addr); + } + + // The domain string used for peer records contained in a Envelope. + const ENVELOPE_DOMAIN_PEER_RECORD = 'libp2p-peer-record'; + // The type hint used to identify peer records in a Envelope. 
+ // Defined in https://github.com/multiformats/multicodec/blob/master/table.csv + // with name "libp2p-peer-record" + const ENVELOPE_PAYLOAD_TYPE_PEER_RECORD = Uint8Array.from([3, 1]); + + /* eslint-disable import/export */ + /* eslint-disable complexity */ + /* eslint-disable @typescript-eslint/no-namespace */ + /* eslint-disable @typescript-eslint/no-unnecessary-boolean-literal-compare */ + /* eslint-disable @typescript-eslint/no-empty-interface */ + var PeerRecord$1; + (function (PeerRecord) { + (function (AddressInfo) { + let _codec; + AddressInfo.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if ((obj.multiaddr != null && obj.multiaddr.byteLength > 0)) { + w.uint32(10); + w.bytes(obj.multiaddr); + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length) => { + const obj = { + multiaddr: new Uint8Array(0) + }; + const end = length == null ? reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + obj.multiaddr = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return obj; + }); + } + return _codec; + }; + AddressInfo.encode = (obj) => { + return encodeMessage(obj, AddressInfo.codec()); + }; + AddressInfo.decode = (buf) => { + return decodeMessage(buf, AddressInfo.codec()); + }; + })(PeerRecord.AddressInfo || (PeerRecord.AddressInfo = {})); + let _codec; + PeerRecord.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if ((obj.peerId != null && obj.peerId.byteLength > 0)) { + w.uint32(10); + w.bytes(obj.peerId); + } + if ((obj.seq != null && obj.seq !== 0n)) { + w.uint32(16); + w.uint64(obj.seq); + } + if (obj.addresses != null) { + for (const value of obj.addresses) { + w.uint32(26); + PeerRecord.AddressInfo.codec().encode(value, w); + } + } + if 
(opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length) => { + const obj = { + peerId: new Uint8Array(0), + seq: 0n, + addresses: [] + }; + const end = length == null ? reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + obj.peerId = reader.bytes(); + break; + case 2: + obj.seq = reader.uint64(); + break; + case 3: + obj.addresses.push(PeerRecord.AddressInfo.codec().decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return obj; + }); + } + return _codec; + }; + PeerRecord.encode = (obj) => { + return encodeMessage(obj, PeerRecord.codec()); + }; + PeerRecord.decode = (buf) => { + return decodeMessage(buf, PeerRecord.codec()); + }; + })(PeerRecord$1 || (PeerRecord$1 = {})); + + /** + * The PeerRecord is used for distributing peer routing records across the network. + * It contains the peer's reachable listen addresses. + */ + class PeerRecord { + /** + * Unmarshal Peer Record Protobuf + */ + static createFromProtobuf = (buf) => { + const peerRecord = PeerRecord$1.decode(buf); + const peerId = peerIdFromBytes(peerRecord.peerId); + const multiaddrs = (peerRecord.addresses ?? []).map((a) => multiaddr(a.multiaddr)); + const seqNumber = peerRecord.seq; + return new PeerRecord({ peerId, multiaddrs, seqNumber }); + }; + static DOMAIN = ENVELOPE_DOMAIN_PEER_RECORD; + static CODEC = ENVELOPE_PAYLOAD_TYPE_PEER_RECORD; + peerId; + multiaddrs; + seqNumber; + domain = PeerRecord.DOMAIN; + codec = PeerRecord.CODEC; + marshaled; + constructor(init) { + const { peerId, multiaddrs, seqNumber } = init; + this.peerId = peerId; + this.multiaddrs = multiaddrs ?? []; + this.seqNumber = seqNumber ?? 
BigInt(Date.now()); + } + /** + * Marshal a record to be used in an envelope + */ + marshal() { + if (this.marshaled == null) { + this.marshaled = PeerRecord$1.encode({ + peerId: this.peerId.toBytes(), + seq: BigInt(this.seqNumber), + addresses: this.multiaddrs.map((m) => ({ + multiaddr: m.bytes + })) + }); + } + return this.marshaled; + } + /** + * Returns true if `this` record equals the `other` + */ + equals(other) { + if (!(other instanceof PeerRecord)) { + return false; + } + // Validate PeerId + if (!this.peerId.equals(other.peerId)) { + return false; + } + // Validate seqNumber + if (this.seqNumber !== other.seqNumber) { + return false; + } + // Validate multiaddrs + if (!arrayEquals$1(this.multiaddrs, other.multiaddrs)) { + return false; + } + return true; + } + } + + /** + * @packageDocumentation + * + * For when you need a one-liner to collect iterable values. + * + * @example + * + * ```javascript + * import all from 'it-all' + * + * // This can also be an iterator, etc + * const values = function * () { + * yield * [0, 1, 2, 3, 4] + * } + * + * const arr = all(values) + * + * console.info(arr) // 0, 1, 2, 3, 4 + * ``` + * + * Async sources must be awaited: + * + * ```javascript + * const values = async function * () { + * yield * [0, 1, 2, 3, 4] + * } + * + * const arr = await all(values()) + * + * console.info(arr) // 0, 1, 2, 3, 4 + * ``` + */ + function isAsyncIterable$a(thing) { + return thing[Symbol.asyncIterator] != null; + } + function all(source) { + if (isAsyncIterable$a(source)) { + return (async () => { + const arr = []; + for await (const entry of source) { + arr.push(entry); + } + return arr; + })(); + } + const arr = []; + for (const entry of source) { + arr.push(entry); + } + return arr; + } + + var eventemitter3 = {exports: {}}; + + (function (module) { + + var has = Object.prototype.hasOwnProperty + , prefix = '~'; + + /** + * Constructor to create a storage for our `EE` objects. 
+ * An `Events` instance is a plain object whose properties are event names. + * + * @constructor + * @private + */ + function Events() {} + + // + // We try to not inherit from `Object.prototype`. In some engines creating an + // instance in this way is faster than calling `Object.create(null)` directly. + // If `Object.create(null)` is not supported we prefix the event names with a + // character to make sure that the built-in object properties are not + // overridden or used as an attack vector. + // + if (Object.create) { + Events.prototype = Object.create(null); + + // + // This hack is needed because the `__proto__` property is still inherited in + // some old browsers like Android 4, iPhone 5.1, Opera 11 and Safari 5. + // + if (!new Events().__proto__) prefix = false; + } + + /** + * Representation of a single event listener. + * + * @param {Function} fn The listener function. + * @param {*} context The context to invoke the listener with. + * @param {Boolean} [once=false] Specify if the listener is a one-time listener. + * @constructor + * @private + */ + function EE(fn, context, once) { + this.fn = fn; + this.context = context; + this.once = once || false; + } + + /** + * Add a listener for a given event. + * + * @param {EventEmitter} emitter Reference to the `EventEmitter` instance. + * @param {(String|Symbol)} event The event name. + * @param {Function} fn The listener function. + * @param {*} context The context to invoke the listener with. + * @param {Boolean} once Specify if the listener is a one-time listener. + * @returns {EventEmitter} + * @private + */ + function addListener(emitter, event, fn, context, once) { + if (typeof fn !== 'function') { + throw new TypeError('The listener must be a function'); + } + + var listener = new EE(fn, context || emitter, once) + , evt = prefix ? 
prefix + event : event; + + if (!emitter._events[evt]) emitter._events[evt] = listener, emitter._eventsCount++; + else if (!emitter._events[evt].fn) emitter._events[evt].push(listener); + else emitter._events[evt] = [emitter._events[evt], listener]; + + return emitter; + } + + /** + * Clear event by name. + * + * @param {EventEmitter} emitter Reference to the `EventEmitter` instance. + * @param {(String|Symbol)} evt The Event name. + * @private + */ + function clearEvent(emitter, evt) { + if (--emitter._eventsCount === 0) emitter._events = new Events(); + else delete emitter._events[evt]; + } + + /** + * Minimal `EventEmitter` interface that is molded against the Node.js + * `EventEmitter` interface. + * + * @constructor + * @public + */ + function EventEmitter() { + this._events = new Events(); + this._eventsCount = 0; + } + + /** + * Return an array listing the events for which the emitter has registered + * listeners. + * + * @returns {Array} + * @public + */ + EventEmitter.prototype.eventNames = function eventNames() { + var names = [] + , events + , name; + + if (this._eventsCount === 0) return names; + + for (name in (events = this._events)) { + if (has.call(events, name)) names.push(prefix ? name.slice(1) : name); + } + + if (Object.getOwnPropertySymbols) { + return names.concat(Object.getOwnPropertySymbols(events)); + } + + return names; + }; + + /** + * Return the listeners registered for a given event. + * + * @param {(String|Symbol)} event The event name. + * @returns {Array} The registered listeners. + * @public + */ + EventEmitter.prototype.listeners = function listeners(event) { + var evt = prefix ? prefix + event : event + , handlers = this._events[evt]; + + if (!handlers) return []; + if (handlers.fn) return [handlers.fn]; + + for (var i = 0, l = handlers.length, ee = new Array(l); i < l; i++) { + ee[i] = handlers[i].fn; + } + + return ee; + }; + + /** + * Return the number of listeners listening to a given event. 
+ * + * @param {(String|Symbol)} event The event name. + * @returns {Number} The number of listeners. + * @public + */ + EventEmitter.prototype.listenerCount = function listenerCount(event) { + var evt = prefix ? prefix + event : event + , listeners = this._events[evt]; + + if (!listeners) return 0; + if (listeners.fn) return 1; + return listeners.length; + }; + + /** + * Calls each of the listeners registered for a given event. + * + * @param {(String|Symbol)} event The event name. + * @returns {Boolean} `true` if the event had listeners, else `false`. + * @public + */ + EventEmitter.prototype.emit = function emit(event, a1, a2, a3, a4, a5) { + var evt = prefix ? prefix + event : event; + + if (!this._events[evt]) return false; + + var listeners = this._events[evt] + , len = arguments.length + , args + , i; + + if (listeners.fn) { + if (listeners.once) this.removeListener(event, listeners.fn, undefined, true); + + switch (len) { + case 1: return listeners.fn.call(listeners.context), true; + case 2: return listeners.fn.call(listeners.context, a1), true; + case 3: return listeners.fn.call(listeners.context, a1, a2), true; + case 4: return listeners.fn.call(listeners.context, a1, a2, a3), true; + case 5: return listeners.fn.call(listeners.context, a1, a2, a3, a4), true; + case 6: return listeners.fn.call(listeners.context, a1, a2, a3, a4, a5), true; + } + + for (i = 1, args = new Array(len -1); i < len; i++) { + args[i - 1] = arguments[i]; + } + + listeners.fn.apply(listeners.context, args); + } else { + var length = listeners.length + , j; + + for (i = 0; i < length; i++) { + if (listeners[i].once) this.removeListener(event, listeners[i].fn, undefined, true); + + switch (len) { + case 1: listeners[i].fn.call(listeners[i].context); break; + case 2: listeners[i].fn.call(listeners[i].context, a1); break; + case 3: listeners[i].fn.call(listeners[i].context, a1, a2); break; + case 4: listeners[i].fn.call(listeners[i].context, a1, a2, a3); break; + default: + if (!args) 
for (j = 1, args = new Array(len -1); j < len; j++) { + args[j - 1] = arguments[j]; + } + + listeners[i].fn.apply(listeners[i].context, args); + } + } + } + + return true; + }; + + /** + * Add a listener for a given event. + * + * @param {(String|Symbol)} event The event name. + * @param {Function} fn The listener function. + * @param {*} [context=this] The context to invoke the listener with. + * @returns {EventEmitter} `this`. + * @public + */ + EventEmitter.prototype.on = function on(event, fn, context) { + return addListener(this, event, fn, context, false); + }; + + /** + * Add a one-time listener for a given event. + * + * @param {(String|Symbol)} event The event name. + * @param {Function} fn The listener function. + * @param {*} [context=this] The context to invoke the listener with. + * @returns {EventEmitter} `this`. + * @public + */ + EventEmitter.prototype.once = function once(event, fn, context) { + return addListener(this, event, fn, context, true); + }; + + /** + * Remove the listeners of a given event. + * + * @param {(String|Symbol)} event The event name. + * @param {Function} fn Only remove the listeners that match this function. + * @param {*} context Only remove the listeners that have this context. + * @param {Boolean} once Only remove one-time listeners. + * @returns {EventEmitter} `this`. + * @public + */ + EventEmitter.prototype.removeListener = function removeListener(event, fn, context, once) { + var evt = prefix ? 
prefix + event : event; + + if (!this._events[evt]) return this; + if (!fn) { + clearEvent(this, evt); + return this; + } + + var listeners = this._events[evt]; + + if (listeners.fn) { + if ( + listeners.fn === fn && + (!once || listeners.once) && + (!context || listeners.context === context) + ) { + clearEvent(this, evt); + } + } else { + for (var i = 0, events = [], length = listeners.length; i < length; i++) { + if ( + listeners[i].fn !== fn || + (once && !listeners[i].once) || + (context && listeners[i].context !== context) + ) { + events.push(listeners[i]); + } + } + + // + // Reset the array, or remove it completely if we have no more listeners. + // + if (events.length) this._events[evt] = events.length === 1 ? events[0] : events; + else clearEvent(this, evt); + } + + return this; + }; + + /** + * Remove all listeners, or those of the specified event. + * + * @param {(String|Symbol)} [event] The event name. + * @returns {EventEmitter} `this`. + * @public + */ + EventEmitter.prototype.removeAllListeners = function removeAllListeners(event) { + var evt; + + if (event) { + evt = prefix ? prefix + event : event; + if (this._events[evt]) clearEvent(this, evt); + } else { + this._events = new Events(); + this._eventsCount = 0; + } + + return this; + }; + + // + // Alias methods names because people roll like that. + // + EventEmitter.prototype.off = EventEmitter.prototype.removeListener; + EventEmitter.prototype.addListener = EventEmitter.prototype.on; + + // + // Expose the prefix. + // + EventEmitter.prefixed = prefix; + + // + // Allow `EventEmitter` to be imported as module namespace. + // + EventEmitter.EventEmitter = EventEmitter; + + // + // Expose the module. 
+ // + { + module.exports = EventEmitter; + } + } (eventemitter3)); + + var eventemitter3Exports = eventemitter3.exports; + var EventEmitter = /*@__PURE__*/getDefaultExportFromCjs(eventemitter3Exports); + + class TimeoutError extends Error { + constructor(message) { + super(message); + this.name = 'TimeoutError'; + } + } + + /** + An error to be thrown when the request is aborted by AbortController. + DOMException is thrown instead of this Error when DOMException is available. + */ + let AbortError$4 = class AbortError extends Error { + constructor(message) { + super(); + this.name = 'AbortError'; + this.message = message; + } + }; + + /** + TODO: Remove AbortError and just throw DOMException when targeting Node 18. + */ + const getDOMException = errorMessage => globalThis.DOMException === undefined + ? new AbortError$4(errorMessage) + : new DOMException(errorMessage); + + /** + TODO: Remove below function and just 'reject(signal.reason)' when targeting Node 18. + */ + const getAbortedReason = signal => { + const reason = signal.reason === undefined + ? getDOMException('This operation was aborted.') + : signal.reason; + + return reason instanceof Error ? 
reason : getDOMException(reason); + }; + + function pTimeout(promise, options) { + const { + milliseconds, + fallback, + message, + customTimers = {setTimeout, clearTimeout}, + } = options; + + let timer; + + const wrappedPromise = new Promise((resolve, reject) => { + if (typeof milliseconds !== 'number' || Math.sign(milliseconds) !== 1) { + throw new TypeError(`Expected \`milliseconds\` to be a positive number, got \`${milliseconds}\``); + } + + if (options.signal) { + const {signal} = options; + if (signal.aborted) { + reject(getAbortedReason(signal)); + } + + signal.addEventListener('abort', () => { + reject(getAbortedReason(signal)); + }); + } + + if (milliseconds === Number.POSITIVE_INFINITY) { + promise.then(resolve, reject); + return; + } + + // We create the error outside of `setTimeout` to preserve the stack trace. + const timeoutError = new TimeoutError(); + + timer = customTimers.setTimeout.call(undefined, () => { + if (fallback) { + try { + resolve(fallback()); + } catch (error) { + reject(error); + } + + return; + } + + if (typeof promise.cancel === 'function') { + promise.cancel(); + } + + if (message === false) { + resolve(); + } else if (message instanceof Error) { + reject(message); + } else { + timeoutError.message = message ?? 
`Promise timed out after ${milliseconds} milliseconds`; + reject(timeoutError); + } + }, milliseconds); + + (async () => { + try { + resolve(await promise); + } catch (error) { + reject(error); + } + })(); + }); + + const cancelablePromise = wrappedPromise.finally(() => { + cancelablePromise.clear(); + }); + + cancelablePromise.clear = () => { + customTimers.clearTimeout.call(undefined, timer); + timer = undefined; + }; + + return cancelablePromise; + } + + // Port of lower_bound from https://en.cppreference.com/w/cpp/algorithm/lower_bound + // Used to compute insertion index to keep queue sorted after insertion + function lowerBound(array, value, comparator) { + let first = 0; + let count = array.length; + while (count > 0) { + const step = Math.trunc(count / 2); + let it = first + step; + if (comparator(array[it], value) <= 0) { + first = ++it; + count -= step + 1; + } + else { + count = step; + } + } + return first; + } + + let PriorityQueue$1 = class PriorityQueue { + #queue = []; + enqueue(run, options) { + options = { + priority: 0, + ...options, + }; + const element = { + priority: options.priority, + run, + }; + if (this.size && this.#queue[this.size - 1].priority >= options.priority) { + this.#queue.push(element); + return; + } + const index = lowerBound(this.#queue, element, (a, b) => b.priority - a.priority); + this.#queue.splice(index, 0, element); + } + dequeue() { + const item = this.#queue.shift(); + return item?.run; + } + filter(options) { + return this.#queue.filter((element) => element.priority === options.priority).map((element) => element.run); + } + get size() { + return this.#queue.length; + } + }; + + /** + Promise queue with concurrency control. 
+ */ + class PQueue extends EventEmitter { + #carryoverConcurrencyCount; + #isIntervalIgnored; + #intervalCount = 0; + #intervalCap; + #interval; + #intervalEnd = 0; + #intervalId; + #timeoutId; + #queue; + #queueClass; + #pending = 0; + // The `!` is needed because of https://github.com/microsoft/TypeScript/issues/32194 + #concurrency; + #isPaused; + #throwOnTimeout; + /** + Per-operation timeout in milliseconds. Operations fulfill once `timeout` elapses if they haven't already. + + Applies to each future operation. + */ + timeout; + // TODO: The `throwOnTimeout` option should affect the return types of `add()` and `addAll()` + constructor(options) { + super(); + // eslint-disable-next-line @typescript-eslint/consistent-type-assertions + options = { + carryoverConcurrencyCount: false, + intervalCap: Number.POSITIVE_INFINITY, + interval: 0, + concurrency: Number.POSITIVE_INFINITY, + autoStart: true, + queueClass: PriorityQueue$1, + ...options, + }; + if (!(typeof options.intervalCap === 'number' && options.intervalCap >= 1)) { + throw new TypeError(`Expected \`intervalCap\` to be a number from 1 and up, got \`${options.intervalCap?.toString() ?? ''}\` (${typeof options.intervalCap})`); + } + if (options.interval === undefined || !(Number.isFinite(options.interval) && options.interval >= 0)) { + throw new TypeError(`Expected \`interval\` to be a finite number >= 0, got \`${options.interval?.toString() ?? 
''}\` (${typeof options.interval})`); + } + this.#carryoverConcurrencyCount = options.carryoverConcurrencyCount; + this.#isIntervalIgnored = options.intervalCap === Number.POSITIVE_INFINITY || options.interval === 0; + this.#intervalCap = options.intervalCap; + this.#interval = options.interval; + this.#queue = new options.queueClass(); + this.#queueClass = options.queueClass; + this.concurrency = options.concurrency; + this.timeout = options.timeout; + this.#throwOnTimeout = options.throwOnTimeout === true; + this.#isPaused = options.autoStart === false; + } + get #doesIntervalAllowAnother() { + return this.#isIntervalIgnored || this.#intervalCount < this.#intervalCap; + } + get #doesConcurrentAllowAnother() { + return this.#pending < this.#concurrency; + } + #next() { + this.#pending--; + this.#tryToStartAnother(); + this.emit('next'); + } + #onResumeInterval() { + this.#onInterval(); + this.#initializeIntervalIfNeeded(); + this.#timeoutId = undefined; + } + get #isIntervalPaused() { + const now = Date.now(); + if (this.#intervalId === undefined) { + const delay = this.#intervalEnd - now; + if (delay < 0) { + // Act as the interval was done + // We don't need to resume it here because it will be resumed on line 160 + this.#intervalCount = (this.#carryoverConcurrencyCount) ? 
this.#pending : 0; + } + else { + // Act as the interval is pending + if (this.#timeoutId === undefined) { + this.#timeoutId = setTimeout(() => { + this.#onResumeInterval(); + }, delay); + } + return true; + } + } + return false; + } + #tryToStartAnother() { + if (this.#queue.size === 0) { + // We can clear the interval ("pause") + // Because we can redo it later ("resume") + if (this.#intervalId) { + clearInterval(this.#intervalId); + } + this.#intervalId = undefined; + this.emit('empty'); + if (this.#pending === 0) { + this.emit('idle'); + } + return false; + } + if (!this.#isPaused) { + const canInitializeInterval = !this.#isIntervalPaused; + if (this.#doesIntervalAllowAnother && this.#doesConcurrentAllowAnother) { + const job = this.#queue.dequeue(); + if (!job) { + return false; + } + this.emit('active'); + job(); + if (canInitializeInterval) { + this.#initializeIntervalIfNeeded(); + } + return true; + } + } + return false; + } + #initializeIntervalIfNeeded() { + if (this.#isIntervalIgnored || this.#intervalId !== undefined) { + return; + } + this.#intervalId = setInterval(() => { + this.#onInterval(); + }, this.#interval); + this.#intervalEnd = Date.now() + this.#interval; + } + #onInterval() { + if (this.#intervalCount === 0 && this.#pending === 0 && this.#intervalId) { + clearInterval(this.#intervalId); + this.#intervalId = undefined; + } + this.#intervalCount = this.#carryoverConcurrencyCount ? this.#pending : 0; + this.#processQueue(); + } + /** + Executes all queued functions until it reaches the limit. 
+ */ + #processQueue() { + // eslint-disable-next-line no-empty + while (this.#tryToStartAnother()) { } + } + get concurrency() { + return this.#concurrency; + } + set concurrency(newConcurrency) { + if (!(typeof newConcurrency === 'number' && newConcurrency >= 1)) { + throw new TypeError(`Expected \`concurrency\` to be a number from 1 and up, got \`${newConcurrency}\` (${typeof newConcurrency})`); + } + this.#concurrency = newConcurrency; + this.#processQueue(); + } + async #throwOnAbort(signal) { + return new Promise((_resolve, reject) => { + signal.addEventListener('abort', () => { + reject(signal.reason); + }, { once: true }); + }); + } + async add(function_, options = {}) { + options = { + timeout: this.timeout, + throwOnTimeout: this.#throwOnTimeout, + ...options, + }; + return new Promise((resolve, reject) => { + this.#queue.enqueue(async () => { + this.#pending++; + this.#intervalCount++; + try { + options.signal?.throwIfAborted(); + let operation = function_({ signal: options.signal }); + if (options.timeout) { + operation = pTimeout(Promise.resolve(operation), { milliseconds: options.timeout }); + } + if (options.signal) { + operation = Promise.race([operation, this.#throwOnAbort(options.signal)]); + } + const result = await operation; + resolve(result); + this.emit('completed', result); + } + catch (error) { + if (error instanceof TimeoutError && !options.throwOnTimeout) { + resolve(); + return; + } + reject(error); + this.emit('error', error); + } + finally { + this.#next(); + } + }, options); + this.emit('add'); + this.#tryToStartAnother(); + }); + } + async addAll(functions, options) { + return Promise.all(functions.map(async (function_) => this.add(function_, options))); + } + /** + Start (or resume) executing enqueued tasks within concurrency limit. No need to call this if queue is not paused (via `options.autoStart = false` or by `.pause()` method.) 
+ */ + start() { + if (!this.#isPaused) { + return this; + } + this.#isPaused = false; + this.#processQueue(); + return this; + } + /** + Put queue execution on hold. + */ + pause() { + this.#isPaused = true; + } + /** + Clear the queue. + */ + clear() { + this.#queue = new this.#queueClass(); + } + /** + Can be called multiple times. Useful if you for example add additional items at a later time. + + @returns A promise that settles when the queue becomes empty. + */ + async onEmpty() { + // Instantly resolve if the queue is empty + if (this.#queue.size === 0) { + return; + } + await this.#onEvent('empty'); + } + /** + @returns A promise that settles when the queue size is less than the given limit: `queue.size < limit`. + + If you want to avoid having the queue grow beyond a certain size you can `await queue.onSizeLessThan()` before adding a new item. + + Note that this only limits the number of items waiting to start. There could still be up to `concurrency` jobs already running that this call does not include in its calculation. + */ + async onSizeLessThan(limit) { + // Instantly resolve if the queue is empty. + if (this.#queue.size < limit) { + return; + } + await this.#onEvent('next', () => this.#queue.size < limit); + } + /** + The difference with `.onEmpty` is that `.onIdle` guarantees that all work from the queue has finished. `.onEmpty` merely signals that the queue is empty, but it could mean that some promises haven't completed yet. + + @returns A promise that settles when the queue becomes empty, and all promises have completed; `queue.size === 0 && queue.pending === 0`. 
+ */ + async onIdle() { + // Instantly resolve if none pending and if nothing else is queued + if (this.#pending === 0 && this.#queue.size === 0) { + return; + } + await this.#onEvent('idle'); + } + async #onEvent(event, filter) { + return new Promise(resolve => { + const listener = () => { + if (filter && !filter()) { + return; + } + this.off(event, listener); + resolve(); + }; + this.on(event, listener); + }); + } + /** + Size of the queue, the number of queued items waiting to run. + */ + get size() { + return this.#queue.size; + } + /** + Size of the queue, filtered by the given options. + + For example, this can be used to find the number of items remaining in the queue with a specific priority level. + */ + sizeBy(options) { + // eslint-disable-next-line unicorn/no-array-callback-reference + return this.#queue.filter(options).length; + } + /** + Number of running items (no longer in the queue). + */ + get pending() { + return this.#pending; + } + /** + Whether the queue is currently paused. 
+ */ + get isPaused() { + return this.#isPaused; + } + } + + const events = {}; + const observable = (worker) => { + worker.addEventListener('message', (event) => { + observable.dispatchEvent('message', worker, event); + }); + if (worker.port != null) { + worker.port.addEventListener('message', (event) => { + observable.dispatchEvent('message', worker, event); + }); + } + }; + observable.addEventListener = (type, fn) => { + if (events[type] == null) { + events[type] = []; + } + events[type].push(fn); + }; + observable.removeEventListener = (type, fn) => { + if (events[type] == null) { + return; + } + events[type] = events[type] + .filter(listener => listener === fn); + }; + observable.dispatchEvent = function (type, worker, event) { + if (events[type] == null) { + return; + } + events[type].forEach(fn => fn(worker, event)); + }; + + const WORKER_REQUEST_READ_LOCK = 'lock:worker:request-read'; + const WORKER_RELEASE_READ_LOCK = 'lock:worker:release-read'; + const MASTER_GRANT_READ_LOCK = 'lock:master:grant-read'; + const WORKER_REQUEST_WRITE_LOCK = 'lock:worker:request-write'; + const WORKER_RELEASE_WRITE_LOCK = 'lock:worker:release-write'; + const MASTER_GRANT_WRITE_LOCK = 'lock:master:grant-write'; + + const nanoid = (size = 21) => { + return Math.random().toString().substring(2); + }; + + const handleWorkerLockRequest = (emitter, masterEvent, requestType, releaseType, grantType) => { + return (worker, event) => { + if (event.data.type !== requestType) { + return; + } + const requestEvent = { + type: event.data.type, + name: event.data.name, + identifier: event.data.identifier + }; + emitter.dispatchEvent(new MessageEvent(masterEvent, { + data: { + name: requestEvent.name, + handler: async () => { + // grant lock to worker + worker.postMessage({ + type: grantType, + name: requestEvent.name, + identifier: requestEvent.identifier + }); + // wait for worker to finish + await new Promise((resolve) => { + const releaseEventListener = (event) => { + if (event == null || 
event.data == null) { + return; + } + const releaseEvent = { + type: event.data.type, + name: event.data.name, + identifier: event.data.identifier + }; + if (releaseEvent.type === releaseType && releaseEvent.identifier === requestEvent.identifier) { + worker.removeEventListener('message', releaseEventListener); + resolve(); + } + }; + worker.addEventListener('message', releaseEventListener); + }); + } + } + })); + }; + }; + const makeWorkerLockRequest = (name, requestType, grantType, releaseType) => { + return async () => { + const id = nanoid(); + globalThis.postMessage({ + type: requestType, + identifier: id, + name + }); + return new Promise((resolve) => { + const listener = (event) => { + if (event == null || event.data == null) { + return; + } + const responseEvent = { + type: event.data.type, + identifier: event.data.identifier + }; + if (responseEvent.type === grantType && responseEvent.identifier === id) { + globalThis.removeEventListener('message', listener); + // grant lock + resolve(() => { + // release lock + globalThis.postMessage({ + type: releaseType, + identifier: id, + name + }); + }); + } + }; + globalThis.addEventListener('message', listener); + }); + }; + }; + const defaultOptions$5 = { + singleProcess: false + }; + var impl = (options) => { + options = Object.assign({}, defaultOptions$5, options); + const isPrimary = Boolean(globalThis.document) || options.singleProcess; + if (isPrimary) { + const emitter = new EventTarget(); + observable.addEventListener('message', handleWorkerLockRequest(emitter, 'requestReadLock', WORKER_REQUEST_READ_LOCK, WORKER_RELEASE_READ_LOCK, MASTER_GRANT_READ_LOCK)); + observable.addEventListener('message', handleWorkerLockRequest(emitter, 'requestWriteLock', WORKER_REQUEST_WRITE_LOCK, WORKER_RELEASE_WRITE_LOCK, MASTER_GRANT_WRITE_LOCK)); + return emitter; + } + return { + isWorker: true, + readLock: (name) => makeWorkerLockRequest(name, WORKER_REQUEST_READ_LOCK, MASTER_GRANT_READ_LOCK, WORKER_RELEASE_READ_LOCK), + 
writeLock: (name) => makeWorkerLockRequest(name, WORKER_REQUEST_WRITE_LOCK, MASTER_GRANT_WRITE_LOCK, WORKER_RELEASE_WRITE_LOCK) + }; + }; + + /** + * @packageDocumentation + * + * - Reads occur concurrently + * - Writes occur one at a time + * - No reads occur while a write operation is in progress + * - Locks can be created with different names + * - Reads/writes can time out + * + * ## Usage + * + * ```javascript + * import mortice from 'mortice' + * import delay from 'delay' + * + * // the lock name & options objects are both optional + * const mutex = mortice('my-lock', { + * + * // how long before write locks time out (default: 24 hours) + * timeout: 30000, + * + * // control how many read operations are executed concurrently (default: Infinity) + * concurrency: 5, + * + * // by default the the lock will be held on the main thread, set this to true if the + * // a lock should reside on each worker (default: false) + * singleProcess: false + * }) + * + * Promise.all([ + * (async () => { + * const release = await mutex.readLock() + * + * try { + * console.info('read 1') + * } finally { + * release() + * } + * })(), + * (async () => { + * const release = await mutex.readLock() + * + * try { + * console.info('read 2') + * } finally { + * release() + * } + * })(), + * (async () => { + * const release = await mutex.writeLock() + * + * try { + * await delay(1000) + * + * console.info('write 1') + * } finally { + * release() + * } + * })(), + * (async () => { + * const release = await mutex.readLock() + * + * try { + * console.info('read 3') + * } finally { + * release() + * } + * })() + * ]) + * ``` + * + * read 1 + * read 2 + * + * write 1 + * read 3 + * + * ## Browser + * + * Because there's no global way to evesdrop on messages sent by Web Workers, please pass all created Web Workers to the [`observable-webworkers`](https://npmjs.org/package/observable-webworkers) module: + * + * ```javascript + * // main.js + * import mortice from 'mortice' + * import observe 
from 'observable-webworkers' + * + * // create our lock on the main thread, it will be held here + * const mutex = mortice() + * + * const worker = new Worker('worker.js') + * + * observe(worker) + * ``` + * + * ```javascript + * // worker.js + * import mortice from 'mortice' + * import delay from 'delay' + * + * const mutex = mortice() + * + * let release = await mutex.readLock() + * // read something + * release() + * + * release = await mutex.writeLock() + * // write something + * release() + * ``` + */ + const mutexes = {}; + let implementation; + async function createReleaseable(queue, options) { + let res; + const p = new Promise((resolve) => { + res = resolve; + }); + void queue.add(async () => pTimeout((async () => { + await new Promise((resolve) => { + res(() => { + resolve(); + }); + }); + })(), { + milliseconds: options.timeout + })); + return p; + } + const createMutex = (name, options) => { + if (implementation.isWorker === true) { + return { + readLock: implementation.readLock(name, options), + writeLock: implementation.writeLock(name, options) + }; + } + const masterQueue = new PQueue({ concurrency: 1 }); + let readQueue; + return { + async readLock() { + // If there's already a read queue, just add the task to it + if (readQueue != null) { + return createReleaseable(readQueue, options); + } + // Create a new read queue + readQueue = new PQueue({ + concurrency: options.concurrency, + autoStart: false + }); + const localReadQueue = readQueue; + // Add the task to the read queue + const readPromise = createReleaseable(readQueue, options); + void masterQueue.add(async () => { + // Start the task only once the master queue has completed processing + // any previous tasks + localReadQueue.start(); + // Once all the tasks in the read queue have completed, remove it so + // that the next read lock will occur after any write locks that were + // started in the interim + await localReadQueue.onIdle() + .then(() => { + if (readQueue === localReadQueue) { + 
readQueue = null; + } + }); + }); + return readPromise; + }, + async writeLock() { + // Remove the read queue reference, so that any later read locks will be + // added to a new queue that starts after this write lock has been + // released + readQueue = null; + return createReleaseable(masterQueue, options); + } + }; + }; + const defaultOptions$4 = { + name: 'lock', + concurrency: Infinity, + timeout: 84600000, + singleProcess: false + }; + function createMortice(options) { + const opts = Object.assign({}, defaultOptions$4, options); + if (implementation == null) { + implementation = impl(opts); + if (implementation.isWorker !== true) { + // we are master, set up worker requests + implementation.addEventListener('requestReadLock', (event) => { + if (mutexes[event.data.name] == null) { + return; + } + void mutexes[event.data.name].readLock() + .then(async (release) => event.data.handler().finally(() => { release(); })); + }); + implementation.addEventListener('requestWriteLock', async (event) => { + if (mutexes[event.data.name] == null) { + return; + } + void mutexes[event.data.name].writeLock() + .then(async (release) => event.data.handler().finally(() => { release(); })); + }); + } + } + if (mutexes[opts.name] == null) { + mutexes[opts.name] = createMutex(opts.name, opts); + } + return mutexes[opts.name]; + } + + const codes$1 = { + ERR_INVALID_PARAMETERS: 'ERR_INVALID_PARAMETERS' + }; + + /* eslint-disable import/export */ + /* eslint-disable complexity */ + /* eslint-disable @typescript-eslint/no-namespace */ + /* eslint-disable @typescript-eslint/no-unnecessary-boolean-literal-compare */ + /* eslint-disable @typescript-eslint/no-empty-interface */ + var Peer$2; + (function (Peer) { + (function (Peer$metadataEntry) { + let _codec; + Peer$metadataEntry.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if ((obj.key != null && obj.key !== '')) { + w.uint32(10); + 
w.string(obj.key); + } + if ((obj.value != null && obj.value.byteLength > 0)) { + w.uint32(18); + w.bytes(obj.value); + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length) => { + const obj = { + key: '', + value: new Uint8Array(0) + }; + const end = length == null ? reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + obj.key = reader.string(); + break; + case 2: + obj.value = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return obj; + }); + } + return _codec; + }; + Peer$metadataEntry.encode = (obj) => { + return encodeMessage(obj, Peer$metadataEntry.codec()); + }; + Peer$metadataEntry.decode = (buf) => { + return decodeMessage(buf, Peer$metadataEntry.codec()); + }; + })(Peer.Peer$metadataEntry || (Peer.Peer$metadataEntry = {})); + (function (Peer$tagsEntry) { + let _codec; + Peer$tagsEntry.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if ((obj.key != null && obj.key !== '')) { + w.uint32(10); + w.string(obj.key); + } + if (obj.value != null) { + w.uint32(18); + Tag.codec().encode(obj.value, w); + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length) => { + const obj = { + key: '' + }; + const end = length == null ? 
reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + obj.key = reader.string(); + break; + case 2: + obj.value = Tag.codec().decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return obj; + }); + } + return _codec; + }; + Peer$tagsEntry.encode = (obj) => { + return encodeMessage(obj, Peer$tagsEntry.codec()); + }; + Peer$tagsEntry.decode = (buf) => { + return decodeMessage(buf, Peer$tagsEntry.codec()); + }; + })(Peer.Peer$tagsEntry || (Peer.Peer$tagsEntry = {})); + let _codec; + Peer.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if (obj.addresses != null) { + for (const value of obj.addresses) { + w.uint32(10); + Address.codec().encode(value, w); + } + } + if (obj.protocols != null) { + for (const value of obj.protocols) { + w.uint32(18); + w.string(value); + } + } + if (obj.publicKey != null) { + w.uint32(34); + w.bytes(obj.publicKey); + } + if (obj.peerRecordEnvelope != null) { + w.uint32(42); + w.bytes(obj.peerRecordEnvelope); + } + if (obj.metadata != null && obj.metadata.size !== 0) { + for (const [key, value] of obj.metadata.entries()) { + w.uint32(50); + Peer.Peer$metadataEntry.codec().encode({ key, value }, w); + } + } + if (obj.tags != null && obj.tags.size !== 0) { + for (const [key, value] of obj.tags.entries()) { + w.uint32(58); + Peer.Peer$tagsEntry.codec().encode({ key, value }, w); + } + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length) => { + const obj = { + addresses: [], + protocols: [], + metadata: new Map(), + tags: new Map() + }; + const end = length == null ? 
reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + obj.addresses.push(Address.codec().decode(reader, reader.uint32())); + break; + case 2: + obj.protocols.push(reader.string()); + break; + case 4: + obj.publicKey = reader.bytes(); + break; + case 5: + obj.peerRecordEnvelope = reader.bytes(); + break; + case 6: { + const entry = Peer.Peer$metadataEntry.codec().decode(reader, reader.uint32()); + obj.metadata.set(entry.key, entry.value); + break; + } + case 7: { + const entry = Peer.Peer$tagsEntry.codec().decode(reader, reader.uint32()); + obj.tags.set(entry.key, entry.value); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return obj; + }); + } + return _codec; + }; + Peer.encode = (obj) => { + return encodeMessage(obj, Peer.codec()); + }; + Peer.decode = (buf) => { + return decodeMessage(buf, Peer.codec()); + }; + })(Peer$2 || (Peer$2 = {})); + var Address; + (function (Address) { + let _codec; + Address.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if ((obj.multiaddr != null && obj.multiaddr.byteLength > 0)) { + w.uint32(10); + w.bytes(obj.multiaddr); + } + if (obj.isCertified != null) { + w.uint32(16); + w.bool(obj.isCertified); + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length) => { + const obj = { + multiaddr: new Uint8Array(0) + }; + const end = length == null ? 
reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + obj.multiaddr = reader.bytes(); + break; + case 2: + obj.isCertified = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return obj; + }); + } + return _codec; + }; + Address.encode = (obj) => { + return encodeMessage(obj, Address.codec()); + }; + Address.decode = (buf) => { + return decodeMessage(buf, Address.codec()); + }; + })(Address || (Address = {})); + var Tag; + (function (Tag) { + let _codec; + Tag.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if ((obj.value != null && obj.value !== 0)) { + w.uint32(8); + w.uint32(obj.value); + } + if (obj.expiry != null) { + w.uint32(16); + w.uint64(obj.expiry); + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length) => { + const obj = { + value: 0 + }; + const end = length == null ? 
reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + obj.value = reader.uint32(); + break; + case 2: + obj.expiry = reader.uint64(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return obj; + }); + } + return _codec; + }; + Tag.encode = (obj) => { + return encodeMessage(obj, Tag.codec()); + }; + Tag.decode = (buf) => { + return decodeMessage(buf, Tag.codec()); + }; + })(Tag || (Tag = {})); + + function bytesToPeer(peerId, buf) { + const peer = Peer$2.decode(buf); + if (peer.publicKey != null && peerId.publicKey == null) { + peerId = peerIdFromPeerId({ + ...peerId, + publicKey: peerId.publicKey + }); + } + const tags = new Map(); + // remove any expired tags + const now = BigInt(Date.now()); + for (const [key, tag] of peer.tags.entries()) { + if (tag.expiry != null && tag.expiry < now) { + continue; + } + tags.set(key, tag); + } + return { + ...peer, + id: peerId, + addresses: peer.addresses.map(({ multiaddr: ma, isCertified }) => { + return { + multiaddr: multiaddr(ma), + isCertified: isCertified ?? false + }; + }), + metadata: peer.metadata, + peerRecordEnvelope: peer.peerRecordEnvelope ?? 
undefined, + tags + }; + } + + const NAMESPACE_COMMON = '/peers/'; + function peerIdToDatastoreKey(peerId) { + if (!isPeerId(peerId) || peerId.type == null) { + throw new CodeError$2('Invalid PeerId', codes$1.ERR_INVALID_PARAMETERS); + } + const b32key = peerId.toCID().toString(); + return new Key(`${NAMESPACE_COMMON}${b32key}`); + } + + async function dedupeFilterAndSortAddresses(peerId, filter, addresses) { + const addressMap = new Map(); + for (const addr of addresses) { + if (addr == null) { + continue; + } + if (addr.multiaddr instanceof Uint8Array) { + addr.multiaddr = multiaddr(addr.multiaddr); + } + if (!isMultiaddr(addr.multiaddr)) { + throw new CodeError$2('Multiaddr was invalid', codes$1.ERR_INVALID_PARAMETERS); + } + if (!(await filter(peerId, addr.multiaddr))) { + continue; + } + const isCertified = addr.isCertified ?? false; + const maStr = addr.multiaddr.toString(); + const existingAddr = addressMap.get(maStr); + if (existingAddr != null) { + addr.isCertified = existingAddr.isCertified || isCertified; + } + else { + addressMap.set(maStr, { + multiaddr: addr.multiaddr, + isCertified + }); + } + } + return [...addressMap.values()] + .sort((a, b) => { + return a.multiaddr.toString().localeCompare(b.multiaddr.toString()); + }) + .map(({ isCertified, multiaddr }) => ({ + isCertified, + multiaddr: multiaddr.bytes + })); + } + + async function toPeerPB(peerId, data, strategy, options) { + if (data == null) { + throw new CodeError$2('Invalid PeerData', codes$1.ERR_INVALID_PARAMETERS); + } + if (data.publicKey != null && peerId.publicKey != null && !equals(data.publicKey, peerId.publicKey)) { + throw new CodeError$2('publicKey bytes do not match peer id publicKey bytes', codes$1.ERR_INVALID_PARAMETERS); + } + const existingPeer = options.existingPeer; + if (existingPeer != null && !peerId.equals(existingPeer.id)) { + throw new CodeError$2('peer id did not match existing peer id', codes$1.ERR_INVALID_PARAMETERS); + } + let addresses = existingPeer?.addresses 
?? []; + let protocols = new Set(existingPeer?.protocols ?? []); + let metadata = existingPeer?.metadata ?? new Map(); + let tags = existingPeer?.tags ?? new Map(); + let peerRecordEnvelope = existingPeer?.peerRecordEnvelope; + // when patching, we replace the original fields with passed values + if (strategy === 'patch') { + if (data.multiaddrs != null || data.addresses != null) { + addresses = []; + if (data.multiaddrs != null) { + addresses.push(...data.multiaddrs.map(multiaddr => ({ + isCertified: false, + multiaddr + }))); + } + if (data.addresses != null) { + addresses.push(...data.addresses); + } + } + if (data.protocols != null) { + protocols = new Set(data.protocols); + } + if (data.metadata != null) { + const metadataEntries = data.metadata instanceof Map ? [...data.metadata.entries()] : Object.entries(data.metadata); + metadata = createSortedMap(metadataEntries, { + validate: validateMetadata + }); + } + if (data.tags != null) { + const tagsEntries = data.tags instanceof Map ? [...data.tags.entries()] : Object.entries(data.tags); + tags = createSortedMap(tagsEntries, { + validate: validateTag, + map: mapTag + }); + } + if (data.peerRecordEnvelope != null) { + peerRecordEnvelope = data.peerRecordEnvelope; + } + } + // when merging, we join the original fields with passed values + if (strategy === 'merge') { + if (data.multiaddrs != null) { + addresses.push(...data.multiaddrs.map(multiaddr => ({ + isCertified: false, + multiaddr + }))); + } + if (data.addresses != null) { + addresses.push(...data.addresses); + } + if (data.protocols != null) { + protocols = new Set([...protocols, ...data.protocols]); + } + if (data.metadata != null) { + const metadataEntries = data.metadata instanceof Map ? 
[...data.metadata.entries()] : Object.entries(data.metadata); + for (const [key, value] of metadataEntries) { + if (value == null) { + metadata.delete(key); + } + else { + metadata.set(key, value); + } + } + metadata = createSortedMap([...metadata.entries()], { + validate: validateMetadata + }); + } + if (data.tags != null) { + const tagsEntries = data.tags instanceof Map ? [...data.tags.entries()] : Object.entries(data.tags); + const mergedTags = new Map(tags); + for (const [key, value] of tagsEntries) { + if (value == null) { + mergedTags.delete(key); + } + else { + mergedTags.set(key, value); + } + } + tags = createSortedMap([...mergedTags.entries()], { + validate: validateTag, + map: mapTag + }); + } + if (data.peerRecordEnvelope != null) { + peerRecordEnvelope = data.peerRecordEnvelope; + } + } + const output = { + addresses: await dedupeFilterAndSortAddresses(peerId, options.addressFilter ?? (async () => true), addresses), + protocols: [...protocols.values()].sort((a, b) => { + return a.localeCompare(b); + }), + metadata, + tags, + publicKey: existingPeer?.id.publicKey ?? data.publicKey ?? peerId.publicKey, + peerRecordEnvelope + }; + // Ed25519 and secp256k1 have their public key embedded in them so no need to duplicate it + if (peerId.type !== 'RSA') { + delete output.publicKey; + } + return output; + } + /** + * In JS maps are ordered by insertion order so create a new map with the + * keys inserted in alphabetical order. + */ + function createSortedMap(entries, options) { + const output = new Map(); + for (const [key, value] of entries) { + if (value == null) { + continue; + } + options.validate(key, value); + } + for (const [key, value] of entries.sort(([a], [b]) => { + return a.localeCompare(b); + })) { + if (value != null) { + output.set(key, options.map?.(key, value) ?? 
value); + } + } + return output; + } + function validateMetadata(key, value) { + if (typeof key !== 'string') { + throw new CodeError$2('Metadata key must be a string', codes$1.ERR_INVALID_PARAMETERS); + } + if (!(value instanceof Uint8Array)) { + throw new CodeError$2('Metadata value must be a Uint8Array', codes$1.ERR_INVALID_PARAMETERS); + } + } + function validateTag(key, tag) { + if (typeof key !== 'string') { + throw new CodeError$2('Tag name must be a string', codes$1.ERR_INVALID_PARAMETERS); + } + if (tag.value != null) { + if (parseInt(`${tag.value}`, 10) !== tag.value) { + throw new CodeError$2('Tag value must be an integer', codes$1.ERR_INVALID_PARAMETERS); + } + if (tag.value < 0 || tag.value > 100) { + throw new CodeError$2('Tag value must be between 0-100', codes$1.ERR_INVALID_PARAMETERS); + } + } + if (tag.ttl != null) { + if (parseInt(`${tag.ttl}`, 10) !== tag.ttl) { + throw new CodeError$2('Tag ttl must be an integer', codes$1.ERR_INVALID_PARAMETERS); + } + if (tag.ttl < 0) { + throw new CodeError$2('Tag ttl must be between greater than 0', codes$1.ERR_INVALID_PARAMETERS); + } + } + } + function mapTag(key, tag) { + let expiry; + if (tag.expiry != null) { + expiry = tag.expiry; + } + if (tag.ttl != null) { + expiry = BigInt(Date.now() + Number(tag.ttl)); + } + return { + value: tag.value ?? 0, + expiry + }; + } + + function decodePeer(key, value, cache) { + // /peers/${peer-id-as-libp2p-key-cid-string-in-base-32} + const base32Str = key.toString().split('/')[2]; + const buf = base32.decode(base32Str); + const peerId = peerIdFromBytes(buf); + const cached = cache.get(peerId); + if (cached != null) { + return cached; + } + const peer = bytesToPeer(peerId, value); + cache.set(peerId, peer); + return peer; + } + function mapQuery(query, cache) { + if (query == null) { + return {}; + } + return { + prefix: NAMESPACE_COMMON, + filters: (query.filters ?? 
[]).map(fn => ({ key, value }) => { + return fn(decodePeer(key, value, cache)); + }), + orders: (query.orders ?? []).map(fn => (a, b) => { + return fn(decodePeer(a.key, a.value, cache), decodePeer(b.key, b.value, cache)); + }) + }; + } + class PersistentStore { + peerId; + datastore; + lock; + addressFilter; + constructor(components, init = {}) { + this.peerId = components.peerId; + this.datastore = components.datastore; + this.addressFilter = init.addressFilter; + this.lock = createMortice({ + name: 'peer-store', + singleProcess: true + }); + } + async has(peerId) { + return this.datastore.has(peerIdToDatastoreKey(peerId)); + } + async delete(peerId) { + if (this.peerId.equals(peerId)) { + throw new CodeError$2('Cannot delete self peer', codes$1.ERR_INVALID_PARAMETERS); + } + await this.datastore.delete(peerIdToDatastoreKey(peerId)); + } + async load(peerId) { + const buf = await this.datastore.get(peerIdToDatastoreKey(peerId)); + return bytesToPeer(peerId, buf); + } + async save(peerId, data) { + const { existingBuf, existingPeer } = await this.#findExistingPeer(peerId); + const peerPb = await toPeerPB(peerId, data, 'patch', { + addressFilter: this.addressFilter + }); + return this.#saveIfDifferent(peerId, peerPb, existingBuf, existingPeer); + } + async patch(peerId, data) { + const { existingBuf, existingPeer } = await this.#findExistingPeer(peerId); + const peerPb = await toPeerPB(peerId, data, 'patch', { + addressFilter: this.addressFilter, + existingPeer + }); + return this.#saveIfDifferent(peerId, peerPb, existingBuf, existingPeer); + } + async merge(peerId, data) { + const { existingBuf, existingPeer } = await this.#findExistingPeer(peerId); + const peerPb = await toPeerPB(peerId, data, 'merge', { + addressFilter: this.addressFilter, + existingPeer + }); + return this.#saveIfDifferent(peerId, peerPb, existingBuf, existingPeer); + } + async *all(query) { + const peerCache = new PeerMap(); + for await (const { key, value } of 
this.datastore.query(mapQuery(query ?? {}, peerCache))) { + const peer = decodePeer(key, value, peerCache); + if (peer.id.equals(this.peerId)) { + // Skip self peer if present + continue; + } + yield peer; + } + } + async #findExistingPeer(peerId) { + try { + const existingBuf = await this.datastore.get(peerIdToDatastoreKey(peerId)); + const existingPeer = bytesToPeer(peerId, existingBuf); + return { + existingBuf, + existingPeer + }; + } + catch (err) { + if (err.code !== 'ERR_NOT_FOUND') { + throw err; + } + } + return {}; + } + async #saveIfDifferent(peerId, peer, existingBuf, existingPeer) { + const buf = Peer$2.encode(peer); + if (existingBuf != null && equals(buf, existingBuf)) { + return { + peer: bytesToPeer(peerId, buf), + previous: existingPeer, + updated: false + }; + } + await this.datastore.put(peerIdToDatastoreKey(peerId), buf); + return { + peer: bytesToPeer(peerId, buf), + previous: existingPeer, + updated: true + }; + } + } + + /** + * @packageDocumentation + * + * The peer store is where libp2p stores data about the peers it has encountered on the network. 
+ */ + /** + * An implementation of PeerStore that stores data in a Datastore + */ + class PersistentPeerStore { + store; + events; + peerId; + log; + constructor(components, init = {}) { + this.log = components.logger.forComponent('libp2p:peer-store'); + this.events = components.events; + this.peerId = components.peerId; + this.store = new PersistentStore(components, init); + } + async forEach(fn, query) { + this.log.trace('forEach await read lock'); + const release = await this.store.lock.readLock(); + this.log.trace('forEach got read lock'); + try { + for await (const peer of this.store.all(query)) { + fn(peer); + } + } + finally { + this.log.trace('forEach release read lock'); + release(); + } + } + async all(query) { + this.log.trace('all await read lock'); + const release = await this.store.lock.readLock(); + this.log.trace('all got read lock'); + try { + return await all(this.store.all(query)); + } + finally { + this.log.trace('all release read lock'); + release(); + } + } + async delete(peerId) { + this.log.trace('delete await write lock'); + const release = await this.store.lock.writeLock(); + this.log.trace('delete got write lock'); + try { + await this.store.delete(peerId); + } + finally { + this.log.trace('delete release write lock'); + release(); + } + } + async has(peerId) { + this.log.trace('has await read lock'); + const release = await this.store.lock.readLock(); + this.log.trace('has got read lock'); + try { + return await this.store.has(peerId); + } + finally { + this.log.trace('has release read lock'); + release(); + } + } + async get(peerId) { + this.log.trace('get await read lock'); + const release = await this.store.lock.readLock(); + this.log.trace('get got read lock'); + try { + return await this.store.load(peerId); + } + finally { + this.log.trace('get release read lock'); + release(); + } + } + async save(id, data) { + this.log.trace('save await write lock'); + const release = await this.store.lock.writeLock(); + this.log.trace('save got 
write lock'); + try { + const result = await this.store.save(id, data); + this.#emitIfUpdated(id, result); + return result.peer; + } + finally { + this.log.trace('save release write lock'); + release(); + } + } + async patch(id, data) { + this.log.trace('patch await write lock'); + const release = await this.store.lock.writeLock(); + this.log.trace('patch got write lock'); + try { + const result = await this.store.patch(id, data); + this.#emitIfUpdated(id, result); + return result.peer; + } + finally { + this.log.trace('patch release write lock'); + release(); + } + } + async merge(id, data) { + this.log.trace('merge await write lock'); + const release = await this.store.lock.writeLock(); + this.log.trace('merge got write lock'); + try { + const result = await this.store.merge(id, data); + this.#emitIfUpdated(id, result); + return result.peer; + } + finally { + this.log.trace('merge release write lock'); + release(); + } + } + async consumePeerRecord(buf, expectedPeer) { + const envelope = await RecordEnvelope.openAndCertify(buf, PeerRecord.DOMAIN); + if (expectedPeer?.equals(envelope.peerId) === false) { + this.log('envelope peer id was not the expected peer id - expected: %p received: %p', expectedPeer, envelope.peerId); + return false; + } + const peerRecord = PeerRecord.createFromProtobuf(envelope.payload); + let peer; + try { + peer = await this.get(envelope.peerId); + } + catch (err) { + if (err.code !== 'ERR_NOT_FOUND') { + throw err; + } + } + // ensure seq is greater than, or equal to, the last received + if (peer?.peerRecordEnvelope != null) { + const storedEnvelope = await RecordEnvelope.createFromProtobuf(peer.peerRecordEnvelope); + const storedRecord = PeerRecord.createFromProtobuf(storedEnvelope.payload); + if (storedRecord.seqNumber >= peerRecord.seqNumber) { + this.log('sequence number was lower or equal to existing sequence number - stored: %d received: %d', storedRecord.seqNumber, peerRecord.seqNumber); + return false; + } + } + await 
this.patch(peerRecord.peerId, { + peerRecordEnvelope: buf, + addresses: peerRecord.multiaddrs.map(multiaddr => ({ + isCertified: true, + multiaddr + })) + }); + return true; + } + #emitIfUpdated(id, result) { + if (!result.updated) { + return; + } + if (this.peerId.equals(id)) { + this.events.safeDispatchEvent('self:peer:update', { detail: result }); + } + else { + this.events.safeDispatchEvent('peer:update', { detail: result }); + } + } + } + + /** + * @packageDocumentation + * + * Mostly useful for tests or when you want to be explicit about consuming an iterable without doing anything with any yielded values. + * + * @example + * + * ```javascript + * import drain from 'it-drain' + * + * // This can also be an iterator, generator, etc + * const values = [0, 1, 2, 3, 4] + * + * drain(values) + * ``` + * + * Async sources must be awaited: + * + * ```javascript + * import drain from 'it-drain' + * + * const values = async function * { + * yield * [0, 1, 2, 3, 4] + * } + * + * await drain(values()) + * ``` + */ + function isAsyncIterable$9(thing) { + return thing[Symbol.asyncIterator] != null; + } + function drain(source) { + if (isAsyncIterable$9(source)) { + return (async () => { + for await (const _ of source) { } // eslint-disable-line no-unused-vars,no-empty,@typescript-eslint/no-unused-vars + })(); + } + else { + for (const _ of source) { } // eslint-disable-line no-unused-vars,no-empty,@typescript-eslint/no-unused-vars + } + } + + /** + * @packageDocumentation + * + * Lets you look at the contents of an async iterator and decide what to do + * + * @example + * + * ```javascript + * import peekable from 'it-peekable' + * + * // This can also be an iterator, generator, etc + * const values = [0, 1, 2, 3, 4] + * + * const it = peekable(value) + * + * const first = it.peek() + * + * console.info(first) // 0 + * + * it.push(first) + * + * console.info([...it]) + * // [ 0, 1, 2, 3, 4 ] + * ``` + * + * Async sources must be awaited: + * + * ```javascript + * import 
peekable from 'it-peekable' + * + * const values = async function * () { + * yield * [0, 1, 2, 3, 4] + * } + * + * const it = peekable(values()) + * + * const first = await it.peek() + * + * console.info(first) // 0 + * + * it.push(first) + * + * console.info(await all(it)) + * // [ 0, 1, 2, 3, 4 ] + * ``` + */ + function peekable(iterable) { + // @ts-expect-error can't use Symbol.asyncIterator to index iterable since it might be Iterable + const [iterator, symbol] = iterable[Symbol.asyncIterator] != null + // @ts-expect-error can't use Symbol.asyncIterator to index iterable since it might be Iterable + ? [iterable[Symbol.asyncIterator](), Symbol.asyncIterator] + // @ts-expect-error can't use Symbol.iterator to index iterable since it might be AsyncIterable + : [iterable[Symbol.iterator](), Symbol.iterator]; + const queue = []; + // @ts-expect-error can't use symbol to index peekable + return { + peek: () => { + return iterator.next(); + }, + push: (value) => { + queue.push(value); + }, + next: () => { + if (queue.length > 0) { + return { + done: false, + value: queue.shift() + }; + } + return iterator.next(); + }, + [symbol]() { + return this; + } + }; + } + + /** + * @packageDocumentation + * + * Filter values out of an (async)iterable + * + * @example + * + * ```javascript + * import all from 'it-all' + * import filter from 'it-filter' + * + * // This can also be an iterator, generator, etc + * const values = [0, 1, 2, 3, 4] + * + * const fn = (val, index) => val > 2 // Return boolean to keep item + * + * const arr = all(filter(values, fn)) + * + * console.info(arr) // 3, 4 + * ``` + * + * Async sources and filter functions must be awaited: + * + * ```javascript + * import all from 'it-all' + * import filter from 'it-filter' + * + * const values = async function * () { + * yield * [0, 1, 2, 3, 4] + * } + * + * const fn = async val => (val, index) > 2 // Return boolean or promise of boolean to keep item + * + * const arr = await all(filter(values, fn)) + * + * 
console.info(arr) // 3, 4 + * ``` + */ + function isAsyncIterable$8(thing) { + return thing[Symbol.asyncIterator] != null; + } + function filter(source, fn) { + let index = 0; + if (isAsyncIterable$8(source)) { + return (async function* () { + for await (const entry of source) { + if (await fn(entry, index++)) { + yield entry; + } + } + })(); + } + // if mapping function returns a promise we have to return an async generator + const peekable$1 = peekable(source); + const { value, done } = peekable$1.next(); + if (done === true) { + return (function* () { }()); + } + const res = fn(value, index++); + // @ts-expect-error .then is not present on O + if (typeof res.then === 'function') { + return (async function* () { + if (await res) { + yield value; + } + for await (const entry of peekable$1) { + if (await fn(entry, index++)) { + yield entry; + } + } + })(); + } + const func = fn; + return (function* () { + if (res === true) { + yield value; + } + for (const entry of peekable$1) { + if (func(entry, index++)) { + yield entry; + } + } + })(); + } + + /** + * @packageDocumentation + * + * Consumes all values from an (async)iterable and returns them sorted by the passed sort function. 
+ * + * @example + * + * ```javascript + * import sort from 'it-sort' + * import all from 'it-all' + * + * const sorter = (a, b) => { + * return a.localeCompare(b) + * } + * + * // This can also be an iterator, generator, etc + * const values = ['foo', 'bar'] + * + * const arr = all(sort(values, sorter)) + * + * console.info(arr) // 'bar', 'foo' + * ``` + * + * Async sources must be awaited: + * + * ```javascript + * import sort from 'it-sort' + * import all from 'it-all' + * + * const sorter = (a, b) => { + * return a.localeCompare(b) + * } + * + * const values = async function * () { + * yield * ['foo', 'bar'] + * } + * + * const arr = await all(sort(values, sorter)) + * + * console.info(arr) // 'bar', 'foo' + * ``` + */ + function isAsyncIterable$7(thing) { + return thing[Symbol.asyncIterator] != null; + } + function sort(source, sorter) { + if (isAsyncIterable$7(source)) { + return (async function* () { + const arr = await all(source); + yield* arr.sort(sorter); + })(); + } + return (function* () { + const arr = all(source); + yield* arr.sort(sorter); + })(); + } + + /** + * @packageDocumentation + * + * For when you only want a few values out of an (async)iterable. 
+ * + * @example + * + * ```javascript + * import take from 'it-take' + * import all from 'it-all' + * + * // This can also be an iterator, generator, etc + * const values = [0, 1, 2, 3, 4] + * + * const arr = all(take(values, 2)) + * + * console.info(arr) // 0, 1 + * ``` + * + * Async sources must be awaited: + * + * ```javascript + * import take from 'it-take' + * import all from 'it-all' + * + * const values = async function * () { + * yield * [0, 1, 2, 3, 4] + * } + * + * const arr = await all(take(values(), 2)) + * + * console.info(arr) // 0, 1 + * ``` + */ + function isAsyncIterable$6(thing) { + return thing[Symbol.asyncIterator] != null; + } + function take(source, limit) { + if (isAsyncIterable$6(source)) { + return (async function* () { + let items = 0; + if (limit < 1) { + return; + } + for await (const entry of source) { + yield entry; + items++; + if (items === limit) { + return; + } + } + })(); + } + return (function* () { + let items = 0; + if (limit < 1) { + return; + } + for (const entry of source) { + yield entry; + items++; + if (items === limit) { + return; + } + } + })(); + } + + class BaseDatastore { + put(key, val, options) { + return Promise.reject(new Error('.put is not implemented')); + } + get(key, options) { + return Promise.reject(new Error('.get is not implemented')); + } + has(key, options) { + return Promise.reject(new Error('.has is not implemented')); + } + delete(key, options) { + return Promise.reject(new Error('.delete is not implemented')); + } + async *putMany(source, options = {}) { + for await (const { key, value } of source) { + await this.put(key, value, options); + yield key; + } + } + async *getMany(source, options = {}) { + for await (const key of source) { + yield { + key, + value: await this.get(key, options) + }; + } + } + async *deleteMany(source, options = {}) { + for await (const key of source) { + await this.delete(key, options); + yield key; + } + } + batch() { + let puts = []; + let dels = []; + return { + 
put(key, value) { + puts.push({ key, value }); + }, + delete(key) { + dels.push(key); + }, + commit: async (options) => { + await drain(this.putMany(puts, options)); + puts = []; + await drain(this.deleteMany(dels, options)); + dels = []; + } + }; + } + /** + * Extending classes should override `query` or implement this method + */ + // eslint-disable-next-line require-yield + async *_all(q, options) { + throw new Error('._all is not implemented'); + } + /** + * Extending classes should override `queryKeys` or implement this method + */ + // eslint-disable-next-line require-yield + async *_allKeys(q, options) { + throw new Error('._allKeys is not implemented'); + } + query(q, options) { + let it = this._all(q, options); + if (q.prefix != null) { + const prefix = q.prefix; + it = filter(it, (e) => e.key.toString().startsWith(prefix)); + } + if (Array.isArray(q.filters)) { + it = q.filters.reduce((it, f) => filter(it, f), it); + } + if (Array.isArray(q.orders)) { + it = q.orders.reduce((it, f) => sort(it, f), it); + } + if (q.offset != null) { + let i = 0; + const offset = q.offset; + it = filter(it, () => i++ >= offset); + } + if (q.limit != null) { + it = take(it, q.limit); + } + return it; + } + queryKeys(q, options) { + let it = this._allKeys(q, options); + if (q.prefix != null) { + const prefix = q.prefix; + it = filter(it, (key) => key.toString().startsWith(prefix)); + } + if (Array.isArray(q.filters)) { + it = q.filters.reduce((it, f) => filter(it, f), it); + } + if (Array.isArray(q.orders)) { + it = q.orders.reduce((it, f) => sort(it, f), it); + } + if (q.offset != null) { + const offset = q.offset; + let i = 0; + it = filter(it, () => i++ >= offset); + } + if (q.limit != null) { + it = take(it, q.limit); + } + return it; + } + } + + /** + * @typedef {{ [key: string]: any }} Extensions + * @typedef {Error} Err + * @property {string} message + */ + + /** + * + * @param {Error} obj + * @param {Extensions} props + * @returns {Error & Extensions} + */ + function 
assign(obj, props) {
  // Attach each prop as enumerable + configurable (but non-writable),
  // matching how err-code decorates Error instances.
  for (const key in props) {
    Object.defineProperty(obj, key, {
      value: props[key],
      enumerable: true,
      configurable: true,
    });
  }

  return obj;
}

/**
 * Tag an existing Error with a `code` and/or extra properties.
 *
 * @param {any} err - An Error
 * @param {string|Extensions} code - A string code or props to set on the error
 * @param {Extensions} [props] - Props to set on the error
 * @returns {Error & Extensions}
 */
function createError(err, code, props) {
  if (!err || typeof err === 'string') {
    throw new TypeError('Please pass an Error to err-code');
  }

  if (!props) {
    props = {};
  }

  if (typeof code === 'object') {
    // two-argument form: createError(err, props)
    props = code;
    code = '';
  }

  if (code) {
    props.code = code;
  }

  try {
    return assign(err, props);
  } catch (_) {
    // Some errors reject defineProperty (e.g. frozen objects); fall back to a
    // fresh object that inherits from the original error's prototype chain.
    props.message = err.message;
    props.stack = err.stack;

    const ErrClass = function () {};

    ErrClass.prototype = Object.create(Object.getPrototypeOf(err));

    // @ts-ignore
    const output = assign(new ErrClass(), props);

    return output;
  }
}

// CommonJS interop shims emitted by the bundler for the err-code package.
var errCode = createError;

var errCode$1 = /*@__PURE__*/getDefaultExportFromCjs(errCode);

// datastore-core error helpers: wrap (or create) an Error and tag it with a
// stable ERR_* code that callers can branch on.
function dbOpenFailedError(err) {
  err = err ?? new Error('Cannot open database');
  return errCode$1(err, 'ERR_DB_OPEN_FAILED');
}
function dbDeleteFailedError(err) {
  err = err ?? new Error('Delete failed');
  return errCode$1(err, 'ERR_DB_DELETE_FAILED');
}
function dbWriteFailedError(err) {
  err = err ?? new Error('Write failed');
  return errCode$1(err, 'ERR_DB_WRITE_FAILED');
}
function dbReadFailedError(err) {
  err = err ?? new Error('Read failed');
  return errCode$1(err, 'ERR_DB_READ_FAILED');
}
function notFoundError(err) {
  err = err ??
new Error('Not Found');
  return errCode$1(err, 'ERR_NOT_FOUND');
}

/**
 * A Map-backed in-memory datastore keyed by the stringified Key.
 *
 * NOTE(review): these methods are synchronous (put returns the key, not a
 * promise) although BaseDatastore's stubs are promise-based — presumably the
 * datastore interface accepts `Await<T>`; confirm against interface-datastore.
 */
class MemoryDatastore extends BaseDatastore {
    data;
    constructor() {
        super();
        this.data = new Map();
    }
    put(key, val) {
        this.data.set(key.toString(), val);
        return key;
    }
    get(key) {
        const result = this.data.get(key.toString());
        if (result == null) {
            // missing entries surface as ERR_NOT_FOUND, not undefined
            throw notFoundError();
        }
        return result;
    }
    has(key) {
        return this.data.has(key.toString());
    }
    delete(key) {
        this.data.delete(key.toString());
    }
    *_all() {
        for (const [key, value] of this.data.entries()) {
            yield { key: new Key(key), value };
        }
    }
    *_allKeys() {
        for (const key of this.data.keys()) {
            yield new Key(key);
        }
    }
}

// Trailing-edge debounce: each call resets the timer; `func` runs once, with
// no arguments, `wait` ms after the last call.
function debounce$1(func, wait) {
    let timeout;
    return function () {
        const later = function () {
            timeout = undefined;
            func();
        };
        clearTimeout(timeout);
        timeout = setTimeout(later, wait);
    };
}

// identity filter used when no announceFilter is configured
const defaultAddressFilter = (addrs) => addrs;
/**
 * If the passed multiaddr contains the passed peer id, remove it
 */
function stripPeerId(ma, peerId) {
    const observedPeerIdStr = ma.getPeerId();
    // strip our peer id if it has been passed
    if (observedPeerIdStr != null) {
        const observedPeerId = peerIdFromString(observedPeerIdStr);
        // use same encoding for comparison
        if (observedPeerId.equals(peerId)) {
            ma = ma.decapsulate(multiaddr(`/p2p/${peerId.toString()}`));
        }
    }
    return ma;
}
class DefaultAddressManager {
    log;
    components;
    // this is an array to allow for duplicates, e.g. multiples of `/ip4/0.0.0.0/tcp/0`
    listen;
    announce;
    observed;
    announceFilter;
    /**
     * Responsible for managing the peer addresses.
     * Peers can specify their listen and announce addresses.
     * The listen addresses will be used by the libp2p transports to listen for new connections,
     * while the announce addresses will be used for the peer addresses' to other peers in the network.
+ */ + constructor(components, init = {}) { + const { listen = [], announce = [] } = init; + this.components = components; + this.log = components.logger.forComponent('libp2p:address-manager'); + this.listen = listen.map(ma => ma.toString()); + this.announce = new Set(announce.map(ma => ma.toString())); + this.observed = new Map(); + this.announceFilter = init.announceFilter ?? defaultAddressFilter; + // this method gets called repeatedly on startup when transports start listening so + // debounce it so we don't cause multiple self:peer:update events to be emitted + this._updatePeerStoreAddresses = debounce$1(this._updatePeerStoreAddresses.bind(this), 1000); + // update our stored addresses when new transports listen + components.events.addEventListener('transport:listening', () => { + this._updatePeerStoreAddresses(); + }); + // update our stored addresses when existing transports stop listening + components.events.addEventListener('transport:close', () => { + this._updatePeerStoreAddresses(); + }); + } + _updatePeerStoreAddresses() { + // if announce addresses have been configured, ensure they make it into our peer + // record for things like identify + const addrs = this.getAnnounceAddrs() + .concat(this.components.transportManager.getAddrs()) + .concat([...this.observed.entries()] + .filter(([_, metadata]) => metadata.confident) + .map(([str]) => multiaddr(str))).map(ma => { + // strip our peer id if it is present + if (ma.getPeerId() === this.components.peerId.toString()) { + return ma.decapsulate(`/p2p/${this.components.peerId.toString()}`); + } + return ma; + }); + this.components.peerStore.patch(this.components.peerId, { + multiaddrs: addrs + }) + .catch(err => { this.log.error('error updating addresses', err); }); + } + /** + * Get peer listen multiaddrs + */ + getListenAddrs() { + return Array.from(this.listen).map((a) => multiaddr(a)); + } + /** + * Get peer announcing multiaddrs + */ + getAnnounceAddrs() { + return Array.from(this.announce).map((a) => 
multiaddr(a)); + } + /** + * Get observed multiaddrs + */ + getObservedAddrs() { + return Array.from(this.observed).map(([a]) => multiaddr(a)); + } + /** + * Add peer observed addresses + */ + addObservedAddr(addr) { + addr = stripPeerId(addr, this.components.peerId); + const addrString = addr.toString(); + // do not trigger the change:addresses event if we already know about this address + if (this.observed.has(addrString)) { + return; + } + this.observed.set(addrString, { + confident: false + }); + } + confirmObservedAddr(addr) { + addr = stripPeerId(addr, this.components.peerId); + const addrString = addr.toString(); + const metadata = this.observed.get(addrString) ?? { + confident: false + }; + const startingConfidence = metadata.confident; + this.observed.set(addrString, { + confident: true + }); + // only trigger the 'self:peer:update' event if our confidence in an address has changed + if (!startingConfidence) { + this._updatePeerStoreAddresses(); + } + } + removeObservedAddr(addr) { + addr = stripPeerId(addr, this.components.peerId); + const addrString = addr.toString(); + this.observed.delete(addrString); + } + getAddresses() { + let addrs = this.getAnnounceAddrs().map(ma => ma.toString()); + if (addrs.length === 0) { + // no configured announce addrs, add configured listen addresses + addrs = this.components.transportManager.getAddrs().map(ma => ma.toString()); + } + // add observed addresses we are confident in + addrs = addrs.concat(Array.from(this.observed) + .filter(([ma, metadata]) => metadata.confident) + .map(([ma]) => ma)); + // dedupe multiaddrs + const addrSet = new Set(addrs); + // Create advertising list + return this.announceFilter(Array.from(addrSet) + .map(str => multiaddr(str))) + .map(ma => { + // do not append our peer id to a path multiaddr as it will become invalid + if (ma.protos().pop()?.path === true) { + return ma; + } + if (ma.getPeerId() === this.components.peerId.toString()) { + return ma; + } + return 
ma.encapsulate(`/p2p/${this.components.peerId.toString()}`); + }); + } + } + + class DefaultComponents { + components = {}; + _started = false; + constructor(init = {}) { + this.components = {}; + for (const [key, value] of Object.entries(init)) { + this.components[key] = value; + } + if (this.components.logger == null) { + this.components.logger = defaultLogger(); + } + } + isStarted() { + return this._started; + } + async _invokeStartableMethod(methodName) { + await Promise.all(Object.values(this.components) + .filter(obj => isStartable(obj)) + .map(async (startable) => { + await startable[methodName]?.(); + })); + } + async beforeStart() { + await this._invokeStartableMethod('beforeStart'); + } + async start() { + await this._invokeStartableMethod('start'); + this._started = true; + } + async afterStart() { + await this._invokeStartableMethod('afterStart'); + } + async beforeStop() { + await this._invokeStartableMethod('beforeStop'); + } + async stop() { + await this._invokeStartableMethod('stop'); + this._started = false; + } + async afterStop() { + await this._invokeStartableMethod('afterStop'); + } + } + const OPTIONAL_SERVICES = [ + 'metrics', + 'connectionProtector', + 'dns' + ]; + const NON_SERVICE_PROPERTIES = [ + 'components', + 'isStarted', + 'beforeStart', + 'start', + 'afterStart', + 'beforeStop', + 'stop', + 'afterStop', + 'then', + '_invokeStartableMethod' + ]; + function defaultComponents(init = {}) { + const components = new DefaultComponents(init); + const proxy = new Proxy(components, { + get(target, prop, receiver) { + if (typeof prop === 'string' && !NON_SERVICE_PROPERTIES.includes(prop)) { + const service = components.components[prop]; + if (service == null && !OPTIONAL_SERVICES.includes(prop)) { + throw new CodeError$2(`${prop} not set`, 'ERR_SERVICE_MISSING'); + } + return service; + } + return Reflect.get(target, prop, receiver); + }, + set(target, prop, value) { + if (typeof prop === 'string') { + components.components[prop] = value; + } 
+ else { + Reflect.set(target, prop, value); + } + return true; + } + }); + // @ts-expect-error component keys are proxied + return proxy; + } + + var Netmask_1; + // Generated by CoffeeScript 1.12.7 + (function() { + var Netmask, atob, chr, chr0, chrA, chra, ip2long, long2ip; + + long2ip = function(long) { + var a, b, c, d; + a = (long & (0xff << 24)) >>> 24; + b = (long & (0xff << 16)) >>> 16; + c = (long & (0xff << 8)) >>> 8; + d = long & 0xff; + return [a, b, c, d].join('.'); + }; + + ip2long = function(ip) { + var b, c, i, j, n, ref; + b = []; + for (i = j = 0; j <= 3; i = ++j) { + if (ip.length === 0) { + break; + } + if (i > 0) { + if (ip[0] !== '.') { + throw new Error('Invalid IP'); + } + ip = ip.substring(1); + } + ref = atob(ip), n = ref[0], c = ref[1]; + ip = ip.substring(c); + b.push(n); + } + if (ip.length !== 0) { + throw new Error('Invalid IP'); + } + switch (b.length) { + case 1: + if (b[0] > 0xFFFFFFFF) { + throw new Error('Invalid IP'); + } + return b[0] >>> 0; + case 2: + if (b[0] > 0xFF || b[1] > 0xFFFFFF) { + throw new Error('Invalid IP'); + } + return (b[0] << 24 | b[1]) >>> 0; + case 3: + if (b[0] > 0xFF || b[1] > 0xFF || b[2] > 0xFFFF) { + throw new Error('Invalid IP'); + } + return (b[0] << 24 | b[1] << 16 | b[2]) >>> 0; + case 4: + if (b[0] > 0xFF || b[1] > 0xFF || b[2] > 0xFF || b[3] > 0xFF) { + throw new Error('Invalid IP'); + } + return (b[0] << 24 | b[1] << 16 | b[2] << 8 | b[3]) >>> 0; + default: + throw new Error('Invalid IP'); + } + }; + + chr = function(b) { + return b.charCodeAt(0); + }; + + chr0 = chr('0'); + + chra = chr('a'); + + chrA = chr('A'); + + atob = function(s) { + var base, dmax, i, n, start; + n = 0; + base = 10; + dmax = '9'; + i = 0; + if (s.length > 1 && s[i] === '0') { + if (s[i + 1] === 'x' || s[i + 1] === 'X') { + i += 2; + base = 16; + } else if ('0' <= s[i + 1] && s[i + 1] <= '9') { + i++; + base = 8; + dmax = '7'; + } + } + start = i; + while (i < s.length) { + if ('0' <= s[i] && s[i] <= dmax) { + n = (n * 
base + (chr(s[i]) - chr0)) >>> 0; + } else if (base === 16) { + if ('a' <= s[i] && s[i] <= 'f') { + n = (n * base + (10 + chr(s[i]) - chra)) >>> 0; + } else if ('A' <= s[i] && s[i] <= 'F') { + n = (n * base + (10 + chr(s[i]) - chrA)) >>> 0; + } else { + break; + } + } else { + break; + } + if (n > 0xFFFFFFFF) { + throw new Error('too large'); + } + i++; + } + if (i === start) { + throw new Error('empty octet'); + } + return [n, i]; + }; + + Netmask = (function() { + function Netmask(net, mask) { + var i, j, ref; + if (typeof net !== 'string') { + throw new Error("Missing `net' parameter"); + } + if (!mask) { + ref = net.split('/', 2), net = ref[0], mask = ref[1]; + } + if (!mask) { + mask = 32; + } + if (typeof mask === 'string' && mask.indexOf('.') > -1) { + try { + this.maskLong = ip2long(mask); + } catch (error1) { + throw new Error("Invalid mask: " + mask); + } + for (i = j = 32; j >= 0; i = --j) { + if (this.maskLong === (0xffffffff << (32 - i)) >>> 0) { + this.bitmask = i; + break; + } + } + } else if (mask || mask === 0) { + this.bitmask = parseInt(mask, 10); + this.maskLong = 0; + if (this.bitmask > 0) { + this.maskLong = (0xffffffff << (32 - this.bitmask)) >>> 0; + } + } else { + throw new Error("Invalid mask: empty"); + } + try { + this.netLong = (ip2long(net) & this.maskLong) >>> 0; + } catch (error1) { + throw new Error("Invalid net address: " + net); + } + if (!(this.bitmask <= 32)) { + throw new Error("Invalid mask for ip4: " + mask); + } + this.size = Math.pow(2, 32 - this.bitmask); + this.base = long2ip(this.netLong); + this.mask = long2ip(this.maskLong); + this.hostmask = long2ip(~this.maskLong); + this.first = this.bitmask <= 30 ? long2ip(this.netLong + 1) : this.base; + this.last = this.bitmask <= 30 ? long2ip(this.netLong + this.size - 2) : long2ip(this.netLong + this.size - 1); + this.broadcast = this.bitmask <= 30 ? 
long2ip(this.netLong + this.size - 1) : void 0; + } + + Netmask.prototype.contains = function(ip) { + if (typeof ip === 'string' && (ip.indexOf('/') > 0 || ip.split('.').length !== 4)) { + ip = new Netmask(ip); + } + if (ip instanceof Netmask) { + return this.contains(ip.base) && this.contains(ip.broadcast || ip.last); + } else { + return (ip2long(ip) & this.maskLong) >>> 0 === (this.netLong & this.maskLong) >>> 0; + } + }; + + Netmask.prototype.next = function(count) { + if (count == null) { + count = 1; + } + return new Netmask(long2ip(this.netLong + (this.size * count)), this.mask); + }; + + Netmask.prototype.forEach = function(fn) { + var index, lastLong, long; + long = ip2long(this.first); + lastLong = ip2long(this.last); + index = 0; + while (long <= lastLong) { + fn(long2ip(long), long, index); + index++; + long++; + } + }; + + Netmask.prototype.toString = function() { + return this.base + "/" + this.bitmask; + }; + + return Netmask; + + })(); + + Netmask_1 = Netmask; + + }).call(commonjsGlobal); + + const PRIVATE_IP_RANGES = [ + '0.0.0.0/8', + '10.0.0.0/8', + '100.64.0.0/10', + '127.0.0.0/8', + '169.254.0.0/16', + '172.16.0.0/12', + '192.0.0.0/24', + '192.0.0.0/29', + '192.0.0.8/32', + '192.0.0.9/32', + '192.0.0.10/32', + '192.0.0.170/32', + '192.0.0.171/32', + '192.0.2.0/24', + '192.31.196.0/24', + '192.52.193.0/24', + '192.88.99.0/24', + '192.168.0.0/16', + '192.175.48.0/24', + '198.18.0.0/15', + '198.51.100.0/24', + '203.0.113.0/24', + '240.0.0.0/4', + '255.255.255.255/32' + ]; + const NETMASK_RANGES = PRIVATE_IP_RANGES.map(ipRange => new Netmask_1(ipRange)); + function ipv4Check(ipAddr) { + for (const r of NETMASK_RANGES) { + if (r.contains(ipAddr)) + return true; + } + return false; + } + function ipv6Check(ipAddr) { + return /^::$/.test(ipAddr) || + /^::1$/.test(ipAddr) || + /^::f{4}:([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})$/.test(ipAddr) || + /^::f{4}:0.([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})$/.test(ipAddr) || + 
/^64:ff9b::([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})$/.test(ipAddr) || + /^100::([0-9a-fA-F]{0,4}):?([0-9a-fA-F]{0,4}):?([0-9a-fA-F]{0,4}):?([0-9a-fA-F]{0,4})$/.test(ipAddr) || + /^2001::([0-9a-fA-F]{0,4}):?([0-9a-fA-F]{0,4}):?([0-9a-fA-F]{0,4}):?([0-9a-fA-F]{0,4}):?([0-9a-fA-F]{0,4}):?([0-9a-fA-F]{0,4})$/.test(ipAddr) || + /^2001:2[0-9a-fA-F]:([0-9a-fA-F]{0,4}):?([0-9a-fA-F]{0,4}):?([0-9a-fA-F]{0,4}):?([0-9a-fA-F]{0,4}):?([0-9a-fA-F]{0,4}):?([0-9a-fA-F]{0,4})$/.test(ipAddr) || + /^2001:db8:([0-9a-fA-F]{0,4}):?([0-9a-fA-F]{0,4}):?([0-9a-fA-F]{0,4}):?([0-9a-fA-F]{0,4}):?([0-9a-fA-F]{0,4}):?([0-9a-fA-F]{0,4})$/.test(ipAddr) || + /^2002:([0-9a-fA-F]{0,4}):?([0-9a-fA-F]{0,4}):?([0-9a-fA-F]{0,4}):?([0-9a-fA-F]{0,4}):?([0-9a-fA-F]{0,4}):?([0-9a-fA-F]{0,4}):?([0-9a-fA-F]{0,4})$/.test(ipAddr) || + /^f[c-d]([0-9a-fA-F]{2,2}):/i.test(ipAddr) || + /^fe[8-9a-bA-B][0-9a-fA-F]:/i.test(ipAddr) || + /^ff([0-9a-fA-F]{2,2}):/i.test(ipAddr); + } + function isPrivateIp(ip) { + if (isIPv4(ip)) + return ipv4Check(ip); + else if (isIPv6(ip)) + return ipv6Check(ip); + else + return undefined; + } + + /** + * Returns a connection gater that disallows dialling private addresses by + * default. Browsers are severely limited in their resource usage so don't + * waste time trying to dial undiallable addresses. 
+ */ + function connectionGater(gater = {}) { + return { + denyDialPeer: async () => false, + denyDialMultiaddr: async (multiaddr) => { + const tuples = multiaddr.stringTuples(); + if (tuples[0][0] === 4 || tuples[0][0] === 41) { + return Boolean(isPrivateIp(`${tuples[0][1]}`)); + } + return false; + }, + denyInboundConnection: async () => false, + denyOutboundConnection: async () => false, + denyInboundEncryptedConnection: async () => false, + denyOutboundEncryptedConnection: async () => false, + denyInboundUpgradedConnection: async () => false, + denyOutboundUpgradedConnection: async () => false, + filterMultiaddrForPeer: async () => true, + ...gater + }; + } + + /** + * @packageDocumentation + * + * This module exports various matchers that can be used to infer the type of a + * passed multiaddr. + * + * @example + * + * ```ts + * import { multiaddr } from '@multiformats/multiaddr' + * import { DNS } from '@multiformats/multiaddr-matcher' + * + * const ma = multiaddr('/dnsaddr/example.org') + * + * DNS.matches(ma) // true - this is a multiaddr with a DNS address at the start + * ``` + * + * @example + * + * The default matching behaviour ignores any subsequent tuples in the multiaddr. 
+ * If you want stricter matching you can use `.exactMatch`: + * + * ```ts + * import { multiaddr } from '@multiformats/multiaddr' + * import { DNS, Circuit } from '@multiformats/multiaddr-matcher' + * + * const ma = multiaddr('/dnsaddr/example.org/p2p/QmFoo/p2p-circuit/p2p/QmBar') + * + * DNS.exactMatch(ma) // false - this address has extra tuples after the DNS component + * Circuit.matches(ma) // true + * Circuit.exactMatch(ma) // true - the extra tuples are circuit relay related + * ``` + */ + /** + * Split a multiaddr into path components + */ + const toParts = (ma) => { + return ma.toString().split('/').slice(1); + }; + const func = (fn) => { + return { + match: (vals) => { + if (vals.length < 1) { + return false; + } + if (fn(vals[0])) { + return vals.slice(1); + } + return false; + }, + pattern: 'fn' + }; + }; + const literal = (str) => { + return { + match: (vals) => func((val) => val === str).match(vals), + pattern: str + }; + }; + const string = () => { + return { + match: (vals) => func((val) => typeof val === 'string').match(vals), + pattern: '{string}' + }; + }; + const number$1 = () => { + return { + match: (vals) => func((val) => !isNaN(parseInt(val))).match(vals), + pattern: '{number}' + }; + }; + const peerId = () => { + return { + match: (vals) => { + if (vals.length < 2) { + return false; + } + if (vals[0] !== 'p2p' && vals[0] !== 'ipfs') { + return false; + } + // Q is RSA, 1 is Ed25519 or Secp256k1 + if (vals[1].startsWith('Q') || vals[1].startsWith('1')) { + try { + base58btc.decode(`z${vals[1]}`); + } + catch (err) { + return false; + } + } + else { + return false; + } + return vals.slice(2); + }, + pattern: '/p2p/{peerid}' + }; + }; + const certhash = () => { + return { + match: (vals) => { + if (vals.length < 2) { + return false; + } + if (vals[0] !== 'certhash') { + return false; + } + try { + base64url.decode(vals[1]); + } + catch { + return false; + } + return vals.slice(2); + }, + pattern: '/certhash/{certhash}' + }; + }; + const 
optional = (matcher) => { + return { + match: (vals) => { + const result = matcher.match(vals); + if (result === false) { + return vals; + } + return result; + }, + pattern: `optional(${matcher.pattern})` + }; + }; + const or$1 = (...matchers) => { + return { + match: (vals) => { + let matches; + for (const matcher of matchers) { + const result = matcher.match(vals); + // no match + if (result === false) { + continue; + } + // choose greediest matcher + if (matches == null || result.length < matches.length) { + matches = result; + } + } + if (matches == null) { + return false; + } + return matches; + }, + pattern: `or(${matchers.map(m => m.pattern).join(', ')})` + }; + }; + const and$1 = (...matchers) => { + return { + match: (vals) => { + for (const matcher of matchers) { + // pass what's left of the array + const result = matcher.match(vals); + // no match + if (result === false) { + return false; + } + vals = result; + } + return vals; + }, + pattern: `and(${matchers.map(m => m.pattern).join(', ')})` + }; + }; + function fmt(...matchers) { + function match(ma) { + let parts = toParts(ma); + for (const matcher of matchers) { + const result = matcher.match(parts); + if (result === false) { + return false; + } + parts = result; + } + return parts; + } + function matches(ma) { + const result = match(ma); + return result !== false; + } + function exactMatch(ma) { + const result = match(ma); + if (result === false) { + return false; + } + return result.length === 0; + } + return { + matches, + exactMatch + }; + } + /** + * DNS matchers + */ + const _DNS4 = and$1(literal('dns4'), string()); + const _DNS6 = and$1(literal('dns6'), string()); + const _DNSADDR = and$1(literal('dnsaddr'), string()); + const _DNS = and$1(literal('dns'), string()); + /** + * Matches any dns address. 
+ * + * @example + * + * ```ts + * import { multiaddr } from '@multiformats/multiaddr' + * import { DNS } from '@multiformats/multiaddr-matcher' + * + * DNS.matches(multiaddr('/dnsaddr/example.org')) // true + * DNS.matches(multiaddr('/dns4/example.org')) // true + * DNS.matches(multiaddr('/dns6/example.org')) // true + * ``` + */ + fmt(or$1(_DNS, _DNSADDR, _DNS4, _DNS6)); + const _IP4 = and$1(literal('ip4'), func(isIPv4)); + const _IP6 = and$1(literal('ip6'), func(isIPv6)); + const _IP = or$1(_IP4, _IP6); + const _IP_OR_DOMAIN = or$1(_IP, _DNS, _DNS4, _DNS6, _DNSADDR); + /** + * A matcher for addresses that start with IP or DNS tuples. + * + * @example + * + * ```ts + * import { multiaddr } from '@multiformats/multiaddr' + * import { IP_OR_DOMAIN } from '@multiformats/multiaddr-matcher' + * + * IP_OR_DOMAIN.matches(multiaddr('/ip4/123.123.123.123/p2p/QmFoo')) // true + * IP_OR_DOMAIN.matches(multiaddr('/dns/example.com/p2p/QmFoo')) // true + * IP_OR_DOMAIN.matches(multiaddr('/p2p/QmFoo')) // false + * ``` + */ + const IP_OR_DOMAIN = fmt(_IP_OR_DOMAIN); + const _TCP = and$1(_IP_OR_DOMAIN, literal('tcp'), number$1()); + const _UDP = and$1(_IP_OR_DOMAIN, literal('udp'), number$1()); + const TCP_OR_UDP = or$1(_TCP, _UDP); + const _QUIC = and$1(_UDP, literal('quic')); + const _QUICV1 = and$1(_UDP, literal('quic-v1')); + const QUIC_V0_OR_V1 = or$1(_QUIC, _QUICV1); + const _WEB = or$1(_IP_OR_DOMAIN, _TCP, _UDP, _QUIC, _QUICV1); + const _WebSockets$1 = or$1(and$1(_WEB, literal('ws'), optional(peerId()))); + const _WebSocketsSecure$1 = or$1(and$1(_WEB, literal('wss'), optional(peerId())), and$1(_WEB, literal('tls'), literal('ws'), optional(peerId()))); + const _WebRTCDirect$1 = and$1(TCP_OR_UDP, literal('webrtc-direct'), certhash(), optional(certhash()), optional(peerId())); + const _WebTransport$1 = and$1(_QUICV1, literal('webtransport'), optional(certhash()), optional(certhash()), optional(peerId())); + /** + * Matches WebTransport addresses. 
+ * + * @example + * + * ```ts + * import { multiaddr } from '@multiformats/multiaddr' + * import { WebRTCDirect } from '@multiformats/multiaddr-matcher' + * + * WebRTCDirect.matches(multiaddr('/ip4/123.123.123.123/udp/1234/quic-v1/webtransport/certhash/u..../certhash/u..../p2p/QmFoo')) // true + * ``` + */ + const WebTransport$3 = fmt(_WebTransport$1); + const _P2P$1 = or$1(_WebSockets$1, _WebSocketsSecure$1, and$1(_TCP, optional(peerId())), and$1(QUIC_V0_OR_V1, optional(peerId())), and$1(_IP_OR_DOMAIN, optional(peerId())), _WebRTCDirect$1, _WebTransport$1, peerId()); + const _Circuit$1 = and$1(_P2P$1, literal('p2p-circuit'), peerId()); + /** + * Matches circuit relay addresses + * + * @example + * + * ```ts + * import { multiaddr } from '@multiformats/multiaddr' + * import { Circuit } from '@multiformats/multiaddr-matcher' + * + * Circuit.matches(multiaddr('/ip4/123.123.123.123/tcp/1234/p2p/QmRelay/p2p-circuit/p2p/QmTarget')) // true + * ``` + */ + const Circuit$1 = fmt(_Circuit$1); + or$1(and$1(_P2P$1, literal('p2p-circuit'), literal('webrtc'), peerId()), and$1(_P2P$1, literal('webrtc'), optional(peerId())), literal('webrtc')); + or$1(and$1(_IP_OR_DOMAIN, literal('tcp'), number$1(), literal('http'), optional(peerId())), and$1(_IP_OR_DOMAIN, literal('http'), optional(peerId()))); + or$1(and$1(_IP_OR_DOMAIN, literal('tcp'), or$1(and$1(literal('443'), literal('http')), and$1(number$1(), literal('https'))), optional(peerId())), and$1(_IP_OR_DOMAIN, literal('tls'), literal('http'), optional(peerId())), and$1(_IP_OR_DOMAIN, literal('https'), optional(peerId()))); + + /** + * Check if a given multiaddr has a private address. + */ + function isPrivate(ma) { + try { + const { address } = ma.nodeAddress(); + return Boolean(isPrivateIp(address)); + } + catch { + return true; + } + } + + /** + * @packageDocumentation + * + * Provides strategies to sort a list of multiaddrs. 
+ * + * @example + * + * ```typescript + * import { publicAddressesFirst } from '@libp2p/utils/address-sort' + * import { multiaddr } from '@multformats/multiaddr' + * + * + * const addresses = [ + * multiaddr('/ip4/127.0.0.1/tcp/9000'), + * multiaddr('/ip4/82.41.53.1/tcp/9000') + * ].sort(publicAddressesFirst) + * + * console.info(addresses) + * // ['/ip4/82.41.53.1/tcp/9000', '/ip4/127.0.0.1/tcp/9000'] + * ``` + */ + /** + * Compare function for array.sort() that moves public addresses to the start + * of the array. + */ + function publicAddressesFirst(a, b) { + const isAPrivate = isPrivate(a.multiaddr); + const isBPrivate = isPrivate(b.multiaddr); + if (isAPrivate && !isBPrivate) { + return 1; + } + else if (!isAPrivate && isBPrivate) { + return -1; + } + return 0; + } + /** + * Compare function for array.sort() that moves certified addresses to the start + * of the array. + */ + function certifiedAddressesFirst(a, b) { + if (a.isCertified && !b.isCertified) { + return -1; + } + else if (!a.isCertified && b.isCertified) { + return 1; + } + return 0; + } + /** + * Compare function for array.sort() that moves circuit relay addresses to the + * start of the array. 
+ */ + function circuitRelayAddressesLast(a, b) { + const isACircuit = Circuit$1.exactMatch(a.multiaddr); + const isBCircuit = Circuit$1.exactMatch(b.multiaddr); + if (isACircuit && !isBCircuit) { + return 1; + } + else if (!isACircuit && isBCircuit) { + return -1; + } + return 0; + } + function defaultAddressSort(a, b) { + const publicResult = publicAddressesFirst(a, b); + if (publicResult !== 0) { + return publicResult; + } + const relayResult = circuitRelayAddressesLast(a, b); + if (relayResult !== 0) { + return relayResult; + } + const certifiedResult = certifiedAddressesFirst(a, b); + return certifiedResult; + } + + /** + * An implementation of the ProgressEvent interface, this is essentially + * a typed `CustomEvent` with a `type` property that lets us disambiguate + * events passed to `progress` callbacks. + */ + class CustomProgressEvent extends Event { + constructor(type, detail) { + super(type); + this.detail = detail; + } + } + + function getTypes(types) { + const DEFAULT_TYPES = [ + RecordType.A + ]; + if (types == null) { + return DEFAULT_TYPES; + } + if (Array.isArray(types)) { + if (types.length === 0) { + return DEFAULT_TYPES; + } + return types; + } + return [ + types + ]; + } + + /** + * This TTL will be used if the remote service does not return one + */ + const DEFAULT_TTL = 60; + function toDNSResponse(obj) { + return { + Status: obj.Status ?? 0, + TC: obj.TC ?? obj.flag_tc ?? false, + RD: obj.RD ?? obj.flag_rd ?? false, + RA: obj.RA ?? obj.flag_ra ?? false, + AD: obj.AD ?? obj.flag_ad ?? false, + CD: obj.CD ?? obj.flag_cd ?? false, + Question: (obj.Question ?? obj.questions ?? []).map((question) => { + return { + name: question.name, + type: RecordType[question.type] + }; + }), + Answer: (obj.Answer ?? obj.answers ?? []).map((answer) => { + return { + name: answer.name, + type: RecordType[answer.type], + TTL: (answer.TTL ?? answer.ttl ?? DEFAULT_TTL), + data: answer.data instanceof Uint8Array ? 
toString$1(answer.data) : answer.data + }; + }) + }; + } + + /* eslint-env browser */ + /** + * Browsers limit concurrent connections per host (~6), we don't want to exhaust + * the limit so this value controls how many DNS queries can be in flight at + * once. + */ + const DEFAULT_QUERY_CONCURRENCY = 4; + /** + * Uses the RFC 8427 'application/dns-json' content-type to resolve DNS queries. + * + * Supports and server that uses the same schema as Google's DNS over HTTPS + * resolver. + * + * This resolver needs fewer dependencies than the regular DNS-over-HTTPS + * resolver so can result in a smaller bundle size and consequently is preferred + * for browser use. + * + * @see https://developers.cloudflare.com/1.1.1.1/encryption/dns-over-https/make-api-requests/dns-json/ + * @see https://github.com/curl/curl/wiki/DNS-over-HTTPS#publicly-available-servers + * @see https://dnsprivacy.org/public_resolvers/ + * @see https://datatracker.ietf.org/doc/html/rfc8427 + */ + function dnsJsonOverHttps(url, init = {}) { + const httpQueue = new PQueue({ + concurrency: init.queryConcurrency ?? DEFAULT_QUERY_CONCURRENCY + }); + return async (fqdn, options = {}) => { + const searchParams = new URLSearchParams(); + searchParams.set('name', fqdn); + getTypes(options.types).forEach(type => { + // We pass record type as a string to the server because cloudflare DNS bug. 
see https://github.com/ipfs/helia/issues/474 + searchParams.append('type', RecordType[type]); + }); + options.onProgress?.(new CustomProgressEvent('dns:query', { detail: fqdn })); + // query DNS-JSON over HTTPS server + const response = await httpQueue.add(async () => { + const res = await fetch(`${url}?${searchParams}`, { + headers: { + accept: 'application/dns-json' + }, + signal: options?.signal + }); + if (res.status !== 200) { + throw new Error(`Unexpected HTTP status: ${res.status} - ${res.statusText}`); + } + const response = toDNSResponse(await res.json()); + options.onProgress?.(new CustomProgressEvent('dns:response', { detail: response })); + return response; + }, { + signal: options.signal + }); + if (response == null) { + throw new Error('No DNS response received'); + } + return response; + }; + } + + function defaultResolver() { + return [ + dnsJsonOverHttps('https://cloudflare-dns.com/dns-query'), + dnsJsonOverHttps('https://dns.google/resolve') + ]; + } + + var hashlru = function (max) { + + if (!max) throw Error('hashlru must have a max value, of type number, greater than 0') + + var size = 0, cache = Object.create(null), _cache = Object.create(null); + + function update (key, value) { + cache[key] = value; + size ++; + if(size >= max) { + size = 0; + _cache = cache; + cache = Object.create(null); + } + } + + return { + has: function (key) { + return cache[key] !== undefined || _cache[key] !== undefined + }, + remove: function (key) { + if(cache[key] !== undefined) + cache[key] = undefined; + if(_cache[key] !== undefined) + _cache[key] = undefined; + }, + get: function (key) { + var v = cache[key]; + if(v !== undefined) return v + if((v = _cache[key]) !== undefined) { + update(key, v); + return v + } + }, + set: function (key, value) { + if(cache[key] !== undefined) cache[key] = value; + else update(key, value); + }, + clear: function () { + cache = Object.create(null); + _cache = Object.create(null); + } + } + }; + + var cache$1 = 
/*@__PURE__*/getDefaultExportFromCjs(hashlru); + + /** + * Time Aware Least Recent Used Cache + * + * @see https://arxiv.org/pdf/1801.00390 + */ + class CachedAnswers { + lru; + constructor(maxSize) { + this.lru = cache$1(maxSize); + } + get(fqdn, types) { + let foundAllAnswers = true; + const answers = []; + for (const type of types) { + const cached = this.getAnswers(fqdn, type); + if (cached.length === 0) { + foundAllAnswers = false; + break; + } + answers.push(...cached); + } + if (foundAllAnswers) { + return toDNSResponse({ answers }); + } + } + getAnswers(domain, type) { + const key = `${domain.toLowerCase()}-${type}`; + const answers = this.lru.get(key); + if (answers != null) { + const cachedAnswers = answers + .filter((entry) => { + return entry.expires > Date.now(); + }) + .map(({ expires, value }) => ({ + ...value, + TTL: Math.round((expires - Date.now()) / 1000), + type: RecordType[value.type] + })); + if (cachedAnswers.length === 0) { + this.lru.remove(key); + } + // @ts-expect-error hashlru stringifies stored types which turns enums + // into strings, we convert back into enums above but tsc doesn't know + return cachedAnswers; + } + return []; + } + add(domain, answer) { + const key = `${domain.toLowerCase()}-${answer.type}`; + const answers = this.lru.get(key) ?? []; + answers.push({ + expires: Date.now() + ((answer.TTL ?? DEFAULT_TTL) * 1000), + value: answer + }); + this.lru.set(key, answers); + } + remove(domain, type) { + const key = `${domain.toLowerCase()}-${type}`; + this.lru.remove(key); + } + clear() { + this.lru.clear(); + } + } + /** + * Avoid sending multiple queries for the same hostname by caching results + */ + function cache(size) { + return new CachedAnswers(size); + } + + const DEFAULT_ANSWER_CACHE_SIZE = 1000; + let DNS$1 = class DNS { + resolvers; + cache; + constructor(init) { + this.resolvers = {}; + this.cache = cache(init.cacheSize ?? DEFAULT_ANSWER_CACHE_SIZE); + Object.entries(init.resolvers ?? 
{}).forEach(([tld, resolver]) => { + if (!Array.isArray(resolver)) { + resolver = [resolver]; + } + // convert `com` -> `com.` + if (!tld.endsWith('.')) { + tld = `${tld}.`; + } + this.resolvers[tld] = resolver; + }); + // configure default resolver if none specified + if (this.resolvers['.'] == null) { + this.resolvers['.'] = defaultResolver(); + } + } + /** + * Queries DNS resolvers for the passed record types for the passed domain. + * + * If cached records exist for all desired types they will be returned + * instead. + * + * Any new responses will be added to the cache for subsequent requests. + */ + async query(domain, options = {}) { + const types = getTypes(options.types); + const cached = options.cached !== false ? this.cache.get(domain, types) : undefined; + if (cached != null) { + options.onProgress?.(new CustomProgressEvent('dns:cache', { detail: cached })); + return cached; + } + const tld = `${domain.split('.').pop()}.`; + const resolvers = (this.resolvers[tld] ?? this.resolvers['.']).sort(() => { + return (Math.random() > 0.5) ? -1 : 1; + }); + const errors = []; + for (const resolver of resolvers) { + // skip further resolutions if the user aborted the signal + if (options.signal?.aborted === true) { + break; + } + try { + const result = await resolver(domain, { + ...options, + types + }); + for (const answer of result.Answer) { + this.cache.add(domain, answer); + } + return result; + } + catch (err) { + errors.push(err); + options.onProgress?.(new CustomProgressEvent('dns:error', { detail: err })); + } + } + if (errors.length === 1) { + throw errors[0]; + } + throw new AggregateError(errors, `DNS lookup of ${domain} ${types} failed`); + } + }; + + /** + * @packageDocumentation + * + * Query DNS records using `node:dns`, DNS over HTTP and/or DNSJSON over HTTP. + * + * A list of publicly accessible servers can be found [here](https://github.com/curl/curl/wiki/DNS-over-HTTPS#publicly-available-servers). 
+ * + * @example Using the default resolver + * + * ```TypeScript + * import { dns } from '@multiformats/dns' + * + * const resolver = dns() + * + * // resolve A records with a 5s timeout + * const result = await dns.query('google.com', { + * signal: AbortSignal.timeout(5000) + * }) + * ``` + * + * @example Using per-TLD resolvers + * + * ```TypeScript + * import { dns } from '@multiformats/dns' + * import { dnsJsonOverHttps } from '@multiformats/dns/resolvers' + * + * const resolver = dns({ + * resolvers: { + * // will only be used to resolve `.com` addresses + * 'com.': dnsJsonOverHttps('https://cloudflare-dns.com/dns-query'), + * + * // this can also be an array, resolvers will be shuffled and tried in + * // series + * 'net.': [ + * dnsJsonOverHttps('https://dns.google/resolve'), + * dnsJsonOverHttps('https://dns.pub/dns-query') + * ], + * + * // will only be used to resolve all other addresses + * '.': dnsJsonOverHttps('https://dnsforge.de/dns-query'), + * } + * }) + * ``` + * + * @example Query for specific record types + * + * ```TypeScript + * import { dns, RecordType } from '@multiformats/dns' + * + * const resolver = dns() + * + * // resolve only TXT records + * const result = await dns.query('google.com', { + * types: [ + * RecordType.TXT + * ] + * }) + * ``` + * + * ## Caching + * + * Individual Aanswers are cached so. If you make a request, for which all + * record types are cached, all values will be pulled from the cache. + * + * If any of the record types are not cached, a new request will be resolved as + * if none of the records were cached, and the cache will be updated to include + * the new results. 
+ * + * @example Ignoring the cache + * + * ```TypeScript + * import { dns, RecordType } from '@multiformats/dns' + * + * const resolver = dns() + * + * // do not used cached results, always resolve a new query + * const result = await dns.query('google.com', { + * cached: false + * }) + * ``` + */ + /** + * A subset of DNS Record Types + * + * @see https://www.iana.org/assignments/dns-parameters/dns-parameters.xhtml#dns-parameters-4. + */ + var RecordType; + (function (RecordType) { + RecordType[RecordType["A"] = 1] = "A"; + RecordType[RecordType["CNAME"] = 5] = "CNAME"; + RecordType[RecordType["TXT"] = 16] = "TXT"; + RecordType[RecordType["AAAA"] = 28] = "AAAA"; + })(RecordType || (RecordType = {})); + function dns(init = {}) { + return new DNS$1(init); + } + + const MAX_RECURSIVE_DEPTH = 32; + const { code: dnsaddrCode } = getProtocol('dnsaddr'); + const dnsaddrResolver = async function dnsaddrResolver(ma, options = {}) { + const recursionLimit = options.maxRecursiveDepth ?? MAX_RECURSIVE_DEPTH; + if (recursionLimit === 0) { + throw new CodeError$2('Max recursive depth reached', 'ERR_MAX_RECURSIVE_DEPTH_REACHED'); + } + const [, hostname] = ma.stringTuples().find(([proto]) => proto === dnsaddrCode) ?? []; + const resolver = options?.dns ?? 
dns(); + const result = await resolver.query(`_dnsaddr.${hostname}`, { + signal: options?.signal, + types: [ + RecordType.TXT + ] + }); + const peerId = ma.getPeerId(); + const output = []; + for (const answer of result.Answer) { + const addr = answer.data + .replace(/["']/g, '') + .trim() + .split('=')[1]; + if (addr == null) { + continue; + } + if (peerId != null && !addr.includes(peerId)) { + continue; + } + const ma = multiaddr(addr); + if (addr.startsWith('/dnsaddr')) { + const resolved = await ma.resolve({ + ...options, + maxRecursiveDepth: recursionLimit - 1 + }); + output.push(...resolved.map(ma => ma.toString())); + } + else { + output.push(ma.toString()); + } + } + return output; + }; + + var isPlainObj = value => { + if (Object.prototype.toString.call(value) !== '[object Object]') { + return false; + } + + const prototype = Object.getPrototypeOf(value); + return prototype === null || prototype === Object.prototype; + }; + + const isOptionObject = isPlainObj; + + const {hasOwnProperty} = Object.prototype; + const {propertyIsEnumerable} = Object; + const defineProperty = (object, name, value) => Object.defineProperty(object, name, { + value, + writable: true, + enumerable: true, + configurable: true + }); + + const globalThis$1 = commonjsGlobal; + const defaultMergeOptions = { + concatArrays: false, + ignoreUndefined: false + }; + + const getEnumerableOwnPropertyKeys = value => { + const keys = []; + + for (const key in value) { + if (hasOwnProperty.call(value, key)) { + keys.push(key); + } + } + + /* istanbul ignore else */ + if (Object.getOwnPropertySymbols) { + const symbols = Object.getOwnPropertySymbols(value); + + for (const symbol of symbols) { + if (propertyIsEnumerable.call(value, symbol)) { + keys.push(symbol); + } + } + } + + return keys; + }; + + function clone(value) { + if (Array.isArray(value)) { + return cloneArray(value); + } + + if (isOptionObject(value)) { + return cloneOptionObject(value); + } + + return value; + } + + function 
cloneArray(array) { + const result = array.slice(0, 0); + + getEnumerableOwnPropertyKeys(array).forEach(key => { + defineProperty(result, key, clone(array[key])); + }); + + return result; + } + + function cloneOptionObject(object) { + const result = Object.getPrototypeOf(object) === null ? Object.create(null) : {}; + + getEnumerableOwnPropertyKeys(object).forEach(key => { + defineProperty(result, key, clone(object[key])); + }); + + return result; + } + + /** + * @param {*} merged already cloned + * @param {*} source something to merge + * @param {string[]} keys keys to merge + * @param {Object} config Config Object + * @returns {*} cloned Object + */ + const mergeKeys = (merged, source, keys, config) => { + keys.forEach(key => { + if (typeof source[key] === 'undefined' && config.ignoreUndefined) { + return; + } + + // Do not recurse into prototype chain of merged + if (key in merged && merged[key] !== Object.getPrototypeOf(merged)) { + defineProperty(merged, key, merge$1(merged[key], source[key], config)); + } else { + defineProperty(merged, key, clone(source[key])); + } + }); + + return merged; + }; + + /** + * @param {*} merged already cloned + * @param {*} source something to merge + * @param {Object} config Config Object + * @returns {*} cloned Object + * + * see [Array.prototype.concat ( ...arguments )](http://www.ecma-international.org/ecma-262/6.0/#sec-array.prototype.concat) + */ + const concatArrays = (merged, source, config) => { + let result = merged.slice(0, 0); + let resultIndex = 0; + + [merged, source].forEach(array => { + const indices = []; + + // `result.concat(array)` with cloning + for (let k = 0; k < array.length; k++) { + if (!hasOwnProperty.call(array, k)) { + continue; + } + + indices.push(String(k)); + + if (array === merged) { + // Already cloned + defineProperty(result, resultIndex++, array[k]); + } else { + defineProperty(result, resultIndex++, clone(array[k])); + } + } + + // Merge non-index keys + result = mergeKeys(result, array, 
getEnumerableOwnPropertyKeys(array).filter(key => !indices.includes(key)), config); + }); + + return result; + }; + + /** + * @param {*} merged already cloned + * @param {*} source something to merge + * @param {Object} config Config Object + * @returns {*} cloned Object + */ + function merge$1(merged, source, config) { + if (config.concatArrays && Array.isArray(merged) && Array.isArray(source)) { + return concatArrays(merged, source, config); + } + + if (!isOptionObject(source) || !isOptionObject(merged)) { + return clone(source); + } + + return mergeKeys(merged, source, getEnumerableOwnPropertyKeys(source), config); + } + + var mergeOptions = function (...options) { + const config = merge$1(clone(defaultMergeOptions), (this !== globalThis$1 && this) || {}, defaultMergeOptions); + let merged = {_: {}}; + + for (const option of options) { + if (option === undefined) { + continue; + } + + if (!isOptionObject(option)) { + throw new TypeError('`' + option + '` is not an Option Object'); + } + + merged = merge$1(merged, {_: option}, config); + } + + return merged._; + }; + + var mergeOptions$1 = /*@__PURE__*/getDefaultExportFromCjs(mergeOptions); + + var messages; + (function (messages) { + messages["NOT_STARTED_YET"] = "The libp2p node is not started yet"; + messages["ERR_PROTECTOR_REQUIRED"] = "Private network is enforced, but no protector was provided"; + messages["NOT_FOUND"] = "Not found"; + })(messages || (messages = {})); + var codes; + (function (codes) { + codes["ERR_PROTECTOR_REQUIRED"] = "ERR_PROTECTOR_REQUIRED"; + codes["ERR_PEER_DIAL_INTERCEPTED"] = "ERR_PEER_DIAL_INTERCEPTED"; + codes["ERR_CONNECTION_INTERCEPTED"] = "ERR_CONNECTION_INTERCEPTED"; + codes["ERR_INVALID_PROTOCOLS_FOR_STREAM"] = "ERR_INVALID_PROTOCOLS_FOR_STREAM"; + codes["ERR_CONNECTION_ENDED"] = "ERR_CONNECTION_ENDED"; + codes["ERR_CONNECTION_FAILED"] = "ERR_CONNECTION_FAILED"; + codes["ERR_NODE_NOT_STARTED"] = "ERR_NODE_NOT_STARTED"; + codes["ERR_ALREADY_ABORTED"] = "ERR_ALREADY_ABORTED"; + 
codes["ERR_TOO_MANY_ADDRESSES"] = "ERR_TOO_MANY_ADDRESSES"; + codes["ERR_NO_VALID_ADDRESSES"] = "ERR_NO_VALID_ADDRESSES"; + codes["ERR_RELAYED_DIAL"] = "ERR_RELAYED_DIAL"; + codes["ERR_DIALED_SELF"] = "ERR_DIALED_SELF"; + codes["ERR_DISCOVERED_SELF"] = "ERR_DISCOVERED_SELF"; + codes["ERR_DUPLICATE_TRANSPORT"] = "ERR_DUPLICATE_TRANSPORT"; + codes["ERR_ENCRYPTION_FAILED"] = "ERR_ENCRYPTION_FAILED"; + codes["ERR_HOP_REQUEST_FAILED"] = "ERR_HOP_REQUEST_FAILED"; + codes["ERR_INVALID_KEY"] = "ERR_INVALID_KEY"; + codes["ERR_INVALID_MESSAGE"] = "ERR_INVALID_MESSAGE"; + codes["ERR_INVALID_PARAMETERS"] = "ERR_INVALID_PARAMETERS"; + codes["ERR_INVALID_PEER"] = "ERR_INVALID_PEER"; + codes["ERR_MUXER_UNAVAILABLE"] = "ERR_MUXER_UNAVAILABLE"; + codes["ERR_NOT_FOUND"] = "ERR_NOT_FOUND"; + codes["ERR_TRANSPORT_UNAVAILABLE"] = "ERR_TRANSPORT_UNAVAILABLE"; + codes["ERR_TRANSPORT_DIAL_FAILED"] = "ERR_TRANSPORT_DIAL_FAILED"; + codes["ERR_UNSUPPORTED_PROTOCOL"] = "ERR_UNSUPPORTED_PROTOCOL"; + codes["ERR_PROTOCOL_HANDLER_ALREADY_REGISTERED"] = "ERR_PROTOCOL_HANDLER_ALREADY_REGISTERED"; + codes["ERR_INVALID_MULTIADDR"] = "ERR_INVALID_MULTIADDR"; + codes["ERR_SIGNATURE_NOT_VALID"] = "ERR_SIGNATURE_NOT_VALID"; + codes["ERR_FIND_SELF"] = "ERR_FIND_SELF"; + codes["ERR_NO_ROUTERS_AVAILABLE"] = "ERR_NO_ROUTERS_AVAILABLE"; + codes["ERR_CONNECTION_NOT_MULTIPLEXED"] = "ERR_CONNECTION_NOT_MULTIPLEXED"; + codes["ERR_NO_DIAL_TOKENS"] = "ERR_NO_DIAL_TOKENS"; + codes["ERR_INVALID_CMS"] = "ERR_INVALID_CMS"; + codes["ERR_MISSING_KEYS"] = "ERR_MISSING_KEYS"; + codes["ERR_NO_KEY"] = "ERR_NO_KEY"; + codes["ERR_INVALID_KEY_NAME"] = "ERR_INVALID_KEY_NAME"; + codes["ERR_INVALID_KEY_TYPE"] = "ERR_INVALID_KEY_TYPE"; + codes["ERR_KEY_ALREADY_EXISTS"] = "ERR_KEY_ALREADY_EXISTS"; + codes["ERR_INVALID_KEY_SIZE"] = "ERR_INVALID_KEY_SIZE"; + codes["ERR_KEY_NOT_FOUND"] = "ERR_KEY_NOT_FOUND"; + codes["ERR_OLD_KEY_NAME_INVALID"] = "ERR_OLD_KEY_NAME_INVALID"; + codes["ERR_NEW_KEY_NAME_INVALID"] = 
"ERR_NEW_KEY_NAME_INVALID"; + codes["ERR_PASSWORD_REQUIRED"] = "ERR_PASSWORD_REQUIRED"; + codes["ERR_PEM_REQUIRED"] = "ERR_PEM_REQUIRED"; + codes["ERR_CANNOT_READ_KEY"] = "ERR_CANNOT_READ_KEY"; + codes["ERR_MISSING_PRIVATE_KEY"] = "ERR_MISSING_PRIVATE_KEY"; + codes["ERR_MISSING_PUBLIC_KEY"] = "ERR_MISSING_PUBLIC_KEY"; + codes["ERR_INVALID_OLD_PASS_TYPE"] = "ERR_INVALID_OLD_PASS_TYPE"; + codes["ERR_INVALID_NEW_PASS_TYPE"] = "ERR_INVALID_NEW_PASS_TYPE"; + codes["ERR_INVALID_PASS_LENGTH"] = "ERR_INVALID_PASS_LENGTH"; + codes["ERR_NOT_IMPLEMENTED"] = "ERR_NOT_IMPLEMENTED"; + codes["ERR_WRONG_PING_ACK"] = "ERR_WRONG_PING_ACK"; + codes["ERR_INVALID_RECORD"] = "ERR_INVALID_RECORD"; + codes["ERR_ALREADY_SUCCEEDED"] = "ERR_ALREADY_SUCCEEDED"; + codes["ERR_NO_HANDLER_FOR_PROTOCOL"] = "ERR_NO_HANDLER_FOR_PROTOCOL"; + codes["ERR_TOO_MANY_OUTBOUND_PROTOCOL_STREAMS"] = "ERR_TOO_MANY_OUTBOUND_PROTOCOL_STREAMS"; + codes["ERR_TOO_MANY_INBOUND_PROTOCOL_STREAMS"] = "ERR_TOO_MANY_INBOUND_PROTOCOL_STREAMS"; + codes["ERR_CONNECTION_DENIED"] = "ERR_CONNECTION_DENIED"; + codes["ERR_TRANSFER_LIMIT_EXCEEDED"] = "ERR_TRANSFER_LIMIT_EXCEEDED"; + })(codes || (codes = {})); + + const DefaultConfig = { + addresses: { + listen: [], + announce: [], + noAnnounce: [], + announceFilter: (multiaddrs) => multiaddrs + }, + connectionManager: { + resolvers: { + dnsaddr: dnsaddrResolver + }, + addressSorter: defaultAddressSort + }, + transportManager: { + faultTolerance: FaultTolerance.FATAL_ALL + } + }; + async function validateConfig(opts) { + const resultingOptions = mergeOptions$1(DefaultConfig, opts); + if (resultingOptions.connectionProtector === null && globalThis.process?.env?.LIBP2P_FORCE_PNET != null) { // eslint-disable-line no-undef + throw new CodeError$2(messages.ERR_PROTECTOR_REQUIRED, codes.ERR_PROTECTOR_REQUIRED); + } + if (!(await peerIdFromKeys(resultingOptions.privateKey.public.bytes, resultingOptions.privateKey.bytes)).equals(resultingOptions.peerId)) { + throw new 
CodeError$2('Private key doesn\'t match peer id', codes.ERR_INVALID_KEY); + } + return resultingOptions; + } + + // From https://github.com/sindresorhus/random-int/blob/c37741b56f76b9160b0b63dae4e9c64875128146/index.js#L13-L15 + + const createAbortError = () => { + const error = new Error('Delay aborted'); + error.name = 'AbortError'; + return error; + }; + + const clearMethods = new WeakMap(); + + function createDelay({clearTimeout: defaultClear, setTimeout: defaultSet} = {}) { + // We cannot use `async` here as we need the promise identity. + return (milliseconds, {value, signal} = {}) => { + // TODO: Use `signal?.throwIfAborted()` when targeting Node.js 18. + if (signal?.aborted) { + return Promise.reject(createAbortError()); + } + + let timeoutId; + let settle; + let rejectFunction; + const clear = defaultClear ?? clearTimeout; + + const signalListener = () => { + clear(timeoutId); + rejectFunction(createAbortError()); + }; + + const cleanup = () => { + if (signal) { + signal.removeEventListener('abort', signalListener); + } + }; + + const delayPromise = new Promise((resolve, reject) => { + settle = () => { + cleanup(); + resolve(value); + }; + + rejectFunction = reject; + timeoutId = (defaultSet ?? setTimeout)(settle, milliseconds); + }); + + if (signal) { + signal.addEventListener('abort', signalListener, {once: true}); + } + + clearMethods.set(delayPromise, () => { + clear(timeoutId); + timeoutId = null; + settle(); + }); + + return delayPromise; + }; + } + + const delay = createDelay(); + + class RateLimiter { + memoryStorage; + points; + duration; + blockDuration; + execEvenly; + execEvenlyMinDelayMs; + keyPrefix; + constructor(opts = {}) { + this.points = opts.points ?? 4; + this.duration = opts.duration ?? 1; + this.blockDuration = opts.blockDuration ?? 0; + this.execEvenly = opts.execEvenly ?? false; + this.execEvenlyMinDelayMs = opts.execEvenlyMinDelayMs ?? (this.duration * 1000 / this.points); + this.keyPrefix = opts.keyPrefix ?? 
'rlflx'; + this.memoryStorage = new MemoryStorage(); + } + async consume(key, pointsToConsume = 1, options = {}) { + const rlKey = this.getKey(key); + const secDuration = this._getKeySecDuration(options); + let res = this.memoryStorage.incrby(rlKey, pointsToConsume, secDuration); + res.remainingPoints = Math.max(this.points - res.consumedPoints, 0); + if (res.consumedPoints > this.points) { + // Block only first time when consumed more than points + if (this.blockDuration > 0 && res.consumedPoints <= (this.points + pointsToConsume)) { + // Block key + res = this.memoryStorage.set(rlKey, res.consumedPoints, this.blockDuration); + } + throw new CodeError$2('Rate limit exceeded', 'ERR_RATE_LIMIT_EXCEEDED', res); + } + else if (this.execEvenly && res.msBeforeNext > 0 && !res.isFirstInDuration) { + // Execute evenly + let delayMs = Math.ceil(res.msBeforeNext / (res.remainingPoints + 2)); + if (delayMs < this.execEvenlyMinDelayMs) { + delayMs = res.consumedPoints * this.execEvenlyMinDelayMs; + } + await delay(delayMs); + } + return res; + } + penalty(key, points = 1, options = {}) { + const rlKey = this.getKey(key); + const secDuration = this._getKeySecDuration(options); + const res = this.memoryStorage.incrby(rlKey, points, secDuration); + res.remainingPoints = Math.max(this.points - res.consumedPoints, 0); + return res; + } + reward(key, points = 1, options = {}) { + const rlKey = this.getKey(key); + const secDuration = this._getKeySecDuration(options); + const res = this.memoryStorage.incrby(rlKey, -points, secDuration); + res.remainingPoints = Math.max(this.points - res.consumedPoints, 0); + return res; + } + /** + * Block any key for secDuration seconds + * + * @param key + * @param secDuration + */ + block(key, secDuration) { + const msDuration = secDuration * 1000; + const initPoints = this.points + 1; + this.memoryStorage.set(this.getKey(key), initPoints, secDuration); + return { + remainingPoints: 0, + msBeforeNext: msDuration === 0 ? 
-1 : msDuration, + consumedPoints: initPoints, + isFirstInDuration: false + }; + } + set(key, points, secDuration = 0) { + const msDuration = (secDuration >= 0 ? secDuration : this.duration) * 1000; + this.memoryStorage.set(this.getKey(key), points, secDuration); + return { + remainingPoints: 0, + msBeforeNext: msDuration === 0 ? -1 : msDuration, + consumedPoints: points, + isFirstInDuration: false + }; + } + get(key) { + const res = this.memoryStorage.get(this.getKey(key)); + if (res != null) { + res.remainingPoints = Math.max(this.points - res.consumedPoints, 0); + } + return res; + } + delete(key) { + this.memoryStorage.delete(this.getKey(key)); + } + _getKeySecDuration(options) { + if (options?.customDuration != null && options.customDuration >= 0) { + return options.customDuration; + } + return this.duration; + } + getKey(key) { + return this.keyPrefix.length > 0 ? `${this.keyPrefix}:${key}` : key; + } + parseKey(rlKey) { + return rlKey.substring(this.keyPrefix.length); + } + } + class MemoryStorage { + storage; + constructor() { + this.storage = new Map(); + } + incrby(key, value, durationSec) { + const existing = this.storage.get(key); + if (existing != null) { + const msBeforeExpires = existing.expiresAt != null + ? existing.expiresAt.getTime() - new Date().getTime() + : -1; + if (existing.expiresAt == null || msBeforeExpires > 0) { + // Change value + existing.value += value; + return { + remainingPoints: 0, + msBeforeNext: msBeforeExpires, + consumedPoints: existing.value, + isFirstInDuration: false + }; + } + return this.set(key, value, durationSec); + } + return this.set(key, value, durationSec); + } + set(key, value, durationSec) { + const durationMs = durationSec * 1000; + const existing = this.storage.get(key); + if (existing != null) { + clearTimeout(existing.timeoutId); + } + const record = { + value, + expiresAt: durationMs > 0 ? 
new Date(Date.now() + durationMs) : undefined + }; + this.storage.set(key, record); + if (durationMs > 0) { + record.timeoutId = setTimeout(() => { + this.storage.delete(key); + }, durationMs); + if (record.timeoutId.unref != null) { + record.timeoutId.unref(); + } + } + return { + remainingPoints: 0, + msBeforeNext: durationMs === 0 ? -1 : durationMs, + consumedPoints: record.value, + isFirstInDuration: true + }; + } + get(key) { + const existing = this.storage.get(key); + if (existing != null) { + const msBeforeExpires = existing.expiresAt != null + ? existing.expiresAt.getTime() - new Date().getTime() + : -1; + return { + remainingPoints: 0, + msBeforeNext: msBeforeExpires, + consumedPoints: existing.value, + isFirstInDuration: false + }; + } + } + delete(key) { + const record = this.storage.get(key); + if (record != null) { + if (record.timeoutId != null) { + clearTimeout(record.timeoutId); + } + this.storage.delete(key); + return true; + } + return false; + } + } + + /** + * Extracts a PeerId and/or multiaddr from the passed PeerId or Multiaddr or an array of Multiaddrs + */ + function getPeerAddress(peer) { + if (isPeerId(peer)) { + return { peerId: peer, multiaddrs: [] }; + } + if (!Array.isArray(peer)) { + peer = [peer]; + } + let peerId; + if (peer.length > 0) { + const peerIdStr = peer[0].getPeerId(); + peerId = peerIdStr == null ? 
undefined : peerIdFromString(peerIdStr); + // ensure PeerId is either not set or is consistent + peer.forEach(ma => { + if (!isMultiaddr(ma)) { + throw new CodeError$2('Invalid Multiaddr', codes.ERR_INVALID_MULTIADDR); + } + const maPeerIdStr = ma.getPeerId(); + if (maPeerIdStr == null) { + if (peerId != null) { + throw new CodeError$2('Multiaddrs must all have the same peer id or have no peer id', codes.ERR_INVALID_PARAMETERS); + } + } + else { + const maPeerId = peerIdFromString(maPeerIdStr); + if (peerId == null || !peerId.equals(maPeerId)) { + throw new CodeError$2('Multiaddrs must all have the same peer id or have no peer id', codes.ERR_INVALID_PARAMETERS); + } + } + }); + } + return { + peerId, + multiaddrs: peer + }; + } + + function pDefer() { + const deferred = {}; + + deferred.promise = new Promise((resolve, reject) => { + deferred.resolve = resolve; + deferred.reject = reject; + }); + + return deferred; + } + + // ported from https://www.npmjs.com/package/fast-fifo + class FixedFIFO { + buffer; + mask; + top; + btm; + next; + constructor(hwm) { + if (!(hwm > 0) || ((hwm - 1) & hwm) !== 0) { + throw new Error('Max size for a FixedFIFO should be a power of two'); + } + this.buffer = new Array(hwm); + this.mask = hwm - 1; + this.top = 0; + this.btm = 0; + this.next = null; + } + push(data) { + if (this.buffer[this.top] !== undefined) { + return false; + } + this.buffer[this.top] = data; + this.top = (this.top + 1) & this.mask; + return true; + } + shift() { + const last = this.buffer[this.btm]; + if (last === undefined) { + return undefined; + } + this.buffer[this.btm] = undefined; + this.btm = (this.btm + 1) & this.mask; + return last; + } + isEmpty() { + return this.buffer[this.btm] === undefined; + } + } + class FIFO { + size; + hwm; + head; + tail; + constructor(options = {}) { + this.hwm = options.splitLimit ?? 
16; + this.head = new FixedFIFO(this.hwm); + this.tail = this.head; + this.size = 0; + } + calculateSize(obj) { + if (obj?.byteLength != null) { + return obj.byteLength; + } + return 1; + } + push(val) { + if (val?.value != null) { + this.size += this.calculateSize(val.value); + } + if (!this.head.push(val)) { + const prev = this.head; + this.head = prev.next = new FixedFIFO(2 * this.head.buffer.length); + this.head.push(val); + } + } + shift() { + let val = this.tail.shift(); + if (val === undefined && (this.tail.next != null)) { + const next = this.tail.next; + this.tail.next = null; + this.tail = next; + val = this.tail.shift(); + } + if (val?.value != null) { + this.size -= this.calculateSize(val.value); + } + return val; + } + isEmpty() { + return this.head.isEmpty(); + } + } + + /** + * @packageDocumentation + * + * An iterable that you can push values into. + * + * @example + * + * ```js + * import { pushable } from 'it-pushable' + * + * const source = pushable() + * + * setTimeout(() => source.push('hello'), 100) + * setTimeout(() => source.push('world'), 200) + * setTimeout(() => source.end(), 300) + * + * const start = Date.now() + * + * for await (const value of source) { + * console.log(`got "${value}" after ${Date.now() - start}ms`) + * } + * console.log(`done after ${Date.now() - start}ms`) + * + * // Output: + * // got "hello" after 105ms + * // got "world" after 207ms + * // done after 309ms + * ``` + * + * @example + * + * ```js + * import { pushableV } from 'it-pushable' + * import all from 'it-all' + * + * const source = pushableV() + * + * source.push(1) + * source.push(2) + * source.push(3) + * source.end() + * + * console.info(await all(source)) + * + * // Output: + * // [ [1, 2, 3] ] + * ``` + */ + let AbortError$3 = class AbortError extends Error { + type; + code; + constructor(message, code) { + super(message ?? 'The operation was aborted'); + this.type = 'aborted'; + this.code = code ?? 
'ABORT_ERR'; + } + }; + function pushable$1(options = {}) { + const getNext = (buffer) => { + const next = buffer.shift(); + if (next == null) { + return { done: true }; + } + if (next.error != null) { + throw next.error; + } + return { + done: next.done === true, + // @ts-expect-error if done is false, value will be present + value: next.value + }; + }; + return _pushable(getNext, options); + } + function _pushable(getNext, options) { + options = options ?? {}; + let onEnd = options.onEnd; + let buffer = new FIFO(); + let pushable; + let onNext; + let ended; + let drain = pDefer(); + const waitNext = async () => { + try { + if (!buffer.isEmpty()) { + return getNext(buffer); + } + if (ended) { + return { done: true }; + } + return await new Promise((resolve, reject) => { + onNext = (next) => { + onNext = null; + buffer.push(next); + try { + resolve(getNext(buffer)); + } + catch (err) { + reject(err); + } + return pushable; + }; + }); + } + finally { + if (buffer.isEmpty()) { + // settle promise in the microtask queue to give consumers a chance to + // await after calling .push + queueMicrotask(() => { + drain.resolve(); + drain = pDefer(); + }); + } + } + }; + const bufferNext = (next) => { + if (onNext != null) { + return onNext(next); + } + buffer.push(next); + return pushable; + }; + const bufferError = (err) => { + buffer = new FIFO(); + if (onNext != null) { + return onNext({ error: err }); + } + buffer.push({ error: err }); + return pushable; + }; + const push = (value) => { + if (ended) { + return pushable; + } + // @ts-expect-error `byteLength` is not declared on PushType + if (options?.objectMode !== true && value?.byteLength == null) { + throw new Error('objectMode was not true but tried to push non-Uint8Array value'); + } + return bufferNext({ done: false, value }); + }; + const end = (err) => { + if (ended) + return pushable; + ended = true; + return (err != null) ? 
bufferError(err) : bufferNext({ done: true }); + }; + const _return = () => { + buffer = new FIFO(); + end(); + return { done: true }; + }; + const _throw = (err) => { + end(err); + return { done: true }; + }; + pushable = { + [Symbol.asyncIterator]() { return this; }, + next: waitNext, + return: _return, + throw: _throw, + push, + end, + get readableLength() { + return buffer.size; + }, + onEmpty: async (options) => { + const signal = options?.signal; + signal?.throwIfAborted(); + if (buffer.isEmpty()) { + return; + } + let cancel; + let listener; + if (signal != null) { + cancel = new Promise((resolve, reject) => { + listener = () => { + reject(new AbortError$3()); + }; + signal.addEventListener('abort', listener); + }); + } + try { + await Promise.race([ + drain.promise, + cancel + ]); + } + finally { + if (listener != null && signal != null) { + signal?.removeEventListener('abort', listener); + } + } + } + }; + if (onEnd == null) { + return pushable; + } + const _pushable = pushable; + pushable = { + [Symbol.asyncIterator]() { return this; }, + next() { + return _pushable.next(); + }, + throw(err) { + _pushable.throw(err); + if (onEnd != null) { + onEnd(err); + onEnd = undefined; + } + return { done: true }; + }, + return() { + _pushable.return(); + if (onEnd != null) { + onEnd(); + onEnd = undefined; + } + return { done: true }; + }, + push, + end(err) { + _pushable.end(err); + if (onEnd != null) { + onEnd(err); + onEnd = undefined; + } + return pushable; + }, + get readableLength() { + return _pushable.readableLength; + }, + onEmpty: (opts) => { + return _pushable.onEmpty(opts); + } + }; + return pushable; + } + + /** + * @packageDocumentation + * + * Race an event against an AbortSignal, taking care to remove any event + * listeners that were added. 
+ * + * @example Getting started + * + * ```TypeScript + * import { raceEvent } from 'race-event' + * + * const controller = new AbortController() + * const emitter = new EventTarget() + * + * setTimeout(() => { + * controller.abort() + * }, 500) + * + * setTimeout(() => { + * // too late + * emitter.dispatchEvent(new CustomEvent('event')) + * }, 1000) + * + * // throws an AbortError + * const resolve = await raceEvent(emitter, 'event', controller.signal) + * ``` + * + * @example Aborting the promise with an error event + * + * ```TypeScript + * import { raceEvent } from 'race-event' + * + * const emitter = new EventTarget() + * + * setTimeout(() => { + * emitter.dispatchEvent(new CustomEvent('failure', { + * detail: new Error('Oh no!') + * })) + * }, 1000) + * + * // throws 'Oh no!' error + * const resolve = await raceEvent(emitter, 'success', AbortSignal.timeout(5000), { + * errorEvent: 'failure' + * }) + * ``` + * + * @example Customising the thrown AbortError + * + * The error message and `.code` property of the thrown `AbortError` can be + * specified by passing options: + * + * ```TypeScript + * import { raceEvent } from 'race-event' + * + * const controller = new AbortController() + * const emitter = new EventTarget() + * + * setTimeout(() => { + * controller.abort() + * }, 500) + * + * // throws a Error: Oh no! + * const resolve = await raceEvent(emitter, 'event', controller.signal, { + * errorMessage: 'Oh no!', + * errorCode: 'ERR_OH_NO' + * }) + * ``` + * + * @example Only resolving on specific events + * + * Where multiple events with the same type are emitted, a `filter` function can + * be passed to only resolve on one of them: + * + * ```TypeScript + * import { raceEvent } from 'race-event' + * + * const controller = new AbortController() + * const emitter = new EventTarget() + * + * // throws a Error: Oh no! 
+ * const resolve = await raceEvent(emitter, 'event', controller.signal, { + * filter: (evt: Event) => { + * return evt.detail.foo === 'bar' + * } + * }) + * ``` + * + * @example Terminating early by throwing from the filter + * + * You can cause listening for the event to cease and all event listeners to be + * removed by throwing from the filter: + * + * ```TypeScript + * import { raceEvent } from 'race-event' + * + * const controller = new AbortController() + * const emitter = new EventTarget() + * + * // throws Error: Cannot continue + * const resolve = await raceEvent(emitter, 'event', controller.signal, { + * filter: (evt) => { + * if (...reasons) { + * throw new Error('Cannot continue') + * } + * + * return true + * } + * }) + * ``` + */ + /** + * An abort error class that extends error + */ + let AbortError$2 = class AbortError extends Error { + type; + code; + constructor(message, code) { + super(message ?? 'The operation was aborted'); + this.type = 'aborted'; + this.name = 'AbortError'; + this.code = code ?? 
'ABORT_ERR'; + } + }; + /** + * Race a promise against an abort signal + */ + async function raceEvent(emitter, eventName, signal, opts) { + // create the error here so we have more context in the stack trace + const error = new AbortError$2(opts?.errorMessage, opts?.errorCode); + if (signal?.aborted === true) { + return Promise.reject(error); + } + return new Promise((resolve, reject) => { + function removeListeners() { + signal?.removeEventListener('abort', abortListener); + emitter.removeEventListener(eventName, eventListener); + if (opts?.errorEvent != null) { + emitter.removeEventListener(opts.errorEvent, errorEventListener); + } + } + const eventListener = (evt) => { + try { + if (opts?.filter?.(evt) === false) { + return; + } + } + catch (err) { + removeListeners(); + reject(err); + return; + } + removeListeners(); + resolve(evt); + }; + const errorEventListener = (evt) => { + removeListeners(); + reject(evt.detail); + }; + const abortListener = () => { + removeListeners(); + reject(error); + }; + signal?.addEventListener('abort', abortListener); + emitter.addEventListener(eventName, eventListener); + if (opts?.errorEvent != null) { + emitter.addEventListener(opts.errorEvent, errorEventListener); + } + }); + } + + /** + * An abort error class that extends error + */ + let AbortError$1 = class AbortError extends Error { + type; + code; + constructor(message, code) { + super(message ?? 'The operation was aborted'); + this.type = 'aborted'; + this.name = 'AbortError'; + this.code = code ?? 
'ABORT_ERR'; + } + }; + /** + * Race a promise against an abort signal + */ + async function raceSignal(promise, signal, opts) { + if (signal == null) { + return promise; + } + if (signal.aborted) { + return Promise.reject(new AbortError$1(opts?.errorMessage, opts?.errorCode)); + } + let listener; + // create the error here so we have more context in the stack trace + const error = new AbortError$1(opts?.errorMessage, opts?.errorCode); + try { + return await Promise.race([ + promise, + new Promise((resolve, reject) => { + listener = () => { + reject(error); + }; + signal.addEventListener('abort', listener); + }) + ]); + } + finally { + if (listener != null) { + signal.removeEventListener('abort', listener); + } + } + } + + class JobRecipient { + deferred; + signal; + where; + constructor(where, signal) { + this.signal = signal; + this.deferred = pDefer(); + this.where = where; + this.onAbort = this.onAbort.bind(this); + this.signal?.addEventListener('abort', this.onAbort); + } + onAbort() { + this.deferred.reject(this.signal?.reason ?? 
new AbortError$5()); + } + cleanup() { + this.signal?.removeEventListener('abort', this.onAbort); + } + } + + /** + * Returns a random string + */ + function randomId() { + return `${(parseInt(String(Math.random() * 1e9), 10)).toString()}${Date.now()}`; + } + class Job { + id; + fn; + options; + recipients; + status; + timeline; + controller; + constructor(fn, options) { + this.id = randomId(); + this.status = 'queued'; + this.fn = fn; + this.options = options; + this.recipients = []; + this.timeline = { + created: Date.now() + }; + this.controller = new AbortController(); + setMaxListeners(Infinity, this.controller.signal); + this.onAbort = this.onAbort.bind(this); + } + abort(err) { + this.controller.abort(err); + } + onAbort() { + const allAborted = this.recipients.reduce((acc, curr) => { + return acc && (curr.signal?.aborted === true); + }, true); + // if all recipients have aborted the job, actually abort the job + if (allAborted) { + this.controller.abort(new AbortError$5()); + this.cleanup(); + } + } + async join(options = {}) { + const recipient = new JobRecipient((new Error('where')).stack, options.signal); + this.recipients.push(recipient); + options.signal?.addEventListener('abort', this.onAbort); + return recipient.deferred.promise; + } + async run() { + this.status = 'running'; + this.timeline.started = Date.now(); + try { + this.controller.signal.throwIfAborted(); + const result = await raceSignal(this.fn({ + ...(this.options ?? 
{}), + signal: this.controller.signal + }), this.controller.signal); + this.recipients.forEach(recipient => { + recipient.deferred.resolve(result); + }); + this.status = 'complete'; + } + catch (err) { + this.recipients.forEach(recipient => { + recipient.deferred.reject(err); + }); + this.status = 'errored'; + } + finally { + this.timeline.finished = Date.now(); + this.cleanup(); + } + } + cleanup() { + this.recipients.forEach(recipient => { + recipient.cleanup(); + recipient.signal?.removeEventListener('abort', this.onAbort); + }); + } + } + + /** + * Heavily influence by `p-queue` with the following differences: + * + * 1. Items remain at the head of the queue while they are running so `queue.size` includes `queue.pending` items - this is so interested parties can join the results of a queue item while it is running + * 2. The options for a job are stored separately to the job in order for them to be modified while they are still in the queue + */ + class Queue extends TypedEventEmitter { + concurrency; + queue; + pending; + sort; + constructor(init = {}) { + super(); + this.concurrency = init.concurrency ?? 
Number.POSITIVE_INFINITY; + this.pending = 0; + if (init.metricName != null) { + init.metrics?.registerMetricGroup(init.metricName, { + calculate: () => { + return { + size: this.queue.length, + running: this.pending, + queued: this.queue.length - this.pending + }; + } + }); + } + this.sort = init.sort; + this.queue = []; + } + tryToStartAnother() { + if (this.size === 0) { + // do this in the microtask queue so all job recipients receive the + // result before the "empty" event fires + queueMicrotask(() => { + this.safeDispatchEvent('empty'); + }); + if (this.running === 0) { + // do this in the microtask queue so all job recipients receive the + // result before the "idle" event fires + queueMicrotask(() => { + this.safeDispatchEvent('idle'); + }); + } + return false; + } + if (this.pending < this.concurrency) { + let job; + for (const j of this.queue) { + if (j.status === 'queued') { + job = j; + break; + } + } + if (job == null) { + return false; + } + this.safeDispatchEvent('active'); + this.pending++; + job.run() + .finally(() => { + // remove the job from the queue + for (let i = 0; i < this.queue.length; i++) { + if (this.queue[i] === job) { + this.queue.splice(i, 1); + break; + } + } + this.pending--; + this.tryToStartAnother(); + this.safeDispatchEvent('next'); + }); + return true; + } + return false; + } + enqueue(job) { + this.queue.push(job); + if (this.sort != null) { + this.queue.sort(this.sort); + } + } + /** + * Adds a sync or async task to the queue. Always returns a promise. 
+ */ + async add(fn, options) { + options?.signal?.throwIfAborted(); + const job = new Job(fn, options); + const p = job.join(options) + .then(result => { + this.safeDispatchEvent('completed', { detail: result }); + this.safeDispatchEvent('success', { detail: { job, result } }); + return result; + }) + .catch(err => { + if (job.status === 'queued') { + // job was aborted before it started - remove the job from the queue + for (let i = 0; i < this.queue.length; i++) { + if (this.queue[i] === job) { + this.queue.splice(i, 1); + break; + } + } + } + this.safeDispatchEvent('error', { detail: err }); + this.safeDispatchEvent('failure', { detail: { job, error: err } }); + throw err; + }); + this.enqueue(job); + this.safeDispatchEvent('add'); + this.tryToStartAnother(); + return p; + } + /** + * Clear the queue + */ + clear() { + this.queue.splice(0, this.queue.length); + } + /** + * Abort all jobs in the queue and clear it + */ + abort() { + this.queue.forEach(job => { + job.abort(new AbortError$5()); + }); + this.clear(); + } + /** + * Can be called multiple times. Useful if you for example add additional items at a later time. + * + * @returns A promise that settles when the queue becomes empty. + */ + async onEmpty(options) { + // Instantly resolve if the queue is empty + if (this.size === 0) { + return; + } + await raceEvent(this, 'empty', options?.signal); + } + /** + * @returns A promise that settles when the queue size is less than the given + * limit: `queue.size < limit`. + * + * If you want to avoid having the queue grow beyond a certain size you can + * `await queue.onSizeLessThan()` before adding a new item. + * + * Note that this only limits the number of items waiting to start. There + * could still be up to `concurrency` jobs already running that this call does + * not include in its calculation. + */ + async onSizeLessThan(limit, options) { + // Instantly resolve if the queue is empty. 
+ if (this.size < limit) { + return; + } + await raceEvent(this, 'next', options?.signal, { + filter: () => this.size < limit + }); + } + /** + * The difference with `.onEmpty` is that `.onIdle` guarantees that all work + * from the queue has finished. `.onEmpty` merely signals that the queue is + * empty, but it could mean that some promises haven't completed yet. + * + * @returns A promise that settles when the queue becomes empty, and all + * promises have completed; `queue.size === 0 && queue.pending === 0`. + */ + async onIdle(options) { + // Instantly resolve if none pending and if nothing else is queued + if (this.pending === 0 && this.size === 0) { + return; + } + await raceEvent(this, 'idle', options?.signal); + } + /** + * Size of the queue including running items + */ + get size() { + return this.queue.length; + } + /** + * The number of queued items waiting to run. + */ + get queued() { + return this.queue.length - this.pending; + } + /** + * The number of items currently running. + */ + get running() { + return this.pending; + } + /** + * Returns an async generator that makes it easy to iterate over the results + * of jobs added to the queue. + * + * The generator will end when the queue becomes idle, that is there are no + * jobs running and no jobs that have yet to run. + * + * If you need to keep the queue open indefinitely, consider using it-pushable + * instead. 
+ */ + async *toGenerator(options) { + options?.signal?.throwIfAborted(); + const stream = pushable$1({ + objectMode: true + }); + const cleanup = (err) => { + if (err != null) { + this.abort(); + } + else { + this.clear(); + } + stream.end(err); + }; + const onQueueJobComplete = (evt) => { + if (evt.detail != null) { + stream.push(evt.detail); + } + }; + const onQueueError = (evt) => { + cleanup(evt.detail); + }; + const onQueueIdle = () => { + cleanup(); + }; + // clear the queue and throw if the query is aborted + const onSignalAbort = () => { + cleanup(new CodeError$2('Queue aborted', 'ERR_QUEUE_ABORTED')); + }; + // add listeners + this.addEventListener('completed', onQueueJobComplete); + this.addEventListener('error', onQueueError); + this.addEventListener('idle', onQueueIdle); + options?.signal?.addEventListener('abort', onSignalAbort); + try { + yield* stream; + } + finally { + // remove listeners + this.removeEventListener('completed', onQueueJobComplete); + this.removeEventListener('error', onQueueError); + this.removeEventListener('idle', onQueueIdle); + options?.signal?.removeEventListener('abort', onSignalAbort); + // empty the queue for when the user has broken out of a loop early + cleanup(); + } + } + } + + /** + * Extends Queue to add support for querying queued jobs by peer id + */ + class PeerQueue extends Queue { + has(peerId) { + return this.find(peerId) != null; + } + find(peerId) { + return this.queue.find(job => { + return peerId.equals(job.options.peerId); + }); + } + } + + /** + * @see https://libp2p.github.io/js-libp2p/interfaces/index._internal_.ConnectionManagerConfig.html#dialTimeout + */ + const DIAL_TIMEOUT = 5e3; + /** + * @see https://libp2p.github.io/js-libp2p/interfaces/index._internal_.ConnectionManagerConfig.html#inboundUpgradeTimeout + */ + const INBOUND_UPGRADE_TIMEOUT = 2e3; + /** + * @see https://libp2p.github.io/js-libp2p/interfaces/index._internal_.ConnectionManagerConfig.html#maxPeerAddrsToDial + */ + const 
MAX_PEER_ADDRS_TO_DIAL = 25; + /** + * @see https://libp2p.github.io/js-libp2p/interfaces/index._internal_.ConnectionManagerConfig.html#autoDialInterval + */ + const AUTO_DIAL_INTERVAL = 5000; + /** + * @see https://libp2p.github.io/js-libp2p/interfaces/index._internal_.ConnectionManagerConfig.html#autoDialConcurrency + */ + const AUTO_DIAL_CONCURRENCY = 25; + /** + * @see https://libp2p.github.io/js-libp2p/interfaces/index._internal_.ConnectionManagerConfig.html#autoDialPriority + */ + const AUTO_DIAL_PRIORITY = 0; + /** + * @see https://libp2p.github.io/js-libp2p/interfaces/index._internal_.ConnectionManagerConfig.html#autoDialMaxQueueLength + */ + const AUTO_DIAL_MAX_QUEUE_LENGTH = 100; + /** + * @see https://libp2p.github.io/js-libp2p/interfaces/libp2p.index.unknown.ConnectionManagerInit.html#autoDialDiscoveredPeersDebounce + */ + const AUTO_DIAL_DISCOVERED_PEERS_DEBOUNCE = 10; + /** + * @see https://libp2p.github.io/js-libp2p/interfaces/index._internal_.ConnectionManagerConfig.html#inboundConnectionThreshold + */ + const INBOUND_CONNECTION_THRESHOLD = 5; + /** + * @see https://libp2p.github.io/js-libp2p/interfaces/index._internal_.ConnectionManagerConfig.html#maxIncomingPendingConnections + */ + const MAX_INCOMING_PENDING_CONNECTIONS = 10; + /** + * Store as part of the peer store metadata for a given peer, the value for this + * key is a timestamp of the last time a dial attempted failed with the relevant + * peer stored as a string. + * + * Used to insure we do not endlessly try to auto dial peers we have recently + * failed to dial. 
+ */ + const LAST_DIAL_FAILURE_KEY = 'last-dial-failure'; + /** + * @see https://libp2p.github.io/js-libp2p/interfaces/index._internal_.ConnectionManagerConfig.html#maxDialQueueLength + */ + const MAX_DIAL_QUEUE_LENGTH = 500; + + /** + * @see https://libp2p.github.io/js-libp2p/interfaces/index._internal_.ConnectionManagerConfig.html#minConnections + */ + const MIN_CONNECTIONS = 5; + /** + * @see https://libp2p.github.io/js-libp2p/interfaces/index._internal_.ConnectionManagerConfig.html#maxConnections + */ + const MAX_CONNECTIONS$1 = 100; + /** + * @see https://libp2p.github.io/js-libp2p/interfaces/index._internal_.ConnectionManagerConfig.html#maxParallelDials + */ + const MAX_PARALLEL_DIALS = 50; + /** + * @see https://libp2p.github.io/js-libp2p/interfaces/libp2p.index.unknown.ConnectionManagerInit.html#autoDialPeerRetryThreshold + */ + const AUTO_DIAL_PEER_RETRY_THRESHOLD = 1000 * 60 * 7; + + const defaultOptions$3 = { + minConnections: MIN_CONNECTIONS, + maxQueueLength: AUTO_DIAL_MAX_QUEUE_LENGTH, + autoDialConcurrency: AUTO_DIAL_CONCURRENCY, + autoDialPriority: AUTO_DIAL_PRIORITY, + autoDialInterval: AUTO_DIAL_INTERVAL, + autoDialPeerRetryThreshold: AUTO_DIAL_PEER_RETRY_THRESHOLD, + autoDialDiscoveredPeersDebounce: AUTO_DIAL_DISCOVERED_PEERS_DEBOUNCE + }; + class AutoDial { + connectionManager; + peerStore; + queue; + minConnections; + autoDialPriority; + autoDialIntervalMs; + autoDialMaxQueueLength; + autoDialPeerRetryThresholdMs; + autoDialDiscoveredPeersDebounce; + autoDialInterval; + started; + running; + log; + /** + * Proactively tries to connect to known peers stored in the PeerStore. + * It will keep the number of connections below the upper limit and sort + * the peers to connect based on whether we know their keys and protocols. + */ + constructor(components, init) { + this.connectionManager = components.connectionManager; + this.peerStore = components.peerStore; + this.minConnections = init.minConnections ?? 
defaultOptions$3.minConnections; + this.autoDialPriority = init.autoDialPriority ?? defaultOptions$3.autoDialPriority; + this.autoDialIntervalMs = init.autoDialInterval ?? defaultOptions$3.autoDialInterval; + this.autoDialMaxQueueLength = init.maxQueueLength ?? defaultOptions$3.maxQueueLength; + this.autoDialPeerRetryThresholdMs = init.autoDialPeerRetryThreshold ?? defaultOptions$3.autoDialPeerRetryThreshold; + this.autoDialDiscoveredPeersDebounce = init.autoDialDiscoveredPeersDebounce ?? defaultOptions$3.autoDialDiscoveredPeersDebounce; + this.log = components.logger.forComponent('libp2p:connection-manager:auto-dial'); + this.started = false; + this.running = false; + this.queue = new PeerQueue({ + concurrency: init.autoDialConcurrency ?? defaultOptions$3.autoDialConcurrency, + metricName: 'libp2p_autodial_queue', + metrics: components.metrics + }); + this.queue.addEventListener('error', (evt) => { + this.log.error('error during auto-dial', evt.detail); + }); + // check the min connection limit whenever a peer disconnects + components.events.addEventListener('connection:close', () => { + this.autoDial() + .catch(err => { + this.log.error(err); + }); + }); + // sometimes peers are discovered in quick succession so add a small + // debounce to ensure all eligible peers are autodialed + let debounce; + // when new peers are discovered, dial them if we don't have + // enough connections + components.events.addEventListener('peer:discovery', () => { + clearTimeout(debounce); + debounce = setTimeout(() => { + this.autoDial() + .catch(err => { + this.log.error(err); + }); + }, this.autoDialDiscoveredPeersDebounce); + }); + } + isStarted() { + return this.started; + } + start() { + this.started = true; + } + afterStart() { + this.autoDial() + .catch(err => { + this.log.error('error while autodialing', err); + }); + } + stop() { + // clear the queue + this.queue.clear(); + clearTimeout(this.autoDialInterval); + this.started = false; + this.running = false; + } + async 
autoDial() { + if (!this.started || this.running) { + return; + } + const connections = this.connectionManager.getConnectionsMap(); + const numConnections = connections.size; + // already have enough connections + if (numConnections >= this.minConnections) { + if (this.minConnections > 0) { + this.log.trace('have enough connections %d/%d', numConnections, this.minConnections); + } + // no need to schedule next autodial as it will be run when on + // connection:close event + return; + } + if (this.queue.size > this.autoDialMaxQueueLength) { + this.log('not enough connections %d/%d but auto dial queue is full', numConnections, this.minConnections); + this.sheduleNextAutodial(); + return; + } + this.running = true; + this.log('not enough connections %d/%d - will dial peers to increase the number of connections', numConnections, this.minConnections); + const dialQueue = new PeerSet( + // @ts-expect-error boolean filter removes falsy peer IDs + this.connectionManager.getDialQueue() + .map(queue => queue.peerId) + .filter(Boolean)); + // sort peers on whether we know protocols or public keys for them + const peers = await this.peerStore.all({ + filters: [ + // remove some peers + (peer) => { + // remove peers without addresses + if (peer.addresses.length === 0) { + this.log.trace('not autodialing %p because they have no addresses', peer.id); + return false; + } + // remove peers we are already connected to + if (connections.has(peer.id)) { + this.log.trace('not autodialing %p because they are already connected', peer.id); + return false; + } + // remove peers we are already dialling + if (dialQueue.has(peer.id)) { + this.log.trace('not autodialing %p because they are already being dialed', peer.id); + return false; + } + // remove peers already in the autodial queue + if (this.queue.has(peer.id)) { + this.log.trace('not autodialing %p because they are already being autodialed', peer.id); + return false; + } + return true; + } + ] + }); + // shuffle the peers - this is so 
peers with the same tag values will be + // dialled in a different order each time + const shuffledPeers = peers.sort(() => Math.random() > 0.5 ? 1 : -1); + // sort shuffled peers by tag value + const peerValues = new PeerMap(); + for (const peer of shuffledPeers) { + if (peerValues.has(peer.id)) { + continue; + } + // sum all tag values + peerValues.set(peer.id, [...peer.tags.values()].reduce((acc, curr) => { + return acc + curr.value; + }, 0)); + } + // sort by value, highest to lowest + const sortedPeers = shuffledPeers.sort((a, b) => { + const peerAValue = peerValues.get(a.id) ?? 0; + const peerBValue = peerValues.get(b.id) ?? 0; + if (peerAValue > peerBValue) { + return -1; + } + if (peerAValue < peerBValue) { + return 1; + } + return 0; + }); + const peersThatHaveNotFailed = sortedPeers.filter(peer => { + const lastDialFailure = peer.metadata.get(LAST_DIAL_FAILURE_KEY); + if (lastDialFailure == null) { + return true; + } + const lastDialFailureTimestamp = parseInt(toString$1(lastDialFailure)); + if (isNaN(lastDialFailureTimestamp)) { + return true; + } + // only dial if the time since the last failure is above the retry threshold + return Date.now() - lastDialFailureTimestamp > this.autoDialPeerRetryThresholdMs; + }); + this.log('selected %d/%d peers to dial', peersThatHaveNotFailed.length, peers.length); + for (const peer of peersThatHaveNotFailed) { + this.queue.add(async () => { + const numConnections = this.connectionManager.getConnectionsMap().size; + // Check to see if we still need to auto dial + if (numConnections >= this.minConnections) { + this.log('got enough connections now %d/%d', numConnections, this.minConnections); + this.queue.clear(); + return; + } + this.log('connecting to a peerStore stored peer %p', peer.id); + await this.connectionManager.openConnection(peer.id, { + priority: this.autoDialPriority + }); + }, { + peerId: peer.id + }).catch(err => { + this.log.error('could not connect to peerStore stored peer', err); + }); + } + 
this.running = false; + this.sheduleNextAutodial(); + } + sheduleNextAutodial() { + if (!this.started) { + return; + } + this.autoDialInterval = setTimeout(() => { + this.autoDial() + .catch(err => { + this.log.error('error while autodialing', err); + }); + }, this.autoDialIntervalMs); + } + } + + /** + * Close the passed stream, falling back to aborting the stream if closing + * cleanly fails. + */ + /** + * These are speculative protocols that are run automatically on connection open + * so are usually not the reason the connection was opened. + * + * Consequently when requested it should be safe to close connections that only + * have these protocol streams open. + */ + const DEFAULT_CLOSABLE_PROTOCOLS = [ + // identify + '/ipfs/id/1.0.0', + // identify-push + '/ipfs/id/push/1.0.0', + // autonat + '/libp2p/autonat/1.0.0', + // dcutr + '/libp2p/dcutr' + ]; + /** + * Close the passed connection if it has no streams, or only closable protocol + * streams, falling back to aborting the connection if closing it cleanly fails. + */ + async function safelyCloseConnectionIfUnused(connection, options) { + const streamProtocols = connection?.streams?.map(stream => stream.protocol) ?? []; + const closableProtocols = options?.closableProtocols ?? DEFAULT_CLOSABLE_PROTOCOLS; + // if the connection has protocols not in the closable protocols list, do not + // close the connection + if (streamProtocols.filter(proto => proto != null && !closableProtocols.includes(proto)).length > 0) { + return; + } + try { + await connection?.close(options); + } + catch (err) { + connection?.abort(err); + } + } + + const defaultOptions$2 = { + maxConnections: MAX_CONNECTIONS$1, + allow: [] + }; + /** + * If we go over the max connections limit, choose some connections to close + */ + class ConnectionPruner { + maxConnections; + connectionManager; + peerStore; + allow; + events; + log; + constructor(components, init = {}) { + this.maxConnections = init.maxConnections ?? 
defaultOptions$2.maxConnections; + this.allow = init.allow ?? defaultOptions$2.allow; + this.connectionManager = components.connectionManager; + this.peerStore = components.peerStore; + this.events = components.events; + this.log = components.logger.forComponent('libp2p:connection-manager:connection-pruner'); + // check the max connection limit whenever a peer connects + components.events.addEventListener('connection:open', () => { + this.maybePruneConnections() + .catch(err => { + this.log.error(err); + }); + }); + } + /** + * If we have more connections than our maximum, select some excess connections + * to prune based on peer value + */ + async maybePruneConnections() { + const connections = this.connectionManager.getConnections(); + const numConnections = connections.length; + this.log('checking max connections limit %d/%d', numConnections, this.maxConnections); + if (numConnections <= this.maxConnections) { + return; + } + const peerValues = new PeerMap(); + // work out peer values + for (const connection of connections) { + const remotePeer = connection.remotePeer; + if (peerValues.has(remotePeer)) { + continue; + } + peerValues.set(remotePeer, 0); + try { + const peer = await this.peerStore.get(remotePeer); + // sum all tag values + peerValues.set(remotePeer, [...peer.tags.values()].reduce((acc, curr) => { + return acc + curr.value; + }, 0)); + } + catch (err) { + if (err.code !== 'ERR_NOT_FOUND') { + this.log.error('error loading peer tags', err); + } + } + } + const sortedConnections = this.sortConnections(connections, peerValues); + // close some connections + const toPrune = Math.max(numConnections - this.maxConnections, 0); + const toClose = []; + for (const connection of sortedConnections) { + this.log('too many connections open - closing a connection to %p', connection.remotePeer); + // check allow list + const connectionInAllowList = this.allow.some((ma) => { + return connection.remoteAddr.toString().startsWith(ma.toString()); + }); + // Connections 
in the allow list should be excluded from pruning + if (!connectionInAllowList) { + toClose.push(connection); + } + if (toClose.length === toPrune) { + break; + } + } + // close connections + await Promise.all(toClose.map(async (connection) => { + await safelyCloseConnectionIfUnused(connection, { + signal: AbortSignal.timeout(1000) + }); + })); + // despatch prune event + this.events.safeDispatchEvent('connection:prune', { detail: toClose }); + } + sortConnections(connections, peerValues) { + return connections + // sort by connection age, newest to oldest + .sort((a, b) => { + const connectionALifespan = a.timeline.open; + const connectionBLifespan = b.timeline.open; + if (connectionALifespan < connectionBLifespan) { + return 1; + } + if (connectionALifespan > connectionBLifespan) { + return -1; + } + return 0; + }) + // sort by direction, incoming first then outgoing + .sort((a, b) => { + if (a.direction === 'outbound' && b.direction === 'inbound') { + return 1; + } + if (a.direction === 'inbound' && b.direction === 'outbound') { + return -1; + } + return 0; + }) + // sort by number of streams, lowest to highest + .sort((a, b) => { + if (a.streams.length > b.streams.length) { + return 1; + } + if (a.streams.length < b.streams.length) { + return -1; + } + return 0; + }) + // sort by tag value, lowest to highest + .sort((a, b) => { + const peerAValue = peerValues.get(a.remotePeer) ?? 0; + const peerBValue = peerValues.get(b.remotePeer) ?? 0; + if (peerAValue > peerBValue) { + return 1; + } + if (peerAValue < peerBValue) { + return -1; + } + return 0; + }); + } + } + + class PriorityQueue extends Queue { + constructor(init = {}) { + super({ + ...init, + sort: (a, b) => { + if (a.options.priority > b.options.priority) { + return -1; + } + if (a.options.priority < b.options.priority) { + return 1; + } + return 0; + } + }); + } + } + + /** + * Takes an array of AbortSignals and returns a single signal. 
+ * If any signals are aborted, the returned signal will be aborted. + */ + function anySignal(signals) { + const controller = new globalThis.AbortController(); + function onAbort() { + controller.abort(); + for (const signal of signals) { + if (signal?.removeEventListener != null) { + signal.removeEventListener('abort', onAbort); + } + } + } + for (const signal of signals) { + if (signal?.aborted === true) { + onAbort(); + break; + } + if (signal?.addEventListener != null) { + signal.addEventListener('abort', onAbort); + } + } + function clear() { + for (const signal of signals) { + if (signal?.removeEventListener != null) { + signal.removeEventListener('abort', onAbort); + } + } + } + const signal = controller.signal; + signal.clear = clear; + return signal; + } + + /** + * Recursively resolve DNSADDR multiaddrs + */ + async function resolveMultiaddrs(ma, options) { + // check multiaddr resolvers + let resolvable = false; + for (const key of resolvers.keys()) { + resolvable = ma.protoNames().includes(key); + if (resolvable) { + break; + } + } + // return multiaddr if it is not resolvable + if (!resolvable) { + return [ma]; + } + const output = await ma.resolve(options); + options.log('resolved %s to', ma, output.map(ma => ma.toString())); + return output; + } + + /* eslint-disable max-depth */ + const defaultOptions$1 = { + addressSorter: defaultAddressSort, + maxParallelDials: MAX_PARALLEL_DIALS, + maxDialQueueLength: MAX_DIAL_QUEUE_LENGTH, + maxPeerAddrsToDial: MAX_PEER_ADDRS_TO_DIAL, + dialTimeout: DIAL_TIMEOUT, + resolvers: { + dnsaddr: dnsaddrResolver + } + }; + class DialQueue { + queue; + components; + addressSorter; + maxPeerAddrsToDial; + maxDialQueueLength; + dialTimeout; + shutDownController; + connections; + log; + constructor(components, init = {}) { + this.addressSorter = init.addressSorter ?? defaultOptions$1.addressSorter; + this.maxPeerAddrsToDial = init.maxPeerAddrsToDial ?? 
defaultOptions$1.maxPeerAddrsToDial; + this.maxDialQueueLength = init.maxDialQueueLength ?? defaultOptions$1.maxDialQueueLength; + this.dialTimeout = init.dialTimeout ?? defaultOptions$1.dialTimeout; + this.connections = init.connections ?? new PeerMap(); + this.log = components.logger.forComponent('libp2p:connection-manager:dial-queue'); + this.components = components; + this.shutDownController = new AbortController(); + setMaxListeners(Infinity, this.shutDownController.signal); + for (const [key, value] of Object.entries(init.resolvers ?? {})) { + resolvers.set(key, value); + } + // controls dial concurrency + this.queue = new PriorityQueue({ + concurrency: init.maxParallelDials ?? defaultOptions$1.maxParallelDials, + metricName: 'libp2p_dial_queue', + metrics: components.metrics + }); + // a started job errored + this.queue.addEventListener('error', (event) => { + this.log.error('error in dial queue', event.detail); + }); + } + start() { + this.shutDownController = new AbortController(); + setMaxListeners(Infinity, this.shutDownController.signal); + } + /** + * Clears any pending dials + */ + stop() { + this.shutDownController.abort(); + this.queue.abort(); + } + /** + * Connects to a given peer, multiaddr or list of multiaddrs. + * + * If a peer is passed, all known multiaddrs will be tried. If a multiaddr or + * multiaddrs are passed only those will be dialled. + * + * Where a list of multiaddrs is passed, if any contain a peer id then all + * multiaddrs in the list must contain the same peer id. + * + * The dial to the first address that is successfully able to upgrade a + * connection will be used, all other dials will be aborted when that happens. 
+ */ + async dial(peerIdOrMultiaddr, options = {}) { + const { peerId, multiaddrs } = getPeerAddress(peerIdOrMultiaddr); + // make sure we don't have an existing connection to any of the addresses we + // are about to dial + const existingConnection = Array.from(this.connections.values()).flat().find(conn => { + if (options.force === true) { + return false; + } + if (conn.remotePeer.equals(peerId)) { + return true; + } + return multiaddrs.find(addr => { + return addr.equals(conn.remoteAddr); + }); + }); + if (existingConnection != null) { + this.log('already connected to %a', existingConnection.remoteAddr); + return existingConnection; + } + // ready to dial, all async work finished - make sure we don't have any + // pending dials in progress for this peer or set of multiaddrs + const existingDial = this.queue.queue.find(job => { + if (peerId?.equals(job.options.peerId) === true) { + return true; + } + // does the dial contain any of the target multiaddrs? + const addresses = job.options.multiaddrs; + if (addresses == null) { + return false; + } + for (const multiaddr of multiaddrs) { + if (addresses.has(multiaddr.toString())) { + return true; + } + } + return false; + }); + if (existingDial != null) { + this.log('joining existing dial target for %p', peerId); + // add all multiaddrs to the dial target + for (const multiaddr of multiaddrs) { + existingDial.options.multiaddrs.add(multiaddr.toString()); + } + return existingDial.join(options); + } + if (this.queue.size >= this.maxDialQueueLength) { + throw new CodeError$2('Dial queue is full', 'ERR_DIAL_QUEUE_FULL'); + } + this.log('creating dial target for %p', peerId, multiaddrs.map(ma => ma.toString())); + return this.queue.add(async (options) => { + // create abort conditions - need to do this before `calculateMultiaddrs` as + // we may be about to resolve a dns addr which can time out + const signal = this.createDialAbortController(options?.signal); + let addrsToDial; + try { + // load addresses from address 
book, resolve and dnsaddrs, filter + // undiallables, add peer IDs, etc + addrsToDial = await this.calculateMultiaddrs(peerId, options?.multiaddrs, { + ...options, + signal + }); + addrsToDial.map(({ multiaddr }) => multiaddr.toString()).forEach(addr => { + options?.multiaddrs.add(addr); + }); + } + catch (err) { + signal.clear(); + throw err; + } + try { + let dialed = 0; + const errors = []; + for (const address of addrsToDial) { + if (dialed === this.maxPeerAddrsToDial) { + this.log('dialed maxPeerAddrsToDial (%d) addresses for %p, not trying any others', dialed, peerId); + throw new CodeError$2('Peer had more than maxPeerAddrsToDial', codes.ERR_TOO_MANY_ADDRESSES); + } + dialed++; + try { + const conn = await this.components.transportManager.dial(address.multiaddr, { + ...options, + signal + }); + this.log('dial to %a succeeded', address.multiaddr); + return conn; + } + catch (err) { + this.log.error('dial failed to %a', address.multiaddr, err); + if (peerId != null) { + // record the failed dial + try { + await this.components.peerStore.patch(peerId, { + metadata: { + [LAST_DIAL_FAILURE_KEY]: fromString(Date.now().toString()) + } + }); + } + catch (err) { + this.log.error('could not update last dial failure key for %p', peerId, err); + } + } + // the user/dial timeout/shutdown controller signal aborted + if (signal.aborted) { + throw new CodeError$2(err.message, ERR_TIMEOUT); + } + errors.push(err); + } + } + if (errors.length === 1) { + throw errors[0]; + } + throw new AggregateCodeError(errors, 'All multiaddr dials failed', codes.ERR_TRANSPORT_DIAL_FAILED); + } + finally { + // clean up abort signals/controllers + signal.clear(); + } + }, { + peerId, + priority: options.priority ?? 
DEFAULT_DIAL_PRIORITY, + multiaddrs: new Set(multiaddrs.map(ma => ma.toString())), + signal: options.signal + }); + } + createDialAbortController(userSignal) { + // let any signal abort the dial + const signal = anySignal([ + AbortSignal.timeout(this.dialTimeout), + this.shutDownController.signal, + userSignal + ]); + // This emitter gets listened to a lot + setMaxListeners(Infinity, signal); + return signal; + } + // eslint-disable-next-line complexity + async calculateMultiaddrs(peerId, multiaddrs = new Set(), options = {}) { + const addrs = [...multiaddrs].map(ma => ({ + multiaddr: multiaddr(ma), + isCertified: false + })); + // if a peer id or multiaddr(s) with a peer id, make sure it isn't our peer id and that we are allowed to dial it + if (peerId != null) { + if (this.components.peerId.equals(peerId)) { + throw new CodeError$2('Tried to dial self', codes.ERR_DIALED_SELF); + } + if ((await this.components.connectionGater.denyDialPeer?.(peerId)) === true) { + throw new CodeError$2('The dial request is blocked by gater.allowDialPeer', codes.ERR_PEER_DIAL_INTERCEPTED); + } + // if just a peer id was passed, load available multiaddrs for this peer + // from the peer store + if (addrs.length === 0) { + this.log('loading multiaddrs for %p', peerId); + try { + const peer = await this.components.peerStore.get(peerId); + addrs.push(...peer.addresses); + this.log('loaded multiaddrs for %p', peerId, addrs.map(({ multiaddr }) => multiaddr.toString())); + } + catch (err) { + if (err.code !== codes.ERR_NOT_FOUND) { + throw err; + } + } + } + // if we still don't have any addresses for this peer, try a lookup + // using the peer routing + if (addrs.length === 0) { + this.log('looking up multiaddrs for %p in the peer routing', peerId); + try { + const peerInfo = await this.components.peerRouting.findPeer(peerId); + this.log('found multiaddrs for %p in the peer routing', peerId, addrs.map(({ multiaddr }) => multiaddr.toString())); + 
addrs.push(...peerInfo.multiaddrs.map(multiaddr => ({ + multiaddr, + isCertified: false + }))); + } + catch (err) { + if (err.code !== codes.ERR_NO_ROUTERS_AVAILABLE) { + this.log.error('looking up multiaddrs for %p in the peer routing failed', peerId, err); + } + } + } + } + // resolve addresses - this can result in a one-to-many translation when + // dnsaddrs are resolved + let resolvedAddresses = (await Promise.all(addrs.map(async (addr) => { + const result = await resolveMultiaddrs(addr.multiaddr, { + dns: this.components.dns, + ...options, + log: this.log + }); + if (result.length === 1 && result[0].equals(addr.multiaddr)) { + return addr; + } + return result.map(multiaddr => ({ + multiaddr, + isCertified: false + })); + }))) + .flat(); + // ensure the peer id is appended to the multiaddr + if (peerId != null) { + const peerIdMultiaddr = `/p2p/${peerId.toString()}`; + resolvedAddresses = resolvedAddresses.map(addr => { + const lastProto = addr.multiaddr.protos().pop(); + // do not append peer id to path multiaddrs + if (lastProto?.path === true) { + return addr; + } + // append peer id to multiaddr if it is not already present + if (addr.multiaddr.getPeerId() == null) { + return { + multiaddr: addr.multiaddr.encapsulate(peerIdMultiaddr), + isCertified: addr.isCertified + }; + } + return addr; + }); + } + const filteredAddrs = resolvedAddresses.filter(addr => { + // filter out any multiaddrs that we do not have transports for + if (this.components.transportManager.dialTransportForMultiaddr(addr.multiaddr) == null) { + return false; + } + // if the resolved multiaddr has a PeerID but it's the wrong one, ignore it + // - this can happen with addresses like bootstrap.libp2p.io that resolve + // to multiple different peers + const addrPeerId = addr.multiaddr.getPeerId(); + if (peerId != null && addrPeerId != null) { + return peerId.equals(addrPeerId); + } + return true; + }); + // deduplicate addresses + const dedupedAddrs = new Map(); + for (const addr of 
filteredAddrs) { + const maStr = addr.multiaddr.toString(); + const existing = dedupedAddrs.get(maStr); + if (existing != null) { + existing.isCertified = existing.isCertified || addr.isCertified || false; + continue; + } + dedupedAddrs.set(maStr, addr); + } + const dedupedMultiaddrs = [...dedupedAddrs.values()]; + // make sure we actually have some addresses to dial + if (dedupedMultiaddrs.length === 0) { + throw new CodeError$2('The dial request has no valid addresses', codes.ERR_NO_VALID_ADDRESSES); + } + const gatedAdrs = []; + for (const addr of dedupedMultiaddrs) { + if (this.components.connectionGater.denyDialMultiaddr != null && await this.components.connectionGater.denyDialMultiaddr(addr.multiaddr)) { + continue; + } + gatedAdrs.push(addr); + } + const sortedGatedAddrs = gatedAdrs.sort(this.addressSorter); + // make sure we actually have some addresses to dial + if (sortedGatedAddrs.length === 0) { + throw new CodeError$2('The connection gater denied all addresses in the dial request', codes.ERR_NO_VALID_ADDRESSES); + } + this.log.trace('addresses for %p before filtering', peerId ?? 'unknown peer', resolvedAddresses.map(({ multiaddr }) => multiaddr.toString())); + this.log.trace('addresses for %p after filtering', peerId ?? 
'unknown peer', sortedGatedAddrs.map(({ multiaddr }) => multiaddr.toString())); + return sortedGatedAddrs; + } + async isDialable(multiaddr, options = {}) { + if (!Array.isArray(multiaddr)) { + multiaddr = [multiaddr]; + } + try { + const addresses = await this.calculateMultiaddrs(undefined, new Set(multiaddr.map(ma => ma.toString())), options); + if (options.runOnTransientConnection === false) { + // return true if any resolved multiaddrs are not relay addresses + return addresses.find(addr => { + return !Circuit$1.matches(addr.multiaddr); + }) != null; + } + return true; + } + catch (err) { + this.log.trace('error calculating if multiaddr(s) were dialable', err); + } + return false; + } + } + + const DEFAULT_DIAL_PRIORITY = 50; + const defaultOptions = { + minConnections: MIN_CONNECTIONS, + maxConnections: MAX_CONNECTIONS$1, + inboundConnectionThreshold: INBOUND_CONNECTION_THRESHOLD, + maxIncomingPendingConnections: MAX_INCOMING_PENDING_CONNECTIONS, + autoDialConcurrency: AUTO_DIAL_CONCURRENCY, + autoDialPriority: AUTO_DIAL_PRIORITY, + autoDialMaxQueueLength: AUTO_DIAL_MAX_QUEUE_LENGTH, + autoDialPeerRetryThreshold: AUTO_DIAL_PEER_RETRY_THRESHOLD, + autoDialDiscoveredPeersDebounce: AUTO_DIAL_DISCOVERED_PEERS_DEBOUNCE + }; + /** + * Responsible for managing known connections. + */ + class DefaultConnectionManager { + started; + connections; + allow; + deny; + maxIncomingPendingConnections; + incomingPendingConnections; + maxConnections; + dialQueue; + autoDial; + connectionPruner; + inboundConnectionRateLimiter; + peerStore; + metrics; + events; + log; + constructor(components, init = {}) { + this.maxConnections = init.maxConnections ?? defaultOptions.maxConnections; + const minConnections = init.minConnections ?? 
defaultOptions.minConnections; + if (this.maxConnections < minConnections) { + throw new CodeError$2('Connection Manager maxConnections must be greater than minConnections', codes.ERR_INVALID_PARAMETERS); + } + /** + * Map of connections per peer + */ + this.connections = new PeerMap(); + this.started = false; + this.peerStore = components.peerStore; + this.metrics = components.metrics; + this.events = components.events; + this.log = components.logger.forComponent('libp2p:connection-manager'); + this.onConnect = this.onConnect.bind(this); + this.onDisconnect = this.onDisconnect.bind(this); + this.events.addEventListener('connection:open', this.onConnect); + this.events.addEventListener('connection:close', this.onDisconnect); + // allow/deny lists + this.allow = (init.allow ?? []).map(ma => multiaddr(ma)); + this.deny = (init.deny ?? []).map(ma => multiaddr(ma)); + this.incomingPendingConnections = 0; + this.maxIncomingPendingConnections = init.maxIncomingPendingConnections ?? defaultOptions.maxIncomingPendingConnections; + // controls individual peers trying to dial us too quickly + this.inboundConnectionRateLimiter = new RateLimiter({ + points: init.inboundConnectionThreshold ?? defaultOptions.inboundConnectionThreshold, + duration: 1 + }); + // controls what happens when we don't have enough connections + this.autoDial = new AutoDial({ + connectionManager: this, + peerStore: components.peerStore, + events: components.events, + logger: components.logger + }, { + minConnections, + autoDialConcurrency: init.autoDialConcurrency ?? defaultOptions.autoDialConcurrency, + autoDialPriority: init.autoDialPriority ?? defaultOptions.autoDialPriority, + autoDialPeerRetryThreshold: init.autoDialPeerRetryThreshold ?? defaultOptions.autoDialPeerRetryThreshold, + autoDialDiscoveredPeersDebounce: init.autoDialDiscoveredPeersDebounce ?? defaultOptions.autoDialDiscoveredPeersDebounce, + maxQueueLength: init.autoDialMaxQueueLength ?? 
defaultOptions.autoDialMaxQueueLength + }); + // controls what happens when we have too many connections + this.connectionPruner = new ConnectionPruner({ + connectionManager: this, + peerStore: components.peerStore, + events: components.events, + logger: components.logger + }, { + maxConnections: this.maxConnections, + allow: this.allow + }); + this.dialQueue = new DialQueue(components, { + addressSorter: init.addressSorter ?? defaultAddressSort, + maxParallelDials: init.maxParallelDials ?? MAX_PARALLEL_DIALS, + maxDialQueueLength: init.maxDialQueueLength ?? MAX_DIAL_QUEUE_LENGTH, + maxPeerAddrsToDial: init.maxPeerAddrsToDial ?? MAX_PEER_ADDRS_TO_DIAL, + dialTimeout: init.dialTimeout ?? DIAL_TIMEOUT, + resolvers: init.resolvers ?? { + dnsaddr: dnsaddrResolver + }, + connections: this.connections + }); + } + isStarted() { + return this.started; + } + /** + * Starts the Connection Manager. If Metrics are not enabled on libp2p + * only event loop and connection limits will be monitored. + */ + async start() { + // track inbound/outbound connections + this.metrics?.registerMetricGroup('libp2p_connection_manager_connections', { + calculate: () => { + const metric = { + inbound: 0, + outbound: 0 + }; + for (const conns of this.connections.values()) { + for (const conn of conns) { + if (conn.direction === 'inbound') { + metric.inbound++; + } + else { + metric.outbound++; + } + } + } + return metric; + } + }); + // track total number of streams per protocol + this.metrics?.registerMetricGroup('libp2p_protocol_streams_total', { + label: 'protocol', + calculate: () => { + const metric = {}; + for (const conns of this.connections.values()) { + for (const conn of conns) { + for (const stream of conn.streams) { + const key = `${stream.direction} ${stream.protocol ?? 'unnegotiated'}`; + metric[key] = (metric[key] ?? 
0) + 1; + } + } + } + return metric; + } + }); + // track 90th percentile of streams per protocol + this.metrics?.registerMetricGroup('libp2p_connection_manager_protocol_streams_per_connection_90th_percentile', { + label: 'protocol', + calculate: () => { + const allStreams = {}; + for (const conns of this.connections.values()) { + for (const conn of conns) { + const streams = {}; + for (const stream of conn.streams) { + const key = `${stream.direction} ${stream.protocol ?? 'unnegotiated'}`; + streams[key] = (streams[key] ?? 0) + 1; + } + for (const [protocol, count] of Object.entries(streams)) { + allStreams[protocol] = allStreams[protocol] ?? []; + allStreams[protocol].push(count); + } + } + } + const metric = {}; + for (let [protocol, counts] of Object.entries(allStreams)) { + counts = counts.sort((a, b) => a - b); + const index = Math.floor(counts.length * 0.9); + metric[protocol] = counts[index]; + } + return metric; + } + }); + this.dialQueue.start(); + this.autoDial.start(); + this.started = true; + this.log('started'); + } + async afterStart() { + // re-connect to any peers with the KEEP_ALIVE tag + void Promise.resolve() + .then(async () => { + const keepAlivePeers = await this.peerStore.all({ + filters: [(peer) => { + return peer.tags.has(KEEP_ALIVE); + }] + }); + await Promise.all(keepAlivePeers.map(async (peer) => { + await this.openConnection(peer.id) + .catch(err => { + this.log.error(err); + }); + })); + }) + .catch(err => { + this.log.error(err); + }); + this.autoDial.afterStart(); + } + /** + * Stops the Connection Manager + */ + async stop() { + this.dialQueue.stop(); + this.autoDial.stop(); + // Close all connections we're tracking + const tasks = []; + for (const connectionList of this.connections.values()) { + for (const connection of connectionList) { + tasks.push((async () => { + try { + await connection.close(); + } + catch (err) { + this.log.error(err); + } + })()); + } + } + this.log('closing %d connections', tasks.length); + await 
Promise.all(tasks); + this.connections.clear(); + this.log('stopped'); + } + onConnect(evt) { + void this._onConnect(evt).catch(err => { + this.log.error(err); + }); + } + /** + * Tracks the incoming connection and check the connection limit + */ + async _onConnect(evt) { + const { detail: connection } = evt; + if (!this.started) { + // This can happen when we are in the process of shutting down the node + await connection.close(); + return; + } + const peerId = connection.remotePeer; + const storedConns = this.connections.get(peerId); + let isNewPeer = false; + if (storedConns != null) { + storedConns.push(connection); + } + else { + isNewPeer = true; + this.connections.set(peerId, [connection]); + } + // only need to store RSA public keys, all other types are embedded in the peer id + if (peerId.publicKey != null && peerId.type === 'RSA') { + await this.peerStore.patch(peerId, { + publicKey: peerId.publicKey + }); + } + if (isNewPeer) { + this.events.safeDispatchEvent('peer:connect', { detail: connection.remotePeer }); + } + } + /** + * Removes the connection from tracking + */ + onDisconnect(evt) { + const { detail: connection } = evt; + if (!this.started) { + // This can happen when we are in the process of shutting down the node + return; + } + const peerId = connection.remotePeer; + let storedConn = this.connections.get(peerId); + if (storedConn != null && storedConn.length > 1) { + storedConn = storedConn.filter((conn) => conn.id !== connection.id); + this.connections.set(peerId, storedConn); + } + else if (storedConn != null) { + this.connections.delete(peerId); + this.events.safeDispatchEvent('peer:disconnect', { detail: connection.remotePeer }); + } + } + getConnections(peerId) { + if (peerId != null) { + return this.connections.get(peerId) ?? 
[]; + } + let conns = []; + for (const c of this.connections.values()) { + conns = conns.concat(c); + } + return conns; + } + getConnectionsMap() { + return this.connections; + } + async openConnection(peerIdOrMultiaddr, options = {}) { + if (!this.isStarted()) { + throw new CodeError$2('Not started', codes.ERR_NODE_NOT_STARTED); + } + options.signal?.throwIfAborted(); + const { peerId } = getPeerAddress(peerIdOrMultiaddr); + if (peerId != null && options.force !== true) { + this.log('dial %p', peerId); + const existingConnection = this.getConnections(peerId) + .find(conn => !conn.transient); + if (existingConnection != null) { + this.log('had an existing non-transient connection to %p', peerId); + return existingConnection; + } + } + const connection = await this.dialQueue.dial(peerIdOrMultiaddr, { + ...options, + priority: options.priority ?? DEFAULT_DIAL_PRIORITY + }); + let peerConnections = this.connections.get(connection.remotePeer); + if (peerConnections == null) { + peerConnections = []; + this.connections.set(connection.remotePeer, peerConnections); + } + // we get notified of connections via the Upgrader emitting "connection" + // events, double check we aren't already tracking this connection before + // storing it + let trackedConnection = false; + for (const conn of peerConnections) { + if (conn.id === connection.id) { + trackedConnection = true; + } + } + if (!trackedConnection) { + peerConnections.push(connection); + } + return connection; + } + async closeConnections(peerId, options = {}) { + const connections = this.connections.get(peerId) ?? 
[]; + await Promise.all(connections.map(async (connection) => { + try { + await connection.close(options); + } + catch (err) { + connection.abort(err); + } + })); + } + async acceptIncomingConnection(maConn) { + // check deny list + const denyConnection = this.deny.some(ma => { + return maConn.remoteAddr.toString().startsWith(ma.toString()); + }); + if (denyConnection) { + this.log('connection from %a refused - connection remote address was in deny list', maConn.remoteAddr); + return false; + } + // check allow list + const allowConnection = this.allow.some(ma => { + return maConn.remoteAddr.toString().startsWith(ma.toString()); + }); + if (allowConnection) { + this.incomingPendingConnections++; + return true; + } + // check pending connections + if (this.incomingPendingConnections === this.maxIncomingPendingConnections) { + this.log('connection from %a refused - incomingPendingConnections exceeded by host', maConn.remoteAddr); + return false; + } + if (maConn.remoteAddr.isThinWaistAddress()) { + const host = maConn.remoteAddr.nodeAddress().address; + try { + await this.inboundConnectionRateLimiter.consume(host, 1); + } + catch { + this.log('connection from %a refused - inboundConnectionThreshold exceeded by host %s', maConn.remoteAddr, host); + return false; + } + } + if (this.getConnections().length < this.maxConnections) { + this.incomingPendingConnections++; + return true; + } + this.log('connection from %a refused - maxConnections exceeded', maConn.remoteAddr); + return false; + } + afterUpgradeInbound() { + this.incomingPendingConnections--; + } + getDialQueue() { + const statusMap = { + queued: 'queued', + running: 'active', + errored: 'error', + complete: 'success' + }; + return this.dialQueue.queue.queue.map(job => { + return { + id: job.id, + status: statusMap[job.status], + peerId: job.options.peerId, + multiaddrs: [...job.options.multiaddrs].map(ma => multiaddr(ma)) + }; + }); + } + async isDialable(multiaddr, options = {}) { + return 
this.dialQueue.isDialable(multiaddr, options); + } + } + + /** + * @packageDocumentation + * + * Merge several (async)iterables into one, yield values as they arrive. + * + * Nb. sources are iterated over in parallel so the order of emitted items is not guaranteed. + * + * @example + * + * ```javascript + * import merge from 'it-merge' + * import all from 'it-all' + * + * // This can also be an iterator, generator, etc + * const values1 = [0, 1, 2, 3, 4] + * const values2 = [5, 6, 7, 8, 9] + * + * const arr = all(merge(values1, values2)) + * + * console.info(arr) // 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 + * ``` + * + * Async sources must be awaited: + * + * ```javascript + * import merge from 'it-merge' + * import all from 'it-all' + * + * // This can also be an iterator, async iterator, generator, etc + * const values1 = async function * () { + * yield * [0, 1, 2, 3, 4] + * } + * const values2 = async function * () { + * yield * [5, 6, 7, 8, 9] + * } + * + * const arr = await all(merge(values1(), values2())) + * + * console.info(arr) // 0, 1, 5, 6, 2, 3, 4, 7, 8, 9 <- nb. 
order is not guaranteed + * ``` + */ + function isAsyncIterable$5(thing) { + return thing[Symbol.asyncIterator] != null; + } + function merge(...sources) { + const syncSources = []; + for (const source of sources) { + if (!isAsyncIterable$5(source)) { + syncSources.push(source); + } + } + if (syncSources.length === sources.length) { + // all sources are synchronous + return (function* () { + for (const source of syncSources) { + yield* source; + } + })(); + } + return (async function* () { + const output = pushable$1({ + objectMode: true + }); + void Promise.resolve().then(async () => { + try { + await Promise.all(sources.map(async (source) => { + for await (const item of source) { + output.push(item); + } + })); + output.end(); + } + catch (err) { + output.end(err); + } + }); + yield* output; + })(); + } + + class CompoundContentRouting { + routers; + started; + components; + constructor(components, init) { + this.routers = init.routers ?? []; + this.started = false; + this.components = components; + } + isStarted() { + return this.started; + } + async start() { + this.started = true; + } + async stop() { + this.started = false; + } + /** + * Iterates over all content routers in parallel to find providers of the given key + */ + async *findProviders(key, options = {}) { + if (this.routers.length === 0) { + throw new CodeError$2('No content routers available', codes.ERR_NO_ROUTERS_AVAILABLE); + } + const self = this; + const seen = new PeerSet(); + for await (const peer of merge(...self.routers.map(router => router.findProviders(key, options)))) { + // the peer was yielded by a content router without multiaddrs and we + // failed to load them + if (peer == null) { + continue; + } + // store the addresses for the peer if found + if (peer.multiaddrs.length > 0) { + await this.components.peerStore.merge(peer.id, { + multiaddrs: peer.multiaddrs + }); + } + // deduplicate peers + if (seen.has(peer.id)) { + continue; + } + seen.add(peer.id); + yield peer; + } + } + /** + 
* Iterates over all content routers in parallel to notify it is + * a provider of the given key + */ + async provide(key, options = {}) { + if (this.routers.length === 0) { + throw new CodeError$2('No content routers available', codes.ERR_NO_ROUTERS_AVAILABLE); + } + await Promise.all(this.routers.map(async (router) => { + await router.provide(key, options); + })); + } + /** + * Store the given key/value pair in the available content routings + */ + async put(key, value, options) { + if (!this.isStarted()) { + throw new CodeError$2(messages.NOT_STARTED_YET, codes.ERR_NODE_NOT_STARTED); + } + await Promise.all(this.routers.map(async (router) => { + await router.put(key, value, options); + })); + } + /** + * Get the value to the given key. + * Times out after 1 minute by default. + */ + async get(key, options) { + if (!this.isStarted()) { + throw new CodeError$2(messages.NOT_STARTED_YET, codes.ERR_NODE_NOT_STARTED); + } + return Promise.any(this.routers.map(async (router) => { + return router.get(key, options); + })); + } + } + + /** + * @packageDocumentation + * + * Takes an (async) iterable that emits promise-returning functions, invokes them in parallel up to the concurrency limit and emits the results as they become available, optionally in the same order as the input + * + * @example + * + * ```javascript + * import parallel from 'it-parallel' + * import all from 'it-all' + * import delay from 'delay' + * + * // This can also be an iterator, async iterator, generator, etc + * const input = [ + * async () => { + * console.info('start 1') + * await delay(500) + * + * console.info('end 1') + * return 1 + * }, + * async () => { + * console.info('start 2') + * await delay(200) + * + * console.info('end 2') + * return 2 + * }, + * async () => { + * console.info('start 3') + * await delay(100) + * + * console.info('end 3') + * return 3 + * } + * ] + * + * const result = await all(parallel(input, { + * concurrency: 2 + * })) + * + * // output: + * // start 1 + * // 
start 2 + * // end 2 + * // start 3 + * // end 3 + * // end 1 + * + * console.info(result) // [2, 3, 1] + * ``` + * + * If order is important, pass `ordered: true` as an option: + * + * ```javascript + * const result = await all(parallel(input, { + * concurrency: 2, + * ordered: true + * })) + * + * // output: + * // start 1 + * // start 2 + * // end 2 + * // start 3 + * // end 3 + * // end 1 + * + * console.info(result) // [1, 2, 3] + * ``` + */ + const CustomEvent = globalThis.CustomEvent ?? Event; + /** + * Takes an (async) iterator that emits promise-returning functions, + * invokes them in parallel and emits the results as they become available but + * in the same order as the input + */ + async function* parallel(source, options = {}) { + let concurrency = options.concurrency ?? Infinity; + if (concurrency < 1) { + concurrency = Infinity; + } + const ordered = options.ordered == null ? false : options.ordered; + const emitter = new EventTarget(); + const ops = []; + let slotAvailable = pDefer(); + let resultAvailable = pDefer(); + let sourceFinished = false; + let sourceErr; + let opErred = false; + emitter.addEventListener('task-complete', () => { + resultAvailable.resolve(); + }); + void Promise.resolve().then(async () => { + try { + for await (const task of source) { + if (ops.length === concurrency) { + slotAvailable = pDefer(); + await slotAvailable.promise; + } + if (opErred) { + break; + } + const op = { + done: false + }; + ops.push(op); + task() + .then(result => { + op.done = true; + op.ok = true; + op.value = result; + emitter.dispatchEvent(new CustomEvent('task-complete')); + }, err => { + op.done = true; + op.err = err; + emitter.dispatchEvent(new CustomEvent('task-complete')); + }); + } + sourceFinished = true; + emitter.dispatchEvent(new CustomEvent('task-complete')); + } + catch (err) { + sourceErr = err; + emitter.dispatchEvent(new CustomEvent('task-complete')); + } + }); + function valuesAvailable() { + if (ordered) { + return ops[0]?.done; 
+ } + return Boolean(ops.find(op => op.done)); + } + function* yieldOrderedValues() { + while ((ops.length > 0) && ops[0].done) { + const op = ops[0]; + ops.shift(); + if (op.ok) { + yield op.value; + } + else { + // allow the source to exit + opErred = true; + slotAvailable.resolve(); + throw op.err; + } + slotAvailable.resolve(); + } + } + function* yieldUnOrderedValues() { + // more values can become available while we wait for `yield` + // to return control to this function + while (valuesAvailable()) { + for (let i = 0; i < ops.length; i++) { + if (ops[i].done) { + const op = ops[i]; + ops.splice(i, 1); + i--; + if (op.ok) { + yield op.value; + } + else { + opErred = true; + slotAvailable.resolve(); + throw op.err; + } + slotAvailable.resolve(); + } + } + } + } + while (true) { + if (!valuesAvailable()) { + resultAvailable = pDefer(); + await resultAvailable.promise; + } + if (sourceErr != null) { + // the source threw an error, propagate it + throw sourceErr; + } + if (ordered) { + yield* yieldOrderedValues(); + } + else { + yield* yieldUnOrderedValues(); + } + if (sourceFinished && ops.length === 0) { + // not waiting for any results and no more tasks so we are done + break; + } + } + } + + class DefaultPeerRouting { + log; + peerId; + peerStore; + routers; + constructor(components, init = {}) { + this.log = components.logger.forComponent('libp2p:peer-routing'); + this.peerId = components.peerId; + this.peerStore = components.peerStore; + this.routers = init.routers ?? 
[]; + } + /** + * Iterates over all peer routers in parallel to find the given peer + */ + async findPeer(id, options) { + if (this.routers.length === 0) { + throw new CodeError$2('No peer routers available', codes.ERR_NO_ROUTERS_AVAILABLE); + } + if (id.toString() === this.peerId.toString()) { + throw new CodeError$2('Should not try to find self', codes.ERR_FIND_SELF); + } + const self = this; + const source = merge(...this.routers.map(router => (async function* () { + try { + yield await router.findPeer(id, options); + } + catch (err) { + self.log.error(err); + } + })())); + for await (const peer of source) { + if (peer == null) { + continue; + } + // store the addresses for the peer if found + if (peer.multiaddrs.length > 0) { + await this.peerStore.merge(peer.id, { + multiaddrs: peer.multiaddrs + }); + } + return peer; + } + throw new CodeError$2(messages.NOT_FOUND, codes.ERR_NOT_FOUND); + } + /** + * Attempt to find the closest peers on the network to the given key + */ + async *getClosestPeers(key, options = {}) { + if (this.routers.length === 0) { + throw new CodeError$2('No peer routers available', codes.ERR_NO_ROUTERS_AVAILABLE); + } + const self = this; + const seen = new PeerSet(); + for await (const peer of parallel(async function* () { + const source = merge(...self.routers.map(router => router.getClosestPeers(key, options))); + for await (let peer of source) { + yield async () => { + // find multiaddrs if they are missing + if (peer.multiaddrs.length === 0) { + try { + peer = await self.findPeer(peer.id, { + ...options, + useCache: false + }); + } + catch (err) { + self.log.error('could not find peer multiaddrs', err); + return; + } + } + return peer; + }; + } + }())) { + if (peer == null) { + continue; + } + // store the addresses for the peer if found + if (peer.multiaddrs.length > 0) { + await this.peerStore.merge(peer.id, { + multiaddrs: peer.multiaddrs + }); + } + // deduplicate peers + if (seen.has(peer.id)) { + continue; + } + 
seen.add(peer.id); + yield peer; + } + } + } + + class RandomWalk extends TypedEventEmitter { + peerRouting; + log; + walking; + walkers; + shutdownController; + walkController; + needNext; + constructor(components) { + super(); + this.log = components.logger.forComponent('libp2p:random-walk'); + this.peerRouting = components.peerRouting; + this.walkers = 0; + this.walking = false; + // stops any in-progress walks when the node is shut down + this.shutdownController = new AbortController(); + setMaxListeners(Infinity, this.shutdownController.signal); + } + start() { + this.shutdownController = new AbortController(); + setMaxListeners(Infinity, this.shutdownController.signal); + } + stop() { + this.shutdownController.abort(); + } + async *walk(options) { + if (!this.walking) { + // start the query that causes walk:peer events to be emitted + this.startWalk(); + } + this.walkers++; + const signal = anySignal([this.shutdownController.signal, options?.signal]); + setMaxListeners(Infinity, signal); + try { + while (true) { + // if another consumer has paused the query, start it again + this.needNext?.resolve(); + this.needNext = pDefer(); + // wait for a walk:peer or walk:error event + const event = await raceEvent(this, 'walk:peer', signal, { + errorEvent: 'walk:error' + }); + yield event.detail; + } + } + finally { + signal.clear(); + this.walkers--; + // stop the walk if no more consumers are interested + if (this.walkers === 0) { + this.walkController?.abort(); + this.walkController = undefined; + } + } + } + startWalk() { + this.walking = true; + // the signal for this controller will be aborted if no more random peers + // are required + this.walkController = new AbortController(); + setMaxListeners(Infinity, this.walkController.signal); + const signal = anySignal([this.walkController.signal, this.shutdownController.signal]); + setMaxListeners(Infinity, signal); + const start = Date.now(); + let found = 0; + Promise.resolve().then(async () => { + this.log('start 
walk'); + // find peers until no more consumers are interested + while (this.walkers > 0) { + try { + for await (const peer of this.peerRouting.getClosestPeers(randomBytes(32), { signal })) { + signal.throwIfAborted(); + this.log('found peer %p', peer.id); + found++; + this.safeDispatchEvent('walk:peer', { + detail: peer + }); + // if we only have one consumer, pause the query until they request + // another random peer or they signal they are no longer interested + if (this.walkers === 1 && this.needNext != null) { + await raceSignal(this.needNext.promise, signal); + } + } + } + catch (err) { + this.log.error('randomwalk errored', err); + this.safeDispatchEvent('walk:error', { + detail: err + }); + } + } + }) + .catch(err => { + this.log.error('randomwalk errored', err); + }) + .finally(() => { + this.log('finished walk, found %d peers after %dms', found, Date.now() - start); + this.walking = false; + }); + } + } + + const DEFAULT_MAX_INBOUND_STREAMS$1 = 32; + const DEFAULT_MAX_OUTBOUND_STREAMS$1 = 64; + /** + * Responsible for notifying registered protocols of events in the network. 
+ */ + class DefaultRegistrar { + log; + topologies; + handlers; + components; + constructor(components) { + this.log = components.logger.forComponent('libp2p:registrar'); + this.topologies = new Map(); + this.handlers = new Map(); + this.components = components; + this._onDisconnect = this._onDisconnect.bind(this); + this._onPeerUpdate = this._onPeerUpdate.bind(this); + this._onPeerIdentify = this._onPeerIdentify.bind(this); + this.components.events.addEventListener('peer:disconnect', this._onDisconnect); + this.components.events.addEventListener('peer:update', this._onPeerUpdate); + this.components.events.addEventListener('peer:identify', this._onPeerIdentify); + } + getProtocols() { + return Array.from(new Set([ + ...this.handlers.keys() + ])).sort(); + } + getHandler(protocol) { + const handler = this.handlers.get(protocol); + if (handler == null) { + throw new CodeError$2(`No handler registered for protocol ${protocol}`, codes.ERR_NO_HANDLER_FOR_PROTOCOL); + } + return handler; + } + getTopologies(protocol) { + const topologies = this.topologies.get(protocol); + if (topologies == null) { + return []; + } + return [ + ...topologies.values() + ]; + } + /** + * Registers the `handler` for each protocol + */ + async handle(protocol, handler, opts) { + if (this.handlers.has(protocol)) { + throw new CodeError$2(`Handler already registered for protocol ${protocol}`, codes.ERR_PROTOCOL_HANDLER_ALREADY_REGISTERED); + } + const options = mergeOptions$1.bind({ ignoreUndefined: true })({ + maxInboundStreams: DEFAULT_MAX_INBOUND_STREAMS$1, + maxOutboundStreams: DEFAULT_MAX_OUTBOUND_STREAMS$1 + }, opts); + this.handlers.set(protocol, { + handler, + options + }); + // Add new protocol to self protocols in the peer store + await this.components.peerStore.merge(this.components.peerId, { + protocols: [protocol] + }); + } + /** + * Removes the handler for each protocol. The protocol + * will no longer be supported on streams. 
+ */ + async unhandle(protocols) { + const protocolList = Array.isArray(protocols) ? protocols : [protocols]; + protocolList.forEach(protocol => { + this.handlers.delete(protocol); + }); + // Update self protocols in the peer store + await this.components.peerStore.patch(this.components.peerId, { + protocols: this.getProtocols() + }); + } + /** + * Register handlers for a set of multicodecs given + */ + async register(protocol, topology) { + if (topology == null) { + throw new CodeError$2('invalid topology', codes.ERR_INVALID_PARAMETERS); + } + // Create topology + const id = `${(Math.random() * 1e9).toString(36)}${Date.now()}`; + let topologies = this.topologies.get(protocol); + if (topologies == null) { + topologies = new Map(); + this.topologies.set(protocol, topologies); + } + topologies.set(id, topology); + return id; + } + /** + * Unregister topology + */ + unregister(id) { + for (const [protocol, topologies] of this.topologies.entries()) { + if (topologies.has(id)) { + topologies.delete(id); + if (topologies.size === 0) { + this.topologies.delete(protocol); + } + } + } + } + /** + * Remove a disconnected peer from the record + */ + _onDisconnect(evt) { + const remotePeer = evt.detail; + void this.components.peerStore.get(remotePeer) + .then(peer => { + for (const protocol of peer.protocols) { + const topologies = this.topologies.get(protocol); + if (topologies == null) { + // no topologies are interested in this protocol + continue; + } + for (const topology of topologies.values()) { + if (topology.filter?.has(remotePeer) === false) { + continue; + } + topology.filter?.remove(remotePeer); + topology.onDisconnect?.(remotePeer); + } + } + }) + .catch(err => { + if (err.code === codes.ERR_NOT_FOUND) { + // peer has not completed identify so they are not in the peer store + return; + } + this.log.error('could not inform topologies of disconnecting peer %p', remotePeer, err); + }); + } + /** + * When a peer is updated, if they have removed supported protocols 
notify any + * topologies interested in the removed protocols. + */ + _onPeerUpdate(evt) { + const { peer, previous } = evt.detail; + const removed = (previous?.protocols ?? []).filter(protocol => !peer.protocols.includes(protocol)); + for (const protocol of removed) { + const topologies = this.topologies.get(protocol); + if (topologies == null) { + // no topologies are interested in this protocol + continue; + } + for (const topology of topologies.values()) { + if (topology.filter?.has(peer.id) === false) { + continue; + } + topology.filter?.remove(peer.id); + topology.onDisconnect?.(peer.id); + } + } + } + /** + * After identify has completed and we have received the list of supported + * protocols, notify any topologies interested in those protocols. + */ + _onPeerIdentify(evt) { + const protocols = evt.detail.protocols; + const connection = evt.detail.connection; + const peerId = evt.detail.peerId; + for (const protocol of protocols) { + const topologies = this.topologies.get(protocol); + if (topologies == null) { + // no topologies are interested in this protocol + continue; + } + for (const topology of topologies.values()) { + if (connection.transient && topology.notifyOnTransient !== true) { + continue; + } + if (topology.filter?.has(peerId) === true) { + continue; + } + topology.filter?.add(peerId); + topology.onConnect?.(peerId, connection); + } + } + } + } + + class TrackedMap extends Map { + metric; + constructor(init) { + super(); + const { name, metrics } = init; + this.metric = metrics.registerMetric(name); + this.updateComponentMetric(); + } + set(key, value) { + super.set(key, value); + this.updateComponentMetric(); + return this; + } + delete(key) { + const deleted = super.delete(key); + this.updateComponentMetric(); + return deleted; + } + clear() { + super.clear(); + this.updateComponentMetric(); + } + updateComponentMetric() { + this.metric.update(this.size); + } + } + function trackedMap(config) { + const { name, metrics } = config; + let map; 
+ if (metrics != null) { + map = new TrackedMap({ name, metrics }); + } + else { + map = new Map(); + } + return map; + } + + class DefaultTransportManager { + log; + components; + transports; + listeners; + faultTolerance; + started; + constructor(components, init = {}) { + this.log = components.logger.forComponent('libp2p:transports'); + this.components = components; + this.started = false; + this.transports = new Map(); + this.listeners = trackedMap({ + name: 'libp2p_transport_manager_listeners', + metrics: this.components.metrics + }); + this.faultTolerance = init.faultTolerance ?? FaultTolerance.FATAL_ALL; + } + /** + * Adds a `Transport` to the manager + */ + add(transport) { + const tag = transport[Symbol.toStringTag]; + if (tag == null) { + throw new CodeError$2('Transport must have a valid tag', codes.ERR_INVALID_KEY); + } + if (this.transports.has(tag)) { + throw new CodeError$2(`There is already a transport with the tag ${tag}`, codes.ERR_DUPLICATE_TRANSPORT); + } + this.log('adding transport %s', tag); + this.transports.set(tag, transport); + if (!this.listeners.has(tag)) { + this.listeners.set(tag, []); + } + } + isStarted() { + return this.started; + } + start() { + this.started = true; + } + async afterStart() { + // Listen on the provided transports for the provided addresses + const addrs = this.components.addressManager.getListenAddrs(); + await this.listen(addrs); + } + /** + * Stops all listeners + */ + async stop() { + const tasks = []; + for (const [key, listeners] of this.listeners) { + this.log('closing listeners for %s', key); + while (listeners.length > 0) { + const listener = listeners.pop(); + if (listener == null) { + continue; + } + tasks.push(listener.close()); + } + } + await Promise.all(tasks); + this.log('all listeners closed'); + for (const key of this.listeners.keys()) { + this.listeners.set(key, []); + } + this.started = false; + } + /** + * Dials the given Multiaddr over it's supported transport + */ + async dial(ma, options) { 
+ const transport = this.dialTransportForMultiaddr(ma); + if (transport == null) { + throw new CodeError$2(`No transport available for address ${String(ma)}`, codes.ERR_TRANSPORT_UNAVAILABLE); + } + try { + return await transport.dial(ma, { + ...options, + upgrader: this.components.upgrader + }); + } + catch (err) { + if (err.code == null) { + err.code = codes.ERR_TRANSPORT_DIAL_FAILED; + } + throw err; + } + } + /** + * Returns all Multiaddr's the listeners are using + */ + getAddrs() { + let addrs = []; + for (const listeners of this.listeners.values()) { + for (const listener of listeners) { + addrs = [...addrs, ...listener.getAddrs()]; + } + } + return addrs; + } + /** + * Returns all the transports instances + */ + getTransports() { + return Array.of(...this.transports.values()); + } + /** + * Returns all the listener instances + */ + getListeners() { + return Array.of(...this.listeners.values()).flat(); + } + /** + * Finds a transport that matches the given Multiaddr + */ + dialTransportForMultiaddr(ma) { + for (const transport of this.transports.values()) { + const addrs = transport.dialFilter([ma]); + if (addrs.length > 0) { + return transport; + } + } + } + /** + * Finds a transport that matches the given Multiaddr + */ + listenTransportForMultiaddr(ma) { + for (const transport of this.transports.values()) { + const addrs = transport.listenFilter([ma]); + if (addrs.length > 0) { + return transport; + } + } + } + /** + * Starts listeners for each listen Multiaddr + */ + async listen(addrs) { + if (!this.isStarted()) { + throw new CodeError$2('Not started', codes.ERR_NODE_NOT_STARTED); + } + if (addrs == null || addrs.length === 0) { + this.log('no addresses were provided for listening, this node is dial only'); + return; + } + const couldNotListen = []; + for (const [key, transport] of this.transports.entries()) { + const supportedAddrs = transport.listenFilter(addrs); + const tasks = []; + // For each supported multiaddr, create a listener + for (const 
addr of supportedAddrs) { + this.log('creating listener for %s on %a', key, addr); + const listener = transport.createListener({ + upgrader: this.components.upgrader + }); + let listeners = this.listeners.get(key) ?? []; + if (listeners == null) { + listeners = []; + this.listeners.set(key, listeners); + } + listeners.push(listener); + // Track listen/close events + listener.addEventListener('listening', () => { + this.components.events.safeDispatchEvent('transport:listening', { + detail: listener + }); + }); + listener.addEventListener('close', () => { + const index = listeners.findIndex(l => l === listener); + // remove the listener + listeners.splice(index, 1); + this.components.events.safeDispatchEvent('transport:close', { + detail: listener + }); + }); + // We need to attempt to listen on everything + tasks.push(listener.listen(addr)); + } + // Keep track of transports we had no addresses for + if (tasks.length === 0) { + couldNotListen.push(key); + continue; + } + const results = await Promise.allSettled(tasks); + // If we are listening on at least 1 address, succeed. + // TODO: we should look at adding a retry (`p-retry`) here to better support + // listening on remote addresses as they may be offline. We could then potentially + // just wait for any (`p-any`) listener to succeed on each transport before returning + const isListening = results.find(r => r.status === 'fulfilled'); + if ((isListening == null) && this.faultTolerance !== FaultTolerance.NO_FATAL) { + throw new CodeError$2(`Transport (${key}) could not listen on any available address`, codes.ERR_NO_VALID_ADDRESSES); + } + } + // If no transports were able to listen, throw an error. 
This likely + // means we were given addresses we do not have transports for + if (couldNotListen.length === this.transports.size) { + const message = `no valid addresses were provided for transports [${couldNotListen.join(', ')}]`; + if (this.faultTolerance === FaultTolerance.FATAL_ALL) { + throw new CodeError$2(message, codes.ERR_NO_VALID_ADDRESSES); + } + this.log(`libp2p in dial mode only: ${message}`); + } + } + /** + * Removes the given transport from the manager. + * If a transport has any running listeners, they will be closed. + */ + async remove(key) { + const listeners = this.listeners.get(key) ?? []; + this.log.trace('removing transport %s', key); + // Close any running listeners + const tasks = []; + this.log.trace('closing listeners for %s', key); + while (listeners.length > 0) { + const listener = listeners.pop(); + if (listener == null) { + continue; + } + tasks.push(listener.close()); + } + await Promise.all(tasks); + this.transports.delete(key); + this.listeners.delete(key); + } + /** + * Removes all transports from the manager. + * If any listeners are running, they will be closed. 
+ * + * @async + */ + async removeAll() { + const tasks = []; + for (const key of this.transports.keys()) { + tasks.push(this.remove(key)); + } + await Promise.all(tasks); + } + } + + const PROTOCOL_ID = '/multistream/1.0.0'; + // Conforming to go-libp2p + // See https://github.com/multiformats/go-multistream/blob/master/multistream.go#L297 + const MAX_PROTOCOL_LENGTH = 1024; + + class QueuelessPushable { + readNext; + haveNext; + ended; + nextResult; + constructor() { + this.ended = false; + this.readNext = pDefer(); + this.haveNext = pDefer(); + } + [Symbol.asyncIterator]() { + return this; + } + async next() { + if (this.nextResult == null) { + // wait for the supplier to push a value + await this.haveNext.promise; + } + if (this.nextResult == null) { + throw new Error('HaveNext promise resolved but nextResult was undefined'); + } + const nextResult = this.nextResult; + this.nextResult = undefined; + // signal to the supplier that we read the value + this.readNext.resolve(); + this.readNext = pDefer(); + return nextResult; + } + async throw(err) { + this.ended = true; + if (err != null) { + this.haveNext.reject(err); + } + const result = { + done: true, + value: undefined + }; + return result; + } + async return() { + const result = { + done: true, + value: undefined + }; + await this._push(undefined); + return result; + } + async push(value, options) { + await this._push(value, options); + } + async end(err, options) { + if (err != null) { + await this.throw(err); + } + else { + // abortable return + await this._push(undefined, options); + } + } + async _push(value, options) { + if (value != null && this.ended) { + throw new Error('Cannot push value onto an ended pushable'); + } + // already have a value, wait for it to be read + if (this.nextResult != null) { + await this.readNext.promise; + if (this.nextResult != null) { + throw new Error('NeedNext promise resolved but nextResult was not consumed'); + } + } + if (value != null) { + this.nextResult = { done: 
false, value }; + } + else { + this.ended = true; + this.nextResult = { done: true, value: undefined }; + } + // let the consumer know we have a new value + this.haveNext.resolve(); + this.haveNext = pDefer(); + // wait for the consumer to have finished processing the value and requested + // the next one or for the passed signal to abort the waiting + await raceSignal(this.readNext.promise, options?.signal, options); + } + } + function pushable() { + return new QueuelessPushable(); + } + + /** + * @packageDocumentation + * + * This module makes it easy to send and receive bytes over streams. + * + * @example + * + * ```typescript + * import { byteStream } from 'it-byte-stream' + * + * const stream = byteStream(duplex) + * + * // read the next chunk + * const bytes = await stream.read() + * + * // read the next five bytes + * const fiveBytes = await stream.read(5) + * + * // write bytes into the stream + * await stream.write(Uint8Array.from([0, 1, 2, 3, 4])) + * ``` + */ + let CodeError$1 = class CodeError extends Error { + code; + constructor(message, code) { + super(message); + this.code = code; + } + }; + class AbortError extends CodeError$1 { + type; + constructor(message) { + super(message, 'ABORT_ERR'); + this.type = 'aborted'; + } + } + function byteStream(duplex, opts) { + const write = pushable(); + duplex.sink(write).catch(async (err) => { + await write.end(err); + }); + duplex.sink = async (source) => { + for await (const buf of source) { + await write.push(buf); + } + await write.end(); + }; + let source = duplex.source; + if (duplex.source[Symbol.iterator] != null) { + source = duplex.source[Symbol.iterator](); + } + else if (duplex.source[Symbol.asyncIterator] != null) { + source = duplex.source[Symbol.asyncIterator](); + } + const readBuffer = new Uint8ArrayList(); + const W = { + read: async (bytes, options) => { + options?.signal?.throwIfAborted(); + let listener; + const abortPromise = new Promise((resolve, reject) => { + listener = () => { + 
reject(new AbortError('Read aborted')); + }; + options?.signal?.addEventListener('abort', listener); + }); + try { + if (bytes == null) { + // just read whatever arrives + const { done, value } = await Promise.race([ + source.next(), + abortPromise + ]); + if (done === true) { + return new Uint8ArrayList(); + } + return value; + } + while (readBuffer.byteLength < bytes) { + const { value, done } = await Promise.race([ + source.next(), + abortPromise + ]); + if (done === true) { + throw new CodeError$1('unexpected end of input', 'ERR_UNEXPECTED_EOF'); + } + readBuffer.append(value); + } + const buf = readBuffer.sublist(0, bytes); + readBuffer.consume(bytes); + return buf; + } + finally { + if (listener != null) { + options?.signal?.removeEventListener('abort', listener); + } + } + }, + write: async (data, options) => { + options?.signal?.throwIfAborted(); + // just write + if (data instanceof Uint8Array) { + await write.push(data, options); + } + else { + await write.push(data.subarray(), options); + } + }, + unwrap: () => { + if (readBuffer.byteLength > 0) { + const originalStream = duplex.source; + duplex.source = (async function* () { + if (opts?.yieldBytes === false) { + yield readBuffer; + } + else { + yield* readBuffer; + } + yield* originalStream; + }()); + } + return duplex; + } + }; + return W; + } + + /** + * @packageDocumentation + * + * This module makes it easy to send and receive length-prefixed byte arrays over streams. 
+ * + * @example + * + * ```typescript + * import { lpStream } from 'it-length-prefixed-stream' + * + * const stream = lpStream(duplex) + * + * // read the next length-prefixed chunk + * const bytes = await stream.read() + * + * // write a length-prefixed chunk + * await stream.write(Uint8Array.from([0, 1, 2, 3, 4])) + * + * // write several chunks, all individually length-prefixed + * await stream.writeV([ + * Uint8Array.from([0, 1, 2, 3, 4]), + * Uint8Array.from([5, 6, 7, 8, 9]) + * ]) + * ``` + */ + class CodeError extends Error { + code; + constructor(message, code) { + super(message); + this.code = code; + } + } + function lpStream(duplex, opts = {}) { + const bytes = byteStream(duplex, opts); + if (opts.maxDataLength != null && opts.maxLengthLength == null) { + // if max data length is set but max length length is not, calculate the + // max length length needed to encode max data length + opts.maxLengthLength = encodingLength$1(opts.maxDataLength); + } + const decodeLength = opts?.lengthDecoder ?? decode$6; + const encodeLength = opts?.lengthEncoder ?? 
encode$5; + const W = { + read: async (options) => { + let dataLength = -1; + const lengthBuffer = new Uint8ArrayList(); + while (true) { + // read one byte at a time until we can decode a varint + lengthBuffer.append(await bytes.read(1, options)); + try { + dataLength = decodeLength(lengthBuffer); + } + catch (err) { + if (err instanceof RangeError) { + continue; + } + throw err; + } + if (opts?.maxLengthLength != null && lengthBuffer.byteLength > opts.maxLengthLength) { + throw new CodeError('message length length too long', 'ERR_MSG_LENGTH_TOO_LONG'); + } + if (dataLength > -1) { + break; + } + } + if (opts?.maxDataLength != null && dataLength > opts.maxDataLength) { + throw new CodeError('message length too long', 'ERR_MSG_DATA_TOO_LONG'); + } + return bytes.read(dataLength, options); + }, + write: async (data, options) => { + // encode, write + await bytes.write(new Uint8ArrayList(encodeLength(data.byteLength), data), options); + }, + writeV: async (data, options) => { + const list = new Uint8ArrayList(...data.flatMap(buf => ([encodeLength(buf.byteLength), buf]))); + // encode, write + await bytes.write(list, options); + }, + unwrap: () => { + return bytes.unwrap(); + } + }; + return W; + } + + const NewLine = fromString('\n'); + /** + * `write` encodes and writes a single buffer + */ + async function write(writer, buffer, options) { + await writer.write(buffer, options); + } + /** + * `writeAll` behaves like `write`, except it encodes an array of items as a single write + */ + async function writeAll(writer, buffers, options) { + await writer.writeV(buffers, options); + } + /** + * Read a length-prefixed buffer from the passed stream, stripping the final newline character + */ + async function read(reader, options) { + const buf = await reader.read(options); + if (buf.byteLength === 0 || buf.get(buf.byteLength - 1) !== NewLine[0]) { + options.log.error('Invalid mss message - missing newline', buf); + throw new CodeError$2('missing newline', 
'ERR_INVALID_MULTISTREAM_SELECT_MESSAGE'); + } + return buf.sublist(0, -1); // Remove newline + } + /** + * Read a length-prefixed string from the passed stream, stripping the final newline character + */ + async function readString(reader, options) { + const buf = await read(reader, options); + return toString$1(buf.subarray()); + } + + /** + * Negotiate a protocol to use from a list of protocols. + * + * @param stream - A duplex iterable stream to dial on + * @param protocols - A list of protocols (or single protocol) to negotiate with. Protocols are attempted in order until a match is made. + * @param options - An options object containing an AbortSignal and an optional boolean `writeBytes` - if this is true, `Uint8Array`s will be written into `duplex`, otherwise `Uint8ArrayList`s will + * @returns A stream for the selected protocol and the protocol that was selected from the list of protocols provided to `select`. + * @example + * + * ```TypeScript + * import { pipe } from 'it-pipe' + * import * as mss from '@libp2p/multistream-select' + * import { Mplex } from '@libp2p/mplex' + * + * const muxer = new Mplex() + * const muxedStream = muxer.newStream() + * + * // mss.select(protocol(s)) + * // Select from one of the passed protocols (in priority order) + * // Returns selected stream and protocol + * const { stream: dhtStream, protocol } = await mss.select(muxedStream, [ + * // This might just be different versions of DHT, but could be different impls + * '/ipfs-dht/2.0.0', // Most of the time this will probably just be one item. 
+ * '/ipfs-dht/1.0.0' + * ]) + * + * // Typically this stream will be passed back to the caller of libp2p.dialProtocol + * // + * // ...it might then do something like this: + * // try { + * // await pipe( + * // [uint8ArrayFromString('Some DHT data')] + * // dhtStream, + * // async source => { + * // for await (const chunk of source) + * // // DHT response data + * // } + * // ) + * // } catch (err) { + * // // Error in stream + * // } + * ``` + */ + async function select(stream, protocols, options) { + protocols = Array.isArray(protocols) ? [...protocols] : [protocols]; + if (protocols.length === 1 && options.negotiateFully === false) { + return optimisticSelect(stream, protocols[0], options); + } + const lp = lpStream(stream, { + ...options, + maxDataLength: MAX_PROTOCOL_LENGTH + }); + const protocol = protocols.shift(); + if (protocol == null) { + throw new Error('At least one protocol must be specified'); + } + options.log.trace('select: write ["%s", "%s"]', PROTOCOL_ID, protocol); + const p1 = fromString(`${PROTOCOL_ID}\n`); + const p2 = fromString(`${protocol}\n`); + await writeAll(lp, [p1, p2], options); + options.log.trace('select: reading multistream-select header'); + let response = await readString(lp, options); + options.log.trace('select: read "%s"', response); + // Read the protocol response if we got the protocolId in return + if (response === PROTOCOL_ID) { + options.log.trace('select: reading protocol response'); + response = await readString(lp, options); + options.log.trace('select: read "%s"', response); + } + // We're done + if (response === protocol) { + return { stream: lp.unwrap(), protocol }; + } + // We haven't gotten a valid ack, try the other protocols + for (const protocol of protocols) { + options.log.trace('select: write "%s"', protocol); + await write(lp, fromString(`${protocol}\n`), options); + options.log.trace('select: reading protocol response'); + const response = await readString(lp, options); + options.log.trace('select: read 
"%s" for "%s"', response, protocol); + if (response === protocol) { + return { stream: lp.unwrap(), protocol }; + } + } + throw new CodeError$2('protocol selection failed', 'ERR_UNSUPPORTED_PROTOCOL'); + } + /** + * Optimistically negotiates a protocol. + * + * It *does not* block writes waiting for the other end to respond. Instead, it + * simply assumes the negotiation went successfully and starts writing data. + * + * Use when it is known that the receiver supports the desired protocol. + */ + function optimisticSelect(stream, protocol, options) { + const originalSink = stream.sink.bind(stream); + const originalSource = stream.source; + let negotiated = false; + let negotiating = false; + const doneNegotiating = pDefer(); + let sentProtocol = false; + let sendingProtocol = false; + const doneSendingProtocol = pDefer(); + let readProtocol = false; + let readingProtocol = false; + const doneReadingProtocol = pDefer(); + const lp = lpStream({ + sink: originalSink, + source: originalSource + }, { + ...options, + maxDataLength: MAX_PROTOCOL_LENGTH + }); + stream.sink = async (source) => { + const { sink } = lp.unwrap(); + await sink(async function* () { + let sentData = false; + for await (const buf of source) { + // started reading before the source yielded, wait for protocol send + if (sendingProtocol) { + await doneSendingProtocol.promise; + } + // writing before reading, send the protocol and the first chunk of data + if (!sentProtocol) { + sendingProtocol = true; + options.log.trace('optimistic: write ["%s", "%s", data(%d)] in sink', PROTOCOL_ID, protocol, buf.byteLength); + const protocolString = `${protocol}\n`; + // send protocols in first chunk of data written to transport + yield new Uint8ArrayList(Uint8Array.from([19]), // length of PROTOCOL_ID plus newline + fromString(`${PROTOCOL_ID}\n`), encode$5(protocolString.length), fromString(protocolString), buf).subarray(); + options.log.trace('optimistic: wrote ["%s", "%s", data(%d)] in sink', PROTOCOL_ID, 
protocol, buf.byteLength); + sentProtocol = true; + sendingProtocol = false; + doneSendingProtocol.resolve(); + // read the negotiation response but don't block more sending + negotiate() + .catch(err => { + options.log.error('could not finish optimistic protocol negotiation of %s', protocol, err); + }); + } + else { + yield buf; + } + sentData = true; + } + // special case - the source passed to the sink has ended but we didn't + // negotiated the protocol yet so do it now + if (!sentData) { + await negotiate(); + } + }()); + }; + async function negotiate() { + if (negotiating) { + options.log.trace('optimistic: already negotiating %s stream', protocol); + await doneNegotiating.promise; + return; + } + negotiating = true; + try { + // we haven't sent the protocol yet, send it now + if (!sentProtocol) { + options.log.trace('optimistic: doing send protocol for %s stream', protocol); + await doSendProtocol(); + } + // if we haven't read the protocol response yet, do it now + if (!readProtocol) { + options.log.trace('optimistic: doing read protocol for %s stream', protocol); + await doReadProtocol(); + } + } + finally { + negotiating = false; + negotiated = true; + doneNegotiating.resolve(); + } + } + async function doSendProtocol() { + if (sendingProtocol) { + await doneSendingProtocol.promise; + return; + } + sendingProtocol = true; + try { + options.log.trace('optimistic: write ["%s", "%s", data] in source', PROTOCOL_ID, protocol); + await lp.writeV([ + fromString(`${PROTOCOL_ID}\n`), + fromString(`${protocol}\n`) + ]); + options.log.trace('optimistic: wrote ["%s", "%s", data] in source', PROTOCOL_ID, protocol); + } + finally { + sentProtocol = true; + sendingProtocol = false; + doneSendingProtocol.resolve(); + } + } + async function doReadProtocol() { + if (readingProtocol) { + await doneReadingProtocol.promise; + return; + } + readingProtocol = true; + try { + options.log.trace('optimistic: reading multistream select header'); + let response = await 
readString(lp, options); + options.log.trace('optimistic: read multistream select header "%s"', response); + if (response === PROTOCOL_ID) { + response = await readString(lp, options); + } + options.log.trace('optimistic: read protocol "%s", expecting "%s"', response, protocol); + if (response !== protocol) { + throw new CodeError$2('protocol selection failed', 'ERR_UNSUPPORTED_PROTOCOL'); + } + } + finally { + readProtocol = true; + readingProtocol = false; + doneReadingProtocol.resolve(); + } + } + stream.source = (async function* () { + // make sure we've done protocol negotiation before we read stream data + await negotiate(); + options.log.trace('optimistic: reading data from "%s" stream', protocol); + yield* lp.unwrap().source; + })(); + if (stream.closeRead != null) { + const originalCloseRead = stream.closeRead.bind(stream); + stream.closeRead = async (opts) => { + // we need to read & write to negotiate the protocol so ensure we've done + // this before closing the readable end of the stream + if (!negotiated) { + await negotiate().catch(err => { + options.log.error('could not negotiate protocol before close read', err); + }); + } + // protocol has been negotiated, ok to close the readable end + await originalCloseRead(opts); + }; + } + if (stream.closeWrite != null) { + const originalCloseWrite = stream.closeWrite.bind(stream); + stream.closeWrite = async (opts) => { + // we need to read & write to negotiate the protocol so ensure we've done + // this before closing the writable end of the stream + if (!negotiated) { + await negotiate().catch(err => { + options.log.error('could not negotiate protocol before close write', err); + }); + } + // protocol has been negotiated, ok to close the writable end + await originalCloseWrite(opts); + }; + } + if (stream.close != null) { + const originalClose = stream.close.bind(stream); + stream.close = async (opts) => { + // if we are in the process of negotiation, let it finish before closing + // because we may have 
unsent early data + const tasks = []; + if (sendingProtocol) { + tasks.push(doneSendingProtocol.promise); + } + if (readingProtocol) { + tasks.push(doneReadingProtocol.promise); + } + if (tasks.length > 0) { + // let the in-flight protocol negotiation finish gracefully + await raceSignal(Promise.all(tasks), opts?.signal); + } + else { + // no protocol negotiation attempt has occurred so don't start one + negotiated = true; + negotiating = false; + doneNegotiating.resolve(); + } + // protocol has been negotiated, ok to close the writable end + await originalClose(opts); + }; + } + return { + stream, + protocol + }; + } + + function isAsyncIterable$4(thing) { + return thing[Symbol.asyncIterator] != null; + } + + const defaultEncoder = (length) => { + const lengthLength = encodingLength$1(length); + const lengthBuf = allocUnsafe(lengthLength); + encode$5(length, lengthBuf); + defaultEncoder.bytes = lengthLength; + return lengthBuf; + }; + defaultEncoder.bytes = 0; + function encode(source, options) { + options = options ?? {}; + const encodeLength = options.lengthEncoder ?? defaultEncoder; + function* maybeYield(chunk) { + // length + data + const length = encodeLength(chunk.byteLength); + // yield only Uint8Arrays + if (length instanceof Uint8Array) { + yield length; + } + else { + yield* length; + } + // yield only Uint8Arrays + if (chunk instanceof Uint8Array) { + yield chunk; + } + else { + yield* chunk; + } + } + if (isAsyncIterable$4(source)) { + return (async function* () { + for await (const chunk of source) { + yield* maybeYield(chunk); + } + })(); + } + return (function* () { + for (const chunk of source) { + yield* maybeYield(chunk); + } + })(); + } + encode.single = (chunk, options) => { + options = options ?? {}; + const encodeLength = options.lengthEncoder ?? 
defaultEncoder; + return new Uint8ArrayList(encodeLength(chunk.byteLength), chunk); + }; + + /* eslint max-depth: ["error", 6] */ + // Maximum length of the length section of the message + const MAX_LENGTH_LENGTH = 8; // Varint.encode(Number.MAX_SAFE_INTEGER).length + // Maximum length of the data section of the message + const MAX_DATA_LENGTH = 1024 * 1024 * 4; + var ReadMode; + (function (ReadMode) { + ReadMode[ReadMode["LENGTH"] = 0] = "LENGTH"; + ReadMode[ReadMode["DATA"] = 1] = "DATA"; + })(ReadMode || (ReadMode = {})); + const defaultDecoder = (buf) => { + const length = decode$6(buf); + defaultDecoder.bytes = encodingLength$1(length); + return length; + }; + defaultDecoder.bytes = 0; + function decode(source, options) { + const buffer = new Uint8ArrayList(); + let mode = ReadMode.LENGTH; + let dataLength = -1; + const lengthDecoder = options?.lengthDecoder ?? defaultDecoder; + const maxLengthLength = options?.maxLengthLength ?? MAX_LENGTH_LENGTH; + const maxDataLength = options?.maxDataLength ?? 
MAX_DATA_LENGTH; + function* maybeYield() { + while (buffer.byteLength > 0) { + if (mode === ReadMode.LENGTH) { + // read length, ignore errors for short reads + try { + dataLength = lengthDecoder(buffer); + if (dataLength < 0) { + throw errCode$1(new Error('invalid message length'), 'ERR_INVALID_MSG_LENGTH'); + } + if (dataLength > maxDataLength) { + throw errCode$1(new Error('message length too long'), 'ERR_MSG_DATA_TOO_LONG'); + } + const dataLengthLength = lengthDecoder.bytes; + buffer.consume(dataLengthLength); + if (options?.onLength != null) { + options.onLength(dataLength); + } + mode = ReadMode.DATA; + } + catch (err) { + if (err instanceof RangeError) { + if (buffer.byteLength > maxLengthLength) { + throw errCode$1(new Error('message length length too long'), 'ERR_MSG_LENGTH_TOO_LONG'); + } + break; + } + throw err; + } + } + if (mode === ReadMode.DATA) { + if (buffer.byteLength < dataLength) { + // not enough data, wait for more + break; + } + const data = buffer.sublist(0, dataLength); + buffer.consume(dataLength); + if (options?.onData != null) { + options.onData(data); + } + yield data; + mode = ReadMode.LENGTH; + } + } + } + if (isAsyncIterable$4(source)) { + return (async function* () { + for await (const buf of source) { + buffer.append(buf); + yield* maybeYield(); + } + if (buffer.byteLength > 0) { + throw errCode$1(new Error('unexpected end of input'), 'ERR_UNEXPECTED_EOF'); + } + })(); + } + return (function* () { + for (const buf of source) { + buffer.append(buf); + yield* maybeYield(); + } + if (buffer.byteLength > 0) { + throw errCode$1(new Error('unexpected end of input'), 'ERR_UNEXPECTED_EOF'); + } + })(); + } + decode.fromReader = (reader, options) => { + let byteLength = 1; // Read single byte chunks until the length is known + const varByteSource = (async function* () { + while (true) { + try { + const { done, value } = await reader.next(byteLength); + if (done === true) { + return; + } + if (value != null) { + yield value; + } + } + 
catch (err) { + if (err.code === 'ERR_UNDER_READ') { + return { done: true, value: null }; + } + throw err; + } + finally { + // Reset the byteLength so we continue to check for varints + byteLength = 1; + } + } + }()); + /** + * Once the length has been parsed, read chunk for that length + */ + const onLength = (l) => { byteLength = l; }; + return decode(varByteSource, { + ...(options ?? {}), + onLength + }); + }; + + /** + * Handle multistream protocol selections for the given list of protocols. + * + * Note that after a protocol is handled `listener` can no longer be used. + * + * @param stream - A duplex iterable stream to listen on + * @param protocols - A list of protocols (or single protocol) that this listener is able to speak. + * @param options - an options object containing an AbortSignal and an optional boolean `writeBytes` - if this is true, `Uint8Array`s will be written into `duplex`, otherwise `Uint8ArrayList`s will + * @returns A stream for the selected protocol and the protocol that was selected from the list of protocols provided to `select` + * @example + * + * ```TypeScript + * import { pipe } from 'it-pipe' + * import * as mss from '@libp2p/multistream-select' + * import { Mplex } from '@libp2p/mplex' + * + * const muxer = new Mplex({ + * async onStream (muxedStream) { + * // mss.handle(handledProtocols) + * // Returns selected stream and protocol + * const { stream, protocol } = await mss.handle(muxedStream, [ + * '/ipfs-dht/1.0.0', + * '/ipfs-bitswap/1.0.0' + * ]) + * + * // Typically here we'd call the handler function that was registered in + * // libp2p for the given protocol: + * // e.g. 
handlers[protocol].handler(stream) + * // + * // If protocol was /ipfs-dht/1.0.0 it might do something like this: + * // try { + * // await pipe( + * // dhtStream, + * // source => (async function * () { + * // for await (const chunk of source) + * // // Incoming DHT data -> process and yield to respond + * // })(), + * // dhtStream + * // ) + * // } catch (err) { + * // // Error in stream + * // } + * } + * }) + * ``` + */ + async function handle(stream, protocols, options) { + protocols = Array.isArray(protocols) ? protocols : [protocols]; + options.log.trace('handle: available protocols %s', protocols); + const lp = lpStream(stream, { + ...options, + maxDataLength: MAX_PROTOCOL_LENGTH, + maxLengthLength: 2 // 2 bytes is enough to length-prefix MAX_PROTOCOL_LENGTH + }); + while (true) { + options.log.trace('handle: reading incoming string'); + const protocol = await readString(lp, options); + options.log.trace('handle: read "%s"', protocol); + if (protocol === PROTOCOL_ID) { + options.log.trace('handle: respond with "%s" for "%s"', PROTOCOL_ID, protocol); + await write(lp, fromString(`${PROTOCOL_ID}\n`), options); + options.log.trace('handle: responded with "%s" for "%s"', PROTOCOL_ID, protocol); + continue; + } + if (protocols.includes(protocol)) { + options.log.trace('handle: respond with "%s" for "%s"', protocol, protocol); + await write(lp, fromString(`${protocol}\n`), options); + options.log.trace('handle: responded with "%s" for "%s"', protocol, protocol); + return { stream: lp.unwrap(), protocol }; + } + if (protocol === 'ls') { + // \n\n\n + const protos = new Uint8ArrayList(...protocols.map(p => encode.single(fromString(`${p}\n`))), fromString('\n')); + options.log.trace('handle: respond with "%s" for %s', protocols, protocol); + await write(lp, protos, options); + options.log.trace('handle: responded with "%s" for %s', protocols, protocol); + continue; + } + options.log('handle: respond with "na" for "%s"', protocol); + await write(lp, 
fromString('na\n'), options); + options.log('handle: responded with "na" for "%s"', protocol); + } + } + + const CLOSE_TIMEOUT$1 = 500; + /** + * An implementation of the js-libp2p connection. + * Any libp2p transport should use an upgrader to return this connection. + */ + class ConnectionImpl { + /** + * Connection identifier. + */ + id; + /** + * Observed multiaddr of the remote peer + */ + remoteAddr; + /** + * Remote peer id + */ + remotePeer; + direction; + timeline; + multiplexer; + encryption; + status; + transient; + log; + /** + * User provided tags + * + */ + tags; + /** + * Reference to the new stream function of the multiplexer + */ + _newStream; + /** + * Reference to the close function of the raw connection + */ + _close; + _abort; + /** + * Reference to the getStreams function of the muxer + */ + _getStreams; + /** + * An implementation of the js-libp2p connection. + * Any libp2p transport should use an upgrader to return this connection. + */ + constructor(init) { + const { remoteAddr, remotePeer, newStream, close, abort, getStreams } = init; + this.id = `${(parseInt(String(Math.random() * 1e9))).toString(36)}${Date.now()}`; + this.remoteAddr = remoteAddr; + this.remotePeer = remotePeer; + this.direction = init.direction; + this.status = 'open'; + this.timeline = init.timeline; + this.multiplexer = init.multiplexer; + this.encryption = init.encryption; + this.transient = init.transient ?? 
false; + this.log = init.logger.forComponent(`libp2p:connection:${this.direction}:${this.id}`); + if (this.remoteAddr.getPeerId() == null) { + this.remoteAddr = this.remoteAddr.encapsulate(`/p2p/${this.remotePeer}`); + } + this._newStream = newStream; + this._close = close; + this._abort = abort; + this._getStreams = getStreams; + this.tags = []; + } + [Symbol.toStringTag] = 'Connection'; + [connectionSymbol] = true; + /** + * Get all the streams of the muxer + */ + get streams() { + return this._getStreams(); + } + /** + * Create a new stream from this connection + */ + async newStream(protocols, options) { + if (this.status === 'closing') { + throw new CodeError$2('the connection is being closed', 'ERR_CONNECTION_BEING_CLOSED'); + } + if (this.status === 'closed') { + throw new CodeError$2('the connection is closed', 'ERR_CONNECTION_CLOSED'); + } + if (!Array.isArray(protocols)) { + protocols = [protocols]; + } + if (this.transient && options?.runOnTransientConnection !== true) { + throw new CodeError$2('Cannot open protocol stream on transient connection', 'ERR_TRANSIENT_CONNECTION'); + } + const stream = await this._newStream(protocols, options); + stream.direction = 'outbound'; + return stream; + } + /** + * Close the connection + */ + async close(options = {}) { + if (this.status === 'closed' || this.status === 'closing') { + return; + } + this.log('closing connection to %a', this.remoteAddr); + this.status = 'closing'; + if (options.signal == null) { + const signal = AbortSignal.timeout(CLOSE_TIMEOUT$1); + setMaxListeners(Infinity, signal); + options = { + ...options, + signal + }; + } + try { + this.log.trace('closing all streams'); + // close all streams gracefully - this can throw if we're not multiplexed + await Promise.all(this.streams.map(async (s) => s.close(options))); + this.log.trace('closing underlying transport'); + // close raw connection + await this._close(options); + this.log.trace('updating timeline with close time'); + this.status = 
'closed'; + this.timeline.close = Date.now(); + } + catch (err) { + this.log.error('error encountered during graceful close of connection to %a', this.remoteAddr, err); + this.abort(err); + } + } + abort(err) { + this.log.error('aborting connection to %a due to error', this.remoteAddr, err); + this.status = 'closing'; + this.streams.forEach(s => { s.abort(err); }); + this.log.error('all streams aborted', this.streams.length); + // Abort raw connection + this._abort(err); + this.timeline.close = Date.now(); + this.status = 'closed'; + } + } + function createConnection(init) { + return new ConnectionImpl(init); + } + + const DEFAULT_PROTOCOL_SELECT_TIMEOUT = 30000; + function findIncomingStreamLimit(protocol, registrar) { + try { + const { options } = registrar.getHandler(protocol); + return options.maxInboundStreams; + } + catch (err) { + if (err.code !== codes.ERR_NO_HANDLER_FOR_PROTOCOL) { + throw err; + } + } + return DEFAULT_MAX_INBOUND_STREAMS$1; + } + function findOutgoingStreamLimit(protocol, registrar, options = {}) { + try { + const { options } = registrar.getHandler(protocol); + if (options.maxOutboundStreams != null) { + return options.maxOutboundStreams; + } + } + catch (err) { + if (err.code !== codes.ERR_NO_HANDLER_FOR_PROTOCOL) { + throw err; + } + } + return options.maxOutboundStreams ?? 
DEFAULT_MAX_OUTBOUND_STREAMS$1; + } + function countStreams(protocol, direction, connection) { + let streamCount = 0; + connection.streams.forEach(stream => { + if (stream.direction === direction && stream.protocol === protocol) { + streamCount++; + } + }); + return streamCount; + } + class DefaultUpgrader { + components; + connectionEncryption; + muxers; + inboundUpgradeTimeout; + events; + constructor(components, init) { + this.components = components; + this.connectionEncryption = new Map(); + init.connectionEncryption.forEach(encrypter => { + this.connectionEncryption.set(encrypter.protocol, encrypter); + }); + this.muxers = new Map(); + init.muxers.forEach(muxer => { + this.muxers.set(muxer.protocol, muxer); + }); + this.inboundUpgradeTimeout = init.inboundUpgradeTimeout ?? INBOUND_UPGRADE_TIMEOUT; + this.events = components.events; + } + async shouldBlockConnection(remotePeer, maConn, connectionType) { + const connectionGater = this.components.connectionGater[connectionType]; + if (connectionGater !== undefined) { + if (await connectionGater(remotePeer, maConn)) { + throw new CodeError$2(`The multiaddr connection is blocked by gater.${connectionType}`, codes.ERR_CONNECTION_INTERCEPTED); + } + } + } + /** + * Upgrades an inbound connection + */ + async upgradeInbound(maConn, opts) { + const accept = await this.components.connectionManager.acceptIncomingConnection(maConn); + if (!accept) { + throw new CodeError$2('connection denied', codes.ERR_CONNECTION_DENIED); + } + let encryptedConn; + let remotePeer; + let upgradedConn; + let muxerFactory; + let cryptoProtocol; + const signal = AbortSignal.timeout(this.inboundUpgradeTimeout); + const onAbort = () => { + maConn.abort(new CodeError$2('inbound upgrade timeout', ERR_TIMEOUT)); + }; + signal.addEventListener('abort', onAbort, { once: true }); + setMaxListeners(Infinity, signal); + try { + if ((await this.components.connectionGater.denyInboundConnection?.(maConn)) === true) { + throw new CodeError$2('The 
multiaddr connection is blocked by gater.acceptConnection', codes.ERR_CONNECTION_INTERCEPTED); + } + this.components.metrics?.trackMultiaddrConnection(maConn); + maConn.log('starting the inbound connection upgrade'); + // Protect + let protectedConn = maConn; + if (opts?.skipProtection !== true) { + const protector = this.components.connectionProtector; + if (protector != null) { + maConn.log('protecting the inbound connection'); + protectedConn = await protector.protect(maConn); + } + } + try { + // Encrypt the connection + encryptedConn = protectedConn; + if (opts?.skipEncryption !== true) { + ({ + conn: encryptedConn, + remotePeer, + protocol: cryptoProtocol + } = await this._encryptInbound(protectedConn)); + const maConn = { + ...protectedConn, + ...encryptedConn + }; + await this.shouldBlockConnection(remotePeer, maConn, 'denyInboundEncryptedConnection'); + } + else { + const idStr = maConn.remoteAddr.getPeerId(); + if (idStr == null) { + throw new CodeError$2('inbound connection that skipped encryption must have a peer id', codes.ERR_INVALID_MULTIADDR); + } + const remotePeerId = peerIdFromString(idStr); + cryptoProtocol = 'native'; + remotePeer = remotePeerId; + } + upgradedConn = encryptedConn; + if (opts?.muxerFactory != null) { + muxerFactory = opts.muxerFactory; + } + else if (this.muxers.size > 0) { + // Multiplex the connection + const multiplexed = await this._multiplexInbound({ + ...protectedConn, + ...encryptedConn + }, this.muxers); + muxerFactory = multiplexed.muxerFactory; + upgradedConn = multiplexed.stream; + } + } + catch (err) { + maConn.log.error('failed to upgrade inbound connection', err); + throw err; + } + await this.shouldBlockConnection(remotePeer, maConn, 'denyInboundUpgradedConnection'); + maConn.log('successfully upgraded inbound connection'); + return this._createConnection({ + cryptoProtocol, + direction: 'inbound', + maConn, + upgradedConn, + muxerFactory, + remotePeer, + transient: opts?.transient + }); + } + finally { + 
signal.removeEventListener('abort', onAbort); + this.components.connectionManager.afterUpgradeInbound(); + } + } + /** + * Upgrades an outbound connection + */ + async upgradeOutbound(maConn, opts) { + const idStr = maConn.remoteAddr.getPeerId(); + let remotePeerId; + if (idStr != null) { + remotePeerId = peerIdFromString(idStr); + await this.shouldBlockConnection(remotePeerId, maConn, 'denyOutboundConnection'); + } + let encryptedConn; + let remotePeer; + let upgradedConn; + let cryptoProtocol; + let muxerFactory; + this.components.metrics?.trackMultiaddrConnection(maConn); + maConn.log('starting the outbound connection upgrade'); + // If the transport natively supports encryption, skip connection + // protector and encryption + // Protect + let protectedConn = maConn; + if (opts?.skipProtection !== true) { + const protector = this.components.connectionProtector; + if (protector != null) { + protectedConn = await protector.protect(maConn); + } + } + try { + // Encrypt the connection + encryptedConn = protectedConn; + if (opts?.skipEncryption !== true) { + ({ + conn: encryptedConn, + remotePeer, + protocol: cryptoProtocol + } = await this._encryptOutbound(protectedConn, remotePeerId)); + const maConn = { + ...protectedConn, + ...encryptedConn + }; + await this.shouldBlockConnection(remotePeer, maConn, 'denyOutboundEncryptedConnection'); + } + else { + if (remotePeerId == null) { + throw new CodeError$2('Encryption was skipped but no peer id was passed', codes.ERR_INVALID_PEER); + } + cryptoProtocol = 'native'; + remotePeer = remotePeerId; + } + upgradedConn = encryptedConn; + if (opts?.muxerFactory != null) { + muxerFactory = opts.muxerFactory; + } + else if (this.muxers.size > 0) { + // Multiplex the connection + const multiplexed = await this._multiplexOutbound({ + ...protectedConn, + ...encryptedConn + }, this.muxers); + muxerFactory = multiplexed.muxerFactory; + upgradedConn = multiplexed.stream; + } + } + catch (err) { + maConn.log.error('failed to upgrade 
outbound connection', err); + await maConn.close(err); + throw err; + } + await this.shouldBlockConnection(remotePeer, maConn, 'denyOutboundUpgradedConnection'); + maConn.log('successfully upgraded outbound connection'); + return this._createConnection({ + cryptoProtocol, + direction: 'outbound', + maConn, + upgradedConn, + muxerFactory, + remotePeer, + transient: opts?.transient + }); + } + /** + * A convenience method for generating a new `Connection` + */ + _createConnection(opts) { + const { cryptoProtocol, direction, maConn, upgradedConn, remotePeer, muxerFactory, transient } = opts; + let muxer; + let newStream; + let connection; // eslint-disable-line prefer-const + if (muxerFactory != null) { + // Create the muxer + muxer = muxerFactory.createStreamMuxer({ + direction, + // Run anytime a remote stream is created + onIncomingStream: muxedStream => { + if (connection == null) { + return; + } + void Promise.resolve() + .then(async () => { + const protocols = this.components.registrar.getProtocols(); + const { stream, protocol } = await handle(muxedStream, protocols, { + log: muxedStream.log, + yieldBytes: false + }); + if (connection == null) { + return; + } + connection.log('incoming stream opened on %s', protocol); + const incomingLimit = findIncomingStreamLimit(protocol, this.components.registrar); + const streamCount = countStreams(protocol, 'inbound', connection); + if (streamCount === incomingLimit) { + const err = new CodeError$2(`Too many inbound protocol streams for protocol "${protocol}" - limit ${incomingLimit}`, codes.ERR_TOO_MANY_INBOUND_PROTOCOL_STREAMS); + muxedStream.abort(err); + throw err; + } + // after the handshake the returned stream can have early data so override + // the souce/sink + muxedStream.source = stream.source; + muxedStream.sink = stream.sink; + muxedStream.protocol = protocol; + // allow closing the write end of a not-yet-negotiated stream + if (stream.closeWrite != null) { + muxedStream.closeWrite = stream.closeWrite; + } + 
// allow closing the read end of a not-yet-negotiated stream + if (stream.closeRead != null) { + muxedStream.closeRead = stream.closeRead; + } + // make sure we don't try to negotiate a stream we are closing + if (stream.close != null) { + muxedStream.close = stream.close; + } + // If a protocol stream has been successfully negotiated and is to be passed to the application, + // the peerstore should ensure that the peer is registered with that protocol + await this.components.peerStore.merge(remotePeer, { + protocols: [protocol] + }); + this.components.metrics?.trackProtocolStream(muxedStream, connection); + this._onStream({ connection, stream: muxedStream, protocol }); + }) + .catch(async (err) => { + connection.log.error('error handling incoming stream id %s', muxedStream.id, err.message, err.code, err.stack); + if (muxedStream.timeline.close == null) { + await muxedStream.close(); + } + }); + } + }); + newStream = async (protocols, options = {}) => { + if (muxer == null) { + throw new CodeError$2('Stream is not multiplexed', codes.ERR_MUXER_UNAVAILABLE); + } + connection.log('starting new stream for protocols %s', protocols); + const muxedStream = await muxer.newStream(); + connection.log.trace('started new stream %s for protocols %s', muxedStream.id, protocols); + try { + if (options.signal == null) { + muxedStream.log('no abort signal was passed while trying to negotiate protocols %s falling back to default timeout', protocols); + const signal = AbortSignal.timeout(DEFAULT_PROTOCOL_SELECT_TIMEOUT); + setMaxListeners(Infinity, signal); + options = { + ...options, + signal + }; + } + muxedStream.log.trace('selecting protocol from protocols %s', protocols); + const { stream, protocol } = await select(muxedStream, protocols, { + ...options, + log: muxedStream.log, + yieldBytes: true + }); + muxedStream.log('selected protocol %s', protocol); + const outgoingLimit = findOutgoingStreamLimit(protocol, this.components.registrar, options); + const streamCount = 
countStreams(protocol, 'outbound', connection); + if (streamCount >= outgoingLimit) { + const err = new CodeError$2(`Too many outbound protocol streams for protocol "${protocol}" - ${streamCount}/${outgoingLimit}`, codes.ERR_TOO_MANY_OUTBOUND_PROTOCOL_STREAMS); + muxedStream.abort(err); + throw err; + } + // If a protocol stream has been successfully negotiated and is to be passed to the application, + // the peerstore should ensure that the peer is registered with that protocol + await this.components.peerStore.merge(remotePeer, { + protocols: [protocol] + }); + // after the handshake the returned stream can have early data so override + // the souce/sink + muxedStream.source = stream.source; + muxedStream.sink = stream.sink; + muxedStream.protocol = protocol; + // allow closing the write end of a not-yet-negotiated stream + if (stream.closeWrite != null) { + muxedStream.closeWrite = stream.closeWrite; + } + // allow closing the read end of a not-yet-negotiated stream + if (stream.closeRead != null) { + muxedStream.closeRead = stream.closeRead; + } + // make sure we don't try to negotiate a stream we are closing + if (stream.close != null) { + muxedStream.close = stream.close; + } + this.components.metrics?.trackProtocolStream(muxedStream, connection); + return muxedStream; + } + catch (err) { + connection.log.error('could not create new stream for protocols %s', protocols, err); + if (muxedStream.timeline.close == null) { + muxedStream.abort(err); + } + if (err.code != null) { + throw err; + } + throw new CodeError$2(String(err), codes.ERR_UNSUPPORTED_PROTOCOL); + } + }; + // Pipe all data through the muxer + void Promise.all([ + muxer.sink(upgradedConn.source), + upgradedConn.sink(muxer.source) + ]).catch(err => { + connection.log.error('error piping data through muxer', err); + }); + } + const _timeline = maConn.timeline; + maConn.timeline = new Proxy(_timeline, { + set: (...args) => { + if (connection != null && args[1] === 'close' && args[2] != null && 
_timeline.close == null) { + // Wait for close to finish before notifying of the closure + (async () => { + try { + if (connection.status === 'open') { + await connection.close(); + } + } + catch (err) { + connection.log.error('error closing connection after timeline close', err); + } + finally { + this.events.safeDispatchEvent('connection:close', { + detail: connection + }); + } + })().catch(err => { + connection.log.error('error thrown while dispatching connection:close event', err); + }); + } + return Reflect.set(...args); + } + }); + maConn.timeline.upgraded = Date.now(); + const errConnectionNotMultiplexed = () => { + throw new CodeError$2('connection is not multiplexed', codes.ERR_CONNECTION_NOT_MULTIPLEXED); + }; + // Create the connection + connection = createConnection({ + remoteAddr: maConn.remoteAddr, + remotePeer, + status: 'open', + direction, + timeline: maConn.timeline, + multiplexer: muxer?.protocol, + encryption: cryptoProtocol, + transient, + logger: this.components.logger, + newStream: newStream ?? 
errConnectionNotMultiplexed, + getStreams: () => { if (muxer != null) { + return muxer.streams; + } + else { + return []; + } }, + close: async (options) => { + // Ensure remaining streams are closed gracefully + if (muxer != null) { + connection.log.trace('close muxer'); + await muxer.close(options); + } + connection.log.trace('close maconn'); + // close the underlying transport + await maConn.close(options); + connection.log.trace('closed maconn'); + }, + abort: (err) => { + maConn.abort(err); + // Ensure remaining streams are aborted + if (muxer != null) { + muxer.abort(err); + } + } + }); + this.events.safeDispatchEvent('connection:open', { + detail: connection + }); + return connection; + } + /** + * Routes incoming streams to the correct handler + */ + _onStream(opts) { + const { connection, stream, protocol } = opts; + const { handler, options } = this.components.registrar.getHandler(protocol); + if (connection.transient && options.runOnTransientConnection !== true) { + throw new CodeError$2('Cannot open protocol stream on transient connection', 'ERR_TRANSIENT_CONNECTION'); + } + handler({ connection, stream }); + } + /** + * Attempts to encrypt the incoming `connection` with the provided `cryptos` + */ + async _encryptInbound(connection) { + const protocols = Array.from(this.connectionEncryption.keys()); + connection.log('handling inbound crypto protocol selection', protocols); + try { + const { stream, protocol } = await handle(connection, protocols, { + log: connection.log + }); + const encrypter = this.connectionEncryption.get(protocol); + if (encrypter == null) { + throw new Error(`no crypto module found for ${protocol}`); + } + connection.log('encrypting inbound connection using', protocol); + return { + ...await encrypter.secureInbound(this.components.peerId, stream), + protocol + }; + } + catch (err) { + connection.log.error('encrypting inbound connection failed', err); + throw new CodeError$2(err.message, codes.ERR_ENCRYPTION_FAILED); + } + } + /** 
+ * Attempts to encrypt the given `connection` with the provided connection encrypters. + * The first `ConnectionEncrypter` module to succeed will be used + */ + async _encryptOutbound(connection, remotePeerId) { + const protocols = Array.from(this.connectionEncryption.keys()); + connection.log('selecting outbound crypto protocol', protocols); + try { + connection.log.trace('selecting encrypter from %s', protocols); + const { stream, protocol } = await select(connection, protocols, { + log: connection.log, + yieldBytes: true + }); + const encrypter = this.connectionEncryption.get(protocol); + if (encrypter == null) { + throw new Error(`no crypto module found for ${protocol}`); + } + connection.log('encrypting outbound connection to %p using %s', remotePeerId, encrypter); + return { + ...await encrypter.secureOutbound(this.components.peerId, stream, remotePeerId), + protocol + }; + } + catch (err) { + connection.log.error('encrypting outbound connection to %p failed', remotePeerId, err); + throw new CodeError$2(err.message, codes.ERR_ENCRYPTION_FAILED); + } + } + /** + * Selects one of the given muxers via multistream-select. That + * muxer will be used for all future streams on the connection. + */ + async _multiplexOutbound(connection, muxers) { + const protocols = Array.from(muxers.keys()); + connection.log('outbound selecting muxer %s', protocols); + try { + connection.log.trace('selecting stream muxer from %s', protocols); + const { stream, protocol } = await select(connection, protocols, { + log: connection.log, + yieldBytes: true + }); + connection.log('selected %s as muxer protocol', protocol); + const muxerFactory = muxers.get(protocol); + return { stream, muxerFactory }; + } + catch (err) { + connection.log.error('error multiplexing outbound connection', err); + throw new CodeError$2(String(err), codes.ERR_MUXER_UNAVAILABLE); + } + } + /** + * Registers support for one of the given muxers via multistream-select. 
The + * selected muxer will be used for all future streams on the connection. + */ + async _multiplexInbound(connection, muxers) { + const protocols = Array.from(muxers.keys()); + connection.log('inbound handling muxers %s', protocols); + try { + const { stream, protocol } = await handle(connection, protocols, { + log: connection.log + }); + const muxerFactory = muxers.get(protocol); + return { stream, muxerFactory }; + } + catch (err) { + connection.log.error('error multiplexing inbound connection', err); + throw new CodeError$2(String(err), codes.ERR_MUXER_UNAVAILABLE); + } + } + } + + const version = '1.6.0'; + const name = 'libp2p'; + + class Libp2pNode extends TypedEventEmitter { + peerId; + peerStore; + contentRouting; + peerRouting; + metrics; + services; + logger; + status; + components; + log; + constructor(init) { + super(); + this.status = 'stopped'; + // event bus - components can listen to this emitter to be notified of system events + // and also cause them to be emitted + const events = new TypedEventEmitter(); + const originalDispatch = events.dispatchEvent.bind(events); + events.dispatchEvent = (evt) => { + const internalResult = originalDispatch(evt); + const externalResult = this.dispatchEvent(new CustomEvent$1(evt.type, { detail: evt.detail })); + return internalResult || externalResult; + }; + // This emitter gets listened to a lot + setMaxListeners(Infinity, events); + this.peerId = init.peerId; + this.logger = init.logger ?? defaultLogger(); + this.log = this.logger.forComponent('libp2p'); + // @ts-expect-error {} may not be of type T + this.services = {}; + const components = this.components = defaultComponents({ + peerId: init.peerId, + privateKey: init.privateKey, + nodeInfo: init.nodeInfo ?? { + name: name, + version: version + }, + logger: this.logger, + events, + datastore: init.datastore ?? 
new MemoryDatastore(), + connectionGater: connectionGater(init.connectionGater), + dns: init.dns + }); + this.peerStore = this.configureComponent('peerStore', new PersistentPeerStore(components, { + addressFilter: this.components.connectionGater.filterMultiaddrForPeer, + ...init.peerStore + })); + // Create Metrics + if (init.metrics != null) { + this.metrics = this.configureComponent('metrics', init.metrics(this.components)); + } + components.events.addEventListener('peer:update', evt => { + // if there was no peer previously in the peer store this is a new peer + if (evt.detail.previous == null) { + const peerInfo = { + id: evt.detail.peer.id, + multiaddrs: evt.detail.peer.addresses.map(a => a.multiaddr) + }; + components.events.safeDispatchEvent('peer:discovery', { detail: peerInfo }); + } + }); + // Set up connection protector if configured + if (init.connectionProtector != null) { + this.configureComponent('connectionProtector', init.connectionProtector(components)); + } + // Set up the Upgrader + this.components.upgrader = new DefaultUpgrader(this.components, { + connectionEncryption: (init.connectionEncryption ?? []).map((fn, index) => this.configureComponent(`connection-encryption-${index}`, fn(this.components))), + muxers: (init.streamMuxers ?? 
[]).map((fn, index) => this.configureComponent(`stream-muxers-${index}`, fn(this.components))), + inboundUpgradeTimeout: init.connectionManager.inboundUpgradeTimeout + }); + // Setup the transport manager + this.configureComponent('transportManager', new DefaultTransportManager(this.components, init.transportManager)); + // Create the Connection Manager + this.configureComponent('connectionManager', new DefaultConnectionManager(this.components, init.connectionManager)); + // Create the Registrar + this.configureComponent('registrar', new DefaultRegistrar(this.components)); + // Addresses {listen, announce, noAnnounce} + this.configureComponent('addressManager', new DefaultAddressManager(this.components, init.addresses)); + // Peer routers + const peerRouters = (init.peerRouters ?? []).map((fn, index) => this.configureComponent(`peer-router-${index}`, fn(this.components))); + this.peerRouting = this.components.peerRouting = this.configureComponent('peerRouting', new DefaultPeerRouting(this.components, { + routers: peerRouters + })); + // Content routers + const contentRouters = (init.contentRouters ?? []).map((fn, index) => this.configureComponent(`content-router-${index}`, fn(this.components))); + this.contentRouting = this.components.contentRouting = this.configureComponent('contentRouting', new CompoundContentRouting(this.components, { + routers: contentRouters + })); + // Random walk + this.configureComponent('randomWalk', new RandomWalk(this.components)); + (init.peerDiscovery ?? 
[]).forEach((fn, index) => { + const service = this.configureComponent(`peer-discovery-${index}`, fn(this.components)); + service.addEventListener('peer', (evt) => { + this.#onDiscoveryPeer(evt); + }); + }); + // Transport modules + init.transports?.forEach((fn, index) => { + this.components.transportManager.add(this.configureComponent(`transport-${index}`, fn(this.components))); + }); + // User defined modules + if (init.services != null) { + for (const name of Object.keys(init.services)) { + const createService = init.services[name]; + const service = createService(this.components); + if (service == null) { + this.log.error('service factory %s returned null or undefined instance', name); + continue; + } + this.services[name] = service; + this.configureComponent(name, service); + if (service[contentRoutingSymbol] != null) { + this.log('registering service %s for content routing', name); + contentRouters.push(service[contentRoutingSymbol]); + } + if (service[peerRoutingSymbol] != null) { + this.log('registering service %s for peer routing', name); + peerRouters.push(service[peerRoutingSymbol]); + } + if (service[peerDiscoverySymbol] != null) { + this.log('registering service %s for peer discovery', name); + service[peerDiscoverySymbol].addEventListener?.('peer', (evt) => { + this.#onDiscoveryPeer(evt); + }); + } + } + } + } + configureComponent(name, component) { + if (component == null) { + this.log.error('component %s was null or undefined', name); + } + this.components[name] = component; + return component; + } + /** + * Starts the libp2p node and all its subsystems + */ + async start() { + if (this.status !== 'stopped') { + return; + } + this.status = 'starting'; + this.log('libp2p is starting'); + try { + await this.components.beforeStart?.(); + await this.components.start(); + await this.components.afterStart?.(); + this.status = 'started'; + this.safeDispatchEvent('start', { detail: this }); + this.log('libp2p has started'); + } + catch (err) { + 
this.log.error('An error occurred starting libp2p', err); + // set status to 'started' so this.stop() will stop any running components + this.status = 'started'; + await this.stop(); + throw err; + } + } + /** + * Stop the libp2p node by closing its listeners and open connections + */ + async stop() { + if (this.status !== 'started') { + return; + } + this.log('libp2p is stopping'); + this.status = 'stopping'; + await this.components.beforeStop?.(); + await this.components.stop(); + await this.components.afterStop?.(); + this.status = 'stopped'; + this.safeDispatchEvent('stop', { detail: this }); + this.log('libp2p has stopped'); + } + getConnections(peerId) { + return this.components.connectionManager.getConnections(peerId); + } + getDialQueue() { + return this.components.connectionManager.getDialQueue(); + } + getPeers() { + const peerSet = new PeerSet(); + for (const conn of this.components.connectionManager.getConnections()) { + peerSet.add(conn.remotePeer); + } + return Array.from(peerSet); + } + async dial(peer, options = {}) { + return this.components.connectionManager.openConnection(peer, { + // ensure any userland dials take top priority in the queue + priority: 75, + ...options + }); + } + async dialProtocol(peer, protocols, options = {}) { + if (protocols == null) { + throw new CodeError$2('no protocols were provided to open a stream', codes.ERR_INVALID_PROTOCOLS_FOR_STREAM); + } + protocols = Array.isArray(protocols) ? protocols : [protocols]; + if (protocols.length === 0) { + throw new CodeError$2('no protocols were provided to open a stream', codes.ERR_INVALID_PROTOCOLS_FOR_STREAM); + } + const connection = await this.dial(peer, options); + return connection.newStream(protocols, options); + } + getMultiaddrs() { + return this.components.addressManager.getAddresses(); + } + getProtocols() { + return this.components.registrar.getProtocols(); + } + async hangUp(peer, options = {}) { + if (isMultiaddr(peer)) { + peer = peerIdFromString(peer.getPeerId() ?? 
''); + } + await this.components.connectionManager.closeConnections(peer, options); + } + /** + * Get the public key for the given peer id + */ + async getPublicKey(peer, options = {}) { + this.log('getPublicKey %p', peer); + if (peer.publicKey != null) { + return peer.publicKey; + } + try { + const peerInfo = await this.peerStore.get(peer); + if (peerInfo.id.publicKey != null) { + return peerInfo.id.publicKey; + } + } + catch (err) { + if (err.code !== codes.ERR_NOT_FOUND) { + throw err; + } + } + const peerKey = concat$1([ + fromString('/pk/'), + peer.multihash.digest + ]); + // search any available content routing methods + const bytes = await this.contentRouting.get(peerKey, options); + // ensure the returned key is valid + unmarshalPublicKey(bytes); + await this.peerStore.patch(peer, { + publicKey: bytes + }); + return bytes; + } + async handle(protocols, handler, options) { + if (!Array.isArray(protocols)) { + protocols = [protocols]; + } + await Promise.all(protocols.map(async (protocol) => { + await this.components.registrar.handle(protocol, handler, options); + })); + } + async unhandle(protocols) { + if (!Array.isArray(protocols)) { + protocols = [protocols]; + } + await Promise.all(protocols.map(async (protocol) => { + await this.components.registrar.unhandle(protocol); + })); + } + async register(protocol, topology) { + return this.components.registrar.register(protocol, topology); + } + unregister(id) { + this.components.registrar.unregister(id); + } + async isDialable(multiaddr, options = {}) { + return this.components.connectionManager.isDialable(multiaddr, options); + } + /** + * Called whenever peer discovery services emit `peer` events and adds peers + * to the peer store. 
+ */ + #onDiscoveryPeer(evt) { + const { detail: peer } = evt; + if (peer.id.toString() === this.peerId.toString()) { + this.log.error(new Error(codes.ERR_DISCOVERED_SELF)); + return; + } + void this.components.peerStore.merge(peer.id, { + multiaddrs: peer.multiaddrs + }) + .catch(err => { this.log.error(err); }); + } + } + /** + * Returns a new Libp2pNode instance - this exposes more of the internals than the + * libp2p interface and is useful for testing and debugging. + */ + async function createLibp2pNode(options = {}) { + const peerId = options.peerId ??= await createEd25519PeerId(); + if (peerId.privateKey == null) { + throw new CodeError$2('peer id was missing private key', 'ERR_MISSING_PRIVATE_KEY'); + } + options.privateKey ??= await unmarshalPrivateKey(peerId.privateKey); + return new Libp2pNode(await validateConfig(options)); + } + + /** + * @packageDocumentation + * + * Use the `createLibp2p` function to create a libp2p node. + * + * @example + * + * ```typescript + * import { createLibp2p } from 'libp2p' + * + * const node = await createLibp2p({ + * // ...other options + * }) + * ``` + */ + /** + * Returns a new instance of the Libp2p interface, generating a new PeerId + * if one is not passed as part of the options. + * + * The node will be started unless `start: false` is passed as an option. 
+ * + * @example + * + * ```TypeScript + * import { createLibp2p } from 'libp2p' + * import { tcp } from '@libp2p/tcp' + * import { mplex } from '@libp2p/mplex' + * import { noise } from '@chainsafe/libp2p-noise' + * import { yamux } from '@chainsafe/libp2p-yamux' + * + * // specify options + * const options = { + * transports: [tcp()], + * streamMuxers: [yamux(), mplex()], + * connectionEncryption: [noise()] + * } + * + * // create libp2p + * const libp2p = await createLibp2p(options) + * ``` + */ + async function createLibp2p(options = {}) { + const node = await createLibp2pNode(options); + if (options.start !== false) { + await node.start(); + } + return node; + } + + const SHARDING_FN = 'SHARDING'; + + /** + * @packageDocumentation + * + * Convert one value from an (async)iterator into another. + * + * @example + * + * ```javascript + * import map from 'it-map' + * + * // This can also be an iterator, generator, etc + * const values = [0, 1, 2, 3, 4] + * + * const result = map(values, (val, index) => val++) + * + * console.info(result) // [1, 2, 3, 4, 5] + * ``` + * + * Async sources and transforms must be awaited: + * + * ```javascript + * import map from 'it-map' + * + * const values = async function * () { + * yield * [0, 1, 2, 3, 4] + * } + * + * const result = await map(values(), async (val, index) => val++) + * + * console.info(result) // [1, 2, 3, 4, 5] + * ``` + */ + function isAsyncIterable$3(thing) { + return thing[Symbol.asyncIterator] != null; + } + function map(source, func) { + let index = 0; + if (isAsyncIterable$3(source)) { + return (async function* () { + for await (const val of source) { + yield func(val, index++); + } + })(); + } + // if mapping function returns a promise we have to return an async generator + const peekable$1 = peekable(source); + const { value, done } = peekable$1.next(); + if (done === true) { + return (function* () { }()); + } + const res = func(value, index++); + // @ts-expect-error .then is not present on O + if 
(typeof res.then === 'function') { + return (async function* () { + yield await res; + for await (const val of peekable$1) { + yield func(val, index++); + } + })(); + } + const fn = func; + return (function* () { + yield res; + for (const val of peekable$1) { + yield fn(val, index++); + } + })(); + } + + function pipe(first, ...rest) { + if (first == null) { + throw new Error('Empty pipeline'); + } + // Duplex at start: wrap in function and return duplex source + if (isDuplex(first)) { + const duplex = first; + first = () => duplex.source; + // Iterable at start: wrap in function + } + else if (isIterable(first) || isAsyncIterable$2(first)) { + const source = first; + first = () => source; + } + const fns = [first, ...rest]; + if (fns.length > 1) { + // Duplex at end: use duplex sink + if (isDuplex(fns[fns.length - 1])) { + fns[fns.length - 1] = fns[fns.length - 1].sink; + } + } + if (fns.length > 2) { + // Duplex in the middle, consume source with duplex sink and return duplex source + for (let i = 1; i < fns.length - 1; i++) { + if (isDuplex(fns[i])) { + fns[i] = duplexPipelineFn(fns[i]); + } + } + } + return rawPipe(...fns); + } + const rawPipe = (...fns) => { + let res; + while (fns.length > 0) { + res = fns.shift()(res); + } + return res; + }; + const isAsyncIterable$2 = (obj) => { + return obj?.[Symbol.asyncIterator] != null; + }; + const isIterable = (obj) => { + return obj?.[Symbol.iterator] != null; + }; + const isDuplex = (obj) => { + if (obj == null) { + return false; + } + return obj.sink != null && obj.source != null; + }; + const duplexPipelineFn = (duplex) => { + return (source) => { + const p = duplex.sink(source); + if (p?.then != null) { + const stream = pushable$1({ + objectMode: true + }); + p.then(() => { + stream.end(); + }, (err) => { + stream.end(err); + }); + let sourceWrap; + const source = duplex.source; + if (isAsyncIterable$2(source)) { + sourceWrap = async function* () { + yield* source; + stream.end(); + }; + } + else if 
(isIterable(source)) { + sourceWrap = function* () { + yield* source; + stream.end(); + }; + } + else { + throw new Error('Unknown duplex source type - must be Iterable or AsyncIterable'); + } + return merge(stream, sourceWrap()); + } + return duplex.source; + }; + }; + + new Key(SHARDING_FN); + + logger('datastore:core:tiered'); + + const instanceOfAny = (object, constructors) => constructors.some((c) => object instanceof c); + + let idbProxyableTypes; + let cursorAdvanceMethods; + // This is a function to prevent it throwing up in node environments. + function getIdbProxyableTypes() { + return (idbProxyableTypes || + (idbProxyableTypes = [ + IDBDatabase, + IDBObjectStore, + IDBIndex, + IDBCursor, + IDBTransaction, + ])); + } + // This is a function to prevent it throwing up in node environments. + function getCursorAdvanceMethods() { + return (cursorAdvanceMethods || + (cursorAdvanceMethods = [ + IDBCursor.prototype.advance, + IDBCursor.prototype.continue, + IDBCursor.prototype.continuePrimaryKey, + ])); + } + const transactionDoneMap = new WeakMap(); + const transformCache = new WeakMap(); + const reverseTransformCache = new WeakMap(); + function promisifyRequest(request) { + const promise = new Promise((resolve, reject) => { + const unlisten = () => { + request.removeEventListener('success', success); + request.removeEventListener('error', error); + }; + const success = () => { + resolve(wrap(request.result)); + unlisten(); + }; + const error = () => { + reject(request.error); + unlisten(); + }; + request.addEventListener('success', success); + request.addEventListener('error', error); + }); + // This mapping exists in reverseTransformCache but doesn't doesn't exist in transformCache. This + // is because we create many promises from a single IDBRequest. + reverseTransformCache.set(promise, request); + return promise; + } + function cacheDonePromiseForTransaction(tx) { + // Early bail if we've already created a done promise for this transaction. 
+ if (transactionDoneMap.has(tx)) + return; + const done = new Promise((resolve, reject) => { + const unlisten = () => { + tx.removeEventListener('complete', complete); + tx.removeEventListener('error', error); + tx.removeEventListener('abort', error); + }; + const complete = () => { + resolve(); + unlisten(); + }; + const error = () => { + reject(tx.error || new DOMException('AbortError', 'AbortError')); + unlisten(); + }; + tx.addEventListener('complete', complete); + tx.addEventListener('error', error); + tx.addEventListener('abort', error); + }); + // Cache it for later retrieval. + transactionDoneMap.set(tx, done); + } + let idbProxyTraps = { + get(target, prop, receiver) { + if (target instanceof IDBTransaction) { + // Special handling for transaction.done. + if (prop === 'done') + return transactionDoneMap.get(target); + // Make tx.store return the only store in the transaction, or undefined if there are many. + if (prop === 'store') { + return receiver.objectStoreNames[1] + ? undefined + : receiver.objectStore(receiver.objectStoreNames[0]); + } + } + // Else transform whatever we get back. + return wrap(target[prop]); + }, + set(target, prop, value) { + target[prop] = value; + return true; + }, + has(target, prop) { + if (target instanceof IDBTransaction && + (prop === 'done' || prop === 'store')) { + return true; + } + return prop in target; + }, + }; + function replaceTraps(callback) { + idbProxyTraps = callback(idbProxyTraps); + } + function wrapFunction(func) { + // Due to expected object equality (which is enforced by the caching in `wrap`), we + // only create one new func per func. + // Cursor methods are special, as the behaviour is a little more different to standard IDB. In + // IDB, you advance the cursor and wait for a new 'success' on the IDBRequest that gave you the + // cursor. It's kinda like a promise that can resolve with many values. 
That doesn't make sense + // with real promises, so each advance methods returns a new promise for the cursor object, or + // undefined if the end of the cursor has been reached. + if (getCursorAdvanceMethods().includes(func)) { + return function (...args) { + // Calling the original function with the proxy as 'this' causes ILLEGAL INVOCATION, so we use + // the original object. + func.apply(unwrap(this), args); + return wrap(this.request); + }; + } + return function (...args) { + // Calling the original function with the proxy as 'this' causes ILLEGAL INVOCATION, so we use + // the original object. + return wrap(func.apply(unwrap(this), args)); + }; + } + function transformCachableValue(value) { + if (typeof value === 'function') + return wrapFunction(value); + // This doesn't return, it just creates a 'done' promise for the transaction, + // which is later returned for transaction.done (see idbObjectHandler). + if (value instanceof IDBTransaction) + cacheDonePromiseForTransaction(value); + if (instanceOfAny(value, getIdbProxyableTypes())) + return new Proxy(value, idbProxyTraps); + // Return the same value back if we're not going to transform it. + return value; + } + function wrap(value) { + // We sometimes generate multiple promises from a single IDBRequest (eg when cursoring), because + // IDB is weird and a single IDBRequest can yield many responses, so these can't be cached. + if (value instanceof IDBRequest) + return promisifyRequest(value); + // If we've already transformed this value before, reuse the transformed value. + // This is faster, but it also provides object equality. + if (transformCache.has(value)) + return transformCache.get(value); + const newValue = transformCachableValue(value); + // Not all types are transformed. + // These may be primitive types, so they can't be WeakMap keys. 
+ if (newValue !== value) { + transformCache.set(value, newValue); + reverseTransformCache.set(newValue, value); + } + return newValue; + } + const unwrap = (value) => reverseTransformCache.get(value); + + /** + * Open a database. + * + * @param name Name of the database. + * @param version Schema version. + * @param callbacks Additional callbacks. + */ + function openDB(name, version, { blocked, upgrade, blocking, terminated } = {}) { + const request = indexedDB.open(name, version); + const openPromise = wrap(request); + if (upgrade) { + request.addEventListener('upgradeneeded', (event) => { + upgrade(wrap(request.result), event.oldVersion, event.newVersion, wrap(request.transaction), event); + }); + } + if (blocked) { + request.addEventListener('blocked', (event) => blocked( + // Casting due to https://github.com/microsoft/TypeScript-DOM-lib-generator/pull/1405 + event.oldVersion, event.newVersion, event)); + } + openPromise + .then((db) => { + if (terminated) + db.addEventListener('close', () => terminated()); + if (blocking) { + db.addEventListener('versionchange', (event) => blocking(event.oldVersion, event.newVersion, event)); + } + }) + .catch(() => { }); + return openPromise; + } + /** + * Delete a database. + * + * @param name Name of the database. 
+ */ + function deleteDB(name, { blocked } = {}) { + const request = indexedDB.deleteDatabase(name); + if (blocked) { + request.addEventListener('blocked', (event) => blocked( + // Casting due to https://github.com/microsoft/TypeScript-DOM-lib-generator/pull/1405 + event.oldVersion, event)); + } + return wrap(request).then(() => undefined); + } + + const readMethods = ['get', 'getKey', 'getAll', 'getAllKeys', 'count']; + const writeMethods = ['put', 'add', 'delete', 'clear']; + const cachedMethods = new Map(); + function getMethod(target, prop) { + if (!(target instanceof IDBDatabase && + !(prop in target) && + typeof prop === 'string')) { + return; + } + if (cachedMethods.get(prop)) + return cachedMethods.get(prop); + const targetFuncName = prop.replace(/FromIndex$/, ''); + const useIndex = prop !== targetFuncName; + const isWrite = writeMethods.includes(targetFuncName); + if ( + // Bail if the target doesn't exist on the target. Eg, getAll isn't in Edge. + !(targetFuncName in (useIndex ? IDBIndex : IDBObjectStore).prototype) || + !(isWrite || readMethods.includes(targetFuncName))) { + return; + } + const method = async function (storeName, ...args) { + // isWrite ? 'readwrite' : undefined gzipps better, but fails in Edge :( + const tx = this.transaction(storeName, isWrite ? 'readwrite' : 'readonly'); + let target = tx.store; + if (useIndex) + target = target.index(args.shift()); + // Must reject if op rejects. + // If it's a write operation, must reject if tx.done rejects. + // Must reject with op rejection first. + // Must resolve with op value. 
+ // Must handle both promises (no unhandled rejections) + return (await Promise.all([ + target[targetFuncName](...args), + isWrite && tx.done, + ]))[0]; + }; + cachedMethods.set(prop, method); + return method; + } + replaceTraps((oldTraps) => ({ + ...oldTraps, + get: (target, prop, receiver) => getMethod(target, prop) || oldTraps.get(target, prop, receiver), + has: (target, prop) => !!getMethod(target, prop) || oldTraps.has(target, prop), + })); + + const advanceMethodProps = ['continue', 'continuePrimaryKey', 'advance']; + const methodMap = {}; + const advanceResults = new WeakMap(); + const ittrProxiedCursorToOriginalProxy = new WeakMap(); + const cursorIteratorTraps = { + get(target, prop) { + if (!advanceMethodProps.includes(prop)) + return target[prop]; + let cachedFunc = methodMap[prop]; + if (!cachedFunc) { + cachedFunc = methodMap[prop] = function (...args) { + advanceResults.set(this, ittrProxiedCursorToOriginalProxy.get(this)[prop](...args)); + }; + } + return cachedFunc; + }, + }; + async function* iterate(...args) { + // tslint:disable-next-line:no-this-assignment + let cursor = this; + if (!(cursor instanceof IDBCursor)) { + cursor = await cursor.openCursor(...args); + } + if (!cursor) + return; + cursor = cursor; + const proxiedCursor = new Proxy(cursor, cursorIteratorTraps); + ittrProxiedCursorToOriginalProxy.set(proxiedCursor, cursor); + // Map this double-proxy back to the original, so other cursor methods work. + reverseTransformCache.set(proxiedCursor, unwrap(cursor)); + while (cursor) { + yield proxiedCursor; + // If one of the advancing methods was not called, call continue(). 
+ cursor = await (advanceResults.get(proxiedCursor) || cursor.continue()); + advanceResults.delete(proxiedCursor); + } + } + function isIteratorProp(target, prop) { + return ((prop === Symbol.asyncIterator && + instanceOfAny(target, [IDBIndex, IDBObjectStore, IDBCursor])) || + (prop === 'iterate' && instanceOfAny(target, [IDBIndex, IDBObjectStore]))); + } + replaceTraps((oldTraps) => ({ + ...oldTraps, + get(target, prop, receiver) { + if (isIteratorProp(target, prop)) + return iterate; + return oldTraps.get(target, prop, receiver); + }, + has(target, prop) { + return isIteratorProp(target, prop) || oldTraps.has(target, prop); + }, + })); + + /** + * @packageDocumentation + * + * A Datastore implementation for browsers that stores data in [IndexedDB](https://developer.mozilla.org/en-US/docs/Web/API/IndexedDB_API). + * + * @example + * + * ```js + * import { IDBDatastore } from 'datastore-idb' + * + * const store = new IDBDatastore('path/to/store') + * ``` + */ + class IDBDatastore extends BaseDatastore { + location; + version; + db; + constructor(location, init = {}) { + super(); + this.location = `${init.prefix ?? ''}${location}`; + this.version = init.version ?? 
1; + } + async open() { + try { + const location = this.location; + this.db = await openDB(location, this.version, { + upgrade(db) { + db.createObjectStore(location); + } + }); + } + catch (err) { + throw dbOpenFailedError(err); + } + } + async close() { + this.db?.close(); + } + async put(key, val) { + if (this.db == null) { + throw new Error('Datastore needs to be opened.'); + } + try { + await this.db.put(this.location, val, key.toString()); + return key; + } + catch (err) { + throw dbWriteFailedError(err); + } + } + async get(key) { + if (this.db == null) { + throw new Error('Datastore needs to be opened.'); + } + let val; + try { + val = await this.db.get(this.location, key.toString()); + } + catch (err) { + throw dbReadFailedError(err); + } + if (val === undefined) { + throw notFoundError(); + } + return val; + } + async has(key) { + if (this.db == null) { + throw new Error('Datastore needs to be opened.'); + } + try { + return Boolean(await this.db.getKey(this.location, key.toString())); + } + catch (err) { + throw dbReadFailedError(err); + } + } + async delete(key) { + if (this.db == null) { + throw new Error('Datastore needs to be opened.'); + } + try { + await this.db.delete(this.location, key.toString()); + } + catch (err) { + throw dbDeleteFailedError(err); + } + } + batch() { + const puts = []; + const dels = []; + return { + put(key, value) { + puts.push({ key, value }); + }, + delete(key) { + dels.push(key); + }, + commit: async () => { + if (this.db == null) { + throw new Error('Datastore needs to be opened.'); + } + const tx = this.db.transaction(this.location, 'readwrite'); + try { + const ops = puts.filter(({ key }) => { + // don't put a key we are about to delete + return dels.find(delKey => delKey.toString() === key.toString()) == null; + }) + .map(put => { + return async () => { + await tx.store.put(put.value, put.key.toString()); + }; + }) + .concat(dels.map(key => { + return async () => { + await tx.store.delete(key.toString()); + }; + })) + 
.concat(async () => { + await tx.done; + }); + await Promise.all(ops.map(async (op) => { await op(); })); + } + catch { + tx.abort(); + } + } + }; + } + async *query(q) { + let it = this.#queryIt(q, (key, value) => { + return { key, value }; + }); + if (Array.isArray(q.filters)) { + it = q.filters.reduce((it, f) => filter(it, f), it); + } + if (Array.isArray(q.orders)) { + it = q.orders.reduce((it, f) => sort(it, f), it); + } + yield* it; + } + async *queryKeys(q) { + let it = this.#queryIt(q, (key) => key); + if (Array.isArray(q.filters)) { + it = q.filters.reduce((it, f) => filter(it, f), it); + } + if (Array.isArray(q.orders)) { + it = q.orders.reduce((it, f) => sort(it, f), it); + } + yield* it; + } + async *#queryIt(q, transform) { + if (this.db == null) { + throw new Error('Datastore needs to be opened.'); + } + let yielded = 0; + let index = -1; + for (const key of await this.db.getAllKeys(this.location)) { + if (q.prefix != null && !key.toString().startsWith(q.prefix)) { // eslint-disable-line @typescript-eslint/no-base-to-string + continue; + } + if (q.limit != null && yielded === q.limit) { + return; + } + index++; + if (q.offset != null && index < q.offset) { + continue; + } + const k = new Key(key.toString()); // eslint-disable-line @typescript-eslint/no-base-to-string + let value; + try { + value = await this.get(k); + } + catch (err) { + if (err.code !== 'ERR_NOT_FOUND') { + throw err; + } + continue; + } + if (value == null) { + continue; + } + yield transform(k, value); + yielded++; + } + } + async destroy() { + await deleteDB(this.location); + } + } + + /** + * A pair of streams where one drains from the other + */ + function pair() { + const deferred = pDefer(); + let piped = false; + return { + sink: async (source) => { + if (piped) { + throw new Error('already piped'); + } + piped = true; + deferred.resolve(source); + }, + source: (async function* () { + const source = await deferred.promise; + yield* source; + }()) + }; + } + + /** + * Two 
duplex streams that are attached to each other + */ + function duplexPair() { + const a = pair(); + const b = pair(); + return [ + { + source: a.source, + sink: b.sink + }, + { + source: b.source, + sink: a.sink + } + ]; + } + + const NOISE_MSG_MAX_LENGTH_BYTES = 65535; + const NOISE_MSG_MAX_LENGTH_BYTES_WITHOUT_TAG = NOISE_MSG_MAX_LENGTH_BYTES - 16; + const DUMP_SESSION_KEYS = Boolean(globalThis.process?.env?.DUMP_SESSION_KEYS); + + /*! noble-ciphers - MIT License (c) 2023 Paul Miller (paulmillr.com) */ + // Cast array to different type + const u32 = (arr) => new Uint32Array(arr.buffer, arr.byteOffset, Math.floor(arr.byteLength / 4)); + function isBytes$1(a) { + return (a instanceof Uint8Array || + (a != null && typeof a === 'object' && a.constructor.name === 'Uint8Array')); + } + // Cast array to view + const createView = (arr) => new DataView(arr.buffer, arr.byteOffset, arr.byteLength); + // big-endian hardware is rare. Just in case someone still decides to run ciphers: + // early-throw an error because we don't support BE yet. + const isLE = new Uint8Array(new Uint32Array([0x11223344]).buffer)[0] === 0x44; + if (!isLE) + throw new Error('Non little-endian hardware is not supported'); + /** + * @example utf8ToBytes('abc') // new Uint8Array([97, 98, 99]) + */ + function utf8ToBytes(str) { + if (typeof str !== 'string') + throw new Error(`utf8ToBytes expected string, got ${typeof str}`); + return new Uint8Array(new TextEncoder().encode(str)); // https://bugzil.la/1681809 + } + /** + * Normalizes (non-hex) string or Uint8Array to Uint8Array. + * Warning: when Uint8Array is passed, it would NOT get copied. + * Keep in mind for future mutable operations. 
+ */ + function toBytes(data) { + if (typeof data === 'string') + data = utf8ToBytes(data); + else if (isBytes$1(data)) + data = data.slice(); + else + throw new Error(`expected Uint8Array, got ${typeof data}`); + return data; + } + // Check if object doens't have custom constructor (like Uint8Array/Array) + const isPlainObject = (obj) => Object.prototype.toString.call(obj) === '[object Object]' && obj.constructor === Object; + function checkOpts(defaults, opts) { + if (opts !== undefined && (typeof opts !== 'object' || !isPlainObject(opts))) + throw new Error('options must be object or undefined'); + const merged = Object.assign(defaults, opts); + return merged; + } + function ensureBytes(b, len) { + if (!isBytes$1(b)) + throw new Error('Uint8Array expected'); + if (typeof len === 'number') + if (b.length !== len) + throw new Error(`Uint8Array length ${len} expected`); + } + // Compares 2 u8a-s in kinda constant time + function equalBytes(a, b) { + if (a.length !== b.length) + return false; + let diff = 0; + for (let i = 0; i < a.length; i++) + diff |= a[i] ^ b[i]; + return diff === 0; + } + const wrapCipher = (params, c) => { + Object.assign(c, params); + return c; + }; + // Polyfill for Safari 14 + function setBigUint64(view, byteOffset, value, isLE) { + if (typeof view.setBigUint64 === 'function') + return view.setBigUint64(byteOffset, value, isLE); + const _32n = BigInt(32); + const _u32_max = BigInt(0xffffffff); + const wh = Number((value >> _32n) & _u32_max); + const wl = Number(value & _u32_max); + const h = 4 ; + const l = 0 ; + view.setUint32(byteOffset + h, wh, isLE); + view.setUint32(byteOffset + l, wl, isLE); + } + + function number(n) { + if (!Number.isSafeInteger(n) || n < 0) + throw new Error(`wrong positive integer: ${n}`); + } + function bool(b) { + if (typeof b !== 'boolean') + throw new Error(`boolean expected, not ${b}`); + } + // TODO: merge with utils + function isBytes(a) { + return (a != null && + typeof a === 'object' && + (a instanceof 
Uint8Array || a.constructor.name === 'Uint8Array')); + } + function bytes(b, ...lengths) { + if (!isBytes(b)) + throw new Error('Uint8Array expected'); + if (lengths.length > 0 && !lengths.includes(b.length)) + throw new Error(`Uint8Array expected of length ${lengths}, not of length=${b.length}`); + } + function exists(instance, checkFinished = true) { + if (instance.destroyed) + throw new Error('Hash instance has been destroyed'); + if (checkFinished && instance.finished) + throw new Error('Hash#digest() has already been called'); + } + function output(out, instance) { + bytes(out); + const min = instance.outputLen; + if (out.length < min) { + throw new Error(`digestInto() expects output buffer of length at least ${min}`); + } + } + + // Poly1305 is a fast and parallel secret-key message-authentication code. + // https://cr.yp.to/mac.html, https://cr.yp.to/mac/poly1305-20050329.pdf + // https://datatracker.ietf.org/doc/html/rfc8439 + // Based on Public Domain poly1305-donna https://github.com/floodyberry/poly1305-donna + const u8to16 = (a, i) => (a[i++] & 0xff) | ((a[i++] & 0xff) << 8); + class Poly1305 { + constructor(key) { + this.blockLen = 16; + this.outputLen = 16; + this.buffer = new Uint8Array(16); + this.r = new Uint16Array(10); + this.h = new Uint16Array(10); + this.pad = new Uint16Array(8); + this.pos = 0; + this.finished = false; + key = toBytes(key); + ensureBytes(key, 32); + const t0 = u8to16(key, 0); + const t1 = u8to16(key, 2); + const t2 = u8to16(key, 4); + const t3 = u8to16(key, 6); + const t4 = u8to16(key, 8); + const t5 = u8to16(key, 10); + const t6 = u8to16(key, 12); + const t7 = u8to16(key, 14); + // https://github.com/floodyberry/poly1305-donna/blob/e6ad6e091d30d7f4ec2d4f978be1fcfcbce72781/poly1305-donna-16.h#L47 + this.r[0] = t0 & 0x1fff; + this.r[1] = ((t0 >>> 13) | (t1 << 3)) & 0x1fff; + this.r[2] = ((t1 >>> 10) | (t2 << 6)) & 0x1f03; + this.r[3] = ((t2 >>> 7) | (t3 << 9)) & 0x1fff; + this.r[4] = ((t3 >>> 4) | (t4 << 12)) & 0x00ff; + 
this.r[5] = (t4 >>> 1) & 0x1ffe; + this.r[6] = ((t4 >>> 14) | (t5 << 2)) & 0x1fff; + this.r[7] = ((t5 >>> 11) | (t6 << 5)) & 0x1f81; + this.r[8] = ((t6 >>> 8) | (t7 << 8)) & 0x1fff; + this.r[9] = (t7 >>> 5) & 0x007f; + for (let i = 0; i < 8; i++) + this.pad[i] = u8to16(key, 16 + 2 * i); + } + process(data, offset, isLast = false) { + const hibit = isLast ? 0 : 1 << 11; + const { h, r } = this; + const r0 = r[0]; + const r1 = r[1]; + const r2 = r[2]; + const r3 = r[3]; + const r4 = r[4]; + const r5 = r[5]; + const r6 = r[6]; + const r7 = r[7]; + const r8 = r[8]; + const r9 = r[9]; + const t0 = u8to16(data, offset + 0); + const t1 = u8to16(data, offset + 2); + const t2 = u8to16(data, offset + 4); + const t3 = u8to16(data, offset + 6); + const t4 = u8to16(data, offset + 8); + const t5 = u8to16(data, offset + 10); + const t6 = u8to16(data, offset + 12); + const t7 = u8to16(data, offset + 14); + let h0 = h[0] + (t0 & 0x1fff); + let h1 = h[1] + (((t0 >>> 13) | (t1 << 3)) & 0x1fff); + let h2 = h[2] + (((t1 >>> 10) | (t2 << 6)) & 0x1fff); + let h3 = h[3] + (((t2 >>> 7) | (t3 << 9)) & 0x1fff); + let h4 = h[4] + (((t3 >>> 4) | (t4 << 12)) & 0x1fff); + let h5 = h[5] + ((t4 >>> 1) & 0x1fff); + let h6 = h[6] + (((t4 >>> 14) | (t5 << 2)) & 0x1fff); + let h7 = h[7] + (((t5 >>> 11) | (t6 << 5)) & 0x1fff); + let h8 = h[8] + (((t6 >>> 8) | (t7 << 8)) & 0x1fff); + let h9 = h[9] + ((t7 >>> 5) | hibit); + let c = 0; + let d0 = c + h0 * r0 + h1 * (5 * r9) + h2 * (5 * r8) + h3 * (5 * r7) + h4 * (5 * r6); + c = d0 >>> 13; + d0 &= 0x1fff; + d0 += h5 * (5 * r5) + h6 * (5 * r4) + h7 * (5 * r3) + h8 * (5 * r2) + h9 * (5 * r1); + c += d0 >>> 13; + d0 &= 0x1fff; + let d1 = c + h0 * r1 + h1 * r0 + h2 * (5 * r9) + h3 * (5 * r8) + h4 * (5 * r7); + c = d1 >>> 13; + d1 &= 0x1fff; + d1 += h5 * (5 * r6) + h6 * (5 * r5) + h7 * (5 * r4) + h8 * (5 * r3) + h9 * (5 * r2); + c += d1 >>> 13; + d1 &= 0x1fff; + let d2 = c + h0 * r2 + h1 * r1 + h2 * r0 + h3 * (5 * r9) + h4 * (5 * r8); + c = d2 >>> 13; + d2 &= 
0x1fff; + d2 += h5 * (5 * r7) + h6 * (5 * r6) + h7 * (5 * r5) + h8 * (5 * r4) + h9 * (5 * r3); + c += d2 >>> 13; + d2 &= 0x1fff; + let d3 = c + h0 * r3 + h1 * r2 + h2 * r1 + h3 * r0 + h4 * (5 * r9); + c = d3 >>> 13; + d3 &= 0x1fff; + d3 += h5 * (5 * r8) + h6 * (5 * r7) + h7 * (5 * r6) + h8 * (5 * r5) + h9 * (5 * r4); + c += d3 >>> 13; + d3 &= 0x1fff; + let d4 = c + h0 * r4 + h1 * r3 + h2 * r2 + h3 * r1 + h4 * r0; + c = d4 >>> 13; + d4 &= 0x1fff; + d4 += h5 * (5 * r9) + h6 * (5 * r8) + h7 * (5 * r7) + h8 * (5 * r6) + h9 * (5 * r5); + c += d4 >>> 13; + d4 &= 0x1fff; + let d5 = c + h0 * r5 + h1 * r4 + h2 * r3 + h3 * r2 + h4 * r1; + c = d5 >>> 13; + d5 &= 0x1fff; + d5 += h5 * r0 + h6 * (5 * r9) + h7 * (5 * r8) + h8 * (5 * r7) + h9 * (5 * r6); + c += d5 >>> 13; + d5 &= 0x1fff; + let d6 = c + h0 * r6 + h1 * r5 + h2 * r4 + h3 * r3 + h4 * r2; + c = d6 >>> 13; + d6 &= 0x1fff; + d6 += h5 * r1 + h6 * r0 + h7 * (5 * r9) + h8 * (5 * r8) + h9 * (5 * r7); + c += d6 >>> 13; + d6 &= 0x1fff; + let d7 = c + h0 * r7 + h1 * r6 + h2 * r5 + h3 * r4 + h4 * r3; + c = d7 >>> 13; + d7 &= 0x1fff; + d7 += h5 * r2 + h6 * r1 + h7 * r0 + h8 * (5 * r9) + h9 * (5 * r8); + c += d7 >>> 13; + d7 &= 0x1fff; + let d8 = c + h0 * r8 + h1 * r7 + h2 * r6 + h3 * r5 + h4 * r4; + c = d8 >>> 13; + d8 &= 0x1fff; + d8 += h5 * r3 + h6 * r2 + h7 * r1 + h8 * r0 + h9 * (5 * r9); + c += d8 >>> 13; + d8 &= 0x1fff; + let d9 = c + h0 * r9 + h1 * r8 + h2 * r7 + h3 * r6 + h4 * r5; + c = d9 >>> 13; + d9 &= 0x1fff; + d9 += h5 * r4 + h6 * r3 + h7 * r2 + h8 * r1 + h9 * r0; + c += d9 >>> 13; + d9 &= 0x1fff; + c = ((c << 2) + c) | 0; + c = (c + d0) | 0; + d0 = c & 0x1fff; + c = c >>> 13; + d1 += c; + h[0] = d0; + h[1] = d1; + h[2] = d2; + h[3] = d3; + h[4] = d4; + h[5] = d5; + h[6] = d6; + h[7] = d7; + h[8] = d8; + h[9] = d9; + } + finalize() { + const { h, pad } = this; + const g = new Uint16Array(10); + let c = h[1] >>> 13; + h[1] &= 0x1fff; + for (let i = 2; i < 10; i++) { + h[i] += c; + c = h[i] >>> 13; + h[i] &= 0x1fff; + } 
+ h[0] += c * 5; + c = h[0] >>> 13; + h[0] &= 0x1fff; + h[1] += c; + c = h[1] >>> 13; + h[1] &= 0x1fff; + h[2] += c; + g[0] = h[0] + 5; + c = g[0] >>> 13; + g[0] &= 0x1fff; + for (let i = 1; i < 10; i++) { + g[i] = h[i] + c; + c = g[i] >>> 13; + g[i] &= 0x1fff; + } + g[9] -= 1 << 13; + let mask = (c ^ 1) - 1; + for (let i = 0; i < 10; i++) + g[i] &= mask; + mask = ~mask; + for (let i = 0; i < 10; i++) + h[i] = (h[i] & mask) | g[i]; + h[0] = (h[0] | (h[1] << 13)) & 0xffff; + h[1] = ((h[1] >>> 3) | (h[2] << 10)) & 0xffff; + h[2] = ((h[2] >>> 6) | (h[3] << 7)) & 0xffff; + h[3] = ((h[3] >>> 9) | (h[4] << 4)) & 0xffff; + h[4] = ((h[4] >>> 12) | (h[5] << 1) | (h[6] << 14)) & 0xffff; + h[5] = ((h[6] >>> 2) | (h[7] << 11)) & 0xffff; + h[6] = ((h[7] >>> 5) | (h[8] << 8)) & 0xffff; + h[7] = ((h[8] >>> 8) | (h[9] << 5)) & 0xffff; + let f = h[0] + pad[0]; + h[0] = f & 0xffff; + for (let i = 1; i < 8; i++) { + f = (((h[i] + pad[i]) | 0) + (f >>> 16)) | 0; + h[i] = f & 0xffff; + } + } + update(data) { + exists(this); + const { buffer, blockLen } = this; + data = toBytes(data); + const len = data.length; + for (let pos = 0; pos < len;) { + const take = Math.min(blockLen - this.pos, len - pos); + // Fast path: we have at least one block in input + if (take === blockLen) { + for (; blockLen <= len - pos; pos += blockLen) + this.process(data, pos); + continue; + } + buffer.set(data.subarray(pos, pos + take), this.pos); + this.pos += take; + pos += take; + if (this.pos === blockLen) { + this.process(buffer, 0, false); + this.pos = 0; + } + } + return this; + } + destroy() { + this.h.fill(0); + this.r.fill(0); + this.buffer.fill(0); + this.pad.fill(0); + } + digestInto(out) { + exists(this); + output(out, this); + this.finished = true; + const { buffer, h } = this; + let { pos } = this; + if (pos) { + buffer[pos++] = 1; + // buffer.subarray(pos).fill(0); + for (; pos < 16; pos++) + buffer[pos] = 0; + this.process(buffer, 0, true); + } + this.finalize(); + let opos = 0; + for (let i = 
0; i < 8; i++) { + out[opos++] = h[i] >>> 0; + out[opos++] = h[i] >>> 8; + } + return out; + } + digest() { + const { buffer, outputLen } = this; + this.digestInto(buffer); + const res = buffer.slice(0, outputLen); + this.destroy(); + return res; + } + } + function wrapConstructorWithKey(hashCons) { + const hashC = (msg, key) => hashCons(key).update(toBytes(msg)).digest(); + const tmp = hashCons(new Uint8Array(32)); + hashC.outputLen = tmp.outputLen; + hashC.blockLen = tmp.blockLen; + hashC.create = (key) => hashCons(key); + return hashC; + } + const poly1305 = wrapConstructorWithKey((key) => new Poly1305(key)); + + // Basic utils for ARX (add-rotate-xor) salsa and chacha ciphers. + /* + RFC8439 requires multi-step cipher stream, where + authKey starts with counter: 0, actual msg with counter: 1. + + For this, we need a way to re-use nonce / counter: + + const counter = new Uint8Array(4); + chacha(..., counter, ...); // counter is now 1 + chacha(..., counter, ...); // counter is now 2 + + This is complicated: + + - 32-bit counters are enough, no need for 64-bit: max ArrayBuffer size in JS is 4GB + - Original papers don't allow mutating counters + - Counter overflow is undefined [^1] + - Idea A: allow providing (nonce | counter) instead of just nonce, re-use it + - Caveat: Cannot be re-used through all cases: + - * chacha has (counter | nonce) + - * xchacha has (nonce16 | counter | nonce16) + - Idea B: separate nonce / counter and provide separate API for counter re-use + - Caveat: there are different counter sizes depending on an algorithm. 
+ - salsa & chacha also differ in structures of key & sigma: + salsa20: s[0] | k(4) | s[1] | nonce(2) | ctr(2) | s[2] | k(4) | s[3] + chacha: s(4) | k(8) | ctr(1) | nonce(3) + chacha20orig: s(4) | k(8) | ctr(2) | nonce(2) + - Idea C: helper method such as `setSalsaState(key, nonce, sigma, data)` + - Caveat: we can't re-use counter array + + xchacha [^2] uses the subkey and remaining 8 byte nonce with ChaCha20 as normal + (prefixed by 4 NUL bytes, since [RFC8439] specifies a 12-byte nonce). + + [^1]: https://mailarchive.ietf.org/arch/msg/cfrg/gsOnTJzcbgG6OqD8Sc0GO5aR_tU/ + [^2]: https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-xchacha#appendix-A.2 + */ + const sigma16 = utf8ToBytes('expand 16-byte k'); + const sigma32 = utf8ToBytes('expand 32-byte k'); + const sigma16_32 = u32(sigma16); + const sigma32_32 = u32(sigma32); + function rotl(a, b) { + return (a << b) | (a >>> (32 - b)); + } + // Is byte array aligned to 4 byte offset (u32)? + function isAligned32(b) { + return b.byteOffset % 4 === 0; + } + // Salsa and Chacha block length is always 512-bit + const BLOCK_LEN = 64; + const BLOCK_LEN32 = 16; + // new Uint32Array([2**32]) // => Uint32Array(1) [ 0 ] + // new Uint32Array([2**32-1]) // => Uint32Array(1) [ 4294967295 ] + const MAX_COUNTER = 2 ** 32 - 1; + const U32_EMPTY = new Uint32Array(); + function runCipher(core, sigma, key, nonce, data, output, counter, rounds) { + const len = data.length; + const block = new Uint8Array(BLOCK_LEN); + const b32 = u32(block); + // Make sure that buffers aligned to 4 bytes + const isAligned = isAligned32(data) && isAligned32(output); + const d32 = isAligned ? u32(data) : U32_EMPTY; + const o32 = isAligned ? 
u32(output) : U32_EMPTY; + for (let pos = 0; pos < len; counter++) { + core(sigma, key, nonce, b32, counter, rounds); + if (counter >= MAX_COUNTER) + throw new Error('arx: counter overflow'); + const take = Math.min(BLOCK_LEN, len - pos); + // aligned to 4 bytes + if (isAligned && take === BLOCK_LEN) { + const pos32 = pos / 4; + if (pos % 4 !== 0) + throw new Error('arx: invalid block position'); + for (let j = 0, posj; j < BLOCK_LEN32; j++) { + posj = pos32 + j; + o32[posj] = d32[posj] ^ b32[j]; + } + pos += BLOCK_LEN; + continue; + } + for (let j = 0, posj; j < take; j++) { + posj = pos + j; + output[posj] = data[posj] ^ block[j]; + } + pos += take; + } + } + function createCipher(core, opts) { + const { allowShortKeys, extendNonceFn, counterLength, counterRight, rounds } = checkOpts({ allowShortKeys: false, counterLength: 8, counterRight: false, rounds: 20 }, opts); + if (typeof core !== 'function') + throw new Error('core must be a function'); + number(counterLength); + number(rounds); + bool(counterRight); + bool(allowShortKeys); + return (key, nonce, data, output, counter = 0) => { + bytes(key); + bytes(nonce); + bytes(data); + const len = data.length; + if (!output) + output = new Uint8Array(len); + bytes(output); + number(counter); + if (counter < 0 || counter >= MAX_COUNTER) + throw new Error('arx: counter overflow'); + if (output.length < len) + throw new Error(`arx: output (${output.length}) is shorter than data (${len})`); + const toClean = []; + // Key & sigma + // key=16 -> sigma16, k=key|key + // key=32 -> sigma32, k=key + let l = key.length, k, sigma; + if (l === 32) { + k = key.slice(); + toClean.push(k); + sigma = sigma32_32; + } + else if (l === 16 && allowShortKeys) { + k = new Uint8Array(32); + k.set(key); + k.set(key, 16); + sigma = sigma16_32; + toClean.push(k); + } + else { + throw new Error(`arx: invalid 32-byte key, got length=${l}`); + } + // Nonce + // salsa20: 8 (8-byte counter) + // chacha20orig: 8 (8-byte counter) + // chacha20: 12 
(4-byte counter) + // xsalsa20: 24 (16 -> hsalsa, 8 -> old nonce) + // xchacha20: 24 (16 -> hchacha, 8 -> old nonce) + // Align nonce to 4 bytes + if (!isAligned32(nonce)) { + nonce = nonce.slice(); + toClean.push(nonce); + } + const k32 = u32(k); + // hsalsa & hchacha: handle extended nonce + if (extendNonceFn) { + if (nonce.length !== 24) + throw new Error(`arx: extended nonce must be 24 bytes`); + extendNonceFn(sigma, k32, u32(nonce.subarray(0, 16)), k32); + nonce = nonce.subarray(16); + } + // Handle nonce counter + const nonceNcLen = 16 - counterLength; + if (nonceNcLen !== nonce.length) + throw new Error(`arx: nonce must be ${nonceNcLen} or 16 bytes`); + // Pad counter when nonce is 64 bit + if (nonceNcLen !== 12) { + const nc = new Uint8Array(12); + nc.set(nonce, counterRight ? 0 : 12 - nonce.length); + nonce = nc; + toClean.push(nonce); + } + const n32 = u32(nonce); + runCipher(core, sigma, k32, n32, data, output, counter, rounds); + while (toClean.length > 0) + toClean.pop().fill(0); + return output; + }; + } + + // ChaCha20 stream cipher was released in 2008. ChaCha aims to increase + // the diffusion per round, but had slightly less cryptanalysis. + // https://cr.yp.to/chacha.html, http://cr.yp.to/chacha/chacha-20080128.pdf + /** + * ChaCha core function. 
+ */ + // prettier-ignore + function chachaCore(s, k, n, out, cnt, rounds = 20) { + let y00 = s[0], y01 = s[1], y02 = s[2], y03 = s[3], // "expa" "nd 3" "2-by" "te k" + y04 = k[0], y05 = k[1], y06 = k[2], y07 = k[3], // Key Key Key Key + y08 = k[4], y09 = k[5], y10 = k[6], y11 = k[7], // Key Key Key Key + y12 = cnt, y13 = n[0], y14 = n[1], y15 = n[2]; // Counter Counter Nonce Nonce + // Save state to temporary variables + let x00 = y00, x01 = y01, x02 = y02, x03 = y03, x04 = y04, x05 = y05, x06 = y06, x07 = y07, x08 = y08, x09 = y09, x10 = y10, x11 = y11, x12 = y12, x13 = y13, x14 = y14, x15 = y15; + for (let r = 0; r < rounds; r += 2) { + x00 = (x00 + x04) | 0; + x12 = rotl(x12 ^ x00, 16); + x08 = (x08 + x12) | 0; + x04 = rotl(x04 ^ x08, 12); + x00 = (x00 + x04) | 0; + x12 = rotl(x12 ^ x00, 8); + x08 = (x08 + x12) | 0; + x04 = rotl(x04 ^ x08, 7); + x01 = (x01 + x05) | 0; + x13 = rotl(x13 ^ x01, 16); + x09 = (x09 + x13) | 0; + x05 = rotl(x05 ^ x09, 12); + x01 = (x01 + x05) | 0; + x13 = rotl(x13 ^ x01, 8); + x09 = (x09 + x13) | 0; + x05 = rotl(x05 ^ x09, 7); + x02 = (x02 + x06) | 0; + x14 = rotl(x14 ^ x02, 16); + x10 = (x10 + x14) | 0; + x06 = rotl(x06 ^ x10, 12); + x02 = (x02 + x06) | 0; + x14 = rotl(x14 ^ x02, 8); + x10 = (x10 + x14) | 0; + x06 = rotl(x06 ^ x10, 7); + x03 = (x03 + x07) | 0; + x15 = rotl(x15 ^ x03, 16); + x11 = (x11 + x15) | 0; + x07 = rotl(x07 ^ x11, 12); + x03 = (x03 + x07) | 0; + x15 = rotl(x15 ^ x03, 8); + x11 = (x11 + x15) | 0; + x07 = rotl(x07 ^ x11, 7); + x00 = (x00 + x05) | 0; + x15 = rotl(x15 ^ x00, 16); + x10 = (x10 + x15) | 0; + x05 = rotl(x05 ^ x10, 12); + x00 = (x00 + x05) | 0; + x15 = rotl(x15 ^ x00, 8); + x10 = (x10 + x15) | 0; + x05 = rotl(x05 ^ x10, 7); + x01 = (x01 + x06) | 0; + x12 = rotl(x12 ^ x01, 16); + x11 = (x11 + x12) | 0; + x06 = rotl(x06 ^ x11, 12); + x01 = (x01 + x06) | 0; + x12 = rotl(x12 ^ x01, 8); + x11 = (x11 + x12) | 0; + x06 = rotl(x06 ^ x11, 7); + x02 = (x02 + x07) | 0; + x13 = rotl(x13 ^ x02, 16); + x08 = (x08 + 
x13) | 0; + x07 = rotl(x07 ^ x08, 12); + x02 = (x02 + x07) | 0; + x13 = rotl(x13 ^ x02, 8); + x08 = (x08 + x13) | 0; + x07 = rotl(x07 ^ x08, 7); + x03 = (x03 + x04) | 0; + x14 = rotl(x14 ^ x03, 16); + x09 = (x09 + x14) | 0; + x04 = rotl(x04 ^ x09, 12); + x03 = (x03 + x04) | 0; + x14 = rotl(x14 ^ x03, 8); + x09 = (x09 + x14) | 0; + x04 = rotl(x04 ^ x09, 7); + } + // Write output + let oi = 0; + out[oi++] = (y00 + x00) | 0; + out[oi++] = (y01 + x01) | 0; + out[oi++] = (y02 + x02) | 0; + out[oi++] = (y03 + x03) | 0; + out[oi++] = (y04 + x04) | 0; + out[oi++] = (y05 + x05) | 0; + out[oi++] = (y06 + x06) | 0; + out[oi++] = (y07 + x07) | 0; + out[oi++] = (y08 + x08) | 0; + out[oi++] = (y09 + x09) | 0; + out[oi++] = (y10 + x10) | 0; + out[oi++] = (y11 + x11) | 0; + out[oi++] = (y12 + x12) | 0; + out[oi++] = (y13 + x13) | 0; + out[oi++] = (y14 + x14) | 0; + out[oi++] = (y15 + x15) | 0; + } + /** + * ChaCha stream cipher. Conforms to RFC 8439 (IETF, TLS). 12-byte nonce, 4-byte counter. + * With 12-byte nonce, it's not safe to use fill it with random (CSPRNG), due to collision chance. + */ + const chacha20 = /* @__PURE__ */ createCipher(chachaCore, { + counterRight: false, + counterLength: 4, + allowShortKeys: false, + }); + const ZEROS16 = /* @__PURE__ */ new Uint8Array(16); + // Pad to digest size with zeros + const updatePadded = (h, msg) => { + h.update(msg); + const left = msg.length % 16; + if (left) + h.update(ZEROS16.subarray(left)); + }; + const ZEROS32 = /* @__PURE__ */ new Uint8Array(32); + function computeTag(fn, key, nonce, data, AAD) { + const authKey = fn(key, nonce, ZEROS32); + const h = poly1305.create(authKey); + if (AAD) + updatePadded(h, AAD); + updatePadded(h, data); + const num = new Uint8Array(16); + const view = createView(num); + setBigUint64(view, 0, BigInt(AAD ? 
AAD.length : 0), true); + setBigUint64(view, 8, BigInt(data.length), true); + h.update(num); + const res = h.digest(); + authKey.fill(0); + return res; + } + /** + * AEAD algorithm from RFC 8439. + * Salsa20 and chacha (RFC 8439) use poly1305 differently. + * We could have composed them similar to: + * https://github.com/paulmillr/scure-base/blob/b266c73dde977b1dd7ef40ef7a23cc15aab526b3/index.ts#L250 + * But it's hard because of authKey: + * In salsa20, authKey changes position in salsa stream. + * In chacha, authKey can't be computed inside computeTag, it modifies the counter. + */ + const _poly1305_aead = (xorStream) => (key, nonce, AAD) => { + const tagLength = 16; + ensureBytes(key, 32); + ensureBytes(nonce); + return { + encrypt: (plaintext, output) => { + const plength = plaintext.length; + const clength = plength + tagLength; + if (output) { + ensureBytes(output, clength); + } + else { + output = new Uint8Array(clength); + } + xorStream(key, nonce, plaintext, output, 1); + const tag = computeTag(xorStream, key, nonce, output.subarray(0, -tagLength), AAD); + output.set(tag, plength); // append tag + return output; + }, + decrypt: (ciphertext, output) => { + const clength = ciphertext.length; + const plength = clength - tagLength; + if (clength < tagLength) + throw new Error(`encrypted data must be at least ${tagLength} bytes`); + if (output) { + ensureBytes(output, plength); + } + else { + output = new Uint8Array(plength); + } + const data = ciphertext.subarray(0, -tagLength); + const passedTag = ciphertext.subarray(-tagLength); + const tag = computeTag(xorStream, key, nonce, data, AAD); + if (!equalBytes(passedTag, tag)) + throw new Error('invalid tag'); + xorStream(key, nonce, data, output, 1); + return output; + }, + }; + }; + /** + * ChaCha20-Poly1305 from RFC 8439. + * With 12-byte nonce, it's not safe to use fill it with random (CSPRNG), due to collision chance. 
+ */ + const chacha20poly1305 = /* @__PURE__ */ wrapCipher({ blockSize: 64, nonceLength: 12, tagLength: 16 }, _poly1305_aead(chacha20)); + + // HKDF (RFC 5869) + // https://soatok.blog/2021/11/17/understanding-hkdf/ + /** + * HKDF-Extract(IKM, salt) -> PRK + * Arguments position differs from spec (IKM is first one, since it is not optional) + * @param hash + * @param ikm + * @param salt + * @returns + */ + function extract(hash$1, ikm, salt) { + hash(hash$1); + // NOTE: some libraries treat zero-length array as 'not provided'; + // we don't, since we have undefined as 'not provided' + // https://github.com/RustCrypto/KDFs/issues/15 + if (salt === undefined) + salt = new Uint8Array(hash$1.outputLen); // if not provided, it is set to a string of HashLen zeros + return hmac(hash$1, toBytes$2(salt), toBytes$2(ikm)); + } + // HKDF-Expand(PRK, info, L) -> OKM + const HKDF_COUNTER = /* @__PURE__ */ new Uint8Array([0]); + const EMPTY_BUFFER = /* @__PURE__ */ new Uint8Array(); + /** + * HKDF-expand from the spec. 
+ * @param prk - a pseudorandom key of at least HashLen octets (usually, the output from the extract step) + * @param info - optional context and application specific information (can be a zero-length string) + * @param length - length of output keying material in octets + */ + function expand(hash$1, prk, info, length = 32) { + hash(hash$1); + number$2(length); + if (length > 255 * hash$1.outputLen) + throw new Error('Length should be <= 255*HashLen'); + const blocks = Math.ceil(length / hash$1.outputLen); + if (info === undefined) + info = EMPTY_BUFFER; + // first L(ength) octets of T + const okm = new Uint8Array(blocks * hash$1.outputLen); + // Re-use HMAC instance between blocks + const HMAC = hmac.create(hash$1, prk); + const HMACTmp = HMAC._cloneInto(); + const T = new Uint8Array(HMAC.outputLen); + for (let counter = 0; counter < blocks; counter++) { + HKDF_COUNTER[0] = counter + 1; + // T(0) = empty string (zero length) + // T(N) = HMAC-Hash(PRK, T(N-1) | info | N) + HMACTmp.update(counter === 0 ? 
EMPTY_BUFFER : T) + .update(info) + .update(HKDF_COUNTER) + .digestInto(T); + okm.set(T, hash$1.outputLen * counter); + HMAC._cloneInto(HMACTmp); + } + HMAC.destroy(); + HMACTmp.destroy(); + T.fill(0); + HKDF_COUNTER.fill(0); + return okm.slice(0, length); + } + + const pureJsCrypto = { + hashSHA256(data) { + return sha256(data.subarray()); + }, + getHKDF(ck, ikm) { + const prk = extract(sha256, ikm, ck); + const okmU8Array = expand(sha256, prk, undefined, 96); + const okm = okmU8Array; + const k1 = okm.subarray(0, 32); + const k2 = okm.subarray(32, 64); + const k3 = okm.subarray(64, 96); + return [k1, k2, k3]; + }, + generateX25519KeyPair() { + const secretKey = x25519.utils.randomPrivateKey(); + const publicKey = x25519.getPublicKey(secretKey); + return { + publicKey, + privateKey: secretKey + }; + }, + generateX25519KeyPairFromSeed(seed) { + const publicKey = x25519.getPublicKey(seed); + return { + publicKey, + privateKey: seed + }; + }, + generateX25519SharedKey(privateKey, publicKey) { + return x25519.getSharedSecret(privateKey.subarray(), publicKey.subarray()); + }, + chaCha20Poly1305Encrypt(plaintext, nonce, ad, k) { + return chacha20poly1305(k, nonce, ad).encrypt(plaintext.subarray()); + }, + chaCha20Poly1305Decrypt(ciphertext, nonce, ad, k, dst) { + return chacha20poly1305(k, nonce, ad).decrypt(ciphertext.subarray(), dst); + } + }; + + const defaultCrypto = pureJsCrypto; + + function wrapCrypto(crypto) { + return { + generateKeypair: crypto.generateX25519KeyPair, + dh: (keypair, publicKey) => crypto.generateX25519SharedKey(keypair.privateKey, publicKey).subarray(0, 32), + encrypt: crypto.chaCha20Poly1305Encrypt, + decrypt: crypto.chaCha20Poly1305Decrypt, + hash: crypto.hashSHA256, + hkdf: crypto.getHKDF + }; + } + + const uint16BEEncode = (value) => { + const target = allocUnsafe(2); + target[0] = value >> 8; + target[1] = value; + return target; + }; + uint16BEEncode.bytes = 2; + const uint16BEDecode = (data) => { + if (data.length < 2) + throw 
RangeError('Could not decode int16BE'); + if (data instanceof Uint8Array) { + let value = 0; + value += data[0] << 8; + value += data[1]; + return value; + } + return data.getUint16(0); + }; + uint16BEDecode.bytes = 2; + + function registerMetrics(metrics) { + return { + xxHandshakeSuccesses: metrics.registerCounter('libp2p_noise_xxhandshake_successes_total', { + help: 'Total count of noise xxHandshakes successes_' + }), + xxHandshakeErrors: metrics.registerCounter('libp2p_noise_xxhandshake_error_total', { + help: 'Total count of noise xxHandshakes errors' + }), + encryptedPackets: metrics.registerCounter('libp2p_noise_encrypted_packets_total', { + help: 'Total count of noise encrypted packets successfully' + }), + decryptedPackets: metrics.registerCounter('libp2p_noise_decrypted_packets_total', { + help: 'Total count of noise decrypted packets' + }), + decryptErrors: metrics.registerCounter('libp2p_noise_decrypt_errors_total', { + help: 'Total count of noise decrypt errors' + }) + }; + } + + function logLocalStaticKeys(s, keyLogger) { + if (!keyLogger.enabled || !DUMP_SESSION_KEYS) { + return; + } + if (s) { + keyLogger(`LOCAL_STATIC_PUBLIC_KEY ${toString$1(s.publicKey, 'hex')}`); + keyLogger(`LOCAL_STATIC_PRIVATE_KEY ${toString$1(s.privateKey, 'hex')}`); + } + else { + keyLogger('Missing local static keys.'); + } + } + function logLocalEphemeralKeys(e, keyLogger) { + if (!keyLogger.enabled || !DUMP_SESSION_KEYS) { + return; + } + if (e) { + keyLogger(`LOCAL_PUBLIC_EPHEMERAL_KEY ${toString$1(e.publicKey, 'hex')}`); + keyLogger(`LOCAL_PRIVATE_EPHEMERAL_KEY ${toString$1(e.privateKey, 'hex')}`); + } + else { + keyLogger('Missing local ephemeral keys.'); + } + } + function logRemoteStaticKey(rs, keyLogger) { + if (!keyLogger.enabled || !DUMP_SESSION_KEYS) { + return; + } + if (rs) { + keyLogger(`REMOTE_STATIC_PUBLIC_KEY ${toString$1(rs.subarray(), 'hex')}`); + } + else { + keyLogger('Missing remote static public key.'); + } + } + function logRemoteEphemeralKey(re, 
keyLogger) { + if (!keyLogger.enabled || !DUMP_SESSION_KEYS) { + return; + } + if (re) { + keyLogger(`REMOTE_EPHEMERAL_PUBLIC_KEY ${toString$1(re.subarray(), 'hex')}`); + } + else { + keyLogger('Missing remote ephemeral keys.'); + } + } + function logCipherState(cs1, cs2, keyLogger) { + if (!keyLogger.enabled || !DUMP_SESSION_KEYS) { + return; + } + keyLogger(`CIPHER_STATE_1 ${cs1.n.getUint64()} ${cs1.k && toString$1(cs1.k, 'hex')}`); + keyLogger(`CIPHER_STATE_2 ${cs2.n.getUint64()} ${cs2.k && toString$1(cs2.k, 'hex')}`); + } + + /** + * Returns the xor distance between two Uint8Arrays + */ + function xor(a, b) { + if (a.length !== b.length) { + throw new Error('Inputs should have the same length'); + } + const result = allocUnsafe(a.length); + for (let i = 0; i < a.length; i++) { + result[i] = a[i] ^ b[i]; + } + return asUint8Array$1(result); + } + + class UnexpectedPeerError extends Error { + code; + constructor(message = 'Unexpected Peer') { + super(message); + this.code = UnexpectedPeerError.code; + } + static code = 'ERR_UNEXPECTED_PEER'; + } + class InvalidCryptoExchangeError extends Error { + code; + constructor(message = 'Invalid crypto exchange') { + super(message); + this.code = InvalidCryptoExchangeError.code; + } + static code = 'ERR_INVALID_CRYPTO_EXCHANGE'; + } + + const MIN_NONCE = 0; + // For performance reasons, the nonce is represented as a JS `number` + // Although JS `number` can safely represent integers up to 2 ** 53 - 1, we choose to only use + // 4 bytes to store the data for performance reason. + // This is a slight deviation from the noise spec, which describes the max nonce as 2 ** 64 - 2 + // The effect is that this implementation will need a new handshake to be performed after fewer messages are exchanged than other implementations with full uint64 nonces. + // this MAX_NONCE is still a large number of messages, so the practical effect of this is negligible. 
+ const MAX_NONCE = 0xffffffff; + const ERR_MAX_NONCE = 'Cipherstate has reached maximum n, a new handshake must be performed'; + /** + * The nonce is an uint that's increased over time. + * Maintaining different representations help improve performance. + */ + class Nonce { + n; + bytes; + view; + constructor(n = MIN_NONCE) { + this.n = n; + this.bytes = alloc$2(12); + this.view = new DataView(this.bytes.buffer, this.bytes.byteOffset, this.bytes.byteLength); + this.view.setUint32(4, n, true); + } + increment() { + this.n++; + // Even though we're treating the nonce as 8 bytes, RFC7539 specifies 12 bytes for a nonce. + this.view.setUint32(4, this.n, true); + } + getBytes() { + return this.bytes; + } + getUint64() { + return this.n; + } + assertValue() { + if (this.n > MAX_NONCE) { + throw new Error(ERR_MAX_NONCE); + } + } + } + + // Code in this file is a direct translation of a subset of the noise protocol https://noiseprotocol.org/noise.html, + // agnostic to libp2p's usage of noise + const ZEROLEN = alloc$2(0); + class CipherState { + k; + n; + crypto; + constructor(crypto, k = undefined, n = 0) { + this.crypto = crypto; + this.k = k; + this.n = new Nonce(n); + } + hasKey() { + return Boolean(this.k); + } + encryptWithAd(ad, plaintext) { + if (!this.hasKey()) { + return plaintext; + } + this.n.assertValue(); + const e = this.crypto.encrypt(plaintext, this.n.getBytes(), ad, this.k); + this.n.increment(); + return e; + } + decryptWithAd(ad, ciphertext, dst) { + if (!this.hasKey()) { + return ciphertext; + } + this.n.assertValue(); + const plaintext = this.crypto.decrypt(ciphertext, this.n.getBytes(), ad, this.k, dst); + this.n.increment(); + return plaintext; + } + } + class SymmetricState { + cs; + ck; + h; + crypto; + constructor(crypto, protocolName) { + this.crypto = crypto; + const protocolNameBytes = fromString(protocolName, 'utf-8'); + this.h = hashProtocolName(crypto, protocolNameBytes); + this.ck = this.h; + this.cs = new CipherState(crypto); + } + 
mixKey(ikm) { + const [ck, tempK] = this.crypto.hkdf(this.ck, ikm); + this.ck = ck; + this.cs = new CipherState(this.crypto, tempK); + } + mixHash(data) { + this.h = this.crypto.hash(new Uint8ArrayList(this.h, data)); + } + encryptAndHash(plaintext) { + const ciphertext = this.cs.encryptWithAd(this.h, plaintext); + this.mixHash(ciphertext); + return ciphertext; + } + decryptAndHash(ciphertext) { + const plaintext = this.cs.decryptWithAd(this.h, ciphertext); + this.mixHash(ciphertext); + return plaintext; + } + split() { + const [tempK1, tempK2] = this.crypto.hkdf(this.ck, ZEROLEN); + return [new CipherState(this.crypto, tempK1), new CipherState(this.crypto, tempK2)]; + } + } + class AbstractHandshakeState { + ss; + s; + e; + rs; + re; + initiator; + crypto; + constructor(init) { + const { crypto, protocolName, prologue, initiator, s, e, rs, re } = init; + this.crypto = crypto; + this.ss = new SymmetricState(crypto, protocolName); + this.ss.mixHash(prologue); + this.initiator = initiator; + this.s = s; + this.e = e; + this.rs = rs; + this.re = re; + } + writeE() { + if (this.e) { + throw new Error('ephemeral keypair is already set'); + } + const e = this.crypto.generateKeypair(); + this.ss.mixHash(e.publicKey); + this.e = e; + return e.publicKey; + } + writeS() { + if (!this.s) { + throw new Error('static keypair is not set'); + } + return this.ss.encryptAndHash(this.s.publicKey); + } + writeEE() { + if (!this.e) { + throw new Error('ephemeral keypair is not set'); + } + if (!this.re) { + throw new Error('remote ephemeral public key is not set'); + } + this.ss.mixKey(this.crypto.dh(this.e, this.re)); + } + writeES() { + if (this.initiator) { + if (!this.e) { + throw new Error('ephemeral keypair is not set'); + } + if (!this.rs) { + throw new Error('remote static public key is not set'); + } + this.ss.mixKey(this.crypto.dh(this.e, this.rs)); + } + else { + if (!this.s) { + throw new Error('static keypair is not set'); + } + if (!this.re) { + throw new Error('remote 
ephemeral public key is not set'); + } + this.ss.mixKey(this.crypto.dh(this.s, this.re)); + } + } + writeSE() { + if (this.initiator) { + if (!this.s) { + throw new Error('static keypair is not set'); + } + if (!this.re) { + throw new Error('remote ephemeral public key is not set'); + } + this.ss.mixKey(this.crypto.dh(this.s, this.re)); + } + else { + if (!this.e) { + throw new Error('ephemeral keypair is not set'); + } + if (!this.rs) { + throw new Error('remote static public key is not set'); + } + this.ss.mixKey(this.crypto.dh(this.e, this.rs)); + } + } + readE(message, offset = 0) { + if (this.re) { + throw new Error('remote ephemeral public key is already set'); + } + if (message.byteLength < offset + 32) { + throw new Error('message is not long enough'); + } + this.re = message.sublist(offset, offset + 32); + this.ss.mixHash(this.re); + } + readS(message, offset = 0) { + if (this.rs) { + throw new Error('remote static public key is already set'); + } + const cipherLength = 32 + (this.ss.cs.hasKey() ? 
16 : 0); + if (message.byteLength < offset + cipherLength) { + throw new Error('message is not long enough'); + } + const temp = message.sublist(offset, offset + cipherLength); + this.rs = this.ss.decryptAndHash(temp); + return cipherLength; + } + readEE() { + this.writeEE(); + } + readES() { + this.writeES(); + } + readSE() { + this.writeSE(); + } + } + /** + * A IHandshakeState that's optimized for the XX pattern + */ + class XXHandshakeState extends AbstractHandshakeState { + // e + writeMessageA(payload) { + return new Uint8ArrayList(this.writeE(), this.ss.encryptAndHash(payload)); + } + // e, ee, s, es + writeMessageB(payload) { + const e = this.writeE(); + this.writeEE(); + const encS = this.writeS(); + this.writeES(); + return new Uint8ArrayList(e, encS, this.ss.encryptAndHash(payload)); + } + // s, se + writeMessageC(payload) { + const encS = this.writeS(); + this.writeSE(); + return new Uint8ArrayList(encS, this.ss.encryptAndHash(payload)); + } + // e + readMessageA(message) { + try { + this.readE(message); + return this.ss.decryptAndHash(message.sublist(32)); + } + catch (e) { + throw new InvalidCryptoExchangeError(`handshake stage 0 validation fail: ${e.message}`); + } + } + // e, ee, s, es + readMessageB(message) { + try { + this.readE(message); + this.readEE(); + const consumed = this.readS(message, 32); + this.readES(); + return this.ss.decryptAndHash(message.sublist(32 + consumed)); + } + catch (e) { + throw new InvalidCryptoExchangeError(`handshake stage 1 validation fail: ${e.message}`); + } + } + // s, se + readMessageC(message) { + try { + const consumed = this.readS(message); + this.readSE(); + return this.ss.decryptAndHash(message.sublist(consumed)); + } + catch (e) { + throw new InvalidCryptoExchangeError(`handshake stage 2 validation fail: ${e.message}`); + } + } + } + function hashProtocolName(crypto, protocolName) { + if (protocolName.length <= 32) { + const h = alloc$2(32); + h.set(protocolName); + return h; + } + else { + return 
crypto.hash(protocolName); + } + } + + /* eslint-disable import/export */ + /* eslint-disable complexity */ + /* eslint-disable @typescript-eslint/no-namespace */ + /* eslint-disable @typescript-eslint/no-unnecessary-boolean-literal-compare */ + /* eslint-disable @typescript-eslint/no-empty-interface */ + var NoiseExtensions; + (function (NoiseExtensions) { + let _codec; + NoiseExtensions.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if (obj.webtransportCerthashes != null) { + for (const value of obj.webtransportCerthashes) { + w.uint32(10); + w.bytes(value); + } + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length) => { + const obj = { + webtransportCerthashes: [] + }; + const end = length == null ? reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + obj.webtransportCerthashes.push(reader.bytes()); + break; + } + default: { + reader.skipType(tag & 7); + break; + } + } + } + return obj; + }); + } + return _codec; + }; + NoiseExtensions.encode = (obj) => { + return encodeMessage(obj, NoiseExtensions.codec()); + }; + NoiseExtensions.decode = (buf) => { + return decodeMessage(buf, NoiseExtensions.codec()); + }; + })(NoiseExtensions || (NoiseExtensions = {})); + var NoiseHandshakePayload; + (function (NoiseHandshakePayload) { + let _codec; + NoiseHandshakePayload.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if ((obj.identityKey != null && obj.identityKey.byteLength > 0)) { + w.uint32(10); + w.bytes(obj.identityKey); + } + if ((obj.identitySig != null && obj.identitySig.byteLength > 0)) { + w.uint32(18); + w.bytes(obj.identitySig); + } + if (obj.extensions != null) { + w.uint32(34); + NoiseExtensions.codec().encode(obj.extensions, w); + } + if (opts.lengthDelimited !== 
false) { + w.ldelim(); + } + }, (reader, length) => { + const obj = { + identityKey: alloc$2(0), + identitySig: alloc$2(0) + }; + const end = length == null ? reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + obj.identityKey = reader.bytes(); + break; + } + case 2: { + obj.identitySig = reader.bytes(); + break; + } + case 4: { + obj.extensions = NoiseExtensions.codec().decode(reader, reader.uint32()); + break; + } + default: { + reader.skipType(tag & 7); + break; + } + } + } + return obj; + }); + } + return _codec; + }; + NoiseHandshakePayload.encode = (obj) => { + return encodeMessage(obj, NoiseHandshakePayload.codec()); + }; + NoiseHandshakePayload.decode = (buf) => { + return decodeMessage(buf, NoiseHandshakePayload.codec()); + }; + })(NoiseHandshakePayload || (NoiseHandshakePayload = {})); + + async function createHandshakePayload(privateKey, staticPublicKey, extensions) { + const identitySig = await privateKey.sign(getSignaturePayload(staticPublicKey)); + return NoiseHandshakePayload.encode({ + identityKey: privateKey.public.bytes, + identitySig, + extensions + }); + } + async function decodeHandshakePayload(payloadBytes, remoteStaticKey, remoteIdentityKey) { + try { + const payload = NoiseHandshakePayload.decode(payloadBytes); + if (remoteIdentityKey) { + const remoteIdentityKeyBytes = remoteIdentityKey.subarray(); + if (!equals(remoteIdentityKeyBytes, payload.identityKey)) { + throw new Error(`Payload identity key ${toString$1(payload.identityKey, 'hex')} does not match expected remote identity key ${toString$1(remoteIdentityKeyBytes, 'hex')}`); + } + } + if (!remoteStaticKey) { + throw new Error('Remote static does not exist'); + } + const signaturePayload = getSignaturePayload(remoteStaticKey); + const publicKey = unmarshalPublicKey(payload.identityKey); + if (!(await publicKey.verify(signaturePayload, payload.identitySig))) { + throw new Error('Invalid payload signature'); + 
} + return payload; + } + catch (e) { + throw new UnexpectedPeerError(e.message); + } + } + function getSignaturePayload(publicKey) { + const prefix = fromString('noise-libp2p-static-key:'); + if (publicKey instanceof Uint8Array) { + return concat$1([prefix, publicKey], prefix.length + publicKey.length); + } + publicKey.prepend(prefix); + return publicKey; + } + + async function performHandshakeInitiator(init) { + const { log, connection, crypto, privateKey, prologue, s, remoteIdentityKey, extensions } = init; + const payload = await createHandshakePayload(privateKey, s.publicKey, extensions); + const xx = new XXHandshakeState({ + crypto, + protocolName: 'Noise_XX_25519_ChaChaPoly_SHA256', + initiator: true, + prologue, + s + }); + logLocalStaticKeys(xx.s, log); + log.trace('Stage 0 - Initiator starting to send first message.'); + await connection.write(xx.writeMessageA(ZEROLEN)); + log.trace('Stage 0 - Initiator finished sending first message.'); + logLocalEphemeralKeys(xx.e, log); + log.trace('Stage 1 - Initiator waiting to receive first message from responder...'); + const plaintext = xx.readMessageB(await connection.read()); + log.trace('Stage 1 - Initiator received the message.'); + logRemoteEphemeralKey(xx.re, log); + logRemoteStaticKey(xx.rs, log); + log.trace("Initiator going to check remote's signature..."); + const receivedPayload = await decodeHandshakePayload(plaintext, xx.rs, remoteIdentityKey); + log.trace('All good with the signature!'); + log.trace('Stage 2 - Initiator sending third handshake message.'); + await connection.write(xx.writeMessageC(payload)); + log.trace('Stage 2 - Initiator sent message with signed payload.'); + const [cs1, cs2] = xx.ss.split(); + logCipherState(cs1, cs2, log); + return { + payload: receivedPayload, + encrypt: (plaintext) => cs1.encryptWithAd(ZEROLEN, plaintext), + decrypt: (ciphertext, dst) => cs2.decryptWithAd(ZEROLEN, ciphertext, dst) + }; + } + async function performHandshakeResponder(init) { + const { log, 
connection, crypto, privateKey, prologue, s, remoteIdentityKey, extensions } = init; + const payload = await createHandshakePayload(privateKey, s.publicKey, extensions); + const xx = new XXHandshakeState({ + crypto, + protocolName: 'Noise_XX_25519_ChaChaPoly_SHA256', + initiator: false, + prologue, + s + }); + logLocalStaticKeys(xx.s, log); + log.trace('Stage 0 - Responder waiting to receive first message.'); + xx.readMessageA(await connection.read()); + log.trace('Stage 0 - Responder received first message.'); + logRemoteEphemeralKey(xx.re, log); + log.trace('Stage 1 - Responder sending out first message with signed payload and static key.'); + await connection.write(xx.writeMessageB(payload)); + log.trace('Stage 1 - Responder sent the second handshake message with signed payload.'); + logLocalEphemeralKeys(xx.e, log); + log.trace('Stage 2 - Responder waiting for third handshake message...'); + const plaintext = xx.readMessageC(await connection.read()); + log.trace('Stage 2 - Responder received the message, finished handshake.'); + const receivedPayload = await decodeHandshakePayload(plaintext, xx.rs, remoteIdentityKey); + const [cs1, cs2] = xx.ss.split(); + logCipherState(cs1, cs2, log); + return { + payload: receivedPayload, + encrypt: (plaintext) => cs2.encryptWithAd(ZEROLEN, plaintext), + decrypt: (ciphertext, dst) => cs1.decryptWithAd(ZEROLEN, ciphertext, dst) + }; + } + + const CHACHA_TAG_LENGTH = 16; + // Returns generator that encrypts payload from the user + function encryptStream(handshake, metrics) { + return async function* (source) { + for await (const chunk of source) { + for (let i = 0; i < chunk.length; i += NOISE_MSG_MAX_LENGTH_BYTES_WITHOUT_TAG) { + let end = i + NOISE_MSG_MAX_LENGTH_BYTES_WITHOUT_TAG; + if (end > chunk.length) { + end = chunk.length; + } + let data; + if (chunk instanceof Uint8Array) { + data = handshake.encrypt(chunk.subarray(i, end)); + } + else { + data = handshake.encrypt(chunk.sublist(i, end)); + } + 
metrics?.encryptedPackets.increment(); + yield new Uint8ArrayList(uint16BEEncode(data.byteLength), data); + } + } + }; + } + // Decrypt received payload to the user + function decryptStream(handshake, metrics) { + return async function* (source) { + for await (const chunk of source) { + for (let i = 0; i < chunk.length; i += NOISE_MSG_MAX_LENGTH_BYTES) { + let end = i + NOISE_MSG_MAX_LENGTH_BYTES; + if (end > chunk.length) { + end = chunk.length; + } + if (end - CHACHA_TAG_LENGTH < i) { + throw new Error('Invalid chunk'); + } + const encrypted = chunk.sublist(i, end); + // memory allocation is not cheap so reuse the encrypted Uint8Array + // see https://github.com/ChainSafe/js-libp2p-noise/pull/242#issue-1422126164 + // this is ok because chacha20 reads bytes one by one and don't reread after that + // it's also tested in https://github.com/ChainSafe/as-chacha20poly1305/pull/1/files#diff-25252846b58979dcaf4e41d47b3eadd7e4f335e7fb98da6c049b1f9cd011f381R48 + const dst = chunk.subarray(i, end - CHACHA_TAG_LENGTH); + try { + const plaintext = handshake.decrypt(encrypted, dst); + metrics?.decryptedPackets.increment(); + yield plaintext; + } + catch (e) { + metrics?.decryptErrors.increment(); + throw e; + } + } + } + }; + } + + class Noise { + protocol = '/noise'; + crypto; + prologue; + staticKey; + extensions; + metrics; + components; + constructor(components, init = {}) { + const { staticNoiseKey, extensions, crypto, prologueBytes } = init; + const { metrics } = components; + this.components = components; + const _crypto = crypto ?? defaultCrypto; + this.crypto = wrapCrypto(_crypto); + this.extensions = extensions; + this.metrics = metrics ? registerMetrics(metrics) : undefined; + if (staticNoiseKey) { + // accepts x25519 private key of length 32 + this.staticKey = _crypto.generateX25519KeyPairFromSeed(staticNoiseKey); + } + else { + this.staticKey = _crypto.generateX25519KeyPair(); + } + this.prologue = prologueBytes ?? 
alloc$2(0); + } + /** + * Encrypt outgoing data to the remote party (handshake as initiator) + * + * @param localPeer - PeerId of the receiving peer + * @param connection - streaming iterable duplex that will be encrypted + * @param remotePeer - PeerId of the remote peer. Used to validate the integrity of the remote peer. + */ + async secureOutbound(localPeer, connection, remotePeer) { + const wrappedConnection = lpStream(connection, { + lengthEncoder: uint16BEEncode, + lengthDecoder: uint16BEDecode, + maxDataLength: NOISE_MSG_MAX_LENGTH_BYTES + }); + if (!localPeer.privateKey) { + throw new CodeError$2('local peerId does not contain private key', 'ERR_NO_PRIVATE_KEY'); + } + const privateKey = await unmarshalPrivateKey(localPeer.privateKey); + const remoteIdentityKey = remotePeer?.publicKey; + const handshake = await this.performHandshakeInitiator(wrappedConnection, privateKey, remoteIdentityKey); + const conn = await this.createSecureConnection(wrappedConnection, handshake); + connection.source = conn.source; + connection.sink = conn.sink; + return { + conn: connection, + remoteExtensions: handshake.payload.extensions, + remotePeer: await peerIdFromKeys(handshake.payload.identityKey) + }; + } + /** + * Decrypt incoming data (handshake as responder). + * + * @param localPeer - PeerId of the receiving peer. + * @param connection - streaming iterable duplex that will be encrypted. + * @param remotePeer - optional PeerId of the initiating peer, if known. This may only exist during transport upgrades. 
+ */ + async secureInbound(localPeer, connection, remotePeer) { + const wrappedConnection = lpStream(connection, { + lengthEncoder: uint16BEEncode, + lengthDecoder: uint16BEDecode, + maxDataLength: NOISE_MSG_MAX_LENGTH_BYTES + }); + if (!localPeer.privateKey) { + throw new CodeError$2('local peerId does not contain private key', 'ERR_NO_PRIVATE_KEY'); + } + const privateKey = await unmarshalPrivateKey(localPeer.privateKey); + const remoteIdentityKey = remotePeer?.publicKey; + const handshake = await this.performHandshakeResponder(wrappedConnection, privateKey, remoteIdentityKey); + const conn = await this.createSecureConnection(wrappedConnection, handshake); + connection.source = conn.source; + connection.sink = conn.sink; + return { + conn: connection, + remoteExtensions: handshake.payload.extensions, + remotePeer: await peerIdFromKeys(handshake.payload.identityKey) + }; + } + /** + * Perform XX handshake as initiator. + */ + async performHandshakeInitiator(connection, + // TODO: pass private key in noise constructor via Components + privateKey, remoteIdentityKey) { + let result; + try { + result = await performHandshakeInitiator({ + connection, + privateKey, + remoteIdentityKey, + log: this.components.logger.forComponent('libp2p:noise:xxhandshake'), + crypto: this.crypto, + prologue: this.prologue, + s: this.staticKey, + extensions: this.extensions + }); + this.metrics?.xxHandshakeSuccesses.increment(); + } + catch (e) { + this.metrics?.xxHandshakeErrors.increment(); + throw e; + } + return result; + } + /** + * Perform XX handshake as responder. 
+ */ + async performHandshakeResponder(connection, + // TODO: pass private key in noise constructor via Components + privateKey, remoteIdentityKey) { + let result; + try { + result = await performHandshakeResponder({ + connection, + privateKey, + remoteIdentityKey, + log: this.components.logger.forComponent('libp2p:noise:xxhandshake'), + crypto: this.crypto, + prologue: this.prologue, + s: this.staticKey, + extensions: this.extensions + }); + this.metrics?.xxHandshakeSuccesses.increment(); + } + catch (e) { + this.metrics?.xxHandshakeErrors.increment(); + throw e; + } + return result; + } + async createSecureConnection(connection, handshake) { + // Create encryption box/unbox wrapper + const [secure, user] = duplexPair(); + const network = connection.unwrap(); + await pipe(secure, // write to wrapper + encryptStream(handshake, this.metrics), // encrypt data + prefix with message length + network, // send to the remote peer + (source) => decode(source, { lengthDecoder: uint16BEDecode }), // read message length prefix + decryptStream(handshake, this.metrics), // decrypt the incoming data + secure // pipe to the wrapper + ); + return user; + } + } + + function noise(init = {}) { + return (components) => new Noise(components, init); + } + + function createListener$1(options) { + throw new Error('Not implemented'); + } + + function getIterator(obj) { + if (obj != null) { + if (typeof obj[Symbol.iterator] === 'function') { + return obj[Symbol.iterator](); + } + if (typeof obj[Symbol.asyncIterator] === 'function') { + return obj[Symbol.asyncIterator](); + } + if (typeof obj.next === 'function') { + return obj; // probably an iterator + } + } + throw new Error('argument is not an iterator or iterable'); + } + + function isPromise$3(thing) { + if (thing == null) { + return false; + } + return typeof thing.then === 'function' && + typeof thing.catch === 'function' && + typeof thing.finally === 'function'; + } + + function closeSource(source, log) { + const res = 
getIterator(source).return?.(); + if (isPromise$3(res)) { + res.catch(err => { + log.error('could not cause iterator to return', err); + }); + } + } + + const ERR_STREAM_RESET = 'ERR_STREAM_RESET'; + const ERR_SINK_INVALID_STATE = 'ERR_SINK_INVALID_STATE'; + const DEFAULT_SEND_CLOSE_WRITE_TIMEOUT = 5000; + function isPromise$2(thing) { + if (thing == null) { + return false; + } + return typeof thing.then === 'function' && + typeof thing.catch === 'function' && + typeof thing.finally === 'function'; + } + class AbstractStream { + id; + direction; + timeline; + protocol; + metadata; + source; + status; + readStatus; + writeStatus; + log; + sinkController; + sinkEnd; + closed; + endErr; + streamSource; + onEnd; + onCloseRead; + onCloseWrite; + onReset; + onAbort; + sendCloseWriteTimeout; + sendingData; + constructor(init) { + this.sinkController = new AbortController(); + this.sinkEnd = pDefer(); + this.closed = pDefer(); + this.log = init.log; + // stream status + this.status = 'open'; + this.readStatus = 'ready'; + this.writeStatus = 'ready'; + this.id = init.id; + this.metadata = init.metadata ?? {}; + this.direction = init.direction; + this.timeline = { + open: Date.now() + }; + this.sendCloseWriteTimeout = init.sendCloseWriteTimeout ?? 
DEFAULT_SEND_CLOSE_WRITE_TIMEOUT; + this.onEnd = init.onEnd; + this.onCloseRead = init?.onCloseRead; + this.onCloseWrite = init?.onCloseWrite; + this.onReset = init?.onReset; + this.onAbort = init?.onAbort; + this.source = this.streamSource = pushable$1({ + onEnd: (err) => { + if (err != null) { + this.log.trace('source ended with error', err); + } + else { + this.log.trace('source ended'); + } + this.onSourceEnd(err); + } + }); + // necessary because the libp2p upgrader wraps the sink function + this.sink = this.sink.bind(this); + } + async sink(source) { + if (this.writeStatus !== 'ready') { + throw new CodeError$2(`writable end state is "${this.writeStatus}" not "ready"`, ERR_SINK_INVALID_STATE); + } + try { + this.writeStatus = 'writing'; + const options = { + signal: this.sinkController.signal + }; + if (this.direction === 'outbound') { // If initiator, open a new stream + const res = this.sendNewStream(options); + if (isPromise$2(res)) { + await res; + } + } + const abortListener = () => { + closeSource(source, this.log); + }; + try { + this.sinkController.signal.addEventListener('abort', abortListener); + this.log.trace('sink reading from source'); + for await (let data of source) { + data = data instanceof Uint8Array ? 
new Uint8ArrayList(data) : data; + const res = this.sendData(data, options); + if (isPromise$2(res)) { + this.sendingData = pDefer(); + await res; + this.sendingData.resolve(); + this.sendingData = undefined; + } + } + } + finally { + this.sinkController.signal.removeEventListener('abort', abortListener); + } + this.log.trace('sink finished reading from source, write status is "%s"', this.writeStatus); + if (this.writeStatus === 'writing') { + this.writeStatus = 'closing'; + this.log.trace('send close write to remote'); + await this.sendCloseWrite({ + signal: AbortSignal.timeout(this.sendCloseWriteTimeout) + }); + this.writeStatus = 'closed'; + } + this.onSinkEnd(); + } + catch (err) { + this.log.trace('sink ended with error, calling abort with error', err); + this.abort(err); + throw err; + } + finally { + this.log.trace('resolve sink end'); + this.sinkEnd.resolve(); + } + } + onSourceEnd(err) { + if (this.timeline.closeRead != null) { + return; + } + this.timeline.closeRead = Date.now(); + this.readStatus = 'closed'; + if (err != null && this.endErr == null) { + this.endErr = err; + } + this.onCloseRead?.(); + if (this.timeline.closeWrite != null) { + this.log.trace('source and sink ended'); + this.timeline.close = Date.now(); + if (this.status !== 'aborted' && this.status !== 'reset') { + this.status = 'closed'; + } + if (this.onEnd != null) { + this.onEnd(this.endErr); + } + this.closed.resolve(); + } + else { + this.log.trace('source ended, waiting for sink to end'); + } + } + onSinkEnd(err) { + if (this.timeline.closeWrite != null) { + return; + } + this.timeline.closeWrite = Date.now(); + this.writeStatus = 'closed'; + if (err != null && this.endErr == null) { + this.endErr = err; + } + this.onCloseWrite?.(); + if (this.timeline.closeRead != null) { + this.log.trace('sink and source ended'); + this.timeline.close = Date.now(); + if (this.status !== 'aborted' && this.status !== 'reset') { + this.status = 'closed'; + } + if (this.onEnd != null) { + 
this.onEnd(this.endErr); + } + this.closed.resolve(); + } + else { + this.log.trace('sink ended, waiting for source to end'); + } + } + // Close for both Reading and Writing + async close(options) { + this.log.trace('closing gracefully'); + this.status = 'closing'; + // wait for read and write ends to close + await raceSignal(Promise.all([ + this.closeWrite(options), + this.closeRead(options), + this.closed.promise + ]), options?.signal); + this.status = 'closed'; + this.log.trace('closed gracefully'); + } + async closeRead(options = {}) { + if (this.readStatus === 'closing' || this.readStatus === 'closed') { + return; + } + this.log.trace('closing readable end of stream with starting read status "%s"', this.readStatus); + const readStatus = this.readStatus; + this.readStatus = 'closing'; + if (this.status !== 'reset' && this.status !== 'aborted' && this.timeline.closeRead == null) { + this.log.trace('send close read to remote'); + await this.sendCloseRead(options); + } + if (readStatus === 'ready') { + this.log.trace('ending internal source queue with %d queued bytes', this.streamSource.readableLength); + this.streamSource.end(); + } + this.log.trace('closed readable end of stream'); + } + async closeWrite(options = {}) { + if (this.writeStatus === 'closing' || this.writeStatus === 'closed') { + return; + } + this.log.trace('closing writable end of stream with starting write status "%s"', this.writeStatus); + if (this.writeStatus === 'ready') { + this.log.trace('sink was never sunk, sink an empty array'); + await raceSignal(this.sink([]), options.signal); + } + if (this.writeStatus === 'writing') { + // try to let sending outgoing data succeed + if (this.sendingData != null) { + await raceSignal(this.sendingData.promise, options.signal); + } + // stop reading from the source passed to `.sink` + this.log.trace('aborting source passed to .sink'); + this.sinkController.abort(); + await raceSignal(this.sinkEnd.promise, options.signal); + } + this.writeStatus = 
'closed'; + this.log.trace('closed writable end of stream'); + } + /** + * Close immediately for reading and writing and send a reset message (local + * error) + */ + abort(err) { + if (this.status === 'closed' || this.status === 'aborted' || this.status === 'reset') { + return; + } + this.log('abort with error', err); + // try to send a reset message + this.log('try to send reset to remote'); + const res = this.sendReset(); + if (isPromise$2(res)) { + res.catch((err) => { + this.log.error('error sending reset message', err); + }); + } + this.status = 'aborted'; + this.timeline.abort = Date.now(); + this._closeSinkAndSource(err); + this.onAbort?.(err); + } + /** + * Receive a reset message - close immediately for reading and writing (remote + * error) + */ + reset() { + if (this.status === 'closed' || this.status === 'aborted' || this.status === 'reset') { + return; + } + const err = new CodeError$2('stream reset', ERR_STREAM_RESET); + this.status = 'reset'; + this.timeline.reset = Date.now(); + this._closeSinkAndSource(err); + this.onReset?.(); + } + _closeSinkAndSource(err) { + this._closeSink(err); + this._closeSource(err); + } + _closeSink(err) { + // if the sink function is running, cause it to end + if (this.writeStatus === 'writing') { + this.log.trace('end sink source'); + this.sinkController.abort(); + } + this.onSinkEnd(err); + } + _closeSource(err) { + // if the source is not ending, end it + if (this.readStatus !== 'closing' && this.readStatus !== 'closed') { + this.log.trace('ending source with %d bytes to be read by consumer', this.streamSource.readableLength); + this.readStatus = 'closing'; + this.streamSource.end(err); + } + } + /** + * The remote closed for writing so we should expect to receive no more + * messages + */ + remoteCloseWrite() { + if (this.readStatus === 'closing' || this.readStatus === 'closed') { + this.log('received remote close write but local source is already closed'); + return; + } + this.log.trace('remote close write'); + 
this._closeSource(); + } + /** + * The remote closed for reading so we should not send any more + * messages + */ + remoteCloseRead() { + if (this.writeStatus === 'closing' || this.writeStatus === 'closed') { + this.log('received remote close read but local sink is already closed'); + return; + } + this.log.trace('remote close read'); + this._closeSink(); + } + /** + * The underlying muxer has closed, no more messages can be sent or will + * be received, close immediately to free up resources + */ + destroy() { + if (this.status === 'closed' || this.status === 'aborted' || this.status === 'reset') { + this.log('received destroy but we are already closed'); + return; + } + this.log.trace('stream destroyed'); + this._closeSinkAndSource(); + } + /** + * When an extending class reads data from it's implementation-specific source, + * call this method to allow the stream consumer to read the data. + */ + sourcePush(data) { + this.streamSource.push(data); + } + /** + * Returns the amount of unread data - can be used to prevent large amounts of + * data building up when the stream consumer is too slow. 
+ */ + sourceReadableLength() { + return this.streamSource.readableLength; + } + } + + class WebTransportStream extends AbstractStream { + writer; + reader; + constructor(init) { + super(init); + this.writer = init.bidiStream.writable.getWriter(); + this.reader = init.bidiStream.readable.getReader(); + Promise.resolve() + .then(async () => { + while (true) { + const result = await this.reader.read(); + if (result.done) { + init.log('remote closed write'); + return; + } + if (result.value != null) { + this.sourcePush(new Uint8ArrayList(result.value)); + } + } + }) + .catch(err => { + init.log.error('error reading from stream', err); + this.abort(err); + }) + .finally(() => { + this.remoteCloseWrite(); + }); + void this.writer.closed + .then(() => { + init.log('writer closed'); + }) + .catch((err) => { + init.log('writer close promise rejected', err); + }) + .finally(() => { + this.remoteCloseRead(); + }); + } + sendNewStream(options) { + // this is a no-op + } + async sendData(buf, options) { + for await (const chunk of buf) { + this.log('sendData waiting for writer to be ready'); + await raceSignal(this.writer.ready, options?.signal); + // the streams spec recommends not waiting for data to be sent + // https://streams.spec.whatwg.org/#example-manual-write-dont-await + this.writer.write(chunk) + .catch(err => { + this.log.error('error sending stream data', err); + }); + } + } + async sendReset(options) { + this.log('sendReset aborting writer'); + await raceSignal(this.writer.abort(), options?.signal); + this.log('sendReset aborted writer'); + } + async sendCloseWrite(options) { + this.log('sendCloseWrite closing writer'); + await raceSignal(this.writer.close(), options?.signal); + this.log('sendCloseWrite closed writer'); + } + async sendCloseRead(options) { + this.log('sendCloseRead cancelling reader'); + await raceSignal(this.reader.cancel(), options?.signal); + this.log('sendCloseRead cancelled reader'); + } + } + async function 
webtransportBiDiStreamToStream(bidiStream, streamId, direction, activeStreams, onStreamEnd, logger) { + const log = logger.forComponent(`libp2p:webtransport:stream:${direction}:${streamId}`); + const stream = new WebTransportStream({ + bidiStream, + id: streamId, + direction, + log, + onEnd: () => { + const index = activeStreams.findIndex(s => s === stream); + if (index !== -1) { + activeStreams.splice(index, 1); + } + onStreamEnd?.(stream); + } + }); + return stream; + } + + // Duplex that does nothing. Needed to fulfill the interface + function inertDuplex() { + return { + source: { + [Symbol.asyncIterator]() { + return { + async next() { + // This will never resolve + return new Promise(() => { }); + } + }; + } + }, + sink: async (source) => { + // This will never resolve + return new Promise(() => { }); + } + }; + } + + function webtransportMuxer(wt, reader, logger, config) { + let streamIDCounter = 0; + const log = logger.forComponent('libp2p:webtransport:muxer'); + return { + protocol: 'webtransport', + createStreamMuxer: (init) => { + // !TODO handle abort signal when WebTransport supports this. + if (typeof init === 'function') { + // The api docs say that init may be a function + init = { onIncomingStream: init }; + } + const activeStreams = []; + void Promise.resolve().then(async () => { + //! TODO unclear how to add backpressure here? + while (true) { + const { done, value: wtStream } = await reader.read(); + if (done) { + break; + } + if (activeStreams.length >= config.maxInboundStreams) { + log(`too many inbound streams open - ${activeStreams.length}/${config.maxInboundStreams}, closing new incoming stream`); + // We've reached our limit, close this stream. 
+ wtStream.writable.close().catch((err) => { + log.error(`failed to close inbound stream that crossed our maxInboundStream limit: ${err.message}`); + }); + wtStream.readable.cancel().catch((err) => { + log.error(`failed to close inbound stream that crossed our maxInboundStream limit: ${err.message}`); + }); + } + else { + const stream = await webtransportBiDiStreamToStream(wtStream, String(streamIDCounter++), 'inbound', activeStreams, init?.onStreamEnd, logger); + activeStreams.push(stream); + init?.onIncomingStream?.(stream); + } + } + }); + const muxer = { + protocol: 'webtransport', + streams: activeStreams, + newStream: async (name) => { + log('new outgoing stream', name); + const wtStream = await wt.createBidirectionalStream(); + const stream = await webtransportBiDiStreamToStream(wtStream, String(streamIDCounter++), init?.direction ?? 'outbound', activeStreams, init?.onStreamEnd, logger); + activeStreams.push(stream); + return stream; + }, + /** + * Close all tracked streams and stop the muxer + */ + close: async () => { + log('closing webtransport muxer gracefully'); + wt.close(); + }, + /** + * Abort all tracked streams and stop the muxer + */ + abort: (err) => { + log('closing webtransport muxer with err:', err); + wt.close(); + }, + // This stream muxer is webtransport native. Therefore it doesn't plug in with any other duplex. + ...inertDuplex() + }; + return muxer; + } + }; + } + + /** + * Determines if `maybeSubset` is a subset of `set`. This means that all byte + * arrays in `maybeSubset` are present in `set`. + */ + function isSubset(set, maybeSubset) { + const intersection = maybeSubset.filter(byteArray => { + return Boolean(set.find((otherByteArray) => equals(byteArray, otherByteArray))); + }); + return (intersection.length === maybeSubset.length); + } + + // @ts-expect-error - Not easy to combine these types. 
+ const multibaseDecoder = Object.values(bases).map(b => b.decoder).reduce((d, b) => d.or(b)); + function decodeCerthashStr(s) { + return decode$1(multibaseDecoder.decode(s)); + } + function parseMultiaddr(ma) { + if (!WebTransport$3.matches(ma)) { + throw new CodeError$2('Invalid multiaddr, was not a WebTransport address', 'ERR_INVALID_MULTIADDR'); + } + const parts = ma.stringTuples(); + const certhashes = parts + .filter(([name, _]) => name === getProtocol('certhash').code) + .map(([_, value]) => decodeCerthashStr(value ?? '')); + // only take the first peer id in the multiaddr as it may be a relay + const remotePeer = parts + .filter(([name, _]) => name === getProtocol('p2p').code) + .map(([_, value]) => peerIdFromString(value ?? ''))[0]; + const opts = ma.toOptions(); + let host = opts.host; + if (opts.family === 6 && host?.includes(':')) { + /** + * This resolves cases where `new WebTransport()` fails to construct because of an invalid URL being passed. + * + * `new URL('https://::1:4001/blah')` will throw a `TypeError: Failed to construct 'URL': Invalid URL` + * `new URL('https://[::1]:4001/blah')` is valid and will not. + * + * @see https://datatracker.ietf.org/doc/html/rfc3986#section-3.2.2 + */ + host = `[${host}]`; + } + return { + // All webtransport urls are https + url: `https://${host}:${opts.port}`, + certhashes, + remotePeer + }; + } + + var WebTransport$2 = WebTransport; + + /** + * @packageDocumentation + * + * A [libp2p transport](https://docs.libp2p.io/concepts/transports/overview/) based on [WebTransport](https://www.w3.org/TR/webtransport/). + * + * > + * > โš ๏ธ **Note** + * > + * > This WebTransport implementation currently only allows dialing to other nodes. It does not yet allow listening for incoming dials. This feature requires QUIC support to land in Node JS first. + * > + * > QUIC support in Node JS is actively being worked on. 
You can keep an eye on the progress by watching the [related issues on the Node JS issue tracker](https://github.com/nodejs/node/labels/quic) + * > + * + * @example + * + * ```TypeScript + * import { createLibp2p } from 'libp2p' + * import { webTransport } from '@libp2p/webtransport' + * import { noise } from '@chainsafe/libp2p-noise' + * + * const node = await createLibp2p({ + * transports: [ + * webTransport() + * ], + * connectionEncryption: [ + * noise() + * ] + * }) + * ``` + */ + class WebTransportTransport { + log; + components; + config; + metrics; + constructor(components, init = {}) { + this.log = components.logger.forComponent('libp2p:webtransport'); + this.components = components; + this.config = { + ...init, + maxInboundStreams: init.maxInboundStreams ?? 1000, + certificates: init.certificates ?? [] + }; + if (components.metrics != null) { + this.metrics = { + dialerEvents: components.metrics.registerCounterGroup('libp2p_webtransport_dialer_events_total', { + label: 'event', + help: 'Total count of WebTransport dialer events by type' + }) + }; + } + } + [Symbol.toStringTag] = '@libp2p/webtransport'; + [transportSymbol] = true; + async dial(ma, options) { + if (options?.signal?.aborted === true) { + throw new AbortError$5(); + } + this.log('dialing %s', ma); + const localPeer = this.components.peerId; + if (localPeer === undefined) { + throw new CodeError$2('Need a local peerid', 'ERR_INVALID_PARAMETERS'); + } + options = options ?? 
{}; + const { url, certhashes, remotePeer } = parseMultiaddr(ma); + let abortListener; + let maConn; + let cleanUpWTSession = () => { }; + let closed = false; + let ready = false; + let authenticated = false; + try { + this.metrics?.dialerEvents.increment({ pending: true }); + const wt = new WebTransport$2(`${url}/.well-known/libp2p-webtransport?type=noise`, { + serverCertificateHashes: certhashes.map(certhash => ({ + algorithm: 'sha-256', + value: certhash.digest + })) + }); + cleanUpWTSession = (metric) => { + if (closed) { + // already closed session + return; + } + try { + this.metrics?.dialerEvents.increment({ [metric]: true }); + wt.close(); + } + catch (err) { + this.log.error('error closing wt session', err); + } + finally { + // This is how we specify the connection is closed and shouldn't be used. + if (maConn != null) { + maConn.timeline.close = Date.now(); + } + closed = true; + } + }; + // if the dial is aborted before we are ready, close the WebTransport session + abortListener = () => { + if (ready) { + cleanUpWTSession('noise_timeout'); + } + else { + cleanUpWTSession('ready_timeout'); + } + }; + options.signal?.addEventListener('abort', abortListener, { + once: true + }); + this.log('wait for session to be ready'); + await Promise.race([ + wt.closed, + wt.ready + ]); + this.log('session became ready'); + ready = true; + this.metrics?.dialerEvents.increment({ ready: true }); + // this promise resolves/throws when the session is closed + wt.closed.catch((err) => { + this.log.error('error on remote wt session close', err); + }) + .finally(() => { + cleanUpWTSession('remote_close'); + }); + authenticated = await raceSignal(this.authenticateWebTransport(wt, localPeer, remotePeer, certhashes), options.signal); + if (!authenticated) { + throw new CodeError$2('Failed to authenticate webtransport', 'ERR_AUTHENTICATION_FAILED'); + } + this.metrics?.dialerEvents.increment({ open: true }); + maConn = { + close: async () => { + this.log('closing webtransport'); 
+ cleanUpWTSession('close'); + }, + abort: (err) => { + this.log('aborting webtransport due to passed err', err); + cleanUpWTSession('abort'); + }, + remoteAddr: ma, + timeline: { + open: Date.now() + }, + log: this.components.logger.forComponent('libp2p:webtransport:maconn'), + // This connection is never used directly since webtransport supports native streams. + ...inertDuplex() + }; + return await options.upgrader.upgradeOutbound(maConn, { + skipEncryption: true, + muxerFactory: webtransportMuxer(wt, wt.incomingBidirectionalStreams.getReader(), this.components.logger, this.config), + skipProtection: true + }); + } + catch (err) { + this.log.error('caught wt session err', err); + if (authenticated) { + cleanUpWTSession('upgrade_error'); + } + else if (ready) { + cleanUpWTSession('noise_error'); + } + else { + cleanUpWTSession('ready_error'); + } + throw err; + } + finally { + if (abortListener != null) { + options.signal?.removeEventListener('abort', abortListener); + } + } + } + async authenticateWebTransport(wt, localPeer, remotePeer, certhashes = [], signal) { + if (signal?.aborted === true) { + throw new AbortError$5(); + } + const stream = await wt.createBidirectionalStream(); + const writer = stream.writable.getWriter(); + const reader = stream.readable.getReader(); + const duplex = { + source: (async function* () { + while (true) { + const val = await reader.read(); + if (val.value != null) { + yield val.value; + } + if (val.done) { + break; + } + } + })(), + sink: async (source) => { + for await (const chunk of source) { + await raceSignal(writer.ready, signal); + const buf = chunk instanceof Uint8Array ? 
chunk : chunk.subarray(); + writer.write(buf).catch(err => { + this.log.error('could not write chunk during authentication of WebTransport stream', err); + }); + } + } + }; + const n = noise()(this.components); + const { remoteExtensions } = await n.secureOutbound(localPeer, duplex, remotePeer); + // We're done with this authentication stream + writer.close().catch((err) => { + this.log.error(`Failed to close authentication stream writer: ${err.message}`); + }); + reader.cancel().catch((err) => { + this.log.error(`Failed to close authentication stream reader: ${err.message}`); + }); + // Verify the certhashes we used when dialing are a subset of the certhashes relayed by the remote peer + if (!isSubset(remoteExtensions?.webtransportCerthashes ?? [], certhashes.map(ch => ch.bytes))) { + throw new Error("Our certhashes are not a subset of the remote's reported certhashes"); + } + return true; + } + createListener(options) { + return createListener$1(this.components, { + ...options, + certificates: this.config.certificates, + maxInboundStreams: this.config.maxInboundStreams + }); + } + /** + * Filter check for all Multiaddrs that this transport can listen on + */ + listenFilter() { + return []; + } + /** + * Filter check for all Multiaddrs that this transport can dial + */ + dialFilter(multiaddrs) { + return multiaddrs.filter(ma => { + if (!WebTransport$3.exactMatch(ma)) { + return false; + } + const { url, certhashes } = parseMultiaddr(ma); + return url != null && certhashes.length > 0; + }); + } + } + function webTransport(init = {}) { + return (components) => new WebTransportTransport(components, init); + } + + // Protocol violation errors + const ERR_INVALID_FRAME = 'ERR_INVALID_FRAME'; + const ERR_UNREQUESTED_PING = 'ERR_UNREQUESTED_PING'; + const ERR_NOT_MATCHING_PING = 'ERR_NOT_MATCHING_PING'; + const ERR_STREAM_ALREADY_EXISTS = 'ERR_STREAM_ALREADY_EXISTS'; + const ERR_DECODE_INVALID_VERSION = 'ERR_DECODE_INVALID_VERSION'; + const ERR_BOTH_CLIENTS = 
'ERR_BOTH_CLIENTS'; + const ERR_RECV_WINDOW_EXCEEDED = 'ERR_RECV_WINDOW_EXCEEDED'; + const PROTOCOL_ERRORS = new Set([ + ERR_INVALID_FRAME, + ERR_UNREQUESTED_PING, + ERR_NOT_MATCHING_PING, + ERR_STREAM_ALREADY_EXISTS, + ERR_DECODE_INVALID_VERSION, + ERR_BOTH_CLIENTS, + ERR_RECV_WINDOW_EXCEEDED + ]); + // local errors + const ERR_INVALID_CONFIG = 'ERR_INVALID_CONFIG'; + const ERR_MUXER_LOCAL_CLOSED = 'ERR_MUXER_LOCAL_CLOSED'; + const ERR_MUXER_REMOTE_CLOSED = 'ERR_MUXER_REMOTE_CLOSED'; + const ERR_STREAM_ABORT = 'ERR_STREAM_ABORT'; + const ERR_MAX_OUTBOUND_STREAMS_EXCEEDED = 'ERROR_MAX_OUTBOUND_STREAMS_EXCEEDED'; + const ERR_DECODE_IN_PROGRESS = 'ERR_DECODE_IN_PROGRESS'; + /** + * INITIAL_STREAM_WINDOW is the initial stream window size. + * + * Not an implementation choice, this is defined in the specification + */ + const INITIAL_STREAM_WINDOW = 256 * 1024; + /** + * Default max stream window + */ + const MAX_STREAM_WINDOW = 16 * 1024 * 1024; + + const defaultConfig = { + enableKeepAlive: true, + keepAliveInterval: 30000, + maxInboundStreams: 1000, + maxOutboundStreams: 1000, + initialStreamWindowSize: INITIAL_STREAM_WINDOW, + maxStreamWindowSize: MAX_STREAM_WINDOW, + maxMessageSize: 64 * 1024 + }; + function verifyConfig(config) { + if (config.keepAliveInterval <= 0) { + throw new CodeError$2('keep-alive interval must be positive', ERR_INVALID_CONFIG); + } + if (config.maxInboundStreams < 0) { + throw new CodeError$2('max inbound streams must be larger or equal 0', ERR_INVALID_CONFIG); + } + if (config.maxOutboundStreams < 0) { + throw new CodeError$2('max outbound streams must be larger or equal 0', ERR_INVALID_CONFIG); + } + if (config.initialStreamWindowSize < INITIAL_STREAM_WINDOW) { + throw new CodeError$2('InitialStreamWindowSize must be larger or equal 256 kB', ERR_INVALID_CONFIG); + } + if (config.maxStreamWindowSize < config.initialStreamWindowSize) { + throw new CodeError$2('MaxStreamWindowSize must be larger than the InitialStreamWindowSize', 
ERR_INVALID_CONFIG); + } + if (config.maxStreamWindowSize > 2 ** 32 - 1) { + throw new CodeError$2('MaxStreamWindowSize must be less than equal MAX_UINT32', ERR_INVALID_CONFIG); + } + if (config.maxMessageSize < 1024) { + throw new CodeError$2('MaxMessageSize must be greater than a kilobyte', ERR_INVALID_CONFIG); + } + } + + var FrameType; + (function (FrameType) { + /** Used to transmit data. May transmit zero length payloads depending on the flags. */ + FrameType[FrameType["Data"] = 0] = "Data"; + /** Used to updated the senders receive window size. This is used to implement per-session flow control. */ + FrameType[FrameType["WindowUpdate"] = 1] = "WindowUpdate"; + /** Used to measure RTT. It can also be used to heart-beat and do keep-alives over TCP. */ + FrameType[FrameType["Ping"] = 2] = "Ping"; + /** Used to close a session. */ + FrameType[FrameType["GoAway"] = 3] = "GoAway"; + })(FrameType || (FrameType = {})); + var Flag; + (function (Flag) { + /** Signals the start of a new stream. May be sent with a data or window update message. Also sent with a ping to indicate outbound. */ + Flag[Flag["SYN"] = 1] = "SYN"; + /** Acknowledges the start of a new stream. May be sent with a data or window update message. Also sent with a ping to indicate response. */ + Flag[Flag["ACK"] = 2] = "ACK"; + /** Performs a half-close of a stream. May be sent with a data message or window update. */ + Flag[Flag["FIN"] = 4] = "FIN"; + /** Reset a stream immediately. May be sent with a data or window update message. 
*/ + Flag[Flag["RST"] = 8] = "RST"; + })(Flag || (Flag = {})); + Object.values(Flag).filter((x) => typeof x !== 'string'); + const YAMUX_VERSION = 0; + var GoAwayCode; + (function (GoAwayCode) { + GoAwayCode[GoAwayCode["NormalTermination"] = 0] = "NormalTermination"; + GoAwayCode[GoAwayCode["ProtocolError"] = 1] = "ProtocolError"; + GoAwayCode[GoAwayCode["InternalError"] = 2] = "InternalError"; + })(GoAwayCode || (GoAwayCode = {})); + const HEADER_LENGTH = 12; + + // used to bitshift in decoding + // native bitshift can overflow into a negative number, so we bitshift by multiplying by a power of 2 + const twoPow24 = 2 ** 24; + /** + * Decode a header from the front of a buffer + * + * @param data - Assumed to have enough bytes for a header + */ + function decodeHeader(data) { + if (data[0] !== YAMUX_VERSION) { + throw new CodeError$2('Invalid frame version', ERR_DECODE_INVALID_VERSION); + } + return { + type: data[1], + flag: (data[2] << 8) + data[3], + streamID: (data[4] * twoPow24) + (data[5] << 16) + (data[6] << 8) + data[7], + length: (data[8] * twoPow24) + (data[9] << 16) + (data[10] << 8) + data[11] + }; + } + /** + * Decodes yamux frames from a source + */ + class Decoder { + source; + /** Buffer for in-progress frames */ + buffer; + /** Used to sanity check against decoding while in an inconsistent state */ + frameInProgress; + constructor(source) { + // Normally, when entering a for-await loop with an iterable/async iterable, the only ways to exit the loop are: + // 1. exhaust the iterable + // 2. throw an error - slow, undesirable if there's not actually an error + // 3. break or return - calls the iterable's `return` method, finalizing the iterable, no more iteration possible + // + // In this case, we want to enter (and exit) a for-await loop per chunked data frame and continue processing the iterable. + // To do this, we strip the `return` method from the iterator and can now `break` early and continue iterating. 
+ // Exiting the main for-await is still possible via 1. and 2. + this.source = returnlessSource(source); + this.buffer = new Uint8ArrayList(); + this.frameInProgress = false; + } + /** + * Emits frames from the decoder source. + * + * Note: If `readData` is emitted, it _must_ be called before the next iteration + * Otherwise an error is thrown + */ + async *emitFrames() { + for await (const chunk of this.source) { + this.buffer.append(chunk); + // Loop to consume as many bytes from the buffer as possible + // Eg: when a single chunk contains several frames + while (true) { + const header = this.readHeader(); + if (header === undefined) { + break; + } + const { type, length } = header; + if (type === FrameType.Data) { + // This is a data frame, the frame body must still be read + // `readData` must be called before the next iteration here + this.frameInProgress = true; + yield { + header, + readData: this.readBytes.bind(this, length) + }; + } + else { + yield { header }; + } + } + } + } + readHeader() { + // Sanity check to ensure a header isn't read when another frame is partially decoded + // In practice this shouldn't happen + if (this.frameInProgress) { + throw new CodeError$2('decoding frame already in progress', ERR_DECODE_IN_PROGRESS); + } + if (this.buffer.length < HEADER_LENGTH) { + // not enough data yet + return; + } + const header = decodeHeader(this.buffer.subarray(0, HEADER_LENGTH)); + this.buffer.consume(HEADER_LENGTH); + return header; + } + async readBytes(length) { + if (this.buffer.length < length) { + for await (const chunk of this.source) { + this.buffer.append(chunk); + if (this.buffer.length >= length) { + // see note above, the iterator is not `return`ed here + break; + } + } + } + const out = this.buffer.sublist(0, length); + this.buffer.consume(length); + // The next frame can now be decoded + this.frameInProgress = false; + return out; + } + } + /** + * Strip the `return` method from a `Source` + */ + function returnlessSource(source) { + 
if (source[Symbol.iterator] !== undefined) { + const iterator = source[Symbol.iterator](); + iterator.return = undefined; + return { + [Symbol.iterator]() { return iterator; } + }; + } + else if (source[Symbol.asyncIterator] !== undefined) { + const iterator = source[Symbol.asyncIterator](); + iterator.return = undefined; + return { + [Symbol.asyncIterator]() { return iterator; } + }; + } + else { + throw new Error('a source must be either an iterable or an async iterable'); + } + } + + function encodeHeader(header) { + const frame = new Uint8Array(HEADER_LENGTH); + // always assume version 0 + // frameView.setUint8(0, header.version) + frame[1] = header.type; + frame[2] = header.flag >>> 8; + frame[3] = header.flag; + frame[4] = header.streamID >>> 24; + frame[5] = header.streamID >>> 16; + frame[6] = header.streamID >>> 8; + frame[7] = header.streamID; + frame[8] = header.length >>> 24; + frame[9] = header.length >>> 16; + frame[10] = header.length >>> 8; + frame[11] = header.length; + return frame; + } + + /** + * @packageDocumentation + * + * Calls a function for each value in an (async)iterable. + * + * The function can be sync or async. + * + * Async functions can be awaited on so may slow down processing of the (async)iterable. 
+ * + * @example + * + * ```javascript + * import each from 'it-foreach' + * import drain from 'it-drain' + * + * // This can also be an iterator, generator, etc + * const values = [0, 1, 2, 3, 4] + * + * // prints [0, 0], [1, 1], [2, 2], [3, 3], [4, 4] + * const arr = drain( + * each(values, console.info) + * ) + * ``` + * + * Async sources and callbacks must be awaited: + * + * ```javascript + * import each from 'it-foreach' + * import drain from 'it-drain' + * + * const values = async function * () { + * yield * [0, 1, 2, 3, 4] + * } + * + * // prints [0, 0], [1, 1], [2, 2], [3, 3], [4, 4] + * const arr = await drain( + * each(values(), console.info) + * ) + * ``` + */ + function isAsyncIterable$1(thing) { + return thing[Symbol.asyncIterator] != null; + } + function isPromise$1(thing) { + return thing?.then != null; + } + function forEach(source, fn) { + let index = 0; + if (isAsyncIterable$1(source)) { + return (async function* () { + for await (const val of source) { + const res = fn(val, index++); + if (isPromise$1(res)) { + await res; + } + yield val; + } + })(); + } + // if fn function returns a promise we have to return an async generator + const peekable$1 = peekable(source); + const { value, done } = peekable$1.next(); + if (done === true) { + return (function* () { }()); + } + const res = fn(value, index++); + if (typeof res?.then === 'function') { + return (async function* () { + yield value; + for await (const val of peekable$1) { + const res = fn(val, index++); + if (isPromise$1(res)) { + await res; + } + yield val; + } + })(); + } + const func = fn; + return (function* () { + yield value; + for (const val of peekable$1) { + func(val, index++); + yield val; + } + })(); + } + + var StreamState; + (function (StreamState) { + StreamState[StreamState["Init"] = 0] = "Init"; + StreamState[StreamState["SYNSent"] = 1] = "SYNSent"; + StreamState[StreamState["SYNReceived"] = 2] = "SYNReceived"; + StreamState[StreamState["Established"] = 3] = "Established"; + 
StreamState[StreamState["Finished"] = 4] = "Finished"; + })(StreamState || (StreamState = {})); + /** YamuxStream is used to represent a logical stream within a session */ + class YamuxStream extends AbstractStream { + name; + state; + config; + _id; + /** The number of available bytes to send */ + sendWindowCapacity; + /** Callback to notify that the sendWindowCapacity has been updated */ + sendWindowCapacityUpdate; + /** The number of bytes available to receive in a full window */ + recvWindow; + /** The number of available bytes to receive */ + recvWindowCapacity; + /** + * An 'epoch' is the time it takes to process and read data + * + * Used in conjunction with RTT to determine whether to increase the recvWindow + */ + epochStart; + getRTT; + sendFrame; + constructor(init) { + super({ + ...init, + onEnd: (err) => { + this.state = StreamState.Finished; + init.onEnd?.(err); + } + }); + this.config = init.config; + this._id = parseInt(init.id, 10); + this.name = init.name; + this.state = init.state; + this.sendWindowCapacity = INITIAL_STREAM_WINDOW; + this.recvWindow = this.config.initialStreamWindowSize; + this.recvWindowCapacity = this.recvWindow; + this.epochStart = Date.now(); + this.getRTT = init.getRTT; + this.sendFrame = init.sendFrame; + this.source = forEach(this.source, () => { + this.sendWindowUpdate(); + }); + } + /** + * Send a message to the remote muxer informing them a new stream is being + * opened. + * + * This is a noop for Yamux because the first window update is sent when + * .newStream is called on the muxer which opens the stream on the remote. 
+ */ + async sendNewStream() { + } + /** + * Send a data message to the remote muxer + */ + async sendData(buf, options = {}) { + buf = buf.sublist(); + // send in chunks, waiting for window updates + while (buf.byteLength !== 0) { + // wait for the send window to refill + if (this.sendWindowCapacity === 0) { + this.log?.trace('wait for send window capacity, status %s', this.status); + await this.waitForSendWindowCapacity(options); + // check we didn't close while waiting for send window capacity + if (this.status === 'closed' || this.status === 'aborted' || this.status === 'reset') { + this.log?.trace('%s while waiting for send window capacity', this.status); + return; + } + } + // send as much as we can + const toSend = Math.min(this.sendWindowCapacity, this.config.maxMessageSize - HEADER_LENGTH, buf.length); + const flags = this.getSendFlags(); + this.sendFrame({ + type: FrameType.Data, + flag: flags, + streamID: this._id, + length: toSend + }, buf.sublist(0, toSend)); + this.sendWindowCapacity -= toSend; + buf.consume(toSend); + } + } + /** + * Send a reset message to the remote muxer + */ + async sendReset() { + this.sendFrame({ + type: FrameType.WindowUpdate, + flag: Flag.RST, + streamID: this._id, + length: 0 + }); + } + /** + * Send a message to the remote muxer, informing them no more data messages + * will be sent by this end of the stream + */ + async sendCloseWrite() { + const flags = this.getSendFlags() | Flag.FIN; + this.sendFrame({ + type: FrameType.WindowUpdate, + flag: flags, + streamID: this._id, + length: 0 + }); + } + /** + * Send a message to the remote muxer, informing them no more data messages + * will be read by this end of the stream + */ + async sendCloseRead() { + } + /** + * Wait for the send window to be non-zero + * + * Will throw with ERR_STREAM_ABORT if the stream gets aborted + */ + async waitForSendWindowCapacity(options = {}) { + if (this.sendWindowCapacity > 0) { + return; + } + let resolve; + let reject; + const abort = () => { 
+ if (this.status === 'open' || this.status === 'closing') { + reject(new CodeError$2('stream aborted', ERR_STREAM_ABORT)); + } + else { + // the stream was closed already, ignore the failure to send + resolve(); + } + }; + options.signal?.addEventListener('abort', abort); + try { + await new Promise((_resolve, _reject) => { + this.sendWindowCapacityUpdate = () => { + _resolve(); + }; + reject = _reject; + resolve = _resolve; + }); + } + finally { + options.signal?.removeEventListener('abort', abort); + } + } + /** + * handleWindowUpdate is called when the stream receives a window update frame + */ + handleWindowUpdate(header) { + this.log?.trace('stream received window update id=%s', this._id); + this.processFlags(header.flag); + // increase send window + const available = this.sendWindowCapacity; + this.sendWindowCapacity += header.length; + // if the update increments a 0 availability, notify the stream that sending can resume + if (available === 0 && header.length > 0) { + this.sendWindowCapacityUpdate?.(); + } + } + /** + * handleData is called when the stream receives a data frame + */ + async handleData(header, readData) { + this.log?.trace('stream received data id=%s', this._id); + this.processFlags(header.flag); + // check that our recv window is not exceeded + if (this.recvWindowCapacity < header.length) { + throw new CodeError$2('receive window exceeded', ERR_RECV_WINDOW_EXCEEDED, { available: this.recvWindowCapacity, recv: header.length }); + } + const data = await readData(); + this.recvWindowCapacity -= header.length; + this.sourcePush(data); + } + /** + * processFlags is used to update the state of the stream based on set flags, if any. 
+ */ + processFlags(flags) { + if ((flags & Flag.ACK) === Flag.ACK) { + if (this.state === StreamState.SYNSent) { + this.state = StreamState.Established; + } + } + if ((flags & Flag.FIN) === Flag.FIN) { + this.remoteCloseWrite(); + } + if ((flags & Flag.RST) === Flag.RST) { + this.reset(); + } + } + /** + * getSendFlags determines any flags that are appropriate + * based on the current stream state. + * + * The state is updated as a side-effect. + */ + getSendFlags() { + switch (this.state) { + case StreamState.Init: + this.state = StreamState.SYNSent; + return Flag.SYN; + case StreamState.SYNReceived: + this.state = StreamState.Established; + return Flag.ACK; + default: + return 0; + } + } + /** + * potentially sends a window update enabling further writes to take place. + */ + sendWindowUpdate() { + // determine the flags if any + const flags = this.getSendFlags(); + // If the stream has already been established + // and we've processed data within the time it takes for 4 round trips + // then we (up to) double the recvWindow + const now = Date.now(); + const rtt = this.getRTT(); + if (flags === 0 && rtt > -1 && now - this.epochStart < rtt * 4) { + // we've already validated that maxStreamWindowSize can't be more than MAX_UINT32 + this.recvWindow = Math.min(this.recvWindow * 2, this.config.maxStreamWindowSize); + } + if (this.recvWindowCapacity >= this.recvWindow && flags === 0) { + // a window update isn't needed + return; + } + // update the receive window + const delta = this.recvWindow - this.recvWindowCapacity; + this.recvWindowCapacity = this.recvWindow; + // update the epoch start + this.epochStart = now; + // send window update + this.sendFrame({ + type: FrameType.WindowUpdate, + flag: flags, + streamID: this._id, + length: delta + }); + } + } + + const YAMUX_PROTOCOL_ID = '/yamux/1.0.0'; + const CLOSE_TIMEOUT = 500; + class Yamux { + protocol = YAMUX_PROTOCOL_ID; + _components; + _init; + constructor(components, init = {}) { + this._components = 
components; + this._init = init; + } + createStreamMuxer(init) { + return new YamuxMuxer(this._components, { + ...this._init, + ...init + }); + } + } + class YamuxMuxer { + protocol = YAMUX_PROTOCOL_ID; + source; + sink; + config; + log; + logger; + /** Used to close the muxer from either the sink or source */ + closeController; + /** The next stream id to be used when initiating a new stream */ + nextStreamID; + /** Primary stream mapping, streamID => stream */ + _streams; + /** The next ping id to be used when pinging */ + nextPingID; + /** Tracking info for the currently active ping */ + activePing; + /** Round trip time */ + rtt; + /** True if client, false if server */ + client; + localGoAway; + remoteGoAway; + /** Number of tracked inbound streams */ + numInboundStreams; + /** Number of tracked outbound streams */ + numOutboundStreams; + onIncomingStream; + onStreamEnd; + constructor(components, init) { + this.client = init.direction === 'outbound'; + this.config = { ...defaultConfig, ...init }; + this.logger = components.logger; + this.log = this.logger.forComponent('libp2p:yamux'); + verifyConfig(this.config); + this.closeController = new AbortController(); + setMaxListeners(Infinity, this.closeController.signal); + this.onIncomingStream = init.onIncomingStream; + this.onStreamEnd = init.onStreamEnd; + this._streams = new Map(); + this.source = pushable$1({ + onEnd: () => { + this.log?.trace('muxer source ended'); + this._streams.forEach(stream => { + stream.destroy(); + }); + } + }); + this.sink = async (source) => { + const shutDownListener = () => { + const iterator = getIterator(source); + if (iterator.return != null) { + const res = iterator.return(); + if (isPromise(res)) { + res.catch(err => { + this.log?.('could not cause sink source to return', err); + }); + } + } + }; + let reason, error; + try { + const decoder = new Decoder(source); + try { + this.closeController.signal.addEventListener('abort', shutDownListener); + for await (const frame of 
decoder.emitFrames()) { + await this.handleFrame(frame.header, frame.readData); + } + } + finally { + this.closeController.signal.removeEventListener('abort', shutDownListener); + } + reason = GoAwayCode.NormalTermination; + } + catch (err) { + // either a protocol or internal error + const errCode = err.code; + if (PROTOCOL_ERRORS.has(errCode)) { + this.log?.error('protocol error in sink', err); + reason = GoAwayCode.ProtocolError; + } + else { + this.log?.error('internal error in sink', err); + reason = GoAwayCode.InternalError; + } + error = err; + } + this.log?.trace('muxer sink ended'); + if (error != null) { + this.abort(error, reason); + } + else { + await this.close({ reason }); + } + }; + this.numInboundStreams = 0; + this.numOutboundStreams = 0; + // client uses odd streamIDs, server uses even streamIDs + this.nextStreamID = this.client ? 1 : 2; + this.nextPingID = 0; + this.rtt = -1; + this.log?.trace('muxer created'); + if (this.config.enableKeepAlive) { + this.keepAliveLoop().catch(e => this.log?.error('keepalive error: %s', e)); + } + // send an initial ping to establish RTT + this.ping().catch(e => this.log?.error('ping error: %s', e)); + } + get streams() { + return Array.from(this._streams.values()); + } + newStream(name) { + if (this.remoteGoAway !== undefined) { + throw new CodeError$2('muxer closed remotely', ERR_MUXER_REMOTE_CLOSED); + } + if (this.localGoAway !== undefined) { + throw new CodeError$2('muxer closed locally', ERR_MUXER_LOCAL_CLOSED); + } + const id = this.nextStreamID; + this.nextStreamID += 2; + // check against our configured maximum number of outbound streams + if (this.numOutboundStreams >= this.config.maxOutboundStreams) { + throw new CodeError$2('max outbound streams exceeded', ERR_MAX_OUTBOUND_STREAMS_EXCEEDED); + } + this.log?.trace('new outgoing stream id=%s', id); + const stream = this._newStream(id, name, StreamState.Init, 'outbound'); + this._streams.set(id, stream); + this.numOutboundStreams++; + // send a window 
update to open the stream on the receiver end + stream.sendWindowUpdate(); + return stream; + } + /** + * Initiate a ping and wait for a response + * + * Note: only a single ping will be initiated at a time. + * If a ping is already in progress, a new ping will not be initiated. + * + * @returns the round-trip-time in milliseconds + */ + async ping() { + if (this.remoteGoAway !== undefined) { + throw new CodeError$2('muxer closed remotely', ERR_MUXER_REMOTE_CLOSED); + } + if (this.localGoAway !== undefined) { + throw new CodeError$2('muxer closed locally', ERR_MUXER_LOCAL_CLOSED); + } + // An active ping does not yet exist, handle the process here + if (this.activePing === undefined) { + // create active ping + let _resolve = () => { }; + this.activePing = { + id: this.nextPingID++, + // this promise awaits resolution or the close controller aborting + promise: new Promise((resolve, reject) => { + const closed = () => { + reject(new CodeError$2('muxer closed locally', ERR_MUXER_LOCAL_CLOSED)); + }; + this.closeController.signal.addEventListener('abort', closed, { once: true }); + _resolve = () => { + this.closeController.signal.removeEventListener('abort', closed); + resolve(); + }; + }), + resolve: _resolve + }; + // send ping + const start = Date.now(); + this.sendPing(this.activePing.id); + // await pong + try { + await this.activePing.promise; + } + finally { + // clean-up active ping + delete this.activePing; + } + // update rtt + const end = Date.now(); + this.rtt = end - start; + } + else { + // an active ping is already in progress, piggyback off that + await this.activePing.promise; + } + return this.rtt; + } + /** + * Get the ping round trip time + * + * Note: Will return 0 if no successful ping has yet been completed + * + * @returns the round-trip-time in milliseconds + */ + getRTT() { + return this.rtt; + } + /** + * Close the muxer + */ + async close(options = {}) { + if (this.closeController.signal.aborted) { + // already closed + return; + } + const 
reason = options?.reason ?? GoAwayCode.NormalTermination; + this.log?.trace('muxer close reason=%s', reason); + if (options.signal == null) { + const signal = AbortSignal.timeout(CLOSE_TIMEOUT); + setMaxListeners(Infinity, signal); + options = { + ...options, + signal + }; + } + try { + await Promise.all([...this._streams.values()].map(async (s) => s.close(options))); + // send reason to the other side, allow the other side to close gracefully + this.sendGoAway(reason); + this._closeMuxer(); + } + catch (err) { + this.abort(err); + } + } + abort(err, reason) { + if (this.closeController.signal.aborted) { + // already closed + return; + } + reason = reason ?? GoAwayCode.InternalError; + // If reason was provided, use that, otherwise use the presence of `err` to determine the reason + this.log?.error('muxer abort reason=%s error=%s', reason, err); + // Abort all underlying streams + for (const stream of this._streams.values()) { + stream.abort(err); + } + // send reason to the other side, allow the other side to close gracefully + this.sendGoAway(reason); + this._closeMuxer(); + } + isClosed() { + return this.closeController.signal.aborted; + } + /** + * Called when either the local or remote shuts down the muxer + */ + _closeMuxer() { + // stop the sink and any other processes + this.closeController.abort(); + // stop the source + this.source.end(); + } + /** Create a new stream */ + _newStream(id, name, state, direction) { + if (this._streams.get(id) != null) { + throw new CodeError$2('Stream already exists', ERR_STREAM_ALREADY_EXISTS, { id }); + } + const stream = new YamuxStream({ + id: id.toString(), + name, + state, + direction, + sendFrame: this.sendFrame.bind(this), + onEnd: () => { + this.closeStream(id); + this.onStreamEnd?.(stream); + }, + log: this.logger.forComponent(`libp2p:yamux:${direction}:${id}`), + config: this.config, + getRTT: this.getRTT.bind(this) + }); + return stream; + } + /** + * closeStream is used to close a stream once both sides have + 
* issued a close. + */ + closeStream(id) { + if (this.client === (id % 2 === 0)) { + this.numInboundStreams--; + } + else { + this.numOutboundStreams--; + } + this._streams.delete(id); + } + async keepAliveLoop() { + const abortPromise = new Promise((_resolve, reject) => { this.closeController.signal.addEventListener('abort', reject, { once: true }); }); + this.log?.trace('muxer keepalive enabled interval=%s', this.config.keepAliveInterval); + while (true) { + let timeoutId; + try { + await Promise.race([ + abortPromise, + new Promise((resolve) => { + timeoutId = setTimeout(resolve, this.config.keepAliveInterval); + }) + ]); + this.ping().catch(e => this.log?.error('ping error: %s', e)); + } + catch (e) { + // closed + clearInterval(timeoutId); + return; + } + } + } + async handleFrame(header, readData) { + const { streamID, type, length } = header; + this.log?.trace('received frame %o', header); + if (streamID === 0) { + switch (type) { + case FrameType.Ping: + { + this.handlePing(header); + return; + } + case FrameType.GoAway: + { + this.handleGoAway(length); + return; + } + default: + // Invalid state + throw new CodeError$2('Invalid frame type', ERR_INVALID_FRAME, { header }); + } + } + else { + switch (header.type) { + case FrameType.Data: + case FrameType.WindowUpdate: + { + await this.handleStreamMessage(header, readData); + return; + } + default: + // Invalid state + throw new CodeError$2('Invalid frame type', ERR_INVALID_FRAME, { header }); + } + } + } + handlePing(header) { + // If the ping is initiated by the sender, send a response + if (header.flag === Flag.SYN) { + this.log?.trace('received ping request pingId=%s', header.length); + this.sendPing(header.length, Flag.ACK); + } + else if (header.flag === Flag.ACK) { + this.log?.trace('received ping response pingId=%s', header.length); + this.handlePingResponse(header.length); + } + else { + // Invalid state + throw new CodeError$2('Invalid frame flag', ERR_INVALID_FRAME, { header }); + } + } + 
handlePingResponse(pingId) { + if (this.activePing === undefined) { + // this ping was not requested + throw new CodeError$2('ping not requested', ERR_UNREQUESTED_PING); + } + if (this.activePing.id !== pingId) { + // this ping doesn't match our active ping request + throw new CodeError$2('ping doesn\'t match our id', ERR_NOT_MATCHING_PING); + } + // valid ping response + this.activePing.resolve(); + } + handleGoAway(reason) { + this.log?.trace('received GoAway reason=%s', GoAwayCode[reason] ?? 'unknown'); + this.remoteGoAway = reason; + // If the other side is friendly, they would have already closed all streams before sending a GoAway + // In case they weren't, reset all streams + for (const stream of this._streams.values()) { + stream.reset(); + } + this._closeMuxer(); + } + async handleStreamMessage(header, readData) { + const { streamID, flag, type } = header; + if ((flag & Flag.SYN) === Flag.SYN) { + this.incomingStream(streamID); + } + const stream = this._streams.get(streamID); + if (stream === undefined) { + if (type === FrameType.Data) { + this.log?.('discarding data for stream id=%s', streamID); + if (readData === undefined) { + throw new Error('unreachable'); + } + await readData(); + } + else { + this.log?.('frame for missing stream id=%s', streamID); + } + return; + } + switch (type) { + case FrameType.WindowUpdate: { + stream.handleWindowUpdate(header); + return; + } + case FrameType.Data: { + if (readData === undefined) { + throw new Error('unreachable'); + } + await stream.handleData(header, readData); + return; + } + default: + throw new Error('unreachable'); + } + } + incomingStream(id) { + if (this.client !== (id % 2 === 0)) { + throw new CodeError$2('both endpoints are clients', ERR_BOTH_CLIENTS); + } + if (this._streams.has(id)) { + return; + } + this.log?.trace('new incoming stream id=%s', id); + if (this.localGoAway !== undefined) { + // reject (reset) immediately if we are doing a go away + this.sendFrame({ + type: FrameType.WindowUpdate, + 
flag: Flag.RST, + streamID: id, + length: 0 + }); + return; + } + // check against our configured maximum number of inbound streams + if (this.numInboundStreams >= this.config.maxInboundStreams) { + this.log?.('maxIncomingStreams exceeded, forcing stream reset'); + this.sendFrame({ + type: FrameType.WindowUpdate, + flag: Flag.RST, + streamID: id, + length: 0 + }); + return; + } + // allocate a new stream + const stream = this._newStream(id, undefined, StreamState.SYNReceived, 'inbound'); + this.numInboundStreams++; + // the stream should now be tracked + this._streams.set(id, stream); + this.onIncomingStream?.(stream); + } + sendFrame(header, data) { + this.log?.trace('sending frame %o', header); + if (header.type === FrameType.Data) { + if (data === undefined) { + throw new CodeError$2('invalid frame', ERR_INVALID_FRAME); + } + this.source.push(new Uint8ArrayList(encodeHeader(header), data)); + } + else { + this.source.push(encodeHeader(header)); + } + } + sendPing(pingId, flag = Flag.SYN) { + if (flag === Flag.SYN) { + this.log?.trace('sending ping request pingId=%s', pingId); + } + else { + this.log?.trace('sending ping response pingId=%s', pingId); + } + this.sendFrame({ + type: FrameType.Ping, + flag, + streamID: 0, + length: pingId + }); + } + sendGoAway(reason = GoAwayCode.NormalTermination) { + this.log?.('sending GoAway reason=%s', GoAwayCode[reason]); + this.localGoAway = reason; + this.sendFrame({ + type: FrameType.GoAway, + flag: 0, + streamID: 0, + length: reason + }); + } + } + function isPromise(thing) { + return thing != null && typeof thing.then === 'function'; + } + + /** + * @packageDocumentation + * + * This module is a JavaScript implementation of [Yamux from Hashicorp](https://github.com/hashicorp/yamux/blob/master/spec.md) designed to be used with [js-libp2p](https://github.com/libp2p/js-libp2p). 
+ * + * @example Configure libp2p with Yamux + * + * ```typescript + * import { createLibp2p } from 'libp2p' + * import { yamux } from '@chainsafe/libp2p-yamux' + * + * const node = await createLibp2p({ + * // ... other options + * streamMuxers: [ + * yamux() + * ] + * }) + * ``` + * + * @example Using the low-level API + * + * ```js + * import { yamux } from '@chainsafe/libp2p-yamux' + * import { pipe } from 'it-pipe' + * import { duplexPair } from 'it-pair/duplex' + * import all from 'it-all' + * + * // Connect two yamux muxers to demo basic stream multiplexing functionality + * + * const clientMuxer = yamux({ + * client: true, + * onIncomingStream: stream => { + * // echo data on incoming streams + * pipe(stream, stream) + * }, + * onStreamEnd: stream => { + * // do nothing + * } + * })() + * + * const serverMuxer = yamux({ + * client: false, + * onIncomingStream: stream => { + * // echo data on incoming streams + * pipe(stream, stream) + * }, + * onStreamEnd: stream => { + * // do nothing + * } + * })() + * + * // `p` is our "connections", what we use to connect the two sides + * // In a real application, a connection is usually to a remote computer + * const p = duplexPair() + * + * // connect the muxers together + * pipe(p[0], clientMuxer, p[0]) + * pipe(p[1], serverMuxer, p[1]) + * + * // now either side can open streams + * const stream0 = clientMuxer.newStream() + * const stream1 = serverMuxer.newStream() + * + * // Send some data to the other side + * const encoder = new TextEncoder() + * const data = [encoder.encode('hello'), encoder.encode('world')] + * pipe(data, stream0) + * + * // Receive data back + * const result = await pipe(stream0, all) + * + * // close a stream + * stream1.close() + * + * // close the muxer + * clientMuxer.close() + * ``` + */ + function yamux(init = {}) { + return (components) => new Yamux(components, init); + } + + /** + * To guarantee Uint8Array semantics, convert nodejs Buffers + * into vanilla Uint8Arrays + */ + function 
asUint8Array(buf) { + if (globalThis.Buffer != null) { + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength); + } + return buf; + } + + /** + * Returns a `Uint8Array` of the requested size. Referenced memory will + * be initialized to 0. + */ + function alloc(size = 0) { + if (globalThis.Buffer?.alloc != null) { + return asUint8Array(globalThis.Buffer.alloc(size)); + } + return new Uint8Array(size); + } + + /* eslint-disable import/export */ + /* eslint-disable complexity */ + /* eslint-disable @typescript-eslint/no-namespace */ + /* eslint-disable @typescript-eslint/no-unnecessary-boolean-literal-compare */ + /* eslint-disable @typescript-eslint/no-empty-interface */ + var Peer$1; + (function (Peer) { + let _codec; + Peer.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if ((obj.publicKey != null && obj.publicKey.byteLength > 0)) { + w.uint32(10); + w.bytes(obj.publicKey); + } + if (obj.addrs != null) { + for (const value of obj.addrs) { + w.uint32(18); + w.bytes(value); + } + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length) => { + const obj = { + publicKey: alloc(0), + addrs: [] + }; + const end = length == null ? 
reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + obj.publicKey = reader.bytes(); + break; + } + case 2: { + obj.addrs.push(reader.bytes()); + break; + } + default: { + reader.skipType(tag & 7); + break; + } + } + } + return obj; + }); + } + return _codec; + }; + Peer.encode = (obj) => { + return encodeMessage(obj, Peer.codec()); + }; + Peer.decode = (buf) => { + return decodeMessage(buf, Peer.codec()); + }; + })(Peer$1 || (Peer$1 = {})); + + /** + * @packageDocumentation + * + * When the discovery module is started by libp2p it subscribes to the discovery pubsub topic(s) + * + * It will immediately broadcast your peer data via pubsub and repeat the broadcast on the configured `interval` + * + * ## Security Considerations + * + * It is worth noting that this module does not include any message signing for broadcasts. The reason for this is that libp2p-pubsub supports message signing and enables it by default, which means the message you received has been verified to be from the originator, so we can trust that the peer information we have received is indeed from the peer who owns it. This doesn't mean the peer can't falsify its own records, but this module isn't currently concerned with that scenario. + * + * ## Requirements + * + * This module *MUST* be used on a libp2p node that is running [Pubsub](https://github.com/libp2p/js-libp2p-pubsub). If Pubsub does not exist, or is not running, this module will not work. + * + * To run a PubSub service, include a `pubsub` implementation in your services map such as `@chainsafe/libp2p-gossipsub`. + * + * For more information see the [docs on customizing libp2p](https://github.com/libp2p/js-libp2p/blob/main/doc/CONFIGURATION.md#customizing-libp2p). 
+ * + * @example Usage in js-libp2p + * + * See the [js-libp2p configuration docs](https://github.com/libp2p/js-libp2p/blob/main/doc/CONFIGURATION.md#customizing-peer-discovery) for how to include this module as a peer discovery module in js-libp2p. + * + * If you are only interested in listening to the global pubsub topic the minimal configuration for using this with libp2p is: + * + * ```js + * import { createLibp2p } from 'libp2p' + * import { websockets } from '@libp2p/websockets' + * import { yamux } from '@chainsafe/libp2p-yamux' + * import { noise } from '@chainsafe/libp2p-noise' + * import { gossipsub } from '@chainsafe/libp2p-gossipsub' + * import { pubsubPeerDiscovery } from '@libp2p/pubsub-peer-discovery' + * import { identify } from 'libp2p/identify' + * + * const node = await createLibp2p({ + * transports: [ + * websockets() + * ], // Any libp2p transport(s) can be used + * streamMuxers: [ + * yamux() + * ], + * connectionEncryption: [ + * noise() + * ], + * peerDiscovery: [ + * pubsubPeerDiscovery() + * ], + * services: { + * pubsub: gossipsub(), + * identify: identify() + * } + * }) + * ``` + * + * @example Customizing Pubsub Peer Discovery + * + * There are a few options you can use to customize `Pubsub Peer Discovery`. You can see the detailed [options](#options) below. + * + * ```js + * // ... Other imports from above + * import PubSubPeerDiscovery from '@libp2p/pubsub-peer-discovery' + * + * // Custom topics + * const topics = [ + * `myApp._peer-discovery._p2p._pubsub`, // It's recommended but not required to extend the global space + * '_peer-discovery._p2p._pubsub' // Include if you want to participate in the global space + * ] + * + * const node = await createLibp2p({ + * // ... 
+ * peerDiscovery: [ + * pubsubPeerDiscovery({ + * interval: 10000, + * topics: topics, // defaults to ['_peer-discovery._p2p._pubsub'] + * listenOnly: false + * }) + * ] + * }) + * ``` + * + * ## Options + * + * | Name | Type | Description | + * | ---------- | --------------- | -------------------------------------------------------------------------------------------------------------- | + * | interval | `number` | How often (in `ms`), after initial broadcast, your node should broadcast your peer data. Default (`10000ms`) | + * | topics | `Array` | An Array of topic strings. If set, the default topic will not be used and must be included explicitly here | + * | listenOnly | `boolean` | If true it will not broadcast peer data. Dont set this unless you have a specific reason to. Default (`false`) | + * + * ## Default Topic + * + * The default pubsub topic the module subscribes to is `_peer-discovery._p2p._pubsub`, which is also set on `PubsubPeerDiscovery.TOPIC`. + */ + const TOPIC = '_peer-discovery._p2p._pubsub'; + /** + * A Peer Discovery Service that leverages libp2p Pubsub to find peers. + */ + class PubSubPeerDiscovery extends TypedEventEmitter { + [peerDiscoverySymbol] = true; + [Symbol.toStringTag] = '@libp2p/pubsub-peer-discovery'; + interval; + listenOnly; + topics; + intervalId; + components; + log; + constructor(components, init = {}) { + super(); + const { interval, topics, listenOnly } = init; + this.components = components; + this.interval = interval ?? 10000; + this.listenOnly = listenOnly ?? 
false; + this.log = components.logger.forComponent('libp2p:discovery:pubsub'); + // Ensure we have topics + if (Array.isArray(topics) && topics.length > 0) { + this.topics = topics; + } + else { + this.topics = [TOPIC]; + } + this._onMessage = this._onMessage.bind(this); + } + isStarted() { + return this.intervalId != null; + } + start() { + } + /** + * Subscribes to the discovery topic on `libp2p.pubsub` and performs a broadcast + * immediately, and every `this.interval` + */ + afterStart() { + if (this.intervalId != null) { + return; + } + const pubsub = this.components.pubsub; + if (pubsub == null) { + throw new Error('PubSub not configured'); + } + // Subscribe to pubsub + for (const topic of this.topics) { + pubsub.subscribe(topic); + pubsub.addEventListener('message', this._onMessage); + } + // Don't broadcast if we are only listening + if (this.listenOnly) { + return; + } + // Broadcast immediately, and then run on interval + this._broadcast(); + // Periodically publish our own information + this.intervalId = setInterval(() => { + this._broadcast(); + }, this.interval); + } + beforeStop() { + const pubsub = this.components.pubsub; + if (pubsub == null) { + throw new Error('PubSub not configured'); + } + for (const topic of this.topics) { + pubsub.unsubscribe(topic); + pubsub.removeEventListener('message', this._onMessage); + } + } + /** + * Unsubscribes from the discovery topic + */ + stop() { + if (this.intervalId != null) { + clearInterval(this.intervalId); + this.intervalId = undefined; + } + } + /** + * Performs a broadcast via Pubsub publish + */ + _broadcast() { + const peerId = this.components.peerId; + if (peerId.publicKey == null) { + throw new Error('PeerId was missing public key'); + } + const peer = { + publicKey: peerId.publicKey, + addrs: this.components.addressManager.getAddresses().map(ma => ma.bytes) + }; + const encodedPeer = Peer$1.encode(peer); + const pubsub = this.components.pubsub; + if (pubsub == null) { + throw new Error('PubSub not 
configured'); + } + for (const topic of this.topics) { + if (pubsub.getSubscribers(topic).length === 0) { + this.log('skipping broadcasting our peer data on topic %s because there are no peers present', topic); + continue; + } + this.log('broadcasting our peer data on topic %s', topic); + void pubsub.publish(topic, encodedPeer); + } + } + /** + * Handles incoming pubsub messages for our discovery topic + */ + _onMessage(event) { + if (!this.isStarted()) { + return; + } + const message = event.detail; + if (!this.topics.includes(message.topic)) { + return; + } + const peer = Peer$1.decode(message.data); + void peerIdFromKeys(peer.publicKey).then(peerId => { + // Ignore if we received our own response + if (peerId.equals(this.components.peerId)) { + return; + } + this.log('discovered peer %p on %s', peerId, message.topic); + this.safeDispatchEvent('peer', { + detail: { + id: peerId, + multiaddrs: peer.addrs.map(b => multiaddr(b)) + } + }); + }).catch(err => { + this.log.error(err); + }); + } + } + function pubsubPeerDiscovery(init = {}) { + return (components) => new PubSubPeerDiscovery(components, init); + } + + /** + * @packageDocumentation + * + * This module makes it easy to send and receive length-prefixed Protobuf encoded + * messages over streams. 
+ * + * @example + * + * ```typescript + * import { pbStream } from 'it-protobuf-stream' + * import { MessageType } from './src/my-message-type.js' + * + * // RequestType and ResponseType have been generate from `.proto` files and have + * // `.encode` and `.decode` methods for serialization/deserialization + * + * const stream = pbStream(duplex) + * + * // write a message to the stream + * stream.write({ + * foo: 'bar' + * }, MessageType) + * + * // read a message from the stream + * const res = await stream.read(MessageType) + * ``` + */ + function pbStream(duplex, opts) { + const lp = lpStream(duplex, opts); + const W = { + read: async (proto, options) => { + // readLP, decode + const value = await lp.read(options); + return proto.decode(value); + }, + write: async (message, proto, options) => { + // encode, writeLP + await lp.write(proto.encode(message), options); + }, + writeV: async (messages, proto, options) => { + // encode, writeLP + await lp.writeV(messages.map(message => proto.encode(message)), options); + }, + pb: (proto) => { + return { + read: async (options) => W.read(proto, options), + write: async (d, options) => W.write(d, proto, options), + writeV: async (d, options) => W.writeV(d, proto, options), + unwrap: () => W + }; + }, + unwrap: () => { + return lp.unwrap(); + } + }; + return W; + } + + /** + * Multicodec code + */ + const CIRCUIT_PROTO_CODE = 290; + /** + * Relay HOP relay service namespace for discovery + */ + const RELAY_RENDEZVOUS_NS = '/libp2p/relay'; + const DEFAULT_RESERVATION_CONCURRENCY = 1; + const RELAY_TAG = 'circuit-relay-relay'; + // 128k is the default data limit + BigInt(1 << 17); + /** + * The hop protocol + */ + const RELAY_V2_HOP_CODEC = '/libp2p/circuit/relay/0.2.0/hop'; + /** + * the stop protocol + */ + const RELAY_V2_STOP_CODEC = '/libp2p/circuit/relay/0.2.0/stop'; + const MAX_CONNECTIONS = 300; + const ERR_RELAYED_DIAL = 'ERR_RELAYED_DIAL'; + const ERR_HOP_REQUEST_FAILED = 'ERR_HOP_REQUEST_FAILED'; + + /* 
eslint-disable import/export */ + /* eslint-disable complexity */ + /* eslint-disable @typescript-eslint/no-namespace */ + /* eslint-disable @typescript-eslint/no-unnecessary-boolean-literal-compare */ + /* eslint-disable @typescript-eslint/no-empty-interface */ + var HopMessage; + (function (HopMessage) { + (function (Type) { + Type["RESERVE"] = "RESERVE"; + Type["CONNECT"] = "CONNECT"; + Type["STATUS"] = "STATUS"; + })(HopMessage.Type || (HopMessage.Type = {})); + let __TypeValues; + (function (__TypeValues) { + __TypeValues[__TypeValues["RESERVE"] = 0] = "RESERVE"; + __TypeValues[__TypeValues["CONNECT"] = 1] = "CONNECT"; + __TypeValues[__TypeValues["STATUS"] = 2] = "STATUS"; + })(__TypeValues || (__TypeValues = {})); + (function (Type) { + Type.codec = () => { + return enumeration(__TypeValues); + }; + })(HopMessage.Type || (HopMessage.Type = {})); + let _codec; + HopMessage.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if (obj.type != null) { + w.uint32(8); + HopMessage.Type.codec().encode(obj.type, w); + } + if (obj.peer != null) { + w.uint32(18); + Peer.codec().encode(obj.peer, w); + } + if (obj.reservation != null) { + w.uint32(26); + Reservation.codec().encode(obj.reservation, w); + } + if (obj.limit != null) { + w.uint32(34); + Limit.codec().encode(obj.limit, w); + } + if (obj.status != null) { + w.uint32(40); + Status.codec().encode(obj.status, w); + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length) => { + const obj = {}; + const end = length == null ? 
reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + obj.type = HopMessage.Type.codec().decode(reader); + break; + case 2: + obj.peer = Peer.codec().decode(reader, reader.uint32()); + break; + case 3: + obj.reservation = Reservation.codec().decode(reader, reader.uint32()); + break; + case 4: + obj.limit = Limit.codec().decode(reader, reader.uint32()); + break; + case 5: + obj.status = Status.codec().decode(reader); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return obj; + }); + } + return _codec; + }; + HopMessage.encode = (obj) => { + return encodeMessage(obj, HopMessage.codec()); + }; + HopMessage.decode = (buf) => { + return decodeMessage(buf, HopMessage.codec()); + }; + })(HopMessage || (HopMessage = {})); + var StopMessage; + (function (StopMessage) { + (function (Type) { + Type["CONNECT"] = "CONNECT"; + Type["STATUS"] = "STATUS"; + })(StopMessage.Type || (StopMessage.Type = {})); + let __TypeValues; + (function (__TypeValues) { + __TypeValues[__TypeValues["CONNECT"] = 0] = "CONNECT"; + __TypeValues[__TypeValues["STATUS"] = 1] = "STATUS"; + })(__TypeValues || (__TypeValues = {})); + (function (Type) { + Type.codec = () => { + return enumeration(__TypeValues); + }; + })(StopMessage.Type || (StopMessage.Type = {})); + let _codec; + StopMessage.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if (obj.type != null) { + w.uint32(8); + StopMessage.Type.codec().encode(obj.type, w); + } + if (obj.peer != null) { + w.uint32(18); + Peer.codec().encode(obj.peer, w); + } + if (obj.limit != null) { + w.uint32(26); + Limit.codec().encode(obj.limit, w); + } + if (obj.status != null) { + w.uint32(32); + Status.codec().encode(obj.status, w); + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length) => { + const obj = {}; + const end = length == null ? 
reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + obj.type = StopMessage.Type.codec().decode(reader); + break; + case 2: + obj.peer = Peer.codec().decode(reader, reader.uint32()); + break; + case 3: + obj.limit = Limit.codec().decode(reader, reader.uint32()); + break; + case 4: + obj.status = Status.codec().decode(reader); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return obj; + }); + } + return _codec; + }; + StopMessage.encode = (obj) => { + return encodeMessage(obj, StopMessage.codec()); + }; + StopMessage.decode = (buf) => { + return decodeMessage(buf, StopMessage.codec()); + }; + })(StopMessage || (StopMessage = {})); + var Peer; + (function (Peer) { + let _codec; + Peer.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if ((obj.id != null && obj.id.byteLength > 0)) { + w.uint32(10); + w.bytes(obj.id); + } + if (obj.addrs != null) { + for (const value of obj.addrs) { + w.uint32(18); + w.bytes(value); + } + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length) => { + const obj = { + id: new Uint8Array(0), + addrs: [] + }; + const end = length == null ? 
reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + obj.id = reader.bytes(); + break; + case 2: + obj.addrs.push(reader.bytes()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return obj; + }); + } + return _codec; + }; + Peer.encode = (obj) => { + return encodeMessage(obj, Peer.codec()); + }; + Peer.decode = (buf) => { + return decodeMessage(buf, Peer.codec()); + }; + })(Peer || (Peer = {})); + var Reservation; + (function (Reservation) { + let _codec; + Reservation.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if ((obj.expire != null && obj.expire !== 0n)) { + w.uint32(8); + w.uint64(obj.expire); + } + if (obj.addrs != null) { + for (const value of obj.addrs) { + w.uint32(18); + w.bytes(value); + } + } + if (obj.voucher != null) { + w.uint32(26); + w.bytes(obj.voucher); + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length) => { + const obj = { + expire: 0n, + addrs: [] + }; + const end = length == null ? 
reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + obj.expire = reader.uint64(); + break; + case 2: + obj.addrs.push(reader.bytes()); + break; + case 3: + obj.voucher = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return obj; + }); + } + return _codec; + }; + Reservation.encode = (obj) => { + return encodeMessage(obj, Reservation.codec()); + }; + Reservation.decode = (buf) => { + return decodeMessage(buf, Reservation.codec()); + }; + })(Reservation || (Reservation = {})); + var Limit; + (function (Limit) { + let _codec; + Limit.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if (obj.duration != null) { + w.uint32(8); + w.uint32(obj.duration); + } + if (obj.data != null) { + w.uint32(16); + w.uint64(obj.data); + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length) => { + const obj = {}; + const end = length == null ? 
reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + obj.duration = reader.uint32(); + break; + case 2: + obj.data = reader.uint64(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return obj; + }); + } + return _codec; + }; + Limit.encode = (obj) => { + return encodeMessage(obj, Limit.codec()); + }; + Limit.decode = (buf) => { + return decodeMessage(buf, Limit.codec()); + }; + })(Limit || (Limit = {})); + var Status; + (function (Status) { + Status["UNUSED"] = "UNUSED"; + Status["OK"] = "OK"; + Status["RESERVATION_REFUSED"] = "RESERVATION_REFUSED"; + Status["RESOURCE_LIMIT_EXCEEDED"] = "RESOURCE_LIMIT_EXCEEDED"; + Status["PERMISSION_DENIED"] = "PERMISSION_DENIED"; + Status["CONNECTION_FAILED"] = "CONNECTION_FAILED"; + Status["NO_RESERVATION"] = "NO_RESERVATION"; + Status["MALFORMED_MESSAGE"] = "MALFORMED_MESSAGE"; + Status["UNEXPECTED_MESSAGE"] = "UNEXPECTED_MESSAGE"; + })(Status || (Status = {})); + var __StatusValues; + (function (__StatusValues) { + __StatusValues[__StatusValues["UNUSED"] = 0] = "UNUSED"; + __StatusValues[__StatusValues["OK"] = 100] = "OK"; + __StatusValues[__StatusValues["RESERVATION_REFUSED"] = 200] = "RESERVATION_REFUSED"; + __StatusValues[__StatusValues["RESOURCE_LIMIT_EXCEEDED"] = 201] = "RESOURCE_LIMIT_EXCEEDED"; + __StatusValues[__StatusValues["PERMISSION_DENIED"] = 202] = "PERMISSION_DENIED"; + __StatusValues[__StatusValues["CONNECTION_FAILED"] = 203] = "CONNECTION_FAILED"; + __StatusValues[__StatusValues["NO_RESERVATION"] = 204] = "NO_RESERVATION"; + __StatusValues[__StatusValues["MALFORMED_MESSAGE"] = 400] = "MALFORMED_MESSAGE"; + __StatusValues[__StatusValues["UNEXPECTED_MESSAGE"] = 401] = "UNEXPECTED_MESSAGE"; + })(__StatusValues || (__StatusValues = {})); + (function (Status) { + Status.codec = () => { + return enumeration(__StatusValues); + }; + })(Status || (Status = {})); + var ReservationVoucher; + (function (ReservationVoucher) { + 
let _codec; + ReservationVoucher.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if ((obj.relay != null && obj.relay.byteLength > 0)) { + w.uint32(10); + w.bytes(obj.relay); + } + if ((obj.peer != null && obj.peer.byteLength > 0)) { + w.uint32(18); + w.bytes(obj.peer); + } + if ((obj.expiration != null && obj.expiration !== 0n)) { + w.uint32(24); + w.uint64(obj.expiration); + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length) => { + const obj = { + relay: new Uint8Array(0), + peer: new Uint8Array(0), + expiration: 0n + }; + const end = length == null ? reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + obj.relay = reader.bytes(); + break; + case 2: + obj.peer = reader.bytes(); + break; + case 3: + obj.expiration = reader.uint64(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return obj; + }); + } + return _codec; + }; + ReservationVoucher.encode = (obj) => { + return encodeMessage(obj, ReservationVoucher.codec()); + }; + ReservationVoucher.decode = (buf) => { + return decodeMessage(buf, ReservationVoucher.codec()); + }; + })(ReservationVoucher || (ReservationVoucher = {})); + + /** + * Convert a namespace string into a cid + */ + async function namespaceToCid(namespace) { + const bytes = new TextEncoder().encode(namespace); + const hash = await sha256$1.digest(bytes); + return CID.createV0(hash); + } + /** + * returns number of ms between now and expiration time + */ + function getExpirationMilliseconds(expireTimeSeconds) { + const expireTimeMillis = expireTimeSeconds * BigInt(1000); + const currentTime = new Date().getTime(); + // downcast to number to use with setTimeout + return Number(expireTimeMillis - BigInt(currentTime)); + } + + /** + * ReservationManager automatically makes a circuit v2 reservation on any connected + * peers that 
support the circuit v2 HOP protocol. + */ + class RelayDiscovery extends TypedEventEmitter { + peerId; + peerStore; + contentRouting; + registrar; + started; + topologyId; + log; + constructor(components) { + super(); + this.log = components.logger.forComponent('libp2p:circuit-relay:discover-relays'); + this.started = false; + this.peerId = components.peerId; + this.peerStore = components.peerStore; + this.contentRouting = components.contentRouting; + this.registrar = components.registrar; + } + isStarted() { + return this.started; + } + async start() { + // register a topology listener for when new peers are encountered + // that support the hop protocol + this.topologyId = await this.registrar.register(RELAY_V2_HOP_CODEC, { + notifyOnTransient: true, + onConnect: (peerId) => { + this.safeDispatchEvent('relay:discover', { detail: peerId }); + } + }); + this.started = true; + } + afterStart() { + void this.discover() + .catch(err => { + this.log.error('error discovering relays', err); + }); + } + stop() { + if (this.topologyId != null) { + this.registrar.unregister(this.topologyId); + } + this.started = false; + } + /** + * Try to listen on available hop relay connections. + * The following order will happen while we do not have enough relays: + * + * 1. Check the metadata store for known relays, try to listen on the ones we are already connected + * 2. Dial and try to listen on the peers we know that support hop but are not connected + * 3. Search the network + */ + async discover() { + this.log('searching peer store for relays'); + const peers = (await this.peerStore.all({ + filters: [ + // filter by a list of peers supporting RELAY_V2_HOP and ones we are not listening on + (peer) => { + return peer.protocols.includes(RELAY_V2_HOP_CODEC); + } + ], + orders: [ + () => Math.random() < 0.5 ? 
1 : -1 + ] + })); + for (const peer of peers) { + this.log('found relay peer %p in content peer store', peer.id); + this.safeDispatchEvent('relay:discover', { detail: peer.id }); + } + this.log('found %d relay peers in peer store', peers.length); + try { + this.log('searching content routing for relays'); + const cid = await namespaceToCid(RELAY_RENDEZVOUS_NS); + let found = 0; + for await (const provider of this.contentRouting.findProviders(cid)) { + if (provider.multiaddrs.length > 0 && !provider.id.equals(this.peerId)) { + const peerId = provider.id; + found++; + await this.peerStore.merge(peerId, { + multiaddrs: provider.multiaddrs + }); + this.log('found relay peer %p in content routing', peerId); + this.safeDispatchEvent('relay:discover', { detail: peerId }); + } + } + this.log('found %d relay peers in content routing', found); + } + catch (err) { + this.log.error('failed when finding relays on the network', err); + } + } + } + + // allow refreshing a relay reservation if it will expire in the next 10 minutes + const REFRESH_WINDOW = (60 * 1000) * 10; + // try to refresh relay reservations 5 minutes before expiry + const REFRESH_TIMEOUT = (60 * 1000) * 5; + // minimum duration before which a reservation must not be refreshed + const REFRESH_TIMEOUT_MIN = 30 * 1000; + class ReservationStore extends TypedEventEmitter { + peerId; + connectionManager; + transportManager; + peerStore; + events; + reserveQueue; + reservations; + maxDiscoveredRelays; + maxReservationQueueLength; + reservationCompletionTimeout; + started; + log; + constructor(components, init) { + super(); + this.log = components.logger.forComponent('libp2p:circuit-relay:transport:reservation-store'); + this.peerId = components.peerId; + this.connectionManager = components.connectionManager; + this.transportManager = components.transportManager; + this.peerStore = components.peerStore; + this.events = components.events; + this.reservations = new PeerMap(); + this.maxDiscoveredRelays = 
init?.discoverRelays ?? 0; + this.maxReservationQueueLength = init?.maxReservationQueueLength ?? 100; + this.reservationCompletionTimeout = init?.reservationCompletionTimeout ?? 10000; + this.started = false; + // ensure we don't listen on multiple relays simultaneously + this.reserveQueue = new PeerQueue({ + concurrency: init?.reservationConcurrency ?? DEFAULT_RESERVATION_CONCURRENCY, + metricName: 'libp2p_relay_reservation_queue', + metrics: components.metrics + }); + // When a peer disconnects, if we had a reservation on that peer + // remove the reservation and multiaddr and maybe trigger search + // for new relays + this.events.addEventListener('peer:disconnect', (evt) => { + this.#removeRelay(evt.detail); + }); + } + isStarted() { + return this.started; + } + start() { + this.started = true; + } + stop() { + this.reserveQueue.clear(); + this.reservations.forEach(({ timeout }) => { + clearTimeout(timeout); + }); + this.reservations.clear(); + this.started = false; + } + /** + * If the number of current relays is beneath the configured `maxReservations` + * value, and the passed peer id is not our own, and we have a non-relayed connection + * to the remote, and the remote peer speaks the hop protocol, try to reserve a slot + * on the remote peer + */ + async addRelay(peerId, type) { + if (this.peerId.equals(peerId)) { + this.log('not trying to use self as relay'); + return; + } + if (this.reserveQueue.size > this.maxReservationQueueLength) { + this.log('not adding relay as the queue is full'); + return; + } + if (this.reserveQueue.has(peerId)) { + this.log('relay peer is already in the reservation queue'); + return; + } + this.log('add relay %p', peerId); + await this.reserveQueue.add(async () => { + try { + // allow refresh of an existing reservation if it is about to expire + const existingReservation = this.reservations.get(peerId); + if (existingReservation != null) { + if (getExpirationMilliseconds(existingReservation.reservation.expire) > REFRESH_WINDOW) 
{ + this.log('already have reservation on relay peer %p and it expires in more than 10 minutes', peerId); + return; + } + clearTimeout(existingReservation.timeout); + this.reservations.delete(peerId); + } + if (type === 'discovered' && [...this.reservations.values()].reduce((acc, curr) => { + if (curr.type === 'discovered') { + acc++; + } + return acc; + }, 0) >= this.maxDiscoveredRelays) { + this.log('already have enough discovered relays'); + return; + } + const signal = AbortSignal.timeout(this.reservationCompletionTimeout); + const connection = await this.connectionManager.openConnection(peerId, { + signal + }); + if (connection.remoteAddr.protoNames().includes('p2p-circuit')) { + this.log('not creating reservation over relayed connection'); + return; + } + const reservation = await this.#createReservation(connection, { + signal + }); + this.log('created reservation on relay peer %p', peerId); + const expiration = getExpirationMilliseconds(reservation.expire); + // sets a lower bound on the timeout, and also don't let it go over + // 2^31 - 1 (setTimeout will only accept signed 32 bit integers) + const timeoutDuration = Math.min(Math.max(expiration - REFRESH_TIMEOUT, REFRESH_TIMEOUT_MIN), Math.pow(2, 31) - 1); + const timeout = setTimeout(() => { + this.addRelay(peerId, type).catch(err => { + this.log.error('could not refresh reservation to relay %p', peerId, err); + }); + }, timeoutDuration); + // we've managed to create a reservation successfully + this.reservations.set(peerId, { + timeout, + reservation, + type + }); + // ensure we don't close the connection to the relay + await this.peerStore.merge(peerId, { + tags: { + [RELAY_TAG]: { + value: 1, + ttl: expiration + } + } + }); + // listen on multiaddr that only the circuit transport is listening for + await this.transportManager.listen([multiaddr(`/p2p/${peerId.toString()}/p2p-circuit`)]); + } + catch (err) { + this.log.error('could not reserve slot on %p', peerId, err); + // cancel the renewal timeout if 
it's been set + const reservation = this.reservations.get(peerId); + if (reservation != null) { + clearTimeout(reservation.timeout); + } + // if listening failed, remove the reservation + this.reservations.delete(peerId); + } + }, { + peerId + }); + } + hasReservation(peerId) { + return this.reservations.has(peerId); + } + getReservation(peerId) { + return this.reservations.get(peerId)?.reservation; + } + async #createReservation(connection, options) { + options.signal?.throwIfAborted(); + this.log('requesting reservation from %p', connection.remotePeer); + const stream = await connection.newStream(RELAY_V2_HOP_CODEC, options); + const pbstr = pbStream(stream); + const hopstr = pbstr.pb(HopMessage); + await hopstr.write({ type: HopMessage.Type.RESERVE }, options); + let response; + try { + response = await hopstr.read(options); + } + catch (err) { + this.log.error('error parsing reserve message response from %p because', connection.remotePeer, err); + stream.abort(err); + throw err; + } + finally { + await stream.close(); + } + if (response.status === Status.OK && (response.reservation != null)) { + // check that the returned relay has the relay address - this can be + // omitted when requesting a reservation from a go-libp2p relay we + // already have a reservation on + let hasRelayAddress = false; + const relayAddressBytes = connection.remoteAddr.bytes; + for (const buf of response.reservation.addrs) { + if (equals(relayAddressBytes, buf)) { + hasRelayAddress = true; + break; + } + } + if (!hasRelayAddress) { + response.reservation.addrs.push(relayAddressBytes); + } + return response.reservation; + } + const errMsg = `reservation failed with status ${response.status ?? 
'undefined'}`; + this.log.error(errMsg); + throw new Error(errMsg); + } + /** + * Remove listen relay + */ + #removeRelay(peerId) { + const existingReservation = this.reservations.get(peerId); + if (existingReservation == null) { + return; + } + this.log('connection to relay %p closed, removing reservation from local store', peerId); + clearTimeout(existingReservation.timeout); + this.reservations.delete(peerId); + this.safeDispatchEvent('relay:removed', { detail: peerId }); + if (this.reservations.size < this.maxDiscoveredRelays) { + this.log('not enough relays %d/%d', this.reservations.size, this.maxDiscoveredRelays); + this.safeDispatchEvent('relay:not-enough-relays', {}); + } + } + } + + /** + * Convert a duplex iterable into a MultiaddrConnection. + * https://github.com/libp2p/interface-transport#multiaddrconnection + */ + function streamToMaConnection(props) { + const { stream, remoteAddr, logger } = props; + const log = logger.forComponent('libp2p:stream:converter'); + let closedRead = false; + let closedWrite = false; + // piggyback on `stream.close` invocations to close maconn + const streamClose = stream.close.bind(stream); + stream.close = async (options) => { + await streamClose(options); + close(true); + }; + // piggyback on `stream.abort` invocations to close maconn + const streamAbort = stream.abort.bind(stream); + stream.abort = (err) => { + streamAbort(err); + close(true); + }; + // piggyback on `stream.sink` invocations to close maconn + const streamSink = stream.sink.bind(stream); + stream.sink = async (source) => { + try { + await streamSink(source); + } + catch (err) { + // If aborted we can safely ignore + if (err.type !== 'aborted') { + // If the source errored the socket will already have been destroyed by + // toIterable.duplex(). If the socket errored it will already be + // destroyed. There's nothing to do here except log the error & return. 
+ log.error('%s error in sink', remoteAddr, err); + } + } + finally { + closedWrite = true; + close(); + } + }; + const maConn = { + log, + sink: stream.sink, + source: (async function* () { + try { + for await (const list of stream.source) { + if (list instanceof Uint8Array) { + yield list; + } + else { + yield* list; + } + } + } + finally { + closedRead = true; + close(); + } + }()), + remoteAddr, + timeline: { open: Date.now(), close: undefined }, + close: stream.close, + abort: stream.abort + }; + function close(force) { + if (force === true) { + closedRead = true; + closedWrite = true; + } + if (closedRead && closedWrite && maConn.timeline.close == null) { + maConn.timeline.close = Date.now(); + } + } + return maConn; + } + + /* + * Valid combinations + */ + const DNS4 = base('dns4'); + const DNS6 = base('dns6'); + const DNSADDR = base('dnsaddr'); + const DNS = or(base('dns'), DNSADDR, DNS4, DNS6); + const IP = or(base('ip4'), base('ip6')); + const TCP = or(and(IP, base('tcp')), and(DNS, base('tcp'))); + const UDP = and(IP, base('udp')); + const UTP = and(UDP, base('utp')); + const QUIC = and(UDP, base('quic')); + const QUICV1 = and(UDP, base('quic-v1')); + const _WebSockets = or(and(TCP, base('ws')), and(DNS, base('ws'))); + const WebSockets = or(and(_WebSockets, base('p2p')), _WebSockets); + const _WebSocketsSecure = or(and(TCP, base('wss')), and(DNS, base('wss')), and(TCP, base('tls'), base('ws')), and(DNS, base('tls'), base('ws'))); + const WebSocketsSecure = or(and(_WebSocketsSecure, base('p2p')), _WebSocketsSecure); + const HTTP = or(and(TCP, base('http')), and(IP, base('http')), and(DNS, base('http'))); + const HTTPS = or(and(TCP, base('https')), and(IP, base('https')), and(DNS, base('https'))); + const _WebRTCDirect = and(UDP, base('webrtc-direct'), base('certhash')); + const WebRTCDirect = or(and(_WebRTCDirect, base('p2p')), _WebRTCDirect); + const _WebTransport = and(QUICV1, base('webtransport'), base('certhash'), base('certhash')); + const 
WebTransport$1 = or(and(_WebTransport, base('p2p')), _WebTransport); + /** + * @deprecated + */ + const P2PWebRTCStar = or(and(WebSockets, base('p2p-webrtc-star'), base('p2p')), and(WebSocketsSecure, base('p2p-webrtc-star'), base('p2p')), and(WebSockets, base('p2p-webrtc-star')), and(WebSocketsSecure, base('p2p-webrtc-star'))); + /** + * @deprecated + */ + const P2PWebRTCDirect = or(and(HTTP, base('p2p-webrtc-direct'), base('p2p')), and(HTTPS, base('p2p-webrtc-direct'), base('p2p')), and(HTTP, base('p2p-webrtc-direct')), and(HTTPS, base('p2p-webrtc-direct'))); + const Reliable = or(_WebSockets, _WebSocketsSecure, HTTP, HTTPS, P2PWebRTCStar, P2PWebRTCDirect, TCP, UTP, QUIC, DNS, WebRTCDirect, WebTransport$1); + const _P2P = or(and(Reliable, base('p2p')), P2PWebRTCStar, P2PWebRTCDirect, WebRTCDirect, WebTransport$1, base('p2p')); + const _Circuit = or(and(_P2P, base('p2p-circuit'), _P2P), and(_P2P, base('p2p-circuit')), and(base('p2p-circuit'), _P2P), and(Reliable, base('p2p-circuit')), and(base('p2p-circuit'), Reliable), base('p2p-circuit')); + const CircuitRecursive = () => or(and(_Circuit, CircuitRecursive), _Circuit); + const Circuit = CircuitRecursive(); + /* + * Validation funcs + */ + function makeMatchesFunction(partialMatch) { + function matches(a) { + let ma; + try { + ma = multiaddr(a); + } + catch (err) { // catch error + return false; // also if it's invalid it's probably not matching as well so return false + } + const out = partialMatch(ma.protoNames()); + if (out === null) { + return false; + } + if (out === true || out === false) { + return out; + } + return out.length === 0; + } + return matches; + } + function and(...args) { + function partialMatch(a) { + if (a.length < args.length) { + return null; + } + let out = a; + args.some((arg) => { + out = typeof arg === 'function' + ? 
arg().partialMatch(a) + : arg.partialMatch(a); + if (Array.isArray(out)) { + a = out; + } + if (out === null) { + return true; + } + return false; + }); + return out; + } + return { + toString: function () { return '{ ' + args.join(' ') + ' }'; }, + input: args, + matches: makeMatchesFunction(partialMatch), + partialMatch + }; + } + function or(...args) { + function partialMatch(a) { + let out = null; + args.some((arg) => { + const res = typeof arg === 'function' + ? arg().partialMatch(a) + : arg.partialMatch(a); + if (res != null) { + out = res; + return true; + } + return false; + }); + return out; + } + const result = { + toString: function () { return '{ ' + args.join(' ') + ' }'; }, + input: args, + matches: makeMatchesFunction(partialMatch), + partialMatch + }; + return result; + } + function base(n) { + const name = n; + function matches(a) { + let ma; + try { + ma = multiaddr(a); + } + catch (err) { // catch error + return false; // also if it's invalid it's probably not matching as well so return false + } + const pnames = ma.protoNames(); + if (pnames.length === 1 && pnames[0] === name) { + return true; + } + return false; + } + function partialMatch(protos) { + if (protos.length === 0) { + return null; + } + if (protos[0] === name) { + return protos.slice(1); + } + return null; + } + return { + toString: function () { return name; }, + matches, + partialMatch + }; + } + + class CircuitRelayTransportListener extends TypedEventEmitter { + connectionManager; + relayStore; + listeningAddrs; + log; + constructor(components) { + super(); + this.log = components.logger.forComponent('libp2p:circuit-relay:transport:listener'); + this.connectionManager = components.connectionManager; + this.relayStore = components.relayStore; + this.listeningAddrs = new PeerMap(); + // remove listening addrs when a relay is removed + this.relayStore.addEventListener('relay:removed', this._onRemoveRelayPeer); + } + _onRemoveRelayPeer = (evt) => { + 
this.#removeRelayPeer(evt.detail); + }; + async listen(addr) { + this.log('listen on %a', addr); + // remove the circuit part to get the peer id of the relay + const relayAddr = addr.decapsulate('/p2p-circuit'); + const relayConn = await this.connectionManager.openConnection(relayAddr); + if (!this.relayStore.hasReservation(relayConn.remotePeer)) { + this.log('making reservation on peer %p', relayConn.remotePeer); + // addRelay calls transportManager.listen which calls this listen method + await this.relayStore.addRelay(relayConn.remotePeer, 'configured'); + return; + } + const reservation = this.relayStore.getReservation(relayConn.remotePeer); + if (reservation == null) { + throw new CodeError$2('Did not have reservation after making reservation', 'ERR_NO_RESERVATION'); + } + if (this.listeningAddrs.has(relayConn.remotePeer)) { + this.log('already listening on relay %p', relayConn.remotePeer); + return; + } + // add all addresses from the relay reservation + this.listeningAddrs.set(relayConn.remotePeer, reservation.addrs.map(buf => { + return multiaddr(buf).encapsulate('/p2p-circuit'); + })); + this.safeDispatchEvent('listening', {}); + } + getAddrs() { + return [...this.listeningAddrs.values()].flat(); + } + async close() { + } + #removeRelayPeer(peerId) { + const had = this.listeningAddrs.has(peerId); + this.log('relay peer removed %p - had reservation', peerId, had); + this.listeningAddrs.delete(peerId); + if (had) { + this.log.trace('removing relay event listener for peer %p', peerId); + this.relayStore.removeEventListener('relay:removed', this._onRemoveRelayPeer); + // Announce listen addresses change + this.safeDispatchEvent('close', {}); + } + } + } + function createListener(options) { + return new CircuitRelayTransportListener(options); + } + + const isValidStop = (request) => { + if (request.peer == null) { + return false; + } + try { + request.peer.addrs.forEach(multiaddr); + } + catch { + return false; + } + return true; + }; + const defaults = { + 
maxInboundStopStreams: MAX_CONNECTIONS, + maxOutboundStopStreams: MAX_CONNECTIONS, + stopTimeout: 30000 + }; + class CircuitRelayTransport { + discovery; + registrar; + peerStore; + connectionManager; + transportManager; + peerId; + upgrader; + addressManager; + connectionGater; + reservationStore; + logger; + maxInboundStopStreams; + maxOutboundStopStreams; + stopTimeout; + started; + log; + constructor(components, init) { + this.log = components.logger.forComponent('libp2p:circuit-relay:transport'); + this.registrar = components.registrar; + this.peerStore = components.peerStore; + this.connectionManager = components.connectionManager; + this.transportManager = components.transportManager; + this.logger = components.logger; + this.peerId = components.peerId; + this.upgrader = components.upgrader; + this.addressManager = components.addressManager; + this.connectionGater = components.connectionGater; + this.maxInboundStopStreams = init.maxInboundStopStreams ?? defaults.maxInboundStopStreams; + this.maxOutboundStopStreams = init.maxOutboundStopStreams ?? defaults.maxOutboundStopStreams; + this.stopTimeout = init.stopTimeout ?? 
defaults.stopTimeout; + if (init.discoverRelays != null && init.discoverRelays > 0) { + this.discovery = new RelayDiscovery(components); + this.discovery.addEventListener('relay:discover', (evt) => { + this.reservationStore.addRelay(evt.detail, 'discovered') + .catch(err => { + this.log.error('could not add discovered relay %p', evt.detail, err); + }); + }); + } + this.reservationStore = new ReservationStore(components, init); + this.reservationStore.addEventListener('relay:not-enough-relays', () => { + this.discovery?.discover() + .catch(err => { + this.log.error('could not discover relays', err); + }); + }); + this.started = false; + } + isStarted() { + return this.started; + } + async start() { + this.reservationStore.start(); + await this.registrar.handle(RELAY_V2_STOP_CODEC, (data) => { + void this.onStop(data).catch(err => { + this.log.error('error while handling STOP protocol', err); + data.stream.abort(err); + }); + }, { + maxInboundStreams: this.maxInboundStopStreams, + maxOutboundStreams: this.maxOutboundStopStreams, + runOnTransientConnection: true + }); + await this.discovery?.start(); + this.started = true; + } + afterStart() { + this.discovery?.afterStart(); + } + async stop() { + this.discovery?.stop(); + this.reservationStore.stop(); + await this.registrar.unhandle(RELAY_V2_STOP_CODEC); + this.started = false; + } + [transportSymbol] = true; + [Symbol.toStringTag] = 'libp2p/circuit-relay-v2'; + /** + * Dial a peer over a relay + */ + async dial(ma, options = {}) { + if (ma.protoCodes().filter(code => code === CIRCUIT_PROTO_CODE).length !== 1) { + const errMsg = 'Invalid circuit relay address'; + this.log.error(errMsg, ma); + throw new CodeError$2(errMsg, ERR_RELAYED_DIAL); + } + // Check the multiaddr to see if it contains a relay and a destination peer + const addrs = ma.toString().split('/p2p-circuit'); + const relayAddr = multiaddr(addrs[0]); + const destinationAddr = multiaddr(addrs[addrs.length - 1]); + const relayId = relayAddr.getPeerId(); + 
const destinationId = destinationAddr.getPeerId(); + if (relayId == null || destinationId == null) { + const errMsg = `Circuit relay dial to ${ma.toString()} failed as address did not have peer ids`; + this.log.error(errMsg); + throw new CodeError$2(errMsg, ERR_RELAYED_DIAL); + } + const relayPeer = peerIdFromString(relayId); + const destinationPeer = peerIdFromString(destinationId); + let disconnectOnFailure = false; + const relayConnections = this.connectionManager.getConnections(relayPeer); + let relayConnection = relayConnections[0]; + if (relayConnection == null) { + await this.peerStore.merge(relayPeer, { + multiaddrs: [relayAddr] + }); + relayConnection = await this.connectionManager.openConnection(relayPeer, options); + disconnectOnFailure = true; + } + let stream; + try { + stream = await relayConnection.newStream(RELAY_V2_HOP_CODEC); + return await this.connectV2({ + stream, + connection: relayConnection, + destinationPeer, + destinationAddr, + relayAddr, + ma, + disconnectOnFailure + }); + } + catch (err) { + this.log.error('circuit relay dial to destination %p via relay %p failed', destinationPeer, relayPeer, err); + if (stream != null) { + stream.abort(err); + } + disconnectOnFailure && await relayConnection.close(); + throw err; + } + } + async connectV2({ stream, connection, destinationPeer, destinationAddr, relayAddr, ma, disconnectOnFailure }) { + try { + const pbstr = pbStream(stream); + const hopstr = pbstr.pb(HopMessage); + await hopstr.write({ + type: HopMessage.Type.CONNECT, + peer: { + id: destinationPeer.toBytes(), + addrs: [multiaddr(destinationAddr).bytes] + } + }); + const status = await hopstr.read(); + if (status.status !== Status.OK) { + throw new CodeError$2(`failed to connect via relay with status ${status?.status?.toString() ?? 
'undefined'}`, ERR_HOP_REQUEST_FAILED); + } + const maConn = streamToMaConnection({ + stream: pbstr.unwrap(), + remoteAddr: ma, + localAddr: relayAddr.encapsulate(`/p2p-circuit/p2p/${this.peerId.toString()}`), + logger: this.logger + }); + this.log('new outbound transient connection %a', maConn.remoteAddr); + return await this.upgrader.upgradeOutbound(maConn, { + transient: true + }); + } + catch (err) { + this.log.error(`Circuit relay dial to destination ${destinationPeer.toString()} via relay ${connection.remotePeer.toString()} failed`, err); + disconnectOnFailure && await connection.close(); + throw err; + } + } + /** + * Create a listener + */ + createListener(options) { + return createListener({ + connectionManager: this.connectionManager, + relayStore: this.reservationStore, + logger: this.logger + }); + } + /** + * Filter check for all Multiaddrs that this transport can listen on + */ + listenFilter(multiaddrs) { + multiaddrs = Array.isArray(multiaddrs) ? multiaddrs : [multiaddrs]; + return multiaddrs.filter((ma) => { + return Circuit.matches(ma); + }); + } + /** + * Filter check for all Multiaddrs that this transport can dial + */ + dialFilter(multiaddrs) { + return this.listenFilter(multiaddrs); + } + /** + * An incoming STOP request means a remote peer wants to dial us via a relay + */ + async onStop({ connection, stream }) { + if (!this.reservationStore.hasReservation(connection.remotePeer)) { + try { + this.log('dialed via relay we did not have a reservation on, start listening on that relay address'); + await this.transportManager.listen([connection.remoteAddr.encapsulate('/p2p-circuit')]); + } + catch (err) { + // failed to refresh our hitherto unknown relay reservation but allow the connection attempt anyway + this.log.error('failed to listen on a relay peer we were dialed via but did not have a reservation on', err); + } + } + const signal = AbortSignal.timeout(this.stopTimeout); + const pbstr = pbStream(stream).pb(StopMessage); + const request = 
await pbstr.read({ + signal + }); + this.log('new circuit relay v2 stop stream from %p with type %s', connection.remotePeer, request.type); + if (request?.type === undefined) { + this.log.error('type was missing from circuit v2 stop protocol request from %s', connection.remotePeer); + await pbstr.write({ type: StopMessage.Type.STATUS, status: Status.MALFORMED_MESSAGE }, { + signal + }); + await stream.close(); + return; + } + // Validate the STOP request has the required input + if (request.type !== StopMessage.Type.CONNECT) { + this.log.error('invalid stop connect request via peer %p', connection.remotePeer); + await pbstr.write({ type: StopMessage.Type.STATUS, status: Status.UNEXPECTED_MESSAGE }, { + signal + }); + await stream.close(); + return; + } + if (!isValidStop(request)) { + this.log.error('invalid stop connect request via peer %p', connection.remotePeer); + await pbstr.write({ type: StopMessage.Type.STATUS, status: Status.MALFORMED_MESSAGE }, { + signal + }); + await stream.close(); + return; + } + const remotePeerId = peerIdFromBytes(request.peer.id); + if ((await this.connectionGater.denyInboundRelayedConnection?.(connection.remotePeer, remotePeerId)) === true) { + this.log.error('connection gater denied inbound relayed connection from %p', connection.remotePeer); + await pbstr.write({ type: StopMessage.Type.STATUS, status: Status.PERMISSION_DENIED }, { + signal + }); + await stream.close(); + return; + } + this.log.trace('sending success response to %p', connection.remotePeer); + await pbstr.write({ type: StopMessage.Type.STATUS, status: Status.OK }, { + signal + }); + const remoteAddr = connection.remoteAddr.encapsulate(`/p2p-circuit/p2p/${remotePeerId.toString()}`); + const localAddr = this.addressManager.getAddresses()[0]; + const maConn = streamToMaConnection({ + stream: pbstr.unwrap().unwrap(), + remoteAddr, + localAddr, + logger: this.logger + }); + this.log('new inbound transient connection %a', maConn.remoteAddr); + await 
this.upgrader.upgradeInbound(maConn, { + transient: true + }); + this.log('%s connection %a upgraded', 'inbound', maConn.remoteAddr); + } + } + + function circuitRelayTransport(init = {}) { + return (components) => { + return new CircuitRelayTransport(components, init); + }; + } + + const second$1 = 1000; + const minute$1 = 60 * second$1; + // Protocol identifiers + const FloodsubID = '/floodsub/1.0.0'; + /** + * The protocol ID for version 1.0.0 of the Gossipsub protocol + * It is advertised along with GossipsubIDv11 for backwards compatability + */ + const GossipsubIDv10 = '/meshsub/1.0.0'; + /** + * The protocol ID for version 1.1.0 of the Gossipsub protocol + * See the spec for details about how v1.1.0 compares to v1.0.0: + * https://github.com/libp2p/specs/blob/master/pubsub/gossipsub/gossipsub-v1.1.md + */ + const GossipsubIDv11 = '/meshsub/1.1.0'; + // Overlay parameters + /** + * GossipsubD sets the optimal degree for a Gossipsub topic mesh. For example, if GossipsubD == 6, + * each peer will want to have about six peers in their mesh for each topic they're subscribed to. + * GossipsubD should be set somewhere between GossipsubDlo and GossipsubDhi. + */ + const GossipsubD = 6; + /** + * GossipsubDlo sets the lower bound on the number of peers we keep in a Gossipsub topic mesh. + * If we have fewer than GossipsubDlo peers, we will attempt to graft some more into the mesh at + * the next heartbeat. + */ + const GossipsubDlo = 4; + /** + * GossipsubDhi sets the upper bound on the number of peers we keep in a Gossipsub topic mesh. + * If we have more than GossipsubDhi peers, we will select some to prune from the mesh at the next heartbeat. + */ + const GossipsubDhi = 12; + /** + * GossipsubDscore affects how peers are selected when pruning a mesh due to over subscription. + * At least GossipsubDscore of the retained peers will be high-scoring, while the remainder are + * chosen randomly. 
+ */ + const GossipsubDscore = 4; + /** + * GossipsubDout sets the quota for the number of outbound connections to maintain in a topic mesh. + * When the mesh is pruned due to over subscription, we make sure that we have outbound connections + * to at least GossipsubDout of the survivor peers. This prevents sybil attackers from overwhelming + * our mesh with incoming connections. + * + * GossipsubDout must be set below GossipsubDlo, and must not exceed GossipsubD / 2. + */ + const GossipsubDout = 2; + // Gossip parameters + /** + * GossipsubHistoryLength controls the size of the message cache used for gossip. + * The message cache will remember messages for GossipsubHistoryLength heartbeats. + */ + const GossipsubHistoryLength = 5; + /** + * GossipsubHistoryGossip controls how many cached message ids we will advertise in + * IHAVE gossip messages. When asked for our seen message IDs, we will return + * only those from the most recent GossipsubHistoryGossip heartbeats. The slack between + * GossipsubHistoryGossip and GossipsubHistoryLength allows us to avoid advertising messages + * that will be expired by the time they're requested. + * + * GossipsubHistoryGossip must be less than or equal to GossipsubHistoryLength to + * avoid a runtime panic. + */ + const GossipsubHistoryGossip = 3; + /** + * GossipsubDlazy affects how many peers we will emit gossip to at each heartbeat. + * We will send gossip to at least GossipsubDlazy peers outside our mesh. The actual + * number may be more, depending on GossipsubGossipFactor and how many peers we're + * connected to. + */ + const GossipsubDlazy = 6; + /** + * GossipsubGossipFactor affects how many peers we will emit gossip to at each heartbeat. + * We will send gossip to GossipsubGossipFactor * (total number of non-mesh peers), or + * GossipsubDlazy, whichever is greater. 
+ */ + const GossipsubGossipFactor = 0.25; + /** + * GossipsubGossipRetransmission controls how many times we will allow a peer to request + * the same message id through IWANT gossip before we start ignoring them. This is designed + * to prevent peers from spamming us with requests and wasting our resources. + */ + const GossipsubGossipRetransmission = 3; + // Heartbeat interval + /** + * GossipsubHeartbeatInitialDelay is the short delay before the heartbeat timer begins + * after the router is initialized. + */ + const GossipsubHeartbeatInitialDelay = 100; + /** + * GossipsubHeartbeatInterval controls the time between heartbeats. + */ + const GossipsubHeartbeatInterval = second$1; + /** + * GossipsubFanoutTTL controls how long we keep track of the fanout state. If it's been + * GossipsubFanoutTTL since we've published to a topic that we're not subscribed to, + * we'll delete the fanout map for that topic. + */ + const GossipsubFanoutTTL = minute$1; + /** + * GossipsubPrunePeers controls the number of peers to include in prune Peer eXchange. + * When we prune a peer that's eligible for PX (has a good score, etc), we will try to + * send them signed peer records for up to GossipsubPrunePeers other peers that we + * know of. + */ + const GossipsubPrunePeers = 16; + /** + * GossipsubPruneBackoff controls the backoff time for pruned peers. This is how long + * a peer must wait before attempting to graft into our mesh again after being pruned. + * When pruning a peer, we send them our value of GossipsubPruneBackoff so they know + * the minimum time to wait. Peers running older versions may not send a backoff time, + * so if we receive a prune message without one, we will wait at least GossipsubPruneBackoff + * before attempting to re-graft. + */ + const GossipsubPruneBackoff = minute$1; + /** + * Backoff to use when unsuscribing from a topic. Should not resubscribe to this topic before it expired. 
+ */ + const GossipsubUnsubscribeBackoff = 10 * second$1; + /** + * GossipsubPruneBackoffTicks is the number of heartbeat ticks for attempting to prune expired + * backoff timers. + */ + const GossipsubPruneBackoffTicks = 15; + /** + * GossipsubDirectConnectTicks is the number of heartbeat ticks for attempting to reconnect direct peers + * that are not currently connected. + */ + const GossipsubDirectConnectTicks = 300; + /** + * GossipsubDirectConnectInitialDelay is the initial delay before opening connections to direct peers + */ + const GossipsubDirectConnectInitialDelay = second$1; + /** + * GossipsubOpportunisticGraftTicks is the number of heartbeat ticks for attempting to improve the mesh + * with opportunistic grafting. Every GossipsubOpportunisticGraftTicks we will attempt to select some + * high-scoring mesh peers to replace lower-scoring ones, if the median score of our mesh peers falls + * below a threshold + */ + const GossipsubOpportunisticGraftTicks = 60; + /** + * GossipsubOpportunisticGraftPeers is the number of peers to opportunistically graft. + */ + const GossipsubOpportunisticGraftPeers = 2; + /** + * If a GRAFT comes before GossipsubGraftFloodThreshold has elapsed since the last PRUNE, + * then there is an extra score penalty applied to the peer through P7. + */ + const GossipsubGraftFloodThreshold = 10 * second$1; + /** + * GossipsubMaxIHaveLength is the maximum number of messages to include in an IHAVE message. + * Also controls the maximum number of IHAVE ids we will accept and request with IWANT from a + * peer within a heartbeat, to protect from IHAVE floods. You should adjust this value from the + * default if your system is pushing more than 5000 messages in GossipsubHistoryGossip heartbeats; + * with the defaults this is 1666 messages/s. + */ + const GossipsubMaxIHaveLength = 5000; + /** + * GossipsubMaxIHaveMessages is the maximum number of IHAVE messages to accept from a peer within a heartbeat. 
+ */ + const GossipsubMaxIHaveMessages = 10; + /** + * Time to wait for a message requested through IWANT following an IHAVE advertisement. + * If the message is not received within this window, a broken promise is declared and + * the router may apply bahavioural penalties. + */ + const GossipsubIWantFollowupTime = 3 * second$1; + /** + * Time in milliseconds to keep message ids in the seen cache + */ + const GossipsubSeenTTL = 2 * minute$1; + const TimeCacheDuration = 120 * 1000; + const ERR_TOPIC_VALIDATOR_REJECT = 'ERR_TOPIC_VALIDATOR_REJECT'; + const ERR_TOPIC_VALIDATOR_IGNORE = 'ERR_TOPIC_VALIDATOR_IGNORE'; + /** + * If peer score is better than this, we accept messages from this peer + * within ACCEPT_FROM_WHITELIST_DURATION_MS from the last time computing score. + **/ + const ACCEPT_FROM_WHITELIST_THRESHOLD_SCORE = 0; + /** + * If peer score >= ACCEPT_FROM_WHITELIST_THRESHOLD_SCORE, accept up to this + * number of messages from that peer. + */ + const ACCEPT_FROM_WHITELIST_MAX_MESSAGES = 128; + /** + * If peer score >= ACCEPT_FROM_WHITELIST_THRESHOLD_SCORE, accept messages from + * this peer up to this time duration. + */ + const ACCEPT_FROM_WHITELIST_DURATION_MS = 1000; + /** + * The default MeshMessageDeliveriesWindow to be used in metrics. 
+ */ + const DEFAULT_METRIC_MESH_MESSAGE_DELIVERIES_WINDOWS = 1000; + /** Wait for 1 more heartbeats before clearing a backoff */ + const BACKOFF_SLACK = 1; + + const defaultDecodeRpcLimits = { + maxSubscriptions: Infinity, + maxMessages: Infinity, + maxIhaveMessageIDs: Infinity, + maxIwantMessageIDs: Infinity, + maxControlMessages: Infinity, + maxPeerInfos: Infinity + }; + + /* eslint-disable import/export */ + /* eslint-disable complexity */ + /* eslint-disable @typescript-eslint/no-namespace */ + /* eslint-disable @typescript-eslint/no-unnecessary-boolean-literal-compare */ + /* eslint-disable @typescript-eslint/no-empty-interface */ + var RPC$1; + (function (RPC) { + (function (SubOpts) { + let _codec; + SubOpts.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if (obj.subscribe != null) { + w.uint32(8); + w.bool(obj.subscribe); + } + if (obj.topic != null) { + w.uint32(18); + w.string(obj.topic); + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length, opts = {}) => { + const obj = {}; + const end = length == null ? 
reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + obj.subscribe = reader.bool(); + break; + } + case 2: { + obj.topic = reader.string(); + break; + } + default: { + reader.skipType(tag & 7); + break; + } + } + } + return obj; + }); + } + return _codec; + }; + SubOpts.encode = (obj) => { + return encodeMessage(obj, SubOpts.codec()); + }; + SubOpts.decode = (buf, opts) => { + return decodeMessage(buf, SubOpts.codec(), opts); + }; + })(RPC.SubOpts || (RPC.SubOpts = {})); + (function (Message) { + let _codec; + Message.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if (obj.from != null) { + w.uint32(10); + w.bytes(obj.from); + } + if (obj.data != null) { + w.uint32(18); + w.bytes(obj.data); + } + if (obj.seqno != null) { + w.uint32(26); + w.bytes(obj.seqno); + } + if ((obj.topic != null && obj.topic !== '')) { + w.uint32(34); + w.string(obj.topic); + } + if (obj.signature != null) { + w.uint32(42); + w.bytes(obj.signature); + } + if (obj.key != null) { + w.uint32(50); + w.bytes(obj.key); + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length, opts = {}) => { + const obj = { + topic: '' + }; + const end = length == null ? 
reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + obj.from = reader.bytes(); + break; + } + case 2: { + obj.data = reader.bytes(); + break; + } + case 3: { + obj.seqno = reader.bytes(); + break; + } + case 4: { + obj.topic = reader.string(); + break; + } + case 5: { + obj.signature = reader.bytes(); + break; + } + case 6: { + obj.key = reader.bytes(); + break; + } + default: { + reader.skipType(tag & 7); + break; + } + } + } + return obj; + }); + } + return _codec; + }; + Message.encode = (obj) => { + return encodeMessage(obj, Message.codec()); + }; + Message.decode = (buf, opts) => { + return decodeMessage(buf, Message.codec(), opts); + }; + })(RPC.Message || (RPC.Message = {})); + (function (ControlMessage) { + let _codec; + ControlMessage.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if (obj.ihave != null) { + for (const value of obj.ihave) { + w.uint32(10); + RPC.ControlIHave.codec().encode(value, w); + } + } + if (obj.iwant != null) { + for (const value of obj.iwant) { + w.uint32(18); + RPC.ControlIWant.codec().encode(value, w); + } + } + if (obj.graft != null) { + for (const value of obj.graft) { + w.uint32(26); + RPC.ControlGraft.codec().encode(value, w); + } + } + if (obj.prune != null) { + for (const value of obj.prune) { + w.uint32(34); + RPC.ControlPrune.codec().encode(value, w); + } + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length, opts = {}) => { + const obj = { + ihave: [], + iwant: [], + graft: [], + prune: [] + }; + const end = length == null ? 
reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (opts.limits?.ihave != null && obj.ihave.length === opts.limits.ihave) { + throw new CodeError$3('decode error - map field "ihave" had too many elements', 'ERR_MAX_LENGTH'); + } + obj.ihave.push(RPC.ControlIHave.codec().decode(reader, reader.uint32())); + break; + } + case 2: { + if (opts.limits?.iwant != null && obj.iwant.length === opts.limits.iwant) { + throw new CodeError$3('decode error - map field "iwant" had too many elements', 'ERR_MAX_LENGTH'); + } + obj.iwant.push(RPC.ControlIWant.codec().decode(reader, reader.uint32())); + break; + } + case 3: { + if (opts.limits?.graft != null && obj.graft.length === opts.limits.graft) { + throw new CodeError$3('decode error - map field "graft" had too many elements', 'ERR_MAX_LENGTH'); + } + obj.graft.push(RPC.ControlGraft.codec().decode(reader, reader.uint32())); + break; + } + case 4: { + if (opts.limits?.prune != null && obj.prune.length === opts.limits.prune) { + throw new CodeError$3('decode error - map field "prune" had too many elements', 'ERR_MAX_LENGTH'); + } + obj.prune.push(RPC.ControlPrune.codec().decode(reader, reader.uint32())); + break; + } + default: { + reader.skipType(tag & 7); + break; + } + } + } + return obj; + }); + } + return _codec; + }; + ControlMessage.encode = (obj) => { + return encodeMessage(obj, ControlMessage.codec()); + }; + ControlMessage.decode = (buf, opts) => { + return decodeMessage(buf, ControlMessage.codec(), opts); + }; + })(RPC.ControlMessage || (RPC.ControlMessage = {})); + (function (ControlIHave) { + let _codec; + ControlIHave.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if (obj.topicID != null) { + w.uint32(10); + w.string(obj.topicID); + } + if (obj.messageIDs != null) { + for (const value of obj.messageIDs) { + w.uint32(18); + w.bytes(value); + } 
+ } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length, opts = {}) => { + const obj = { + messageIDs: [] + }; + const end = length == null ? reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + obj.topicID = reader.string(); + break; + } + case 2: { + if (opts.limits?.messageIDs != null && obj.messageIDs.length === opts.limits.messageIDs) { + throw new CodeError$3('decode error - map field "messageIDs" had too many elements', 'ERR_MAX_LENGTH'); + } + obj.messageIDs.push(reader.bytes()); + break; + } + default: { + reader.skipType(tag & 7); + break; + } + } + } + return obj; + }); + } + return _codec; + }; + ControlIHave.encode = (obj) => { + return encodeMessage(obj, ControlIHave.codec()); + }; + ControlIHave.decode = (buf, opts) => { + return decodeMessage(buf, ControlIHave.codec(), opts); + }; + })(RPC.ControlIHave || (RPC.ControlIHave = {})); + (function (ControlIWant) { + let _codec; + ControlIWant.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if (obj.messageIDs != null) { + for (const value of obj.messageIDs) { + w.uint32(10); + w.bytes(value); + } + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length, opts = {}) => { + const obj = { + messageIDs: [] + }; + const end = length == null ? 
reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (opts.limits?.messageIDs != null && obj.messageIDs.length === opts.limits.messageIDs) { + throw new CodeError$3('decode error - map field "messageIDs" had too many elements', 'ERR_MAX_LENGTH'); + } + obj.messageIDs.push(reader.bytes()); + break; + } + default: { + reader.skipType(tag & 7); + break; + } + } + } + return obj; + }); + } + return _codec; + }; + ControlIWant.encode = (obj) => { + return encodeMessage(obj, ControlIWant.codec()); + }; + ControlIWant.decode = (buf, opts) => { + return decodeMessage(buf, ControlIWant.codec(), opts); + }; + })(RPC.ControlIWant || (RPC.ControlIWant = {})); + (function (ControlGraft) { + let _codec; + ControlGraft.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if (obj.topicID != null) { + w.uint32(10); + w.string(obj.topicID); + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length, opts = {}) => { + const obj = {}; + const end = length == null ? 
reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + obj.topicID = reader.string(); + break; + } + default: { + reader.skipType(tag & 7); + break; + } + } + } + return obj; + }); + } + return _codec; + }; + ControlGraft.encode = (obj) => { + return encodeMessage(obj, ControlGraft.codec()); + }; + ControlGraft.decode = (buf, opts) => { + return decodeMessage(buf, ControlGraft.codec(), opts); + }; + })(RPC.ControlGraft || (RPC.ControlGraft = {})); + (function (ControlPrune) { + let _codec; + ControlPrune.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if (obj.topicID != null) { + w.uint32(10); + w.string(obj.topicID); + } + if (obj.peers != null) { + for (const value of obj.peers) { + w.uint32(18); + RPC.PeerInfo.codec().encode(value, w); + } + } + if (obj.backoff != null) { + w.uint32(24); + w.uint64Number(obj.backoff); + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length, opts = {}) => { + const obj = { + peers: [] + }; + const end = length == null ? 
reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + obj.topicID = reader.string(); + break; + } + case 2: { + if (opts.limits?.peers != null && obj.peers.length === opts.limits.peers) { + throw new CodeError$3('decode error - map field "peers" had too many elements', 'ERR_MAX_LENGTH'); + } + obj.peers.push(RPC.PeerInfo.codec().decode(reader, reader.uint32())); + break; + } + case 3: { + obj.backoff = reader.uint64Number(); + break; + } + default: { + reader.skipType(tag & 7); + break; + } + } + } + return obj; + }); + } + return _codec; + }; + ControlPrune.encode = (obj) => { + return encodeMessage(obj, ControlPrune.codec()); + }; + ControlPrune.decode = (buf, opts) => { + return decodeMessage(buf, ControlPrune.codec(), opts); + }; + })(RPC.ControlPrune || (RPC.ControlPrune = {})); + (function (PeerInfo) { + let _codec; + PeerInfo.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if (obj.peerID != null) { + w.uint32(10); + w.bytes(obj.peerID); + } + if (obj.signedPeerRecord != null) { + w.uint32(18); + w.bytes(obj.signedPeerRecord); + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length, opts = {}) => { + const obj = {}; + const end = length == null ? 
reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + obj.peerID = reader.bytes(); + break; + } + case 2: { + obj.signedPeerRecord = reader.bytes(); + break; + } + default: { + reader.skipType(tag & 7); + break; + } + } + } + return obj; + }); + } + return _codec; + }; + PeerInfo.encode = (obj) => { + return encodeMessage(obj, PeerInfo.codec()); + }; + PeerInfo.decode = (buf, opts) => { + return decodeMessage(buf, PeerInfo.codec(), opts); + }; + })(RPC.PeerInfo || (RPC.PeerInfo = {})); + let _codec; + RPC.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if (obj.subscriptions != null) { + for (const value of obj.subscriptions) { + w.uint32(10); + RPC.SubOpts.codec().encode(value, w); + } + } + if (obj.messages != null) { + for (const value of obj.messages) { + w.uint32(18); + RPC.Message.codec().encode(value, w); + } + } + if (obj.control != null) { + w.uint32(26); + RPC.ControlMessage.codec().encode(obj.control, w); + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length, opts = {}) => { + const obj = { + subscriptions: [], + messages: [] + }; + const end = length == null ? 
reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (opts.limits?.subscriptions != null && obj.subscriptions.length === opts.limits.subscriptions) { + throw new CodeError$3('decode error - map field "subscriptions" had too many elements', 'ERR_MAX_LENGTH'); + } + obj.subscriptions.push(RPC.SubOpts.codec().decode(reader, reader.uint32())); + break; + } + case 2: { + if (opts.limits?.messages != null && obj.messages.length === opts.limits.messages) { + throw new CodeError$3('decode error - map field "messages" had too many elements', 'ERR_MAX_LENGTH'); + } + obj.messages.push(RPC.Message.codec().decode(reader, reader.uint32())); + break; + } + case 3: { + obj.control = RPC.ControlMessage.codec().decode(reader, reader.uint32()); + break; + } + default: { + reader.skipType(tag & 7); + break; + } + } + } + return obj; + }); + } + return _codec; + }; + RPC.encode = (obj) => { + return encodeMessage(obj, RPC.codec()); + }; + RPC.decode = (buf, opts) => { + return decodeMessage(buf, RPC.codec(), opts); + }; + })(RPC$1 || (RPC$1 = {})); + + class MessageCache { + gossip; + msgs = new Map(); + msgIdToStrFn; + history = []; + /** Track with accounting of messages in the mcache that are not yet validated */ + notValidatedCount = 0; + /** + * Holds history of messages in timebounded history arrays + */ + constructor( + /** + * The number of indices in the cache history used for gossiping. That means that a message + * won't get gossiped anymore when shift got called `gossip` many times after inserting the + * message in the cache. 
+ */ + gossip, historyCapacity, msgIdToStrFn) { + this.gossip = gossip; + this.msgIdToStrFn = msgIdToStrFn; + for (let i = 0; i < historyCapacity; i++) { + this.history[i] = []; + } + } + get size() { + return this.msgs.size; + } + /** + * Adds a message to the current window and the cache + * Returns true if the message is not known and is inserted in the cache + */ + put(messageId, msg, validated = false) { + const { msgIdStr } = messageId; + // Don't add duplicate entries to the cache. + if (this.msgs.has(msgIdStr)) { + return false; + } + this.msgs.set(msgIdStr, { + message: msg, + validated, + originatingPeers: new Set(), + iwantCounts: new Map() + }); + this.history[0].push({ ...messageId, topic: msg.topic }); + if (!validated) { + this.notValidatedCount++; + } + return true; + } + observeDuplicate(msgId, fromPeerIdStr) { + const entry = this.msgs.get(msgId); + if ((entry != null) && + // if the message is already validated, we don't need to store extra peers sending us + // duplicates as the message has already been forwarded + !entry.validated) { + entry.originatingPeers.add(fromPeerIdStr); + } + } + /** + * Retrieves a message from the cache by its ID, if it is still present + */ + get(msgId) { + return this.msgs.get(this.msgIdToStrFn(msgId))?.message; + } + /** + * Increases the iwant count for the given message by one and returns the message together + * with the iwant if the message exists. + */ + getWithIWantCount(msgIdStr, p) { + const msg = this.msgs.get(msgIdStr); + if (msg == null) { + return null; + } + const count = (msg.iwantCounts.get(p) ?? 0) + 1; + msg.iwantCounts.set(p, count); + return { msg: msg.message, count }; + } + /** + * Retrieves a list of message IDs for a set of topics + */ + getGossipIDs(topics) { + const msgIdsByTopic = new Map(); + for (let i = 0; i < this.gossip; i++) { + this.history[i].forEach((entry) => { + const msg = this.msgs.get(entry.msgIdStr); + if ((msg?.validated ?? 
false) && topics.has(entry.topic)) { + let msgIds = msgIdsByTopic.get(entry.topic); + if (msgIds == null) { + msgIds = []; + msgIdsByTopic.set(entry.topic, msgIds); + } + msgIds.push(entry.msgId); + } + }); + } + return msgIdsByTopic; + } + /** + * Gets a message with msgId and tags it as validated. + * This function also returns the known peers that have sent us this message. This is used to + * prevent us sending redundant messages to peers who have already propagated it. + */ + validate(msgId) { + const entry = this.msgs.get(msgId); + if (entry == null) { + return null; + } + if (!entry.validated) { + this.notValidatedCount--; + } + const { message, originatingPeers } = entry; + entry.validated = true; + // Clear the known peers list (after a message is validated, it is forwarded and we no + // longer need to store the originating peers). + entry.originatingPeers = new Set(); + return { message, originatingPeers }; + } + /** + * Shifts the current window, discarding messages older than this.history.length of the cache + */ + shift() { + const lastCacheEntries = this.history[this.history.length - 1]; + lastCacheEntries.forEach((cacheEntry) => { + const entry = this.msgs.get(cacheEntry.msgIdStr); + if (entry != null) { + this.msgs.delete(cacheEntry.msgIdStr); + if (!entry.validated) { + this.notValidatedCount--; + } + } + }); + this.history.pop(); + this.history.unshift([]); + } + remove(msgId) { + const entry = this.msgs.get(msgId); + if (entry == null) { + return null; + } + // Keep the message on the history vector, it will be dropped on a shift() + this.msgs.delete(msgId); + return entry; + } + } + + var SignaturePolicy; + (function (SignaturePolicy) { + /** + * On the producing side: + * - Build messages with the signature, key (from may be enough for certain inlineable public key types), from and seqno fields. + * + * On the consuming side: + * - Enforce the fields to be present, reject otherwise. 
+ * - Propagate only if the fields are valid and signature can be verified, reject otherwise. + */ + SignaturePolicy["StrictSign"] = "StrictSign"; + /** + * On the producing side: + * - Build messages without the signature, key, from and seqno fields. + * - The corresponding protobuf key-value pairs are absent from the marshalled message, not just empty. + * + * On the consuming side: + * - Enforce the fields to be absent, reject otherwise. + * - Propagate only if the fields are absent, reject otherwise. + * - A message_id function will not be able to use the above fields, and should instead rely on the data field. A commonplace strategy is to calculate a hash. + */ + SignaturePolicy["StrictNoSign"] = "StrictNoSign"; + })(SignaturePolicy || (SignaturePolicy = {})); + var PublishConfigType; + (function (PublishConfigType) { + PublishConfigType[PublishConfigType["Signing"] = 0] = "Signing"; + PublishConfigType[PublishConfigType["Anonymous"] = 1] = "Anonymous"; + })(PublishConfigType || (PublishConfigType = {})); + var RejectReason; + (function (RejectReason) { + /** + * The message failed the configured validation during decoding. + * SelfOrigin is considered a ValidationError + */ + RejectReason["Error"] = "error"; + /** + * Custom validator fn reported status IGNORE. + */ + RejectReason["Ignore"] = "ignore"; + /** + * Custom validator fn reported status REJECT. + */ + RejectReason["Reject"] = "reject"; + /** + * The peer that sent the message OR the source from field is blacklisted. + * Causes messages to be ignored, not penalized, neither do score record creation. 
+ */ + RejectReason["Blacklisted"] = "blacklisted"; + })(RejectReason || (RejectReason = {})); + var ValidateError; + (function (ValidateError) { + /// The message has an invalid signature, + ValidateError["InvalidSignature"] = "invalid_signature"; + /// The sequence number was the incorrect size + ValidateError["InvalidSeqno"] = "invalid_seqno"; + /// The PeerId was invalid + ValidateError["InvalidPeerId"] = "invalid_peerid"; + /// Signature existed when validation has been sent to + /// [`crate::behaviour::MessageAuthenticity::Anonymous`]. + ValidateError["SignaturePresent"] = "signature_present"; + /// Sequence number existed when validation has been sent to + /// [`crate::behaviour::MessageAuthenticity::Anonymous`]. + ValidateError["SeqnoPresent"] = "seqno_present"; + /// Message source existed when validation has been sent to + /// [`crate::behaviour::MessageAuthenticity::Anonymous`]. + ValidateError["FromPresent"] = "from_present"; + /// The data transformation failed. + ValidateError["TransformFailed"] = "transform_failed"; + })(ValidateError || (ValidateError = {})); + var MessageStatus; + (function (MessageStatus) { + MessageStatus["duplicate"] = "duplicate"; + MessageStatus["invalid"] = "invalid"; + MessageStatus["valid"] = "valid"; + })(MessageStatus || (MessageStatus = {})); + /** + * Typesafe conversion of MessageAcceptance -> RejectReason. TS ensures all values covered + */ + function rejectReasonFromAcceptance(acceptance) { + switch (acceptance) { + case TopicValidatorResult.Ignore: + return RejectReason.Ignore; + case TopicValidatorResult.Reject: + return RejectReason.Reject; + default: + throw new Error('Unreachable'); + } + } + + var MessageSource; + (function (MessageSource) { + MessageSource["forward"] = "forward"; + MessageSource["publish"] = "publish"; + })(MessageSource || (MessageSource = {})); + var InclusionReason; + (function (InclusionReason) { + /** Peer was a fanaout peer. 
*/ + InclusionReason["Fanout"] = "fanout"; + /** Included from random selection. */ + InclusionReason["Random"] = "random"; + /** Peer subscribed. */ + InclusionReason["Subscribed"] = "subscribed"; + /** On heartbeat, peer was included to fill the outbound quota. */ + InclusionReason["Outbound"] = "outbound"; + /** On heartbeat, not enough peers in mesh */ + InclusionReason["NotEnough"] = "not_enough"; + /** On heartbeat opportunistic grafting due to low mesh score */ + InclusionReason["Opportunistic"] = "opportunistic"; + })(InclusionReason || (InclusionReason = {})); + /// Reasons why a peer was removed from the mesh. + var ChurnReason; + (function (ChurnReason) { + /// Peer disconnected. + ChurnReason["Dc"] = "disconnected"; + /// Peer had a bad score. + ChurnReason["BadScore"] = "bad_score"; + /// Peer sent a PRUNE. + ChurnReason["Prune"] = "prune"; + /// Too many peers. + ChurnReason["Excess"] = "excess"; + })(ChurnReason || (ChurnReason = {})); + /// Kinds of reasons a peer's score has been penalized + var ScorePenalty; + (function (ScorePenalty) { + /// A peer grafted before waiting the back-off time. + ScorePenalty["GraftBackoff"] = "graft_backoff"; + /// A Peer did not respond to an IWANT request in time. + ScorePenalty["BrokenPromise"] = "broken_promise"; + /// A Peer did not send enough messages as expected. + ScorePenalty["MessageDeficit"] = "message_deficit"; + /// Too many peers under one IP address. 
+ ScorePenalty["IPColocation"] = "IP_colocation"; + })(ScorePenalty || (ScorePenalty = {})); + var IHaveIgnoreReason; + (function (IHaveIgnoreReason) { + IHaveIgnoreReason["LowScore"] = "low_score"; + IHaveIgnoreReason["MaxIhave"] = "max_ihave"; + IHaveIgnoreReason["MaxIasked"] = "max_iasked"; + })(IHaveIgnoreReason || (IHaveIgnoreReason = {})); + var ScoreThreshold; + (function (ScoreThreshold) { + ScoreThreshold["graylist"] = "graylist"; + ScoreThreshold["publish"] = "publish"; + ScoreThreshold["gossip"] = "gossip"; + ScoreThreshold["mesh"] = "mesh"; + })(ScoreThreshold || (ScoreThreshold = {})); + /** + * A collection of metrics used throughout the Gossipsub behaviour. + * NOTE: except for special reasons, do not add more than 1 label for frequent metrics, + * there's a performance penalty as of June 2023. + */ + // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types, @typescript-eslint/explicit-function-return-type + function getMetrics(register, topicStrToLabel, opts) { + // Using function style instead of class to prevent having to re-declare all MetricsPrometheus types. + return { + /* Metrics for static config */ + protocolsEnabled: register.gauge({ + name: 'gossipsub_protocol', + help: 'Status of enabled protocols', + labelNames: ['protocol'] + }), + /* Metrics per known topic */ + /** + * Status of our subscription to this topic. This metric allows analyzing other topic metrics + * filtered by our current subscription status. + * = rust-libp2p `topic_subscription_status` */ + topicSubscriptionStatus: register.gauge({ + name: 'gossipsub_topic_subscription_status', + help: 'Status of our subscription to this topic', + labelNames: ['topicStr'] + }), + /** Number of peers subscribed to each topic. This allows us to analyze a topic's behaviour + * regardless of our subscription status. 
*/ + topicPeersCount: register.gauge({ + name: 'gossipsub_topic_peer_count', + help: 'Number of peers subscribed to each topic', + labelNames: ['topicStr'] + }), + /* Metrics regarding mesh state */ + /** + * Number of peers in our mesh. This metric should be updated with the count of peers for a + * topic in the mesh regardless of inclusion and churn events. + * = rust-libp2p `mesh_peer_counts` */ + meshPeerCounts: register.gauge({ + name: 'gossipsub_mesh_peer_count', + help: 'Number of peers in our mesh', + labelNames: ['topicStr'] + }), + /** + * Number of times we include peers in a topic mesh for different reasons. + * = rust-libp2p `mesh_peer_inclusion_events` */ + meshPeerInclusionEventsFanout: register.gauge({ + name: 'gossipsub_mesh_peer_inclusion_events_fanout_total', + help: 'Number of times we include peers in a topic mesh for fanout reasons', + labelNames: ['topic'] + }), + meshPeerInclusionEventsRandom: register.gauge({ + name: 'gossipsub_mesh_peer_inclusion_events_random_total', + help: 'Number of times we include peers in a topic mesh for random reasons', + labelNames: ['topic'] + }), + meshPeerInclusionEventsSubscribed: register.gauge({ + name: 'gossipsub_mesh_peer_inclusion_events_subscribed_total', + help: 'Number of times we include peers in a topic mesh for subscribed reasons', + labelNames: ['topic'] + }), + meshPeerInclusionEventsOutbound: register.gauge({ + name: 'gossipsub_mesh_peer_inclusion_events_outbound_total', + help: 'Number of times we include peers in a topic mesh for outbound reasons', + labelNames: ['topic'] + }), + meshPeerInclusionEventsNotEnough: register.gauge({ + name: 'gossipsub_mesh_peer_inclusion_events_not_enough_total', + help: 'Number of times we include peers in a topic mesh for not_enough reasons', + labelNames: ['topic'] + }), + meshPeerInclusionEventsOpportunistic: register.gauge({ + name: 'gossipsub_mesh_peer_inclusion_events_opportunistic_total', + help: 'Number of times we include peers in a topic mesh for 
opportunistic reasons', + labelNames: ['topic'] + }), + meshPeerInclusionEventsUnknown: register.gauge({ + name: 'gossipsub_mesh_peer_inclusion_events_unknown_total', + help: 'Number of times we include peers in a topic mesh for unknown reasons', + labelNames: ['topic'] + }), + /** + * Number of times we remove peers in a topic mesh for different reasons. + * = rust-libp2p `mesh_peer_churn_events` */ + meshPeerChurnEventsDisconnected: register.gauge({ + name: 'gossipsub_peer_churn_events_disconnected_total', + help: 'Number of times we remove peers in a topic mesh for disconnected reasons', + labelNames: ['topic'] + }), + meshPeerChurnEventsBadScore: register.gauge({ + name: 'gossipsub_peer_churn_events_bad_score_total', + help: 'Number of times we remove peers in a topic mesh for bad_score reasons', + labelNames: ['topic'] + }), + meshPeerChurnEventsPrune: register.gauge({ + name: 'gossipsub_peer_churn_events_prune_total', + help: 'Number of times we remove peers in a topic mesh for prune reasons', + labelNames: ['topic'] + }), + meshPeerChurnEventsExcess: register.gauge({ + name: 'gossipsub_peer_churn_events_excess_total', + help: 'Number of times we remove peers in a topic mesh for excess reasons', + labelNames: ['topic'] + }), + meshPeerChurnEventsUnknown: register.gauge({ + name: 'gossipsub_peer_churn_events_unknown_total', + help: 'Number of times we remove peers in a topic mesh for unknown reasons', + labelNames: ['topic'] + }), + /* General Metrics */ + /** + * Gossipsub supports floodsub, gossipsub v1.0 and gossipsub v1.1. Peers are classified based + * on which protocol they support. This metric keeps track of the number of peers that are + * connected of each type. */ + peersPerProtocol: register.gauge({ + name: 'gossipsub_peers_per_protocol_count', + help: 'Peers connected for each topic', + labelNames: ['protocol'] + }), + /** The time it takes to complete one iteration of the heartbeat. 
*/ + heartbeatDuration: register.histogram({ + name: 'gossipsub_heartbeat_duration_seconds', + help: 'The time it takes to complete one iteration of the heartbeat', + // Should take <10ms, over 1s it's a huge issue that needs debugging, since a heartbeat will be cancelled + buckets: [0.01, 0.1, 1] + }), + /** Heartbeat run took longer than heartbeat interval so next is skipped */ + heartbeatSkipped: register.gauge({ + name: 'gossipsub_heartbeat_skipped', + help: 'Heartbeat run took longer than heartbeat interval so next is skipped' + }), + /** + * Message validation results for each topic. + * Invalid == Reject? + * = rust-libp2p `invalid_messages`, `accepted_messages`, `ignored_messages`, `rejected_messages` */ + acceptedMessagesTotal: register.gauge({ + name: 'gossipsub_accepted_messages_total', + help: 'Total accepted messages for each topic', + labelNames: ['topic'] + }), + ignoredMessagesTotal: register.gauge({ + name: 'gossipsub_ignored_messages_total', + help: 'Total ignored messages for each topic', + labelNames: ['topic'] + }), + rejectedMessagesTotal: register.gauge({ + name: 'gossipsub_rejected_messages_total', + help: 'Total rejected messages for each topic', + labelNames: ['topic'] + }), + unknownValidationResultsTotal: register.gauge({ + name: 'gossipsub_unknown_validation_results_total', + help: 'Total unknown validation results for each topic', + labelNames: ['topic'] + }), + /** + * When the user validates a message, it tries to re propagate it to its mesh peers. If the + * message expires from the memcache before it can be validated, we count this a cache miss + * and it is an indicator that the memcache size should be increased. 
+ * = rust-libp2p `mcache_misses` */ + asyncValidationMcacheHit: register.gauge({ + name: 'gossipsub_async_validation_mcache_hit_total', + help: 'Async validation result reported by the user layer', + labelNames: ['hit'] + }), + asyncValidationDelayFromFirstSeenSec: register.histogram({ + name: 'gossipsub_async_validation_delay_from_first_seen', + help: 'Async validation report delay from first seen in second', + buckets: [0.01, 0.03, 0.1, 0.3, 1, 3, 10] + }), + asyncValidationUnknownFirstSeen: register.gauge({ + name: 'gossipsub_async_validation_unknown_first_seen_count_total', + help: 'Async validation report unknown first seen value for message' + }), + // peer stream + peerReadStreamError: register.gauge({ + name: 'gossipsub_peer_read_stream_err_count_total', + help: 'Peer read stream error' + }), + // RPC outgoing. Track byte length + data structure sizes + rpcRecvBytes: register.gauge({ name: 'gossipsub_rpc_recv_bytes_total', help: 'RPC recv' }), + rpcRecvCount: register.gauge({ name: 'gossipsub_rpc_recv_count_total', help: 'RPC recv' }), + rpcRecvSubscription: register.gauge({ name: 'gossipsub_rpc_recv_subscription_total', help: 'RPC recv' }), + rpcRecvMessage: register.gauge({ name: 'gossipsub_rpc_recv_message_total', help: 'RPC recv' }), + rpcRecvControl: register.gauge({ name: 'gossipsub_rpc_recv_control_total', help: 'RPC recv' }), + rpcRecvIHave: register.gauge({ name: 'gossipsub_rpc_recv_ihave_total', help: 'RPC recv' }), + rpcRecvIWant: register.gauge({ name: 'gossipsub_rpc_recv_iwant_total', help: 'RPC recv' }), + rpcRecvGraft: register.gauge({ name: 'gossipsub_rpc_recv_graft_total', help: 'RPC recv' }), + rpcRecvPrune: register.gauge({ name: 'gossipsub_rpc_recv_prune_total', help: 'RPC recv' }), + rpcDataError: register.gauge({ name: 'gossipsub_rpc_data_err_count_total', help: 'RPC data error' }), + rpcRecvError: register.gauge({ name: 'gossipsub_rpc_recv_err_count_total', help: 'RPC recv error' }), + /** Total count of RPC dropped because 
acceptFrom() == false */ + rpcRecvNotAccepted: register.gauge({ + name: 'gossipsub_rpc_rcv_not_accepted_total', + help: 'Total count of RPC dropped because acceptFrom() == false' + }), + // RPC incoming. Track byte length + data structure sizes + rpcSentBytes: register.gauge({ name: 'gossipsub_rpc_sent_bytes_total', help: 'RPC sent' }), + rpcSentCount: register.gauge({ name: 'gossipsub_rpc_sent_count_total', help: 'RPC sent' }), + rpcSentSubscription: register.gauge({ name: 'gossipsub_rpc_sent_subscription_total', help: 'RPC sent' }), + rpcSentMessage: register.gauge({ name: 'gossipsub_rpc_sent_message_total', help: 'RPC sent' }), + rpcSentControl: register.gauge({ name: 'gossipsub_rpc_sent_control_total', help: 'RPC sent' }), + rpcSentIHave: register.gauge({ name: 'gossipsub_rpc_sent_ihave_total', help: 'RPC sent' }), + rpcSentIWant: register.gauge({ name: 'gossipsub_rpc_sent_iwant_total', help: 'RPC sent' }), + rpcSentGraft: register.gauge({ name: 'gossipsub_rpc_sent_graft_total', help: 'RPC sent' }), + rpcSentPrune: register.gauge({ name: 'gossipsub_rpc_sent_prune_total', help: 'RPC sent' }), + // publish message. 
Track peers sent to and bytes + /** Total count of msg published by topic */ + msgPublishCount: register.gauge({ + name: 'gossipsub_msg_publish_count_total', + help: 'Total count of msg published by topic', + labelNames: ['topic'] + }), + /** Total count of peers that we publish a msg to */ + msgPublishPeersByTopic: register.gauge({ + name: 'gossipsub_msg_publish_peers_total', + help: 'Total count of peers that we publish a msg to', + labelNames: ['topic'] + }), + /** Total count of peers (by group) that we publish a msg to */ + directPeersPublishedTotal: register.gauge({ + name: 'gossipsub_direct_peers_published_total', + help: 'Total direct peers that we publish a msg to', + labelNames: ['topic'] + }), + floodsubPeersPublishedTotal: register.gauge({ + name: 'gossipsub_floodsub_peers_published_total', + help: 'Total floodsub peers that we publish a msg to', + labelNames: ['topic'] + }), + meshPeersPublishedTotal: register.gauge({ + name: 'gossipsub_mesh_peers_published_total', + help: 'Total mesh peers that we publish a msg to', + labelNames: ['topic'] + }), + fanoutPeersPublishedTotal: register.gauge({ + name: 'gossipsub_fanout_peers_published_total', + help: 'Total fanout peers that we publish a msg to', + labelNames: ['topic'] + }), + /** Total count of msg publish data.length bytes */ + msgPublishBytes: register.gauge({ + name: 'gossipsub_msg_publish_bytes_total', + help: 'Total count of msg publish data.length bytes', + labelNames: ['topic'] + }), + /** Total time in seconds to publish a message */ + msgPublishTime: register.histogram({ + name: 'gossipsub_msg_publish_seconds', + help: 'Total time in seconds to publish a message', + buckets: [0.001, 0.002, 0.005, 0.01, 0.1, 0.5, 1], + labelNames: ['topic'] + }), + /** Total count of msg forwarded by topic */ + msgForwardCount: register.gauge({ + name: 'gossipsub_msg_forward_count_total', + help: 'Total count of msg forwarded by topic', + labelNames: ['topic'] + }), + /** Total count of peers that we forward a 
msg to */ + msgForwardPeers: register.gauge({ + name: 'gossipsub_msg_forward_peers_total', + help: 'Total count of peers that we forward a msg to', + labelNames: ['topic'] + }), + /** Total count of recv msgs before any validation */ + msgReceivedPreValidation: register.gauge({ + name: 'gossipsub_msg_received_prevalidation_total', + help: 'Total count of recv msgs before any validation', + labelNames: ['topic'] + }), + /** Total count of recv msgs error */ + msgReceivedError: register.gauge({ + name: 'gossipsub_msg_received_error_total', + help: 'Total count of recv msgs error', + labelNames: ['topic'] + }), + /** Tracks distribution of recv msgs by duplicate, invalid, valid */ + prevalidationInvalidTotal: register.gauge({ + name: 'gossipsub_pre_validation_invalid_total', + help: 'Total count of invalid messages received', + labelNames: ['topic'] + }), + prevalidationValidTotal: register.gauge({ + name: 'gossipsub_pre_validation_valid_total', + help: 'Total count of valid messages received', + labelNames: ['topic'] + }), + prevalidationDuplicateTotal: register.gauge({ + name: 'gossipsub_pre_validation_duplicate_total', + help: 'Total count of duplicate messages received', + labelNames: ['topic'] + }), + prevalidationUnknownTotal: register.gauge({ + name: 'gossipsub_pre_validation_unknown_status_total', + help: 'Total count of unknown_status messages received', + labelNames: ['topic'] + }), + /** Tracks specific reason of invalid */ + msgReceivedInvalid: register.gauge({ + name: 'gossipsub_msg_received_invalid_total', + help: 'Tracks specific reason of invalid', + labelNames: ['error'] + }), + msgReceivedInvalidByTopic: register.gauge({ + name: 'gossipsub_msg_received_invalid_by_topic_total', + help: 'Tracks specific invalid message by topic', + labelNames: ['topic'] + }), + /** Track duplicate message delivery time */ + duplicateMsgDeliveryDelay: register.histogram({ + name: 'gossisub_duplicate_msg_delivery_delay_seconds', + help: 'Time since the 1st duplicated 
message validated', + labelNames: ['topic'], + buckets: [ + 0.25 * opts.maxMeshMessageDeliveriesWindowSec, + 0.5 * opts.maxMeshMessageDeliveriesWindowSec, + Number(opts.maxMeshMessageDeliveriesWindowSec), + 2 * opts.maxMeshMessageDeliveriesWindowSec, + 4 * opts.maxMeshMessageDeliveriesWindowSec + ] + }), + /** Total count of late msg delivery total by topic */ + duplicateMsgLateDelivery: register.gauge({ + name: 'gossisub_duplicate_msg_late_delivery_total', + help: 'Total count of late duplicate message delivery by topic, which triggers P3 penalty', + labelNames: ['topic'] + }), + duplicateMsgIgnored: register.gauge({ + name: 'gossisub_ignored_published_duplicate_msgs_total', + help: 'Total count of published duplicate message ignored by topic', + labelNames: ['topic'] + }), + /* Metrics related to scoring */ + /** Total times score() is called */ + scoreFnCalls: register.gauge({ + name: 'gossipsub_score_fn_calls_total', + help: 'Total times score() is called' + }), + /** Total times score() call actually computed computeScore(), no cache */ + scoreFnRuns: register.gauge({ + name: 'gossipsub_score_fn_runs_total', + help: 'Total times score() call actually computed computeScore(), no cache' + }), + scoreCachedDelta: register.histogram({ + name: 'gossipsub_score_cache_delta', + help: 'Delta of score between cached values that expired', + buckets: [10, 100, 1000] + }), + /** Current count of peers by score threshold */ + peersByScoreThreshold: register.gauge({ + name: 'gossipsub_peers_by_score_threshold_count', + help: 'Current count of peers by score threshold', + labelNames: ['threshold'] + }), + score: register.avgMinMax({ + name: 'gossipsub_score', + help: 'Avg min max of gossip scores' + }), + /** + * Separate score weights + * Need to use 2-label metrics in this case to debug the score weights + **/ + scoreWeights: register.avgMinMax({ + name: 'gossipsub_score_weights', + help: 'Separate score weights', + labelNames: ['topic', 'p'] + }), + /** Histogram of the 
scores for each mesh topic. */ + // TODO: Not implemented + scorePerMesh: register.avgMinMax({ + name: 'gossipsub_score_per_mesh', + help: 'Histogram of the scores for each mesh topic', + labelNames: ['topic'] + }), + /** A counter of the kind of penalties being applied to peers. */ + // TODO: Not fully implemented + scoringPenalties: register.gauge({ + name: 'gossipsub_scoring_penalties_total', + help: 'A counter of the kind of penalties being applied to peers', + labelNames: ['penalty'] + }), + behaviourPenalty: register.histogram({ + name: 'gossipsub_peer_stat_behaviour_penalty', + help: 'Current peer stat behaviour_penalty at each scrape', + buckets: [ + 0.25 * opts.behaviourPenaltyThreshold, + 0.5 * opts.behaviourPenaltyThreshold, + Number(opts.behaviourPenaltyThreshold), + 2 * opts.behaviourPenaltyThreshold, + 4 * opts.behaviourPenaltyThreshold + ] + }), + // TODO: + // - iasked per peer (on heartbeat) + // - when promise is resolved, track messages from promises + /** Total received IHAVE messages that we ignore for some reason */ + ihaveRcvIgnored: register.gauge({ + name: 'gossipsub_ihave_rcv_ignored_total', + help: 'Total received IHAVE messages that we ignore for some reason', + labelNames: ['reason'] + }), + /** Total received IHAVE messages by topic */ + ihaveRcvMsgids: register.gauge({ + name: 'gossipsub_ihave_rcv_msgids_total', + help: 'Total received IHAVE messages by topic', + labelNames: ['topic'] + }), + /** + * Total messages per topic we don't have. Not actual requests. + * The number of times we have decided that an IWANT control message is required for this + * topic. A very high metric might indicate an underperforming network. 
+ * = rust-libp2p `topic_iwant_msgs` */ + ihaveRcvNotSeenMsgids: register.gauge({ + name: 'gossipsub_ihave_rcv_not_seen_msgids_total', + help: 'Total messages per topic we do not have, not actual requests', + labelNames: ['topic'] + }), + /** Total received IWANT messages by topic */ + iwantRcvMsgids: register.gauge({ + name: 'gossipsub_iwant_rcv_msgids_total', + help: 'Total received IWANT messages by topic', + labelNames: ['topic'] + }), + /** Total requested messageIDs that we don't have */ + iwantRcvDonthaveMsgids: register.gauge({ + name: 'gossipsub_iwant_rcv_dont_have_msgids_total', + help: 'Total requested messageIDs that we do not have' + }), + iwantPromiseStarted: register.gauge({ + name: 'gossipsub_iwant_promise_sent_total', + help: 'Total count of started IWANT promises' + }), + /** Total count of resolved IWANT promises */ + iwantPromiseResolved: register.gauge({ + name: 'gossipsub_iwant_promise_resolved_total', + help: 'Total count of resolved IWANT promises' + }), + /** Total count of resolved IWANT promises from duplicate messages */ + iwantPromiseResolvedFromDuplicate: register.gauge({ + name: 'gossipsub_iwant_promise_resolved_from_duplicate_total', + help: 'Total count of resolved IWANT promises from duplicate messages' + }), + /** Total count of peers we have asked IWANT promises that are resolved */ + iwantPromiseResolvedPeers: register.gauge({ + name: 'gossipsub_iwant_promise_resolved_peers', + help: 'Total count of peers we have asked IWANT promises that are resolved' + }), + iwantPromiseBroken: register.gauge({ + name: 'gossipsub_iwant_promise_broken', + help: 'Total count of broken IWANT promises' + }), + iwantMessagePruned: register.gauge({ + name: 'gossipsub_iwant_message_pruned', + help: 'Total count of pruned IWANT messages' + }), + /** Histogram of delivery time of resolved IWANT promises */ + iwantPromiseDeliveryTime: register.histogram({ + name: 'gossipsub_iwant_promise_delivery_seconds', + help: 'Histogram of delivery time of resolved 
IWANT promises', + buckets: [ + 0.5 * opts.gossipPromiseExpireSec, + Number(opts.gossipPromiseExpireSec), + 2 * opts.gossipPromiseExpireSec, + 4 * opts.gossipPromiseExpireSec + ] + }), + iwantPromiseUntracked: register.gauge({ + name: 'gossip_iwant_promise_untracked', + help: 'Total count of untracked IWANT promise' + }), + /** Backoff time */ + connectedPeersBackoffSec: register.histogram({ + name: 'gossipsub_connected_peers_backoff_seconds', + help: 'Backoff time in seconds', + // Using 1 seconds as minimum as that's close to the heartbeat duration, no need for more resolution. + // As per spec, backoff times are 10 seconds for UnsubscribeBackoff and 60 seconds for PruneBackoff. + // Higher values of 60 seconds should not occur, but we add 120 seconds just in case + // https://github.com/libp2p/specs/blob/master/pubsub/gossipsub/gossipsub-v1.1.md#overview-of-new-parameters + buckets: [1, 2, 4, 10, 20, 60, 120] + }), + /* Data structure sizes */ + /** Unbounded cache sizes */ + cacheSize: register.gauge({ + name: 'gossipsub_cache_size', + help: 'Unbounded cache sizes', + labelNames: ['cache'] + }), + /** Current mcache msg count */ + mcacheSize: register.gauge({ + name: 'gossipsub_mcache_size', + help: 'Current mcache msg count' + }), + mcacheNotValidatedCount: register.gauge({ + name: 'gossipsub_mcache_not_validated_count', + help: 'Current mcache msg count not validated' + }), + fastMsgIdCacheCollision: register.gauge({ + name: 'gossipsub_fastmsgid_cache_collision_total', + help: 'Total count of key collisions on fastmsgid cache put' + }), + newConnectionCount: register.gauge({ + name: 'gossipsub_new_connection_total', + help: 'Total new connection by status', + labelNames: ['status'] + }), + topicStrToLabel, + toTopic(topicStr) { + return this.topicStrToLabel.get(topicStr) ?? 
topicStr; + }, + /** We joined a topic */ + onJoin(topicStr) { + this.topicSubscriptionStatus.set({ topicStr }, 1); + this.meshPeerCounts.set({ topicStr }, 0); // Reset count + }, + /** We left a topic */ + onLeave(topicStr) { + this.topicSubscriptionStatus.set({ topicStr }, 0); + this.meshPeerCounts.set({ topicStr }, 0); // Reset count + }, + /** Register the inclusion of peers in our mesh due to some reason. */ + onAddToMesh(topicStr, reason, count) { + const topic = this.toTopic(topicStr); + switch (reason) { + case InclusionReason.Fanout: + this.meshPeerInclusionEventsFanout.inc({ topic }, count); + break; + case InclusionReason.Random: + this.meshPeerInclusionEventsRandom.inc({ topic }, count); + break; + case InclusionReason.Subscribed: + this.meshPeerInclusionEventsSubscribed.inc({ topic }, count); + break; + case InclusionReason.Outbound: + this.meshPeerInclusionEventsOutbound.inc({ topic }, count); + break; + case InclusionReason.NotEnough: + this.meshPeerInclusionEventsNotEnough.inc({ topic }, count); + break; + case InclusionReason.Opportunistic: + this.meshPeerInclusionEventsOpportunistic.inc({ topic }, count); + break; + default: + this.meshPeerInclusionEventsUnknown.inc({ topic }, count); + break; + } + }, + /** Register the removal of peers in our mesh due to some reason */ + // - remove_peer_from_mesh() + // - heartbeat() Churn::BadScore + // - heartbeat() Churn::Excess + // - on_disconnect() Churn::Ds + onRemoveFromMesh(topicStr, reason, count) { + const topic = this.toTopic(topicStr); + switch (reason) { + case ChurnReason.Dc: + this.meshPeerChurnEventsDisconnected.inc({ topic }, count); + break; + case ChurnReason.BadScore: + this.meshPeerChurnEventsBadScore.inc({ topic }, count); + break; + case ChurnReason.Prune: + this.meshPeerChurnEventsPrune.inc({ topic }, count); + break; + case ChurnReason.Excess: + this.meshPeerChurnEventsExcess.inc({ topic }, count); + break; + default: + this.meshPeerChurnEventsUnknown.inc({ topic }, count); + break; + 
} + }, + /** + * Update validation result to metrics + * + * @param messageRecord - null means the message's mcache record was not known at the time of acceptance report + */ + onReportValidation(messageRecord, acceptance, firstSeenTimestampMs) { + this.asyncValidationMcacheHit.inc({ hit: messageRecord != null ? 'hit' : 'miss' }); + if (messageRecord != null) { + const topic = this.toTopic(messageRecord.message.topic); + switch (acceptance) { + case TopicValidatorResult.Accept: + this.acceptedMessagesTotal.inc({ topic }); + break; + case TopicValidatorResult.Ignore: + this.ignoredMessagesTotal.inc({ topic }); + break; + case TopicValidatorResult.Reject: + this.rejectedMessagesTotal.inc({ topic }); + break; + default: + this.unknownValidationResultsTotal.inc({ topic }); + break; + } + } + if (firstSeenTimestampMs != null) { + this.asyncValidationDelayFromFirstSeenSec.observe((Date.now() - firstSeenTimestampMs) / 1000); + } + else { + this.asyncValidationUnknownFirstSeen.inc(); + } + }, + /** + * - in handle_graft() Penalty::GraftBackoff + * - in apply_iwant_penalties() Penalty::BrokenPromise + * - in metric_score() P3 Penalty::MessageDeficit + * - in metric_score() P6 Penalty::IPColocation + */ + onScorePenalty(penalty) { + // Can this be labeled by topic too? 
+ this.scoringPenalties.inc({ penalty }, 1); + }, + onIhaveRcv(topicStr, ihave, idonthave) { + const topic = this.toTopic(topicStr); + this.ihaveRcvMsgids.inc({ topic }, ihave); + this.ihaveRcvNotSeenMsgids.inc({ topic }, idonthave); + }, + onIwantRcv(iwantByTopic, iwantDonthave) { + for (const [topicStr, iwant] of iwantByTopic) { + const topic = this.toTopic(topicStr); + this.iwantRcvMsgids.inc({ topic }, iwant); + } + this.iwantRcvDonthaveMsgids.inc(iwantDonthave); + }, + onForwardMsg(topicStr, tosendCount) { + const topic = this.toTopic(topicStr); + this.msgForwardCount.inc({ topic }, 1); + this.msgForwardPeers.inc({ topic }, tosendCount); + }, + onPublishMsg(topicStr, tosendGroupCount, tosendCount, dataLen, ms) { + const topic = this.toTopic(topicStr); + this.msgPublishCount.inc({ topic }, 1); + this.msgPublishBytes.inc({ topic }, tosendCount * dataLen); + this.msgPublishPeersByTopic.inc({ topic }, tosendCount); + this.directPeersPublishedTotal.inc({ topic }, tosendGroupCount.direct); + this.floodsubPeersPublishedTotal.inc({ topic }, tosendGroupCount.floodsub); + this.meshPeersPublishedTotal.inc({ topic }, tosendGroupCount.mesh); + this.fanoutPeersPublishedTotal.inc({ topic }, tosendGroupCount.fanout); + this.msgPublishTime.observe({ topic }, ms / 1000); + }, + onMsgRecvPreValidation(topicStr) { + const topic = this.toTopic(topicStr); + this.msgReceivedPreValidation.inc({ topic }, 1); + }, + onMsgRecvError(topicStr) { + const topic = this.toTopic(topicStr); + this.msgReceivedError.inc({ topic }, 1); + }, + onPrevalidationResult(topicStr, status) { + const topic = this.toTopic(topicStr); + switch (status) { + case MessageStatus.duplicate: + this.prevalidationDuplicateTotal.inc({ topic }); + break; + case MessageStatus.invalid: + this.prevalidationInvalidTotal.inc({ topic }); + break; + case MessageStatus.valid: + this.prevalidationValidTotal.inc({ topic }); + break; + default: + this.prevalidationUnknownTotal.inc({ topic }); + break; + } + }, + 
onMsgRecvInvalid(topicStr, reason) { + const topic = this.toTopic(topicStr); + const error = reason.reason === RejectReason.Error ? reason.error : reason.reason; + this.msgReceivedInvalid.inc({ error }, 1); + this.msgReceivedInvalidByTopic.inc({ topic }, 1); + }, + onDuplicateMsgDelivery(topicStr, deliveryDelayMs, isLateDelivery) { + const topic = this.toTopic(topicStr); + this.duplicateMsgDeliveryDelay.observe({ topic }, deliveryDelayMs / 1000); + if (isLateDelivery) { + this.duplicateMsgLateDelivery.inc({ topic }, 1); + } + }, + onPublishDuplicateMsg(topicStr) { + const topic = this.toTopic(topicStr); + this.duplicateMsgIgnored.inc({ topic }, 1); + }, + onPeerReadStreamError() { + this.peerReadStreamError.inc(1); + }, + onRpcRecvError() { + this.rpcRecvError.inc(1); + }, + onRpcDataError() { + this.rpcDataError.inc(1); + }, + onRpcRecv(rpc, rpcBytes) { + this.rpcRecvBytes.inc(rpcBytes); + this.rpcRecvCount.inc(1); + if (rpc.subscriptions != null) + this.rpcRecvSubscription.inc(rpc.subscriptions.length); + if (rpc.messages != null) + this.rpcRecvMessage.inc(rpc.messages.length); + if (rpc.control != null) { + this.rpcRecvControl.inc(1); + if (rpc.control.ihave != null) + this.rpcRecvIHave.inc(rpc.control.ihave.length); + if (rpc.control.iwant != null) + this.rpcRecvIWant.inc(rpc.control.iwant.length); + if (rpc.control.graft != null) + this.rpcRecvGraft.inc(rpc.control.graft.length); + if (rpc.control.prune != null) + this.rpcRecvPrune.inc(rpc.control.prune.length); + } + }, + onRpcSent(rpc, rpcBytes) { + this.rpcSentBytes.inc(rpcBytes); + this.rpcSentCount.inc(1); + if (rpc.subscriptions != null) + this.rpcSentSubscription.inc(rpc.subscriptions.length); + if (rpc.messages != null) + this.rpcSentMessage.inc(rpc.messages.length); + if (rpc.control != null) { + const ihave = rpc.control.ihave?.length ?? 0; + const iwant = rpc.control.iwant?.length ?? 0; + const graft = rpc.control.graft?.length ?? 0; + const prune = rpc.control.prune?.length ?? 
0; + if (ihave > 0) + this.rpcSentIHave.inc(ihave); + if (iwant > 0) + this.rpcSentIWant.inc(iwant); + if (graft > 0) + this.rpcSentGraft.inc(graft); + if (prune > 0) + this.rpcSentPrune.inc(prune); + if (ihave > 0 || iwant > 0 || graft > 0 || prune > 0) + this.rpcSentControl.inc(1); + } + }, + registerScores(scores, scoreThresholds) { + let graylist = 0; + let publish = 0; + let gossip = 0; + let mesh = 0; + for (const score of scores) { + if (score >= scoreThresholds.graylistThreshold) + graylist++; + if (score >= scoreThresholds.publishThreshold) + publish++; + if (score >= scoreThresholds.gossipThreshold) + gossip++; + if (score >= 0) + mesh++; + } + this.peersByScoreThreshold.set({ threshold: ScoreThreshold.graylist }, graylist); + this.peersByScoreThreshold.set({ threshold: ScoreThreshold.publish }, publish); + this.peersByScoreThreshold.set({ threshold: ScoreThreshold.gossip }, gossip); + this.peersByScoreThreshold.set({ threshold: ScoreThreshold.mesh }, mesh); + // Register full score too + this.score.set(scores); + }, + registerScoreWeights(sw) { + for (const [topic, wsTopic] of sw.byTopic) { + this.scoreWeights.set({ topic, p: 'p1' }, wsTopic.p1w); + this.scoreWeights.set({ topic, p: 'p2' }, wsTopic.p2w); + this.scoreWeights.set({ topic, p: 'p3' }, wsTopic.p3w); + this.scoreWeights.set({ topic, p: 'p3b' }, wsTopic.p3bw); + this.scoreWeights.set({ topic, p: 'p4' }, wsTopic.p4w); + } + this.scoreWeights.set({ p: 'p5' }, sw.p5w); + this.scoreWeights.set({ p: 'p6' }, sw.p6w); + this.scoreWeights.set({ p: 'p7' }, sw.p7w); + }, + registerScorePerMesh(mesh, scoreByPeer) { + const peersPerTopicLabel = new Map(); + mesh.forEach((peers, topicStr) => { + // Aggregate by known topicLabel or throw to 'unknown'. This prevent too high cardinality + const topicLabel = this.topicStrToLabel.get(topicStr) ?? 
'unknown'; + let peersInMesh = peersPerTopicLabel.get(topicLabel); + if (peersInMesh == null) { + peersInMesh = new Set(); + peersPerTopicLabel.set(topicLabel, peersInMesh); + } + peers.forEach((p) => peersInMesh?.add(p)); + }); + for (const [topic, peers] of peersPerTopicLabel) { + const meshScores = []; + peers.forEach((peer) => { + meshScores.push(scoreByPeer.get(peer) ?? 0); + }); + this.scorePerMesh.set({ topic }, meshScores); + } + } + }; + } + + const ERR_INVALID_PEER_SCORE_PARAMS = 'ERR_INVALID_PEER_SCORE_PARAMS'; + + const defaultPeerScoreParams = { + topics: {}, + topicScoreCap: 10.0, + appSpecificScore: () => 0.0, + appSpecificWeight: 10.0, + IPColocationFactorWeight: -5.0, + IPColocationFactorThreshold: 10.0, + IPColocationFactorWhitelist: new Set(), + behaviourPenaltyWeight: -10.0, + behaviourPenaltyThreshold: 0.0, + behaviourPenaltyDecay: 0.2, + decayInterval: 1000.0, + decayToZero: 0.1, + retainScore: 3600 * 1000 + }; + const defaultTopicScoreParams = { + topicWeight: 0.5, + timeInMeshWeight: 1, + timeInMeshQuantum: 1, + timeInMeshCap: 3600, + firstMessageDeliveriesWeight: 1, + firstMessageDeliveriesDecay: 0.5, + firstMessageDeliveriesCap: 2000, + meshMessageDeliveriesWeight: -1, + meshMessageDeliveriesDecay: 0.5, + meshMessageDeliveriesCap: 100, + meshMessageDeliveriesThreshold: 20, + meshMessageDeliveriesWindow: 10, + meshMessageDeliveriesActivation: 5000, + meshFailurePenaltyWeight: -1, + meshFailurePenaltyDecay: 0.5, + invalidMessageDeliveriesWeight: -1, + invalidMessageDeliveriesDecay: 0.3 + }; + function createPeerScoreParams(p = {}) { + return { + ...defaultPeerScoreParams, + ...p, + topics: (p.topics != null) + ? 
Object.entries(p.topics).reduce((topics, [topic, topicScoreParams]) => { + topics[topic] = createTopicScoreParams(topicScoreParams); + return topics; + }, {}) + : {} + }; + } + function createTopicScoreParams(p = {}) { + return { + ...defaultTopicScoreParams, + ...p + }; + } + // peer score parameter validation + function validatePeerScoreParams(p) { + for (const [topic, params] of Object.entries(p.topics)) { + try { + validateTopicScoreParams(params); + } + catch (e) { + throw new CodeError$2(`invalid score parameters for topic ${topic}: ${e.message}`, ERR_INVALID_PEER_SCORE_PARAMS); + } + } + // check that the topic score is 0 or something positive + if (p.topicScoreCap < 0) { + throw new CodeError$2('invalid topic score cap; must be positive (or 0 for no cap)', ERR_INVALID_PEER_SCORE_PARAMS); + } + // check that we have an app specific score; the weight can be anything (but expected positive) + if (p.appSpecificScore === null || p.appSpecificScore === undefined) { + throw new CodeError$2('missing application specific score function', ERR_INVALID_PEER_SCORE_PARAMS); + } + // check the IP colocation factor + if (p.IPColocationFactorWeight > 0) { + throw new CodeError$2('invalid IPColocationFactorWeight; must be negative (or 0 to disable)', ERR_INVALID_PEER_SCORE_PARAMS); + } + if (p.IPColocationFactorWeight !== 0 && p.IPColocationFactorThreshold < 1) { + throw new CodeError$2('invalid IPColocationFactorThreshold; must be at least 1', ERR_INVALID_PEER_SCORE_PARAMS); + } + // check the behaviour penalty + if (p.behaviourPenaltyWeight > 0) { + throw new CodeError$2('invalid BehaviourPenaltyWeight; must be negative (or 0 to disable)', ERR_INVALID_PEER_SCORE_PARAMS); + } + if (p.behaviourPenaltyWeight !== 0 && (p.behaviourPenaltyDecay <= 0 || p.behaviourPenaltyDecay >= 1)) { + throw new CodeError$2('invalid BehaviourPenaltyDecay; must be between 0 and 1', ERR_INVALID_PEER_SCORE_PARAMS); + } + // check the decay parameters + if (p.decayInterval < 1000) { + throw new 
CodeError$2('invalid DecayInterval; must be at least 1s', ERR_INVALID_PEER_SCORE_PARAMS); + } + if (p.decayToZero <= 0 || p.decayToZero >= 1) { + throw new CodeError$2('invalid DecayToZero; must be between 0 and 1', ERR_INVALID_PEER_SCORE_PARAMS); + } + // no need to check the score retention; a value of 0 means that we don't retain scores + } + // eslint-disable-next-line complexity + function validateTopicScoreParams(p) { + // make sure we have a sane topic weight + if (p.topicWeight < 0) { + throw new CodeError$2('invalid topic weight; must be >= 0', ERR_INVALID_PEER_SCORE_PARAMS); + } + // check P1 + if (p.timeInMeshQuantum === 0) { + throw new CodeError$2('invalid TimeInMeshQuantum; must be non zero', ERR_INVALID_PEER_SCORE_PARAMS); + } + if (p.timeInMeshWeight < 0) { + throw new CodeError$2('invalid TimeInMeshWeight; must be positive (or 0 to disable)', ERR_INVALID_PEER_SCORE_PARAMS); + } + if (p.timeInMeshWeight !== 0 && p.timeInMeshQuantum <= 0) { + throw new CodeError$2('invalid TimeInMeshQuantum; must be positive', ERR_INVALID_PEER_SCORE_PARAMS); + } + if (p.timeInMeshWeight !== 0 && p.timeInMeshCap <= 0) { + throw new CodeError$2('invalid TimeInMeshCap; must be positive', ERR_INVALID_PEER_SCORE_PARAMS); + } + // check P2 + if (p.firstMessageDeliveriesWeight < 0) { + throw new CodeError$2('invallid FirstMessageDeliveriesWeight; must be positive (or 0 to disable)', ERR_INVALID_PEER_SCORE_PARAMS); + } + if (p.firstMessageDeliveriesWeight !== 0 && + (p.firstMessageDeliveriesDecay <= 0 || p.firstMessageDeliveriesDecay >= 1)) { + throw new CodeError$2('invalid FirstMessageDeliveriesDecay; must be between 0 and 1', ERR_INVALID_PEER_SCORE_PARAMS); + } + if (p.firstMessageDeliveriesWeight !== 0 && p.firstMessageDeliveriesCap <= 0) { + throw new CodeError$2('invalid FirstMessageDeliveriesCap; must be positive', ERR_INVALID_PEER_SCORE_PARAMS); + } + // check P3 + if (p.meshMessageDeliveriesWeight > 0) { + throw new CodeError$2('invalid MeshMessageDeliveriesWeight; 
must be negative (or 0 to disable)', ERR_INVALID_PEER_SCORE_PARAMS); + } + if (p.meshMessageDeliveriesWeight !== 0 && (p.meshMessageDeliveriesDecay <= 0 || p.meshMessageDeliveriesDecay >= 1)) { + throw new CodeError$2('invalid MeshMessageDeliveriesDecay; must be between 0 and 1', ERR_INVALID_PEER_SCORE_PARAMS); + } + if (p.meshMessageDeliveriesWeight !== 0 && p.meshMessageDeliveriesCap <= 0) { + throw new CodeError$2('invalid MeshMessageDeliveriesCap; must be positive', ERR_INVALID_PEER_SCORE_PARAMS); + } + if (p.meshMessageDeliveriesWeight !== 0 && p.meshMessageDeliveriesThreshold <= 0) { + throw new CodeError$2('invalid MeshMessageDeliveriesThreshold; must be positive', ERR_INVALID_PEER_SCORE_PARAMS); + } + if (p.meshMessageDeliveriesWindow < 0) { + throw new CodeError$2('invalid MeshMessageDeliveriesWindow; must be non-negative', ERR_INVALID_PEER_SCORE_PARAMS); + } + if (p.meshMessageDeliveriesWeight !== 0 && p.meshMessageDeliveriesActivation < 1000) { + throw new CodeError$2('invalid MeshMessageDeliveriesActivation; must be at least 1s', ERR_INVALID_PEER_SCORE_PARAMS); + } + // check P3b + if (p.meshFailurePenaltyWeight > 0) { + throw new CodeError$2('invalid MeshFailurePenaltyWeight; must be negative (or 0 to disable)', ERR_INVALID_PEER_SCORE_PARAMS); + } + if (p.meshFailurePenaltyWeight !== 0 && (p.meshFailurePenaltyDecay <= 0 || p.meshFailurePenaltyDecay >= 1)) { + throw new CodeError$2('invalid MeshFailurePenaltyDecay; must be between 0 and 1', ERR_INVALID_PEER_SCORE_PARAMS); + } + // check P4 + if (p.invalidMessageDeliveriesWeight > 0) { + throw new CodeError$2('invalid InvalidMessageDeliveriesWeight; must be negative (or 0 to disable)', ERR_INVALID_PEER_SCORE_PARAMS); + } + if (p.invalidMessageDeliveriesDecay <= 0 || p.invalidMessageDeliveriesDecay >= 1) { + throw new CodeError$2('invalid InvalidMessageDeliveriesDecay; must be between 0 and 1', ERR_INVALID_PEER_SCORE_PARAMS); + } + } + + const defaultPeerScoreThresholds = { + gossipThreshold: -10, + 
publishThreshold: -50, + graylistThreshold: -80, + acceptPXThreshold: 10, + opportunisticGraftThreshold: 20 + }; + function createPeerScoreThresholds(p = {}) { + return { + ...defaultPeerScoreThresholds, + ...p + }; + } + + /** + * Exclude up to `ineed` items from a set if item meets condition `cond` + */ + function removeItemsFromSet(superSet, ineed, cond = () => true) { + const subset = new Set(); + if (ineed <= 0) + return subset; + for (const id of superSet) { + if (subset.size >= ineed) + break; + if (cond(id)) { + subset.add(id); + superSet.delete(id); + } + } + return subset; + } + /** + * Exclude up to `ineed` items from a set + */ + function removeFirstNItemsFromSet(superSet, ineed) { + return removeItemsFromSet(superSet, ineed, () => true); + } + class MapDef extends Map { + getDefault; + constructor(getDefault) { + super(); + this.getDefault = getDefault; + } + getOrDefault(key) { + let value = super.get(key); + if (value === undefined) { + value = this.getDefault(); + this.set(key, value); + } + return value; + } + } + + function computeScore(peer, pstats, params, peerIPs) { + let score = 0; + // topic stores + Object.entries(pstats.topics).forEach(([topic, tstats]) => { + // the topic parameters + const topicParams = params.topics[topic]; + if (topicParams === undefined) { + // we are not scoring this topic + return; + } + let topicScore = 0; + // P1: time in Mesh + if (tstats.inMesh) { + let p1 = tstats.meshTime / topicParams.timeInMeshQuantum; + if (p1 > topicParams.timeInMeshCap) { + p1 = topicParams.timeInMeshCap; + } + topicScore += p1 * topicParams.timeInMeshWeight; + } + // P2: first message deliveries + let p2 = tstats.firstMessageDeliveries; + if (p2 > topicParams.firstMessageDeliveriesCap) { + p2 = topicParams.firstMessageDeliveriesCap; + } + topicScore += p2 * topicParams.firstMessageDeliveriesWeight; + // P3: mesh message deliveries + if (tstats.meshMessageDeliveriesActive && + tstats.meshMessageDeliveries < 
topicParams.meshMessageDeliveriesThreshold) { + const deficit = topicParams.meshMessageDeliveriesThreshold - tstats.meshMessageDeliveries; + const p3 = deficit * deficit; + topicScore += p3 * topicParams.meshMessageDeliveriesWeight; + } + // P3b: + // NOTE: the weight of P3b is negative (validated in validateTopicScoreParams) so this detracts + const p3b = tstats.meshFailurePenalty; + topicScore += p3b * topicParams.meshFailurePenaltyWeight; + // P4: invalid messages + // NOTE: the weight of P4 is negative (validated in validateTopicScoreParams) so this detracts + const p4 = tstats.invalidMessageDeliveries * tstats.invalidMessageDeliveries; + topicScore += p4 * topicParams.invalidMessageDeliveriesWeight; + // update score, mixing with topic weight + score += topicScore * topicParams.topicWeight; + }); + // apply the topic score cap, if any + if (params.topicScoreCap > 0 && score > params.topicScoreCap) { + score = params.topicScoreCap; + } + // P5: application-specific score + const p5 = params.appSpecificScore(peer); + score += p5 * params.appSpecificWeight; + // P6: IP colocation factor + pstats.knownIPs.forEach((ip) => { + if (params.IPColocationFactorWhitelist.has(ip)) { + return; + } + // P6 has a cliff (IPColocationFactorThreshold) + // It's only applied if at least that many peers are connected to us from that source IP addr. + // It is quadratic, and the weight is negative (validated in validatePeerScoreParams) + const peersInIP = peerIPs.get(ip); + const numPeersInIP = (peersInIP != null) ? 
peersInIP.size : 0; + if (numPeersInIP > params.IPColocationFactorThreshold) { + const surplus = numPeersInIP - params.IPColocationFactorThreshold; + const p6 = surplus * surplus; + score += p6 * params.IPColocationFactorWeight; + } + }); + // P7: behavioural pattern penalty + if (pstats.behaviourPenalty > params.behaviourPenaltyThreshold) { + const excess = pstats.behaviourPenalty - params.behaviourPenaltyThreshold; + const p7 = excess * excess; + score += p7 * params.behaviourPenaltyWeight; + } + return score; + } + + /** + * Custom implementation of a double ended queue. + */ + function Denque(array, options) { + var options = options || {}; + this._capacity = options.capacity; + + this._head = 0; + this._tail = 0; + + if (Array.isArray(array)) { + this._fromArray(array); + } else { + this._capacityMask = 0x3; + this._list = new Array(4); + } + } + + /** + * -------------- + * PUBLIC API + * ------------- + */ + + /** + * Returns the item at the specified index from the list. + * 0 is the first element, 1 is the second, and so on... + * Elements at negative values are that many from the end: -1 is one before the end + * (the last element), -2 is two before the end (one before last), etc. + * @param index + * @returns {*} + */ + Denque.prototype.peekAt = function peekAt(index) { + var i = index; + // expect a number or return undefined + if ((i !== (i | 0))) { + return void 0; + } + var len = this.size(); + if (i >= len || i < -len) return undefined; + if (i < 0) i += len; + i = (this._head + i) & this._capacityMask; + return this._list[i]; + }; + + /** + * Alias for peekAt() + * @param i + * @returns {*} + */ + Denque.prototype.get = function get(i) { + return this.peekAt(i); + }; + + /** + * Returns the first item in the list without removing it. 
+ * @returns {*} + */ + Denque.prototype.peek = function peek() { + if (this._head === this._tail) return undefined; + return this._list[this._head]; + }; + + /** + * Alias for peek() + * @returns {*} + */ + Denque.prototype.peekFront = function peekFront() { + return this.peek(); + }; + + /** + * Returns the item that is at the back of the queue without removing it. + * Uses peekAt(-1) + */ + Denque.prototype.peekBack = function peekBack() { + return this.peekAt(-1); + }; + + /** + * Returns the current length of the queue + * @return {Number} + */ + Object.defineProperty(Denque.prototype, 'length', { + get: function length() { + return this.size(); + } + }); + + /** + * Return the number of items on the list, or 0 if empty. + * @returns {number} + */ + Denque.prototype.size = function size() { + if (this._head === this._tail) return 0; + if (this._head < this._tail) return this._tail - this._head; + else return this._capacityMask + 1 - (this._head - this._tail); + }; + + /** + * Add an item at the beginning of the list. + * @param item + */ + Denque.prototype.unshift = function unshift(item) { + if (arguments.length === 0) return this.size(); + var len = this._list.length; + this._head = (this._head - 1 + len) & this._capacityMask; + this._list[this._head] = item; + if (this._tail === this._head) this._growArray(); + if (this._capacity && this.size() > this._capacity) this.pop(); + if (this._head < this._tail) return this._tail - this._head; + else return this._capacityMask + 1 - (this._head - this._tail); + }; + + /** + * Remove and return the first item on the list, + * Returns undefined if the list is empty. 
+ * @returns {*} + */ + Denque.prototype.shift = function shift() { + var head = this._head; + if (head === this._tail) return undefined; + var item = this._list[head]; + this._list[head] = undefined; + this._head = (head + 1) & this._capacityMask; + if (head < 2 && this._tail > 10000 && this._tail <= this._list.length >>> 2) this._shrinkArray(); + return item; + }; + + /** + * Add an item to the bottom of the list. + * @param item + */ + Denque.prototype.push = function push(item) { + if (arguments.length === 0) return this.size(); + var tail = this._tail; + this._list[tail] = item; + this._tail = (tail + 1) & this._capacityMask; + if (this._tail === this._head) { + this._growArray(); + } + if (this._capacity && this.size() > this._capacity) { + this.shift(); + } + if (this._head < this._tail) return this._tail - this._head; + else return this._capacityMask + 1 - (this._head - this._tail); + }; + + /** + * Remove and return the last item on the list. + * Returns undefined if the list is empty. + * @returns {*} + */ + Denque.prototype.pop = function pop() { + var tail = this._tail; + if (tail === this._head) return undefined; + var len = this._list.length; + this._tail = (tail - 1 + len) & this._capacityMask; + var item = this._list[this._tail]; + this._list[this._tail] = undefined; + if (this._head < 2 && tail > 10000 && tail <= len >>> 2) this._shrinkArray(); + return item; + }; + + /** + * Remove and return the item at the specified index from the list. + * Returns undefined if the list is empty. 
+ * @param index + * @returns {*} + */ + Denque.prototype.removeOne = function removeOne(index) { + var i = index; + // expect a number or return undefined + if ((i !== (i | 0))) { + return void 0; + } + if (this._head === this._tail) return void 0; + var size = this.size(); + var len = this._list.length; + if (i >= size || i < -size) return void 0; + if (i < 0) i += size; + i = (this._head + i) & this._capacityMask; + var item = this._list[i]; + var k; + if (index < size / 2) { + for (k = index; k > 0; k--) { + this._list[i] = this._list[i = (i - 1 + len) & this._capacityMask]; + } + this._list[i] = void 0; + this._head = (this._head + 1 + len) & this._capacityMask; + } else { + for (k = size - 1 - index; k > 0; k--) { + this._list[i] = this._list[i = (i + 1 + len) & this._capacityMask]; + } + this._list[i] = void 0; + this._tail = (this._tail - 1 + len) & this._capacityMask; + } + return item; + }; + + /** + * Remove number of items from the specified index from the list. + * Returns array of removed items. + * Returns undefined if the list is empty. 
+ * @param index + * @param count + * @returns {array} + */ + Denque.prototype.remove = function remove(index, count) { + var i = index; + var removed; + var del_count = count; + // expect a number or return undefined + if ((i !== (i | 0))) { + return void 0; + } + if (this._head === this._tail) return void 0; + var size = this.size(); + var len = this._list.length; + if (i >= size || i < -size || count < 1) return void 0; + if (i < 0) i += size; + if (count === 1 || !count) { + removed = new Array(1); + removed[0] = this.removeOne(i); + return removed; + } + if (i === 0 && i + count >= size) { + removed = this.toArray(); + this.clear(); + return removed; + } + if (i + count > size) count = size - i; + var k; + removed = new Array(count); + for (k = 0; k < count; k++) { + removed[k] = this._list[(this._head + i + k) & this._capacityMask]; + } + i = (this._head + i) & this._capacityMask; + if (index + count === size) { + this._tail = (this._tail - count + len) & this._capacityMask; + for (k = count; k > 0; k--) { + this._list[i = (i + 1 + len) & this._capacityMask] = void 0; + } + return removed; + } + if (index === 0) { + this._head = (this._head + count + len) & this._capacityMask; + for (k = count - 1; k > 0; k--) { + this._list[i = (i + 1 + len) & this._capacityMask] = void 0; + } + return removed; + } + if (i < size / 2) { + this._head = (this._head + index + count + len) & this._capacityMask; + for (k = index; k > 0; k--) { + this.unshift(this._list[i = (i - 1 + len) & this._capacityMask]); + } + i = (this._head - 1 + len) & this._capacityMask; + while (del_count > 0) { + this._list[i = (i - 1 + len) & this._capacityMask] = void 0; + del_count--; + } + if (index < 0) this._tail = i; + } else { + this._tail = i; + i = (i + count + len) & this._capacityMask; + for (k = size - (count + index); k > 0; k--) { + this.push(this._list[i++]); + } + i = this._tail; + while (del_count > 0) { + this._list[i = (i + 1 + len) & this._capacityMask] = void 0; + del_count--; + 
} + } + if (this._head < 2 && this._tail > 10000 && this._tail <= len >>> 2) this._shrinkArray(); + return removed; + }; + + /** + * Native splice implementation. + * Remove number of items from the specified index from the list and/or add new elements. + * Returns array of removed items or empty array if count == 0. + * Returns undefined if the list is empty. + * + * @param index + * @param count + * @param {...*} [elements] + * @returns {array} + */ + Denque.prototype.splice = function splice(index, count) { + var i = index; + // expect a number or return undefined + if ((i !== (i | 0))) { + return void 0; + } + var size = this.size(); + if (i < 0) i += size; + if (i > size) return void 0; + if (arguments.length > 2) { + var k; + var temp; + var removed; + var arg_len = arguments.length; + var len = this._list.length; + var arguments_index = 2; + if (!size || i < size / 2) { + temp = new Array(i); + for (k = 0; k < i; k++) { + temp[k] = this._list[(this._head + k) & this._capacityMask]; + } + if (count === 0) { + removed = []; + if (i > 0) { + this._head = (this._head + i + len) & this._capacityMask; + } + } else { + removed = this.remove(i, count); + this._head = (this._head + i + len) & this._capacityMask; + } + while (arg_len > arguments_index) { + this.unshift(arguments[--arg_len]); + } + for (k = i; k > 0; k--) { + this.unshift(temp[k - 1]); + } + } else { + temp = new Array(size - (i + count)); + var leng = temp.length; + for (k = 0; k < leng; k++) { + temp[k] = this._list[(this._head + i + count + k) & this._capacityMask]; + } + if (count === 0) { + removed = []; + if (i != size) { + this._tail = (this._head + i + len) & this._capacityMask; + } + } else { + removed = this.remove(i, count); + this._tail = (this._tail - leng + len) & this._capacityMask; + } + while (arguments_index < arg_len) { + this.push(arguments[arguments_index++]); + } + for (k = 0; k < leng; k++) { + this.push(temp[k]); + } + } + return removed; + } else { + return this.remove(i, 
count); + } + }; + + /** + * Soft clear - does not reset capacity. + */ + Denque.prototype.clear = function clear() { + this._list = new Array(this._list.length); + this._head = 0; + this._tail = 0; + }; + + /** + * Returns true or false whether the list is empty. + * @returns {boolean} + */ + Denque.prototype.isEmpty = function isEmpty() { + return this._head === this._tail; + }; + + /** + * Returns an array of all queue items. + * @returns {Array} + */ + Denque.prototype.toArray = function toArray() { + return this._copyArray(false); + }; + + /** + * ------------- + * INTERNALS + * ------------- + */ + + /** + * Fills the queue with items from an array + * For use in the constructor + * @param array + * @private + */ + Denque.prototype._fromArray = function _fromArray(array) { + var length = array.length; + var capacity = this._nextPowerOf2(length); + + this._list = new Array(capacity); + this._capacityMask = capacity - 1; + this._tail = length; + + for (var i = 0; i < length; i++) this._list[i] = array[i]; + }; + + /** + * + * @param fullCopy + * @param size Initialize the array with a specific size. Will default to the current list size + * @returns {Array} + * @private + */ + Denque.prototype._copyArray = function _copyArray(fullCopy, size) { + var src = this._list; + var capacity = src.length; + var length = this.length; + size = size | length; + + // No prealloc requested and the buffer is contiguous + if (size == length && this._head < this._tail) { + // Simply do a fast slice copy + return this._list.slice(this._head, this._tail); + } + + var dest = new Array(size); + + var k = 0; + var i; + if (fullCopy || this._head > this._tail) { + for (i = this._head; i < capacity; i++) dest[k++] = src[i]; + for (i = 0; i < this._tail; i++) dest[k++] = src[i]; + } else { + for (i = this._head; i < this._tail; i++) dest[k++] = src[i]; + } + + return dest; + }; + + /** + * Grows the internal list array. 
+ * @private + */ + Denque.prototype._growArray = function _growArray() { + if (this._head != 0) { + // double array size and copy existing data, head to end, then beginning to tail. + var newList = this._copyArray(true, this._list.length << 1); + + this._tail = this._list.length; + this._head = 0; + + this._list = newList; + } else { + this._tail = this._list.length; + this._list.length <<= 1; + } + + this._capacityMask = (this._capacityMask << 1) | 1; + }; + + /** + * Shrinks the internal list array. + * @private + */ + Denque.prototype._shrinkArray = function _shrinkArray() { + this._list.length >>>= 1; + this._capacityMask >>>= 1; + }; + + /** + * Find the next power of 2, at least 4 + * @private + * @param {number} num + * @returns {number} + */ + Denque.prototype._nextPowerOf2 = function _nextPowerOf2(num) { + var log2 = Math.log(num) / Math.log(2); + var nextPow2 = 1 << (log2 + 1); + + return Math.max(nextPow2, 4); + }; + + var denque = Denque; + + var Denque$1 = /*@__PURE__*/getDefaultExportFromCjs(denque); + + var DeliveryRecordStatus; + (function (DeliveryRecordStatus) { + /** + * we don't know (yet) if the message is valid + */ + DeliveryRecordStatus[DeliveryRecordStatus["unknown"] = 0] = "unknown"; + /** + * we know the message is valid + */ + DeliveryRecordStatus[DeliveryRecordStatus["valid"] = 1] = "valid"; + /** + * we know the message is invalid + */ + DeliveryRecordStatus[DeliveryRecordStatus["invalid"] = 2] = "invalid"; + /** + * we were instructed by the validator to ignore the message + */ + DeliveryRecordStatus[DeliveryRecordStatus["ignored"] = 3] = "ignored"; + })(DeliveryRecordStatus || (DeliveryRecordStatus = {})); + /** + * Map of canonical message ID to DeliveryRecord + * + * Maintains an internal queue for efficient gc of old messages + */ + class MessageDeliveries { + records; + queue; + constructor() { + this.records = new Map(); + this.queue = new Denque$1(); + } + getRecord(msgIdStr) { + return this.records.get(msgIdStr); + } + 
ensureRecord(msgIdStr) { + let drec = this.records.get(msgIdStr); + if (drec != null) { + return drec; + } + // record doesn't exist yet + // create record + drec = { + status: DeliveryRecordStatus.unknown, + firstSeenTsMs: Date.now(), + validated: 0, + peers: new Set() + }; + this.records.set(msgIdStr, drec); + // and add msgId to the queue + const entry = { + msgId: msgIdStr, + expire: Date.now() + TimeCacheDuration + }; + this.queue.push(entry); + return drec; + } + gc() { + const now = Date.now(); + // queue is sorted by expiry time + // remove expired messages, remove from queue until first un-expired message found + let head = this.queue.peekFront(); + while ((head != null) && head.expire < now) { + this.records.delete(head.msgId); + this.queue.shift(); + head = this.queue.peekFront(); + } + } + clear() { + this.records.clear(); + this.queue.clear(); + } + } + + class PeerScore { + params; + metrics; + /** + * Per-peer stats for score calculation + */ + peerStats = new Map(); + /** + * IP colocation tracking; maps IP => set of peers. + */ + peerIPs = new MapDef(() => new Set()); + /** + * Cache score up to decayInterval if topic stats are unchanged. + */ + scoreCache = new Map(); + /** + * Recent message delivery timing/participants + */ + deliveryRecords = new MessageDeliveries(); + _backgroundInterval; + scoreCacheValidityMs; + computeScore; + log; + constructor(params, metrics, componentLogger, opts) { + this.params = params; + this.metrics = metrics; + validatePeerScoreParams(params); + this.scoreCacheValidityMs = opts.scoreCacheValidityMs; + this.computeScore = opts.computeScore ?? 
computeScore; + this.log = componentLogger.forComponent('libp2p:gossipsub:score'); + } + get size() { + return this.peerStats.size; + } + /** + * Start PeerScore instance + */ + start() { + if (this._backgroundInterval != null) { + this.log('Peer score already running'); + return; + } + this._backgroundInterval = setInterval(() => { this.background(); }, this.params.decayInterval); + this.log('started'); + } + /** + * Stop PeerScore instance + */ + stop() { + if (this._backgroundInterval == null) { + this.log('Peer score already stopped'); + return; + } + clearInterval(this._backgroundInterval); + delete this._backgroundInterval; + this.peerIPs.clear(); + this.peerStats.clear(); + this.deliveryRecords.clear(); + this.log('stopped'); + } + /** + * Periodic maintenance + */ + background() { + this.refreshScores(); + this.deliveryRecords.gc(); + } + dumpPeerScoreStats() { + return Object.fromEntries(Array.from(this.peerStats.entries()).map(([peer, stats]) => [peer, stats])); + } + messageFirstSeenTimestampMs(msgIdStr) { + const drec = this.deliveryRecords.getRecord(msgIdStr); + return (drec != null) ? drec.firstSeenTsMs : null; + } + /** + * Decays scores, and purges score records for disconnected peers once their expiry has elapsed. + */ + refreshScores() { + const now = Date.now(); + const decayToZero = this.params.decayToZero; + this.peerStats.forEach((pstats, id) => { + if (!pstats.connected) { + // has the retention period expired? + if (now > pstats.expire) { + // yes, throw it away (but clean up the IP tracking first) + this.removeIPsForPeer(id, pstats.knownIPs); + this.peerStats.delete(id); + this.scoreCache.delete(id); + } + // we don't decay retained scores, as the peer is not active. + // this way the peer cannot reset a negative score by simply disconnecting and reconnecting, + // unless the retention period has elapsed. + // similarly, a well behaved peer does not lose its score by getting disconnected. 
+ return; + } + Object.entries(pstats.topics).forEach(([topic, tstats]) => { + const tparams = this.params.topics[topic]; + if (tparams === undefined) { + // we are not scoring this topic + // should be unreachable, we only add scored topics to pstats + return; + } + // decay counters + tstats.firstMessageDeliveries *= tparams.firstMessageDeliveriesDecay; + if (tstats.firstMessageDeliveries < decayToZero) { + tstats.firstMessageDeliveries = 0; + } + tstats.meshMessageDeliveries *= tparams.meshMessageDeliveriesDecay; + if (tstats.meshMessageDeliveries < decayToZero) { + tstats.meshMessageDeliveries = 0; + } + tstats.meshFailurePenalty *= tparams.meshFailurePenaltyDecay; + if (tstats.meshFailurePenalty < decayToZero) { + tstats.meshFailurePenalty = 0; + } + tstats.invalidMessageDeliveries *= tparams.invalidMessageDeliveriesDecay; + if (tstats.invalidMessageDeliveries < decayToZero) { + tstats.invalidMessageDeliveries = 0; + } + // update mesh time and activate mesh message delivery parameter if need be + if (tstats.inMesh) { + tstats.meshTime = now - tstats.graftTime; + if (tstats.meshTime > tparams.meshMessageDeliveriesActivation) { + tstats.meshMessageDeliveriesActive = true; + } + } + }); + // decay P7 counter + pstats.behaviourPenalty *= this.params.behaviourPenaltyDecay; + if (pstats.behaviourPenalty < decayToZero) { + pstats.behaviourPenalty = 0; + } + }); + } + /** + * Return the score for a peer + */ + score(id) { + this.metrics?.scoreFnCalls.inc(); + const pstats = this.peerStats.get(id); + if (pstats == null) { + return 0; + } + const now = Date.now(); + const cacheEntry = this.scoreCache.get(id); + // Found cached score within validity period + if ((cacheEntry != null) && cacheEntry.cacheUntil > now) { + return cacheEntry.score; + } + this.metrics?.scoreFnRuns.inc(); + const score = this.computeScore(id, pstats, this.params, this.peerIPs); + const cacheUntil = now + this.scoreCacheValidityMs; + if (cacheEntry != null) { + 
this.metrics?.scoreCachedDelta.observe(Math.abs(score - cacheEntry.score)); + cacheEntry.score = score; + cacheEntry.cacheUntil = cacheUntil; + } + else { + this.scoreCache.set(id, { score, cacheUntil }); + } + return score; + } + /** + * Apply a behavioural penalty to a peer + */ + addPenalty(id, penalty, penaltyLabel) { + const pstats = this.peerStats.get(id); + if (pstats != null) { + pstats.behaviourPenalty += penalty; + this.metrics?.onScorePenalty(penaltyLabel); + } + } + addPeer(id) { + // create peer stats (not including topic stats for each topic to be scored) + // topic stats will be added as needed + const pstats = { + connected: true, + expire: 0, + topics: {}, + knownIPs: new Set(), + behaviourPenalty: 0 + }; + this.peerStats.set(id, pstats); + } + /** Adds a new IP to a peer, if the peer is not known the update is ignored */ + addIP(id, ip) { + const pstats = this.peerStats.get(id); + if (pstats != null) { + pstats.knownIPs.add(ip); + } + this.peerIPs.getOrDefault(ip).add(id); + } + /** Remove peer association with IP */ + removeIP(id, ip) { + const pstats = this.peerStats.get(id); + if (pstats != null) { + pstats.knownIPs.delete(ip); + } + const peersWithIP = this.peerIPs.get(ip); + if (peersWithIP != null) { + peersWithIP.delete(id); + if (peersWithIP.size === 0) { + this.peerIPs.delete(ip); + } + } + } + removePeer(id) { + const pstats = this.peerStats.get(id); + if (pstats == null) { + return; + } + // decide whether to retain the score; this currently only retains non-positive scores + // to dissuade attacks on the score function. + if (this.score(id) > 0) { + this.removeIPsForPeer(id, pstats.knownIPs); + this.peerStats.delete(id); + return; + } + // furthermore, when we decide to retain the score, the firstMessageDelivery counters are + // reset to 0 and mesh delivery penalties applied. 
+ Object.entries(pstats.topics).forEach(([topic, tstats]) => { + tstats.firstMessageDeliveries = 0; + const threshold = this.params.topics[topic].meshMessageDeliveriesThreshold; + if (tstats.inMesh && tstats.meshMessageDeliveriesActive && tstats.meshMessageDeliveries < threshold) { + const deficit = threshold - tstats.meshMessageDeliveries; + tstats.meshFailurePenalty += deficit * deficit; + } + tstats.inMesh = false; + tstats.meshMessageDeliveriesActive = false; + }); + pstats.connected = false; + pstats.expire = Date.now() + this.params.retainScore; + } + /** Handles scoring functionality as a peer GRAFTs to a topic. */ + graft(id, topic) { + const pstats = this.peerStats.get(id); + if (pstats != null) { + const tstats = this.getPtopicStats(pstats, topic); + if (tstats != null) { + // if we are scoring the topic, update the mesh status. + tstats.inMesh = true; + tstats.graftTime = Date.now(); + tstats.meshTime = 0; + tstats.meshMessageDeliveriesActive = false; + } + } + } + /** Handles scoring functionality as a peer PRUNEs from a topic. 
*/ + prune(id, topic) { + const pstats = this.peerStats.get(id); + if (pstats != null) { + const tstats = this.getPtopicStats(pstats, topic); + if (tstats != null) { + // sticky mesh delivery rate failure penalty + const threshold = this.params.topics[topic].meshMessageDeliveriesThreshold; + if (tstats.meshMessageDeliveriesActive && tstats.meshMessageDeliveries < threshold) { + const deficit = threshold - tstats.meshMessageDeliveries; + tstats.meshFailurePenalty += deficit * deficit; + } + tstats.meshMessageDeliveriesActive = false; + tstats.inMesh = false; + // TODO: Consider clearing score cache on important penalties + // this.scoreCache.delete(id) + } + } + } + validateMessage(msgIdStr) { + this.deliveryRecords.ensureRecord(msgIdStr); + } + deliverMessage(from, msgIdStr, topic) { + this.markFirstMessageDelivery(from, topic); + const drec = this.deliveryRecords.ensureRecord(msgIdStr); + const now = Date.now(); + // defensive check that this is the first delivery trace -- delivery status should be unknown + if (drec.status !== DeliveryRecordStatus.unknown) { + this.log('unexpected delivery: message from %s was first seen %s ago and has delivery status %s', from, now - drec.firstSeenTsMs, DeliveryRecordStatus[drec.status]); + return; + } + // mark the message as valid and reward mesh peers that have already forwarded it to us + drec.status = DeliveryRecordStatus.valid; + drec.validated = now; + drec.peers.forEach((p) => { + // this check is to make sure a peer can't send us a message twice and get a double count + // if it is a first delivery. + if (p !== from.toString()) { + this.markDuplicateMessageDelivery(p, topic); + } + }); + } + /** + * Similar to `rejectMessage` except does not require the message id or reason for an invalid message. 
+ */ + rejectInvalidMessage(from, topic) { + this.markInvalidMessageDelivery(from, topic); + } + rejectMessage(from, msgIdStr, topic, reason) { + // eslint-disable-next-line default-case + switch (reason) { + // these messages are not tracked, but the peer is penalized as they are invalid + case RejectReason.Error: + this.markInvalidMessageDelivery(from, topic); + return; + // we ignore those messages, so do nothing. + case RejectReason.Blacklisted: + return; + // the rest are handled after record creation + } + const drec = this.deliveryRecords.ensureRecord(msgIdStr); + // defensive check that this is the first rejection -- delivery status should be unknown + if (drec.status !== DeliveryRecordStatus.unknown) { + this.log('unexpected rejection: message from %s was first seen %s ago and has delivery status %d', from, Date.now() - drec.firstSeenTsMs, DeliveryRecordStatus[drec.status]); + return; + } + if (reason === RejectReason.Ignore) { + // we were explicitly instructed by the validator to ignore the message but not penalize the peer + drec.status = DeliveryRecordStatus.ignored; + drec.peers.clear(); + return; + } + // mark the message as invalid and penalize peers that have already forwarded it. + drec.status = DeliveryRecordStatus.invalid; + this.markInvalidMessageDelivery(from, topic); + drec.peers.forEach((p) => { + this.markInvalidMessageDelivery(p, topic); + }); + // release the delivery time tracking map to free some memory early + drec.peers.clear(); + } + duplicateMessage(from, msgIdStr, topic) { + const drec = this.deliveryRecords.ensureRecord(msgIdStr); + if (drec.peers.has(from)) { + // we have already seen this duplicate + return; + } + // eslint-disable-next-line default-case + switch (drec.status) { + case DeliveryRecordStatus.unknown: + // the message is being validated; track the peer delivery and wait for + // the Deliver/Reject/Ignore notification. 
+ drec.peers.add(from); + break; + case DeliveryRecordStatus.valid: + // mark the peer delivery time to only count a duplicate delivery once. + drec.peers.add(from); + this.markDuplicateMessageDelivery(from, topic, drec.validated); + break; + case DeliveryRecordStatus.invalid: + // we no longer track delivery time + this.markInvalidMessageDelivery(from, topic); + break; + case DeliveryRecordStatus.ignored: + // the message was ignored; do nothing (we don't know if it was valid) + break; + } + } + /** + * Increments the "invalid message deliveries" counter for all scored topics the message is published in. + */ + markInvalidMessageDelivery(from, topic) { + const pstats = this.peerStats.get(from); + if (pstats != null) { + const tstats = this.getPtopicStats(pstats, topic); + if (tstats != null) { + tstats.invalidMessageDeliveries += 1; + } + } + } + /** + * Increments the "first message deliveries" counter for all scored topics the message is published in, + * as well as the "mesh message deliveries" counter, if the peer is in the mesh for the topic. + * Messages already known (with the seenCache) are counted with markDuplicateMessageDelivery() + */ + markFirstMessageDelivery(from, topic) { + const pstats = this.peerStats.get(from); + if (pstats != null) { + const tstats = this.getPtopicStats(pstats, topic); + if (tstats != null) { + let cap = this.params.topics[topic].firstMessageDeliveriesCap; + tstats.firstMessageDeliveries = Math.min(cap, tstats.firstMessageDeliveries + 1); + if (tstats.inMesh) { + cap = this.params.topics[topic].meshMessageDeliveriesCap; + tstats.meshMessageDeliveries = Math.min(cap, tstats.meshMessageDeliveries + 1); + } + } + } + } + /** + * Increments the "mesh message deliveries" counter for messages we've seen before, + * as long the message was received within the P3 window. 
+ */ + markDuplicateMessageDelivery(from, topic, validatedTime) { + const pstats = this.peerStats.get(from); + if (pstats != null) { + const now = validatedTime !== undefined ? Date.now() : 0; + const tstats = this.getPtopicStats(pstats, topic); + // eslint-disable-next-line @typescript-eslint/prefer-optional-chain + if (tstats != null && tstats.inMesh) { + const tparams = this.params.topics[topic]; + // check against the mesh delivery window -- if the validated time is passed as 0, then + // the message was received before we finished validation and thus falls within the mesh + // delivery window. + if (validatedTime !== undefined) { + const deliveryDelayMs = now - validatedTime; + const isLateDelivery = deliveryDelayMs > tparams.meshMessageDeliveriesWindow; + this.metrics?.onDuplicateMsgDelivery(topic, deliveryDelayMs, isLateDelivery); + if (isLateDelivery) { + return; + } + } + const cap = tparams.meshMessageDeliveriesCap; + tstats.meshMessageDeliveries = Math.min(cap, tstats.meshMessageDeliveries + 1); + } + } + } + /** + * Removes an IP list from the tracking list for a peer. + */ + removeIPsForPeer(id, ipsToRemove) { + for (const ipToRemove of ipsToRemove) { + const peerSet = this.peerIPs.get(ipToRemove); + if (peerSet != null) { + peerSet.delete(id); + if (peerSet.size === 0) { + this.peerIPs.delete(ipToRemove); + } + } + } + } + /** + * Returns topic stats if they exist, otherwise if the supplied parameters score the + * topic, inserts the default stats and returns a reference to those. If neither apply, returns None. 
+ */ + getPtopicStats(pstats, topic) { + let topicStats = pstats.topics[topic]; + if (topicStats !== undefined) { + return topicStats; + } + if (this.params.topics[topic] !== undefined) { + topicStats = { + inMesh: false, + graftTime: 0, + meshTime: 0, + firstMessageDeliveries: 0, + meshMessageDeliveries: 0, + meshMessageDeliveriesActive: false, + meshFailurePenalty: 0, + invalidMessageDeliveries: 0 + }; + pstats.topics[topic] = topicStats; + return topicStats; + } + return null; + } + } + + function computeScoreWeights(peer, pstats, params, peerIPs, topicStrToLabel) { + let score = 0; + const byTopic = new Map(); + // topic stores + Object.entries(pstats.topics).forEach(([topic, tstats]) => { + // the topic parameters + // Aggregate by known topicLabel or throw to 'unknown'. This prevent too high cardinality + const topicLabel = topicStrToLabel.get(topic) ?? 'unknown'; + const topicParams = params.topics[topic]; + if (topicParams === undefined) { + // we are not scoring this topic + return; + } + let topicScores = byTopic.get(topicLabel); + if (topicScores == null) { + topicScores = { + p1w: 0, + p2w: 0, + p3w: 0, + p3bw: 0, + p4w: 0 + }; + byTopic.set(topicLabel, topicScores); + } + let p1w = 0; + let p2w = 0; + let p3w = 0; + let p3bw = 0; + let p4w = 0; + // P1: time in Mesh + if (tstats.inMesh) { + const p1 = Math.max(tstats.meshTime / topicParams.timeInMeshQuantum, topicParams.timeInMeshCap); + p1w += p1 * topicParams.timeInMeshWeight; + } + // P2: first message deliveries + let p2 = tstats.firstMessageDeliveries; + if (p2 > topicParams.firstMessageDeliveriesCap) { + p2 = topicParams.firstMessageDeliveriesCap; + } + p2w += p2 * topicParams.firstMessageDeliveriesWeight; + // P3: mesh message deliveries + if (tstats.meshMessageDeliveriesActive && + tstats.meshMessageDeliveries < topicParams.meshMessageDeliveriesThreshold) { + const deficit = topicParams.meshMessageDeliveriesThreshold - tstats.meshMessageDeliveries; + const p3 = deficit * deficit; + p3w += p3 * 
topicParams.meshMessageDeliveriesWeight; + } + // P3b: + // NOTE: the weight of P3b is negative (validated in validateTopicScoreParams) so this detracts + const p3b = tstats.meshFailurePenalty; + p3bw += p3b * topicParams.meshFailurePenaltyWeight; + // P4: invalid messages + // NOTE: the weight of P4 is negative (validated in validateTopicScoreParams) so this detracts + const p4 = tstats.invalidMessageDeliveries * tstats.invalidMessageDeliveries; + p4w += p4 * topicParams.invalidMessageDeliveriesWeight; + // update score, mixing with topic weight + score += (p1w + p2w + p3w + p3bw + p4w) * topicParams.topicWeight; + topicScores.p1w += p1w; + topicScores.p2w += p2w; + topicScores.p3w += p3w; + topicScores.p3bw += p3bw; + topicScores.p4w += p4w; + }); + // apply the topic score cap, if any + if (params.topicScoreCap > 0 && score > params.topicScoreCap) { + score = params.topicScoreCap; + // Proportionally apply cap to all individual contributions + const capF = params.topicScoreCap / score; + for (const ws of byTopic.values()) { + ws.p1w *= capF; + ws.p2w *= capF; + ws.p3w *= capF; + ws.p3bw *= capF; + ws.p4w *= capF; + } + } + let p5w = 0; + let p6w = 0; + let p7w = 0; + // P5: application-specific score + const p5 = params.appSpecificScore(peer); + p5w += p5 * params.appSpecificWeight; + // P6: IP colocation factor + pstats.knownIPs.forEach((ip) => { + if (params.IPColocationFactorWhitelist.has(ip)) { + return; + } + // P6 has a cliff (IPColocationFactorThreshold) + // It's only applied if at least that many peers are connected to us from that source IP addr. + // It is quadratic, and the weight is negative (validated in validatePeerScoreParams) + const peersInIP = peerIPs.get(ip); + const numPeersInIP = (peersInIP != null) ? 
peersInIP.size : 0; + if (numPeersInIP > params.IPColocationFactorThreshold) { + const surplus = numPeersInIP - params.IPColocationFactorThreshold; + const p6 = surplus * surplus; + p6w += p6 * params.IPColocationFactorWeight; + } + }); + // P7: behavioural pattern penalty + const p7 = pstats.behaviourPenalty * pstats.behaviourPenalty; + p7w += p7 * params.behaviourPenaltyWeight; + score += p5w + p6w + p7w; + return { + byTopic, + p5w, + p6w, + p7w, + score + }; + } + function computeAllPeersScoreWeights(peerIdStrs, peerStats, params, peerIPs, topicStrToLabel) { + const sw = { + byTopic: new Map(), + p5w: [], + p6w: [], + p7w: [], + score: [] + }; + for (const peerIdStr of peerIdStrs) { + const pstats = peerStats.get(peerIdStr); + if (pstats != null) { + const swPeer = computeScoreWeights(peerIdStr, pstats, params, peerIPs, topicStrToLabel); + for (const [topic, swPeerTopic] of swPeer.byTopic) { + let swTopic = sw.byTopic.get(topic); + if (swTopic == null) { + swTopic = { + p1w: [], + p2w: [], + p3w: [], + p3bw: [], + p4w: [] + }; + sw.byTopic.set(topic, swTopic); + } + swTopic.p1w.push(swPeerTopic.p1w); + swTopic.p2w.push(swPeerTopic.p2w); + swTopic.p3w.push(swPeerTopic.p3w); + swTopic.p3bw.push(swPeerTopic.p3bw); + swTopic.p4w.push(swPeerTopic.p4w); + } + sw.p5w.push(swPeer.p5w); + sw.p6w.push(swPeer.p6w); + sw.p7w.push(swPeer.p7w); + sw.score.push(swPeer.score); + } + else { + sw.p5w.push(0); + sw.p6w.push(0); + sw.p7w.push(0); + sw.score.push(0); + } + } + return sw; + } + + class OutboundStream { + rawStream; + pushable; + closeController; + maxBufferSize; + constructor(rawStream, errCallback, opts) { + this.rawStream = rawStream; + this.pushable = pushable$1(); + this.closeController = new AbortController(); + this.maxBufferSize = opts.maxBufferSize ?? 
Infinity; + this.closeController.signal.addEventListener('abort', () => { + rawStream.close() + .catch(err => { + rawStream.abort(err); + }); + }); + pipe(this.pushable, this.rawStream).catch(errCallback); + } + get protocol() { + // TODO remove this non-nullish assertion after https://github.com/libp2p/js-libp2p-interfaces/pull/265 is incorporated + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + return this.rawStream.protocol; + } + push(data) { + if (this.pushable.readableLength > this.maxBufferSize) { + throw Error(`OutboundStream buffer full, size > ${this.maxBufferSize}`); + } + this.pushable.push(encode.single(data)); + } + /** + * Same to push() but this is prefixed data so no need to encode length prefixed again + */ + pushPrefixed(data) { + if (this.pushable.readableLength > this.maxBufferSize) { + throw Error(`OutboundStream buffer full, size > ${this.maxBufferSize}`); + } + this.pushable.push(data); + } + async close() { + this.closeController.abort(); + // similar to pushable.end() but clear the internal buffer + await this.pushable.return(); + } + } + class InboundStream { + source; + rawStream; + closeController; + constructor(rawStream, opts = {}) { + this.rawStream = rawStream; + this.closeController = new AbortController(); + this.closeController.signal.addEventListener('abort', () => { + rawStream.close() + .catch(err => { + rawStream.abort(err); + }); + }); + this.source = pipe(this.rawStream, (source) => decode(source, opts)); + } + async close() { + this.closeController.abort(); + } + } + + /** + * IWantTracer is an internal tracer that tracks IWANT requests in order to penalize + * peers who don't follow up on IWANT requests after an IHAVE advertisement. + * The tracking of promises is probabilistic to avoid using too much memory. + * + * Note: Do not confuse these 'promises' with JS Promise objects. + * These 'promises' are merely expectations of a peer's behavior. 
+ */ + class IWantTracer { + gossipsubIWantFollowupMs; + msgIdToStrFn; + metrics; + /** + * Promises to deliver a message + * Map per message id, per peer, promise expiration time + */ + promises = new Map(); + /** + * First request time by msgId. Used for metrics to track expire times. + * Necessary to know if peers are actually breaking promises or simply sending them a bit later + */ + requestMsByMsg = new Map(); + requestMsByMsgExpire; + constructor(gossipsubIWantFollowupMs, msgIdToStrFn, metrics) { + this.gossipsubIWantFollowupMs = gossipsubIWantFollowupMs; + this.msgIdToStrFn = msgIdToStrFn; + this.metrics = metrics; + this.requestMsByMsgExpire = 10 * gossipsubIWantFollowupMs; + } + get size() { + return this.promises.size; + } + get requestMsByMsgSize() { + return this.requestMsByMsg.size; + } + /** + * Track a promise to deliver a message from a list of msgIds we are requesting + */ + addPromise(from, msgIds) { + // pick msgId randomly from the list + const ix = Math.floor(Math.random() * msgIds.length); + const msgId = msgIds[ix]; + const msgIdStr = this.msgIdToStrFn(msgId); + let expireByPeer = this.promises.get(msgIdStr); + if (expireByPeer == null) { + expireByPeer = new Map(); + this.promises.set(msgIdStr, expireByPeer); + } + const now = Date.now(); + // If a promise for this message id and peer already exists we don't update the expiry + if (!expireByPeer.has(from)) { + expireByPeer.set(from, now + this.gossipsubIWantFollowupMs); + if (this.metrics != null) { + this.metrics.iwantPromiseStarted.inc(1); + if (!this.requestMsByMsg.has(msgIdStr)) { + this.requestMsByMsg.set(msgIdStr, now); + } + } + } + } + /** + * Returns the number of broken promises for each peer who didn't follow up on an IWANT request. + * + * This should be called not too often relative to the expire times, since it iterates over the whole data. 
+ */ + getBrokenPromises() { + const now = Date.now(); + const result = new Map(); + let brokenPromises = 0; + this.promises.forEach((expireByPeer, msgId) => { + expireByPeer.forEach((expire, p) => { + // the promise has been broken + if (expire < now) { + // add 1 to result + result.set(p, (result.get(p) ?? 0) + 1); + // delete from tracked promises + expireByPeer.delete(p); + // for metrics + brokenPromises++; + } + }); + // clean up empty promises for a msgId + if (expireByPeer.size === 0) { + this.promises.delete(msgId); + } + }); + this.metrics?.iwantPromiseBroken.inc(brokenPromises); + return result; + } + /** + * Someone delivered a message, stop tracking promises for it + */ + deliverMessage(msgIdStr, isDuplicate = false) { + this.trackMessage(msgIdStr); + const expireByPeer = this.promises.get(msgIdStr); + // Expired promise, check requestMsByMsg + if (expireByPeer != null) { + this.promises.delete(msgIdStr); + if (this.metrics != null) { + this.metrics.iwantPromiseResolved.inc(1); + if (isDuplicate) + this.metrics.iwantPromiseResolvedFromDuplicate.inc(1); + this.metrics.iwantPromiseResolvedPeers.inc(expireByPeer.size); + } + } + } + /** + * A message got rejected, so we can stop tracking promises and let the score penalty apply from invalid message delivery, + * unless its an obviously invalid message. + */ + rejectMessage(msgIdStr, reason) { + this.trackMessage(msgIdStr); + // A message got rejected, so we can stop tracking promises and let the score penalty apply. 
+ // With the expection of obvious invalid messages + switch (reason) { + case RejectReason.Error: + return; + } + this.promises.delete(msgIdStr); + } + clear() { + this.promises.clear(); + } + prune() { + const maxMs = Date.now() - this.requestMsByMsgExpire; + let count = 0; + for (const [k, v] of this.requestMsByMsg.entries()) { + if (v < maxMs) { + // messages that stay too long in the requestMsByMsg map, delete + this.requestMsByMsg.delete(k); + count++; + } + else { + // recent messages, keep them + // sort by insertion order + break; + } + } + this.metrics?.iwantMessagePruned.inc(count); + } + trackMessage(msgIdStr) { + if (this.metrics != null) { + const requestMs = this.requestMsByMsg.get(msgIdStr); + if (requestMs !== undefined) { + this.metrics.iwantPromiseDeliveryTime.observe((Date.now() - requestMs) / 1000); + this.requestMsByMsg.delete(msgIdStr); + } + } + } + } + + const SignPrefix = fromString('libp2p-pubsub:'); + async function buildRawMessage(publishConfig, topic, originalData, transformedData) { + switch (publishConfig.type) { + case PublishConfigType.Signing: { + const rpcMsg = { + from: publishConfig.author.toBytes(), + data: transformedData, + seqno: randomBytes(8), + topic, + signature: undefined, // Exclude signature field for signing + key: undefined // Exclude key field for signing + }; + // Get the message in bytes, and prepend with the pubsub prefix + // the signature is over the bytes "libp2p-pubsub:" + const bytes = concat$1([SignPrefix, RPC$1.Message.encode(rpcMsg)]); + rpcMsg.signature = await publishConfig.privateKey.sign(bytes); + rpcMsg.key = publishConfig.key; + const msg = { + type: 'signed', + from: publishConfig.author, + data: originalData, + sequenceNumber: BigInt(`0x${toString$1(rpcMsg.seqno, 'base16')}`), + topic, + signature: rpcMsg.signature, + key: rpcMsg.key + }; + return { + raw: rpcMsg, + msg + }; + } + case PublishConfigType.Anonymous: { + return { + raw: { + from: undefined, + data: transformedData, + seqno: 
undefined, + topic, + signature: undefined, + key: undefined + }, + msg: { + type: 'unsigned', + data: originalData, + topic + } + }; + } + default: + throw new Error('Unreachable'); + } + } + async function validateToRawMessage(signaturePolicy, msg) { + // If strict-sign, verify all + // If anonymous (no-sign), ensure no preven + switch (signaturePolicy) { + case StrictNoSign: + if (msg.signature != null) + return { valid: false, error: ValidateError.SignaturePresent }; + if (msg.seqno != null) + return { valid: false, error: ValidateError.SeqnoPresent }; + if (msg.key != null) + return { valid: false, error: ValidateError.FromPresent }; + return { valid: true, message: { type: 'unsigned', topic: msg.topic, data: msg.data ?? new Uint8Array(0) } }; + case StrictSign: { + // Verify seqno + if (msg.seqno == null) + return { valid: false, error: ValidateError.InvalidSeqno }; + if (msg.seqno.length !== 8) { + return { valid: false, error: ValidateError.InvalidSeqno }; + } + if (msg.signature == null) + return { valid: false, error: ValidateError.InvalidSignature }; + if (msg.from == null) + return { valid: false, error: ValidateError.InvalidPeerId }; + let fromPeerId; + try { + // TODO: Fix PeerId types + fromPeerId = peerIdFromBytes(msg.from); + } + catch (e) { + return { valid: false, error: ValidateError.InvalidPeerId }; + } + // - check from defined + // - transform source to PeerId + // - parse signature + // - get .key, else from source + // - check key == source if present + // - verify sig + let publicKey; + if (msg.key != null) { + publicKey = unmarshalPublicKey(msg.key); + // TODO: Should `fromPeerId.pubKey` be optional? 
+ if (fromPeerId.publicKey !== undefined && !equals(publicKey.bytes, fromPeerId.publicKey)) { + return { valid: false, error: ValidateError.InvalidPeerId }; + } + } + else { + if (fromPeerId.publicKey == null) { + return { valid: false, error: ValidateError.InvalidPeerId }; + } + publicKey = unmarshalPublicKey(fromPeerId.publicKey); + } + const rpcMsgPreSign = { + from: msg.from, + data: msg.data, + seqno: msg.seqno, + topic: msg.topic, + signature: undefined, // Exclude signature field for signing + key: undefined // Exclude key field for signing + }; + // Get the message in bytes, and prepend with the pubsub prefix + // the signature is over the bytes "libp2p-pubsub:" + const bytes = concat$1([SignPrefix, RPC$1.Message.encode(rpcMsgPreSign)]); + if (!(await publicKey.verify(bytes, msg.signature))) { + return { valid: false, error: ValidateError.InvalidSignature }; + } + return { + valid: true, + message: { + type: 'signed', + from: fromPeerId, + data: msg.data ?? new Uint8Array(0), + sequenceNumber: BigInt(`0x${toString$1(msg.seqno, 'base16')}`), + topic: msg.topic, + signature: msg.signature, + key: msg.key ?? marshalPublicKey(publicKey) + } + }; + } + default: + throw new Error('Unreachable'); + } + } + + /** + * Create a gossipsub RPC object + */ + function createGossipRpc(messages = [], control) { + return { + subscriptions: [], + messages, + control: control !== undefined + ? { + graft: control.graft ?? [], + prune: control.prune ?? [], + ihave: control.ihave ?? [], + iwant: control.iwant ?? 
[] + } + : undefined + }; + } + function ensureControl(rpc) { + if (rpc.control === undefined) { + rpc.control = { + graft: [], + prune: [], + ihave: [], + iwant: [] + }; + } + return rpc; + } + + /** + * Pseudo-randomly shuffles an array + * + * Mutates the input array + */ + function shuffle(arr) { + if (arr.length <= 1) { + return arr; + } + const randInt = () => { + return Math.floor(Math.random() * Math.floor(arr.length)); + }; + for (let i = 0; i < arr.length; i++) { + const j = randInt(); + const tmp = arr[i]; + arr[i] = arr[j]; + arr[j] = tmp; + } + return arr; + } + + /** + * Browser friendly function to convert Uint8Array message id to base64 string. + */ + function messageIdToString(msgId) { + return toString$1(msgId, 'base64'); + } + + /** + * Prepare a PublishConfig object from a PeerId. + */ + async function getPublishConfigFromPeerId(signaturePolicy, peerId) { + switch (signaturePolicy) { + case StrictSign: { + if (peerId == null) { + throw Error('Must provide PeerId'); + } + if (peerId.privateKey == null) { + throw Error('Cannot sign message, no private key present'); + } + if (peerId.publicKey == null) { + throw Error('Cannot sign message, no public key present'); + } + // Transform privateKey once at initialization time instead of once per message + const privateKey = await unmarshalPrivateKey(peerId.privateKey); + return { + type: PublishConfigType.Signing, + author: peerId, + key: peerId.publicKey, + privateKey + }; + } + case StrictNoSign: + return { + type: PublishConfigType.Anonymous + }; + default: + throw new Error(`Unknown signature policy "${signaturePolicy}"`); + } + } + + /** + * Generate a message id, based on the `key` and `seqno` + */ + const msgId = (key, seqno) => { + const seqnoBytes = fromString(seqno.toString(16).padStart(16, '0'), 'base16'); + const msgId = new Uint8Array(key.length + seqnoBytes.length); + msgId.set(key, 0); + msgId.set(seqnoBytes, key.length); + return msgId; + }; + + /** + * Generate a message id, based on 
the `key` and `seqno` + */ + function msgIdFnStrictSign(msg) { + if (msg.type !== 'signed') { + throw new Error('expected signed message type'); + } + // Should never happen + if (msg.sequenceNumber == null) + throw Error('missing seqno field'); + // TODO: Should use .from here or key? + return msgId(msg.from.toBytes(), msg.sequenceNumber); + } + /** + * Generate a message id, based on message `data` + */ + async function msgIdFnStrictNoSign(msg) { + return sha256$1.encode(msg.data); + } + + // Protocols https://github.com/multiformats/multiaddr/blob/master/protocols.csv + // code size name + // 4 32 ip4 + // 41 128 ip6 + var Protocol; + (function (Protocol) { + Protocol[Protocol["ip4"] = 4] = "ip4"; + Protocol[Protocol["ip6"] = 41] = "ip6"; + })(Protocol || (Protocol = {})); + function multiaddrToIPStr(multiaddr) { + for (const tuple of multiaddr.tuples()) { + switch (tuple[0]) { + case Protocol.ip4: + case Protocol.ip6: + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + return convertToString(tuple[0], tuple[1]); + } + } + return null; + } + + /** + * This is similar to https://github.com/daviddias/time-cache/blob/master/src/index.js + * for our own need, we don't use lodash throttle to improve performance. + * This gives 4x - 5x performance gain compared to npm TimeCache + */ + class SimpleTimeCache { + entries = new Map(); + validityMs; + constructor(opts) { + this.validityMs = opts.validityMs; + // allow negative validityMs so that this does not cache anything, spec test compliance.spec.js + // sends duplicate messages and expect peer to receive all. Application likely uses positive validityMs + } + get size() { + return this.entries.size; + } + /** Returns true if there was a key collision and the entry is dropped */ + put(key, value) { + if (this.entries.has(key)) { + // Key collisions break insertion order in the entries cache, which break prune logic. 
+ // prune relies on each iterated entry to have strictly ascending validUntilMs, else it + // won't prune expired entries and SimpleTimeCache will grow unexpectedly. + // As of Oct 2022 NodeJS v16, inserting the same key twice with different value does not + // change the key position in the iterator stream. A unit test asserts this behaviour. + return true; + } + this.entries.set(key, { value, validUntilMs: Date.now() + this.validityMs }); + return false; + } + prune() { + const now = Date.now(); + for (const [k, v] of this.entries.entries()) { + if (v.validUntilMs < now) { + this.entries.delete(k); + } + else { + // Entries are inserted with strictly ascending validUntilMs. + // Stop early to save iterations + break; + } + } + } + has(key) { + return this.entries.has(key); + } + get(key) { + const value = this.entries.get(key); + return (value != null) && value.validUntilMs >= Date.now() ? value.value : undefined; + } + clear() { + this.entries.clear(); + } + } + + var GossipStatusCode; + (function (GossipStatusCode) { + GossipStatusCode[GossipStatusCode["started"] = 0] = "started"; + GossipStatusCode[GossipStatusCode["stopped"] = 1] = "stopped"; + })(GossipStatusCode || (GossipStatusCode = {})); + class GossipSub extends TypedEventEmitter { + /** + * The signature policy to follow by default + */ + globalSignaturePolicy; + multicodecs = [GossipsubIDv11, GossipsubIDv10]; + publishConfig; + dataTransform; + // State + peers = new Set(); + streamsInbound = new Map(); + streamsOutbound = new Map(); + /** Ensures outbound streams are created sequentially */ + outboundInflightQueue = pushable$1({ objectMode: true }); + /** Direct peers */ + direct = new Set(); + /** Floodsub peers */ + floodsubPeers = new Set(); + /** Cache of seen messages */ + seenCache; + /** + * Map of peer id and AcceptRequestWhileListEntry + */ + acceptFromWhitelist = new Map(); + /** + * Map of topics to which peers are subscribed to + */ + topics = new Map(); + /** + * List of our 
subscriptions + */ + subscriptions = new Set(); + /** + * Map of topic meshes + * topic => peer id set + */ + mesh = new Map(); + /** + * Map of topics to set of peers. These mesh peers are the ones to which we are publishing without a topic membership + * topic => peer id set + */ + fanout = new Map(); + /** + * Map of last publish time for fanout topics + * topic => last publish time + */ + fanoutLastpub = new Map(); + /** + * Map of pending messages to gossip + * peer id => control messages + */ + gossip = new Map(); + /** + * Map of control messages + * peer id => control message + */ + control = new Map(); + /** + * Number of IHAVEs received from peer in the last heartbeat + */ + peerhave = new Map(); + /** Number of messages we have asked from peer in the last heartbeat */ + iasked = new Map(); + /** Prune backoff map */ + backoff = new Map(); + /** + * Connection direction cache, marks peers with outbound connections + * peer id => direction + */ + outbound = new Map(); + msgIdFn; + /** + * A fast message id function used for internal message de-duplication + */ + fastMsgIdFn; + msgIdToStrFn; + /** Maps fast message-id to canonical message-id */ + fastMsgIdCache; + /** + * Short term cache for published message ids. This is used for penalizing peers sending + * our own messages back if the messages are anonymous or use a random author. + */ + publishedMessageIds; + /** + * A message cache that contains the messages for last few heartbeat ticks + */ + mcache; + /** Peer score tracking */ + score; + /** + * Custom validator function per topic. + * Must return or resolve quickly (< 100ms) to prevent causing penalties for late messages. + * If you need to apply validation that may require longer times use `asyncValidation` option and callback the + * validation result through `Gossipsub.reportValidationResult` + */ + topicValidators = new Map(); + /** + * Make this protected so child class may want to redirect to its own log. 
+ */ + log; + /** + * Number of heartbeats since the beginning of time + * This allows us to amortize some resource cleanup -- eg: backoff cleanup + */ + heartbeatTicks = 0; + /** + * Tracks IHAVE/IWANT promises broken by peers + */ + gossipTracer; + components; + directPeerInitial = null; + static multicodec = GossipsubIDv11; + // Options + opts; + decodeRpcLimits; + metrics; + status = { code: GossipStatusCode.stopped }; + maxInboundStreams; + maxOutboundStreams; + runOnTransientConnection; + allowedTopics; + heartbeatTimer = null; + constructor(components, options = {}) { + super(); + const opts = { + fallbackToFloodsub: true, + floodPublish: true, + batchPublish: false, + tagMeshPeers: true, + doPX: false, + directPeers: [], + D: GossipsubD, + Dlo: GossipsubDlo, + Dhi: GossipsubDhi, + Dscore: GossipsubDscore, + Dout: GossipsubDout, + Dlazy: GossipsubDlazy, + heartbeatInterval: GossipsubHeartbeatInterval, + fanoutTTL: GossipsubFanoutTTL, + mcacheLength: GossipsubHistoryLength, + mcacheGossip: GossipsubHistoryGossip, + seenTTL: GossipsubSeenTTL, + gossipsubIWantFollowupMs: GossipsubIWantFollowupTime, + prunePeers: GossipsubPrunePeers, + pruneBackoff: GossipsubPruneBackoff, + unsubcribeBackoff: GossipsubUnsubscribeBackoff, + graftFloodThreshold: GossipsubGraftFloodThreshold, + opportunisticGraftPeers: GossipsubOpportunisticGraftPeers, + opportunisticGraftTicks: GossipsubOpportunisticGraftTicks, + directConnectTicks: GossipsubDirectConnectTicks, + ...options, + scoreParams: createPeerScoreParams(options.scoreParams), + scoreThresholds: createPeerScoreThresholds(options.scoreThresholds) + }; + this.components = components; + this.decodeRpcLimits = opts.decodeRpcLimits ?? defaultDecodeRpcLimits; + this.globalSignaturePolicy = opts.globalSignaturePolicy ?? 
StrictSign; + // Also wants to get notified of peers connected using floodsub + if (opts.fallbackToFloodsub) { + this.multicodecs.push(FloodsubID); + } + // From pubsub + this.log = components.logger.forComponent(opts.debugName ?? 'libp2p:gossipsub'); + // Gossipsub + this.opts = opts; + this.direct = new Set(opts.directPeers.map((p) => p.id.toString())); + this.seenCache = new SimpleTimeCache({ validityMs: opts.seenTTL }); + this.publishedMessageIds = new SimpleTimeCache({ validityMs: opts.seenTTL }); + if (options.msgIdFn != null) { + // Use custom function + this.msgIdFn = options.msgIdFn; + } + else { + switch (this.globalSignaturePolicy) { + case StrictSign: + this.msgIdFn = msgIdFnStrictSign; + break; + case StrictNoSign: + this.msgIdFn = msgIdFnStrictNoSign; + break; + default: + throw new Error(`Invalid globalSignaturePolicy: ${this.globalSignaturePolicy}`); + } + } + if (options.fastMsgIdFn != null) { + this.fastMsgIdFn = options.fastMsgIdFn; + this.fastMsgIdCache = new SimpleTimeCache({ validityMs: opts.seenTTL }); + } + // By default, gossipsub only provide a browser friendly function to convert Uint8Array message id to string. + this.msgIdToStrFn = options.msgIdToStrFn ?? messageIdToString; + this.mcache = options.messageCache ?? 
new MessageCache(opts.mcacheGossip, opts.mcacheLength, this.msgIdToStrFn); + if (options.dataTransform != null) { + this.dataTransform = options.dataTransform; + } + if (options.metricsRegister != null) { + if (options.metricsTopicStrToLabel == null) { + throw Error('Must set metricsTopicStrToLabel with metrics'); + } + // in theory, each topic has its own meshMessageDeliveriesWindow param + // however in lodestar, we configure it mostly the same so just pick the max of positive ones + // (some topics have meshMessageDeliveriesWindow as 0) + const maxMeshMessageDeliveriesWindowMs = Math.max(...Object.values(opts.scoreParams.topics).map((topicParam) => topicParam.meshMessageDeliveriesWindow), DEFAULT_METRIC_MESH_MESSAGE_DELIVERIES_WINDOWS); + const metrics = getMetrics(options.metricsRegister, options.metricsTopicStrToLabel, { + gossipPromiseExpireSec: this.opts.gossipsubIWantFollowupMs / 1000, + behaviourPenaltyThreshold: opts.scoreParams.behaviourPenaltyThreshold, + maxMeshMessageDeliveriesWindowSec: maxMeshMessageDeliveriesWindowMs / 1000 + }); + metrics.mcacheSize.addCollect(() => { this.onScrapeMetrics(metrics); }); + for (const protocol of this.multicodecs) { + metrics.protocolsEnabled.set({ protocol }, 1); + } + this.metrics = metrics; + } + else { + this.metrics = null; + } + this.gossipTracer = new IWantTracer(this.opts.gossipsubIWantFollowupMs, this.msgIdToStrFn, this.metrics); + /** + * libp2p + */ + this.score = new PeerScore(this.opts.scoreParams, this.metrics, this.components.logger, { + scoreCacheValidityMs: opts.heartbeatInterval + }); + this.maxInboundStreams = options.maxInboundStreams; + this.maxOutboundStreams = options.maxOutboundStreams; + this.runOnTransientConnection = options.runOnTransientConnection; + this.allowedTopics = (opts.allowedTopics != null) ? 
new Set(opts.allowedTopics) : null; + } + getPeers() { + return [...this.peers.keys()].map((str) => peerIdFromString(str)); + } + isStarted() { + return this.status.code === GossipStatusCode.started; + } + // LIFECYCLE METHODS + /** + * Mounts the gossipsub protocol onto the libp2p node and sends our + * our subscriptions to every peer connected + */ + async start() { + // From pubsub + if (this.isStarted()) { + return; + } + this.log('starting'); + this.publishConfig = await getPublishConfigFromPeerId(this.globalSignaturePolicy, this.components.peerId); + // Create the outbound inflight queue + // This ensures that outbound stream creation happens sequentially + this.outboundInflightQueue = pushable$1({ objectMode: true }); + pipe(this.outboundInflightQueue, async (source) => { + for await (const { peerId, connection } of source) { + await this.createOutboundStream(peerId, connection); + } + }).catch((e) => { this.log.error('outbound inflight queue error', e); }); + // set direct peer addresses in the address book + await Promise.all(this.opts.directPeers.map(async (p) => { + await this.components.peerStore.merge(p.id, { + multiaddrs: p.addrs + }); + })); + const registrar = this.components.registrar; + // Incoming streams + // Called after a peer dials us + await Promise.all(this.multicodecs.map(async (multicodec) => registrar.handle(multicodec, this.onIncomingStream.bind(this), { + maxInboundStreams: this.maxInboundStreams, + maxOutboundStreams: this.maxOutboundStreams, + runOnTransientConnection: this.runOnTransientConnection + }))); + // # How does Gossipsub interact with libp2p? Rough guide from Mar 2022 + // + // ## Setup: + // Gossipsub requests libp2p to callback, TBD + // + // `this.libp2p.handle()` registers a handler for `/meshsub/1.1.0` and other Gossipsub protocols + // The handler callback is registered in libp2p Upgrader.protocols map. 
+ // + // Upgrader receives an inbound connection from some transport and (`Upgrader.upgradeInbound`): + // - Adds encryption (NOISE in our case) + // - Multiplex stream + // - Create a muxer and register that for each new stream call Upgrader.protocols handler + // + // ## Topology + // - new instance of Topology (unlinked to libp2p) with handlers + // - registar.register(topology) + // register protocol with topology + // Topology callbacks called on connection manager changes + const topology = { + onConnect: this.onPeerConnected.bind(this), + onDisconnect: this.onPeerDisconnected.bind(this), + notifyOnTransient: this.runOnTransientConnection + }; + const registrarTopologyIds = await Promise.all(this.multicodecs.map(async (multicodec) => registrar.register(multicodec, topology))); + // Schedule to start heartbeat after `GossipsubHeartbeatInitialDelay` + const heartbeatTimeout = setTimeout(this.runHeartbeat, GossipsubHeartbeatInitialDelay); + // Then, run heartbeat every `heartbeatInterval` offset by `GossipsubHeartbeatInitialDelay` + this.status = { + code: GossipStatusCode.started, + registrarTopologyIds, + heartbeatTimeout, + hearbeatStartMs: Date.now() + GossipsubHeartbeatInitialDelay + }; + this.score.start(); + // connect to direct peers + this.directPeerInitial = setTimeout(() => { + Promise.resolve() + .then(async () => { + await Promise.all(Array.from(this.direct).map(async (id) => this.connect(id))); + }) + .catch((err) => { + this.log(err); + }); + }, GossipsubDirectConnectInitialDelay); + if (this.opts.tagMeshPeers) { + this.addEventListener('gossipsub:graft', this.tagMeshPeer); + this.addEventListener('gossipsub:prune', this.untagMeshPeer); + } + this.log('started'); + } + /** + * Unmounts the gossipsub protocol and shuts down every connection + */ + async stop() { + this.log('stopping'); + // From pubsub + if (this.status.code !== GossipStatusCode.started) { + return; + } + const { registrarTopologyIds } = this.status; + this.status = { code: 
GossipStatusCode.stopped }; + if (this.opts.tagMeshPeers) { + this.removeEventListener('gossipsub:graft', this.tagMeshPeer); + this.removeEventListener('gossipsub:prune', this.untagMeshPeer); + } + // unregister protocol and handlers + const registrar = this.components.registrar; + await Promise.all(this.multicodecs.map(async (multicodec) => registrar.unhandle(multicodec))); + registrarTopologyIds.forEach((id) => { registrar.unregister(id); }); + this.outboundInflightQueue.end(); + const closePromises = []; + for (const outboundStream of this.streamsOutbound.values()) { + closePromises.push(outboundStream.close()); + } + this.streamsOutbound.clear(); + for (const inboundStream of this.streamsInbound.values()) { + closePromises.push(inboundStream.close()); + } + this.streamsInbound.clear(); + await Promise.all(closePromises); + this.peers.clear(); + this.subscriptions.clear(); + // Gossipsub + if (this.heartbeatTimer != null) { + this.heartbeatTimer.cancel(); + this.heartbeatTimer = null; + } + this.score.stop(); + this.mesh.clear(); + this.fanout.clear(); + this.fanoutLastpub.clear(); + this.gossip.clear(); + this.control.clear(); + this.peerhave.clear(); + this.iasked.clear(); + this.backoff.clear(); + this.outbound.clear(); + this.gossipTracer.clear(); + this.seenCache.clear(); + if (this.fastMsgIdCache != null) + this.fastMsgIdCache.clear(); + if (this.directPeerInitial != null) + clearTimeout(this.directPeerInitial); + this.log('stopped'); + } + /** FOR DEBUG ONLY - Dump peer stats for all peers. 
Data is cloned, safe to mutate */ + dumpPeerScoreStats() { + return this.score.dumpPeerScoreStats(); + } + /** + * On an inbound stream opened + */ + onIncomingStream({ stream, connection }) { + if (!this.isStarted()) { + return; + } + const peerId = connection.remotePeer; + // add peer to router + this.addPeer(peerId, connection.direction, connection.remoteAddr); + // create inbound stream + this.createInboundStream(peerId, stream); + // attempt to create outbound stream + this.outboundInflightQueue.push({ peerId, connection }); + } + /** + * Registrar notifies an established connection with pubsub protocol + */ + onPeerConnected(peerId, connection) { + this.metrics?.newConnectionCount.inc({ status: connection.status }); + // libp2p may emit a closed connection and never issue peer:disconnect event + // see https://github.com/ChainSafe/js-libp2p-gossipsub/issues/398 + if (!this.isStarted() || connection.status !== 'open') { + return; + } + this.addPeer(peerId, connection.direction, connection.remoteAddr); + this.outboundInflightQueue.push({ peerId, connection }); + } + /** + * Registrar notifies a closing connection with pubsub protocol + */ + onPeerDisconnected(peerId) { + this.log('connection ended %p', peerId); + this.removePeer(peerId); + } + async createOutboundStream(peerId, connection) { + if (!this.isStarted()) { + return; + } + const id = peerId.toString(); + if (!this.peers.has(id)) { + return; + } + // TODO make this behavior more robust + // This behavior is different than for inbound streams + // If an outbound stream already exists, don't create a new stream + if (this.streamsOutbound.has(id)) { + return; + } + try { + const stream = new OutboundStream(await connection.newStream(this.multicodecs, { + runOnTransientConnection: this.runOnTransientConnection + }), (e) => { this.log.error('outbound pipe error', e); }, { maxBufferSize: this.opts.maxOutboundBufferSize }); + this.log('create outbound stream %p', peerId); + this.streamsOutbound.set(id, 
stream); + const protocol = stream.protocol; + if (protocol === FloodsubID) { + this.floodsubPeers.add(id); + } + this.metrics?.peersPerProtocol.inc({ protocol }, 1); + // Immediately send own subscriptions via the newly attached stream + if (this.subscriptions.size > 0) { + this.log('send subscriptions to', id); + this.sendSubscriptions(id, Array.from(this.subscriptions), true); + } + } + catch (e) { + this.log.error('createOutboundStream error', e); + } + } + createInboundStream(peerId, stream) { + if (!this.isStarted()) { + return; + } + const id = peerId.toString(); + if (!this.peers.has(id)) { + return; + } + // TODO make this behavior more robust + // This behavior is different than for outbound streams + // If a peer initiates a new inbound connection + // we assume that one is the new canonical inbound stream + const priorInboundStream = this.streamsInbound.get(id); + if (priorInboundStream !== undefined) { + this.log('replacing existing inbound steam %s', id); + priorInboundStream.close().catch((err) => { this.log.error(err); }); + } + this.log('create inbound stream %s', id); + const inboundStream = new InboundStream(stream, { maxDataLength: this.opts.maxInboundDataLength }); + this.streamsInbound.set(id, inboundStream); + this.pipePeerReadStream(peerId, inboundStream.source).catch((err) => { this.log(err); }); + } + /** + * Add a peer to the router + */ + addPeer(peerId, direction, addr) { + const id = peerId.toString(); + if (!this.peers.has(id)) { + this.log('new peer %p', peerId); + this.peers.add(id); + // Add to peer scoring + this.score.addPeer(id); + const currentIP = multiaddrToIPStr(addr); + if (currentIP !== null) { + this.score.addIP(id, currentIP); + } + else { + this.log('Added peer has no IP in current address %s %s', id, addr.toString()); + } + // track the connection direction. 
Don't allow to unset outbound + if (!this.outbound.has(id)) { + this.outbound.set(id, direction === 'outbound'); + } + } + } + /** + * Removes a peer from the router + */ + removePeer(peerId) { + const id = peerId.toString(); + if (!this.peers.has(id)) { + return; + } + // delete peer + this.log('delete peer %p', peerId); + this.peers.delete(id); + const outboundStream = this.streamsOutbound.get(id); + const inboundStream = this.streamsInbound.get(id); + if (outboundStream != null) { + this.metrics?.peersPerProtocol.inc({ protocol: outboundStream.protocol }, -1); + } + // close streams + outboundStream?.close().catch((err) => { this.log.error(err); }); + inboundStream?.close().catch((err) => { this.log.error(err); }); + // remove streams + this.streamsOutbound.delete(id); + this.streamsInbound.delete(id); + // remove peer from topics map + for (const peers of this.topics.values()) { + peers.delete(id); + } + // Remove this peer from the mesh + for (const [topicStr, peers] of this.mesh) { + if (peers.delete(id)) { + this.metrics?.onRemoveFromMesh(topicStr, ChurnReason.Dc, 1); + } + } + // Remove this peer from the fanout + for (const peers of this.fanout.values()) { + peers.delete(id); + } + // Remove from floodsubPeers + this.floodsubPeers.delete(id); + // Remove from gossip mapping + this.gossip.delete(id); + // Remove from control mapping + this.control.delete(id); + // Remove from backoff mapping + this.outbound.delete(id); + // Remove from peer scoring + this.score.removePeer(id); + this.acceptFromWhitelist.delete(id); + } + // API METHODS + get started() { + return this.status.code === GossipStatusCode.started; + } + /** + * Get a the peer-ids in a topic mesh + */ + getMeshPeers(topic) { + const peersInTopic = this.mesh.get(topic); + return (peersInTopic != null) ? Array.from(peersInTopic) : []; + } + /** + * Get a list of the peer-ids that are subscribed to one topic. 
+ */ + getSubscribers(topic) { + const peersInTopic = this.topics.get(topic); + return ((peersInTopic != null) ? Array.from(peersInTopic) : []).map((str) => peerIdFromString(str)); + } + /** + * Get the list of topics which the peer is subscribed to. + */ + getTopics() { + return Array.from(this.subscriptions); + } + // TODO: Reviewing Pubsub API + // MESSAGE METHODS + /** + * Responsible for processing each RPC message received by other peers. + */ + async pipePeerReadStream(peerId, stream) { + try { + await pipe(stream, async (source) => { + for await (const data of source) { + try { + // TODO: Check max gossip message size, before decodeRpc() + const rpcBytes = data.subarray(); + // Note: This function may throw, it must be wrapped in a try {} catch {} to prevent closing the stream. + // TODO: What should we do if the entire RPC is invalid? + const rpc = RPC$1.decode(rpcBytes, { + limits: { + subscriptions: this.decodeRpcLimits.maxSubscriptions, + messages: this.decodeRpcLimits.maxMessages, + control$: { + ihave: this.decodeRpcLimits.maxIhaveMessageIDs, + iwant: this.decodeRpcLimits.maxIwantMessageIDs, + graft: this.decodeRpcLimits.maxControlMessages, + prune: this.decodeRpcLimits.maxControlMessages, + prune$: { + peers: this.decodeRpcLimits.maxPeerInfos + } + } + } + }); + this.metrics?.onRpcRecv(rpc, rpcBytes.length); + // Since processRpc may be overridden entirely in unsafe ways, + // the simplest/safest option here is to wrap in a function and capture all errors + // to prevent a top-level unhandled exception + // This processing of rpc messages should happen without awaiting full validation/execution of prior messages + if (this.opts.awaitRpcHandler) { + try { + await this.handleReceivedRpc(peerId, rpc); + } + catch (err) { + this.metrics?.onRpcRecvError(); + this.log(err); + } + } + else { + this.handleReceivedRpc(peerId, rpc).catch((err) => { + this.metrics?.onRpcRecvError(); + this.log(err); + }); + } + } + catch (e) { + this.metrics?.onRpcDataError(); 
+ this.log(e); + } + } + }); + } + catch (err) { + this.metrics?.onPeerReadStreamError(); + this.handlePeerReadStreamError(err, peerId); + } + } + /** + * Handle error when read stream pipe throws, less of the functional use but more + * to for testing purposes to spy on the error handling + * */ + handlePeerReadStreamError(err, peerId) { + this.log.error(err); + this.onPeerDisconnected(peerId); + } + /** + * Handles an rpc request from a peer + */ + async handleReceivedRpc(from, rpc) { + // Check if peer is graylisted in which case we ignore the event + if (!this.acceptFrom(from.toString())) { + this.log('received message from unacceptable peer %p', from); + this.metrics?.rpcRecvNotAccepted.inc(); + return; + } + const subscriptions = (rpc.subscriptions != null) ? rpc.subscriptions.length : 0; + const messages = (rpc.messages != null) ? rpc.messages.length : 0; + let ihave = 0; + let iwant = 0; + let graft = 0; + let prune = 0; + if (rpc.control != null) { + if (rpc.control.ihave != null) + ihave = rpc.control.ihave.length; + if (rpc.control.iwant != null) + iwant = rpc.control.iwant.length; + if (rpc.control.graft != null) + graft = rpc.control.graft.length; + if (rpc.control.prune != null) + prune = rpc.control.prune.length; + } + this.log(`rpc.from ${from.toString()} subscriptions ${subscriptions} messages ${messages} ihave ${ihave} iwant ${iwant} graft ${graft} prune ${prune}`); + // Handle received subscriptions + if ((rpc.subscriptions != null) && rpc.subscriptions.length > 0) { + // update peer subscriptions + const subscriptions = []; + rpc.subscriptions.forEach((subOpt) => { + const topic = subOpt.topic; + const subscribe = subOpt.subscribe === true; + if (topic != null) { + if ((this.allowedTopics != null) && !this.allowedTopics.has(topic)) { + // Not allowed: subscription data-structures are not bounded by topic count + // TODO: Should apply behaviour penalties? 
+ return; + } + this.handleReceivedSubscription(from, topic, subscribe); + subscriptions.push({ topic, subscribe }); + } + }); + this.safeDispatchEvent('subscription-change', { + detail: { peerId: from, subscriptions } + }); + } + // Handle messages + // TODO: (up to limit) + for (const message of rpc.messages) { + if ((this.allowedTopics != null) && !this.allowedTopics.has(message.topic)) { + // Not allowed: message cache data-structures are not bounded by topic count + // TODO: Should apply behaviour penalties? + continue; + } + const handleReceivedMessagePromise = this.handleReceivedMessage(from, message) + // Should never throw, but handle just in case + .catch((err) => { + this.metrics?.onMsgRecvError(message.topic); + this.log(err); + }); + if (this.opts.awaitRpcMessageHandler) { + await handleReceivedMessagePromise; + } + } + // Handle control messages + if (rpc.control != null) { + await this.handleControlMessage(from.toString(), rpc.control); + } + } + /** + * Handles a subscription change from a peer + */ + handleReceivedSubscription(from, topic, subscribe) { + this.log('subscription update from %p topic %s', from, topic); + let topicSet = this.topics.get(topic); + if (topicSet == null) { + topicSet = new Set(); + this.topics.set(topic, topicSet); + } + if (subscribe) { + // subscribe peer to new topic + topicSet.add(from.toString()); + } + else { + // unsubscribe from existing topic + topicSet.delete(from.toString()); + } + // TODO: rust-libp2p has A LOT more logic here + } + /** + * Handles a newly received message from an RPC. + * May forward to all peers in the mesh. 
+ */ + async handleReceivedMessage(from, rpcMsg) { + this.metrics?.onMsgRecvPreValidation(rpcMsg.topic); + const validationResult = await this.validateReceivedMessage(from, rpcMsg); + this.metrics?.onPrevalidationResult(rpcMsg.topic, validationResult.code); + const validationCode = validationResult.code; + switch (validationCode) { + case MessageStatus.duplicate: + // Report the duplicate + this.score.duplicateMessage(from.toString(), validationResult.msgIdStr, rpcMsg.topic); + // due to the collision of fastMsgIdFn, 2 different messages may end up the same fastMsgId + // so we need to also mark the duplicate message as delivered or the promise is not resolved + // and peer gets penalized. See https://github.com/ChainSafe/js-libp2p-gossipsub/pull/385 + this.gossipTracer.deliverMessage(validationResult.msgIdStr, true); + this.mcache.observeDuplicate(validationResult.msgIdStr, from.toString()); + return; + case MessageStatus.invalid: + // invalid messages received + // metrics.register_invalid_message(&raw_message.topic) + // Tell peer_score about reject + // Reject the original source, and any duplicates we've seen from other peers. + if (validationResult.msgIdStr != null) { + const msgIdStr = validationResult.msgIdStr; + this.score.rejectMessage(from.toString(), msgIdStr, rpcMsg.topic, validationResult.reason); + this.gossipTracer.rejectMessage(msgIdStr, validationResult.reason); + } + else { + this.score.rejectInvalidMessage(from.toString(), rpcMsg.topic); + } + this.metrics?.onMsgRecvInvalid(rpcMsg.topic, validationResult); + return; + case MessageStatus.valid: + // Tells score that message arrived (but is maybe not fully validated yet). + // Consider the message as delivered for gossip promises. 
+ this.score.validateMessage(validationResult.messageId.msgIdStr); + this.gossipTracer.deliverMessage(validationResult.messageId.msgIdStr); + // Add the message to our memcache + // if no validation is required, mark the message as validated + this.mcache.put(validationResult.messageId, rpcMsg, !this.opts.asyncValidation); + // Dispatch the message to the user if we are subscribed to the topic + if (this.subscriptions.has(rpcMsg.topic)) { + const isFromSelf = this.components.peerId.equals(from); + if (!isFromSelf || this.opts.emitSelf) { + super.dispatchEvent(new CustomEvent$1('gossipsub:message', { + detail: { + propagationSource: from, + msgId: validationResult.messageId.msgIdStr, + msg: validationResult.msg + } + })); + // TODO: Add option to switch between emit per topic or all messages in one + super.dispatchEvent(new CustomEvent$1('message', { detail: validationResult.msg })); + } + } + // Forward the message to mesh peers, if no validation is required + // If asyncValidation is ON, expect the app layer to call reportMessageValidationResult(), then forward + if (!this.opts.asyncValidation) { + // TODO: in rust-libp2p + // .forward_msg(&msg_id, raw_message, Some(propagation_source)) + this.forwardMessage(validationResult.messageId.msgIdStr, rpcMsg, from.toString()); + } + break; + default: + throw new Error(`Invalid validation result: ${validationCode}`); + } + } + /** + * Handles a newly received message from an RPC. + * May forward to all peers in the mesh. + */ + async validateReceivedMessage(propagationSource, rpcMsg) { + // Fast message ID stuff + const fastMsgIdStr = this.fastMsgIdFn?.(rpcMsg); + const msgIdCached = fastMsgIdStr !== undefined ? this.fastMsgIdCache?.get(fastMsgIdStr) : undefined; + if (msgIdCached != null) { + // This message has been seen previously. 
Ignore it + return { code: MessageStatus.duplicate, msgIdStr: msgIdCached }; + } + // Perform basic validation on message and convert to RawGossipsubMessage for fastMsgIdFn() + const validationResult = await validateToRawMessage(this.globalSignaturePolicy, rpcMsg); + if (!validationResult.valid) { + return { code: MessageStatus.invalid, reason: RejectReason.Error, error: validationResult.error }; + } + const msg = validationResult.message; + // Try and perform the data transform to the message. If it fails, consider it invalid. + try { + if (this.dataTransform != null) { + msg.data = this.dataTransform.inboundTransform(rpcMsg.topic, msg.data); + } + } + catch (e) { + this.log('Invalid message, transform failed', e); + return { code: MessageStatus.invalid, reason: RejectReason.Error, error: ValidateError.TransformFailed }; + } + // TODO: Check if message is from a blacklisted source or propagation origin + // - Reject any message from a blacklisted peer + // - Also reject any message that originated from a blacklisted peer + // - reject messages claiming to be from ourselves but not locally published + // Calculate the message id on the transformed data. + const msgId = await this.msgIdFn(msg); + const msgIdStr = this.msgIdToStrFn(msgId); + const messageId = { msgId, msgIdStr }; + // Add the message to the duplicate caches + if (fastMsgIdStr !== undefined && (this.fastMsgIdCache != null)) { + const collision = this.fastMsgIdCache.put(fastMsgIdStr, msgIdStr); + if (collision) { + this.metrics?.fastMsgIdCacheCollision.inc(); + } + } + if (this.seenCache.has(msgIdStr)) { + return { code: MessageStatus.duplicate, msgIdStr }; + } + else { + this.seenCache.put(msgIdStr); + } + // (Optional) Provide custom validation here with dynamic validators per topic + // NOTE: This custom topicValidator() must resolve fast (< 100ms) to allow scores + // to not penalize peers for long validation times. 
+ const topicValidator = this.topicValidators.get(rpcMsg.topic); + if (topicValidator != null) { + let acceptance; + // Use try {} catch {} in case topicValidator() is synchronous + try { + acceptance = await topicValidator(propagationSource, msg); + } + catch (e) { + const errCode = e.code; + if (errCode === ERR_TOPIC_VALIDATOR_IGNORE) + acceptance = TopicValidatorResult.Ignore; + if (errCode === ERR_TOPIC_VALIDATOR_REJECT) + acceptance = TopicValidatorResult.Reject; + else + acceptance = TopicValidatorResult.Ignore; + } + if (acceptance !== TopicValidatorResult.Accept) { + return { code: MessageStatus.invalid, reason: rejectReasonFromAcceptance(acceptance), msgIdStr }; + } + } + return { code: MessageStatus.valid, messageId, msg }; + } + /** + * Return score of a peer. + */ + getScore(peerId) { + return this.score.score(peerId); + } + /** + * Send an rpc object to a peer with subscriptions + */ + sendSubscriptions(toPeer, topics, subscribe) { + this.sendRpc(toPeer, { + subscriptions: topics.map((topic) => ({ topic, subscribe })), + messages: [] + }); + } + /** + * Handles an rpc control message from a peer + */ + async handleControlMessage(id, controlMsg) { + if (controlMsg === undefined) { + return; + } + const iwant = (controlMsg.ihave != null) ? this.handleIHave(id, controlMsg.ihave) : []; + const ihave = (controlMsg.iwant != null) ? this.handleIWant(id, controlMsg.iwant) : []; + const prune = (controlMsg.graft != null) ? 
await this.handleGraft(id, controlMsg.graft) : []; + (controlMsg.prune != null) && (await this.handlePrune(id, controlMsg.prune)); + if ((iwant.length === 0) && (ihave.length === 0) && (prune.length === 0)) { + return; + } + const sent = this.sendRpc(id, createGossipRpc(ihave, { iwant, prune })); + const iwantMessageIds = iwant[0]?.messageIDs; + if (iwantMessageIds != null) { + if (sent) { + this.gossipTracer.addPromise(id, iwantMessageIds); + } + else { + this.metrics?.iwantPromiseUntracked.inc(1); + } + } + } + /** + * Whether to accept a message from a peer + */ + acceptFrom(id) { + if (this.direct.has(id)) { + return true; + } + const now = Date.now(); + const entry = this.acceptFromWhitelist.get(id); + if ((entry != null) && entry.messagesAccepted < ACCEPT_FROM_WHITELIST_MAX_MESSAGES && entry.acceptUntil >= now) { + entry.messagesAccepted += 1; + return true; + } + const score = this.score.score(id); + if (score >= ACCEPT_FROM_WHITELIST_THRESHOLD_SCORE) { + // peer is unlikely to be able to drop its score to `graylistThreshold` + // after 128 messages or 1s + this.acceptFromWhitelist.set(id, { + messagesAccepted: 0, + acceptUntil: now + ACCEPT_FROM_WHITELIST_DURATION_MS + }); + } + else { + this.acceptFromWhitelist.delete(id); + } + return score >= this.opts.scoreThresholds.graylistThreshold; + } + /** + * Handles IHAVE messages + */ + handleIHave(id, ihave) { + if (ihave.length === 0) { + return []; + } + // we ignore IHAVE gossip from any peer whose score is below the gossips threshold + const score = this.score.score(id); + if (score < this.opts.scoreThresholds.gossipThreshold) { + this.log('IHAVE: ignoring peer %s with score below threshold [ score = %d ]', id, score); + this.metrics?.ihaveRcvIgnored.inc({ reason: IHaveIgnoreReason.LowScore }); + return []; + } + // IHAVE flood protection + const peerhave = (this.peerhave.get(id) ?? 
0) + 1; + this.peerhave.set(id, peerhave); + if (peerhave > GossipsubMaxIHaveMessages) { + this.log('IHAVE: peer %s has advertised too many times (%d) within this heartbeat interval; ignoring', id, peerhave); + this.metrics?.ihaveRcvIgnored.inc({ reason: IHaveIgnoreReason.MaxIhave }); + return []; + } + const iasked = this.iasked.get(id) ?? 0; + if (iasked >= GossipsubMaxIHaveLength) { + this.log('IHAVE: peer %s has already advertised too many messages (%d); ignoring', id, iasked); + this.metrics?.ihaveRcvIgnored.inc({ reason: IHaveIgnoreReason.MaxIasked }); + return []; + } + // string msgId => msgId + const iwant = new Map(); + ihave.forEach(({ topicID, messageIDs }) => { + if (topicID == null || (messageIDs == null) || !this.mesh.has(topicID)) { + return; + } + let idonthave = 0; + messageIDs.forEach((msgId) => { + const msgIdStr = this.msgIdToStrFn(msgId); + if (!this.seenCache.has(msgIdStr)) { + iwant.set(msgIdStr, msgId); + idonthave++; + } + }); + this.metrics?.onIhaveRcv(topicID, messageIDs.length, idonthave); + }); + if (iwant.size === 0) { + return []; + } + let iask = iwant.size; + if (iask + iasked > GossipsubMaxIHaveLength) { + iask = GossipsubMaxIHaveLength - iasked; + } + this.log('IHAVE: Asking for %d out of %d messages from %s', iask, iwant.size, id); + let iwantList = Array.from(iwant.values()); + // ask in random order + shuffle(iwantList); + // truncate to the messages we are actually asking for and update the iasked counter + iwantList = iwantList.slice(0, iask); + this.iasked.set(id, iasked + iask); + // do not add gossipTracer promise here until a successful sendRpc() + return [ + { + messageIDs: iwantList + } + ]; + } + /** + * Handles IWANT messages + * Returns messages to send back to peer + */ + handleIWant(id, iwant) { + if (iwant.length === 0) { + return []; + } + // we don't respond to IWANT requests from any per whose score is below the gossip threshold + const score = this.score.score(id); + if (score < 
this.opts.scoreThresholds.gossipThreshold) { + this.log('IWANT: ignoring peer %s with score below threshold [score = %d]', id, score); + return []; + } + const ihave = new Map(); + const iwantByTopic = new Map(); + let iwantDonthave = 0; + iwant.forEach(({ messageIDs }) => { + messageIDs?.forEach((msgId) => { + const msgIdStr = this.msgIdToStrFn(msgId); + const entry = this.mcache.getWithIWantCount(msgIdStr, id); + if (entry == null) { + iwantDonthave++; + return; + } + iwantByTopic.set(entry.msg.topic, 1 + (iwantByTopic.get(entry.msg.topic) ?? 0)); + if (entry.count > GossipsubGossipRetransmission) { + this.log('IWANT: Peer %s has asked for message %s too many times: ignoring request', id, msgId); + return; + } + ihave.set(msgIdStr, entry.msg); + }); + }); + this.metrics?.onIwantRcv(iwantByTopic, iwantDonthave); + if (ihave.size === 0) { + this.log('IWANT: Could not provide any wanted messages to %s', id); + return []; + } + this.log('IWANT: Sending %d messages to %s', ihave.size, id); + return Array.from(ihave.values()); + } + /** + * Handles Graft messages + */ + async handleGraft(id, graft) { + const prune = []; + const score = this.score.score(id); + const now = Date.now(); + let doPX = this.opts.doPX; + graft.forEach(({ topicID }) => { + if (topicID == null) { + return; + } + const peersInMesh = this.mesh.get(topicID); + if (peersInMesh == null) { + // don't do PX when there is an unknown topic to avoid leaking our peers + doPX = false; + // spam hardening: ignore GRAFTs for unknown topics + return; + } + // check if peer is already in the mesh; if so do nothing + if (peersInMesh.has(id)) { + return; + } + const backoffExpiry = this.backoff.get(topicID)?.get(id); + // This if/else chain contains the various cases of valid (and semi-valid) GRAFTs + // Most of these cases result in a PRUNE immediately being sent in response + // we don't GRAFT to/from direct peers; complain loudly if this happens + if (this.direct.has(id)) { + this.log('GRAFT: ignoring request 
from direct peer %s', id); + // this is possibly a bug from a non-reciprical configuration; send a PRUNE + prune.push(topicID); + // but don't px + doPX = false; + // make sure we are not backing off that peer + } + else if (typeof backoffExpiry === 'number' && now < backoffExpiry) { + this.log('GRAFT: ignoring backed off peer %s', id); + // add behavioral penalty + this.score.addPenalty(id, 1, ScorePenalty.GraftBackoff); + // no PX + doPX = false; + // check the flood cutoff -- is the GRAFT coming too fast? + const floodCutoff = backoffExpiry + this.opts.graftFloodThreshold - this.opts.pruneBackoff; + if (now < floodCutoff) { + // extra penalty + this.score.addPenalty(id, 1, ScorePenalty.GraftBackoff); + } + // refresh the backoff + this.addBackoff(id, topicID); + prune.push(topicID); + // check the score + } + else if (score < 0) { + // we don't GRAFT peers with negative score + this.log('GRAFT: ignoring peer %s with negative score: score=%d, topic=%s', id, score, topicID); + // we do send them PRUNE however, because it's a matter of protocol correctness + prune.push(topicID); + // but we won't PX to them + doPX = false; + // add/refresh backoff so that we don't reGRAFT too early even if the score decays + this.addBackoff(id, topicID); + // check the number of mesh peers; if it is at (or over) Dhi, we only accept grafts + // from peers with outbound connections; this is a defensive check to restrict potential + // mesh takeover attacks combined with love bombing + } + else if (peersInMesh.size >= this.opts.Dhi && !(this.outbound.get(id) ?? 
false)) { + prune.push(topicID); + this.addBackoff(id, topicID); + // valid graft + } + else { + this.log('GRAFT: Add mesh link from %s in %s', id, topicID); + this.score.graft(id, topicID); + peersInMesh.add(id); + this.metrics?.onAddToMesh(topicID, InclusionReason.Subscribed, 1); + } + this.safeDispatchEvent('gossipsub:graft', { detail: { peerId: id, topic: topicID, direction: 'inbound' } }); + }); + if (prune.length === 0) { + return []; + } + const onUnsubscribe = false; + return Promise.all(prune.map(async (topic) => this.makePrune(id, topic, doPX, onUnsubscribe))); + } + /** + * Handles Prune messages + */ + async handlePrune(id, prune) { + const score = this.score.score(id); + for (const { topicID, backoff, peers } of prune) { + if (topicID == null) { + continue; + } + const peersInMesh = this.mesh.get(topicID); + if (peersInMesh == null) { + return; + } + this.log('PRUNE: Remove mesh link to %s in %s', id, topicID); + this.score.prune(id, topicID); + if (peersInMesh.has(id)) { + peersInMesh.delete(id); + this.metrics?.onRemoveFromMesh(topicID, ChurnReason.Prune, 1); + } + // is there a backoff specified by the peer? 
if so obey it + if (typeof backoff === 'number' && backoff > 0) { + this.doAddBackoff(id, topicID, backoff * 1000); + } + else { + this.addBackoff(id, topicID); + } + // PX + if ((peers != null) && (peers.length > 0)) { + // we ignore PX from peers with insufficient scores + if (score < this.opts.scoreThresholds.acceptPXThreshold) { + this.log('PRUNE: ignoring PX from peer %s with insufficient score [score = %d, topic = %s]', id, score, topicID); + } + else { + await this.pxConnect(peers); + } + } + this.safeDispatchEvent('gossipsub:prune', { detail: { peerId: id, topic: topicID, direction: 'inbound' } }); + } + } + /** + * Add standard backoff log for a peer in a topic + */ + addBackoff(id, topic) { + this.doAddBackoff(id, topic, this.opts.pruneBackoff); + } + /** + * Add backoff expiry interval for a peer in a topic + * + * @param id + * @param topic + * @param intervalMs - backoff duration in milliseconds + */ + doAddBackoff(id, topic, intervalMs) { + let backoff = this.backoff.get(topic); + if (backoff == null) { + backoff = new Map(); + this.backoff.set(topic, backoff); + } + const expire = Date.now() + intervalMs; + const existingExpire = backoff.get(id) ?? 
0; + if (existingExpire < expire) { + backoff.set(id, expire); + } + } + /** + * Apply penalties from broken IHAVE/IWANT promises + */ + applyIwantPenalties() { + this.gossipTracer.getBrokenPromises().forEach((count, p) => { + this.log("peer %s didn't follow up in %d IWANT requests; adding penalty", p, count); + this.score.addPenalty(p, count, ScorePenalty.BrokenPromise); + }); + } + /** + * Clear expired backoff expiries + */ + clearBackoff() { + // we only clear once every GossipsubPruneBackoffTicks ticks to avoid iterating over the maps too much + if (this.heartbeatTicks % GossipsubPruneBackoffTicks !== 0) { + return; + } + const now = Date.now(); + this.backoff.forEach((backoff, topic) => { + backoff.forEach((expire, id) => { + // add some slack time to the expiration, see https://github.com/libp2p/specs/pull/289 + if (expire + BACKOFF_SLACK * this.opts.heartbeatInterval < now) { + backoff.delete(id); + } + }); + if (backoff.size === 0) { + this.backoff.delete(topic); + } + }); + } + /** + * Maybe reconnect to direct peers + */ + async directConnect() { + const toconnect = []; + this.direct.forEach((id) => { + if (!this.streamsOutbound.has(id)) { + toconnect.push(id); + } + }); + await Promise.all(toconnect.map(async (id) => this.connect(id))); + } + /** + * Maybe attempt connection given signed peer records + */ + async pxConnect(peers) { + if (peers.length > this.opts.prunePeers) { + shuffle(peers); + peers = peers.slice(0, this.opts.prunePeers); + } + const toconnect = []; + await Promise.all(peers.map(async (pi) => { + if (pi.peerID == null) { + return; + } + const peer = peerIdFromBytes(pi.peerID); + const p = peer.toString(); + if (this.peers.has(p)) { + return; + } + if (pi.signedPeerRecord == null) { + toconnect.push(p); + return; + } + // The peer sent us a signed record + // This is not a record from the peer who sent the record, but another peer who is connected with it + // Ensure that it is valid + try { + if (!(await 
this.components.peerStore.consumePeerRecord(pi.signedPeerRecord, peer))) { + this.log('bogus peer record obtained through px: could not add peer record to address book'); + return; + } + toconnect.push(p); + } + catch (e) { + this.log('bogus peer record obtained through px: invalid signature or not a peer record'); + } + })); + if (toconnect.length === 0) { + return; + } + await Promise.all(toconnect.map(async (id) => this.connect(id))); + } + /** + * Connect to a peer using the gossipsub protocol + */ + async connect(id) { + this.log('Initiating connection with %s', id); + const peerId = peerIdFromString(id); + const connection = await this.components.connectionManager.openConnection(peerId); + for (const multicodec of this.multicodecs) { + for (const topology of this.components.registrar.getTopologies(multicodec)) { + topology.onConnect?.(peerId, connection); + } + } + } + /** + * Subscribes to a topic + */ + subscribe(topic) { + if (this.status.code !== GossipStatusCode.started) { + throw new Error('Pubsub has not started'); + } + if (!this.subscriptions.has(topic)) { + this.subscriptions.add(topic); + for (const peerId of this.peers.keys()) { + this.sendSubscriptions(peerId, [topic], true); + } + } + this.join(topic); + } + /** + * Unsubscribe to a topic + */ + unsubscribe(topic) { + if (this.status.code !== GossipStatusCode.started) { + throw new Error('Pubsub is not started'); + } + const wasSubscribed = this.subscriptions.delete(topic); + this.log('unsubscribe from %s - am subscribed %s', topic, wasSubscribed); + if (wasSubscribed) { + for (const peerId of this.peers.keys()) { + this.sendSubscriptions(peerId, [topic], false); + } + } + this.leave(topic); + } + /** + * Join topic + */ + join(topic) { + if (this.status.code !== GossipStatusCode.started) { + throw new Error('Gossipsub has not started'); + } + // if we are already in the mesh, return + if (this.mesh.has(topic)) { + return; + } + this.log('JOIN %s', topic); + this.metrics?.onJoin(topic); + const 
toAdd = new Set(); + const backoff = this.backoff.get(topic); + // check if we have mesh_n peers in fanout[topic] and add them to the mesh if we do, + // removing the fanout entry. + const fanoutPeers = this.fanout.get(topic); + if (fanoutPeers != null) { + // Remove fanout entry and the last published time + this.fanout.delete(topic); + this.fanoutLastpub.delete(topic); + // remove explicit peers, peers with negative scores, and backoffed peers + fanoutPeers.forEach((id) => { + if (!this.direct.has(id) && this.score.score(id) >= 0 && ((backoff == null) || !backoff.has(id))) { + toAdd.add(id); + } + }); + this.metrics?.onAddToMesh(topic, InclusionReason.Fanout, toAdd.size); + } + // check if we need to get more peers, which we randomly select + if (toAdd.size < this.opts.D) { + const fanoutCount = toAdd.size; + const newPeers = this.getRandomGossipPeers(topic, this.opts.D, (id) => + // filter direct peers and peers with negative score + !toAdd.has(id) && !this.direct.has(id) && this.score.score(id) >= 0 && ((backoff == null) || !backoff.has(id))); + newPeers.forEach((peer) => { + toAdd.add(peer); + }); + this.metrics?.onAddToMesh(topic, InclusionReason.Random, toAdd.size - fanoutCount); + } + this.mesh.set(topic, toAdd); + toAdd.forEach((id) => { + this.log('JOIN: Add mesh link to %s in %s', id, topic); + this.sendGraft(id, topic); + // rust-libp2p + // - peer_score.graft() + // - Self::control_pool_add() + // - peer_added_to_mesh() + }); + } + /** + * Leave topic + */ + leave(topic) { + if (this.status.code !== GossipStatusCode.started) { + throw new Error('Gossipsub has not started'); + } + this.log('LEAVE %s', topic); + this.metrics?.onLeave(topic); + // Send PRUNE to mesh peers + const meshPeers = this.mesh.get(topic); + if (meshPeers != null) { + Promise.all(Array.from(meshPeers).map(async (id) => { + this.log('LEAVE: Remove mesh link to %s in %s', id, topic); + await this.sendPrune(id, topic); + })).catch((err) => { + this.log('Error sending prunes to mesh 
peers', err); + }); + this.mesh.delete(topic); + } + } + selectPeersToForward(topic, propagationSource, excludePeers) { + const tosend = new Set(); + // Add explicit peers + const peersInTopic = this.topics.get(topic); + if (peersInTopic != null) { + this.direct.forEach((peer) => { + if (peersInTopic.has(peer) && propagationSource !== peer && !(excludePeers?.has(peer) ?? false)) { + tosend.add(peer); + } + }); + // As of Mar 2022, spec + golang-libp2p include this while rust-libp2p does not + // rust-libp2p: https://github.com/libp2p/rust-libp2p/blob/6cc3b4ec52c922bfcf562a29b5805c3150e37c75/protocols/gossipsub/src/behaviour.rs#L2693 + // spec: https://github.com/libp2p/specs/blob/10712c55ab309086a52eec7d25f294df4fa96528/pubsub/gossipsub/gossipsub-v1.0.md?plain=1#L361 + this.floodsubPeers.forEach((peer) => { + if (peersInTopic.has(peer) && + propagationSource !== peer && + !(excludePeers?.has(peer) ?? false) && + this.score.score(peer) >= this.opts.scoreThresholds.publishThreshold) { + tosend.add(peer); + } + }); + } + // add mesh peers + const meshPeers = this.mesh.get(topic); + if ((meshPeers != null) && meshPeers.size > 0) { + meshPeers.forEach((peer) => { + if (propagationSource !== peer && !(excludePeers?.has(peer) ?? 
false)) { + tosend.add(peer); + } + }); + } + return tosend; + } + selectPeersToPublish(topic) { + const tosend = new Set(); + const tosendCount = { + direct: 0, + floodsub: 0, + mesh: 0, + fanout: 0 + }; + const peersInTopic = this.topics.get(topic); + if (peersInTopic != null) { + // flood-publish behavior + // send to direct peers and _all_ peers meeting the publishThreshold + if (this.opts.floodPublish) { + peersInTopic.forEach((id) => { + if (this.direct.has(id)) { + tosend.add(id); + tosendCount.direct++; + } + else if (this.score.score(id) >= this.opts.scoreThresholds.publishThreshold) { + tosend.add(id); + tosendCount.floodsub++; + } + }); + } + else { + // non-flood-publish behavior + // send to direct peers, subscribed floodsub peers + // and some mesh peers above publishThreshold + // direct peers (if subscribed) + this.direct.forEach((id) => { + if (peersInTopic.has(id)) { + tosend.add(id); + tosendCount.direct++; + } + }); + // floodsub peers + // Note: if there are no floodsub peers, we save a loop through peersInTopic Map + this.floodsubPeers.forEach((id) => { + if (peersInTopic.has(id) && this.score.score(id) >= this.opts.scoreThresholds.publishThreshold) { + tosend.add(id); + tosendCount.floodsub++; + } + }); + // Gossipsub peers handling + const meshPeers = this.mesh.get(topic); + if ((meshPeers != null) && meshPeers.size > 0) { + meshPeers.forEach((peer) => { + tosend.add(peer); + tosendCount.mesh++; + }); + // eslint-disable-next-line @typescript-eslint/brace-style + } + // We are not in the mesh for topic, use fanout peers + else { + const fanoutPeers = this.fanout.get(topic); + if ((fanoutPeers != null) && fanoutPeers.size > 0) { + fanoutPeers.forEach((peer) => { + tosend.add(peer); + tosendCount.fanout++; + }); + // eslint-disable-next-line @typescript-eslint/brace-style + } + // We have no fanout peers, select mesh_n of them and add them to the fanout + else { + // If we are not in the fanout, then pick peers in topic above the 
publishThreshold + const newFanoutPeers = this.getRandomGossipPeers(topic, this.opts.D, (id) => { + return this.score.score(id) >= this.opts.scoreThresholds.publishThreshold; + }); + // eslint-disable-next-line max-depth + if (newFanoutPeers.size > 0) { + this.fanout.set(topic, newFanoutPeers); + newFanoutPeers.forEach((peer) => { + tosend.add(peer); + tosendCount.fanout++; + }); + } + } + // We are publishing to fanout peers - update the time we published + this.fanoutLastpub.set(topic, Date.now()); + } + } + } + return { tosend, tosendCount }; + } + /** + * Forwards a message from our peers. + * + * For messages published by us (the app layer), this class uses `publish` + */ + forwardMessage(msgIdStr, rawMsg, propagationSource, excludePeers) { + // message is fully validated inform peer_score + if (propagationSource != null) { + this.score.deliverMessage(propagationSource, msgIdStr, rawMsg.topic); + } + const tosend = this.selectPeersToForward(rawMsg.topic, propagationSource, excludePeers); + // Note: Don't throw if tosend is empty, we can have a mesh with a single peer + // forward the message to peers + tosend.forEach((id) => { + // sendRpc may mutate RPC message on piggyback, create a new message for each peer + this.sendRpc(id, createGossipRpc([rawMsg])); + }); + this.metrics?.onForwardMsg(rawMsg.topic, tosend.size); + } + /** + * App layer publishes a message to peers, return number of peers this message is published to + * Note: `async` due to crypto only if `StrictSign`, otherwise it's a sync fn. + * + * For messages not from us, this class uses `forwardMessage`. + */ + async publish(topic, data, opts) { + const startMs = Date.now(); + const transformedData = (this.dataTransform != null) ? 
this.dataTransform.outboundTransform(topic, data) : data; + if (this.publishConfig == null) { + throw Error('PublishError.Uninitialized'); + } + // Prepare raw message with user's publishConfig + const { raw: rawMsg, msg } = await buildRawMessage(this.publishConfig, topic, data, transformedData); + // calculate the message id from the un-transformed data + const msgId = await this.msgIdFn(msg); + const msgIdStr = this.msgIdToStrFn(msgId); + // Current publish opt takes precedence global opts, while preserving false value + const ignoreDuplicatePublishError = opts?.ignoreDuplicatePublishError ?? this.opts.ignoreDuplicatePublishError; + if (this.seenCache.has(msgIdStr)) { + // This message has already been seen. We don't re-publish messages that have already + // been published on the network. + if (ignoreDuplicatePublishError) { + this.metrics?.onPublishDuplicateMsg(topic); + return { recipients: [] }; + } + throw Error('PublishError.Duplicate'); + } + const { tosend, tosendCount } = this.selectPeersToPublish(topic); + const willSendToSelf = this.opts.emitSelf && this.subscriptions.has(topic); + // Current publish opt takes precedence global opts, while preserving false value + const allowPublishToZeroTopicPeers = opts?.allowPublishToZeroTopicPeers ?? this.opts.allowPublishToZeroTopicPeers; + if (tosend.size === 0 && !allowPublishToZeroTopicPeers && !willSendToSelf) { + throw Error('PublishError.NoPeersSubscribedToTopic'); + } + // If the message isn't a duplicate and we have sent it to some peers add it to the + // duplicate cache and memcache. + this.seenCache.put(msgIdStr); + // all published messages are valid + this.mcache.put({ msgId, msgIdStr }, rawMsg, true); + // If the message is anonymous or has a random author add it to the published message ids cache. + this.publishedMessageIds.put(msgIdStr); + const batchPublish = opts?.batchPublish ?? 
this.opts.batchPublish; + const rpc = createGossipRpc([rawMsg]); + if (batchPublish) { + this.sendRpcInBatch(tosend, rpc); + } + else { + // Send to set of peers aggregated from direct, mesh, fanout + for (const id of tosend) { + // sendRpc may mutate RPC message on piggyback, create a new message for each peer + const sent = this.sendRpc(id, rpc); + // did not actually send the message + if (!sent) { + tosend.delete(id); + } + } + } + const durationMs = Date.now() - startMs; + this.metrics?.onPublishMsg(topic, tosendCount, tosend.size, rawMsg.data != null ? rawMsg.data.length : 0, durationMs); + // Dispatch the message to the user if we are subscribed to the topic + if (willSendToSelf) { + tosend.add(this.components.peerId.toString()); + super.dispatchEvent(new CustomEvent$1('gossipsub:message', { + detail: { + propagationSource: this.components.peerId, + msgId: msgIdStr, + msg + } + })); + // TODO: Add option to switch between emit per topic or all messages in one + super.dispatchEvent(new CustomEvent$1('message', { detail: msg })); + } + return { + recipients: Array.from(tosend.values()).map((str) => peerIdFromString(str)) + }; + } + /** + * Send the same data in batch to tosend list without considering cached control messages + * This is not only faster but also avoid allocating memory for each peer + * see https://github.com/ChainSafe/js-libp2p-gossipsub/issues/344 + */ + sendRpcInBatch(tosend, rpc) { + const rpcBytes = RPC$1.encode(rpc); + const prefixedData = encode.single(rpcBytes); + for (const id of tosend) { + const outboundStream = this.streamsOutbound.get(id); + if (outboundStream == null) { + this.log(`Cannot send RPC to ${id} as there is no open stream to it available`); + tosend.delete(id); + continue; + } + try { + outboundStream.pushPrefixed(prefixedData); + } + catch (e) { + tosend.delete(id); + this.log.error(`Cannot send rpc to ${id}`, e); + } + this.metrics?.onRpcSent(rpc, rpcBytes.length); + } + } + /** + * This function should be called when 
`asyncValidation` is `true` after + * the message got validated by the caller. Messages are stored in the `mcache` and + * validation is expected to be fast enough that the messages should still exist in the cache. + * There are three possible validation outcomes and the outcome is given in acceptance. + * + * If acceptance = `MessageAcceptance.Accept` the message will get propagated to the + * network. The `propagation_source` parameter indicates who the message was received by and + * will not be forwarded back to that peer. + * + * If acceptance = `MessageAcceptance.Reject` the message will be deleted from the memcache + * and the Pโ‚„ penalty will be applied to the `propagationSource`. + * + * If acceptance = `MessageAcceptance.Ignore` the message will be deleted from the memcache + * but no Pโ‚„ penalty will be applied. + * + * This function will return true if the message was found in the cache and false if was not + * in the cache anymore. + * + * This should only be called once per message. + */ + reportMessageValidationResult(msgId, propagationSource, acceptance) { + let cacheEntry; + if (acceptance === TopicValidatorResult.Accept) { + cacheEntry = this.mcache.validate(msgId); + if (cacheEntry != null) { + const { message: rawMsg, originatingPeers } = cacheEntry; + // message is fully validated inform peer_score + this.score.deliverMessage(propagationSource, msgId, rawMsg.topic); + this.forwardMessage(msgId, cacheEntry.message, propagationSource, originatingPeers); + } + // else, Message not in cache. Ignoring forwarding + // eslint-disable-next-line @typescript-eslint/brace-style + } + // Not valid + else { + cacheEntry = this.mcache.remove(msgId); + if (cacheEntry != null) { + const rejectReason = rejectReasonFromAcceptance(acceptance); + const { message: rawMsg, originatingPeers } = cacheEntry; + // Tell peer_score about reject + // Reject the original source, and any duplicates we've seen from other peers. 
+ this.score.rejectMessage(propagationSource, msgId, rawMsg.topic, rejectReason); + for (const peer of originatingPeers) { + this.score.rejectMessage(peer, msgId, rawMsg.topic, rejectReason); + } + } + // else, Message not in cache. Ignoring forwarding + } + const firstSeenTimestampMs = this.score.messageFirstSeenTimestampMs(msgId); + this.metrics?.onReportValidation(cacheEntry, acceptance, firstSeenTimestampMs); + } + /** + * Sends a GRAFT message to a peer + */ + sendGraft(id, topic) { + const graft = [ + { + topicID: topic + } + ]; + const out = createGossipRpc([], { graft }); + this.sendRpc(id, out); + } + /** + * Sends a PRUNE message to a peer + */ + async sendPrune(id, topic) { + // this is only called from leave() function + const onUnsubscribe = true; + const prune = [await this.makePrune(id, topic, this.opts.doPX, onUnsubscribe)]; + const out = createGossipRpc([], { prune }); + this.sendRpc(id, out); + } + /** + * Send an rpc object to a peer + */ + sendRpc(id, rpc) { + const outboundStream = this.streamsOutbound.get(id); + if (outboundStream == null) { + this.log(`Cannot send RPC to ${id} as there is no open stream to it available`); + return false; + } + // piggyback control message retries + const ctrl = this.control.get(id); + if (ctrl != null) { + this.piggybackControl(id, rpc, ctrl); + this.control.delete(id); + } + // piggyback gossip + const ihave = this.gossip.get(id); + if (ihave != null) { + this.piggybackGossip(id, rpc, ihave); + this.gossip.delete(id); + } + const rpcBytes = RPC$1.encode(rpc); + try { + outboundStream.push(rpcBytes); + } + catch (e) { + this.log.error(`Cannot send rpc to ${id}`, e); + // if the peer had control messages or gossip, re-attach + if (ctrl != null) { + this.control.set(id, ctrl); + } + if (ihave != null) { + this.gossip.set(id, ihave); + } + return false; + } + this.metrics?.onRpcSent(rpc, rpcBytes.length); + if (rpc.control?.graft != null) { + for (const topic of rpc.control?.graft) { + if (topic.topicID != null) 
{ + this.safeDispatchEvent('gossipsub:graft', { detail: { peerId: id, topic: topic.topicID, direction: 'outbound' } }); + } + } + } + if (rpc.control?.prune != null) { + for (const topic of rpc.control?.prune) { + if (topic.topicID != null) { + this.safeDispatchEvent('gossipsub:prune', { detail: { peerId: id, topic: topic.topicID, direction: 'outbound' } }); + } + } + } + return true; + } + /** Mutates `outRpc` adding graft and prune control messages */ + piggybackControl(id, outRpc, ctrl) { + const rpc = ensureControl(outRpc); + for (const graft of ctrl.graft) { + if (graft.topicID != null && (this.mesh.get(graft.topicID)?.has(id) ?? false)) { + rpc.control.graft.push(graft); + } + } + for (const prune of ctrl.prune) { + if (prune.topicID != null && !(this.mesh.get(prune.topicID)?.has(id) ?? false)) { + rpc.control.prune.push(prune); + } + } + } + /** Mutates `outRpc` adding ihave control messages */ + piggybackGossip(id, outRpc, ihave) { + const rpc = ensureControl(outRpc); + rpc.control.ihave = ihave; + } + /** + * Send graft and prune messages + * + * @param tograft - peer id => topic[] + * @param toprune - peer id => topic[] + */ + async sendGraftPrune(tograft, toprune, noPX) { + const doPX = this.opts.doPX; + const onUnsubscribe = false; + for (const [id, topics] of tograft) { + const graft = topics.map((topicID) => ({ topicID })); + let prune = []; + // If a peer also has prunes, process them now + const pruning = toprune.get(id); + if (pruning != null) { + prune = await Promise.all(pruning.map(async (topicID) => this.makePrune(id, topicID, doPX && !(noPX.get(id) ?? false), onUnsubscribe))); + toprune.delete(id); + } + this.sendRpc(id, createGossipRpc([], { graft, prune })); + } + for (const [id, topics] of toprune) { + const prune = await Promise.all(topics.map(async (topicID) => this.makePrune(id, topicID, doPX && !(noPX.get(id) ?? 
false), onUnsubscribe))); + this.sendRpc(id, createGossipRpc([], { prune })); + } + } + /** + * Emits gossip - Send IHAVE messages to a random set of gossip peers + */ + emitGossip(peersToGossipByTopic) { + const gossipIDsByTopic = this.mcache.getGossipIDs(new Set(peersToGossipByTopic.keys())); + for (const [topic, peersToGossip] of peersToGossipByTopic) { + this.doEmitGossip(topic, peersToGossip, gossipIDsByTopic.get(topic) ?? []); + } + } + /** + * Send gossip messages to GossipFactor peers above threshold with a minimum of D_lazy + * Peers are randomly selected from the heartbeat which exclude mesh + fanout peers + * We also exclude direct peers, as there is no reason to emit gossip to them + * + * @param topic + * @param candidateToGossip - peers to gossip + * @param messageIDs - message ids to gossip + */ + doEmitGossip(topic, candidateToGossip, messageIDs) { + if (messageIDs.length === 0) { + return; + } + // shuffle to emit in random order + shuffle(messageIDs); + // if we are emitting more than GossipsubMaxIHaveLength ids, truncate the list + if (messageIDs.length > GossipsubMaxIHaveLength) { + // we do the truncation (with shuffling) per peer below + this.log('too many messages for gossip; will truncate IHAVE list (%d messages)', messageIDs.length); + } + if (candidateToGossip.size === 0) + return; + let target = this.opts.Dlazy; + const factor = GossipsubGossipFactor * candidateToGossip.size; + let peersToGossip = candidateToGossip; + if (factor > target) { + target = factor; + } + if (target > peersToGossip.size) { + target = peersToGossip.size; + } + else { + // only shuffle if needed + peersToGossip = shuffle(Array.from(peersToGossip)).slice(0, target); + } + // Emit the IHAVE gossip to the selected peers up to the target + peersToGossip.forEach((id) => { + let peerMessageIDs = messageIDs; + if (messageIDs.length > GossipsubMaxIHaveLength) { + // shuffle and slice message IDs per peer so that we emit a different set for each peer + // we have enough 
reduncancy in the system that this will significantly increase the message + // coverage when we do truncate + peerMessageIDs = shuffle(peerMessageIDs.slice()).slice(0, GossipsubMaxIHaveLength); + } + this.pushGossip(id, { + topicID: topic, + messageIDs: peerMessageIDs + }); + }); + } + /** + * Flush gossip and control messages + */ + flush() { + // send gossip first, which will also piggyback control + for (const [peer, ihave] of this.gossip.entries()) { + this.gossip.delete(peer); + this.sendRpc(peer, createGossipRpc([], { ihave })); + } + // send the remaining control messages + for (const [peer, control] of this.control.entries()) { + this.control.delete(peer); + const out = createGossipRpc([], { graft: control.graft, prune: control.prune }); + this.sendRpc(peer, out); + } + } + /** + * Adds new IHAVE messages to pending gossip + */ + pushGossip(id, controlIHaveMsgs) { + this.log('Add gossip to %s', id); + const gossip = this.gossip.get(id) ?? []; + this.gossip.set(id, gossip.concat(controlIHaveMsgs)); + } + /** + * Make a PRUNE control message for a peer in a topic + */ + async makePrune(id, topic, doPX, onUnsubscribe) { + this.score.prune(id, topic); + if (this.streamsOutbound.get(id)?.protocol === GossipsubIDv10) { + // Gossipsub v1.0 -- no backoff, the peer won't be able to parse it anyway + return { + topicID: topic, + peers: [] + }; + } + // backoff is measured in seconds + // GossipsubPruneBackoff and GossipsubUnsubscribeBackoff are measured in milliseconds + // The protobuf has it as a uint64 + const backoffMs = onUnsubscribe ? 
this.opts.unsubcribeBackoff : this.opts.pruneBackoff; + const backoff = backoffMs / 1000; + this.doAddBackoff(id, topic, backoffMs); + if (!doPX) { + return { + topicID: topic, + peers: [], + backoff + }; + } + // select peers for Peer eXchange + const peers = this.getRandomGossipPeers(topic, this.opts.prunePeers, (xid) => { + return xid !== id && this.score.score(xid) >= 0; + }); + const px = await Promise.all(Array.from(peers).map(async (peerId) => { + // see if we have a signed record to send back; if we don't, just send + // the peer ID and let the pruned peer find them in the DHT -- we can't trust + // unsigned address records through PX anyways + // Finding signed records in the DHT is not supported at the time of writing in js-libp2p + const id = peerIdFromString(peerId); + let peerInfo; + try { + peerInfo = await this.components.peerStore.get(id); + } + catch (err) { + if (err.code !== 'ERR_NOT_FOUND') { + throw err; + } + } + return { + peerID: id.toBytes(), + signedPeerRecord: peerInfo?.peerRecordEnvelope + }; + })); + return { + topicID: topic, + peers: px, + backoff + }; + } + runHeartbeat = () => { + const timer = this.metrics?.heartbeatDuration.startTimer(); + this.heartbeat() + .catch((err) => { + this.log('Error running heartbeat', err); + }) + .finally(() => { + if (timer != null) { + timer(); + } + // Schedule the next run if still in started status + if (this.status.code === GossipStatusCode.started) { + // Clear previous timeout before overwriting `status.heartbeatTimeout`, it should be completed tho. + clearTimeout(this.status.heartbeatTimeout); + // NodeJS setInterval function is innexact, calls drift by a few miliseconds on each call. + // To run the heartbeat precisely setTimeout() must be used recomputing the delay on every loop. 
+ let msToNextHeartbeat = this.opts.heartbeatInterval - ((Date.now() - this.status.hearbeatStartMs) % this.opts.heartbeatInterval); + // If too close to next heartbeat, skip one + if (msToNextHeartbeat < this.opts.heartbeatInterval * 0.25) { + msToNextHeartbeat += this.opts.heartbeatInterval; + this.metrics?.heartbeatSkipped.inc(); + } + this.status.heartbeatTimeout = setTimeout(this.runHeartbeat, msToNextHeartbeat); + } + }); + }; + /** + * Maintains the mesh and fanout maps in gossipsub. + */ + async heartbeat() { + const { D, Dlo, Dhi, Dscore, Dout, fanoutTTL } = this.opts; + this.heartbeatTicks++; + // cache scores throught the heartbeat + const scores = new Map(); + const getScore = (id) => { + let s = scores.get(id); + if (s === undefined) { + s = this.score.score(id); + scores.set(id, s); + } + return s; + }; + // peer id => topic[] + const tograft = new Map(); + // peer id => topic[] + const toprune = new Map(); + // peer id => don't px + const noPX = new Map(); + // clean up expired backoffs + this.clearBackoff(); + // clean up peerhave/iasked counters + this.peerhave.clear(); + this.metrics?.cacheSize.set({ cache: 'iasked' }, this.iasked.size); + this.iasked.clear(); + // apply IWANT request penalties + this.applyIwantPenalties(); + // ensure direct peers are connected + if (this.heartbeatTicks % this.opts.directConnectTicks === 0) { + // we only do this every few ticks to allow pending connections to complete and account for restarts/downtime + await this.directConnect(); + } + // EXTRA: Prune caches + this.fastMsgIdCache?.prune(); + this.seenCache.prune(); + this.gossipTracer.prune(); + this.publishedMessageIds.prune(); + /** + * Instead of calling getRandomGossipPeers multiple times to: + * + get more mesh peers + * + more outbound peers + * + oppportunistic grafting + * + emitGossip + * + * We want to loop through the topic peers only a single time and prepare gossip peers for all topics to improve the performance + */ + const peersToGossipByTopic = 
new Map(); + // maintain the mesh for topics we have joined + // eslint-disable-next-line complexity + this.mesh.forEach((peers, topic) => { + const peersInTopic = this.topics.get(topic); + const candidateMeshPeers = new Set(); + const peersToGossip = new Set(); + peersToGossipByTopic.set(topic, peersToGossip); + if (peersInTopic != null) { + const shuffledPeers = shuffle(Array.from(peersInTopic)); + const backoff = this.backoff.get(topic); + for (const id of shuffledPeers) { + const peerStreams = this.streamsOutbound.get(id); + if ((peerStreams != null) && + this.multicodecs.includes(peerStreams.protocol) && + !peers.has(id) && + !this.direct.has(id)) { + const score = getScore(id); + if (((backoff == null) || !backoff.has(id)) && score >= 0) + candidateMeshPeers.add(id); + // instead of having to find gossip peers after heartbeat which require another loop + // we prepare peers to gossip in a topic within heartbeat to improve performance + if (score >= this.opts.scoreThresholds.gossipThreshold) + peersToGossip.add(id); + } + } + } + // prune/graft helper functions (defined per topic) + const prunePeer = (id, reason) => { + this.log('HEARTBEAT: Remove mesh link to %s in %s', id, topic); + // no need to update peer score here as we do it in makePrune + // add prune backoff record + this.addBackoff(id, topic); + // remove peer from mesh + peers.delete(id); + // after pruning a peer from mesh, we want to gossip topic to it if its score meet the gossip threshold + if (getScore(id) >= this.opts.scoreThresholds.gossipThreshold) + peersToGossip.add(id); + this.metrics?.onRemoveFromMesh(topic, reason, 1); + // add to toprune + const topics = toprune.get(id); + if (topics == null) { + toprune.set(id, [topic]); + } + else { + topics.push(topic); + } + }; + const graftPeer = (id, reason) => { + this.log('HEARTBEAT: Add mesh link to %s in %s', id, topic); + // update peer score + this.score.graft(id, topic); + // add peer to mesh + peers.add(id); + // when we add a new mesh 
peer, we don't want to gossip messages to it + peersToGossip.delete(id); + this.metrics?.onAddToMesh(topic, reason, 1); + // add to tograft + const topics = tograft.get(id); + if (topics == null) { + tograft.set(id, [topic]); + } + else { + topics.push(topic); + } + }; + // drop all peers with negative score, without PX + peers.forEach((id) => { + const score = getScore(id); + // Record the score + if (score < 0) { + this.log('HEARTBEAT: Prune peer %s with negative score: score=%d, topic=%s', id, score, topic); + prunePeer(id, ChurnReason.BadScore); + noPX.set(id, true); + } + }); + // do we have enough peers? + if (peers.size < Dlo) { + const ineed = D - peers.size; + // slice up to first `ineed` items and remove them from candidateMeshPeers + // same to `const newMeshPeers = candidateMeshPeers.slice(0, ineed)` + const newMeshPeers = removeFirstNItemsFromSet(candidateMeshPeers, ineed); + newMeshPeers.forEach((p) => { + graftPeer(p, InclusionReason.NotEnough); + }); + } + // do we have to many peers? + if (peers.size > Dhi) { + let peersArray = Array.from(peers); + // sort by score + peersArray.sort((a, b) => getScore(b) - getScore(a)); + // We keep the first D_score peers by score and the remaining up to D randomly + // under the constraint that we keep D_out peers in the mesh (if we have that many) + peersArray = peersArray.slice(0, Dscore).concat(shuffle(peersArray.slice(Dscore))); + // count the outbound peers we are keeping + let outbound = 0; + peersArray.slice(0, D).forEach((p) => { + if (this.outbound.get(p) ?? 
false) { + outbound++; + } + }); + // if it's less than D_out, bubble up some outbound peers from the random selection + if (outbound < Dout) { + const rotate = (i) => { + // rotate the peersArray to the right and put the ith peer in the front + const p = peersArray[i]; + for (let j = i; j > 0; j--) { + peersArray[j] = peersArray[j - 1]; + } + peersArray[0] = p; + }; + // first bubble up all outbound peers already in the selection to the front + if (outbound > 0) { + let ihave = outbound; + for (let i = 1; i < D && ihave > 0; i++) { + // eslint-disable-next-line max-depth + if (this.outbound.get(peersArray[i]) ?? false) { + rotate(i); + ihave--; + } + } + } + // now bubble up enough outbound peers outside the selection to the front + let ineed = D - outbound; + for (let i = D; i < peersArray.length && ineed > 0; i++) { + if (this.outbound.get(peersArray[i]) ?? false) { + rotate(i); + ineed--; + } + } + } + // prune the excess peers + peersArray.slice(D).forEach((p) => { + prunePeer(p, ChurnReason.Excess); + }); + } + // do we have enough outbound peers? + if (peers.size >= Dlo) { + // count the outbound peers we have + let outbound = 0; + peers.forEach((p) => { + if (this.outbound.get(p) ?? false) { + outbound++; + } + }); + // if it's less than D_out, select some peers with outbound connections and graft them + if (outbound < Dout) { + const ineed = Dout - outbound; + const newMeshPeers = removeItemsFromSet(candidateMeshPeers, ineed, (id) => this.outbound.get(id) === true); + newMeshPeers.forEach((p) => { + graftPeer(p, InclusionReason.Outbound); + }); + } + } + // should we try to improve the mesh with opportunistic grafting? + if (this.heartbeatTicks % this.opts.opportunisticGraftTicks === 0 && peers.size > 1) { + // Opportunistic grafting works as follows: we check the median score of peers in the + // mesh; if this score is below the opportunisticGraftThreshold, we select a few peers at + // random with score over the median. 
+ // The intention is to (slowly) improve an underperforming mesh by introducing good + // scoring peers that may have been gossiping at us. This allows us to get out of sticky + // situations where we are stuck with poor peers and also recover from churn of good peers. + // now compute the median peer score in the mesh + const peersList = Array.from(peers).sort((a, b) => getScore(a) - getScore(b)); + const medianIndex = Math.floor(peers.size / 2); + const medianScore = getScore(peersList[medianIndex]); + // if the median score is below the threshold, select a better peer (if any) and GRAFT + if (medianScore < this.opts.scoreThresholds.opportunisticGraftThreshold) { + const ineed = this.opts.opportunisticGraftPeers; + const newMeshPeers = removeItemsFromSet(candidateMeshPeers, ineed, (id) => getScore(id) > medianScore); + for (const id of newMeshPeers) { + this.log('HEARTBEAT: Opportunistically graft peer %s on topic %s', id, topic); + graftPeer(id, InclusionReason.Opportunistic); + } + } + } + }); + // expire fanout for topics we haven't published to in a while + const now = Date.now(); + this.fanoutLastpub.forEach((lastpb, topic) => { + if (lastpb + fanoutTTL < now) { + this.fanout.delete(topic); + this.fanoutLastpub.delete(topic); + } + }); + // maintain our fanout for topics we are publishing but we have not joined + this.fanout.forEach((fanoutPeers, topic) => { + // checks whether our peers are still in the topic and have a score above the publish threshold + const topicPeers = this.topics.get(topic); + fanoutPeers.forEach((id) => { + if (!(topicPeers?.has(id) ?? false) || getScore(id) < this.opts.scoreThresholds.publishThreshold) { + fanoutPeers.delete(id); + } + }); + const peersInTopic = this.topics.get(topic); + const candidateFanoutPeers = []; + // the fanout map contains topics to which we are not subscribed. 
+ const peersToGossip = new Set(); + peersToGossipByTopic.set(topic, peersToGossip); + if (peersInTopic != null) { + const shuffledPeers = shuffle(Array.from(peersInTopic)); + for (const id of shuffledPeers) { + const peerStreams = this.streamsOutbound.get(id); + if ((peerStreams != null) && + this.multicodecs.includes(peerStreams.protocol) && + !fanoutPeers.has(id) && + !this.direct.has(id)) { + const score = getScore(id); + if (score >= this.opts.scoreThresholds.publishThreshold) + candidateFanoutPeers.push(id); + // instead of having to find gossip peers after heartbeat which require another loop + // we prepare peers to gossip in a topic within heartbeat to improve performance + if (score >= this.opts.scoreThresholds.gossipThreshold) + peersToGossip.add(id); + } + } + } + // do we need more peers? + if (fanoutPeers.size < D) { + const ineed = D - fanoutPeers.size; + candidateFanoutPeers.slice(0, ineed).forEach((id) => { + fanoutPeers.add(id); + peersToGossip?.delete(id); + }); + } + }); + this.emitGossip(peersToGossipByTopic); + // send coalesced GRAFT/PRUNE messages (will piggyback gossip) + await this.sendGraftPrune(tograft, toprune, noPX); + // flush pending gossip that wasn't piggybacked above + this.flush(); + // advance the message history window + this.mcache.shift(); + this.dispatchEvent(new CustomEvent$1('gossipsub:heartbeat')); + } + /** + * Given a topic, returns up to count peers subscribed to that topic + * that pass an optional filter function + * + * @param topic + * @param count + * @param filter - a function to filter acceptable peers + */ + getRandomGossipPeers(topic, count, filter = () => true) { + const peersInTopic = this.topics.get(topic); + if (peersInTopic == null) { + return new Set(); + } + // Adds all peers using our protocol + // that also pass the filter function + let peers = []; + peersInTopic.forEach((id) => { + const peerStreams = this.streamsOutbound.get(id); + if (peerStreams == null) { + return; + } + if 
(this.multicodecs.includes(peerStreams.protocol) && filter(id)) { + peers.push(id); + } + }); + // Pseudo-randomly shuffles peers + peers = shuffle(peers); + if (count > 0 && peers.length > count) { + peers = peers.slice(0, count); + } + return new Set(peers); + } + onScrapeMetrics(metrics) { + /* Data structure sizes */ + metrics.mcacheSize.set(this.mcache.size); + metrics.mcacheNotValidatedCount.set(this.mcache.notValidatedCount); + // Arbitrary size + metrics.cacheSize.set({ cache: 'direct' }, this.direct.size); + metrics.cacheSize.set({ cache: 'seenCache' }, this.seenCache.size); + metrics.cacheSize.set({ cache: 'fastMsgIdCache' }, this.fastMsgIdCache?.size ?? 0); + metrics.cacheSize.set({ cache: 'publishedMessageIds' }, this.publishedMessageIds.size); + metrics.cacheSize.set({ cache: 'mcache' }, this.mcache.size); + metrics.cacheSize.set({ cache: 'score' }, this.score.size); + metrics.cacheSize.set({ cache: 'gossipTracer.promises' }, this.gossipTracer.size); + metrics.cacheSize.set({ cache: 'gossipTracer.requests' }, this.gossipTracer.requestMsByMsgSize); + // Bounded by topic + metrics.cacheSize.set({ cache: 'topics' }, this.topics.size); + metrics.cacheSize.set({ cache: 'subscriptions' }, this.subscriptions.size); + metrics.cacheSize.set({ cache: 'mesh' }, this.mesh.size); + metrics.cacheSize.set({ cache: 'fanout' }, this.fanout.size); + // Bounded by peer + metrics.cacheSize.set({ cache: 'peers' }, this.peers.size); + metrics.cacheSize.set({ cache: 'streamsOutbound' }, this.streamsOutbound.size); + metrics.cacheSize.set({ cache: 'streamsInbound' }, this.streamsInbound.size); + metrics.cacheSize.set({ cache: 'acceptFromWhitelist' }, this.acceptFromWhitelist.size); + metrics.cacheSize.set({ cache: 'gossip' }, this.gossip.size); + metrics.cacheSize.set({ cache: 'control' }, this.control.size); + metrics.cacheSize.set({ cache: 'peerhave' }, this.peerhave.size); + metrics.cacheSize.set({ cache: 'outbound' }, this.outbound.size); + // 2D nested data structure + 
let backoffSize = 0; + const now = Date.now(); + metrics.connectedPeersBackoffSec.reset(); + for (const backoff of this.backoff.values()) { + backoffSize += backoff.size; + for (const [peer, expiredMs] of backoff.entries()) { + if (this.peers.has(peer)) { + metrics.connectedPeersBackoffSec.observe(Math.max(0, expiredMs - now) / 1000); + } + } + } + metrics.cacheSize.set({ cache: 'backoff' }, backoffSize); + // Peer counts + for (const [topicStr, peers] of this.topics) { + metrics.topicPeersCount.set({ topicStr }, peers.size); + } + for (const [topicStr, peers] of this.mesh) { + metrics.meshPeerCounts.set({ topicStr }, peers.size); + } + // Peer scores + const scores = []; + const scoreByPeer = new Map(); + metrics.behaviourPenalty.reset(); + for (const peerIdStr of this.peers.keys()) { + const score = this.score.score(peerIdStr); + scores.push(score); + scoreByPeer.set(peerIdStr, score); + metrics.behaviourPenalty.observe(this.score.peerStats.get(peerIdStr)?.behaviourPenalty ?? 0); + } + metrics.registerScores(scores, this.opts.scoreThresholds); + // Breakdown score per mesh topicLabel + metrics.registerScorePerMesh(this.mesh, scoreByPeer); + // Breakdown on each score weight + const sw = computeAllPeersScoreWeights(this.peers.keys(), this.score.peerStats, this.score.params, this.score.peerIPs, metrics.topicStrToLabel); + metrics.registerScoreWeights(sw); + } + tagMeshPeer = (evt) => { + const { peerId, topic } = evt.detail; + this.components.peerStore.merge(peerIdFromString(peerId), { + tags: { + [topic]: { + value: 100 + } + } + }).catch((err) => { this.log.error('Error tagging peer %s with topic %s', peerId, topic, err); }); + }; + untagMeshPeer = (evt) => { + const { peerId, topic } = evt.detail; + this.components.peerStore.merge(peerIdFromString(peerId), { + tags: { + [topic]: undefined + } + }).catch((err) => { this.log.error('Error untagging peer %s with topic %s', peerId, topic, err); }); + }; + } + function gossipsub(init = {}) { + return (components) => 
new GossipSub(components, init); + } + + const IDENTIFY_PROTOCOL_VERSION = '0.1.0'; + const MULTICODEC_IDENTIFY_PROTOCOL_NAME = 'id'; + const MULTICODEC_IDENTIFY_PUSH_PROTOCOL_NAME = 'id/push'; + const MULTICODEC_IDENTIFY_PROTOCOL_VERSION = '1.0.0'; + const MULTICODEC_IDENTIFY_PUSH_PROTOCOL_VERSION = '1.0.0'; + // https://github.com/libp2p/go-libp2p/blob/8d2e54e1637041d5cf4fac1e531287560bd1f4ac/p2p/protocol/identify/id.go#L52 + const MAX_IDENTIFY_MESSAGE_SIZE = 1024 * 8; + // https://github.com/libp2p/go-libp2p/blob/0385ec924bad172f74a74db09939e97c079b1420/p2p/protocol/identify/id.go#L47C7-L47C25 + const MAX_PUSH_CONCURRENCY = 32; + + /* eslint-disable import/export */ + /* eslint-disable complexity */ + /* eslint-disable @typescript-eslint/no-namespace */ + /* eslint-disable @typescript-eslint/no-unnecessary-boolean-literal-compare */ + /* eslint-disable @typescript-eslint/no-empty-interface */ + var Identify$1; + (function (Identify) { + let _codec; + Identify.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if (obj.protocolVersion != null) { + w.uint32(42); + w.string(obj.protocolVersion); + } + if (obj.agentVersion != null) { + w.uint32(50); + w.string(obj.agentVersion); + } + if (obj.publicKey != null) { + w.uint32(10); + w.bytes(obj.publicKey); + } + if (obj.listenAddrs != null) { + for (const value of obj.listenAddrs) { + w.uint32(18); + w.bytes(value); + } + } + if (obj.observedAddr != null) { + w.uint32(34); + w.bytes(obj.observedAddr); + } + if (obj.protocols != null) { + for (const value of obj.protocols) { + w.uint32(26); + w.string(value); + } + } + if (obj.signedPeerRecord != null) { + w.uint32(66); + w.bytes(obj.signedPeerRecord); + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length) => { + const obj = { + listenAddrs: [], + protocols: [] + }; + const end = length == null ? 
reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 5: + obj.protocolVersion = reader.string(); + break; + case 6: + obj.agentVersion = reader.string(); + break; + case 1: + obj.publicKey = reader.bytes(); + break; + case 2: + obj.listenAddrs.push(reader.bytes()); + break; + case 4: + obj.observedAddr = reader.bytes(); + break; + case 3: + obj.protocols.push(reader.string()); + break; + case 8: + obj.signedPeerRecord = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return obj; + }); + } + return _codec; + }; + Identify.encode = (obj) => { + return encodeMessage(obj, Identify.codec()); + }; + Identify.decode = (buf) => { + return decodeMessage(buf, Identify.codec()); + }; + })(Identify$1 || (Identify$1 = {})); + + // https://github.com/electron/electron/issues/2288 + function isElectron$1() { + // Renderer process + if (typeof window !== 'undefined' && typeof window.process === 'object' && window.process.type === 'renderer') { + return true; + } + + // Main process + if (typeof process !== 'undefined' && typeof process.versions === 'object' && !!process.versions.electron) { + return true; + } + + // Detect the user agent when the `nodeIntegration` option is set to false + if (typeof navigator === 'object' && typeof navigator.userAgent === 'string' && navigator.userAgent.indexOf('Electron') >= 0) { + return true; + } + + return false; + } + + var isElectron_1 = isElectron$1; + + var detectElectron = /*@__PURE__*/getDefaultExportFromCjs(isElectron_1); + + const isEnvWithDom = typeof window === 'object' && typeof document === 'object' && document.nodeType === 9; + const isElectron = detectElectron(); + + /** + * Detects browser main thread **NOT** web worker or service worker + */ + const isBrowser = isEnvWithDom && !isElectron; + const isElectronMain = isElectron && !isEnvWithDom; + const isElectronRenderer = isElectron && isEnvWithDom; + const isNode = typeof 
globalThis.process !== 'undefined' && typeof globalThis.process.release !== 'undefined' && globalThis.process.release.name === 'node' && !isElectron; + // @ts-ignore + // eslint-disable-next-line no-undef + const isWebWorker = typeof importScripts === 'function' && typeof self !== 'undefined' && typeof WorkerGlobalScope !== 'undefined' && self instanceof WorkerGlobalScope; + + // defeat bundlers replacing process.env.NODE_ENV with "development" or whatever + typeof globalThis.process !== 'undefined' && typeof globalThis.process.env !== 'undefined' && globalThis.process.env['NODE' + (() => '_')() + 'ENV'] === 'test'; + const isReactNative = typeof navigator !== 'undefined' && navigator.product === 'ReactNative'; + + const defaultValues = { + protocolPrefix: 'ipfs', + timeout: 5000, + maxInboundStreams: 1, + maxOutboundStreams: 1, + maxObservedAddresses: 10, + maxMessageSize: MAX_IDENTIFY_MESSAGE_SIZE, + runOnConnectionOpen: true, + runOnSelfUpdate: true, + runOnTransientConnection: true, + concurrency: MAX_PUSH_CONCURRENCY + }; + /** + * Takes the `addr` and converts it to a Multiaddr if possible + */ + function getCleanMultiaddr(addr) { + if (addr != null && addr.length > 0) { + try { + return multiaddr(addr); + } + catch { + } + } + } + function getAgentVersion(nodeInfo, agentVersion) { + if (agentVersion != null) { + return agentVersion; + } + agentVersion = `${nodeInfo.name}/${nodeInfo.version}`; + // Append user agent version to default AGENT_VERSION depending on the environment + if (isNode || isElectronMain) { + agentVersion += ` UserAgent=${globalThis.process.version}`; + } + else if (isBrowser || isWebWorker || isElectronRenderer || isReactNative) { + agentVersion += ` UserAgent=${globalThis.navigator.userAgent}`; + } + return agentVersion; + } + async function consumeIdentifyMessage(peerStore, events, log, connection, message) { + log('received identify from %p', connection.remotePeer); + if (message == null) { + throw new CodeError$2('message was null or 
undefined', 'ERR_INVALID_MESSAGE'); + } + const peer = {}; + if (message.listenAddrs.length > 0) { + peer.addresses = message.listenAddrs.map(buf => ({ + isCertified: false, + multiaddr: multiaddr(buf) + })); + } + if (message.protocols.length > 0) { + peer.protocols = message.protocols; + } + if (message.publicKey != null) { + peer.publicKey = message.publicKey; + const peerId = await peerIdFromKeys(message.publicKey); + if (!peerId.equals(connection.remotePeer)) { + throw new CodeError$2('public key did not match remote PeerId', 'ERR_INVALID_PUBLIC_KEY'); + } + } + let output; + // if the peer record has been sent, prefer the addresses in the record as they are signed by the remote peer + if (message.signedPeerRecord != null) { + log('received signedPeerRecord from %p', connection.remotePeer); + let peerRecordEnvelope = message.signedPeerRecord; + const envelope = await RecordEnvelope.openAndCertify(peerRecordEnvelope, PeerRecord.DOMAIN); + let peerRecord = PeerRecord.createFromProtobuf(envelope.payload); + // Verify peerId + if (!peerRecord.peerId.equals(envelope.peerId)) { + throw new CodeError$2('signing key does not match PeerId in the PeerRecord', 'ERR_INVALID_SIGNING_KEY'); + } + // Make sure remote peer is the one sending the record + if (!connection.remotePeer.equals(peerRecord.peerId)) { + throw new CodeError$2('signing key does not match remote PeerId', 'ERR_INVALID_PEER_RECORD_KEY'); + } + let existingPeer; + try { + existingPeer = await peerStore.get(peerRecord.peerId); + } + catch (err) { + if (err.code !== 'ERR_NOT_FOUND') { + throw err; + } + } + if (existingPeer != null) { + // don't lose any existing metadata + peer.metadata = existingPeer.metadata; + // if we have previously received a signed record for this peer, compare it to the incoming one + if (existingPeer.peerRecordEnvelope != null) { + const storedEnvelope = await RecordEnvelope.createFromProtobuf(existingPeer.peerRecordEnvelope); + const storedRecord = 
PeerRecord.createFromProtobuf(storedEnvelope.payload); + // ensure seq is greater than, or equal to, the last received + if (storedRecord.seqNumber >= peerRecord.seqNumber) { + log('sequence number was lower or equal to existing sequence number - stored: %d received: %d', storedRecord.seqNumber, peerRecord.seqNumber); + peerRecord = storedRecord; + peerRecordEnvelope = existingPeer.peerRecordEnvelope; + } + } + } + // store the signed record for next time + peer.peerRecordEnvelope = peerRecordEnvelope; + // override the stored addresses with the signed multiaddrs + peer.addresses = peerRecord.multiaddrs.map(multiaddr => ({ + isCertified: true, + multiaddr + })); + output = { + seq: peerRecord.seqNumber, + addresses: peerRecord.multiaddrs + }; + } + else { + log('%p did not send a signed peer record', connection.remotePeer); + } + log('patching %p with', connection.remotePeer, peer); + await peerStore.patch(connection.remotePeer, peer); + if (message.agentVersion != null || message.protocolVersion != null) { + const metadata = {}; + if (message.agentVersion != null) { + metadata.AgentVersion = fromString(message.agentVersion); + } + if (message.protocolVersion != null) { + metadata.ProtocolVersion = fromString(message.protocolVersion); + } + log('merging %p metadata', connection.remotePeer, metadata); + await peerStore.merge(connection.remotePeer, { + metadata + }); + } + const result = { + peerId: connection.remotePeer, + protocolVersion: message.protocolVersion, + agentVersion: message.agentVersion, + publicKey: message.publicKey, + listenAddrs: message.listenAddrs.map(buf => multiaddr(buf)), + observedAddr: message.observedAddr == null ? 
undefined : multiaddr(message.observedAddr), + protocols: message.protocols, + signedPeerRecord: output, + connection + }; + events.safeDispatchEvent('peer:identify', { detail: result }); + return result; + } + class AbstractIdentify { + host; + protocol; + started; + timeout; + peerId; + peerStore; + registrar; + addressManager; + maxInboundStreams; + maxOutboundStreams; + maxMessageSize; + maxObservedAddresses; + events; + runOnTransientConnection; + log; + constructor(components, init) { + this.protocol = init.protocol; + this.started = false; + this.peerId = components.peerId; + this.peerStore = components.peerStore; + this.registrar = components.registrar; + this.addressManager = components.addressManager; + this.events = components.events; + this.log = init.log; + this.timeout = init.timeout ?? defaultValues.timeout; + this.maxInboundStreams = init.maxInboundStreams ?? defaultValues.maxInboundStreams; + this.maxOutboundStreams = init.maxOutboundStreams ?? defaultValues.maxOutboundStreams; + this.maxMessageSize = init.maxMessageSize ?? defaultValues.maxMessageSize; + this.maxObservedAddresses = init.maxObservedAddresses ?? defaultValues.maxObservedAddresses; + this.runOnTransientConnection = init.runOnTransientConnection ?? defaultValues.runOnTransientConnection; + // Store self host metadata + this.host = { + protocolVersion: `${init.protocolPrefix ?? 
defaultValues.protocolPrefix}/${IDENTIFY_PROTOCOL_VERSION}`, + agentVersion: getAgentVersion(components.nodeInfo, init.agentVersion) + }; + } + isStarted() { + return this.started; + } + async start() { + if (this.started) { + return; + } + await this.peerStore.merge(this.peerId, { + metadata: { + AgentVersion: fromString(this.host.agentVersion), + ProtocolVersion: fromString(this.host.protocolVersion) + } + }); + await this.registrar.handle(this.protocol, (data) => { + void this.handleProtocol(data).catch(err => { + this.log.error(err); + }); + }, { + maxInboundStreams: this.maxInboundStreams, + maxOutboundStreams: this.maxOutboundStreams, + runOnTransientConnection: this.runOnTransientConnection + }); + this.started = true; + } + async stop() { + await this.registrar.unhandle(this.protocol); + this.started = false; + } + } + + /* eslint-disable complexity */ + class IdentifyPush extends AbstractIdentify { + connectionManager; + concurrency; + constructor(components, init = {}) { + super(components, { + ...init, + protocol: `/${init.protocolPrefix ?? defaultValues.protocolPrefix}/${MULTICODEC_IDENTIFY_PUSH_PROTOCOL_NAME}/${MULTICODEC_IDENTIFY_PUSH_PROTOCOL_VERSION}`, + log: components.logger.forComponent('libp2p:identify-push') + }); + this.connectionManager = components.connectionManager; + this.concurrency = init.concurrency ?? defaultValues.concurrency; + if ((init.runOnSelfUpdate ?? 
defaultValues.runOnSelfUpdate)) { + // When self peer record changes, trigger identify-push + components.events.addEventListener('self:peer:update', (evt) => { + void this.push().catch(err => { this.log.error(err); }); + }); + } + } + /** + * Calls `push` on all peer connections + */ + async push() { + // Do not try to push if we are not running + if (!this.isStarted()) { + return; + } + const listenAddresses = this.addressManager.getAddresses().map(ma => ma.decapsulateCode(getProtocol('p2p').code)); + const peerRecord = new PeerRecord({ + peerId: this.peerId, + multiaddrs: listenAddresses + }); + const signedPeerRecord = await RecordEnvelope.seal(peerRecord, this.peerId); + const supportedProtocols = this.registrar.getProtocols(); + const peer = await this.peerStore.get(this.peerId); + const agentVersion = toString$1(peer.metadata.get('AgentVersion') ?? fromString(this.host.agentVersion)); + const protocolVersion = toString$1(peer.metadata.get('ProtocolVersion') ?? fromString(this.host.protocolVersion)); + const self = this; + async function* pushToConnections() { + for (const connection of self.connectionManager.getConnections()) { + const peer = await self.peerStore.get(connection.remotePeer); + if (!peer.protocols.includes(self.protocol)) { + continue; + } + yield async () => { + let stream; + const signal = AbortSignal.timeout(self.timeout); + setMaxListeners(Infinity, signal); + try { + stream = await connection.newStream(self.protocol, { + signal, + runOnTransientConnection: self.runOnTransientConnection + }); + const pb = pbStream(stream, { + maxDataLength: self.maxMessageSize + }).pb(Identify$1); + await pb.write({ + listenAddrs: listenAddresses.map(ma => ma.bytes), + signedPeerRecord: signedPeerRecord.marshal(), + protocols: supportedProtocols, + agentVersion, + protocolVersion + }, { + signal + }); + await stream.close({ + signal + }); + } + catch (err) { + // Just log errors + self.log.error('could not push identify update to peer', err); + 
stream?.abort(err); + } + }; + } + } + await drain(parallel(pushToConnections(), { + concurrency: this.concurrency + })); + } + /** + * Reads the Identify Push message from the given `connection` + */ + async handleProtocol(data) { + const { connection, stream } = data; + try { + if (this.peerId.equals(connection.remotePeer)) { + throw new Error('received push from ourselves?'); + } + const options = { + signal: AbortSignal.timeout(this.timeout) + }; + const pb = pbStream(stream, { + maxDataLength: this.maxMessageSize + }).pb(Identify$1); + const message = await pb.read(options); + await stream.close(options); + await consumeIdentifyMessage(this.peerStore, this.events, this.log, connection, message); + } + catch (err) { + this.log.error('received invalid message', err); + stream.abort(err); + return; + } + this.log('handled push from %p', connection.remotePeer); + } + } + + /* eslint-disable complexity */ + class Identify extends AbstractIdentify { + constructor(components, init = {}) { + super(components, { + ...init, + protocol: `/${init.protocolPrefix ?? defaultValues.protocolPrefix}/${MULTICODEC_IDENTIFY_PROTOCOL_NAME}/${MULTICODEC_IDENTIFY_PROTOCOL_VERSION}`, + log: components.logger.forComponent('libp2p:identify') + }); + if (init.runOnConnectionOpen ?? 
defaultValues.runOnConnectionOpen) { + // When a new connection happens, trigger identify + components.events.addEventListener('connection:open', (evt) => { + const connection = evt.detail; + this.identify(connection).catch(err => { this.log.error('error during identify trigged by connection:open', err); }); + }); + } + } + async _identify(connection, options = {}) { + let stream; + if (options.signal == null) { + const signal = AbortSignal.timeout(this.timeout); + setMaxListeners(Infinity, signal); + options = { + ...options, + signal + }; + } + try { + stream = await connection.newStream(this.protocol, { + ...options, + runOnTransientConnection: this.runOnTransientConnection + }); + const pb = pbStream(stream, { + maxDataLength: this.maxMessageSize + }).pb(Identify$1); + const message = await pb.read(options); + await stream.close(options); + return message; + } + catch (err) { + this.log.error('error while reading identify message', err); + stream?.abort(err); + throw err; + } + } + async identify(connection, options = {}) { + const message = await this._identify(connection, options); + const { publicKey, protocols, observedAddr } = message; + if (publicKey == null) { + throw new CodeError$2('public key was missing from identify message', 'ERR_MISSING_PUBLIC_KEY'); + } + const id = await peerIdFromKeys(publicKey); + if (!connection.remotePeer.equals(id)) { + throw new CodeError$2('identified peer does not match the expected peer', 'ERR_INVALID_PEER'); + } + if (this.peerId.equals(id)) { + throw new CodeError$2('identified peer is our own peer id?', 'ERR_INVALID_PEER'); + } + // Get the observedAddr if there is one + const cleanObservedAddr = getCleanMultiaddr(observedAddr); + this.log('identify completed for peer %p and protocols %o', id, protocols); + this.log('our observed address is %a', cleanObservedAddr); + if (cleanObservedAddr != null && + this.addressManager.getObservedAddrs().length < (this.maxObservedAddresses ?? 
Infinity)) { + this.log('storing our observed address %a', cleanObservedAddr); + this.addressManager.addObservedAddr(cleanObservedAddr); + } + return consumeIdentifyMessage(this.peerStore, this.events, this.log, connection, message); + } + /** + * Sends the `Identify` response with the Signed Peer Record + * to the requesting peer over the given `connection` + */ + async handleProtocol(data) { + const { connection, stream } = data; + const signal = AbortSignal.timeout(this.timeout); + setMaxListeners(Infinity, signal); + try { + const publicKey = this.peerId.publicKey ?? new Uint8Array(0); + const peerData = await this.peerStore.get(this.peerId); + const multiaddrs = this.addressManager.getAddresses().map(ma => ma.decapsulateCode(getProtocol('p2p').code)); + let signedPeerRecord = peerData.peerRecordEnvelope; + if (multiaddrs.length > 0 && signedPeerRecord == null) { + const peerRecord = new PeerRecord({ + peerId: this.peerId, + multiaddrs + }); + const envelope = await RecordEnvelope.seal(peerRecord, this.peerId); + signedPeerRecord = envelope.marshal().subarray(); + } + let observedAddr = connection.remoteAddr.bytes; + if (!IP_OR_DOMAIN.matches(connection.remoteAddr)) { + observedAddr = undefined; + } + const pb = pbStream(stream).pb(Identify$1); + await pb.write({ + protocolVersion: this.host.protocolVersion, + agentVersion: this.host.agentVersion, + publicKey, + listenAddrs: multiaddrs.map(addr => addr.bytes), + signedPeerRecord, + observedAddr, + protocols: peerData.protocols + }, { + signal + }); + await stream.close({ + signal + }); + } + catch (err) { + this.log.error('could not respond to identify request', err); + stream.abort(err); + } + } + } + + /** + * @packageDocumentation + * + * Use the `identify` function to add support for the [Identify protocol](https://github.com/libp2p/specs/blob/master/identify/README.md) to libp2p. 
+ * + * This protocol allows network peers to discover the multiaddrs the current node listens on, and the protocols it supports. + * + * A second function, `identifyPush` is also exported to add support for [identify/push](https://github.com/libp2p/specs/blob/master/identify/README.md#identifypush). + * + * This protocol will send updates to all connected peers when the multiaddrs or protocols of the current node change. + * + * > [!TIP] + * > For maximum network compatibility you should configure both protocols + * + * @example Enabling identify + * + * ```typescript + * import { createLibp2p } from 'libp2p' + * import { identify } from '@libp2p/identify' + * + * const node = await createLibp2p({ + * // ...other options + * services: { + * identify: identify() + * } + * }) + * ``` + * + * @example Enabling identify push + * + * ```typescript + * import { createLibp2p } from 'libp2p' + * import { identifyPush } from '@libp2p/identify' + * + * const node = await createLibp2p({ + * // ...other options + * services: { + * identifyPush: identifyPush() + * } + * }) + * ``` + */ + function identify(init = {}) { + return (components) => new Identify(components, init); + } + function identifyPush(init = {}) { + return (components) => new IdentifyPush(components, init); + } + + // MaxRecordAge specifies the maximum time that any node will hold onto a record + // from the time its received. This does not apply to any other forms of validity that + // the record may contain. + // For example, a record may contain an ipns entry with an EOL saying its valid + // until the year 2020 (a great time in the future). 
For that record to stick around + // it must be rebroadcasted more frequently than once every 'MaxRecordAge' + const second = 1000; + const minute = 60 * second; + const hour = 60 * minute; + const MAX_RECORD_AGE = 36 * hour; + const PROTOCOL = '/ipfs/kad/1.0.0'; + const RECORD_KEY_PREFIX = '/dht/record'; + const PROVIDER_KEY_PREFIX = '/dht/provider'; + const PROVIDERS_LRU_CACHE_SIZE = 256; + const PROVIDERS_VALIDITY = 24 * hour; + const PROVIDERS_CLEANUP_INTERVAL = hour; + // K is the maximum number of requests to perform before returning failure + const K = 20; + // Alpha is the concurrency for asynchronous requests + const ALPHA = 3; + // How often we look for our closest DHT neighbours + const QUERY_SELF_INTERVAL = 5 * minute; + // How often we look for the first set of our closest DHT neighbours + const QUERY_SELF_INITIAL_INTERVAL = second; + // How long to look for our closest DHT neighbours for + const QUERY_SELF_TIMEOUT = 5 * second; + // How often we try to find new peers + const TABLE_REFRESH_INTERVAL = 5 * minute; + // How how long to look for new peers for + const TABLE_REFRESH_QUERY_TIMEOUT = 30 * second; + // When a timeout is not specified, run a query for this long + const DEFAULT_QUERY_TIMEOUT = 180 * second; + + /* eslint-disable import/export */ + /* eslint-disable complexity */ + /* eslint-disable @typescript-eslint/no-namespace */ + /* eslint-disable @typescript-eslint/no-unnecessary-boolean-literal-compare */ + /* eslint-disable @typescript-eslint/no-empty-interface */ + var Record$1; + (function (Record) { + let _codec; + Record.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if ((obj.key != null && obj.key.byteLength > 0)) { + w.uint32(10); + w.bytes(obj.key); + } + if ((obj.value != null && obj.value.byteLength > 0)) { + w.uint32(18); + w.bytes(obj.value); + } + if ((obj.timeReceived != null && obj.timeReceived !== '')) { + w.uint32(42); + 
w.string(obj.timeReceived); + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length, opts = {}) => { + const obj = { + key: alloc$2(0), + value: alloc$2(0), + timeReceived: '' + }; + const end = length == null ? reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + obj.key = reader.bytes(); + break; + } + case 2: { + obj.value = reader.bytes(); + break; + } + case 5: { + obj.timeReceived = reader.string(); + break; + } + default: { + reader.skipType(tag & 7); + break; + } + } + } + return obj; + }); + } + return _codec; + }; + Record.encode = (obj) => { + return encodeMessage(obj, Record.codec()); + }; + Record.decode = (buf, opts) => { + return decodeMessage(buf, Record.codec(), opts); + }; + })(Record$1 || (Record$1 = {})); + + /** + * Convert a JavaScript date into an `RFC3339Nano` formatted + * string + */ + function toRFC3339(time) { + const year = time.getUTCFullYear(); + const month = String(time.getUTCMonth() + 1).padStart(2, '0'); + const day = String(time.getUTCDate()).padStart(2, '0'); + const hour = String(time.getUTCHours()).padStart(2, '0'); + const minute = String(time.getUTCMinutes()).padStart(2, '0'); + const seconds = String(time.getUTCSeconds()).padStart(2, '0'); + const milliseconds = time.getUTCMilliseconds(); + const nanoseconds = String(milliseconds * 1000 * 1000).padStart(9, '0'); + return `${year}-${month}-${day}T${hour}:${minute}:${seconds}.${nanoseconds}Z`; + } + /** + * Parses a date string formatted as `RFC3339Nano` into a + * JavaScript Date object + */ + function parseRFC3339(time) { + const rfc3339Matcher = new RegExp( + // 2006-01-02T + '(\\d{4})-(\\d{2})-(\\d{2})T' + + // 15:04:05 + '(\\d{2}):(\\d{2}):(\\d{2})' + + // .999999999Z + '\\.(\\d+)Z'); + const m = String(time).trim().match(rfc3339Matcher); + if (m == null) { + throw new Error('Invalid format'); + } + const year = parseInt(m[1], 10); + const month = parseInt(m[2], 10) - 
1; + const date = parseInt(m[3], 10); + const hour = parseInt(m[4], 10); + const minute = parseInt(m[5], 10); + const second = parseInt(m[6], 10); + const millisecond = parseInt(m[7].slice(0, -6), 10); + return new Date(Date.UTC(year, month, date, hour, minute, second, millisecond)); + } + + /** + * @packageDocumentation + * + * This is an implementation of the [routing record format](https://github.com/libp2p/specs/blob/b9efe152c29f93f7a87931c14d78ae11e7924d5a/kad-dht/README.md?plain=1#L408-L425) used by libp2p to store data in the datastore passed to the libp2p constructor. + * + * @example Deserialization + * + * ```TypeScript + * import { Libp2pRecord } from '@libp2p/record' + * + * const buf = Uint8Array.from([0, 1, 2, 3]) + * const record = Libp2pRecord.deserialize(buf) + * ``` + * + * @example Serialization + * + * ```TypeScript + * import { Libp2pRecord } from '@libp2p/record' + * + * const key = Uint8Array.from([0, 1, 2, 3]) + * const value = Uint8Array.from([0, 1, 2, 3]) + * const timeReceived = new Date() + * + * const record = new Libp2pRecord(key, value, timeReceived) + * const buf = record.serialize() + * ``` + */ + class Libp2pRecord { + key; + value; + timeReceived; + constructor(key, value, timeReceived) { + if (!(key instanceof Uint8Array)) { + throw new Error('key must be a Uint8Array'); + } + if (!(value instanceof Uint8Array)) { + throw new Error('value must be a Uint8Array'); + } + this.key = key; + this.value = value; + this.timeReceived = timeReceived; + } + serialize() { + return Record$1.encode(this.prepareSerialize()); + } + /** + * Return the object format ready to be given to the protobuf library. 
+ */ + prepareSerialize() { + return { + key: this.key, + value: this.value, + timeReceived: toRFC3339(this.timeReceived) + }; + } + /** + * Decode a protobuf encoded record + */ + static deserialize(raw) { + const rec = Record$1.decode(raw); + return new Libp2pRecord(rec.key, rec.value, new Date(rec.timeReceived)); + } + /** + * Create a record from the raw object returned from the protobuf library + */ + static fromDeserialized(obj) { + const recvtime = parseRFC3339(obj.timeReceived); + if (obj.key == null) { + throw new Error('key missing from deserialized object'); + } + if (obj.value == null) { + throw new Error('value missing from deserialized object'); + } + const rec = new Libp2pRecord(obj.key, obj.value, recvtime); + return rec; + } + } + + /* eslint-disable import/export */ + /* eslint-disable complexity */ + /* eslint-disable @typescript-eslint/no-namespace */ + /* eslint-disable @typescript-eslint/no-unnecessary-boolean-literal-compare */ + /* eslint-disable @typescript-eslint/no-empty-interface */ + var Record; + (function (Record) { + let _codec; + Record.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if (obj.key != null) { + w.uint32(10); + w.bytes(obj.key); + } + if (obj.value != null) { + w.uint32(18); + w.bytes(obj.value); + } + if (obj.author != null) { + w.uint32(26); + w.bytes(obj.author); + } + if (obj.signature != null) { + w.uint32(34); + w.bytes(obj.signature); + } + if (obj.timeReceived != null) { + w.uint32(42); + w.string(obj.timeReceived); + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length) => { + const obj = {}; + const end = length == null ? 
reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + obj.key = reader.bytes(); + break; + } + case 2: { + obj.value = reader.bytes(); + break; + } + case 3: { + obj.author = reader.bytes(); + break; + } + case 4: { + obj.signature = reader.bytes(); + break; + } + case 5: { + obj.timeReceived = reader.string(); + break; + } + default: { + reader.skipType(tag & 7); + break; + } + } + } + return obj; + }); + } + return _codec; + }; + Record.encode = (obj) => { + return encodeMessage(obj, Record.codec()); + }; + Record.decode = (buf) => { + return decodeMessage(buf, Record.codec()); + }; + })(Record || (Record = {})); + var MessageType; + (function (MessageType) { + MessageType["PUT_VALUE"] = "PUT_VALUE"; + MessageType["GET_VALUE"] = "GET_VALUE"; + MessageType["ADD_PROVIDER"] = "ADD_PROVIDER"; + MessageType["GET_PROVIDERS"] = "GET_PROVIDERS"; + MessageType["FIND_NODE"] = "FIND_NODE"; + MessageType["PING"] = "PING"; + })(MessageType || (MessageType = {})); + var __MessageTypeValues; + (function (__MessageTypeValues) { + __MessageTypeValues[__MessageTypeValues["PUT_VALUE"] = 0] = "PUT_VALUE"; + __MessageTypeValues[__MessageTypeValues["GET_VALUE"] = 1] = "GET_VALUE"; + __MessageTypeValues[__MessageTypeValues["ADD_PROVIDER"] = 2] = "ADD_PROVIDER"; + __MessageTypeValues[__MessageTypeValues["GET_PROVIDERS"] = 3] = "GET_PROVIDERS"; + __MessageTypeValues[__MessageTypeValues["FIND_NODE"] = 4] = "FIND_NODE"; + __MessageTypeValues[__MessageTypeValues["PING"] = 5] = "PING"; + })(__MessageTypeValues || (__MessageTypeValues = {})); + (function (MessageType) { + MessageType.codec = () => { + return enumeration(__MessageTypeValues); + }; + })(MessageType || (MessageType = {})); + var ConnectionType; + (function (ConnectionType) { + ConnectionType["NOT_CONNECTED"] = "NOT_CONNECTED"; + ConnectionType["CONNECTED"] = "CONNECTED"; + ConnectionType["CAN_CONNECT"] = "CAN_CONNECT"; + ConnectionType["CANNOT_CONNECT"] 
= "CANNOT_CONNECT"; + })(ConnectionType || (ConnectionType = {})); + var __ConnectionTypeValues; + (function (__ConnectionTypeValues) { + __ConnectionTypeValues[__ConnectionTypeValues["NOT_CONNECTED"] = 0] = "NOT_CONNECTED"; + __ConnectionTypeValues[__ConnectionTypeValues["CONNECTED"] = 1] = "CONNECTED"; + __ConnectionTypeValues[__ConnectionTypeValues["CAN_CONNECT"] = 2] = "CAN_CONNECT"; + __ConnectionTypeValues[__ConnectionTypeValues["CANNOT_CONNECT"] = 3] = "CANNOT_CONNECT"; + })(__ConnectionTypeValues || (__ConnectionTypeValues = {})); + (function (ConnectionType) { + ConnectionType.codec = () => { + return enumeration(__ConnectionTypeValues); + }; + })(ConnectionType || (ConnectionType = {})); + var PeerInfo; + (function (PeerInfo) { + let _codec; + PeerInfo.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if ((obj.id != null && obj.id.byteLength > 0)) { + w.uint32(10); + w.bytes(obj.id); + } + if (obj.multiaddrs != null) { + for (const value of obj.multiaddrs) { + w.uint32(18); + w.bytes(value); + } + } + if (obj.connection != null) { + w.uint32(24); + ConnectionType.codec().encode(obj.connection, w); + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length) => { + const obj = { + id: alloc$2(0), + multiaddrs: [] + }; + const end = length == null ? 
reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + obj.id = reader.bytes(); + break; + } + case 2: { + obj.multiaddrs.push(reader.bytes()); + break; + } + case 3: { + obj.connection = ConnectionType.codec().decode(reader); + break; + } + default: { + reader.skipType(tag & 7); + break; + } + } + } + return obj; + }); + } + return _codec; + }; + PeerInfo.encode = (obj) => { + return encodeMessage(obj, PeerInfo.codec()); + }; + PeerInfo.decode = (buf) => { + return decodeMessage(buf, PeerInfo.codec()); + }; + })(PeerInfo || (PeerInfo = {})); + var Message; + (function (Message) { + let _codec; + Message.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if (obj.type != null && __MessageTypeValues[obj.type] !== 0) { + w.uint32(8); + MessageType.codec().encode(obj.type, w); + } + if (obj.clusterLevel != null) { + w.uint32(80); + w.int32(obj.clusterLevel); + } + if (obj.key != null) { + w.uint32(18); + w.bytes(obj.key); + } + if (obj.record != null) { + w.uint32(26); + w.bytes(obj.record); + } + if (obj.closer != null) { + for (const value of obj.closer) { + w.uint32(66); + PeerInfo.codec().encode(value, w); + } + } + if (obj.providers != null) { + for (const value of obj.providers) { + w.uint32(74); + PeerInfo.codec().encode(value, w); + } + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length) => { + const obj = { + type: MessageType.PUT_VALUE, + closer: [], + providers: [] + }; + const end = length == null ? 
reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + obj.type = MessageType.codec().decode(reader); + break; + } + case 10: { + obj.clusterLevel = reader.int32(); + break; + } + case 2: { + obj.key = reader.bytes(); + break; + } + case 3: { + obj.record = reader.bytes(); + break; + } + case 8: { + obj.closer.push(PeerInfo.codec().decode(reader, reader.uint32())); + break; + } + case 9: { + obj.providers.push(PeerInfo.codec().decode(reader, reader.uint32())); + break; + } + default: { + reader.skipType(tag & 7); + break; + } + } + } + return obj; + }); + } + return _codec; + }; + Message.encode = (obj) => { + return encodeMessage(obj, Message.codec()); + }; + Message.decode = (buf) => { + return decodeMessage(buf, Message.codec()); + }; + })(Message || (Message = {})); + + function sendQueryEvent(fields, options = {}) { + const event = { + ...fields, + name: 'SEND_QUERY', + type: 0, + messageName: fields.type, + messageType: fields.type + }; + options.onProgress?.(new CustomEvent$1('kad-dht:query:send-query', { detail: event })); + return event; + } + function peerResponseEvent(fields, options = {}) { + const event = { + ...fields, + name: 'PEER_RESPONSE', + type: 1, + messageName: fields.messageType, + closer: (fields.closer != null) ? fields.closer : [], + providers: (fields.providers != null) ? 
fields.providers : [] + }; + options.onProgress?.(new CustomEvent$1('kad-dht:query:peer-response', { detail: event })); + return event; + } + function finalPeerEvent(fields, options = {}) { + const event = { + ...fields, + name: 'FINAL_PEER', + type: 2 + }; + options.onProgress?.(new CustomEvent$1('kad-dht:query:final-peer', { detail: event })); + return event; + } + function queryErrorEvent(fields, options = {}) { + const event = { + ...fields, + name: 'QUERY_ERROR', + type: 3 + }; + options.onProgress?.(new CustomEvent$1('kad-dht:query:query-error', { detail: event })); + return event; + } + function providerEvent(fields, options = {}) { + const event = { + ...fields, + name: 'PROVIDER', + type: 4 + }; + options.onProgress?.(new CustomEvent$1('kad-dht:query:provider', { detail: event })); + return event; + } + function valueEvent(fields, options = {}) { + const event = { + ...fields, + name: 'VALUE', + type: 5 + }; + options.onProgress?.(new CustomEvent$1('kad-dht:query:value', { detail: event })); + return event; + } + function dialPeerEvent(fields, options = {}) { + const event = { + ...fields, + name: 'DIAL_PEER', + type: 7 + }; + options.onProgress?.(new CustomEvent$1('kad-dht:query:dial-peer', { detail: event })); + return event; + } + + /** + * Select the best record out of the given records + */ + function bestRecord(selectors, k, records) { + if (records.length === 0) { + const errMsg = 'No records given'; + throw new CodeError$2(errMsg, 'ERR_NO_RECORDS_RECEIVED'); + } + const kStr = toString$1(k); + const parts = kStr.split('/'); + if (parts.length < 3) { + const errMsg = 'Record key does not have a selector function'; + throw new CodeError$2(errMsg, 'ERR_NO_SELECTOR_FUNCTION_FOR_RECORD_KEY'); + } + const selector = selectors[parts[1].toString()]; + if (selector == null) { + const errMsg = `No selector function configured for key type "${parts[1]}"`; + throw new CodeError$2(errMsg, 'ERR_UNRECOGNIZED_KEY_PREFIX'); + } + if (records.length === 1) { + 
return 0; + } + return selector(k, records); + } + /** + * Best record selector, for public key records. + * Simply returns the first record, as all valid public key + * records are equal + */ + function publickKey(k, records) { + return 0; + } + const selectors = { + pk: publickKey + }; + + /** + * Checks a record and ensures it is still valid. + * It runs the needed validators. + * If verification fails the returned Promise will reject with the error. + */ + async function verifyRecord(validators, record) { + const key = record.key; + const keyString = toString$1(key); + const parts = keyString.split('/'); + if (parts.length < 3) { + // No validator available + return; + } + const validator = validators[parts[1].toString()]; + if (validator == null) { + const errMsg = `No validator available for key type "${parts[1]}"`; + throw new CodeError$2(errMsg, 'ERR_INVALID_RECORD_KEY_TYPE'); + } + await validator(key, record.value); + } + /** + * Validator for public key records. + * Verifies that the passed in record value is the PublicKey + * that matches the passed in key. + * If validation fails the returned Promise will reject with the error. + * + * @param {Uint8Array} key - A valid key is of the form `'/pk/'` + * @param {Uint8Array} publicKey - The public key to validate against (protobuf encoded). 
+ */ + const validatePublicKeyRecord = async (key, publicKey) => { + if (!(key instanceof Uint8Array)) { + throw new CodeError$2('"key" must be a Uint8Array', 'ERR_INVALID_RECORD_KEY_NOT_BUFFER'); + } + if (key.byteLength < 5) { + throw new CodeError$2('invalid public key record', 'ERR_INVALID_RECORD_KEY_TOO_SHORT'); + } + const prefix = toString$1(key.subarray(0, 4)); + if (prefix !== '/pk/') { + throw new CodeError$2('key was not prefixed with /pk/', 'ERR_INVALID_RECORD_KEY_BAD_PREFIX'); + } + const keyhash = key.slice(4); + const publicKeyHash = await sha256$1.digest(publicKey); + if (!equals(keyhash, publicKeyHash.bytes)) { + throw new CodeError$2('public key does not match passed in key', 'ERR_INVALID_RECORD_HASH_MISMATCH'); + } + }; + const validators = { + pk: validatePublicKeyRecord + }; + + // const IPNS_PREFIX = uint8ArrayFromString('/ipns/') + const PK_PREFIX = fromString('/pk/'); + function removePrivateAddressesMapper(peer) { + return { + ...peer, + multiaddrs: peer.multiaddrs.filter(multiaddr => { + const [[type, addr]] = multiaddr.stringTuples(); + // treat /dns, /dns4, and /dns6 addrs as public + if (type === 53 || type === 54 || type === 55) { + // localhost can be a dns address but it's private + if (addr === 'localhost') { + return false; + } + return true; + } + if (type !== 4 && type !== 6) { + return false; + } + if (addr == null) { + return false; + } + const isPrivate = isPrivateIp(addr); + if (isPrivate == null) { + // not an ip address + return true; + } + return !isPrivate; + }) + }; + } + /** + * Creates a DHT ID by hashing a given Uint8Array + */ + async function convertBuffer(buf) { + const multihash = await sha256$1.digest(buf); + return multihash.digest; + } + /** + * Creates a DHT ID by hashing a Peer ID + */ + async function convertPeerId(peerId) { + return convertBuffer(peerId.toBytes()); + } + /** + * Convert a Uint8Array to their SHA2-256 hash + */ + function bufferToRecordKey(buf) { + return new 
Key(`${RECORD_KEY_PREFIX}/${toString$1(buf, 'base32')}`, false); + } + /** + * Generate the key for a public key. + */ + function keyForPublicKey(peer) { + return concat$1([ + PK_PREFIX, + peer.toBytes() + ]); + } + function isPublicKeyKey(key) { + return toString$1(key.subarray(0, 4)) === '/pk/'; + } + function fromPublicKeyKey(key) { + return peerIdFromBytes(key.subarray(4)); + } + /** + * Create a new put record, encodes and signs it if enabled + */ + function createPutRecord(key, value) { + const timeReceived = new Date(); + const rec = new Libp2pRecord(key, value, timeReceived); + return rec.serialize(); + } + function debounce(callback, wait = 100) { + let timeout; + return () => { + clearTimeout(timeout); + timeout = setTimeout(() => { callback(); }, wait); + }; + } + // see https://github.com/multiformats/multiaddr/blob/master/protocols.csv + const P2P_CIRCUIT_CODE = 290; + const DNS4_CODE = 54; + const DNS6_CODE = 55; + const DNSADDR_CODE = 56; + const IP4_CODE = 4; + const IP6_CODE = 41; + function multiaddrIsPublic(multiaddr) { + const tuples = multiaddr.stringTuples(); + // p2p-circuit should not enable server mode + for (const tuple of tuples) { + if (tuple[0] === P2P_CIRCUIT_CODE) { + return false; + } + } + // dns4 or dns6 or dnsaddr + if (tuples[0][0] === DNS4_CODE || tuples[0][0] === DNS6_CODE || tuples[0][0] === DNSADDR_CODE) { + return true; + } + // ip4 or ip6 + if (tuples[0][0] === IP4_CODE || tuples[0][0] === IP6_CODE) { + const result = isPrivateIp(`${tuples[0][1]}`); + const isPublic = result == null || !result; + return isPublic; + } + return false; + } + + class ContentFetching { + log; + components; + validators; + selectors; + peerRouting; + queryManager; + network; + constructor(components, init) { + const { validators, selectors, peerRouting, queryManager, network, logPrefix } = init; + this.components = components; + this.log = components.logger.forComponent(`${logPrefix}:content-fetching`); + this.validators = validators; + 
this.selectors = selectors; + this.peerRouting = peerRouting; + this.queryManager = queryManager; + this.network = network; + } + /** + * Attempt to retrieve the value for the given key from + * the local datastore + */ + async getLocal(key) { + this.log('getLocal %b', key); + const dsKey = bufferToRecordKey(key); + this.log('fetching record for key %k', dsKey); + const raw = await this.components.datastore.get(dsKey); + this.log('found %k in local datastore', dsKey); + const rec = Libp2pRecord.deserialize(raw); + await verifyRecord(this.validators, rec); + return rec; + } + /** + * Send the best record found to any peers that have an out of date record + */ + async *sendCorrectionRecord(key, vals, best, options = {}) { + this.log('sendCorrection for %b', key); + const fixupRec = createPutRecord(key, best); + for (const { value, from } of vals) { + // no need to do anything + if (equals(value, best)) { + this.log('record was ok'); + continue; + } + // correct ourself + if (this.components.peerId.equals(from)) { + try { + const dsKey = bufferToRecordKey(key); + this.log(`Storing corrected record for key ${dsKey.toString()}`); + await this.components.datastore.put(dsKey, fixupRec.subarray()); + } + catch (err) { + this.log.error('Failed error correcting self', err); + } + continue; + } + // send correction + let sentCorrection = false; + const request = { + type: MessageType.PUT_VALUE, + key, + record: fixupRec + }; + for await (const event of this.network.sendRequest(from, request, options)) { + if (event.name === 'PEER_RESPONSE' && (event.record != null) && equals(event.record.value, Libp2pRecord.deserialize(fixupRec).value)) { + sentCorrection = true; + } + yield event; + } + if (!sentCorrection) { + yield queryErrorEvent({ from, error: new CodeError$2('value not put correctly', 'ERR_PUT_VALUE_INVALID') }, options); + } + this.log.error('Failed error correcting entry'); + } + } + /** + * Store the given key/value pair in the DHT + */ + async *put(key, value, 
options = {}) { + this.log('put key %b value %b', key, value); + // create record in the dht format + const record = createPutRecord(key, value); + // store the record locally + const dsKey = bufferToRecordKey(key); + this.log(`storing record for key ${dsKey.toString()}`); + await this.components.datastore.put(dsKey, record.subarray()); + // put record to the closest peers + yield* pipe(this.peerRouting.getClosestPeers(key, { signal: options.signal }), (source) => map(source, (event) => { + return async () => { + if (event.name !== 'FINAL_PEER') { + return [event]; + } + const events = []; + const msg = { + type: MessageType.PUT_VALUE, + key, + record + }; + this.log('send put to %p', event.peer.id); + for await (const putEvent of this.network.sendRequest(event.peer.id, msg, options)) { + events.push(putEvent); + if (putEvent.name !== 'PEER_RESPONSE') { + continue; + } + if (!(putEvent.record != null && equals(putEvent.record.value, Libp2pRecord.deserialize(record).value))) { + events.push(queryErrorEvent({ from: event.peer.id, error: new CodeError$2('value not put correctly', 'ERR_PUT_VALUE_INVALID') }, options)); + } + } + return events; + }; + }), (source) => parallel(source, { + ordered: false, + concurrency: ALPHA + }), async function* (source) { + for await (const events of source) { + yield* events; + } + }); + } + /** + * Get the value to the given key + */ + async *get(key, options = {}) { + this.log('get %b', key); + const vals = []; + for await (const event of this.getMany(key, options)) { + if (event.name === 'VALUE') { + vals.push(event); + } + yield event; + } + if (vals.length === 0) { + return; + } + const records = vals.map((v) => v.value); + let i = 0; + try { + i = bestRecord(this.selectors, key, records); + } + catch (err) { + // Assume the first record if no selector available + if (err.code !== 'ERR_NO_SELECTOR_FUNCTION_FOR_RECORD_KEY') { + throw err; + } + } + const best = records[i]; + this.log('GetValue %b %b', key, best); + if (best == 
null) { + throw new CodeError$2('best value was not found', 'ERR_NOT_FOUND'); + } + yield* this.sendCorrectionRecord(key, vals, best, options); + yield vals[i]; + } + /** + * Get the `n` values to the given key without sorting + */ + async *getMany(key, options = {}) { + this.log('getMany values for %b', key); + try { + const localRec = await this.getLocal(key); + yield valueEvent({ + value: localRec.value, + from: this.components.peerId + }, options); + } + catch (err) { + this.log('error getting local value for %b', key, err); + } + const self = this; // eslint-disable-line @typescript-eslint/no-this-alias + const getValueQuery = async function* ({ peer, signal }) { + for await (const event of self.peerRouting.getValueOrPeers(peer, key, { signal })) { + yield event; + if (event.name === 'PEER_RESPONSE' && (event.record != null)) { + yield valueEvent({ from: peer, value: event.record.value }, options); + } + } + }; + // we have peers, lets send the actual query to them + yield* this.queryManager.run(key, getValueQuery, options); + } + } + + function toPbPeerInfo(peer, connection) { + const output = { + id: peer.id.toBytes(), + multiaddrs: (peer.multiaddrs ?? []).map((m) => m.bytes), + connection + }; + return output; + } + function fromPbPeerInfo(peer) { + if (peer.id == null) { + throw new Error('Invalid peer in message'); + } + return { + id: peerIdFromBytes(peer.id), + multiaddrs: (peer.multiaddrs ?? 
[]).map((a) => multiaddr(a)) + }; + } + + class ContentRouting { + log; + components; + network; + peerRouting; + queryManager; + routingTable; + providers; + constructor(components, init) { + const { network, peerRouting, queryManager, routingTable, providers, logPrefix } = init; + this.components = components; + this.log = components.logger.forComponent(`${logPrefix}:content-routing`); + this.network = network; + this.peerRouting = peerRouting; + this.queryManager = queryManager; + this.routingTable = routingTable; + this.providers = providers; + } + /** + * Announce to the network that we can provide the value for a given key and + * are contactable on the given multiaddrs + */ + async *provide(key, multiaddrs, options = {}) { + this.log('provide %s', key); + const target = key.multihash.bytes; + // Add peer as provider + await this.providers.addProvider(key, this.components.peerId); + const msg = { + type: MessageType.ADD_PROVIDER, + key: target, + providers: [ + toPbPeerInfo({ + id: this.components.peerId, + multiaddrs + }) + ] + }; + let sent = 0; + const maybeNotifyPeer = (event) => { + return async () => { + if (event.name !== 'FINAL_PEER') { + return [event]; + } + const events = []; + this.log('putProvider %s to %p', key, event.peer.id); + try { + this.log('sending provider record for %s to %p', key, event.peer.id); + for await (const sendEvent of this.network.sendMessage(event.peer.id, msg, options)) { + if (sendEvent.name === 'PEER_RESPONSE') { + this.log('sent provider record for %s to %p', key, event.peer.id); + sent++; + } + events.push(sendEvent); + } + } + catch (err) { + this.log.error('error sending provide record to peer %p', event.peer.id, err); + events.push(queryErrorEvent({ from: event.peer.id, error: err }, options)); + } + return events; + }; + }; + // Notify closest peers + yield* pipe(this.peerRouting.getClosestPeers(target, options), (source) => map(source, (event) => maybeNotifyPeer(event)), (source) => parallel(source, { + ordered: 
false, + concurrency: ALPHA + }), async function* (source) { + for await (const events of source) { + yield* events; + } + }); + this.log('sent provider records to %d peers', sent); + } + /** + * Search the dht for up to `K` providers of the given CID. + */ + async *findProviders(key, options) { + const toFind = this.routingTable.kBucketSize; + let found = 0; + const target = key.multihash.bytes; + const self = this; // eslint-disable-line @typescript-eslint/no-this-alias + this.log('findProviders %c', key); + const provs = await this.providers.getProviders(key); + // yield values if we have some, also slice because maybe we got lucky and already have too many? + if (provs.length > 0) { + const providers = []; + for (const peerId of provs.slice(0, toFind)) { + try { + const peer = await this.components.peerStore.get(peerId); + providers.push({ + id: peerId, + multiaddrs: peer.addresses.map(({ multiaddr }) => multiaddr) + }); + } + catch (err) { + if (err.code !== 'ERR_NOT_FOUND') { + throw err; + } + this.log('no peer store entry for %p', peerId); + } + } + yield peerResponseEvent({ from: this.components.peerId, messageType: MessageType.GET_PROVIDERS, providers }, options); + yield providerEvent({ from: this.components.peerId, providers }, options); + found += providers.length; + if (found >= toFind) { + return; + } + } + /** + * The query function to use on this particular disjoint path + */ + const findProvidersQuery = async function* ({ peer, signal }) { + const request = { + type: MessageType.GET_PROVIDERS, + key: target + }; + yield* self.network.sendRequest(peer, request, { + ...options, + signal + }); + }; + const providers = new PeerSet(provs); + for await (const event of this.queryManager.run(target, findProvidersQuery, options)) { + yield event; + if (event.name === 'PEER_RESPONSE') { + this.log('Found %d provider entries for %c and %d closer peers', event.providers.length, key, event.closer.length); + const newProviders = []; + for (const peer of 
event.providers) { + if (providers.has(peer.id)) { + continue; + } + providers.add(peer.id); + newProviders.push(peer); + } + if (newProviders.length > 0) { + yield providerEvent({ from: event.from, providers: newProviders }, options); + found += newProviders.length; + if (found >= toFind) { + return; + } + } + } + } + } + } + + /** + * Implements exponential moving average. Ported from `moving-average`. + * + * @see https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average + * @see https://www.npmjs.com/package/moving-average + */ + class MovingAverage { + movingAverage; + variance; + deviation; + forecast; + timespan; + previousTime; + constructor(timespan) { + this.timespan = timespan; + this.movingAverage = 0; + this.variance = 0; + this.deviation = 0; + this.forecast = 0; + } + alpha(t, pt) { + return 1 - (Math.exp(-(t - pt) / this.timespan)); + } + push(value, time = Date.now()) { + if (this.previousTime != null) { + // calculate moving average + const a = this.alpha(time, this.previousTime); + const diff = value - this.movingAverage; + const incr = a * diff; + this.movingAverage = a * value + (1 - a) * this.movingAverage; + // calculate variance & deviation + this.variance = (1 - a) * (this.variance + diff * incr); + this.deviation = Math.sqrt(this.variance); + // calculate forecast + this.forecast = this.movingAverage + a * diff; + } + else { + this.movingAverage = value; + } + this.previousTime = time; + } + } + + const DEFAULT_TIMEOUT_MULTIPLIER = 1.2; + const DEFAULT_FAILURE_MULTIPLIER = 2; + const DEFAULT_MIN_TIMEOUT = 2000; + class AdaptiveTimeout { + success; + failure; + next; + metric; + timeoutMultiplier; + failureMultiplier; + minTimeout; + constructor(init = {}) { + this.success = new MovingAverage(init.interval ?? 5000); + this.failure = new MovingAverage(init.interval ?? 5000); + this.next = new MovingAverage(init.interval ?? 5000); + this.failureMultiplier = init.failureMultiplier ?? 
DEFAULT_FAILURE_MULTIPLIER; + this.timeoutMultiplier = init.timeoutMultiplier ?? DEFAULT_TIMEOUT_MULTIPLIER; + this.minTimeout = init.minTimeout ?? DEFAULT_MIN_TIMEOUT; + if (init.metricName != null) { + this.metric = init.metrics?.registerMetricGroup(init.metricName); + } + } + getTimeoutSignal(options = {}) { + // calculate timeout for individual peers based on moving average of + // previous successful requests + const timeout = Math.max(Math.round(this.next.movingAverage * (options.timeoutFactor ?? this.timeoutMultiplier)), this.minTimeout); + const sendTimeout = AbortSignal.timeout(timeout); + const timeoutSignal = anySignal([options.signal, sendTimeout]); + setMaxListeners(Infinity, timeoutSignal, sendTimeout); + timeoutSignal.start = Date.now(); + timeoutSignal.timeout = timeout; + return timeoutSignal; + } + cleanUp(signal) { + const time = Date.now() - signal.start; + if (signal.aborted) { + this.failure.push(time); + this.next.push(time * this.failureMultiplier); + this.metric?.update({ + failureMovingAverage: this.failure.movingAverage, + failureDeviation: this.failure.deviation, + failureForecast: this.failure.forecast, + failureVariance: this.failure.variance, + failure: time + }); + } + else { + this.success.push(time); + this.next.push(time); + this.metric?.update({ + successMovingAverage: this.success.movingAverage, + successDeviation: this.success.deviation, + successForecast: this.success.forecast, + successVariance: this.success.variance, + success: time + }); + } + } + } + + /** + * Handle network operations for the dht + */ + class Network extends TypedEventEmitter { + log; + protocol; + running; + components; + timeout; + /** + * Create a new network + */ + constructor(components, init) { + super(); + const { protocol } = init; + this.components = components; + this.log = components.logger.forComponent(`${init.logPrefix}:network`); + this.running = false; + this.protocol = protocol; + this.timeout = new AdaptiveTimeout({ + ...(init.timeout ?? 
{}), + metrics: components.metrics, + metricName: `${init.logPrefix.replaceAll(':', '_')}_network_message_send_times_milliseconds` + }); + } + /** + * Start the network + */ + async start() { + if (this.running) { + return; + } + this.running = true; + } + /** + * Stop all network activity + */ + async stop() { + this.running = false; + } + /** + * Is the network online? + */ + isStarted() { + return this.running; + } + /** + * Send a request and record RTT for latency measurements + */ + async *sendRequest(to, msg, options = {}) { + if (!this.running) { + return; + } + const type = msg.type; + if (type == null) { + throw new CodeError$3('Message type was missing', 'ERR_INVALID_PARAMETERS'); + } + this.log('sending %s to %p', msg.type, to); + yield dialPeerEvent({ peer: to }, options); + yield sendQueryEvent({ to, type }, options); + let stream; + const signal = this.timeout.getTimeoutSignal(options); + options = { + ...options, + signal + }; + try { + const connection = await this.components.connectionManager.openConnection(to, options); + stream = await connection.newStream(this.protocol, options); + const response = await this._writeReadMessage(stream, msg, options); + stream.close(options) + .catch(err => { + this.log.error('error closing stream to %p', to, err); + stream?.abort(err); + }); + yield peerResponseEvent({ + from: to, + messageType: response.type, + closer: response.closer.map(fromPbPeerInfo), + providers: response.providers.map(fromPbPeerInfo), + record: response.record == null ? 
undefined : Libp2pRecord.deserialize(response.record) + }, options); + } + catch (err) { + stream?.abort(err); + this.log.error('could not send %s to %p', msg.type, to, err); + yield queryErrorEvent({ from: to, error: err }, options); + } + finally { + this.timeout.cleanUp(signal); + } + } + /** + * Sends a message without expecting an answer + */ + async *sendMessage(to, msg, options = {}) { + if (!this.running) { + return; + } + const type = msg.type; + if (type == null) { + throw new CodeError$3('Message type was missing', 'ERR_INVALID_PARAMETERS'); + } + this.log('sending %s to %p', msg.type, to); + yield dialPeerEvent({ peer: to }, options); + yield sendQueryEvent({ to, type }, options); + let stream; + const signal = this.timeout.getTimeoutSignal(options); + options = { + ...options, + signal + }; + try { + const connection = await this.components.connectionManager.openConnection(to, options); + stream = await connection.newStream(this.protocol, options); + await this._writeMessage(stream, msg, options); + stream.close(options) + .catch(err => { + this.log.error('error closing stream to %p', to, err); + stream?.abort(err); + }); + yield peerResponseEvent({ from: to, messageType: type }, options); + } + catch (err) { + stream?.abort(err); + yield queryErrorEvent({ from: to, error: err }, options); + } + finally { + this.timeout.cleanUp(signal); + } + } + /** + * Write a message to the given stream + */ + async _writeMessage(stream, msg, options) { + const pb = pbStream(stream); + await pb.write(msg, Message, options); + await pb.unwrap().close(options); + } + /** + * Write a message and read its response. + * If no response is received after the specified timeout + * this will error out. 
+ */ + async _writeReadMessage(stream, msg, options) { + const pb = pbStream(stream); + await pb.write(msg, Message, options); + const message = await pb.read(Message, options); + await pb.unwrap().close(options); + // tell any listeners about new peers we've seen + message.closer.forEach(peerData => { + this.safeDispatchEvent('peer', { + detail: fromPbPeerInfo(peerData) + }); + }); + message.providers.forEach(peerData => { + this.safeDispatchEvent('peer', { + detail: fromPbPeerInfo(peerData) + }); + }); + return message; + } + } + + /** + * Compares two Uint8Arrays representing two xor distances. Returns `-1` if `a` + * is a lower distance, `1` if `b` is a lower distance or `0` if the distances + * are equal. + */ + function xorCompare(a, b) { + if (a.byteLength !== b.byteLength) { + throw new Error('Inputs should have the same length'); + } + for (let i = 0; i < a.byteLength; i++) { + if (a[i] === b[i]) { + continue; + } + return a[i] < b[i] ? -1 : 1; + } + return 0; + } + + /** + * Maintains a list of peerIds sorted by distance from a DHT key. + */ + class PeerDistanceList { + /** + * The DHT key from which distance is calculated + */ + originDhtKey; + /** + * The maximum size of the list + */ + capacity; + peerDistances; + constructor(originDhtKey, capacity) { + this.originDhtKey = originDhtKey; + this.capacity = capacity; + this.peerDistances = []; + } + /** + * The length of the list + */ + get length() { + return this.peerDistances.length; + } + /** + * The peers in the list, in order of distance from the origin key + */ + get peers() { + return this.peerDistances.map(pd => pd.peer); + } + /** + * Add a peerId to the list. + */ + async add(peer) { + const dhtKey = await convertPeerId(peer.id); + this.addWitKadId(peer, dhtKey); + } + /** + * Add a peerId to the list. 
+ */ + addWitKadId(peer, kadId) { + if (this.peerDistances.find(pd => pd.peer.id.equals(peer.id)) != null) { + return; + } + const el = { + peer, + distance: xor(this.originDhtKey, kadId) + }; + this.peerDistances.push(el); + this.peerDistances.sort((a, b) => xorCompare(a.distance, b.distance)); + this.peerDistances = this.peerDistances.slice(0, this.capacity); + } + /** + * Indicates whether any of the peerIds passed as a parameter are closer + * to the origin key than the furthest peerId in the PeerDistanceList. + */ + async isCloser(peerId) { + if (this.length === 0) { + return true; + } + const dhtKey = await convertPeerId(peerId); + const dhtKeyXor = xor(dhtKey, this.originDhtKey); + const furthestDistance = this.peerDistances[this.peerDistances.length - 1].distance; + return xorCompare(dhtKeyXor, furthestDistance) === -1; + } + /** + * Indicates whether any of the peerIds passed as a parameter are closer + * to the origin key than the furthest peerId in the PeerDistanceList. + */ + async anyCloser(peerIds) { + if (peerIds.length === 0) { + return false; + } + return Promise.any(peerIds.map(async (peerId) => this.isCloser(peerId))); + } + } + + class PeerRouting { + log; + routingTable; + network; + validators; + queryManager; + peerStore; + peerId; + constructor(components, init) { + const { routingTable, network, validators, queryManager, logPrefix } = init; + this.routingTable = routingTable; + this.network = network; + this.validators = validators; + this.queryManager = queryManager; + this.peerStore = components.peerStore; + this.peerId = components.peerId; + this.log = components.logger.forComponent(`${logPrefix}:peer-routing`); + } + /** + * Look if we are connected to a peer with the given id. + * Returns its id and addresses, if found, otherwise `undefined`. 
+ */ + async findPeerLocal(peer) { + let peerData; + const p = await this.routingTable.find(peer); + if (p != null) { + this.log('findPeerLocal found %p in routing table', peer); + try { + peerData = await this.peerStore.get(p); + } + catch (err) { + if (err.code !== 'ERR_NOT_FOUND') { + throw err; + } + } + } + if (peerData == null) { + try { + peerData = await this.peerStore.get(peer); + } + catch (err) { + if (err.code !== 'ERR_NOT_FOUND') { + throw err; + } + } + } + if (peerData != null) { + this.log('findPeerLocal found %p in peer store', peer); + return { + id: peerData.id, + multiaddrs: peerData.addresses.map((address) => address.multiaddr) + }; + } + return undefined; + } + /** + * Get a value via rpc call for the given parameters + */ + async *_getValueSingle(peer, key, options = {}) { + const msg = { + type: MessageType.GET_VALUE, + key + }; + yield* this.network.sendRequest(peer, msg, options); + } + /** + * Get the public key directly from a node + */ + async *getPublicKeyFromNode(peer, options = {}) { + const pkKey = keyForPublicKey(peer); + for await (const event of this._getValueSingle(peer, pkKey, options)) { + yield event; + if (event.name === 'PEER_RESPONSE' && event.record != null) { + const recPeer = await peerIdFromKeys(marshalPublicKey({ bytes: event.record.value })); + // compare hashes of the pub key + if (!recPeer.equals(peer)) { + throw new CodeError$2('public key does not match id', 'ERR_PUBLIC_KEY_DOES_NOT_MATCH_ID'); + } + if (recPeer.publicKey == null) { + throw new CodeError$2('public key missing', 'ERR_PUBLIC_KEY_MISSING'); + } + yield valueEvent({ from: peer, value: recPeer.publicKey }, options); + } + } + throw new CodeError$2(`Node not responding with its public key: ${peer.toString()}`, 'ERR_INVALID_RECORD'); + } + /** + * Search for a peer with the given ID + */ + async *findPeer(id, options = {}) { + this.log('findPeer %p', id); + if (options.useCache !== false) { + // Try to find locally + const pi = await 
this.findPeerLocal(id); + // already got it + if (pi != null) { + this.log('found local'); + yield finalPeerEvent({ + from: this.peerId, + peer: pi + }, options); + return; + } + } + let foundPeer = false; + if (options.useNetwork !== false) { + const self = this; // eslint-disable-line @typescript-eslint/no-this-alias + const findPeerQuery = async function* ({ peer, signal }) { + const request = { + type: MessageType.FIND_NODE, + key: id.toBytes() + }; + for await (const event of self.network.sendRequest(peer, request, { + ...options, + signal + })) { + yield event; + if (event.name === 'PEER_RESPONSE') { + const match = event.closer.find((p) => p.id.equals(id)); + // found the peer + if (match != null) { + yield finalPeerEvent({ from: event.from, peer: match }, options); + } + } + } + }; + for await (const event of this.queryManager.run(id.toBytes(), findPeerQuery, options)) { + if (event.name === 'FINAL_PEER') { + foundPeer = true; + } + yield event; + } + } + if (!foundPeer) { + yield queryErrorEvent({ from: this.peerId, error: new CodeError$2('Not found', 'ERR_NOT_FOUND') }, options); + } + } + /** + * Kademlia 'FIND_NODE' operation on a key, which could be the bytes from + * a multihash or a peer ID + */ + async *getClosestPeers(key, options = {}) { + this.log('getClosestPeers to %b', key); + const kadId = await convertBuffer(key); + const tablePeers = this.routingTable.closestPeers(kadId); + const self = this; // eslint-disable-line @typescript-eslint/no-this-alias + const peers = new PeerDistanceList(kadId, this.routingTable.kBucketSize); + await Promise.all(tablePeers.map(async (peer) => { await peers.add({ id: peer, multiaddrs: [] }); })); + const getCloserPeersQuery = async function* ({ peer, signal }) { + self.log('closerPeersSingle %s from %p', toString$1(key, 'base32'), peer); + const request = { + type: MessageType.FIND_NODE, + key + }; + yield* self.network.sendRequest(peer, request, { + ...options, + signal + }); + }; + for await (const event of 
this.queryManager.run(key, getCloserPeersQuery, options)) { + if (event.name === 'PEER_RESPONSE') { + await Promise.all(event.closer.map(async (peerData) => { + await peers.add(peerData); + })); + } + yield event; + } + this.log('found %d peers close to %b', peers.length, key); + for (const peer of peers.peers) { + yield finalPeerEvent({ + from: this.peerId, + peer + }, options); + } + } + /** + * Query a particular peer for the value for the given key. + * It will either return the value or a list of closer peers. + * + * Note: The peerStore is updated with new addresses found for the given peer. + */ + async *getValueOrPeers(peer, key, options = {}) { + for await (const event of this._getValueSingle(peer, key, options)) { + if (event.name === 'PEER_RESPONSE') { + if (event.record != null) { + // We have a record + try { + await this._verifyRecordOnline(event.record); + } + catch (err) { + const errMsg = 'invalid record received, discarded'; + this.log(errMsg); + yield queryErrorEvent({ from: event.from, error: new CodeError$2(errMsg, 'ERR_INVALID_RECORD') }, options); + continue; + } + } + } + yield event; + } + } + /** + * Verify a record, fetching missing public keys from the network. + * Throws an error if the record is invalid. 
+ */ + async _verifyRecordOnline(record) { + if (record.timeReceived == null) { + throw new CodeError$2('invalid record received', 'ERR_INVALID_RECORD'); + } + await verifyRecord(this.validators, new Libp2pRecord(record.key, record.value, record.timeReceived)); + } + /** + * Get the nearest peers to the given query, but if closer + * than self + */ + async getCloserPeersOffline(key, closerThan) { + const id = await convertBuffer(key); + const ids = this.routingTable.closestPeers(id); + const output = []; + for (const peerId of ids) { + if (peerId.equals(closerThan)) { + continue; + } + try { + const peer = await this.peerStore.get(peerId); + output.push({ + id: peerId, + multiaddrs: peer.addresses.map(({ multiaddr }) => multiaddr) + }); + } + catch (err) { + if (err.code !== 'ERR_NOT_FOUND') { + throw err; + } + } + } + if (output.length > 0) { + this.log('getCloserPeersOffline found %d peer(s) closer to %b than %p', output.length, key, closerThan); + } + else { + this.log('getCloserPeersOffline could not find peer closer to %b than %p with %d peers in the routing table', key, closerThan, this.routingTable.size); + } + return output; + } + } + + /** + * This class manages known providers. + * A provider is a peer that we know to have the content for a given CID. + * + * Every `cleanupInterval` providers are checked if they + * are still valid, i.e. younger than the `provideValidity`. + * If they are not, they are deleted. + * + * To ensure the list survives restarts of the daemon, + * providers are stored in the datastore, but to ensure + * access is fast there is an LRU cache in front of that. 
+ */ + class Providers { + log; + datastore; + cache; + cleanupInterval; + provideValidity; + syncQueue; + started; + cleaner; + constructor(components, init = {}) { + const { cacheSize, cleanupInterval, provideValidity } = init; + this.log = components.logger.forComponent('libp2p:kad-dht:providers'); + this.datastore = components.datastore; + this.cleanupInterval = cleanupInterval ?? PROVIDERS_CLEANUP_INTERVAL; + this.provideValidity = provideValidity ?? PROVIDERS_VALIDITY; + this.cache = cache$1(cacheSize ?? PROVIDERS_LRU_CACHE_SIZE); + this.syncQueue = new PQueue({ concurrency: 1 }); + this.started = false; + } + isStarted() { + return this.started; + } + /** + * Start the provider cleanup service + */ + async start() { + if (this.started) { + return; + } + this.started = true; + this.cleaner = setInterval(() => { + this._cleanup().catch(err => { + this.log.error(err); + }); + }, this.cleanupInterval); + } + /** + * Release any resources. + */ + async stop() { + this.started = false; + if (this.cleaner != null) { + clearInterval(this.cleaner); + this.cleaner = undefined; + } + } + /** + * Check all providers if they are still valid, and if not delete them + */ + async _cleanup() { + await this.syncQueue.add(async () => { + const start = Date.now(); + let count = 0; + let deleteCount = 0; + const deleted = new Map(); + const batch = this.datastore.batch(); + // Get all provider entries from the datastore + const query = this.datastore.query({ prefix: PROVIDER_KEY_PREFIX }); + for await (const entry of query) { + try { + // Add a delete to the batch for each expired entry + const { cid, peerId } = parseProviderKey(entry.key); + const time = readTime(entry.value).getTime(); + const now = Date.now(); + const delta = now - time; + const expired = delta > this.provideValidity; + this.log('comparing: %d - %d = %d > %d %s', now, time, delta, this.provideValidity, expired ? 
'(expired)' : ''); + if (expired) { + deleteCount++; + batch.delete(entry.key); + const peers = deleted.get(cid) ?? new Set(); + peers.add(peerId); + deleted.set(cid, peers); + } + count++; + } + catch (err) { + this.log.error(err.message); + } + } + // Commit the deletes to the datastore + if (deleted.size > 0) { + this.log('deleting %d / %d entries', deleteCount, count); + await batch.commit(); + } + else { + this.log('nothing to delete'); + } + // Clear expired entries from the cache + for (const [cid, peers] of deleted) { + const key = makeProviderKey(cid); + const provs = this.cache.get(key); + if (provs != null) { + for (const peerId of peers) { + provs.delete(peerId); + } + if (provs.size === 0) { + this.cache.remove(key); + } + else { + this.cache.set(key, provs); + } + } + } + this.log('Cleanup successful (%dms)', Date.now() - start); + }); + } + /** + * Get the currently known provider peer ids for a given CID + */ + async _getProvidersMap(cid) { + const cacheKey = makeProviderKey(cid); + let provs = this.cache.get(cacheKey); + if (provs == null) { + provs = await loadProviders(this.datastore, cid); + this.cache.set(cacheKey, provs); + } + return provs; + } + /** + * Add a new provider for the given CID + */ + async addProvider(cid, provider) { + await this.syncQueue.add(async () => { + this.log('%p provides %s', provider, cid); + const provs = await this._getProvidersMap(cid); + this.log('loaded %s provs', provs.size); + const now = new Date(); + provs.set(provider.toString(), now); + const dsKey = makeProviderKey(cid); + this.cache.set(dsKey, provs); + await writeProviderEntry(this.datastore, cid, provider, now); + }); + } + /** + * Get a list of providers for the given CID + */ + async getProviders(cid) { + return this.syncQueue.add(async () => { + this.log('get providers for %s', cid); + const provs = await this._getProvidersMap(cid); + return [...provs.keys()].map(peerIdStr => { + return peerIdFromString(peerIdStr); + }); + }, { + // no timeout is 
specified for this queue so it will not + // throw, but this is required to get the right return + // type since p-queue@7.3.4 + throwOnTimeout: true + }); + } + } + /** + * Encode the given key its matching datastore key + */ + function makeProviderKey(cid) { + const cidStr = typeof cid === 'string' ? cid : toString$1(cid.multihash.bytes, 'base32'); + return `${PROVIDER_KEY_PREFIX}/${cidStr}`; + } + /** + * Write a provider into the given store + */ + async function writeProviderEntry(store, cid, peer, time) { + const dsKey = [ + makeProviderKey(cid), + '/', + peer.toString() + ].join(''); + const key = new Key(dsKey); + const buffer = encode$5(time.getTime()); + await store.put(key, buffer); + } + /** + * Parse the CID and provider peer id from the key + */ + function parseProviderKey(key) { + const parts = key.toString().split('/'); + if (parts.length !== 5) { + throw new Error(`incorrectly formatted provider entry key in datastore: ${key.toString()}`); + } + return { + cid: parts[3], + peerId: parts[4] + }; + } + /** + * Load providers for the given CID from the store + */ + async function loadProviders(store, cid) { + const providers = new Map(); + const query = store.query({ prefix: makeProviderKey(cid) }); + for await (const entry of query) { + const { peerId } = parseProviderKey(entry.key); + providers.set(peerId, readTime(entry.value)); + } + return providers; + } + function readTime(buf) { + return new Date(decode$6(buf)); + } + + /** + * Walks a path through the DHT, calling the passed query function for + * every peer encountered that we have not seen before + */ + async function* queryPath(options) { + const { key, startingPeer, ourPeerId, signal, query, alpha, pathIndex, numPaths, queryFuncTimeout, log, peersSeen, connectionManager } = options; + // Only ALPHA node/value lookups are allowed at any given time for each process + // https://github.com/libp2p/specs/tree/master/kad-dht#alpha-concurrency-parameter-%CE%B1 + const queue = new Queue({ + 
concurrency: alpha, + sort: (a, b) => xorCompare(a.options.distance, b.options.distance) + }); + // perform lookups on kadId, not the actual value + const kadId = await convertBuffer(key); + /** + * Adds the passed peer to the query queue if it's not us and no + * other path has passed through this peer + */ + function queryPeer(peer, peerKadId) { + if (peer == null) { + return; + } + peersSeen.add(peer); + const peerXor = xor(peerKadId, kadId); + queue.add(async () => { + const signals = [signal]; + if (queryFuncTimeout != null) { + signals.push(AbortSignal.timeout(queryFuncTimeout)); + } + const compoundSignal = anySignal(signals); + // this signal can get listened to a lot + setMaxListeners(Infinity, compoundSignal); + try { + for await (const event of query({ + key, + peer, + signal: compoundSignal, + pathIndex, + numPaths + })) { + if (compoundSignal.aborted) { + return; + } + // if there are closer peers and the query has not completed, continue the query + if (event.name === 'PEER_RESPONSE') { + for (const closerPeer of event.closer) { + if (peersSeen.has(closerPeer.id)) { // eslint-disable-line max-depth + log('already seen %p in query', closerPeer.id); + continue; + } + if (ourPeerId.equals(closerPeer.id)) { // eslint-disable-line max-depth + log('not querying ourselves'); + continue; + } + if (!(await connectionManager.isDialable(closerPeer.multiaddrs))) { // eslint-disable-line max-depth + log('not querying undialable peer'); + continue; + } + const closerPeerKadId = await convertPeerId(closerPeer.id); + const closerPeerXor = xor(closerPeerKadId, kadId); + // only continue query if closer peer is actually closer + if (xorCompare(closerPeerXor, peerXor) !== -1) { // eslint-disable-line max-depth + log('skipping %p as they are not closer to %b than %p', closerPeer.id, key, peer); + continue; + } + log('querying closer peer %p', closerPeer.id); + queryPeer(closerPeer.id, closerPeerKadId); + } + } + queue.safeDispatchEvent('completed', { + detail: event + 
}); + } + } + catch (err) { + if (!signal.aborted) { + return queryErrorEvent({ + from: peer, + error: err + }, options); + } + } + finally { + compoundSignal.clear(); + } + }, { + distance: peerXor + }).catch(err => { + log.error(err); + }); + } + // begin the query with the starting peer + queryPeer(startingPeer, await convertPeerId(startingPeer)); + try { + // yield results as they come in + for await (const event of queue.toGenerator({ signal })) { + if (event != null) { + yield event; + } + } + } + catch (err) { + if (signal.aborted) { + throw new CodeError$2('Query aborted', 'ERR_QUERY_ABORTED'); + } + throw err; + } + } + + /** + * Keeps track of all running queries + */ + class QueryManager { + disjointPaths; + alpha; + shutDownController; + running; + queries; + logger; + peerId; + connectionManager; + routingTable; + initialQuerySelfHasRun; + logPrefix; + metrics; + constructor(components, init) { + const { disjointPaths = K, alpha = ALPHA, logPrefix } = init; + this.logPrefix = logPrefix; + this.disjointPaths = disjointPaths ?? K; + this.running = false; + this.alpha = alpha ?? 
ALPHA; + this.queries = 0; + this.initialQuerySelfHasRun = init.initialQuerySelfHasRun; + this.routingTable = init.routingTable; + this.logger = components.logger; + this.peerId = components.peerId; + this.connectionManager = components.connectionManager; + if (components.metrics != null) { + this.metrics = { + runningQueries: components.metrics.registerMetric(`${logPrefix.replaceAll(':', '_')}_running_queries`), + queryTime: components.metrics.registerMetric(`${logPrefix.replaceAll(':', '_')}_query_time_seconds`) + }; + } + // allow us to stop queries on shut down + this.shutDownController = new AbortController(); + // make sure we don't make a lot of noise in the logs + setMaxListeners(Infinity, this.shutDownController.signal); + } + isStarted() { + return this.running; + } + /** + * Starts the query manager + */ + async start() { + this.running = true; + // allow us to stop queries on shut down + this.shutDownController = new AbortController(); + // make sure we don't make a lot of noise in the logs + setMaxListeners(Infinity, this.shutDownController.signal); + } + /** + * Stops all queries + */ + async stop() { + this.running = false; + this.shutDownController.abort(); + } + async *run(key, queryFunc, options = {}) { + if (!this.running) { + throw new Error('QueryManager not started'); + } + const stopQueryTimer = this.metrics?.queryTime.timer(); + if (options.signal == null) { + // don't let queries run forever + const signal = AbortSignal.timeout(DEFAULT_QUERY_TIMEOUT); + // this signal will get listened to for network requests, etc + // so make sure we don't make a lot of noise in the logs + setMaxListeners(Infinity, signal); + options = { + ...options, + signal + }; + } + // if the user breaks out of a for..await of loop iterating over query + // results we need to cancel any in-flight network requests + const queryEarlyExitController = new AbortController(); + const signal = anySignal([ + this.shutDownController.signal, + queryEarlyExitController.signal, + 
options.signal + ]); + // this signal will get listened to for every invocation of queryFunc + // so make sure we don't make a lot of noise in the logs + setMaxListeners(Infinity, signal, queryEarlyExitController.signal); + const log = this.logger.forComponent(`${this.logPrefix}:query:` + toString$1(key, 'base58btc')); + // query a subset of peers up to `kBucketSize / 2` in length + const startTime = Date.now(); + let queryFinished = false; + try { + if (options.isSelfQuery !== true && this.initialQuerySelfHasRun != null) { + log('waiting for initial query-self query before continuing'); + await raceSignal(this.initialQuerySelfHasRun.promise, signal); + this.initialQuerySelfHasRun = undefined; + } + log('query:start'); + this.queries++; + this.metrics?.runningQueries.update(this.queries); + const id = await convertBuffer(key); + const peers = this.routingTable.closestPeers(id); + const peersToQuery = peers.slice(0, Math.min(this.disjointPaths, peers.length)); + if (peers.length === 0) { + log.error('Running query with no peers'); + return; + } + // make sure we don't get trapped in a loop + const peersSeen = new PeerSet(); + // Create query paths from the starting peers + const paths = peersToQuery.map((peer, index) => { + return queryPath({ + key, + startingPeer: peer, + ourPeerId: this.peerId, + signal, + query: queryFunc, + pathIndex: index, + numPaths: peersToQuery.length, + alpha: this.alpha, + queryFuncTimeout: options.queryFuncTimeout, + log, + peersSeen, + onProgress: options.onProgress, + connectionManager: this.connectionManager + }); + }); + // Execute the query along each disjoint path and yield their results as they become available + for await (const event of merge(...paths)) { + if (event.name === 'QUERY_ERROR') { + log.error('query error', event.error); + } + if (event.name === 'PEER_RESPONSE') { + for (const peer of [...event.closer, ...event.providers]) { + // eslint-disable-next-line max-depth + if (!(await 
this.connectionManager.isDialable(peer.multiaddrs))) { + continue; + } + await this.routingTable.add(peer.id); + } + } + yield event; + } + queryFinished = true; + } + catch (err) { + if (!this.running && err.code === 'ERR_QUERY_ABORTED') ; + else { + throw err; + } + } + finally { + if (!queryFinished) { + log('query exited early'); + queryEarlyExitController.abort(); + } + signal.clear(); + this.queries--; + this.metrics?.runningQueries.update(this.queries); + if (stopQueryTimer != null) { + stopQueryTimer(); + } + log('query:done in %dms', Date.now() - startTime); + } + } + } + + /** + * @packageDocumentation + * + * Counts the number of items in an (async)iterable. + * + * N.b. will consume the iterable + * + * @example + * + * ```javascript + * import length from 'it-length' + * + * // This can also be an iterator, generator, etc + * const values = [0, 1, 2, 3, 4] + * + * const res = length(values) + * + * console.info(res) // 5 + * ``` + * + * Async sources must be awaited: + * + * ```javascript + * import length from 'it-length' + * + * const values = async function * () { + * yield * [0, 1, 2, 3, 4] + * } + * + * const res = await length(values()) + * + * console.info(res) // 5 + * ``` + */ + function isAsyncIterable(thing) { + return thing[Symbol.asyncIterator] != null; + } + function length(source) { + if (isAsyncIterable(source)) { + return (async () => { + let count = 0; + for await (const _ of source) { // eslint-disable-line no-unused-vars,@typescript-eslint/no-unused-vars + count++; + } + return count; + })(); + } + else { + let count = 0; + for (const _ of source) { // eslint-disable-line no-unused-vars,@typescript-eslint/no-unused-vars + count++; + } + return count; + } + } + + const normalizeEmitter = emitter => { + const addListener = emitter.addEventListener || emitter.on || emitter.addListener; + const removeListener = emitter.removeEventListener || emitter.off || emitter.removeListener; + + if (!addListener || !removeListener) { + throw new 
TypeError('Emitter is not compatible'); + } + + return { + addListener: addListener.bind(emitter), + removeListener: removeListener.bind(emitter), + }; + }; + + function pEventMultiple(emitter, event, options) { + let cancel; + const returnValue = new Promise((resolve, reject) => { + options = { + rejectionEvents: ['error'], + multiArgs: false, + resolveImmediately: false, + ...options, + }; + + if (!(options.count >= 0 && (options.count === Number.POSITIVE_INFINITY || Number.isInteger(options.count)))) { + throw new TypeError('The `count` option should be at least 0 or more'); + } + + options.signal?.throwIfAborted(); + + // Allow multiple events + const events = [event].flat(); + + const items = []; + const {addListener, removeListener} = normalizeEmitter(emitter); + + const onItem = (...arguments_) => { + const value = options.multiArgs ? arguments_ : arguments_[0]; + + // eslint-disable-next-line unicorn/no-array-callback-reference + if (options.filter && !options.filter(value)) { + return; + } + + items.push(value); + + if (options.count === items.length) { + cancel(); + resolve(items); + } + }; + + const rejectHandler = error => { + cancel(); + reject(error); + }; + + cancel = () => { + for (const event of events) { + removeListener(event, onItem); + } + + for (const rejectionEvent of options.rejectionEvents) { + removeListener(rejectionEvent, rejectHandler); + } + }; + + for (const event of events) { + addListener(event, onItem); + } + + for (const rejectionEvent of options.rejectionEvents) { + addListener(rejectionEvent, rejectHandler); + } + + if (options.signal) { + options.signal.addEventListener('abort', () => { + rejectHandler(options.signal.reason); + }, {once: true}); + } + + if (options.resolveImmediately) { + resolve(items); + } + }); + + returnValue.cancel = cancel; + + if (typeof options.timeout === 'number') { + const timeout = pTimeout(returnValue, {milliseconds: options.timeout}); + timeout.cancel = cancel; + return timeout; + } + + return 
returnValue; + } + + function pEvent(emitter, event, options) { + if (typeof options === 'function') { + options = {filter: options}; + } + + options = { + ...options, + count: 1, + resolveImmediately: false, + }; + + const arrayPromise = pEventMultiple(emitter, event, options); + const promise = arrayPromise.then(array => array[0]); + promise.cancel = arrayPromise.cancel; + + return promise; + } + + /** + * Receives notifications of new peers joining the network that support the DHT protocol + */ + class QuerySelf { + log; + peerId; + peerRouting; + routingTable; + count; + interval; + initialInterval; + queryTimeout; + started; + timeoutId; + controller; + initialQuerySelfHasRun; + querySelfPromise; + constructor(components, init) { + const { peerRouting, logPrefix, count, interval, queryTimeout, routingTable } = init; + this.peerId = components.peerId; + this.log = components.logger.forComponent(`${logPrefix}:query-self`); + this.started = false; + this.peerRouting = peerRouting; + this.routingTable = routingTable; + this.count = count ?? K; + this.interval = interval ?? QUERY_SELF_INTERVAL; + this.initialInterval = init.initialInterval ?? QUERY_SELF_INITIAL_INTERVAL; + this.queryTimeout = queryTimeout ?? 
QUERY_SELF_TIMEOUT; + this.initialQuerySelfHasRun = init.initialQuerySelfHasRun; + } + isStarted() { + return this.started; + } + start() { + if (this.started) { + return; + } + this.started = true; + clearTimeout(this.timeoutId); + this.timeoutId = setTimeout(() => { + this.querySelf() + .catch(err => { + this.log.error('error running self-query', err); + }); + }, this.initialInterval); + } + stop() { + this.started = false; + if (this.timeoutId != null) { + clearTimeout(this.timeoutId); + } + if (this.controller != null) { + this.controller.abort(); + } + } + async querySelf() { + if (!this.started) { + this.log('skip self-query because we are not started'); + return; + } + if (this.querySelfPromise != null) { + this.log('joining existing self query'); + return this.querySelfPromise.promise; + } + this.querySelfPromise = pDefer(); + if (this.started) { + this.controller = new AbortController(); + const timeoutSignal = AbortSignal.timeout(this.queryTimeout); + const signal = anySignal([this.controller.signal, timeoutSignal]); + // this controller will get used for lots of dial attempts so make sure we don't cause warnings to be logged + setMaxListeners(Infinity, signal, this.controller.signal, timeoutSignal); + try { + if (this.routingTable.size === 0) { + this.log('routing table was empty, waiting for some peers before running query'); + // wait to discover at least one DHT peer + await pEvent(this.routingTable, 'peer:add', { + signal + }); + } + this.log('run self-query, look for %d peers timing out after %dms', this.count, this.queryTimeout); + const start = Date.now(); + const found = await pipe(this.peerRouting.getClosestPeers(this.peerId.toBytes(), { + signal, + isSelfQuery: true + }), (source) => take(source, this.count), async (source) => length(source)); + this.log('self-query found %d peers in %dms', found, Date.now() - start); + } + catch (err) { + this.log.error('self-query error', err); + } + finally { + signal.clear(); + if 
(this.initialQuerySelfHasRun != null) { + this.initialQuerySelfHasRun.resolve(); + this.initialQuerySelfHasRun = undefined; + } + } + } + this.querySelfPromise.resolve(); + this.querySelfPromise = undefined; + if (!this.started) { + return; + } + this.timeoutId = setTimeout(() => { + this.querySelf() + .catch(err => { + this.log.error('error running self-query', err); + }); + }, this.interval); + } + } + + function arrayEquals(array1, array2) { + if (array1 === array2) { + return true; + } + if (array1.length !== array2.length) { + return false; + } + for (let i = 0, length = array1.length; i < length; ++i) { + if (array1[i] !== array2[i]) { + return false; + } + } + return true; + } + function ensureInt8(name, val) { + if (!(val instanceof Uint8Array)) { + throw new TypeError(name + ' is not a Uint8Array'); + } + if (val.byteLength !== 32) { + throw new TypeError(name + ' had incorrect length'); + } + } + function isLeafBucket(obj) { + return Array.isArray(obj?.peers); + } + /** + * Implementation of a Kademlia DHT routing table as a prefix binary trie with + * configurable prefix length, bucket split threshold and size. + */ + class KBucket extends TypedEventEmitter { + root; + localPeer; + prefixLength; + splitThreshold; + kBucketSize; + numberOfNodesToPing; + constructor(options) { + super(); + this.localPeer = options.localPeer; + this.prefixLength = options.prefixLength; + this.kBucketSize = options.kBucketSize ?? KBUCKET_SIZE; + this.splitThreshold = options.splitThreshold ?? this.kBucketSize; + this.numberOfNodesToPing = options.numberOfNodesToPing ?? 3; + ensureInt8('options.localPeer.kadId', options.localPeer.kadId); + this.root = { + prefix: '', + depth: 0, + peers: [] + }; + } + /** + * Adds a contact to the k-bucket. 
+ * + * @param {Peer} peer - the contact object to add + */ + add(peer) { + ensureInt8('peer.kadId', peer?.kadId); + const bucket = this._determineBucket(peer.kadId); + // check if the contact already exists + if (this._indexOf(bucket, peer.kadId) > -1) { + return; + } + // are there too many peers in the bucket and can we make the trie deeper? + if (bucket.peers.length === this.splitThreshold && bucket.depth < this.prefixLength) { + // split the bucket + this._split(bucket); + // try again + this.add(peer); + return; + } + // is there space in the bucket? + if (bucket.peers.length < this.kBucketSize) { + bucket.peers.push(peer); + this.safeDispatchEvent('added', { detail: peer }); + return; + } + // we are at the bottom of the trie and the bucket is full so we can't add + // any more peers. + // + // instead ping the first this.numberOfNodesToPing in order to determine + // if they are still online. + // + // only add the new peer if one of the pinged nodes does not respond, this + // prevents DoS flooding with new invalid contacts. + this.safeDispatchEvent('ping', { + detail: { + oldContacts: bucket.peers.slice(0, this.numberOfNodesToPing), + newContact: peer + } + }); + } + /** + * Get 0-n closest contacts to the provided node id. "Closest" here means: + * closest according to the XOR metric of the contact node id. + * + * @param {Uint8Array} id - Contact node id + * @returns {Generator} Array Maximum of n closest contacts to the node id + */ + *closest(id, n = this.kBucketSize) { + const list = new PeerDistanceList(id, n); + for (const peer of this.toIterable()) { + list.addWitKadId({ id: peer.peerId, multiaddrs: [] }, peer.kadId); + } + yield* map(list.peers, info => info.id); + } + /** + * Counts the total number of contacts in the tree. 
+ * + * @returns {number} The number of contacts held in the tree + */ + count() { + function countBucket(bucket) { + if (isLeafBucket(bucket)) { + return bucket.peers.length; + } + let count = 0; + if (bucket.left != null) { + count += countBucket(bucket.left); + } + if (bucket.right != null) { + count += countBucket(bucket.right); + } + return count; + } + return countBucket(this.root); + } + /** + * Get a contact by its exact ID. + * If this is a leaf, loop through the bucket contents and return the correct + * contact if we have it or null if not. If this is an inner node, determine + * which branch of the tree to traverse and repeat. + * + * @param {Uint8Array} kadId - The ID of the contact to fetch. + * @returns {object | undefined} The contact if available, otherwise null + */ + get(kadId) { + const bucket = this._determineBucket(kadId); + const index = this._indexOf(bucket, kadId); + return bucket.peers[index]; + } + /** + * Removes contact with the provided id. + * + * @param {Uint8Array} kadId - The ID of the contact to remove + */ + remove(kadId) { + const bucket = this._determineBucket(kadId); + const index = this._indexOf(bucket, kadId); + if (index > -1) { + const peer = bucket.peers.splice(index, 1)[0]; + this.safeDispatchEvent('removed', { + detail: peer + }); + } + } + /** + * Similar to `toArray()` but instead of buffering everything up into an + * array before returning it, yields contacts as they are encountered while + * walking the tree. + * + * @returns {Iterable} All of the contacts in the tree, as an iterable + */ + *toIterable() { + function* iterate(bucket) { + if (isLeafBucket(bucket)) { + yield* bucket.peers; + return; + } + yield* iterate(bucket.left); + yield* iterate(bucket.right); + } + yield* iterate(this.root); + } + /** + * Default distance function. Finds the XOR distance between firstId and + * secondId. + * + * @param {Uint8Array} firstId - Uint8Array containing first id. 
+ * @param {Uint8Array} secondId - Uint8Array containing second id. + * @returns {number} Integer The XOR distance between firstId and secondId. + */ + distance(firstId, secondId) { + return BigInt('0x' + toString$1(xor(firstId, secondId), 'base16')); + } + /** + * Determines whether the id at the bitIndex is 0 or 1 + * Return left leaf if `id` at `bitIndex` is 0, right leaf otherwise + * + * @param {Uint8Array} kadId - Id to compare localNodeId with + * @returns {LeafBucket} left leaf if id at bitIndex is 0, right leaf otherwise. + */ + _determineBucket(kadId) { + const bitString = toString$1(kadId, 'base2'); + const prefix = bitString.substring(0, this.prefixLength); + function findBucket(bucket, bitIndex = 0) { + if (isLeafBucket(bucket)) { + return bucket; + } + const bit = prefix[bitIndex]; + if (bit === '0') { + return findBucket(bucket.left, bitIndex + 1); + } + return findBucket(bucket.right, bitIndex + 1); + } + return findBucket(this.root); + } + /** + * Returns the index of the contact with provided + * id if it exists, returns -1 otherwise. + * + * @param {object} bucket - internal object that has 2 leafs: left and right + * @param {Uint8Array} kadId - KadId of peer + * @returns {number} Integer Index of contact with provided id if it exists, -1 otherwise. 
+ */ + _indexOf(bucket, kadId) { + return bucket.peers.findIndex(peer => arrayEquals(peer.kadId, kadId)); + } + /** + * Modify the bucket, turn it from a leaf bucket to an internal bucket + * + * @param {any} bucket - bucket for splitting + */ + _split(bucket) { + const depth = bucket.depth + 1; + // create child buckets + const left = { + prefix: '0', + depth, + peers: [] + }; + const right = { + prefix: '1', + depth, + peers: [] + }; + // redistribute peers + for (const peer of bucket.peers) { + const bitString = toString$1(peer.kadId, 'base2'); + if (bitString[depth] === '0') { + left.peers.push(peer); + } + else { + right.peers.push(peer); + } + } + // convert leaf bucket to internal bucket + // @ts-expect-error peers is not a property of LeafBucket + delete bucket.peers; + // @ts-expect-error left is not a property of LeafBucket + bucket.left = left; + // @ts-expect-error right is not a property of LeafBucket + bucket.right = right; + } + } + + const KAD_CLOSE_TAG_NAME = 'kad-close'; + const KAD_CLOSE_TAG_VALUE = 50; + const KBUCKET_SIZE = 20; + const PREFIX_LENGTH = 32; + const PING_TIMEOUT = 10000; + const PING_CONCURRENCY = 10; + /** + * A wrapper around `k-bucket`, to provide easy store and + * retrieval for peers. + */ + class RoutingTable extends TypedEventEmitter { + kBucketSize; + kb; + pingQueue; + log; + components; + prefixLength; + splitThreshold; + pingTimeout; + pingConcurrency; + running; + protocol; + tagName; + tagValue; + metrics; + constructor(components, init) { + super(); + this.components = components; + this.log = components.logger.forComponent(`${init.logPrefix}:routing-table`); + this.kBucketSize = init.kBucketSize ?? KBUCKET_SIZE; + this.pingTimeout = init.pingTimeout ?? PING_TIMEOUT; + this.pingConcurrency = init.pingConcurrency ?? PING_CONCURRENCY; + this.running = false; + this.protocol = init.protocol; + this.tagName = init.tagName ?? KAD_CLOSE_TAG_NAME; + this.tagValue = init.tagValue ?? 
KAD_CLOSE_TAG_VALUE; + this.prefixLength = init.prefixLength ?? PREFIX_LENGTH; + this.splitThreshold = init.splitThreshold ?? KBUCKET_SIZE; + this.pingQueue = new PeerQueue({ + concurrency: this.pingConcurrency, + metricName: `${init.logPrefix.replaceAll(':', '_')}_ping_queue`, + metrics: this.components.metrics + }); + this.pingQueue.addEventListener('error', evt => { + this.log.error('error pinging peer', evt.detail); + }); + if (this.components.metrics != null) { + this.metrics = { + routingTableSize: this.components.metrics.registerMetric(`${init.logPrefix.replaceAll(':', '_')}_routing_table_size`), + routingTableKadBucketTotal: this.components.metrics.registerMetric(`${init.logPrefix.replaceAll(':', '_')}_routing_table_kad_bucket_total`), + routingTableKadBucketAverageOccupancy: this.components.metrics.registerMetric(`${init.logPrefix.replaceAll(':', '_')}_routing_table_kad_bucket_average_occupancy`), + routingTableKadBucketMaxDepth: this.components.metrics.registerMetric(`${init.logPrefix.replaceAll(':', '_')}_routing_table_kad_bucket_max_depth`) + }; + } + } + isStarted() { + return this.running; + } + async start() { + this.running = true; + const kBuck = new KBucket({ + localPeer: { + kadId: await convertPeerId(this.components.peerId), + peerId: this.components.peerId + }, + kBucketSize: this.kBucketSize, + prefixLength: this.prefixLength, + splitThreshold: this.splitThreshold, + numberOfNodesToPing: 1 + }); + this.kb = kBuck; + // test whether to evict peers + kBuck.addEventListener('ping', (evt) => { + this._onPing(evt).catch(err => { + this.log.error('could not process k-bucket ping event', err); + }); + }); + let peerStorePeers = 0; + // add existing peers from the peer store to routing table + for (const peer of await this.components.peerStore.all()) { + if (peer.protocols.includes(this.protocol)) { + const id = await convertPeerId(peer.id); + this.kb.add({ kadId: id, peerId: peer.id }); + peerStorePeers++; + } + } + this.log('added %d peer store 
peers to the routing table', peerStorePeers); + // tag kad-close peers + this._tagPeers(kBuck); + } + async stop() { + this.running = false; + this.pingQueue.clear(); + this.kb = undefined; + } + /** + * Keep track of our k-closest peers and tag them in the peer store as such + * - this will lower the chances that connections to them get closed when + * we reach connection limits + */ + _tagPeers(kBuck) { + let kClosest = new PeerSet(); + const updatePeerTags = debounce(() => { + const newClosest = new PeerSet(kBuck.closest(kBuck.localPeer.kadId, KBUCKET_SIZE)); + const addedPeers = newClosest.difference(kClosest); + const removedPeers = kClosest.difference(newClosest); + Promise.resolve() + .then(async () => { + for (const peer of addedPeers) { + await this.components.peerStore.merge(peer, { + tags: { + [this.tagName]: { + value: this.tagValue + } + } + }); + } + for (const peer of removedPeers) { + await this.components.peerStore.merge(peer, { + tags: { + [this.tagName]: undefined + } + }); + } + }) + .catch(err => { + this.log.error('Could not update peer tags', err); + }); + kClosest = newClosest; + }); + kBuck.addEventListener('added', (evt) => { + updatePeerTags(); + this.safeDispatchEvent('peer:add', { detail: evt.detail.peerId }); + }); + kBuck.addEventListener('removed', (evt) => { + updatePeerTags(); + this.safeDispatchEvent('peer:remove', { detail: evt.detail.peerId }); + }); + } + /** + * Called on the `ping` event from `k-bucket` when a bucket is full + * and cannot split. + * + * `oldContacts.length` is defined by the `numberOfNodesToPing` param + * passed to the `k-bucket` constructor. + * + * `oldContacts` will not be empty and is the list of contacts that + * have not been contacted for the longest. 
+ */ + async _onPing(evt) { + if (!this.running) { + return; + } + const { oldContacts, newContact } = evt.detail; + const results = await Promise.all(oldContacts.map(async (oldContact) => { + // if a previous ping wants us to ping this contact, re-use the result + const pingJob = this.pingQueue.find(oldContact.peerId); + if (pingJob != null) { + return pingJob.join(); + } + return this.pingQueue.add(async () => { + let stream; + try { + const options = { + signal: AbortSignal.timeout(this.pingTimeout) + }; + this.log('pinging old contact %p', oldContact.peerId); + const connection = await this.components.connectionManager.openConnection(oldContact.peerId, options); + stream = await connection.newStream(this.protocol, options); + const pb = pbStream(stream); + await pb.write({ + type: MessageType.PING + }, Message, options); + const response = await pb.read(Message, options); + await pb.unwrap().close(); + if (response.type !== MessageType.PING) { + throw new CodeError$2(`Incorrect message type received, expected PING got ${response.type}`, 'ERR_BAD_PING_RESPONSE'); + } + return true; + } + catch (err) { + if (this.running && this.kb != null) { + // only evict peers if we are still running, otherwise we evict + // when dialing is cancelled due to shutdown in progress + this.log.error('could not ping peer %p', oldContact.peerId, err); + this.log('evicting old contact after ping failed %p', oldContact.peerId); + this.kb.remove(oldContact.kadId); + } + stream?.abort(err); + return false; + } + finally { + this.metrics?.routingTableSize.update(this.size); + } + }, { + peerId: oldContact.peerId + }); + })); + const responded = results + .filter(res => res) + .length; + if (this.running && responded < oldContacts.length && this.kb != null) { + this.log('adding new contact %p', newContact.peerId); + this.kb.add(newContact); + } + } + // -- Public Interface + /** + * Amount of currently stored peers + */ + get size() { + if (this.kb == null) { + return 0; + } + return 
this.kb.count(); + } + /** + * Find a specific peer by id + */ + async find(peer) { + const key = await convertPeerId(peer); + return this.kb?.get(key)?.peerId; + } + /** + * Retrieve the closest peers to the given kadId + */ + closestPeer(kadId) { + const res = this.closestPeers(kadId, 1); + if (res.length > 0) { + return res[0]; + } + return undefined; + } + /** + * Retrieve the `count`-closest peers to the given kadId + */ + closestPeers(kadId, count = this.kBucketSize) { + if (this.kb == null) { + return []; + } + return [...this.kb.closest(kadId, count)]; + } + /** + * Add or update the routing table with the given peer + */ + async add(peerId) { + if (this.kb == null) { + throw new Error('RoutingTable is not started'); + } + const kadId = await convertPeerId(peerId); + this.kb.add({ kadId, peerId }); + this.log('added %p with kad id %b', peerId, kadId); + this.updateMetrics(); + } + /** + * Remove a given peer from the table + */ + async remove(peer) { + if (this.kb == null) { + throw new Error('RoutingTable is not started'); + } + const id = await convertPeerId(peer); + this.kb.remove(id); + this.updateMetrics(); + } + updateMetrics() { + if (this.metrics == null || this.kb == null) { + return; + } + let size = 0; + let buckets = 0; + let maxDepth = 0; + function count(bucket) { + if (isLeafBucket(bucket)) { + if (bucket.depth > maxDepth) { + maxDepth = bucket.depth; + } + buckets++; + size += bucket.peers.length; + return; + } + count(bucket.left); + count(bucket.right); + } + count(this.kb.root); + this.metrics.routingTableSize.update(size); + this.metrics.routingTableKadBucketTotal.update(buckets); + this.metrics.routingTableKadBucketAverageOccupancy.update(Math.round(size / buckets)); + this.metrics.routingTableKadBucketMaxDepth.update(maxDepth); + } + } + + var GENERATED_PREFIXES = [ + 77591, 22417, 43971, 28421, 740, 29829, 71467, 228973, 196661, 78537, 27689, 36431, 44415, 14362, 19456, 106025, + 96308, 2882, 49509, 21149, 87173, 131409, 75844, 23676, 
121838, 30291, 17492, 2953, 7564, 110620, 129477, 127283, + 53113, 72417, 165166, 109690, 21200, 102125, 24049, 71504, 90342, 25307, 72039, 26812, 26715, 32264, 133800, 71161, + 88956, 171987, 51779, 24425, 16671, 30251, 186294, 247761, 14202, 2121, 8465, 35024, 4876, 85917, 169730, 3638, + 256836, 96184, 943, 18678, 6583, 52907, 35807, 112254, 214097, 18796, 11595, 9243, 23554, 887, 268203, 382004, + 24590, 111335, 11625, 16619, 29039, 102425, 69006, 97976, 92362, 32552, 63717, 41433, 128974, 137630, 59943, 10019, + 13986, 35430, 33665, 108037, 43799, 43280, 38195, 29078, 58629, 18265, 14425, 46832, 235538, 40830, 77881, 110717, + 58937, 3463, 325358, 51300, 47623, 117252, 19007, 10170, 20540, 91237, 294813, 4951, 79841, 56232, 36270, 128547, + 69209, 66275, 100156, 32063, 73531, 34439, 80937, 28892, 44466, 88595, 216307, 32583, 49620, 16605, 82127, 45807, + 21630, 78726, 20235, 40163, 111007, 96926, 5567, 72083, 21665, 58844, 39419, 179767, 48328, 42662, 51550, 5251, + 37811, 49608, 81056, 50854, 55513, 20922, 18891, 197409, 164656, 32593, 71449, 220474, 58919, 85682, 67854, 13758, + 35066, 3565, 61905, 214793, 119572, 141419, 21504, 10302, 27354, 67003, 46131, 32668, 15165, 64871, 34450, 17821, + 2757, 11452, 34189, 5160, 12257, 85523, 560, 53385, 65887, 119549, 135620, 312353, 115979, 122356, 10867, 193231, + 124537, 54783, 90675, 120791, 4715, 142253, 50943, 17271, 43358, 25331, 4917, 120566, 34580, 12878, 33786, 160528, + 32523, 4869, 301307, 104817, 81491, 23276, 8832, 97911, 31265, 52065, 7998, 49622, 9715, 43998, 34091, 84587, + 20664, 69041, 29419, 53205, 10838, 58288, 116145, 6185, 5154, 141795, 35924, 21307, 144738, 43730, 12085, 8279, + 10002, 119, 133779, 199668, 72938, 31768, 39176, 67875, 38453, 9700, 44144, 4121, 116048, 41733, 12868, 82669, + 92308, 128, 34262, 11332, 7712, 90764, 36141, 13553, 71312, 77470, 117314, 96549, 49135, 23602, 54468, 28605, + 6327, 62308, 17171, 67531, 21319, 14105, 894, 107722, 46157, 8503, 51069, 100472, 45138, 15246, 
14577, 35609, + 191464, 1757, 13364, 161349, 32067, 91705, 81144, 52339, 5408, 91066, 21983, 14157, 100545, 4372, 26630, 129112, + 1423, 29676, 213626, 4397, 88436, 99190, 6877, 49958, 26122, 114348, 60661, 29818, 293118, 50042, 179738, 16400, + 163423, 89627, 31040, 43973, 36638, 45952, 5153, 1894, 109322, 1898, 134021, 12402, 112077, 68309, 190269, 69866, + 31938, 107383, 11522, 105232, 11248, 14868, 39852, 71707, 186525, 16530, 38162, 106212, 11700, 5130, 16608, 26998, + 59586, 108399, 230033, 43683, 48135, 82179, 2073, 5015, 196684, 189293, 16378, 23452, 8301, 35640, 11632, 214551, + 29240, 57644, 33137, 91949, 55157, 52384, 117313, 5090, 17717, 89668, 49363, 82238, 241035, 66216, 29066, 184088, + 97206, 62820, 26595, 4241, 135635, 173672, 8202, 459, 71355, 146294, 29587, 3008, 135385, 141203, 14803, 6634, + 45094, 69362, 50925, 546, 51884, 62011, 83296, 234584, 44515, 56050, 89476, 87751, 19373, 12691, 149923, 19794, + 13833, 35846, 87557, 58339, 2884, 19145, 25647, 12224, 11024, 77338, 64608, 122297, 53025, 7205, 36189, 36294, + 170779, 21750, 7739, 173883, 75192, 35664, 224240, 113121, 30181, 26267, 27036, 117827, 92015, 106516, 55628, 203549, + 67949, 60462, 60844, 35911, 20457, 1820, 920, 19773, 8738, 73173, 181993, 38521, 98254, 76257, 46008, 92796, + 5384, 26868, 151566, 22124, 2411, 15919, 186872, 180021, 28099, 152961, 78811, 80237, 62352, 102653, 74259, 184890, + 16792, 123702, 224945, 29940, 19512, 75283, 14059, 112691, 92811, 233329, 20411, 138569, 53341, 109802, 50600, 134528, + 66747, 5529, 166531, 31578, 64732, 67189, 1596, 126357, 967, 167999, 206598, 109752, 119431, 207825, 78791, 91938, + 10301, 27311, 24233, 252343, 28831, 32812, 66002, 112267, 90895, 8786, 8095, 16824, 22866, 21813, 60507, 174833, + 19549, 130985, 117051, 52110, 6938, 81923, 123864, 38061, 919, 18680, 53534, 46739, 112893, 161529, 85429, 26761, + 11900, 81121, 91968, 15390, 217947, 56524, 1713, 6654, 37089, 85630, 138866, 61850, 16491, 75577, 16884, 98296, + 73523, 6140, 
44645, 6062, 36366, 29844, 57946, 37932, 42472, 5266, 20834, 19309, 33753, 127182, 134259, 35810, + 41805, 45878, 312001, 14881, 47757, 49251, 120050, 44252, 3708, 25856, 107864, 120347, 1228, 36550, 41682, 34496, + 47025, 8393, 173365, 246526, 12894, 161607, 35670, 90785, 126572, 2095, 124731, 157033, 58694, 554, 12786, 9642, + 4817, 16136, 47864, 174698, 66992, 4639, 69284, 10625, 40710, 27763, 51738, 30404, 264105, 137904, 109882, 52487, + 42824, 57514, 2740, 10479, 146799, 107390, 16586, 88038, 174951, 9410, 16185, 44158, 5568, 40658, 46108, 12763, + 97385, 26175, 108859, 664, 230732, 67470, 46663, 14395, 50750, 141320, 93140, 15361, 47997, 55784, 6791, 307840, + 118569, 107326, 18056, 58281, 260415, 54691, 8790, 73332, 45633, 7511, 45674, 143373, 14031, 11799, 94491, 35646, + 96544, 14560, 26049, 32983, 25791, 83814, 42094, 231370, 63955, 139212, 2359, 169908, 3108, 183486, 105867, 28197, + 32941, 124968, 26402, 88267, 149768, 23053, 3078, 19091, 52924, 25383, 19209, 111548, 97361, 3959, 24880, 235061, + 9099, 24921, 161254, 151405, 20508, 7159, 34381, 20133, 11434, 74036, 19974, 34769, 36585, 1076, 22454, 17354, + 38727, 235160, 111547, 96454, 117448, 156940, 91330, 37299, 7310, 26915, 117060, 51369, 22620, 61861, 322264, 106850, + 111694, 15091, 2624, 40345, 300446, 177064, 1707, 27389, 54792, 327783, 132669, 183543, 59003, 17744, 20603, 151134, + 106923, 53084, 71803, 279424, 319816, 11579, 21946, 16728, 38274, 72711, 5085, 83391, 88646, 40159, 25027, 34680, + 10752, 12988, 54126, 30365, 18338, 100445, 230674, 44874, 84974, 143877, 123253, 139372, 28082, 91477, 144002, 13096, + 219729, 46016, 50029, 42377, 14601, 6660, 58244, 58978, 23918, 88206, 113611, 64452, 17541, 41032, 10942, 12021, + 49189, 10978, 40175, 37156, 10947, 71709, 106894, 112538, 57007, 137486, 150608, 152719, 40615, 7746, 279716, 13101, + 19524, 28708, 40578, 72320, 1096, 182051, 94527, 51275, 22833, 45164, 81917, 77519, 48508, 5421, 140302, 37845, + 149830, 5587, 27579, 5357, 428725, 
248187, 6326, 206760, 39814, 32585, 89923, 44341, 288753, 284443, 96368, 31201, + 94189, 119504, 20359, 52073, 103216, 179, 27934, 32801, 96035, 34111, 34309, 101326, 18198, 20704, 210266, 37643, + 27880, 141873, 106000, 19414, 56614, 167714, 66483, 107885, 86602, 4379, 20796, 75467, 4987, 5017, 118857, 26003, + 34308, 114428, 29198, 6686, 29697, 73632, 3739, 69795, 16798, 41504, 7207, 30722, 21436, 36735, 28067, 28545, + 3239, 11221, 36031, 41889, 100010, 19247, 317673, 29495, 174554, 6424, 129725, 53845, 94986, 7955, 59676, 2604, + 191497, 19735, 102214, 62954, 23844, 11872, 179525, 261436, 34492, 428, 78404, 142035, 16747, 17246, 27578, 37021, + 33672, 57944, 26056, 135760, 2369, 61674, 122066, 31327, 19374, 157065, 40553, 130982, 69619, 71290, 38855, 72100, + 92903, 95940, 51422, 165999, 65713, 57873, 50726, 7288, 20272, 2081, 42326, 22624, 81120, 57914, 79352, 19447, + 1684, 72302, 11774, 302559, 161481, 96396, 13692, 414988, 3721, 79066, 56627, 46883, 21150, 11747, 12184, 5856, + 113458, 176117, 84416, 52079, 27933, 3354, 59765, 141359, 2212, 216309, 2555, 23458, 196722, 142463, 45701, 44548, + 28798, 19418, 215, 29916, 9396, 10574, 114226, 84475, 13520, 18694, 34056, 4524, 90302, 62930, 13539, 19407, + 77209, 7728, 38088, 9535, 2263, 23875, 183945, 17750, 26274, 67172, 10585, 28042, 22199, 7478, 51331, 66030, + 26774, 192929, 31434, 25850, 50197, 52926, 178158, 4679, 181256, 70184, 229600, 9959, 105594, 72158, 73974, 2726, + 35085, 78087, 23284, 35568, 51713, 155676, 5401, 27254, 11966, 17569, 223253, 71993, 103357, 111477, 55722, 30504, + 26034, 46774, 35392, 36285, 214814, 41143, 163465, 1051, 16094, 81044, 6636, 76489, 179102, 20712, 39178, 35683, + 125177, 54219, 30617, 52994, 25324, 50123, 2543, 87529, 58995, 10688, 125199, 12388, 60158, 125481, 131646, 7642, + 133350, 65874, 3438, 97277, 101450, 10075, 56344, 116821, 50778, 60547, 98016, 106135, 13859, 14255, 16300, 77373, + 173521, 8285, 45932, 37426, 4054, 114295, 55947, 7703, 39114, 52, 51119, 
128135, 19714, 60715, 9554, 50492, + 88180, 2823, 118271, 52993, 122625, 97919, 23859, 37895, 25040, 33614, 32102, 20431, 3577, 9275, 15686, 43031, + 157741, 110358, 1884, 40291, 125391, 13736, 5008, 64881, 87336, 77381, 70711, 43032, 49155, 118587, 70494, 4318, + 10168, 30126, 12580, 10524, 280104, 104001, 145413, 2862, 84140, 6603, 106005, 13566, 12780, 11251, 42830, 571, + 179910, 82443, 13146, 469, 42714, 32591, 265217, 424024, 92553, 54721, 134100, 6007, 15242, 114681, 59030, 16718, + 85465, 200214, 85982, 55174, 165013, 23493, 56964, 82529, 109150, 32706, 27568, 82442, 5350, 14976, 13165, 44890, + 60021, 21343, 33978, 17264, 4655, 22328, 27819, 75730, 16567, 55483, 14510, 17926, 45827, 150609, 3704, 7385, + 272531, 161543, 76904, 122163, 52405, 2039, 19165, 41623, 14423, 228354, 3369, 176360, 85491, 7122, 35789, 303724, + 4465, 13628, 2233, 55311, 118771, 20713, 10006, 221519, 45115, 71021, 35650, 29775, 7337, 10864, 20665, 21142, + 1746, 15080, 1624, 32449, 10905, 105743, 229797, 7701, 3940, 22997, 178467, 57208, 389057, 39683, 59403, 63344, + 63125, 54847, 69691, 18336, 56448, 3362, 37202, 18282, 29648, 138224, 35867, 10495, 5911, 28814, 26653, 31514, + 176702, 26550, 45621, 11734, 4525, 40543, 73944, 121080, 27858, 155561, 14887, 44670, 30742, 8796, 107455, 113472, + 56369, 75581, 183777, 240095, 133699, 153299, 8768, 160464, 26058, 49078, 103971, 21875, 71486, 44888, 17156, 9678, + 89541, 123019, 102337, 3972, 83930, 21245, 87852, 109660, 287918, 183019, 686, 10100, 39177, 283941, 11274, 24736, + 26793, 26214, 25995, 77011, 141580, 4070, 23742, 46285, 46632, 30700, 26669, 19056, 35951, 115575, 174034, 56097, + 35463, 87425, 24575, 44245, 38701, 82317, 85922, 281616, 100333, 147697, 61503, 7730, 84330, 8530, 59917, 61597, + 17173, 9092, 32658, 90288, 193136, 39023, 20381, 56654, 31132, 7779, 1919, 1375, 117128, 30819, 11169, 40938, + 23935, 115201, 101155, 151034, 4835, 11231, 74550, 89388, 59951, 91704, 107312, 167882, 115062, 12732, 72738, 88703, + 
464019, 158267, 57995, 60496, 737, 14371, 123867, 4174, 243339, 159946, 7568, 16025, 134556, 110916, 38103, 191, + 80226, 88794, 29688, 27230, 10454, 76308, 57647, 77409, 113483, 66864, 14745, 19808, 12023, 46583, 84805, 16015, + 17102, 2231, 20611, 3547, 95740, 250131, 34559, 108894, 8498, 15853, 159169, 148920, 20942, 2813, 93160, 45188, + 210613, 45531, 52587, 149062, 39782, 28194, 57849, 60965, 84954, 89766, 84453, 100927, 16501, 27658, 165311, 103841, + 54192, 207341, 19558, 20084, 319622, 5672, 205467, 98462, 61849, 36279, 13609, 147177, 24726, 165015, 209489, 59591, + 31157, 6551, 117580, 75060, 141146, 277310, 21072, 22023, 106474, 63041, 137443, 122965, 68371, 5383, 42146, 98961, + 113467, 30863, 23794, 4843, 99630, 30392, 82679, 13699, 241612, 33601, 93146, 24319, 18643, 32155, 95669, 40440, + 15333, 34089, 67799, 142144, 58245, 38633, 114531, 117400, 77861, 188726, 5507, 2568, 8853, 10987, 107222, 2663, + 2421, 11530, 13345, 30075, 41785, 118661, 104786, 17459, 12490, 16281, 71936, 193555, 17431, 5944, 71758, 26485, + 77317, 20803, 367167, 158, 7362, 93430, 11735, 172445, 46002, 11532, 54482, 930, 62911, 2235, 23004, 179236, + 4764, 101859, 208113, 22477, 55163, 95579, 14098, 67320, 162556, 90709, 156949, 3826, 57492, 4025, 34092, 87442, + 104565, 6718, 186015, 28214, 14209, 10039, 107186, 233912, 58877, 81637, 55265, 39828, 6194, 145813, 50831, 105849, + 4974, 88319, 122296, 10272, 197216, 95714, 51540, 72418, 23324, 91555, 8743, 140452, 250249, 51666, 34124, 7229, + 38592, 129641, 78169, 174242, 22464, 149964, 51450, 14034, 10026, 95376, 26190, 120062, 14401, 8700, 265, 31386, + 143573, 7203, 229889, 61567, 4227, 140981, 2466, 72052, 10787, 10062, 30958, 6099, 38471, 30103, 23202, 208101, + 70847, 467, 58934, 32271, 32984, 36637, 24107, 30771, 17109, 73353, 13650, 2098, 157040, 67366, 66904, 106018, + 265380, 107238, 18535, 44025, 32681, 144983, 62505, 91295, 56120, 3082, 77508, 10322, 63023, 36700, 81885, 224127, + 16721, 45023, 239261, 111272, 13852, 
7866, 149243, 204199, 32309, 22084, 42029, 38316, 126644, 104973, 14406, 43454, + 67322, 61310, 15789, 40285, 24026, 181047, 6301, 70927, 23319, 115823, 27248, 66693, 115875, 278566, 63007, 146844, + 56841, 59007, 87368, 180001, 22370, 42114, 80605, 12022, 10374, 308, 25079, 14689, 12618, 63368, 7936, 264973, + 212291, 136713, 95999, 105801, 18965, 32075, 48700, 52230, 35119, 96912, 32992, 8586, 16606, 101333, 101812, 14969, + 39930, 759, 193090, 27387, 42914, 12937, 5058, 62646, 64528, 38624, 25743, 37502, 3716, 4435, 30352, 178687, + 26461, 132611, 42002, 138442, 35833, 59582, 16345, 8048, 60319, 49349, 309, 47800, 49739, 90482, 26405, 34470, + 63786, 32479, 85028, 39866, 47846, 11649, 23934, 29466, 2816, 42864, 31828, 7410, 74885, 49632, 47629, 111801, + 90749, 19536, 18767, 105764, 59606, 21223, 10746, 76298, 22220, 39408, 7190, 79654, 64856, 11602, 82156, 272765, + 17079, 70089, 245473, 51813, 184407, 384678, 1576, 122249, 5064, 27481, 6188, 25790, 74361, 27541, 318284, 45430, + 31488, 620, 93579, 45723, 192118, 22670, 51913, 4162, 70244, 35966, 26397, 16199, 50899, 209613, 121702, 287507, + 2993, 36101, 132229, 67345, 33062, 76295, 118628, 78705, 52316, 34375, 107083, 107454, 44863, 127561, 33964, 3073, + 154010, 190914, 55967, 39074, 6272, 31047, 5550, 41123, 26154, 98638, 47110, 19998, 148091, 50229, 31329, 59900, + 195442, 19106, 61347, 73497, 70015, 682, 45850, 25776, 38022, 148951, 6288, 37411, 232526, 109277, 27286, 32342, + 9262, 5220, 16651, 23175, 46740, 129438, 78614, 121925, 66914, 88710, 127952, 5563, 21500, 34521, 10739, 14863, + 191006, 62956, 17359, 16749, 67027, 56284, 69134, 43301, 35039, 58883, 54466, 60823, 404451, 75743, 59856, 86979, + 7923, 34273, 83785, 32142, 7693, 268986, 197428, 282681, 17049, 22346, 22990, 92245, 107180, 3357, 37104, 96724, + 49153, 7683, 31197, 43267, 82231, 164276, 23696, 20848, 188364, 22309, 24821, 158707, 1018, 22514, 70922, 27792, + 45589, 59709, 10765, 736, 35218, 63479, 51987, 24275, 63588, 55361, 92929, 
81964, 4658, 20122, 12330, 44058, + 13065, 311456, 72224, 8337, 211229, 38979, 22590, 138478, 52757, 32595, 133600, 8838, 31549, 94412, 43391, 90056, + 1585, 94802, 127271, 6223, 31889, 137038, 132910, 2165, 57616, 230152, 6080, 10748, 36737, 74579, 134062, 50525, + 180532, 119270, 34556, 76155, 82394, 52595, 29258, 31435, 87820, 67996, 26943, 183878, 38007, 2410, 13526, 180297, + 69856, 3503, 187396, 167700, 7838, 16701, 9199, 56267, 3661, 37407, 65994, 23767, 5708, 62508, 221700, 67088, + 86978, 46776, 84434, 32088, 5612, 9149, 88244, 21685, 95151, 46750, 189612, 2979, 506311, 2594, 3628, 40074, + 105039, 78243, 28523, 6651, 38058, 71999, 30992, 12764, 68261, 108991, 6165, 26450, 61961, 13400, 22426, 7490, + 60890, 109623, 2070, 12958, 50355, 67979, 257096, 7213, 42578, 52121, 35716, 65461, 7516, 124758, 39268, 302, + 64712, 14977, 1467, 219452, 2840, 34229, 11121, 21602, 19270, 63574, 8024, 1532, 17331, 79839, 78885, 52029, + 180767, 57957, 6069, 91265, 61380, 55767, 8927, 32881, 287603, 22149, 35029, 68876, 6428, 199567, 46926, 13412, + 104132, 21434, 366616, 45060, 110046, 81924, 128910, 45886, 52821, 130416, 29416, 77342, 21762, 67329, 121432, 79924, + 11724, 38625, 81006, 102033, 28338, 13326, 3250, 82056, 82526, 38212, 21112, 12382, 111495, 3263, 7414, 86274, + 93490, 40844, 30224, 45212, 24019, 48411, 71367, 24941, 76729, 57776, 3769, 38114, 202019, 197745, 31953, 237533, + 33270, 201580, 255648, 100798, 44741, 32241, 98468, 106931, 10085, 15090, 170358, 33154, 66787, 18819, 69760, 25061, + 234005, 82660, 6295, 131975, 16874, 9076, 4094, 25005, 17740, 40908, 19533, 220019, 44330, 99792, 50040, 19619, + 13950, 55228, 24423, 31253, 95308, 103177, 184795, 28590, 82285, 5059, 3210, 75525, 49894, 70007, 56178, 10580, + 36051, 139681, 21617, 98736, 3555, 106306, 164189, 37352, 63915, 47824, 24883, 145530, 61904, 28444, 11483, 19837, + 145446, 30420, 112972, 85939, 11835, 191233, 2262, 20705, 58630, 1753, 148334, 1197, 144714, 6887, 11223, 107667, + 60879, 77914, 
4151, 57417, 81594, 96681, 169430, 1784, 20444, 95138, 254041, 27038, 596, 7117, 72808, 13759, + 3353, 126776, 21074, 55322, 27081, 36942, 39547, 139830, 179275, 4453, 713, 8722, 71399, 19204, 25785, 22794, + 23923, 104114, 11291, 25458, 102309, 88396, 75288, 230440, 206396, 104551, 58447, 130857, 37247, 94734, 31548, 176529, + 226077, 65159, 20104, 10096, 66881, 94191, 237909, 27109, 37404, 1520, 27421, 25220, 113003, 23423, 24884, 50585, + 6286, 231877, 150800, 11789, 3226, 90004, 60642, 5053, 202400, 61442, 132531, 175329, 57138, 30116, 103847, 9973, + 75367, 16452, 32360, 59119, 21246, 10191, 164804, 23305, 61051, 37348, 154530, 13214, 5468, 50403, 66754, 130976, + 50559, 80515, 14436, 155492, 84017, 5472, 43107, 41240, 2890, 90431, 70188, 382, 76234, 48040, 50211, 281038, + 237007, 32115, 142178, 1536, 22761, 96429, 1811, 31243, 1679, 49143, 55209, 17402, 235054, 61494, 7462, 77030, + 34925, 87609, 78002, 9499, 9027, 73289, 201078, 101379, 63544, 27666, 5469, 10642, 30029, 49816, 132979, 95620, + 58086, 351930, 116300, 2110, 2043, 30845, 6154, 11279, 16727, 4122, 2277, 27281, 4971, 3650, 39060, 61970, + 65951, 39674, 75686, 38151, 11370, 130809, 177895, 32665, 63725, 122267, 7857, 39618, 118483, 44792, 157755, 178624, + 136994, 24260, 41308, 22471, 12404, 21707, 12486, 30473, 52781, 50246, 20247, 39065, 909, 56825, 103158, 128603, + 31542, 1089, 41935, 32744, 12428, 37963, 84420, 33134, 72921, 208449, 42622, 168151, 127335, 147107, 46699, 38216, + 12591, 94342, 85814, 31423, 24944, 2605, 87542, 67473, 192551, 4496, 56321, 91819, 17630, 6300, 256183, 114569, + 202090, 33209, 35289, 34897, 24967, 40520, 43470, 5344, 10199, 34810, 14283, 10381, 10017, 62923, 49924, 23233, + 64539, 13051, 35686, 19698, 11570, 135555, 120868, 44924, 87065, 52318, 52335, 47586, 140906, 245885, 109834, 78668, + 9065, 46990, 25258, 72022, 61243, 40838, 4545, 146387, 10537, 11557, 17470, 36930, 68104, 46711, 24264, 79401, + 81043, 18225, 120488, 24746, 84338, 81652, 28266, 13776, 
21878, 46973, 1047, 230465, 73357, 95777, 24973, 210160, + 62210, 58404, 110633, 169651, 6937, 41870, 9909, 26822, 191062, 76553, 27519, 96256, 239070, 2478, 205678, 67955, + 58532, 20601, 50120, 19148, 78501, 195724, 110740, 8249, 109665, 27446, 30568, 57631, 31425, 49752, 32820, 65504, + 50079, 3663, 102256, 219898, 23849, 211315, 14645, 4359, 91767, 9528, 12449, 49366, 7941, 49763, 107848, 8930, + 27086, 50686, 9744, 10447, 81935, 39513, 46514, 1670, 29229, 6172, 22312, 137280, 97759, 9806, 14445, 22976, + 56458, 73391, 34983, 93760, 174219, 52573, 33149, 59747, 2429, 136277, 75123, 165263, 91040, 7446, 57632, 48633, + 97140, 246081, 84766, 151684, 79918, 93268, 120346, 54059, 54875, 77858, 32996, 103590, 45276, 11968, 19600, 25849, + 17159, 132907, 42828, 16817, 4913, 99462, 103303, 27395, 5737, 74184, 20749, 21160, 14377, 77062, 131403, 158735, + 10999, 27799, 77785, 9320, 34366, 51593, 61070, 33746, 47048, 29268, 36675, 30262, 53297, 9832, 82000, 20188, + 122292, 39917, 7331, 18160, 68301, 185935, 134830, 15031, 4935, 10004, 165845, 185534, 46923, 30109, 44134, 122631, + 18874, 22903, 112790, 26561, 18549, 348902, 82871, 140345, 255565, 135390, 63556, 103747, 145055, 179600, 145662, 296111, + 61661, 211987, 23952, 52342, 126343, 48450, 32919, 44277, 82185, 9591, 62139, 205363, 376969, 394874, 108461, 18040, + 120885, 14798, 39863, 16571, 16794, 58271, 81025, 55206, 14640, 118656, 6361, 44092, 85970, 6262, 153863, 108244, + 180200, 72264, 79947, 38044, 10050, 5735, 61221, 80712, 5471, 115689, 11391, 11661, 184257, 20010, 60116, 30320, + 19327, 134598, 45455, 27542, 18004, 125092, 452272, 1549, 91523, 46567, 180063, 156026, 2608, 11174, 58848, 37788, + 65907, 80194, 30490, 5786, 40775, 119519, 106241, 11323, 156297, 8425, 61495, 2617, 29675, 2425, 59886, 112582, + 49142, 59618, 4863, 50597, 86710, 50650, 168632, 27693, 85641, 83643, 18993, 25768, 84284, 28090, 93592, 36627, + 312804, 43381, 9887, 9402, 100931, 97165, 3311, 173330, 66805, 28935, 4963, 184460, 
3201, 78102, 19126, 21607, + 37496, 24938, 22615, 16153, 32862, 134792, 153318, 61120, 6067, 2812, 12826, 12792, 23825, 37559, 64662, 202250, + 102694, 155488, 85881, 149193, 46233, 65383, 15521, 106982, 11358, 176786, 25752, 39717, 34208, 24510, 32464, 77742, + 39371, 72028, 138229, 60688, 71386, 102834, 132477, 2208, 11548, 63670, 271279, 28351, 30338, 38620, 32491, 99845, + 143885, 152266, 13252, 2825, 178663, 108097, 1775, 78201, 14897, 113573, 163346, 62292, 171129, 22183, 96598, 38733, + 64971, 166776, 117445, 9968, 146393, 44677, 74867, 20908, 97328, 12761, 25656, 26785, 9148, 112344, 26115, 99176, + 110121, 22437, 49547, 6180, 79320, 5835, 31392, 43328, 33377, 75870, 119860, 69497, 80273, 7325, 155219, 43167, + 111173, 28347, 20222, 3763, 71752, 55041, 47252, 14618, 28088, 15012, 97805, 194698, 54636, 2036, 41349, 6173, + 96604, 61530, 51859, 43782, 13361, 24334, 22668, 24792, 7070, 23441, 16789, 3209, 36211, 208475, 26242, 32880, + 122181, 182407, 21444, 31060, 88459, 29929, 77907, 12716, 10934, 97005, 20599, 31690, 8403, 58445, 30303, 22700, + 10336, 86731, 103115, 337709, 72556, 46788, 112566, 47684, 67089, 53548, 36874, 56487, 41387, 125985, 26893, 40071, + 106683, 73712, 18787, 40105, 72992, 67246, 137276, 50802, 36790, 70328, 138827, 22466, 39263, 183295, 29858, 50975, + 9322, 57397, 10654, 24364, 30383, 55799, 41600, 23584, 127295, 296610, 129078, 143558, 244131, 86397, 36049, 1085, + 80677, 3820, 108139, 5476, 34767, 24683, 7758, 13060, 7239, 131671, 250593, 59556, 103392, 29810, 4188, 252323, + 39404, 116877, 7651, 43600, 40338, 13554, 157253, 39196, 25978, 144387, 61211, 234, 50104, 6129, 10449, 93777, + 9240, 356378, 274148, 4439, 72970, 3724, 147770, 78680, 62570, 115877, 40027, 40547, 36817, 224392, 64609, 34795, + 165027, 67440, 2477, 37206, 23431, 50754, 164797, 46018, 94995, 170982, 27051, 7957, 22767, 3674, 27900, 56419, + 18930, 60701, 41302, 2692, 84749, 339721, 61996, 111094, 80221, 50129, 1045, 8153, 62945, 19202, 8250, 37208, + 37418, 
32560, 79477, 41106, 88569, 33963, 36693, 5892, 30570, 1581, 66471, 49647, 11922, 160717, 29442, 5643, + 114865, 82962, 95982, 132098, 22633, 22838, 94726, 54556, 28566, 205039, 162340, 33216, 16849, 35847, 221339, 94851, + 26533, 71469, 1805, 3804, 12935, 45483, 71020, 36310, 65381, 192960, 34240, 35165, 59773, 1248, 46954, 155332, + 96864, 4246, 388800, 16129, 57133, 74592, 44807, 442014, 38203, 42574, 80818, 91592, 26377, 36424, 65760, 977, + 77387, 22628, 147610, 28018, 30561, 98454, 6969, 119628, 63648, 18170, 36854, 26601, 64018, 22027, 37279, 51395, + 152934, 21153, 9430, 58760, 194742, 5330, 55115, 34158, 28917, 174111, 13171, 122326, 1526, 43896, 66094, 25325, + 4234, 148354, 11450, 275, 18999, 112191, 44365, 22723, 68409, 8733, 57746, 96565, 75007, 14196, 108844, 29475, + 88599, 177563, 100792, 106156, 86323, 93726, 14248, 135341, 194131, 40126, 47099, 14779, 8272, 39597, 95983, 171398, + 65882, 28052, 10393, 47213, 40689, 22120, 72212, 106829, 34964, 109146, 753, 648, 21660, 30047, 17527, 181025, + 5619, 145357, 4085, 216883, 9359, 186951, 24779, 53931, 24545, 36197, 223296, 62628, 168101, 4243, 107313, 30321, + 26642, 13049, 51059, 31027, 107912, 807, 73550, 26551, 84369, 122422, 165872, 49754, 74213, 234264, 33151, 52014, + 33100, 87183, 22365, 52500, 40013, 23302, 5652, 72723, 21404, 26107, 48434, 587, 94049, 168493, 96418, 32871, + 70860, 31709, 25128, 443, 71597, 166253, 15670, 70994, 26341, 133675, 28280, 75491, 54756, 47955, 56028, 26182, + 11952, 113272, 472197, 64640, 110753, 17919, 337, 50642, 22576, 142, 87371, 53391, 93210, 126694, 15285, 19642, + 85667, 14148, 1506, 42092, 52962, 33243, 11970, 20734, 135843, 57044, 58880, 13002, 219134, 22876, 64754, 232519, + 4257, 43120, 321573, 24799, 64526, 124728, 52579, 81472, 70831, 276848, 17403, 74359, 23021, 182101, 74597, 23744, + 148267, 12055, 7976, 5349, 11772, 67540, 167347, 65318, 18720, 127832, 108238, 22828, 90233, 9987, 259080, 118185, + 73209, 79270, 13775, 90100, 137742, 90799, 70569, 
15699, 19961, 9087, 67475, 57872, 39731, 8810, 134897, 131868, + 146849, 19898, 3334, 2281, 167061, 91073, 60356, 467742, 74712, 188, 53179, 137679, 92769, 29241, 9537, 132595, + 80119, 1041, 88962, 5976, 40171, 44911, 102859, 139059, 104558, 98987, 47761, 19272, 71472, 113864, 175377, 73338, + 10857, 23402, 23758, 1591, 139864, 5644, 4076, 118760, 16427, 134198, 18853, 20291, 100849, 37423, 22038, 36677, + 19071, 195521, 57445, 11069, 31869, 55718, 66882, 148490, 44, 41296, 75242, 49704, 166810, 9906, 20943, 122258, + 49112, 105667, 15969, 10344, 6408, 187694, 21399, 72742, 58970, 14867, 14376, 81889, 41856, 23225, 15042, 56993, + 16074, 131389, 74276, 72407, 53875, 383108, 53597, 37363, 68993, 44854, 122548, 430927, 198279, 38430, 80409, 12245, + 2981, 628, 2818, 17760, 37437, 238229, 7968, 46892, 2200, 3730, 34190, 65983, 37959, 112291, 87850, 70827, + 6522, 20750, 73913, 111621, 41652, 19587, 2780, 58668, 25916, 85259, 18200, 168962, 95781, 42445, 102050, 7776, + 57662, 103313, 47742, 96358, 41964, 66174, 100396, 29069, 204735, 19679, 27978, 7479, 40264, 22534, 61183, 36081, + 107436, 58223, 14680, 23002, 101311, 24716, 124108, 12908, 5646, 31750, 40380, 14215, 232799, 102772, 14122, 96775, + 61398, 50917, 12096, 149880, 67833, 598749, 124194, 155871, 49216, 790, 14677, 65319, 56917, 7440, 145744, 95701, + 12206, 49405, 129269, 76199, 45732, 9767, 11058, 9047, 210885, 11051, 7392, 26307, 2130, 8132, 147526, 20802, + 232698, 115660, 50060, 59789, 57344, 107623, 80343, 112676, 23291, 9866, 160971, 34032, 118291, 15719, 59730, 164911, + 28975, 2659, 58046, 78480, 21854, 66209, 53863, 109085, 116045, 29021, 46481, 107552, 22130, 18764, 70254, 31272, + 11300, 52460, 43933, 84738, 20721, 53869, 190840, 79673, 105300, 7561, 321817, 66924, 13940, 33281, 101046, 183181, + 32176, 71878, 5678, 62924, 79535, 56646, 40303, 19559, 27703, 93042, 73368, 42187, 3670, 37376, 46440, 7023, + 36816, 109628, 20680, 5940, 276440, 275233, 170848, 112093, 136996, 14984, 20226, 111441, 
77693, 112960, 48577, 39370, + 55707, 50314, 123404, 26570, 54281, 61372, 123391, 4857, 35928, 246740, 132507, 106646, 44241, 7196, 92258, 9825, + 37688, 51197, 303141, 5590, 15476, 132986, 10955, 85782, 34486, 26696, 7991, 28813, 18858, 39546, 11703, 11365, + 38185, 5716, 93555, 11925, 40121, 60002, 6985, 10976, 171384, 3887, 43394, 13337, 56346, 6381, 252336, 39573, + 75042, 53711, 1028, 31781, 44295, 95925, 131713, 7214, 68125, 43571, 70954, 213234, 1628, 8760, 13391, 65485, + 17320, 56038, 1710, 25248, 60803, 57399, 19839, 3870, 326, 281556, 50945, 72400, 21460, 316244, 75619, 56246, + 98775, 481, 13513, 55765, 50427, 7388, 123519, 32929, 57908, 27124, 61316, 101097, 57467, 30228, 48792, 10788, + 20402, 37318, 50526, 155730, 34456, 158065, 145305, 17832, 43733, 64052, 4506, 35072, 205355, 177028, 184004, 187081, + 68616, 35938, 83703, 10367, 36892, 93186, 260137, 51934, 89970, 4985, 23445, 26755, 21558, 7948, 78741, 23376, + 124405, 85594, 68596, 57536, 49351, 12619, 56593, 132668, 99924, 109728, 71844, 71935, 196018, 65464, 17617, 14987, + 89701, 143773, 33997, 8687, 22701, 33258, 2914, 4436, 72108, 85610, 9671, 49067, 2327, 82988, 1361, 1672, + 44033, 35777, 30269, 24057, 10605, 82236, 616, 15793, 13919, 47249, 112086, 116698, 9484, 80207, 90574, 33304, + 68624, 93127, 56101, 42210, 160929, 4827, 38995, 38095, 4701, 125119, 5027, 33680, 9236, 231236, 14135, 87837, + 23318, 70261, 78893, 30151, 81482, 14332, 1084, 74256, 27532, 46644, 79185, 3148, 62615, 6981, 55672, 31668, + 36825, 1849, 14536, 37446, 14738, 23779, 43058, 162749, 72199, 1168, 21346, 5592, 85932, 85302, 9668, 18351, + 57135, 150360, 2080, 228015, 77953, 34670, 119302, 151751, 31009, 106725, 84265, 45214, 59289, 74178, 113071, 263206, + 111009, 4021, 44449, 188119, 192629, 123592, 392506, 292847, 114487, 12831, 205858, 9852, 20780, 79648, 75767, 357014, + 97721, 18166, 21005, 67950, 33226, 204009, 16536, 2987, 11335, 66717, 144910, 47950, 17262, 55060, 15063, 2934, + 51038, 26775, 178497, 
66008, 3427, 49433, 128592, 20036, 157553, 63861, 3089, 23015, 51210, 28696, 35933, 49942, + 71135, 231518, 99620, 17248, 21835, 176536, 20676, 16944, 38700, 165831, 233253, 295625, 36723, 13023, 52745, 10907, + 19423, 67972, 125868, 95473, 82875, 1183, 108455, 52685, 33417, 64095, 21433, 52438, 33191, 127809, 44505, 211823, + 7810, 2752, 95548, 162031, 7185, 91196, 47563, 61721, 33359, 17897, 23682, 42806, 178101, 22874, 49707, 199897, + 75419, 82456, 8618, 11171, 79712, 116847, 18783, 44190, 46564, 5346, 59046, 95032, 7893, 14916, 3214, 26800, + 24172, 121453, 34362, 10250, 17408, 18888, 4840, 68696, 22831, 13162, 36005, 32512, 14800, 62357, 41723, 45046, + 27247, 37486, 5372, 2564, 34261, 298500, 66509, 133920, 89138, 31305, 117697, 19097, 108304, 81386, 84106, 23802, + 46411, 63304, 946, 51417, 41777, 41041, 19501, 115864, 60743, 294354, 37955, 94165, 18116, 1156, 17937, 20645, + 57114, 90804, 58042, 48643, 92288, 9861, 2557, 88546, 61333, 101008, 12853, 5148, 87856, 4152, 144503, 73841, + 18718, 9789, 147565, 10846, 42085, 12789, 30223, 8993, 56352, 67203, 2448, 28215, 6052, 23540, 126319, 75933, + 36689, 80235, 23231, 23561, 21383, 38800, 77548, 102798, 21234, 31468, 158608, 46188, 63960, 191679, 8051, 67014, + 11185, 170078, 42186, 28827, 34777, 41930, 212079, 12421, 34750, 24111, 110344, 73918, 45171, 70826, 141949, 40063, + 23979, 24254, 37309, 26724, 27179, 24718, 83648, 54938, 14591, 17425, 29525, 102675, 48975, 48654, 12316, 8929, + 60640, 41709, 50168, 63264, 89812, 50716, 48632, 38755, 138583, 160123, 55579, 71829, 24230, 233277, 46322, 39650, + 166388, 34718, 24108, 98252, 7031, 106695, 62498, 18258, 35062, 217827, 78731, 34824, 33354, 19520, 60852, 2432, + 60224, 8587, 2836, 62955, 702, 20227, 42285, 40560, 95592, 62486, 11094, 53035, 143291, 18842, 46177, 77994, + 1770, 9657, 107422, 172915, 32655, 128716, 25886, 25164, 156740, 119928, 165875, 85817, 11007, 89110, 33956, 12652, + 65156, 180266, 8494, 36889, 19958, 20955, 96, 1264, 118288, 135769, 
44754, 86671, 5632, 19026, 168220, 289120, + 33569, 93821, 66144, 70635, 7687, 5642, 2714, 55445, 56636, 71545, 184182, 93133, 7332, 37389, 12643, 52315, + 22729, 11014, 158742, 17050, 152889, 50178, 34601, 41945, 52136, 9948, 26914, 63548, 95721, 115951, 40759, 8960, + 158258, 38938, 49232, 48325, 42234, 81523, 253019, 66128, 40978, 20048, 238048, 38760, 62928, 122560, 118532, 43687, + 137472, 163689, 26680, 9878, 17448, 51035, 16211, 60834, 36749, 29178, 14241, 59868, 150086, 2305, 26477, 42422, + 34342, 165341, 83279, 33894, 14257, 29928, 12743, 13957, 125571, 89134, 66712, 10952, 16507, 147839, 30146, 7249, + 16565, 45399, 39874, 114565, 215780, 31990, 230881, 171477, 102, 196546, 44538, 10880, 84948, 281705, 86651, 10617, + 31395, 2342, 453658, 43569, 60561, 132901, 21845, 17727, 58556, 258242, 22262, 58728, 4008, 77997, 11806, 37431, + 30599, 81375, 109137, 185787, 114085, 217292, 97453, 169085, 30593, 60212, 11544, 102056, 65580, 2384, 91655, 4855, + 95725, 7295, 157994, 16228, 20669, 53276, 141590, 105246, 17334, 25440, 76067, 17967, 39321, 38911, 11362, 28559, + 63807, 21627, 26468, 85816, 40120, 1025, 15234, 58319, 69516, 66512, 124548, 75845, 78873, 22137, 46681, 51242, + 85683, 32909, 76747, 35555, 43396, 101465, 1765, 73094, 1077, 2962, 39028, 66777, 57831, 42048, 15828, 13962, + 36041, 63657, 52412, 5242, 58846, 2141, 5506, 219012, 134451, 3936, 182230, 17558, 17153, 152237, 22621, 49377, + 170216, 35257, 68233, 65374, 6510, 11126, 212151, 7184, 2480, 22517, 3437, 33073, 30156, 16557, 3768, 55067, + 86829, 91000, 12350, 148650, 66017, 79424, 70885, 49066, 28250, 21369, 51213, 34533, 11510, 3258, 18176, 18465, + 84413, 6315, 36411, 163765, 4346, 356, 107618, 598, 13727, 285026, 162695, 8749, 14583, 7132, 63521, 184253, + 32378, 25991, 5604, 30961, 53675, 4874, 84693, 5086, 34811, 26978, 56564, 7904, 33519, 51221, 113942, 69253, + 6664, 125563, 22055, 220680, 102008, 742, 51930, 19494, 176108, 44424, 35123, 13025, 75685, 11759, 74335, 22250, + 181453, 
131147, 16984, 132115, 154311, 11991, 76452, 52609, 85351, 196, 30969, 9198, 74919, 2529, 56838, 71779, + 29187, 116304, 3504, 62330, 41190, 86153, 28393, 254926, 104228, 105189, 13264, 84359, 3574, 12415, 8534, 57147, + 10175, 188174, 59504, 60932, 66318, 16407, 107921, 17638, 99103, 49278, 28403, 39786, 145865, 8462, 3558, 43406, + 142271, 29139, 21989, 36552, 93955, 72365, 7176, 13556, 106185, 37957, 321774, 17782, 129017, 51154, 27938, 24952, + 1935, 39366, 2791, 33489, 41582, 56078, 24558, 9311, 5449, 218786, 27808, 190429, 68013, 36020, 86003, 29735, + 3404, 87348, 119357, 115714, 2324, 86796, 81973, 40992, 43376, 93621, 28784, 16808, 36367, 2517, 2909, 191926, + 24978, 55303, 53308, 205724, 60068, 3098, 21375, 64784, 23949, 26579, 63121, 12319, 80145, 39967, 97861, 6757, + 70143, 67642, 37082, 34698, 69140, 122883, 46151, 62187, 80934, 429, 19437, 135071, 137885, 222647, 13331, 154065, + 327, 61778, 74257, 40116, 37493, 14855, 85079, 237641, 42342, 102164, 199965, 71204, 4662, 29368, 5042, 113914, + 122214, 8955, 13149, 102503, 43173, 5659, 163787, 69003, 307084, 63392, 171080, 21390, 81918, 86666, 36622, 24126, + 28887, 5736, 28054, 207170, 163428, 79891, 346467, 95363, 38980, 111806, 80828, 9200, 19288, 294896, 114468, 87405, + 111715, 141705, 7015, 72754, 68463, 48738, 243147, 33397, 101210, 37051, 98801, 82847, 20397, 4940, 185559, 18716, + 54718, 83491, 11725, 40803, 1128, 12128, 23060, 5174, 7745, 67007, 46701, 1571, 27807, 180186, 256996, 18975, + 16837, 7877, 212758, 250379, 15440, 87954, 57755, 24719, 124057, 83461, 258, 50864, 8874, 29038, 71289, 31627, + 15429, 9005, 4061, 113851, 107716, 82819, 13651, 79656, 117851, 17539, 111446, 12938, 39724, 190787, 4352, 15402, + 21070, 62708, 8539, 23777, 73853, 13552, 38810, 86117, 16285, 56400, 1718, 75342, 142863, 29033, 378, 110113, + 180321, 32586, 23606, 26393, 160984, 207987, 23783, 8406, 16904, 24596, 47274, 11693, 46539, 60524, 78595, 48423, + 31718, 20170, 9009, 146268, 15183, 191060, 172765, 1349, 
138436, 37365, 10970, 40509, 225817, 20021, 70394, 152138, + 21541, 66559, 66544, 89352, 2725, 17258, 91345, 7313, 3815, 115868, 8660, 40362, 4071, 103524, 39388, 118275, + 21950, 6549, 38226, 32754, 209574, 29201, 43495, 18028, 20296, 40597, 18370, 47520, 202450, 24134, 2219, 8195, + 69545, 38041, 136934, 46374, 19041, 159811, 84865, 58620, 846, 98749, 13569, 30714, 97246, 32186, 4479, 27355, + 92973, 35214, 151491, 75963, 37631, 1561, 27200, 238083, 23182, 60756, 12291, 25766, 39355, 102333, 87362, 65741, + 59906, 19538, 201575, 48772, 102938, 24438, 292580, 39964, 66366, 9004, 61379, 50548, 37622, 38732, 28379, 68180, + 76622, 17488, 69849, 5963, 7219, 48143, 43413, 55358, 540, 58691, 29506, 19245, 52193, 48621, 5518, 13048, + 118625, 44755, 191081, 42061, 89197, 2259, 60665, 66994, 71210, 51232, 3585, 142096, 55024, 7892, 8345, 58653, + 463307, 65658, 64319, 137941, 136323, 53499, 12746, 43492, 6978, 95163, 29925, 60175, 5128, 7352, 41463, 184756, + 121146, 20473, 18426, 4598, 5309, 54580, 14277, 121151, 10691, 56711, 43880, 63409, 76682, 11830, 172218, 264898, + 32632, 66536, 81062, 31649, 25788, 92774, 60222, 11100, 63159, 9432, 224657, 25240, 53613, 152, 138620, 163829, + 2397, 85345, 12501, 37507, 64932, 38575, 43522, 65789, 80198, 78796, 35226, 3851, 108891, 73311, 3060, 28391, + 93671, 39663, 46142, 30982, 66041, 37281, 68157, 26553, 71872, 81142, 211527, 39747, 118119, 22695, 2859, 11066, + 20232, 168911, 7933, 197005, 17066, 111071, 44434, 133994, 120798, 12766, 227798, 45756, 132852, 29917, 36076, 55352, + 65281, 129800, 41958, 18944, 84678, 18580, 168093, 132621, 39997, 54092, 27740, 32354, 3770, 114118, 103242, 43918, + 15899, 18574, 145944, 3190, 123469, 219903, 24169, 100571, 62403, 16776, 92779, 14535, 17168, 16475, 14304, 37231, + 1712, 28218, 242754, 61688, 28980, 1318, 51359, 222657, 99200, 67989, 31772, 23932, 35351, 201251, 49041, 27306, + 19128, 40135, 3986, 77333, 19649, 120683, 151927, 21081, 7076, 78375, 77501, 101599, 8011, 89585, 96715, 
58179, + 5378, 102138, 106793, 26051, 217276, 4197, 16297, 27014, 46721, 13322, 22806, 5278, 29629, 70632, 9647, 71519, + 58818, 40603, 128530, 8903, 36770, 56900, 31483, 26935, 43845, 34265, 34920, 87658, 6114, 84767, 64250, 47318, + 50720, 19264, 162514, 33357, 13117, 6705, 46696, 75032, 71054, 87004, 42035, 69138, 11903, 99854, 102328, 19611, + 34525, 69312, 6431, 49842, 101600, 133178, 108751, 41829, 89939, 225664, 48916, 99556, 9195, 130387, 5960, 36857, + 116724, 53518, 94002, 39077, 53996, 6945, 22261, 64291, 8314, 152785, 57588, 16522, 9091, 5048, 87671, 35441, + 39509, 1945, 12423, 158923, 178413, 37549, 14095, 1475, 73188, 62878, 4819, 24012, 68534, 42606, 4010, 120809, + 57497, 59564, 101758, 103718, 32701, 80116, 12345, 95834, 46918, 21468, 53213, 15665, 31200, 3867, 5140, 96013, + 250744, 21016, 10069, 13968, 35449, 180829, 27683, 39704, 59956, 22893, 3115, 26293, 32785, 75934, 62445, 141162, + 62720, 2018, 83638, 19949, 114012, 95006, 3330, 99829, 130935, 309272, 9565, 55874, 121727, 37017, 23586, 319858, + 40970, 27602, 8625, 112329, 61060, 100088, 118525, 25922, 16232, 1907, 60671, 51583, 44553, 80993, 5262, 94679, + 8676, 940, 20736, 11823, 3020, 16476, 12340, 152600, 97416, 3703, 25744, 66826, 16245, 16876, 46446, 84798, + 74227, 176020, 45192, 61955, 75496, 23946, 23626, 40372, 26036, 6149, 11822, 30582, 16541, 41914, 82385, 232823, + 40921, 80773, 14930, 3631, 7517, 39619, 4348, 36180, 126106, 138939, 62611, 1477, 113512, 47321, 25052, 14546, + 118881, 29060, 23589, 128322, 36795, 18401, 137921, 104699, 267929, 36194, 172791, 18113, 4766, 188215, 30083, 332586, + 94089, 5805, 77909, 22194, 68234, 154976, 43220, 40660, 70001, 184893, 138095, 11128, 103010, 22663, 5108, 212615, + 8485, 5565, 49222, 54614, 26530, 42639, 16319, 55062, 152662, 105595, 21114, 22216, 10294, 68158, 10436, 86950, + 7206, 62115, 3977, 3657, 59874, 456, 118617, 18156, 106663, 112229, 80992, 17442, 8217, 55551, 5133, 34344, + 251927, 51153, 39364, 201321, 7816, 66803, 
23057, 156724, 145664, 14276, 95705, 979, 2796, 6875, 13429, 212525, + 50602, 26276, 28284, 3424, 19465, 52397, 46963, 31420, 51399, 206476, 92317, 48851, 637, 100820, 83349, 10317, + 60227, 21972, 6908, 282439, 32857, 224767, 95629, 83882, 42106, 87338, 69757, 29840, 68709, 37665, 45244, 114577, + 49188, 175943, 54009, 186746, 106158, 70168, 3358, 234002, 50555, 9221, 129338, 9562, 20118, 32923, 78479, 118280, + 65752, 4977, 10474, 102174, 60947, 129006, 10570, 83451, 8598, 8078, 159367, 123785, 80438, 16742, 5905, 5281, + 181513, 42402, 6977, 163136, 93179, 42191, 14968, 50421, 112401, 105440, 33456, 57347, 121611, 4221, 94954, 36517, + 24046, 27796, 6255, 33394, 72990, 135408, 116627, 1233, 57874, 25654, 95419, 68156, 401399, 313338, 55208, 45573, + 93124, 119251, 47200, 38196, 11909, 130667, 45391, 73904, 64964, 167846, 4137, 115606, 52036, 62214, 7969, 160925, + 7187, 1132, 134835, 40309, 73195, 64494, 80472, 444841, 61111, 26500, 45323, 40743, 53625, 52797, 22659, 15631, + 29739, 36706, 28841, 39147, 102836, 26794, 10536, 14845, 87305, 45874, 12241, 127587, 83833, 57183, 79722, 30844, + 41304, 84655, 20825, 92500, 3722, 25655, 27811, 10157, 81634, 31362, 34088, 92487, 70123, 22190, 185100, 72658, + 139035, 192523, 88241, 2078, 230490, 44528, 85638, 100198, 22088, 29982, 291233, 241062, 13865, 4445, 137791, 37835, + 107218, 31726, 19718, 38234, 72528, 23046, 19177, 66695, 5109, 17251, 28077, 5617, 21554, 47839, 72425, 133825, + 1486, 73065, 181275, 141508, 21768, 62971, 63082, 2512, 34200, 9904, 120309, 6392, 91243, 68416, 268253, 41199, + 116757, 138551, 185526, 41246, 28986, 4093, 19057, 17295, 4148, 245766, 122360, 35356, 112075, 20301, 75441, 10998, + 7977, 19769, 62922, 937, 63547, 100196, 26427, 157820, 20983, 236696, 22935, 8140, 90315, 156004, 47204, 140973, + 7726, 45097, 52725, 22636, 23436, 257282, 105247, 522, 88389, 216031, 202204, 46812, 211666, 19693, 68828, 81691, + 45925, 11256, 30292, 372, 5236, 167826, 88328, 232776, 151611, 5360, 82104, 
18841, 80393, 25465, 18285, 20320, + 72377, 31730, 33160, 45803, 38715, 27705, 37379, 24163, 18360, 103586, 4015, 32305, 269494, 91252, 20080, 36567, + 54650, 7797, 57073, 12650, 31164, 42209, 6375, 261663, 105528, 81661, 106002, 2800, 5375, 17247, 43151, 4442, + 15727, 194619, 100855, 144898, 62320, 78465, 39929, 16454, 1967, 28311, 61363, 17219, 9395, 8745, 121445, 76939, + 80385, 162380, 22009, 54191, 44248, 16299, 122830, 48151, 74429, 78291, 64755, 14238, 44966, 2511, 17712, 67954, + 93583, 829, 105899, 49935, 84750, 11591, 33185, 85447, 42717, 27409, 208542, 28965, 62052, 52525, 5597, 25694, + 65594, 16343, 63224, 276188, 12475, 9331, 127507, 38522, 57287, 24128, 133161, 79723, 105548, 133695, 48917, 27558, + 43278, 46520, 13778, 141954, 110785, 83366, 17715, 46317, 105763, 66298, 147013, 41086, 94180, 16478, 220447, 44611, + 730, 19722, 78975, 117889, 125643, 26254, 16574, 18480, 65006, 15806, 38549, 246418, 46052, 36056, 8440, 34984, + 30170, 3163, 59800, 4458, 115442, 4283, 41970, 33507, 104078, 1653, 22, 121158, 276486, 3655, 6338, 24048, + 133421, 23641, 2161, 24422, 36006, 8086, 10675, 181474, 12307, 29514, 59143, 14729, 52509, 87128, 122470, 19446, + 80852, 33314, 24573, 119864, 14237, 9652, 57779, 6612, 51851, 15284, 98871, 90581, 124466, 156831, 21190, 22015, + 71380, 161906, 87247, 69201, 18392, 17908, 108470, 72962, 40719, 14338, 17911, 95260, 43339, 20610, 78916, 20710, + 72451, 11315, 31448, 17263, 58853, 178878, 48111, 116002, 45497, 80506, 82605, 85880, 36300, 121755, 25215, 36118, + 301929, 88728, 405223, 276136, 553, 34704, 212438, 49970, 78329, 922, 20711, 25036, 257130, 38295, 145369, 18128, + 15385, 30829, 55656, 48345, 8012, 3561, 28004, 122041, 192900, 58338, 112508, 41085, 29976, 87040, 47117, 23905, + 4336, 92061, 138880, 97407, 42083, 172121, 6256, 25192, 172671, 5, 93568, 1420, 12677, 31605, 56743, 40620, + 6015, 78415, 231077, 31298, 80026, 13902, 19048, 24924, 170586, 32955, 176119, 87859, 36731, 6773, 27711, 24658, + 26475, 
115216, 133207, 93250, 95820, 88522, 8317, 5714, 124047, 55219, 86860, 19677, 23961, 22928, 162209, 8904, + 225992, 359835, 56084, 96201, 29392, 96558, 86071, 93643, 55114, 13347, 8183, 95129, 82012, 2017, 123336, 34219, + 115554, 157159, 47747, 101684, 41008, 18735, 193781, 104151, 226906, 7552, 179874, 124113, 31159, 21162, 44010, 14771, + 51268, 166128, 31382, 73124, 77438, 92830, 205709, 12113, 1292, 38937, 13114, 1334, 2118, 15597, 69581, 14449, + 21934, 76618, 48728, 67038, 14967, 51495, 24243, 87736, 147249, 26720, 11119, 46063, 43749, 5843, 44147, 152629, + 133428, 65703, 14269, 45604, 57982, 28672, 55616, 45957, 8438, 95433, 37698, 220862, 132034, 39456, 61870, 4161, + 26501, 73560, 56418, 9845, 4654, 20916, 10456, 88920, 119358, 9015, 65931, 96507, 48029, 38534, 21676, 109081, + 43078, 34943, 25089, 6131, 28766, 23665, 5477, 10255, 16695, 67, 45778, 42443, 42770, 29534, 23733, 100513, + 62617, 42630, 48746, 14191, 43753, 50295, 26007, 8792, 57243, 43119, 54725, 164253, 58250, 112304, 131796, 25165, + 4651, 3188, 24831, 47748, 3705, 19540, 13211, 102095, 5593, 18699, 23666, 32005, 117571, 33541, 60584, 74573, + 86311, 99443, 25172, 27222, 168938, 7143, 11853, 53560, 18834, 19960, 86522, 28217, 53266, 117700, 72989, 34323, + 18721, 66450, 34346, 74056, 47217, 202002, 46269, 9429, 68582, 75458, 37823, 82843, 96652, 32549, 145144, 27958, + 19820, 158086, 31955, 201406, 135379, 31207, 192545, 12950, 51704, 9094, 248263, 76147, 64028, 110009, 79407, 89345, + 99284, 223492, 47966, 26848, 15359, 201137, 2861, 110507, 71231, 72297, 31851, 118777, 71039, 151051, 240855, 16333, + 50766, 14727, 7939, 4149, 80908, 418780, 88378, 59276, 1327, 7284, 38576, 79814, 65820, 42199, 84860, 49574, + 62596, 12396, 70598, 40117, 8648, 7994, 16836, 7630, 14047, 359699, 106878, 525, 29037, 28064, 13380, 11675, + 50669, 74216, 103539, 180314, 27449, 56299, 172344, 19274, 7301, 246099, 32043, 19422, 36506, 129317, 6806, 30140, + 4614, 46639, 66926, 932, 86600, 6322, 27847, 233103, 
10541, 39025, 34887, 3517, 12972, 26220, 2031, 66561, + 115015, 48658, 47596, 12714, 33845, 3893, 16165, 35237, 89983, 14769, 11962, 147224, 47018, 29977, 27979, 5552, + 82338, 86023, 131368, 1218, 24853, 237840, 132193, 15455, 40873, 3668, 65351, 53388, 15229, 59889, 272245, 47934, + 11858, 34347, 18038, 90853, 86981, 300602, 19343, 114181, 29362, 84921, 6095, 106059, 79472, 38015, 1206, 48741, + 6208, 80000, 21916, 17423, 6002, 108083, 24479, 34931, 56661, 9511, 26995, 100694, 163853, 35997, 81254, 58321, + 18919, 171890, 86877, 91341, 74503, 70477, 53412, 7027, 59281, 39892, 131302, 5864, 15947, 61301, 67466, 162369, + 47956, 27874, 35624, 282324, 21270, 111847, 102548, 41482, 30955, 116737, 28264, 8592, 55458, 22301, 75090, 29821, + 30697, 51709, 3041, 19208, 8038, 24634, 30467, 87509, 126428, 19389, 18814, 152686, 20701, 83474, 45832, 80891, + 105808, 11378, 153223, 120770, 98186, 150633, 49838, 9141, 12755, 30962, 5260, 74490, 21256, 31678, 65062, 33326, + 289838, 187831, 20595, 89768, 2805, 58535, 10844, 70085, 12090, 2451, 138068, 98544, 24461, 4511, 6754, 41684, + 28203, 3383, 65355, 82833, 30161, 83924, 234361, 128424, 28921, 222594, 33975, 125491, 34069, 11508, 67464, 144226, + 41850, 98703, 34371, 7901, 21254, 38398, 65651, 23549, 53883, 213340, 123269, 12028, 71764, 177701, 28758, 2623, + 68395, 11549, 15232, 68603, 9660, 63116, 36079, 57093, 31198, 20475, 48467, 89984, 35619, 186847, 107469, 31389, + 43631, 73867, 41949, 68841, 114250, 1605, 30564, 63403, 17588, 27680, 99533, 12641, 70325, 50428, 73426, 78379, + 11855, 91651, 72081, 91720, 60198, 15743, 12065, 83398, 140046, 6761, 46598, 45900, 5068, 886, 62448, 148968, + 37347, 19405, 9680, 15819, 43496, 63370, 75667, 163700, 37639, 3633, 22774, 34341, 183131, 134335, 37200, 23915, + 7054, 14194, 12970, 26438, 13350, 285521, 25594, 8219, 104410, 91039, 168804, 138480, 149734, 15907, 33818, 61132, + 60082, 4622, 110187, 56736, 13551, 73571, 3945, 73463, 65498, 17758, 263266, 17593, 2710, 27585, 54469, 
38200, + 45367, 63754, 28881, 3473, 12791, 98287, 31895, 65787, 4463, 94536, 24951, 36332, 59901, 28803, 52130, 86403, + 7668, 181822, 74831, 18977, 9850, 177206, 145485, 109798, 7292, 31421, 26280, 77211, 58511, 12507, 127004, 11113, + 147, 8729, 56208, 43066, 79926, 129937, 31345, 83947, 39915, 46146, 98763, 42566, 1337, 13192, 18323, 105163, + 80570, 117753, 16555, 72883, 11077, 159438, 40764, 70933, 83329, 26066, 12276, 72059, 21655, 173836, 126713, 69454, + 153482, 91585, 70644, 102558, 110483, 6764, 127864, 190133, 3961, 101798, 20945, 71138, 82402, 90884, 69669, 44753, + 923, 16939, 59700, 164258, 25969, 27082, 31399, 43846, 6306, 246093, 51342, 6153, 151581, 202801, 182731, 56475, + 162188, 89426, 141356, 14355, 121815, 27536, 28023, 65257, 77523, 106668, 127314, 24947, 12790, 38796, 169698, 23555, + 10725, 44573, 183083, 42088, 62716, 43265, 105958, 32050, 44067, 50118, 1668, 3874, 6243, 318411, 16599, 1691, + 94999, 52378, 28671, 216728, 123258, 2059, 34969, 69225, 5913, 136280, 171443, 141515, 91662, 22175, 135282, 80020, + 92270, 1663, 4808, 4482, 3495, 34691, 5226, 109830, 108512, 17342, 107488, 11606, 123190, 100247, 29666, 146527, + 113014, 15794, 30894, 13224, 39585, 243192, 22351, 9903, 7836, 47699, 11078, 25468, 122291, 48821, 26780, 122679, + 75521, 81450, 630, 4895, 92900, 55074, 74293, 17441, 3563, 111657, 103102, 51613, 12318, 52370, 36191, 68245, + 34269, 40445, 41354, 122901, 168604, 182500, 62012, 42557, 11259, 24428, 115113, 86345, 12362, 3909, 78430, 86852, + 134602, 20459, 47853, 93879, 22577, 7659, 3688, 38555, 13349, 17381, 56715, 91639, 12493, 10895, 92438, 3142, + 37057, 28928, 2004, 36427, 32268, 34222, 209974, 10432, 67436, 41989, 173518, 107930, 27079, 62729, 30908, 55558, + 5828, 45031, 14902, 53546, 8204, 144263, 60255, 14520, 88212, 86582, 109589, 69356, 8064, 47449, 8505, 66558, + 16886, 4844, 52817, 111260, 215129, 12941, 91118, 650, 20770, 6273, 73089, 40618, 62790, 2873, 35002, 14023, + 97208, 19386, 102646, 36993, 143736, 
135457, 35385, 113601, 17893, 32627, 84439, 100619, 56016, 6581, 57264, 172160, + 45452, 111710, 203627, 70131, 24100, 322787, 1996, 35665, 70078, 22358, 90922, 83658, 4097, 63200, 58499, 14542, + 99153, 52159, 6615, 12414, 63415, 31986, 16823, 1579, 65405, 137809, 8841, 16898, 48082, 259, 33014, 42375, + 12260, 179850, 73667, 91389, 98882, 29532, 17311, 326251, 41092, 5928, 20742, 44964, 48019, 43505, 9317, 49265, + 6643, 192712, 48424, 163487, 19861, 20113, 70848, 31928, 105333, 23685, 78563, 14638, 54755, 7158, 24142, 44018, + 20774, 125255, 20331, 24280, 10163, 1285, 2336, 39851, 4299, 117269, 46714, 63816, 87779, 159624, 11731, 9971, + 990, 137317, 108831, 50994, 74554, 162680, 23640, 131597, 146962, 170620, 34829, 91205, 21184, 1913, 63616, 18427, + 93136, 156592, 17519, 67565, 115882, 138220, 78622, 88535, 18115, 2711, 33554, 109492, 54298, 971, 24914, 25863, + 36363, 45715, 27099, 194995, 14299, 178181, 111488, 72395, 322385, 157719, 130787, 11897, 81843, 83999, 11369, 49280, + 118604, 40922, 61332, 110343, 53407, 75639, 40582, 300440, 54722, 25637, 13694, 48248, 48278, 194521, 56203, 52779, + 48783, 72627, 10953, 376, 16733, 280238, 26351, 230789, 15132, 25168, 137270, 3588, 63704, 73376, 94031, 74284, + 19443, 159557, 9697, 39901, 13351, 119050, 15406, 146455, 3460, 29556, 75195, 37673, 102524, 92329, 47289, 98413, + 15311, 100684, 56345, 7116, 95480, 11590, 7200, 167, 23610, 58426, 17730, 136656, 27944, 53151, 2701, 8824, + 103124, 3017, 90744, 113588, 53216, 79736, 65940, 26931, 498, 29568, 80540, 143543, 21292, 1740, 59268, 16561, + 180816, 42323, 50174, 40890, 52866, 10703, 57169, 4700, 17191, 4424, 93511, 49698, 166650, 26972, 48631, 165169, + 82879, 69326, 202970, 4007, 2376, 231325, 139592, 22119, 62851, 37504, 68816, 58345, 67398, 186643, 43331, 277416, + 53749, 15746, 23102, 17432, 4793, 151138, 48822, 54265, 48203, 198688, 14305, 54287, 2291, 18018, 113378, 123260, + 7180, 97549, 87027, 120085, 2920, 76080, 8190, 102005, 5641, 64580, 14955, 
59802, 54028, 58884, 19367, 81779, + 412567, 85957, 97053, 103637, 78871, 29364, 27637, 141728, 4767, 30686, 112738, 130146, 42745, 12730, 105040, 14844, + 232, 210944, 36581, 152317, 135543, 29744, 3129, 55647, 58149, 46319, 27265, 17499, 28005, 59948, 7170, 34138, + 5702, 293047, 110892, 408, 91760, 218674, 18469, 46095, 81403, 14389, 4610, 35672, 73060, 11006, 74848, 104820, + 118143, 190357, 20043, 105358, 141735, 5115, 27093, 45924, 123073, 52599, 29433, 9616, 238350, 78610, 24851, 58858, + 26769, 31969, 24613, 18294, 4982, 32735, 39639, 143563, 112073, 202205, 12567, 4873, 88601, 44897, 81503, 101648, + 81362, 34662, 85277, 17574, 48173, 21435, 221188, 40215, 39576, 80786, 26544, 64668, 81841, 10731, 37733, 247986, + 149188, 127703, 495, 18382, 54388, 72446, 43071, 30974, 198723, 89608, 41360, 190, 33045, 8386, 31658, 19992, + 237838, 119015, 137622, 50890, 100913, 6460, 116233, 267230, 26621, 104129, 65114, 14190, 41542, 14888, 85962, 23342, + 23041, 26453, 43725, 71809, 45186, 4770, 46452, 53894, 56616, 221286, 18973, 9038, 109299, 55365, 19366, 26863, + 18808, 60909, 69353, 41738, 83463, 12100, 68561, 72860, 3980, 13796, 49340, 12332, 31311, 27418, 4255, 53430, + 18976, 45523, 510, 14224, 30477, 26581, 4530, 3651, 101663, 139840, 22709, 150861, 31996, 63923, 120623, 262522, + 3076, 10528, 2929, 14672, 130238, 18087, 9816, 121894, 100308, 25085, 55111, 14565, 18952, 53293, 2042, 369988, + 23674, 61789, 133529, 28783, 108293, 35477, 47119, 36448, 71049, 40015, 33055, 78598, 198442, 1833, 159937, 40654, + 77444, 189245, 113153, 8621, 18599, 38553, 35223, 166072, 2375, 11659, 21786, 89523, 6032, 12116, 63046, 159398, + 18454, 3678, 32521, 47626, 11411, 103527, 38896, 42946, 15696, 26370, 10185, 8413, 37080, 165583, 4331, 63555, + 14907, 72220, 50056, 6623, 62236, 36565, 49783, 10049, 17503, 100581, 55951, 146244, 24724, 9626, 17969, 25524, + 109300, 173965, 99994, 101056, 46459, 43647, 53737, 277968, 8347, 123521, 74858, 33829, 44762, 77574, 877, 81377, + 
222525, 123532, 30602, 43881, 53145, 2973, 16284, 81940, 61281, 127044, 63620, 9875, 14756, 114829, 19032, 9202, + 52759, 119141, 23928, 120551, 19607, 3599, 33401, 76821, 73233, 117430, 39968, 36539, 7071, 5446, 121735, 194059, + 15206, 45283, 6706, 15603, 65615, 1207, 165723, 92275, 34773, 104447, 8396, 32353, 205240, 164323, 13600, 60555, + 79205, 25532, 22907, 33410, 57480, 107111, 69630, 32137, 47832, 70913, 33161, 20321, 2371, 117348, 10714, 86246, + 1625, 11763, 17900, 268, 78457, 99175, 97940, 101092, 86660, 32221, 14041, 128504, 125080, 53744, 124263, 31017, + 13897, 403, 31859, 21964, 5633, 111630, 5547, 77329, 17961, 18241, 84995, 25984, 12983, 67491, 62168, 47262, + 5241, 297, 51191, 7351, 8967, 147212, 82060, 16821, 782, 11033, 82431, 62957, 5026, 43459, 77963, 203477, + 53528, 6247, 191852, 87774, 74164, 215654, 13467, 1522, 219964, 28589, 244104, 16242, 117821, 67725, 72570, 156792, + 17186, 15979, 26990, 44128, 193014, 35276, 57125, 16212, 166451, 68017, 6905, 77608, 16364, 53777, 75921, 76426, + 37975, 26203, 269296, 64099, 84122, 12077, 38533, 830, 4407, 20139, 963, 43028, 38902, 42911, 37503, 83343, + 85045, 16979, 1165, 60835, 137387, 58380, 86990, 110066, 134540, 56331, 193845, 81238, 17922, 163093, 38744, 110641, + 12502, 56404, 34862, 26865, 125964, 12965, 111648, 25547, 7771, 27196, 136980, 9555, 29551, 107158, 57885, 18831, + 37705, 35505, 101742, 13970, 102109, 62548, 124657, 23328, 11124, 89592, 146376, 248050, 6241, 22033, 18337, 80685, + 29898, 11908, 216623, 67721, 106162, 146610, 21377, 15085, 91552, 42041, 62560, 122532, 125336, 102365, 121537, 142559, + 29693, 223919, 11515, 110495, 18776, 22494, 5895, 185059, 103592, 229351, 51220, 100102, 37027, 257855, 29359, 54123, + 36066, 106493, 12244, 79258, 32002, 432, 56205, 94836, 90182, 6726, 14762, 29391, 48938, 26864, 38083, 60364, + 3310, 60192, 14766, 205567, 57504, 110760, 22649, 24666, 46333, 21517, 3430, 13135, 28873, 27052, 158809, 11597, + 20529, 6695, 23138, 22960, 37137, 
45574, 6545, 305877, 43423, 26153, 24769, 59844, 14501, 10430, 134352, 56169, + 13213, 103432, 49523, 35181, 13435, 12408, 129475, 64620, 230854, 77390, 51990, 15653, 83248, 33466, 44571, 117828, + 51481, 2187, 10559, 68019, 18021, 54895, 48247, 18354, 33737, 4554, 108595, 37288, 39767, 116707, 9175, 3726, + 108877, 21616, 83684, 49862, 1938, 8543, 276466, 20134, 108498, 48770, 102254, 31914, 131520, 185291, 100559, 51890, + 209, 19526, 76471, 50544, 71814, 99351, 8172, 198526, 28816, 20419, 9109, 98389, 136777, 76479, 75596, 30635, + 165417, 48216, 120220, 25955, 211071, 39314, 24308, 32164, 2559, 146280, 43403, 9233, 17947, 90585, 1786, 86920, + 125662, 2457, 64741, 32152, 32918, 122882, 78538, 44001, 31723, 56426, 23375, 103172, 88177, 145697, 52506, 49319, + 68016, 31664, 41488, 18486, 110400, 7030, 28241, 986, 109199, 19900, 42147, 56864, 65287, 49183, 7858, 24000, + 30453, 840, 16673, 25907, 68916, 89927, 6309, 158335, 36407, 199737, 130464, 13137, 59603, 201778, 195292, 21015, + 42466, 179062, 172561, 89492, 11075, 180407, 31868, 72493, 20998, 60217, 9865, 19530, 39274, 130266, 54539, 21623, + 12535, 13505, 40641, 73375, 4087, 85633, 2153, 3117, 70680, 55788, 92096, 47509, 98493, 37490, 271936, 151475, + 3032, 16171, 96642, 34106, 78425, 125761, 19591, 3366, 19316, 54508, 24183, 50786, 194248, 91528, 33253, 34622, + 108355, 41741, 705, 3814, 3883, 108929, 13203, 67831, 10142, 59754, 68208, 29128, 84820, 56880, 38794, 24972, + 48571, 40821, 40476, 18137, 164254, 24064, 236309, 79181, 11282, 395, 39169, 2013, 51587, 28551, 9645, 701, + 109513, 115899, 113566, 12762, 62045, 58322, 103726, 41343, 40866, 244102, 143816, 2490, 70346, 40973, 52618, 15412, + 30720, 104315, 38917, 42027, 93676, 17513, 107418, 20706, 123890, 13399, 97727, 24044, 87962, 65606, 44250, 98044, + 65276, 74790, 101473, 19350, 91570, 1326, 87790, 172042, 7577, 100813, 86896, 85891, 41512, 108130, 27794, 14875, + 71431, 12835, 156250, 58135, 3759, 22476, 42176, 115873, 34686, 56523, 73643, 
108505, 51491, 20838, 12721, 32863, + 45700, 29496, 13700, 34294, 55360, 29206, 155942, 123812, 7706, 163234, 203, 132720, 49358, 144431, 8130, 175788, + 35818, 3270, 76832, 25710, 54095, 97274, 28779, 94621, 74396, 19092, 128242, 58067, 20885, 14670, 93255, 15107, + 63291, 23654, 126900, 129421, 59294, 262659, 9798, 3251, 67344, 28600, 44629, 50672, 29072, 26999, 31526, 23183, + 49175, 165843, 175455, 17282, 175411, 32022, 45989, 30298, 90690, 78118, 83156, 23749, 35636, 31317, 7069, 80381, + 94561, 133756, 14960, 97404, 6138, 41065, 78041, 32843, 16601, 34123, 9559, 146529, 123377, 96395, 54441, 42012, + 84257, 123541, 10745, 22139, 106459, 11720, 150883, 172651, 154996, 110538, 4728, 53447, 25704, 2009, 71152, 119354, + 21166, 66604, 1429, 216162, 8637, 122250, 63520, 27180, 29172, 36124, 276428, 107787, 77184, 4680, 14952, 104903, + 24418, 14793, 51561, 52931, 8371, 26342, 48526, 7118, 92066, 67280, 40653, 8847, 34597, 105438, 14198, 50163, + 61188, 146286, 50315, 41205, 170829, 161496, 585, 197359, 95056, 1687, 365794, 91349, 48507, 5804, 49263, 5146, + 104902, 96365, 117343, 132222, 46084, 96919, 16875, 8073, 262381, 79982, 52663, 13928, 16056, 153908, 15145, 109256, + 132308, 18763, 24904, 167644, 13618, 40750, 18686, 147124, 114709, 150038, 52849, 2938, 12568, 48617, 8778, 5459, + 44202, 44591, 74914, 17183, 248689, 13878, 7822, 80060, 23116, 194037, 18487, 2067, 7798, 43077, 33678, 244028, + 31320, 74273, 2794, 19466, 8218, 36280, 183997, 48124, 19416, 29656, 19280, 98734, 7715, 18311, 30701, 133602, + 150307, 126956, 7378, 2933, 79903, 13178, 12593, 86571, 26604, 92446, 13574, 44205, 65699, 427599, 21118, 8245, + 14407, 27877, 47936, 33542, 7916, 26460, 117762, 21596, 37818, 2249, 127359, 209394, 60044, 47677, 308089, 36791, + 154971, 31417, 6998, 150042, 174360, 12255, 43009, 29335, 48739, 3912, 101398, 53340, 2580, 146939, 151295, 45360, + 125275, 15273, 45383, 27456, 48761, 23314, 8750, 60801, 85823, 104759, 27894, 123685, 66968, 39480, 26917, 55290, + 
83305, 2696, 98390, 57569, 145853, 340733, 4919, 20024, 52268, 30884, 7413, 203685, 70989, 112855, 4129, 50536, + 349518, 68205, 332641, 159581, 135361, 236026, 37563, 176404, 64899, 6578, 122033, 63871, 1850, 85234, 82089, 66124, + 74145, 121098, 107351, 12687, 36881, 117334, 13136, 14698, 85933, 93866, 18047, 32620, 310, 15094, 46000, 88451, + 23632, 36645, 27940, 87618, 80520, 58892, 20976, 27702, 140090, 96075, 67841, 103292, 238964, 87778, 107338, 17019, + 83427, 67522, 7302, 8261, 47570, 116787, 8730, 80484, 61772, 174422, 56005, 131193, 52875, 14588, 28471, 59817, + 9586, 15720, 158155, 51307, 109734, 15196, 11025, 59331, 3884, 52626, 102602, 84797, 25158, 27314, 4437, 20488, + 76214, 189248, 35023, 114952, 157376, 2827, 62439, 102878, 129749, 36405, 10329, 109339, 108633, 36662, 1254, 13267, + 5470, 87105, 58004, 15397, 10434, 159667, 21864, 52022, 179464, 3013, 32147, 31496, 116832, 18494, 105502, 129227, + 107267, 50033, 13481, 9954, 24267, 22141, 16257, 116154, 36185, 950, 115685, 11305, 176708, 2048, 178671, 112573, + 287867, 162328, 497663, 95170, 50979, 193861, 50987, 30368, 136257, 31830, 46549, 15119, 169876, 23788, 17462, 249887, + 57377, 1949, 35448, 14791, 43769, 210091, 3783, 34612, 282103, 88380, 245190, 5457, 20491, 98908, 11402, 86899, + 117916, 16028, 162584, 60644, 320177, 156096, 31065, 55876, 22000, 77655, 9992, 23397, 13757, 317623, 63978, 215255, + 2443, 17648, 93231, 27388, 104529, 93807, 55505, 140477, 12046, 112040, 70887, 40152, 94365, 112353, 25063, 114679, + 266061, 71248, 119555, 15589, 2244, 617, 14129, 211431, 70110, 100652, 7777, 4383, 85911, 89221, 21010, 120615, + 58357, 86405, 37554, 41647, 18, 15143, 69662, 60491, 14714, 186134, 148344, 42347, 5410, 168175, 44535, 42449, + 343894, 129417, 99682, 20659, 27272, 140483, 63455, 222159, 17536, 13722, 42637, 62324, 11976, 114691, 148109, 2283, + 32057, 182393, 4295, 147364, 33705, 2075, 44303, 30274, 28331, 63740, 69740, 29148, 10346, 44862, 33716, 73937, + 153333, 12930, 38784, 
247159, 2515, 41053, 20256, 83368, 256189, 54639, 115240, 5096, 24661, 175419, 153552, 26516, + 141, 138176, 63885, 34115, 47222, 55709, 2765, 28479, 38875, 236608, 12229, 22921, 77291, 54426, 45388, 2860, + 57787, 114579, 295139, 105782, 17826, 71066, 19119, 54364, 69385, 16568, 12323, 28057, 33346, 34919, 124763, 155533, + 101386, 31644, 8627, 49001, 303600, 29868, 63213, 9103, 77280, 71333, 9696, 138789, 37059, 24823, 5057, 21352, + 32368, 114208, 56803, 19424, 10445, 58514, 8661, 209508, 26187, 171838, 10460, 63454, 14016, 122504, 41328, 21329, + 46618, 32493, 38225, 7855, 31763, 7945, 29876, 8734, 6438, 24205, 97490, 139977, 130740, 47323, 33195, 85390, + 57194, 13813, 60600, 21313, 96251, 7699, 27584, 170521, 139271, 1363, 4402, 336738, 129223, 84983, 69150, 13147, + 3590, 163929, 207225, 155260, 55916, 20288, 4503, 8398, 98490, 11773, 27512, 37113, 84976, 86558, 28365, 11756, + 116005, 182148, 13733, 115313, 47644, 67208, 85069, 9347, 14995, 226141, 14704, 101835, 41159, 35314, 13113, 63526, + 214039, 29978, 50446, 83339, 17440, 129441, 72522, 118641, 97816, 24907, 73844, 15717, 118884, 167255, 96509, 162793, + 30847, 36849, 51297, 78974, 77793, 10427, 1873, 2972, 9999, 35074, 28190, 64297, 146836, 46298, 60038, 163007, + 108919, 61219, 2403, 75022, 127339, 4233, 110389, 69022, 9833, 128097, 88016, 79390, 222936, 22570, 94657, 28462, + 56956, 38803, 81536, 30474, 152794, 19566, 16481, 147408, 74574, 81895, 20731, 1918, 1366, 76367, 187321, 54494, + 24366, 21690, 61696, 33283, 107477, 77499, 31112, 414383, 74362, 18463, 218441, 120929, 59848, 258629, 201924, 69269, + 454, 19989, 13054, 59894, 3623, 58908, 20681, 35723, 78523, 102680, 38988, 184112, 108087, 50944, 132704, 52966, + 21699, 18860, 96349, 201411, 82697, 85395, 95658, 5093, 6427, 177894, 44191, 32755, 26961, 155739, 6249, 31310, + 81030, 26574, 84311, 120155, 86730, 113535, 7424, 48888, 13516, 45747, 98098, 20077, 183995, 81945, 43210, 26704, + 40420, 75831, 45648, 11180, 6855, 57927, 65528, 
124096, 34851, 2598, 156633, 107572, 127352, 38169, 123845, 60142, + 62722, 105584, 232364, 23211, 68120, 1601, 22169, 89299, 747, 258039, 80572, 7258, 152249, 11862, 101204, 8834, + 121434, 33761, 19175, 133142, 46343, 40178, 48723, 3589, 41977, 30210, 38868, 62257, 10087, 82658, 87827, 90646, + 16415, 47552, 351723, 28298, 72225, 91146, 272760, 1701, 11295, 1652, 109651, 300747, 51863, 198800, 29446, 11794, + 32345, 37538, 22356, 33102, 37590, 113544, 37970, 11478, 179743, 25454, 103417, 59905, 221970, 105196, 145604, 7817, + 164809, 102360, 16974, 75840, 255333, 56902, 6659, 1954, 645, 59400, 67769, 7689, 18675, 5215, 13793, 20536, + 27852, 3387, 29523, 259718, 16860, 94625, 43143, 29245, 15848, 233581, 22685, 63631, 78557, 22836, 133302, 84513, + 1348, 51826, 47129, 98836, 58284, 1830, 1749, 94642, 10933, 6145, 12506, 10975, 13879, 103781, 144434, 10268, + 28409, 32346, 52968, 121567, 107374, 77268, 23686, 35097, 10501, 155275, 15303, 47136, 21102, 168741, 55332, 90385, + 15996, 84817, 681, 137803, 25054, 142275, 6163, 38175, 8056, 124296, 240642, 65621, 4934, 178205, 16101, 62803, + 60964, 18230, 100622, 76465, 44689, 14545, 9543, 47514, 16852, 93380, 28048, 12047, 107106, 37575, 101485, 77047, + 57326, 34819, 96137, 76916, 6469, 46264, 115983, 75768, 87668, 69942, 13027, 165, 8373, 114231, 26434, 52844, + 42799, 182044, 23580, 146254, 38081, 43236, 33883, 146220, 382894, 14606, 46035, 36481, 166621, 35417, 95382, 2957, + 59384, 60428, 36358, 66343, 75378, 22267, 22950, 83528, 17577, 56474, 25285, 4619, 179691, 75355, 95836, 53295, + 34588, 171410, 4487, 14679, 84208, 44015, 18562, 109133, 54101, 11531, 86052, 174479, 303157, 28095, 9953, 35642, + 14564, 39802, 16145, 77606, 117406, 53038, 121117, 53624, 22062, 1212, 7632, 127157, 237292, 189087, 10478, 127345, + 102515, 181997, 86752, 87623, 10966, 121602, 68783, 68681, 83042, 114380, 138349, 191305, 67176, 50085, 39016, 1427, + 42384, 1412, 67118, 122616, 72389, 25260, 2237, 13576, 137346, 19938, 20304, 
2191, 68759, 5373, 61364, 238507, + 75814, 23931, 69565, 38993, 131741, 38364, 12528, 87762, 5679, 129853, 5310, 186831, 32653, 90338, 260176, 389531, + 108118, 26843, 43985, 50175, 30563, 25106, 56965, 18130, 140428, 4542, 165503, 117991, 24219, 229605, 1819, 129663, + 1240, 3797, 76093, 18398, 71339, 51919, 93043, 27175, 47060, 216257, 6483, 35051, 1217, 16512, 80798, 129064, + 13225, 69339, 8548, 237079, 72298, 2575, 34280, 51379, 117910, 55671, 53345, 247552, 29486, 39328, 140821, 34681, + 57045, 60177, 5004, 90269, 78522, 2479, 322607, 48474, 61296, 13057, 31558, 4678, 59271, 6699, 27044, 31988, + 35944, 12503, 83480, 4389, 136508, 3781, 114121, 70279, 4488, 155829, 42214, 2898, 68191, 75695, 305850, 45041, + 74344, 106509, 30087, 17429, 93292, 12477, 290, 23080, 114802, 35714, 18751, 26554, 105424, 17775, 2144, 2412, + 100610, 65192, 113975, 52975, 180272, 135050, 129815, 76238, 106483, 21440, 63186, 4260, 46189, 9711, 28249, 4169, + 23429, 23390, 8324, 141585, 63809, 67668, 38457, 38063, 39226, 59972, 1189, 203916, 62368, 14403, 16949, 61767, + 85801, 1739, 40147, 35049, 76757, 33124, 62102, 15780, 103593, 103009, 53484, 22952, 67973, 114645, 6566, 5245, + 50462, 7601, 8288, 3513, 194571, 80276, 1908, 54592, 5124, 58571, 2513, 6800, 273997, 193904, 1119, 17991, + 117245, 2508, 129156, 82366, 26278, 71465, 63341, 56943, 39662, 106116, 94966, 156875, 9736, 2204, 122308, 94418, + 27134, 1280, 24539, 49022, 45314, 3764, 50904, 46424, 30699, 28087, 293839, 9400, 33646, 40165, 822, 147499, + 50263, 116179, 29085, 11863, 31314, 5578, 17797, 5104, 12454, 1604, 15342, 219206, 10232, 67800, 94261, 25872, + 13565, 90339, 78971, 75377, 26649, 41184, 47695, 11514, 35369, 20767, 14227, 41953, 309396, 148270, 147938, 33074, + 14453, 27499, 109019, 39018, 25738, 240196, 158931, 52820, 8612, 95853, 21524, 137010, 84901, 70869, 70021, 116794, + 48404, 38771, 6732, 1070, 70990, 187297, 49140, 5238, 576, 3564, 253975, 16027, 16483, 2811, 37775, 19034, + 25259, 4053, 2000, 
70083, 95774, 19713, 33431, 92703, 91314, 42381, 288770, 48194, 95985, 3991, 77418, 13406, + 241328, 245086, 56533, 35275, 62725, 9246, 51924, 70181, 95331, 16163, 31410, 79016, 39312, 120878, 119371, 275987, + 80124, 27712, 9186, 220, 23598, 146167, 85209, 68238, 282190, 57048, 31273, 30555, 80913, 17594, 75779, 59160, + 135002, 101219, 189377, 29225, 96735, 60126, 62522, 104000, 27620, 86814, 17240, 147533, 11001, 5425, 43682, 410, + 49460, 87270, 69480, 46315, 59448, 1816, 76201, 9431, 11788, 87960, 29063, 65539, 47347, 11678, 33846, 7008, + 196704, 9895, 6753, 8633, 120892, 59970, 572824, 115934, 6646, 202559, 892, 48351, 37611, 251282, 57823, 67263, + 57750, 26527, 34485, 90747, 7685, 88370, 6144, 64182, 1709, 41969, 21458, 62327, 181657, 49247, 225330, 122600, + 114574, 107124, 85361, 111833, 63243, 71420, 15655, 191178, 72430, 18063, 51425, 54002, 12364, 53225, 86557, 18193, + 97580, 41232, 138398, 67821, 128724, 8944, 233212, 101353, 52099, 42127, 14006, 120107, 32789, 32132, 3498, 18123, + 33758, 56058, 5779, 128760, 59888, 98869, 18445, 84702, 51911, 13234, 218379, 20093, 39031, 8074, 70195, 20708, + 23462, 24355, 131384, 60189, 26390, 10403, 41060, 7140, 10781, 49410, 42261, 87202, 82566, 41663, 43105, 60276, + 2768, 5733, 74176, 28329, 2297, 145430, 131632, 83615, 122915, 105441, 655, 224102, 5284, 136426, 67763, 16294, + 188511, 32538, 61049, 27893, 3394, 13951, 159099, 28542, 17930, 145360, 9492, 190122, 32285, 78855, 26440, 13570, + 58648, 73908, 4239, 124561, 2444, 74172, 53131, 11468, 10794, 73566, 11623, 35343, 64710, 30481, 4163, 10328, + 38309, 29901, 10538, 154377, 76132, 92405, 24839, 11679, 3465, 13449, 11637, 7824, 2337, 57754, 1260, 14458, + 41118, 19878, 38661, 13416, 159180, 37074, 163164, 54137, 28627, 52134, 184900, 8520, 40385, 29546, 30502, 22386, + 66527, 107458, 6850, 24022, 47983, 30603, 35083, 8934, 304066, 39500, 9, 28261, 33026, 77251, 9374, 44833, + 116312, 34990, 29236, 63563, 125639, 135405, 165398, 159055, 55690, 88141, 
69643, 236964, 31983, 25572, 20436, 36746, + 60896, 31850, 16179, 11828, 5888, 3043, 66368, 9750, 31167, 7915, 53111, 36430, 1333, 64344, 93659, 20061, + 60596, 180191, 51630, 6792, 30244, 43509, 101058, 22409, 420, 44210, 109783, 43223, 27030, 72477, 72831, 32679, + 29235, 7675, 47556, 12258, 39907, 149412, 84926, 118247, 24692, 71717, 105038, 86009, 45941, 41189, 89453, 29856, + 52543, 30627, 226798, 67303, 59230, 67415, 34408, 1367, 99685, 16867, 128419, 52147, 4111, 125381, 117881, 16173, + 44093, 102224, 31575, 23234, 24870, 83790, 127407, 239098, 3200, 994, 1255, 100903, 242275, 117266, 55116, 38205, + 16140, 29662, 11307, 40414, 208793, 123355, 56470, 4862, 75600, 30119, 58218, 70828, 24075, 26974, 7802, 192353, + 4851, 5475, 78720, 66596, 3409, 28573, 64396, 30381, 30690, 59859, 88256, 5406, 99945, 103064, 34463, 37727, + 24238, 86643, 60088, 4057, 23741, 5967, 162904, 38240, 28356, 93858, 25510, 122879, 6897, 3278, 7057, 11971, + 4400, 35461, 211413, 21395, 59615, 39471, 87233, 55795, 128426, 3051, 22470, 41950, 14705, 3974, 180108, 80476, + 78442, 204996, 91987, 15634, 67610, 139015, 142373, 35611, 51134, 10387, 4353, 153456, 57749, 181039, 14183, 68447, + 151532, 21107, 36452, 20551, 3186, 46247, 46383, 129666, 88736, 140662, 146243, 2066, 8360, 7978, 64818, 106963, + 17896, 47801, 10723, 114821, 223295, 74192, 3293, 3393, 16987, 74064, 11277, 91622, 4270, 29828, 27951, 387869, + 103235, 1374, 61988, 120083, 477, 145892, 128378, 11779, 211263, 61354, 18221, 17869, 46530, 83061, 108538, 157981, + 90608, 67199, 95080, 49064, 195814, 12302, 66307, 10348, 231346, 160732, 112859, 63633, 146558, 21271, 31037, 198802, + 47622, 12862, 95710, 3910, 77850, 73961, 85585, 34752, 61000, 4082, 24595, 103679, 71107, 8208, 79568, 150019, + 16615, 24961, 139857, 32664, 197366, 4559, 54735, 32696, 4126, 162019, 75698, 13916, 70108, 159638, 19834, 9349, + 24675, 175560, 49643, 18206, 52459, 27992, 10809, 88865, 401975, 133172, 29000, 34558, 30915, 3658, 25834, 42430, + 
36562, 125265, 18182, 10155, 40149, 97082, 208980, 19575, 60853, 90529, 66545, 9600, 789, 46420, 2317, 88593, + 55595, 98980, 115302, 5742, 169155, 1073, 177901, 3472, 11189, 63711, 78643, 65472, 50459, 127979, 93, 42202, + 67053, 21720, 157650, 11145, 141378, 42033, 22824, 85705, 79114, 35584, 15974, 1510, 54172, 28562, 12451, 104226, + 19190, 97151, 73024, 20948, 5151, 81741, 21499, 29006, 84183, 198074, 54003, 45120, 170125, 26240, 35177, 28389, + 64863, 79974, 60778, 176915, 232183, 45342, 2038, 80253, 41564, 40703, 32689, 5430, 100689, 5366, 23007, 134279, + 14266, 26712, 73993, 24934, 64242, 52113, 102887, 61801, 46415, 201049, 54251, 62133, 122757, 164883, 30815, 139966, + 2319, 30842, 766, 13362, 10287, 134518, 86111, 81665, 82440, 28333, 43019, 18963, 8804, 161944, 23439, 102144, + 101145, 80029, 39052, 248708, 30350, 117340, 11878, 128467, 974, 138625, 63961, 5237, 74778, 61834, 67040, 43814, + 13690, 65947, 33809, 232476, 115258, 181745, 28824, 94013, 9510, 10246, 93722, 81976, 7217, 114383, 3493, 16014, + 69045, 72692, 12145, 80981, 9507, 6692, 1620, 60820, 330444, 35474, 33962, 4797, 7053, 295463, 46445, 27026, + 12491, 77988, 49524, 35675, 90947, 29114, 166705, 101385, 133782, 32704, 6186, 84595, 176031, 185623, 45966, 151302, + 63069, 1699, 107491, 947, 15458, 74452, 196212, 6046, 10498, 12163, 10239, 35191, 243951, 9277, 9090, 29539, + 54460, 22820, 26514, 112549, 60372, 51753, 48756, 21812, 70861, 260326, 41, 44222, 10441, 16961, 48148, 138771, + 216194, 5914, 52153, 53400, 212036, 56519, 26245, 10117, 45888, 15294, 138019, 90913, 26368, 43842, 42111, 23348, + 6082, 194845, 161089, 156206, 51546, 11647, 30759, 302912, 262094, 8635, 78876, 26535, 35283, 54183, 31183, 85484, + 147873, 12989, 5197, 6356, 72894, 65347, 20150, 27370, 73787, 1493, 45918, 12366, 190217, 20724, 13858, 10981, + 67449, 81213, 7553, 14115, 72242, 271517, 11842, 48310, 88743, 143726, 22177, 3290, 243231, 58452, 62937, 12592, + 1654, 40066, 33477, 13751, 9921, 128442, 15868, 
7106, 75236, 83773, 10775, 36938, 10482, 170465, 17368, 17469, + 161508, 32752, 98340, 800, 19824, 264456, 3901, 87319, 2867, 26782, 9630, 113102, 185815, 24197, 44584, 86366, + 40224, 3636, 140916, 31731, 267731, 9567, 53678, 72984, 29389, 27963, 17106, 50282, 284911, 60170, 8322, 12608, + 23374, 89652, 5268, 39044, 229766, 8869, 151350, 31436, 177342, 12269, 183212, 120418, 116270, 2843, 78888, 69192, + 7865, 184099, 1086, 129897, 18383, 70508, 20242, 18508, 229924, 124569, 35749, 50589, 55626, 9884, 83115, 40971, + 30671, 18135, 14452, 38861, 17844, 201826, 5549, 26413, 17189, 13561, 38539, 10679, 143331, 3314, 36785, 171194, + 49685, 187713, 67506, 4618, 104039, 17060, 195080, 50648, 33159, 19238, 67559, 134840, 28599, 157523, 17130, 38064, + 117398, 94355, 31918, 13575, 34538, 40326, 13997, 3494, 348283, 62481, 26862, 3603, 104426, 244363, 153709, 112487, + 304612, 199674, 41239, 35545, 54869, 293005, 28223, 26277, 26899, 4533, 18518, 15492, 38587, 80488, 70485, 160395, + 263, 60162, 11382, 222152, 4696, 250751, 51921, 182609, 10707, 48463, 46243, 1227, 49111, 111564, 46502, 33342, + 56846, 68541, 63559, 858, 139927, 16654, 229375, 76759, 26478, 33205, 95828, 23399, 92945, 2637, 35630, 28470, + 143992, 50214, 14174, 21456, 166191, 65665, 1711, 21594, 78019, 97599, 111701, 36, 147151, 110246, 189022, 43021, + 30397, 40757, 131935, 42065, 73335, 48039, 26596, 28984, 15102, 2361, 7421, 202167, 69744, 43766, 52826, 3642, + 83304, 33873, 75140, 63169, 192389, 36551, 92748, 13039, 123959, 233220, 21738, 84447, 77230, 20228, 187852, 19095, + 25799, 92136, 108774, 29237, 53947, 2299, 118106, 2687, 8830, 42331, 202924, 33667, 2023, 73763, 30704, 19363, + 19779, 16737, 35629, 48081, 24068, 101013, 162338, 291912, 13749, 24745, 328289, 167679, 70086, 48299, 23306, 16732, + 17801, 43322, 54589, 3586, 63653, 43624, 53474, 925, 109177, 251316, 43805, 13082, 19511, 86565, 142182, 92461, + 17117, 101033, 103319, 64589, 4022, 4351, 235897, 5352, 82705, 107142, 46391, 156084, 
5860, 61365, 10558, 13045, + 7717, 18357, 33922, 12590, 33065, 6928, 46993, 783, 46937, 67846, 8952, 26295, 6107, 119656, 18799, 17458, + 50747, 4229, 179559, 112727, 118080, 20683, 41464, 125468, 51560, 49749, 44231, 7359, 35339, 62988, 136487, 67015, + 5208, 29150, 24956, 105186, 48858, 6143, 18097, 6972, 16404, 73489, 58742, 97196, 36357, 164616, 5834, 32267, + 13746, 147733, 15113, 132091, 34127, 106298, 39729, 106426, 22294, 9780, 15602, 36213, 71502, 42808, 66802, 599, + 60755, 5851, 39120, 67363, 108623, 126368, 72770, 91263, 32486, 30596, 151717, 7951, 52002, 43103, 11768, 68942, + 40901, 39344, 24037, 127500, 116890, 48403, 16926, 86750, 17745, 48648, 159545, 34460, 58419, 5634, 114317, 67865, + 31462, 23352, 24010, 98185, 125708, 69686, 68337, 13610, 26271, 70691, 2980, 4768, 27225, 102402, 75453, 28106, + 8104, 6931, 1176, 6274, 6475, 112635, 22498, 6176, 238686, 26832, 28893, 90319, 14441, 15682, 15087, 39517, + 45270, 109134, 104440, 45965, 47645, 81772, 7876, 52683, 87720, 12898, 4505, 185665, 2769, 113401, 15664, 57592, + 105229, 137381, 97059, 119268, 6876, 43309, 33886, 128363, 35476, 144249, 67013, 143587, 83367, 25703, 91436, 59347, + 53236, 2289, 16519, 19844, 46309, 58558, 99834, 23313, 218816, 231303, 36388, 51333, 183535, 109792, 139277, 54306, + 90139, 18235, 8275, 32710, 37677, 82464, 86025, 92204, 88842, 117723, 37570, 128723, 234242, 76350, 73795, 34896, + 148247, 58424, 11105, 11744, 45746, 63372, 17118, 49772, 199520, 81902, 38004, 22911, 33752, 3125, 1995, 53792, + 4689, 26909, 108150, 146062, 69674, 41811, 161444, 84855, 8999, 28561, 16731, 93937, 3189, 21967, 24890, 22943, + 1356, 145300, 51569, 28802, 517, 118679, 31703, 40607, 48098, 108854, 25003, 10233, 73969, 177495, 5248, 24516, + 215347, 146192, 48712, 60626, 69188, 40735, 5866, 586, 101541, 6509, 47590, 52129, 5969, 222045, 110933, 25733, + 24223, 65339, 62812, 2414, 155418, 35819, 16022, 78423, 43138, 20995, 128255, 240673, 46745, 236093, 72176, 57085, + 97841, 61248, 107, 
36068, 193177, 105427, 55726, 215229, 20446, 47228, 100420, 87091, 14429, 121708, 23605, 21157, + 187721, 21880, 2997, 203976, 99166, 95068, 25877, 7724, 98925, 83401, 4829, 13182, 18229, 13718, 239662, 38653, + 116505, 153497, 30589, 89029, 38962, 181302, 43853, 78872, 180301, 4786, 248240, 7401, 106136, 112590, 77745, 19731, + 60880, 77789, 125748, 135487, 5975, 48627, 34084, 12419, 215770, 47557, 254582, 10364, 106495, 21856, 67539, 88981, + 38805, 21428, 48732, 42316, 12149, 16078, 52808, 25327, 51322, 33850, 51147, 12253, 122354, 46077, 56483, 254553, + 115417, 81834, 150991, 94662, 86668, 7381, 12841, 100650, 18218, 15741, 22372, 68294, 50705, 15535, 84660, 61887, + 22553, 72299, 31361, 24824, 17743, 46820, 64288, 31582, 77006, 111674, 116384, 30760, 80920, 86149, 77192, 51979, + 79691, 60342, 122805, 103800, 240873, 160744, 233114, 78962, 54920, 8608, 3484, 316104, 72548, 24337, 5088, 230040, + 21926, 10172, 36838, 26, 86221, 83458, 102176, 12062, 17571, 41929, 41170, 28428, 68239, 41750, 103930, 2634, + 18313, 53019, 34825, 97837, 63115, 24606, 73157, 152474, 14715, 91439, 37033, 109806, 140259, 30668, 174760, 380, + 135597, 95673, 136073, 65073, 134249, 13829, 17279, 122305, 4420, 46444, 10237, 64848, 203623, 70728, 10349, 182885, + 65075, 24519, 25783, 40318, 34139, 22222, 63394, 55266, 102764, 41422, 20126, 65100, 90408, 53640, 35128, 48932, + 11192, 38935, 96839, 34782, 39492, 19396, 41332, 6250, 5511, 19492, 51304, 25936, 104466, 54099, 73771, 86115, + 5080, 7669, 30891, 111700, 13931, 25276, 72289, 135447, 14820, 258641, 25265, 31005, 281179, 75286, 393, 95359, + 14623, 13584, 6680, 101227, 80173, 44933, 76666, 54542, 13244, 39348, 458, 25379, 109451, 134348, 81143, 6959, + 65554, 12027, 51311, 8716, 57589, 140731, 28467, 23316, 17272, 30458, 25980, 55229, 77197, 83798, 28302, 114784, + 7428, 34548, 26241, 14712, 39336, 103304, 18928, 54080, 12870, 334, 87722, 15208, 16895, 142098, 114262, 39820, + 83913, 57817, 28682, 7721, 14900, 108672, 11250, 
62246, 42849, 415188, 1724, 26555, 24549, 25505, 26443, 107450, + 145899, 61035, 43528, 6901, 60726, 65906, 267741, 21338, 147590, 42079, 18924, 73017, 135236, 15393, 5206, 4026, + 84185, 1531, 5988, 113890, 82647, 303391, 7386, 69844, 71611, 189865, 76523, 31877, 13315, 19314, 198575, 32821, + 1928, 67641, 25913, 104475, 103489, 3297, 70391, 18406, 15446, 113347, 19295, 93790, 27856, 1792, 167471, 116449, + 8541, 4408, 41757, 63233, 25765, 86680, 64501, 27034, 24816, 34975, 6079, 4486, 49693, 36229, 16917, 21581, + 62426, 27862, 11612, 54284, 35702, 194034, 355, 24277, 48262, 87411, 70504, 310164, 118018, 12516, 47559, 43502, + 57433, 107139, 9290, 66533, 80863, 14634, 34312, 91725, 28606, 21342, 67241, 72355, 43244, 375789, 37402, 174015, + 105070, 8342, 44167, 67494, 1890, 16365, 11723, 271002, 1865, 47918, 8350, 45564, 27742, 25110, 125803, 8553, + 49504, 81925, 62211, 4534, 15491, 19011, 80373, 206920, 667, 102405, 128623, 245524, 5553, 113309, 192739, 65766, + 19567, 22832, 261958, 29679, 21293, 71134, 20962, 105123, 24721, 860, 21752, 33448, 18372, 157167, 94822, 35770, + 173224, 232737, 75729, 28937, 46828, 28062, 25453, 5207, 140366, 36665, 30652, 6169, 67920, 150458, 92040, 23186, + 184604, 92330, 20891, 176492, 49427, 27828, 38305, 42495, 143982, 49560, 25503, 90043, 29747, 65328, 47830, 12932, + 11068, 77721, 9003, 25213, 94205, 140426, 46090, 89945, 138173, 192691, 33329, 112232, 129905, 35709, 27514, 1841, + 19957, 31411, 127476, 53572, 17497, 173549, 55063, 175135, 19841, 69314, 5192, 237921, 117660, 150697, 4060, 273045, + 50414, 98940, 65348, 153665, 164423, 58804, 156695, 48994, 213928, 86036, 28608, 8355, 39574, 34540, 16927, 135680, + 18374, 151587, 10830, 53805, 16878, 16623, 4282, 48030, 8537, 14986, 46102, 13062, 72897, 72, 33050, 108227, + 39451, 45935, 651, 113320, 40535, 95176, 57450, 48843, 5003, 19019, 10407, 211163, 3848, 1068, 4988, 32091, + 30095, 41692, 15099, 43602, 107434, 50744, 7627, 171349, 16313, 150832, 352665, 207750, 33937, 
38256, 51091, 156000, + 87889, 90663, 84175, 24908, 114900, 50365, 31494, 83829, 5398, 169342, 47521, 54818, 18935, 8356, 43094, 41212, + 174536, 10082, 92550, 6678, 60614, 23355, 69721, 14796, 34149, 128830, 58187, 3179, 208, 40325, 28399, 225029, + 401412, 51150, 31580, 207268, 6657, 10993, 69818, 64282, 289845, 23308, 12961, 38447, 6681, 52944, 31855, 2572, + 47646, 120728, 179148, 37240, 45196, 218274, 4816, 3695, 21961, 50084, 35209, 18073, 51452, 27004, 6100, 33941, + 1377, 84831, 171214, 85, 141510, 9078, 99227, 32610, 6417, 11718, 49868, 65579, 87902, 73018, 49062, 46280, + 61742, 21512, 40862, 107733, 15941, 29168, 157765, 144919, 14487, 5767, 158014, 140070, 7241, 573, 71584, 16921, + 223566, 40331, 179473, 35081, 47926, 140885, 41508, 52104, 59180, 42310, 32811, 29048, 123517, 102413, 80208, 10104, + 14746, 12649, 153641, 126022, 37965, 113017, 4171, 83, 142592, 2809, 6362, 50416, 71323, 116894, 260776, 16204, + 1524, 5760, 30351, 12658, 20703, 54403, 36083, 45408, 74772, 4946, 14485, 50759, 111222, 10890, 2195, 167147, + 92962, 130534, 16283, 177256, 35016, 15472, 210156, 151187, 73922, 117691, 43250, 52051, 37392, 24811, 24358, 30830, + 5775, 818, 21969, 1476, 127322, 151783, 58392, 31021, 106913, 65215, 89407, 90802, 28531, 11690, 20234, 95249, + 44602, 37256, 18707, 11928, 5161, 4410, 26571, 51903, 49768, 22008, 25252, 65780, 209499, 68769, 203726, 13249, + 137363, 48845, 86823, 6658, 5674, 31881, 1083, 1823, 108676, 34518, 166752, 13791, 14287, 91576, 91429, 8665, + 11529, 26401, 16191, 91972, 30964, 5254, 28486, 54697, 79613, 66520, 18447, 22870, 45203, 194466, 22822, 51703, + 12278, 76716, 44595, 73455, 33546, 12235, 144843, 36154, 51247, 11116, 33040, 3180, 225753, 60864, 1972, 28469, + 12891, 28879, 10338, 144157, 56294, 353058, 38302, 41447, 87532, 110616, 27065, 168438, 6557, 1213, 50804, 144643, + 24817, 2390, 136531, 38174, 247513, 16190, 4059, 122791, 131994, 137430, 39506, 57650, 16305, 5188, 54309, 106128, + 20628, 88071, 67394, 395446, 
250285, 66176, 91254, 1399, 114196, 43915, 60230, 44853, 27206, 106353, 43013, 18733, + 345105, 226453, 51202, 16607, 57106, 117175, 35492, 10476, 89598, 127439, 15187, 39624, 13688, 61570, 10615, 31111, + 59370, 6238, 175252, 32143, 224492, 41388, 95408, 34384, 148238, 78307, 38959, 9340, 160091, 61443, 15737, 11216, + 41244, 170, 38299, 102443, 113097, 26382, 14027, 33707, 3957, 76300, 66160, 19431, 18900, 6952, 1717, 108656, + 82206, 188021, 257335, 27295, 43999, 41210, 31777, 46956, 57457, 12657, 11489, 15697, 48060, 204748, 53583, 82422, + 284790, 30503, 137341, 8120, 19615, 220311, 15991, 10217, 63424, 9808, 67431, 70976, 98221, 4491, 15177, 28535, + 144789, 751, 13230, 2394, 1504, 33977, 132104, 30316, 22230, 931, 97193, 185240, 24826, 22687, 174322, 15307, + 22988, 1390, 188745, 180325, 29580, 59068, 74903, 18994, 29195, 79, 15436, 7622, 38462, 11566, 138710, 44828, + 45774, 37768, 99236, 68137, 84083, 19282, 22698, 17134, 74807, 126662, 173497, 46248, 16938, 119735, 3212, 28292, + 213652, 49013, 9975, 32180, 45660, 86250, 4801, 68788, 95490, 77482, 113751, 11994, 44624, 94452, 46839, 128497, + 100316, 5798, 58588, 73184, 202987, 65417, 37790, 88524, 1606, 43156, 97964, 105717, 34947, 11203, 100060, 37742, + 130074, 93653, 107799, 94311, 196106, 41347, 8035, 10780, 16390, 27883, 118236, 167395, 1979, 25006, 19375, 31628, + 18916, 144723, 78502, 114047, 103107, 86492, 107686, 5844, 20934, 206963, 23556, 22591, 16562, 146333, 20167, 10471, + 117434, 33085, 2863, 9740, 36669, 41849, 37271, 22790, 18209, 28979, 8231, 12952, 54408, 21731, 25130, 45208, + 55748, 138120, 75826, 414, 29593, 9925, 292865, 25999, 683, 123149, 7036, 92159, 86055, 61827, 103680, 23176, + 54918, 58466, 57578, 13305, 5709, 86479, 16697, 31064, 17660, 200919, 10770, 49793, 33423, 32370, 52047, 16488, + 62555, 6459, 8426, 83493, 7763, 59725, 82812, 18628, 67760, 79405, 68557, 9612, 7673, 28102, 56517, 69620, + 171797, 32458, 29541, 15870, 81109, 32080, 207644, 71495, 21202, 11039, 91036, 
61230, 2810, 130800, 32260, 4613, + 60590, 37112, 75214, 33979, 126402, 155062, 30642, 63875, 12810, 194463, 82799, 47664, 16725, 36685, 43367, 61099, + 449, 172150, 102867, 21691, 301838, 36745, 7130, 18671, 57316, 34852, 38034, 54182, 35578, 65900, 99486, 19771, + 3456, 2658, 16914, 99866, 28390, 28109, 8262, 21147, 34353, 20006, 4228, 137085, 1675, 203023, 283196, 198286, + 214375, 163329, 290603, 152574, 40471, 83506, 30068, 14730, 23177, 131539, 34759, 27668, 32178, 71896, 104799, 116305, + 85430, 119262, 42860, 25160, 8911, 23428, 49437, 105322, 6519, 16203, 6349, 74711, 1230, 38045, 8540, 75165, + 44736, 25909, 51026, 317034, 4984, 32281, 91312, 27060, 44431, 17817, 45363, 155937, 239085, 35697, 59784, 91993, + 29531, 126740, 213757, 76560, 167776, 285273, 24262, 8237, 65030, 41160, 74437, 48804, 118916, 13159, 37842, 1031, + 75349, 1478, 11655, 108777, 23435, 277425, 101734, 67469, 70231, 124711, 43532, 28514, 65526, 54956, 1000, 21882, + 17728, 25302, 40952, 52214, 149632, 1999, 2111, 3259, 63362, 89961, 220561, 39777, 26335, 9063, 10572, 12416, + 34551, 34623, 38604, 24723, 5947, 15588, 69927, 66252, 119177, 69173, 46629, 28714, 70715, 212408, 20521, 406913, + 74380, 11716, 50659, 50862, 37009, 88460, 130101, 7210, 53853, 538, 65120, 151950, 55806, 163748, 52837, 13153, + 21100, 16674, 64536, 6091, 138201, 44837, 58547, 3723, 163, 2177, 32288, 85454, 34033, 8497, 14282, 25742, + 10535, 10741, 79559, 117493, 243787, 49337, 100718, 79495, 40139, 42956, 7551, 55433, 15421, 31509, 23034, 45081, + 547, 61176, 53434, 328001, 8470, 36263, 30145, 4519, 74173, 53935, 11845, 73774, 60211, 78025, 3, 4102, + 73782, 109293, 315332, 48412, 26683, 13714, 6865, 20128, 18490, 104141, 325, 39470, 171970, 115860, 15707, 7268, + 73301, 74336, 31370, 2368, 111827, 107757, 136231, 142844, 97138, 96638, 84053, 38691, 23801, 1588, 10573, 122098, + 77039, 240, 186135, 146101, 11996, 18143, 112963, 46171, 155836, 348769, 47795, 121213, 116266, 132515, 3344, 144804, + 31286, 99187, 
255838, 129694, 35894, 48779, 55235, 148582, 71967, 65282, 15174, 13920, 47080, 6147, 108242, 157593, + 125025, 7136, 1286, 28957, 127956, 28402, 98813, 20805, 7532, 109417, 40610, 5041, 32958, 15142, 18408, 108596, + 33543, 50517, 27748, 80114, 233434, 91447, 487, 37094, 100048, 30541, 43477, 10639, 89862, 155868, 37667, 8726, + 60684, 237903, 73408, 99589, 12190, 38739, 97348, 3914, 13594, 2680, 149016, 13907, 30171, 28343, 23530, 115225, + 61104, 35821, 147679, 14337, 4297, 244282, 24085, 326976, 56428, 7851, 21303, 131620, 71446, 83253, 68692, 111870, + 5224, 15813, 38197, 49026, 45057, 13660, 3306, 76345, 40671, 27905, 91072, 996, 68527, 62085, 91351, 122634, + 55109, 168209, 2024, 27560, 112707, 17352, 8306, 167115, 169921, 166958, 5031, 46020, 11844, 67284, 19130, 76185, + 6920, 32849, 5450, 14610, 22451, 21002, 17392, 31872, 66682, 84796, 13709, 40210, 59898, 12029, 8719, 53564, + 21462, 91884, 21647, 88379, 194428, 12754, 37797, 132826, 160016, 22567, 54383, 53186, 77611, 31107, 8339, 4694, + 19185, 90355, 23597, 17222, 140675, 28442, 23668, 55977, 9128, 61555, 28774, 155229, 17658, 9390, 24379, 69357, + 15752, 127381, 239631, 62460, 93181, 55913, 45133, 140155, 18676, 25249, 33164, 29581, 82837, 67223, 22362, 29975, + 7317, 52813, 1943, 29613, 20012, 207130, 49617, 49651, 5636, 15334, 36313, 29226, 28084, 95247, 72072, 19000, + 224932, 15811, 114, 32127, 38097, 37508, 88507, 37225, 27359, 91626, 12193, 69279, 20608, 11055, 88156, 92808, + 2152, 57259, 55275, 72789, 24475, 104414, 1708, 9882, 3818, 48661, 66897, 1631, 34806, 227930, 85815, 87753, + 18321, 250664, 72733, 25107, 206797, 50891, 8082, 196411, 92596, 96764, 152823, 65514, 22819, 387277, 62176, 51225, + 40329, 15563, 189, 3659, 73670, 64357, 51793, 275136, 33482, 86653, 74615, 67058, 11318, 125720, 15388, 22388, + 8267, 1730, 102663, 170910, 40784, 7144, 85373, 13040, 7088, 94309, 583, 44224, 140424, 77439, 18496, 164026, + 36578, 4722, 9151, 5824, 63365, 26510, 35199, 40500, 79277, 32495, 
44614, 35233, 9566, 203293, 152144, 7097, + 2330, 183480, 98629, 13423, 330887, 44130, 68600, 30939, 97829, 31012, 345465, 56747, 94879, 4939, 160027, 149761, + 99423, 46099, 32251, 15332, 8761, 96094, 128555, 5763, 235318, 222223, 55729, 30241, 55420, 201746, 3987, 81382, + 8259, 49325, 23287, 7719, 24633, 251100, 92311, 18591, 110533, 64759, 170260, 393860, 7175, 21144, 132887, 3593, + 75346, 101277, 91109, 16387, 259187, 11627, 57459, 173829, 44694, 55780, 49797, 89192, 120443, 62622, 3904, 14814, + 23887, 1027, 112258, 64955, 99800, 11132, 66353, 36202, 48624, 18158, 88481, 96882, 43059, 11040, 2455, 7077, + 21651, 181159, 99126, 100434, 61388, 68186, 19161, 110468, 120052, 8819, 55324, 41494, 7014, 37689, 3618, 87729, + 92615, 207943, 9823, 128657, 12587, 15857, 6379, 67628, 51216, 71775, 157617, 63244, 1503, 3864, 218754, 110864, + 5769, 21492, 7243, 1192, 87921, 85529, 31512, 18537, 42698, 35350, 73510, 84474, 34301, 8991, 21013, 35034, + 566, 38832, 19838, 35586, 37216, 39413, 55006, 12178, 59742, 856, 84563, 6900, 25632, 17437, 49786, 30723, + 13847, 70845, 4044, 7843, 23944, 235976, 55530, 48942, 6518, 20939, 73769, 192653, 52936, 95207, 23895, 132542, + 142982, 22632, 87452, 48042, 54018, 178468, 10728, 26230, 23559, 363, 81269, 142012, 5718, 346258, 31456, 84333, + 246476, 51018, 66692, 101804, 120570, 39962, 30373, 70593, 2864, 60541, 19425, 54209, 104092, 7201, 31545, 48018, + 25865, 15442, 46257, 40443, 8328, 6451, 111782, 47527, 97754, 33046, 470, 245116, 31095, 39, 91934, 87208, + 73470, 36708, 36521, 12801, 70624, 36272, 8892, 79768, 12427, 55454, 103756, 5908, 52390, 62962, 22720, 141138, + 94634, 41689, 128402, 126390, 6628, 106394, 35527, 134394, 82727, 254651, 194502, 148064, 89549, 3202, 28359, 957, + 21954, 27906, 49840, 142747, 8307, 24206, 48978, 1186, 71728, 133038, 71474, 91306, 6333, 110959, 74600, 70387, + 18983, 62609, 56057, 22970, 1147, 135850, 1321, 28834, 3578, 59715, 102227, 32827, 81415, 99952, 55636, 257598, + 390, 22702, 
35701, 85872, 402916, 39216, 189795, 14929, 19467, 10112, 144422, 61514, 5279, 63421, 134686, 41436, + 8424, 51925, 10598, 132295, 124416, 4604, 194739, 210929, 57866, 31829, 51626, 50007, 9976, 91878, 61906, 56168, + 81906, 60918, 61859, 40017, 23059, 16887, 40927, 62064, 12785, 32893, 32913, 21782, 93965, 20169, 44387, 79084, + 38463, 11457, 93950, 27127, 157050, 2697, 337088, 5116, 54128, 48255, 33279, 8821, 27352, 25515, 124022, 65710, + 28906, 38557, 33390, 1722, 104435, 72215, 38551, 12094, 30978, 25113, 6671, 37355, 175109, 42862, 98024, 65406, + 221276, 59624, 118012, 64637, 78760, 86697, 21426, 1639, 40350, 12584, 67193, 84144, 31396, 7863, 143011, 69629, + 63112, 9454, 28666, 65798, 46372, 134721, 6314, 51402, 30837, 151922, 2847, 38676, 38008, 92823, 136245, 17540, + 5504, 109295, 205242, 37606, 5211, 214892, 1586, 20670, 208711, 137743, 19328, 40652, 16995, 20023, 14657, 154919, + 34422, 12996, 13918, 38221, 47690, 16398, 2959, 37680, 89122, 6721, 198469, 91876, 172043, 83898, 101992, 26084, + 94570, 3635, 76958, 22853, 76497, 38266, 176590, 168403, 44464, 142840, 79180, 184594, 1984, 41806, 83147, 11985, + 6546, 366068, 59732, 24533, 271505, 8736, 39084, 222992, 93429, 28962, 58985, 86665, 8432, 30028, 14548, 32439, + 54424, 165029, 55175, 27458, 69046, 121277, 46168, 33732, 20661, 24581, 135574, 123110, 37556, 79260, 72611, 16957, + 12939, 46162, 58238, 44907, 72936, 253758, 41324, 32518, 96480, 11949, 124438, 65280, 43256, 34107, 53533, 43531, + 37037, 28366, 45970, 32741, 173438, 6121, 194202, 62969, 26355, 30314, 58370, 28455, 1848, 50519, 82830, 90393, + 21761, 295490, 10936, 256940, 133568, 44050, 20269, 4089, 27457, 21610, 219460, 36743, 14821, 101388, 52005, 13124, + 30979, 140816, 167362, 26054, 18458, 60789, 34917, 40447, 26606, 33422, 9066, 3452, 83614, 5761, 20263, 137238, + 25038, 91310, 101, 52322, 74548, 42572, 38084, 214054, 186568, 31802, 17665, 30620, 141936, 37730, 14420, 4265, + 187218, 49640, 188208, 51441, 55388, 96452, 66659, 
40869, 42039, 60967, 221027, 19234, 178581, 29105, 96050, 9165, + 196118, 157335, 3738, 40354, 117436, 2965, 34136, 59659, 15570, 50843, 230035, 31444, 71260, 43886, 18316, 5387, + 38500, 168508, 17406, 32174, 8828, 103373, 143806, 90367, 3560, 18719, 122310, 16508, 26719, 2541, 105429, 6645, + 37998, 73190, 10591, 235916, 49737, 87112, 233941, 53188, 32193, 79154, 4544, 52905, 126477, 7580, 63501, 57314, + 3216, 31337, 6541, 103083, 60846, 49, 9756, 15481, 1355, 43840, 14319, 13743, 27486, 10222, 73114, 230718, + 418644, 16706, 6674, 279748, 23058, 45273, 295831, 86306, 2743, 5535, 88773, 21829, 35253, 120938, 31153, 3169, + 16839, 42847, 8751, 80974, 33942, 36867, 35514, 16485, 26474, 77775, 56877, 5391, 48346, 3882, 108713, 31403, + 27804, 55248, 26235, 43821, 136104, 40118, 175507, 28034, 203908, 18732, 1788, 34030, 106427, 36958, 54359, 7251, + 44936, 15356, 69139, 455, 157915, 22173, 140291, 50348, 43275, 82066, 49621, 54952, 15216, 36226, 96695, 66855, + 6936, 1987, 8227, 196087, 4631, 68827, 99004, 47541, 110265, 17953, 147605, 110242, 58520, 31312, 38724, 329975, + 642, 3155, 34497, 75937, 6207, 73843, 6120, 17249, 51429, 117746, 3218, 910, 68961, 319671, 14938, 29555, + 34700, 1649, 66673, 72268, 9655, 76800, 153087, 6941, 210168, 27130, 35398, 1780, 73242, 3135, 56689, 19556, + 165307, 8765, 35967, 121458, 13333, 70453, 17350, 117253, 22265, 13340, 44265, 39869, 441, 3742, 135025, 23581, + 33309, 16543, 17731, 13291, 157637, 283005, 21408, 101360, 63887, 52312, 83873, 5338, 233779, 23759, 186949, 34531, + 177320, 38069, 156465, 91004, 19353, 59852, 68160, 14891, 1338, 1072, 29823, 1950, 28901, 81407, 313445, 73038, + 84807, 162348, 240257, 37162, 138934, 16111, 58013, 41253, 102951, 16457, 96056, 19541, 56402, 67217, 41638, 94381, + 89674, 29481, 37456, 80815, 151579, 13937, 13683, 132537, 19699, 134545, 67020, 29816, 222341, 141235, 427578, 48868, + 129557, 233342, 23077, 87871, 16213, 18728, 16184, 9469, 37913, 19680, 2798, 171356, 178328, 13216, 
50049, 72690, + 71904, 124644, 55455, 7504, 29052, 41036, 266546, 19899, 30391, 188755, 8659, 59469, 16, 104298, 112943, 53865, + 76203, 138226, 68857, 139953, 14125, 107625, 119795, 173133, 4398, 50273, 48808, 54390, 16466, 122086, 31835, 67035, + 50971, 48859, 7508, 46427, 66477, 73021, 84615, 39985, 83076, 46779, 201569, 53336, 36443, 60865, 168164, 143810, + 51393, 25548, 169307, 32896, 24485, 38424, 21837, 29087, 275813, 51674, 6714, 64883, 46169, 187369, 55186, 76192, + 12852, 12018, 62134, 31067, 118303, 16542, 12125, 10579, 4928, 26291, 43854, 7091, 10946, 253716, 109062, 39283, + 17261, 113012, 258512, 47764, 125126, 32646, 55892, 80279, 201623, 149872, 3192, 385, 1208, 48750, 5376, 58738, + 22335, 5427, 82416, 47811, 32435, 143086, 38930, 94128, 59975, 156037, 37977, 38224, 62485, 7698, 50405, 71027, + 16462, 21559, 136153, 34131, 107506, 162069, 63703, 3101, 215029, 40407, 4178, 3774, 9187, 80019, 17880, 97926, + 67579, 2600, 18405, 8351, 47924, 86638, 70820, 92206, 86453, 29610, 42241, 119200, 3198, 15466, 67813, 57863, + 35454, 4779, 99518, 4649, 104641, 144269, 33730, 38073, 65864, 6838, 109456, 193298, 154007, 5623, 45741, 30846, + 182578, 25573, 157224, 1543, 58575, 138703, 146140, 44971, 49356, 18275, 59064, 20300, 13122, 11848, 24453, 11973, + 9797, 86843, 2919, 25530, 49210, 1130, 161220, 76788, 75373, 85604, 34926, 36014, 17777, 17255, 51533, 11676, + 92226, 51845, 119859, 21525, 5936, 18507, 28050, 1140, 31418, 14857, 34207, 47859, 10750, 36382, 32079, 106909, + 59426, 87757, 38393, 110042, 15965, 97104, 33757, 35344, 97993, 53979, 33651, 45407, 41884, 82515, 173089, 7177, + 58371, 35365, 47543, 51927, 35587, 10670, 23544, 29306, 84233, 39976, 76076, 62097, 9007, 8668, 28119, 78281, + 120790, 19835, 143020, 54968, 18670, 64959, 20649, 34469, 42570, 33001, 136570, 87796, 120044, 1106, 58700, 63951, + 127623, 12805, 83057, 40212, 31773, 49850, 7361, 54336, 347524, 101314, 23751, 19569, 48791, 29174, 49369, 20467, + 7465, 75842, 38281, 623, 
112457, 60210, 28849, 51003, 94720, 6426, 90047, 85560, 43761, 3579, 85105, 34607, + 90410, 118528, 7224, 42907, 111163, 18168, 6960, 161135, 191298, 5247, 100584, 127552, 171568, 20121, 91173, 12636, + 54615, 20199, 63730, 98105, 2396, 40387, 14438, 125012, 4765, 33235, 12865, 45299, 37728, 82098, 77872, 114037, + 59253, 19675, 24838, 398016, 102561, 11446, 17069, 57508, 178277, 65836, 99941, 26114, 2585, 271882, 136866, 50126, + 11027, 155648, 118367, 14585, 8910, 123015, 335383, 40434, 41016, 53021, 14439, 87098, 176860, 201543, 121888, 2358, + 9286, 5739, 22666, 54270, 37884, 169381, 33984, 93859, 16124, 89364, 72207, 51639, 76366, 99029, 65812, 2198, + 12147, 174891, 194289, 6986, 30252, 88822, 21284, 11445, 288337, 160821, 33034, 100869, 43852, 25761, 52882, 1144, + 103809, 1924, 84458, 86079, 43411, 13542, 139276, 18141, 34978, 41298, 7276, 26481, 173800, 33210, 17951, 142652, + 33616, 33677, 2210, 19941, 98568, 2486, 192414, 80136, 12058, 235883, 50963, 249638, 29572, 27221, 47034, 6124, + 72107, 63346, 97620, 158513, 299699, 40388, 23235, 37176, 224244, 198386, 121323, 67992, 23827, 63170, 17838, 106622, + 158590, 26807, 5345, 23489, 91891, 55474, 74834, 37981, 13058, 5977, 72552, 34706, 26828, 145172, 19904, 21367, + 34043, 960, 77092, 91381, 4733, 47446, 7680, 41697, 5170, 16960, 14741, 46101, 13656, 473, 51842, 37433, + 11103, 11551, 121951, 13191, 97536, 165932, 50397, 51628, 129028, 9069, 44885, 6590, 59195, 47045, 32940, 225472, + 90345, 21833, 13303, 29407, 96615, 141951, 5198, 6028, 18395, 7181, 3861, 14966, 156358, 167182, 36529, 55253, + 25942, 173153, 30959, 27261, 50691, 150176, 162201, 38467, 48462, 80602, 42163, 118482, 168, 108756, 26011, 17166, + 54149, 456538, 22512, 91374, 13816, 90358, 131615, 18132, 226707, 1824, 28139, 26860, 42253, 93877, 77351, 65575, + 8980, 80574, 22020, 27948, 40422, 91324, 76376, 13528, 39281, 91685, 82215, 122541, 144066, 1983, 193851, 17283, + 26320, 2739, 194978, 4790, 26845, 42627, 61300, 65815, 174612, 
55133, 4200, 191130, 79771, 158321, 52280, 166796, + 221620, 62461, 11278, 4067, 88152, 83409, 31717, 121367, 13522, 47325, 37945, 10406, 174348, 249321, 154101, 64912, + 29938, 51775, 17220, 15776, 166138, 78890, 84425, 54121, 42861, 16368, 24572, 291647, 10197, 32073, 22651, 11677, + 97509, 26952, 35787, 18424, 41910, 71614, 94977, 72318, 41594, 70024, 275419, 37702, 60199, 7335, 39107, 61315, + 18271, 18394, 33768, 87884, 104277, 123724, 7277, 56288, 71981, 189803, 49320, 3352, 6798, 14240, 8954, 69220, + 94433, 57372, 28620, 68863, 193727, 85575, 42309, 41667, 67689, 42081, 22543, 44824, 12719, 28540, 114236, 101553, + 27638, 27296, 4300, 5353, 4663, 19379, 94098, 3758, 95888, 95144, 80344, 87320, 28447, 259518, 12718, 71391, + 152731, 37063, 24132, 31911, 104896, 15672, 103782, 1521, 4945, 72541, 23717, 122632, 15619, 87175, 206120, 29428, + 189780, 61416, 28350, 44457, 972, 1175, 47233, 198738, 95789, 41907, 21953, 97034, 59341, 22864, 53713, 16873, + 32971, 20693, 20954, 31336, 21477, 16169, 38370, 16412, 9019, 3841, 24599, 21938, 17085, 6484, 81198, 76413, + 5849, 72514, 12320, 65247, 276175, 37234, 59796, 52642, 16312, 57349, 198507, 94148, 46134, 18958, 125552, 1747, + 18725, 151873, 14901, 5490, 68287, 29470, 3689, 64794, 40814, 26018, 25692, 54450, 2703, 88278, 124886, 173087, + 174000, 24159, 179477, 24276, 46004, 201876, 209202, 445, 52876, 31948, 30206, 157610, 39180, 18439, 44124, 50469, + 5774, 96278, 222758, 200216, 50290, 45486, 20435, 46986, 46276, 140133, 142326, 15569, 13363, 47522, 92583, 2182, + 7135, 16853, 22998, 30272, 4952, 63263, 35623, 39096, 53789, 44864, 20053, 110392, 124213, 4630, 16087, 28221, + 127787, 25839, 77481, 44693, 13464, 113146, 6983, 27069, 55717, 50102, 4760, 7107, 26186, 66507, 59145, 36032, + 104182, 71328, 29425, 64317, 50781, 47465, 94298, 69706, 74899, 22754, 120756, 25108, 93077, 56834, 73286, 39928, + 16218, 41699, 176763, 7555, 70819, 50083, 26895, 23315, 26014, 16773, 123079, 41712, 5719, 31516, 90427, 158540, 
+ 85051, 183128, 40864, 27505, 55392, 9058, 45224, 96857, 30901, 136622, 96557, 56304, 120061, 11501, 151448, 5773, + 89743, 7769, 86069, 2935, 18471, 41628, 10114, 33660, 110170, 49479, 26745, 92846, 33221, 26731, 18795, 87076, + 8550, 2100, 29972, 120289, 3077, 72490, 33784, 2630, 208722, 50861, 63483, 79029, 6419, 39467, 14302, 45286, + 64207, 9686, 67513, 44170, 1050, 77246, 59266, 17055, 53801, 7150, 11111, 42432, 4278, 94579, 362117, 36175, + 42902, 41933, 39002, 98489, 22913, 74161, 84773, 57036, 17556, 162288, 74485, 178760, 93867, 73635, 128860, 50362, + 261, 67455, 80001, 46080, 35662, 4368, 25247, 19230, 74393, 22588, 1822, 27682, 235324, 13798, 85998, 13194, + 235067, 23514, 71669, 147632, 23191, 134748, 214683, 105101, 1518, 25489, 247114, 7380, 54842, 26922, 3971, 26361, + 20844, 68642, 170517, 77339, 123255, 8963, 77818, 150998, 48466, 36806, 2732, 23261, 11741, 236162, 18243, 126216, + 28690, 50546, 16385, 92760, 197383, 246558, 201295, 88255, 67588, 71687, 176076, 172653, 169058, 33906, 63747, 24835, + 157621, 43338, 30050, 46152, 132741, 2770, 51371, 94835, 6614, 15112, 11749, 56936, 1250, 19027, 399017, 58036, + 100215, 23388, 55815, 308768, 124152, 94803, 9521, 64186, 8971, 28, 30427, 62163, 7616, 103838, 35079, 29203, + 131235, 7743, 17389, 10882, 37420, 61460, 228512, 85363, 41581, 131077, 62822, 119647, 10130, 54445, 26925, 19968, + 29016, 24446, 74028, 24176, 61448, 67185, 9254, 8563, 119129, 9771, 99184, 37716, 39514, 10532, 221512, 258753, + 218630, 55980, 23394, 32141, 61924, 66749, 32411, 3741, 36475, 26678, 77010, 44946, 91203, 128749, 116953, 20476, + 49625, 53116, 13735, 102335, 29376, 51946, 83407, 67892, 59212, 34685, 21083, 1546, 112982, 32972, 74397, 1078, + 190545, 16082, 86140, 58591, 89611, 101531, 10061, 105104, 76319, 20035, 17551, 52611, 169061, 190842, 100780, 23907, + 90413, 115619, 9675, 34710, 193435, 49443, 129734, 11183, 258877, 16318, 136182, 126808, 44635, 27304, 192375, 2599, + 125648, 47051, 12091, 23814, 721, 
58800, 40137, 66726, 97930, 60877, 74487, 7942, 54326, 9841, 41428, 13762, + 8211, 85383, 6950, 99177, 79806, 201786, 296464, 124087, 13144, 29741, 41721, 47634, 55088, 254286, 106408, 17041, + 99064, 12942, 64086, 45233, 14005, 2612, 55827, 255, 7984, 13980, 38574, 12776, 46654, 73499, 249951, 2101, + 26676, 25996, 132326, 116415, 119062, 50449, 31033, 23038, 11589, 179252, 20007, 14860, 129270, 21143, 17796, 144715, + 60106, 70758, 69842, 34674, 282133, 44014, 16774, 57268, 38528, 24053, 46373, 201667, 28327, 471023, 51889, 102667, + 21193, 114909, 84132, 69317, 96723, 67969, 16134, 68145, 15058, 28765, 32035, 2524, 101089, 98664, 25045, 76571, + 14957, 86040, 118506, 262428, 154764, 81573, 39681, 283900, 73287, 127825, 544, 80448, 52347, 38512, 175971, 15180, + 45467, 33086, 46552, 48894, 81107, 43213, 36672, 54025, 76703, 8053, 7608, 13299, 56619, 20752, 238099, 54164, + 105133, 1444, 32942, 953, 37564, 8000, 66316, 119463, 106817, 404, 13667, 149108, 128597, 31267, 10269, 49836, + 106150, 1484, 52330, 76965, 160486, 171648, 38456, 31263, 22424, 37738, 66245, 67467, 143369, 60471, 75610, 20895, + 115528, 86070, 60854, 40796, 49347, 18989, 15030, 11371, 37578, 15779, 79867, 10187, 86462, 46402, 155626, 93200, + 40229, 7090, 57547, 108053, 99598, 11088, 47505, 41218, 206017, 2173, 20988, 30219, 22919, 80563, 57566, 42369, + 93141, 41675, 2407, 182519, 120495, 27154, 16702, 29456, 14349, 7958, 16688, 117177, 140375, 42467, 261919, 74916, + 153569, 10836, 34742, 49526, 7621, 105997, 12212, 2270, 392377, 7755, 17959, 25086, 232152, 138791, 33847, 13860, + 35316, 5811, 1344, 71259, 50452, 207539, 92635, 50359, 5821, 33674, 30255, 2086, 2587, 96264, 17543, 42, + 6029, 9580, 43007, 139248, 82831, 12917, 29607, 25786, 51467, 42137, 85161, 100698, 31561, 88989, 121990, 278500, + 3602, 109344, 37982, 15279, 116442, 28936, 30880, 87894, 58079, 128661, 126731, 67392, 28051, 146885, 4861, 16216, + 97344, 42827, 147561, 153948, 22684, 21335, 47685, 1853, 43349, 15185, 59642, 
10229, 25520, 187921, 108972, 5579, + 98037, 24945, 6697, 19193, 63734, 137934, 75056, 89740, 19767, 224268, 56138, 63643, 151661, 39313, 70618, 84031, + 89723, 84074, 13703, 85626, 35460, 8867, 64845, 3439, 57906, 99776, 63968, 49270, 81130, 34356, 16210, 23547, + 36446, 34090, 140028, 72439, 2221, 22163, 57058, 363492, 113754, 18913, 95451, 48663, 54464, 54037, 176097, 68425, + 3023, 34906, 29482, 117389, 341780, 80431, 58330, 16753, 92616, 60907, 94846, 147486, 4498, 48646, 7773, 46801, + 7778, 18946, 464978, 47558, 33223, 177444, 7328, 15626, 63337, 94700, 11743, 9351, 255024, 39098, 16447, 42647, + 96230, 39769, 58840, 10068, 63439, 35800, 65843, 58823, 413844, 9156, 51258, 7434, 61791, 85018, 6872, 3692, + 28096, 7121, 33024, 6009, 75532, 31997, 192535, 9661, 3304, 9547, 14753, 31987, 25314, 55689, 15896, 20430, + 39472, 31340, 99744, 25398, 115569, 54883, 28719, 205423, 23071, 57855, 64638, 149867, 25671, 82403, 37616, 20668, + 39989, 77996, 74948, 140555, 175248, 64810, 36515, 46595, 4958, 248773, 24045, 28728, 136673, 168704, 20804, 114833, + 100325, 27135, 21205, 96151, 153134, 45992, 7093, 13992, 76047, 1980, 19432, 145001, 75159, 87462, 17710, 1013, + 45556, 34297, 144882, 20648, 26061, 11319, 129567, 108555, 18872, 464580, 33386, 22717, 65948, 167189, 5603, 135042, + 79542, 8801, 202632, 18114, 91882, 5973, 5239, 67315, 4431, 60916, 47819, 71693, 32597, 32606, 18183, 45072, + 80329, 76385, 24749, 51305, 40314, 156514, 14693, 130345, 13168, 66214, 18029, 12858, 34801, 27628, 14544, 10823, + 40522, 40185, 33739, 148694, 23548, 9923, 61012, 28859, 17933, 19442, 34364, 99849, 164107, 141167, 30629, 21054, + 6744, 36491, 8096, 42474, 41706, 155060, 30650, 10600, 163442, 1143, 96655, 61390, 52359, 7559, 51568, 64256, + 203854, 4467, 22453, 14504, 436398, 7878, 6980, 8293, 63610, 293747, 16167, 35763, 19627, 147603, 15419, 18032, + 110744, 51346, 33681, 54571, 40472, 48615, 39073, 21604, 13754, 173027, 92560, 11083, 47299, 63062, 11813, 52007, + 29883, 9734, 
139722, 15953, 1550, 20651, 13616, 49306, 16113, 90089, 92326, 7584, 30712, 72424, 164858, 6831, + 152871, 55746, 197721, 34167, 196442, 6022, 112107, 55215, 7538, 123381, 4920, 43539, 77165, 8939, 50392, 34192, + 20225, 79762, 22505, 58667, 40770, 29788, 97180, 82835, 4568, 8579, 13273, 363569, 35898, 49983, 436, 36598, + 3237, 131691, 62418, 35591, 8101, 4073, 379438, 65218, 76072, 33887, 2968, 27573, 212619, 288680, 68278, 72851, + 150504, 217896, 6913, 121339, 22017, 35340, 51072, 43616, 75043, 31437, 10833, 81487, 4364, 22968, 41454, 106687, + 85446, 19863, 109625, 149241, 524, 141850, 214404, 54376, 657, 237023, 9401, 108137, 53800, 32474, 49712, 53334, + 126876, 27337, 45552, 177696, 8269, 15036, 12097, 42240, 2328, 125374, 119295, 99715, 2500, 19624, 39441, 27220, + 102691, 60957, 94543, 39101, 18566, 67362, 13975, 78230, 25017, 34017, 239007, 90027, 39351, 41681, 35354, 43822, + 1043, 916, 58587, 141983, 94818, 38799, 75459, 41114, 67432, 16195, 36606, 59568, 22272, 126769, 31424, 68659, + 12287, 134302, 257977, 5756, 207285, 95637, 47248, 117689, 19583, 77451, 22373, 12200, 54993, 117118, 34244, 29386, + 34562, 53819, 71267, 64172, 77665, 49368, 7716, 59301, 25749, 45426, 194789, 17297, 2650, 1766, 32501, 45198, + 20403, 20984, 6600, 14171, 94604, 19037, 5402, 29896, 9938, 59935, 109708, 88081, 145182, 44844, 39167, 352626, + 164173, 35374, 45982, 6122, 154, 73419, 220487, 53834, 53601, 17992, 8609, 229321, 5610, 68098, 66815, 71012, + 95069, 140968, 27396, 8957, 134489, 24656, 86659, 56598, 134852, 17316, 123838, 255436, 6613, 41610, 138033, 81452, + 32023, 32396, 123687, 63398, 8693, 29712, 30407, 19296, 121188, 3551, 36099, 20032, 111948, 56624, 16547, 27453, + 35916, 15378, 52039, 56849, 13489, 22214, 73177, 53097, 277349, 2157, 14029, 187886, 10260, 141743, 246460, 91880, + 50869, 3788, 49486, 133566, 54950, 33120, 129337, 53768, 18333, 9525, 26902, 312251, 10297, 9020, 70759, 16647, + 112432, 59260, 84609, 9818, 82766, 73569, 468, 46001, 75780, 
55028, 52106, 11498, 43645, 108069, 17150, 17753, + 29417, 16705, 31799, 9606, 289, 122254, 115975, 8620, 6133, 255357, 56908, 14456, 133464, 43554, 79224, 11247, + 29630, 160, 12756, 25464, 65960, 350428, 62521, 321796, 100359, 67358, 35169, 46172, 113128, 48988, 88868, 31094, + 33266, 6847, 60887, 98188, 49659, 69117, 92977, 220228, 13947, 80181, 35103, 62170, 97351, 13475, 2440, 199768, + 19498, 36597, 46971, 25234, 67806, 62881, 84717, 73648, 181966, 10488, 94149, 21550, 26655, 63436, 48375, 14405, + 165650, 9621, 24439, 28043, 42735, 4490, 29963, 56674, 45373, 1934, 262446, 50855, 67098, 26898, 5261, 52696, + 40644, 33900, 9440, 180286, 87162, 22940, 19704, 26936, 69769, 10254, 101759, 27406, 12243, 48000, 73926, 113215, + 54935, 5726, 192787, 4312, 106216, 9366, 11550, 52949, 23457, 212271, 277152, 133895, 108374, 6191, 96477, 29980, + 218916, 58024, 54696, 40853, 91124, 65894, 91170, 65908, 252552, 6793, 29212, 15389, 44516, 122515, 52617, 35058, + 9017, 103536, 39510, 49136, 19242, 130652, 662077, 74699, 47024, 31422, 8517, 73351, 24399, 13867, 128360, 4810, + 4434, 61779, 111983, 61036, 17798, 110240, 59722, 102960, 39688, 10001, 23803, 23039, 176498, 56659, 44814, 134295, + 17188, 77577, 74466, 226175, 102472, 154333, 63900, 111747, 18062, 41171, 79669, 32773, 408933, 42562, 28931, 30907, + 107388, 43487, 2946, 240310, 23938, 24354, 319, 184983, 7927, 6488, 1422, 10790, 68809, 68209, 64775, 4361, + 202, 17123, 59634, 51200, 44391, 18188, 17843, 2619, 74278, 3230, 9540, 47187, 21702, 36274, 56894, 43907, + 16310, 34790, 16866, 6150, 5561, 13587, 107545, 108873, 126867, 86986, 28640, 33427, 19017, 5762, 80637, 17430, + 46903, 2047, 131055, 25958, 13558, 5444, 47152, 13900, 44563, 122857, 45348, 70863, 39593, 54332, 38068, 33637, + 318, 40310, 143467, 18502, 24520, 11377, 62013, 28942, 27246, 28269, 83545, 17999, 59015, 90707, 30065, 15161, + 34720, 1263, 37008, 2012, 6060, 98575, 92933, 5721, 299, 199555, 24578, 29223, 2985, 743, 115825, 109523, + 136657, 
47454, 26378, 53586, 3733, 174945, 93340, 244456, 5693, 37386, 28782, 89767, 27545, 23573, 18798, 136425, + 34320, 84778, 20041, 48453, 38215, 7477, 71958, 40621, 8773, 5874, 187927, 105965, 51100, 43533, 18083, 8443, + 10180, 43597, 2003, 183999, 69689, 12216, 129696, 146188, 62389, 34044, 68410, 12765, 43273, 26949, 266807, 3345, + 34477, 79197, 5688, 47539, 213110, 21634, 22257, 50092, 32222, 42346, 39530, 63668, 98, 134978, 74022, 5152, + 59088, 174145, 37220, 9934, 9545, 118937, 5724, 87240, 19875, 15784, 40143, 23263, 87513, 181654, 285152, 37881, + 263241, 4966, 43934, 10433, 186657, 6470, 74416, 225854, 25908, 142677, 246262, 32280, 6192, 75890, 45546, 143264, + 135305, 29742, 47013, 77787, 11732, 126658, 8763, 37950, 21806, 57557, 113464, 89465, 108995, 164574, 23894, 22996, + 23169, 15369, 23117, 17642, 130607, 40503, 36239, 280990, 44666, 9981, 40427, 147487, 26869, 168452, 32886, 32991, + 46798, 240839, 15111, 70502, 65697, 88548, 44145, 28701, 48767, 31139, 206777, 35659, 181164, 166262, 14554, 171445, + 31786, 66523, 76607, 17956, 6507, 31279, 90476, 116611, 167918, 6560, 1243, 115324, 80128, 41867, 55897, 187323, + 37069, 32596, 189444, 145931, 13390, 105530, 65709, 26805, 6999, 55714, 41300, 22915, 68951, 22138, 21120, 22264, + 10058, 19945, 33635, 56123, 99085, 10032, 5818, 6016, 46649, 57476, 35264, 94413, 112522, 262288, 93686, 83038, + 14341, 23204, 28807, 66084, 77987, 6101, 126673, 7133, 38126, 5923, 122091, 170240, 97772, 46874, 215746, 43948, + 41622, 3272, 55596, 8332, 146411, 251315, 13533, 8561, 81521, 115449, 48616, 175175, 2063, 186556, 3036, 134537, + 75772, 29728, 82360, 22973, 186559, 86348, 89100, 38388, 82297, 45610, 2613, 87082, 9986, 177812, 57884, 23591, + 47485, 42543, 33582, 44713, 74439, 257444, 252451, 31825, 35631, 38540, 33066, 5147, 13973, 4343, 51830, 70378, + 22827, 26448, 95560, 36896, 241741, 48067, 203953, 298860, 61620, 20450, 3220, 67272, 6586, 107662, 100160, 108684, + 6929, 57226, 4762, 7457, 1320, 40404, 77204, 
99309, 62750, 208653, 59977, 44000, 74315, 34332, 5819, 172217, + 64904, 114077, 18147, 84012, 1791, 98456, 90930, 21446, 116669, 103938, 7422, 85140, 59713, 5768, 326211, 16239, + 75411, 13229, 29398, 10758, 236107, 1539, 112472, 95979, 152154, 151294, 306, 21196, 38146, 10700, 6891, 84282, + 109646, 56492, 40539, 6589, 119491, 51354, 30685, 140209, 136906, 29622, 73617, 49553, 70525, 51671, 166869, 139616, + 74395, 37439, 49595, 45678, 11959, 33211, 86560, 52434, 9282, 62690, 112155, 130810, 5243, 108261, 99970, 265613, + 72551, 80049, 6391, 33365, 90721, 66737, 69872, 87011, 1860, 9032, 112544, 60905, 37371, 89015, 140351, 19076, + 850, 373531, 2802, 36725, 218795, 72062, 28990, 16550, 24614, 7815, 6187, 26336, 33373, 32162, 42791, 73555, + 32062, 23386, 10244, 56392, 49442, 27076, 136262, 12412, 14883, 1134, 33675, 97153, 199281, 15608, 100152, 74072, + 47942, 254301, 36451, 16026, 10687, 65067, 56708, 254030, 30290, 50490, 13864, 57941, 259331, 35588, 23485, 43486, + 24869, 21620, 92971, 22072, 88645, 1048, 182050, 13343, 32452, 14825, 19509, 3325, 216938, 45740, 99716, 189082, + 53740, 78245, 25609, 24311, 176777, 47340, 308354, 40669, 66085, 14102, 125339, 9225, 128709, 97207, 1271, 200933, + 78439, 113451, 88975, 18324, 46521, 11819, 18570, 141756, 72512, 170020, 52754, 63550, 118515, 103073, 93330, 32736, + 50499, 14722, 31600, 68452, 398867, 29316, 172786, 18417, 104924, 2606, 5670, 84818, 16288, 67106, 59580, 82929, + 607401, 291, 85829, 359, 15897, 35830, 50696, 65630, 52672, 22115, 356968, 29895, 40837, 231192, 34024, 38957, + 26722, 406, 23335, 124952, 72068, 68804, 13268, 147101, 164740, 276569, 162596, 66943, 11569, 26654, 66358, 4777, + 23229, 102127, 5848, 978, 2921, 59666, 5371, 28212, 90108, 42938, 39320, 2499, 4271, 108792, 33510, 125072, + 71653, 65239, 38250, 66357, 38577, 13964, 86251, 35708, 50755, 36010, 29448, 12209, 3844, 38222, 206337, 100876, + 67827, 137088, 14167, 252225, 84163, 195270, 1306, 5703, 54198, 779, 46802, 22028, 51124, 
86759, 70560, 113164, + 35685, 162145, 45471, 34561, 422, 2611, 6464, 47486, 19223, 38246, 9191, 18331, 89942, 243642, 212364, 15893, + 17518, 22617, 6409, 30046, 126182, 59716, 36560, 104428, 18846, 26592, 19458, 50793, 147333, 30826, 1388, 27647, + 10922, 14495, 33545, 19269, 135828, 39727, 41601, 46931, 233379, 49169, 131130, 182112, 16276, 82381, 118209, 142445, + 128310, 19672, 28740, 82907, 33436, 3118, 102206, 28723, 24819, 41937, 38854, 5157, 3881, 111491, 1142, 9776, + 421673, 152241, 29309, 14961, 87854, 6054, 15424, 3796, 82656, 54996, 2108, 55367, 239450, 154525, 9643, 118103, + 106041, 64601, 68549, 48707, 30266, 25772, 18740, 9462, 229669, 91798, 112152, 191327, 14493, 72828, 8175, 66636, + 236474, 25817, 87351, 129027, 76653, 20422, 22983, 71240, 27846, 44661, 12399, 46158, 77704, 53101, 35032, 11072, + 17300, 109294, 33638, 24408, 1895, 11241, 760, 17584, 82479, 125877, 63150, 141075, 34259, 23274, 81698, 15732, + 43577, 48340, 91584, 14688, 16379, 24481, 150280, 96420, 262050, 48635, 43727, 61819, 56268, 72003, 88178, 17281, + 79912, 13218, 122519, 125295, 166396, 11811, 2171, 118930, 67746, 17636, 178278, 174656, 95661, 173039, 83845, 79689, + 17473, 98555, 127696, 203415, 54730, 22925, 232239, 9309, 12136, 175026, 20740, 180188, 10747, 39816, 314017, 266131, + 10040, 175732, 112550, 220651, 31974, 37393, 888, 23008, 86799, 4303, 64905, 148467, 75337, 251, 3284, 370102, + 50264, 9835, 5438, 23655, 4481, 29851, 329, 12855, 7162, 64931, 78141, 12804, 42372, 296771, 83547, 18624, + 34874, 86271, 3360, 48665, 77735, 88767, 11463, 63527, 28889, 22258, 29140, 194315, 113924, 25499, 6406, 31334, + 1845, 4802, 49184, 43455, 35469, 127594, 92970, 61038, 115005, 38840, 87761, 106838, 8811, 20572, 55637, 11162, + 96721, 132425, 108925, 2948, 125457, 36356, 3502, 75270, 27622, 127192, 2561, 123095, 49394, 61155, 16897, 110064, + 9699, 89448, 53356, 19628, 220310, 21622, 83036, 9885, 112214, 6087, 26713, 17901, 161912, 91492, 3440, 68594, + 9266, 92238, 8087, 
6866, 150194, 72175, 80701, 13459, 31836, 43243, 239700, 95846, 44749, 50647, 21945, 230538, + 120612, 132371, 244604, 5193, 105637, 34661, 41341, 68775, 85393, 1874, 8771, 33718, 49672, 77403, 595452, 99507, + 6490, 58895, 128742, 7704, 39239, 73217, 43816, 62824, 37804, 199976, 22361, 80005, 87514, 94832, 14089, 4574, + 139975, 59142, 75523, 100268, 43906, 53442, 15152, 2547, 186002, 17011, 19513, 204282, 3343, 60568, 128318, 119250, + 4298, 51871, 41336, 71759, 21921, 45074, 98169, 145889, 99427, 11350, 1237, 5520, 28799, 7803, 53702, 21026, + 136352, 38293, 128690, 12158, 90132, 44600, 10184, 26957, 39459, 126025, 78904, 82999, 59373, 39301, 150198, 120529, + 153042, 20177, 50089, 14764, 271571, 30530, 123161, 38975, 101562, 22941, 5648, 124654, 109243, 69817, 71675, 49162, + 106884, 21241, 107795, 30258, 16572, 188262, 141456, 7688, 60718, 8271, 11044, 32440, 104608, 103419, 236109, 93156, + 43293, 128929, 42107, 67180, 25201, 115254, 185488, 130954, 72813, 167547, 20537, 39969, 38432, 22582, 184022, 1139, + 27199, 5655, 17767, 97412, 122606, 209377, 27070, 35871, 326617, 188954, 42680, 73512, 80911, 22629, 3011, 95021, + 315242, 157737, 383, 41821, 41808, 19335, 27950, 15674, 25677, 110950, 35375, 76835, 59108, 57370, 35262, 16569, + 160415, 37706, 78086, 32041, 49691, 137143, 9782, 172080, 50148, 77917, 6323, 10110, 69172, 17711, 21795, 59511, + 76184, 135114, 31046, 132319, 59105, 157578, 20549, 80778, 57649, 158421, 65143, 4575, 72235, 21899, 10797, 92745, + 34035, 106079, 80159, 4508, 78304, 25350, 75457, 46458, 32937, 25623, 47, 8531, 104751, 84953, 8138, 36508, + 187199, 66310, 115274, 13253, 32461, 38536, 1916, 42007, 187160, 35055, 26325, 84394, 35963, 94216, 45590, 97782 + ]; + + /** + * Cannot generate random KadIds longer than this + 1 + */ + const MAX_COMMON_PREFIX_LENGTH = 15; + /** + * A wrapper around `k-bucket`, to provide easy store and + * retrieval for peers. 
+ */ + class RoutingTableRefresh { + log; + peerRouting; + routingTable; + refreshInterval; + refreshQueryTimeout; + commonPrefixLengthRefreshedAt; + refreshTimeoutId; + constructor(components, init) { + const { peerRouting, routingTable, refreshInterval, refreshQueryTimeout, logPrefix } = init; + this.log = components.logger.forComponent(`${logPrefix}:routing-table:refresh`); + this.peerRouting = peerRouting; + this.routingTable = routingTable; + this.refreshInterval = refreshInterval ?? TABLE_REFRESH_INTERVAL; + this.refreshQueryTimeout = refreshQueryTimeout ?? TABLE_REFRESH_QUERY_TIMEOUT; + this.commonPrefixLengthRefreshedAt = []; + this.refreshTable = this.refreshTable.bind(this); + } + async afterStart() { + this.log(`refreshing routing table every ${this.refreshInterval}ms`); + this.refreshTable(true); + } + async stop() { + if (this.refreshTimeoutId != null) { + clearTimeout(this.refreshTimeoutId); + } + } + /** + * To speed lookups, we seed the table with random PeerIds. This means + * when we are asked to locate a peer on the network, we can find a KadId + * that is close to the requested peer ID and query that, then network + * peers will tell us who they know who is close to the fake ID + */ + refreshTable(force = false) { + this.log('refreshing routing table'); + const prefixLength = this._maxCommonPrefix(); + const refreshCpls = this._getTrackedCommonPrefixLengthsForRefresh(prefixLength); + this.log(`max common prefix length ${prefixLength}`); + this.log(`tracked CPLs [ ${refreshCpls.map(date => date.toISOString()).join(', ')} ]`); + /** + * If we see a gap at a common prefix length in the Routing table, we ONLY refresh up until + * the maximum cpl we have in the Routing Table OR (2 * (Cpl+ 1) with the gap), whichever + * is smaller. + * + * This is to prevent refreshes for Cpls that have no peers in the network but happen to be + * before a very high max Cpl for which we do have peers in the network. 
+ * + * The number of 2 * (Cpl + 1) can be proved and a proof would have been written here if + * the programmer had paid more attention in the Math classes at university. + * + * So, please be patient and a doc explaining it will be published soon. + * + * https://github.com/libp2p/go-libp2p-kad-dht/commit/2851c88acb0a3f86bcfe3cfd0f4604a03db801d8#diff-ad45f4ba97ffbc4083c2eb87a4420c1157057b233f048030d67c6b551855ccf6R219 + */ + Promise.all(refreshCpls.map(async (lastRefresh, index) => { + try { + await this._refreshCommonPrefixLength(index, lastRefresh, force); + if (this._numPeersForCpl(prefixLength) === 0) { + const lastCpl = Math.min(2 * (index + 1), refreshCpls.length - 1); + for (let n = index + 1; n < lastCpl + 1; n++) { + try { + await this._refreshCommonPrefixLength(n, lastRefresh, force); + } + catch (err) { + this.log.error(err); + } + } + } + } + catch (err) { + this.log.error(err); + } + })).catch(err => { + this.log.error(err); + }).then(() => { + this.refreshTimeoutId = setTimeout(this.refreshTable, this.refreshInterval); + if (this.refreshTimeoutId.unref != null) { + this.refreshTimeoutId.unref(); + } + }).catch(err => { + this.log.error(err); + }); + } + async _refreshCommonPrefixLength(cpl, lastRefresh, force) { + if (!force && lastRefresh.getTime() > (Date.now() - this.refreshInterval)) { + this.log('not running refresh for cpl %s as time since last refresh not above interval', cpl); + return; + } + // gen a key for the query to refresh the cpl + const peerId = await this._generateRandomPeerId(cpl); + this.log('starting refreshing cpl %s with key %p (routing table size was %s)', cpl, peerId, this.routingTable.size); + const signal = AbortSignal.timeout(this.refreshQueryTimeout); + setMaxListeners(Infinity, signal); + const peers = await length(this.peerRouting.getClosestPeers(peerId.toBytes(), { + signal + })); + this.log(`found ${peers} peers that were close to imaginary peer %p`, peerId); + this.log('finished refreshing cpl %s with key %p 
(routing table size is now %s)', cpl, peerId, this.routingTable.size); + } + _getTrackedCommonPrefixLengthsForRefresh(maxCommonPrefix) { + if (maxCommonPrefix > MAX_COMMON_PREFIX_LENGTH) { + maxCommonPrefix = MAX_COMMON_PREFIX_LENGTH; + } + const dates = []; + for (let i = 0; i <= maxCommonPrefix; i++) { + // defaults to the zero value if we haven't refreshed it yet. + dates[i] = this.commonPrefixLengthRefreshedAt[i] ?? new Date(); + } + return dates; + } + async _generateRandomPeerId(targetCommonPrefixLength) { + if (this.routingTable.kb == null) { + throw new Error('Routing table not started'); + } + const randomData = randomBytes(2); + const randomUint16 = (randomData[1] << 8) + randomData[0]; + const key = await this._makePeerId(this.routingTable.kb.localPeer.kadId, randomUint16, targetCommonPrefixLength); + return peerIdFromBytes(key); + } + async _makePeerId(localKadId, randomPrefix, targetCommonPrefixLength) { + if (targetCommonPrefixLength > MAX_COMMON_PREFIX_LENGTH) { + throw new Error(`Cannot generate peer ID for common prefix length greater than ${MAX_COMMON_PREFIX_LENGTH}`); + } + const view = new DataView(localKadId.buffer, localKadId.byteOffset, localKadId.byteLength); + const localPrefix = view.getUint16(0, false); + // For host with ID `L`, an ID `K` belongs to a bucket with ID `B` ONLY IF CommonPrefixLen(L,K) is EXACTLY B. + // Hence, to achieve a targetPrefix `T`, we must toggle the (T+1)th bit in L & then copy (T+1) bits from L + // to our randomly generated prefix. + const toggledLocalPrefix = localPrefix ^ (0x8000 >> targetCommonPrefixLength); + // Combine the toggled local prefix and the random bits at the correct offset + // such that ONLY the first `targetCommonPrefixLength` bits match the local ID. + const mask = 65535 << (16 - (targetCommonPrefixLength + 1)); + const targetPrefix = (toggledLocalPrefix & mask) | (randomPrefix & ~mask); + // Convert to a known peer ID. 
+ const keyPrefix = GENERATED_PREFIXES[targetPrefix]; + const keyBuffer = new ArrayBuffer(34); + const keyView = new DataView(keyBuffer, 0, keyBuffer.byteLength); + keyView.setUint8(0, sha256$1.code); + keyView.setUint8(1, 32); + keyView.setUint32(2, keyPrefix, false); + return new Uint8Array(keyView.buffer, keyView.byteOffset, keyView.byteLength); + } + /** + * returns the maximum common prefix length between any peer in the table + * and the current peer + */ + _maxCommonPrefix() { + // xor our KadId with every KadId in the k-bucket tree, + // return the longest id prefix that is the same + let prefixLength = 0; + for (const length of this._prefixLengths()) { + if (length > prefixLength) { + prefixLength = length; + } + } + return prefixLength; + } + /** + * Returns the number of peers in the table with a given prefix length + */ + _numPeersForCpl(prefixLength) { + let count = 0; + for (const length of this._prefixLengths()) { + if (length === prefixLength) { + count++; + } + } + return count; + } + /** + * Yields the common prefix length of every peer in the table + */ + *_prefixLengths() { + if (this.routingTable.kb == null) { + return; + } + for (const { kadId } of this.routingTable.kb.toIterable()) { + const distance = xor(this.routingTable.kb.localPeer.kadId, kadId); + let leadingZeros = 0; + for (const byte of distance) { + if (byte === 0) { + leadingZeros++; + } + else { + break; + } + } + yield leadingZeros; + } + } + } + + class AddProviderHandler { + providers; + log; + constructor(components, init) { + this.log = components.logger.forComponent(`${init.logPrefix}:rpc:handlers:add-provider`); + this.providers = init.providers; + } + async handle(peerId, msg) { + this.log('start'); + if (msg.key == null || msg.key.length === 0) { + throw new CodeError$2('Missing key', 'ERR_MISSING_KEY'); + } + let cid; + try { + // this is actually just the multihash, not the whole CID + cid = CID.decode(msg.key); + } + catch (err) { + throw new CodeError$2('Invalid CID', 
'ERR_INVALID_CID'); + } + if (msg.providers == null || msg.providers.length === 0) { + this.log.error('no providers found in message'); + } + await Promise.all(msg.providers.map(async (pi) => { + // Ignore providers not from the originator + if (!peerId.equals(pi.id)) { + this.log('invalid provider peer %p from %p', pi.id, peerId); + return; + } + if (pi.multiaddrs.length < 1) { + this.log('no valid addresses for provider %p. Ignore', peerId); + return; + } + this.log('received provider %p for %s (addrs %s)', peerId, cid, pi.multiaddrs.map((m) => multiaddr(m).toString())); + await this.providers.addProvider(cid, peerIdFromBytes(pi.id)); + })); + return undefined; + } + } + + class FindNodeHandler { + peerRouting; + peerInfoMapper; + peerId; + addressManager; + log; + constructor(components, init) { + const { peerRouting, logPrefix } = init; + this.log = components.logger.forComponent(`${logPrefix}:rpc:handlers:find-node`); + this.peerId = components.peerId; + this.addressManager = components.addressManager; + this.peerRouting = peerRouting; + this.peerInfoMapper = init.peerInfoMapper; + } + /** + * Process `FindNode` DHT messages + */ + async handle(peerId, msg) { + this.log('incoming request from %p for peers closer to %b', peerId, msg.key); + if (msg.key == null) { + throw new CodeError$2('Invalid FIND_NODE message received - key was missing', 'ERR_INVALID_MESSAGE'); + } + const closer = await this.peerRouting.getCloserPeersOffline(msg.key, peerId); + if (equals(this.peerId.toBytes(), msg.key)) { + closer.push({ + id: this.peerId, + multiaddrs: this.addressManager.getAddresses().map(ma => ma.decapsulateCode(getProtocol('p2p').code)) + }); + } + const response = { + type: MessageType.FIND_NODE, + clusterLevel: msg.clusterLevel, + closer: closer + .map(this.peerInfoMapper) + .filter(({ multiaddrs }) => multiaddrs.length) + .map(peerInfo => ({ + id: peerInfo.id.toBytes(), + multiaddrs: peerInfo.multiaddrs.map(ma => ma.bytes) + })), + providers: [] + }; + if 
(response.closer.length === 0) { + this.log('could not find any peers closer to %b than %p', msg.key, peerId); + } + return response; + } + } + + class GetProvidersHandler { + peerRouting; + providers; + peerStore; + peerInfoMapper; + log; + constructor(components, init) { + const { peerRouting, providers, logPrefix } = init; + this.log = components.logger.forComponent(`${logPrefix}:rpc:handlers:get-providers`); + this.peerStore = components.peerStore; + this.peerRouting = peerRouting; + this.providers = providers; + this.peerInfoMapper = init.peerInfoMapper; + } + async handle(peerId, msg) { + if (msg.key == null) { + throw new CodeError$2('Invalid GET_PROVIDERS message received - key was missing', 'ERR_INVALID_MESSAGE'); + } + let cid; + try { + cid = CID.decode(msg.key); + } + catch (err) { + throw new CodeError$2('Invalid CID', 'ERR_INVALID_CID'); + } + this.log('%p asking for providers for %s', peerId, cid); + const [peers, closer] = await Promise.all([ + this.providers.getProviders(cid), + this.peerRouting.getCloserPeersOffline(msg.key, peerId) + ]); + const providerPeers = await this._getPeers(peers); + const closerPeers = await this._getPeers(closer.map(({ id }) => id)); + const response = { + type: MessageType.GET_PROVIDERS, + key: msg.key, + clusterLevel: msg.clusterLevel, + closer: closerPeers + .map(this.peerInfoMapper) + .filter(({ multiaddrs }) => multiaddrs.length) + .map(peerInfo => ({ + id: peerInfo.id.toBytes(), + multiaddrs: peerInfo.multiaddrs.map(ma => ma.bytes) + })), + providers: providerPeers + .map(this.peerInfoMapper) + .filter(({ multiaddrs }) => multiaddrs.length) + .map(peerInfo => ({ + id: peerInfo.id.toBytes(), + multiaddrs: peerInfo.multiaddrs.map(ma => ma.bytes) + })) + }; + this.log('got %s providers %s closerPeers', response.providers.length, response.closer.length); + return response; + } + async _getAddresses(peerId) { + return []; + } + async _getPeers(peerIds) { + const output = []; + for (const peerId of peerIds) { + try { + 
const peer = await this.peerStore.get(peerId); + const peerAfterFilter = this.peerInfoMapper({ + id: peerId, + multiaddrs: peer.addresses.map(({ multiaddr }) => multiaddr) + }); + if (peerAfterFilter.multiaddrs.length > 0) { + output.push(peerAfterFilter); + } + } + catch (err) { + if (err.code !== 'ERR_NOT_FOUND') { + throw err; + } + } + } + return output; + } + } + + class GetValueHandler { + peerStore; + datastore; + peerRouting; + log; + constructor(components, init) { + this.log = components.logger.forComponent(`${init.logPrefix}:rpc:handlers:get-value`); + this.peerStore = components.peerStore; + this.datastore = components.datastore; + this.peerRouting = init.peerRouting; + } + async handle(peerId, msg) { + const key = msg.key; + this.log('%p asked for key %b', peerId, key); + if (key == null || key.length === 0) { + throw new CodeError$2('Invalid key', 'ERR_INVALID_KEY'); + } + const response = { + type: MessageType.GET_VALUE, + key, + clusterLevel: msg.clusterLevel, + closer: [], + providers: [] + }; + if (isPublicKeyKey(key)) { + this.log('is public key'); + const idFromKey = fromPublicKeyKey(key); + let pubKey; + try { + const peer = await this.peerStore.get(idFromKey); + if (peer.id.publicKey == null) { + throw new CodeError$2('No public key found in key book', 'ERR_NOT_FOUND'); + } + pubKey = peer.id.publicKey; + } + catch (err) { + if (err.code !== 'ERR_NOT_FOUND') { + throw err; + } + } + if (pubKey != null) { + this.log('returning found public key'); + response.record = new Libp2pRecord(key, pubKey, new Date()).serialize(); + return response; + } + } + const [record, closer] = await Promise.all([ + this._checkLocalDatastore(key), + this.peerRouting.getCloserPeersOffline(key, peerId) + ]); + if (record != null) { + this.log('had record for %b in local datastore', key); + response.record = record.serialize(); + } + if (closer.length > 0) { + this.log('had %s closer peers in routing table', closer.length); + response.closer = closer.map(peerInfo => ({ 
+ id: peerInfo.id.toBytes(), + multiaddrs: peerInfo.multiaddrs.map(ma => ma.bytes) + })); + } + return response; + } + /** + * Try to fetch a given record by from the local datastore. + * Returns the record if it is still valid, meaning + * - it was either authored by this node, or + * - it was received less than `MAX_RECORD_AGE` ago. + */ + async _checkLocalDatastore(key) { + this.log('checkLocalDatastore looking for %b', key); + const dsKey = bufferToRecordKey(key); + // Fetch value from ds + let rawRecord; + try { + rawRecord = await this.datastore.get(dsKey); + } + catch (err) { + if (err.code === 'ERR_NOT_FOUND') { + return undefined; + } + throw err; + } + // Create record from the returned bytes + const record = Libp2pRecord.deserialize(rawRecord); + if (record == null) { + throw new CodeError$2('Invalid record', 'ERR_INVALID_RECORD'); + } + // Check validity: compare time received with max record age + if (record.timeReceived == null || + Date.now() - record.timeReceived.getTime() > MAX_RECORD_AGE) { + // If record is bad delete it and return + await this.datastore.delete(dsKey); + return undefined; + } + // Record is valid + return record; + } + } + + class PingHandler { + log; + constructor(components, init) { + this.log = components.logger.forComponent(`${init.logPrefix}:rpc:handlers:ping`); + } + async handle(peerId, msg) { + this.log('ping from %p', peerId); + return msg; + } + } + + class PutValueHandler { + components; + validators; + log; + constructor(components, init) { + const { validators } = init; + this.components = components; + this.log = components.logger.forComponent(`${init.logPrefix}:rpc:handlers:put-value`); + this.validators = validators; + } + async handle(peerId, msg) { + const key = msg.key; + this.log('%p asked us to store value for key %b', peerId, key); + if (msg.record == null) { + const errMsg = `Empty record from: ${peerId.toString()}`; + this.log.error(errMsg); + throw new CodeError$2(errMsg, 'ERR_EMPTY_RECORD'); + } + try { 
+ const deserializedRecord = Libp2pRecord.deserialize(msg.record); + await verifyRecord(this.validators, deserializedRecord); + deserializedRecord.timeReceived = new Date(); + const recordKey = bufferToRecordKey(deserializedRecord.key); + await this.components.datastore.put(recordKey, deserializedRecord.serialize().subarray()); + this.log('put record for %b into datastore under key %k', key, recordKey); + } + catch (err) { + this.log('did not put record for key %b into datastore %o', key, err); + } + return msg; + } + } + + class RPC { + handlers; + routingTable; + log; + constructor(components, init) { + const { providers, peerRouting, validators, logPrefix, peerInfoMapper } = init; + this.log = components.logger.forComponent(`${logPrefix}:rpc`); + this.routingTable = init.routingTable; + this.handlers = { + [MessageType.GET_VALUE.toString()]: new GetValueHandler(components, { peerRouting, logPrefix }), + [MessageType.PUT_VALUE.toString()]: new PutValueHandler(components, { validators, logPrefix }), + [MessageType.FIND_NODE.toString()]: new FindNodeHandler(components, { peerRouting, logPrefix, peerInfoMapper }), + [MessageType.ADD_PROVIDER.toString()]: new AddProviderHandler(components, { providers, logPrefix }), + [MessageType.GET_PROVIDERS.toString()]: new GetProvidersHandler(components, { peerRouting, providers, logPrefix, peerInfoMapper }), + [MessageType.PING.toString()]: new PingHandler(components, { logPrefix }) + }; + } + /** + * Process incoming DHT messages + */ + async handleMessage(peerId, msg) { + try { + await this.routingTable.add(peerId); + } + catch (err) { + this.log.error('Failed to update the kbucket store', err); + } + // get handler & execute it + const handler = this.handlers[msg.type]; + if (handler == null) { + this.log.error(`no handler found for message type: ${msg.type}`); + return; + } + return handler.handle(peerId, msg); + } + /** + * Handle incoming streams on the dht protocol + */ + onIncomingStream(data) { + 
Promise.resolve().then(async () => { + const { stream, connection } = data; + const peerId = connection.remotePeer; + try { + await this.routingTable.add(peerId); + } + catch (err) { + this.log.error(err); + } + const self = this; // eslint-disable-line @typescript-eslint/no-this-alias + await pipe(stream, (source) => decode(source), async function* (source) { + for await (const msg of source) { + // handle the message + const desMessage = Message.decode(msg); + self.log('incoming %s from %p', desMessage.type, peerId); + const res = await self.handleMessage(peerId, desMessage); + // Not all handlers will return a response + if (res != null) { + yield Message.encode(res); + } + } + }, (source) => encode(source), stream); + }) + .catch(err => { + this.log.error(err); + }); + } + } + + /** + * Receives notifications of new peers joining the network that support the DHT protocol + */ + class TopologyListener extends TypedEventEmitter { + log; + components; + protocol; + running; + registrarId; + constructor(components, init) { + super(); + const { protocol, logPrefix } = init; + this.components = components; + this.log = components.logger.forComponent(`${logPrefix}:topology-listener`); + this.running = false; + this.protocol = protocol; + } + isStarted() { + return this.running; + } + /** + * Start the network + */ + async start() { + if (this.running) { + return; + } + this.running = true; + // register protocol with topology + this.registrarId = await this.components.registrar.register(this.protocol, { + onConnect: (peerId) => { + this.log('observed peer %p with protocol %s', peerId, this.protocol); + this.dispatchEvent(new CustomEvent$1('peer', { + detail: peerId + })); + } + }); + } + /** + * Stop all network activity + */ + async stop() { + this.running = false; + // unregister protocol and handlers + if (this.registrarId != null) { + this.components.registrar.unregister(this.registrarId); + this.registrarId = undefined; + } + } + } + + /** + * Wrapper class to 
convert events into returned values + */ + class DHTContentRouting { + dht; + constructor(dht) { + this.dht = dht; + } + async provide(cid, options = {}) { + await drain(this.dht.provide(cid, options)); + } + async *findProviders(cid, options = {}) { + for await (const event of this.dht.findProviders(cid, options)) { + if (event.name === 'PROVIDER') { + yield* event.providers; + } + } + } + async put(key, value, options) { + await drain(this.dht.put(key, value, options)); + } + async get(key, options) { + for await (const event of this.dht.get(key, options)) { + if (event.name === 'VALUE') { + return event.value; + } + } + throw new CodeError$2('Not found', 'ERR_NOT_FOUND'); + } + } + /** + * Wrapper class to convert events into returned values + */ + class DHTPeerRouting { + dht; + constructor(dht) { + this.dht = dht; + } + async findPeer(peerId, options = {}) { + for await (const event of this.dht.findPeer(peerId, options)) { + if (event.name === 'FINAL_PEER') { + return event.peer; + } + } + throw new CodeError$2('Not found', 'ERR_NOT_FOUND'); + } + async *getClosestPeers(key, options = {}) { + for await (const event of this.dht.getClosestPeers(key, options)) { + if (event.name === 'FINAL_PEER') { + yield event.peer; + } + } + } + } + const DEFAULT_MAX_INBOUND_STREAMS = 32; + const DEFAULT_MAX_OUTBOUND_STREAMS = 64; + /** + * A DHT implementation modelled after Kademlia with S/Kademlia modifications. + * Original implementation in go: https://github.com/libp2p/go-libp2p-kad-dht. 
+ */ + class KadDHT extends TypedEventEmitter { + protocol; + routingTable; + providers; + network; + peerRouting; + components; + log; + running; + kBucketSize; + clientMode; + validators; + selectors; + queryManager; + contentFetching; + contentRouting; + routingTableRefresh; + rpc; + topologyListener; + querySelf; + maxInboundStreams; + maxOutboundStreams; + dhtContentRouting; + dhtPeerRouting; + peerInfoMapper; + /** + * Create a new KadDHT + */ + constructor(components, init) { + super(); + const { kBucketSize, clientMode, validators: validators$1, selectors: selectors$1, querySelfInterval, protocol, logPrefix, pingTimeout, pingConcurrency, maxInboundStreams, maxOutboundStreams, providers: providersInit } = init; + const loggingPrefix = logPrefix ?? 'libp2p:kad-dht'; + this.running = false; + this.components = components; + this.log = components.logger.forComponent(loggingPrefix); + this.protocol = protocol ?? PROTOCOL; + this.kBucketSize = kBucketSize ?? 20; + this.clientMode = clientMode ?? true; + this.maxInboundStreams = maxInboundStreams ?? DEFAULT_MAX_INBOUND_STREAMS; + this.maxOutboundStreams = maxOutboundStreams ?? DEFAULT_MAX_OUTBOUND_STREAMS; + this.peerInfoMapper = init.peerInfoMapper ?? removePrivateAddressesMapper; + this.routingTable = new RoutingTable(components, { + kBucketSize, + pingTimeout, + pingConcurrency, + protocol: this.protocol, + logPrefix: loggingPrefix + }); + this.providers = new Providers(components, providersInit ?? 
{}); + this.validators = { + ...validators, + ...validators$1 + }; + this.selectors = { + ...selectors, + ...selectors$1 + }; + this.network = new Network(components, { + protocol: this.protocol, + logPrefix: loggingPrefix + }); + // all queries should wait for the initial query-self query to run so we have + // some peers and don't force consumers to use arbitrary timeouts + const initialQuerySelfHasRun = pDefer(); + // if the user doesn't want to wait for query peers, resolve the initial + // self-query promise immediately + if (init.allowQueryWithZeroPeers === true) { + initialQuerySelfHasRun.resolve(); + } + this.queryManager = new QueryManager(components, { + // Number of disjoint query paths to use - This is set to `kBucketSize/2` per the S/Kademlia paper + disjointPaths: Math.ceil(this.kBucketSize / 2), + logPrefix: loggingPrefix, + initialQuerySelfHasRun, + routingTable: this.routingTable + }); + // DHT components + this.peerRouting = new PeerRouting(components, { + routingTable: this.routingTable, + network: this.network, + validators: this.validators, + queryManager: this.queryManager, + logPrefix: loggingPrefix + }); + this.contentFetching = new ContentFetching(components, { + validators: this.validators, + selectors: this.selectors, + peerRouting: this.peerRouting, + queryManager: this.queryManager, + network: this.network, + logPrefix: loggingPrefix + }); + this.contentRouting = new ContentRouting(components, { + network: this.network, + peerRouting: this.peerRouting, + queryManager: this.queryManager, + routingTable: this.routingTable, + providers: this.providers, + logPrefix: loggingPrefix + }); + this.routingTableRefresh = new RoutingTableRefresh(components, { + peerRouting: this.peerRouting, + routingTable: this.routingTable, + logPrefix: loggingPrefix + }); + this.rpc = new RPC(components, { + routingTable: this.routingTable, + providers: this.providers, + peerRouting: this.peerRouting, + validators: this.validators, + logPrefix: loggingPrefix, + 
peerInfoMapper: this.peerInfoMapper + }); + this.topologyListener = new TopologyListener(components, { + protocol: this.protocol, + logPrefix: loggingPrefix + }); + this.querySelf = new QuerySelf(components, { + peerRouting: this.peerRouting, + interval: querySelfInterval, + initialInterval: init.initialQuerySelfInterval, + logPrefix: loggingPrefix, + initialQuerySelfHasRun, + routingTable: this.routingTable + }); + // handle peers being discovered during processing of DHT messages + this.network.addEventListener('peer', (evt) => { + const peerData = evt.detail; + this.onPeerConnect(peerData).catch(err => { + this.log.error('could not add %p to routing table', peerData.id, err); + }); + this.dispatchEvent(new CustomEvent$1('peer', { + detail: peerData + })); + }); + // handle peers being discovered via other peer discovery mechanisms + this.topologyListener.addEventListener('peer', (evt) => { + const peerId = evt.detail; + Promise.resolve().then(async () => { + const peer = await this.components.peerStore.get(peerId); + const peerData = { + id: peerId, + multiaddrs: peer.addresses.map(({ multiaddr }) => multiaddr), + protocols: peer.protocols + }; + await this.onPeerConnect(peerData); + }).catch(err => { + this.log.error('could not add %p to routing table', peerId, err); + }); + }); + this.dhtPeerRouting = new DHTPeerRouting(this); + this.dhtContentRouting = new DHTContentRouting(this); + // if client mode has not been explicitly specified, auto-switch to server + // mode when the node's peer data is updated with publicly dialable + // addresses + if (init.clientMode == null) { + components.events.addEventListener('self:peer:update', (evt) => { + this.log('received update of self-peer info'); + void Promise.resolve().then(async () => { + const hasPublicAddress = evt.detail.peer.addresses + .some(({ multiaddr }) => multiaddrIsPublic(multiaddr)); + const mode = this.getMode(); + if (hasPublicAddress && mode === 'client') { + await this.setMode('server'); + } + else 
if (mode === 'server' && !hasPublicAddress) { + await this.setMode('client'); + } + }) + .catch(err => { + this.log.error('error setting dht server mode', err); + }); + }); + } + } + get [contentRoutingSymbol]() { + return this.dhtContentRouting; + } + get [peerRoutingSymbol]() { + return this.dhtPeerRouting; + } + get [peerDiscoverySymbol]() { + return this; + } + async onPeerConnect(peerData) { + this.log('peer %p connected', peerData.id); + peerData = this.peerInfoMapper(peerData); + if (peerData.multiaddrs.length === 0) { + this.log('ignoring %p as there were no valid addresses in %s after filtering', peerData.id, peerData.multiaddrs.map(addr => addr.toString())); + return; + } + try { + await this.routingTable.add(peerData.id); + } + catch (err) { + this.log.error('could not add %p to routing table', peerData.id, err); + } + } + /** + * Is this DHT running. + */ + isStarted() { + return this.running; + } + /** + * If 'server' this node will respond to DHT queries, if 'client' this node will not + */ + getMode() { + return this.clientMode ? 'client' : 'server'; + } + /** + * If 'server' this node will respond to DHT queries, if 'client' this node will not + */ + async setMode(mode) { + await this.components.registrar.unhandle(this.protocol); + if (mode === 'client') { + this.log('enabling client mode'); + this.clientMode = true; + } + else { + this.log('enabling server mode'); + this.clientMode = false; + await this.components.registrar.handle(this.protocol, this.rpc.onIncomingStream.bind(this.rpc), { + maxInboundStreams: this.maxInboundStreams, + maxOutboundStreams: this.maxOutboundStreams + }); + } + } + /** + * Start listening to incoming connections. + */ + async start() { + this.running = true; + // Only respond to queries when not in client mode + await this.setMode(this.clientMode ? 
'client' : 'server'); + await start(this.querySelf, this.providers, this.queryManager, this.network, this.routingTable, this.topologyListener, this.routingTableRefresh); + } + /** + * Stop accepting incoming connections and sending outgoing + * messages. + */ + async stop() { + this.running = false; + await stop(this.querySelf, this.providers, this.queryManager, this.network, this.routingTable, this.routingTableRefresh, this.topologyListener); + } + /** + * Store the given key/value pair in the DHT + */ + async *put(key, value, options = {}) { + yield* this.contentFetching.put(key, value, options); + } + /** + * Get the value that corresponds to the passed key + */ + async *get(key, options = {}) { + yield* this.contentFetching.get(key, options); + } + // ----------- Content Routing + /** + * Announce to the network that we can provide given key's value + */ + async *provide(key, options = {}) { + yield* this.contentRouting.provide(key, this.components.addressManager.getAddresses(), options); + } + /** + * Search the dht for providers of the given CID + */ + async *findProviders(key, options = {}) { + yield* this.contentRouting.findProviders(key, options); + } + // ----------- Peer Routing ----------- + /** + * Search for a peer with the given ID + */ + async *findPeer(id, options = {}) { + yield* this.peerRouting.findPeer(id, options); + } + /** + * Kademlia 'node lookup' operation + */ + async *getClosestPeers(key, options = {}) { + yield* this.peerRouting.getClosestPeers(key, options); + } + async refreshRoutingTable() { + this.routingTableRefresh.refreshTable(true); + } + } + + /** + * @packageDocumentation + * + * This module implements the [libp2p Kademlia spec](https://github.com/libp2p/specs/blob/master/kad-dht/README.md) in TypeScript. + * + * The Kademlia DHT allow for several operations such as finding peers, searching for providers of DHT records, etc. 
+ * + * @example Using with libp2p + * + * ```TypeScript + * import { kadDHT } from '@libp2p/kad-dht' + * import { createLibp2p } from 'libp2p' + * import { peerIdFromString } from '@libp2p/peer-id' + * + * const node = await createLibp2p({ + * services: { + * dht: kadDHT({ + * // DHT options + * }) + * } + * }) + * + * const peerId = peerIdFromString('QmFoo') + * const peerInfo = await node.peerRouting.findPeer(peerId) + * + * console.info(peerInfo) // peer id, multiaddrs + * ``` + * + * @example Connecting to the IPFS Amino DHT + * + * The [Amino DHT](https://blog.ipfs.tech/2023-09-amino-refactoring/) is a public-good DHT used by IPFS to fetch content, find peers, etc. + * + * If you are trying to access content on the public internet, this is the implementation you want. + * + * ```TypeScript + * import { kadDHT, removePrivateAddressesMapper } from '@libp2p/kad-dht' + * import { createLibp2p } from 'libp2p' + * import { peerIdFromString } from '@libp2p/peer-id' + * + * const node = await createLibp2p({ + * services: { + * aminoDHT: kadDHT({ + * protocol: '/ipfs/kad/1.0.0', + * peerInfoMapper: removePrivateAddressesMapper + * }) + * } + * }) + * + * const peerId = peerIdFromString('QmFoo') + * const peerInfo = await node.peerRouting.findPeer(peerId) + * + * console.info(peerInfo) // peer id, multiaddrs + * ``` + * + * @example Connecting to a LAN-only DHT + * + * This DHT only works with privately dialable peers. + * + * This is for use when peers are on the local area network. 
+ * + * ```TypeScript + * import { kadDHT, removePublicAddressesMapper } from '@libp2p/kad-dht' + * import { createLibp2p } from 'libp2p' + * import { peerIdFromString } from '@libp2p/peer-id' + * + * const node = await createLibp2p({ + * services: { + * lanDHT: kadDHT({ + * protocol: '/ipfs/lan/kad/1.0.0', + * peerInfoMapper: removePublicAddressesMapper, + * clientMode: false + * }) + * } + * }) + * + * const peerId = peerIdFromString('QmFoo') + * const peerInfo = await node.peerRouting.findPeer(peerId) + * + * console.info(peerInfo) // peer id, multiaddrs + * ``` + */ + /** + * The types of events emitted during DHT queries + */ + var EventTypes; + (function (EventTypes) { + EventTypes[EventTypes["SEND_QUERY"] = 0] = "SEND_QUERY"; + EventTypes[EventTypes["PEER_RESPONSE"] = 1] = "PEER_RESPONSE"; + EventTypes[EventTypes["FINAL_PEER"] = 2] = "FINAL_PEER"; + EventTypes[EventTypes["QUERY_ERROR"] = 3] = "QUERY_ERROR"; + EventTypes[EventTypes["PROVIDER"] = 4] = "PROVIDER"; + EventTypes[EventTypes["VALUE"] = 5] = "VALUE"; + EventTypes[EventTypes["ADD_PEER"] = 6] = "ADD_PEER"; + EventTypes[EventTypes["DIAL_PEER"] = 7] = "DIAL_PEER"; + })(EventTypes || (EventTypes = {})); + /** + * Creates a custom DHT implementation, please ensure you pass a `protocol` + * string as an option. + */ + function kadDHT(init) { + return (components) => new KadDHT(components, init); + } + + /** + * @packageDocumentation + * + * Stores metrics in memory and periodically invokes a configured callback to + * receive them. + * + * @example + * + * ```ts + * import { createLibp2p } from 'libp2p' + * import { simpleMetrics } from '@libp2p/simple-metrics' + * + * const node = await createLibp2p({ + * // ... 
other options + * metrics: simpleMetrics({ + * onMetrics: (metrics) => { + * // do something with metrics + * } + * }), + * intervalMs: 1000 // default 1s + * }) + * + * ``` + */ + const log = logger('libp2p:simple-metrics'); + class DefaultMetric { + value = 0; + update(value) { + this.value = value; + } + increment(value = 1) { + this.value += value; + } + decrement(value = 1) { + this.value -= value; + } + reset() { + this.value = 0; + } + timer() { + const start = Date.now(); + return () => { + this.value = Date.now() - start; + }; + } + } + class DefaultGroupMetric { + values = {}; + update(values) { + Object.entries(values).forEach(([key, value]) => { + this.values[key] = value; + }); + } + increment(values) { + Object.entries(values).forEach(([key, value]) => { + this.values[key] = this.values[key] ?? 0; + const inc = typeof value === 'number' ? value : 1; + this.values[key] += Number(inc); + }); + } + decrement(values) { + Object.entries(values).forEach(([key, value]) => { + this.values[key] = this.values[key] ?? 0; + const dec = typeof value === 'number' ? value : 1; + this.values[key] -= Number(dec); + }); + } + reset() { + this.values = {}; + } + timer(key) { + const start = Date.now(); + return () => { + this.values[key] = Date.now() - start; + }; + } + } + class SimpleMetrics { + metrics = new Map(); + transferStats; + started; + interval; + intervalMs; + onMetrics; + constructor(components, init) { + this.started = false; + this._emitMetrics = this._emitMetrics.bind(this); + this.intervalMs = init.intervalMs ?? 
1000; + this.onMetrics = init.onMetrics; + // holds global and per-protocol sent/received stats + this.transferStats = new Map(); + } + isStarted() { + return this.started; + } + start() { + this.started = true; + this.interval = setInterval(this._emitMetrics, this.intervalMs); + } + stop() { + this.started = false; + clearInterval(this.interval); + } + _emitMetrics() { + void Promise.resolve().then(async () => { + const output = {}; + for (const [name, metric] of this.metrics.entries()) { + if (metric instanceof DefaultMetric) { + output[name] = metric.value; + } + else if (metric instanceof DefaultGroupMetric) { + output[name] = metric.values; + } + else { + output[name] = await metric(); + } + } + this.onMetrics(structuredClone(output)); + }) + .catch(err => { + log.error('could not invoke onMetrics callback', err); + }); + } + /** + * Increment the transfer stat for the passed key, making sure + * it exists first + */ + _incrementValue(key, value) { + const existing = this.transferStats.get(key) ?? 
0; + this.transferStats.set(key, existing + value); + } + /** + * Override the sink/source of the stream to count the bytes + * in and out + */ + _track(stream, name) { + const self = this; + const sink = stream.sink; + stream.sink = async function trackedSink(source) { + await sink(forEach(source, buf => { + self._incrementValue(`${name} sent`, buf.byteLength); + })); + }; + const source = stream.source; + stream.source = forEach(source, buf => { + self._incrementValue(`${name} received`, buf.byteLength); + }); + } + trackMultiaddrConnection(maConn) { + this._track(maConn, 'global'); + } + trackProtocolStream(stream, connection) { + if (stream.protocol == null) { + // protocol not negotiated yet, should not happen as the upgrader + // calls this handler after protocol negotiation + return; + } + this._track(stream, stream.protocol); + } + registerMetric(name, opts = {}) { + if (name == null || name.trim() === '') { + throw new Error('Metric name is required'); + } + if (opts?.calculate != null) { + // calculated metric + this.metrics.set(name, opts.calculate); + return; + } + const metric = new DefaultMetric(); + this.metrics.set(name, metric); + return metric; + } + registerMetricGroup(name, opts = {}) { + if (name == null || name.trim() === '') { + throw new Error('Metric name is required'); + } + if (opts?.calculate != null) { + // calculated metric + this.metrics.set(name, opts.calculate); + return; + } + const metric = new DefaultGroupMetric(); + this.metrics.set(name, metric); + return metric; + } + registerCounter(name, opts = {}) { + if (name == null || name.trim() === '') { + throw new Error('Metric name is required'); + } + if (opts?.calculate != null) { + // calculated metric + this.metrics.set(name, opts.calculate); + return; + } + const metric = new DefaultMetric(); + this.metrics.set(name, metric); + return metric; + } + registerCounterGroup(name, opts = {}) { + if (name == null || name.trim() === '') { + throw new Error('Metric name is required'); + 
} + if (opts?.calculate != null) { + // calculated metric + this.metrics.set(name, opts.calculate); + return; + } + const metric = new DefaultGroupMetric(); + this.metrics.set(name, metric); + return metric; + } + } + function simpleMetrics(init) { + return (components) => new SimpleMetrics(components, init); + } + + class webpeerjs{ + + //libp2p instance + #libp2p + + //map [id,addrs] of discovered peers (addrs is array of address) + #discoveredPeers + + //array of all webpeers id has been found + #webPeersId + + //map [id,addrs] + #webPeersAddrs + + //database of best peers has been found + #dbstore + #dbstoreData + + //map of [id,number_of_dialed] of good peers on #connectionTracker + #dialedGoodPeers + + //boolean is dial websocket + #isDialWebsocket + + //map [id,mddrs] of dialed bootstrap address + #dialedKnownBootstrap + + //array of dialed discovered peers id + //#dialedDiscoveredPeers + + //object from joinRoom() + #rooms + + //map [id,addrs] of webpeers currently connected (addrs is array of address) + #connectedPeers + + //array of we peers id proxy of #connectedPeers + #connectedPeersArr + + //map [id,number_of_dialed] of #connectionTracker object store + #connectionTrackerStore + + //map [id,addr] of all peers connections (addr is string of address) + #connections + + //track disconnect event + #trackDisconnect + + //list of dial multiaddress queue + #dialQueue + + //is dial enabled + #isDialEnabled + + id + status + IPFS + address + peers + + constructor(libp2p,dbstore,onMetrics){ + + this.#libp2p = libp2p; + this.#dbstore = dbstore; + this.#dbstoreData = new Map(); + this.#discoveredPeers = new Map(); + this.#webPeersId = []; + this.#webPeersAddrs = new Map(); + this.#dialedGoodPeers = new Map(); + this.#isDialWebsocket = false; + this.#dialedKnownBootstrap = new Map(); + //this.#dialedDiscoveredPeers = [] + this.address = []; + this.#rooms = {}; + this.#connectedPeers = new Map(); + this.#connectedPeersArr = []; + this.#connectionTrackerStore = new 
Map(); + this.#connections = new Map(); + this.#trackDisconnect = new Map(); + this.#dialQueue = []; + this.#isDialEnabled = true; + + this.peers = (function(f) { + return f + })(this.#connectedPeersArr); + + this.status = (function(libp2p) { + return libp2p.status + })(this.#libp2p); + + this.IPFS = (function(libp2p,discoveredPeers) { + const obj = {libp2p,discoveredPeers}; + return obj + })(this.#libp2p,this.#discoveredPeers); + + this.id = this.#libp2p.peerId.toString(); + + + //listen to peer connect event + this.#libp2p.addEventListener("peer:connect", (evt) => { + + //console.log(`Connected to ${connection.toString()}`); + + const connection = evt.detail; + const id = evt.detail.toString(); + + const connections = this.#libp2p.getConnections().map((con)=>{return {id:con.remotePeer.toString(),addr:con.remoteAddr.toString()}}); + const connect = connections.find((con)=>con.id == id); + const addr = connect.addr; + this.#connections.set(id,addr); + + //required by joinRoom version 1 to announce via universal connectivity + if(connection.toString() === CONFIG_KNOWN_BOOTSTRAP_PEER_IDS[0]){ + setTimeout(()=>{ + this.#announce(); + },1000); + } + + }); + + + //listen message from subscribed pupsub topic + this.#libp2p.services.pubsub.addEventListener('message', event => { + + //console.log('on:'+event.detail.topic,event.detail.data) + //console.log('from '+event.detail.from.toString(),event) + + if (event.detail.type !== 'signed') { + return + } + { + const topic = event.detail.topic; + const senderPeerId = event.detail.from.toString(); + if(CONFIG_PUBSUB_PEER_DISCOVERY.includes(topic)){ + try{ + + //if it is webpeer + if(this.#webPeersId.includes(senderPeerId)){ + + if(this.#connectedPeers.has(senderPeerId)){ + //reset this last seen + const address = this.#connectedPeers.get(senderPeerId).addrs; + const now = new Date().getTime(); + const metadata = {addrs:address,last:now}; + this.#connectedPeers.set(senderPeerId,metadata); + } + else { + //add to connected 
webpeers + this.#onConnectFn(senderPeerId); + const address = this.#webPeersAddrs.get(senderPeerId); + const now = new Date().getTime(); + const metadata = {addrs:address,last:now}; + this.#connectedPeers.set(senderPeerId,metadata); + } + + //dial if not connected + if(!this.#isConnected(senderPeerId)){ + if(this.#connections.has(senderPeerId)){ + let mddrs = []; + const addr = this.#connections.get(senderPeerId); + const mddr = multiaddr(addr); + mddrs.push(mddr); + this.#dialMultiaddress(mddrs); + } + else if(this.#discoveredPeers.has(senderPeerId)){ + const addrs = this.#discoveredPeers.get(senderPeerId); + let mddrs = []; + for(const addr of addrs){ + const mddr = multiaddr(addr); + mddrs.push(mddr); + } + this.#dialMultiaddress(mddrs); + } + else { + const addrs = this.#connectedPeers.get(senderPeerId).addrs; + let mddrs = []; + for(const addr of addrs){ + const mddr = multiaddr(addr); + mddrs.push(mddr); + } + this.#dialMultiaddress(mddrs); + } + } + + + } + + //parse the message over pupsub peer discovery + const peer = Peer$3.decode(event.detail.data); + const msg = uint8ArrayToString(peer.addrs[0]); + const json = JSON.parse(msg); + const prefix = json.prefix; + const room = json.room; + const rooms = json.rooms; + const message = json.message; + const signal = json.signal; + const id = json.id; + //console.log(`from ${id}:${signal} = ${message}`) + if(id != senderPeerId)return + let address = json.address; + + //detect special webpeer identity + if(prefix === CONFIG_PREFIX){ + + //add to webpeers id + if(!this.#webPeersId.includes(id))this.#webPeersId.push(id); + + //add to connected webpeers + if(!this.#connectedPeers.has(id)){ + this.#onConnectFn(id); + address = []; + const now = new Date().getTime(); + const metadata = {addrs:address,last:now}; + this.#connectedPeers.set(id,metadata); + this.#webPeersAddrs.set(id,address); + this.#connectedPeersArr.length = 0; + for(const peer of this.#connectedPeers){ + const item = 
{id:peer[0],address:peer[1].addrs}; + this.#connectedPeersArr.push(item); + } + } + + + if(room){ + if(this.#rooms[room]){ + + //inbound message + if(message){ + this.#rooms[room].onMessage(message,id); + } + + //update room members + if(!this.#rooms[room].members.includes(id)){ + this.#rooms[room].members.push(id); + this.#rooms[room].onMembers(this.#rooms[room].members); + } + } + } + + if(rooms){ + for(const room of Object.keys(this.#rooms)){ + //update room members + if(!this.#rooms[room].members.includes(id)){ + this.#rooms[room].members.push(id); + this.#rooms[room].onMembers(this.#rooms[room].members); + } + } + } + + if(signal){ + + //repply announce with ping + if(signal == 'announce'){ + setTimeout(()=>{this.#ping('yes');},1000); + //console.log('rooms',rooms) + } + + if(signal == 'ping'){ + //console.log('rooms',rooms) + } + + //update connected webpeers + const now = new Date().getTime(); + const metadata = {addrs:address,last:now}; + this.#connectedPeers.set(id,metadata); + this.#webPeersAddrs.set(id,address); + this.#connectedPeersArr.length = 0; + for(const peer of this.#connectedPeers){ + const item = {id:peer[0],address:peer[1].addrs}; + this.#connectedPeersArr.push(item); + } + + } + } + + }catch(err){ + //console.log('from '+event.detail.from.toString()) + console.debug(err); + } + }else { + const json = JSON.parse(topic); + const room = json.room; + const message = new TextDecoder().decode(event.detail.data); + this.#rooms[room].onMessage(message); + } + } + + }); + + + //listen to peer discovery event + this.#libp2p.addEventListener('peer:discovery', (evt) => { + + //console.log('Discovered:', evt.detail.id.toString()) + //console.log('Discovered: '+evt.detail.id.toString(), evt.detail.multiaddrs.toString()) + + //save peer discover + + const multiaddrs = evt.detail.multiaddrs; + const id = evt.detail.id; + + if(multiaddrs.length != 0){ + let addrs = []; + for(const addr of multiaddrs){ + let peeraddr; + 
if(multiaddrs.toString().includes(evt.detail.id.toString())){ + //console.log('Discovered:', evt.detail.multiaddrs.toString()) + //peer from pupsub peer discovery already has included self id + peeraddr = addr.toString(); + } + else { + //other need to add Id + peeraddr = addr.toString()+'/p2p/'+id; + } + addrs.push(peeraddr); + } + //save the new format multiaddrs + this.#discoveredPeers.set(id.toString(), addrs); + + //track if peer come from relay then dial it because there is a chance it is from other browser node + if(multiaddrs.toString().includes('certhash')&& multiaddrs.toString().includes('webtransport') && multiaddrs.toString().includes('p2p-circuit')){ + //console.log(addrs) + if(!this.#connections.has(id)){ + for(const addr of addrs){ + multiaddr(addr); + } + //this.#dialMultiaddress(mddrs) + } + } + } + + }); + + + //listen to peer disconnect event + this.#libp2p.addEventListener("peer:disconnect",async (evt) => { + + //const connection = evt.detail; + //console.log(`Disconnected from ${connection.toCID().toString()}`); + const id = evt.detail.string; + + //track disconnect event + if(this.#trackDisconnect.has(id)){ + let count = this.#trackDisconnect.get(id); + count++; + this.#trackDisconnect.set(id,count); + //console.log(this.#trackDisconnect) + if(count>5){ + if(this.#dbstoreData.has(id)){ + //await this.#dbstore.delete(new Key(id)) + this.#dbstoreData.delete(id); + } + + if(!this.#webPeersId.includes(id) && !this.#dialedKnownBootstrap.has(id)){ + return + } + } + } + else { + this.#trackDisconnect.set(id,0); + } + + //if this disconnected peer is web peer redial it + if(this.#webPeersId.includes(id)){ + const addr = this.#connections.get(id); + let mddrs = []; + const mddr = multiaddr(addr); + mddrs.push(mddr); + this.#dialMultiaddress(mddrs); + } + + //if this disconnected peer is known bootstrap redial it + else if(this.#dialedKnownBootstrap.has(id)){ + const addr = this.#connections.get(id); + let mddrs = []; + const addrs = multiaddr(addr); + 
mddrs.push(addrs); + this.#dialMultiaddress(mddrs); + } + + //redial if this disconnected peer is regular peer + else { + const addr = this.#connections.get(id); + let mddrs = []; + const addrs = multiaddr(addr); + mddrs.push(addrs); + this.#dialMultiaddress(mddrs); + } + }); + + + //listen to self peer update + this.#libp2p.addEventListener('self:peer:update', ({ detail: { peer } }) => { + //const multiaddrs = peer.addresses.map(({ multiaddr }) => multiaddr) + //console.log(`changed multiaddrs: peer ${peer.id.toString()} multiaddrs: ${multiaddrs}`) + const id = peer.id.toString(); + const mddrs = []; + peer.addresses.forEach((addr)=>{ + const maddr = addr.multiaddr.toString()+'/p2p/'+id; + if(maddr.includes('webtransport') && maddr.includes('certhash')){ + mddrs.push(maddr); + } + }); + //this.#ListenAddressChange(mddrs) + this.address = mddrs; + this.#ping(); + }); + + //dial known peers from configuration + this.#dialKnownPeers(); + + //watch connection every 30s if none dial known peers again from configuration + this.#watchConnection(); + + //if found good peers save to storage and reconnect if disconnect + this.#connectionTracker(); + + //periodically dial saved bootstrap address if disconnect + this.#dialRandomBootstrap(); + + //dial random discovered peers + //this.#dialdiscoveredpeers() + + onMetrics((data)=>{ + const signal = metrics(data); + this.#isDialEnabled = signal; + + }); + + setInterval(()=>{ + this.#dialQueueList(); + },5e3); + + setInterval(()=>{ + this.#trackLastSeen(); + },5e3); + + } + + + + + /* + PUBLIC FUNCTION + */ + + //Listen on new peer connection + #onConnectFn = () => {} + onJoin = f => (this.#onConnectFn = f) + + + //Listen on peer disconnect + #onDisconnectFn = () => {} + onLeave = f => (this.#onDisconnectFn = f) + + + + + /* + PRIVATE FUNCTION + */ + + + //check the last seen in web peer + #trackLastSeen(){ + const timeout = 25*1000; + const now = new Date().getTime(); + + //if webpeer last seen grather then timeout send 
onDisconnect + for(const peer of this.#connectedPeers){ + const id = peer[0]; + const last = peer[1].last; + const time = now-last; + if(time>timeout){ + this.#connectedPeers.delete(id); + this.#connectedPeersArr.length = 0; + for(const peer of this.#connectedPeers){ + const item = {id:peer[0],address:peer[1].addrs}; + this.#connectedPeersArr.push(item); + } + this.#onDisconnectFn(id); + + //remove id from room member + const rooms = Object.keys(this.#rooms); + for(const room of rooms){ + if(this.#rooms[room].members.includes(id)){ + const index = this.#rooms[room].members.indexOf(id); + this.#rooms[room].members.splice(index,1); + this.#rooms[room].onMembers(this.#rooms[room].members); + } + } + } + } + } + + //check if this id is connected + #isConnected(id){ + let peers = []; + for(const peer of this.#libp2p.getPeers()){ + peers.push(peer.toString()); + } + if(peers.includes(id)){ + return true + } + else { + return false + } + } + + + //add multiaddr address to queue list + #dialMultiaddress(mddrs){ + if(mddrs.length>0){ + + const id = mddrs[0].toString().split('/').pop(); + + const ids = this.#dialQueue.map((arr)=> arr[0].toString().split('/').pop()); + + //if peer id is already in the queque cancel queque + if(ids.includes(id)){ + return + } + + if(this.#webPeersId.includes(id) || id == CONFIG_KNOWN_BOOTSTRAP_PEER_IDS[0] ){ + this.#dialQueue.unshift(mddrs); + } + else { + this.#dialQueue.push(mddrs); + } + + } + } + + //dial multiaddr address in queue list + #dialQueueList(){ + + if(!this.#isDialEnabled)return + + const mddrsToDial = 3; + + let queue = []; + for(const item of this.#libp2p.getDialQueue()){ + const id = item.peerId.string; + queue.push(id); + } + + if (queue.length > mddrsToDial)return + + for(let i = 0; i < mddrsToDial; i++){ + const mddrs = this.#dialQueue.shift(); + if(mddrs != undefined && mddrs.length>0){ + + const id = mddrs[0].toString().split('/').pop(); + + if(this.#isConnected(id))continue + if(queue.includes(id)){continue;} + + 
//dial with webtransport + this.#dialWebtransport(mddrs); + + //fallback dial with websocket if enabled + if(this.#isDialWebsocket){ + this.#dialWebsocket(mddrs); + } + + } + else { + break + } + } + + } + + + //announce and ping via pupsub peer discovery + async #announce(){ + const topics = CONFIG_PUBSUB_PEER_DISCOVERY; + const data = JSON.stringify({prefix:CONFIG_PREFIX,signal:'announce',id:this.#libp2p.peerId.toString(),address:this.address,rooms:this.#rooms}); + const peer = { + publicKey: this.#libp2p.peerId.publicKey, + addrs: [uint8ArrayFromString(data)], + }; + const encodedPeer = Peer$3.encode(peer); + for(const topic of topics){ + await this.#libp2p.services.pubsub.publish(topic, encodedPeer); + } + } + async #ping(){ + const topics = CONFIG_PUBSUB_PEER_DISCOVERY; + const data = JSON.stringify({prefix:CONFIG_PREFIX,signal:'ping',id:this.#libp2p.peerId.toString(),address:this.address,rooms:this.#rooms}); + const peer = { + publicKey: this.#libp2p.peerId.publicKey, + addrs: [uint8ArrayFromString(data)], + }; + const encodedPeer = Peer$3.encode(peer); + for(const topic of topics){ + await this.#libp2p.services.pubsub.publish(topic, encodedPeer); + } + } + + + joinRoom = room => { + if (this.#rooms[room]) { + return [ + this.#rooms[room].sendMessage, + this.#rooms[room].listenMessage, + this.#rooms[room].onMembersChange + ] + + + } + + if (!room) { + throw mkErr('room is required') + } + + //join room version 1 user pupsub via pupsub peer discovery + { + + const topics = CONFIG_PUBSUB_PEER_DISCOVERY; + + this.#rooms[room] = { + onMessage : () => {}, + listenMessage : f => (this.#rooms[room] = {...this.#rooms[room], onMessage: f}), + sendMessage : async (message) => { + const data = JSON.stringify({prefix:CONFIG_PREFIX,room,message,id:this.#libp2p.peerId.toString()}); + const peer = { + publicKey: this.#libp2p.peerId.publicKey, + addrs: [uint8ArrayFromString(data)], + }; + const encodedPeer = Peer$3.encode(peer); + for(const topic of topics){ + await 
this.#libp2p.services.pubsub.publish(topic, encodedPeer); + } + }, + members : [this.id], + onMembers : () => {}, + onMembersChange : f => {this.#rooms[room] = {...this.#rooms[room], onMembers: f};this.#rooms[room].onMembers(this.#rooms[room].members);}, + }; + } + + return [ + this.#rooms[room].sendMessage, + this.#rooms[room].listenMessage, + this.#rooms[room].onMembersChange + ] + } + + + //dial discovered peers + /*#dialdiscoveredpeers(){ + setInterval(()=>{ + const keys = Array.from(this.#discoveredPeers.keys()) + for(const key of keys){ + if(!this.#dialedDiscoveredPeers.includes(key)){ + this.#dialedDiscoveredPeers.push(key) + const addrs = this.#discoveredPeers.get(key) + let mddrs = [] + for(const addr of addrs){ + const mddr = multiaddr(addr) + mddrs.push(mddr) + } + this.#dialMultiaddress(mddrs) + break + } + } + },30*1000) + }*/ + + + //dial random known bootstrap periodically + #dialRandomBootstrap(){ + setInterval(()=>{ + //const keys = Array.from(this.#dialedKnownBootstrap.keys()) + const keys = CONFIG_KNOWN_BOOTSTRAP_PEER_IDS; + const randomKey = Math.floor(Math.random() * keys.length); + let ids = []; + ids.push(keys[randomKey]); + + //universal connectivity id for webpeer discovery and joinRoom version 1 to work + ids.push(CONFIG_KNOWN_BOOTSTRAP_PEER_IDS[0]); + + for(const id of ids){ + if(id == undefined)continue + //const addrs = this.#dialedKnownBootstrap.get(id) + + if(!this.#isConnected(id)){ + if(this.#connections.has(id)) + { + let mddrs = []; + const addr = this.#connections.get(id); + const mddr = multiaddr(addr); + mddrs.push(mddr); + this.#dialMultiaddress(mddrs); + } + else if (this.#dialedKnownBootstrap.has(id)){ + let mddrs = []; + const addrs = this.#dialedKnownBootstrap.get(id); + for(const addr of addrs){ + const mddr = multiaddr(addr); + mddrs.push(mddr); + } + this.#dialMultiaddress(mddrs); + } + else { + const bootstrap = CONFIG_KNOWN_BOOTSTRAP_PEERS_ADDRS; + const index = bootstrap.findIndex((peer)=>peer.Peers[0].ID == id); + 
const addrs = bootstrap[index].Peers[0].Addrs; + let mddrs = []; + for(const addr of addrs){ + const peeraddr = addr+'/p2p/'+id; + const mddr = multiaddr(peeraddr); + mddrs.push(mddr); + } + this.#dialMultiaddress(mddrs); + } + } + } + },45*1000); + } + + + //track for good connection + async #connectionTracker(){ + + for await (const { key, value } of this.#dbstore.query({})) { + const id = key.toString().split('/')[1]; + const addr = new TextDecoder().decode(value); + this.#dbstoreData.set(id,addr); + } + + setInterval(async ()=>{ + + //save peer address if connection is good + const connections = this.#libp2p.getConnections(); + for(const connect of connections){ + const peer = connect.remotePeer; + const remote = connect.remoteAddr; + const upgraded = connect.timeline.upgraded; + const bestlimit = 5*60*1000; + const now = new Date().getTime(); + const besttime = now-upgraded; + if(besttime>bestlimit){ + const addr = remote.toString(); + const id = peer.toString(); + if(!this.#webPeersId.includes(id) && !CONFIG_KNOWN_BOOTSTRAP_PEER_IDS.includes(id) && !this.#dbstoreData.get(id) && !addr.includes('p2p-circuit')){ + //await this.#dbstore.delete(new Key(id)) + await this.#dbstore.put(new Key(id), new TextEncoder().encode(addr)); + this.#dbstoreData.set(id,addr); + } + } + const goodlimit = 60*1000; + const goodtime = now-upgraded; + if(goodtime>goodlimit){ + const id = peer.toString(); + if(!this.#dialedGoodPeers.has(id))this.#dialedGoodPeers.set(id,0); + } + + } + + + let peers = []; + for(const peer of this.#libp2p.getPeers()){ + peers.push(peer.toString()); + } + + + //connect to saved best peer address + //working great + for(const peer of this.#dbstoreData){ + const id = peer[0]; + const addr = peer[1]; + if(peers.includes(id)){ + this.#connectionTrackerStore.set(id,0); + continue + }else { + if(this.#connectionTrackerStore.has(id)){ + let current = this.#connectionTrackerStore.get(id); + if(current>10)continue + current++; + 
this.#connectionTrackerStore.set(id,current); + } + else { + this.#connectionTrackerStore.set(id,0); + } + let mddrs = []; + const mddr = multiaddr(addr); + mddrs.push(mddr); + this.#dialMultiaddress(mddrs); + } + } + + //connect to good peer address if it is disconnected + const goods = Array.from(this.#dialedGoodPeers.keys()); + for(const id of goods){ + if(peers.includes(id)){ + this.#dialedGoodPeers.set(id,0); + continue + } + else { + + let count = this.#dialedGoodPeers.get(id); + if (count < 15 || (count < 25 && this.#dialedKnownBootstrap.has(id))){ + const addr = this.#connections.get(id); + let mddrs = []; + const mddr = multiaddr(addr); + mddrs.push(mddr); + this.#dialMultiaddress(mddrs); + count++; + this.#dialedGoodPeers.set(id,count); + } + } + } + + },30*1000); + } + + + //update listen address on change + //#ListenAddressChange = () => {} + //#onSelfAddress = f => (this.#ListenAddressChange = f) + + + //Periodically watch for connection + #watchConnection(){ + setInterval(()=>{ + const peers = this.#libp2p.getPeers().length; + if(peers == 0){ + this.#dialKnownPeers(); + } + },60*1000); + } + + + //dial to all known bootstrap peers and DNS + #dialKnownPeers(){ + this.#dialKnownBootstrap(); + setTimeout(()=>{ + const peers = this.#libp2p.getPeers().length; + if(peers == 0){ + //currently not needed + //this.#dialKnownID() + setTimeout(()=>{ + const peers = this.#libp2p.getPeers().length; + if(peers == 0){ + //currently not needed + //this.#dialKnownDNS() + setTimeout(()=>{ + this.#libp2p.getPeers().length; + },15000); + } + },15000); + } + },15000); + } + + + //dial based on known bootsrap peers address + #dialKnownBootstrap(){ + const bootstrap = CONFIG_KNOWN_BOOTSTRAP_PEERS_ADDRS; + for(const peer of bootstrap){ + const address = peer.Peers[0].Addrs; + const id = peer.Peers[0].ID; + let mddrs = []; + let addrs = []; + for(const addr of address){ + const peeraddr = addr+'/p2p/'+id; + const peermddr = multiaddr(peeraddr); + addrs.push(peeraddr); + 
mddrs.push(peermddr); + } + + this.#dialedKnownBootstrap.set(id,addrs); + if(!this.#isConnected(id)){ + this.#dialMultiaddress(mddrs); + } + + } + } + + + //dial based on known peers ID + /*async #dialKnownID(){ + const api = config.CONFIG_DELEGATED_API + const delegatedClient = createDelegatedRoutingV1HttpApiClient(api) + const BOOTSTRAP_PEER_IDS = config.CONFIG_KNOWN_BOOTSTRAP_PEER_IDS + const peers = await Promise.all( + BOOTSTRAP_PEER_IDS.map((peerId) => first(delegatedClient.getPeers(peerIdFromString(peerId)))), + ) + for(const peer of peers){ + if(!peer)return + const address = peer.Addrs + const id = peer.ID + let mddrs = [] + let addrs = [] + for(const addr of address){ + const peeraddr = addr.toString()+'/p2p/'+id.toString() + const peermddr = multiaddr(peeraddr) + addrs.push(peeraddr) + mddrs.push(peermddr) + } + + this.#dialedKnownBootstrap.set(id,addrs) + if(!this.#isConnected(id)){ + this.#dialMultiaddress(mddrs) + } + } + }*/ + + + //dial based on known bootstrap DNS + /*async #dialKnownDNS(){ + const dnsresolver = config.CONFIG_DNS_RESOLVER + const bootstrapdns = config.CONFIG_KNOWN_BOOTSTRAP_DNS + const response = await fetch(dnsresolver+'?name='+bootstrapdns+'&type=txt') + const json = await response.json() + const dns = json.Answer + const BOOTSTRAP_PEER_IDS = [] + for(const dnsaddr of dns){ + const id = dnsaddr.data.split('/').pop() + BOOTSTRAP_PEER_IDS.push(id) + } + const api = config.CONFIG_DELEGATED_API + const delegatedClient = createDelegatedRoutingV1HttpApiClient(api) + const peers = await Promise.all( + BOOTSTRAP_PEER_IDS.map((peerId) => first(delegatedClient.getPeers(peerIdFromString(peerId)))), + ) + for(const peer of peers){ + const address = peer.Addrs + const id = peer.ID + let mddrs = [] + let addrs = [] + for(const addr of address){ + const peeraddr = addr.toString()+'/p2p/'+id.toString() + const peermddr = multiaddr(peeraddr) + addrs.push(peeraddr) + mddrs.push(peermddr) + } + + this.#dialedKnownBootstrap.set(id,addrs) + 
if(!this.#isConnected(id)){ + this.#dialMultiaddress(mddrs) + } + } + + }*/ + + + //dial based on known bootstrap DNS using DNS resolver only + /*async #dialKnownDNSonly(){ + const dnsresolver = config.CONFIG_DNS_RESOLVER + const bootstrapdns = config.CONFIG_KNOWN_BOOTSTRAP_DNS + const response = await fetch(dnsresolver+'?name='+bootstrapdns+'&type=txt') + const json = await response.json() + const dns = json.Answer + + for(const dnsitem of dns){ + const arr = dnsitem.data.split('/') + const id = arr.pop() + const dnsaddr = '_dnsaddr.'+arr[2] + this.#dialDNSWebsocketWebtransport(id,dnsaddr) + } + }*/ + + + //dial DNS with webtransport and websocket + /*async #dialDNSWebsocketWebtransport(id,dnsaddr){ + const dnsresolver = config.CONFIG_DNS_RESOLVER + const response = await fetch(dnsresolver+'?name='+dnsaddr+'&type=txt') + const json = await response.json() + const dns = json.Answer + let mddrs = [] + let addrs = [] + for(const dnsitem of dns){ + const arr = dnsitem.data.split('=') + const dnsaddr = arr[1] + const maddr = multiaddr(dnsaddr) + mddrs.push(maddr) + addrs.push(dnsaddr) + } + + + this.#isDialWebsocket = true + this.#dialedKnownBootstrap.set(id,addrs) + + this.#dialedKnownBootstrap.set(id,addrs) + if(!this.#isConnected(id)){ + this.#dialMultiaddress(mddrs) + this.#dialWebsocket(mddrs) + } + }*/ + + + //dial only webtransport multiaddrs + async #dialWebtransport(multiaddrs){ + const webTransportMadrs = multiaddrs.filter((maddr) => maddr.protoNames().includes('webtransport')&&maddr.protoNames().includes('certhash')); + for (const addr of webTransportMadrs) { + try { + //console.log(`attempting to dial webtransport multiaddr: %o`, addr.toString()) + await this.#libp2p.dial(addr); + return // if we succeed dialing the peer, no need to try another address + } catch (error) { + //console.log(`failed to dial webtransport multiaddr: %o`, addr.toString()) + console.debug(error); + } + } + } + + //dial only webtransport multiaddrs + 
/*#dialWebtransport1(multiaddrs){ + const webTransportMadrs = multiaddrs.filter((maddr) => maddr.protoNames().includes('webtransport')&&maddr.protoNames().includes('certhash')) + if(webTransportMadrs.length == 0)return + this.#libp2p.dial(webTransportMadrs).then((data)=>{console.warn(data)},(data)=>{console.warn(data)}) + }*/ + + //dial only websocket multiaddrs + async #dialWebsocket(multiaddrs){ + const webSocketMadrs = multiaddrs.filter((maddr) => maddr.protoNames().includes('wss')); + for (const addr of webSocketMadrs) { + try { + //console.log(`attempting to dial websocket multiaddr: %o`, addr) + await this.#libp2p.dial(addr); + return // if we succeed dialing the peer, no need to try another address + } catch (error) { + //console.log(`failed to dial websocket multiaddr: %o`, addr) + console.debug(error); + } + } + } + + + //entry point to webpeerjs + static async createWebpeer(){ + + // all libp2p debug logs + localStorage.setItem('debug', 'libp2p:*'); + + const dbstore = new IDBDatastore(CONFIG_DBSTORE_PATH); + await dbstore.open(); + + //let addrs = [] + const getbootstrap = CONFIG_KNOWN_BOOTSTRAP_PEERS_ADDRS; + for(const peer of getbootstrap){ + const addrs = peer.Peers[0].Addrs; + peer.Peers[0].ID; + //let mddrs = [] + for(const addr of addrs){ + if(addr.includes('webtransport')&&addr.includes('certhash')); + } + } + + let onMetricsFn = () => {}; + const onMetrics = f => (onMetricsFn = f); + + //create libp2p instance + const libp2p = await createLibp2p({ + addresses: { + listen: [ + ], + }, + transports:[ + webTransport(), + circuitRelayTransport({ + discoverRelays: CONFIG_DISCOVER_RELAYS, + reservationConcurrency: 1, + maxReservationQueueLength: 3 + }), + ], + connectionManager: { + maxConnections: CONFIG_MAX_CONNECTIONS, + minConnections: CONFIG_MIN_CONNECTIONS, + autoDialInterval:60e3, + autoDialConcurrency:0, + autoDialMaxQueueLength:0, + autoDialPriority:1000, + autoDialDiscoveredPeersDebounce:30e3, + maxParallelDials: 3, + dialTimeout: 5e3, + 
maxIncomingPendingConnections: 5, + maxDialQueueLength:10, + inboundConnectionThreshold:3, + maxPeerAddrsToDial:2, + inboundUpgradeTimeout:5e3 + }, + connectionEncryption: [noise()], + streamMuxers: [ + yamux({ + maxInboundStreams: 50, + maxOutboundStreams: 50, + }) + ], + connectionGater: { + filterMultiaddrForPeer: async (peer, multiaddrTest) => { + const multiaddrString = multiaddrTest.toString(); + if ( + multiaddrString.includes("/ip4/127.0.0.1") || + multiaddrString.includes("/ip6/") + ) { + return false; + } + return true; + }, + denyDialMultiaddr: async (multiaddrTest) => { + const multiaddrString = multiaddrTest.toString(); + if ( + multiaddrString.includes("/ip4/127.0.0.1") || + multiaddrString.includes("/ip6/") + ) { + return true; + } + return false; + }, + }, + peerDiscovery: [ + pubsubPeerDiscovery({ + interval: 10_000, + topics: CONFIG_PUBSUB_PEER_DISCOVERY, + listenOnly: false, + }), + + ], + services: { + pubsub: gossipsub({ + allowPublishToZeroTopicPeers: true, + msgIdFn: msgIdFnStrictNoSign$1, + ignoreDuplicatePublishError: true, + }), + identify: identify(), + identifyPush: identifyPush(), + aminoDHT: kadDHT({ + protocol: '/ipfs/kad/1.0.0', + peerInfoMapper: removePrivateAddressesMapper, + clientMode: false + }), + + }, + peerStore: { + persistence: true, + threshold: 1 + }, + metrics: simpleMetrics({ + onMetrics: (metrics) => {onMetricsFn(metrics);}, + intervalMs: 1000 + }) + }); + + + + //console.log(`Node started with id ${libp2p.peerId.toString()}`) + + //DHT server mode act as bootstrap peer in IPFS network + await libp2p.services.aminoDHT.setMode("server"); + + + //return webpeerjs class + return new webpeerjs(libp2p,dbstore,onMetrics) + } + } + + return webpeerjs; + +})); diff --git a/package.json b/package.json index 09013db..1e09c1f 100644 --- a/package.json +++ b/package.json @@ -1,15 +1,12 @@ { "name": "webpeerjs", - "version": "0.0.3", + "version": "0.0.4", "description": "Simple peer-to-peer with IPFS", "main": 
"./dist/umd/webpeerjs.js", - "module": "./src/webpeerjs.js", - "imports": { - "#/pubsub-peer-discovery/*.js": "./node_modules/@libp2p/pubsub-peer-discovery/dist/src/*.js" - }, + "module": "./dist/esm/webpeerjs.js", "exports": { ".": { - "import": "./src/webpeerjs.js", + "import": "./dist/esm/webpeerjs.js", "require": "./dist/umd/webpeerjs.js" } }, diff --git a/src/config.d.ts b/src/config.d.ts deleted file mode 100644 index d168f4e..0000000 --- a/src/config.d.ts +++ /dev/null @@ -1,22 +0,0 @@ -export const CONFIG_PREFIX: "webpeerjs"; -export const CONFIG_BLOCKSTORE_PATH: string; -export const CONFIG_DATASTORE_PATH: string; -export const CONFIG_DBSTORE_PATH: string; -export const CONFIG_MAX_CONNECTIONS: 50; -export const CONFIG_MIN_CONNECTIONS: 0; -export const CONFIG_DISCOVER_RELAYS: 2; -export const CONFIG_PEER_DISCOVERY_UNIVERSAL_CONNECTIVITY: "universal-connectivity-browser-peer-discovery"; -export const CONFIG_PUBSUB_PEER_DISCOVERY: string[]; -export const CONFIG_PUPSUB_TOPIC: string; -export const CONFIG_DELEGATED_API: "https://delegated-ipfs.dev"; -export const CONFIG_DNS_RESOLVER: "https://dns.google/resolve"; -export const CONFIG_KNOWN_BOOTSTRAP_DNS: "_dnsaddr.bootstrap.libp2p.io"; -export const CONFIG_JOIN_ROOM_VERSION: 1; -export const CONFIG_KNOWN_BOOTSTRAP_PEERS_ADDRS: { - Peers: { - Addrs: string[]; - ID: string; - Schema: string; - }[]; -}[]; -export const CONFIG_KNOWN_BOOTSTRAP_PEER_IDS: string[]; diff --git a/src/package.json b/src/package.json deleted file mode 100644 index 6990891..0000000 --- a/src/package.json +++ /dev/null @@ -1 +0,0 @@ -{"type": "module"} diff --git a/src/peer.js b/src/peer.js new file mode 100644 index 0000000..10bd52b --- /dev/null +++ b/src/peer.js @@ -0,0 +1,64 @@ +/* eslint-disable import/export */ +/* eslint-disable complexity */ +/* eslint-disable @typescript-eslint/no-namespace */ +/* eslint-disable @typescript-eslint/no-unnecessary-boolean-literal-compare */ +/* eslint-disable @typescript-eslint/no-empty-interface */ 
+import { decodeMessage, encodeMessage, message } from 'protons-runtime'; +import { alloc as uint8ArrayAlloc } from 'uint8arrays/alloc'; +export var Peer; +(function (Peer) { + let _codec; + Peer.codec = () => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork(); + } + if ((obj.publicKey != null && obj.publicKey.byteLength > 0)) { + w.uint32(10); + w.bytes(obj.publicKey); + } + if (obj.addrs != null) { + for (const value of obj.addrs) { + w.uint32(18); + w.bytes(value); + } + } + if (opts.lengthDelimited !== false) { + w.ldelim(); + } + }, (reader, length) => { + const obj = { + publicKey: uint8ArrayAlloc(0), + addrs: [] + }; + const end = length == null ? reader.len : reader.pos + length; + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + obj.publicKey = reader.bytes(); + break; + } + case 2: { + obj.addrs.push(reader.bytes()); + break; + } + default: { + reader.skipType(tag & 7); + break; + } + } + } + return obj; + }); + } + return _codec; + }; + Peer.encode = (obj) => { + return encodeMessage(obj, Peer.codec()); + }; + Peer.decode = (buf) => { + return decodeMessage(buf, Peer.codec()); + }; +})(Peer || (Peer = {})); diff --git a/src/utils.d.ts b/src/utils.d.ts deleted file mode 100644 index f95e555..0000000 --- a/src/utils.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -export function uint8ArrayToString(uint8Array: any): string; -export function uint8ArrayFromString(string: any): Uint8Array; -export function first(farr: any): Promise; -export function msgIdFnStrictNoSign(msg: any): Promise; -export function metrics(data: any): boolean | undefined; -export function mkErr(msg: any): Error; -import { Key } from 'interface-datastore'; -export { PBPeer, Key }; diff --git a/src/utils.js b/src/utils.js index 1996385..077fff6 100644 --- a/src/utils.js +++ b/src/utils.js @@ -1,6 +1,6 @@ import * as config from './config' -import { Peer as PBPeer } from 
'#/pubsub-peer-discovery/peer.js' +import { Peer as PBPeer } from './peer' import { Key } from 'interface-datastore' diff --git a/src/webpeerjs.d.ts b/src/webpeerjs.d.ts deleted file mode 100644 index 640296c..0000000 --- a/src/webpeerjs.d.ts +++ /dev/null @@ -1,16 +0,0 @@ -export class webpeerjs { - static createWebpeer(): Promise; - constructor(libp2p: any, dbstore: any, onMetrics: any); - id: any; - status: any; - IPFS: { - libp2p: any; - discoveredPeers: Map; - }; - address: any[]; - peers: any[]; - onJoin: (f: any) => any; - onLeave: (f: any) => any; - joinRoom: (room: any) => any[]; - #private; -}