From f0acea3381b77d212072c5694e329b41e6d95712 Mon Sep 17 00:00:00 2001 From: paulo-ocean Date: Wed, 19 Feb 2025 10:43:27 +0000 Subject: [PATCH 01/25] add validate function signature changes, missing update docs and others --- src/services/Aquarius.ts | 57 +++++++++++++++++++++++++++++++------ src/utils/SignatureUtils.ts | 20 +++++++++++++ 2 files changed, 68 insertions(+), 9 deletions(-) diff --git a/src/services/Aquarius.ts b/src/services/Aquarius.ts index 6aa8c5dc5..d7f67f646 100644 --- a/src/services/Aquarius.ts +++ b/src/services/Aquarius.ts @@ -2,6 +2,8 @@ import fetch from 'cross-fetch' import { LoggerInstance } from '../utils/Logger' import { Asset, DDO, ValidateMetadata } from '../@types' import { sleep } from '../utils/General' +import { Signer } from 'ethers' +import { signRequest } from '../utils/SignatureUtils' export interface SearchQuery { from?: number @@ -99,23 +101,60 @@ export class Aquarius { * @param {AbortSignal} signal abort signal * @return {Promise}. */ - public async validate(ddo: DDO, signal?: AbortSignal): Promise { + public async validate( + ddo: DDO, + signer?: Signer, + signal?: AbortSignal + ): Promise { const status: ValidateMetadata = { valid: false } let jsonResponse + let response try { const path = this.aquariusURL + '/api/aquarius/assets/ddo/validate' + if (signer) { + const publisherAddress = await signer.getAddress() + // aquarius is always same url of other components with ocean nodes + const pathNonce = this.aquariusURL + '/api/services/nonce' + const responseNonce = await fetch( + pathNonce + `?userAddress=${publisherAddress}`, + { + method: 'GET', + headers: { 'Content-Type': 'application/json' }, + signal + } + ) + let { nonce } = await responseNonce.json() + console.log(`[getNonce] Consumer: ${publisherAddress} nonce: ${nonce}`) + if (!nonce || nonce === null) { + nonce = '0' + } - const response = await fetch(path, { - method: 'POST', - body: JSON.stringify(ddo), - headers: { 'Content-Type': 'application/octet-stream' 
}, - signal - }) - - jsonResponse = await response.json() + // same signed message as usual (did + nonce) + // the node will only validate (add his signature if there fields are present and are valid) + let signatureMessage = publisherAddress + signatureMessage += ddo.id + nonce + const signature = await signRequest(signer, signatureMessage) + const data = { ddo, publisherAddress, nonce, signature } + response = await fetch(path, { + method: 'POST', + body: JSON.stringify(data), + headers: { 'Content-Type': 'application/octet-stream' }, + signal + }) + } else { + // backwards compatibility, "old" way without signature stuff + // this will not validate on newer versions of Ocean Node (status:400), as the node will not add the validation signature + response = await fetch(path, { + method: 'POST', + body: JSON.stringify(ddo), + headers: { 'Content-Type': 'application/octet-stream' }, + signal + }) + } if (response.status === 200) { + jsonResponse = await response.json() status.valid = true status.hash = jsonResponse.hash status.proof = { diff --git a/src/utils/SignatureUtils.ts b/src/utils/SignatureUtils.ts index ac60c1db7..5c565d71a 100644 --- a/src/utils/SignatureUtils.ts +++ b/src/utils/SignatureUtils.ts @@ -1,4 +1,5 @@ import { ethers, Signer, providers } from 'ethers' +import { LoggerInstance } from './Logger' /** * Signs the hash of a message using the provided signer. 
@@ -23,3 +24,22 @@ export async function signHash(signer: Signer, message: string) { return { v, r, s } } + +export async function signRequest(signer: Signer, message: string): Promise { + const consumerMessage = ethers.utils.solidityKeccak256( + ['bytes'], + [ethers.utils.hexlify(ethers.utils.toUtf8Bytes(message))] + ) + const messageHashBytes = ethers.utils.arrayify(consumerMessage) + const chainId = await signer.getChainId() + try { + return await signer.signMessage(messageHashBytes) + } catch (error) { + LoggerInstance.error('Sign message error: ', error) + if (chainId === 8996) { + return await (signer as providers.JsonRpcSigner)._legacySignMessage( + messageHashBytes + ) + } + } +} From 858f264487a1a122378b07311e0b97c0c714a571 Mon Sep 17 00:00:00 2001 From: paulo-ocean Date: Fri, 21 Feb 2025 11:24:27 +0000 Subject: [PATCH 02/25] fix nonce --- src/services/Aquarius.ts | 5 +++-- src/utils/Assets.ts | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/services/Aquarius.ts b/src/services/Aquarius.ts index d7f67f646..0995684df 100644 --- a/src/services/Aquarius.ts +++ b/src/services/Aquarius.ts @@ -130,13 +130,14 @@ export class Aquarius { if (!nonce || nonce === null) { nonce = '0' } + const newNonce = Number(nonce + 1).toString() // have to increase the previous // same signed message as usual (did + nonce) // the node will only validate (add his signature if there fields are present and are valid) let signatureMessage = publisherAddress - signatureMessage += ddo.id + nonce + signatureMessage += ddo.id + newNonce const signature = await signRequest(signer, signatureMessage) - const data = { ddo, publisherAddress, nonce, signature } + const data = { ddo, publisherAddress, newNonce, signature } response = await fetch(path, { method: 'POST', body: JSON.stringify(data), diff --git a/src/utils/Assets.ts b/src/utils/Assets.ts index 81ff16570..961a9f6fd 100644 --- a/src/utils/Assets.ts +++ b/src/utils/Assets.ts @@ -209,7 +209,7 @@ export async 
function createAsset( let flags if (encryptDDO) { metadata = await ProviderInstance.encrypt(ddo, chainID, providerUrl) - const validateResult = await aquariusInstance.validate(ddo) + const validateResult = await aquariusInstance.validate(ddo, owner) metadataHash = validateResult.hash flags = 2 } else { From 380420c55951a67cc1ecded3d9f4b6ecab56f270 Mon Sep 17 00:00:00 2001 From: paulo-ocean Date: Fri, 21 Feb 2025 14:42:29 +0000 Subject: [PATCH 03/25] see if the issue was the format --- src/services/Aquarius.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/services/Aquarius.ts b/src/services/Aquarius.ts index 0995684df..679c5400b 100644 --- a/src/services/Aquarius.ts +++ b/src/services/Aquarius.ts @@ -130,8 +130,8 @@ export class Aquarius { if (!nonce || nonce === null) { nonce = '0' } - const newNonce = Number(nonce + 1).toString() // have to increase the previous - + const newNonce = (Number(nonce) + 1).toString() // have to increase the previous + console.log('nonce: ' + nonce + ' newNonce ' + newNonce) // same signed message as usual (did + nonce) // the node will only validate (add his signature if there fields are present and are valid) let signatureMessage = publisherAddress From 7d5952319acb8eec78b50b1e62dce68e6dd059e3 Mon Sep 17 00:00:00 2001 From: paulo-ocean Date: Fri, 21 Feb 2025 15:14:44 +0000 Subject: [PATCH 04/25] debug paths --- src/services/Aquarius.ts | 3 +++ src/utils/Assets.ts | 1 + 2 files changed, 4 insertions(+) diff --git a/src/services/Aquarius.ts b/src/services/Aquarius.ts index 679c5400b..ed705b0e3 100644 --- a/src/services/Aquarius.ts +++ b/src/services/Aquarius.ts @@ -113,10 +113,12 @@ export class Aquarius { let response try { const path = this.aquariusURL + '/api/aquarius/assets/ddo/validate' + console.log('path: ', path) if (signer) { const publisherAddress = await signer.getAddress() // aquarius is always same url of other components with ocean nodes const pathNonce = this.aquariusURL + 
'/api/services/nonce' + console.log('pathnonce', pathNonce) const responseNonce = await fetch( pathNonce + `?userAddress=${publisherAddress}`, { @@ -125,6 +127,7 @@ export class Aquarius { signal } ) + console.log('response: ', responseNonce) let { nonce } = await responseNonce.json() console.log(`[getNonce] Consumer: ${publisherAddress} nonce: ${nonce}`) if (!nonce || nonce === null) { diff --git a/src/utils/Assets.ts b/src/utils/Assets.ts index 961a9f6fd..c0d6a410c 100644 --- a/src/utils/Assets.ts +++ b/src/utils/Assets.ts @@ -209,6 +209,7 @@ export async function createAsset( let flags if (encryptDDO) { metadata = await ProviderInstance.encrypt(ddo, chainID, providerUrl) + console.log('before validate, providerUrl: ', providerUrl) const validateResult = await aquariusInstance.validate(ddo, owner) metadataHash = validateResult.hash flags = 2 From a1e5a8b2ace0818835cedc95bf36a992d0b8cd3a Mon Sep 17 00:00:00 2001 From: paulo-ocean Date: Fri, 21 Feb 2025 15:55:26 +0000 Subject: [PATCH 05/25] update validate api call, consider legacy as backup --- CodeExamples.md | 5 +-- ComputeExamples.md | 2 +- src/services/Aquarius.ts | 50 +++++++++++++++++------- src/utils/Assets.ts | 2 +- test/integration/CodeExamples.test.ts | 12 +++++- test/integration/ComputeExamples.test.ts | 2 +- test/integration/PublishFlows.test.ts | 12 +++++- test/integration/helpers.ts | 4 +- 8 files changed, 62 insertions(+), 27 deletions(-) diff --git a/CodeExamples.md b/CodeExamples.md index ef020a78d..705d63ae6 100644 --- a/CodeExamples.md +++ b/CodeExamples.md @@ -396,7 +396,7 @@ Now let's console log the DID to check everything is working providerUrl ) const encryptedDDO = await providerResponse - const isAssetValid: ValidateMetadata = await aquarius.validate(fixedDDO) + const isAssetValid: ValidateMetadata = await aquarius.validate(fixedDDO, publisherAccount, providerUrl) assert(isAssetValid.valid === true, 'Published asset is not valid') await nft.setMetadata( freNftAddress, @@ -408,7 +408,6 @@ 
Now let's console log the DID to check everything is working encryptedDDO, isAssetValid.hash ) - }) ``` ### 6.3 Marketplace displays fixed rate asset for sale @@ -676,7 +675,7 @@ Now we need to encrypt file(s) using provider fixedDDO.chainId, providerUrl ) - const isAssetValid: ValidateMetadata = await aquarius.validate(fixedDDO) + const isAssetValid: ValidateMetadata = await aquarius.validate(fixedDDO, publisherAccount, providerUrl) assert(isAssetValid.valid === true, 'Published asset is not valid') await nft.setMetadata( dispenserNftAddress, diff --git a/ComputeExamples.md b/ComputeExamples.md index 19c6bae74..1d58a1b8f 100644 --- a/ComputeExamples.md +++ b/ComputeExamples.md @@ -343,7 +343,7 @@ async function createAsset( ddo.id = 'did:op:' + SHA256(ethers.utils.getAddress(nftAddress) + chain.toString(10)) const encryptedResponse = await ProviderInstance.encrypt(ddo, chain, providerUrl) - const validateResult = await aquariusInstance.validate(ddo) + const validateResult = await aquariusInstance.validate(ddo, owner, providerUrl) await nft.setMetadata( nftAddress, await owner.getAddress(), diff --git a/src/services/Aquarius.ts b/src/services/Aquarius.ts index ed705b0e3..565651231 100644 --- a/src/services/Aquarius.ts +++ b/src/services/Aquarius.ts @@ -104,6 +104,7 @@ export class Aquarius { public async validate( ddo: DDO, signer?: Signer, + providerUrl?: string, signal?: AbortSignal ): Promise { const status: ValidateMetadata = { @@ -111,13 +112,26 @@ export class Aquarius { } let jsonResponse let response + + const path = this.aquariusURL + '/api/aquarius/assets/ddo/validate' + try { - const path = this.aquariusURL + '/api/aquarius/assets/ddo/validate' console.log('path: ', path) - if (signer) { + // Old aquarius API and node API (before publisherAddress, nonce and signature verification) + const validateRequestLegacy = async function (): Promise { + response = await fetch(path, { + method: 'POST', + body: JSON.stringify(ddo), + headers: { 'Content-Type': 
'application/octet-stream' }, + signal + }) + return response + } + + if (signer && providerUrl) { const publisherAddress = await signer.getAddress() // aquarius is always same url of other components with ocean nodes - const pathNonce = this.aquariusURL + '/api/services/nonce' + const pathNonce = providerUrl + '/api/services/nonce' console.log('pathnonce', pathNonce) const responseNonce = await fetch( pathNonce + `?userAddress=${publisherAddress}`, @@ -141,21 +155,27 @@ export class Aquarius { signatureMessage += ddo.id + newNonce const signature = await signRequest(signer, signatureMessage) const data = { ddo, publisherAddress, newNonce, signature } - response = await fetch(path, { - method: 'POST', - body: JSON.stringify(data), - headers: { 'Content-Type': 'application/octet-stream' }, - signal - }) + + try { + response = await fetch(path, { + method: 'POST', + body: JSON.stringify(data), + headers: { 'Content-Type': 'application/octet-stream' }, + signal + }) + } catch (e) { + // retry with legacy path validation + LoggerInstance.error( + 'Metadata validation failed using publisher signature validation (perhaps not supported or legacy Aquarius), retrying with legacy path...', + response.status, + e.message + ) + response = await validateRequestLegacy() + } } else { // backwards compatibility, "old" way without signature stuff // this will not validate on newer versions of Ocean Node (status:400), as the node will not add the validation signature - response = await fetch(path, { - method: 'POST', - body: JSON.stringify(ddo), - headers: { 'Content-Type': 'application/octet-stream' }, - signal - }) + response = await validateRequestLegacy() } if (response.status === 200) { jsonResponse = await response.json() diff --git a/src/utils/Assets.ts b/src/utils/Assets.ts index c0d6a410c..9ac0af4e8 100644 --- a/src/utils/Assets.ts +++ b/src/utils/Assets.ts @@ -210,7 +210,7 @@ export async function createAsset( if (encryptDDO) { metadata = await 
ProviderInstance.encrypt(ddo, chainID, providerUrl) console.log('before validate, providerUrl: ', providerUrl) - const validateResult = await aquariusInstance.validate(ddo, owner) + const validateResult = await aquariusInstance.validate(ddo, owner, providerUrl) metadataHash = validateResult.hash flags = 2 } else { diff --git a/test/integration/CodeExamples.test.ts b/test/integration/CodeExamples.test.ts index aef4d79cf..fbc356c79 100644 --- a/test/integration/CodeExamples.test.ts +++ b/test/integration/CodeExamples.test.ts @@ -396,7 +396,11 @@ describe('Marketplace flow tests', async () => { providerUrl ) const encryptedDDO = await providerResponse - const isAssetValid: ValidateMetadata = await aquarius.validate(fixedDDO) + const isAssetValid: ValidateMetadata = await aquarius.validate( + fixedDDO, + publisherAccount, + providerUrl + ) assert(isAssetValid.valid === true, 'Published asset is not valid') await nft.setMetadata( freNftAddress, @@ -676,7 +680,11 @@ describe('Marketplace flow tests', async () => { fixedDDO.chainId, providerUrl ) - const isAssetValid: ValidateMetadata = await aquarius.validate(fixedDDO) + const isAssetValid: ValidateMetadata = await aquarius.validate( + fixedDDO, + publisherAccount, + providerUrl + ) assert(isAssetValid.valid === true, 'Published asset is not valid') await nft.setMetadata( dispenserNftAddress, diff --git a/test/integration/ComputeExamples.test.ts b/test/integration/ComputeExamples.test.ts index 4eb58b3ec..5e2510dcd 100644 --- a/test/integration/ComputeExamples.test.ts +++ b/test/integration/ComputeExamples.test.ts @@ -343,7 +343,7 @@ async function createAsset( ddo.id = 'did:op:' + SHA256(ethers.utils.getAddress(nftAddress) + chain.toString(10)) const encryptedResponse = await ProviderInstance.encrypt(ddo, chain, providerUrl) - const validateResult = await aquariusInstance.validate(ddo) + const validateResult = await aquariusInstance.validate(ddo, owner, providerUrl) await nft.setMetadata( nftAddress, await 
owner.getAddress(), diff --git a/test/integration/PublishFlows.test.ts b/test/integration/PublishFlows.test.ts index b556ee2dc..450300afd 100644 --- a/test/integration/PublishFlows.test.ts +++ b/test/integration/PublishFlows.test.ts @@ -180,7 +180,11 @@ describe('Publish tests', async () => { SHA256(ethers.utils.getAddress(nftAddress) + config.chainId.toString(10)) fixedPricedDID = fixedPriceDdo.id - const isAssetValid: ValidateMetadata = await aquarius.validate(fixedPriceDdo) + const isAssetValid: ValidateMetadata = await aquarius.validate( + fixedPriceDdo, + publisherAccount, + providerUrl + ) assert(isAssetValid.valid === true, 'Published asset is not valid') const encryptedResponse = await ProviderInstance.encrypt( fixedPriceDdo, @@ -291,7 +295,11 @@ describe('Publish tests', async () => { SHA256(ethers.utils.getAddress(nftAddress) + config.chainId.toString(10)) dispenserDID = dispenserDdo.id - const isAssetValid: ValidateMetadata = await aquarius.validate(dispenserDdo) + const isAssetValid: ValidateMetadata = await aquarius.validate( + dispenserDdo, + publisherAccount, + providerUrl + ) assert(isAssetValid.valid === true, 'Published asset is not valid') const encryptedDdo = await ProviderInstance.encrypt( diff --git a/test/integration/helpers.ts b/test/integration/helpers.ts index 6e3f7c75d..623d16f14 100644 --- a/test/integration/helpers.ts +++ b/test/integration/helpers.ts @@ -77,7 +77,7 @@ export async function createAsset( ddo.id = 'did:op:' + SHA256(ethers.utils.getAddress(nftAddress) + chain.toString(10)) const encryptedResponse = await ProviderInstance.encrypt(ddo, chain, providerUrl) - const validateResult = await aquariusInstance.validate(ddo) + const validateResult = await aquariusInstance.validate(ddo, owner, providerUrl) await nft.setMetadata( nftAddress, await owner.getAddress(), @@ -104,7 +104,7 @@ export async function updateAssetMetadata( providerUrl ) const encryptedResponse = await providerResponse - const validateResult = await 
aquariusInstance.validate(updatedDdo) + const validateResult = await aquariusInstance.validate(updatedDdo, owner, providerUrl) const updateDdoTX = await nft.setMetadata( updatedDdo.nftAddress, await owner.getAddress(), From 2033cf9cb94f7b69f294b8077cfaec0ad8890cd8 Mon Sep 17 00:00:00 2001 From: paulo-ocean Date: Fri, 21 Feb 2025 16:14:42 +0000 Subject: [PATCH 06/25] undo function call extra params --- CodeExamples.md | 4 ++-- ComputeExamples.md | 2 +- src/services/Aquarius.ts | 15 ++++++++------- test/integration/CodeExamples.test.ts | 12 ++---------- test/integration/ComputeExamples.test.ts | 10 +++++----- test/integration/PublishFlows.test.ts | 12 ++---------- test/integration/helpers.ts | 4 ++-- 7 files changed, 22 insertions(+), 37 deletions(-) diff --git a/CodeExamples.md b/CodeExamples.md index 705d63ae6..7d11b9cc8 100644 --- a/CodeExamples.md +++ b/CodeExamples.md @@ -396,7 +396,7 @@ Now let's console log the DID to check everything is working providerUrl ) const encryptedDDO = await providerResponse - const isAssetValid: ValidateMetadata = await aquarius.validate(fixedDDO, publisherAccount, providerUrl) + const isAssetValid: ValidateMetadata = await aquarius.validate(fixedDDO) assert(isAssetValid.valid === true, 'Published asset is not valid') await nft.setMetadata( freNftAddress, @@ -675,7 +675,7 @@ Now we need to encrypt file(s) using provider fixedDDO.chainId, providerUrl ) - const isAssetValid: ValidateMetadata = await aquarius.validate(fixedDDO, publisherAccount, providerUrl) + const isAssetValid: ValidateMetadata = await aquarius.validate(fixedDDO) assert(isAssetValid.valid === true, 'Published asset is not valid') await nft.setMetadata( dispenserNftAddress, diff --git a/ComputeExamples.md b/ComputeExamples.md index 1d58a1b8f..19c6bae74 100644 --- a/ComputeExamples.md +++ b/ComputeExamples.md @@ -343,7 +343,7 @@ async function createAsset( ddo.id = 'did:op:' + SHA256(ethers.utils.getAddress(nftAddress) + chain.toString(10)) const encryptedResponse = 
await ProviderInstance.encrypt(ddo, chain, providerUrl) - const validateResult = await aquariusInstance.validate(ddo, owner, providerUrl) + const validateResult = await aquariusInstance.validate(ddo) await nft.setMetadata( nftAddress, await owner.getAddress(), diff --git a/src/services/Aquarius.ts b/src/services/Aquarius.ts index 565651231..0efa0a81f 100644 --- a/src/services/Aquarius.ts +++ b/src/services/Aquarius.ts @@ -149,14 +149,15 @@ export class Aquarius { } const newNonce = (Number(nonce) + 1).toString() // have to increase the previous console.log('nonce: ' + nonce + ' newNonce ' + newNonce) - // same signed message as usual (did + nonce) - // the node will only validate (add his signature if there fields are present and are valid) - let signatureMessage = publisherAddress - signatureMessage += ddo.id + newNonce - const signature = await signRequest(signer, signatureMessage) - const data = { ddo, publisherAddress, newNonce, signature } - try { + // same signed message as usual (did + nonce) + // the node will only validate (add his signature if there fields are present and are valid) + let signatureMessage = publisherAddress + signatureMessage += ddo.id + newNonce + console.log('will sign the request...') + const signature = await signRequest(signer, signatureMessage) + const data = { ddo, publisherAddress, newNonce, signature } + console.log('will call validate path at ', path) response = await fetch(path, { method: 'POST', body: JSON.stringify(data), diff --git a/test/integration/CodeExamples.test.ts b/test/integration/CodeExamples.test.ts index fbc356c79..aef4d79cf 100644 --- a/test/integration/CodeExamples.test.ts +++ b/test/integration/CodeExamples.test.ts @@ -396,11 +396,7 @@ describe('Marketplace flow tests', async () => { providerUrl ) const encryptedDDO = await providerResponse - const isAssetValid: ValidateMetadata = await aquarius.validate( - fixedDDO, - publisherAccount, - providerUrl - ) + const isAssetValid: ValidateMetadata = await 
aquarius.validate(fixedDDO) assert(isAssetValid.valid === true, 'Published asset is not valid') await nft.setMetadata( freNftAddress, @@ -680,11 +676,7 @@ describe('Marketplace flow tests', async () => { fixedDDO.chainId, providerUrl ) - const isAssetValid: ValidateMetadata = await aquarius.validate( - fixedDDO, - publisherAccount, - providerUrl - ) + const isAssetValid: ValidateMetadata = await aquarius.validate(fixedDDO) assert(isAssetValid.valid === true, 'Published asset is not valid') await nft.setMetadata( dispenserNftAddress, diff --git a/test/integration/ComputeExamples.test.ts b/test/integration/ComputeExamples.test.ts index 5e2510dcd..221571120 100644 --- a/test/integration/ComputeExamples.test.ts +++ b/test/integration/ComputeExamples.test.ts @@ -281,9 +281,9 @@ let agreementId: string /// Now we define the helper methods which we will use later to publish the dataset and algorithm, and also order them -/// Add a `createAsset()`function. +/// Add a `createAssetHelper()`function. 
/// ```Typescript -async function createAsset( +async function createAssetHelper( name: string, symbol: string, owner: Signer, @@ -343,7 +343,7 @@ async function createAsset( ddo.id = 'did:op:' + SHA256(ethers.utils.getAddress(nftAddress) + chain.toString(10)) const encryptedResponse = await ProviderInstance.encrypt(ddo, chain, providerUrl) - const validateResult = await aquariusInstance.validate(ddo, owner, providerUrl) + const validateResult = await aquariusInstance.validate(ddo) await nft.setMetadata( nftAddress, await owner.getAddress(), @@ -503,7 +503,7 @@ describe('Compute-to-data example tests', async () => { it('6.1 Publish a dataset (create NFT + Datatoken) and set dataset metadata', async () => { /// ```Typescript - datasetId = await createAsset( + datasetId = await createAssetHelper( 'D1Min', 'D1M', publisherAccount, @@ -520,7 +520,7 @@ describe('Compute-to-data example tests', async () => { it('6.2 Publish an algorithm (create NFT + Datatoken) and set algorithm metadata', async () => { /// ```Typescript - algorithmId = await createAsset( + algorithmId = await createAssetHelper( 'D1Min', 'D1M', publisherAccount, diff --git a/test/integration/PublishFlows.test.ts b/test/integration/PublishFlows.test.ts index 450300afd..b556ee2dc 100644 --- a/test/integration/PublishFlows.test.ts +++ b/test/integration/PublishFlows.test.ts @@ -180,11 +180,7 @@ describe('Publish tests', async () => { SHA256(ethers.utils.getAddress(nftAddress) + config.chainId.toString(10)) fixedPricedDID = fixedPriceDdo.id - const isAssetValid: ValidateMetadata = await aquarius.validate( - fixedPriceDdo, - publisherAccount, - providerUrl - ) + const isAssetValid: ValidateMetadata = await aquarius.validate(fixedPriceDdo) assert(isAssetValid.valid === true, 'Published asset is not valid') const encryptedResponse = await ProviderInstance.encrypt( fixedPriceDdo, @@ -295,11 +291,7 @@ describe('Publish tests', async () => { SHA256(ethers.utils.getAddress(nftAddress) + config.chainId.toString(10)) 
dispenserDID = dispenserDdo.id - const isAssetValid: ValidateMetadata = await aquarius.validate( - dispenserDdo, - publisherAccount, - providerUrl - ) + const isAssetValid: ValidateMetadata = await aquarius.validate(dispenserDdo) assert(isAssetValid.valid === true, 'Published asset is not valid') const encryptedDdo = await ProviderInstance.encrypt( diff --git a/test/integration/helpers.ts b/test/integration/helpers.ts index 623d16f14..982a603dd 100644 --- a/test/integration/helpers.ts +++ b/test/integration/helpers.ts @@ -17,7 +17,7 @@ import { ProviderFees, getEventFromTx } from '../../src' - +// superseed by src/utils/CreateAsset export async function createAsset( name: string, symbol: string, @@ -77,7 +77,7 @@ export async function createAsset( ddo.id = 'did:op:' + SHA256(ethers.utils.getAddress(nftAddress) + chain.toString(10)) const encryptedResponse = await ProviderInstance.encrypt(ddo, chain, providerUrl) - const validateResult = await aquariusInstance.validate(ddo, owner, providerUrl) + const validateResult = await aquariusInstance.validate(ddo) await nft.setMetadata( nftAddress, await owner.getAddress(), From 06912b9b6c08f178040c6f896490caa3cf04183f Mon Sep 17 00:00:00 2001 From: paulo-ocean Date: Fri, 21 Feb 2025 16:48:39 +0000 Subject: [PATCH 07/25] more debug --- src/services/Aquarius.ts | 5 ++++- test/integration/helpers.ts | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/src/services/Aquarius.ts b/src/services/Aquarius.ts index 0efa0a81f..5a8f54929 100644 --- a/src/services/Aquarius.ts +++ b/src/services/Aquarius.ts @@ -119,6 +119,7 @@ export class Aquarius { console.log('path: ', path) // Old aquarius API and node API (before publisherAddress, nonce and signature verification) const validateRequestLegacy = async function (): Promise { + console.log('using validateRequestLegacy()') response = await fetch(path, { method: 'POST', body: JSON.stringify(ddo), @@ -156,8 +157,10 @@ export class Aquarius { signatureMessage += ddo.id + 
newNonce console.log('will sign the request...') const signature = await signRequest(signer, signatureMessage) + console.log('signature: ', signature) const data = { ddo, publisherAddress, newNonce, signature } console.log('will call validate path at ', path) + console.log('calldata: ', data) response = await fetch(path, { method: 'POST', body: JSON.stringify(data), @@ -165,10 +168,10 @@ export class Aquarius { signal }) } catch (e) { + console.error('GOT ERROR:', e) // retry with legacy path validation LoggerInstance.error( 'Metadata validation failed using publisher signature validation (perhaps not supported or legacy Aquarius), retrying with legacy path...', - response.status, e.message ) response = await validateRequestLegacy() diff --git a/test/integration/helpers.ts b/test/integration/helpers.ts index 982a603dd..539b95d75 100644 --- a/test/integration/helpers.ts +++ b/test/integration/helpers.ts @@ -104,7 +104,7 @@ export async function updateAssetMetadata( providerUrl ) const encryptedResponse = await providerResponse - const validateResult = await aquariusInstance.validate(updatedDdo, owner, providerUrl) + const validateResult = await aquariusInstance.validate(updatedDdo) const updateDdoTX = await nft.setMetadata( updatedDdo.nftAddress, await owner.getAddress(), From b5194cd8ef6ad739c1121f4c427ebb3f3712046f Mon Sep 17 00:00:00 2001 From: paulo-ocean Date: Fri, 21 Feb 2025 17:35:16 +0000 Subject: [PATCH 08/25] another try --- src/services/Aquarius.ts | 102 ++++++++++++++++++++------------------- 1 file changed, 53 insertions(+), 49 deletions(-) diff --git a/src/services/Aquarius.ts b/src/services/Aquarius.ts index 5a8f54929..eb7f7d8fc 100644 --- a/src/services/Aquarius.ts +++ b/src/services/Aquarius.ts @@ -115,10 +115,10 @@ export class Aquarius { const path = this.aquariusURL + '/api/aquarius/assets/ddo/validate' - try { - console.log('path: ', path) - // Old aquarius API and node API (before publisherAddress, nonce and signature verification) - const 
validateRequestLegacy = async function (): Promise { + console.log('path: ', path) + // Old aquarius API and node API (before publisherAddress, nonce and signature verification) + const validateRequestLegacy = async function (): Promise { + try { console.log('using validateRequestLegacy()') response = await fetch(path, { method: 'POST', @@ -127,9 +127,14 @@ export class Aquarius { signal }) return response + } catch (error) { + LoggerInstance.error('Error validating metadata: ', error) + return null } + } - if (signer && providerUrl) { + if (signer && providerUrl) { + try { const publisherAddress = await signer.getAddress() // aquarius is always same url of other components with ocean nodes const pathNonce = providerUrl + '/api/services/nonce' @@ -150,54 +155,53 @@ export class Aquarius { } const newNonce = (Number(nonce) + 1).toString() // have to increase the previous console.log('nonce: ' + nonce + ' newNonce ' + newNonce) - try { - // same signed message as usual (did + nonce) - // the node will only validate (add his signature if there fields are present and are valid) - let signatureMessage = publisherAddress - signatureMessage += ddo.id + newNonce - console.log('will sign the request...') - const signature = await signRequest(signer, signatureMessage) - console.log('signature: ', signature) - const data = { ddo, publisherAddress, newNonce, signature } - console.log('will call validate path at ', path) - console.log('calldata: ', data) - response = await fetch(path, { - method: 'POST', - body: JSON.stringify(data), - headers: { 'Content-Type': 'application/octet-stream' }, - signal - }) - } catch (e) { - console.error('GOT ERROR:', e) - // retry with legacy path validation - LoggerInstance.error( - 'Metadata validation failed using publisher signature validation (perhaps not supported or legacy Aquarius), retrying with legacy path...', - e.message - ) - response = await validateRequestLegacy() - } - } else { - // backwards compatibility, "old" way without 
signature stuff - // this will not validate on newer versions of Ocean Node (status:400), as the node will not add the validation signature + // same signed message as usual (did + nonce) + // the node will only validate (add his signature if there fields are present and are valid) + let signatureMessage = publisherAddress + signatureMessage += ddo.id + newNonce + console.log('will sign the request...') + const signature = await signRequest(signer, signatureMessage) + console.log('signature: ', signature) + const data = { ddo, publisherAddress, newNonce, signature } + console.log('will call validate path at ', path) + console.log('calldata: ', data) + response = await fetch(path, { + method: 'POST', + body: JSON.stringify(data), + headers: { 'Content-Type': 'application/octet-stream' }, + signal + }) + } catch (e) { + console.error('GOT ERROR:', e) + // retry with legacy path validation + LoggerInstance.error( + 'Metadata validation failed using publisher signature validation (perhaps not supported or legacy Aquarius), retrying with legacy path...', + e.message + ) response = await validateRequestLegacy() } - if (response.status === 200) { - jsonResponse = await response.json() - status.valid = true - status.hash = jsonResponse.hash - status.proof = { - validatorAddress: jsonResponse.publicKey, - r: jsonResponse.r[0], - s: jsonResponse.s[0], - v: jsonResponse.v - } - } else { - status.errors = jsonResponse - LoggerInstance.error('validate Metadata failed:', response.status, status.errors) + } else { + // backwards compatibility, "old" way without signature stuff + // this will not validate on newer versions of Ocean Node (status:400), as the node will not add the validation signature + response = await validateRequestLegacy() + } + if (!response) return status + + if (response.status === 200) { + jsonResponse = await response.json() + status.valid = true + status.hash = jsonResponse.hash + status.proof = { + validatorAddress: jsonResponse.publicKey, + r: 
jsonResponse.r[0], + s: jsonResponse.s[0], + v: jsonResponse.v } - } catch (error) { - LoggerInstance.error('Error validating metadata: ', error) + } else { + status.errors = jsonResponse + LoggerInstance.error('validate Metadata failed:', response.status, status.errors) } + return status } From 6f8628ffd68b8a51bbbafe354eb1fdec85718f12 Mon Sep 17 00:00:00 2001 From: paulo-ocean Date: Fri, 21 Feb 2025 18:08:57 +0000 Subject: [PATCH 09/25] more debug --- src/services/Aquarius.ts | 6 +++--- src/utils/SignatureUtils.ts | 1 + 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/services/Aquarius.ts b/src/services/Aquarius.ts index eb7f7d8fc..ef82422bb 100644 --- a/src/services/Aquarius.ts +++ b/src/services/Aquarius.ts @@ -157,9 +157,9 @@ export class Aquarius { console.log('nonce: ' + nonce + ' newNonce ' + newNonce) // same signed message as usual (did + nonce) // the node will only validate (add his signature if there fields are present and are valid) - let signatureMessage = publisherAddress - signatureMessage += ddo.id + newNonce - console.log('will sign the request...') + // let signatureMessage = publisherAddress + const signatureMessage = ddo.id + newNonce + console.log('will sign the request...', signatureMessage) const signature = await signRequest(signer, signatureMessage) console.log('signature: ', signature) const data = { ddo, publisherAddress, newNonce, signature } diff --git a/src/utils/SignatureUtils.ts b/src/utils/SignatureUtils.ts index 5c565d71a..369868f2e 100644 --- a/src/utils/SignatureUtils.ts +++ b/src/utils/SignatureUtils.ts @@ -37,6 +37,7 @@ export async function signRequest(signer: Signer, message: string): Promise Date: Fri, 21 Feb 2025 18:19:25 +0000 Subject: [PATCH 10/25] more debug --- src/services/Aquarius.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/src/services/Aquarius.ts b/src/services/Aquarius.ts index ef82422bb..e23f27f3f 100644 --- a/src/services/Aquarius.ts +++ b/src/services/Aquarius.ts @@ -171,6 +171,7 @@ 
export class Aquarius { headers: { 'Content-Type': 'application/octet-stream' }, signal }) + console.log('response was: ', response.json()) } catch (e) { console.error('GOT ERROR:', e) // retry with legacy path validation From 7ad8a972cf39062d05c0743ca59f37f503efafde Mon Sep 17 00:00:00 2001 From: paulo-ocean Date: Fri, 21 Feb 2025 18:39:37 +0000 Subject: [PATCH 11/25] more debug --- src/services/Aquarius.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/services/Aquarius.ts b/src/services/Aquarius.ts index e23f27f3f..0e5fdec53 100644 --- a/src/services/Aquarius.ts +++ b/src/services/Aquarius.ts @@ -171,7 +171,7 @@ export class Aquarius { headers: { 'Content-Type': 'application/octet-stream' }, signal }) - console.log('response was: ', response.json()) + console.log('response was: ', await response.json()) } catch (e) { console.error('GOT ERROR:', e) // retry with legacy path validation From f7b7a859604ff35bcf8f1f74bd41676aee0e7f3e Mon Sep 17 00:00:00 2001 From: paulo-ocean Date: Fri, 21 Feb 2025 19:06:32 +0000 Subject: [PATCH 12/25] more debug --- src/services/Aquarius.ts | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/src/services/Aquarius.ts b/src/services/Aquarius.ts index 0e5fdec53..4f1b3370a 100644 --- a/src/services/Aquarius.ts +++ b/src/services/Aquarius.ts @@ -171,7 +171,14 @@ export class Aquarius { headers: { 'Content-Type': 'application/octet-stream' }, signal }) - console.log('response was: ', await response.json()) + const resp = await response.json() + if (resp && JSON.stringify(resp).includes('no version provided for DDO.')) { + // do it again + console.log('do it again') + response = await validateRequestLegacy() + } else { + jsonResponse = resp + } } catch (e) { console.error('GOT ERROR:', e) // retry with legacy path validation @@ -189,7 +196,7 @@ export class Aquarius { if (!response) return status if (response.status === 200) { - jsonResponse = await response.json() + jsonResponse = 
jsonResponse || (await response.json()) status.valid = true status.hash = jsonResponse.hash status.proof = { From b26f453acade09aca3df9d4d31a8dcc21cac7183 Mon Sep 17 00:00:00 2001 From: GitHub Actions Bot <> Date: Fri, 21 Feb 2025 19:15:44 +0000 Subject: [PATCH 13/25] Updating CodeExamples.md --- CodeExamples.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CodeExamples.md b/CodeExamples.md index 7d11b9cc8..ef020a78d 100644 --- a/CodeExamples.md +++ b/CodeExamples.md @@ -408,6 +408,7 @@ Now let's console log the DID to check everything is working encryptedDDO, isAssetValid.hash ) + }) ``` ### 6.3 Marketplace displays fixed rate asset for sale From 7b6e9e48b0b39a649e5d4bf78d64f7907e82b556 Mon Sep 17 00:00:00 2001 From: GitHub Actions Bot <> Date: Fri, 21 Feb 2025 19:15:45 +0000 Subject: [PATCH 14/25] Updating ComputeExamples.md --- ComputeExamples.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/ComputeExamples.md b/ComputeExamples.md index 19c6bae74..beec05442 100644 --- a/ComputeExamples.md +++ b/ComputeExamples.md @@ -281,9 +281,9 @@ let agreementId: string Now we define the helper methods which we will use later to publish the dataset and algorithm, and also order them -Add a `createAsset()`function. +Add a `createAssetHelper()`function. 
```Typescript -async function createAsset( +async function createAssetHelper( name: string, symbol: string, owner: Signer, @@ -503,7 +503,7 @@ you need to mint oceans to mentioned accounts only if you are using barge to tes ### 6.1 Publish a dataset (create NFT + Datatoken) and set dataset metadata ```Typescript - datasetId = await createAsset( + datasetId = await createAssetHelper( 'D1Min', 'D1M', publisherAccount, @@ -520,7 +520,7 @@ Now, let's check that we successfully published a dataset (create NFT + Datatoke ### 6.2 Publish an algorithm (create NFT + Datatoken) and set algorithm metadata ```Typescript - algorithmId = await createAsset( + algorithmId = await createAssetHelper( 'D1Min', 'D1M', publisherAccount, From 528387af6478bd91e59f3ec5b857b4f14c3dced4 Mon Sep 17 00:00:00 2001 From: paulo-ocean Date: Sat, 22 Feb 2025 11:46:45 +0000 Subject: [PATCH 15/25] remove console.log --- src/services/Aquarius.ts | 14 ++------------ src/utils/Assets.ts | 1 - 2 files changed, 2 insertions(+), 13 deletions(-) diff --git a/src/services/Aquarius.ts b/src/services/Aquarius.ts index 4f1b3370a..15feda40b 100644 --- a/src/services/Aquarius.ts +++ b/src/services/Aquarius.ts @@ -115,11 +115,9 @@ export class Aquarius { const path = this.aquariusURL + '/api/aquarius/assets/ddo/validate' - console.log('path: ', path) // Old aquarius API and node API (before publisherAddress, nonce and signature verification) const validateRequestLegacy = async function (): Promise { try { - console.log('using validateRequestLegacy()') response = await fetch(path, { method: 'POST', body: JSON.stringify(ddo), @@ -138,7 +136,6 @@ export class Aquarius { const publisherAddress = await signer.getAddress() // aquarius is always same url of other components with ocean nodes const pathNonce = providerUrl + '/api/services/nonce' - console.log('pathnonce', pathNonce) const responseNonce = await fetch( pathNonce + `?userAddress=${publisherAddress}`, { @@ -147,24 +144,18 @@ export class Aquarius { signal } ) 
- console.log('response: ', responseNonce) let { nonce } = await responseNonce.json() console.log(`[getNonce] Consumer: ${publisherAddress} nonce: ${nonce}`) if (!nonce || nonce === null) { nonce = '0' } const newNonce = (Number(nonce) + 1).toString() // have to increase the previous - console.log('nonce: ' + nonce + ' newNonce ' + newNonce) // same signed message as usual (did + nonce) // the node will only validate (add his signature if there fields are present and are valid) // let signatureMessage = publisherAddress const signatureMessage = ddo.id + newNonce - console.log('will sign the request...', signatureMessage) const signature = await signRequest(signer, signatureMessage) - console.log('signature: ', signature) const data = { ddo, publisherAddress, newNonce, signature } - console.log('will call validate path at ', path) - console.log('calldata: ', data) response = await fetch(path, { method: 'POST', body: JSON.stringify(data), @@ -172,15 +163,14 @@ export class Aquarius { signal }) const resp = await response.json() + // this is the legacy API version (especting just a DDO object in the body) if (resp && JSON.stringify(resp).includes('no version provided for DDO.')) { - // do it again - console.log('do it again') + // do it again, using the legacy API response = await validateRequestLegacy() } else { jsonResponse = resp } } catch (e) { - console.error('GOT ERROR:', e) // retry with legacy path validation LoggerInstance.error( 'Metadata validation failed using publisher signature validation (perhaps not supported or legacy Aquarius), retrying with legacy path...', diff --git a/src/utils/Assets.ts b/src/utils/Assets.ts index 9ac0af4e8..acd557803 100644 --- a/src/utils/Assets.ts +++ b/src/utils/Assets.ts @@ -209,7 +209,6 @@ export async function createAsset( let flags if (encryptDDO) { metadata = await ProviderInstance.encrypt(ddo, chainID, providerUrl) - console.log('before validate, providerUrl: ', providerUrl) const validateResult = await 
aquariusInstance.validate(ddo, owner, providerUrl) metadataHash = validateResult.hash flags = 2 From 9ee165a079b9e95a68cd08bc1c0095275d899889 Mon Sep 17 00:00:00 2001 From: paulo-ocean Date: Sat, 22 Feb 2025 11:51:42 +0000 Subject: [PATCH 16/25] refactor fn name to avoid confusion with new one --- test/integration/ComputeFlow.test.ts | 10 +++++----- test/integration/PublishEditConsume.test.ts | 12 ++++++------ test/integration/helpers.ts | 2 +- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/test/integration/ComputeFlow.test.ts b/test/integration/ComputeFlow.test.ts index 7641e6153..6c3969ebf 100644 --- a/test/integration/ComputeFlow.test.ts +++ b/test/integration/ComputeFlow.test.ts @@ -10,7 +10,7 @@ import { amountToUnits } from '../../src' import { ComputeJob, ComputeAsset, ComputeAlgorithm, Files } from '../../src/@types' -import { createAsset, handleComputeOrder } from './helpers' +import { createAssetHelper, handleComputeOrder } from './helpers' let config: Config @@ -302,7 +302,7 @@ describe('Compute flow tests', async () => { amountToUnits(null, null, '1000', 18) ) - ddoWith5mTimeoutId = await createAsset( + ddoWith5mTimeoutId = await createAssetHelper( 'D1Min', 'D1M', publisherAccount, @@ -312,7 +312,7 @@ describe('Compute flow tests', async () => { addresses.ERC721Factory, aquarius ) - ddoWithNoTimeoutId = await createAsset( + ddoWithNoTimeoutId = await createAssetHelper( 'D1Min', 'D1M', publisherAccount, @@ -322,7 +322,7 @@ describe('Compute flow tests', async () => { addresses.ERC721Factory, aquarius ) - algoDdoWith5mTimeoutId = await createAsset( + algoDdoWith5mTimeoutId = await createAssetHelper( 'A1Min', 'A1M', publisherAccount, @@ -333,7 +333,7 @@ describe('Compute flow tests', async () => { aquarius ) - algoDdoWithNoTimeoutId = await createAsset( + algoDdoWithNoTimeoutId = await createAssetHelper( 'A1Min', 'A1M', publisherAccount, diff --git a/test/integration/PublishEditConsume.test.ts b/test/integration/PublishEditConsume.test.ts 
index 22ac5b627..06c0ded26 100644 --- a/test/integration/PublishEditConsume.test.ts +++ b/test/integration/PublishEditConsume.test.ts @@ -12,7 +12,7 @@ import { amountToUnits } from '../../src' import { Files, Smartcontract } from '../../src/@types' -import { createAsset, orderAsset, updateAssetMetadata } from './helpers' +import { createAssetHelper, orderAsset, updateAssetMetadata } from './helpers' let config: Config @@ -210,7 +210,7 @@ describe('Publish consume test', async () => { }) it('Should publish url asset', async () => { - urlAssetId = await createAsset( + urlAssetId = await createAssetHelper( 'UrlDatatoken', 'URLDT', publisherAccount, @@ -223,7 +223,7 @@ describe('Publish consume test', async () => { assert(urlAssetId, 'Failed to publish url DDO') }) it('Should publish arweave asset', async () => { - arweaveAssetId = await createAsset( + arweaveAssetId = await createAssetHelper( 'ArwaveDatatoken', 'ARWAVEDT', publisherAccount, @@ -247,7 +247,7 @@ describe('Publish consume test', async () => { } ] } - ipfsAssetId = await createAsset( + ipfsAssetId = await createAssetHelper( 'IpfsDatatoken', 'IPFSDT', publisherAccount, @@ -274,7 +274,7 @@ describe('Publish consume test', async () => { chainId: 8996 } onchainFile.files[0] = chainFile - onchainAssetId = await createAsset( + onchainAssetId = await createAssetHelper( 'ChainDatatoken', 'CHAINDT', publisherAccount, @@ -288,7 +288,7 @@ describe('Publish consume test', async () => { }) it('Should publish graphql asset', async () => { - grapqlAssetId = await createAsset( + grapqlAssetId = await createAssetHelper( 'GraphDatatoken', 'GRAPHDT', publisherAccount, diff --git a/test/integration/helpers.ts b/test/integration/helpers.ts index 539b95d75..8469874b7 100644 --- a/test/integration/helpers.ts +++ b/test/integration/helpers.ts @@ -18,7 +18,7 @@ import { getEventFromTx } from '../../src' // superseed by src/utils/CreateAsset -export async function createAsset( +export async function createAssetHelper( name: 
string, symbol: string, owner: Signer, From e1b5d3f4f38302df6211d22963c14175f17032e5 Mon Sep 17 00:00:00 2001 From: paulo-ocean Date: Mon, 24 Feb 2025 10:55:33 +0000 Subject: [PATCH 17/25] minor refactor, nonce --- src/services/Aquarius.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/services/Aquarius.ts b/src/services/Aquarius.ts index 15feda40b..9610b1578 100644 --- a/src/services/Aquarius.ts +++ b/src/services/Aquarius.ts @@ -149,13 +149,13 @@ export class Aquarius { if (!nonce || nonce === null) { nonce = '0' } - const newNonce = (Number(nonce) + 1).toString() // have to increase the previous + const nextNonce = (Number(nonce) + 1).toString() // have to increase the previous // same signed message as usual (did + nonce) // the node will only validate (add his signature if there fields are present and are valid) // let signatureMessage = publisherAddress - const signatureMessage = ddo.id + newNonce + const signatureMessage = ddo.id + nextNonce const signature = await signRequest(signer, signatureMessage) - const data = { ddo, publisherAddress, newNonce, signature } + const data = { ddo, publisherAddress, nonce: nextNonce, signature } response = await fetch(path, { method: 'POST', body: JSON.stringify(data), From 390c5fc469f841495b7c2d0d7322b7ce8875a6c4 Mon Sep 17 00:00:00 2001 From: paulo-ocean Date: Tue, 25 Feb 2025 09:41:28 +0000 Subject: [PATCH 18/25] use process.env if not supplied in param --- src/services/Aquarius.ts | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/services/Aquarius.ts b/src/services/Aquarius.ts index 9610b1578..0fec13ebc 100644 --- a/src/services/Aquarius.ts +++ b/src/services/Aquarius.ts @@ -131,8 +131,12 @@ export class Aquarius { } } - if (signer && providerUrl) { + if (signer) { try { + // make it optional and get from env if not present + if (!providerUrl) { + providerUrl = process.env.PROVIDER_URL + } const publisherAddress = await signer.getAddress() // aquarius is always 
same url of other components with ocean nodes const pathNonce = providerUrl + '/api/services/nonce' From 8444cf2ec20c963b3f5cb16155bcab13c07bf27b Mon Sep 17 00:00:00 2001 From: paulo-ocean Date: Fri, 28 Feb 2025 10:14:24 +0000 Subject: [PATCH 19/25] add comments --- docs/classes/Aquarius.md | 4 +++- src/services/Aquarius.ts | 3 +++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/docs/classes/Aquarius.md b/docs/classes/Aquarius.md index 6275cd2f9..b2d860420 100644 --- a/docs/classes/Aquarius.md +++ b/docs/classes/Aquarius.md @@ -123,7 +123,7 @@ ___ ### validate -▸ **validate**(`ddo`, `signal?`): `Promise`<[`ValidateMetadata`](../interfaces/ValidateMetadata.md)\> +▸ **validate**(`ddo`, `signal?`,`signer?`,`providerUrl?`): `Promise`<[`ValidateMetadata`](../interfaces/ValidateMetadata.md)\> Validate DDO content @@ -132,6 +132,8 @@ Validate DDO content | Name | Type | Description | | :------ | :------ | :------ | | `ddo` | [`DDO`](../interfaces/DDO.md) | DID Descriptor Object content. | +| `signer?` | `Signer` | publisher account (for signature). | +| `providerUrl?` | `string` | Provider url (to get nonce) | | `signal?` | `AbortSignal` | abort signal | #### Returns diff --git a/src/services/Aquarius.ts b/src/services/Aquarius.ts index 0fec13ebc..31a2062e9 100644 --- a/src/services/Aquarius.ts +++ b/src/services/Aquarius.ts @@ -98,6 +98,8 @@ export class Aquarius { /** * Validate DDO content * @param {DDO} ddo DID Descriptor Object content. + * @param {signer} ddo publisher account. + * @param {providerUrl} provider url used to get the nonce. * @param {AbortSignal} signal abort signal * @return {Promise}. 
*/ @@ -116,6 +118,7 @@ export class Aquarius { const path = this.aquariusURL + '/api/aquarius/assets/ddo/validate' // Old aquarius API and node API (before publisherAddress, nonce and signature verification) + // Older Providers (before updated Ocean Nodes) const validateRequestLegacy = async function (): Promise { try { response = await fetch(path, { From 8fc2fc52f694d7a0a2d1bce963a8f28ec8d76ac9 Mon Sep 17 00:00:00 2001 From: paulo-ocean Date: Mon, 3 Mar 2025 12:47:50 +0000 Subject: [PATCH 20/25] fix filr url --- test/integration/Provider.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/integration/Provider.test.ts b/test/integration/Provider.test.ts index 2aa69f2ab..8c6c6daa4 100644 --- a/test/integration/Provider.test.ts +++ b/test/integration/Provider.test.ts @@ -32,7 +32,7 @@ describe('Provider tests', async () => { const fileinfo: FileInfo[] = await providerInstance.getFileInfo( { type: 'url', - url: 'https://dumps.wikimedia.org/enwiki/latest/enwiki-latest-abstract.xml.gz-rss.xml', + url: 'https://raw.githubusercontent.com/oceanprotocol/ocean-node/refs/heads/main/LICENSE', method: 'GET' }, config.providerUri From 9a542d039c64bbf70bfb9ba70d7579d5c4bb2957 Mon Sep 17 00:00:00 2001 From: Ahmed Raza <85652910+ahmedraza118@users.noreply.github.com> Date: Tue, 4 Mar 2025 11:07:33 +0200 Subject: [PATCH 21/25] DDO.js integration + ESM updates --- .eslintrc | 1 + .github/workflows/ci.yml | 8 - .mocharc.json | 8 + CodeExamples.md | 9 +- ComputeExamples.md | 5 +- package-lock.json | 43315 +++++++----------- package.json | 46 +- scripts/get-metadata.js | 15 +- scripts/typedoc.js | 3 + src/@types/DDO/Service.ts | 4 +- src/@types/File.ts | 2 +- src/@types/index.ts | 40 +- src/config/ConfigHelper.ts | 6 +- src/config/index.ts | 4 +- src/contracts/AccessList.ts | 8 +- src/contracts/AccessListFactory.ts | 12 +- src/contracts/Datatoken.ts | 12 +- src/contracts/Datatoken4.ts | 10 +- src/contracts/Dispenser.ts | 22 +- 
src/contracts/FixedRateExchange.ts | 8 +- src/contracts/NFT.ts | 13 +- src/contracts/NFTFactory.ts | 12 +- src/contracts/Router.ts | 6 +- src/contracts/SmartContract.ts | 8 +- src/contracts/SmartContractWithAddress.ts | 6 +- src/contracts/df/DfRewards.ts | 4 +- src/contracts/df/DfStrategyV1.ts | 4 +- src/contracts/index.ts | 40 +- src/contracts/ve/VeAllocate.ts | 6 +- src/contracts/ve/VeFeeDistributor.ts | 6 +- src/contracts/ve/VeFeeEstimate.ts | 6 +- src/contracts/ve/VeOcean.ts | 6 +- src/index.ts | 10 +- src/services/Aquarius.ts | 11 +- src/services/Provider.ts | 2 +- src/services/index.ts | 4 +- src/utils/Addresses.ts | 4 +- src/utils/Assets.ts | 65 +- src/utils/ContractUtils.ts | 12 +- src/utils/DdoHelpers.ts | 2 +- src/utils/FetchHelper.ts | 2 +- src/utils/Logger.ts | 2 +- src/utils/OrderUtils.ts | 63 +- src/utils/ProviderErrors.ts | 5 +- src/utils/TokenUtils.ts | 12 +- src/utils/index.ts | 27 +- test/.mocharc.json | 5 +- test/config.ts | 4 +- test/integration/CodeExamples.test.ts | 9 +- test/integration/ComputeExamples.test.ts | 5 +- test/integration/ComputeFlow.test.ts | 13 +- test/integration/Provider.test.ts | 8 +- test/integration/PublishEditConsume.test.ts | 8 +- test/integration/PublishFlows.test.ts | 14 +- test/integration/Sapphire.test.ts | 32 +- test/integration/helpers.ts | 7 +- test/tsconfig.json | 22 +- test/unit/AssetUtils.test.ts | 10 +- test/unit/DFRewards.test.ts | 4 +- test/unit/Datatoken.test.ts | 4 +- test/unit/Dispenser.test.ts | 4 +- test/unit/FixedRateExchange.test.ts | 4 +- test/unit/Nft.test.ts | 14 +- test/unit/NftFactory.test.ts | 10 +- test/unit/Router.test.ts | 4 +- test/unit/veOcean.test.ts | 4 +- tsconfig.json | 7 +- 67 files changed, 18122 insertions(+), 25946 deletions(-) create mode 100644 .mocharc.json diff --git a/.eslintrc b/.eslintrc index f93683740..8ab6a94b0 100644 --- a/.eslintrc +++ b/.eslintrc @@ -1,4 +1,5 @@ { + "ignorePatterns": ["dist/**/*"], "parser": "@typescript-eslint/parser", "parserOptions": { "sourceType": 
"module", diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8664b8006..918ee7091 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -117,14 +117,6 @@ jobs: bash -x start_ocean.sh --with-provider2 --no-dashboard --with-c2d 2>&1 > start_ocean.log & - run: npm ci - run: npm run build:metadata - - name: Delete default runner images - run: | - docker image rm -f node:18 - docker image rm -f node:18-alpine - docker image rm -f node:20 - docker image rm -f debian:10 - docker image rm -f debian:11 - docker image rm -f moby/buildkit:latest - name: Wait for contracts deployment and C2D cluster to be ready working-directory: ${{ github.workspace }}/barge run: | diff --git a/.mocharc.json b/.mocharc.json new file mode 100644 index 000000000..5e2f3af26 --- /dev/null +++ b/.mocharc.json @@ -0,0 +1,8 @@ +{ + "require": ["source-map-support/register", "mock-local-storage"], + "extension": ["ts"], + "timeout": 200000, + "bail": true, + "exit": true, + "fullTrace": true +} diff --git a/CodeExamples.md b/CodeExamples.md index ef020a78d..0b9495aa7 100644 --- a/CodeExamples.md +++ b/CodeExamples.md @@ -79,7 +79,7 @@ Start by importing all of the necessary dependencies import fs from 'fs' import { ethers, providers, Signer } from 'ethers' -import { SHA256 } from 'crypto-js' +import crypto from 'crypto-js' import { homedir } from 'os' import { approve, @@ -109,8 +109,9 @@ import { getEventFromTx, DDO, LoggerInstance -} from '@oceanprotocol/lib' +} from '../../src/index.js' ``` +const { SHA256 } = crypto -Let's have 5 minute of compute access -```Typescript - const mytime = new Date() - const computeMinutes = 5 - mytime.setMinutes(mytime.getMinutes() + computeMinutes) - const computeValidUntil = Math.floor(mytime.getTime() / 1000) - - const assets: ComputeAsset[] = [ - { - documentId: resolvedDatasetDdo.id, - serviceId: resolvedDatasetDdo.services[0].id + computeRoutePath = await ProviderInstance.getComputeStartRoutes(providerUrl, true) + if 
(isDefined(computeRoutePath)) { + hasFreeComputeSupport = true + Let's have 5 minute of compute access + ```Typescript + const mytime = new Date() + const computeMinutes = 5 + mytime.setMinutes(mytime.getMinutes() + computeMinutes) + const computeValidUntil = Math.floor(mytime.getTime() / 1000) + + const assets: ComputeAsset[] = [ + { + documentId: resolvedDatasetDdo.id, + serviceId: resolvedDatasetDdo.services[0].id + } + ] + const dtAddressArray = [resolvedDatasetDdo.services[0].datatokenAddress] + const algo: ComputeAlgorithm = { + documentId: resolvedAlgorithmDdo.id, + serviceId: resolvedAlgorithmDdo.services[0].id } - ] - const dtAddressArray = [resolvedDatasetDdo.services[0].datatokenAddress] - const algo: ComputeAlgorithm = { - documentId: resolvedAlgorithmDdo.id, - serviceId: resolvedAlgorithmDdo.services[0].id - } - const providerInitializeComputeResults = await ProviderInstance.initializeCompute( - assets, - algo, - computeEnv.id, - computeValidUntil, - providerUrl, - await consumerAccount.getAddress() - ) -``` - -```Typescript - algo.transferTxId = await handleOrder( - providerInitializeComputeResults.algorithm, - resolvedAlgorithmDdo.services[0].datatokenAddress, - consumerAccount, - computeEnv.consumerAddress, - 0 - ) - for (let i = 0; i < providerInitializeComputeResults.datasets.length; i++) { - assets[i].transferTxId = await handleOrder( - providerInitializeComputeResults.datasets[i], - dtAddressArray[i], + const providerInitializeComputeResults = await ProviderInstance.initializeCompute( + assets, + algo, + computeEnv.id, + computeValidUntil, + providerUrl, + consumerAccount + ) + ``` + + ```Typescript + algo.transferTxId = await handleOrder( + providerInitializeComputeResults.algorithm, + resolvedAlgorithmDdo.services[0].datatokenAddress, consumerAccount, computeEnv.consumerAddress, 0 ) - } + for (let i = 0; i < providerInitializeComputeResults.datasets.length; i++) { + assets[i].transferTxId = await handleOrder( + 
providerInitializeComputeResults.datasets[i], + dtAddressArray[i], + consumerAccount, + computeEnv.consumerAddress, + 0 + ) + } - const computeJobs = await ProviderInstance.computeStart( - providerUrl, - consumerAccount, - computeEnv.id, - assets[0], - algo - ) + const computeJobs = await ProviderInstance.freeComputeStart( + providerUrl, + consumerAccount, + computeEnv.id, + assets, + algo + ) -``` - -Let's save the compute job it, we re going to use later -```Typescript - computeJobId = computeJobs[0].jobId - // eslint-disable-next-line prefer-destructuring - agreementId = computeJobs[0].agreementId + ``` + + Let's save the compute job it, we re going to use later + ```Typescript + computeJobId = computeJobs[0].jobId + // eslint-disable-next-line prefer-destructuring + agreementId = computeJobs[0].agreementId + } else { + assert( + computeRoutePath === null, + 'Route path for free compute is not defined (perhaps because provider does not support it yet?)' + ) + hasFreeComputeSupport = false + } ``` ## 11. 
Check compute status and get download compute results URL ### 11.1 Check compute status -You can also add various delays so you see the various states of the compute job -```Typescript - const jobStatus = await ProviderInstance.computeStatus( - providerUrl, - await consumerAccount.getAddress(), - computeJobId, - agreementId - ) -``` - -Now, let's see the current status of the previously started computer job -```Typescript - console.log('Current status of the compute job: ', jobStatus) + if (!hasFreeComputeSupport) { + assert( + computeRoutePath === null, + 'Compute route path for free compute is not defined (perhaps because provider does not support it yet?)' + ) + } else { + You can also add various delays so you see the various states of the compute job + ```Typescript + const jobStatus = await ProviderInstance.computeStatus( + providerUrl, + await consumerAccount.getAddress(), + computeJobId, + agreementId + ) + ``` + + Now, let's see the current status of the previously started computer job + ```Typescript + console.log('Current status of the compute job: ', jobStatus) + } ``` ### 11.2 Get download compute results URL -```Typescript - await sleep(10000) - const downloadURL = await ProviderInstance.getComputeResultUrl( - providerUrl, - consumerAccount, - computeJobId, - 0 - ) -``` - -Let's check the compute results url for the specified index -```Typescript - console.log(`Compute results URL: ${downloadURL}`) + if (!hasFreeComputeSupport) { + assert( + computeRoutePath === null, + 'Compute route path for free compute is not defined (perhaps because provider does not support it yet?)' + ) + } else { + ```Typescript + await sleep(10000) + const downloadURL = await ProviderInstance.getComputeResultUrl( + providerUrl, + consumerAccount, + computeJobId, + 0 + ) + ``` + + Let's check the compute results url for the specified index + ```Typescript + console.log(`Compute results URL: ${downloadURL}`) + } ``` diff --git a/docs/classes/Provider.md 
b/docs/classes/Provider.md index 9aee359ae..4b26ceeda 100644 --- a/docs/classes/Provider.md +++ b/docs/classes/Provider.md @@ -97,7 +97,7 @@ ___ ### computeStart -▸ **computeStart**(`providerUri`, `consumer`, `computeEnv`, `dataset`, `algorithm`, `signal?`, `additionalDatasets?`, `output?`): `Promise`<[`ComputeJob`](../interfaces/ComputeJob.md) \| [`ComputeJob`](../interfaces/ComputeJob.md)[]\> +▸ **computeStart**(`providerUri`, `signer`, `computeEnv`, `datasets`, `algorithm`, `resources`, `chainId`, `output?`, `freeEnvironment`, `signal?`): `Promise`<[`ComputeJob`](../interfaces/ComputeJob.md) \| [`ComputeJob`](../interfaces/ComputeJob.md)[]\> Instruct the provider to start a compute job @@ -106,13 +106,15 @@ Instruct the provider to start a compute job | Name | Type | Description | | :------ | :------ | :------ | | `providerUri` | `string` | The provider URI. | -| `consumer` | `Signer` | - | +| `signer` | `Signer` | - | The consumer signer/account | `computeEnv` | `string` | The compute environment. | -| `dataset` | [`ComputeAsset`](../interfaces/ComputeAsset.md) | The dataset to start compute on | +| `datasets` | [`ComputeAsset`](../interfaces/ComputeAsset.md) | The dataset to start compute on | | `algorithm` | [`ComputeAlgorithm`](../interfaces/ComputeAlgorithm.md) | The algorithm to start compute with. | -| `signal?` | `AbortSignal` | abort signal | -| `additionalDatasets?` | [`ComputeAsset`](../interfaces/ComputeAsset.md)[] | The additional datasets if that is the case. | +| `resources` | [`ComputeResourceRequest`](../interfaces/ComputeResourcesRequest.md) | The resources to start compute with. | +| `chainId?` | [`number`] | The network for the payments | | `output?` | [`ComputeOutput`](../interfaces/ComputeOutput.md) | The compute job output settings. | +| `signal?` | `AbortSignal` | abort signal | + #### Returns @@ -486,7 +488,7 @@ Initializes the provider for a compute request. | `computeEnv` | `string` | The compute environment. 
| | `validUntil` | `number` | The job expiration date. | | `providerUri` | `string` | The provider URI. | -| `accountId` | `string` | caller address | +| `signer` | `Signer` | caller account | | `signal?` | `AbortSignal` | abort signal | #### Returns @@ -501,6 +503,36 @@ ProviderComputeInitialize data ___ + +### computeStreamableLogs + +▸ **computeStreamableLogs**(`providerUri`, `signer`, `jobId`, `signal?`): `Promise`<`any`\> + +Gets the streamable compute logs. + +#### Parameters + +| Name | Type | Description | +| :------ | :------ | :------ | +| `providerUri` | `string` | The provider URI. | +| `signer` | `Signer` | The signer. | +| `jobId` | `string` | The Job Id. | +| `signal?` | `AbortSignal` | The abort signal. | + + +#### Returns + +`Promise`<`any`\> + +The compute logs. + +#### Defined in + +[services/Provider.ts:908](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/services/Provider.ts#L908) + +___ + + ### inputMatch ▸ `Private` **inputMatch**(`input`, `regexp`, `conversorName`): `Object` diff --git a/docs/interfaces/ComputeEnvFees.md b/docs/interfaces/ComputeEnvFees.md new file mode 100644 index 000000000..af6c71ecc --- /dev/null +++ b/docs/interfaces/ComputeEnvFees.md @@ -0,0 +1,30 @@ +[@oceanprotocol/lib](../README.md) / [Exports](../modules.md) / ComputeEnvFees + +# Interface: ComputeEnvFees + +## Table of contents + +### Properties + +- [feeToken](ComputeEnvFees.md#feeToken) +- [prices](ComputeEnvFees.md#prices) + +## Properties + +### feeToken + +• **feeToken**: `string` + +#### Defined in + +[@types/Compute.ts:42](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L42) + +___ + +### prices + +• **prices**: `ComputeResourcesPricingInfo`[] + +#### Defined in + +[@types/Compute.ts:43](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L43) diff --git a/docs/interfaces/ComputeEnvFeesStructure.md b/docs/interfaces/ComputeEnvFeesStructure.md new file mode 100644 index 000000000..069dc662a 
--- /dev/null +++ b/docs/interfaces/ComputeEnvFeesStructure.md @@ -0,0 +1,20 @@ +[@oceanprotocol/lib](../README.md) / [Exports](../modules.md) / ComputeEnvFeesStructure + +# Interface: ComputeEnvFeesStructure + +## Table of contents + +### Properties + +- [feeToken](ComputeEnvFeesStructure.md#chainId) + +## Properties + +### chainId + +• **chainId**: `ComputeEnvFees` + +#### Defined in + +[@types/Compute.ts:42](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L46) + diff --git a/docs/interfaces/ComputeEnvironment.md b/docs/interfaces/ComputeEnvironment.md index 52997d032..a43ac78a0 100644 --- a/docs/interfaces/ComputeEnvironment.md +++ b/docs/interfaces/ComputeEnvironment.md @@ -7,20 +7,21 @@ ### Properties - [consumerAddress](ComputeEnvironment.md#consumeraddress) -- [cpuNumber](ComputeEnvironment.md#cpunumber) -- [cpuType](ComputeEnvironment.md#cputype) +- [totalCpu](ComputeEnvironment.md#totalCpu) +- [maxCpu](ComputeEnvironment.md#maxCpu) +- [totalRam](ComputeEnvironment.md#totalRam) +- [maxRam](ComputeEnvironment.md#maxRam) +- [maxDisk](ComputeEnvironment.md#maxDisk) - [currentJobs](ComputeEnvironment.md#currentjobs) -- [desc](ComputeEnvironment.md#desc) -- [diskGB](ComputeEnvironment.md#diskgb) -- [gpuNumber](ComputeEnvironment.md#gpunumber) -- [gpuType](ComputeEnvironment.md#gputype) +- [description](ComputeEnvironment.md#description) +- [fees](ComputeEnvironment.md#ComputeEnvFeesStructure) - [id](ComputeEnvironment.md#id) - [lastSeen](ComputeEnvironment.md#lastseen) - [maxJobDuration](ComputeEnvironment.md#maxjobduration) - [maxJobs](ComputeEnvironment.md#maxjobs) -- [priceMin](ComputeEnvironment.md#pricemin) -- [ramGB](ComputeEnvironment.md#ramgb) - [storageExpiry](ComputeEnvironment.md#storageexpiry) +- [lastSeen](ComputeEnvironment.md#lastSeen) +- [free](ComputeEnvironment.md#free) ## Properties @@ -30,27 +31,27 @@ #### Defined in 
-[@types/Compute.ts:21](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L21) +[@types/Compute.ts:68](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L68) ___ -### cpuNumber +### totalCpu -• **cpuNumber**: `number` +• **totalCpu**: `number` #### Defined in -[@types/Compute.ts:11](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L11) +[@types/Compute.ts:58](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L58) ___ -### cpuType +### maxCpu -• **cpuType**: `string` +• **maxCpu**: `number` #### Defined in -[@types/Compute.ts:12](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L12) +[@types/Compute.ts:59](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L59) ___ @@ -60,47 +61,47 @@ ___ #### Defined in -[@types/Compute.ts:19](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L19) +[@types/Compute.ts:66](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L66) ___ -### desc +### description -• **desc**: `string` +• **description**: `string` #### Defined in -[@types/Compute.ts:18](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L18) +[@types/Compute.ts:65](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L65) ___ -### diskGB +### maxDisk -• **diskGB**: `number` +• **maxDisk**: `number` #### Defined in -[@types/Compute.ts:16](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L16) +[@types/Compute.ts:62](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L62) ___ -### gpuNumber +### maxRam -• **gpuNumber**: `number` +• **maxRam**: `number` #### Defined in -[@types/Compute.ts:13](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L13) 
+[@types/Compute.ts:61](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L61) ___ -### gpuType +### totalRam -• **gpuType**: `string` +• **totalRam**: `number` #### Defined in -[@types/Compute.ts:14](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L14) +[@types/Compute.ts:60](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L60) ___ @@ -110,7 +111,7 @@ ___ #### Defined in -[@types/Compute.ts:10](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L10) +[@types/Compute.ts:49](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L49) ___ @@ -120,7 +121,7 @@ ___ #### Defined in -[@types/Compute.ts:24](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L24) +[@types/Compute.ts:71](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L71) ___ @@ -130,7 +131,7 @@ ___ #### Defined in -[@types/Compute.ts:23](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L23) +[@types/Compute.ts:70](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L70) ___ @@ -140,27 +141,27 @@ ___ #### Defined in -[@types/Compute.ts:20](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L20) +[@types/Compute.ts:67](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L67) ___ -### priceMin +### fees -• **priceMin**: `number` +• **fees**: `ComputeEnvFeesStructure` #### Defined in -[@types/Compute.ts:17](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L17) +[@types/Compute.ts:63](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L63) ___ -### ramGB +### free -• **ramGB**: `number` +• **free**: `boolean` #### Defined in -[@types/Compute.ts:15](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L15) 
+[@types/Compute.ts:72](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L72) ___ @@ -170,4 +171,14 @@ ___ #### Defined in -[@types/Compute.ts:22](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L22) +[@types/Compute.ts:69](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L69) + +___ + +### platform + +• **platform**: `RunningPlatform[]` + +#### Defined in + +[@types/Compute.ts:73](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L73) diff --git a/docs/interfaces/ComputeResourcesPricingInfo.md b/docs/interfaces/ComputeResourcesPricingInfo.md new file mode 100644 index 000000000..fcf6d6090 --- /dev/null +++ b/docs/interfaces/ComputeResourcesPricingInfo.md @@ -0,0 +1,30 @@ +[@oceanprotocol/lib](../README.md) / [Exports](../modules.md) / ComputeResourcesPricingInfo + +# Interface: ComputeResourcesPricingInfo + +## Table of contents + +### Properties + +- [type](ComputeResourcesPricingInfo.md#type) +- [price](ComputeResourcesPricingInfo.md#price) + +## Properties + +### type + +• **type**: `ComputeResourceType` + +#### Defined in + +[@types/Compute.ts:38](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L38) + +___ + +### price + +• **price**: `number` + +#### Defined in + +[@types/Compute.ts:39](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L39) diff --git a/docs/interfaces/ComputeResourcesRequest.md b/docs/interfaces/ComputeResourcesRequest.md new file mode 100644 index 000000000..67b7c166c --- /dev/null +++ b/docs/interfaces/ComputeResourcesRequest.md @@ -0,0 +1,30 @@ +[@oceanprotocol/lib](../README.md) / [Exports](../modules.md) / ComputeResourcesRequest + +# Interface: ComputeResourcesRequest + +## Table of contents + +### Properties + +- [id](ComputeResourcesRequest.md#id) +- [amount](ComputeResourcesRequest.md#amount) + +## Properties + +### id + +• **id**: `string` + +#### Defined in + 
+[@types/Compute.ts:63](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L63) + +___ + +### amount + +• **amount**: `number` + +#### Defined in + +[@types/Compute.ts:64](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L64) diff --git a/docs/interfaces/RunningPlatform.md b/docs/interfaces/RunningPlatform.md new file mode 100644 index 000000000..e6add7f78 --- /dev/null +++ b/docs/interfaces/RunningPlatform.md @@ -0,0 +1,30 @@ +[@oceanprotocol/lib](../README.md) / [Exports](../modules.md) / RunningPlatform + +# Interface: RunningPlatform + +## Table of contents + +### Properties + +- [architecture](RunningPlatform.md#architecture) +- [os](RunningPlatform.md#os) + +## Properties + +### architecture + +• **architecture**: `string` + +#### Defined in + +[@types/Compute.ts:31](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L31) + +___ + +### os + +• **os**: `string` + +#### Defined in + +[@types/Compute.ts:32](https://github.com/oceanprotocol/ocean.js/blob/c99bc5c6/src/@types/Compute.ts#L32) diff --git a/package-lock.json b/package-lock.json index 704c1762c..a32f5c221 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@oceanprotocol/lib", - "version": "3.4.6", + "version": "4.0.0-next.1", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@oceanprotocol/lib", - "version": "3.4.6", + "version": "4.0.0-next.1", "license": "Apache-2.0", "dependencies": { "@oasisprotocol/sapphire-paratime": "^1.3.2", @@ -23529,6 +23529,7 @@ "funding": { "url": "https://github.com/sponsors/sindresorhus" } + } } } diff --git a/package.json b/package.json index e780e39d6..31b140e6a 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "@oceanprotocol/lib", "source": "./src/index.ts", - "version": "3.4.6", + "version": "4.0.0-next.1", "description": "JavaScript client library for Ocean Protocol", "main": "./dist/lib.cjs", "umd:main": 
"dist/lib.umd.js", diff --git a/src/@types/Compute.ts b/src/@types/Compute.ts index 81142c7cb..670a166bd 100644 --- a/src/@types/Compute.ts +++ b/src/@types/Compute.ts @@ -6,22 +6,98 @@ export type ComputeResultType = | 'configrationLog' | 'publishLog' +// OLD V1 ComputeEnvironment specs +// export interface ComputeEnvironment { +// id: string +// cpuNumber: number +// cpuType: string +// gpuNumber: number +// gpuType: string +// ramGB: number +// diskGB: number +// priceMin: number +// desc: string +// currentJobs: number +// maxJobs: number +// consumerAddress: string +// storageExpiry: number +// maxJobDuration: number +// lastSeen: number +// free: boolean +// } + +// new V2 C2D Compute Environment specs +export interface RunningPlatform { + architecture: string + os: string +} + +export type ComputeResourceType = 'cpu' | 'memory' | 'storage' + +export interface ComputeResourcesPricingInfo { + type: ComputeResourceType + price: number +} +export interface ComputeEnvFees { + feeToken: string + prices: ComputeResourcesPricingInfo[] +} +export interface ComputeEnvFeesStructure { + [chainId: string]: ComputeEnvFees +} + +export interface ComputeResourceRequest { + id: string + amount: number +} + +export interface ComputeResource { + id: ComputeResourceType + type?: string + kind?: string + total: number // total number of specific resource + min: number // min number of resource needed for a job + max: number // max number of resource for a job + inUse?: number // for display purposes +} + +export interface ComputeEnvironmentFreeOptions { + // only if a compute env exposes free jobs + storageExpiry?: number + maxJobDuration?: number + maxJobs?: number // maximum number of simultaneous free jobs + resources?: ComputeResource[] +} export interface ComputeEnvironment { + // legacy + // cpuNumber: number + // cpuType: string + // gpuNumber: number + // gpuType: string + // ramGB: number + // diskGB: number + // priceMin: number + // totalCpu: number // total cpu 
available for jobs + // maxCpu: number // max cpu for a single job. Imagine a K8 cluster with two nodes, each node with 10 cpus. Total=20, but at most you can allocate 10 cpu for a job + // totalRam: number // total gb of RAM + // maxRam: number // max allocatable GB RAM for a single job. + // maxDisk: number // max GB of disck allocatable for a single job + // currentJobs: number + // lastSeen: number + // legacy id: string - cpuNumber: number - cpuType: string - gpuNumber: number - gpuType: string - ramGB: number - diskGB: number - priceMin: number - desc: string - currentJobs: number - maxJobs: number + description: string consumerAddress: string - storageExpiry: number - maxJobDuration: number - lastSeen: number + storageExpiry?: number // amount of seconds for storage + minJobDuration?: number // min billable seconds for a paid job + maxJobDuration?: number // max duration in seconds for a paid job + maxJobs?: number // maximum number of simultaneous paid jobs + runningJobs: number // amount of running jobs (paid jobs) + runningfreeJobs?: number // amount of running jobs (free jobs) + fees: ComputeEnvFeesStructure + resources?: ComputeResource[] + free?: ComputeEnvironmentFreeOptions + platform?: RunningPlatform } export interface ComputeResult { @@ -59,7 +135,42 @@ export interface ComputeOutput { whitelist?: string[] } +export enum FileObjectType { + URL = 'url', + IPFS = 'ipfs', + ARWEAVE = 'arweave' +} + +export enum EncryptMethod { + AES = 'AES', + ECIES = 'ECIES' +} + +export interface HeadersObject { + [key: string]: string +} + +export interface BaseFileObject { + type: string + encryptedBy?: string + encryptMethod?: EncryptMethod +} + +export interface UrlFileObject extends BaseFileObject { + url: string + method: string + headers?: [HeadersObject] +} + +export interface IpfsFileObject extends BaseFileObject { + hash: string +} + +export interface ArweaveFileObject extends BaseFileObject { + transactionId: string +} export interface ComputeAsset { + 
fileObject?: BaseFileObject // C2D v2 documentId: string serviceId: string transferTxId?: string @@ -67,6 +178,7 @@ export interface ComputeAsset { } export interface ComputeAlgorithm { + fileObject?: BaseFileObject // C2D v2 documentId?: string serviceId?: string meta?: MetadataAlgorithm diff --git a/src/services/Provider.ts b/src/services/Provider.ts index f5f761b9a..5d996a211 100644 --- a/src/services/Provider.ts +++ b/src/services/Provider.ts @@ -16,7 +16,8 @@ import { UserCustomParameters, Ipfs, Smartcontract, - GraphqlQuery + GraphqlQuery, + ComputeResourceRequest } from '../@types' export class Provider { @@ -302,7 +303,7 @@ export class Provider { public async getComputeEnvironments( providerUri: string, signal?: AbortSignal - ): Promise<{ [chainId: number]: ComputeEnvironment[] }> { + ): Promise { const providerEndpoints = await this.getEndpoints(providerUri) const serviceEndpoints = await this.getServiceEndpoints( providerUri, @@ -324,10 +325,11 @@ export class Provider { } if (response?.ok) { const result = response.json() - if (Array.isArray(result)) { - const providerChain: number = providerEndpoints.chainId - return { [providerChain]: result } - } + // chain is not part of response + // if (Array.isArray(result)) { + // const providerChain: number = providerEndpoints.chainId + // return { [providerChain]: result } + // } return result } const resolvedResponse = await response.json() @@ -418,7 +420,7 @@ export class Provider { * @param {AbortSignal} signal abort signal * @return {Promise} ProviderComputeInitialize data */ - public async initializeCompute( + public async initializeComputeV1( assets: ComputeAsset[], algorithm: ComputeAlgorithm, computeEnv: string, @@ -470,6 +472,90 @@ export class Provider { throw new Error(JSON.stringify(resolvedResponse)) } + /** Initializes the provider for a compute request. + * @param {ComputeAsset[]} assets The datasets array to initialize compute request. 
 + * @param {ComputeAlgorithm} algorithm The algorithm to use. + * @param {string} computeEnv The compute environment. + * @param {number} validUntil The job expiration date. + * @param {string} providerUri The provider URI. + * @param {Signer} signer caller address + * @param {AbortSignal} signal abort signal + * @return {Promise} ProviderComputeInitialize data + */ + public async initializeCompute( + assets: ComputeAsset[], + algorithm: ComputeAlgorithm, + computeEnv: string, + validUntil: number, + providerUri: string, + signer: Signer, + signal?: AbortSignal + ): Promise { + const providerEndpoints = await this.getEndpoints(providerUri) + const serviceEndpoints = await this.getServiceEndpoints( + providerUri, + providerEndpoints + ) + + // Diff from V1. We might need a signature to get the files object, especially if dealing with confidential evm and template 4 + // otherwise it can be ignored + const consumerAddress = await signer.getAddress() + const nonce = ( + (await this.getNonce( + providerUri, + consumerAddress, + signal, + providerEndpoints, + serviceEndpoints + )) + 1 + ).toString() + + // same signed message as for start compute (consumer address + did[0] + nonce) + let signatureMessage = consumerAddress + signatureMessage += assets[0].documentId + signatureMessage += nonce + const signature = await this.signProviderRequest(signer, signatureMessage) + + const providerData = { + datasets: assets, + algorithm, + compute: { env: computeEnv, validUntil }, + consumerAddress, + signature + } + const initializeUrl = this.getEndpointURL(serviceEndpoints, 'initializeCompute') + ? 
this.getEndpointURL(serviceEndpoints, 'initializeCompute').urlPath + : null + if (!initializeUrl) return null + + let response + try { + response = await fetch(initializeUrl, { + method: 'POST', + body: JSON.stringify(providerData), + headers: { 'Content-Type': 'application/json' }, + signal + }) + } catch (e) { + LoggerInstance.error('Initialize compute failed: ') + LoggerInstance.error(e) + throw new Error('ComputeJob cannot be initialized') + } + if (response?.ok) { + const params = await response.json() + return params + } + const resolvedResponse = await response.json() + LoggerInstance.error( + 'Initialize compute failed: ', + response.status, + response.statusText, + resolvedResponse + ) + LoggerInstance.error('Payload was:', providerData) + throw new Error(JSON.stringify(resolvedResponse)) + } + /** * Gets the download URL. * @param {string} did - The DID. @@ -530,7 +616,7 @@ export class Provider { return consumeUrl } - /** Instruct the provider to start a compute job + /** Instruct the provider to start a compute job (Old C2D V1) Kept for now, for backwards compatibility * @param {string} providerUri The provider URI. * @param {Signer} signer The consumer signer object. * @param {string} computeEnv The compute environment. @@ -540,8 +626,9 @@ export class Provider { * @param {ComputeAsset[]} additionalDatasets The additional datasets if that is the case. * @param {ComputeOutput} output The compute job output settings. * @return {Promise} The compute job or jobs. + * @deprecated Use {@link computeStart} instead. */ - public async computeStart( + public async computeStartV1( providerUri: string, consumer: Signer, computeEnv: string, @@ -613,6 +700,305 @@ export class Provider { return null } + /** Instruct the provider to start a PAYED compute job (new C2D V2) + * @param {string} providerUri The provider URI. + * @param {Signer} signer The consumer signer object. + * @param {string} computeEnv The compute environment. 
 + * @param {ComputeAsset} datasets The dataset to start compute on + additionalDatasets (the additional datasets if that is the case) + * @param {ComputeAlgorithm} algorithm The algorithm to start compute with. + * @param {ComputeResourceRequest} resources The resources to start compute job with. + * @param {number} chainId The chain used to do payments + * @param {ComputeOutput} output The compute job output settings. + * @param {boolean} freeEnvironment is it a free environment? uses different route + * @param {AbortSignal} signal abort signal + * @return {Promise} The compute job or jobs. + */ + public async computeStart( + providerUri: string, + consumer: Signer, + computeEnv: string, + datasets: ComputeAsset[], + algorithm: ComputeAlgorithm, + resources?: ComputeResourceRequest[], + chainId?: number, // network used by payment (only for paid compute jobs) + output?: ComputeOutput, + signal?: AbortSignal + ): Promise { + console.log('called new compute start method...') + console.log('datasets: ', datasets) + console.log('algorithm: ', algorithm) + const providerEndpoints = await this.getEndpoints(providerUri) + const serviceEndpoints = await this.getServiceEndpoints( + providerUri, + providerEndpoints + ) + + const computeStartUrl = this.getEndpointURL(serviceEndpoints, 'computeStart') + ? 
this.getEndpointURL(serviceEndpoints, 'computeStart').urlPath + : null + + if (!computeStartUrl) { + LoggerInstance.error( + 'Compute start failed: Cannot get proper computeStart route (perhaps not implemented on provider?)' + ) + return null + } + + const consumerAddress = await consumer.getAddress() + const nonce = ( + (await this.getNonce( + providerUri, + consumerAddress, + signal, + providerEndpoints, + serviceEndpoints + )) + 1 + ).toString() + + let signatureMessage = consumerAddress + signatureMessage += datasets[0].documentId + signatureMessage += nonce + const signature = await this.signProviderRequest(consumer, signatureMessage) + const payload = Object() + payload.consumerAddress = consumerAddress + payload.signature = signature + payload.nonce = nonce + payload.environment = computeEnv + payload.resources = resources + payload.chainId = chainId + // kept for backwards compatibility (tests running against existing provider) + payload.dataset = datasets[0] + // new field for C2D v2 + payload.datasets = datasets + payload.algorithm = algorithm + // if (additionalDatasets) payload.additionalDatasets = additionalDatasets + payload.output = output + let response + try { + response = await fetch(computeStartUrl, { + method: 'POST', + body: JSON.stringify(payload), + headers: { 'Content-Type': 'application/json' }, + signal + }) + } catch (e) { + LoggerInstance.error('Compute start failed:') + LoggerInstance.error(e) + LoggerInstance.error('Payload was:', payload) + throw new Error('HTTP request failed calling Provider') + } + if (response?.ok) { + const params = await response.json() + return params + } + LoggerInstance.error( + 'Compute start failed: ', + response.status, + response.statusText, + await response.json() + ) + LoggerInstance.error('Payload was:', payload) + return null + } + + /** Instruct the provider to start a FREE compute job (new C2D V2) + * @param {string} providerUri The provider URI. 
+ * @param {Signer} signer The consumer signer object. + * @param {string} computeEnv The compute environment. + * @param {ComputeAsset} datasets The dataset to start compute on + additionalDatasets (the additional datasets if that is the case) + * @param {ComputeAlgorithm} algorithm The algorithm to start compute with. + * @param {ComputeResourceRequest} resources The resources to start compute job with. + * @param {ComputeOutput} output The compute job output settings. + * @param {AbortSignal} signal abort signal + * @return {Promise} The compute job or jobs. + */ + public async freeComputeStart( + providerUri: string, + consumer: Signer, + computeEnv: string, + datasets: ComputeAsset[], + algorithm: ComputeAlgorithm, + resources?: ComputeResourceRequest[], + output?: ComputeOutput, + signal?: AbortSignal + ): Promise { + console.log('called new free compute start method...') + console.log('datasets: ', datasets) + console.log('algorithm: ', algorithm) + const providerEndpoints = await this.getEndpoints(providerUri) + const serviceEndpoints = await this.getServiceEndpoints( + providerUri, + providerEndpoints + ) + + const computeStartUrl = this.getEndpointURL(serviceEndpoints, 'freeCompute') + ? 
this.getEndpointURL(serviceEndpoints, 'freeCompute').urlPath + : null + + if (!computeStartUrl) { + LoggerInstance.error( + 'Compute start failed: Cannot get proper computeStart route (perhaps not implemented on provider?)' + ) + return null + } + + const consumerAddress = await consumer.getAddress() + const nonce = ( + (await this.getNonce( + providerUri, + consumerAddress, + signal, + providerEndpoints, + serviceEndpoints + )) + 1 + ).toString() + + const signatureMessage = nonce // datasets[0].documentId + console.log('signatureMessage: ', signatureMessage) + const signature = await this.signProviderRequest(consumer, signatureMessage) + const payload = Object() + payload.consumerAddress = consumerAddress + payload.signature = signature + payload.nonce = nonce + payload.environment = computeEnv + payload.resources = resources + // kept for backwards compatibility (tests running against existing provider) + payload.dataset = datasets[0] + // new field for C2D v2 + payload.datasets = datasets + payload.algorithm = algorithm + // if (additionalDatasets) payload.additionalDatasets = additionalDatasets + payload.output = output + let response + try { + response = await fetch(computeStartUrl, { + method: 'POST', + body: JSON.stringify(payload), + headers: { 'Content-Type': 'application/json' }, + signal + }) + } catch (e) { + LoggerInstance.error('Compute start failed:') + LoggerInstance.error(e) + LoggerInstance.error('Payload was:', payload) + throw new Error('HTTP request failed calling Provider') + } + if (response?.ok) { + const params = await response.json() + return params + } + LoggerInstance.error( + 'Compute start failed: ', + response.status, + response.statusText, + await response.json() + ) + LoggerInstance.error('Payload was:', payload) + return null + } + + /** + * + * @param providerUri provider URL + * @param consumer consumer + * @param jobId jobId + * @param signal abort signal + * @returns logs response + */ + public async computeStreamableLogs( + 
providerUri: string, + signer: Signer, + jobId: string, + signal?: AbortSignal + ): Promise { + const providerEndpoints = await this.getEndpoints(providerUri) + const serviceEndpoints = await this.getServiceEndpoints( + providerUri, + providerEndpoints + ) + + const computeStreamableLogs = this.getEndpointURL( + serviceEndpoints, + 'computeStreamableLogs' + ) + ? this.getEndpointURL(serviceEndpoints, 'computeStreamableLogs').urlPath + : null + + if (!computeStreamableLogs) { + LoggerInstance.error( + 'Compute start failed: Cannot get proper computeStreamableLogs route (perhaps not implemented on provider?)' + ) + return null + } + const consumerAddress = await signer.getAddress() + const nonce = ( + (await this.getNonce( + providerUri, + consumerAddress, + signal, + providerEndpoints, + serviceEndpoints + )) + 1 + ).toString() + + let url = `?consumerAddress=${consumerAddress}` + url += `&jobId=${jobId}` + url += `&nonce=${nonce}` + + // TODO: define teh signature to use (not implemented yet on node) + const signatureMessage = nonce + const signature = await this.signProviderRequest(signer, signatureMessage) + url += `&signature=${signature}` + + let response + try { + response = await fetch(computeStreamableLogs + url, { + method: 'GET', + headers: { 'Content-Type': 'application/json' }, + signal + }) + console.log('Raw response:', response) + } catch (e) { + LoggerInstance.error('computeStreamableLogs failed:') + LoggerInstance.error(e) + throw new Error('HTTP request failed calling Provider') + } + if (response?.ok || response?.status === 200) { + // do not handle the response here + console.log('Response body:', response.body) + return response.body + } + LoggerInstance.error( + 'computeStreamableLogs failed: ', + response.status, + response.statusText, + await response.json() + ) + return null + } + + public async getComputeStartRoutes( + providerUri: string, + isFreeCompute: boolean = false + ): Promise { + const providerEndpoints = await 
this.getEndpoints(providerUri) + const serviceEndpoints = await this.getServiceEndpoints( + providerUri, + providerEndpoints + ) + let computeStartUrl = null + if (isFreeCompute) { + computeStartUrl = this.getEndpointURL(serviceEndpoints, 'freeCompute') + ? this.getEndpointURL(serviceEndpoints, 'freeCompute').urlPath + : null + } else { + computeStartUrl = this.getEndpointURL(serviceEndpoints, 'computeStart') + ? this.getEndpointURL(serviceEndpoints, 'computeStart').urlPath + : null + } + return computeStartUrl + } + /** Instruct the provider to Stop the execution of a to stop a compute job. * @param {string} did the asset did * @param {string} consumerAddress The consumer address. @@ -661,7 +1047,7 @@ export class Provider { const signature = await this.signProviderRequest(signer, signatureMessage) const payload = Object() payload.signature = signature - payload.agreementId = this.noZeroX(agreementId) + payload.agreementId = agreementId // this.noZeroX(agreementId) #https://github.com/oceanprotocol/ocean.js/issues/1892 payload.consumerAddress = consumerAddress payload.nonce = nonce if (jobId) payload.jobId = jobId @@ -722,7 +1108,7 @@ export class Provider { : null let url = `?consumerAddress=${consumerAddress}` - url += (agreementId && `&agreementId=${this.noZeroX(agreementId)}`) || '' + url += (agreementId && `&agreementId=${agreementId}`) || '' // ${this.noZeroX(agreementId)} #https://github.com/oceanprotocol/ocean.js/issues/1892 url += (jobId && `&jobId=${jobId}`) || '' if (!computeStatusUrl) return null @@ -847,7 +1233,7 @@ export class Provider { signatureMessage += nonce const signature = await this.signProviderRequest(consumer, signatureMessage) const payload = Object() - payload.documentId = this.noZeroX(did) + payload.documentId = did // this.noZeroX(did) #https://github.com/oceanprotocol/ocean.js/issues/1892 payload.consumerAddress = await consumer.getAddress() payload.jobId = jobId if (signature) payload.signature = signature diff --git 
a/src/utils/Assets.ts b/src/utils/Assets.ts index f9a34421b..65c377c12 100644 --- a/src/utils/Assets.ts +++ b/src/utils/Assets.ts @@ -17,7 +17,7 @@ import AccessListFactory from '@oceanprotocol/contracts/artifacts/contracts/acce import ERC20Template4 from '@oceanprotocol/contracts/artifacts/contracts/templates/ERC20Template4.sol/ERC20Template4.json' import { calculateActiveTemplateIndex } from './Addresses.js' import { DDOManager } from '@oceanprotocol/ddo-js' -// import * as hre from 'hardhat' +import { FileObjectType } from '../@types' export const DEVELOPMENT_CHAIN_ID = 8996 // template address OR templateId @@ -114,6 +114,15 @@ export async function createAsset( mpFeeAddress: ZERO_ADDRESS } + if ( + !assetUrl.type || + ![FileObjectType.ARWEAVE, FileObjectType.IPFS, FileObjectType.URL].includes( + assetUrl.type.toLowerCase() + ) + ) { + console.log('Missing or invalid files object type, defaulting to "url"') + assetUrl.type = FileObjectType.URL + } // include fileObject in the DT constructor if (config.sdk === 'oasis') { datatokenParams.filesObject = assetUrl diff --git a/src/utils/General.ts b/src/utils/General.ts index 71e441a25..4e5cfc00a 100644 --- a/src/utils/General.ts +++ b/src/utils/General.ts @@ -7,3 +7,7 @@ export async function sleep(ms: number) { setTimeout(resolve, ms) }) } + +export function isDefined(something: any): boolean { + return something !== undefined && something !== null +} diff --git a/test/integration/ComputeExamples.test.ts b/test/integration/ComputeExamples.test.ts index 8f89c9141..a1bf662a4 100644 --- a/test/integration/ComputeExamples.test.ts +++ b/test/integration/ComputeExamples.test.ts @@ -143,7 +143,8 @@ import { configHelperNetworks, ConfigHelper, getEventFromTx, - amountToUnits + amountToUnits, + isDefined } from '../../src/index.js' /// ``` import crypto from 'crypto-js' @@ -276,6 +277,9 @@ let resolvedAlgorithmDdo: DDO let computeJobId: string let agreementId: string + +let computeRoutePath: string +let 
hasFreeComputeSupport: boolean /// ``` /// ### 4.3 Helper methods @@ -595,7 +599,7 @@ describe('Compute-to-data example tests', async () => { /// let's check the free compute environment /// ```Typescript const computeEnv = computeEnvs[resolvedDatasetDdo.chainId].find( - (ce) => ce.priceMin === 0 + (ce) => ce.priceMin === 0 || isDefined(ce.free) ) console.log('Free compute environment = ', computeEnv) /// ``` @@ -603,111 +607,135 @@ describe('Compute-to-data example tests', async () => { assert(computeEnv, 'Cannot find the free compute env') /// --> - /// Let's have 5 minute of compute access - /// ```Typescript - const mytime = new Date() - const computeMinutes = 5 - mytime.setMinutes(mytime.getMinutes() + computeMinutes) - const computeValidUntil = Math.floor(mytime.getTime() / 1000) - - const assets: ComputeAsset[] = [ - { - documentId: resolvedDatasetDdo.id, - serviceId: resolvedDatasetDdo.services[0].id + computeRoutePath = await ProviderInstance.getComputeStartRoutes(providerUrl, true) + if (isDefined(computeRoutePath)) { + hasFreeComputeSupport = true + /// Let's have 5 minute of compute access + /// ```Typescript + const mytime = new Date() + const computeMinutes = 5 + mytime.setMinutes(mytime.getMinutes() + computeMinutes) + const computeValidUntil = Math.floor(mytime.getTime() / 1000) + + const assets: ComputeAsset[] = [ + { + documentId: resolvedDatasetDdo.id, + serviceId: resolvedDatasetDdo.services[0].id + } + ] + const dtAddressArray = [resolvedDatasetDdo.services[0].datatokenAddress] + const algo: ComputeAlgorithm = { + documentId: resolvedAlgorithmDdo.id, + serviceId: resolvedAlgorithmDdo.services[0].id } - ] - const dtAddressArray = [resolvedDatasetDdo.services[0].datatokenAddress] - const algo: ComputeAlgorithm = { - documentId: resolvedAlgorithmDdo.id, - serviceId: resolvedAlgorithmDdo.services[0].id - } - const providerInitializeComputeResults = await ProviderInstance.initializeCompute( - assets, - algo, - computeEnv.id, - computeValidUntil, - 
providerUrl, - await consumerAccount.getAddress() - ) - /// ``` - /// - /// ```Typescript - algo.transferTxId = await handleOrder( - providerInitializeComputeResults.algorithm, - resolvedAlgorithmDdo.services[0].datatokenAddress, - consumerAccount, - computeEnv.consumerAddress, - 0 - ) - for (let i = 0; i < providerInitializeComputeResults.datasets.length; i++) { - assets[i].transferTxId = await handleOrder( - providerInitializeComputeResults.datasets[i], - dtAddressArray[i], + const providerInitializeComputeResults = await ProviderInstance.initializeCompute( + assets, + algo, + computeEnv.id, + computeValidUntil, + providerUrl, + consumerAccount + ) + /// ``` + /// + /// ```Typescript + algo.transferTxId = await handleOrder( + providerInitializeComputeResults.algorithm, + resolvedAlgorithmDdo.services[0].datatokenAddress, consumerAccount, computeEnv.consumerAddress, 0 ) - } + for (let i = 0; i < providerInitializeComputeResults.datasets.length; i++) { + assets[i].transferTxId = await handleOrder( + providerInitializeComputeResults.datasets[i], + dtAddressArray[i], + consumerAccount, + computeEnv.consumerAddress, + 0 + ) + } - const computeJobs = await ProviderInstance.computeStart( - providerUrl, - consumerAccount, - computeEnv.id, - assets[0], - algo - ) + const computeJobs = await ProviderInstance.freeComputeStart( + providerUrl, + consumerAccount, + computeEnv.id, + assets, + algo + ) - /// ``` - /// - /// Let's save the compute job it, we re going to use later - /// ```Typescript - computeJobId = computeJobs[0].jobId - // eslint-disable-next-line prefer-destructuring - agreementId = computeJobs[0].agreementId + /// ``` + /// + /// Let's save the compute job it, we re going to use later + /// ```Typescript + computeJobId = computeJobs[0].jobId + // eslint-disable-next-line prefer-destructuring + agreementId = computeJobs[0].agreementId + } else { + assert( + computeRoutePath === null, + 'Route path for free compute is not defined (perhaps because provider does 
not support it yet?)' + ) + hasFreeComputeSupport = false + } }) /// /// ``` /// ## 11. Check compute status and get download compute results URL it('11.1 Check compute status', async () => { - /// You can also add various delays so you see the various states of the compute job - /// ```Typescript - const jobStatus = await ProviderInstance.computeStatus( - providerUrl, - await consumerAccount.getAddress(), - computeJobId, - agreementId - ) - /// ``` - /// - /// Now, let's see the current status of the previously started computer job - /// ```Typescript - console.log('Current status of the compute job: ', jobStatus) + if (!hasFreeComputeSupport) { + assert( + computeRoutePath === null, + 'Compute route path for free compute is not defined (perhaps because provider does not support it yet?)' + ) + } else { + /// You can also add various delays so you see the various states of the compute job + /// ```Typescript + const jobStatus = await ProviderInstance.computeStatus( + providerUrl, + await consumerAccount.getAddress(), + computeJobId, + agreementId + ) + /// ``` + /// + /// Now, let's see the current status of the previously started computer job + /// ```Typescript + console.log('Current status of the compute job: ', jobStatus) + } }) /// /// ``` it('11.2 Get download compute results URL', async () => { - /// ```Typescript - await sleep(10000) - const downloadURL = await ProviderInstance.getComputeResultUrl( - providerUrl, - consumerAccount, - computeJobId, - 0 - ) - /// ``` - /// - /// Let's check the compute results url for the specified index - /// ```Typescript - console.log(`Compute results URL: ${downloadURL}`) + if (!hasFreeComputeSupport) { + assert( + computeRoutePath === null, + 'Compute route path for free compute is not defined (perhaps because provider does not support it yet?)' + ) + } else { + /// ```Typescript + await sleep(10000) + const downloadURL = await ProviderInstance.getComputeResultUrl( + providerUrl, + consumerAccount, + computeJobId, + 0 + 
) + /// ``` + /// + /// Let's check the compute results url for the specified index + /// ```Typescript + console.log(`Compute results URL: ${downloadURL}`) + } }) /// /// ``` }) /// diff --git a/test/integration/ComputeFlow.test.ts b/test/integration/ComputeFlow.test.ts index 8e8fbc5a5..a15e68bb0 100644 --- a/test/integration/ComputeFlow.test.ts +++ b/test/integration/ComputeFlow.test.ts @@ -7,7 +7,8 @@ import { Aquarius, Datatoken, sendTx, - amountToUnits + amountToUnits, + isDefined } from '../../src/index.js' import { ComputeJob, @@ -46,6 +47,8 @@ let paidEnvDatasetTxId let paidEnvAlgoTxId let computeValidUntil +let freeComputeRouteSupport = null + const assetUrl: Files = { datatokenAddress: '0x0', nftAddress: '0x0', @@ -413,7 +416,7 @@ describe('Compute flow tests', async () => { // we choose the free env const computeEnv = computeEnvs[resolvedDdoWith5mTimeout.chainId].find( - (ce) => ce.priceMin === 0 + (ce) => ce.priceMin === 0 || isDefined(ce.free) ) assert(computeEnv, 'Cannot find the free compute env') @@ -435,7 +438,7 @@ describe('Compute flow tests', async () => { computeEnv.id, computeValidUntil, providerUrl, - await consumerAccount.getAddress() + consumerAccount ) assert( !('error' in providerInitializeComputeResults.algorithm), @@ -462,83 +465,106 @@ describe('Compute flow tests', async () => { config ) } - const computeJobs = await ProviderInstance.computeStart( + + freeComputeRouteSupport = await ProviderInstance.getComputeStartRoutes( providerUrl, - consumerAccount, - computeEnv.id, - assets[0], - algo + true ) - freeEnvDatasetTxId = assets[0].transferTxId - freeEnvAlgoTxId = algo.transferTxId - assert(computeJobs, 'Cannot start compute job') - freeComputeJobId = computeJobs[0].jobId + if (freeComputeRouteSupport) { + const computeJobs = await ProviderInstance.freeComputeStart( + providerUrl, + consumerAccount, + computeEnv.id, + assets, + algo + ) + freeEnvDatasetTxId = assets[0].transferTxId + freeEnvAlgoTxId = algo.transferTxId + 
assert(computeJobs, 'Cannot start compute job') + freeComputeJobId = computeJobs[0].jobId + + delay(100000) + + const jobFinished = await waitTillJobEnds() + console.log('Job finished: ', jobFinished) + } else { + assert( + freeComputeRouteSupport === null, + 'Cannot start free compute job. provider at ' + + providerUrl + + ' does not implement freeCompute route' + ) + } }) - delay(100000) - - const jobFinished = await waitTillJobEnds() - console.log('Job finished: ', jobFinished) - // move to start orders with initial txid's and provider fees it('should restart a computeJob without paying anything, because order is valid and providerFees are still valid', async () => { // we choose the free env - const computeEnv = computeEnvs[resolvedDdoWith5mTimeout.chainId].find( - (ce) => ce.priceMin === 0 - ) - assert(computeEnv, 'Cannot find the free compute env') - - const assets: ComputeAsset[] = [ - { - documentId: resolvedDdoWith5mTimeout.id, - serviceId: resolvedDdoWith5mTimeout.services[0].id, - transferTxId: freeEnvDatasetTxId + if (freeComputeRouteSupport) { + const computeEnv = computeEnvs[resolvedDdoWith5mTimeout.chainId].find( + (ce) => ce.priceMin === 0 || isDefined(ce.free) + ) + assert(computeEnv, 'Cannot find the free compute env') + + const assets: ComputeAsset[] = [ + { + documentId: resolvedDdoWith5mTimeout.id, + serviceId: resolvedDdoWith5mTimeout.services[0].id, + transferTxId: freeEnvDatasetTxId + } + ] + const algo: ComputeAlgorithm = { + documentId: resolvedAlgoDdoWith5mTimeout.id, + serviceId: resolvedAlgoDdoWith5mTimeout.services[0].id, + transferTxId: freeEnvAlgoTxId } - ] - const algo: ComputeAlgorithm = { - documentId: resolvedAlgoDdoWith5mTimeout.id, - serviceId: resolvedAlgoDdoWith5mTimeout.services[0].id, - transferTxId: freeEnvAlgoTxId + providerInitializeComputeResults = await ProviderInstance.initializeCompute( + assets, + algo, + computeEnv.id, + computeValidUntil, + providerUrl, + consumerAccount + ) + assert( + 
providerInitializeComputeResults.algorithm.validOrder, + 'We should have a valid order for algorithm' + ) + assert( + !providerInitializeComputeResults.algorithm.providerFee, + 'We should not pay providerFees again for algorithm' + ) + assert( + providerInitializeComputeResults.datasets[0].validOrder, + 'We should have a valid order for dataset' + ) + assert( + !providerInitializeComputeResults.datasets[0].providerFee, + 'We should not pay providerFees again for dataset' + ) + algo.transferTxId = providerInitializeComputeResults.algorithm.validOrder + assets[0].transferTxId = providerInitializeComputeResults.datasets[0].validOrder + assert( + algo.transferTxId === freeEnvAlgoTxId && + assets[0].transferTxId === freeEnvDatasetTxId, + 'We should use the same orders, because no fess must be paid' + ) + const computeJobs = await ProviderInstance.computeStart( + providerUrl, + consumerAccount, + computeEnv.id, + assets, + algo + ) + assert(computeJobs, 'Cannot start compute job') + } else { + assert( + freeComputeRouteSupport === null, + 'Cannot start free compute job. 
provider at ' + + providerUrl + + ' does not implement freeCompute route' + ) } - providerInitializeComputeResults = await ProviderInstance.initializeCompute( - assets, - algo, - computeEnv.id, - computeValidUntil, - providerUrl, - await consumerAccount.getAddress() - ) - assert( - providerInitializeComputeResults.algorithm.validOrder, - 'We should have a valid order for algorithm' - ) - assert( - !providerInitializeComputeResults.algorithm.providerFee, - 'We should not pay providerFees again for algorithm' - ) - assert( - providerInitializeComputeResults.datasets[0].validOrder, - 'We should have a valid order for dataset' - ) - assert( - !providerInitializeComputeResults.datasets[0].providerFee, - 'We should not pay providerFees again for dataset' - ) - algo.transferTxId = providerInitializeComputeResults.algorithm.validOrder - assets[0].transferTxId = providerInitializeComputeResults.datasets[0].validOrder - assert( - algo.transferTxId === freeEnvAlgoTxId && - assets[0].transferTxId === freeEnvDatasetTxId, - 'We should use the same orders, because no fess must be paid' - ) - const computeJobs = await ProviderInstance.computeStart( - providerUrl, - consumerAccount, - computeEnv.id, - assets[0], - algo - ) - assert(computeJobs, 'Cannot start compute job') }) // // moving to paid environments @@ -546,7 +572,7 @@ describe('Compute flow tests', async () => { it('should start a computeJob on a paid environment', async () => { // we choose the paid env const computeEnv = computeEnvs[resolvedDdoWith5mTimeout.chainId].find( - (ce) => ce.priceMin !== 0 + (ce) => ce.priceMin !== 0 || !isDefined(ce.free) ) assert(computeEnv, 'Cannot find the paid compute env') @@ -568,7 +594,7 @@ describe('Compute flow tests', async () => { computeEnv.id, computeValidUntil, providerUrl, - await consumerAccount.getAddress() + consumerAccount ) assert( !('error' in providerInitializeComputeResults.algorithm), @@ -599,7 +625,7 @@ describe('Compute flow tests', async () => { providerUrl, 
consumerAccount, computeEnv.id, - assets[0], + assets, algo ) paidEnvDatasetTxId = assets[0].transferTxId @@ -623,9 +649,9 @@ describe('Compute flow tests', async () => { it('should restart a computeJob on paid environment, without paying anything, because order is valid and providerFees are still valid', async () => { // we choose the paid env const computeEnv = computeEnvs[resolvedDdoWith5mTimeout.chainId].find( - (ce) => ce.priceMin !== 0 + (ce) => ce.priceMin !== 0 || !isDefined(ce.free) ) - assert(computeEnv, 'Cannot find the free compute env') + assert(computeEnv, 'Cannot find the paid compute env') const assets: ComputeAsset[] = [ { @@ -646,7 +672,7 @@ describe('Compute flow tests', async () => { computeEnv.id, computeValidUntil, providerUrl, - await consumerAccount.getAddress() + consumerAccount ) assert( providerInitializeComputeResults.algorithm.validOrder, @@ -675,7 +701,7 @@ describe('Compute flow tests', async () => { providerUrl, consumerAccount, computeEnv.id, - assets[0], + assets, algo ) assert(computeJobs, 'Cannot start compute job') @@ -691,96 +717,105 @@ describe('Compute flow tests', async () => { }) it('should start a computeJob using the free environment, by paying only providerFee (reuseOrder)', async () => { - // we choose the free env - const computeEnv = computeEnvs[resolvedDdoWith5mTimeout.chainId].find( - (ce) => ce.priceMin === 0 - ) - assert(computeEnv, 'Cannot find the free compute env') - - const assets: ComputeAsset[] = [ - { - documentId: resolvedDdoWith5mTimeout.id, - serviceId: resolvedDdoWith5mTimeout.services[0].id, - transferTxId: freeEnvDatasetTxId + if (freeComputeRouteSupport) { + // we choose the free env + const computeEnv = computeEnvs[resolvedDdoWith5mTimeout.chainId].find( + (ce) => ce.priceMin === 0 || isDefined(ce.free) + ) + assert(computeEnv, 'Cannot find the free compute env') + + const assets: ComputeAsset[] = [ + { + documentId: resolvedDdoWith5mTimeout.id, + serviceId: resolvedDdoWith5mTimeout.services[0].id, 
+ transferTxId: freeEnvDatasetTxId + } + ] + const dtAddressArray = [resolvedDdoWith5mTimeout.services[0].datatokenAddress] + const algo: ComputeAlgorithm = { + documentId: resolvedAlgoDdoWith5mTimeout.id, + serviceId: resolvedAlgoDdoWith5mTimeout.services[0].id, + transferTxId: freeEnvAlgoTxId } - ] - const dtAddressArray = [resolvedDdoWith5mTimeout.services[0].datatokenAddress] - const algo: ComputeAlgorithm = { - documentId: resolvedAlgoDdoWith5mTimeout.id, - serviceId: resolvedAlgoDdoWith5mTimeout.services[0].id, - transferTxId: freeEnvAlgoTxId - } - providerInitializeComputeResults = await ProviderInstance.initializeCompute( - assets, - algo, - computeEnv.id, - computeValidUntil, - providerUrl, - await consumerAccount.getAddress() - ) - assert( - providerInitializeComputeResults.algorithm.validOrder, - 'We should have a valid order for algorithm' - ) - assert( - providerInitializeComputeResults.datasets[0].validOrder, - 'We should have a valid order for dataset' - ) + providerInitializeComputeResults = await ProviderInstance.initializeCompute( + assets, + algo, + computeEnv.id, + computeValidUntil, + providerUrl, + consumerAccount + ) + assert( + providerInitializeComputeResults.algorithm.validOrder, + 'We should have a valid order for algorithm' + ) + assert( + providerInitializeComputeResults.datasets[0].validOrder, + 'We should have a valid order for dataset' + ) - assert( - providerInitializeComputeResults.algorithm.providerFee || - providerInitializeComputeResults.datasets[0].providerFee, - 'We should pay providerFees again for algorithm or dataset. Cannot have empty for both' - ) + assert( + providerInitializeComputeResults.algorithm.providerFee || + providerInitializeComputeResults.datasets[0].providerFee, + 'We should pay providerFees again for algorithm or dataset. 
Cannot have empty for both' + ) - assert( - !('error' in providerInitializeComputeResults.algorithm), - 'Cannot order algorithm' - ) - algo.transferTxId = await handleComputeOrder( - providerInitializeComputeResults.algorithm, - resolvedAlgoDdoWith5mTimeout.services[0].datatokenAddress, - consumerAccount, - computeEnv.consumerAddress, - 0, - datatoken, - config - ) - for (let i = 0; i < providerInitializeComputeResults.datasets.length; i++) { - assets[i].transferTxId = await handleComputeOrder( - providerInitializeComputeResults.datasets[i], - dtAddressArray[i], + assert( + !('error' in providerInitializeComputeResults.algorithm), + 'Cannot order algorithm' + ) + algo.transferTxId = await handleComputeOrder( + providerInitializeComputeResults.algorithm, + resolvedAlgoDdoWith5mTimeout.services[0].datatokenAddress, consumerAccount, computeEnv.consumerAddress, 0, datatoken, config ) + for (let i = 0; i < providerInitializeComputeResults.datasets.length; i++) { + assets[i].transferTxId = await handleComputeOrder( + providerInitializeComputeResults.datasets[i], + dtAddressArray[i], + consumerAccount, + computeEnv.consumerAddress, + 0, + datatoken, + config + ) + } + assert( + algo.transferTxId !== freeEnvAlgoTxId || + assets[0].transferTxId !== freeEnvDatasetTxId, + 'We should not use the same orders, because providerFee must be paid' + ) + const computeJobs = await ProviderInstance.computeStart( + providerUrl, + consumerAccount, + computeEnv.id, + assets, + algo + ) + // freeEnvDatasetTxId = assets[0].transferTxId + // freeEnvAlgoTxId = algo.transferTxId + assert(computeJobs, 'Cannot start compute job') + } else { + assert( + freeComputeRouteSupport === null, + 'Cannot start free compute job. 
provider at ' + + providerUrl + + ' does not implement freeCompute route' + ) } - assert( - algo.transferTxId !== freeEnvAlgoTxId || - assets[0].transferTxId !== freeEnvDatasetTxId, - 'We should not use the same orders, because providerFee must be paid' - ) - const computeJobs = await ProviderInstance.computeStart( - providerUrl, - consumerAccount, - computeEnv.id, - assets[0], - algo - ) - // freeEnvDatasetTxId = assets[0].transferTxId - // freeEnvAlgoTxId = algo.transferTxId - assert(computeJobs, 'Cannot start compute job') }) it('should start a computeJob using the paid environment, by paying only providerFee (reuseOrder)', async () => { // we choose the paid env const computeEnv = computeEnvs[resolvedDdoWith5mTimeout.chainId].find( - (ce) => ce.priceMin !== 0 + (ce) => ce.priceMin !== 0 || !isDefined(ce.free) ) - assert(computeEnv, 'Cannot find the free compute env') + assert(computeEnv, 'Cannot find the paid compute env') const assets: ComputeAsset[] = [ { @@ -802,7 +837,7 @@ describe('Compute flow tests', async () => { computeEnv.id, computeValidUntil, providerUrl, - await consumerAccount.getAddress() + consumerAccount ) assert( providerInitializeComputeResults.algorithm.validOrder, @@ -851,7 +886,7 @@ describe('Compute flow tests', async () => { providerUrl, consumerAccount, computeEnv.id, - assets[0], + assets, algo ) // freeEnvDatasetTxId = assets[0].transferTxId diff --git a/test/integration/helpers.ts b/test/integration/helpers.ts index 91caccfcd..8d1f1bcff 100644 --- a/test/integration/helpers.ts +++ b/test/integration/helpers.ts @@ -138,7 +138,8 @@ export async function handleComputeOrder( if (config.chainId !== chainID) { throw new Error('Chain ID from DDO is different than the configured network.') } - if (order.providerFee && order.providerFee.providerFeeAmount) { + const hasProviderFees = order.providerFee && order.providerFee.providerFeeAmount + if (hasProviderFees && Number(order.providerFee.providerFeeAmount) > 0) { await approveWei( 
payerAccount, config, From 99cfc18cbc461560160915aadbadb004d08e5e56 Mon Sep 17 00:00:00 2001 From: alexcos20 Date: Tue, 4 Mar 2025 11:46:09 +0200 Subject: [PATCH 23/25] README update --- README.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/README.md b/README.md index cd862473f..60998ae15 100644 --- a/README.md +++ b/README.md @@ -51,6 +51,15 @@ npm install @oceanprotocol/lib - Visit the [Ocean Protocol website](https://docs.oceanprotocol.com/) for general information about Ocean Protocol. - If you have any difficulties or if you have further questions about how to use ocean.js please reach out to us on [Discord](https://discord.gg/TnXjkR5). - If you notice any bugs or issues with ocean.js please [open an issue on github](https://github.com/oceanprotocol/ocean.js/issues/new?assignees=&labels=bug&template=bug_report.md&title=). +- Developers using this library should ensure their tsconfig.json includes the necessary options: +``` +{ + "moduleResolution": "node", + "esModuleInterop": true, + "allowSyntheticDefaultImports": true +} +``` +- If using Next.js, they may need "moduleResolution": "bundler" for better compatibility. 
## 🦑 Development From 4d7725d1d404d56a6dea5b78a8d01f16d31ce94e Mon Sep 17 00:00:00 2001 From: alexcos20 Date: Tue, 4 Mar 2025 11:48:12 +0200 Subject: [PATCH 24/25] push package-lock --- package-lock.json | 407 ---------------------------------------------- 1 file changed, 407 deletions(-) diff --git a/package-lock.json b/package-lock.json index a32f5c221..be38fa345 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1939,278 +1939,6 @@ "node": ">=14.0" } }, - "node_modules/@esbuild/aix-ppc64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.23.1.tgz", - "integrity": "sha512-6VhYk1diRqrhBAqpJEdjASR/+WVRtfjpqKuNw11cLiaWpAT/Uu+nokB+UJnevzy/P9C/ty6AOe0dwueMrGh/iQ==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "aix" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/android-arm": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.23.1.tgz", - "integrity": "sha512-uz6/tEy2IFm9RYOyvKl88zdzZfwEfKZmnX9Cj1BHjeSGNuGLuMD1kR8y5bteYmwqKm1tj8m4cb/aKEorr6fHWQ==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/android-arm64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.23.1.tgz", - "integrity": "sha512-xw50ipykXcLstLeWH7WRdQuysJqejuAGPd30vd1i5zSyKK3WE+ijzHmLKxdiCMtH1pHz78rOg0BKSYOSB/2Khw==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/android-x64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.23.1.tgz", - "integrity": "sha512-nlN9B69St9BwUoB+jkyU090bru8L0NA3yFvAd7k8dNsVH8bi9a8cUAUSEcEEgTp2z3dbEDGJGfP6VUnkQnlReg==", - "cpu": [ - 
"x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/darwin-arm64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.23.1.tgz", - "integrity": "sha512-YsS2e3Wtgnw7Wq53XXBLcV6JhRsEq8hkfg91ESVadIrzr9wO6jJDMZnCQbHm1Guc5t/CdDiFSSfWP58FNuvT3Q==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/darwin-x64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.23.1.tgz", - "integrity": "sha512-aClqdgTDVPSEGgoCS8QDG37Gu8yc9lTHNAQlsztQ6ENetKEO//b8y31MMu2ZaPbn4kVsIABzVLXYLhCGekGDqw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/freebsd-arm64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.23.1.tgz", - "integrity": "sha512-h1k6yS8/pN/NHlMl5+v4XPfikhJulk4G+tKGFIOwURBSFzE8bixw1ebjluLOjfwtLqY0kewfjLSrO6tN2MgIhA==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/freebsd-x64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.23.1.tgz", - "integrity": "sha512-lK1eJeyk1ZX8UklqFd/3A60UuZ/6UVfGT2LuGo3Wp4/z7eRTRYY+0xOu2kpClP+vMTi9wKOfXi2vjUpO1Ro76g==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-arm": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.23.1.tgz", - "integrity": 
"sha512-CXXkzgn+dXAPs3WBwE+Kvnrf4WECwBdfjfeYHpMeVxWE0EceB6vhWGShs6wi0IYEqMSIzdOF1XjQ/Mkm5d7ZdQ==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-arm64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.23.1.tgz", - "integrity": "sha512-/93bf2yxencYDnItMYV/v116zff6UyTjo4EtEQjUBeGiVpMmffDNUyD9UN2zV+V3LRV3/on4xdZ26NKzn6754g==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-ia32": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.23.1.tgz", - "integrity": "sha512-VTN4EuOHwXEkXzX5nTvVY4s7E/Krz7COC8xkftbbKRYAl96vPiUssGkeMELQMOnLOJ8k3BY1+ZY52tttZnHcXQ==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-loong64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.23.1.tgz", - "integrity": "sha512-Vx09LzEoBa5zDnieH8LSMRToj7ir/Jeq0Gu6qJ/1GcBq9GkfoEAoXvLiW1U9J1qE/Y/Oyaq33w5p2ZWrNNHNEw==", - "cpu": [ - "loong64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-mips64el": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.23.1.tgz", - "integrity": "sha512-nrFzzMQ7W4WRLNUOU5dlWAqa6yVeI0P78WKGUo7lg2HShq/yx+UYkeNSE0SSfSure0SqgnsxPvmAUu/vu0E+3Q==", - "cpu": [ - "mips64el" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-ppc64": { - "version": "0.23.1", - "resolved": 
"https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.23.1.tgz", - "integrity": "sha512-dKN8fgVqd0vUIjxuJI6P/9SSSe/mB9rvA98CSH2sJnlZ/OCZWO1DJvxj8jvKTfYUdGfcq2dDxoKaC6bHuTlgcw==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-riscv64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.23.1.tgz", - "integrity": "sha512-5AV4Pzp80fhHL83JM6LoA6pTQVWgB1HovMBsLQ9OZWLDqVY8MVobBXNSmAJi//Csh6tcY7e7Lny2Hg1tElMjIA==", - "cpu": [ - "riscv64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-s390x": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.23.1.tgz", - "integrity": "sha512-9ygs73tuFCe6f6m/Tb+9LtYxWR4c9yg7zjt2cYkjDbDpV/xVn+68cQxMXCjUpYwEkze2RcU/rMnfIXNRFmSoDw==", - "cpu": [ - "s390x" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, "node_modules/@esbuild/linux-x64": { "version": "0.23.1", "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.23.1.tgz", @@ -2228,125 +1956,6 @@ "node": ">=18" } }, - "node_modules/@esbuild/netbsd-x64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.23.1.tgz", - "integrity": "sha512-aevEkCNu7KlPRpYLjwmdcuNz6bDFiE7Z8XC4CPqExjTvrHugh28QzUXVOZtiYghciKUacNktqxdpymplil1beA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/openbsd-arm64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.23.1.tgz", - "integrity": 
"sha512-3x37szhLexNA4bXhLrCC/LImN/YtWis6WXr1VESlfVtVeoFJBRINPJ3f0a/6LV8zpikqoUg4hyXw0sFBt5Cr+Q==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/openbsd-x64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.23.1.tgz", - "integrity": "sha512-aY2gMmKmPhxfU+0EdnN+XNtGbjfQgwZj43k8G3fyrDM/UdZww6xrWxmDkuz2eCZchqVeABjV5BpildOrUbBTqA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/sunos-x64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.23.1.tgz", - "integrity": "sha512-RBRT2gqEl0IKQABT4XTj78tpk9v7ehp+mazn2HbUeZl1YMdaGAQqhapjGTCe7uw7y0frDi4gS0uHzhvpFuI1sA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "sunos" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-arm64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.23.1.tgz", - "integrity": "sha512-4O+gPR5rEBe2FpKOVyiJ7wNDPA8nGzDuJ6gN4okSA1gEOYZ67N8JPk58tkWtdtPeLz7lBnY6I5L3jdsr3S+A6A==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-ia32": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.23.1.tgz", - "integrity": "sha512-BcaL0Vn6QwCwre3Y717nVHZbAa4UBEigzFm6VdsVdT/MbZ38xoj1X9HPkZhbmaBGUD1W8vxAfffbDe8bA6AKnQ==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-x64": { - "version": "0.23.1", - "resolved": 
"https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.23.1.tgz", - "integrity": "sha512-BHpFFeslkWrXWyUPnbKm+xYYVYruCinGcftSBaa8zoF9hZO4BcSCFUvHVTtzpIY6YzUnYtuEhZ+C9iEXjxnasg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, "node_modules/@eslint-community/eslint-utils": { "version": "4.4.1", "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.1.tgz", @@ -12384,21 +11993,6 @@ "dev": true, "license": "ISC" }, - "node_modules/fsevents": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", - "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, "node_modules/function-bind": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", @@ -23529,7 +23123,6 @@ "funding": { "url": "https://github.com/sponsors/sindresorhus" } - } } } From 059b37de830e9e59909e69260b20577035076a03 Mon Sep 17 00:00:00 2001 From: paulo-ocean Date: Thu, 6 Mar 2025 11:13:41 +0000 Subject: [PATCH 25/25] fix signature message for streamabale logs --- src/services/Provider.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/services/Provider.ts b/src/services/Provider.ts index 5d996a211..9a6602ed8 100644 --- a/src/services/Provider.ts +++ b/src/services/Provider.ts @@ -945,8 +945,8 @@ export class Provider { url += `&jobId=${jobId}` url += `&nonce=${nonce}` - // TODO: define teh signature to use (not implemented yet on node) - const signatureMessage = nonce + // consumer + jobId + nonce + const signatureMessage = `${consumerAddress}${jobId}${nonce}` const signature = await this.signProviderRequest(signer, 
signatureMessage) url += `&signature=${signature}`