Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix/ Running local integration tests #344

Merged
merged 26 commits
Apr 1, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -151,7 +151,7 @@ jobs:
- name: integration tests
run: npm run test:integration:cover
env:
OPERATOR_SERVICE_URL: '["http://172.15.0.13:31000/"]'
OPERATOR_SERVICE_URL: '["http://172.15.0.13:31000"]'
PRIVATE_KEY: ${{ secrets.NODE1_PRIVATE_KEY }}
NODE1_PRIVATE_KEY: ${{ secrets.NODE1_PRIVATE_KEY }}
NODE2_PRIVATE_KEY: ${{ secrets.NODE2_PRIVATE_KEY }}
Expand Down
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@
"test": "npm run lint && npm run test:unit:cover && npm run test:integration:cover",
"test:unit": "npm run build-tests && npm run mocha \"./dist/test/unit/**/*.test.js\"",
"test:integration": "npm run build-tests && npm run mocha \"./dist/test/integration/**/*.test.js\"",
"test:integration:complete": "npm run build-tests && npm run mocha ./dist/test/integration/completeFlow.test.js",
"test:integration:compute": "npm run build-tests && npm run mocha ./dist/test/integration/compute.test.js",
"test:unit:cover": "nyc --report-dir coverage/unit npm run test:unit",
"test:integration:cover": "nyc --report-dir coverage/integration --no-clean npm run test:integration",
"logs": "./scripts/logs.sh"
Expand Down
11 changes: 6 additions & 5 deletions src/components/c2d/compute_engines.ts
Original file line number Diff line number Diff line change
Expand Up @@ -170,9 +170,10 @@ export class C2DEngineOPFK8 extends C2DEngine {
const clusterHash = this.getC2DConfig().hash
const url = `${
this.getC2DConfig().url
}api/v1/operator/environments?chain_id=${chainId}`
}/api/v1/operator/environments?chain_id=${chainId}`
try {
const { data } = await axios.get(url)
if (!data) return envs
// we need to add hash to each env id
for (const [index, val] of data.entries()) {
data[index].id = `${clusterHash}-${val.id}`
Expand Down Expand Up @@ -250,7 +251,7 @@ export class C2DEngineOPFK8 extends C2DEngine {
try {
const response = await axios({
method: 'post',
url: `${this.getC2DConfig().url}api/v1/operator/compute`,
url: `${this.getC2DConfig().url}/api/v1/operator/compute`,
data: payload
})
if (response.status !== 200) {
Expand Down Expand Up @@ -287,7 +288,7 @@ export class C2DEngineOPFK8 extends C2DEngine {
try {
const response = await axios({
method: 'put',
url: `${this.getC2DConfig().url}api/v1/operator/compute`,
url: `${this.getC2DConfig().url}/api/v1/operator/compute`,
data: payload
})
if (response.status !== 200) {
Expand Down Expand Up @@ -322,7 +323,7 @@ export class C2DEngineOPFK8 extends C2DEngine {
try {
const response = await axios({
method: 'get',
url: `${this.getC2DConfig().url}api/v1/operator/compute`,
url: `${this.getC2DConfig().url}/api/v1/operator/compute`,
data: payload
})
if (response.status !== 200) {
Expand Down Expand Up @@ -360,7 +361,7 @@ export class C2DEngineOPFK8 extends C2DEngine {
try {
const response = await axios({
method: 'get',
url: `${this.getC2DConfig().url}api/v1/operator/computeResult`,
url: `${this.getC2DConfig().url}/api/v1/operator/computeResult`,
data: payload,
responseType: 'stream'
})
Expand Down
3 changes: 2 additions & 1 deletion src/components/httpRoutes/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,8 @@ export * from './getOceanPeers.js'
export const httpRoutes = express.Router()

// P2P routes related
export const hasP2PInterface = await (await getConfiguration()).hasP2P
export const hasP2PInterface = (await (await getConfiguration())?.hasP2P) || false

export function sendMissingP2PResponse(res: Response) {
res.status(400).send('Invalid or Non Existing P2P configuration')
}
Expand Down
26 changes: 22 additions & 4 deletions src/test/integration/compute.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ import {
} from '../utils/utils.js'

import { ProviderFees } from '../../@types/Fees.js'
import { homedir } from 'os'

describe('Compute', () => {
let previousConfiguration: OverrideEnvConfig[]
Expand All @@ -70,6 +71,7 @@ describe('Compute', () => {
)
// const chainId = DEVELOPMENT_CHAIN_ID
const mockSupportedNetworks: RPCS = getMockSupportedNetworks()

before(async () => {
previousConfiguration = await setupEnvironment(
null,
Expand All @@ -78,17 +80,21 @@ describe('Compute', () => {
ENVIRONMENT_VARIABLES.RPCS,
ENVIRONMENT_VARIABLES.PRIVATE_KEY,
ENVIRONMENT_VARIABLES.DB_URL,
ENVIRONMENT_VARIABLES.AUTHORIZED_DECRYPTERS
ENVIRONMENT_VARIABLES.AUTHORIZED_DECRYPTERS,
ENVIRONMENT_VARIABLES.ADDRESS_FILE,
ENVIRONMENT_VARIABLES.OPERATOR_SERVICE_URL
],
[
JSON.stringify(mockSupportedNetworks),
'0xc594c6e5def4bab63ac29eed19a134c130388f74f019bc74b8f4389df2837a58',
'http://localhost:8108/?apiKey=xyz',
JSON.stringify(['0xe2DD09d719Da89e5a3D0F2549c7E24566e947260'])
JSON.stringify(['0xe2DD09d719Da89e5a3D0F2549c7E24566e947260']),
`${homedir}/.ocean/ocean-contracts/artifacts/address.json`,
JSON.stringify(['http://localhost:31000'])
]
)
)
config = await getConfiguration(true) // Force reload the configuration
config = await getConfiguration(true)
dbconn = await new Database(config.dbConfig)
oceanNode = await OceanNode.getInstance(dbconn)
// eslint-disable-next-line no-unused-vars
Expand All @@ -103,6 +109,7 @@ describe('Compute', () => {
assert(oceanNode, 'Failed to instantiate OceanNode')
assert(config.c2dClusters, 'Failed to get c2dClusters')
})

// let's publish assets & algos
it('should publish compute datasets & algos', async () => {
publishedComputeDataset = await publishAsset(computeAsset, publisherAccount)
Expand All @@ -118,6 +125,7 @@ describe('Compute', () => {
DEFAULT_TEST_TIMEOUT
)
})

it('Get compute environments', async () => {
const getEnvironmentsTask = {
command: PROTOCOL_COMMANDS.COMPUTE_GET_ENVIRONMENTS,
Expand All @@ -132,6 +140,7 @@ describe('Compute', () => {
expect(response.stream).to.be.instanceOf(Readable)

computeEnvironments = await streamToObject(response.stream as Readable)

// expect 2 envs
expect(computeEnvironments.length === 2, 'incorrect length')
for (const computeEnvironment of computeEnvironments) {
Expand Down Expand Up @@ -238,6 +247,7 @@ describe('Compute', () => {
assert(resultParsed.providerFee.validUntil, 'algorithm validUntil does not exist')
assert(result.datasets[0].validOrder === false, 'incorrect validOrder') // expect false because tx id was not provided and no start order was called before
})

it('should start an order', async function () {
const orderTxReceipt = await orderAsset(
publishedComputeDataset.ddo,
Expand All @@ -252,6 +262,7 @@ describe('Compute', () => {
datasetOrderTxId = orderTxReceipt.hash
assert(datasetOrderTxId, 'transaction id not found')
})

it('Initialize compute with dataset tx and without algoritm tx', async () => {
// now, we have a valid order for dataset, with valid compute provider fees
// expected results:
Expand Down Expand Up @@ -325,6 +336,7 @@ describe('Compute', () => {
)
assert(result.datasets[0].validOrder !== false, 'We should have a valid order') // because we started an order earlier
})

it('should buy algo', async function () {
const orderTxReceipt = await orderAsset(
publishedAlgoDataset.ddo,
Expand All @@ -339,6 +351,7 @@ describe('Compute', () => {
algoOrderTxId = orderTxReceipt.hash
assert(algoOrderTxId, 'transaction id not found')
})

it('Initialize compute with dataset tx and algo with tx', async () => {
// now, we have valid orders for both algo and dataset,
// expected results:
Expand Down Expand Up @@ -397,6 +410,7 @@ describe('Compute', () => {
)
assert(result.datasets[0].validOrder !== false, 'We should have a valid order') // because we started an order earlier
})

it('should fail to start a compute job', async () => {
const nonce = Date.now().toString()
const message = String(nonce)
Expand Down Expand Up @@ -433,6 +447,7 @@ describe('Compute', () => {
assert(response.status.httpStatus === 500, 'Failed to get 500 response')
assert(!response.stream, 'We should not have a stream')
})

it('should start a compute job', async () => {
const nonce = Date.now().toString()
const message = String(nonce)
Expand Down Expand Up @@ -465,7 +480,6 @@ describe('Compute', () => {
}
const response = await new ComputeStartHandler(oceanNode).handle(startComputeTask)
assert(response, 'Failed to get response')
// should fail, because txId '0x123' is not a valid order
assert(response.status.httpStatus === 200, 'Failed to get 200 response')
assert(response.stream, 'Failed to get stream')
expect(response.stream).to.be.instanceOf(Readable)
Expand All @@ -474,6 +488,7 @@ describe('Compute', () => {
// eslint-disable-next-line prefer-destructuring
jobId = jobs[0].jobId
})

it('should stop a compute job', async () => {
const nonce = Date.now().toString()
const message = String(nonce)
Expand All @@ -497,6 +512,7 @@ describe('Compute', () => {
assert(response.stream, 'Failed to get stream')
expect(response.stream).to.be.instanceOf(Readable)
})

it('should get job status by jobId', async () => {
const statusComputeTask: ComputeGetStatusCommand = {
command: PROTOCOL_COMMANDS.COMPUTE_GET_STATUS,
Expand All @@ -514,6 +530,7 @@ describe('Compute', () => {
const jobs = await streamToObject(response.stream as Readable)
console.log(jobs)
})

it('should get job status by consumer', async () => {
const statusComputeTask: ComputeGetStatusCommand = {
command: PROTOCOL_COMMANDS.COMPUTE_GET_STATUS,
Expand All @@ -531,6 +548,7 @@ describe('Compute', () => {
const jobs = await streamToObject(response.stream as Readable)
console.log(jobs)
})

after(async () => {
await tearDownEnvironment(previousConfiguration)
})
Expand Down
7 changes: 5 additions & 2 deletions src/test/integration/download.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ import {
} from '../../utils/address.js'
import { publishAsset, orderAsset } from '../utils/assets.js'
import { downloadAsset } from '../data/assets.js'
import { homedir } from 'os'

describe('Should run a complete node flow.', () => {
let config: OceanNodeConfig
Expand Down Expand Up @@ -67,14 +68,16 @@ describe('Should run a complete node flow.', () => {
ENVIRONMENT_VARIABLES.PRIVATE_KEY,
ENVIRONMENT_VARIABLES.DB_URL,
ENVIRONMENT_VARIABLES.AUTHORIZED_DECRYPTERS,
ENVIRONMENT_VARIABLES.ALLOWED_ADMINS
ENVIRONMENT_VARIABLES.ALLOWED_ADMINS,
ENVIRONMENT_VARIABLES.ADDRESS_FILE
],
[
JSON.stringify(mockSupportedNetworks),
'0xc594c6e5def4bab63ac29eed19a134c130388f74f019bc74b8f4389df2837a58',
'http://localhost:8108/?apiKey=xyz',
JSON.stringify(['0xe2DD09d719Da89e5a3D0F2549c7E24566e947260']),
JSON.stringify(['0xe2DD09d719Da89e5a3D0F2549c7E24566e947260'])
JSON.stringify(['0xe2DD09d719Da89e5a3D0F2549c7E24566e947260']),
`${homedir}/.ocean/ocean-contracts/artifacts/address.json`
]
)
)
Expand Down
28 changes: 16 additions & 12 deletions src/test/integration/encryptDecryptDDO.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ import {
} from '../utils/utils.js'
import { DecryptDDOCommand } from '../../@types/commands.js'
import { EncryptMethod } from '../../@types/fileObject.js'
import { homedir } from 'os'

describe('Should encrypt and decrypt DDO', () => {
let database: Database
Expand Down Expand Up @@ -63,37 +64,40 @@ describe('Should encrypt and decrypt DDO', () => {
let previousConfiguration: OverrideEnvConfig[]

before(async () => {
let artifactsAddresses = getOceanArtifactsAdressesByChainId(DEVELOPMENT_CHAIN_ID)
if (!artifactsAddresses) {
artifactsAddresses = getOceanArtifactsAdresses().development
}

provider = new JsonRpcProvider('http://127.0.0.1:8545')
publisherAccount = (await provider.getSigner(0)) as Signer
publisherAddress = await publisherAccount.getAddress()
genericAsset = genericDDO
factoryContract = new ethers.Contract(
artifactsAddresses.ERC721Factory,
ERC721Factory.abi,
publisherAccount
)

previousConfiguration = await setupEnvironment(
null,
buildEnvOverrideConfig(
[
ENVIRONMENT_VARIABLES.PRIVATE_KEY,
ENVIRONMENT_VARIABLES.RPCS,
ENVIRONMENT_VARIABLES.AUTHORIZED_DECRYPTERS
ENVIRONMENT_VARIABLES.AUTHORIZED_DECRYPTERS,
ENVIRONMENT_VARIABLES.DB_URL,
ENVIRONMENT_VARIABLES.ADDRESS_FILE
],
[
'0xc594c6e5def4bab63ac29eed19a134c130388f74f019bc74b8f4389df2837a58',
JSON.stringify(mockSupportedNetworks),
JSON.stringify([publisherAddress])
JSON.stringify([publisherAddress]),
'http://localhost:8108/?apiKey=xyz',
`${homedir}/.ocean/ocean-contracts/artifacts/address.json`
]
)
)
let artifactsAddresses = getOceanArtifactsAdressesByChainId(DEVELOPMENT_CHAIN_ID)
if (!artifactsAddresses) {
artifactsAddresses = getOceanArtifactsAdresses().development
}

factoryContract = new ethers.Contract(
artifactsAddresses.ERC721Factory,
ERC721Factory.abi,
publisherAccount
)
const dbConfig = {
url: 'http://localhost:8108/?apiKey=xyz'
}
Expand Down
27 changes: 22 additions & 5 deletions src/test/integration/encryptFile.test.ts
Original file line number Diff line number Diff line change
@@ -1,24 +1,37 @@
import { expect, assert } from 'chai'
import { getConfiguration } from '../../utils/config.js'
import { Database } from '../../components/database/index.js'
import { OceanNode } from '../../OceanNode.js'
import { PROTOCOL_COMMANDS } from '../../utils/constants.js'
import { ENVIRONMENT_VARIABLES, PROTOCOL_COMMANDS } from '../../utils/constants.js'
import { OceanNodeConfig } from '../../@types/OceanNode.js'
import { Readable } from 'stream'
import { EncryptFileHandler } from '../../components/core/encryptHandler.js'
import { EncryptFileCommand } from '../../@types/commands'
import { EncryptMethod, FileObjectType, UrlFileObject } from '../../@types/fileObject.js'
import fs from 'fs'
import {
OverrideEnvConfig,
buildEnvOverrideConfig,
setupEnvironment,
tearDownEnvironment
} from '../utils/utils.js'

describe('Encrypt File', () => {
let config: OceanNodeConfig
let dbconn: Database
let oceanNode: OceanNode
let previousConfiguration: OverrideEnvConfig[]

before(async () => {
previousConfiguration = await setupEnvironment(
null,
buildEnvOverrideConfig(
[ENVIRONMENT_VARIABLES.PRIVATE_KEY, ENVIRONMENT_VARIABLES.DB_URL],
[
'0xc594c6e5def4bab63ac29eed19a134c130388f74f019bc74b8f4389df2837a58',
'http://localhost:8108/?apiKey=xyz'
]
)
)
config = await getConfiguration(true) // Force reload the configuration
dbconn = await new Database(config.dbConfig)
oceanNode = await OceanNode.getInstance(dbconn)
})

it('should encrypt files', async () => {
Expand Down Expand Up @@ -108,4 +121,8 @@ describe('Encrypt File', () => {
'Unknown error: Invalid storage type: Unknown'
)
})

after(async () => {
await tearDownEnvironment(previousConfiguration)
})
})
Loading
Loading