From 376d687506cf6d025c70b80facc07a848d65d021 Mon Sep 17 00:00:00 2001 From: Mokhtar Naamani Date: Fri, 28 Jul 2023 00:50:01 +0300 Subject: [PATCH 1/7] add test scenario for running a single flow --- tests/network-tests/run-test-scenario.sh | 2 +- tests/network-tests/run-tests.sh | 2 +- tests/network-tests/src/scenarios/flow.ts | 14 ++++++++++++++ 3 files changed, 16 insertions(+), 2 deletions(-) create mode 100644 tests/network-tests/src/scenarios/flow.ts diff --git a/tests/network-tests/run-test-scenario.sh b/tests/network-tests/run-test-scenario.sh index 7fcabb9f11..7c9af7fd81 100755 --- a/tests/network-tests/run-test-scenario.sh +++ b/tests/network-tests/run-test-scenario.sh @@ -10,4 +10,4 @@ SCENARIO=$1 SCENARIO=${SCENARIO:="content-directory"} # Execute the tests -time DEBUG=integration-tests:* yarn workspace network-tests node-ts-strict src/scenarios/${SCENARIO}.ts +time DEBUG=integration-tests:* yarn workspace network-tests node-ts-strict src/scenarios/${SCENARIO}.ts $2 diff --git a/tests/network-tests/run-tests.sh b/tests/network-tests/run-tests.sh index 48ffc85f91..4d613505d5 100755 --- a/tests/network-tests/run-tests.sh +++ b/tests/network-tests/run-tests.sh @@ -39,7 +39,7 @@ then fi # First scenario.. -IGNORE_HIRED_LEADS=true ./run-test-scenario.sh $1 +IGNORE_HIRED_LEADS=true ./run-test-scenario.sh $1 $2 # In between pickup generated keys from first scenario or bootstrap scene with all well known # keys for workers and members.. diff --git a/tests/network-tests/src/scenarios/flow.ts b/tests/network-tests/src/scenarios/flow.ts new file mode 100644 index 0000000000..94249ea4d3 --- /dev/null +++ b/tests/network-tests/src/scenarios/flow.ts @@ -0,0 +1,14 @@ +import { scenario } from '../Scenario' +import path from 'path' + +// eslint-disable-next-line @typescript-eslint/no-floating-promises +scenario('Single Flow', async ({ job }) => { + const pathToFlow = path.join('../flows', process.argv[2]) + + // eslint-disable-next-line @typescript-eslint/no-var-requires + const flows = require(pathToFlow) + + const flow = flows.default || flows + + job('single-flow', flow) +}) From 53f879b97144376a42d90c3e3ade81b1a53c0425 Mon Sep 17 00:00:00 2001 From: Mokhtar Naamani Date: Fri, 28 Jul 2023 02:39:39 +0300 Subject: [PATCH 2/7] cleanup integration tests startup scripts --- README.md | 8 +-- package.json | 2 + query-node/README.md | 11 ++-- query-node/run-tests.sh | 42 ------------- start-multistorage.sh | 8 ++- start-qn-orion-faucet.sh | 15 ----- start.sh | 8 ++- tests/network-tests/.gitignore | 1 + tests/network-tests/package.json | 1 + tests/network-tests/run-flow.sh | 9 +++ tests/network-tests/run-full-tests.sh | 63 ------------------- tests/network-tests/run-node-docker.sh | 2 + .../run-runtime-upgrade-tests.sh | 4 +- tests/network-tests/run-test-node.sh | 2 + tests/network-tests/run-test-scenario.sh | 7 ++- tests/network-tests/run-tests.sh | 30 ++++----- tests/network-tests/src/Scenario.ts | 8 +-- tests/network-tests/src/scenarios/full.ts | 4 +- .../src/scenarios/setupNewChain.ts | 6 +- .../scenarios/setupNewChainMultiStorage.ts | 6 +- tests/network-tests/test-setup-new-chain.sh | 2 + 21 files changed, 68 insertions(+), 171 deletions(-) delete mode 100755 query-node/run-tests.sh delete mode 100755 start-qn-orion-faucet.sh create mode 100755 tests/network-tests/run-flow.sh delete mode 100755 tests/network-tests/run-full-tests.sh diff --git a/README.md b/README.md index 8d2e3dafef..829e8773f0 100644 --- a/README.md +++ b/README.md @@ -51,13 +51,13 @@ Modify the root `package.json` and change 
volta section to use node version 16.2 ```sh # Build local npm packages -yarn build:packages +yarn build # Build joystream/node docker testing image RUNTIME_PROFILE=TESTING yarn build:node:docker # Start a local development network -RUNTIME_PROFILE=TESTING yarn start +yarn start ``` ## Software @@ -111,13 +111,13 @@ Look under the 'Assets' section: ```bash # Make sure yarn packages are built -yarn build:packages +yarn build # Build the test joystream-node RUNTIME_PROFILE=TESTING yarn build:node:docker # Run tests -./tests/network-tests/run-full-tests.sh +yarn test ``` ### Contributing diff --git a/package.json b/package.json index ff58cca36e..ee24769c78 100644 --- a/package.json +++ b/package.json @@ -6,8 +6,10 @@ "scripts": { "build:node:docker": "./build-node-docker.sh", "build:packages": "./build-packages.sh", + "build": "yarn build:packages", "setup": "./setup.sh", "start": "./start.sh", + "test": "./tests/network-tests/run-tests.sh", "cargo-checks": "./scripts/cargo-checks.sh", "cargo-build": "./scripts/cargo-build.sh", "lint": "./scripts/lint-typescript.sh", diff --git a/query-node/README.md b/query-node/README.md index 2adb5f8923..ae9e673918 100644 --- a/query-node/README.md +++ b/query-node/README.md @@ -70,13 +70,13 @@ GRAPHQL_PLAYGROUND_CDN="query/server" yarn workspace query-node-root query-node: Run integration tests ``` -./query-node/run-tests.sh +./tests/network-tests/run-tests.sh ``` -To run tests and keep services alive for further inspection, set `DEBUG` shell variable to any true-ish value. +To run tests and keep services alive for further inspection, set `PERSIST` shell variable to any true-ish value. ``` -DEBUG=true ./query-node/run-tests.sh +PERSIST=true ./tests/network-tests/run-tests.sh ``` You can then use queries manually in GraphQL Playground (http://localhost:8081/graphql), @@ -87,8 +87,9 @@ This assumes the scenario is repeatable and any previous test errors didn't brea the blockchain or processor state in a critical way. ``` -DEBUG=true ./query-node/run-tests.sh # run tests first and make sure services stay alive -REUSE_KEYS=true yarn workspace network-tests run-test-scenario content-directory +# run tests first and make sure services stay alive +PERSIST=true ./tests/network-tests/run-tests.sh +yarn workspace network-tests run-test-scenario content-directory ``` Commenting out some of the scenario's flow calls in `network-tests/src/scenarios/content-directory.ts` is not relevant to the current diff --git a/query-node/run-tests.sh b/query-node/run-tests.sh deleted file mode 100755 index e9c7fcdca1..0000000000 --- a/query-node/run-tests.sh +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env bash -set -e - -SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")" -cd $SCRIPT_PATH - -set -a -. 
../.env -set +a - -export JOYSTREAM_NODE_TAG=${JOYSTREAM_NODE_TAG:=$(RUNTIME_PROFILE=TESTING ../scripts/runtime-code-shasum.sh)} - -function cleanup() { - # Show tail end of logs for the processor and indexer containers to - # see any possible errors - (echo "\n\n## Processor Logs ##" && docker logs processor --tail 50) || : - (echo "\n\n## Indexer Logs ##" && docker logs indexer --tail 50) || : - (echo "\n\n## Indexer API Gateway Logs ##" && docker logs hydra-indexer-gateway --tail 50) || : - (echo "\n\n## Graphql Server Logs ##" && docker logs graphql-server --tail 50) || : - docker-compose down -v -} - -if [ -z "$DEBUG" ]; then - trap cleanup EXIT -fi - -# Clean start -docker-compose down -v - -docker-compose -f ../docker-compose.yml up -d joystream-node -./start.sh - -../tests/network-tests/start-storage.sh -export REUSE_KEYS=true -export IGNORE_HIRED_LEADS=true # this directive is needed to run `full` scenario without problems - -# pass the scenario name without .ts extension -SCENARIO=$1 -# fallback if scenario if not specified -SCENARIO=${SCENARIO:="full"} - -time yarn workspace network-tests run-test-scenario ${SCENARIO} diff --git a/start-multistorage.sh b/start-multistorage.sh index 98ba86dd3d..53de2be533 100755 --- a/start-multistorage.sh +++ b/start-multistorage.sh @@ -1,7 +1,13 @@ #!/usr/bin/env bash set -e +SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")" +cd $SCRIPT_PATH + +rm tests/network-tests/keys.json || : + # Run a complete joystream development network on your machine using docker +export RUNTIME_PROFILE=${RUNTIME_PROFILE:=TESTING} export JOYSTREAM_NODE_TAG=${JOYSTREAM_NODE_TAG:=$(./scripts/runtime-code-shasum.sh)} INIT_CHAIN_SCENARIO=${INIT_CHAIN_SCENARIO:=setupNewChainMultiStorage} @@ -29,7 +35,7 @@ fi ./query-node/start.sh ## Orion -./start-orion.sh +# ./start-orion.sh ## Init the chain with some state if [[ $SKIP_CHAIN_SETUP != 'true' ]]; then diff --git a/start-qn-orion-faucet.sh b/start-qn-orion-faucet.sh deleted file mode 100755 index cc545db914..0000000000 --- a/start-qn-orion-faucet.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env bash -set -e - -## Query Node Infrastructure -./query-node/start.sh - -## Orion -./start-orion.sh - -## Faucet -export SKIP_QUERY_NODE_CHECKS=true -./tests/network-tests/run-test-scenario.sh setupFaucet - -export INVITER_KEY=$(cat ./tests/network-tests/output.json | jq -r .faucet.suri) -docker-compose up -d faucet diff --git a/start.sh b/start.sh index 9e06c24a6a..58637adeb7 100755 --- a/start.sh +++ b/start.sh @@ -1,7 +1,13 @@ #!/usr/bin/env bash set -e +SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")" +cd $SCRIPT_PATH + +rm tests/network-tests/keys.json || : + # Run a complete joystream development network on your machine using docker +export RUNTIME_PROFILE=${RUNTIME_PROFILE:=TESTING} export JOYSTREAM_NODE_TAG=${JOYSTREAM_NODE_TAG:=$(./scripts/runtime-code-shasum.sh)} INIT_CHAIN_SCENARIO=${INIT_CHAIN_SCENARIO:=setupNewChain} @@ -29,7 +35,7 @@ fi ./query-node/start.sh ## Orion -./start-orion.sh +# ./start-orion.sh ## Init the chain with some state if [[ $SKIP_CHAIN_SETUP != 'true' ]]; then diff --git a/tests/network-tests/.gitignore b/tests/network-tests/.gitignore index a98fa023c0..becf12c028 100644 --- a/tests/network-tests/.gitignore +++ b/tests/network-tests/.gitignore @@ -1,3 +1,4 @@ output.json data/ **/generated/* +keys.json diff --git a/tests/network-tests/package.json b/tests/network-tests/package.json index a6f4484247..8c369b10a7 100644 --- a/tests/network-tests/package.json +++ b/tests/network-tests/package.json @@ -6,6 +6,7 
@@ "build": "yarn generate:all && tsc --noEmit", "test": "./run-tests.sh", "run-test-scenario": "./run-test-scenario.sh", + "run-flow": "./run-fow.sh", "node-ts-strict": "node -r ts-node/register --unhandled-rejections=strict", "lint": "eslint . --quiet --ext .ts", "checks": "tsc --noEmit --pretty && prettier ./ --check && yarn lint", diff --git a/tests/network-tests/run-flow.sh b/tests/network-tests/run-flow.sh new file mode 100755 index 0000000000..19598808d9 --- /dev/null +++ b/tests/network-tests/run-flow.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash +set -e + +SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")" +cd $SCRIPT_PATH + +# Execute the flow scenario +time DEBUG=integration-tests:* yarn workspace network-tests \ + node-ts-strict src/scenarios/flow.ts $1 diff --git a/tests/network-tests/run-full-tests.sh b/tests/network-tests/run-full-tests.sh deleted file mode 100755 index ae1a970661..0000000000 --- a/tests/network-tests/run-full-tests.sh +++ /dev/null @@ -1,63 +0,0 @@ -#!/usr/bin/env bash -set -e - -SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")" -cd $SCRIPT_PATH - -# Clean start -docker-compose -f ../../docker-compose.yml down -v - -if [ "${DEV_NODE}" == true ] -then - docker-compose -f ../../docker-compose.yml up -d joystream-node - NODE_CONTAINER_ID="joystream-node" - -else - NODE_CONTAINER_ID=$(./run-test-node-docker.sh) -fi - -if [ "${PERSIST}" != true ] -then - function cleanup() { - printf "**************************************************************************\n" - printf "***************************JOSYTREAM NODE LOGS****************************\n" - printf "**************************************************************************\n\n" - docker logs ${NODE_CONTAINER_ID} --tail 50 - docker stop ${NODE_CONTAINER_ID} - docker rm ${NODE_CONTAINER_ID} - - printf "\n\n\n" - printf "**************************************************************************\n" - printf "****************************HYDRA INDEXER LOGS****************************\n" - printf "**************************************************************************\n\n" - docker logs indexer --tail 50 - - printf "\n\n\n" - printf "**************************************************************************\n" - printf "*************************QUERY NODE PROCESSOR LOGS************************\n" - printf "**************************************************************************\n\n" - docker logs processor --tail 50 - - docker-compose -f ../../docker-compose.yml down -v - } - trap cleanup EXIT -fi - -# pass the scenario name without .ts extension -SCENARIO=$1 -# default to "full" if scenario is not specified -SCENARIO=${SCENARIO:=full} - -sleep 3 - -# Display runtime version -yarn workspace api-scripts tsnode-strict src/status.ts | grep Runtime - -# Start a query-node -../../query-node/start.sh - -# Start storage and distribution services -./start-storage.sh - -# Run full tests reusing the existing keys -REUSE_KEYS=true IGNORE_HIRED_LEADS=true SKIP_STORAGE_AND_DISTRIBUTION=true ./run-test-scenario.sh $SCENARIO diff --git a/tests/network-tests/run-node-docker.sh b/tests/network-tests/run-node-docker.sh index ee60a5c1b6..72ad584000 100755 --- a/tests/network-tests/run-node-docker.sh +++ b/tests/network-tests/run-node-docker.sh @@ -4,6 +4,8 @@ set -e SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")" cd $SCRIPT_PATH +rm ./keys.json || : + # Log only to stderr # Only output from this script should be the container id of the node at the very end diff --git a/tests/network-tests/run-runtime-upgrade-tests.sh 
b/tests/network-tests/run-runtime-upgrade-tests.sh index 6929f8c9e5..1321a474ec 100755 --- a/tests/network-tests/run-runtime-upgrade-tests.sh +++ b/tests/network-tests/run-runtime-upgrade-tests.sh @@ -4,6 +4,8 @@ set -e SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")" cd $SCRIPT_PATH +rm ./keys.json || : + # Log only to stderr # Only output from this script should be the container id of the node at the very end @@ -187,8 +189,6 @@ function main { # 6. Bootstrap storage infra because we need to run content-directory tests after runtime upgrade if [ "${NO_STORAGE}" != true ]; then ./start-storage.sh - export REUSE_KEYS=true - export SKIP_STORAGE_AND_DISTRIBUTION=true fi ./run-test-scenario.sh runtimeUpgrade diff --git a/tests/network-tests/run-test-node.sh b/tests/network-tests/run-test-node.sh index ab9e821c6e..388b59c639 100755 --- a/tests/network-tests/run-test-node.sh +++ b/tests/network-tests/run-test-node.sh @@ -4,6 +4,8 @@ set -e SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")" cd $SCRIPT_PATH +rm ./keys.json || : + # Location used to store chain data, generated spec file and initial members # and balances for the test chain. DATA_PATH=./data diff --git a/tests/network-tests/run-test-scenario.sh b/tests/network-tests/run-test-scenario.sh index 7c9af7fd81..c19ee5bdbc 100755 --- a/tests/network-tests/run-test-scenario.sh +++ b/tests/network-tests/run-test-scenario.sh @@ -7,7 +7,8 @@ cd $SCRIPT_PATH # pass the scenario name without .ts extension SCENARIO=$1 # fallback if scenario not specified -SCENARIO=${SCENARIO:="content-directory"} +SCENARIO=${SCENARIO:="full"} -# Execute the tests -time DEBUG=integration-tests:* yarn workspace network-tests node-ts-strict src/scenarios/${SCENARIO}.ts $2 +# Execute the scenario and optional arguments after following scenario name +time DEBUG=integration-tests:* yarn workspace network-tests \ + node-ts-strict src/scenarios/${SCENARIO}.ts $2 $3 $4 $5 diff --git a/tests/network-tests/run-tests.sh b/tests/network-tests/run-tests.sh index 4d613505d5..b2ba4096b5 100755 --- a/tests/network-tests/run-tests.sh +++ b/tests/network-tests/run-tests.sh @@ -4,45 +4,39 @@ set -e SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")" cd $SCRIPT_PATH +rm ./keys.json || : + CONTAINER_ID=$(./run-test-node-docker.sh) -function cleanup() { +if [ "${PERSIST}" == true ] +then + echo "Starting services" +else + function cleanup() { docker logs ${CONTAINER_ID} --tail 15 docker stop ${CONTAINER_ID} docker rm ${CONTAINER_ID} docker-compose -f ../../docker-compose.yml down -v -} + } -trap cleanup EXIT + trap cleanup EXIT +fi sleep 3 # Display runtime version yarn workspace api-scripts tsnode-strict src/status.ts | grep Runtime -# Start any other services we want -# docker-compose -f ../../docker-compose.yml up -d colossus-1 - # Start a query-node if [ "${NO_QN}" != true ] then ../../query-node/start.sh fi -# Execute tests - if [ "${NO_STORAGE}" != true ] then ./start-storage.sh - export REUSE_KEYS=true - export SKIP_STORAGE_AND_DISTRIBUTION=true fi -# First scenario.. -IGNORE_HIRED_LEADS=true ./run-test-scenario.sh $1 $2 - -# In between pickup generated keys from first scenario or bootstrap scene with all well known -# keys for workers and members.. - -# Second scenario.. 
-# ./run-test-scenario.sh $2 +# Execute tests +./run-test-scenario.sh $1 $2 diff --git a/tests/network-tests/src/Scenario.ts b/tests/network-tests/src/Scenario.ts index cce7709ee6..86134ed128 100644 --- a/tests/network-tests/src/Scenario.ts +++ b/tests/network-tests/src/Scenario.ts @@ -64,16 +64,12 @@ export async function scenario(label: string, scene: (props: ScenarioProps) => P const api = apiFactory.getApi('Key Generation') - // Generate all key ids based on REUSE_KEYS or START_KEY_ID (if provided) - const reuseKeys = Boolean(env.REUSE_KEYS) - let startKeyId: number + let startKeyId = 0 let customKeys: string[] = [] - if (reuseKeys && existsSync(OUTPUT_FILE_PATH)) { + if (existsSync(OUTPUT_FILE_PATH)) { const output = JSON.parse(readFileSync(OUTPUT_FILE_PATH).toString()) as TestsOutput startKeyId = output.keyIds.final customKeys = output.keyIds.custom - } else { - startKeyId = parseInt(env.START_KEY_ID || '0') } await api.createKeyPairs(startKeyId, false) diff --git a/tests/network-tests/src/scenarios/full.ts b/tests/network-tests/src/scenarios/full.ts index 69fd7e52af..e5b62be4cf 100644 --- a/tests/network-tests/src/scenarios/full.ts +++ b/tests/network-tests/src/scenarios/full.ts @@ -80,9 +80,7 @@ scenario('Full', async ({ job, env }) => { const channelPayoutsProposalJob = job('channel payouts proposal', channelPayouts).requires(proposalsJob) // Working groups - const hireLeads = job('lead opening', leadOpening(process.env.IGNORE_HIRED_LEADS === 'true')).after( - channelPayoutsProposalJob - ) + const hireLeads = job('lead opening', leadOpening(true)).after(channelPayoutsProposalJob) job('openings and applications', openingsAndApplications).requires(hireLeads) job('upcoming openings', upcomingOpenings).requires(hireLeads) job('group status', groupStatus).requires(hireLeads) diff --git a/tests/network-tests/src/scenarios/setupNewChain.ts b/tests/network-tests/src/scenarios/setupNewChain.ts index 11f8a06fe7..cb0da4279a 100644 --- a/tests/network-tests/src/scenarios/setupNewChain.ts +++ b/tests/network-tests/src/scenarios/setupNewChain.ts @@ -14,8 +14,6 @@ scenario('Setup new chain', async ({ job }) => { const leads = job('Set WorkingGroup Leads', leaderSetup(true)).requires(councilJob) job('Create video categories', populateVideoCategories).after(leads) - if (!process.env.SKIP_STORAGE_AND_DISTRIBUTION) { - job('initialize storage system', initStorage(defaultStorageConfig)).requires(leads) - job('initialize distribution system', initDistribution(defaultDistributionConfig)).requires(leads) - } + job('initialize storage system', initStorage(defaultStorageConfig)).requires(leads) + job('initialize distribution system', initDistribution(defaultDistributionConfig)).requires(leads) }) diff --git a/tests/network-tests/src/scenarios/setupNewChainMultiStorage.ts b/tests/network-tests/src/scenarios/setupNewChainMultiStorage.ts index 1f2440f6da..db7775501c 100644 --- a/tests/network-tests/src/scenarios/setupNewChainMultiStorage.ts +++ b/tests/network-tests/src/scenarios/setupNewChainMultiStorage.ts @@ -15,8 +15,6 @@ scenario('Setup new chain', async ({ job }) => { const leads = job('Set WorkingGroup Leads', leaderSetup(true)).requires(councilJob) job('Create video categories', populateVideoCategories).after(leads) - if (!process.env.SKIP_STORAGE_AND_DISTRIBUTION) { - job('initialize storage system', initStorage(doubleStorageConfig)).requires(leads) - job('initialize distribution system', initDistribution(doubleDistributionConfig)).requires(leads) - } + job('initialize storage system', 
initStorage(doubleStorageConfig)).requires(leads)
+  job('initialize distribution system', initDistribution(doubleDistributionConfig)).requires(leads)
 })
diff --git a/tests/network-tests/test-setup-new-chain.sh b/tests/network-tests/test-setup-new-chain.sh
index a3a345171b..6f951b1db4 100755
--- a/tests/network-tests/test-setup-new-chain.sh
+++ b/tests/network-tests/test-setup-new-chain.sh
@@ -4,6 +4,8 @@ set -e
 SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")"
 cd $SCRIPT_PATH
 
+rm ./keys.json || :
+
 # Custom sudo and treasury accounts - export them before start new chain
 # will be used to configre chainspec and override test framework defaults.
 export TREASURY_ACCOUNT_URI=//Bob

From e6b7e18094755aad91008943142307b7a8cd9f11 Mon Sep 17 00:00:00 2001
From: Mokhtar Naamani
Date: Fri, 28 Jul 2023 02:42:21 +0300
Subject: [PATCH 3/7] wait longer on darwin for processor to start

---
 query-node/start.sh | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/query-node/start.sh b/query-node/start.sh
index 63a356938d..e9bad75048 100755
--- a/query-node/start.sh
+++ b/query-node/start.sh
@@ -19,6 +19,10 @@ docker-compose -f ../docker-compose.yml up -d hydra-indexer-gateway
 # Start processor
 docker-compose -f ../docker-compose.yml up -d processor
 echo "Waiting for processor to be ready..." && sleep 30
+if [[ "$OSTYPE" == "darwin"* ]]; then
+  # On Docker Desktop things take a bit longer to start up
+  sleep 150
+fi
 
 # Start graphql-server
 docker-compose -f ../docker-compose.yml up -d graphql-server

From baeab45a155fb8ba27b42826004b232d7496762a Mon Sep 17 00:00:00 2001
From: Mokhtar Naamani
Date: Fri, 28 Jul 2023 02:58:16 +0300
Subject: [PATCH 4/7] keys.json not output.json

---
 tests/network-tests/src/Scenario.ts | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/tests/network-tests/src/Scenario.ts b/tests/network-tests/src/Scenario.ts
index 86134ed128..81f3ab3adc 100644
--- a/tests/network-tests/src/Scenario.ts
+++ b/tests/network-tests/src/Scenario.ts
@@ -19,7 +19,7 @@ export type ScenarioProps = {
   job: (label: string, flows: Flow[] | Flow) => Job
 }
 
-const OUTPUT_FILE_PATH = 'output.json'
+const OUTPUT_FILE_PATH = 'keys.json'
 
 type TestsOutput = {
   accounts: { [k: string]: number }
@@ -56,6 +56,8 @@ export async function scenario(label: string, scene: (props: ScenarioProps) => P
 
   const env = process.env
 
+  const debug = extendDebug('scenario')
+
   // Connect api to the chain
   const nodeUrl: string = env.NODE_URL || 'ws://127.0.0.1:9944'
   const provider = new WsProvider(nodeUrl)
@@ -67,6 +69,7 @@ export async function scenario(label: string, scene: (props: ScenarioProps) => P
   let startKeyId = 0
   let customKeys: string[] = []
   if (existsSync(OUTPUT_FILE_PATH)) {
+    debug(`Found existing ${OUTPUT_FILE_PATH}, will re-use existing keys.`)
     const output = JSON.parse(readFileSync(OUTPUT_FILE_PATH).toString()) as TestsOutput
     startKeyId = output.keyIds.final
     customKeys = output.keyIds.custom
@@ -85,8 +88,6 @@ export async function scenario(label: string, scene: (props: ScenarioProps) => P
 
   const query = new QueryNodeApi(queryNodeProvider)
 
-  const debug = extendDebug('scenario')
-
   debug(label)
 
   const jobs = new JobManager({ apiFactory, query, env })

From fc53d1c8e677d5df5467a394dabc5ed9a3effa21 Mon Sep 17 00:00:00 2001
From: Mokhtar Naamani
Date: Fri, 28 Jul 2023 03:08:15 +0300
Subject: [PATCH 5/7] fix reference to keys.json

---
 start-multistorage.sh               | 2 +-
 start.sh                            | 2 +-
 tests/network-tests/.prettierignore | 1 +
 3 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/start-multistorage.sh
b/start-multistorage.sh
index 53de2be533..77dc1c5c8e 100755
--- a/start-multistorage.sh
+++ b/start-multistorage.sh
@@ -48,7 +48,7 @@ if [[ $SKIP_CHAIN_SETUP != 'true' ]]; then
   ./tests/network-tests/run-test-scenario.sh ${INIT_CHAIN_SCENARIO}
 
   ## Member faucet
-  export INVITER_KEY=$(cat ./tests/network-tests/output.json | jq -r .faucet.suri)
+  export INVITER_KEY=$(cat ./tests/network-tests/keys.json | jq -r .faucet.suri)
   docker-compose up -d faucet
 
   ## Storage Infrastructure Nodes
diff --git a/start.sh b/start.sh
index 58637adeb7..adbd7a7917 100755
--- a/start.sh
+++ b/start.sh
@@ -46,7 +46,7 @@ if [[ $SKIP_CHAIN_SETUP != 'true' ]]; then
   ./tests/network-tests/run-test-scenario.sh ${INIT_CHAIN_SCENARIO}
 
   ## Member faucet
-  export INVITER_KEY=$(cat ./tests/network-tests/output.json | jq -r .faucet.suri)
+  export INVITER_KEY=$(cat ./tests/network-tests/keys.json | jq -r .faucet.suri)
   docker-compose up -d faucet
 
   ## Storage Infrastructure Nodes
diff --git a/tests/network-tests/.prettierignore b/tests/network-tests/.prettierignore
index 052a6fb5c5..d40c45a7e8 100644
--- a/tests/network-tests/.prettierignore
+++ b/tests/network-tests/.prettierignore
@@ -2,3 +2,4 @@
 .tmp/
 data/
 output.json
+keys.json

From 04506c466842d98c8df67f1f993c5f3d7070284b Mon Sep 17 00:00:00 2001
From: Zeeshan Akram <97m.zeeshan@gmail.com>
Date: Tue, 22 Aug 2023 12:17:23 +0500
Subject: [PATCH 6/7] added separate fixtures for storage-related operations

---
 tests/network-tests/src/Api.ts                 |   2 +-
 .../ClaimAndWithdrawChannelRewardFixture.ts    |   1 -
 .../AcceptStorageBucketInvitationFixture.ts    |  63 ++++++
 .../storage/CreateStorageBucketFixture.ts      |  56 ++++++
 .../SetStorageOperatorMetadataFixture.ts       |  49 +++++
 .../UpdateDynamicBagCreationPolicyFixture.ts   |  52 +++++
 .../UpdateStorageBucketsPerBagLimitFixture.ts  |  41 ++++
 ...pdateStorageBucketsVoucherLimitsFixture.ts  |  50 +++++
 .../src/fixtures/storage/index.ts              |   4 +
 .../network-tests/src/flows/storage/config.ts  | 110 +++++++++++
 .../src/flows/storage/initDistribution.ts      |  76 +-------
 .../src/flows/storage/initStorage.ts           | 179 +++++++-----------
 .../src/flows/storage/storageSync.ts           |  22 +--
 .../scenarios/initStorageAndDistribution.ts    |  11 +-
 .../src/scenarios/setupNewChain.ts             |  15 +-
 .../scenarios/setupNewChainMultiStorage.ts     |  15 +-
 utils/api-scripts/src/status.ts                |   4 +-
 17 files changed, 535 insertions(+), 215 deletions(-)
 create mode 100644 tests/network-tests/src/fixtures/storage/AcceptStorageBucketInvitationFixture.ts
 create mode 100644 tests/network-tests/src/fixtures/storage/CreateStorageBucketFixture.ts
 create mode 100644 tests/network-tests/src/fixtures/storage/SetStorageOperatorMetadataFixture.ts
 create mode 100644 tests/network-tests/src/fixtures/storage/UpdateDynamicBagCreationPolicyFixture.ts
 create mode 100644 tests/network-tests/src/fixtures/storage/UpdateStorageBucketsPerBagLimitFixture.ts
 create mode 100644 tests/network-tests/src/fixtures/storage/UpdateStorageBucketsVoucherLimitsFixture.ts
 create mode 100644 tests/network-tests/src/fixtures/storage/index.ts
 create mode 100644 tests/network-tests/src/flows/storage/config.ts

diff --git a/tests/network-tests/src/Api.ts b/tests/network-tests/src/Api.ts
index c766d9463c..a14e692389 100644
--- a/tests/network-tests/src/Api.ts
+++ b/tests/network-tests/src/Api.ts
@@ -114,7 +114,7 @@ export class ApiFactory {
       return new ApiFactory(api, treasuryAccountUri, miniSecret)
     } catch (err) {
-      if (connectAttempts === 3) {
+      if (connectAttempts === 10) {
         throw new Error('Unable to connect to chain')
       }
     }
diff --git
a/tests/network-tests/src/fixtures/content/channelPayouts/ClaimAndWithdrawChannelRewardFixture.ts b/tests/network-tests/src/fixtures/content/channelPayouts/ClaimAndWithdrawChannelRewardFixture.ts index 31d0533826..6755117336 100644 --- a/tests/network-tests/src/fixtures/content/channelPayouts/ClaimAndWithdrawChannelRewardFixture.ts +++ b/tests/network-tests/src/fixtures/content/channelPayouts/ClaimAndWithdrawChannelRewardFixture.ts @@ -12,7 +12,6 @@ import { import { QueryNodeApi } from '../../../QueryNodeApi' import { EventDetails, EventType } from '../../../types' import { ClaimChannelRewardParams, getExpectedClaims } from './ClaimChannelRewardFixture' -import { BN } from 'bn.js' type ClaimAndWithdrawChannelRewardEventDetails = EventDetails> diff --git a/tests/network-tests/src/fixtures/storage/AcceptStorageBucketInvitationFixture.ts b/tests/network-tests/src/fixtures/storage/AcceptStorageBucketInvitationFixture.ts new file mode 100644 index 0000000000..72154801e2 --- /dev/null +++ b/tests/network-tests/src/fixtures/storage/AcceptStorageBucketInvitationFixture.ts @@ -0,0 +1,63 @@ +import { StorageBucketId, WorkerId } from '@joystream/types/primitives' +import { SubmittableExtrinsic } from '@polkadot/api/types' +import { ISubmittableResult } from '@polkadot/types/types' +import BN from 'bn.js' +import { Api } from '../../Api' +import { StandardizedFixture } from '../../Fixture' +import { QueryNodeApi } from '../../QueryNodeApi' +import { AnyQueryNodeEvent, EventDetails, EventType } from '../../types' + +const TRANSACTOR_ACCOUNT_BALANCE = new BN(9_000_000_000_000_000) + +type StorageBucketInvitationAcceptedEventDetails = EventDetails> + +export type AcceptStorageBucketInvitationParams = { + workerId: WorkerId + bucketId: StorageBucketId + transactorAccountId: string +} + +export class AcceptStorageBucketInvitationFixture extends StandardizedFixture { + private acceptStorageBucketInvitationParams: AcceptStorageBucketInvitationParams[] + + constructor( + api: Api, + query: QueryNodeApi, + acceptStorageBucketInvitationParams: AcceptStorageBucketInvitationParams[] + ) { + super(api, query) + this.acceptStorageBucketInvitationParams = acceptStorageBucketInvitationParams + } + + protected async getSignerAccountOrAccounts(): Promise { + return await Promise.all( + this.acceptStorageBucketInvitationParams.map(async ({ workerId }) => + (await this.api.query.storageWorkingGroup.workerById(workerId)).unwrap().roleAccountId.toString() + ) + ) + } + + public async execute(): Promise { + // Send some funds to transactor accounts + await Promise.all( + this.acceptStorageBucketInvitationParams.map(({ transactorAccountId }) => + this.api.treasuryTransferBalance(transactorAccountId, TRANSACTOR_ACCOUNT_BALANCE) + ) + ) + await super.execute() + } + + protected async getExtrinsics(): Promise[]> { + return this.acceptStorageBucketInvitationParams.map(({ workerId, bucketId, transactorAccountId }) => { + return this.api.tx.storage.acceptStorageBucketInvitation(workerId, bucketId, transactorAccountId) + }) + } + + protected getEventFromResult(result: ISubmittableResult): Promise { + return this.api.getEventDetails(result, 'storage', 'StorageBucketInvitationAccepted') + } + + protected assertQueryNodeEventIsValid(qEvent: AnyQueryNodeEvent, i: number): void { + // TODO: implement QN checks after mappings are added + } +} diff --git a/tests/network-tests/src/fixtures/storage/CreateStorageBucketFixture.ts b/tests/network-tests/src/fixtures/storage/CreateStorageBucketFixture.ts new file mode 100644 index 
0000000000..06022b8d69 --- /dev/null +++ b/tests/network-tests/src/fixtures/storage/CreateStorageBucketFixture.ts @@ -0,0 +1,56 @@ +import { StorageBucketId, WorkerId } from '@joystream/types/primitives' +import { SubmittableExtrinsic } from '@polkadot/api/types' +import { ISubmittableResult } from '@polkadot/types/types' +import BN from 'bn.js' +import { Api } from '../../Api' +import { StandardizedFixture } from '../../Fixture' +import { QueryNodeApi } from '../../QueryNodeApi' +import { AnyQueryNodeEvent, EventDetails, EventType } from '../../types' + +type StorageBucketCreatedEventDetails = EventDetails> + +export type CreateStorageBucketParams = { + inviteWorker: WorkerId + sizeLimit: BN + objectLimit: number +} + +export class CreateStorageBucketFixture extends StandardizedFixture { + protected events: StorageBucketCreatedEventDetails[] = [] + + private createStorageBucketParams: CreateStorageBucketParams[] + + constructor(api: Api, query: QueryNodeApi, createStorageBucketParams: CreateStorageBucketParams[]) { + super(api, query) + this.createStorageBucketParams = createStorageBucketParams + } + + public getCreatedStorageBucketsIds(): StorageBucketId[] { + if (!this.events.length) { + throw new Error('Trying to get created storage bucket ids before they were created!') + } + return this.events.map((e) => e.event.data[0]) + } + + protected async getSignerAccountOrAccounts(): Promise { + const lead = await this.api.query.storageWorkingGroup.currentLead() + if (lead.isNone) { + throw new Error(`Cannot get storageWorkingGroup lead: Lead not hired!`) + } + return (await this.api.query.storageWorkingGroup.workerById(lead.unwrap())).unwrap().roleAccountId.toString() + } + + protected async getExtrinsics(): Promise[]> { + return this.createStorageBucketParams.map(({ inviteWorker, sizeLimit, objectLimit }) => + this.api.tx.storage.createStorageBucket(inviteWorker, true, sizeLimit, objectLimit) + ) + } + + protected getEventFromResult(result: ISubmittableResult): Promise { + return this.api.getEventDetails(result, 'storage', 'StorageBucketCreated') + } + + protected assertQueryNodeEventIsValid(qEvent: AnyQueryNodeEvent, i: number): void { + // TODO: implement QN checks after mappings are added + } +} diff --git a/tests/network-tests/src/fixtures/storage/SetStorageOperatorMetadataFixture.ts b/tests/network-tests/src/fixtures/storage/SetStorageOperatorMetadataFixture.ts new file mode 100644 index 0000000000..88aa6518fb --- /dev/null +++ b/tests/network-tests/src/fixtures/storage/SetStorageOperatorMetadataFixture.ts @@ -0,0 +1,49 @@ +import { IStorageBucketOperatorMetadata, StorageBucketOperatorMetadata } from '@joystream/metadata-protobuf' +import { StorageBucketId, WorkerId } from '@joystream/types/primitives' +import { SubmittableExtrinsic } from '@polkadot/api/types' +import { ISubmittableResult } from '@polkadot/types/types' +import { Api } from '../../Api' +import { StandardizedFixture } from '../../Fixture' +import { QueryNodeApi } from '../../QueryNodeApi' +import { AnyQueryNodeEvent, EventDetails, EventType } from '../../types' +import { Utils } from '../../utils' + +type StorageOperatorMetadataSetEventDetails = EventDetails> + +export type SetStorageOperatorMetadataParams = { + workerId: WorkerId + bucketId: StorageBucketId + metadata: IStorageBucketOperatorMetadata +} + +export class SetStorageOperatorMetadataFixture extends StandardizedFixture { + private storageOperatorMetadataParams: SetStorageOperatorMetadataParams[] + + constructor(api: Api, query: QueryNodeApi, 
storageOperatorMetadataParams: SetStorageOperatorMetadataParams[]) { + super(api, query) + this.storageOperatorMetadataParams = storageOperatorMetadataParams + } + + protected async getSignerAccountOrAccounts(): Promise { + return await Promise.all( + this.storageOperatorMetadataParams.map(async ({ workerId }) => + (await this.api.query.storageWorkingGroup.workerById(workerId)).unwrap().roleAccountId.toString() + ) + ) + } + + protected async getExtrinsics(): Promise[]> { + return this.storageOperatorMetadataParams.map(({ workerId, bucketId, metadata }) => { + const metadataBytes = Utils.metadataToBytes(StorageBucketOperatorMetadata, metadata) + return this.api.tx.storage.setStorageOperatorMetadata(workerId, bucketId, metadataBytes) + }) + } + + protected getEventFromResult(result: ISubmittableResult): Promise { + return this.api.getEventDetails(result, 'storage', 'StorageOperatorMetadataSet') + } + + protected assertQueryNodeEventIsValid(qEvent: AnyQueryNodeEvent, i: number): void { + // TODO: implement QN checks after mappings are added + } +} diff --git a/tests/network-tests/src/fixtures/storage/UpdateDynamicBagCreationPolicyFixture.ts b/tests/network-tests/src/fixtures/storage/UpdateDynamicBagCreationPolicyFixture.ts new file mode 100644 index 0000000000..c7075340b0 --- /dev/null +++ b/tests/network-tests/src/fixtures/storage/UpdateDynamicBagCreationPolicyFixture.ts @@ -0,0 +1,52 @@ +import { SubmittableExtrinsic } from '@polkadot/api/types' +import { PalletStorageDynamicBagIdType as DynamicBagId } from '@polkadot/types/lookup' +import { ISubmittableResult } from '@polkadot/types/types' +import _ from 'lodash' +import { Api } from '../../Api' +import { StandardizedFixture } from '../../Fixture' +import { QueryNodeApi } from '../../QueryNodeApi' +import { AnyQueryNodeEvent, EventDetails, EventType } from '../../types' + +type NumberOfStorageBucketsInDynamicBagCreationPolicyUpdatedEventDetails = EventDetails< + EventType<'storage', 'NumberOfStorageBucketsInDynamicBagCreationPolicyUpdated'> +> + +export type DynamicBagCreationPolicyParams = { + [K in DynamicBagId['type']]?: number +} + +export class UpdateDynamicBagCreationPolicyFixture extends StandardizedFixture { + private policyParams: DynamicBagCreationPolicyParams + + constructor(api: Api, query: QueryNodeApi, policyParams: DynamicBagCreationPolicyParams) { + super(api, query) + this.policyParams = policyParams + } + + protected async getSignerAccountOrAccounts(): Promise { + const lead = await this.api.query.storageWorkingGroup.currentLead() + if (lead.isNone) { + throw new Error(`Cannot get storageWorkingGroup lead: Lead not hired!`) + } + return (await this.api.query.storageWorkingGroup.workerById(lead.unwrap())).unwrap().roleAccountId.toString() + } + + protected async getExtrinsics(): Promise[]> { + return _.entries(this.policyParams).map(([bagType, numberOfBuckets]) => + this.api.tx.storage.updateNumberOfStorageBucketsInDynamicBagCreationPolicy( + bagType as DynamicBagId['type'], + numberOfBuckets + ) + ) + } + + protected getEventFromResult( + result: ISubmittableResult + ): Promise { + return this.api.getEventDetails(result, 'storage', 'NumberOfStorageBucketsInDynamicBagCreationPolicyUpdated') + } + + protected assertQueryNodeEventIsValid(qEvent: AnyQueryNodeEvent, i: number): void { + // TODO: implement QN checks after mappings are added + } +} diff --git a/tests/network-tests/src/fixtures/storage/UpdateStorageBucketsPerBagLimitFixture.ts b/tests/network-tests/src/fixtures/storage/UpdateStorageBucketsPerBagLimitFixture.ts 
new file mode 100644 index 0000000000..6152d42151 --- /dev/null +++ b/tests/network-tests/src/fixtures/storage/UpdateStorageBucketsPerBagLimitFixture.ts @@ -0,0 +1,41 @@ +import { SubmittableExtrinsic } from '@polkadot/api/types' +import { ISubmittableResult } from '@polkadot/types/types' +import { Api } from '../../Api' +import { StandardizedFixture } from '../../Fixture' +import { QueryNodeApi } from '../../QueryNodeApi' +import { AnyQueryNodeEvent, EventDetails, EventType } from '../../types' + +type StorageBucketsPerBagLimitUpdatedEventDetails = EventDetails< + EventType<'storage', 'StorageBucketsPerBagLimitUpdated'> +> + +export type StorageBucketsPerBagLimitParam = number + +export class UpdateStorageBucketsPerBagLimitFixture extends StandardizedFixture { + private bucketsPerBagLimit: StorageBucketsPerBagLimitParam + + constructor(api: Api, query: QueryNodeApi, bucketsPerBagLimit: StorageBucketsPerBagLimitParam) { + super(api, query) + this.bucketsPerBagLimit = bucketsPerBagLimit + } + + protected async getSignerAccountOrAccounts(): Promise { + const lead = await this.api.query.storageWorkingGroup.currentLead() + if (lead.isNone) { + throw new Error(`Cannot get storageWorkingGroup lead: Lead not hired!`) + } + return (await this.api.query.storageWorkingGroup.workerById(lead.unwrap())).unwrap().roleAccountId.toString() + } + + protected async getExtrinsics(): Promise[]> { + return [this.api.tx.storage.updateStorageBucketsPerBagLimit(this.bucketsPerBagLimit)] + } + + protected getEventFromResult(result: ISubmittableResult): Promise { + return this.api.getEventDetails(result, 'storage', 'StorageBucketsPerBagLimitUpdated') + } + + protected assertQueryNodeEventIsValid(qEvent: AnyQueryNodeEvent, i: number): void { + // TODO: implement QN checks after mappings are added + } +} diff --git a/tests/network-tests/src/fixtures/storage/UpdateStorageBucketsVoucherLimitsFixture.ts b/tests/network-tests/src/fixtures/storage/UpdateStorageBucketsVoucherLimitsFixture.ts new file mode 100644 index 0000000000..7151094870 --- /dev/null +++ b/tests/network-tests/src/fixtures/storage/UpdateStorageBucketsVoucherLimitsFixture.ts @@ -0,0 +1,50 @@ +import { SubmittableExtrinsic } from '@polkadot/api/types' +import { ISubmittableResult } from '@polkadot/types/types' +import BN from 'bn.js' +import { Api } from '../../Api' +import { StandardizedFixture } from '../../Fixture' +import { QueryNodeApi } from '../../QueryNodeApi' +import { AnyQueryNodeEvent, EventDetails, EventType } from '../../types' + +type StorageBucketsVoucherMaxLimitsUpdatedEventDetails = EventDetails< + EventType<'storage', 'StorageBucketsVoucherMaxLimitsUpdated'> +> + +export type StorageBucketsVoucherLimitsParams = { + sizeLimit: BN + objectsLimit: number +} + +export class UpdateStorageBucketsVoucherLimitsFixture extends StandardizedFixture { + private voucherLimits: StorageBucketsVoucherLimitsParams + + constructor(api: Api, query: QueryNodeApi, voucherLimits: StorageBucketsVoucherLimitsParams) { + super(api, query) + this.voucherLimits = voucherLimits + } + + protected async getSignerAccountOrAccounts(): Promise { + const lead = await this.api.query.storageWorkingGroup.currentLead() + if (lead.isNone) { + throw new Error(`Cannot get storageWorkingGroup lead: Lead not hired!`) + } + return (await this.api.query.storageWorkingGroup.workerById(lead.unwrap())).unwrap().roleAccountId.toString() + } + + protected async getExtrinsics(): Promise[]> { + return [ + this.api.tx.storage.updateStorageBucketsVoucherMaxLimits( + 
this.voucherLimits.sizeLimit, + this.voucherLimits.objectsLimit + ), + ] + } + + protected getEventFromResult(result: ISubmittableResult): Promise { + return this.api.getEventDetails(result, 'storage', 'StorageBucketsVoucherMaxLimitsUpdated') + } + + protected assertQueryNodeEventIsValid(qEvent: AnyQueryNodeEvent, i: number): void { + // TODO: implement QN checks after mappings are added + } +} diff --git a/tests/network-tests/src/fixtures/storage/index.ts b/tests/network-tests/src/fixtures/storage/index.ts new file mode 100644 index 0000000000..32a3902ee2 --- /dev/null +++ b/tests/network-tests/src/fixtures/storage/index.ts @@ -0,0 +1,4 @@ +export { CreateStorageBucketFixture } from './CreateStorageBucketFixture' +export { UpdateDynamicBagCreationPolicyFixture } from './UpdateDynamicBagCreationPolicyFixture' +export { UpdateStorageBucketsPerBagLimitFixture } from './UpdateStorageBucketsPerBagLimitFixture' +export { UpdateStorageBucketsVoucherLimitsFixture } from './UpdateStorageBucketsVoucherLimitsFixture' diff --git a/tests/network-tests/src/flows/storage/config.ts b/tests/network-tests/src/flows/storage/config.ts new file mode 100644 index 0000000000..43567de756 --- /dev/null +++ b/tests/network-tests/src/flows/storage/config.ts @@ -0,0 +1,110 @@ +import { CreateInterface } from '@joystream/types' +import { PalletStorageStaticBagId as StaticBagId } from '@polkadot/types/lookup' +import BN from 'bn.js' +import { InitDistributionConfig } from './initDistribution' +import { InitStorageConfig } from './initStorage' + +export const allStaticBags: CreateInterface[] = [ + 'Council', + { WorkingGroup: 'Content' }, + { WorkingGroup: 'Distribution' }, + { WorkingGroup: 'App' }, + { WorkingGroup: 'OperationsAlpha' }, + { WorkingGroup: 'OperationsBeta' }, + { WorkingGroup: 'OperationsGamma' }, + { WorkingGroup: 'Storage' }, +] + +/** + * Storage Buckets configuration + */ +export const singleStorageBucketConfig: InitStorageConfig = { + dynamicBagPolicy: { + 'Channel': 1, + 'Member': 1, + }, + buckets: [ + { + metadata: { endpoint: process.env.COLOSSUS_1_URL || 'http://localhost:3333' }, + staticBags: allStaticBags, + storageLimit: new BN(1_000_000_000_000), + objectsLimit: 1000000000, + transactorUri: process.env.COLOSSUS_1_TRANSACTOR_URI || '//Colossus1', + }, + ], +} + +export const doubleStorageBucketConfig: InitStorageConfig = { + dynamicBagPolicy: { + 'Channel': 2, + 'Member': 2, + }, + buckets: [ + { + metadata: { endpoint: process.env.COLOSSUS_1_URL || 'http://localhost:3333' }, + staticBags: allStaticBags, + storageLimit: new BN(1_000_000_000_000), + objectsLimit: 1000000000, + transactorUri: process.env.COLOSSUS_1_TRANSACTOR_URI || '//Colossus1', + }, + { + metadata: { endpoint: process.env.COLOSSUS_2_URL || 'http://localhost:3335' }, + staticBags: allStaticBags, + storageLimit: new BN(1_000_000_000_000), + objectsLimit: 1000000000, + transactorUri: process.env.COLOSSUS_2_TRANSACTOR_URI || '//Colossus2', + }, + ], +} + +/** + * Distribution Buckets configuration + */ +export const singleDistributionBucketConfig: InitDistributionConfig = { + families: [ + { + metadata: { region: 'All' }, + dynamicBagPolicy: { + 'Channel': 1, + 'Member': 1, + }, + buckets: [ + { + metadata: { endpoint: process.env.DISTRIBUTOR_1_URL || 'http://localhost:3334' }, + staticBags: allStaticBags, + }, + ], + }, + ], +} + +export const doubleDistributionBucketConfig: InitDistributionConfig = { + families: [ + { + metadata: { region: 'Region 1' }, + dynamicBagPolicy: { + 'Channel': 1, + 'Member': 1, + }, + buckets: [ 
+ { + metadata: { endpoint: process.env.DISTRIBUTOR_1_URL || 'http://localhost:3334' }, + staticBags: allStaticBags, + }, + ], + }, + { + metadata: { region: 'Region 2' }, + dynamicBagPolicy: { + 'Channel': 1, + 'Member': 1, + }, + buckets: [ + { + metadata: { endpoint: process.env.DISTRIBUTOR_2_URL || 'http://localhost:3336' }, + staticBags: allStaticBags, + }, + ], + }, + ], +} diff --git a/tests/network-tests/src/flows/storage/initDistribution.ts b/tests/network-tests/src/flows/storage/initDistribution.ts index 7421e9a691..d6ed9f4273 100644 --- a/tests/network-tests/src/flows/storage/initDistribution.ts +++ b/tests/network-tests/src/flows/storage/initDistribution.ts @@ -13,10 +13,10 @@ import { } from '@polkadot/types/lookup' import _ from 'lodash' import { extendDebug } from '../../Debugger' -import { FlowProps } from '../../Flow' -import { Utils } from '../../utils' import { FixtureRunner } from '../../Fixture' +import { FlowProps } from '../../Flow' import { HireWorkersFixture } from '../../fixtures/workingGroups/HireWorkersFixture' +import { Utils } from '../../utils' type DistributionBucketConfig = { metadata: IDistributionBucketOperatorMetadata @@ -31,70 +31,10 @@ type DistributionFamilyConfig = { } } -type InitDistributionConfig = { +export type InitDistributionConfig = { families: DistributionFamilyConfig[] } -export const allStaticBags: CreateInterface[] = [ - 'Council', - { WorkingGroup: 'Content' }, - { WorkingGroup: 'Distribution' }, - { WorkingGroup: 'App' }, - { WorkingGroup: 'OperationsAlpha' }, - { WorkingGroup: 'OperationsBeta' }, - { WorkingGroup: 'OperationsGamma' }, - { WorkingGroup: 'Storage' }, -] - -export const singleBucketConfig: InitDistributionConfig = { - families: [ - { - metadata: { region: 'All' }, - dynamicBagPolicy: { - 'Channel': 1, - 'Member': 1, - }, - buckets: [ - { - metadata: { endpoint: process.env.DISTRIBUTOR_1_URL || 'http://localhost:3334' }, - staticBags: allStaticBags, - }, - ], - }, - ], -} - -export const doubleBucketConfig: InitDistributionConfig = { - families: [ - { - metadata: { region: 'Region 1' }, - dynamicBagPolicy: { - 'Channel': 1, - 'Member': 1, - }, - buckets: [ - { - metadata: { endpoint: process.env.DISTRIBUTOR_1_URL || 'http://localhost:3334' }, - staticBags: allStaticBags, - }, - ], - }, - { - metadata: { region: 'Region 2' }, - dynamicBagPolicy: { - 'Channel': 1, - 'Member': 1, - }, - buckets: [ - { - metadata: { endpoint: process.env.DISTRIBUTOR_2_URL || 'http://localhost:3336' }, - staticBags: allStaticBags, - }, - ], - }, - ], -} - export default function createFlow({ families }: InitDistributionConfig) { return async function initDistribution({ api, query }: FlowProps): Promise { const debug = extendDebug('flow:initDistribution') @@ -109,9 +49,9 @@ export default function createFlow({ families }: InitDistributionConfig) { const hireWorkersFixture = new HireWorkersFixture(api, query, 'distributionWorkingGroup', totalBucketsNum) await new FixtureRunner(hireWorkersFixture).run() - const operatorIds = hireWorkersFixture.getCreatedWorkerIds() + const workerIds = hireWorkersFixture.getCreatedWorkerIds() - const operatorKeys = await api.getWorkerRoleAccounts(operatorIds, 'distributionWorkingGroup') + const operatorKeys = await api.getWorkerRoleAccounts(workerIds, 'distributionWorkingGroup') // Create families, set buckets per bag limit const createFamilyTxs = families.map(() => api.tx.storage.createDistributionBucketFamily()) @@ -178,20 +118,20 @@ export default function createFlow({ families }: InitDistributionConfig) { // 
Invite bucket operators const bucketInviteTxs = bucketIds.map((bucketId, i) => - api.tx.storage.inviteDistributionBucketOperator(bucketId, operatorIds[i]) + api.tx.storage.inviteDistributionBucketOperator(bucketId, workerIds[i]) ) await api.sendExtrinsicsAndGetResults(bucketInviteTxs, distributionLeaderKey) // Accept invitations const acceptInvitationTxs = bucketIds.map((bucketId, i) => - api.tx.storage.acceptDistributionBucketInvitation(operatorIds[i], bucketId) + api.tx.storage.acceptDistributionBucketInvitation(workerIds[i], bucketId) ) await api.sendExtrinsicsAndGetResults(acceptInvitationTxs, operatorKeys) // Bucket metadata and static bags const bucketSetupPromises = _.flatten( bucketIds.map((bucketId, i) => { - const operatorId = operatorIds[i] + const operatorId = workerIds[i] const operatorKey = operatorKeys[i] const bucketConfig = bucketById.get(bucketId.toString()) if (!bucketConfig) { diff --git a/tests/network-tests/src/flows/storage/initStorage.ts b/tests/network-tests/src/flows/storage/initStorage.ts index 305767080a..43daec92fc 100644 --- a/tests/network-tests/src/flows/storage/initStorage.ts +++ b/tests/network-tests/src/flows/storage/initStorage.ts @@ -1,15 +1,21 @@ -import { FlowProps } from '../../Flow' -import { extendDebug } from '../../Debugger' -import { IStorageBucketOperatorMetadata, StorageBucketOperatorMetadata } from '@joystream/metadata-protobuf' +import { IStorageBucketOperatorMetadata } from '@joystream/metadata-protobuf' import { CreateInterface, createType } from '@joystream/types' import { PalletStorageDynamicBagIdType as DynamicBagId, PalletStorageStaticBagId as StaticBagId, } from '@polkadot/types/lookup' -import _ from 'lodash' -import { Utils } from '../../utils' import BN from 'bn.js' +import { extendDebug } from '../../Debugger' import { FixtureRunner } from '../../Fixture' +import { FlowProps } from '../../Flow' +import { + CreateStorageBucketFixture, + UpdateDynamicBagCreationPolicyFixture, + UpdateStorageBucketsVoucherLimitsFixture, +} from '../../fixtures/storage' +import { AcceptStorageBucketInvitationFixture } from '../../fixtures/storage/AcceptStorageBucketInvitationFixture' +import { SetStorageOperatorMetadataFixture } from '../../fixtures/storage/SetStorageOperatorMetadataFixture' +import { UpdateBucketsPerBagLimitFixture } from '../../fixtures/storage/UpdateBucketsPerBagLimit' import { HireWorkersFixture } from '../../fixtures/workingGroups/HireWorkersFixture' type StorageBucketConfig = { @@ -18,70 +24,16 @@ type StorageBucketConfig = { storageLimit: BN objectsLimit: number transactorUri: string - transactorBalance: BN } -type InitStorageConfig = { +export type InitStorageConfig = { buckets: StorageBucketConfig[] dynamicBagPolicy: { [K in DynamicBagId['type']]?: number } } -export const allStaticBags: CreateInterface[] = [ - 'Council', - { WorkingGroup: 'Content' }, - { WorkingGroup: 'Distribution' }, - { WorkingGroup: 'App' }, - { WorkingGroup: 'OperationsAlpha' }, - { WorkingGroup: 'OperationsBeta' }, - { WorkingGroup: 'OperationsGamma' }, - { WorkingGroup: 'Storage' }, -] - -export const singleBucketConfig: InitStorageConfig = { - dynamicBagPolicy: { - 'Channel': 1, - 'Member': 1, - }, - buckets: [ - { - metadata: { endpoint: process.env.COLOSSUS_1_URL || 'http://localhost:3333' }, - staticBags: allStaticBags, - storageLimit: new BN(1_000_000_000_000), - objectsLimit: 1000000000, - transactorUri: process.env.COLOSSUS_1_TRANSACTOR_URI || '//Colossus1', - transactorBalance: new BN(9_000_000_000_000_000), - }, - ], -} - -export const 
doubleBucketConfig: InitStorageConfig = { - dynamicBagPolicy: { - 'Channel': 2, - 'Member': 2, - }, - buckets: [ - { - metadata: { endpoint: process.env.COLOSSUS_1_URL || 'http://localhost:3333' }, - staticBags: allStaticBags, - storageLimit: new BN(1_000_000_000_000), - objectsLimit: 1000000000, - transactorUri: process.env.COLOSSUS_1_TRANSACTOR_URI || '//Colossus1', - transactorBalance: new BN(9_000_000_000_000_000), - }, - { - metadata: { endpoint: process.env.COLOSSUS_2_URL || 'http://localhost:3335' }, - staticBags: allStaticBags, - storageLimit: new BN(1_000_000_000_000), - objectsLimit: 1000000000, - transactorUri: process.env.COLOSSUS_2_TRANSACTOR_URI || '//Colossus2', - transactorBalance: new BN(9_000_000_000_000_000), - }, - ], -} - -export default function createFlow({ buckets, dynamicBagPolicy }: InitStorageConfig) { +export default function initStorage({ buckets, dynamicBagPolicy }: InitStorageConfig) { return async function initStorage({ api, query }: FlowProps): Promise { const debug = extendDebug('flow:initStorage') api.enableDebugTxLogs() @@ -96,67 +48,68 @@ export default function createFlow({ buckets, dynamicBagPolicy }: InitStorageCon const hireWorkersFixture = new HireWorkersFixture(api, query, 'storageWorkingGroup', buckets.length) await new FixtureRunner(hireWorkersFixture).run() - const operatorIds = hireWorkersFixture.getCreatedWorkerIds() + const workerIds = hireWorkersFixture.getCreatedWorkerIds() - const operatorKeys = await api.getWorkerRoleAccounts(operatorIds, 'storageWorkingGroup') + const updateDynamicBagPolicyFixture = new UpdateDynamicBagCreationPolicyFixture(api, query, dynamicBagPolicy) + await new FixtureRunner(updateDynamicBagPolicyFixture).run() - // Set global limits and policies - const updateDynamicBagPolicyTxs = _.entries(dynamicBagPolicy).map(([bagType, numberOfBuckets]) => - api.tx.storage.updateNumberOfStorageBucketsInDynamicBagCreationPolicy( - bagType as DynamicBagId['type'], - numberOfBuckets - ) - ) - const setMaxVoucherLimitsTx = api.tx.storage.updateStorageBucketsVoucherMaxLimits(maxStorageLimit, maxObjectsLimit) - const setBucketPerBagLimitTx = api.tx.storage.updateStorageBucketsPerBagLimit(Math.max(5, buckets.length)) + const updateStorageBucketsVoucherFixture = new UpdateStorageBucketsVoucherLimitsFixture(api, query, { + sizeLimit: maxStorageLimit, + objectsLimit: maxObjectsLimit, + }) + await new FixtureRunner(updateStorageBucketsVoucherFixture).run() - await api.sendExtrinsicsAndGetResults( - [...updateDynamicBagPolicyTxs, setMaxVoucherLimitsTx, setBucketPerBagLimitTx], - storageLeaderKey + const updateStorageBucketsPerBagLimitFixture = new UpdateBucketsPerBagLimitFixture( + 'storage', + api, + query, + Math.max(5, buckets.length) ) + await new FixtureRunner(updateStorageBucketsPerBagLimitFixture).run() // Create buckets - const createBucketTxs = buckets.map((b, i) => - api.tx.storage.createStorageBucket(operatorIds[i], true, b.storageLimit, b.objectsLimit) - ) - const createBucketResults = await api.sendExtrinsicsAndGetResults(createBucketTxs, storageLeaderKey) - const bucketById = new Map() - createBucketResults.forEach((res, i) => { - const bucketId = api.getEvent(res, 'storage', 'StorageBucketCreated').data[0] - bucketById.set(bucketId.toNumber(), buckets[i]) - }) + const createBucketsInput = buckets.map((bucket, i) => ({ + inviteWorker: workerIds[i], + sizeLimit: bucket.storageLimit, + objectLimit: bucket.objectsLimit, + })) + const createStorageBucketFixture = new CreateStorageBucketFixture(api, query, createBucketsInput) + 
await new FixtureRunner(createStorageBucketFixture).run() + const createdStorageBucketsIds = createStorageBucketFixture.getCreatedStorageBucketsIds() // Accept invitations - const acceptInvitationTxs = Array.from(bucketById.entries()).map(([bucketId, bucketConfig], i) => { - const transactorKey = api.createCustomKeyPair(bucketConfig.transactorUri, true).address - return api.tx.storage.acceptStorageBucketInvitation(operatorIds[i], bucketId, transactorKey) - }) - await api.sendExtrinsicsAndGetResults(acceptInvitationTxs, operatorKeys) - - // Bucket metadata, static bags, transactor balances - const bucketSetupPromises = _.flatten( - Array.from(bucketById.entries()).map(([bucketId, bucketConfig], i) => { - const operatorId = operatorIds[i] - const operatorKey = operatorKeys[i] - const metadataBytes = Utils.metadataToBytes(StorageBucketOperatorMetadata, bucketConfig.metadata) - const setMetaTx = api.tx.storage.setStorageOperatorMetadata(operatorId, bucketId, metadataBytes) - const setMetaPromise = api.sendExtrinsicsAndGetResults([setMetaTx], operatorKey) - const updateBagTxs = (bucketConfig.staticBags || []).map((sBagId) => { - return api.tx.storage.updateStorageBucketsForBag( - createType('PalletStorageBagIdType', { Static: sBagId }), - createType('BTreeSet', [bucketId]), - createType('BTreeSet', []) - ) - }) - const updateBagsPromise = api.sendExtrinsicsAndGetResults(updateBagTxs, storageLeaderKey) - const setupTransactorBalancePromise = (async () => { - const transactorKey = api.getAddressFromSuri(bucketConfig.transactorUri) - return [await api.treasuryTransferBalance(transactorKey, bucketConfig.transactorBalance)] - })() - return [updateBagsPromise, setMetaPromise, setupTransactorBalancePromise] - }) + const acceptInvitationInput = createdStorageBucketsIds.map((bucketId, i) => ({ + workerId: workerIds[i], + bucketId, + transactorAccountId: api.createCustomKeyPair(buckets[i].transactorUri, true).address, + })) + const acceptStorageBucketInvitationFixture = new AcceptStorageBucketInvitationFixture( + api, + query, + acceptInvitationInput ) - await Promise.all(bucketSetupPromises) + await new FixtureRunner(acceptStorageBucketInvitationFixture).run() + + // Set Buckets Metadata + const setMetadataInput = createdStorageBucketsIds.map((bucketId, i) => ({ + workerId: workerIds[i], + bucketId, + metadata: buckets[i].metadata, + })) + const setStorageOperatorMetadataFixture = new SetStorageOperatorMetadataFixture(api, query, setMetadataInput) + await new FixtureRunner(setStorageOperatorMetadataFixture).run() + + // Add all static bags to all buckets + const updateBagTxs = createdStorageBucketsIds.map((bucketId, i) => { + return (buckets[i].staticBags || []).map((sBagId) => { + return api.tx.storage.updateStorageBucketsForBag( + createType('PalletStorageBagIdType', { Static: sBagId }), + createType('BTreeSet', [bucketId]), + createType('BTreeSet', []) + ) + }) + }) + await api.sendExtrinsicsAndGetResults(updateBagTxs, storageLeaderKey) debug('Done') } diff --git a/tests/network-tests/src/flows/storage/storageSync.ts b/tests/network-tests/src/flows/storage/storageSync.ts index 6eb6f95c96..785a5488d1 100644 --- a/tests/network-tests/src/flows/storage/storageSync.ts +++ b/tests/network-tests/src/flows/storage/storageSync.ts @@ -1,17 +1,17 @@ -import { FlowProps } from '../../Flow' -import { extendDebug } from '../../Debugger' -import { BuyMembershipHappyCaseFixture } from '../../fixtures/membership' -import { FixtureRunner } from '../../Fixture' +import { ChannelCreationInputParameters } from 
diff --git a/tests/network-tests/src/scenarios/initStorageAndDistribution.ts b/tests/network-tests/src/scenarios/initStorageAndDistribution.ts
index 303017b985..d978fa831c 100644
--- a/tests/network-tests/src/scenarios/initStorageAndDistribution.ts
+++ b/tests/network-tests/src/scenarios/initStorageAndDistribution.ts
@@ -1,8 +1,9 @@
-import leaderSetup from '../flows/working-groups/leadOpening'
-import initStorage, { doubleBucketConfig as defaultStorageConfig } from '../flows/storage/initStorage'
-import initDistribution, { doubleBucketConfig as defaultDistributionConfig } from '../flows/storage/initDistribution'
 import { scenario } from '../Scenario'
 import electCouncil from '../flows/council/elect'
+import { doubleDistributionBucketConfig, doubleStorageBucketConfig } from '../flows/storage/config'
+import initDistribution from '../flows/storage/initDistribution'
+import initStorage from '../flows/storage/initStorage'
+import leaderSetup from '../flows/working-groups/leadOpening'
 
 // eslint-disable-next-line @typescript-eslint/no-floating-promises
 scenario('Init storage and distribution', async ({ job }) => {
@@ -10,6 +11,6 @@ scenario('Init storage and distribution', async ({ job }) => {
   const setupLead = job('setup leads', leaderSetup(true, ['storageWorkingGroup', 'distributionWorkingGroup'])).after(
     councilJob
   )
-  job('initialize storage system', initStorage(defaultStorageConfig)).after(setupLead)
-  job('initialize distribution system', initDistribution(defaultDistributionConfig)).after(setupLead)
+  job('initialize storage system', initStorage(doubleStorageBucketConfig)).after(setupLead)
+  job('initialize distribution system', initDistribution(doubleDistributionBucketConfig)).after(setupLead)
 })
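The scenario files in this patch wire flows into jobs with .after() and .requires(). The exact semantics live in the Scenario module, which is not shown here; this standalone sketch only illustrates the fluent dependency style, and treating both methods as plain ordering constraints is an assumption:

// Illustrative mini-DSL mirroring job()/.after()/.requires(); the real
// Scenario module is richer (e.g. it likely distinguishes hard dependencies).
type Flow = () => Promise<void>

class Job {
  deps: Job[] = []
  constructor(public name: string, public flow: Flow) {}
  after(dep: Job): Job {
    this.deps.push(dep) // assumed: run only once dep has finished
    return this
  }
  requires(dep: Job): Job {
    this.deps.push(dep) // assumed: run only once dep has succeeded
    return this
  }
}

async function runJobs(jobs: Job[]): Promise<void> {
  const done = new Set<Job>()
  const run = async (j: Job): Promise<void> => {
    if (done.has(j)) return
    for (const d of j.deps) await run(d) // resolve dependencies first
    await j.flow()
    done.add(j)
  }
  for (const j of jobs) await run(j)
}

// Usage mirroring initStorageAndDistribution.ts:
const council = new Job('electing council', async () => {})
const leads = new Job('setup leads', async () => {}).after(council)
const storage = new Job('initialize storage system', async () => {}).after(leads)
runJobs([council, leads, storage]).catch(console.error)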
diff --git a/tests/network-tests/src/scenarios/setupNewChain.ts b/tests/network-tests/src/scenarios/setupNewChain.ts
index cb0da4279a..4fb32794ee 100644
--- a/tests/network-tests/src/scenarios/setupNewChain.ts
+++ b/tests/network-tests/src/scenarios/setupNewChain.ts
@@ -1,10 +1,11 @@
-import leaderSetup from '../flows/working-groups/leadOpening'
-import initFaucet from '../flows/faucet/initFaucet'
-import { populateVideoCategories } from '../flows/content/videoCategories'
-import initStorage, { singleBucketConfig as defaultStorageConfig } from '../flows/storage/initStorage'
-import initDistribution, { singleBucketConfig as defaultDistributionConfig } from '../flows/storage/initDistribution'
 import { scenario } from '../Scenario'
+import { populateVideoCategories } from '../flows/content/videoCategories'
 import electCouncil from '../flows/council/elect'
+import initFaucet from '../flows/faucet/initFaucet'
+import { singleDistributionBucketConfig, singleStorageBucketConfig } from '../flows/storage/config'
+import initDistribution from '../flows/storage/initDistribution'
+import initStorage from '../flows/storage/initStorage'
+import leaderSetup from '../flows/working-groups/leadOpening'
 
 // eslint-disable-next-line @typescript-eslint/no-floating-promises
 scenario('Setup new chain', async ({ job }) => {
@@ -14,6 +15,6 @@ scenario('Setup new chain', async ({ job }) => {
   const leads = job('Set WorkingGroup Leads', leaderSetup(true)).requires(councilJob)
   job('Create video categories', populateVideoCategories).after(leads)
 
-  job('initialize storage system', initStorage(defaultStorageConfig)).requires(leads)
-  job('initialize distribution system', initDistribution(defaultDistributionConfig)).requires(leads)
+  job('initialize storage system', initStorage(singleStorageBucketConfig)).requires(leads)
+  job('initialize distribution system', initDistribution(singleDistributionBucketConfig)).requires(leads)
 })
diff --git a/tests/network-tests/src/scenarios/setupNewChainMultiStorage.ts b/tests/network-tests/src/scenarios/setupNewChainMultiStorage.ts
index db7775501c..c65c56874d 100644
--- a/tests/network-tests/src/scenarios/setupNewChainMultiStorage.ts
+++ b/tests/network-tests/src/scenarios/setupNewChainMultiStorage.ts
@@ -1,10 +1,11 @@
-import leaderSetup from '../flows/working-groups/leadOpening'
-import initFaucet from '../flows/faucet/initFaucet'
-import { populateVideoCategories } from '../flows/content/videoCategories'
-import initStorage, { doubleBucketConfig as doubleStorageConfig } from '../flows/storage/initStorage'
-import initDistribution, { doubleBucketConfig as doubleDistributionConfig } from '../flows/storage/initDistribution'
 import { scenario } from '../Scenario'
+import { populateVideoCategories } from '../flows/content/videoCategories'
 import electCouncil from '../flows/council/elect'
+import initFaucet from '../flows/faucet/initFaucet'
+import { doubleDistributionBucketConfig, doubleStorageBucketConfig } from '../flows/storage/config'
+import initDistribution from '../flows/storage/initDistribution'
+import initStorage from '../flows/storage/initStorage'
+import leaderSetup from '../flows/working-groups/leadOpening'
 
 // eslint-disable-next-line @typescript-eslint/no-floating-promises
 scenario('Setup new chain', async ({ job }) => {
@@ -15,6 +16,6 @@ scenario('Setup new chain', async ({ job }) => {
   const leads = job('Set WorkingGroup Leads', leaderSetup(true)).requires(councilJob)
   job('Create video categories', populateVideoCategories).after(leads)
 
-  job('initialize storage system', initStorage(doubleStorageConfig)).requires(leads)
-  job('initialize distribution system', initDistribution(doubleDistributionConfig)).requires(leads)
+  job('initialize storage system', initStorage(doubleStorageBucketConfig)).requires(leads)
+  job('initialize distribution system', initDistribution(doubleDistributionBucketConfig)).requires(leads)
 })
diff --git a/utils/api-scripts/src/status.ts b/utils/api-scripts/src/status.ts
index 3435bd9897..beb9cc126c 100644
--- a/utils/api-scripts/src/status.ts
+++ b/utils/api-scripts/src/status.ts
@@ -1,6 +1,6 @@
+import '@joystream/types'
 import { ApiPromise, WsProvider } from '@polkadot/api'
 import BN from 'bn.js'
-import '@joystream/types'
 
 async function main() {
   const endpoint = process.env.WS_URI || 'ws://127.0.0.1:9944'
@@ -8,7 +8,7 @@ async function main() {
 
   // Create the API and wait until ready
   let api: ApiPromise
-  let retry = 6
+  let retry = 15
   while (true) {
     try {
       api = new ApiPromise({ provider })
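The status.ts change above raises the connection retry budget from 6 to 15 attempts, giving a slow node considerably longer to come up before the script gives up. A standalone sketch of the same connect-and-retry loop follows; the 15-attempt budget comes from the diff, while the 5-second delay and the isReadyOrError handling are assumptions:

// Standalone sketch of the connect-with-retry loop from status.ts.
import { ApiPromise, WsProvider } from '@polkadot/api'

async function connectWithRetries(endpoint: string): Promise<ApiPromise> {
  const provider = new WsProvider(endpoint)
  let retry = 15 // matches the new budget in the diff
  while (true) {
    try {
      const api = new ApiPromise({ provider })
      await api.isReadyOrError // rejects if the connection fails
      return api
    } catch (err) {
      if (--retry === 0) throw err
      // Assumed back-off between attempts; not taken from the diff.
      await new Promise((resolve) => setTimeout(resolve, 5000))
    }
  }
}

connectWithRetries(process.env.WS_URI || 'ws://127.0.0.1:9944')
  .then((api) => api.disconnect())
  .catch(console.error)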
From a32208bf7e0520f457153252b9b92f2ceccc1dea Mon Sep 17 00:00:00 2001
From: Zeeshan Akram <97m.zeeshan@gmail.com>
Date: Tue, 22 Aug 2023 13:38:43 +0500
Subject: [PATCH 7/7] fix fixture import error

---
 tests/network-tests/src/fixtures/storage/index.ts    | 2 ++
 tests/network-tests/src/flows/storage/initStorage.ts | 9 ++++-----
 2 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/tests/network-tests/src/fixtures/storage/index.ts b/tests/network-tests/src/fixtures/storage/index.ts
index 32a3902ee2..fd88c2f2b0 100644
--- a/tests/network-tests/src/fixtures/storage/index.ts
+++ b/tests/network-tests/src/fixtures/storage/index.ts
@@ -1,4 +1,6 @@
+export { AcceptStorageBucketInvitationFixture } from './AcceptStorageBucketInvitationFixture'
 export { CreateStorageBucketFixture } from './CreateStorageBucketFixture'
+export { SetStorageOperatorMetadataFixture } from './SetStorageOperatorMetadataFixture'
 export { UpdateDynamicBagCreationPolicyFixture } from './UpdateDynamicBagCreationPolicyFixture'
 export { UpdateStorageBucketsPerBagLimitFixture } from './UpdateStorageBucketsPerBagLimitFixture'
 export { UpdateStorageBucketsVoucherLimitsFixture } from './UpdateStorageBucketsVoucherLimitsFixture'
diff --git a/tests/network-tests/src/flows/storage/initStorage.ts b/tests/network-tests/src/flows/storage/initStorage.ts
index 43daec92fc..aea04c5048 100644
--- a/tests/network-tests/src/flows/storage/initStorage.ts
+++ b/tests/network-tests/src/flows/storage/initStorage.ts
@@ -9,13 +9,13 @@ import { extendDebug } from '../../Debugger'
 import { FixtureRunner } from '../../Fixture'
 import { FlowProps } from '../../Flow'
 import {
+  AcceptStorageBucketInvitationFixture,
   CreateStorageBucketFixture,
+  SetStorageOperatorMetadataFixture,
   UpdateDynamicBagCreationPolicyFixture,
+  UpdateStorageBucketsPerBagLimitFixture,
   UpdateStorageBucketsVoucherLimitsFixture,
 } from '../../fixtures/storage'
-import { AcceptStorageBucketInvitationFixture } from '../../fixtures/storage/AcceptStorageBucketInvitationFixture'
-import { SetStorageOperatorMetadataFixture } from '../../fixtures/storage/SetStorageOperatorMetadataFixture'
-import { UpdateBucketsPerBagLimitFixture } from '../../fixtures/storage/UpdateBucketsPerBagLimit'
 import { HireWorkersFixture } from '../../fixtures/workingGroups/HireWorkersFixture'
 
 type StorageBucketConfig = {
@@ -59,8 +59,7 @@ export default function initStorage({ buckets, dynamicBagPolicy }: InitStorageCo
   })
   await new FixtureRunner(updateStorageBucketsVoucherFixture).run()
 
-  const updateStorageBucketsPerBagLimitFixture = new UpdateBucketsPerBagLimitFixture(
-    'storage',
+  const updateStorageBucketsPerBagLimitFixture = new UpdateStorageBucketsPerBagLimitFixture(
     api,
     query,
     Math.max(5, buckets.length)