
Integration tests: initStorage and initDistribution flows

Leszek Wiesner 3 years ago
parent commit 148795884f

start.sh (+4 -9)

@@ -27,20 +27,15 @@ docker-compose up -d joystream-node
 ## Init the chain with some state
 export SKIP_MOCK_CONTENT=true
 export SKIP_QUERY_NODE_CHECKS=true
-# TODO: Move back to this approach once Giza<->Olympia integration tests merged
-# HOST_IP=$(tests/network-tests/get-host-ip.sh)
-# export COLOSSUS_1_URL="http://${HOST_IP}:3333"
-# export COLOSSUS_1_TRANSACTOR_KEY=$(docker run --rm --pull=always docker.io/parity/subkey:2.0.1 inspect ${COLOSSUS_1_TRANSACTOR_URI} --output-type json | jq .ss58Address -r)
-# export DISTRIBUTOR_1_URL="http://${HOST_IP}:3334"
+HOST_IP=$(tests/network-tests/get-host-ip.sh)
+export COLOSSUS_1_URL="http://${HOST_IP}:3333"
+export COLOSSUS_1_TRANSACTOR_KEY=$(docker run --rm --pull=always docker.io/parity/subkey:2.0.1 inspect ${COLOSSUS_1_TRANSACTOR_URI} --output-type json | jq .ss58Address -r)
+export DISTRIBUTOR_1_URL="http://${HOST_IP}:3334"
 ./tests/integration-tests/run-test-scenario.sh ${INIT_CHAIN_SCENARIO}
 
 ## Member faucet
 docker-compose up -d faucet
 
-## Storage Infrastructure Configuration
-# TODO: Move back to INIT_CHAIN_SCENARIO approach once Giza<->Olympia integration tests merged
-./storage-playground-config.sh
-
 ## Query Node Infrastructure
 ./query-node/start.sh
 

tests/integration-tests/src/Api.ts (+76 -7)

@@ -1,12 +1,18 @@
 import { ApiPromise, WsProvider, Keyring } from '@polkadot/api'
 import { u32, BTreeMap } from '@polkadot/types'
-import { ISubmittableResult } from '@polkadot/types/types'
+import { IEvent, ISubmittableResult } from '@polkadot/types/types'
 import { KeyringPair } from '@polkadot/keyring/types'
 import { AccountId, MemberId, PostId, ThreadId } from '@joystream/types/common'
 
 import { AccountInfo, Balance, EventRecord, BlockNumber, BlockHash, LockIdentifier } from '@polkadot/types/interfaces'
 import BN from 'bn.js'
-import { QueryableConsts, QueryableStorage, SubmittableExtrinsic, SubmittableExtrinsics } from '@polkadot/api/types'
+import {
+  AugmentedEvent,
+  QueryableConsts,
+  QueryableStorage,
+  SubmittableExtrinsic,
+  SubmittableExtrinsics,
+} from '@polkadot/api/types'
 import { Sender, LogLevel } from './sender'
 import { Utils } from './utils'
 import { types } from '@joystream/types'
@@ -59,6 +65,13 @@ export type KeyGenInfo = {
   custom: string[]
 }
 
+type EventSection = keyof ApiPromise['events'] & string
+type EventMethod<Section extends EventSection> = keyof ApiPromise['events'][Section] & string
+type EventType<
+  Section extends EventSection,
+  Method extends EventMethod<Section>
+> = ApiPromise['events'][Section][Method] extends AugmentedEvent<'promise', infer T> ? IEvent<T> : never
+
 export class ApiFactory {
   private readonly api: ApiPromise
   private readonly keyring: Keyring
@@ -364,6 +377,54 @@ export class Api {
     return this.api.consts.balances.existentialDeposit
   }
 
+  public findEvent<S extends EventSection, M extends EventMethod<S>>(
+    result: ISubmittableResult | EventRecord[],
+    section: S,
+    method: M
+  ): EventType<S, M> | undefined {
+    if (Array.isArray(result)) {
+      return result.find(({ event }) => event.section === section && event.method === method)?.event as
+        | EventType<S, M>
+        | undefined
+    }
+    return result.findRecord(section, method)?.event as EventType<S, M> | undefined
+  }
+
+  public getEvent<S extends EventSection, M extends EventMethod<S>>(
+    result: ISubmittableResult | EventRecord[],
+    section: S,
+    method: M
+  ): EventType<S, M> {
+    const event = this.findEvent(result, section, method)
+    if (!event) {
+      throw new Error(
+        `Cannot find expected ${section}.${method} event in result: ${JSON.stringify(
+          Array.isArray(result) ? result.map((e) => e.toHuman()) : result.toHuman()
+        )}`
+      )
+    }
+    return event
+  }
+
+  public findEvents<S extends EventSection, M extends EventMethod<S>>(
+    result: ISubmittableResult | EventRecord[],
+    section: S,
+    method: M,
+    expectedCount?: number
+  ): EventType<S, M>[] {
+    const events = Array.isArray(result)
+      ? result.filter(({ event }) => event.section === section && event.method === method).map(({ event }) => event)
+      : result.filterRecords(section, method).map((r) => r.event)
+    if (expectedCount !== undefined && events.length !== expectedCount) {
+      throw new Error(
+        `Unexpected count of ${section}.${method} events in result: ${JSON.stringify(
+          Array.isArray(result) ? result.map((e) => e.toHuman()) : result.toHuman()
+        )}. ` + `Expected: ${expectedCount}, Got: ${events.length}`
+      )
+    }
+    return (events.sort((a, b) => new BN(a.index).cmp(new BN(b.index))) as unknown) as EventType<S, M>[]
+  }
+
  // TODO: Augmentations coming with new @polkadot/typegen!
 
   public findEventRecord(events: EventRecord[], section: string, method: string): EventRecord | undefined {
@@ -497,12 +558,12 @@ export class Api {
     return opening
   }
 
-  public async getLeader(group: WorkingGroupModuleName): Promise<Worker> {
+  public async getLeader(group: WorkingGroupModuleName): Promise<[WorkerId, Worker]> {
     const leadId = await this.api.query[group].currentLead()
     if (leadId.isNone) {
-      throw new Error('Cannot get lead role key: Lead not yet hired!')
+      throw new Error(`Cannot get ${group} lead: Lead not hired!`)
     }
-    return await this.api.query[group].workerById(leadId.unwrap())
+    return [leadId.unwrap(), await this.api.query[group].workerById(leadId.unwrap())]
   }
 
   public async getActiveWorkerIds(group: WorkingGroupModuleName): Promise<WorkerId[]> {
@@ -515,6 +576,14 @@ export class Api {
     )
   }
 
+  public async getWorkerRoleAccounts(workerIds: WorkerId[], module: WorkingGroupModuleName): Promise<string[]> {
+    const workers = await this.api.query[module].workerById.multi<Worker>(workerIds)
+
+    return workers.map((worker) => {
+      return worker.role_account_id.toString()
+    })
+  }
+
   async assignWorkerRoleAccount(
     group: WorkingGroupModuleName,
     workerId: WorkerId,
@@ -554,11 +623,11 @@ export class Api {
   }
 
   public async getLeadRoleKey(group: WorkingGroupModuleName): Promise<string> {
-    return (await this.getLeader(group)).role_account_id.toString()
+    return (await this.getLeader(group))[1].role_account_id.toString()
   }
 
   public async getLeaderStakingKey(group: WorkingGroupModuleName): Promise<string> {
-    return (await this.getLeader(group)).staking_account_id.toString()
+    return (await this.getLeader(group))[1].staking_account_id.toString()
   }
 
   public async retrieveProposalsEngineEventDetails(

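The new `findEvent` / `getEvent` / `findEvents` helpers infer the event payload type from the `(section, method)` pair, so callers get a typed `data` tuple instead of casting `Codec[]` by hand. A minimal usage sketch (the function name and the origin of `result` are hypothetical; the `storage.StorageBucketCreated` event is the one used by the flows below):

```ts
import { ISubmittableResult } from '@polkadot/types/types'
import { Api } from './Api'

// `result` would come from a previously dispatched storage extrinsic.
function inspectBucketCreation(api: Api, result: ISubmittableResult): void {
  // getEvent throws if the event is missing; `data` is a typed tuple:
  const bucketId = api.getEvent(result, 'storage', 'StorageBucketCreated').data[0]
  console.log(`Created bucket: ${bucketId.toString()}`)

  // findEvent returns undefined instead of throwing:
  if (!api.findEvent(result, 'storage', 'StorageBucketCreated')) {
    console.log('No bucket was created in this result')
  }

  // findEvents can additionally assert an exact event count:
  api.findEvents(result, 'storage', 'StorageBucketCreated', 1)
}
```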
tests/integration-tests/src/flows/storage/initDistribution.ts (+222 -0)

@@ -0,0 +1,222 @@
+import { FlowProps } from '../../Flow'
+import { extendDebug } from '../../Debugger'
+import {
+  DistributionBucketFamilyMetadata,
+  DistributionBucketOperatorMetadata,
+  IDistributionBucketFamilyMetadata,
+  IDistributionBucketOperatorMetadata,
+} from '@joystream/metadata-protobuf'
+import { CreateInterface, createType } from '@joystream/types'
+import { BagId, DistributionBucketFamilyId, DynamicBagId, StaticBagId } from '@joystream/types/storage'
+import { SubmittableExtrinsic } from '@polkadot/api/types'
+import _ from 'lodash'
+import { Utils } from '../../utils'
+import { WorkerId } from '@joystream/types/working-group'
+
+type DistributionBucketConfig = {
+  metadata: IDistributionBucketOperatorMetadata
+  staticBags?: CreateInterface<StaticBagId>[]
+  operatorId: number
+}
+
+type DistributionFamilyConfig = {
+  metadata?: IDistributionBucketFamilyMetadata
+  buckets: DistributionBucketConfig[]
+  dynamicBagPolicy: {
+    [K in keyof typeof DynamicBagId.typeDefinitions]?: number
+  }
+}
+
+type InitDistributionConfig = {
+  families: DistributionFamilyConfig[]
+}
+
+export const allStaticBags: CreateInterface<StaticBagId>[] = [
+  'Council',
+  { WorkingGroup: 'Content' },
+  { WorkingGroup: 'Distribution' },
+  { WorkingGroup: 'Gateway' },
+  { WorkingGroup: 'OperationsAlpha' },
+  { WorkingGroup: 'OperationsBeta' },
+  { WorkingGroup: 'OperationsGamma' },
+  { WorkingGroup: 'Storage' },
+]
+
+export const singleBucketConfig: InitDistributionConfig = {
+  families: [
+    {
+      metadata: { region: 'All' },
+      dynamicBagPolicy: {
+        'Channel': 1,
+        'Member': 1,
+      },
+      buckets: [
+        {
+          metadata: { endpoint: process.env.DISTRIBUTOR_1_URL || 'http://localhost:3334' },
+          staticBags: allStaticBags,
+          operatorId: parseInt(process.env.DISTRIBUTOR_1_WORKER_ID || '0'),
+        },
+      ],
+    },
+  ],
+}
+
+export const doubleBucketConfig: InitDistributionConfig = {
+  families: [
+    {
+      metadata: { region: 'Region 1' },
+      dynamicBagPolicy: {
+        'Channel': 1,
+        'Member': 1,
+      },
+      buckets: [
+        {
+          metadata: { endpoint: process.env.DISTRIBUTOR_1_URL || 'http://localhost:3334' },
+          staticBags: allStaticBags,
+          operatorId: parseInt(process.env.DISTRIBUTOR_1_WORKER_ID || '0'),
+        },
+      ],
+    },
+    {
+      metadata: { region: 'Region 2' },
+      dynamicBagPolicy: {
+        'Channel': 1,
+        'Member': 1,
+      },
+      buckets: [
+        {
+          metadata: { endpoint: process.env.DISTRIBUTOR_2_URL || 'http://localhost:3336' },
+          staticBags: allStaticBags,
+          operatorId: parseInt(process.env.DISTRIBUTOR_2_WORKER_ID || '1'),
+        },
+      ],
+    },
+  ],
+}
+
+export default function createFlow({ families }: InitDistributionConfig) {
+  return async function initDistribution({ api }: FlowProps): Promise<void> {
+    const debug = extendDebug('flow:initDistribution')
+    debug('Started')
+
+    // Get working group leaders
+    const [, distributionLeader] = await api.getLeader('distributionWorkingGroup')
+
+    const distributionLeaderKey = distributionLeader.role_account_id.toString()
+    const totalBucketsNum = families.reduce((a, b) => a + b.buckets.length, 0)
+
+    // Hire operators
+    // const hireWorkersFixture = new HireWorkersFixture(api, totalBucketsNum, WorkingGroups.Distribution)
+    // await new FixtureRunner(hireWorkersFixture).run()
+    // const operatorIds = hireWorkersFixture.getHiredWorkers()
+
+    const operatorIds = families.reduce(
+      (ids, { buckets }) => ids.concat(buckets.map((b) => createType('WorkerId', b.operatorId))),
+      [] as WorkerId[]
+    )
+    const operatorKeys = await api.getWorkerRoleAccounts(operatorIds, 'distributionWorkingGroup')
+
+    // Create families, set buckets per bag limit
+    const createFamilyTxs = families.map(() => api.tx.storage.createDistributionBucketFamily())
+    const setBucketsPerBagLimitTx = api.tx.storage.updateDistributionBucketsPerBagLimit(totalBucketsNum)
+    const [createFamilyResults] = await Promise.all([
+      api.sendExtrinsicsAndGetResults(createFamilyTxs, distributionLeaderKey),
+      api.sendExtrinsicsAndGetResults([setBucketsPerBagLimitTx], distributionLeaderKey),
+    ])
+    const familyIds = createFamilyResults
+      .map((r) => api.getEvent(r, 'storage', 'DistributionBucketFamilyCreated').data[0])
+      .sort((a, b) => a.cmp(b))
+    const familyById = new Map<number, DistributionFamilyConfig>()
+    familyIds.forEach((id, i) => familyById.set(id.toNumber(), families[i]))
+
+    // Create buckets, update families metadata, set dynamic bag policies
+    const createBucketTxs = families.reduce(
+      (txs, { buckets }, familyIndex) =>
+        txs.concat(buckets.map(() => api.tx.storage.createDistributionBucket(familyIds[familyIndex], true))),
+      [] as SubmittableExtrinsic<'promise'>[]
+    )
+    const updateFamilyMetadataTxs = familyIds.map((id, i) => {
+      const metadataBytes = Utils.metadataToBytes(DistributionBucketFamilyMetadata, families[i].metadata)
+      return api.tx.storage.setDistributionBucketFamilyMetadata(id, metadataBytes)
+    })
+    const dynamicBagPolicies = new Map<string, [DistributionBucketFamilyId, number][]>()
+    familyIds.forEach((familyId, index) => {
+      const family = families[index]
+      Object.entries(family.dynamicBagPolicy).forEach(([bagType, bucketsN]) => {
+        const current = dynamicBagPolicies.get(bagType) || []
+        dynamicBagPolicies.set(bagType, [...current, [familyId, bucketsN]])
+      })
+    })
+    const updateDynamicBagPolicyTxs = _.entries(dynamicBagPolicies).map(([bagType, policyEntries]) =>
+      api.tx.storage.updateFamiliesInDynamicBagCreationPolicy(
+        bagType as keyof typeof DynamicBagId.typeDefinitions,
+        createType('BTreeMap<DistributionBucketFamilyId, u32>', new Map(policyEntries))
+      )
+    )
+    const [createBucketResults] = await Promise.all([
+      api.sendExtrinsicsAndGetResults(createBucketTxs, distributionLeaderKey),
+      api.sendExtrinsicsAndGetResults(updateFamilyMetadataTxs, distributionLeaderKey),
+      api.sendExtrinsicsAndGetResults(updateDynamicBagPolicyTxs, distributionLeaderKey),
+    ])
+    const bucketIds = createBucketResults
+      .map((r) => {
+        const [, , bucketId] = api.getEvent(r, 'storage', 'DistributionBucketCreated').data
+        return bucketId
+      })
+      .sort(
+        (a, b) =>
+          a.distribution_bucket_family_id.cmp(b.distribution_bucket_family_id) ||
+          a.distribution_bucket_index.cmp(b.distribution_bucket_index)
+      )
+    const bucketById = new Map<string, DistributionBucketConfig>()
+    bucketIds.forEach((bucketId) => {
+      const familyId = bucketId.distribution_bucket_family_id.toNumber()
+      const bucketIndex = bucketId.distribution_bucket_index.toNumber()
+      const family = familyById.get(familyId)
+      if (!family) {
+        throw new Error(`Family config not found for familyId: ${familyId}`)
+      }
+      bucketById.set(bucketId.toString(), family.buckets[bucketIndex])
+    })
+
+    // Invite bucket operators
+    const bucketInviteTxs = bucketIds.map((bucketId, i) =>
+      api.tx.storage.inviteDistributionBucketOperator(bucketId, operatorIds[i])
+    )
+    await api.sendExtrinsicsAndGetResults(bucketInviteTxs, distributionLeaderKey)
+
+    // Accept invitations
+    const acceptInvitationTxs = bucketIds.map((bucketId, i) =>
+      api.tx.storage.acceptDistributionBucketInvitation(operatorIds[i], bucketId)
+    )
+    await api.sendExtrinsicsAndGetResults(acceptInvitationTxs, operatorKeys)
+
+    // Bucket metadata and static bags
+    const bucketSetupPromises = _.flatten(
+      bucketIds.map((bucketId, i) => {
+        const operatorId = operatorIds[i]
+        const operatorKey = operatorKeys[i]
+        const bucketConfig = bucketById.get(bucketId.toString())
+        if (!bucketConfig) {
+          throw new Error('Bucket config not found')
+        }
+        const metadataBytes = Utils.metadataToBytes(DistributionBucketOperatorMetadata, bucketConfig.metadata)
+        const setMetaTx = api.tx.storage.setDistributionOperatorMetadata(operatorId, bucketId, metadataBytes)
+        const setMetaPromise = api.sendExtrinsicsAndGetResults([setMetaTx], operatorKey)
+        const updateBagTxs = (bucketConfig.staticBags || []).map((sBagId) => {
+          return api.tx.storage.updateDistributionBucketsForBag(
+            createType<BagId, 'BagId'>('BagId', { Static: sBagId }),
+            bucketId.distribution_bucket_family_id,
+            createType('BTreeSet<DistributionBucketIndex>', [bucketId.distribution_bucket_index]),
+            createType('BTreeSet<DistributionBucketIndex>', [])
+          )
+        })
+        const updateBagsPromise = api.sendExtrinsicsAndGetResults(updateBagTxs, distributionLeaderKey)
+        return [updateBagsPromise, setMetaPromise]
+      })
+    )
+    await Promise.all(bucketSetupPromises)
+
+    debug('Done')
+  }
+}

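Because `createFlow` is a plain factory over `InitDistributionConfig`, scenarios can supply their own topology. A sketch of a hypothetical custom config (region names, endpoints and worker ids are illustrative only):

```ts
import initDistribution, { allStaticBags } from './flows/storage/initDistribution'

// One family with two buckets in a single region, both serving all static bags:
const customDistributionFlow = initDistribution({
  families: [
    {
      metadata: { region: 'EU' },
      dynamicBagPolicy: { 'Channel': 2, 'Member': 1 },
      buckets: [
        { metadata: { endpoint: 'http://localhost:3334' }, staticBags: allStaticBags, operatorId: 0 },
        { metadata: { endpoint: 'http://localhost:3336' }, staticBags: allStaticBags, operatorId: 1 },
      ],
    },
  ],
})
// The returned flow can then be registered with `job(...)` in any scenario.
```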
tests/integration-tests/src/flows/storage/initStorage.ts (+154 -0)

@@ -0,0 +1,154 @@
+import { FlowProps } from '../../Flow'
+import { extendDebug } from '../../Debugger'
+import { IStorageBucketOperatorMetadata, StorageBucketOperatorMetadata } from '@joystream/metadata-protobuf'
+import { CreateInterface, createType } from '@joystream/types'
+import { BagId, DynamicBagId, StaticBagId } from '@joystream/types/storage'
+import _ from 'lodash'
+import { Utils } from '../../utils'
+import BN from 'bn.js'
+
+type StorageBucketConfig = {
+  metadata: IStorageBucketOperatorMetadata
+  staticBags?: CreateInterface<StaticBagId>[]
+  storageLimit: BN
+  objectsLimit: number
+  operatorId: number
+  transactorKey: string
+}
+
+type InitStorageConfig = {
+  buckets: StorageBucketConfig[]
+  dynamicBagPolicy: {
+    [K in keyof typeof DynamicBagId.typeDefinitions]?: number
+  }
+}
+
+export const allStaticBags: CreateInterface<StaticBagId>[] = [
+  'Council',
+  { WorkingGroup: 'Content' },
+  { WorkingGroup: 'Distribution' },
+  { WorkingGroup: 'Gateway' },
+  { WorkingGroup: 'OperationsAlpha' },
+  { WorkingGroup: 'OperationsBeta' },
+  { WorkingGroup: 'OperationsGamma' },
+  { WorkingGroup: 'Storage' },
+]
+
+export const singleBucketConfig: InitStorageConfig = {
+  dynamicBagPolicy: {
+    'Channel': 1,
+    'Member': 1,
+  },
+  buckets: [
+    {
+      metadata: { endpoint: process.env.COLOSSUS_1_URL || 'http://localhost:3333' },
+      staticBags: allStaticBags,
+      operatorId: parseInt(process.env.COLOSSUS_1_WORKER_ID || '0'),
+      storageLimit: new BN(1_000_000_000_000),
+      objectsLimit: 1_000_000_000,
+      transactorKey: process.env.COLOSSUS_1_TRANSACTOR_KEY || '5DkE5YD8m5Yzno6EH2RTBnH268TDnnibZMEMjxwYemU4XevU', // //Colossus1
+    },
+  ],
+}
+
+export const doubleBucketConfig: InitStorageConfig = {
+  dynamicBagPolicy: {
+    'Channel': 2,
+    'Member': 2,
+  },
+  buckets: [
+    {
+      metadata: { endpoint: process.env.COLOSSUS_1_URL || 'http://localhost:3333' },
+      staticBags: allStaticBags,
+      operatorId: parseInt(process.env.COLOSSUS_1_WORKER_ID || '0'),
+      storageLimit: new BN(1_000_000_000_000),
+      objectsLimit: 1_000_000_000,
+      transactorKey: process.env.COLOSSUS_1_TRANSACTOR_KEY || '5DkE5YD8m5Yzno6EH2RTBnH268TDnnibZMEMjxwYemU4XevU', // //Colossus1
+    },
+    {
+      metadata: { endpoint: process.env.STORAGE_2_URL || 'http://localhost:3335' },
+      staticBags: allStaticBags,
+      operatorId: parseInt(process.env.STORAGE_2_WORKER_ID || '1'),
+      storageLimit: new BN(1_000_000_000_000),
+      objectsLimit: 1_000_000_000,
+      transactorKey: process.env.COLOSSUS_2_TRANSACTOR_KEY || '5FbzYmQ3HogiEEDSXPYJe58yCcmSh3vsZLodTdBB6YuLDAj7', // //Colossus2
+    },
+  ],
+}
+
+export default function createFlow({ buckets, dynamicBagPolicy }: InitStorageConfig) {
+  return async function initStorage({ api }: FlowProps): Promise<void> {
+    const debug = extendDebug('flow:initStorage')
+    debug('Started')
+
+    // Get working group leaders
+    const [, storageLeader] = await api.getLeader('storageWorkingGroup')
+
+    const storageLeaderKey = storageLeader.role_account_id.toString()
+    const maxStorageLimit = buckets.reduce((max, b) => BN.max(max, b.storageLimit), new BN(0))
+    const maxObjectsLimit = Math.max(...buckets.map((b) => b.objectsLimit))
+
+    // Hire operators
+    // const hireWorkersFixture = new HireWorkersFixture(api, buckets.length, WorkingGroups.Storage)
+    // await new FixtureRunner(hireWorkersFixture).run()
+    // const operatorIds = hireWorkersFixture.getHiredWorkers()
+
+    const operatorIds = buckets.map((b) => createType('WorkerId', b.operatorId))
+    const operatorKeys = await api.getWorkerRoleAccounts(operatorIds, 'storageWorkingGroup')
+
+    // Set global limits and policies
+    const updateDynamicBagPolicyTxs = _.entries(dynamicBagPolicy).map(([bagType, numberOfBuckets]) =>
+      api.tx.storage.updateNumberOfStorageBucketsInDynamicBagCreationPolicy(
+        bagType as keyof typeof DynamicBagId.typeDefinitions,
+        numberOfBuckets
+      )
+    )
+    const setMaxVoucherLimitsTx = api.tx.storage.updateStorageBucketsVoucherMaxLimits(maxStorageLimit, maxObjectsLimit)
+    const setBucketPerBagLimitTx = api.tx.storage.updateStorageBucketsPerBagLimit(Math.max(5, buckets.length))
+
+    await api.sendExtrinsicsAndGetResults(
+      [...updateDynamicBagPolicyTxs, setMaxVoucherLimitsTx, setBucketPerBagLimitTx],
+      storageLeaderKey
+    )
+
+    // Create buckets
+    const createBucketTxs = buckets.map((b, i) =>
+      api.tx.storage.createStorageBucket(operatorIds[i], true, b.storageLimit, b.objectsLimit)
+    )
+    const createBucketResults = await api.sendExtrinsicsAndGetResults(createBucketTxs, storageLeaderKey)
+    const bucketById = new Map<number, StorageBucketConfig>()
+    createBucketResults.forEach((res, i) => {
+      const bucketId = api.getEvent(res, 'storage', 'StorageBucketCreated').data[0]
+      bucketById.set(bucketId.toNumber(), buckets[i])
+    })
+
+    // Accept invitations
+    const acceptInvitationTxs = Array.from(bucketById.entries()).map(([bucketId, bucketConfig], i) =>
+      api.tx.storage.acceptStorageBucketInvitation(operatorIds[i], bucketId, bucketConfig.transactorKey)
+    )
+    await api.sendExtrinsicsAndGetResults(acceptInvitationTxs, operatorKeys)
+
+    // Bucket metadata and static bags
+    const bucketSetupPromises = _.flatten(
+      Array.from(bucketById.entries()).map(([bucketId, bucketConfig], i) => {
+        const operatorId = operatorIds[i]
+        const operatorKey = operatorKeys[i]
+        const metadataBytes = Utils.metadataToBytes(StorageBucketOperatorMetadata, bucketConfig.metadata)
+        const setMetaTx = api.tx.storage.setStorageOperatorMetadata(operatorId, bucketId, metadataBytes)
+        const setMetaPromise = api.sendExtrinsicsAndGetResults([setMetaTx], operatorKey)
+        const updateBagTxs = (bucketConfig.staticBags || []).map((sBagId) => {
+          return api.tx.storage.updateStorageBucketsForBag(
+            createType<BagId, 'BagId'>('BagId', { Static: sBagId }),
+            createType('BTreeSet<StorageBucketId>', [bucketId]),
+            createType('BTreeSet<StorageBucketId>', [])
+          )
+        })
+        const updateBagsPromise = api.sendExtrinsicsAndGetResults(updateBagTxs, storageLeaderKey)
+        return [updateBagsPromise, setMetaPromise]
+      })
+    )
+    await Promise.all(bucketSetupPromises)
+
+    debug('Done')
+  }
+}

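`initStorage` follows the same factory pattern; a hypothetical single-bucket config with a tighter voucher (endpoint, worker id and key values are illustrative only):

```ts
import BN from 'bn.js'
import initStorage, { allStaticBags } from './flows/storage/initStorage'

const customStorageFlow = initStorage({
  dynamicBagPolicy: { 'Channel': 1, 'Member': 1 },
  buckets: [
    {
      metadata: { endpoint: 'http://localhost:3333' },
      staticBags: allStaticBags,
      operatorId: 0,
      storageLimit: new BN(100_000_000_000), // ~100 GB voucher
      objectsLimit: 10_000,
      transactorKey: '5DkE5YD8m5Yzno6EH2RTBnH268TDnnibZMEMjxwYemU4XevU', // //Colossus1
    },
  ],
})
```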
tests/integration-tests/src/scenarios/setupNewChain.ts (+8 -1)

@@ -1,12 +1,19 @@
 import electCouncil from '../flows/council/elect'
 import leaderSetup from '../flows/working-groups/leadOpening'
 import updateAccountsFlow from '../misc/updateAllWorkerRoleAccountsFlow'
+import initStorage, { singleBucketConfig as defaultStorageConfig } from '../flows/storage/initStorage'
+import initDistribution, { singleBucketConfig as defaultDistributionConfig } from '../flows/storage/initDistribution'
 import { scenario } from '../Scenario'
 
 scenario(async ({ job }) => {
   job('Elect Council', electCouncil)
   const leads = job('Set WorkingGroup Leads', leaderSetup)
-  job('Update worker accounts', updateAccountsFlow).after(leads)
+  const updateWorkerAccounts = job('Update worker accounts', updateAccountsFlow).after(leads)
+
+  if (!process.env.SKIP_STORAGE_AND_DISTRIBUTION) {
+    job('Initialize storage system', initStorage(defaultStorageConfig)).requires(updateWorkerAccounts)
+    job('Initialize distribution system', initDistribution(defaultDistributionConfig)).requires(updateWorkerAccounts)
+  }
 
   // TODO: Mock content
   // assign members known accounts?
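
Scenarios that need the richer topologies can wire the exported `doubleBucketConfig`s the same way; a hypothetical example (assumes council, leads and worker role accounts are already set up):

```ts
import initStorage, { doubleBucketConfig as storageConfig } from '../flows/storage/initStorage'
import initDistribution, { doubleBucketConfig as distributionConfig } from '../flows/storage/initDistribution'
import { scenario } from '../Scenario'

scenario(async ({ job }) => {
  const storage = job('Initialize storage system', initStorage(storageConfig))
  job('Initialize distribution system', initDistribution(distributionConfig)).after(storage)
})
```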