
Merge branch 'giza_staging' into query_node_active_video_counters_giza_staging

ondratra · 3 years ago
commit fdc47f4d1d
80 changed files with 11284 additions and 2193 deletions
  1. + 1 - 1  .github/workflows/run-network-tests.yml
  2. + 0 - 0  chain-metadata.json
  3. + 1 - 1  cli/package.json
  4. + 2 - 2  package.json
  5. + 36 - 0  reproduce-giza-issue.sh
  6. + 19 - 4  runtime-modules/content/src/lib.rs
  7. + 4 - 1  runtime-modules/content/src/permissions/mod.rs
  8. + 1356 - 659  runtime-modules/content/src/tests/channels.rs
  9. + 27 - 20  runtime-modules/content/src/tests/curators.rs
  10. + 920 - 0  runtime-modules/content/src/tests/fixtures.rs
  11. + 21 - 74  runtime-modules/content/src/tests/migration.rs
  12. + 96 - 324  runtime-modules/content/src/tests/mock.rs
  13. + 1 - 0  runtime-modules/content/src/tests/mod.rs
  14. + 985 - 502  runtime-modules/content/src/tests/videos.rs
  15. + 18 - 9  runtime-modules/storage/src/lib.rs
  16. + 128 - 15  runtime-modules/storage/src/tests/fixtures.rs
  17. + 7 - 0  runtime-modules/storage/src/tests/mocks.rs
  18. + 283 - 281  runtime-modules/storage/src/tests/mod.rs
  19. + 5 - 4  start.sh
  20. + 1 - 1  storage-node/package.json
  21. + 1 - 1  tests/network-tests/.env
  22. + 32 - 0  tests/network-tests/codegen.yml
  23. + 7 - 0  tests/network-tests/openapitools.json
  24. + 18 - 3  tests/network-tests/package.json
  25. + 48 - 0  tests/network-tests/run-full-tests.sh
  26. + 1 - 1  tests/network-tests/run-test-scenario.sh
  27. + 1 - 1  tests/network-tests/run-tests.sh
  28. + 145 - 143  tests/network-tests/src/Api.ts
  29. + 56 - 67  tests/network-tests/src/QueryNodeApi.ts
  30. + 25 - 9  tests/network-tests/src/Scenario.ts
  31. + 27 - 0  tests/network-tests/src/apis/distributorNode/.openapi-generator-ignore
  32. + 5 - 0  tests/network-tests/src/apis/distributorNode/.openapi-generator/FILES
  33. + 1 - 0  tests/network-tests/src/apis/distributorNode/.openapi-generator/VERSION
  34. + 410 - 0  tests/network-tests/src/apis/distributorNode/api.ts
  35. + 74 - 0  tests/network-tests/src/apis/distributorNode/base.ts
  36. + 150 - 0  tests/network-tests/src/apis/distributorNode/common.ts
  37. + 108 - 0  tests/network-tests/src/apis/distributorNode/configuration.ts
  38. + 16 - 0  tests/network-tests/src/apis/distributorNode/index.ts
  39. + 27 - 0  tests/network-tests/src/apis/storageNode/.openapi-generator-ignore
  40. + 5 - 0  tests/network-tests/src/apis/storageNode/.openapi-generator/FILES
  41. + 1 - 0  tests/network-tests/src/apis/storageNode/.openapi-generator/VERSION
  42. + 738 - 0  tests/network-tests/src/apis/storageNode/api.ts
  43. + 74 - 0  tests/network-tests/src/apis/storageNode/base.ts
  44. + 150 - 0  tests/network-tests/src/apis/storageNode/common.ts
  45. + 108 - 0  tests/network-tests/src/apis/storageNode/configuration.ts
  46. + 16 - 0  tests/network-tests/src/apis/storageNode/index.ts
  47. + 54 - 0  tests/network-tests/src/cli/base.ts
  48. + 48 - 0  tests/network-tests/src/cli/distributor.ts
  49. + 47 - 0  tests/network-tests/src/cli/joystream.ts
  50. + 66 - 0  tests/network-tests/src/cli/storage.ts
  51. + 126 - 0  tests/network-tests/src/cli/utils.ts
  52. + 2 - 2  tests/network-tests/src/fixtures/content/activeVideoCounters.ts
  53. + 1 - 1  tests/network-tests/src/fixtures/membershipModule.ts
  54. + 13 - 18  tests/network-tests/src/fixtures/proposalsModule.ts
  55. + 115 - 15  tests/network-tests/src/fixtures/workingGroupModule.ts
  56. + 71 - 0  tests/network-tests/src/flows/clis/createChannel.ts
  57. + 39 - 0  tests/network-tests/src/flows/clis/initDistributionBucket.ts
  58. + 53 - 0  tests/network-tests/src/flows/clis/initStorageBucket.ts
  59. + 23 - 0  tests/network-tests/src/flows/membership/makeAliceMember.ts
  60. + 6 - 6  tests/network-tests/src/flows/proposals/manageLeaderRole.ts
  61. + 3 - 0  tests/network-tests/src/flows/proposals/workingGroupMintCapacityProposal.ts
  62. + 227 - 0  tests/network-tests/src/flows/storagev2/initDistribution.ts
  63. + 159 - 0  tests/network-tests/src/flows/storagev2/initStorage.ts
  64. + 3 - 0  tests/network-tests/src/flows/workingGroup/manageWorkerAsWorker.ts
  65. + 3 - 0  tests/network-tests/src/flows/workingGroup/workerPayout.ts
  66. + 158 - 0  tests/network-tests/src/graphql/generated/queries.ts
  67. + 3715 - 0  tests/network-tests/src/graphql/generated/schema.ts
  68. + 74 - 0  tests/network-tests/src/graphql/queries/storagev2.graphql
  69. + 18 - 24  tests/network-tests/src/scenarios/combined.ts
  70. + 16 - 0  tests/network-tests/src/scenarios/giza-issue-reproduction-setup.ts
  71. + 16 - 0  tests/network-tests/src/scenarios/init-storage-and-distribution.ts
  72. + 28 - 0  tests/network-tests/src/scenarios/proposals.ts
  73. + 7 - 0  tests/network-tests/src/scenarios/setup-new-chain.ts
  74. + 2 - 2  tests/network-tests/src/sender.ts
  75. + 19 - 0  tests/network-tests/src/utils.ts
  76. + 1 - 1  types/package.json
  77. + 1 - 1  utils/migration-scripts/src/logging.ts
  78. + 2 - 0  utils/migration-scripts/src/sumer-giza/AssetsManager.ts
  79. + 1 - 0  utils/migration-scripts/src/sumer-giza/ContentMigration.ts
  80. + 12 - 0  yarn.lock

+ 1 - 1
.github/workflows/run-network-tests.yml

@@ -140,7 +140,7 @@ jobs:
       - name: Ensure tests are runnable
         run: yarn workspace network-tests build
       - name: Execute network tests
-        run: tests/network-tests/run-tests.sh full
+        run: tests/network-tests/run-full-tests.sh
 
   new_chain_setup:
     name: Initialize new chain

+ 0 - 0
chain-metadata.json
(file diff suppressed because it is too large)


+ 1 - 1
cli/package.json

@@ -143,6 +143,6 @@
   "types": "lib/index.d.ts",
   "volta": {
     "node": "14.16.1",
-    "yarn": "1.22.4"
+    "yarn": "1.22.15"
   }
 }

+ 2 - 2
package.json

@@ -65,10 +65,10 @@
   },
   "engines": {
     "node": ">=14.0.0",
-    "yarn": "^1.22.0"
+    "yarn": "^1.22.15"
   },
   "volta": {
     "node": "14.18.0",
-    "yarn": "1.22.4"
+    "yarn": "1.22.15"
   }
 }

+ 36 - 0
reproduce-giza-issue.sh

@@ -0,0 +1,36 @@
+#!/usr/bin/env bash
+set -e
+
+## Run a local development chain
+docker-compose up -d joystream-node
+
+# Init the chain with 2 storage buckets that have high limits set
+# The DynamicBagPolicy for Channel should be "numberOfStorageBuckets: 2" after this step is done
+./tests/network-tests/run-test-scenario.sh giza-issue-reproduction-setup
+
+# Set env for CLI's
+export AUTO_CONFIRM=true
+export ACCOUNT_URI=//testing//worker//Storage//0 # Storage lead account uri for storage CLI
+
+# Setup the CLI:
+yarn joystream-cli api:setUri ws://localhost:9944
+yarn joystream-cli account:choose --address 5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY
+yarn joystream-cli api:setQueryNodeEndpoint http://localhost:8081/graphql
+
+# Set very low limits for storage bucket 0
+yarn storage-node leader:set-bucket-limits -i 0 -s 100 -o 1
+# Create a channel (the transaction will fail due to low limits of bucket 0)
+yarn joystream-cli content:createChannel --context Member -i ./cli/examples/content/CreateChannel.json || true
+# Update DynamicBagPolicy to 1 storage bucket per channel bag
+yarn storage-node leader:update-dynamic-bag-policy -t Channel -n 1
+# Disable storage bucket 0
+yarn storage-node leader:update-bucket-status -i 0 --set off
+# Create a channel (the transaction still fails, which is unexpected)
+yarn joystream-cli content:createChannel --context Member -i ./cli/examples/content/CreateChannel.json || true
+# Increase limits of bucket 0
+yarn storage-node leader:set-bucket-limits -i 0 -s 1000000000 -o 1000
+# Create a channel
+yarn joystream-cli content:createChannel --context Member -i ./cli/examples/content/CreateChannel.json
+# Notice that the channel bag gets assigned to both bucket 0 and 1, even though:
+# 1. Bucket 0 is disabled
+# 2. DynamicBagPolicy for Channel has "numberOfStorageBuckets: 1"

+ 19 - 4
runtime-modules/content/src/lib.rs

@@ -1,3 +1,19 @@
+// Extrinsics list
+// - create channel
+// - update channel
+// - delete channel
+// - create video
+// - update video
+// - delete video
+// - update channel censorship status
+// - update video censorship status
+// - create channel category
+// - update channel category
+// - delete channel category
+// - create video category
+// - update video category
+// - delete video category
+
 // Ensure we're `no_std` when compiling for Wasm.
 #![cfg_attr(not(feature = "std"), no_std)]
 #![recursion_limit = "256"]
@@ -679,10 +695,11 @@ decl_module! {
             actor: ContentActor<T::CuratorGroupId, T::CuratorId, T::MemberId>,
             params: ChannelCreationParameters<T>,
         ) {
+            // ensure migration is done
+            ensure!(Self::is_migration_done(), Error::<T>::MigrationNotFinished);
+
             // channel creator account
             let sender = ensure_signed(origin)?;
-            // ensure migration is done
-             ensure!(Self::is_migration_done(), Error::<T>::MigrationNotFinished);
 
             ensure_actor_authorized_to_create_channel::<T>(
                 &sender,
@@ -1118,7 +1135,6 @@ decl_module! {
             video_id: T::VideoId,
             params: VideoUpdateParameters<T>,
         ) {
-
             let sender = ensure_signed(origin.clone())?;
             // check that video exists, retrieve corresponding channel id.
             let video = Self::ensure_video_validity(&video_id)?;
@@ -1180,7 +1196,6 @@ decl_module! {
             video_id: T::VideoId,
             assets_to_remove: BTreeSet<DataObjectId<T>>,
         ) {
-
            let sender = ensure_signed(origin.clone())?;
 
             // check that video exists
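
Note on the create_channel hunk above: the migration guard now runs before ensure_signed, so a call is rejected on the global precondition before any per-call work happens. A minimal standalone sketch of that ordering (plain Rust; the integer ids and the ensure_signed stand-in below are illustrative, not the pallet's actual types):

#[derive(Debug, PartialEq)]
enum Error {
    MigrationNotFinished,
    BadOrigin,
}

struct Runtime {
    migration_done: bool,
}

impl Runtime {
    // Hypothetical stand-in for frame_system's ensure_signed(origin).
    fn ensure_signed(&self, origin: Option<u64>) -> Result<u64, Error> {
        origin.ok_or(Error::BadOrigin)
    }

    fn create_channel(&self, origin: Option<u64>) -> Result<(), Error> {
        // Guard first, mirroring ensure!(Self::is_migration_done(), ...).
        if !self.migration_done {
            return Err(Error::MigrationNotFinished);
        }
        // Only then resolve the channel creator account.
        let _sender = self.ensure_signed(origin)?;
        Ok(())
    }
}

fn main() {
    let rt = Runtime { migration_done: false };
    assert_eq!(rt.create_channel(Some(1)), Err(Error::MigrationNotFinished));
    let rt = Runtime { migration_done: true };
    assert_eq!(rt.create_channel(Some(1)), Ok(()));
}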

+ 4 - 1
runtime-modules/content/src/permissions/mod.rs

@@ -261,13 +261,16 @@ pub fn ensure_actor_authorized_to_update_channel_assets<T: Trait>(
             Ok(())
         }
         ContentActor::Member(member_id) => {
-            // ensure valid member
+            // ensure member account and origin correspondence
             ensure_member_auth_success::<T>(sender, member_id)?;
             // ensure member is channel owner
             ensure_member_is_channel_owner::<T>(&channel.owner, member_id)?;
             Ok(())
         }
         ContentActor::Collaborator(member_id) => {
+            // ensure member account and origin correspondence
+            ensure_member_auth_success::<T>(sender, member_id)?;
+            // ensure valid collaborator
             ensure!(
                 channel.collaborators.contains(member_id),
                 Error::<T>::ActorNotAuthorized
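
With this fix, the Collaborator branch requires both that the origin actually controls the given member id (ensure_member_auth_success) and that this member is in the channel's collaborator set. A minimal sketch of the combined check (plain Rust; equating account ids with member ids is an illustrative assumption, not the pallet's logic):

use std::collections::BTreeSet;

#[derive(Debug, PartialEq)]
enum Error {
    MemberAuthFailed,
    ActorNotAuthorized,
}

struct Channel {
    collaborators: BTreeSet<u64>,
}

// Hypothetical stand-in for ensure_member_auth_success::<T>(sender, member_id).
fn ensure_member_auth_success(sender: u64, member_id: u64) -> Result<(), Error> {
    if sender == member_id {
        Ok(())
    } else {
        Err(Error::MemberAuthFailed)
    }
}

fn ensure_collaborator_authorized(
    channel: &Channel,
    sender: u64,
    member_id: u64,
) -> Result<(), Error> {
    // New in this commit: member account and origin correspondence comes first...
    ensure_member_auth_success(sender, member_id)?;
    // ...then the existing collaborator-set membership check.
    if channel.collaborators.contains(&member_id) {
        Ok(())
    } else {
        Err(Error::ActorNotAuthorized)
    }
}

fn main() {
    let channel = Channel {
        collaborators: vec![7u64].into_iter().collect(),
    };
    assert_eq!(ensure_collaborator_authorized(&channel, 7, 7), Ok(()));
    // Wrong origin for the member id now fails fast.
    assert_eq!(ensure_collaborator_authorized(&channel, 8, 7), Err(Error::MemberAuthFailed));
    // A valid member who is not a collaborator is still rejected.
    assert_eq!(ensure_collaborator_authorized(&channel, 9, 9), Err(Error::ActorNotAuthorized));
}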

+ 1356 - 659
runtime-modules/content/src/tests/channels.rs

@@ -1,811 +1,1508 @@
 #![cfg(test)]
 
 use super::curators;
+use super::fixtures::*;
 use super::mock::*;
 use crate::*;
-use frame_support::traits::Currency;
 use frame_support::{assert_err, assert_ok};
 
 #[test]
-fn successful_channel_deletion() {
+fn channel_censoring() {
     with_default_mock_builder(|| {
         // Run to block one to see emitted events
         run_to_block(1);
 
-        create_initial_storage_buckets();
-
-        // create an account with enought balance
-        let _ = balances::Module::<Test>::deposit_creating(
-            &FIRST_MEMBER_ORIGIN,
-            <Test as balances::Trait>::Balance::from(INITIAL_BALANCE),
-        );
-
-        // 3 assets added at creation
-        let assets = StorageAssetsRecord {
-            object_creation_list: vec![
-                DataObjectCreationParameters {
-                    size: 3,
-                    ipfs_content_id: b"first".to_vec(),
-                },
-                DataObjectCreationParameters {
-                    size: 3,
-                    ipfs_content_id: b"second".to_vec(),
-                },
-                DataObjectCreationParameters {
-                    size: 3,
-                    ipfs_content_id: b"third".to_vec(),
-                },
-            ],
-            expected_data_size_fee: storage::DataObjectPerMegabyteFee::<Test>::get(),
-        };
-        let channel_id = NextChannelId::<Test>::get();
-
-        // create channel
-        create_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
+        let channel_id = Content::next_channel_id();
+        assert_ok!(Content::create_channel(
+            Origin::signed(DEFAULT_MEMBER_ACCOUNT_ID),
+            ContentActor::Member(DEFAULT_MEMBER_ID),
             ChannelCreationParametersRecord {
-                assets: Some(assets),
+                assets: None,
                 meta: None,
                 reward_account: None,
                 collaborators: BTreeSet::new(),
-            },
-            Ok(()),
-        );
+            }
+        ));
 
-        // attempt to delete channel with non zero assets should result in error: objects
-        // are misspecified
-        delete_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
+        let group_id = curators::add_curator_to_new_group(DEFAULT_CURATOR_ID);
+
+        // Curator can censor channels
+        let is_censored = true;
+        assert_ok!(Content::update_channel_censorship_status(
+            Origin::signed(DEFAULT_CURATOR_ACCOUNT_ID),
+            ContentActor::Curator(group_id, DEFAULT_CURATOR_ID),
             channel_id,
-            2u64,
-            Err(Error::<Test>::InvalidBagSizeSpecified.into()),
+            is_censored,
+            vec![]
+        ));
+
+        assert_eq!(
+            System::events().last().unwrap().event,
+            MetaEvent::content(RawEvent::ChannelCensorshipStatusUpdated(
+                ContentActor::Curator(group_id, DEFAULT_CURATOR_ID),
+                channel_id,
+                is_censored,
+                vec![]
+            ))
         );
 
-        // successful deletion because we empty the bag first
-        delete_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
+        let channel = Content::channel_by_id(channel_id);
+
+        assert!(channel.is_censored);
+
+        // Curator can un-censor channels
+        let is_censored = false;
+        assert_ok!(Content::update_channel_censorship_status(
+            Origin::signed(DEFAULT_CURATOR_ACCOUNT_ID),
+            ContentActor::Curator(group_id, DEFAULT_CURATOR_ID),
             channel_id,
-            3u64, // now assets are 0
-            Ok(()),
+            is_censored,
+            vec![]
+        ));
+
+        assert_eq!(
+            System::events().last().unwrap().event,
+            MetaEvent::content(RawEvent::ChannelCensorshipStatusUpdated(
+                ContentActor::Curator(group_id, DEFAULT_CURATOR_ID),
+                channel_id,
+                is_censored,
+                vec![]
+            ))
         );
 
-        // create a channel with no assets:
-        let empty_channel_id = Content::next_channel_id();
-        create_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
+        let channel = Content::channel_by_id(channel_id);
+
+        assert!(!channel.is_censored);
+
+        // Member cannot censor channels
+        let is_censored = true;
+        assert_err!(
+            Content::update_channel_censorship_status(
+                Origin::signed(DEFAULT_MEMBER_ACCOUNT_ID),
+                ContentActor::Member(DEFAULT_MEMBER_ID),
+                channel_id,
+                is_censored,
+                vec![]
+            ),
+            Error::<Test>::ActorNotAuthorized
+        );
+
+        let curator_channel_id = Content::next_channel_id();
+
+        // create curator channel
+        assert_ok!(Content::create_channel(
+            Origin::signed(DEFAULT_CURATOR_ACCOUNT_ID),
+            ContentActor::Curator(group_id, DEFAULT_CURATOR_ID),
             ChannelCreationParametersRecord {
                 assets: None,
                 meta: None,
                 reward_account: None,
                 collaborators: BTreeSet::new(),
-            },
-            Ok(()),
+            }
+        ));
+
+        // Curator cannot censor curator group channels
+        assert_err!(
+            Content::update_channel_censorship_status(
+                Origin::signed(DEFAULT_CURATOR_ACCOUNT_ID),
+                ContentActor::Curator(group_id, DEFAULT_CURATOR_ID),
+                curator_channel_id,
+                is_censored,
+                vec![]
+            ),
+            Error::<Test>::CannotCensoreCuratorGroupOwnedChannels
         );
 
-        delete_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            empty_channel_id,
-            0u64,
-            Ok(()),
+        // Lead can still censor curator group channels
+        assert_ok!(Content::update_channel_censorship_status(
+            Origin::signed(LEAD_ACCOUNT_ID),
+            ContentActor::Lead,
+            curator_channel_id,
+            is_censored,
+            vec![]
+        ));
+    })
+}
+
+// channel creation tests
+#[test]
+fn successful_channel_creation_with_member_context() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+        CreateChannelFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn successful_channel_creation_with_curator_context() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+        let default_curator_group_id = curators::add_curator_to_new_group(DEFAULT_CURATOR_ID);
+        CreateChannelFixture::default()
+            .with_sender(DEFAULT_CURATOR_ACCOUNT_ID)
+            .with_actor(ContentActor::Curator(
+                default_curator_group_id,
+                DEFAULT_CURATOR_ID,
+            ))
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn unsuccessful_channel_creation_with_lead_context() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+        CreateChannelFixture::default()
+            .with_sender(LEAD_ACCOUNT_ID)
+            .with_actor(ContentActor::Lead)
+            .call_and_assert(Err(Error::<Test>::ActorCannotOwnChannel.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_channel_creation_with_collaborator_context() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+        CreateChannelFixture::default()
+            .with_sender(COLLABORATOR_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Collaborator(COLLABORATOR_MEMBER_ID))
+            .call_and_assert(Err(Error::<Test>::ActorCannotOwnChannel.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_channel_creation_with_uncorresponding_member_id_and_origin() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+        CreateChannelFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID + 100)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .call_and_assert(Err(Error::<Test>::MemberAuthFailed.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_channel_creation_with_uncorresponding_curator_id_and_origin() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+        let default_curator_group_id = curators::add_curator_to_new_group(DEFAULT_CURATOR_ID);
+        CreateChannelFixture::default()
+            .with_sender(DEFAULT_CURATOR_ACCOUNT_ID + 100)
+            .with_actor(ContentActor::Curator(
+                default_curator_group_id,
+                DEFAULT_CURATOR_ID,
+            ))
+            .call_and_assert(Err(Error::<Test>::CuratorAuthFailed.into()));
+    })
+}
+
+#[test]
+fn successful_channel_creation_with_storage_upload_and_member_context() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        CreateChannelFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_assets(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn successful_channel_creation_with_storage_upload_and_curator_context() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        let default_curator_group_id = curators::add_curator_to_new_group(DEFAULT_CURATOR_ID);
+        CreateChannelFixture::default()
+            .with_sender(DEFAULT_CURATOR_ACCOUNT_ID)
+            .with_actor(ContentActor::Curator(
+                default_curator_group_id,
+                DEFAULT_CURATOR_ID,
+            ))
+            .with_assets(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn unsuccessful_channel_creation_with_invalid_expected_data_size_fee() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        let default_curator_group_id = curators::add_curator_to_new_group(DEFAULT_CURATOR_ID);
+        CreateChannelFixture::default()
+            .with_sender(DEFAULT_CURATOR_ACCOUNT_ID)
+            .with_actor(ContentActor::Curator(
+                default_curator_group_id,
+                DEFAULT_CURATOR_ID,
+            ))
+            .with_assets(StorageAssets::<Test> {
+                // setting a purposely high fee to trigger error
+                expected_data_size_fee: BalanceOf::<Test>::from(1_000_000u64),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Err(storage::Error::<Test>::DataSizeFeeChanged.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_channel_creation_with_insufficient_balance() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+        create_initial_storage_buckets_helper();
+        CreateChannelFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_assets(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Err(storage::Error::<Test>::InsufficientBalance.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_channel_creation_with_no_bucket_with_sufficient_size_available() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+
+        CreateChannelFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_assets(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: vec![DataObjectCreationParameters {
+                    size: STORAGE_BUCKET_OBJECTS_SIZE_LIMIT + 1,
+                    ipfs_content_id: vec![1u8],
+                }],
+            })
+            .call_and_assert(Err(
+                storage::Error::<Test>::StorageBucketIdCollectionsAreEmpty.into(),
+            ));
+    })
+}
+
+#[test]
+fn unsuccessful_channel_creation_with_no_bucket_with_sufficient_number_available() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(
+            DEFAULT_MEMBER_ACCOUNT_ID,
+            DATA_OBJECT_DELETION_PRIZE * (STORAGE_BUCKET_OBJECTS_NUMBER_LIMIT + 1),
+        );
+        CreateChannelFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_assets(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: (0..(STORAGE_BUCKET_OBJECTS_NUMBER_LIMIT + 1))
+                    .map(|_| DataObjectCreationParameters {
+                        size: 1,
+                        ipfs_content_id: vec![1u8],
+                    })
+                    .collect(),
+            })
+            .call_and_assert(Err(
+                storage::Error::<Test>::StorageBucketIdCollectionsAreEmpty.into(),
+            ));
+    })
+}
+
+#[test]
+fn unsuccessful_channel_creation_with_data_limits_exceeded() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        CreateChannelFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_assets(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: vec![DataObjectCreationParameters {
+                    size: VOUCHER_OBJECTS_SIZE_LIMIT + 1,
+                    ipfs_content_id: vec![1u8],
+                }],
+            })
+            .call_and_assert(Err(storage::Error::<Test>::MaxDataObjectSizeExceeded.into()));
+    })
+}
+
+#[test]
+fn successful_channel_creation_with_collaborators_set() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        CreateChannelFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_collaborators(vec![COLLABORATOR_MEMBER_ID].into_iter().collect())
+            .call_and_assert(Ok(()));
+
+        let default_curator_group_id = curators::add_curator_to_new_group(DEFAULT_CURATOR_ID);
+        CreateChannelFixture::default()
+            .with_sender(DEFAULT_CURATOR_ACCOUNT_ID)
+            .with_actor(ContentActor::Curator(
+                default_curator_group_id,
+                DEFAULT_CURATOR_ID,
+            ))
+            .with_collaborators(vec![COLLABORATOR_MEMBER_ID].into_iter().collect())
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn unsuccessful_channel_creation_with_invalid_collaborators_set() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+        CreateChannelFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_collaborators(vec![COLLABORATOR_MEMBER_ID + 100].into_iter().collect())
+            .call_and_assert(Err(Error::<Test>::CollaboratorIsNotValidMember.into()));
+    })
+}
+
+#[test]
+fn successful_channel_creation_with_reward_account() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        CreateChannelFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_reward_account(DEFAULT_MEMBER_ACCOUNT_ID)
+            .call_and_assert(Ok(()));
+
+        let default_curator_group_id = curators::add_curator_to_new_group(DEFAULT_CURATOR_ID);
+        CreateChannelFixture::default()
+            .with_sender(DEFAULT_CURATOR_ACCOUNT_ID)
+            .with_actor(ContentActor::Curator(
+                default_curator_group_id,
+                DEFAULT_CURATOR_ID,
+            ))
+            .with_reward_account(DEFAULT_CURATOR_ACCOUNT_ID)
+            .call_and_assert(Ok(()));
+    })
+}
+
+// channel update tests
+#[test]
+fn unsuccessful_channel_update_with_uncorresponding_member_id_and_origin() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID + 100)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .call_and_assert(Err(Error::<Test>::MemberAuthFailed.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_channel_update_with_uncorresponding_curator_id_and_origin() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel();
+
+        let default_curator_group_id = curators::add_curator_to_new_group(DEFAULT_CURATOR_ID);
+        UpdateChannelFixture::default()
+            .with_sender(DEFAULT_CURATOR_ACCOUNT_ID + 100)
+            .with_actor(ContentActor::Curator(
+                default_curator_group_id,
+                DEFAULT_CURATOR_ID,
+            ))
+            .call_and_assert(Err(Error::<Test>::CuratorAuthFailed.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_channel_update_with_uncorresponding_collaborator_id_and_origin() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(COLLABORATOR_MEMBER_ACCOUNT_ID + 100)
+            .with_actor(ContentActor::Collaborator(COLLABORATOR_MEMBER_ID))
+            .call_and_assert(Err(Error::<Test>::MemberAuthFailed.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_channel_update_with_invalid_channel_id() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_channel_id(ChannelId::zero())
+            .call_and_assert(Err(Error::<Test>::ChannelDoesNotExist.into()));
+    })
+}
+
+#[test]
+fn successful_channel_update_with_assets_uploaded_by_collaborator() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        increase_account_balance_helper(COLLABORATOR_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(COLLABORATOR_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Collaborator(COLLABORATOR_MEMBER_ID))
+            .with_assets_to_upload(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn successful_channel_update_with_assets_removed_by_collaborator() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(COLLABORATOR_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Collaborator(COLLABORATOR_MEMBER_ID))
+            // data objects ids start at index 1
+            .with_assets_to_remove((1..(DATA_OBJECTS_NUMBER as u64 - 1)).collect())
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn successful_channel_update_with_assets_uploaded_by_member() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_assets_to_upload(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn successful_channel_update_with_assets_removed_by_member() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            // data objects ids start at index 1
+            .with_assets_to_remove((1..(DATA_OBJECTS_NUMBER as u64 - 1)).collect())
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn successful_channel_update_with_assets_uploaded_by_curator() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel();
+
+        let default_curator_group_id = NextCuratorGroupId::<Test>::get() - 1;
+        UpdateChannelFixture::default()
+            .with_sender(DEFAULT_CURATOR_ACCOUNT_ID)
+            .with_actor(ContentActor::Curator(
+                default_curator_group_id,
+                DEFAULT_CURATOR_ID,
+            ))
+            .with_assets_to_upload(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn successful_channel_update_with_assets_removed_by_curator() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel();
+
+        let default_curator_group_id = NextCuratorGroupId::<Test>::get() - 1;
+        UpdateChannelFixture::default()
+            .with_sender(DEFAULT_CURATOR_ACCOUNT_ID)
+            .with_actor(ContentActor::Curator(
+                default_curator_group_id,
+                DEFAULT_CURATOR_ID,
+            ))
+            // data objects ids start at index 1
+            .with_assets_to_remove((1..(DATA_OBJECTS_NUMBER as u64 - 1)).collect())
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn successful_curator_channel_update_with_assets_uploaded_by_lead() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        increase_account_balance_helper(LEAD_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(LEAD_ACCOUNT_ID)
+            .with_actor(ContentActor::Lead)
+            .with_assets_to_upload(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn unsuccessful_curator_channel_update_with_assets_uploaded_by_invalid_lead_origin() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        increase_account_balance_helper(LEAD_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(LEAD_ACCOUNT_ID + 100)
+            .with_actor(ContentActor::Lead)
+            .with_assets_to_upload(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Err(Error::<Test>::LeadAuthFailed.into()));
+    })
+}
+
+#[test]
+fn successful_channel_update_with_assets_removed_by_lead() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(LEAD_ACCOUNT_ID)
+            .with_actor(ContentActor::Lead)
+            // data objects ids start at index 1
+            .with_assets_to_remove((1..(DATA_OBJECTS_NUMBER as u64 - 1)).collect())
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn unsuccessful_channel_update_with_assets_removed_by_invalid_lead_origin() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(LEAD_ACCOUNT_ID + 100)
+            .with_actor(ContentActor::Lead)
+            // data objects ids start at index 1
+            .with_assets_to_remove((1..(DATA_OBJECTS_NUMBER as u64 - 1)).collect())
+            .call_and_assert(Err(Error::<Test>::LeadAuthFailed.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_channel_update_with_assets_uploaded_by_unauthorized_collaborator() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        increase_account_balance_helper(
+            UNAUTHORIZED_COLLABORATOR_MEMBER_ACCOUNT_ID,
+            INITIAL_BALANCE,
         );
+        create_default_member_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(UNAUTHORIZED_COLLABORATOR_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Collaborator(
+                UNAUTHORIZED_COLLABORATOR_MEMBER_ID,
+            ))
+            .with_assets_to_upload(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_channel_update_with_assets_removed_by_unauthorized_collaborator() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(UNAUTHORIZED_COLLABORATOR_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Collaborator(
+                UNAUTHORIZED_COLLABORATOR_MEMBER_ID,
+            ))
+            // data objects ids start at index 1
+            .with_assets_to_remove((1..(DATA_OBJECTS_NUMBER as u64 - 1)).collect())
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_channel_update_with_assets_uploaded_by_unauthorized_member() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        increase_account_balance_helper(UNAUTHORIZED_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(UNAUTHORIZED_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(UNAUTHORIZED_MEMBER_ID))
+            .with_assets_to_upload(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_channel_update_with_assets_removed_by_unathorized_member() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(UNAUTHORIZED_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(UNAUTHORIZED_MEMBER_ID))
+            // data objects ids start at index 1
+            .with_assets_to_remove((1..(DATA_OBJECTS_NUMBER as u64 - 1)).collect())
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_channel_update_with_assets_uploaded_by_unauthorized_curator() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        increase_account_balance_helper(UNAUTHORIZED_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel();
+
+        let unauthorized_curator_group_id =
+            curators::add_curator_to_new_group(UNAUTHORIZED_CURATOR_ID);
+        UpdateChannelFixture::default()
+            .with_sender(UNAUTHORIZED_CURATOR_ACCOUNT_ID)
+            .with_actor(ContentActor::Curator(
+                unauthorized_curator_group_id,
+                UNAUTHORIZED_CURATOR_ID,
+            ))
+            .with_assets_to_upload(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_channel_update_with_assets_removed_by_unauthorized_curator() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel();
+
+        let unauthorized_curator_group_id =
+            curators::add_curator_to_new_group(UNAUTHORIZED_CURATOR_ID);
+        UpdateChannelFixture::default()
+            .with_sender(UNAUTHORIZED_CURATOR_ACCOUNT_ID)
+            .with_actor(ContentActor::Curator(
+                unauthorized_curator_group_id,
+                UNAUTHORIZED_CURATOR_ID,
+            ))
+            // data objects ids start at index 1
+            .with_assets_to_remove((1..(DATA_OBJECTS_NUMBER as u64 - 1)).collect())
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_member_channel_update_with_assets_uploaded_by_lead() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        increase_account_balance_helper(LEAD_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(LEAD_ACCOUNT_ID)
+            .with_actor(ContentActor::Lead)
+            .with_assets_to_upload(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_member_channel_update_with_assets_removed_by_lead() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(LEAD_ACCOUNT_ID)
+            .with_actor(ContentActor::Lead)
+            // data objects ids start at index 1
+            .with_assets_to_remove((1..(DATA_OBJECTS_NUMBER as u64 - 1)).collect())
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
+    })
+}
+
+#[test]
+fn successful_channel_update_with_collaborators_set_updated_by_member() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_collaborators(BTreeSet::new())
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn unsuccessful_channel_update_with_collaborators_set_updated_by_unauthorized_member() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(UNAUTHORIZED_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(UNAUTHORIZED_MEMBER_ID))
+            .with_collaborators(BTreeSet::new())
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
+    })
+}
+
+#[test]
+fn successful_channel_update_with_collaborators_set_updated_by_curator() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel();
+
+        let default_curator_group_id = NextCuratorGroupId::<Test>::get() - 1;
+        UpdateChannelFixture::default()
+            .with_sender(DEFAULT_CURATOR_ACCOUNT_ID)
+            .with_actor(ContentActor::Curator(
+                default_curator_group_id,
+                DEFAULT_CURATOR_ID,
+            ))
+            .with_collaborators(BTreeSet::new())
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn unsuccessful_channel_update_with_collaborators_set_updated_by_unauthorized_curator() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel();
+
+        let unauthorized_curator_group_id =
+            curators::add_curator_to_new_group(UNAUTHORIZED_CURATOR_ID);
+        UpdateChannelFixture::default()
+            .with_sender(UNAUTHORIZED_CURATOR_ACCOUNT_ID)
+            .with_actor(ContentActor::Curator(
+                unauthorized_curator_group_id,
+                UNAUTHORIZED_CURATOR_ID,
+            ))
+            .with_collaborators(BTreeSet::new())
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_channel_update_with_collaborators_set_updated_by_collaborator() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(COLLABORATOR_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Collaborator(COLLABORATOR_MEMBER_ID))
+            .with_collaborators(BTreeSet::new())
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_member_channel_update_with_collaborators_set_updated_by_lead() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(LEAD_ACCOUNT_ID)
+            .with_actor(ContentActor::Lead)
+            .with_collaborators(BTreeSet::new())
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
+    })
+}
+
+#[test]
+fn successful_curator_channel_update_with_collaborators_set_updated_by_lead() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(LEAD_ACCOUNT_ID)
+            .with_actor(ContentActor::Lead)
+            .with_collaborators(BTreeSet::new())
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn unsuccessful_curator_channel_update_with_collaborators_set_updated_by_invalid_lead_origin() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(LEAD_ACCOUNT_ID + 100)
+            .with_actor(ContentActor::Lead)
+            .with_collaborators(BTreeSet::new())
+            .call_and_assert(Err(Error::<Test>::LeadAuthFailed.into()));
+    })
+}
+
+#[test]
+fn successful_channel_update_with_reward_account_updated_by_member() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_reward_account(Some(None))
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn unsuccessful_channel_update_with_reward_account_updated_by_unauthorized_member() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(UNAUTHORIZED_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(UNAUTHORIZED_MEMBER_ID))
+            .with_reward_account(Some(None))
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
+    })
+}
+
+#[test]
+fn successful_channel_update_with_reward_account_updated_by_curator() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel();
+
+        let default_curator_group_id = NextCuratorGroupId::<Test>::get() - 1;
+        UpdateChannelFixture::default()
+            .with_sender(DEFAULT_CURATOR_ACCOUNT_ID)
+            .with_actor(ContentActor::Curator(
+                default_curator_group_id,
+                DEFAULT_CURATOR_ID,
+            ))
+            .with_reward_account(Some(None))
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn unsuccessful_channel_update_with_reward_account_updated_by_unauthorized_curator() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel();
+
+        let unauthorized_curator_group_id =
+            curators::add_curator_to_new_group(UNAUTHORIZED_CURATOR_ID);
+        UpdateChannelFixture::default()
+            .with_sender(UNAUTHORIZED_CURATOR_ACCOUNT_ID)
+            .with_actor(ContentActor::Curator(
+                unauthorized_curator_group_id,
+                UNAUTHORIZED_CURATOR_ID,
+            ))
+            .with_reward_account(Some(None))
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_channel_update_with_reward_account_updated_by_collaborator() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(COLLABORATOR_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Collaborator(COLLABORATOR_MEMBER_ID))
+            .with_reward_account(Some(None))
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
     })
 }
 
 #[test]
-fn successful_channel_assets_deletion() {
+fn unsuccessful_member_channel_update_with_reward_account_updated_by_lead() {
     with_default_mock_builder(|| {
-        // Run to block one to see emitted events
         run_to_block(1);
 
-        create_initial_storage_buckets();
-        // create an account with enought balance
-        let _ = balances::Module::<Test>::deposit_creating(
-            &FIRST_MEMBER_ORIGIN,
-            <Test as balances::Trait>::Balance::from(INITIAL_BALANCE),
-        );
-
-        // 3 assets
-        let assets = StorageAssetsRecord {
-            object_creation_list: vec![
-                DataObjectCreationParameters {
-                    size: 3,
-                    ipfs_content_id: b"first".to_vec(),
-                },
-                DataObjectCreationParameters {
-                    size: 3,
-                    ipfs_content_id: b"second".to_vec(),
-                },
-                DataObjectCreationParameters {
-                    size: 3,
-                    ipfs_content_id: b"third".to_vec(),
-                },
-            ],
-            expected_data_size_fee: storage::DataObjectPerMegabyteFee::<Test>::get(),
-        };
-
-        let channel_id = NextChannelId::<Test>::get();
-        // create channel
-        create_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            ChannelCreationParametersRecord {
-                assets: Some(assets),
-                meta: None,
-                reward_account: None,
-                collaborators: BTreeSet::new(),
-            },
-            Ok(()),
-        );
-
-        // delete assets
-        let assets_to_remove = [0u64, 1u64].iter().map(|&x| x).collect::<BTreeSet<_>>();
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
 
-        // delete channel assets
-        assert_ok!(Content::update_channel(
-            Origin::signed(FIRST_MEMBER_ORIGIN),
-            ContentActor::Member(FIRST_MEMBER_ID),
-            channel_id,
-            ChannelUpdateParametersRecord {
-                assets_to_upload: None,
-                new_meta: None,
-                reward_account: None,
-                assets_to_remove: assets_to_remove,
-                collaborators: None,
-            },
-        ));
+        UpdateChannelFixture::default()
+            .with_sender(LEAD_ACCOUNT_ID)
+            .with_actor(ContentActor::Lead)
+            .with_reward_account(Some(None))
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
     })
 }
 
 #[test]
-fn succesful_channel_update() {
+fn successful_curator_channel_update_with_reward_account_updated_by_lead() {
     with_default_mock_builder(|| {
-        // Run to block one to see emitted events
         run_to_block(1);
 
-        create_initial_storage_buckets();
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel();
 
-        // create an account with enought balance
-        let _ = balances::Module::<Test>::deposit_creating(
-            &FIRST_MEMBER_ORIGIN,
-            <Test as balances::Trait>::Balance::from(INITIAL_BALANCE),
-        );
+        UpdateChannelFixture::default()
+            .with_sender(LEAD_ACCOUNT_ID)
+            .with_actor(ContentActor::Lead)
+            .with_reward_account(Some(None))
+            .call_and_assert(Ok(()));
+    })
+}
 
-        // 2 + 1 assets to be uploaded
-        let first_obj_id = Storage::<Test>::next_data_object_id();
-        let first_batch = StorageAssetsRecord {
-            object_creation_list: vec![
-                DataObjectCreationParameters {
-                    size: 3,
-                    ipfs_content_id: b"first".to_vec(),
-                },
-                DataObjectCreationParameters {
-                    size: 3,
-                    ipfs_content_id: b"second".to_vec(),
-                },
-            ],
-            expected_data_size_fee: storage::DataObjectPerMegabyteFee::<Test>::get(),
-        };
-        let first_batch_ids =
-            (first_obj_id..Storage::<Test>::next_data_object_id()).collect::<BTreeSet<_>>();
-
-        let second_batch = StorageAssetsRecord {
-            object_creation_list: vec![
-                DataObjectCreationParameters {
-                    size: 3,
-                    ipfs_content_id: b"first".to_vec(),
-                },
-                DataObjectCreationParameters {
-                    size: 3,
-                    ipfs_content_id: b"second".to_vec(),
-                },
-            ],
-            expected_data_size_fee: storage::DataObjectPerMegabyteFee::<Test>::get(),
-        };
-
-        let channel_id = NextChannelId::<Test>::get();
-
-        // create channel with first batch of assets
-        create_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            ChannelCreationParametersRecord {
-                assets: Some(first_batch),
-                meta: None,
-                reward_account: None,
-                collaborators: BTreeSet::new(),
-            },
-            Ok(()),
-        );
+#[test]
+fn unsuccessful_curator_channel_update_with_reward_account_updated_by_invalid_lead_origin() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
 
-        // update channel by adding the second batch of assets
-        update_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            channel_id,
-            ChannelUpdateParametersRecord {
-                assets_to_upload: Some(second_batch),
-                new_meta: None,
-                reward_account: None,
-                assets_to_remove: BTreeSet::new(),
-                collaborators: None,
-            },
-            Ok(()),
-        );
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel();
 
-        // update channel by removing the first batch of assets
-        update_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            channel_id,
-            ChannelUpdateParametersRecord {
-                assets_to_upload: None,
-                new_meta: None,
-                reward_account: None,
-                assets_to_remove: first_batch_ids,
-                collaborators: None,
-            },
-            Ok(()),
-        );
+        UpdateChannelFixture::default()
+            .with_sender(LEAD_ACCOUNT_ID + 100)
+            .with_actor(ContentActor::Lead)
+            .with_reward_account(Some(None))
+            .call_and_assert(Err(Error::<Test>::LeadAuthFailed.into()));
     })
 }
 
 #[test]
-fn succesful_channel_creation() {
+fn unsuccessful_channel_update_with_data_limits_exceeded() {
     with_default_mock_builder(|| {
-        // Run to block one to see emitted events
         run_to_block(1);
 
-        create_initial_storage_buckets();
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_assets_to_upload(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: vec![DataObjectCreationParameters {
+                    size: VOUCHER_OBJECTS_SIZE_LIMIT + 1,
+                    ipfs_content_id: vec![1u8],
+                }],
+            })
+            .call_and_assert(Err(storage::Error::<Test>::MaxDataObjectSizeExceeded.into()));
+    })
+}
 
-        // create an account with enought balance
-        let _ = balances::Module::<Test>::deposit_creating(
-            &FIRST_MEMBER_ORIGIN,
-            <Test as balances::Trait>::Balance::from(INITIAL_BALANCE),
-        );
+#[test]
+fn unsuccessful_channel_update_with_invalid_objects_id_to_remove() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
 
-        // 3 assets to be uploaded
-        let assets = StorageAssetsRecord {
-            object_creation_list: vec![
-                DataObjectCreationParameters {
-                    size: 3,
-                    ipfs_content_id: b"first".to_vec(),
-                },
-                DataObjectCreationParameters {
-                    size: 3,
-                    ipfs_content_id: b"second".to_vec(),
-                },
-                DataObjectCreationParameters {
-                    size: 3,
-                    ipfs_content_id: b"third".to_vec(),
-                },
-            ],
-            expected_data_size_fee: storage::DataObjectPerMegabyteFee::<Test>::get(),
-        };
-
-        // create channel
-        create_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            ChannelCreationParametersRecord {
-                assets: Some(assets),
-                meta: None,
-                reward_account: None,
-                collaborators: BTreeSet::new(),
-            },
-            Ok(()),
-        );
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_assets_to_remove(
+                ((DATA_OBJECTS_NUMBER as u64 + 1)..(2 * DATA_OBJECTS_NUMBER as u64)).collect(),
+            )
+            .call_and_assert(Err(storage::Error::<Test>::DataObjectDoesntExist.into()));
     })
 }
 
 #[test]
-fn lead_cannot_create_channel() {
+fn unsuccessful_channel_update_with_invalid_collaborators_set() {
     with_default_mock_builder(|| {
-        create_initial_storage_buckets();
-        assert_err!(
-            Content::create_channel(
-                Origin::signed(LEAD_ORIGIN),
-                ContentActor::Lead,
-                ChannelCreationParametersRecord {
-                    assets: None,
-                    meta: None,
-                    reward_account: None,
-                    collaborators: BTreeSet::new(),
-                }
-            ),
-            Error::<Test>::ActorCannotOwnChannel
-        );
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_collaborators(vec![COLLABORATOR_MEMBER_ID + 100].into_iter().collect())
+            .call_and_assert(Err(Error::<Test>::CollaboratorIsNotValidMember.into()));
     })
 }
 
 #[test]
-fn curator_owned_channels() {
+fn unsuccessful_channel_update_with_invalid_expected_data_size_fee() {
     with_default_mock_builder(|| {
-        // Run to block one to see emitted events
         run_to_block(1);
 
-        // Curator group doesn't exist yet
-        assert_err!(
-            Content::create_channel(
-                Origin::signed(FIRST_CURATOR_ORIGIN),
-                ContentActor::Curator(FIRST_CURATOR_GROUP_ID, FIRST_CURATOR_ID),
-                ChannelCreationParametersRecord {
-                    assets: None,
-                    meta: None,
-                    reward_account: None,
-                    collaborators: BTreeSet::new(),
-                }
-            ),
-            Error::<Test>::CuratorGroupIsNotActive
-        );
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_assets_to_upload(StorageAssets::<Test> {
+                // set a deliberately high fee to trigger the fee-changed error
+                expected_data_size_fee: BalanceOf::<Test>::from(1_000_000u64),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Err(storage::Error::<Test>::DataSizeFeeChanged.into()));
+    })
+}
 
-        let group_id = curators::add_curator_to_new_group(FIRST_CURATOR_ID);
-        assert_eq!(FIRST_CURATOR_GROUP_ID, group_id);
+#[test]
+fn unsuccessful_channel_update_with_insufficient_balance() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
 
-        // Curator from wrong group
-        assert_err!(
-            Content::create_channel(
-                Origin::signed(SECOND_CURATOR_ORIGIN),
-                ContentActor::Curator(FIRST_CURATOR_GROUP_ID, SECOND_CURATOR_ID),
-                ChannelCreationParametersRecord {
-                    assets: None,
-                    meta: None,
-                    reward_account: None,
-                    collaborators: BTreeSet::new(),
-                }
-            ),
-            Error::<Test>::CuratorIsNotAMemberOfGivenCuratorGroup
-        );
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+        slash_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID);
+
+        UpdateChannelFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_assets_to_upload(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Err(storage::Error::<Test>::InsufficientBalance.into()));
+    })
+}
 
-        // Curator in correct active group, but wrong origin
-        assert_err!(
-            Content::create_channel(
-                Origin::signed(SECOND_CURATOR_ORIGIN),
-                ContentActor::Curator(FIRST_CURATOR_GROUP_ID, FIRST_CURATOR_ID),
-                ChannelCreationParametersRecord {
-                    assets: None,
-                    meta: None,
-                    reward_account: None,
-                    collaborators: BTreeSet::new(),
-                }
-            ),
-            Error::<Test>::CuratorAuthFailed
-        );
+#[test]
+fn unsuccessful_channel_update_with_no_bucket_with_sufficient_object_size_limit() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
 
-        let channel_id = Content::next_channel_id();
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_assets_to_upload(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: vec![DataObjectCreationParameters {
+                    size: STORAGE_BUCKET_OBJECTS_SIZE_LIMIT + 1,
+                    ipfs_content_id: vec![1u8],
+                }],
+            })
+            .call_and_assert(Err(
+                storage::Error::<Test>::StorageBucketObjectSizeLimitReached.into(),
+            ));
+    })
+}
 
-        // Curator in correct active group, with correct origin
-        assert_ok!(Content::create_channel(
-            Origin::signed(FIRST_CURATOR_ORIGIN),
-            ContentActor::Curator(FIRST_CURATOR_GROUP_ID, FIRST_CURATOR_ID),
-            ChannelCreationParametersRecord {
-                assets: None,
-                meta: None,
-                reward_account: None,
-                collaborators: BTreeSet::new(),
-            }
-        ));
+#[test]
+fn unsuccessful_channel_update_with_no_bucket_with_sufficient_object_number_limit() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
 
-        assert_eq!(
-            System::events().last().unwrap().event,
-            MetaEvent::content(RawEvent::ChannelCreated(
-                ContentActor::Curator(FIRST_CURATOR_GROUP_ID, FIRST_CURATOR_ID),
-                channel_id,
-                ChannelRecord {
-                    owner: ChannelOwner::CuratorGroup(FIRST_CURATOR_GROUP_ID),
-                    is_censored: false,
-                    reward_account: None,
-                    num_videos: 0,
-                    collaborators: BTreeSet::new(),
-                },
-                ChannelCreationParametersRecord {
-                    assets: None,
-                    meta: None,
-                    reward_account: None,
-                    collaborators: BTreeSet::new(),
-                }
-            ))
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(
+            DEFAULT_MEMBER_ACCOUNT_ID,
+            // balance sufficient to cover the deletion prizes for the channel's assets and the attempted upload
+            DATA_OBJECT_DELETION_PRIZE * (STORAGE_BUCKET_OBJECTS_NUMBER_LIMIT + 1)
+                + DATA_OBJECT_DELETION_PRIZE * DATA_OBJECTS_NUMBER,
         );
 
-        // Curator can update channel
-        assert_ok!(Content::update_channel(
-            Origin::signed(FIRST_CURATOR_ORIGIN),
-            ContentActor::Curator(FIRST_CURATOR_GROUP_ID, FIRST_CURATOR_ID),
-            channel_id,
-            ChannelUpdateParametersRecord {
-                assets_to_upload: None,
-                new_meta: None,
-                reward_account: None,
-                assets_to_remove: BTreeSet::new(),
-                collaborators: None,
-            }
-        ));
-
-        // Lead can update curator owned channels
-        assert_ok!(Content::update_channel(
-            Origin::signed(LEAD_ORIGIN),
-            ContentActor::Lead,
-            channel_id,
-            ChannelUpdateParametersRecord {
-                assets_to_upload: None,
-                new_meta: None,
-                reward_account: None,
-                assets_to_remove: BTreeSet::new(),
-                collaborators: None,
-            }
-        ));
+        create_default_member_owned_channel();
+
+        UpdateChannelFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_assets_to_upload(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: (0..(STORAGE_BUCKET_OBJECTS_NUMBER_LIMIT + 1))
+                    .map(|_| DataObjectCreationParameters {
+                        size: 1,
+                        ipfs_content_id: vec![1u8],
+                    })
+                    .collect(),
+            })
+            .call_and_assert(Err(
+                storage::Error::<Test>::StorageBucketObjectNumberLimitReached.into(),
+            ));
     })
 }
 
+// channel deletion tests
 #[test]
-fn invalid_member_cannot_create_channel() {
+fn successful_curator_channel_deletion_by_lead() {
     with_default_mock_builder(|| {
-        // Run to block one to see emitted events
         run_to_block(1);
 
-        create_initial_storage_buckets();
-        // Not a member
-        create_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(UNKNOWN_MEMBER_ID),
-            ChannelCreationParametersRecord {
-                assets: None,
-                meta: None,
-                reward_account: None,
-                collaborators: BTreeSet::new(),
-            },
-            Err(Error::<Test>::MemberAuthFailed.into()),
-        );
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel();
+
+        DeleteChannelFixture::default()
+            .with_sender(LEAD_ACCOUNT_ID)
+            .with_actor(ContentActor::Lead)
+            .call_and_assert(Ok(()));
     })
 }
 
 #[test]
-fn invalid_member_cannot_update_channel() {
+fn unsuccessful_curator_channel_deletion_by_invalid_lead_origin() {
     with_default_mock_builder(|| {
-        // Run to block one to see emitted events
         run_to_block(1);
 
-        create_initial_storage_buckets();
-        create_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            ChannelCreationParametersRecord {
-                assets: None,
-                meta: None,
-                reward_account: None,
-                collaborators: BTreeSet::new(),
-            },
-            Ok(()),
-        );
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel();
 
-        update_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(UNKNOWN_MEMBER_ID),
-            <Test as storage::Trait>::ChannelId::one(),
-            ChannelUpdateParametersRecord {
-                assets_to_upload: None,
-                new_meta: None,
-                reward_account: None,
-                collaborators: None,
-                assets_to_remove: BTreeSet::new(),
-            },
-            Err(Error::<Test>::MemberAuthFailed.into()),
-        );
+        DeleteChannelFixture::default()
+            .with_sender(LEAD_ACCOUNT_ID + 100)
+            .with_actor(ContentActor::Lead)
+            .call_and_assert(Err(Error::<Test>::LeadAuthFailed.into()));
     })
 }
 
 #[test]
-fn invalid_member_cannot_delete_channel() {
+fn unsuccessful_member_channel_deletion_by_lead() {
     with_default_mock_builder(|| {
-        // Run to block one to see emitted events
         run_to_block(1);
 
-        create_initial_storage_buckets();
-
-        create_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            ChannelCreationParametersRecord {
-                assets: None,
-                meta: None,
-                reward_account: None,
-                collaborators: BTreeSet::new(),
-            },
-            Ok(()),
-        );
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
 
-        delete_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(UNKNOWN_MEMBER_ID),
-            <Test as storage::Trait>::ChannelId::one(),
-            0u64,
-            Err(Error::<Test>::MemberAuthFailed.into()),
-        );
+        DeleteChannelFixture::default()
+            .with_sender(LEAD_ACCOUNT_ID)
+            .with_actor(ContentActor::Lead)
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
     })
 }
 
 #[test]
-fn non_authorized_collaborators_cannot_update_channel() {
+fn unsuccessful_channel_deletion_by_collaborator() {
     with_default_mock_builder(|| {
-        // Run to block one to see emitted events
         run_to_block(1);
 
-        helper_init_accounts(vec![FIRST_MEMBER_ORIGIN, COLLABORATOR_MEMBER_ORIGIN]);
-
-        create_initial_storage_buckets();
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
 
-        // create channel
-        create_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            ChannelCreationParametersRecord {
-                assets: Some(helper_generate_storage_assets(vec![2, 3])),
-                meta: None,
-                reward_account: None,
-                collaborators: BTreeSet::new(),
-            },
-            Ok(()),
-        );
+        DeleteChannelFixture::default()
+            .with_sender(COLLABORATOR_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Collaborator(COLLABORATOR_MEMBER_ID))
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
+    })
+}
 
-        // attempt for an non auth. collaborator to update channel assets
-        update_channel_mock(
-            COLLABORATOR_MEMBER_ORIGIN,
-            ContentActor::Collaborator(COLLABORATOR_MEMBER_ID),
-            <Test as storage::Trait>::ChannelId::one(),
-            ChannelUpdateParametersRecord {
-                assets_to_upload: Some(helper_generate_storage_assets(vec![5])),
-                new_meta: None,
-                reward_account: None,
-                assets_to_remove: vec![DataObjectId::<Test>::one()]
-                    .into_iter()
-                    .collect::<BTreeSet<_>>(),
-                collaborators: None,
-            },
-            Err(Error::<Test>::ActorNotAuthorized.into()),
-        );
+#[test]
+fn successful_channel_deletion_by_member() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
 
-        // add collaborators
-        update_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            <Test as storage::Trait>::ChannelId::one(),
-            ChannelUpdateParametersRecord {
-                assets_to_upload: None,
-                new_meta: None,
-                reward_account: None,
-                assets_to_remove: BTreeSet::new(),
-                collaborators: Some(
-                    vec![COLLABORATOR_MEMBER_ID]
-                        .into_iter()
-                        .collect::<BTreeSet<_>>(),
-                ),
-            },
-            Ok(()),
-        );
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
 
-        // attempt for a valid collaborator to update channel fields outside
-        // of his scope
-        update_channel_mock(
-            COLLABORATOR_MEMBER_ORIGIN,
-            ContentActor::Collaborator(COLLABORATOR_MEMBER_ID),
-            <Test as storage::Trait>::ChannelId::one(),
-            ChannelUpdateParametersRecord {
-                assets_to_upload: None,
-                new_meta: None,
-                reward_account: Some(Some(COLLABORATOR_MEMBER_ORIGIN)),
-                assets_to_remove: BTreeSet::new(),
-                collaborators: None,
-            },
-            Err(Error::<Test>::ActorNotAuthorized.into()),
-        );
+        DeleteChannelFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .call_and_assert(Ok(()));
     })
 }
 
 #[test]
-fn authorized_collaborators_can_update_channel() {
+fn successful_channel_deletion_by_curator() {
     with_default_mock_builder(|| {
-        // Run to block one to see emitted events
         run_to_block(1);
 
-        helper_init_accounts(vec![FIRST_MEMBER_ORIGIN, COLLABORATOR_MEMBER_ORIGIN]);
-
-        create_initial_storage_buckets();
-        // create channel
-        create_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            ChannelCreationParametersRecord {
-                assets: Some(helper_generate_storage_assets(vec![2, 3])),
-                meta: None,
-                reward_account: None,
-                collaborators: vec![COLLABORATOR_MEMBER_ID]
-                    .into_iter()
-                    .collect::<BTreeSet<_>>(),
-            },
-            Ok(()),
-        );
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel();
 
-        // attempt for an auth. collaborator to update channel assets
-        update_channel_mock(
-            COLLABORATOR_MEMBER_ORIGIN,
-            ContentActor::Collaborator(COLLABORATOR_MEMBER_ID),
-            <Test as storage::Trait>::ChannelId::one(),
-            ChannelUpdateParametersRecord {
-                assets_to_upload: Some(helper_generate_storage_assets(vec![5])),
-                new_meta: None,
-                reward_account: None,
-                assets_to_remove: vec![DataObjectId::<Test>::one()]
-                    .into_iter()
-                    .collect::<BTreeSet<_>>(),
-                collaborators: None,
-            },
-            Ok(()),
-        );
+        let default_curator_group_id = Content::next_curator_group_id() - 1;
+        DeleteChannelFixture::default()
+            .with_sender(DEFAULT_CURATOR_ACCOUNT_ID)
+            .with_actor(ContentActor::Curator(
+                default_curator_group_id,
+                DEFAULT_CURATOR_ID,
+            ))
+            .call_and_assert(Ok(()));
     })
 }
 
 #[test]
-fn channel_censoring() {
+fn unsuccessful_channel_deletion_by_unauthorized_member() {
     with_default_mock_builder(|| {
-        // Run to block one to see emitted events
         run_to_block(1);
 
-        let channel_id = Content::next_channel_id();
-        assert_ok!(Content::create_channel(
-            Origin::signed(FIRST_MEMBER_ORIGIN),
-            ContentActor::Member(FIRST_MEMBER_ID),
-            ChannelCreationParametersRecord {
-                assets: None,
-                meta: None,
-                reward_account: None,
-                collaborators: BTreeSet::new(),
-            }
-        ));
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
 
-        let group_id = curators::add_curator_to_new_group(FIRST_CURATOR_ID);
+        DeleteChannelFixture::default()
+            .with_sender(UNAUTHORIZED_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(UNAUTHORIZED_MEMBER_ID))
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
+    })
+}
 
-        // Curator can censor channels
-        let is_censored = true;
-        assert_ok!(Content::update_channel_censorship_status(
-            Origin::signed(FIRST_CURATOR_ORIGIN),
-            ContentActor::Curator(group_id, FIRST_CURATOR_ID),
-            channel_id,
-            is_censored,
-            vec![]
-        ));
+#[test]
+fn unsuccessful_channel_deletion_by_unauthorized_curator() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
 
-        assert_eq!(
-            System::events().last().unwrap().event,
-            MetaEvent::content(RawEvent::ChannelCensorshipStatusUpdated(
-                ContentActor::Curator(group_id, FIRST_CURATOR_ID),
-                channel_id,
-                is_censored,
-                vec![]
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel();
+
+        let unauthorized_curator_group_id =
+            curators::add_curator_to_new_group(UNAUTHORIZED_CURATOR_ID);
+        DeleteChannelFixture::default()
+            .with_sender(UNAUTHORIZED_CURATOR_ACCOUNT_ID)
+            .with_actor(ContentActor::Curator(
+                unauthorized_curator_group_id,
+                UNAUTHORIZED_CURATOR_ID,
             ))
-        );
-
-        let channel = Content::channel_by_id(channel_id);
-
-        assert!(channel.is_censored);
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
+    })
+}
 
-        // Curator can un-censor channels
-        let is_censored = false;
-        assert_ok!(Content::update_channel_censorship_status(
-            Origin::signed(FIRST_CURATOR_ORIGIN),
-            ContentActor::Curator(group_id, FIRST_CURATOR_ID),
-            channel_id,
-            is_censored,
-            vec![]
-        ));
+#[test]
+fn unsuccessful_channel_deletion_by_uncorresponding_member_id_and_origin() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
 
-        assert_eq!(
-            System::events().last().unwrap().event,
-            MetaEvent::content(RawEvent::ChannelCensorshipStatusUpdated(
-                ContentActor::Curator(group_id, FIRST_CURATOR_ID),
-                channel_id,
-                is_censored,
-                vec![]
-            ))
-        );
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
 
-        let channel = Content::channel_by_id(channel_id);
+        DeleteChannelFixture::default()
+            .with_sender(UNAUTHORIZED_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .call_and_assert(Err(Error::<Test>::MemberAuthFailed.into()));
+    })
+}
 
-        assert!(!channel.is_censored);
+#[test]
+fn unsuccessful_channel_deletion_by_uncorresponding_curator_id_and_origin() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
 
-        // Member cannot censor channels
-        let is_censored = true;
-        assert_err!(
-            Content::update_channel_censorship_status(
-                Origin::signed(FIRST_MEMBER_ORIGIN),
-                ContentActor::Member(FIRST_MEMBER_ID),
-                channel_id,
-                is_censored,
-                vec![]
-            ),
-            Error::<Test>::ActorNotAuthorized
-        );
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel();
 
-        let curator_channel_id = Content::next_channel_id();
+        let default_curator_group_id = Content::next_curator_group_id() - 1;
+        DeleteChannelFixture::default()
+            .with_sender(UNAUTHORIZED_CURATOR_ACCOUNT_ID)
+            .with_actor(ContentActor::Curator(
+                default_curator_group_id,
+                DEFAULT_CURATOR_ID,
+            ))
+            .call_and_assert(Err(Error::<Test>::CuratorAuthFailed.into()));
+    })
+}
 
-        // create curator channel
-        assert_ok!(Content::create_channel(
-            Origin::signed(FIRST_CURATOR_ORIGIN),
-            ContentActor::Curator(group_id, FIRST_CURATOR_ID),
-            ChannelCreationParametersRecord {
-                assets: None,
-                meta: None,
-                reward_account: None,
-                collaborators: BTreeSet::new(),
-            }
-        ));
+#[test]
+fn unsuccessful_channel_deletion_with_invalid_channel_id() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
 
-        // Curator cannot censor curator group channels
-        assert_err!(
-            Content::update_channel_censorship_status(
-                Origin::signed(FIRST_CURATOR_ORIGIN),
-                ContentActor::Curator(group_id, FIRST_CURATOR_ID),
-                curator_channel_id,
-                is_censored,
-                vec![]
-            ),
-            Error::<Test>::CannotCensoreCuratorGroupOwnedChannels
-        );
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
 
-        // Lead can still censor curator group channels
-        assert_ok!(Content::update_channel_censorship_status(
-            Origin::signed(LEAD_ORIGIN),
-            ContentActor::Lead,
-            curator_channel_id,
-            is_censored,
-            vec![]
-        ));
+        DeleteChannelFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_channel_id(Zero::zero())
+            .call_and_assert(Err(Error::<Test>::ChannelDoesNotExist.into()));
     })
 }
 
 #[test]
-fn channel_creation_doesnt_leave_bags_dangling() {
+fn unsuccessful_channel_deletion_with_invalid_bag_size() {
     with_default_mock_builder(|| {
-        // in order to emit events
         run_to_block(1);
 
-        create_initial_storage_buckets();
-        // number of assets big enought to make upload_data_objects throw
-        let asset_num = 100_000usize;
-        let mut object_creation_list =
-            Vec::<DataObjectCreationParameters>::with_capacity(asset_num);
-        for _i in 0..asset_num {
-            object_creation_list.push(DataObjectCreationParameters {
-                size: 1_000_000, // size big enought to make upload_data_objects throw
-                ipfs_content_id: b"test".to_vec(),
-            });
-        }
-
-        let assets = StorageAssetsRecord {
-            object_creation_list: object_creation_list,
-            expected_data_size_fee: storage::DataObjectPerMegabyteFee::<Test>::get(),
-        };
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
 
-        let channel_id = NextChannelId::<Test>::get();
-        // create channel
-        create_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            ChannelCreationParametersRecord {
-                assets: Some(assets),
-                meta: Some(vec![]),
-                reward_account: None,
-                collaborators: BTreeSet::new(),
-            },
-            Err(storage::Error::<Test>::MaxDataObjectSizeExceeded.into()),
-        );
+        assert!(DATA_OBJECTS_NUMBER > 0);
 
-        // ensure that no bag are left dangling
-        let dyn_bag = DynamicBagIdType::<MemberId, ChannelId>::Channel(channel_id);
-        let bag_id = storage::BagIdType::from(dyn_bag.clone());
-        assert!(<Test as Trait>::DataObjectStorage::ensure_bag_exists(&bag_id).is_err());
+        DeleteChannelFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            // default member owned channel has DATA_OBJECTS_NUMBER > 0 assets
+            .with_num_objects_to_delete(0u64)
+            .call_and_assert(Err(Error::<Test>::InvalidBagSizeSpecified.into()));
     })
 }

+ 27 - 20
runtime-modules/content/src/tests/curators.rs

@@ -7,15 +7,17 @@ use frame_support::{assert_err, assert_ok};
 pub fn add_curator_to_new_group(curator_id: CuratorId) -> CuratorGroupId {
     let curator_group_id = Content::next_curator_group_id();
     // create new group and add curator id to it
-    assert_ok!(Content::create_curator_group(Origin::signed(LEAD_ORIGIN)));
+    assert_ok!(Content::create_curator_group(Origin::signed(
+        LEAD_ACCOUNT_ID
+    )));
     assert_ok!(Content::add_curator_to_group(
-        Origin::signed(LEAD_ORIGIN),
+        Origin::signed(LEAD_ACCOUNT_ID),
         curator_group_id,
         curator_id
     ));
     // make group active
     assert_ok!(Content::set_curator_group_status(
-        Origin::signed(LEAD_ORIGIN),
+        Origin::signed(LEAD_ACCOUNT_ID),
         curator_group_id,
         true
     ));
@@ -29,7 +31,9 @@ fn curator_group_management() {
         run_to_block(1);
 
         let curator_group_id = Content::next_curator_group_id();
-        assert_ok!(Content::create_curator_group(Origin::signed(LEAD_ORIGIN)));
+        assert_ok!(Content::create_curator_group(Origin::signed(
+            LEAD_ACCOUNT_ID
+        )));
 
         assert_eq!(
             System::events().last().unwrap().event,
@@ -44,7 +48,7 @@ fn curator_group_management() {
 
         // Activate group
         assert_ok!(Content::set_curator_group_status(
-            Origin::signed(LEAD_ORIGIN),
+            Origin::signed(LEAD_ACCOUNT_ID),
             curator_group_id,
             true
         ));
@@ -60,35 +64,35 @@ fn curator_group_management() {
         // Cannot add non curators into group
         assert_err!(
             Content::add_curator_to_group(
-                Origin::signed(LEAD_ORIGIN),
+                Origin::signed(LEAD_ACCOUNT_ID),
                 curator_group_id,
-                MEMBERS_COUNT + 1 // not a curator
+                DEFAULT_CURATOR_ID + 1 // not a curator
             ),
             Error::<Test>::CuratorIdInvalid
         );
 
         // Add curator to group
         assert_ok!(Content::add_curator_to_group(
-            Origin::signed(LEAD_ORIGIN),
+            Origin::signed(LEAD_ACCOUNT_ID),
             curator_group_id,
-            FIRST_CURATOR_ID
+            DEFAULT_CURATOR_ID
         ));
 
         assert_eq!(
             System::events().last().unwrap().event,
-            MetaEvent::content(RawEvent::CuratorAdded(curator_group_id, FIRST_CURATOR_ID))
+            MetaEvent::content(RawEvent::CuratorAdded(curator_group_id, DEFAULT_CURATOR_ID))
         );
 
         // Ensure curator is in group
         let group = Content::curator_group_by_id(curator_group_id);
-        assert!(group.has_curator(&FIRST_CURATOR_ID));
+        assert!(group.has_curator(&DEFAULT_CURATOR_ID));
 
         // Cannot add same curator again
         assert_err!(
             Content::add_curator_to_group(
-                Origin::signed(LEAD_ORIGIN),
+                Origin::signed(LEAD_ACCOUNT_ID),
                 curator_group_id,
-                FIRST_CURATOR_ID
+                DEFAULT_CURATOR_ID
             ),
             Error::<Test>::CuratorIsAlreadyAMemberOfGivenCuratorGroup
         );
@@ -96,7 +100,7 @@ fn curator_group_management() {
         // Cannot remove curator if not in group
         assert_err!(
             Content::remove_curator_from_group(
-                Origin::signed(LEAD_ORIGIN),
+                Origin::signed(LEAD_ACCOUNT_ID),
                 curator_group_id,
                 MEMBERS_COUNT + 1 // not a curator
             ),
@@ -105,25 +109,28 @@ fn curator_group_management() {
 
         // Remove curator from group
         assert_ok!(Content::remove_curator_from_group(
-            Origin::signed(LEAD_ORIGIN),
+            Origin::signed(LEAD_ACCOUNT_ID),
             curator_group_id,
-            FIRST_CURATOR_ID
+            DEFAULT_CURATOR_ID
         ));
 
         assert_eq!(
             System::events().last().unwrap().event,
-            MetaEvent::content(RawEvent::CuratorRemoved(curator_group_id, FIRST_CURATOR_ID))
+            MetaEvent::content(RawEvent::CuratorRemoved(
+                curator_group_id,
+                DEFAULT_CURATOR_ID
+            ))
         );
 
         let group = Content::curator_group_by_id(curator_group_id);
-        assert!(!group.has_curator(&FIRST_CURATOR_ID));
+        assert!(!group.has_curator(&DEFAULT_CURATOR_ID));
 
         // Already removed cannot remove again
         assert_err!(
             Content::remove_curator_from_group(
-                Origin::signed(LEAD_ORIGIN),
+                Origin::signed(LEAD_ACCOUNT_ID),
                 curator_group_id,
-                FIRST_CURATOR_ID
+                DEFAULT_CURATOR_ID
             ),
             Error::<Test>::CuratorIsNotAMemberOfGivenCuratorGroup
         );

+ 920 - 0
runtime-modules/content/src/tests/fixtures.rs

@@ -0,0 +1,920 @@
+use super::curators;
+use super::mock::*;
+use crate::*;
+use frame_support::assert_ok;
+use frame_support::traits::Currency;
+
+// type aliases
+type AccountId = <Test as frame_system::Trait>::AccountId;
+type VideoId = <Test as Trait>::VideoId;
+
+// fixtures
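+//
+// Each fixture below follows the same builder pattern: `default()` yields a
+// baseline request, the `with_*` setters override individual fields, and
+// `call_and_assert` dispatches the extrinsic and verifies both the dispatch
+// result and the resulting state, event and balance invariants.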
+pub struct CreateChannelFixture {
+    sender: AccountId,
+    actor: ContentActor<CuratorGroupId, CuratorId, MemberId>,
+    params: ChannelCreationParameters<Test>,
+}
+
+impl CreateChannelFixture {
+    pub fn default() -> Self {
+        Self {
+            sender: DEFAULT_MEMBER_ACCOUNT_ID,
+            actor: ContentActor::Member(DEFAULT_MEMBER_ID),
+            params: ChannelCreationParameters::<Test> {
+                assets: None,
+                meta: None,
+                reward_account: None,
+                collaborators: BTreeSet::new(),
+            },
+        }
+    }
+
+    pub fn with_sender(self, sender: AccountId) -> Self {
+        Self { sender, ..self }
+    }
+
+    pub fn with_actor(self, actor: ContentActor<CuratorGroupId, CuratorId, MemberId>) -> Self {
+        Self { actor, ..self }
+    }
+
+    pub fn with_assets(self, assets: StorageAssets<Test>) -> Self {
+        Self {
+            params: ChannelCreationParameters::<Test> {
+                assets: Some(assets),
+                ..self.params
+            },
+            ..self
+        }
+    }
+
+    pub fn with_collaborators(self, collaborators: BTreeSet<MemberId>) -> Self {
+        Self {
+            params: ChannelCreationParameters::<Test> {
+                collaborators,
+                ..self.params
+            },
+            ..self
+        }
+    }
+
+    pub fn with_reward_account(self, reward_account: AccountId) -> Self {
+        Self {
+            params: ChannelCreationParameters::<Test> {
+                reward_account: Some(reward_account),
+                ..self.params
+            },
+            ..self
+        }
+    }
+
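+    // Dispatches `create_channel` and asserts the expected result. On success
+    // it also verifies the stored channel, the channel id counter, the channel
+    // dynamic bag, the emitted `ChannelCreated` event and the deletion-prize
+    // balance accounting; on failure it verifies that no state was mutated.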
+    pub fn call_and_assert(&self, expected_result: DispatchResult) {
+        let origin = Origin::signed(self.sender.clone());
+        let balance_pre = Balances::usable_balance(self.sender);
+        let channel_id = Content::next_channel_id();
+        let channel_bag_id = Content::bag_id_for_channel(&channel_id);
+        let beg_obj_id = storage::NextDataObjectId::<Test>::get();
+        let actual_result =
+            Content::create_channel(origin, self.actor.clone(), self.params.clone());
+        let end_obj_id = storage::NextDataObjectId::<Test>::get();
+
+        assert_eq!(actual_result, expected_result);
+
+        let balance_post = Balances::usable_balance(self.sender);
+
+        if actual_result.is_ok() {
+            // ensure channel is on chain
+            assert!(ChannelById::<Test>::contains_key(&channel_id));
+
+            // channel counter increased
+            assert_eq!(
+                Content::next_channel_id(),
+                channel_id.saturating_add(One::one())
+            );
+
+            // dynamic bag for channel is created
+            assert_ok!(Storage::<Test>::ensure_bag_exists(&channel_bag_id));
+
+            // event correctly deposited
+            let owner = Content::actor_to_channel_owner(&self.actor).unwrap();
+            assert_eq!(
+                System::events().last().unwrap().event,
+                MetaEvent::content(RawEvent::ChannelCreated(
+                    self.actor.clone(),
+                    channel_id,
+                    ChannelRecord {
+                        owner,
+                        is_censored: false,
+                        reward_account: self.params.reward_account.clone(),
+                        collaborators: self.params.collaborators.clone(),
+                        num_videos: Zero::zero(),
+                    },
+                    self.params.clone(),
+                ))
+            );
+
+            if let Some(assets) = self.params.assets.as_ref() {
+                // balance accounting is correct
+                let bag_deletion_prize = BalanceOf::<Test>::zero();
+                let objects_deletion_prize =
+                    assets
+                        .object_creation_list
+                        .iter()
+                        .fold(BalanceOf::<Test>::zero(), |acc, _| {
+                            acc.saturating_add(
+                                <Test as storage::Trait>::DataObjectDeletionPrize::get(),
+                            )
+                        });
+
+                assert_eq!(
+                    balance_pre.saturating_sub(balance_post),
+                    bag_deletion_prize.saturating_add(objects_deletion_prize),
+                );
+
+                assert!((beg_obj_id..end_obj_id).all(|id| {
+                    storage::DataObjectsById::<Test>::contains_key(&channel_bag_id, id)
+                }));
+            }
+        } else {
+            assert_eq!(balance_post, balance_pre);
+            assert_eq!(beg_obj_id, end_obj_id);
+            assert!(!storage::Bags::<Test>::contains_key(&channel_bag_id));
+            assert!(!ChannelById::<Test>::contains_key(&channel_id));
+            assert_eq!(NextChannelId::<Test>::get(), channel_id);
+        }
+    }
+}
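+
+// Illustrative usage, mirroring the tests in `channels.rs`: compose the
+// builder setters, then dispatch and verify in a single step, e.g.
+//
+//     CreateChannelFixture::default()
+//         .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+//         .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+//         .call_and_assert(Ok(()));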
+
+pub struct CreateVideoFixture {
+    sender: AccountId,
+    actor: ContentActor<CuratorGroupId, CuratorId, MemberId>,
+    params: VideoCreationParameters<Test>,
+    channel_id: ChannelId,
+}
+
+impl CreateVideoFixture {
+    pub fn default() -> Self {
+        Self {
+            sender: DEFAULT_MEMBER_ACCOUNT_ID,
+            actor: ContentActor::Member(DEFAULT_MEMBER_ID),
+            params: VideoCreationParameters::<Test> {
+                assets: None,
+                meta: None,
+            },
+            channel_id: ChannelId::one(), // channel index starts at 1
+        }
+    }
+
+    pub fn with_sender(self, sender: AccountId) -> Self {
+        Self { sender, ..self }
+    }
+
+    pub fn with_channel_id(self, channel_id: ChannelId) -> Self {
+        Self { channel_id, ..self }
+    }
+
+    pub fn with_actor(self, actor: ContentActor<CuratorGroupId, CuratorId, MemberId>) -> Self {
+        Self { actor, ..self }
+    }
+
+    pub fn with_assets(self, assets: StorageAssets<Test>) -> Self {
+        Self {
+            params: VideoCreationParameters::<Test> {
+                assets: Some(assets),
+                ..self.params
+            },
+            ..self
+        }
+    }
+
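+    // Dispatches `create_video` and asserts the expected result, checking the
+    // stored video, the video id counter, the emitted `VideoCreated` event and
+    // the data object / balance bookkeeping on success, and the absence of any
+    // state change on failure.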
+    pub fn call_and_assert(&self, expected_result: DispatchResult) {
+        let origin = Origin::signed(self.sender.clone());
+        let balance_pre = Balances::usable_balance(self.sender);
+        let channel_bag_id = Content::bag_id_for_channel(&self.channel_id);
+        let video_id = Content::next_video_id();
+        let beg_obj_id = storage::NextDataObjectId::<Test>::get();
+
+        let actual_result = Content::create_video(
+            origin,
+            self.actor.clone(),
+            self.channel_id,
+            self.params.clone(),
+        );
+
+        let balance_post = Balances::usable_balance(self.sender);
+        let end_obj_id = storage::NextDataObjectId::<Test>::get();
+
+        assert_eq!(actual_result, expected_result);
+
+        if actual_result.is_ok() {
+            assert!(VideoById::<Test>::contains_key(&video_id));
+
+            assert_eq!(
+                Content::next_video_id(),
+                video_id.saturating_add(One::one())
+            );
+
+            assert_eq!(
+                System::events().last().unwrap().event,
+                MetaEvent::content(RawEvent::VideoCreated(
+                    self.actor,
+                    self.channel_id,
+                    video_id,
+                    self.params.clone(),
+                ))
+            );
+
+            if let Some(assets) = self.params.assets.as_ref() {
+                // balance accounting is correct
+                let bag_deletion_prize = BalanceOf::<Test>::zero();
+                let objects_deletion_prize =
+                    assets
+                        .object_creation_list
+                        .iter()
+                        .fold(BalanceOf::<Test>::zero(), |acc, _| {
+                            acc.saturating_add(
+                                <Test as storage::Trait>::DataObjectDeletionPrize::get(),
+                            )
+                        });
+
+                assert_eq!(
+                    balance_pre.saturating_sub(balance_post),
+                    bag_deletion_prize.saturating_add(objects_deletion_prize),
+                );
+
+                assert!((beg_obj_id..end_obj_id).all(|id| {
+                    storage::DataObjectsById::<Test>::contains_key(&channel_bag_id, id)
+                }));
+            }
+        } else {
+            assert!(!VideoById::<Test>::contains_key(&video_id));
+
+            assert_eq!(Content::next_video_id(), video_id);
+
+            if self.params.assets.is_some() {
+                assert_eq!(balance_pre, balance_post);
+
+                assert!(!(beg_obj_id..end_obj_id).any(|id| {
+                    storage::DataObjectsById::<Test>::contains_key(&channel_bag_id, id)
+                }));
+            }
+        }
+    }
+}
+
+pub struct UpdateChannelFixture {
+    sender: AccountId,
+    actor: ContentActor<CuratorGroupId, CuratorId, MemberId>,
+    channel_id: ChannelId,
+    params: ChannelUpdateParameters<Test>,
+}
+
+impl UpdateChannelFixture {
+    pub fn default() -> Self {
+        Self {
+            sender: DEFAULT_MEMBER_ACCOUNT_ID,
+            actor: ContentActor::Member(DEFAULT_MEMBER_ID),
+            channel_id: ChannelId::one(), // channel index starts at 1
+            params: ChannelUpdateParameters::<Test> {
+                assets_to_upload: None,
+                new_meta: None,
+                reward_account: None,
+                assets_to_remove: BTreeSet::new(),
+                collaborators: None,
+            },
+        }
+    }
+
+    pub fn with_sender(self, sender: AccountId) -> Self {
+        Self { sender, ..self }
+    }
+
+    pub fn with_actor(self, actor: ContentActor<CuratorGroupId, CuratorId, MemberId>) -> Self {
+        Self { actor, ..self }
+    }
+
+    pub fn with_channel_id(self, channel_id: ChannelId) -> Self {
+        Self { channel_id, ..self }
+    }
+
+    pub fn with_assets_to_upload(self, assets: StorageAssets<Test>) -> Self {
+        Self {
+            params: ChannelUpdateParameters::<Test> {
+                assets_to_upload: Some(assets),
+                ..self.params
+            },
+            ..self
+        }
+    }
+
+    pub fn with_assets_to_remove(self, assets: BTreeSet<DataObjectId<Test>>) -> Self {
+        Self {
+            params: ChannelUpdateParameters::<Test> {
+                assets_to_remove: assets,
+                ..self.params
+            },
+            ..self
+        }
+    }
+
+    pub fn with_collaborators(self, collaborators: BTreeSet<MemberId>) -> Self {
+        Self {
+            params: ChannelUpdateParameters::<Test> {
+                collaborators: Some(collaborators),
+                ..self.params
+            },
+            ..self
+        }
+    }
+
+    pub fn with_reward_account(self, reward_account: Option<Option<AccountId>>) -> Self {
+        Self {
+            params: ChannelUpdateParameters::<Test> {
+                reward_account,
+                ..self.params
+            },
+            ..self
+        }
+    }
+
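+    // Dispatches `update_channel` and asserts the expected result. On success
+    // the emitted `ChannelUpdated` event and the net deletion-prize flow
+    // (prizes refunded for removed assets minus prizes deposited for uploads)
+    // are verified; on failure the channel, balance and data objects must be
+    // left untouched.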
+    pub fn call_and_assert(&self, expected_result: DispatchResult) {
+        let origin = Origin::signed(self.sender.clone());
+        let balance_pre = Balances::usable_balance(self.sender);
+        let channel_pre = Content::channel_by_id(&self.channel_id);
+        let bag_id_for_channel = Content::bag_id_for_channel(&self.channel_id);
+
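+        // deletion prizes expected to be deposited for uploads and refunded
+        // for removals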
+        let deletion_prize_deposited =
+            self.params
+                .assets_to_upload
+                .as_ref()
+                .map_or(BalanceOf::<Test>::zero(), |assets| {
+                    assets
+                        .object_creation_list
+                        .iter()
+                        .fold(BalanceOf::<Test>::zero(), |acc, _| {
+                            acc.saturating_add(
+                                <Test as storage::Trait>::DataObjectDeletionPrize::get(),
+                            )
+                        })
+                });
+
+        let deletion_prize_withdrawn = if !self.params.assets_to_remove.is_empty() {
+            self.params
+                .assets_to_remove
+                .iter()
+                .fold(BalanceOf::<Test>::zero(), |acc, id| {
+                    acc + storage::DataObjectsById::<Test>::get(&bag_id_for_channel, id)
+                        .deletion_prize
+                })
+        } else {
+            BalanceOf::<Test>::zero()
+        };
+
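+        // snapshot the data object id counter to detect objects created by
+        // the call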
+        let beg_obj_id = storage::NextDataObjectId::<Test>::get();
+
+        let actual_result = Content::update_channel(
+            origin,
+            self.actor.clone(),
+            self.channel_id,
+            self.params.clone(),
+        );
+
+        let channel_post = Content::channel_by_id(&self.channel_id);
+        let end_obj_id = storage::NextDataObjectId::<Test>::get();
+        let balance_post = Balances::usable_balance(self.sender);
+
+        assert_eq!(actual_result, expected_result);
+
+        match actual_result {
+            Ok(()) => {
+                let owner = channel_post.owner.clone();
+                assert_eq!(
+                    System::events().last().unwrap().event,
+                    MetaEvent::content(RawEvent::ChannelUpdated(
+                        self.actor.clone(),
+                        self.channel_id,
+                        ChannelRecord {
+                            owner,
+                            is_censored: channel_pre.is_censored,
+                            reward_account: self
+                                .params
+                                .reward_account
+                                .clone()
+                                .unwrap_or(channel_pre.reward_account),
+                            collaborators: self
+                                .params
+                                .collaborators
+                                .clone()
+                                .unwrap_or(channel_pre.collaborators),
+                            num_videos: channel_pre.num_videos,
+                        },
+                        self.params.clone(),
+                    ))
+                );
+
+                assert_eq!(
+                    balance_post.saturating_sub(balance_pre),
+                    deletion_prize_withdrawn.saturating_sub(deletion_prize_deposited),
+                );
+
+                if self.params.assets_to_upload.is_some() {
+                    assert!((beg_obj_id..end_obj_id).all(|id| {
+                        storage::DataObjectsById::<Test>::contains_key(&bag_id_for_channel, id)
+                    }));
+                }
+
+                assert!(!self.params.assets_to_remove.iter().any(|id| {
+                    storage::DataObjectsById::<Test>::contains_key(&bag_id_for_channel, id)
+                }));
+            }
+            Err(err) => {
+                assert_eq!(channel_pre, channel_post);
+                assert_eq!(balance_pre, balance_post);
+                assert_eq!(beg_obj_id, end_obj_id);
+
+                if err != storage::Error::<Test>::DataObjectDoesntExist.into() {
+                    assert!(self.params.assets_to_remove.iter().all(|id| {
+                        storage::DataObjectsById::<Test>::contains_key(&bag_id_for_channel, id)
+                    }))
+                }
+            }
+        }
+    }
+}
+
+pub struct UpdateVideoFixture {
+    sender: AccountId,
+    actor: ContentActor<CuratorGroupId, CuratorId, MemberId>,
+    video_id: VideoId,
+    params: VideoUpdateParameters<Test>,
+}
+
+impl UpdateVideoFixture {
+    pub fn default() -> Self {
+        Self {
+            sender: DEFAULT_MEMBER_ACCOUNT_ID,
+            actor: ContentActor::Member(DEFAULT_MEMBER_ID),
+            video_id: VideoId::one(),
+            params: VideoUpdateParameters::<Test> {
+                assets_to_upload: None,
+                assets_to_remove: BTreeSet::new(),
+                new_meta: None,
+            },
+        }
+    }
+
+    pub fn with_sender(self, sender: AccountId) -> Self {
+        Self { sender, ..self }
+    }
+
+    pub fn with_actor(self, actor: ContentActor<CuratorGroupId, CuratorId, MemberId>) -> Self {
+        Self { actor, ..self }
+    }
+
+    pub fn with_video_id(self, video_id: VideoId) -> Self {
+        Self { video_id, ..self }
+    }
+
+    pub fn with_assets_to_upload(self, assets: StorageAssets<Test>) -> Self {
+        Self {
+            params: VideoUpdateParameters::<Test> {
+                assets_to_upload: Some(assets),
+                ..self.params
+            },
+            ..self
+        }
+    }
+
+    pub fn with_assets_to_remove(self, assets: BTreeSet<DataObjectId<Test>>) -> Self {
+        Self {
+            params: VideoUpdateParameters::<Test> {
+                assets_to_remove: assets,
+                ..self.params
+            },
+            ..self
+        }
+    }
+
+    pub fn call_and_assert(&self, expected_result: DispatchResult) {
+        let origin = Origin::signed(self.sender.clone());
+        let balance_pre = Balances::usable_balance(self.sender);
+        let video_pre = Content::video_by_id(&self.video_id);
+        let bag_id_for_channel = Content::bag_id_for_channel(&video_pre.in_channel);
+        let beg_obj_id = storage::NextDataObjectId::<Test>::get();
+
+        let deletion_prize_deposited =
+            self.params
+                .assets_to_upload
+                .as_ref()
+                .map_or(BalanceOf::<Test>::zero(), |assets| {
+                    assets
+                        .object_creation_list
+                        .iter()
+                        .fold(BalanceOf::<Test>::zero(), |acc, _| {
+                            acc.saturating_add(
+                                <Test as storage::Trait>::DataObjectDeletionPrize::get(),
+                            )
+                        })
+                });
+
+        let deletion_prize_withdrawn = if !self.params.assets_to_remove.is_empty() {
+            self.params
+                .assets_to_remove
+                .iter()
+                .fold(BalanceOf::<Test>::zero(), |acc, obj_id| {
+                    acc + storage::DataObjectsById::<Test>::get(&bag_id_for_channel, obj_id)
+                        .deletion_prize
+                })
+        } else {
+            BalanceOf::<Test>::zero()
+        };
+
+        let actual_result = Content::update_video(
+            origin,
+            self.actor.clone(),
+            self.video_id,
+            self.params.clone(),
+        );
+
+        let end_obj_id = storage::NextDataObjectId::<Test>::get();
+        let balance_post = Balances::usable_balance(self.sender);
+        let video_post = Content::video_by_id(&self.video_id);
+
+        assert_eq!(actual_result, expected_result);
+
+        match actual_result {
+            Ok(()) => {
+                assert_eq!(
+                    System::events().last().unwrap().event,
+                    MetaEvent::content(RawEvent::VideoUpdated(
+                        self.actor.clone(),
+                        self.video_id,
+                        self.params.clone()
+                    ))
+                );
+
+                assert_eq!(
+                    balance_post.saturating_sub(balance_pre),
+                    deletion_prize_withdrawn.saturating_sub(deletion_prize_deposited),
+                );
+
+                if self.params.assets_to_upload.is_some() {
+                    assert!((beg_obj_id..end_obj_id).all(|id| {
+                        storage::DataObjectsById::<Test>::contains_key(&bag_id_for_channel, id)
+                    }));
+                }
+
+                assert!(!self.params.assets_to_remove.iter().any(|obj_id| {
+                    storage::DataObjectsById::<Test>::contains_key(&bag_id_for_channel, obj_id)
+                }));
+            }
+            Err(err) => {
+                assert_eq!(video_pre, video_post);
+                assert_eq!(balance_pre, balance_post);
+                assert_eq!(beg_obj_id, end_obj_id);
+
+                if err != storage::Error::<Test>::DataObjectDoesntExist.into() {
+                    assert!(self.params.assets_to_remove.iter().all(|id| {
+                        storage::DataObjectsById::<Test>::contains_key(&bag_id_for_channel, id)
+                    }));
+                }
+            }
+        }
+    }
+}
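+
+// A minimal usage sketch for UpdateVideoFixture (illustrative only, not
+// exercised by any test): it assumes the default member-owned channel and its
+// first video were created via the helpers defined later in this file.
+#[allow(dead_code)]
+fn example_update_video_fixture_usage() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel_with_video();
+
+        // a no-op update (no assets, no new metadata) by the channel owner succeeds
+        UpdateVideoFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_video_id(VideoId::one())
+            .call_and_assert(Ok(()));
+    })
+}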
+
+pub struct DeleteChannelFixture {
+    sender: AccountId,
+    actor: ContentActor<CuratorGroupId, CuratorId, MemberId>,
+    channel_id: ChannelId,
+    num_objects_to_delete: u64,
+}
+
+impl DeleteChannelFixture {
+    pub fn default() -> Self {
+        Self {
+            sender: DEFAULT_MEMBER_ACCOUNT_ID,
+            actor: ContentActor::Member(DEFAULT_MEMBER_ID),
+            channel_id: ChannelId::one(),
+            num_objects_to_delete: DATA_OBJECTS_NUMBER as u64,
+        }
+    }
+
+    pub fn with_sender(self, sender: AccountId) -> Self {
+        Self { sender, ..self }
+    }
+
+    pub fn with_actor(self, actor: ContentActor<CuratorGroupId, CuratorId, MemberId>) -> Self {
+        Self { actor, ..self }
+    }
+
+    pub fn with_num_objects_to_delete(self, num_objects_to_delete: u64) -> Self {
+        Self {
+            num_objects_to_delete,
+            ..self
+        }
+    }
+
+    pub fn with_channel_id(self, channel_id: ChannelId) -> Self {
+        Self { channel_id, ..self }
+    }
+
+    pub fn call_and_assert(&self, expected_result: DispatchResult) {
+        let origin = Origin::signed(self.sender.clone());
+        let balance_pre = Balances::usable_balance(self.sender);
+        let bag_id_for_channel = Content::bag_id_for_channel(&self.channel_id);
+        let bag_deletion_prize = storage::Bags::<Test>::get(&bag_id_for_channel)
+            .deletion_prize
+            .unwrap_or(BalanceOf::<Test>::zero());
+        let objects_deletion_prize =
+            storage::DataObjectsById::<Test>::iter_prefix(&bag_id_for_channel)
+                .fold(BalanceOf::<Test>::zero(), |acc, (_, obj)| {
+                    acc + obj.deletion_prize
+                });
+
+        let channel_objects_ids =
+            storage::DataObjectsById::<Test>::iter_prefix(&bag_id_for_channel)
+                .map(|(id, _)| id)
+                .collect::<BTreeSet<_>>();
+
+        let actual_result = Content::delete_channel(
+            origin,
+            self.actor.clone(),
+            self.channel_id,
+            self.num_objects_to_delete,
+        );
+
+        let balance_post = Balances::usable_balance(self.sender);
+        assert_eq!(actual_result, expected_result);
+
+        match actual_result {
+            Ok(()) => {
+                assert_eq!(
+                    System::events().last().unwrap().event,
+                    MetaEvent::content(RawEvent::ChannelDeleted(
+                        self.actor.clone(),
+                        self.channel_id,
+                    ))
+                );
+
+                let deletion_prize = bag_deletion_prize.saturating_add(objects_deletion_prize);
+
+                assert_eq!(balance_post.saturating_sub(balance_pre), deletion_prize);
+                assert!(!<ChannelById<Test>>::contains_key(&self.channel_id));
+                assert!(!channel_objects_ids.iter().any(|id| {
+                    storage::DataObjectsById::<Test>::contains_key(&bag_id_for_channel, id)
+                }));
+                assert!(!storage::Bags::<Test>::contains_key(&bag_id_for_channel));
+            }
+
+            Err(err) => {
+                assert_eq!(balance_pre, balance_post);
+                if err != Error::<Test>::ChannelDoesNotExist.into() {
+                    assert!(ChannelById::<Test>::contains_key(&self.channel_id));
+                    assert!(channel_objects_ids.iter().all(|id| {
+                        storage::DataObjectsById::<Test>::contains_key(&bag_id_for_channel, id)
+                    }));
+                    assert!(storage::Bags::<Test>::contains_key(&bag_id_for_channel));
+                }
+            }
+        }
+    }
+}
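+
+// Usage sketch for DeleteChannelFixture (illustrative only): deleting the
+// default member-owned channel together with all of its DATA_OBJECTS_NUMBER
+// objects refunds the accumulated deletion prizes to the sender.
+#[allow(dead_code)]
+fn example_delete_channel_fixture_usage() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        DeleteChannelFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_num_objects_to_delete(DATA_OBJECTS_NUMBER)
+            .call_and_assert(Ok(()));
+    })
+}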
+
+pub struct DeleteVideoFixture {
+    sender: AccountId,
+    actor: ContentActor<CuratorGroupId, CuratorId, MemberId>,
+    video_id: VideoId,
+    assets_to_remove: BTreeSet<DataObjectId<Test>>,
+}
+
+impl DeleteVideoFixture {
+    pub fn default() -> Self {
+        Self {
+            sender: DEFAULT_MEMBER_ACCOUNT_ID,
+            actor: ContentActor::Member(DEFAULT_MEMBER_ID),
+            video_id: VideoId::one(),
+            assets_to_remove: BTreeSet::new(),
+        }
+    }
+
+    pub fn with_sender(self, sender: AccountId) -> Self {
+        Self { sender, ..self }
+    }
+
+    pub fn with_actor(self, actor: ContentActor<CuratorGroupId, CuratorId, MemberId>) -> Self {
+        Self { actor, ..self }
+    }
+
+    pub fn with_assets_to_remove(self, assets_to_remove: BTreeSet<DataObjectId<Test>>) -> Self {
+        Self {
+            assets_to_remove,
+            ..self
+        }
+    }
+
+    pub fn with_video_id(self, video_id: VideoId) -> Self {
+        Self { video_id, ..self }
+    }
+
+    pub fn call_and_assert(&self, expected_result: DispatchResult) {
+        let origin = Origin::signed(self.sender.clone());
+        let balance_pre = Balances::usable_balance(self.sender);
+        let video_pre = <VideoById<Test>>::get(&self.video_id);
+        let channel_bag_id = Content::bag_id_for_channel(&video_pre.in_channel);
+        let deletion_prize =
+            self.assets_to_remove
+                .iter()
+                .fold(BalanceOf::<Test>::zero(), |acc, obj_id| {
+                    acc + storage::DataObjectsById::<Test>::get(&channel_bag_id, obj_id)
+                        .deletion_prize
+                });
+
+        let actual_result = Content::delete_video(
+            origin,
+            self.actor.clone(),
+            self.video_id,
+            self.assets_to_remove.clone(),
+        );
+
+        let balance_post = Balances::usable_balance(self.sender);
+
+        assert_eq!(actual_result, expected_result);
+
+        match actual_result {
+            Ok(()) => {
+                assert_eq!(
+                    System::events().last().unwrap().event,
+                    MetaEvent::content(RawEvent::VideoDeleted(self.actor.clone(), self.video_id))
+                );
+
+                assert_eq!(balance_post.saturating_sub(balance_pre), deletion_prize);
+
+                assert!(!self.assets_to_remove.iter().any(|obj_id| {
+                    storage::DataObjectsById::<Test>::contains_key(&channel_bag_id, obj_id)
+                }));
+
+                assert!(!<VideoById<Test>>::contains_key(&self.video_id));
+            }
+            Err(err) => {
+                assert_eq!(balance_pre, balance_post);
+
+                if err == storage::Error::<Test>::DataObjectDoesntExist.into() {
+                    let video_post = <VideoById<Test>>::get(&self.video_id);
+                    assert_eq!(video_pre, video_post);
+                    assert!(VideoById::<Test>::contains_key(&self.video_id));
+                } else if err == Error::<Test>::VideoDoesNotExist.into() {
+                    assert!(self.assets_to_remove.iter().all(|id| {
+                        storage::DataObjectsById::<Test>::contains_key(&channel_bag_id, id)
+                    }));
+                } else {
+                    let video_post = <VideoById<Test>>::get(&self.video_id);
+                    assert_eq!(video_pre, video_post);
+                    assert!(VideoById::<Test>::contains_key(&self.video_id));
+                    assert!(self.assets_to_remove.iter().all(|id| {
+                        storage::DataObjectsById::<Test>::contains_key(&channel_bag_id, id)
+                    }));
+                }
+            }
+        }
+    }
+}
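+
+// Usage sketch (illustrative): the default DeleteVideoFixture targets
+// VideoId::one() and removes no assets. To also reclaim the deletion prizes,
+// pass the video's data-object ids, e.g.
+//
+//     DeleteVideoFixture::default()
+//         .with_assets_to_remove(video_assets)
+//         .call_and_assert(Ok(()));
+//
+// where `video_assets` is a BTreeSet<DataObjectId<Test>> collected from
+// storage::DataObjectsById for the channel's bag.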
+
+// helper functions
+pub fn increase_account_balance_helper(account_id: u64, balance: u64) {
+    let _ = Balances::deposit_creating(&account_id, balance);
+}
+
+pub fn slash_account_balance_helper(account_id: u64) {
+    let _ = Balances::slash(&account_id, Balances::total_balance(&account_id));
+}
+
+pub fn create_data_object_candidates_helper(
+    starting_ipfs_index: u8,
+    number: u64,
+) -> Vec<DataObjectCreationParameters> {
+    let range = (starting_ipfs_index as u64)..((starting_ipfs_index as u64) + number);
+
+    range
+        .into_iter()
+        .map(|idx| DataObjectCreationParameters {
+            size: DEFAULT_OBJECT_SIZE,
+            ipfs_content_id: vec![idx as u8],
+        })
+        .collect()
+}
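+
+// e.g. create_data_object_candidates_helper(1, 3) yields three candidates of
+// size DEFAULT_OBJECT_SIZE with ipfs content ids [1], [2] and [3].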
+
+pub fn create_data_objects_helper() -> Vec<DataObjectCreationParameters> {
+    create_data_object_candidates_helper(1, DATA_OBJECTS_NUMBER)
+}
+
+pub fn create_initial_storage_buckets_helper() {
+    // first set limits
+    assert_eq!(
+        Storage::<Test>::update_storage_buckets_voucher_max_limits(
+            Origin::signed(STORAGE_WG_LEADER_ACCOUNT_ID),
+            VOUCHER_OBJECTS_SIZE_LIMIT,
+            VOUCHER_OBJECTS_NUMBER_LIMIT,
+        ),
+        Ok(())
+    );
+
+    // create bucket(s)
+    assert_eq!(
+        Storage::<Test>::create_storage_bucket(
+            Origin::signed(STORAGE_WG_LEADER_ACCOUNT_ID),
+            None,
+            STORAGE_BUCKET_ACCEPTING_BAGS,
+            STORAGE_BUCKET_OBJECTS_SIZE_LIMIT,
+            STORAGE_BUCKET_OBJECTS_NUMBER_LIMIT,
+        ),
+        Ok(())
+    );
+}
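+
+// Note: the voucher limits set above are twice the bucket limits
+// (VOUCHER_OBJECTS_NUMBER_LIMIT = 2 * STORAGE_BUCKET_OBJECTS_NUMBER_LIMIT), so
+// a single bucket can absorb every object the tests create.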
+
+pub fn create_default_member_owned_channel() {
+    CreateChannelFixture::default()
+        .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+        .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+        .with_assets(StorageAssets::<Test> {
+            expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+            object_creation_list: create_data_objects_helper(),
+        })
+        .with_reward_account(DEFAULT_MEMBER_ACCOUNT_ID)
+        .with_collaborators(vec![COLLABORATOR_MEMBER_ID].into_iter().collect())
+        .call_and_assert(Ok(()));
+}
+
+pub fn create_default_curator_owned_channel() {
+    let curator_group_id = curators::add_curator_to_new_group(DEFAULT_CURATOR_ID);
+    CreateChannelFixture::default()
+        .with_sender(DEFAULT_CURATOR_ACCOUNT_ID)
+        .with_actor(ContentActor::Curator(curator_group_id, DEFAULT_CURATOR_ID))
+        .with_assets(StorageAssets::<Test> {
+            expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+            object_creation_list: create_data_objects_helper(),
+        })
+        .with_reward_account(DEFAULT_CURATOR_ACCOUNT_ID)
+        .with_collaborators(vec![COLLABORATOR_MEMBER_ID].into_iter().collect())
+        .call_and_assert(Ok(()));
+}
+
+pub fn create_default_member_owned_channel_with_video() {
+    create_default_member_owned_channel();
+
+    CreateVideoFixture::default()
+        .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+        .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+        .with_assets(StorageAssets::<Test> {
+            expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+            object_creation_list: create_data_objects_helper(),
+        })
+        .with_channel_id(NextChannelId::<Test>::get() - 1)
+        .call_and_assert(Ok(()));
+}
+
+pub fn create_default_curator_owned_channel_with_video() {
+    create_default_curator_owned_channel();
+    let curator_group_id = NextCuratorGroupId::<Test>::get() - 1;
+
+    CreateVideoFixture::default()
+        .with_sender(DEFAULT_CURATOR_ACCOUNT_ID)
+        .with_actor(ContentActor::Curator(curator_group_id, DEFAULT_CURATOR_ID))
+        .with_assets(StorageAssets::<Test> {
+            expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+            object_creation_list: create_data_objects_helper(),
+        })
+        .with_channel_id(NextChannelId::<Test>::get() - 1)
+        .call_and_assert(Ok(()));
+}
+
+pub fn create_default_member_owned_channels_with_videos() -> (u64, u64) {
+    for _ in 0..OUTSTANDING_CHANNELS {
+        create_default_member_owned_channel();
+    }
+
+    for i in 0..OUTSTANDING_VIDEOS {
+        CreateVideoFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_assets(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .with_channel_id(i % OUTSTANDING_CHANNELS + 1)
+            .call_and_assert(Ok(()));
+    }
+
+    // assert that the specified channels and videos have been created
+    assert_eq!(VideoById::<Test>::iter().count() as u64, OUTSTANDING_VIDEOS);
+    assert_eq!(
+        ChannelById::<Test>::iter().count() as u64,
+        OUTSTANDING_CHANNELS
+    );
+
+    let channels_migrations_per_block = <Test as Trait>::ChannelsMigrationsEachBlock::get();
+    let videos_migrations_per_block = <Test as Trait>::VideosMigrationsEachBlock::get();
+
+    // return the number of blocks required to migrate all channels and all videos, respectively
+    let divide_with_ceiling =
+        |x: u64, y: u64| (x / y) + ((x.checked_rem(y).unwrap_or_default() > 0u64) as u64);
+    (
+        divide_with_ceiling(OUTSTANDING_CHANNELS, channels_migrations_per_block),
+        divide_with_ceiling(OUTSTANDING_VIDEOS, videos_migrations_per_block),
+    )
+}
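+
+// With the mock constants this evaluates to ceil(4 / 1) = 4 blocks for
+// channels and ceil(8 / 2) = 4 blocks for videos.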

+ 21 - 74
runtime-modules/content/src/tests/migration.rs

@@ -1,9 +1,7 @@
 #![cfg(test)]
-
+use super::fixtures::*;
 use super::mock::*;
-use crate::sp_api_hidden_includes_decl_storage::hidden_include::traits::Currency;
 use crate::*;
-use std::ops::Rem;
 
 fn assert_video_and_channel_existrinsics_with(result: DispatchResult) {
     let params = VideoCreationParametersRecord {
@@ -27,8 +25,8 @@ fn assert_video_and_channel_existrinsics_with(result: DispatchResult) {
 
     assert_eq!(
         Content::create_channel(
-            Origin::signed(FIRST_MEMBER_ORIGIN),
-            ContentActor::Member(FIRST_MEMBER_ID),
+            Origin::signed(DEFAULT_MEMBER_ACCOUNT_ID),
+            ContentActor::Member(DEFAULT_MEMBER_ID),
             ChannelCreationParametersRecord {
                 assets: None,
                 meta: Some(vec![]),
@@ -41,8 +39,8 @@ fn assert_video_and_channel_existrinsics_with(result: DispatchResult) {
 
     assert_eq!(
         Content::create_video(
-            Origin::signed(FIRST_MEMBER_ORIGIN),
-            ContentActor::Member(FIRST_MEMBER_ID),
+            Origin::signed(DEFAULT_MEMBER_ACCOUNT_ID),
+            ContentActor::Member(DEFAULT_MEMBER_ID),
             channel_id.clone(),
             params.clone()
         ),
@@ -50,8 +48,8 @@ fn assert_video_and_channel_existrinsics_with(result: DispatchResult) {
     );
     assert_eq!(
         Content::update_channel(
-            Origin::signed(FIRST_MEMBER_ORIGIN),
-            ContentActor::Member(FIRST_MEMBER_ID),
+            Origin::signed(DEFAULT_MEMBER_ACCOUNT_ID),
+            ContentActor::Member(DEFAULT_MEMBER_ID),
             channel_id.clone(),
             ChannelUpdateParametersRecord {
                 assets_to_upload: None,
@@ -65,8 +63,8 @@ fn assert_video_and_channel_existrinsics_with(result: DispatchResult) {
     );
     assert_eq!(
         Content::update_video(
-            Origin::signed(FIRST_MEMBER_ORIGIN),
-            ContentActor::Member(FIRST_MEMBER_ID),
+            Origin::signed(DEFAULT_MEMBER_ACCOUNT_ID),
+            ContentActor::Member(DEFAULT_MEMBER_ID),
             video_id.clone(),
             VideoUpdateParametersRecord {
                 assets_to_upload: None,
@@ -79,8 +77,8 @@ fn assert_video_and_channel_existrinsics_with(result: DispatchResult) {
 
     assert_eq!(
         Content::update_channel_censorship_status(
-            Origin::signed(FIRST_MEMBER_ORIGIN),
-            ContentActor::Member(FIRST_MEMBER_ID),
+            Origin::signed(DEFAULT_MEMBER_ACCOUNT_ID),
+            ContentActor::Member(DEFAULT_MEMBER_ID),
             channel_id.clone(),
             false,
             b"test".to_vec()
@@ -90,8 +88,8 @@ fn assert_video_and_channel_existrinsics_with(result: DispatchResult) {
 
     assert_eq!(
         Content::update_video_censorship_status(
-            Origin::signed(FIRST_MEMBER_ORIGIN),
-            ContentActor::Member(FIRST_MEMBER_ID),
+            Origin::signed(DEFAULT_MEMBER_ACCOUNT_ID),
+            ContentActor::Member(DEFAULT_MEMBER_ID),
             video_id.clone(),
             false,
             b"test".to_vec()
@@ -101,8 +99,8 @@ fn assert_video_and_channel_existrinsics_with(result: DispatchResult) {
 
     assert_eq!(
         Content::delete_video(
-            Origin::signed(FIRST_MEMBER_ORIGIN),
-            ContentActor::Member(FIRST_MEMBER_ID),
+            Origin::signed(DEFAULT_MEMBER_ACCOUNT_ID),
+            ContentActor::Member(DEFAULT_MEMBER_ID),
             video_id.clone(),
             BTreeSet::new(),
         ),
@@ -110,8 +108,8 @@ fn assert_video_and_channel_existrinsics_with(result: DispatchResult) {
     );
     assert_eq!(
         Content::delete_channel(
-            Origin::signed(FIRST_MEMBER_ORIGIN),
-            ContentActor::Member(FIRST_MEMBER_ID),
+            Origin::signed(DEFAULT_MEMBER_ACCOUNT_ID),
+            ContentActor::Member(DEFAULT_MEMBER_ID),
             channel_id.clone(),
             0u64,
         ),
@@ -119,72 +117,21 @@ fn assert_video_and_channel_existrinsics_with(result: DispatchResult) {
     );
 }
 
-fn setup_scenario_with(n_videos: u64, n_channels: u64) -> (u64, u64) {
-    let _ = balances::Module::<Test>::deposit_creating(
-        &FIRST_MEMBER_ORIGIN,
-        <Test as balances::Trait>::Balance::from(10_000u32),
-    );
-
-    // create n_channels channels
-    for _ in 0..n_channels {
-        create_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            ChannelCreationParametersRecord {
-                assets: None,
-                meta: Some(vec![]),
-                reward_account: None,
-                collaborators: BTreeSet::new(),
-            },
-            Ok(()),
-        );
-    }
-
-    let params = VideoCreationParametersRecord {
-        assets: None,
-        meta: None,
-    };
-
-    // create n_videos videos
-    for i in 0..n_videos {
-        create_video_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            i.rem(n_channels) + 1,
-            params.clone(),
-            Ok(()),
-        );
-    }
-
-    // assert that the specified channels have been created
-    assert_eq!(VideoById::<Test>::iter().count() as u64, n_videos);
-    assert_eq!(ChannelById::<Test>::iter().count() as u64, n_channels);
-
-    let channels_migrations_per_block = <Test as Trait>::ChannelsMigrationsEachBlock::get();
-    let videos_migrations_per_block = <Test as Trait>::VideosMigrationsEachBlock::get();
-
-    // return the number of blocks required for migration
-    let divide_with_ceiling =
-        |x: u64, y: u64| (x / y) + ((x.checked_rem(y).unwrap_or_default() > 0u64) as u64);
-    (
-        divide_with_ceiling(n_channels, channels_migrations_per_block),
-        divide_with_ceiling(n_videos, videos_migrations_per_block),
-    )
-}
-
 #[test]
 fn migration_test() {
     with_default_mock_builder(|| {
-        const START_MIGRATION_AT_BLOCK: u64 = 1;
         run_to_block(START_MIGRATION_AT_BLOCK);
 
         // setup scenario
-        let (blocks_channels, blocks_videos) = setup_scenario_with(100u64, 100u64);
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_initial_storage_buckets_helper();
+        let (blocks_channels, blocks_videos) = create_default_member_owned_channels_with_videos();
 
         // block at which all migrations should be completed
         let last_migration_block = std::cmp::max(blocks_channels, blocks_videos);
 
         // ensure we have setup scenario to properly test migration over multiple blocks
+        println!("last migration block:\t{:?}", last_migration_block);
         assert!(last_migration_block > START_MIGRATION_AT_BLOCK);
 
         // triggering migration

+ 96 - 324
runtime-modules/content/src/tests/mock.rs

@@ -2,7 +2,7 @@
 
 use crate::*;
 use frame_support::dispatch::{DispatchError, DispatchResult};
-use frame_support::traits::{Currency, OnFinalize, OnInitialize};
+use frame_support::traits::{OnFinalize, OnInitialize};
 use frame_support::{impl_outer_event, impl_outer_origin, parameter_types};
 use sp_core::H256;
 use sp_runtime::{
@@ -20,39 +20,53 @@ pub type CuratorGroupId = <Test as ContentActorAuthenticator>::CuratorGroupId;
 pub type MemberId = <Test as MembershipTypes>::MemberId;
 pub type ChannelId = <Test as StorageOwnership>::ChannelId;
 
-/// Origins
-
-pub const LEAD_ORIGIN: u64 = 1;
-
-pub const FIRST_CURATOR_ORIGIN: u64 = 2;
-pub const SECOND_CURATOR_ORIGIN: u64 = 3;
-
-pub const FIRST_MEMBER_ORIGIN: u64 = 4;
-pub const SECOND_MEMBER_ORIGIN: u64 = 5;
-pub const UNKNOWN_ORIGIN: u64 = 7777;
-pub const UNKNOWN_MEMBER_ID: u64 = 7777;
+/// Accounts
+pub const DEFAULT_MEMBER_ACCOUNT_ID: u64 = 101;
+pub const DEFAULT_CURATOR_ACCOUNT_ID: u64 = 102;
+pub const LEAD_ACCOUNT_ID: u64 = 103;
+pub const COLLABORATOR_MEMBER_ACCOUNT_ID: u64 = 104;
+pub const UNAUTHORIZED_MEMBER_ACCOUNT_ID: u64 = 105;
+pub const UNAUTHORIZED_CURATOR_ACCOUNT_ID: u64 = 106;
+pub const UNAUTHORIZED_COLLABORATOR_MEMBER_ACCOUNT_ID: u64 = 107;
+pub const UNAUTHORIZED_LEAD_ACCOUNT_ID: u64 = 108;
 
 // Members range from MemberId 1 to 10
 pub const MEMBERS_COUNT: MemberId = 10;
 
 /// Runtime Id's
-
-pub const FIRST_CURATOR_ID: CuratorId = 1;
-pub const SECOND_CURATOR_ID: CuratorId = 2;
-
-pub const FIRST_CURATOR_GROUP_ID: CuratorGroupId = 1;
-// pub const SECOND_CURATOR_GROUP_ID: CuratorGroupId = 2;
-
-pub const FIRST_MEMBER_ID: MemberId = 1;
-pub const SECOND_MEMBER_ID: MemberId = 2;
-
-// members that act as collaborators
-pub const COLLABORATOR_MEMBER_ORIGIN: MemberId = 8;
-pub const COLLABORATOR_MEMBER_ID: MemberId = 9;
-
-/// Constants
-// initial balancer for an account
-pub const INITIAL_BALANCE: u32 = 1_000_000;
+pub const DEFAULT_MEMBER_ID: MemberId = 201;
+pub const DEFAULT_CURATOR_ID: CuratorId = 202;
+pub const COLLABORATOR_MEMBER_ID: u64 = 204;
+pub const UNAUTHORIZED_MEMBER_ID: u64 = 205;
+pub const UNAUTHORIZED_CURATOR_ID: u64 = 206;
+pub const UNAUTHORIZED_COLLABORATOR_MEMBER_ID: u64 = 207;
+
+// Storage module & migration parameters
+// It is assumed that the number of objects in a channel equals the number of
+// objects in a video; changing this will make the tests fail.
+// TODO: use a separate number of objects per channel / video in the Olympia release tests
+
+pub const DATA_OBJECT_DELETION_PRIZE: u64 = 5;
+pub const DEFAULT_OBJECT_SIZE: u64 = 5;
+pub const DATA_OBJECTS_NUMBER: u64 = 10;
+pub const VIDEO_MIGRATIONS_PER_BLOCK: u64 = 2;
+pub const CHANNEL_MIGRATIONS_PER_BLOCK: u64 = 1;
+pub const MIGRATION_BLOCKS: u64 = 4;
+
+pub const OUTSTANDING_VIDEOS: u64 = MIGRATION_BLOCKS * VIDEO_MIGRATIONS_PER_BLOCK;
+pub const OUTSTANDING_CHANNELS: u64 = MIGRATION_BLOCKS * CHANNEL_MIGRATIONS_PER_BLOCK;
+pub const TOTAL_OBJECTS_NUMBER: u64 =
+    DATA_OBJECTS_NUMBER * (OUTSTANDING_VIDEOS + OUTSTANDING_CHANNELS);
+pub const TOTAL_BALANCE_REQUIRED: u64 = TOTAL_OBJECTS_NUMBER * DATA_OBJECT_DELETION_PRIZE;
+
+pub const STORAGE_BUCKET_OBJECTS_NUMBER_LIMIT: u64 = TOTAL_OBJECTS_NUMBER;
+pub const STORAGE_BUCKET_OBJECTS_SIZE_LIMIT: u64 =
+    DEFAULT_OBJECT_SIZE * STORAGE_BUCKET_OBJECTS_NUMBER_LIMIT;
+pub const STORAGE_BUCKET_ACCEPTING_BAGS: bool = true;
+pub const VOUCHER_OBJECTS_NUMBER_LIMIT: u64 = 2 * STORAGE_BUCKET_OBJECTS_NUMBER_LIMIT;
+pub const VOUCHER_OBJECTS_SIZE_LIMIT: u64 = VOUCHER_OBJECTS_NUMBER_LIMIT * DEFAULT_OBJECT_SIZE;
+pub const INITIAL_BALANCE: u64 = TOTAL_BALANCE_REQUIRED;
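+
+// With the values above: OUTSTANDING_VIDEOS = 4 * 2 = 8, OUTSTANDING_CHANNELS =
+// 4 * 1 = 4, TOTAL_OBJECTS_NUMBER = 10 * (8 + 4) = 120 and
+// TOTAL_BALANCE_REQUIRED = 120 * 5 = 600.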
+
+pub const START_MIGRATION_AT_BLOCK: u64 = 1;
 
 impl_outer_origin! {
     pub enum Origin for Test {}
@@ -171,36 +185,72 @@ impl ContentActorAuthenticator for Test {
     type CuratorGroupId = u64;
 
     fn validate_member_id(member_id: &Self::MemberId) -> bool {
-        *member_id < MEMBERS_COUNT
+        match *member_id {
+            DEFAULT_MEMBER_ID => true,
+            UNAUTHORIZED_MEMBER_ID => true,
+            COLLABORATOR_MEMBER_ID => true,
+            UNAUTHORIZED_COLLABORATOR_MEMBER_ID => true,
+            _ => false,
+        }
     }
 
     fn is_lead(account_id: &Self::AccountId) -> bool {
-        let lead_account_id = ensure_signed(Origin::signed(LEAD_ORIGIN)).unwrap();
-        *account_id == lead_account_id
+        *account_id == ensure_signed(Origin::signed(LEAD_ACCOUNT_ID)).unwrap()
     }
 
     fn is_curator(curator_id: &Self::CuratorId, account_id: &Self::AccountId) -> bool {
-        let first_curator_account_id = ensure_signed(Origin::signed(FIRST_CURATOR_ORIGIN)).unwrap();
-        let second_curator_account_id =
-            ensure_signed(Origin::signed(SECOND_CURATOR_ORIGIN)).unwrap();
-        (first_curator_account_id == *account_id && FIRST_CURATOR_ID == *curator_id)
-            || (second_curator_account_id == *account_id && SECOND_CURATOR_ID == *curator_id)
+        match *curator_id {
+            DEFAULT_CURATOR_ID => {
+                *account_id == ensure_signed(Origin::signed(DEFAULT_CURATOR_ACCOUNT_ID)).unwrap()
+            }
+
+            UNAUTHORIZED_CURATOR_ID => {
+                *account_id
+                    == ensure_signed(Origin::signed(UNAUTHORIZED_CURATOR_ACCOUNT_ID)).unwrap()
+            }
+
+            _ => false,
+        }
     }
 
     fn is_member(member_id: &Self::MemberId, account_id: &Self::AccountId) -> bool {
-        let unknown_member_account_id = ensure_signed(Origin::signed(UNKNOWN_ORIGIN)).unwrap();
-        *member_id < MEMBERS_COUNT && unknown_member_account_id != *account_id
+        match *member_id {
+            DEFAULT_MEMBER_ID => {
+                *account_id == ensure_signed(Origin::signed(DEFAULT_MEMBER_ACCOUNT_ID)).unwrap()
+            }
+
+            UNAUTHORIZED_MEMBER_ID => {
+                *account_id
+                    == ensure_signed(Origin::signed(UNAUTHORIZED_MEMBER_ACCOUNT_ID)).unwrap()
+            }
+
+            UNAUTHORIZED_COLLABORATOR_MEMBER_ID => {
+                *account_id
+                    == ensure_signed(Origin::signed(UNAUTHORIZED_COLLABORATOR_MEMBER_ACCOUNT_ID))
+                        .unwrap()
+            }
+
+            COLLABORATOR_MEMBER_ID => {
+                *account_id
+                    == ensure_signed(Origin::signed(COLLABORATOR_MEMBER_ACCOUNT_ID)).unwrap()
+            }
+            _ => false,
+        }
     }
 
     fn is_valid_curator_id(curator_id: &Self::CuratorId) -> bool {
-        *curator_id == FIRST_CURATOR_ID || *curator_id == SECOND_CURATOR_ID
+        match *curator_id {
+            DEFAULT_CURATOR_ID => true,
+            UNAUTHORIZED_CURATOR_ID => true,
+            _ => false,
+        }
     }
 }
 
 parameter_types! {
     pub const MaxNumberOfDataObjectsPerBag: u64 = 4;
     pub const MaxDistributionBucketFamilyNumber: u64 = 4;
-    pub const DataObjectDeletionPrize: u64 = 10;
+    pub const DataObjectDeletionPrize: u64 = DATA_OBJECT_DELETION_PRIZE;
     pub const StorageModuleId: ModuleId = ModuleId(*b"mstorage"); // module storage
     pub const BlacklistSizeLimit: u64 = 1;
     pub const MaxNumberOfPendingInvitationsPerDistributionBucket: u64 = 1;
@@ -212,7 +262,7 @@ parameter_types! {
     pub const DefaultChannelDynamicBagNumberOfStorageBuckets: u64 = 4;
     pub const DistributionBucketsPerBagValueConstraint: storage::DistributionBucketsPerBagValueConstraint =
         storage::StorageBucketsPerBagValueConstraint {min: 3, max_min_diff: 7};
-    pub const MaxDataObjectSize: u64 = 400;
+    pub const MaxDataObjectSize: u64 = VOUCHER_OBJECTS_SIZE_LIMIT;
 }
 
 pub const STORAGE_WG_LEADER_ACCOUNT_ID: u64 = 100001;
@@ -315,9 +365,6 @@ impl storage::Trait for Test {
     }
 }
 
-pub const DEFAULT_MEMBER_ID: u64 = 100;
-pub const DEFAULT_MEMBER_ACCOUNT_ID: u64 = 101;
-
 impl common::origin::ActorOriginValidator<Origin, u64, u64> for () {
     fn ensure_actor_origin(origin: Origin, member_id: u64) -> Result<u64, &'static str> {
         let signed_account_id = frame_system::ensure_signed(origin)?;
@@ -335,8 +382,8 @@ impl common::origin::ActorOriginValidator<Origin, u64, u64> for () {
 parameter_types! {
     pub const MaxNumberOfCuratorsPerGroup: u32 = 10;
     pub const ChannelOwnershipPaymentEscrowId: [u8; 8] = *b"12345678";
-    pub const VideosMigrationsEachBlock: u64 = 20;
-    pub const ChannelsMigrationsEachBlock: u64 = 10;
+    pub const VideosMigrationsEachBlock: u64 = VIDEO_MIGRATIONS_PER_BLOCK;
+    pub const ChannelsMigrationsEachBlock: u64 = CHANNEL_MIGRATIONS_PER_BLOCK;
 }
 
 impl Trait for Test {
@@ -374,6 +421,7 @@ impl Trait for Test {
     type DataObjectStorage = storage::Module<Self>;
 
     type VideosMigrationsEachBlock = VideosMigrationsEachBlock;
+
     type ChannelsMigrationsEachBlock = ChannelsMigrationsEachBlock;
 }
 
@@ -460,280 +508,4 @@ pub fn run_to_block(n: u64) {
 
 pub type CollectiveFlip = randomness_collective_flip::Module<Test>;
 
-pub fn create_channel_mock(
-    sender: u64,
-    actor: ContentActor<CuratorGroupId, CuratorId, MemberId>,
-    params: ChannelCreationParameters<Test>,
-    result: DispatchResult,
-) {
-    let channel_id = Content::next_channel_id();
-
-    assert_eq!(
-        Content::create_channel(Origin::signed(sender), actor.clone(), params.clone()),
-        result.clone(),
-    );
-
-    if result.is_ok() {
-        let owner = Content::actor_to_channel_owner(&actor).unwrap();
-
-        assert_eq!(
-            System::events().last().unwrap().event,
-            MetaEvent::content(RawEvent::ChannelCreated(
-                actor.clone(),
-                channel_id,
-                ChannelRecord {
-                    owner: owner,
-                    is_censored: false,
-                    reward_account: params.reward_account.clone(),
-
-                    collaborators: params.collaborators.clone(),
-                    num_videos: 0,
-                },
-                params,
-            ))
-        );
-    }
-}
-
-pub fn update_channel_mock(
-    sender: u64,
-    actor: ContentActor<CuratorGroupId, CuratorId, MemberId>,
-    channel_id: ChannelId,
-    params: ChannelUpdateParameters<Test>,
-    result: DispatchResult,
-) {
-    let channel_pre = ChannelById::<Test>::get(channel_id.clone());
-
-    assert_eq!(
-        Content::update_channel(
-            Origin::signed(sender),
-            actor.clone(),
-            channel_id.clone(),
-            params.clone(),
-        ),
-        result.clone(),
-    );
-
-    if result.is_ok() {
-        assert_eq!(
-            System::events().last().unwrap().event,
-            MetaEvent::content(RawEvent::ChannelUpdated(
-                actor.clone(),
-                channel_id,
-                ChannelRecord {
-                    owner: channel_pre.owner.clone(),
-                    is_censored: channel_pre.is_censored,
-                    reward_account: params
-                        .reward_account
-                        .map_or_else(|| channel_pre.reward_account.clone(), |account| account),
-                    collaborators: params
-                        .collaborators
-                        .clone()
-                        .unwrap_or(channel_pre.collaborators),
-                    num_videos: channel_pre.num_videos,
-                },
-                params,
-            ))
-        );
-    }
-}
-
-pub fn delete_channel_mock(
-    sender: u64,
-    actor: ContentActor<CuratorGroupId, CuratorId, MemberId>,
-    channel_id: ChannelId,
-    objects_num: u64,
-    result: DispatchResult,
-) {
-    assert_eq!(
-        Content::delete_channel(
-            Origin::signed(sender),
-            actor.clone(),
-            channel_id.clone(),
-            objects_num,
-        ),
-        result.clone(),
-    );
-
-    if result.is_ok() {
-        assert_eq!(
-            System::events().last().unwrap().event,
-            MetaEvent::content(RawEvent::ChannelDeleted(actor.clone(), channel_id))
-        )
-    }
-}
-
-pub fn create_video_mock(
-    sender: u64,
-    actor: ContentActor<CuratorGroupId, CuratorId, MemberId>,
-    channel_id: ChannelId,
-    params: VideoCreationParameters<Test>,
-    result: DispatchResult,
-) {
-    let video_id = Content::next_video_id();
-    let num_videos_pre = Content::channel_by_id(channel_id).num_videos;
-
-    assert_eq!(
-        Content::create_video(
-            Origin::signed(sender),
-            actor.clone(),
-            channel_id.clone(),
-            params.clone()
-        ),
-        result.clone(),
-    );
-
-    if result.is_ok() {
-        assert_eq!(
-            System::events().last().unwrap().event,
-            MetaEvent::content(RawEvent::VideoCreated(
-                actor.clone(),
-                channel_id,
-                video_id,
-                params.clone(),
-            ))
-        );
-        assert_eq!(
-            num_videos_pre + 1,
-            Content::channel_by_id(channel_id).num_videos,
-        );
-    }
-}
-pub fn update_video_mock(
-    sender: u64,
-    actor: ContentActor<CuratorGroupId, CuratorId, MemberId>,
-    video_id: <Test as Trait>::VideoId,
-    params: VideoUpdateParameters<Test>,
-    result: DispatchResult,
-) {
-    // let channel_id = Content::video_by_id(video_id.clone()).in_channel;
-    // let num_videos_pre = Content::channel_by_id(channel_id).num_videos;
-
-    assert_eq!(
-        Content::update_video(
-            Origin::signed(sender),
-            actor.clone(),
-            video_id.clone(),
-            params.clone(),
-        ),
-        result.clone(),
-    );
-
-    if result.is_ok() {
-        assert_eq!(
-            System::events().last().unwrap().event,
-            MetaEvent::content(RawEvent::VideoUpdated(
-                actor.clone(),
-                video_id,
-                params.clone(),
-            ))
-        );
-    }
-}
-
-pub fn delete_video_mock(
-    sender: u64,
-    actor: ContentActor<CuratorGroupId, CuratorId, MemberId>,
-    video_id: <Test as Trait>::VideoId,
-    assets_to_remove: BTreeSet<DataObjectId<Test>>,
-    result: DispatchResult,
-) {
-    assert_eq!(
-        Content::delete_video(
-            Origin::signed(sender),
-            actor.clone(),
-            video_id.clone(),
-            assets_to_remove.clone(),
-        ),
-        result.clone(),
-    );
-
-    if result.is_ok() {
-        assert_eq!(
-            System::events().last().unwrap().event,
-            MetaEvent::content(RawEvent::VideoDeleted(actor.clone(), video_id))
-        );
-    }
-}
-
-// helper functions
-pub fn helper_generate_storage_assets(sizes: Vec<u64>) -> StorageAssets<Test> {
-    StorageAssetsRecord {
-        object_creation_list: sizes
-            .into_iter()
-            .map(|s| DataObjectCreationParameters {
-                size: s,
-                ipfs_content_id: s.encode(),
-            })
-            .collect::<Vec<_>>(),
-        expected_data_size_fee: storage::DataObjectPerMegabyteFee::<Test>::get(),
-    }
-}
-
-pub fn helper_init_accounts(accounts: Vec<u64>) {
-    // give channel owner funds to permit collaborators to update assets
-    for acc in accounts.iter() {
-        let _ = balances::Module::<Test>::deposit_creating(
-            acc,
-            <Test as balances::Trait>::Balance::from(INITIAL_BALANCE),
-        );
-    }
-}
-
-pub fn create_initial_storage_buckets() {
-    // first set limits
-    assert_eq!(
-        Storage::<Test>::update_storage_buckets_voucher_max_limits(
-            Origin::signed(STORAGE_WG_LEADER_ACCOUNT_ID),
-            400,
-            40
-        ),
-        Ok(())
-    );
-
-    // create bucket(s)
-    assert_eq!(
-        Storage::<Test>::create_storage_bucket(
-            Origin::signed(STORAGE_WG_LEADER_ACCOUNT_ID),
-            None,
-            true,
-            100,
-            10,
-        ),
-        Ok(())
-    );
-}
-
-pub fn create_channel_with_bag() {
-    // 3 assets added at creation
-    let assets = StorageAssetsRecord {
-        object_creation_list: vec![
-            DataObjectCreationParameters {
-                size: 3,
-                ipfs_content_id: b"first".to_vec(),
-            },
-            DataObjectCreationParameters {
-                size: 3,
-                ipfs_content_id: b"second".to_vec(),
-            },
-            DataObjectCreationParameters {
-                size: 3,
-                ipfs_content_id: b"third".to_vec(),
-            },
-        ],
-        expected_data_size_fee: storage::DataObjectPerMegabyteFee::<Test>::get(),
-    };
-
-    // create channel
-    create_channel_mock(
-        FIRST_MEMBER_ORIGIN,
-        ContentActor::Member(FIRST_MEMBER_ID),
-        ChannelCreationParametersRecord {
-            assets: Some(assets),
-            meta: None,
-            reward_account: None,
-            collaborators: BTreeSet::new(),
-        },
-        Ok(()),
-    );
-}
+pub type Balances = balances::Module<Test>;

+ 1 - 0
runtime-modules/content/src/tests/mod.rs

@@ -2,6 +2,7 @@
 
 mod channels;
 mod curators;
+mod fixtures;
 mod migration;
 mod mock;
 mod videos;

+ 985 - 502
runtime-modules/content/src/tests/videos.rs

@@ -1,294 +1,26 @@
 #![cfg(test)]
 use super::curators;
+use super::fixtures::*;
 use super::mock::*;
 use crate::*;
-use frame_support::traits::Currency;
 use frame_support::{assert_err, assert_ok};
 
-fn create_member_channel() -> ChannelId {
-    let channel_id = Content::next_channel_id();
-
-    // Member can create the channel
-    assert_ok!(Content::create_channel(
-        Origin::signed(FIRST_MEMBER_ORIGIN),
-        ContentActor::Member(FIRST_MEMBER_ID),
-        ChannelCreationParametersRecord {
-            assets: None,
-            meta: None,
-            reward_account: None,
-            collaborators: BTreeSet::<MemberId>::new(),
-        }
-    ));
-
-    channel_id
-}
-
-#[test]
-fn video_creation_successful() {
-    with_default_mock_builder(|| {
-        run_to_block(1);
-
-        create_initial_storage_buckets();
-
-        // depositi initial balance
-        let _ = balances::Module::<Test>::deposit_creating(
-            &FIRST_MEMBER_ORIGIN,
-            <Test as balances::Trait>::Balance::from(INITIAL_BALANCE),
-        );
-
-        let channel_id = NextChannelId::<Test>::get();
-
-        create_initial_storage_buckets();
-
-        create_channel_with_bag();
-
-        let params = VideoCreationParametersRecord {
-            assets: Some(StorageAssetsRecord {
-                object_creation_list: vec![
-                    DataObjectCreationParameters {
-                        size: 3,
-                        ipfs_content_id: b"first".to_vec(),
-                    },
-                    DataObjectCreationParameters {
-                        size: 3,
-                        ipfs_content_id: b"second".to_vec(),
-                    },
-                    DataObjectCreationParameters {
-                        size: 3,
-                        ipfs_content_id: b"third".to_vec(),
-                    },
-                ],
-                expected_data_size_fee: storage::DataObjectPerMegabyteFee::<Test>::get(),
-            }),
-            meta: Some(b"test".to_vec()),
-        };
-
-        create_video_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            channel_id,
-            params,
-            Ok(()),
-        )
-    })
-}
-
-#[test]
-fn video_update_successful() {
-    with_default_mock_builder(|| {
-        run_to_block(1);
-
-        create_initial_storage_buckets();
-        let _ = balances::Module::<Test>::deposit_creating(
-            &FIRST_MEMBER_ORIGIN,
-            <Test as balances::Trait>::Balance::from(INITIAL_BALANCE),
-        );
-
-        let channel_id = NextChannelId::<Test>::get();
-
-        create_channel_with_bag();
-
-        // create video with 3 assets
-        let params = VideoCreationParametersRecord {
-            assets: Some(StorageAssetsRecord {
-                object_creation_list: vec![
-                    DataObjectCreationParameters {
-                        size: 3,
-                        ipfs_content_id: b"first".to_vec(),
-                    },
-                    DataObjectCreationParameters {
-                        size: 3,
-                        ipfs_content_id: b"second".to_vec(),
-                    },
-                    DataObjectCreationParameters {
-                        size: 3,
-                        ipfs_content_id: b"third".to_vec(),
-                    },
-                ],
-                expected_data_size_fee: storage::DataObjectPerMegabyteFee::<Test>::get(),
-            }),
-            meta: Some(b"test".to_vec()),
-        };
-
-        let video_id = Content::next_video_id();
-
-        let first_obj_id = Storage::<Test>::next_data_object_id();
-
-        create_video_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            channel_id,
-            params,
-            Ok(()),
-        );
-
-        // add 1 asset
-        let update_params = VideoUpdateParametersRecord {
-            assets_to_upload: Some(StorageAssetsRecord {
-                object_creation_list: vec![DataObjectCreationParameters {
-                    size: 3,
-                    ipfs_content_id: b"first".to_vec(),
-                }],
-                expected_data_size_fee: storage::DataObjectPerMegabyteFee::<Test>::get(),
-            }),
-            new_meta: None,
-            assets_to_remove: BTreeSet::new(),
-        };
-
-        let last_obj_id = Storage::<Test>::next_data_object_id();
-
-        update_video_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            video_id,
-            update_params,
-            Ok(()),
-        );
-
-        // remove all assets from the channel the video is in
-        update_video_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            video_id,
-            VideoUpdateParametersRecord {
-                assets_to_upload: None,
-                new_meta: None,
-                assets_to_remove: (first_obj_id..last_obj_id).collect::<BTreeSet<_>>(),
-            },
-            Ok(()),
-        );
-    })
-}
-
 #[test]
-fn member_can_create_videos() {
+fn curators_can_censor_videos() {
     with_default_mock_builder(|| {
         // Run to block one to see emitted events
         run_to_block(1);
-        let channel_id = create_member_channel();
-
-        let video_id = Content::next_video_id();
-        assert_ok!(Content::create_video(
-            Origin::signed(FIRST_MEMBER_ORIGIN),
-            ContentActor::Member(FIRST_MEMBER_ID),
-            channel_id,
-            VideoCreationParametersRecord {
-                assets: None,
-                meta: None,
-            }
-        ));
-
-        assert_eq!(
-            System::events().last().unwrap().event,
-            MetaEvent::content(RawEvent::VideoCreated(
-                ContentActor::Member(FIRST_MEMBER_ID),
-                channel_id,
-                video_id,
-                VideoCreationParametersRecord {
-                    assets: None,
-                    meta: None,
-                }
-            ))
-        );
-
-        // Video is created in correct channel
-        let video = Content::video_by_id(video_id);
-        assert_eq!(channel_id, video.in_channel);
-
-        // Can update own video
-        assert_ok!(Content::update_video(
-            Origin::signed(FIRST_MEMBER_ORIGIN),
-            ContentActor::Member(FIRST_MEMBER_ID),
-            video_id,
-            VideoUpdateParametersRecord {
-                assets_to_upload: None,
-                new_meta: None,
-                assets_to_remove: BTreeSet::new(),
-            },
-        ));
 
-        assert_eq!(
-            System::events().last().unwrap().event,
-            MetaEvent::content(RawEvent::VideoUpdated(
-                ContentActor::Member(FIRST_MEMBER_ID),
-                video_id,
-                VideoUpdateParametersRecord {
-                    assets_to_upload: None,
-                    new_meta: None,
-                    assets_to_remove: BTreeSet::new(),
-                }
-            ))
-        );
-
-        // Member cannot create video in a channel they do not own
-        assert_err!(
-            Content::create_video(
-                Origin::signed(SECOND_MEMBER_ORIGIN),
-                ContentActor::Member(SECOND_MEMBER_ID),
-                channel_id,
-                VideoCreationParametersRecord {
-                    assets: None,
-                    meta: None,
-                }
-            ),
-            Error::<Test>::ActorNotAuthorized
-        );
-
-        // Member cannot update video in a channel they do not own
-        assert_err!(
-            Content::update_video(
-                Origin::signed(SECOND_MEMBER_ORIGIN),
-                ContentActor::Member(SECOND_MEMBER_ID),
-                video_id,
-                VideoUpdateParametersRecord {
-                    assets_to_upload: None,
-                    new_meta: None,
-                    assets_to_remove: BTreeSet::new(),
-                },
-            ),
-            Error::<Test>::ActorNotAuthorized
-        );
-
-        // Member cannot delete video in a channel they do not own
-        assert_err!(
-            Content::delete_video(
-                Origin::signed(SECOND_MEMBER_ORIGIN),
-                ContentActor::Member(SECOND_MEMBER_ID),
-                video_id,
-                BTreeSet::new(),
-            ),
-            Error::<Test>::ActorNotAuthorized
-        );
-
-        // Owner can delete their video
-        assert_ok!(Content::delete_video(
-            Origin::signed(FIRST_MEMBER_ORIGIN),
-            ContentActor::Member(FIRST_MEMBER_ID),
-            video_id,
-            BTreeSet::new(),
-        ));
-
-        assert_eq!(
-            System::events().last().unwrap().event,
-            MetaEvent::content(RawEvent::VideoDeleted(
-                ContentActor::Member(FIRST_MEMBER_ID),
-                video_id
-            ))
-        );
-    })
-}
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
 
-#[test]
-fn curators_can_censor_videos() {
-    with_default_mock_builder(|| {
-        // Run to block one to see emitted events
-        run_to_block(1);
-        let channel_id = create_member_channel();
+        let channel_id = NextChannelId::<Test>::get() - 1;
 
         let video_id = Content::next_video_id();
         assert_ok!(Content::create_video(
-            Origin::signed(FIRST_MEMBER_ORIGIN),
-            ContentActor::Member(FIRST_MEMBER_ID),
+            Origin::signed(DEFAULT_MEMBER_ACCOUNT_ID),
+            ContentActor::Member(DEFAULT_MEMBER_ID),
             channel_id,
             VideoCreationParametersRecord {
                 assets: None,
@@ -296,13 +28,13 @@ fn curators_can_censor_videos() {
             }
         ));
 
-        let group_id = curators::add_curator_to_new_group(FIRST_CURATOR_ID);
+        let group_id = curators::add_curator_to_new_group(DEFAULT_CURATOR_ID);
 
         // Curator can censor videos
         let is_censored = true;
         assert_ok!(Content::update_video_censorship_status(
-            Origin::signed(FIRST_CURATOR_ORIGIN),
-            ContentActor::Curator(group_id, FIRST_CURATOR_ID),
+            Origin::signed(DEFAULT_CURATOR_ACCOUNT_ID),
+            ContentActor::Curator(group_id, DEFAULT_CURATOR_ID),
             video_id,
             is_censored,
             vec![]
@@ -311,7 +43,7 @@ fn curators_can_censor_videos() {
         assert_eq!(
             System::events().last().unwrap().event,
             MetaEvent::content(RawEvent::VideoCensorshipStatusUpdated(
-                ContentActor::Curator(group_id, FIRST_CURATOR_ID),
+                ContentActor::Curator(group_id, DEFAULT_CURATOR_ID),
                 video_id,
                 is_censored,
                 vec![]
@@ -325,8 +57,8 @@ fn curators_can_censor_videos() {
         // Curator can un-censor videos
         let is_censored = false;
         assert_ok!(Content::update_video_censorship_status(
-            Origin::signed(FIRST_CURATOR_ORIGIN),
-            ContentActor::Curator(group_id, FIRST_CURATOR_ID),
+            Origin::signed(DEFAULT_CURATOR_ACCOUNT_ID),
+            ContentActor::Curator(group_id, DEFAULT_CURATOR_ID),
             video_id,
             is_censored,
             vec![]
@@ -335,7 +67,7 @@ fn curators_can_censor_videos() {
         assert_eq!(
             System::events().last().unwrap().event,
             MetaEvent::content(RawEvent::VideoCensorshipStatusUpdated(
-                ContentActor::Curator(group_id, FIRST_CURATOR_ID),
+                ContentActor::Curator(group_id, DEFAULT_CURATOR_ID),
                 video_id,
                 is_censored,
                 vec![]
@@ -349,8 +81,8 @@ fn curators_can_censor_videos() {
         // Members cannot censor videos
         assert_err!(
             Content::update_video_censorship_status(
-                Origin::signed(FIRST_MEMBER_ORIGIN),
-                ContentActor::Member(FIRST_MEMBER_ORIGIN),
+                Origin::signed(DEFAULT_MEMBER_ACCOUNT_ID),
+                ContentActor::Member(DEFAULT_MEMBER_ACCOUNT_ID),
                 channel_id,
                 true,
                 vec![]
@@ -368,7 +100,7 @@ fn featured_videos() {
 
         // Lead can update curator owned channels
         assert_ok!(Content::set_featured_videos(
-            Origin::signed(LEAD_ORIGIN),
+            Origin::signed(LEAD_ACCOUNT_ID),
             ContentActor::Lead,
             vec![1, 2, 3]
         ));
@@ -383,8 +115,8 @@ fn featured_videos() {
 
         assert_err!(
             Content::set_featured_videos(
-                Origin::signed(FIRST_MEMBER_ORIGIN),
-                ContentActor::Member(FIRST_MEMBER_ID),
+                Origin::signed(DEFAULT_MEMBER_ACCOUNT_ID),
+                ContentActor::Member(DEFAULT_MEMBER_ID),
                 vec![1, 2, 3]
             ),
             Error::<Test>::ActorNotAuthorized
@@ -393,277 +125,1028 @@ fn featured_videos() {
 }
 
 #[test]
-fn non_authorized_collaborators_cannot_add_video() {
+fn successful_video_creation_by_member() {
     with_default_mock_builder(|| {
-        // Run to block one to see emitted events
         run_to_block(1);
 
-        create_initial_storage_buckets();
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        CreateVideoFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_assets(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Ok(()));
+    })
+}
 
-        helper_init_accounts(vec![FIRST_MEMBER_ORIGIN, COLLABORATOR_MEMBER_ORIGIN]);
+#[test]
+fn successful_video_creation_by_collaborator() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
 
-        // create channel
-        create_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            ChannelCreationParametersRecord {
-                assets: Some(helper_generate_storage_assets(vec![2, 3])),
-                meta: None,
-                reward_account: None,
-                collaborators: BTreeSet::new(),
-            },
-            Ok(()),
-        );
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        increase_account_balance_helper(COLLABORATOR_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        CreateVideoFixture::default()
+            .with_sender(COLLABORATOR_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Collaborator(COLLABORATOR_MEMBER_ID))
+            .with_assets(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Ok(()));
+    })
+}
 
-        create_video_mock(
-            COLLABORATOR_MEMBER_ORIGIN,
-            ContentActor::Collaborator(COLLABORATOR_MEMBER_ID),
-            <Test as storage::Trait>::ChannelId::one(),
-            VideoCreationParametersRecord {
-                assets: Some(helper_generate_storage_assets(vec![1, 2])),
-                meta: None,
-            },
-            Err(Error::<Test>::ActorNotAuthorized.into()),
-        );
+#[test]
+fn successful_video_creation_by_lead() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        increase_account_balance_helper(LEAD_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel();
+
+        CreateVideoFixture::default()
+            .with_sender(LEAD_ACCOUNT_ID)
+            .with_actor(ContentActor::Lead)
+            .with_assets(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Ok(()));
     })
 }
 
 #[test]
-fn non_authorized_collaborators_cannot_update_video() {
+fn successful_video_creation_by_curator() {
     with_default_mock_builder(|| {
-        // Run to block one to see emitted events
         run_to_block(1);
 
-        helper_init_accounts(vec![FIRST_MEMBER_ORIGIN, COLLABORATOR_MEMBER_ORIGIN]);
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel();
 
-        create_initial_storage_buckets();
-        // create channel
-        create_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            ChannelCreationParametersRecord {
-                assets: Some(helper_generate_storage_assets(vec![2, 3])),
-                meta: None,
-                reward_account: None,
-                collaborators: BTreeSet::new(),
-            },
-            Ok(()),
-        );
+        let default_curator_group_id = NextCuratorGroupId::<Test>::get() - 1;
+        CreateVideoFixture::default()
+            .with_sender(DEFAULT_CURATOR_ACCOUNT_ID)
+            .with_actor(ContentActor::Curator(
+                default_curator_group_id,
+                DEFAULT_CURATOR_ID,
+            ))
+            .with_assets(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Ok(()));
+    })
+}
 
-        // create video
-        create_video_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            <Test as storage::Trait>::ChannelId::one(),
-            VideoCreationParametersRecord {
-                assets: Some(helper_generate_storage_assets(vec![1, 2])),
-                meta: None,
-            },
-            Ok(()),
-        );
+#[test]
+fn unsuccessful_video_creation_with_member_auth_failure() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
 
-        update_video_mock(
-            COLLABORATOR_MEMBER_ORIGIN,
-            ContentActor::Collaborator(COLLABORATOR_MEMBER_ID),
-            <Test as Trait>::VideoId::one(),
-            VideoUpdateParametersRecord {
-                assets_to_upload: Some(helper_generate_storage_assets(vec![5])),
-                new_meta: None,
-                assets_to_remove: vec![DataObjectId::<Test>::one()]
-                    .into_iter()
-                    .collect::<BTreeSet<_>>(),
-            },
-            Err(Error::<Test>::ActorNotAuthorized.into()),
-        );
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        CreateVideoFixture::default()
+            .with_sender(UNAUTHORIZED_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_assets(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Err(Error::<Test>::MemberAuthFailed.into()));
     })
 }
 
 #[test]
-fn non_authorized_collaborators_cannot_delete_video() {
+fn unsuccessful_video_creation_with_collaborator_auth_failure() {
     with_default_mock_builder(|| {
-        // Run to block one to see emitted events
         run_to_block(1);
 
-        helper_init_accounts(vec![FIRST_MEMBER_ORIGIN, COLLABORATOR_MEMBER_ORIGIN]);
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        CreateVideoFixture::default()
+            .with_sender(UNAUTHORIZED_COLLABORATOR_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Collaborator(COLLABORATOR_MEMBER_ID))
+            .with_assets(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Err(Error::<Test>::MemberAuthFailed.into()));
+    })
+}
 
-        create_initial_storage_buckets();
-        // create channel
-        create_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            ChannelCreationParametersRecord {
-                assets: Some(helper_generate_storage_assets(vec![2, 3])),
-                meta: None,
-                reward_account: None,
-                collaborators: BTreeSet::new(),
-            },
-            Ok(()),
-        );
+#[test]
+fn unsuccessful_video_creation_with_lead_auth_failure() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
 
-        // create video
-        create_video_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            <Test as storage::Trait>::ChannelId::one(),
-            VideoCreationParametersRecord {
-                assets: Some(helper_generate_storage_assets(vec![1, 2])),
-                meta: None,
-            },
-            Ok(()),
-        );
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        increase_account_balance_helper(LEAD_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel();
 
-        delete_video_mock(
-            COLLABORATOR_MEMBER_ORIGIN,
-            ContentActor::Collaborator(COLLABORATOR_MEMBER_ID),
-            <Test as Trait>::VideoId::one(),
-            vec![
-                DataObjectId::<Test>::one(),
-                DataObjectId::<Test>::from(2u64),
-            ]
-            .into_iter()
-            .collect::<BTreeSet<_>>(),
-            Err(Error::<Test>::ActorNotAuthorized.into()),
-        );
+        CreateVideoFixture::default()
+            .with_sender(UNAUTHORIZED_LEAD_ACCOUNT_ID)
+            .with_actor(ContentActor::Lead)
+            .call_and_assert(Err(Error::<Test>::LeadAuthFailed.into()));
     })
 }
 
 #[test]
-fn authorized_collaborators_can_add_video() {
+fn unsuccessful_video_creation_with_curator_auth_failure() {
     with_default_mock_builder(|| {
-        // Run to block one to see emitted events
         run_to_block(1);
 
-        helper_init_accounts(vec![FIRST_MEMBER_ORIGIN, COLLABORATOR_MEMBER_ORIGIN]);
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel();
 
-        create_initial_storage_buckets();
-        // create channel
-        create_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            ChannelCreationParametersRecord {
-                assets: Some(helper_generate_storage_assets(vec![2, 3])),
-                meta: None,
-                reward_account: None,
-                collaborators: vec![COLLABORATOR_MEMBER_ID]
-                    .into_iter()
-                    .collect::<BTreeSet<_>>(),
-            },
-            Ok(()),
-        );
+        let default_curator_group_id = NextCuratorGroupId::<Test>::get() - 1;
+        CreateVideoFixture::default()
+            .with_sender(UNAUTHORIZED_CURATOR_ACCOUNT_ID)
+            .with_actor(ContentActor::Curator(
+                default_curator_group_id,
+                DEFAULT_CURATOR_ID,
+            ))
+            .call_and_assert(Err(Error::<Test>::CuratorAuthFailed.into()));
+    })
+}
 
-        create_video_mock(
-            COLLABORATOR_MEMBER_ORIGIN,
-            ContentActor::Collaborator(COLLABORATOR_MEMBER_ID),
-            <Test as storage::Trait>::ChannelId::one(),
-            VideoCreationParametersRecord {
-                assets: Some(helper_generate_storage_assets(vec![1, 2])),
-                meta: None,
-            },
-            Ok(()),
-        );
+#[test]
+fn unsuccessful_video_creation_with_unauth_member() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        CreateVideoFixture::default()
+            .with_sender(UNAUTHORIZED_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(UNAUTHORIZED_MEMBER_ID))
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
     })
 }
 
 #[test]
-fn authorized_collaborators_can_update_video() {
+fn unsuccessful_video_creation_with_unauth_collaborator() {
     with_default_mock_builder(|| {
-        // Run to block one to see emitted events
         run_to_block(1);
 
-        helper_init_accounts(vec![FIRST_MEMBER_ORIGIN, COLLABORATOR_MEMBER_ORIGIN]);
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        increase_account_balance_helper(UNAUTHORIZED_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
 
-        create_initial_storage_buckets();
+        CreateVideoFixture::default()
+            .with_sender(UNAUTHORIZED_COLLABORATOR_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Collaborator(
+                UNAUTHORIZED_COLLABORATOR_MEMBER_ID,
+            ))
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
+    })
+}
 
-        // create channel
-        create_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            ChannelCreationParametersRecord {
-                assets: Some(helper_generate_storage_assets(vec![2, 3])),
-                meta: None,
-                reward_account: None,
-                collaborators: vec![COLLABORATOR_MEMBER_ID]
-                    .into_iter()
-                    .collect::<BTreeSet<_>>(),
-            },
-            Ok(()),
-        );
+#[test]
+fn unsuccessful_video_creation_with_unauth_curator() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
 
-        // create video
-        create_video_mock(
-            COLLABORATOR_MEMBER_ORIGIN,
-            ContentActor::Collaborator(COLLABORATOR_MEMBER_ID),
-            <Test as storage::Trait>::ChannelId::one(),
-            VideoCreationParametersRecord {
-                assets: Some(helper_generate_storage_assets(vec![1, 2])),
-                meta: None,
-            },
-            Ok(()),
-        );
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        increase_account_balance_helper(UNAUTHORIZED_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel();
+
+        let unauthorized_curator_group_id =
+            curators::add_curator_to_new_group(UNAUTHORIZED_CURATOR_ID);
+        CreateVideoFixture::default()
+            .with_sender(UNAUTHORIZED_CURATOR_ACCOUNT_ID)
+            .with_actor(ContentActor::Curator(
+                unauthorized_curator_group_id,
+                UNAUTHORIZED_CURATOR_ID,
+            ))
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
+    })
+}
 
-        update_video_mock(
-            COLLABORATOR_MEMBER_ORIGIN,
-            ContentActor::Collaborator(COLLABORATOR_MEMBER_ID),
-            <Test as Trait>::VideoId::one(),
-            VideoUpdateParametersRecord {
-                assets_to_upload: Some(helper_generate_storage_assets(vec![5])),
-                new_meta: None,
-                assets_to_remove: vec![DataObjectId::<Test>::one()]
-                    .into_iter()
-                    .collect::<BTreeSet<_>>(),
-            },
-            Ok(()),
-        );
+#[test]
+fn unsuccessful_video_creation_by_lead_with_member_owned_channel() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        increase_account_balance_helper(LEAD_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        CreateVideoFixture::default()
+            .with_sender(LEAD_ACCOUNT_ID)
+            .with_actor(ContentActor::Lead)
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
     })
 }
 
 #[test]
-fn authorized_collaborators_can_delete_video() {
+fn unsuccessful_video_creation_with_invalid_channel_id() {
     with_default_mock_builder(|| {
-        // Run to block one to see emitted events
         run_to_block(1);
 
-        helper_init_accounts(vec![FIRST_MEMBER_ORIGIN, COLLABORATOR_MEMBER_ORIGIN]);
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
 
-        create_initial_storage_buckets();
-        // create channel
-        create_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            ChannelCreationParametersRecord {
-                assets: Some(helper_generate_storage_assets(vec![2, 3])),
-                meta: None,
-                reward_account: None,
-                collaborators: vec![COLLABORATOR_MEMBER_ID]
-                    .into_iter()
-                    .collect::<BTreeSet<_>>(),
-            },
-            Ok(()),
-        );
+        CreateVideoFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_channel_id(Zero::zero())
+            .call_and_assert(Err(Error::<Test>::ChannelDoesNotExist.into()));
+    })
+}
 
-        // create video
-        create_video_mock(
-            COLLABORATOR_MEMBER_ORIGIN,
-            ContentActor::Collaborator(COLLABORATOR_MEMBER_ID),
-            <Test as storage::Trait>::ChannelId::one(),
-            VideoCreationParametersRecord {
-                assets: Some(helper_generate_storage_assets(vec![1, 2])),
-                meta: None,
-            },
-            Ok(()),
+#[test]
+fn unsuccessful_video_creation_with_invalid_expected_data_size_fee() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        CreateVideoFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_assets(StorageAssets::<Test> {
+                // setting a purposely high fee to trigger error
+                expected_data_size_fee: BalanceOf::<Test>::from(1_000_000u64),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Err(storage::Error::<Test>::DataSizeFeeChanged.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_video_creation_with_insufficient_balance() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+        slash_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID);
+
+        CreateVideoFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_assets(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Err(storage::Error::<Test>::InsufficientBalance.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_video_creation_due_to_bucket_having_insufficient_objects_size_left() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        CreateVideoFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_assets(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: vec![DataObjectCreationParameters {
+                    size: STORAGE_BUCKET_OBJECTS_SIZE_LIMIT + 1,
+                    ipfs_content_id: vec![1u8],
+                }],
+            })
+            .call_and_assert(Err(
+                storage::Error::<Test>::StorageBucketObjectSizeLimitReached.into(),
+            ));
+    })
+}
+
+#[test]
+fn unsuccessful_video_creation_due_to_bucket_having_insufficient_objects_number_left() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(
+            DEFAULT_MEMBER_ACCOUNT_ID,
+            // covers the channel's assets (DATA_OBJECTS_NUMBER objects) plus the over-limit upload attempted below
+            DATA_OBJECT_DELETION_PRIZE * (STORAGE_BUCKET_OBJECTS_NUMBER_LIMIT + 1)
+                + DATA_OBJECT_DELETION_PRIZE * DATA_OBJECTS_NUMBER,
         );
 
-        delete_video_mock(
-            COLLABORATOR_MEMBER_ORIGIN,
-            ContentActor::Collaborator(COLLABORATOR_MEMBER_ID),
-            <Test as Trait>::VideoId::one(),
-            vec![
-                DataObjectId::<Test>::one(),
-                DataObjectId::<Test>::from(2u64),
-            ]
-            .into_iter()
-            .collect::<BTreeSet<_>>(),
-            Ok(()),
+        create_default_member_owned_channel();
+
+        CreateVideoFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_assets(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: (0..(STORAGE_BUCKET_OBJECTS_NUMBER_LIMIT + 1))
+                    .map(|_| DataObjectCreationParameters {
+                        size: 1,
+                        ipfs_content_id: vec![1u8],
+                    })
+                    .collect(),
+            })
+            .call_and_assert(Err(
+                storage::Error::<Test>::StorageBucketObjectNumberLimitReached.into(),
+            ));
+    })
+}
+
+#[test]
+fn unsuccessful_video_creation_with_max_object_size_limits_exceeded() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel();
+
+        CreateVideoFixture::default()
+            .with_sender(DEFAULT_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .with_assets(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: vec![DataObjectCreationParameters {
+                    size: VOUCHER_OBJECTS_SIZE_LIMIT + 1,
+                    ipfs_content_id: vec![1u8],
+                }],
+            })
+            .call_and_assert(Err(storage::Error::<Test>::MaxDataObjectSizeExceeded.into()));
+    })
+}
+
+#[test]
+fn successful_video_update_by_member_with_assets_upload() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel_with_video();
+
+        UpdateVideoFixture::default()
+            .with_assets_to_upload(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn successful_video_update_by_collaborator_with_assets_upload() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        increase_account_balance_helper(COLLABORATOR_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel_with_video();
+
+        UpdateVideoFixture::default()
+            .with_sender(COLLABORATOR_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Collaborator(COLLABORATOR_MEMBER_ID))
+            .with_assets_to_upload(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn successful_video_update_by_lead_with_assets_upload() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        increase_account_balance_helper(LEAD_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel_with_video();
+
+        UpdateVideoFixture::default()
+            .with_sender(LEAD_ACCOUNT_ID)
+            .with_actor(ContentActor::Lead)
+            .with_assets_to_upload(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn successful_video_update_by_curator_with_assets_upload() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel_with_video();
+
+        let default_curator_group_id = NextCuratorGroupId::<Test>::get() - 1;
+        UpdateVideoFixture::default()
+            .with_sender(DEFAULT_CURATOR_ACCOUNT_ID)
+            .with_actor(ContentActor::Curator(
+                default_curator_group_id,
+                DEFAULT_CURATOR_ID,
+            ))
+            .with_assets_to_upload(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn successful_video_update_by_member_with_assets_removal() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel_with_video();
+        let video_assets = ((DATA_OBJECTS_NUMBER as u64)..(2 * DATA_OBJECTS_NUMBER as u64 - 1))
+            .collect::<BTreeSet<_>>();
+
+        UpdateVideoFixture::default()
+            .with_assets_to_remove(video_assets)
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn successful_video_update_by_collaborator_with_assets_removal() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        increase_account_balance_helper(COLLABORATOR_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel_with_video();
+        let video_assets = ((DATA_OBJECTS_NUMBER as u64)..(2 * DATA_OBJECTS_NUMBER as u64 - 1))
+            .collect::<BTreeSet<_>>();
+
+        UpdateVideoFixture::default()
+            .with_sender(COLLABORATOR_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Collaborator(COLLABORATOR_MEMBER_ID))
+            .with_assets_to_remove(video_assets)
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn successful_video_update_by_lead_with_assets_removal() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        increase_account_balance_helper(LEAD_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel_with_video();
+        let video_assets = ((DATA_OBJECTS_NUMBER as u64)..(2 * DATA_OBJECTS_NUMBER as u64 - 1))
+            .collect::<BTreeSet<_>>();
+
+        UpdateVideoFixture::default()
+            .with_sender(LEAD_ACCOUNT_ID)
+            .with_actor(ContentActor::Lead)
+            .with_assets_to_remove(video_assets)
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn successful_video_update_by_curator_with_assets_removal() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel_with_video();
+        let video_assets = ((DATA_OBJECTS_NUMBER as u64)..(2 * DATA_OBJECTS_NUMBER as u64 - 1))
+            .collect::<BTreeSet<_>>();
+
+        let default_curator_group_id = NextCuratorGroupId::<Test>::get() - 1;
+        UpdateVideoFixture::default()
+            .with_sender(DEFAULT_CURATOR_ACCOUNT_ID)
+            .with_actor(ContentActor::Curator(
+                default_curator_group_id,
+                DEFAULT_CURATOR_ID,
+            ))
+            .with_assets_to_remove(video_assets)
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn unsuccessful_video_update_with_member_auth_failure() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel_with_video();
+
+        UpdateVideoFixture::default()
+            .with_sender(UNAUTHORIZED_MEMBER_ACCOUNT_ID)
+            .call_and_assert(Err(Error::<Test>::MemberAuthFailed.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_video_update_with_collaborator_auth_failure() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        increase_account_balance_helper(UNAUTHORIZED_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel_with_video();
+
+        UpdateVideoFixture::default()
+            .with_sender(UNAUTHORIZED_COLLABORATOR_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Collaborator(COLLABORATOR_MEMBER_ID))
+            .call_and_assert(Err(Error::<Test>::MemberAuthFailed.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_video_update_with_curator_auth_failure() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        increase_account_balance_helper(UNAUTHORIZED_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel_with_video();
+
+        let default_curator_group_id = curators::add_curator_to_new_group(DEFAULT_CURATOR_ID);
+        UpdateVideoFixture::default()
+            .with_sender(UNAUTHORIZED_CURATOR_ACCOUNT_ID)
+            .with_actor(ContentActor::Curator(
+                default_curator_group_id,
+                DEFAULT_CURATOR_ID,
+            ))
+            .call_and_assert(Err(Error::<Test>::CuratorAuthFailed.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_video_update_with_lead_auth_failure() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        increase_account_balance_helper(LEAD_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel_with_video();
+
+        UpdateVideoFixture::default()
+            .with_sender(UNAUTHORIZED_LEAD_ACCOUNT_ID)
+            .with_actor(ContentActor::Lead)
+            .call_and_assert(Err(Error::<Test>::LeadAuthFailed.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_video_update_with_unauth_member() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel_with_video();
+
+        UpdateVideoFixture::default()
+            .with_sender(UNAUTHORIZED_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(UNAUTHORIZED_MEMBER_ID))
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_video_update_with_unauth_collaborator() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        increase_account_balance_helper(UNAUTHORIZED_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel_with_video();
+
+        UpdateVideoFixture::default()
+            .with_sender(UNAUTHORIZED_COLLABORATOR_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Collaborator(
+                UNAUTHORIZED_COLLABORATOR_MEMBER_ID,
+            ))
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_video_update_with_unauth_curator() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        increase_account_balance_helper(UNAUTHORIZED_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel_with_video();
+
+        let unauthorized_curator_group_id =
+            curators::add_curator_to_new_group(UNAUTHORIZED_CURATOR_ID);
+        UpdateVideoFixture::default()
+            .with_sender(UNAUTHORIZED_CURATOR_ACCOUNT_ID)
+            .with_actor(ContentActor::Curator(
+                unauthorized_curator_group_id,
+                UNAUTHORIZED_CURATOR_ID,
+            ))
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_video_update_with_invalid_expected_data_size_fee() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel_with_video();
+
+        UpdateVideoFixture::default()
+            .with_assets_to_upload(StorageAssets::<Test> {
+                // setting a purposely high fee to trigger error
+                expected_data_size_fee: BalanceOf::<Test>::from(1_000_000u64),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Err(storage::Error::<Test>::DataSizeFeeChanged.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_video_update_with_insufficient_balance() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel_with_video();
+        slash_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID);
+
+        UpdateVideoFixture::default()
+            .with_assets_to_upload(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: create_data_objects_helper(),
+            })
+            .call_and_assert(Err(storage::Error::<Test>::InsufficientBalance.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_video_update_due_to_bucket_having_insufficient_objects_size_left() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel_with_video();
+
+        UpdateVideoFixture::default()
+            .with_assets_to_upload(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: vec![DataObjectCreationParameters {
+                    size: STORAGE_BUCKET_OBJECTS_SIZE_LIMIT + 1,
+                    ipfs_content_id: vec![1u8],
+                }],
+            })
+            .call_and_assert(Err(
+                storage::Error::<Test>::StorageBucketObjectSizeLimitReached.into(),
+            ));
+    })
+}
+
+#[test]
+fn unsuccessful_video_update_due_to_bucket_having_insufficient_objects_number_left() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(
+            DEFAULT_MEMBER_ACCOUNT_ID,
+            // covers the channel and its video (2 * DATA_OBJECTS_NUMBER objects) plus the over-limit upload attempted below
+            DATA_OBJECT_DELETION_PRIZE * (STORAGE_BUCKET_OBJECTS_NUMBER_LIMIT + 1)
+                + DATA_OBJECT_DELETION_PRIZE * DATA_OBJECTS_NUMBER * 2,
         );
+
+        create_default_member_owned_channel_with_video();
+
+        UpdateVideoFixture::default()
+            .with_assets_to_upload(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: (0..(STORAGE_BUCKET_OBJECTS_NUMBER_LIMIT + 1))
+                    .map(|_| DataObjectCreationParameters {
+                        size: 1,
+                        ipfs_content_id: vec![1u8],
+                    })
+                    .collect(),
+            })
+            .call_and_assert(Err(
+                storage::Error::<Test>::StorageBucketObjectNumberLimitReached.into(),
+            ));
+    })
+}
+
+#[test]
+fn unsuccessful_video_update_with_max_object_size_limits_exceeded() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel_with_video();
+
+        UpdateVideoFixture::default()
+            .with_assets_to_upload(StorageAssets::<Test> {
+                expected_data_size_fee: Storage::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: vec![DataObjectCreationParameters {
+                    size: <Test as storage::Trait>::MaxDataObjectSize::get() + 1,
+                    ipfs_content_id: vec![1u8],
+                }],
+            })
+            .call_and_assert(Err(storage::Error::<Test>::MaxDataObjectSizeExceeded.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_video_update_with_invalid_object_ids() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel_with_video();
+        let invalid_objects_ids = (1..DATA_OBJECTS_NUMBER)
+            .map(|i| Storage::<Test>::next_data_object_id() + i)
+            .collect::<BTreeSet<_>>();
+
+        UpdateVideoFixture::default()
+            .with_assets_to_remove(invalid_objects_ids)
+            .call_and_assert(Err(storage::Error::<Test>::DataObjectDoesntExist.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_video_update_with_invalid_video_id() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel_with_video();
+
+        UpdateVideoFixture::default()
+            .with_video_id(Zero::zero())
+            .call_and_assert(Err(Error::<Test>::VideoDoesNotExist.into()));
+    })
+}
+
+#[test]
+fn successful_video_deletion_by_member_with_assets_removal() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel_with_video();
+        let video_assets = ((DATA_OBJECTS_NUMBER as u64)..(2 * DATA_OBJECTS_NUMBER as u64 - 1))
+            .collect::<BTreeSet<_>>();
+
+        DeleteVideoFixture::default()
+            .with_assets_to_remove(video_assets)
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn successful_video_deletion_by_collaborator() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel_with_video();
+
+        DeleteVideoFixture::default()
+            .with_sender(COLLABORATOR_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Collaborator(COLLABORATOR_MEMBER_ID))
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn unsuccessful_video_deletion_by_lead_with_member_owned_channel() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel_with_video();
+
+        DeleteVideoFixture::default()
+            .with_sender(LEAD_ACCOUNT_ID)
+            .with_actor(ContentActor::Lead)
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
+    })
+}
+
+#[test]
+fn successful_video_deletion_by_lead_with_curator_owned_channel() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel_with_video();
+        let video_assets = ((DATA_OBJECTS_NUMBER as u64)..(2 * DATA_OBJECTS_NUMBER as u64 - 1))
+            .collect::<BTreeSet<_>>();
+
+        DeleteVideoFixture::default()
+            .with_sender(LEAD_ACCOUNT_ID)
+            .with_actor(ContentActor::Lead)
+            .with_assets_to_remove(video_assets)
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn successful_video_deletion_by_curator_with_assets_removal() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel_with_video();
+        let video_assets = ((DATA_OBJECTS_NUMBER as u64)..(2 * DATA_OBJECTS_NUMBER as u64 - 1))
+            .collect::<BTreeSet<_>>();
+
+        let default_curator_group_id = NextCuratorGroupId::<Test>::get() - 1;
+        DeleteVideoFixture::default()
+            .with_sender(DEFAULT_CURATOR_ACCOUNT_ID)
+            .with_actor(ContentActor::Curator(
+                default_curator_group_id,
+                DEFAULT_CURATOR_ID,
+            ))
+            .with_assets_to_remove(video_assets)
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn unsuccessful_video_deletion_by_member_with_auth_failure() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel_with_video();
+
+        DeleteVideoFixture::default()
+            .with_sender(UNAUTHORIZED_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(DEFAULT_MEMBER_ID))
+            .call_and_assert(Err(Error::<Test>::MemberAuthFailed.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_video_deletion_by_curator_with_auth_failure() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel_with_video();
+
+        let default_curator_group_id = NextCuratorGroupId::<Test>::get() - 1;
+        DeleteVideoFixture::default()
+            .with_sender(UNAUTHORIZED_CURATOR_ACCOUNT_ID)
+            .with_actor(ContentActor::Curator(
+                default_curator_group_id,
+                DEFAULT_CURATOR_ID,
+            ))
+            .call_and_assert(Err(Error::<Test>::CuratorAuthFailed.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_video_deletion_with_lead_auth_failure() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel_with_video();
+
+        DeleteVideoFixture::default()
+            .with_sender(UNAUTHORIZED_LEAD_ACCOUNT_ID)
+            .with_actor(ContentActor::Lead)
+            .call_and_assert(Err(Error::<Test>::LeadAuthFailed.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_video_deletion_by_unauth_member() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel_with_video();
+
+        DeleteVideoFixture::default()
+            .with_sender(UNAUTHORIZED_MEMBER_ACCOUNT_ID)
+            .with_actor(ContentActor::Member(UNAUTHORIZED_MEMBER_ID))
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_video_deletion_by_unauth_curator() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_CURATOR_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_curator_owned_channel_with_video();
+
+        let unauthorized_curator_group_id =
+            curators::add_curator_to_new_group(UNAUTHORIZED_CURATOR_ID);
+        DeleteVideoFixture::default()
+            .with_sender(UNAUTHORIZED_CURATOR_ACCOUNT_ID)
+            .with_actor(ContentActor::Curator(
+                unauthorized_curator_group_id,
+                UNAUTHORIZED_CURATOR_ID,
+            ))
+            .call_and_assert(Err(Error::<Test>::ActorNotAuthorized.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_video_deletion_with_invalid_video_id() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel_with_video();
+
+        DeleteVideoFixture::default()
+            .with_video_id(Zero::zero())
+            .call_and_assert(Err(Error::<Test>::VideoDoesNotExist.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_video_deletion_with_invalid_object_ids() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        create_initial_storage_buckets_helper();
+        increase_account_balance_helper(DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        create_default_member_owned_channel_with_video();
+        let invalid_objects_ids = (1..DATA_OBJECTS_NUMBER)
+            .map(|i| Storage::<Test>::next_data_object_id() + i)
+            .collect::<BTreeSet<_>>();
+
+        DeleteVideoFixture::default()
+            .with_assets_to_remove(invalid_objects_ids)
+            .call_and_assert(Err(storage::Error::<Test>::DataObjectDoesntExist.into()));
     })
 }

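The rewritten video tests above rely on builder-style fixtures (CreateVideoFixture, UpdateVideoFixture, DeleteVideoFixture) defined in the new content tests fixtures module, whose body is not shown in this diff. Below is a minimal sketch of the pattern, assuming hypothetical field names and that each with_* method overrides one extrinsic argument while call_and_assert dispatches the call and compares the outcome:

// Sketch only: the real fixture lives in runtime-modules/content/src/tests/fixtures.rs
// (added by this PR); field names and defaults here are illustrative assumptions.
pub struct CreateVideoFixture {
    sender: u64,
    actor: ContentActor<CuratorGroupId, CuratorId, MemberId>,
    channel_id: ChannelId,
    params: VideoCreationParameters<Test>,
}

impl CreateVideoFixture {
    pub fn with_sender(self, sender: u64) -> Self {
        Self { sender, ..self }
    }

    pub fn with_actor(self, actor: ContentActor<CuratorGroupId, CuratorId, MemberId>) -> Self {
        Self { actor, ..self }
    }

    pub fn call_and_assert(&self, expected_result: DispatchResult) {
        // dispatch the extrinsic exactly as the old *_mock helpers did
        let actual_result = Content::create_video(
            Origin::signed(self.sender),
            self.actor.clone(),
            self.channel_id,
            self.params.clone(),
        );
        assert_eq!(actual_result, expected_result);
        // on Ok(()) the real fixture additionally verifies storage state and the emitted event
    }
}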
+ 18 - 9
runtime-modules/storage/src/lib.rs

@@ -1469,6 +1469,9 @@ decl_error! {
         /// Different Accounts for dynamic bag deletion prize and upload fees
         AccountsNotCoherent,
 
+        /// Different Accounts for dynamic bag id and parameters bag id
+        BagsNotCoherent,
+
         /// Invalid transactor account ID for this bucket.
         InvalidTransactorAccount,
     }
@@ -2844,21 +2847,22 @@ impl<T: Trait> Module<T> {
     ) -> Result<Option<BagUpdate<BalanceOf<T>>>, DispatchError> {
         let bag_id: BagId<T> = dynamic_bag_id.clone().into();
         ensure!(
-            !<Bags<T>>::contains_key(bag_id),
+            !<Bags<T>>::contains_key(bag_id.clone()),
             Error::<T>::DynamicBagExists
         );
 
-        // call can upload data explicitly
         let bag_change = upload_params
             .as_ref()
             .map(|params| {
-                // ensure coherent account ids for prize
+                // ensure coherent account ids & bag ids
                 if let Some(deletion_prize) = deletion_prize {
                     ensure!(
                         params.deletion_prize_source_account_id == deletion_prize.account_id,
                         Error::<T>::AccountsNotCoherent,
                     );
                 }
+                ensure!(bag_id == params.bag_id, Error::<T>::BagsNotCoherent);
+
                 Self::validate_bag_change(params)
             })
             .transpose()?;
@@ -2871,12 +2875,17 @@ impl<T: Trait> Module<T> {
                 Self::compute_upload_fees(bag_change)
             }));
 
-        Self::ensure_sufficient_balance_for_upload(
-            deletion_prize
-                .as_ref()
-                .map(|deletion_prize| deletion_prize.account_id.clone()),
-            total_upload_fee,
-        )?;
+        // use the bag deletion prize account when present, otherwise fall back to
+        // the upload params' prize source account (the two are checked for coherence above)
+        let designated_account = deletion_prize
+            .as_ref()
+            .map(|dp| dp.account_id.clone())
+            .or_else(|| {
+                upload_params
+                    .as_ref()
+                    .map(|p| p.deletion_prize_source_account_id.clone())
+            });
+
+        Self::ensure_sufficient_balance_for_upload(designated_account, total_upload_fee)?;
 
         Ok(bag_change)
     }

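The new BagsNotCoherent guard above rejects upload parameters whose bag_id disagrees with the dynamic bag being created. A hypothetical regression test for it, assuming the with_params_bag_id builder added to CreateDynamicBagWithObjectsFixture below, the pallet's StaticBagId::Council variant, and the test-module helpers shown further down in this diff:

#[test]
fn create_dynamic_bag_with_objects_fails_with_incoherent_bag_ids() {
    build_test_externalities().execute_with(|| {
        create_storage_buckets(DEFAULT_STORAGE_BUCKETS_NUMBER);

        CreateDynamicBagWithObjectsFixture::default()
            // upload params point at a different bag than the one being created
            .with_params_bag_id(BagId::<Test>::Static(StaticBagId::Council))
            .call_and_assert(Err(Error::<Test>::BagsNotCoherent.into()));
    });
}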
+ 128 - 15
runtime-modules/storage/src/tests/fixtures.rs

@@ -2,8 +2,10 @@ use frame_support::dispatch::DispatchResult;
 use frame_support::storage::StorageMap;
 use frame_support::traits::{Currency, OnFinalize, OnInitialize};
 use frame_system::{EventRecord, Phase, RawOrigin};
+use sp_runtime::{traits::Zero, DispatchError};
 use sp_std::collections::btree_map::BTreeMap;
 use sp_std::collections::btree_set::BTreeSet;
+use std::convert::TryInto;
 
 use super::mocks::{
     Balances, CollectiveFlip, Storage, System, Test, TestEvent, DEFAULT_MEMBER_ACCOUNT_ID,
@@ -11,7 +13,9 @@ use super::mocks::{
 };
 
 use crate::tests::mocks::{
-    DEFAULT_DISTRIBUTION_PROVIDER_ACCOUNT_ID, DISTRIBUTION_WG_LEADER_ACCOUNT_ID,
+    DEFAULT_DISTRIBUTION_PROVIDER_ACCOUNT_ID, DEFAULT_MEMBER_ID,
+    DEFAULT_STORAGE_BUCKET_OBJECTS_LIMIT, DEFAULT_STORAGE_BUCKET_SIZE_LIMIT,
+    DISTRIBUTION_WG_LEADER_ACCOUNT_ID,
 };
 use crate::{
     BagId, Cid, DataObjectCreationParameters, DataObjectStorage, DistributionBucket,
@@ -102,6 +106,9 @@ impl EventFixture {
 
 const DEFAULT_ACCOUNT_ID: u64 = 1;
 const DEFAULT_WORKER_ID: u64 = 1;
+pub const DEFAULT_DATA_OBJECTS_NUMBER: u64 = DEFAULT_STORAGE_BUCKET_OBJECTS_LIMIT / 2;
+pub const DEFAULT_DATA_OBJECTS_SIZE: u64 =
+    DEFAULT_STORAGE_BUCKET_SIZE_LIMIT / DEFAULT_DATA_OBJECTS_NUMBER - 1;
 
 pub struct CreateStorageBucketFixture {
     origin: RawOrigin<u64>,
@@ -342,7 +349,7 @@ pub fn create_data_object_candidates(
     range
         .into_iter()
         .map(|idx| DataObjectCreationParameters {
-            size: 10 * idx as u64,
+            size: DEFAULT_DATA_OBJECTS_SIZE,
             ipfs_content_id: vec![idx],
         })
         .collect()
@@ -1151,6 +1158,7 @@ impl CreateDynamicBagFixture {
 }
 
 pub struct CreateDynamicBagWithObjectsFixture {
+    sender: u64,
     bag_id: DynamicBagId<Test>,
     deletion_prize: Option<DynamicBagDeletionPrize<Test>>,
     upload_parameters: UploadParameters<Test>,
@@ -1158,10 +1166,68 @@ pub struct CreateDynamicBagWithObjectsFixture {
 
 impl CreateDynamicBagWithObjectsFixture {
     pub fn default() -> Self {
+        let bag_id = DynamicBagId::<Test>::Member(DEFAULT_MEMBER_ID);
+        let sender_acc = DEFAULT_MEMBER_ACCOUNT_ID;
+        Self {
+            sender: sender_acc.clone(),
+            bag_id: bag_id.clone(),
+            deletion_prize: None,
+            upload_parameters: UploadParameters::<Test> {
+                bag_id: bag_id.into(),
+                expected_data_size_fee: crate::Module::<Test>::data_object_per_mega_byte_fee(),
+                object_creation_list: create_data_object_candidates(
+                    1,
+                    DEFAULT_DATA_OBJECTS_NUMBER.try_into().unwrap(),
+                ),
+                deletion_prize_source_account_id: sender_acc,
+            },
+        }
+    }
+
+    pub fn with_expected_data_size_fee(self, expected_data_size_fee: u64) -> Self {
+        Self {
+            upload_parameters: UploadParameters::<Test> {
+                expected_data_size_fee,
+                ..self.upload_parameters
+            },
+            ..self
+        }
+    }
+
+    pub fn with_params_bag_id(self, bag_id: BagId<Test>) -> Self {
         Self {
-            bag_id: Default::default(),
-            deletion_prize: Default::default(),
-            upload_parameters: Default::default(),
+            upload_parameters: UploadParameters::<Test> {
+                bag_id,
+                ..self.upload_parameters
+            },
+            ..self
+        }
+    }
+
+    pub fn with_objects(self, object_creation_list: Vec<DataObjectCreationParameters>) -> Self {
+        Self {
+            upload_parameters: UploadParameters::<Test> {
+                object_creation_list,
+                ..self.upload_parameters
+            },
+            ..self
+        }
+    }
+
+    pub fn with_upload_parameters(self, upload_parameters: UploadParameters<Test>) -> Self {
+        Self {
+            upload_parameters,
+            ..self
+        }
+    }
+
+    pub fn with_objects_prize_source_account(self, deletion_prize_source_account_id: u64) -> Self {
+        Self {
+            upload_parameters: UploadParameters::<Test> {
+                deletion_prize_source_account_id,
+                ..self.upload_parameters
+            },
+            ..self
         }
     }
 
@@ -1179,25 +1245,55 @@ impl CreateDynamicBagWithObjectsFixture {
         }
     }
 
-    pub fn with_objects(self, upload_parameters: UploadParameters<Test>) -> Self {
-        Self {
-            upload_parameters,
-            ..self
-        }
-    }
-
     pub fn call_and_assert(&self, expected_result: DispatchResult) {
+        let balance_pre = Balances::usable_balance(self.sender);
+        let bag_id: BagId<Test> = self.bag_id.clone().into();
+        let total_size_required = self
+            .upload_parameters
+            .object_creation_list
+            .iter()
+            .fold(0, |acc, it| acc + it.size);
+
         let actual_result = Storage::create_dynamic_bag_with_objects_constraints(
             self.bag_id.clone(),
             self.deletion_prize.clone(),
             self.upload_parameters.clone(),
         );
 
+        let balance_post = Balances::usable_balance(self.sender);
+
         assert_eq!(actual_result, expected_result);
 
-        if actual_result.is_ok() {
-            let bag_id: BagId<Test> = self.bag_id.clone().into();
-            assert!(<crate::Bags<Test>>::contains_key(&bag_id));
+        match actual_result {
+            Ok(()) => {
+                assert!(<crate::Bags<Test>>::contains_key(&bag_id));
+
+                let bag = crate::Bags::<Test>::get(&bag_id);
+                assert_eq!(
+                    balance_pre.saturating_sub(balance_post),
+                    self.deletion_prize
+                        .as_ref()
+                        .map_or_else(|| Zero::zero(), |dprize| dprize.prize)
+                );
+
+                let total_objects_required =
+                    self.upload_parameters.object_creation_list.len() as u64;
+
+                assert!(bag.stored_by.iter().all(|id| {
+                    let bucket = crate::StorageBucketById::<Test>::get(id);
+                    let enough_size =
+                        bucket.voucher.size_limit >= total_size_required + bucket.voucher.size_used;
+                    let enough_objects = bucket.voucher.objects_limit
+                        >= total_objects_required + bucket.voucher.objects_used;
+                    enough_size && enough_objects && bucket.accepting_new_bags
+                }));
+            }
+            Err(err) => {
+                assert_eq!(balance_pre, balance_post);
+                if into_str(err) != "DynamicBagExists" {
+                    assert!(!crate::Bags::<Test>::contains_key(&bag_id))
+                }
+            }
         }
     }
 }
@@ -2044,3 +2140,20 @@ impl SetDistributionBucketFamilyMetadataFixture {
         assert_eq!(actual_result, expected_result);
     }
 }
+
+// helper methods
+impl CreateStorageBucketFixture {
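+    // Creates `bucket_number` buckets, asserting each creation succeeds, and returns the new bucket ids.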
+    pub fn create_several(&self, bucket_number: u64) -> BTreeSet<u64> {
+        let mut bucket_ids = BTreeSet::new();
+        for _ in 0..bucket_number {
+            let bucket_id = self.call_and_assert(Ok(())).unwrap();
+            bucket_ids.insert(bucket_id);
+        }
+        bucket_ids
+    }
+}
+
+// Converts a DispatchError into its string representation (works around a compiler error at the comparison site).
+fn into_str(err: DispatchError) -> &'static str {
+    err.into()
+}

+ 7 - 0
runtime-modules/storage/src/tests/mocks.rs

@@ -74,6 +74,13 @@ pub const DEFAULT_STORAGE_PROVIDER_ID: u64 = 10;
 pub const ANOTHER_STORAGE_PROVIDER_ID: u64 = 11;
 pub const DEFAULT_DISTRIBUTION_PROVIDER_ID: u64 = 12;
 pub const ANOTHER_DISTRIBUTION_PROVIDER_ID: u64 = 13;
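+// Shared defaults for the dynamic-bag tests.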
+pub const INITIAL_BALANCE: u64 = 10_000;
+pub const BAG_DELETION_PRIZE_VALUE: u64 = 100;
+pub const VOUCHER_SIZE_LIMIT: u64 = 100;
+pub const VOUCHER_OBJECTS_LIMIT: u64 = 20;
+pub const DEFAULT_STORAGE_BUCKET_SIZE_LIMIT: u64 = 100;
+pub const DEFAULT_STORAGE_BUCKET_OBJECTS_LIMIT: u64 = 10;
+pub const DEFAULT_STORAGE_BUCKETS_NUMBER: u64 = 10;
 
 impl crate::Trait for Test {
     type Event = TestEvent;

+ 283 - 281
runtime-modules/storage/src/tests/mod.rs

@@ -9,6 +9,7 @@ use frame_support::{StorageDoubleMap, StorageMap, StorageValue};
 use frame_system::RawOrigin;
 use sp_std::collections::btree_map::BTreeMap;
 use sp_std::collections::btree_set::BTreeSet;
+use sp_std::convert::TryInto;
 use sp_std::iter::{repeat, FromIterator};
 
 use common::working_group::WorkingGroup;
@@ -21,18 +22,49 @@ use crate::{
 };
 
 use mocks::{
-    build_test_externalities, Balances, DataObjectDeletionPrize,
+    build_test_externalities, Balances, BlacklistSizeLimit, DataObjectDeletionPrize,
     DefaultChannelDynamicBagNumberOfStorageBuckets, DefaultMemberDynamicBagNumberOfStorageBuckets,
     InitialStorageBucketsNumberForDynamicBag, MaxDataObjectSize, MaxDistributionBucketFamilyNumber,
     MaxRandomIterationNumber, Storage, Test, ANOTHER_DISTRIBUTION_PROVIDER_ID,
-    ANOTHER_STORAGE_PROVIDER_ID, DEFAULT_DISTRIBUTION_PROVIDER_ACCOUNT_ID,
-    DEFAULT_DISTRIBUTION_PROVIDER_ID, DEFAULT_MEMBER_ACCOUNT_ID, DEFAULT_MEMBER_ID,
+    ANOTHER_STORAGE_PROVIDER_ID, BAG_DELETION_PRIZE_VALUE,
+    DEFAULT_DISTRIBUTION_PROVIDER_ACCOUNT_ID, DEFAULT_DISTRIBUTION_PROVIDER_ID,
+    DEFAULT_MEMBER_ACCOUNT_ID, DEFAULT_MEMBER_ID, DEFAULT_STORAGE_BUCKETS_NUMBER,
+    DEFAULT_STORAGE_BUCKET_OBJECTS_LIMIT, DEFAULT_STORAGE_BUCKET_SIZE_LIMIT,
     DEFAULT_STORAGE_PROVIDER_ACCOUNT_ID, DEFAULT_STORAGE_PROVIDER_ID,
-    DISTRIBUTION_WG_LEADER_ACCOUNT_ID, STORAGE_WG_LEADER_ACCOUNT_ID,
+    DISTRIBUTION_WG_LEADER_ACCOUNT_ID, INITIAL_BALANCE, STORAGE_WG_LEADER_ACCOUNT_ID,
+    VOUCHER_OBJECTS_LIMIT, VOUCHER_SIZE_LIMIT,
 };
 
 use fixtures::*;
 
+// Test helpers shared by the dynamic-bag tests below.
+
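+// Creates `buckets_number` storage buckets with the default size and objects limits, returning their ids.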
+fn create_storage_buckets(buckets_number: u64) -> BTreeSet<u64> {
+    set_max_voucher_limits();
+    CreateStorageBucketFixture::default()
+        .with_origin(RawOrigin::Signed(STORAGE_WG_LEADER_ACCOUNT_ID))
+        .with_objects_limit(DEFAULT_STORAGE_BUCKET_OBJECTS_LIMIT)
+        .with_size_limit(DEFAULT_STORAGE_BUCKET_SIZE_LIMIT)
+        .create_several(buckets_number)
+}
+
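+// Builds the default bag deletion prize, paid from the default member account.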
+fn default_bag_deletion_prize() -> Option<DynamicBagDeletionPrize<Test>> {
+    Some(DynamicBagDeletionPrize::<Test> {
+        prize: BAG_DELETION_PRIZE_VALUE,
+        account_id: DEFAULT_MEMBER_ACCOUNT_ID,
+    })
+}
+
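+// Builds upload parameters targeting the default member's dynamic bag with a single data object.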
+fn default_upload_parameters() -> UploadParameters<Test> {
+    let dynamic_bag_id = DynamicBagId::<Test>::Member(DEFAULT_MEMBER_ID);
+    UploadParameters::<Test> {
+        bag_id: BagId::<Test>::from(dynamic_bag_id.clone()),
+        object_creation_list: create_single_data_object(),
+        deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
+        expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
+    }
+}
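+
+// A minimal usage sketch combining the helpers above (assumes the default mocks;
+// it mirrors the successful-creation tests later in this module):
+//
+// create_storage_buckets(DEFAULT_STORAGE_BUCKETS_NUMBER);
+// increase_account_balance(&DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+// CreateDynamicBagWithObjectsFixture::default()
+//     .with_deletion_prize(default_bag_deletion_prize())
+//     .with_upload_parameters(default_upload_parameters())
+//     .call_and_assert(Ok(()));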
+
 #[test]
 fn create_storage_bucket_succeeded() {
     build_test_externalities().execute_with(|| {
@@ -3049,10 +3081,7 @@ fn set_storage_bucket_voucher_limits_fails_with_invalid_storage_bucket() {
 }
 
 fn set_max_voucher_limits() {
-    let new_size_limit = 100;
-    let new_objects_limit = 1;
-
-    set_max_voucher_limits_with_params(new_size_limit, new_objects_limit);
+    set_max_voucher_limits_with_params(VOUCHER_SIZE_LIMIT, VOUCHER_OBJECTS_LIMIT);
 }
 
 fn set_max_voucher_limits_with_params(size_limit: u64, objects_limit: u64) {
@@ -3103,7 +3132,7 @@ fn create_dynamic_bag_succeeded() {
 
         let dynamic_bag_id = DynamicBagId::<Test>::Member(DEFAULT_MEMBER_ID);
 
-        create_storage_buckets(10);
+        create_storage_buckets(DEFAULT_STORAGE_BUCKETS_NUMBER);
 
         let deletion_prize_value = 100;
         let deletion_prize_account_id = DEFAULT_MEMBER_ACCOUNT_ID;
@@ -3400,37 +3429,6 @@ fn test_storage_bucket_iterators() {
     });
 }
 
-fn create_storage_buckets(buckets_number: u64) -> BTreeSet<u64> {
-    set_max_voucher_limits();
-
-    let objects_limit = 1;
-    let size_limit = 100;
-
-    create_storage_buckets_with_limits(buckets_number, size_limit, objects_limit)
-}
-
-fn create_storage_buckets_with_limits(
-    buckets_number: u64,
-    size_limit: u64,
-    objects_limit: u64,
-) -> BTreeSet<u64> {
-    let mut bucket_ids = BTreeSet::new();
-
-    for _ in 0..buckets_number {
-        let bucket_id = CreateStorageBucketFixture::default()
-            .with_origin(RawOrigin::Signed(STORAGE_WG_LEADER_ACCOUNT_ID))
-            .with_invite_worker(None)
-            .with_objects_limit(objects_limit)
-            .with_size_limit(size_limit)
-            .call_and_assert(Ok(()))
-            .unwrap();
-
-        bucket_ids.insert(bucket_id);
-    }
-
-    bucket_ids
-}
-
 #[test]
 fn update_number_of_storage_buckets_in_dynamic_bag_creation_policy_succeeded() {
     build_test_externalities().execute_with(|| {
@@ -5068,14 +5066,14 @@ fn set_distribution_bucket_family_metadata_fails_with_invalid_distribution_bucke
 }
 
 #[test]
-fn create_dynamic_bag_with_objects_succeeds() {
+fn can_delete_dynamic_bags_with_objects_succeeded() {
     build_test_externalities().execute_with(|| {
         let starting_block = 1;
         run_to_block(starting_block);
 
         let dynamic_bag_id = DynamicBagId::<Test>::Member(DEFAULT_MEMBER_ID);
 
-        create_storage_buckets(10);
+        create_storage_buckets(DEFAULT_STORAGE_BUCKETS_NUMBER);
 
         let deletion_prize_value = 100;
         let deletion_prize_account_id = DEFAULT_MEMBER_ACCOUNT_ID;
@@ -5107,332 +5105,336 @@ fn create_dynamic_bag_with_objects_succeeds() {
         CreateDynamicBagWithObjectsFixture::default()
             .with_bag_id(dynamic_bag_id.clone())
             .with_deletion_prize(deletion_prize.clone())
-            .with_objects(upload_parameters)
+            .with_upload_parameters(upload_parameters)
             .call_and_assert(Ok(()));
 
-        let bag = Storage::dynamic_bag(&dynamic_bag_id);
-
-        // Check that IDs are within possible range.
-        assert!(bag
-            .stored_by
-            .iter()
-            .all(|id| { *id < Storage::next_storage_bucket_id() }));
-
-        let creation_policy =
-            Storage::get_dynamic_bag_creation_policy(dynamic_bag_id.clone().into());
-        assert_eq!(
-            bag.stored_by.len(),
-            creation_policy.number_of_storage_buckets as usize
-        );
+        CanDeleteDynamicBagWithObjectsFixture::default()
+            .with_bag_id(dynamic_bag_id.clone())
+            .call_and_assert(Ok(()));
+    });
+}
 
-        assert_eq!(bag.deletion_prize.unwrap(), deletion_prize_value);
+#[test]
+fn cannot_delete_dynamic_bags_with_objects_with_insufficient_treasury_balance() {
+    build_test_externalities().execute_with(|| {
+        let starting_block = 1;
+        run_to_block(starting_block);
 
-        // post-check balances
+        let dynamic_bag_id = DynamicBagId::<Test>::Member(DEFAULT_MEMBER_ID);
+        create_storage_buckets(DEFAULT_STORAGE_BUCKETS_NUMBER);
+        increase_account_balance(&DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+        // pre-check balances
         assert_eq!(
             Balances::usable_balance(&DEFAULT_MEMBER_ACCOUNT_ID),
-            initial_balance - deletion_prize_value
+            INITIAL_BALANCE
         );
         assert_eq!(
             Balances::usable_balance(&<StorageTreasury<Test>>::module_account_id()),
-            deletion_prize_value
+            0
         );
 
-        EventFixture::assert_last_crate_event(RawEvent::DynamicBagCreated(
-            dynamic_bag_id,
-            deletion_prize,
-            BTreeSet::from_iter(bag.stored_by),
-            BTreeSet::from_iter(bag.distributed_by),
-        ));
+        CreateDynamicBagWithObjectsFixture::default()
+            .with_bag_id(dynamic_bag_id.clone())
+            .with_deletion_prize(default_bag_deletion_prize())
+            .with_upload_parameters(default_upload_parameters())
+            .call_and_assert(Ok(()));
+
+        let _ = Balances::slash(
+            &<StorageTreasury<Test>>::module_account_id(),
+            BAG_DELETION_PRIZE_VALUE,
+        );
+
+        CanDeleteDynamicBagWithObjectsFixture::default()
+            .with_bag_id(dynamic_bag_id.clone())
+            .call_and_assert(Err(Error::<Test>::InsufficientTreasuryBalance.into()));
     });
 }
 
 #[test]
-fn create_dynamic_bag_with_objects_fails_with_no_bucket_availables_with_sufficient_objects_limit() {
+fn unsuccessful_dyn_bag_creation_with_existing_bag_id() {
     build_test_externalities().execute_with(|| {
-        let starting_block = 1;
-        run_to_block(starting_block);
+        run_to_block(1);
 
-        // set limit size 100 and limit obj number 20
-        set_max_voucher_limits_with_params(100, 20);
+        create_storage_buckets(DEFAULT_STORAGE_BUCKETS_NUMBER);
+        increase_account_balance(&DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
 
         let dynamic_bag_id = DynamicBagId::<Test>::Member(DEFAULT_MEMBER_ID);
+        CreateDynamicBagFixture::default()
+            .with_bag_id(dynamic_bag_id)
+            .call_and_assert(Ok(()));
 
-        // create 10 buckets each with size limit 10 and num object limit 1
-        create_storage_buckets_with_limits(10, 10, 1);
+        CreateDynamicBagWithObjectsFixture::default()
+            .call_and_assert(Err(Error::<Test>::DynamicBagExists.into()));
+    })
+}
 
-        let deletion_prize_value = 100;
-        let deletion_prize_account_id = DEFAULT_MEMBER_ACCOUNT_ID;
-        let initial_balance = 10000;
-        increase_account_balance(&deletion_prize_account_id, initial_balance);
+#[test]
+fn unsuccessful_dyn_bag_creation_with_insufficient_balance_for_bag_prize_and_upload_fees() {
+    build_test_externalities().execute_with(|| {
+        run_to_block(1);
 
-        let deletion_prize = Some(DynamicBagDeletionPrize::<Test> {
-            prize: deletion_prize_value,
-            account_id: deletion_prize_account_id,
-        });
+        create_storage_buckets(DEFAULT_STORAGE_BUCKETS_NUMBER);
 
-        let upload_parameters = UploadParameters::<Test> {
-            bag_id: BagId::<Test>::from(dynamic_bag_id.clone()),
-            object_creation_list: create_data_object_candidates(1, 3),
-            deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
-            expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
-        };
+        CreateDynamicBagWithObjectsFixture::default()
+            .with_deletion_prize(default_bag_deletion_prize())
+            .call_and_assert(Err(Error::<Test>::InsufficientBalance.into()));
+    })
+}
 
-        // pre-check balances
-        assert_eq!(
-            Balances::usable_balance(&DEFAULT_MEMBER_ACCOUNT_ID),
-            initial_balance
-        );
-        assert_eq!(
-            Balances::usable_balance(&<StorageTreasury<Test>>::module_account_id()),
-            0
-        );
+#[test]
+fn unsuccessful_dyn_bag_creation_with_different_accounts_for_prize_and_params() {
+    build_test_externalities().execute_with(|| {
+        run_to_block(1);
+
+        create_storage_buckets(DEFAULT_STORAGE_BUCKETS_NUMBER);
+        increase_account_balance(&DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
 
-        // this fails because num objects == 3 & bucket.num_objects_limit == 1
         CreateDynamicBagWithObjectsFixture::default()
-            .with_bag_id(dynamic_bag_id.clone())
-            .with_deletion_prize(deletion_prize.clone())
-            .with_objects(upload_parameters.clone())
-            .call_and_assert(Err(Error::<Test>::StorageBucketIdCollectionsAreEmpty.into()));
+            .with_deletion_prize(default_bag_deletion_prize())
+            .with_objects_prize_source_account(DEFAULT_MEMBER_ACCOUNT_ID + 100)
+            .call_and_assert(Err(Error::<Test>::AccountsNotCoherent.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_dyn_bag_creation_with_zero_objects_size() {
+    build_test_externalities().execute_with(|| {
+        run_to_block(1);
 
-        // set bucket size limits to be large enought and retry
-        let new_objects_number_limit = 10;
-        let new_objects_size_limit = 100;
-        let bucket_id_to_enlarge = 1;
+        create_storage_buckets(DEFAULT_STORAGE_BUCKETS_NUMBER);
+        increase_account_balance(&DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
 
-        SetStorageBucketVoucherLimitsFixture::default()
-            .with_origin(RawOrigin::Signed(STORAGE_WG_LEADER_ACCOUNT_ID))
-            .with_storage_bucket_id(bucket_id_to_enlarge)
-            .with_new_objects_number_limit(new_objects_number_limit)
-            .with_new_objects_size_limit(new_objects_size_limit)
-            .call_and_assert(Ok(()));
+        let objects: Vec<DataObjectCreationParameters> = (1..DEFAULT_DATA_OBJECTS_NUMBER)
+            .into_iter()
+            .map(|idx| DataObjectCreationParameters {
+                size: 0,
+                ipfs_content_id: vec![idx.try_into().unwrap()],
+            })
+            .collect();
 
-        // this succeeds now
         CreateDynamicBagWithObjectsFixture::default()
-            .with_bag_id(dynamic_bag_id.clone())
-            .with_deletion_prize(deletion_prize.clone())
-            .with_objects(upload_parameters)
-            .call_and_assert(Ok(()));
+            .with_objects(objects)
+            .call_and_assert(Err(Error::<Test>::ZeroObjectSize.into()));
     })
 }
 
 #[test]
-fn create_dynamic_bag_with_objects_fails_with_no_bucket_availables_with_sufficient_size_limit() {
+fn unsuccessful_dyn_bag_creation_with_object_size_exceeding_max_obj_size() {
     build_test_externalities().execute_with(|| {
-        let starting_block = 1;
-        run_to_block(starting_block);
+        run_to_block(1);
 
-        // set limit size 100 and limit obj number 20
-        set_max_voucher_limits_with_params(100, 20);
+        create_storage_buckets(DEFAULT_STORAGE_BUCKETS_NUMBER);
+        increase_account_balance(&DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
 
-        let dynamic_bag_id = DynamicBagId::<Test>::Member(DEFAULT_MEMBER_ID);
+        let objects: Vec<DataObjectCreationParameters> = (1..DEFAULT_DATA_OBJECTS_NUMBER)
+            .into_iter()
+            .map(|idx| DataObjectCreationParameters {
+                // set size high on purpose to trigger error
+                size: 1_000_000,
+                ipfs_content_id: vec![idx.try_into().unwrap()],
+            })
+            .collect();
 
-        // create 10 buckets each with size limit 1 and num object limit 10
-        create_storage_buckets_with_limits(10, 1, 10);
+        CreateDynamicBagWithObjectsFixture::default()
+            .with_objects(objects)
+            .call_and_assert(Err(Error::<Test>::MaxDataObjectSizeExceeded.into()));
+    })
+}
 
-        let deletion_prize_value = 100;
-        let deletion_prize_account_id = DEFAULT_MEMBER_ACCOUNT_ID;
-        let initial_balance = 10000;
-        increase_account_balance(&deletion_prize_account_id, initial_balance);
+#[test]
+fn unsuccessful_dyn_bag_creation_with_buckets_having_insufficient_size_available() {
+    build_test_externalities().execute_with(|| {
+        run_to_block(1);
 
-        let deletion_prize = Some(DynamicBagDeletionPrize::<Test> {
-            prize: deletion_prize_value,
-            account_id: deletion_prize_account_id,
-        });
+        create_storage_buckets(DEFAULT_STORAGE_BUCKETS_NUMBER);
+        increase_account_balance(&DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
 
-        // try uploading with 3 objects each exceeding bucket size limit
-        let upload_parameters = UploadParameters::<Test> {
-            bag_id: BagId::<Test>::from(dynamic_bag_id.clone()),
-            object_creation_list: create_data_object_candidates(1, 3),
-            deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
-            expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
-        };
+        let objects: Vec<DataObjectCreationParameters> = (1..2)
+            .map(|idx| DataObjectCreationParameters {
+                size: DEFAULT_STORAGE_BUCKET_SIZE_LIMIT + 1,
+                ipfs_content_id: vec![idx],
+            })
+            .collect();
 
-        // pre-check balances
-        assert_eq!(
-            Balances::usable_balance(&DEFAULT_MEMBER_ACCOUNT_ID),
-            initial_balance
-        );
-        assert_eq!(
-            Balances::usable_balance(&<StorageTreasury<Test>>::module_account_id()),
-            0
-        );
+        CreateDynamicBagWithObjectsFixture::default()
+            .with_objects(objects)
+            .call_and_assert(Err(Error::<Test>::StorageBucketIdCollectionsAreEmpty.into()));
+    })
+}
+
+#[test]
+fn unsuccessful_dyn_bag_creation_with_buckets_having_insufficient_objects_available() {
+    build_test_externalities().execute_with(|| {
+        run_to_block(1);
+
+        create_storage_buckets(DEFAULT_STORAGE_BUCKETS_NUMBER);
+        increase_account_balance(&DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+
+        let objects: Vec<DataObjectCreationParameters> = (1..(DEFAULT_STORAGE_BUCKET_OBJECTS_LIMIT
+            + 1))
+            .map(|idx| DataObjectCreationParameters {
+                size: DEFAULT_DATA_OBJECTS_SIZE,
+                ipfs_content_id: vec![idx.try_into().unwrap()],
+            })
+            .collect();
 
         CreateDynamicBagWithObjectsFixture::default()
-            .with_bag_id(dynamic_bag_id.clone())
-            .with_deletion_prize(deletion_prize.clone())
-            .with_objects(upload_parameters.clone())
+            .with_objects(objects)
             .call_and_assert(Err(Error::<Test>::StorageBucketIdCollectionsAreEmpty.into()));
+    })
+}
 
-        // set bucket size limits to be large enought and retry
-        let new_objects_number_limit = 10;
-        let new_objects_size_limit = 100;
-        let bucket_id_to_enlarge = 1;
+#[test]
+fn unsuccessful_dyn_bag_creation_with_empty_ipfs_ids() {
+    build_test_externalities().execute_with(|| {
+        run_to_block(1);
 
-        SetStorageBucketVoucherLimitsFixture::default()
-            .with_origin(RawOrigin::Signed(STORAGE_WG_LEADER_ACCOUNT_ID))
-            .with_storage_bucket_id(bucket_id_to_enlarge)
-            .with_new_objects_number_limit(new_objects_number_limit)
-            .with_new_objects_size_limit(new_objects_size_limit)
-            .call_and_assert(Ok(()));
+        create_storage_buckets(DEFAULT_STORAGE_BUCKETS_NUMBER);
+        increase_account_balance(&DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
 
+        let objects: Vec<DataObjectCreationParameters> = (1..DEFAULT_DATA_OBJECTS_NUMBER)
+            .map(|_| DataObjectCreationParameters {
+                size: DEFAULT_DATA_OBJECTS_SIZE,
+                ipfs_content_id: vec![],
+            })
+            .collect();
         CreateDynamicBagWithObjectsFixture::default()
-            .with_bag_id(dynamic_bag_id.clone())
-            .with_deletion_prize(deletion_prize.clone())
-            .with_objects(upload_parameters)
-            .call_and_assert(Ok(()));
+            .with_objects(objects)
+            .call_and_assert(Err(Error::<Test>::EmptyContentId.into()));
     })
 }
 
 #[test]
-fn create_dynamic_bag_with_objects_fails_with_unsufficient_balance() {
+fn unsuccessful_dyn_bag_creation_with_empty_objects_list() {
     build_test_externalities().execute_with(|| {
-        let starting_block = 1;
-        run_to_block(starting_block);
+        run_to_block(1);
 
-        // set limit size 100 and limit obj number 20
-        set_max_voucher_limits_with_params(100, 20);
-        // create 3 buckets with size limit 10 and objects limit 3
-        create_storage_buckets_with_limits(3, 10, 3);
+        create_storage_buckets(DEFAULT_STORAGE_BUCKETS_NUMBER);
+        increase_account_balance(&DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
 
-        let dynamic_bag_id = DynamicBagId::<Test>::Member(DEFAULT_MEMBER_ID);
+        CreateDynamicBagWithObjectsFixture::default()
+            .with_objects(vec![])
+            .call_and_assert(Err(Error::<Test>::NoObjectsOnUpload.into()));
+    })
+}
 
-        let deletion_prize_value = 100;
-        let deletion_prize_account_id = DEFAULT_MEMBER_ACCOUNT_ID;
-        let initial_balance = 100; // just enough for the del prize
-        increase_account_balance(&deletion_prize_account_id, initial_balance);
+#[test]
+fn unsuccessful_dyn_bag_creation_with_invalid_expected_data_fee() {
+    build_test_externalities().execute_with(|| {
+        run_to_block(1);
 
-        let deletion_prize = Some(DynamicBagDeletionPrize::<Test> {
-            prize: deletion_prize_value,
-            account_id: deletion_prize_account_id,
-        });
+        create_storage_buckets(DEFAULT_STORAGE_BUCKETS_NUMBER);
+        increase_account_balance(&DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
 
-        // try uploading with > 0 objects exceeding balance
-        let data_objects = create_data_object_candidates(1, 3);
-        let upload_parameters = UploadParameters::<Test> {
-            bag_id: BagId::<Test>::from(dynamic_bag_id.clone()),
-            object_creation_list: data_objects.clone(),
-            deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
-            expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
-        };
+        CreateDynamicBagWithObjectsFixture::default()
+            .with_expected_data_size_fee(Storage::data_object_per_mega_byte_fee() + 100)
+            .call_and_assert(Err(Error::<Test>::DataSizeFeeChanged.into()));
+    })
+}
 
-        // pre-check balances
-        assert_eq!(
-            Balances::usable_balance(&DEFAULT_MEMBER_ACCOUNT_ID),
-            initial_balance
-        );
-        assert_eq!(
-            Balances::usable_balance(&<StorageTreasury<Test>>::module_account_id()),
-            0
-        );
+#[test]
+fn unsuccessful_dyn_bag_creation_with_dynamic_and_param_bag_differing() {
+    build_test_externalities().execute_with(|| {
+        run_to_block(1);
+
+        create_storage_buckets(DEFAULT_STORAGE_BUCKETS_NUMBER);
+        increase_account_balance(&DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
 
         CreateDynamicBagWithObjectsFixture::default()
-            .with_bag_id(dynamic_bag_id.clone())
-            .with_deletion_prize(deletion_prize.clone())
-            .with_objects(upload_parameters.clone())
-            .call_and_assert(Err(Error::<Test>::InsufficientBalance.into()));
+            .with_params_bag_id(DynamicBagId::<Test>::Channel(0u64).into())
+            .call_and_assert(Err(Error::<Test>::BagsNotCoherent.into()));
     })
 }
 
 #[test]
-fn can_delete_dynamic_bags_with_objects_succeeded() {
+fn unsuccessful_dyn_bag_creation_with_upload_blocking() {
     build_test_externalities().execute_with(|| {
-        let starting_block = 1;
-        run_to_block(starting_block);
+        run_to_block(1);
 
-        let dynamic_bag_id = DynamicBagId::<Test>::Member(DEFAULT_MEMBER_ID);
+        create_storage_buckets(DEFAULT_STORAGE_BUCKETS_NUMBER);
+        increase_account_balance(&DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
 
-        create_storage_buckets(10);
+        UpdateUploadingBlockedStatusFixture::default()
+            .with_origin(RawOrigin::Signed(STORAGE_WG_LEADER_ACCOUNT_ID))
+            .with_new_status(true)
+            .call_and_assert(Ok(()));
 
-        let deletion_prize_value = 100;
-        let deletion_prize_account_id = DEFAULT_MEMBER_ACCOUNT_ID;
-        let initial_balance = 10000;
-        increase_account_balance(&deletion_prize_account_id, initial_balance);
+        CreateDynamicBagWithObjectsFixture::default()
+            .call_and_assert(Err(Error::<Test>::UploadingBlocked.into()));
+    })
+}
 
-        let deletion_prize = Some(DynamicBagDeletionPrize::<Test> {
-            prize: deletion_prize_value,
-            account_id: deletion_prize_account_id,
-        });
+#[test]
+fn unsuccessful_dyn_bag_creation_with_blacklisted_ipfs_id() {
+    build_test_externalities().execute_with(|| {
+        run_to_block(1);
 
-        let upload_parameters = UploadParameters::<Test> {
-            bag_id: BagId::<Test>::from(dynamic_bag_id.clone()),
-            object_creation_list: create_single_data_object(),
-            deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
-            expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
-        };
+        create_storage_buckets(DEFAULT_STORAGE_BUCKETS_NUMBER);
+        increase_account_balance(&DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
 
-        // pre-check balances
-        assert_eq!(
-            Balances::usable_balance(&DEFAULT_MEMBER_ACCOUNT_ID),
-            initial_balance
-        );
-        assert_eq!(
-            Balances::usable_balance(&<StorageTreasury<Test>>::module_account_id()),
-            0
-        );
+        let objects: Vec<DataObjectCreationParameters> = (0..BlacklistSizeLimit::get())
+            .map(|idx| DataObjectCreationParameters {
+                size: DEFAULT_DATA_OBJECTS_SIZE,
+                ipfs_content_id: vec![idx.try_into().unwrap()],
+            })
+            .collect();
 
-        CreateDynamicBagWithObjectsFixture::default()
-            .with_bag_id(dynamic_bag_id.clone())
-            .with_deletion_prize(deletion_prize.clone())
-            .with_objects(upload_parameters)
+        UpdateBlacklistFixture::default()
+            .with_origin(RawOrigin::Signed(STORAGE_WG_LEADER_ACCOUNT_ID))
+            .with_add_hashes(
+                objects
+                    .iter()
+                    .map(|obj| obj.ipfs_content_id.clone())
+                    .collect(),
+            )
             .call_and_assert(Ok(()));
 
-        CanDeleteDynamicBagWithObjectsFixture::default()
-            .with_bag_id(dynamic_bag_id.clone())
-            .call_and_assert(Ok(()));
-    });
+        CreateDynamicBagWithObjectsFixture::default()
+            .with_objects(objects)
+            .call_and_assert(Err(Error::<Test>::DataObjectBlacklisted.into()));
+    })
 }
 
 #[test]
-fn cannot_delete_dynamic_bags_with_objects_with_unsufficient_treasury_balance() {
+fn successful_dyn_bag_creation_with_upload_and_no_deletion_prize() {
     build_test_externalities().execute_with(|| {
-        let starting_block = 1;
-        run_to_block(starting_block);
+        run_to_block(1);
 
-        let dynamic_bag_id = DynamicBagId::<Test>::Member(DEFAULT_MEMBER_ID);
-
-        create_storage_buckets(10);
-
-        let deletion_prize_value = 100;
-        let deletion_prize_account_id = DEFAULT_MEMBER_ACCOUNT_ID;
-        let initial_balance = 10000;
-        increase_account_balance(&deletion_prize_account_id, initial_balance);
+        create_storage_buckets(DEFAULT_STORAGE_BUCKETS_NUMBER);
+        increase_account_balance(&DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
 
-        let deletion_prize = Some(DynamicBagDeletionPrize::<Test> {
-            prize: deletion_prize_value,
-            account_id: deletion_prize_account_id,
-        });
+        CreateDynamicBagWithObjectsFixture::default().call_and_assert(Ok(()));
+    })
+}
 
-        let upload_parameters = UploadParameters::<Test> {
-            bag_id: BagId::<Test>::from(dynamic_bag_id.clone()),
-            object_creation_list: create_single_data_object(),
-            deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
-            expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
-        };
+#[test]
+fn successful_dyn_bag_creation_with_all_parameters_specified() {
+    build_test_externalities().execute_with(|| {
+        run_to_block(1);
 
-        // pre-check balances
-        assert_eq!(
-            Balances::usable_balance(&DEFAULT_MEMBER_ACCOUNT_ID),
-            initial_balance
-        );
-        assert_eq!(
-            Balances::usable_balance(&<StorageTreasury<Test>>::module_account_id()),
-            0
-        );
+        create_storage_buckets(DEFAULT_STORAGE_BUCKETS_NUMBER);
+        increase_account_balance(&DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
 
         CreateDynamicBagWithObjectsFixture::default()
-            .with_bag_id(dynamic_bag_id.clone())
-            .with_deletion_prize(deletion_prize.clone())
-            .with_objects(upload_parameters)
+            .with_deletion_prize(default_bag_deletion_prize())
             .call_and_assert(Ok(()));
+    })
+}
 
-        // Corrupt module balance enough so that it doesn't reach sufficient balance for deletion
-        let _ = Balances::slash(
-            &<StorageTreasury<Test>>::module_account_id(),
-            deletion_prize_value,
-        );
+#[test]
+fn unsuccessful_dyn_bag_creation_with_no_bucket_accepting() {
+    build_test_externalities().execute_with(|| {
+        run_to_block(1);
 
-        CanDeleteDynamicBagWithObjectsFixture::default()
-            .with_bag_id(dynamic_bag_id.clone())
-            .call_and_assert(Err(Error::<Test>::InsufficientTreasuryBalance.into()));
-    });
+        set_max_voucher_limits();
+        CreateStorageBucketFixture::default()
+            .with_origin(RawOrigin::Signed(STORAGE_WG_LEADER_ACCOUNT_ID))
+            .with_accepting_new_bags(false)
+            .create_several(DEFAULT_STORAGE_BUCKETS_NUMBER);
+
+        increase_account_balance(&DEFAULT_MEMBER_ACCOUNT_ID, INITIAL_BALANCE);
+
+        CreateDynamicBagWithObjectsFixture::default()
+            .call_and_assert(Err(Error::<Test>::StorageBucketIdCollectionsAreEmpty.into()));
+    })
 }

+ 5 - 4
start.sh

@@ -26,6 +26,10 @@ docker-compose up -d joystream-node
 
 ## Init the chain with some state
 export SKIP_MOCK_CONTENT=true
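+# Resolve the host IP and export the storage/distributor endpoints (and the storage transactor key) used below.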
+HOST_IP=$(tests/network-tests/get-host-ip.sh)
+export COLOSSUS_1_URL="http://${HOST_IP}:3333"
+export COLOSSUS_1_TRANSACTOR_KEY=$(docker run --rm --pull=always docker.io/parity/subkey:2.0.1 inspect ${COLOSSUS_1_TRANSACTOR_URI} --output-type json | jq .ss58Address -r)
+export DISTRIBUTOR_1_URL="http://${HOST_IP}:3334"
 ./tests/network-tests/run-test-scenario.sh ${INIT_CHAIN_SCENARIO}
 
 ## Set sudo as the membership screening authority
@@ -34,9 +38,6 @@ yarn workspace api-scripts set-sudo-as-screening-auth
 ## Member faucet
 docker-compose up -d faucet
 
-## Storage Infrastructure Configuration
-./storage-playground-config.sh
-
 ## Query Node Infrastructure
 ./query-node/start.sh
 
@@ -58,4 +59,4 @@ else
   while true; do
     read
   done
-fi
+fi

+ 1 - 1
storage-node/package.json

@@ -94,7 +94,7 @@
   },
   "volta": {
     "node": "14.16.1",
-    "yarn": "1.22.5"
+    "yarn": "1.22.15"
   },
   "files": [
     "/bin",

+ 1 - 1
tests/network-tests/.env

@@ -37,7 +37,7 @@ WORKING_GROUP_ROLE_STAKE=10
 # Reward interval for working group tests
 LONG_REWARD_INTERVAL=99999
 # First reward interval for working group reward test
-SHORT_FIRST_REWARD_INTERVAL=3
+SHORT_FIRST_REWARD_INTERVAL=6
 # Reward interval for working group reward test
 SHORT_REWARD_INTERVAL=3
 # Payout amount for working group tests

+ 32 - 0
tests/network-tests/codegen.yml

@@ -0,0 +1,32 @@
+overwrite: true
+
+schema: '../../query-node/generated/graphql-server/generated/schema.graphql'
+
+documents:
+  - './src/graphql/queries/*.graphql'
+
+config:
+  scalars:
+    Date: Date
+  preResolveTypes: true # avoid using Pick
+  skipTypename: true # skip __typename field in typings unless it's part of the query
+
+generates:
+  src/graphql/generated/schema.ts:
+    hooks:
+      afterOneFileWrite:
+        - prettier --write
+        - eslint --fix
+    plugins:
+      - typescript
+  src/graphql/generated/queries.ts:
+    preset: import-types
+    presetConfig:
+      typesPath: ./schema
+    hooks:
+      afterOneFileWrite:
+        - prettier --write
+        - eslint --fix
+    plugins:
+      - typescript-operations
+      - typescript-document-nodes

+ 7 - 0
tests/network-tests/openapitools.json

@@ -0,0 +1,7 @@
+{
+  "$schema": "node_modules/@openapitools/openapi-generator-cli/config.schema.json",
+  "spaces": 2,
+  "generator-cli": {
+    "version": "5.2.1"
+  }
+}

+ 18 - 3
tests/network-tests/package.json

@@ -9,7 +9,12 @@
     "node-ts-strict": "node -r ts-node/register --unhandled-rejections=strict",
     "lint": "eslint . --quiet --ext .ts",
     "checks": "tsc --noEmit --pretty && prettier ./ --check && yarn lint",
-    "format": "prettier ./ --write "
+    "format": "prettier ./ --write ",
+    "generate:api:storage-node": "yarn openapi-generator-cli generate -i ../../storage-node/src/api-spec/openapi.yaml -g typescript-axios -o ./src/apis/storageNode",
+    "generate:api:distributor-node": "yarn openapi-generator-cli generate -i ../../distributor-node/src/api-spec/public.yml -g typescript-axios -o ./src/apis/distributorNode",
+    "generate:api:all": "yarn generate:api:storage-node && yarn generate:api:distributor-node && yarn format",
+    "generate:types:graphql": "graphql-codegen",
+    "generate:all": "yarn generate:types:graphql && yarn generate:api:all"
   },
   "dependencies": {
     "@apollo/client": "^3.2.5",
@@ -23,7 +28,11 @@
     "bn.js": "^4.11.8",
     "dotenv": "^8.2.0",
     "fs": "^0.0.1-security",
-    "uuid": "^7.0.3"
+    "uuid": "^7.0.3",
+    "axios": "^0.21.1",
+    "bmp-js": "^0.1.0",
+    "@types/bmp-js": "^0.1.0",
+    "node-cleanup": "^2.1.2"
   },
   "devDependencies": {
     "@polkadot/ts": "^0.4.8",
@@ -32,7 +41,13 @@
     "chai": "^4.2.0",
     "prettier": "^2.2.1",
     "ts-node": "^10.2.1",
-    "typescript": "^4.4.3"
+    "typescript": "^4.4.3",
+    "@openapitools/openapi-generator-cli": "^2.3.6",
+    "@graphql-codegen/cli": "^1.21.4",
+    "@graphql-codegen/typescript": "^1.22.0",
+    "@graphql-codegen/import-types-preset": "^1.18.1",
+    "@graphql-codegen/typescript-operations": "^1.17.16",
+    "@graphql-codegen/typescript-document-nodes": "^1.17.11"
   },
   "volta": {
     "extends": "../../package.json"

+ 48 - 0
tests/network-tests/run-full-tests.sh

@@ -0,0 +1,48 @@
+#!/usr/bin/env bash
+set -e
+
+SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")"
+cd $SCRIPT_PATH
+
+CONTAINER_ID=$(./run-test-node-docker.sh)
+
+function cleanup() {
+    docker logs ${CONTAINER_ID} --tail 15
+    docker-compose -f ../../docker-compose.yml down -v
+}
+
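+# Dump the node's recent logs and tear the containers down on exit, even when a test fails.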
+trap cleanup EXIT
+
+sleep 3
+
+# Display runtime version
+yarn workspace api-scripts tsnode-strict src/status.ts | grep Runtime
+
+# Start any other services we want
+# docker-compose -f ../../docker-compose.yml up -d colossus-1
+
+# Start a query-node
+../../query-node/start.sh
+
+# Run proposals tests first, since they require no leads hired
+./run-test-scenario.sh proposals
+
+# Setup storage & distribution
+HOST_IP=$(./get-host-ip.sh)
+# Because proposals tests hire and then fire each lead,
+# we need to override COLOSSUS_1_WORKER_ID (0 => 1) and DISTRIBUTOR_1_WORKER_ID (0 => 1)
+export COLOSSUS_1_URL="http://${HOST_IP}:3333"
+export COLOSSUS_1_WORKER_ID=1
+export COLOSSUS_1_WORKER_URI=//testing//worker//Storage//${COLOSSUS_1_WORKER_ID}
+export COLOSSUS_1_TRANSACTOR_KEY=$(docker run --rm --pull=always docker.io/parity/subkey:2.0.1 inspect ${COLOSSUS_1_TRANSACTOR_URI} --output-type json | jq .ss58Address -r)
+export DISTRIBUTOR_1_URL="http://${HOST_IP}:3334"
+export DISTRIBUTOR_1_WORKER_ID=1
+export DISTRIBUTOR_1_ACCOUNT_URI=//testing//worker//Distribution//${DISTRIBUTOR_1_WORKER_ID}
+REUSE_KEYS=true ./run-test-scenario.sh init-storage-and-distribution
+
+# Start colossus & argus
+docker-compose -f ../../docker-compose.yml up -d colossus-1
+docker-compose -f ../../docker-compose.yml up -d distributor-1
+
+# Run combined tests reusing the existing keys
+REUSE_KEYS=true ./run-test-scenario.sh combined

+ 1 - 1
tests/network-tests/run-test-scenario.sh

@@ -10,4 +10,4 @@ SCENARIO=$1
 SCENARIO=${SCENARIO:=full}
 
 # Execute the tests
-time DEBUG=integration-tests* yarn workspace network-tests node-ts-strict src/scenarios/${SCENARIO}.ts
+time DEBUG=integration-tests*,-integration-tests:query-node-api:* yarn workspace network-tests node-ts-strict src/scenarios/${SCENARIO}.ts

+ 1 - 1
tests/network-tests/run-tests.sh

@@ -40,4 +40,4 @@ yarn workspace api-scripts tsnode-strict src/status.ts | grep Runtime
 # keys for workers and members..
 
 # Second scenario..
-# ./run-test-scenario.sh $2
+# ./run-test-scenario.sh $2

+ 145 - 143
tests/network-tests/src/Api.ts

@@ -1,13 +1,12 @@
-import { ApiPromise, WsProvider, Keyring } from '@polkadot/api'
+import { ApiPromise, WsProvider, Keyring, SubmittableResult } from '@polkadot/api'
 import { Bytes, BTreeSet, Option, u32, Vec, StorageKey } from '@polkadot/types'
-import { Codec, ISubmittableResult } from '@polkadot/types/types'
+import { Codec, ISubmittableResult, IEvent } from '@polkadot/types/types'
 import { KeyringPair } from '@polkadot/keyring/types'
 import { decodeAddress } from '@polkadot/keyring'
 import { MemberId, PaidMembershipTerms, PaidTermId } from '@joystream/types/members'
 import { Mint, MintId } from '@joystream/types/mint'
 import {
   Application,
-  ApplicationIdToWorkerIdMap,
   Worker,
   WorkerId,
   OpeningPolicyCommitment,
@@ -15,9 +14,9 @@ import {
 } from '@joystream/types/working-group'
 import { ElectionStake, Seat } from '@joystream/types/council'
 import { DataObjectId, StorageBucketId } from '@joystream/types/storage'
-import { AccountInfo, Balance, BalanceOf, BlockNumber, EventRecord } from '@polkadot/types/interfaces'
+import { AccountInfo, Balance, BalanceOf, BlockNumber, EventRecord, AccountId } from '@polkadot/types/interfaces'
 import BN from 'bn.js'
-import { SubmittableExtrinsic } from '@polkadot/api/types'
+import { AugmentedEvent, SubmittableExtrinsic } from '@polkadot/api/types'
 import { Sender, LogLevel } from './sender'
 import { Utils } from './utils'
 import { Stake, StakedState, StakeId } from '@joystream/types/stake'
@@ -31,7 +30,6 @@ import {
   OpeningId,
 } from '@joystream/types/hiring'
 import { FillOpeningParameters, ProposalId } from '@joystream/types/proposals'
-// import { v4 as uuid } from 'uuid'
 import { extendDebug } from './Debugger'
 import { InvertedPromise } from './InvertedPromise'
 import { VideoId, VideoCategoryId } from '@joystream/types/content'
@@ -40,10 +38,28 @@ import { ChannelCategoryMetadata, VideoCategoryMetadata } from '@joystream/metad
 import { metadataToBytes } from '../../../cli/lib/helpers/serialization'
 import { assert } from 'chai'
 import { WorkingGroups } from './WorkingGroups'
-import { v4 as uuid } from 'uuid'
 
-type AnyMetadata = {
-  serializeBinary(): Uint8Array
+const workingGroupNameByGroup: { [key in WorkingGroups]: string } = {
+  'distributionWorkingGroup': 'Distribution',
+  'storageWorkingGroup': 'Storage',
+  'contentWorkingGroup': 'Content',
+  'gatewayWorkingGroup': 'Gateway',
+  'operationsWorkingGroupAlpha': 'OperationsAlpha',
+  'operationsWorkingGroupBeta': 'OperationsBeta',
+  'operationsWorkingGroupGamma': 'OperationsGamma',
+}
+
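+// Helper types that map a (section, method) pair to the concrete event type exposed by the augmented ApiPromise.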
+type EventSection = keyof ApiPromise['events'] & string
+type EventMethod<Section extends EventSection> = keyof ApiPromise['events'][Section] & string
+type EventType<
+  Section extends EventSection,
+  Method extends EventMethod<Section>
+> = ApiPromise['events'][Section][Method] extends AugmentedEvent<'promise', infer T> ? IEvent<T> : never
+
+export type KeyGenInfo = {
+  start: number
+  final: number
+  custom: string[]
 }
 
 export class ApiFactory {
@@ -51,9 +67,14 @@ export class ApiFactory {
   private readonly keyring: Keyring
   // number used as part of key derivation path
   private keyId = 0
+  // stores names of the created custom keys
+  private customKeys: string[] = []
   // mapping from account address to key id.
   // To be able to re-derive keypair externally when mini-secret is known.
   readonly addressesToKeyId: Map<string, number> = new Map()
+  // mapping from account address to suri.
+  // To be able to get the suri of a known key for the purpose of, for example, interacting with the CLIs
+  readonly addressesToSuri: Map<string, string>
   // mini secret used in SURI key derivation path
   private readonly miniSecret: string
 
@@ -98,6 +119,7 @@ export class ApiFactory {
     this.keyring.addFromUri(sudoAccountUri)
     this.miniSecret = miniSecret
     this.addressesToKeyId = new Map()
+    this.addressesToSuri = new Map()
     this.keyId = 0
   }
 
@@ -109,29 +131,52 @@ export class ApiFactory {
     const keys: { key: KeyringPair; id: number }[] = []
     for (let i = 0; i < n; i++) {
       const id = this.keyId++
-      const key = this.createCustomKeyPair(`${id}`)
+      const key = this.createKeyPair(`${id}`)
       keys.push({ key, id })
       this.addressesToKeyId.set(key.address, id)
     }
     return keys
   }
 
+  private createKeyPair(suriPath: string, isCustom = false): KeyringPair {
+    if (isCustom) {
+      this.customKeys.push(suriPath)
+    }
+    const uri = `${this.miniSecret}//testing//${suriPath}`
+    const pair = this.keyring.addFromUri(uri)
+    this.addressesToSuri.set(pair.address, uri)
+    return pair
+  }
+
   public createCustomKeyPair(customPath: string): KeyringPair {
-    return this.keyring.addFromUri(customPath + uuid().substring(0, 8))
+    return this.createKeyPair(customPath, true)
   }
 
-  public keyGenInfo(): { start: number; final: number } {
+  public keyGenInfo(): KeyGenInfo {
     const start = 0
     const final = this.keyId
     return {
       start,
       final,
+      custom: this.customKeys,
     }
   }
 
   public getAllGeneratedAccounts(): { [k: string]: number } {
     return Object.fromEntries(this.addressesToKeyId)
   }
+
+  public getKeypair(address: AccountId | string): KeyringPair {
+    return this.keyring.getPair(address)
+  }
+
+  public getSuri(address: AccountId | string): string {
+    const suri = this.addressesToSuri.get(address.toString())
+    if (!suri) {
+      throw new Error(`Suri for address ${address} not available!`)
+    }
+    return suri
+  }
 }
 
 export class Api {
@@ -148,11 +193,44 @@ export class Api {
     this.sender = new Sender(api, keyring, label)
   }
 
-  // expose only direct ability to query chain
-  get query() {
+  public get query(): ApiPromise['query'] {
     return this.api.query
   }
 
+  public get consts(): ApiPromise['consts'] {
+    return this.api.consts
+  }
+
+  public get tx(): ApiPromise['tx'] {
+    return this.api.tx
+  }
+
+  public signAndSend(tx: SubmittableExtrinsic<'promise'>, account: string | AccountId): Promise<ISubmittableResult> {
+    return this.sender.signAndSend(tx, account)
+  }
+
+  public signAndSendMany(
+    txs: SubmittableExtrinsic<'promise'>[],
+    account: string | AccountId
+  ): Promise<ISubmittableResult[]> {
+    return Promise.all(txs.map((tx) => this.sender.signAndSend(tx, account)))
+  }
+
+  public signAndSendManyByMany(
+    txs: SubmittableExtrinsic<'promise'>[],
+    accounts: string[] | AccountId[]
+  ): Promise<ISubmittableResult[]> {
+    return Promise.all(txs.map((tx, i) => this.sender.signAndSend(tx, accounts[i])))
+  }
+
+  public getKeypair(address: string | AccountId): KeyringPair {
+    return this.factory.getKeypair(address)
+  }
+
+  public getSuri(address: string | AccountId): string {
+    return this.factory.getSuri(address)
+  }
+
   public enableDebugTxLogs(): void {
     this.sender.setLogLevel(LogLevel.Debug)
   }
@@ -169,7 +247,7 @@ export class Api {
     return this.factory.createCustomKeyPair(path)
   }
 
-  public keyGenInfo(): { start: number; final: number } {
+  public keyGenInfo(): KeyGenInfo {
     return this.factory.keyGenInfo()
   }
 
@@ -179,24 +257,7 @@ export class Api {
 
   // Well known WorkingGroup enum defined in runtime
   public getWorkingGroupString(workingGroup: WorkingGroups): string {
-    switch (workingGroup) {
-      case WorkingGroups.Storage:
-        return 'Storage'
-      case WorkingGroups.Content:
-        return 'Content'
-      case WorkingGroups.Gateway:
-        return 'Gateway'
-      case WorkingGroups.OperationsAlpha:
-        return 'OperationsAlpha'
-      case WorkingGroups.OperationsBeta:
-        return 'OperationsBeta'
-      case WorkingGroups.OperationsGamma:
-        return 'OperationsGamma'
-      case WorkingGroups.Distribution:
-        return 'Distribution'
-      default:
-        throw new Error(`Invalid working group string representation: ${workingGroup}`)
-    }
+    return workingGroupNameByGroup[workingGroup]
   }
 
   public async makeSudoCall(tx: SubmittableExtrinsic<'promise'>): Promise<ISubmittableResult> {
@@ -834,79 +895,52 @@ export class Api {
     return this.getBlockDuration().muln(durationInBlocks).toNumber()
   }
 
-  public findEventRecord(events: EventRecord[], section: string, method: string): EventRecord | undefined {
-    return events.find((record) => record.event.section === section && record.event.method === method)
-  }
-
-  public findMemberRegisteredEvent(events: EventRecord[]): MemberId | undefined {
-    const record = this.findEventRecord(events, 'members', 'MemberRegistered')
-    if (record) {
-      return record.event.data[0] as MemberId
-    }
-  }
-
-  public findProposalCreatedEvent(events: EventRecord[]): ProposalId | undefined {
-    const record = this.findEventRecord(events, 'proposalsEngine', 'ProposalCreated')
-    if (record) {
-      return record.event.data[1] as ProposalId
+  public findEvent<S extends EventSection, M extends EventMethod<S>>(
+    result: SubmittableResult | EventRecord[],
+    section: S,
+    method: M
+  ): EventType<S, M> | undefined {
+    if (Array.isArray(result)) {
+      return result.find(({ event }) => event.section === section && event.method === method)?.event as
+        | EventType<S, M>
+        | undefined
     }
-  }
-
-  public findOpeningAddedEvent(events: EventRecord[], workingGroup: WorkingGroups): OpeningId | undefined {
-    const record = this.findEventRecord(events, workingGroup, 'OpeningAdded')
-    if (record) {
-      return record.event.data[0] as OpeningId
-    }
-  }
-
-  public findLeaderSetEvent(events: EventRecord[], workingGroup: WorkingGroups): WorkerId | undefined {
-    const record = this.findEventRecord(events, workingGroup, 'LeaderSet')
-    if (record) {
-      return (record.event.data as unknown) as WorkerId
-    }
-  }
-
-  public findBeganApplicationReviewEvent(
-    events: EventRecord[],
-    workingGroup: WorkingGroups
-  ): ApplicationId | undefined {
-    const record = this.findEventRecord(events, workingGroup, 'BeganApplicationReview')
-    if (record) {
-      return (record.event.data as unknown) as ApplicationId
-    }
-  }
-
-  public findTerminatedLeaderEvent(events: EventRecord[], workingGroup: WorkingGroups): EventRecord | undefined {
-    return this.findEventRecord(events, workingGroup, 'TerminatedLeader')
-  }
-
-  public findWorkerRewardAmountUpdatedEvent(
-    events: EventRecord[],
-    workingGroup: WorkingGroups,
-    workerId: WorkerId
-  ): WorkerId | undefined {
-    const record = this.findEventRecord(events, workingGroup, 'WorkerRewardAmountUpdated')
-    if (record) {
-      const id = (record.event.data[0] as unknown) as WorkerId
-      if (id.eq(workerId)) {
-        return workerId
-      }
+    return result.findRecord(section, method)?.event as EventType<S, M> | undefined
+  }
+
+  public getEvent<S extends EventSection, M extends EventMethod<S>>(
+    result: SubmittableResult | EventRecord[],
+    section: S,
+    method: M
+  ): EventType<S, M> {
+    const event = this.findEvent(result, section, method)
+    if (!event) {
+      throw new Error(
+        `Cannot find expected ${section}.${method} event in result: ${JSON.stringify(
+          Array.isArray(result) ? result.map((e) => e.toHuman()) : result.toHuman()
+        )}`
+      )
     }
-  }
-
-  public findStakeDecreasedEvent(events: EventRecord[], workingGroup: WorkingGroups): EventRecord | undefined {
-    return this.findEventRecord(events, workingGroup, 'StakeDecreased')
-  }
-
-  public findStakeSlashedEvent(events: EventRecord[], workingGroup: WorkingGroups): EventRecord | undefined {
-    return this.findEventRecord(events, workingGroup, 'StakeSlashed')
-  }
-
-  public findMintCapacityChangedEvent(events: EventRecord[], workingGroup: WorkingGroups): BN | undefined {
-    const record = this.findEventRecord(events, workingGroup, 'MintCapacityChanged')
-    if (record) {
-      return (record.event.data[1] as unknown) as BN
+    return event
+  }
+
+  public findEvents<S extends EventSection, M extends EventMethod<S>>(
+    result: SubmittableResult | EventRecord[],
+    section: S,
+    method: M,
+    expectedCount?: number
+  ): EventType<S, M>[] {
+    const events = Array.isArray(result)
+      ? result.filter(({ event }) => event.section === section && event.method === method).map(({ event }) => event)
+      : result.filterRecords(section, method).map((r) => r.event)
+    if (expectedCount && events.length !== expectedCount) {
+      throw new Error(
+        `Unexpected count of ${section}.${method} events in result: ${JSON.stringify(
+          Array.isArray(result) ? result.map((e) => e.toHuman()) : result.toHuman()
+        )}. ` + `Expected: ${expectedCount}, Got: ${events.length}`
+      )
     }
+    return (events.sort((a, b) => new BN(a.index).cmp(new BN(b.index))) as unknown) as EventType<S, M>[]
   }
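+
+  // Minimal usage sketch (as in the channel/video creation helpers below):
+  //   const event = this.getEvent(result.events, 'content', 'VideoCreated')
+  //   const videoId = event.data[2]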
 
   // Subscribe to system events, resolves to an InvertedPromise or rejects if subscription fails.
@@ -941,26 +975,6 @@ export class Api {
     return invertedPromise
   }
 
-  public findOpeningFilledEvent(
-    events: EventRecord[],
-    workingGroup: WorkingGroups
-  ): ApplicationIdToWorkerIdMap | undefined {
-    const record = this.findEventRecord(events, workingGroup, 'OpeningFilled')
-    if (record) {
-      return (record.event.data[1] as unknown) as ApplicationIdToWorkerIdMap
-    }
-  }
-
-  public findApplicationReviewBeganEvent(
-    events: EventRecord[],
-    workingGroup: WorkingGroups
-  ): ApplicationId | undefined {
-    const record = this.findEventRecord(events, workingGroup, 'BeganApplicationReview')
-    if (record) {
-      return (record.event.data as unknown) as ApplicationId
-    }
-  }
-
   public async getWorkingGroupMintCapacity(module: WorkingGroups): Promise<BN> {
     const mintId: MintId = await this.api.query[module].mint<MintId>()
     const mint: Mint = await this.api.query.minting.mints<Mint>(mintId)
@@ -1735,13 +1749,11 @@ export class Api {
   }
 
   public async getWorkerRoleAccounts(workerIds: WorkerId[], module: WorkingGroups): Promise<string[]> {
-    const entries: [StorageKey, Worker][] = await this.api.query[module].workerById.entries<Worker>()
+    const workers = await this.api.query[module].workerById.multi<Worker>(workerIds)
 
-    return entries
-      .filter(([idKey]) => {
-        return workerIds.includes(idKey.args[0] as WorkerId)
-      })
-      .map(([, worker]) => worker.role_account_id.toString())
+    return workers.map((worker) => {
+      return worker.role_account_id.toString()
+    })
   }
 
   public async getStake(id: StakeId): Promise<Stake> {
@@ -1824,13 +1836,8 @@ export class Api {
 
     const result = await this.sender.signAndSend(tx, memberControllerAccount)
 
-    const record = this.findEventRecord(result.events, 'content', 'ChannelCreated')
-    if (record) {
-      return record.event.data[1] as ChannelId
-    }
-
-    // TODO: get error from 'result'
-    throw new Error('Failed to create channel')
+    const event = this.getEvent(result.events, 'content', 'ChannelCreated')
+    return event.data[1]
   }
 
   // Create a mock video, throws on failure
@@ -1849,13 +1856,8 @@ export class Api {
 
     const result = await this.sender.signAndSend(tx, memberControllerAccount)
 
-    const record = this.findEventRecord(result.events, 'content', 'VideoCreated')
-    if (record) {
-      return record.event.data[2] as VideoId
-    }
-
-    // TODO: get error from 'result'
-    throw new Error('Failed to create video')
+    const event = this.getEvent(result.events, 'content', 'VideoCreated')
+    return event.data[2]
   }
 
   async createChannelCategoryAsLead(name: string): Promise<ISubmittableResult> {

+ 56 - 67
tests/network-tests/src/QueryNodeApi.ts

@@ -1,6 +1,18 @@
-import { gql, ApolloClient, ApolloQueryResult, NormalizedCacheObject } from '@apollo/client'
 import { BLOCKTIME } from './consts'
+import { gql, ApolloClient, ApolloQueryResult, DocumentNode, NormalizedCacheObject } from '@apollo/client/core'
 import { extendDebug, Debugger } from './Debugger'
+import {
+  StorageDataObjectFieldsFragment,
+  GetDataObjectsByIdsQuery,
+  GetDataObjectsByIdsQueryVariables,
+  GetDataObjectsByIds,
+  ChannelFieldsFragment,
+  GetChannelById,
+  GetChannelByIdQuery,
+  GetChannelByIdQueryVariables,
+} from './graphql/generated/queries'
+import { Maybe } from './graphql/generated/schema'
+import { OperationDefinitionNode } from 'graphql'
 import { Utils } from './utils'
 
 export class QueryNodeApi {
@@ -56,80 +68,57 @@ export class QueryNodeApi {
     }
   }
 
-  public async getChannelbyHandle(handle: string): Promise<ApolloQueryResult<any>> {
-    const GET_CHANNEL_BY_TITLE = gql`
-      query($handle: String!) {
-        channels(where: { handle_eq: $handle }) {
-          handle
-          description
-          coverPhotoUrl
-          avatarPhotoUrl
-          isPublic
-          isCurated
-          videos {
-            title
-            description
-            duration
-            thumbnailUrl
-            isExplicit
-            isPublic
-          }
-        }
-      }
-    `
-
-    return await this.queryNodeProvider.query({ query: GET_CHANNEL_BY_TITLE, variables: { handle } })
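+  // Logs the operation name and variables of each outgoing query via the query-node debug logger.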
+  private debugQuery(query: DocumentNode, args: Record<string, unknown>): void {
+    const queryDef = query.definitions.find((d) => d.kind === 'OperationDefinition') as OperationDefinitionNode
+    this.queryDebug(`${queryDef.name!.value}(${JSON.stringify(args)})`)
   }
 
-  public async performFullTextSearchOnChannelTitle(text: string): Promise<ApolloQueryResult<any>> {
-    const FULL_TEXT_SEARCH_ON_CHANNEL_TITLE = gql`
-      query($text: String!) {
-        search(text: $text) {
-          item {
-            ... on Channel {
-              handle
-              description
-            }
-          }
-        }
-      }
-    `
+  // Query entity by unique input
+  private async uniqueEntityQuery<
+    QueryT extends { [k: string]: Maybe<Record<string, unknown>> | undefined },
+    VariablesT extends Record<string, unknown>
+  >(
+    query: DocumentNode,
+    variables: VariablesT,
+    resultKey: keyof QueryT
+  ): Promise<Required<QueryT>[keyof QueryT] | null> {
+    this.debugQuery(query, variables)
+    return (await this.queryNodeProvider.query<QueryT, VariablesT>({ query, variables })).data[resultKey] || null
+  }
 
-    return await this.queryNodeProvider.query({ query: FULL_TEXT_SEARCH_ON_CHANNEL_TITLE, variables: { text } })
+  // Query entities by "non-unique" input and return first result
+  private async firstEntityQuery<QueryT extends { [k: string]: unknown[] }, VariablesT extends Record<string, unknown>>(
+    query: DocumentNode,
+    variables: VariablesT,
+    resultKey: keyof QueryT
+  ): Promise<QueryT[keyof QueryT][number] | null> {
+    this.debugQuery(query, variables)
+    return (await this.queryNodeProvider.query<QueryT, VariablesT>({ query, variables })).data[resultKey][0] || null
   }
 
-  public async performFullTextSearchOnVideoTitle(text: string): Promise<ApolloQueryResult<any>> {
-    const FULL_TEXT_SEARCH_ON_VIDEO_TITLE = gql`
-      query($text: String!) {
-        search(text: $text) {
-          item {
-            ... on Video {
-              title
-            }
-          }
-        }
-      }
-    `
+  // Query multiple entities
+  private async multipleEntitiesQuery<
+    QueryT extends { [k: string]: unknown[] },
+    VariablesT extends Record<string, unknown>
+  >(query: DocumentNode, variables: VariablesT, resultKey: keyof QueryT): Promise<QueryT[keyof QueryT]> {
+    this.debugQuery(query, variables)
+    return (await this.queryNodeProvider.query<QueryT, VariablesT>({ query, variables })).data[resultKey]
+  }
 
-    return await this.queryNodeProvider.query({ query: FULL_TEXT_SEARCH_ON_VIDEO_TITLE, variables: { text } })
+  public async channelById(id: string): Promise<Maybe<ChannelFieldsFragment>> {
+    return this.uniqueEntityQuery<GetChannelByIdQuery, GetChannelByIdQueryVariables>(
+      GetChannelById,
+      { id },
+      'channelByUniqueInput'
+    )
   }
 
-  public async performWhereQueryByVideoTitle(title: string): Promise<ApolloQueryResult<any>> {
-    const WHERE_QUERY_ON_VIDEO_TITLE = gql`
-      query($title: String!) {
-        videos(where: { title_eq: $title }) {
-          media {
-            location {
-              __typename
-              ... on JoystreamMediaLocation {
-                dataObjectId
-              }
-            }
-          }
-        }
-      }
-    `
-    return await this.queryNodeProvider.query({ query: WHERE_QUERY_ON_VIDEO_TITLE, variables: { title } })
+  public async getDataObjectsByIds(ids: string[]): Promise<StorageDataObjectFieldsFragment[]> {
+    return this.multipleEntitiesQuery<GetDataObjectsByIdsQuery, GetDataObjectsByIdsQueryVariables>(
+      GetDataObjectsByIds,
+      { ids },
+      'storageDataObjects'
+    )
   }
 
   public async getChannels(): Promise<ApolloQueryResult<any>> {
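
A minimal usage sketch of the new typed helpers, assuming a constructed QueryNodeApi instance named `query`; the ids and the fragment field accessed below are illustrative assumptions, not part of this diff:

```ts
// Hypothetical caller of the typed query helpers shown above.
const channel = await query.channelById('1') // ChannelFieldsFragment | null | undefined
if (channel) {
  // Exact fields depend on the generated ChannelFieldsFragment (assumed to include `id`).
  console.log('channel id:', channel.id)
}

const objects = await query.getDataObjectsByIds(['0', '1'])
console.log(`fetched ${objects.length} data objects`)
```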

+ 25 - 9
tests/network-tests/src/Scenario.ts

@@ -1,5 +1,5 @@
 import { WsProvider } from '@polkadot/api'
-import { ApiFactory, Api } from './Api'
+import { ApiFactory, Api, KeyGenInfo } from './Api'
 import { QueryNodeApi } from './QueryNodeApi'
 import { config } from 'dotenv'
 import { ApolloClient, InMemoryCache, HttpLink } from '@apollo/client'
@@ -18,22 +18,29 @@ export type ScenarioProps = {
   job: (label: string, flows: Flow[] | Flow) => Job
 }
 
+const OUTPUT_FILE_PATH = 'output.json'
+
+type TestsOutput = {
+  accounts: { [k: string]: number }
+  keyIds: KeyGenInfo
+  miniSecret: string
+}
+
 function writeOutput(api: Api, miniSecret: string) {
-  const outputFilename = 'output.json'
-  console.error('Writing generated account to', outputFilename)
+  console.error('Writing generated accounts to', OUTPUT_FILE_PATH)
   // account to key ids
   const accounts = api.getAllGeneratedAccounts()
 
   // first and last key id used to generate keys in this scenario
   const keyIds = api.keyGenInfo()
 
-  const output = {
+  const output: TestsOutput = {
     accounts,
     keyIds,
     miniSecret,
   }
 
-  fs.writeFileSync(outputFilename, JSON.stringify(output, undefined, 2))
+  fs.writeFileSync(OUTPUT_FILE_PATH, JSON.stringify(output, undefined, 2))
 }
 
 export async function scenario(scene: (props: ScenarioProps) => Promise<void>): Promise<void> {
@@ -54,12 +61,21 @@ export async function scenario(scene: (props: ScenarioProps) => Promise<void>):
 
   const api = apiFactory.getApi('Key Generation')
 
-  // Generate all key ids before START_KEY_ID
-  const startKeyId = parseInt(env.START_KEY_ID || '0')
-  if (startKeyId) {
-    api.createKeyPairs(startKeyId)
+  // Generate all key ids based on REUSE_KEYS or START_KEY_ID (if provided)
+  const reuseKeys = Boolean(env.REUSE_KEYS)
+  let startKeyId: number
+  let customKeys: string[] = []
+  if (reuseKeys) {
+    const output = JSON.parse(fs.readFileSync(OUTPUT_FILE_PATH).toString()) as TestsOutput
+    startKeyId = output.keyIds.final
+    customKeys = output.keyIds.custom
+  } else {
+    startKeyId = parseInt(env.START_KEY_ID || '0')
   }
 
+  api.createKeyPairs(startKeyId)
+  customKeys.forEach((k) => api.createCustomKeyPair(k))
+
   const queryNodeUrl: string = env.QUERY_NODE_URL || 'http://127.0.0.1:8081/graphql'
 
   const queryNodeProvider = new ApolloClient({
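
For reference, a sketch of the `output.json` shape that the `REUSE_KEYS` path parses, inferred from the `TestsOutput` type above; all values are made up, and `KeyGenInfo` may carry more fields than the two read here (`final` and `custom`):

```ts
// Illustrative output.json content (run with REUSE_KEYS=1 to consume it).
const exampleOutput = {
  accounts: { '5Fexample...': 0 }, // account address -> key id (made-up address)
  keyIds: { final: 42, custom: ['//CustomKey'] }, // the fields read by the scenario above
  miniSecret: '0xdeadbeef', // made-up value
}
```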

+ 27 - 0
tests/network-tests/src/apis/distributorNode/.openapi-generator-ignore

@@ -0,0 +1,27 @@
+# OpenAPI Generator Ignore
+# Generated by openapi-generator https://github.com/openapitools/openapi-generator
+
+# Use this file to prevent files from being overwritten by the generator.
+# The patterns closely follow those of .gitignore and .dockerignore.
+
+# As an example, the C# client generator defines ApiClient.cs.
+# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line:
+#ApiClient.cs
+
+# You can match any string of characters against a directory, file or extension with a single asterisk (*):
+#foo/*/qux
+# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux
+
+# You can recursively match patterns against a directory, file or extension with a double asterisk (**):
+#foo/**/qux
+# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux
+
+# You can also negate patterns with an exclamation (!).
+# For example, you can ignore all files in a docs folder with the file extension .md:
+#docs/*.md
+# Then explicitly reverse the ignore rule for a single file:
+#!docs/README.md
+
+git_push.sh
+.npmignore
+.gitignore

+ 5 - 0
tests/network-tests/src/apis/distributorNode/.openapi-generator/FILES

@@ -0,0 +1,5 @@
+api.ts
+base.ts
+common.ts
+configuration.ts
+index.ts

+ 1 - 0
tests/network-tests/src/apis/distributorNode/.openapi-generator/VERSION

@@ -0,0 +1 @@
+5.2.1

+ 410 - 0
tests/network-tests/src/apis/distributorNode/api.ts

@@ -0,0 +1,410 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Distributor node public API
+ * Distributor node public API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+import { Configuration } from './configuration'
+import globalAxios, { AxiosPromise, AxiosInstance } from 'axios'
+// Some imports not used depending on template conditions
+// @ts-ignore
+import {
+  DUMMY_BASE_URL,
+  assertParamExists,
+  setApiKeyToObject,
+  setBasicAuthToObject,
+  setBearerAuthToObject,
+  setOAuthToObject,
+  setSearchParams,
+  serializeDataIfNeeded,
+  toPathString,
+  createRequestFunction,
+} from './common'
+// @ts-ignore
+import { BASE_PATH, COLLECTION_FORMATS, RequestArgs, BaseAPI, RequiredError } from './base'
+
+/**
+ * @type BucketsResponse
+ * @export
+ */
+export type BucketsResponse = BucketsResponseOneOf | BucketsResponseOneOf1
+
+/**
+ *
+ * @export
+ * @interface BucketsResponseOneOf
+ */
+export interface BucketsResponseOneOf {
+  /**
+   *
+   * @type {Array<string>}
+   * @memberof BucketsResponseOneOf
+   */
+  bucketIds: Array<string>
+}
+/**
+ *
+ * @export
+ * @interface BucketsResponseOneOf1
+ */
+export interface BucketsResponseOneOf1 {
+  /**
+   *
+   * @type {number}
+   * @memberof BucketsResponseOneOf1
+   */
+  allByWorkerId: number
+}
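
Because `BucketsResponse` is generated from an OpenAPI `oneOf`, callers must narrow it structurally; a minimal sketch:

```ts
// Structural narrowing of the generated union (sketch).
function bucketIdsOrCount(res: BucketsResponse): string[] | number {
  if ('bucketIds' in res) {
    return res.bucketIds // BucketsResponseOneOf: explicit bucket ids
  }
  return res.allByWorkerId // BucketsResponseOneOf1: worker-scoped variant
}
```
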
+/**
+ *
+ * @export
+ * @interface ErrorResponse
+ */
+export interface ErrorResponse {
+  /**
+   *
+   * @type {string}
+   * @memberof ErrorResponse
+   */
+  type?: string
+  /**
+   *
+   * @type {string}
+   * @memberof ErrorResponse
+   */
+  message: string
+}
+/**
+ *
+ * @export
+ * @interface StatusResponse
+ */
+export interface StatusResponse {
+  /**
+   *
+   * @type {string}
+   * @memberof StatusResponse
+   */
+  id: string
+  /**
+   *
+   * @type {number}
+   * @memberof StatusResponse
+   */
+  objectsInCache: number
+  /**
+   *
+   * @type {number}
+   * @memberof StatusResponse
+   */
+  storageLimit: number
+  /**
+   *
+   * @type {number}
+   * @memberof StatusResponse
+   */
+  storageUsed: number
+  /**
+   *
+   * @type {number}
+   * @memberof StatusResponse
+   */
+  uptime: number
+  /**
+   *
+   * @type {number}
+   * @memberof StatusResponse
+   */
+  downloadsInProgress: number
+}
+
+/**
+ * DefaultApi - axios parameter creator
+ * @export
+ */
+export const DefaultApiAxiosParamCreator = function (configuration?: Configuration) {
+  return {
+    /**
+     * Returns a media file.
+     * @param {string} objectId Data Object ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    publicAsset: async (objectId: string, options: any = {}): Promise<RequestArgs> => {
+      // verify required parameter 'objectId' is not null or undefined
+      assertParamExists('publicAsset', 'objectId', objectId)
+      const localVarPath = `/assets/{objectId}`.replace(`{${'objectId'}}`, encodeURIComponent(String(objectId)))
+      // use dummy base URL string because the URL constructor only accepts absolute URLs.
+      const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL)
+      let baseOptions
+      if (configuration) {
+        baseOptions = configuration.baseOptions
+      }
+
+      const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options }
+      const localVarHeaderParameter = {} as any
+      const localVarQueryParameter = {} as any
+
+      setSearchParams(localVarUrlObj, localVarQueryParameter, options.query)
+      let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}
+      localVarRequestOptions.headers = { ...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers }
+
+      return {
+        url: toPathString(localVarUrlObj),
+        options: localVarRequestOptions,
+      }
+    },
+    /**
+     * Returns asset response headers (cache status, content type and/or length, accepted ranges, etc.).
+     * @param {string} objectId Data Object ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    publicAssetHead: async (objectId: string, options: any = {}): Promise<RequestArgs> => {
+      // verify required parameter 'objectId' is not null or undefined
+      assertParamExists('publicAssetHead', 'objectId', objectId)
+      const localVarPath = `/assets/{objectId}`.replace(`{${'objectId'}}`, encodeURIComponent(String(objectId)))
+      // use dummy base URL string because the URL constructor only accepts absolute URLs.
+      const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL)
+      let baseOptions
+      if (configuration) {
+        baseOptions = configuration.baseOptions
+      }
+
+      const localVarRequestOptions = { method: 'HEAD', ...baseOptions, ...options }
+      const localVarHeaderParameter = {} as any
+      const localVarQueryParameter = {} as any
+
+      setSearchParams(localVarUrlObj, localVarQueryParameter, options.query)
+      let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}
+      localVarRequestOptions.headers = { ...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers }
+
+      return {
+        url: toPathString(localVarUrlObj),
+        options: localVarRequestOptions,
+      }
+    },
+    /**
+     * Returns the list of distributed buckets.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    publicBuckets: async (options: any = {}): Promise<RequestArgs> => {
+      const localVarPath = `/buckets`
+      // use dummy base URL string because the URL constructor only accepts absolute URLs.
+      const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL)
+      let baseOptions
+      if (configuration) {
+        baseOptions = configuration.baseOptions
+      }
+
+      const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options }
+      const localVarHeaderParameter = {} as any
+      const localVarQueryParameter = {} as any
+
+      setSearchParams(localVarUrlObj, localVarQueryParameter, options.query)
+      let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}
+      localVarRequestOptions.headers = { ...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers }
+
+      return {
+        url: toPathString(localVarUrlObj),
+        options: localVarRequestOptions,
+      }
+    },
+    /**
+     * Returns a JSON object describing the current node status.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    publicStatus: async (options: any = {}): Promise<RequestArgs> => {
+      const localVarPath = `/status`
+      // use dummy base URL string because the URL constructor only accepts absolute URLs.
+      const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL)
+      let baseOptions
+      if (configuration) {
+        baseOptions = configuration.baseOptions
+      }
+
+      const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options }
+      const localVarHeaderParameter = {} as any
+      const localVarQueryParameter = {} as any
+
+      setSearchParams(localVarUrlObj, localVarQueryParameter, options.query)
+      let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}
+      localVarRequestOptions.headers = { ...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers }
+
+      return {
+        url: toPathString(localVarUrlObj),
+        options: localVarRequestOptions,
+      }
+    },
+  }
+}
+
+/**
+ * DefaultApi - functional programming interface
+ * @export
+ */
+export const DefaultApiFp = function (configuration?: Configuration) {
+  const localVarAxiosParamCreator = DefaultApiAxiosParamCreator(configuration)
+  return {
+    /**
+     * Returns a media file.
+     * @param {string} objectId Data Object ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    async publicAsset(
+      objectId: string,
+      options?: any
+    ): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<any>> {
+      const localVarAxiosArgs = await localVarAxiosParamCreator.publicAsset(objectId, options)
+      return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)
+    },
+    /**
+     * Returns asset response headers (cache status, content type and/or length, accepted ranges, etc.).
+     * @param {string} objectId Data Object ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    async publicAssetHead(
+      objectId: string,
+      options?: any
+    ): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<void>> {
+      const localVarAxiosArgs = await localVarAxiosParamCreator.publicAssetHead(objectId, options)
+      return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)
+    },
+    /**
+     * Returns the list of distributed buckets.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    async publicBuckets(
+      options?: any
+    ): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<BucketsResponse>> {
+      const localVarAxiosArgs = await localVarAxiosParamCreator.publicBuckets(options)
+      return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)
+    },
+    /**
+     * Returns a JSON object describing the current node status.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    async publicStatus(
+      options?: any
+    ): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<StatusResponse>> {
+      const localVarAxiosArgs = await localVarAxiosParamCreator.publicStatus(options)
+      return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)
+    },
+  }
+}
+
+/**
+ * DefaultApi - factory interface
+ * @export
+ */
+export const DefaultApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) {
+  const localVarFp = DefaultApiFp(configuration)
+  return {
+    /**
+     * Returns a media file.
+     * @param {string} objectId Data Object ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    publicAsset(objectId: string, options?: any): AxiosPromise<any> {
+      return localVarFp.publicAsset(objectId, options).then((request) => request(axios, basePath))
+    },
+    /**
+     * Returns asset response headers (cache status, content type and/or length, accepted ranges, etc.).
+     * @param {string} objectId Data Object ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    publicAssetHead(objectId: string, options?: any): AxiosPromise<void> {
+      return localVarFp.publicAssetHead(objectId, options).then((request) => request(axios, basePath))
+    },
+    /**
+     * Returns the list of distributed buckets.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    publicBuckets(options?: any): AxiosPromise<BucketsResponse> {
+      return localVarFp.publicBuckets(options).then((request) => request(axios, basePath))
+    },
+    /**
+     * Returns a JSON object describing the current node status.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    publicStatus(options?: any): AxiosPromise<StatusResponse> {
+      return localVarFp.publicStatus(options).then((request) => request(axios, basePath))
+    },
+  }
+}
+
+/**
+ * DefaultApi - object-oriented interface
+ * @export
+ * @class DefaultApi
+ * @extends {BaseAPI}
+ */
+export class DefaultApi extends BaseAPI {
+  /**
+   * Returns a media file.
+   * @param {string} objectId Data Object ID
+   * @param {*} [options] Override http request option.
+   * @throws {RequiredError}
+   * @memberof DefaultApi
+   */
+  public publicAsset(objectId: string, options?: any) {
+    return DefaultApiFp(this.configuration)
+      .publicAsset(objectId, options)
+      .then((request) => request(this.axios, this.basePath))
+  }
+
+  /**
+   * Returns asset response headers (cache status, content type and/or length, accepted ranges, etc.).
+   * @param {string} objectId Data Object ID
+   * @param {*} [options] Override http request option.
+   * @throws {RequiredError}
+   * @memberof DefaultApi
+   */
+  public publicAssetHead(objectId: string, options?: any) {
+    return DefaultApiFp(this.configuration)
+      .publicAssetHead(objectId, options)
+      .then((request) => request(this.axios, this.basePath))
+  }
+
+  /**
+   * Returns the list of distributed buckets.
+   * @param {*} [options] Override http request option.
+   * @throws {RequiredError}
+   * @memberof DefaultApi
+   */
+  public publicBuckets(options?: any) {
+    return DefaultApiFp(this.configuration)
+      .publicBuckets(options)
+      .then((request) => request(this.axios, this.basePath))
+  }
+
+  /**
+   * Returns a JSON object describing the current node status.
+   * @param {*} [options] Override http request option.
+   * @throws {RequiredError}
+   * @memberof DefaultApi
+   */
+  public publicStatus(options?: any) {
+    return DefaultApiFp(this.configuration)
+      .publicStatus(options)
+      .then((request) => request(this.axios, this.basePath))
+  }
+}
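
A hedged usage sketch of the generated distributor-node client; the base path mirrors BASE_PATH from ./base.ts, and the relative import path is an assumption about where the caller lives:

```ts
import { Configuration, DefaultApi } from './apis/distributorNode'

// Point the client at a running distributor node (assumed local address).
const api = new DefaultApi(new Configuration({ basePath: 'http://localhost:3334/api/v1' }))

async function checkNode(): Promise<void> {
  const { data: status } = await api.publicStatus()
  console.log(`node ${status.id}: ${status.objectsInCache} objects in cache`)

  const { data: buckets } = await api.publicBuckets()
  console.log('distributed buckets:', buckets)
}
```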

+ 74 - 0
tests/network-tests/src/apis/distributorNode/base.ts

@@ -0,0 +1,74 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Distributor node public API
+ * Distributor node public API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+import { Configuration } from './configuration'
+// Some imports not used depending on template conditions
+// @ts-ignore
+import globalAxios, { AxiosPromise, AxiosInstance } from 'axios'
+
+export const BASE_PATH = 'http://localhost:3334/api/v1'.replace(/\/+$/, '')
+
+/**
+ *
+ * @export
+ */
+export const COLLECTION_FORMATS = {
+  csv: ',',
+  ssv: ' ',
+  tsv: '\t',
+  pipes: '|',
+}
+
+/**
+ *
+ * @export
+ * @interface RequestArgs
+ */
+export interface RequestArgs {
+  url: string
+  options: any
+}
+
+/**
+ *
+ * @export
+ * @class BaseAPI
+ */
+export class BaseAPI {
+  protected configuration: Configuration | undefined
+
+  constructor(
+    configuration?: Configuration,
+    protected basePath: string = BASE_PATH,
+    protected axios: AxiosInstance = globalAxios
+  ) {
+    if (configuration) {
+      this.configuration = configuration
+      this.basePath = configuration.basePath || this.basePath
+    }
+  }
+}
+
+/**
+ *
+ * @export
+ * @class RequiredError
+ * @extends {Error}
+ */
+export class RequiredError extends Error {
+  name: 'RequiredError' = 'RequiredError'
+  constructor(public field: string, msg?: string) {
+    super(msg)
+  }
+}

+ 150 - 0
tests/network-tests/src/apis/distributorNode/common.ts

@@ -0,0 +1,150 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Distributor node public API
+ * Distributor node public API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+import { Configuration } from './configuration'
+import { RequiredError, RequestArgs } from './base'
+import { AxiosInstance } from 'axios'
+
+/**
+ *
+ * @export
+ */
+export const DUMMY_BASE_URL = 'https://example.com'
+
+/**
+ *
+ * @throws {RequiredError}
+ * @export
+ */
+export const assertParamExists = function (functionName: string, paramName: string, paramValue: unknown) {
+  if (paramValue === null || paramValue === undefined) {
+    throw new RequiredError(
+      paramName,
+      `Required parameter ${paramName} was null or undefined when calling ${functionName}.`
+    )
+  }
+}
+
+/**
+ *
+ * @export
+ */
+export const setApiKeyToObject = async function (object: any, keyParamName: string, configuration?: Configuration) {
+  if (configuration && configuration.apiKey) {
+    const localVarApiKeyValue =
+      typeof configuration.apiKey === 'function' ? await configuration.apiKey(keyParamName) : await configuration.apiKey
+    object[keyParamName] = localVarApiKeyValue
+  }
+}
+
+/**
+ *
+ * @export
+ */
+export const setBasicAuthToObject = function (object: any, configuration?: Configuration) {
+  if (configuration && (configuration.username || configuration.password)) {
+    object['auth'] = { username: configuration.username, password: configuration.password }
+  }
+}
+
+/**
+ *
+ * @export
+ */
+export const setBearerAuthToObject = async function (object: any, configuration?: Configuration) {
+  if (configuration && configuration.accessToken) {
+    const accessToken =
+      typeof configuration.accessToken === 'function'
+        ? await configuration.accessToken()
+        : await configuration.accessToken
+    object['Authorization'] = 'Bearer ' + accessToken
+  }
+}
+
+/**
+ *
+ * @export
+ */
+export const setOAuthToObject = async function (
+  object: any,
+  name: string,
+  scopes: string[],
+  configuration?: Configuration
+) {
+  if (configuration && configuration.accessToken) {
+    const localVarAccessTokenValue =
+      typeof configuration.accessToken === 'function'
+        ? await configuration.accessToken(name, scopes)
+        : await configuration.accessToken
+    object['Authorization'] = 'Bearer ' + localVarAccessTokenValue
+  }
+}
+
+/**
+ *
+ * @export
+ */
+export const setSearchParams = function (url: URL, ...objects: any[]) {
+  const searchParams = new URLSearchParams(url.search)
+  for (const object of objects) {
+    for (const key in object) {
+      if (Array.isArray(object[key])) {
+        searchParams.delete(key)
+        for (const item of object[key]) {
+          searchParams.append(key, item)
+        }
+      } else {
+        searchParams.set(key, object[key])
+      }
+    }
+  }
+  url.search = searchParams.toString()
+}
+
+/**
+ *
+ * @export
+ */
+export const serializeDataIfNeeded = function (value: any, requestOptions: any, configuration?: Configuration) {
+  const nonString = typeof value !== 'string'
+  const needsSerialization =
+    nonString && configuration && configuration.isJsonMime
+      ? configuration.isJsonMime(requestOptions.headers['Content-Type'])
+      : nonString
+  return needsSerialization ? JSON.stringify(value !== undefined ? value : {}) : value || ''
+}
+
+/**
+ *
+ * @export
+ */
+export const toPathString = function (url: URL) {
+  return url.pathname + url.search + url.hash
+}
+
+/**
+ *
+ * @export
+ */
+export const createRequestFunction = function (
+  axiosArgs: RequestArgs,
+  globalAxios: AxiosInstance,
+  BASE_PATH: string,
+  configuration?: Configuration
+) {
+  return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => {
+    const axiosRequestArgs = { ...axiosArgs.options, url: (configuration?.basePath || basePath) + axiosArgs.url }
+    return axios.request(axiosRequestArgs)
+  }
+}
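
The two-step pattern above, where the param creator only builds `RequestArgs` and `createRequestFunction` defers the actual axios call, is what lets every generated method accept per-call axios and base-path overrides; a sketch under those assumptions (import paths are illustrative):

```ts
import axios from 'axios'
import { createRequestFunction } from './apis/distributorNode/common' // the helper above
import { RequestArgs } from './apis/distributorNode/base'

const args: RequestArgs = { url: '/status', options: { method: 'GET' } }

// The axios instance and base path are bound only when the returned
// function is finally invoked, so callers can substitute either one.
const send = createRequestFunction(args, axios, 'http://localhost:3334/api/v1')
send().then((res) => console.log(res.status))
```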

+ 108 - 0
tests/network-tests/src/apis/distributorNode/configuration.ts

@@ -0,0 +1,108 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Distributor node public API
+ * Distributor node public API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+export interface ConfigurationParameters {
+  apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>)
+  username?: string
+  password?: string
+  accessToken?:
+    | string
+    | Promise<string>
+    | ((name?: string, scopes?: string[]) => string)
+    | ((name?: string, scopes?: string[]) => Promise<string>)
+  basePath?: string
+  baseOptions?: any
+  formDataCtor?: new () => any
+}
+
+export class Configuration {
+  /**
+   * parameter for apiKey security
+   * @param name security name
+   * @memberof Configuration
+   */
+  apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>)
+  /**
+   * parameter for basic security
+   *
+   * @type {string}
+   * @memberof Configuration
+   */
+  username?: string
+  /**
+   * parameter for basic security
+   *
+   * @type {string}
+   * @memberof Configuration
+   */
+  password?: string
+  /**
+   * parameter for oauth2 security
+   * @param name security name
+   * @param scopes oauth2 scope
+   * @memberof Configuration
+   */
+  accessToken?:
+    | string
+    | Promise<string>
+    | ((name?: string, scopes?: string[]) => string)
+    | ((name?: string, scopes?: string[]) => Promise<string>)
+  /**
+   * override base path
+   *
+   * @type {string}
+   * @memberof Configuration
+   */
+  basePath?: string
+  /**
+   * base options for axios calls
+   *
+   * @type {any}
+   * @memberof Configuration
+   */
+  baseOptions?: any
+  /**
+   * The FormData constructor that will be used to create multipart form data
+   * requests. You can inject this here so that execution environments that
+   * do not support the FormData class can still run the generated client.
+   *
+   * @type {new () => FormData}
+   */
+  formDataCtor?: new () => any
+
+  constructor(param: ConfigurationParameters = {}) {
+    this.apiKey = param.apiKey
+    this.username = param.username
+    this.password = param.password
+    this.accessToken = param.accessToken
+    this.basePath = param.basePath
+    this.baseOptions = param.baseOptions
+    this.formDataCtor = param.formDataCtor
+  }
+
+  /**
+   * Check if the given MIME is a JSON MIME.
+   * JSON MIME examples:
+   *   application/json
+   *   application/json; charset=UTF8
+   *   APPLICATION/JSON
+   *   application/vnd.company+json
+   * @param mime - MIME (Multipurpose Internet Mail Extensions)
+   * @return True if the given MIME is JSON, false otherwise.
+   */
+  public isJsonMime(mime: string): boolean {
+    const jsonMime: RegExp = new RegExp('^(application/json|[^;/ \t]+/[^;/ \t]+[+]json)[ \t]*(;.*)?$', 'i')
+    return mime !== null && (jsonMime.test(mime) || mime.toLowerCase() === 'application/json-patch+json')
+  }
+}
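
A few illustrative calls against the MIME check above, using the Configuration class from this file:

```ts
import { Configuration } from './apis/distributorNode/configuration'

const config = new Configuration()
config.isJsonMime('application/json') // true
config.isJsonMime('APPLICATION/JSON; charset=UTF8') // true (case-insensitive, parameters allowed)
config.isJsonMime('application/vnd.company+json') // true (vendor +json suffix)
config.isJsonMime('text/html') // false
```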

+ 16 - 0
tests/network-tests/src/apis/distributorNode/index.ts

@@ -0,0 +1,16 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Distributor node public API
+ * Distributor node public API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+export * from './api'
+export * from './configuration'

+ 27 - 0
tests/network-tests/src/apis/storageNode/.openapi-generator-ignore

@@ -0,0 +1,27 @@
+# OpenAPI Generator Ignore
+# Generated by openapi-generator https://github.com/openapitools/openapi-generator
+
+# Use this file to prevent files from being overwritten by the generator.
+# The patterns closely follow those of .gitignore and .dockerignore.
+
+# As an example, the C# client generator defines ApiClient.cs.
+# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line:
+#ApiClient.cs
+
+# You can match any string of characters against a directory, file or extension with a single asterisk (*):
+#foo/*/qux
+# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux
+
+# You can recursively match patterns against a directory, file or extension with a double asterisk (**):
+#foo/**/qux
+# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux
+
+# You can also negate patterns with an exclamation (!).
+# For example, you can ignore all files in a docs folder with the file extension .md:
+#docs/*.md
+# Then explicitly reverse the ignore rule for a single file:
+#!docs/README.md
+
+git_push.sh
+.npmignore
+.gitignore

+ 5 - 0
tests/network-tests/src/apis/storageNode/.openapi-generator/FILES

@@ -0,0 +1,5 @@
+api.ts
+base.ts
+common.ts
+configuration.ts
+index.ts

+ 1 - 0
tests/network-tests/src/apis/storageNode/.openapi-generator/VERSION

@@ -0,0 +1 @@
+5.2.1

+ 738 - 0
tests/network-tests/src/apis/storageNode/api.ts

@@ -0,0 +1,738 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Storage node API
+ * Storage node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+import { Configuration } from './configuration'
+import globalAxios, { AxiosPromise, AxiosInstance } from 'axios'
+// Some imports not used depending on template conditions
+// @ts-ignore
+import {
+  DUMMY_BASE_URL,
+  assertParamExists,
+  setApiKeyToObject,
+  setBasicAuthToObject,
+  setBearerAuthToObject,
+  setOAuthToObject,
+  setSearchParams,
+  serializeDataIfNeeded,
+  toPathString,
+  createRequestFunction,
+} from './common'
+// @ts-ignore
+import { BASE_PATH, COLLECTION_FORMATS, RequestArgs, BaseAPI, RequiredError } from './base'
+
+/**
+ *
+ * @export
+ * @interface DataStatsResponse
+ */
+export interface DataStatsResponse {
+  /**
+   *
+   * @type {number}
+   * @memberof DataStatsResponse
+   */
+  totalSize: number
+  /**
+   *
+   * @type {number}
+   * @memberof DataStatsResponse
+   */
+  objectNumber: number
+  /**
+   *
+   * @type {number}
+   * @memberof DataStatsResponse
+   */
+  tempDirSize?: number
+  /**
+   *
+   * @type {number}
+   * @memberof DataStatsResponse
+   */
+  tempDownloads?: number
+}
+/**
+ *
+ * @export
+ * @interface ErrorResponse
+ */
+export interface ErrorResponse {
+  /**
+   *
+   * @type {string}
+   * @memberof ErrorResponse
+   */
+  type?: string
+  /**
+   *
+   * @type {string}
+   * @memberof ErrorResponse
+   */
+  message: string
+}
+/**
+ *
+ * @export
+ * @interface InlineResponse201
+ */
+export interface InlineResponse201 {
+  /**
+   *
+   * @type {string}
+   * @memberof InlineResponse201
+   */
+  id?: string
+}
+/**
+ *
+ * @export
+ * @interface TokenRequest
+ */
+export interface TokenRequest {
+  /**
+   *
+   * @type {TokenRequestData}
+   * @memberof TokenRequest
+   */
+  data: TokenRequestData
+  /**
+   *
+   * @type {string}
+   * @memberof TokenRequest
+   */
+  signature: string
+}
+/**
+ *
+ * @export
+ * @interface TokenRequestData
+ */
+export interface TokenRequestData {
+  /**
+   *
+   * @type {number}
+   * @memberof TokenRequestData
+   */
+  memberId: number
+  /**
+   *
+   * @type {string}
+   * @memberof TokenRequestData
+   */
+  accountId: string
+  /**
+   *
+   * @type {number}
+   * @memberof TokenRequestData
+   */
+  dataObjectId: number
+  /**
+   *
+   * @type {number}
+   * @memberof TokenRequestData
+   */
+  storageBucketId: number
+  /**
+   *
+   * @type {string}
+   * @memberof TokenRequestData
+   */
+  bagId: string
+}
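
For orientation, an illustrative `TokenRequest` payload; every value below is made up, and producing `signature` (presumably a signature over `data` by the member's account) happens outside the generated client:

```ts
// Hypothetical upload-token request body (illustrative values only).
const tokenRequest: TokenRequest = {
  data: {
    memberId: 0,
    accountId: '5Fexample...', // made-up account address
    dataObjectId: 1,
    storageBucketId: 0,
    bagId: 'dynamic:channel:1', // the bag id format here is an assumption
  },
  signature: '0xsignature', // produced out of band
}
```
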
+/**
+ *
+ * @export
+ * @interface VersionResponse
+ */
+export interface VersionResponse {
+  /**
+   *
+   * @type {string}
+   * @memberof VersionResponse
+   */
+  version: string
+  /**
+   *
+   * @type {string}
+   * @memberof VersionResponse
+   */
+  userAgent?: string
+}
+
+/**
+ * FilesApi - axios parameter creator
+ * @export
+ */
+export const FilesApiAxiosParamCreator = function (configuration?: Configuration) {
+  return {
+    /**
+     * Returns a media file.
+     * @param {string} id Data object ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    filesApiGetFile: async (id: string, options: any = {}): Promise<RequestArgs> => {
+      // verify required parameter 'id' is not null or undefined
+      assertParamExists('filesApiGetFile', 'id', id)
+      const localVarPath = `/files/{id}`.replace(`{${'id'}}`, encodeURIComponent(String(id)))
+      // use dummy base URL string because the URL constructor only accepts absolute URLs.
+      const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL)
+      let baseOptions
+      if (configuration) {
+        baseOptions = configuration.baseOptions
+      }
+
+      const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options }
+      const localVarHeaderParameter = {} as any
+      const localVarQueryParameter = {} as any
+
+      setSearchParams(localVarUrlObj, localVarQueryParameter, options.query)
+      let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}
+      localVarRequestOptions.headers = { ...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers }
+
+      return {
+        url: toPathString(localVarUrlObj),
+        options: localVarRequestOptions,
+      }
+    },
+    /**
+     * Returns a media file's headers.
+     * @param {string} id Data object ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    filesApiGetFileHeaders: async (id: string, options: any = {}): Promise<RequestArgs> => {
+      // verify required parameter 'id' is not null or undefined
+      assertParamExists('filesApiGetFileHeaders', 'id', id)
+      const localVarPath = `/files/{id}`.replace(`{${'id'}}`, encodeURIComponent(String(id)))
+      // use dummy base URL string because the URL constructor only accepts absolute URLs.
+      const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL)
+      let baseOptions
+      if (configuration) {
+        baseOptions = configuration.baseOptions
+      }
+
+      const localVarRequestOptions = { method: 'HEAD', ...baseOptions, ...options }
+      const localVarHeaderParameter = {} as any
+      const localVarQueryParameter = {} as any
+
+      setSearchParams(localVarUrlObj, localVarQueryParameter, options.query)
+      let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}
+      localVarRequestOptions.headers = { ...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers }
+
+      return {
+        url: toPathString(localVarUrlObj),
+        options: localVarRequestOptions,
+      }
+    },
+    /**
+     * Upload data
+     * @param {string} dataObjectId Data object runtime ID
+     * @param {string} storageBucketId Storage bucket ID
+     * @param {string} bagId Bag ID
+     * @param {any} [file] Data file
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    filesApiUploadFile: async (
+      dataObjectId: string,
+      storageBucketId: string,
+      bagId: string,
+      file?: any,
+      options: any = {}
+    ): Promise<RequestArgs> => {
+      // verify required parameter 'dataObjectId' is not null or undefined
+      assertParamExists('filesApiUploadFile', 'dataObjectId', dataObjectId)
+      // verify required parameter 'storageBucketId' is not null or undefined
+      assertParamExists('filesApiUploadFile', 'storageBucketId', storageBucketId)
+      // verify required parameter 'bagId' is not null or undefined
+      assertParamExists('filesApiUploadFile', 'bagId', bagId)
+      const localVarPath = `/files`
+      // use dummy base URL string because the URL constructor only accepts absolute URLs.
+      const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL)
+      let baseOptions
+      if (configuration) {
+        baseOptions = configuration.baseOptions
+      }
+
+      const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options }
+      const localVarHeaderParameter = {} as any
+      const localVarQueryParameter = {} as any
+      const localVarFormParams = new ((configuration && configuration.formDataCtor) || FormData)()
+
+      if (file !== undefined) {
+        localVarFormParams.append('file', file as any)
+      }
+
+      if (dataObjectId !== undefined) {
+        localVarFormParams.append('dataObjectId', dataObjectId as any)
+      }
+
+      if (storageBucketId !== undefined) {
+        localVarFormParams.append('storageBucketId', storageBucketId as any)
+      }
+
+      if (bagId !== undefined) {
+        localVarFormParams.append('bagId', bagId as any)
+      }
+
+      localVarHeaderParameter['Content-Type'] = 'multipart/form-data'
+
+      setSearchParams(localVarUrlObj, localVarQueryParameter, options.query)
+      let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}
+      localVarRequestOptions.headers = { ...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers }
+      localVarRequestOptions.data = localVarFormParams
+
+      return {
+        url: toPathString(localVarUrlObj),
+        options: localVarRequestOptions,
+      }
+    },
+  }
+}
+
+/**
+ * FilesApi - functional programming interface
+ * @export
+ */
+export const FilesApiFp = function (configuration?: Configuration) {
+  const localVarAxiosParamCreator = FilesApiAxiosParamCreator(configuration)
+  return {
+    /**
+     * Returns a media file.
+     * @param {string} id Data object ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    async filesApiGetFile(
+      id: string,
+      options?: any
+    ): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<any>> {
+      const localVarAxiosArgs = await localVarAxiosParamCreator.filesApiGetFile(id, options)
+      return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)
+    },
+    /**
+     * Returns a media file's headers.
+     * @param {string} id Data object ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    async filesApiGetFileHeaders(
+      id: string,
+      options?: any
+    ): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<void>> {
+      const localVarAxiosArgs = await localVarAxiosParamCreator.filesApiGetFileHeaders(id, options)
+      return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)
+    },
+    /**
+     * Upload data
+     * @param {string} dataObjectId Data object runtime ID
+     * @param {string} storageBucketId Storage bucket ID
+     * @param {string} bagId Bag ID
+     * @param {any} [file] Data file
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    async filesApiUploadFile(
+      dataObjectId: string,
+      storageBucketId: string,
+      bagId: string,
+      file?: any,
+      options?: any
+    ): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<InlineResponse201>> {
+      const localVarAxiosArgs = await localVarAxiosParamCreator.filesApiUploadFile(
+        dataObjectId,
+        storageBucketId,
+        bagId,
+        file,
+        options
+      )
+      return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)
+    },
+  }
+}
+
+/**
+ * FilesApi - factory interface
+ * @export
+ */
+export const FilesApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) {
+  const localVarFp = FilesApiFp(configuration)
+  return {
+    /**
+     * Returns a media file.
+     * @param {string} id Data object ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    filesApiGetFile(id: string, options?: any): AxiosPromise<any> {
+      return localVarFp.filesApiGetFile(id, options).then((request) => request(axios, basePath))
+    },
+    /**
+     * Returns a media file's headers.
+     * @param {string} id Data object ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    filesApiGetFileHeaders(id: string, options?: any): AxiosPromise<void> {
+      return localVarFp.filesApiGetFileHeaders(id, options).then((request) => request(axios, basePath))
+    },
+    /**
+     * Upload data
+     * @param {string} dataObjectId Data object runtime ID
+     * @param {string} storageBucketId Storage bucket ID
+     * @param {string} bagId Bag ID
+     * @param {any} [file] Data file
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    filesApiUploadFile(
+      dataObjectId: string,
+      storageBucketId: string,
+      bagId: string,
+      file?: any,
+      options?: any
+    ): AxiosPromise<InlineResponse201> {
+      return localVarFp
+        .filesApiUploadFile(dataObjectId, storageBucketId, bagId, file, options)
+        .then((request) => request(axios, basePath))
+    },
+  }
+}
+
+/**
+ * FilesApi - object-oriented interface
+ * @export
+ * @class FilesApi
+ * @extends {BaseAPI}
+ */
+export class FilesApi extends BaseAPI {
+  /**
+   * Returns a media file.
+   * @param {string} id Data object ID
+   * @param {*} [options] Override http request option.
+   * @throws {RequiredError}
+   * @memberof FilesApi
+   */
+  public filesApiGetFile(id: string, options?: any) {
+    return FilesApiFp(this.configuration)
+      .filesApiGetFile(id, options)
+      .then((request) => request(this.axios, this.basePath))
+  }
+
+  /**
+   * Returns a media file's headers.
+   * @param {string} id Data object ID
+   * @param {*} [options] Override http request option.
+   * @throws {RequiredError}
+   * @memberof FilesApi
+   */
+  public filesApiGetFileHeaders(id: string, options?: any) {
+    return FilesApiFp(this.configuration)
+      .filesApiGetFileHeaders(id, options)
+      .then((request) => request(this.axios, this.basePath))
+  }
+
+  /**
+   * Upload data
+   * @param {string} dataObjectId Data object runtime ID
+   * @param {string} storageBucketId Storage bucket ID
+   * @param {string} bagId Bag ID
+   * @param {any} [file] Data file
+   * @param {*} [options] Override http request option.
+   * @throws {RequiredError}
+   * @memberof FilesApi
+   */
+  public filesApiUploadFile(dataObjectId: string, storageBucketId: string, bagId: string, file?: any, options?: any) {
+    return FilesApiFp(this.configuration)
+      .filesApiUploadFile(dataObjectId, storageBucketId, bagId, file, options)
+      .then((request) => request(this.axios, this.basePath))
+  }
+}
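
A hedged sketch of uploading through the generated client from Node; injecting the form-data package through `formDataCtor` is an assumption about how the tests wire this up, since older Node runtimes lack a global FormData:

```ts
import FormData from 'form-data'
import { createReadStream } from 'fs'
import { Configuration, FilesApi } from './apis/storageNode'

const api = new FilesApi(
  new Configuration({
    basePath: 'http://localhost:3333/api/v1', // assumed local storage node
    formDataCtor: FormData, // see the Configuration class documented above
  })
)

async function upload(): Promise<void> {
  const res = await api.filesApiUploadFile(
    '1', // dataObjectId (illustrative)
    '0', // storageBucketId (illustrative)
    'dynamic:channel:1', // bagId (format is an assumption)
    createReadStream('./video.mp4') // any readable stream or buffer
  )
  console.log('created data object:', res.data.id)
}
```
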
+
+/**
+ * StateApi - axios parameter creator
+ * @export
+ */
+export const StateApiAxiosParamCreator = function (configuration?: Configuration) {
+  return {
+    /**
+     * Returns all local data objects.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    stateApiGetAllLocalDataObjects: async (options: any = {}): Promise<RequestArgs> => {
+      const localVarPath = `/state/data-objects`
+      // use dummy base URL string because the URL constructor only accepts absolute URLs.
+      const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL)
+      let baseOptions
+      if (configuration) {
+        baseOptions = configuration.baseOptions
+      }
+
+      const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options }
+      const localVarHeaderParameter = {} as any
+      const localVarQueryParameter = {} as any
+
+      setSearchParams(localVarUrlObj, localVarQueryParameter, options.query)
+      let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}
+      localVarRequestOptions.headers = { ...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers }
+
+      return {
+        url: toPathString(localVarUrlObj),
+        options: localVarRequestOptions,
+      }
+    },
+    /**
+     * Returns local data objects for the bag.
+     * @param {string} bagId Bag ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    stateApiGetLocalDataObjectsByBagId: async (bagId: string, options: any = {}): Promise<RequestArgs> => {
+      // verify required parameter 'bagId' is not null or undefined
+      assertParamExists('stateApiGetLocalDataObjectsByBagId', 'bagId', bagId)
+      const localVarPath = `/state/bags/{bagId}/data-objects`.replace(`{${'bagId'}}`, encodeURIComponent(String(bagId)))
+      // use dummy base URL string because the URL constructor only accepts absolute URLs.
+      const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL)
+      let baseOptions
+      if (configuration) {
+        baseOptions = configuration.baseOptions
+      }
+
+      const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options }
+      const localVarHeaderParameter = {} as any
+      const localVarQueryParameter = {} as any
+
+      setSearchParams(localVarUrlObj, localVarQueryParameter, options.query)
+      let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}
+      localVarRequestOptions.headers = { ...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers }
+
+      return {
+        url: toPathString(localVarUrlObj),
+        options: localVarRequestOptions,
+      }
+    },
+    /**
+     * Returns local upload directory stats.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    stateApiGetLocalDataStats: async (options: any = {}): Promise<RequestArgs> => {
+      const localVarPath = `/state/data`
+      // use dummy base URL string because the URL constructor only accepts absolute URLs.
+      const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL)
+      let baseOptions
+      if (configuration) {
+        baseOptions = configuration.baseOptions
+      }
+
+      const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options }
+      const localVarHeaderParameter = {} as any
+      const localVarQueryParameter = {} as any
+
+      setSearchParams(localVarUrlObj, localVarQueryParameter, options.query)
+      let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}
+      localVarRequestOptions.headers = { ...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers }
+
+      return {
+        url: toPathString(localVarUrlObj),
+        options: localVarRequestOptions,
+      }
+    },
+    /**
+     * Returns server version.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    stateApiGetVersion: async (options: any = {}): Promise<RequestArgs> => {
+      const localVarPath = `/version`
+      // use dummy base URL string because the URL constructor only accepts absolute URLs.
+      const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL)
+      let baseOptions
+      if (configuration) {
+        baseOptions = configuration.baseOptions
+      }
+
+      const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options }
+      const localVarHeaderParameter = {} as any
+      const localVarQueryParameter = {} as any
+
+      setSearchParams(localVarUrlObj, localVarQueryParameter, options.query)
+      let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}
+      localVarRequestOptions.headers = { ...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers }
+
+      return {
+        url: toPathString(localVarUrlObj),
+        options: localVarRequestOptions,
+      }
+    },
+  }
+}
+
+/**
+ * StateApi - functional programming interface
+ * @export
+ */
+export const StateApiFp = function (configuration?: Configuration) {
+  const localVarAxiosParamCreator = StateApiAxiosParamCreator(configuration)
+  return {
+    /**
+     * Returns all local data objects.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    async stateApiGetAllLocalDataObjects(
+      options?: any
+    ): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<string>>> {
+      const localVarAxiosArgs = await localVarAxiosParamCreator.stateApiGetAllLocalDataObjects(options)
+      return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)
+    },
+    /**
+     * Returns local data objects for the bag.
+     * @param {string} bagId Bag ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    async stateApiGetLocalDataObjectsByBagId(
+      bagId: string,
+      options?: any
+    ): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<string>>> {
+      const localVarAxiosArgs = await localVarAxiosParamCreator.stateApiGetLocalDataObjectsByBagId(bagId, options)
+      return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)
+    },
+    /**
+     * Returns local upload directory stats.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    async stateApiGetLocalDataStats(
+      options?: any
+    ): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<DataStatsResponse>> {
+      const localVarAxiosArgs = await localVarAxiosParamCreator.stateApiGetLocalDataStats(options)
+      return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)
+    },
+    /**
+     * Returns server version.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    async stateApiGetVersion(
+      options?: any
+    ): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<VersionResponse>> {
+      const localVarAxiosArgs = await localVarAxiosParamCreator.stateApiGetVersion(options)
+      return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)
+    },
+  }
+}
+
+/**
+ * StateApi - factory interface
+ * @export
+ */
+export const StateApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) {
+  const localVarFp = StateApiFp(configuration)
+  return {
+    /**
+     * Returns all local data objects.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    stateApiGetAllLocalDataObjects(options?: any): AxiosPromise<Array<string>> {
+      return localVarFp.stateApiGetAllLocalDataObjects(options).then((request) => request(axios, basePath))
+    },
+    /**
+     * Returns local data objects for the bag.
+     * @param {string} bagId Bag ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    stateApiGetLocalDataObjectsByBagId(bagId: string, options?: any): AxiosPromise<Array<string>> {
+      return localVarFp.stateApiGetLocalDataObjectsByBagId(bagId, options).then((request) => request(axios, basePath))
+    },
+    /**
+     * Returns local upload directory stats.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    stateApiGetLocalDataStats(options?: any): AxiosPromise<DataStatsResponse> {
+      return localVarFp.stateApiGetLocalDataStats(options).then((request) => request(axios, basePath))
+    },
+    /**
+     * Returns server version.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    stateApiGetVersion(options?: any): AxiosPromise<VersionResponse> {
+      return localVarFp.stateApiGetVersion(options).then((request) => request(axios, basePath))
+    },
+  }
+}
+
+/**
+ * StateApi - object-oriented interface
+ * @export
+ * @class StateApi
+ * @extends {BaseAPI}
+ */
+export class StateApi extends BaseAPI {
+  /**
+   * Returns all local data objects.
+   * @param {*} [options] Override http request option.
+   * @throws {RequiredError}
+   * @memberof StateApi
+   */
+  public stateApiGetAllLocalDataObjects(options?: any) {
+    return StateApiFp(this.configuration)
+      .stateApiGetAllLocalDataObjects(options)
+      .then((request) => request(this.axios, this.basePath))
+  }
+
+  /**
+   * Returns local data objects for the bag.
+   * @param {string} bagId Bag ID
+   * @param {*} [options] Override http request option.
+   * @throws {RequiredError}
+   * @memberof StateApi
+   */
+  public stateApiGetLocalDataObjectsByBagId(bagId: string, options?: any) {
+    return StateApiFp(this.configuration)
+      .stateApiGetLocalDataObjectsByBagId(bagId, options)
+      .then((request) => request(this.axios, this.basePath))
+  }
+
+  /**
+   * Returns local upload directory stats.
+   * @param {*} [options] Override http request option.
+   * @throws {RequiredError}
+   * @memberof StateApi
+   */
+  public stateApiGetLocalDataStats(options?: any) {
+    return StateApiFp(this.configuration)
+      .stateApiGetLocalDataStats(options)
+      .then((request) => request(this.axios, this.basePath))
+  }
+
+  /**
+   * Returns server version.
+   * @param {*} [options] Override http request option.
+   * @throws {RequiredError}
+   * @memberof StateApi
+   */
+  public stateApiGetVersion(options?: any) {
+    return StateApiFp(this.configuration)
+      .stateApiGetVersion(options)
+      .then((request) => request(this.axios, this.basePath))
+  }
+}
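
And a matching sketch for the state endpoints, under the same base-path assumption:

```ts
import { Configuration, StateApi } from './apis/storageNode'

const state = new StateApi(new Configuration({ basePath: 'http://localhost:3333/api/v1' }))

async function inspect(): Promise<void> {
  const { data: version } = await state.stateApiGetVersion()
  console.log('storage node version:', version.version)

  const { data: ids } = await state.stateApiGetLocalDataObjectsByBagId('dynamic:channel:1')
  console.log(`bag holds ${ids.length} local data objects`)
}
```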

+ 74 - 0
tests/network-tests/src/apis/storageNode/base.ts

@@ -0,0 +1,74 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Storage node API
+ * Storage node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+import { Configuration } from './configuration'
+// Some imports not used depending on template conditions
+// @ts-ignore
+import globalAxios, { AxiosPromise, AxiosInstance } from 'axios'
+
+export const BASE_PATH = 'http://localhost:3333/api/v1'.replace(/\/+$/, '')
+
+/**
+ *
+ * @export
+ */
+export const COLLECTION_FORMATS = {
+  csv: ',',
+  ssv: ' ',
+  tsv: '\t',
+  pipes: '|',
+}
+
+/**
+ *
+ * @export
+ * @interface RequestArgs
+ */
+export interface RequestArgs {
+  url: string
+  options: any
+}
+
+/**
+ *
+ * @export
+ * @class BaseAPI
+ */
+export class BaseAPI {
+  protected configuration: Configuration | undefined
+
+  constructor(
+    configuration?: Configuration,
+    protected basePath: string = BASE_PATH,
+    protected axios: AxiosInstance = globalAxios
+  ) {
+    if (configuration) {
+      this.configuration = configuration
+      this.basePath = configuration.basePath || this.basePath
+    }
+  }
+}
+
+/**
+ *
+ * @export
+ * @class RequiredError
+ * @extends {Error}
+ */
+export class RequiredError extends Error {
+  name: 'RequiredError' = 'RequiredError'
+  constructor(public field: string, msg?: string) {
+    super(msg)
+  }
+}

+ 150 - 0
tests/network-tests/src/apis/storageNode/common.ts

@@ -0,0 +1,150 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Storage node API
+ * Storage node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+import { Configuration } from './configuration'
+import { RequiredError, RequestArgs } from './base'
+import { AxiosInstance } from 'axios'
+
+/**
+ *
+ * @export
+ */
+export const DUMMY_BASE_URL = 'https://example.com'
+
+/**
+ *
+ * @throws {RequiredError}
+ * @export
+ */
+export const assertParamExists = function (functionName: string, paramName: string, paramValue: unknown) {
+  if (paramValue === null || paramValue === undefined) {
+    throw new RequiredError(
+      paramName,
+      `Required parameter ${paramName} was null or undefined when calling ${functionName}.`
+    )
+  }
+}
+
+/**
+ *
+ * @export
+ */
+export const setApiKeyToObject = async function (object: any, keyParamName: string, configuration?: Configuration) {
+  if (configuration && configuration.apiKey) {
+    const localVarApiKeyValue =
+      typeof configuration.apiKey === 'function' ? await configuration.apiKey(keyParamName) : await configuration.apiKey
+    object[keyParamName] = localVarApiKeyValue
+  }
+}
+
+/**
+ *
+ * @export
+ */
+export const setBasicAuthToObject = function (object: any, configuration?: Configuration) {
+  if (configuration && (configuration.username || configuration.password)) {
+    object['auth'] = { username: configuration.username, password: configuration.password }
+  }
+}
+
+/**
+ *
+ * @export
+ */
+export const setBearerAuthToObject = async function (object: any, configuration?: Configuration) {
+  if (configuration && configuration.accessToken) {
+    const accessToken =
+      typeof configuration.accessToken === 'function'
+        ? await configuration.accessToken()
+        : await configuration.accessToken
+    object['Authorization'] = 'Bearer ' + accessToken
+  }
+}
+
+/**
+ *
+ * @export
+ */
+export const setOAuthToObject = async function (
+  object: any,
+  name: string,
+  scopes: string[],
+  configuration?: Configuration
+) {
+  if (configuration && configuration.accessToken) {
+    const localVarAccessTokenValue =
+      typeof configuration.accessToken === 'function'
+        ? await configuration.accessToken(name, scopes)
+        : await configuration.accessToken
+    object['Authorization'] = 'Bearer ' + localVarAccessTokenValue
+  }
+}
+
+/**
+ *
+ * @export
+ */
+export const setSearchParams = function (url: URL, ...objects: any[]) {
+  const searchParams = new URLSearchParams(url.search)
+  for (const object of objects) {
+    for (const key in object) {
+      if (Array.isArray(object[key])) {
+        searchParams.delete(key)
+        for (const item of object[key]) {
+          searchParams.append(key, item)
+        }
+      } else {
+        searchParams.set(key, object[key])
+      }
+    }
+  }
+  url.search = searchParams.toString()
+}
+
+/**
+ *
+ * @export
+ */
+export const serializeDataIfNeeded = function (value: any, requestOptions: any, configuration?: Configuration) {
+  const nonString = typeof value !== 'string'
+  const needsSerialization =
+    nonString && configuration && configuration.isJsonMime
+      ? configuration.isJsonMime(requestOptions.headers['Content-Type'])
+      : nonString
+  return needsSerialization ? JSON.stringify(value !== undefined ? value : {}) : value || ''
+}
+
+/**
+ *
+ * @export
+ */
+export const toPathString = function (url: URL) {
+  return url.pathname + url.search + url.hash
+}
+
+/**
+ *
+ * @export
+ */
+export const createRequestFunction = function (
+  axiosArgs: RequestArgs,
+  globalAxios: AxiosInstance,
+  BASE_PATH: string,
+  configuration?: Configuration
+) {
+  return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => {
+    const axiosRequestArgs = { ...axiosArgs.options, url: (configuration?.basePath || basePath) + axiosArgs.url }
+    return axios.request(axiosRequestArgs)
+  }
+}
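
A sketch of how generated endpoint builders combine these helpers (the path here is illustrative): DUMMY_BASE_URL only anchors relative URL parsing, and toPathString strips the origin back off before createRequestFunction prepends the real base path.

import { DUMMY_BASE_URL, setSearchParams, toPathString } from './common'

// Compose '/state/data-objects?bagId=static%3Acouncil' the way a generated
// endpoint function would; the dummy origin never reaches axios.
const localVarUrl = new URL('/state/data-objects', DUMMY_BASE_URL)
setSearchParams(localVarUrl, { bagId: 'static:council' })
console.log(toPathString(localVarUrl)) // '/state/data-objects?bagId=static%3Acouncil'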

+ 108 - 0
tests/network-tests/src/apis/storageNode/configuration.ts

@@ -0,0 +1,108 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Storage node API
+ * Storage node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+export interface ConfigurationParameters {
+  apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>)
+  username?: string
+  password?: string
+  accessToken?:
+    | string
+    | Promise<string>
+    | ((name?: string, scopes?: string[]) => string)
+    | ((name?: string, scopes?: string[]) => Promise<string>)
+  basePath?: string
+  baseOptions?: any
+  formDataCtor?: new () => any
+}
+
+export class Configuration {
+  /**
+   * parameter for apiKey security
+   * @param name security name
+   * @memberof Configuration
+   */
+  apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>)
+  /**
+   * parameter for basic security
+   *
+   * @type {string}
+   * @memberof Configuration
+   */
+  username?: string
+  /**
+   * parameter for basic security
+   *
+   * @type {string}
+   * @memberof Configuration
+   */
+  password?: string
+  /**
+   * parameter for oauth2 security
+   * @param name security name
+   * @param scopes oauth2 scope
+   * @memberof Configuration
+   */
+  accessToken?:
+    | string
+    | Promise<string>
+    | ((name?: string, scopes?: string[]) => string)
+    | ((name?: string, scopes?: string[]) => Promise<string>)
+  /**
+   * override base path
+   *
+   * @type {string}
+   * @memberof Configuration
+   */
+  basePath?: string
+  /**
+   * base options for axios calls
+   *
+   * @type {any}
+   * @memberof Configuration
+   */
+  baseOptions?: any
+  /**
+   * The FormData constructor that will be used to create multipart form data
+   * requests. You can inject this here so that execution environments that
+   * do not support the FormData class can still run the generated client.
+   *
+   * @type {new () => FormData}
+   */
+  formDataCtor?: new () => any
+
+  constructor(param: ConfigurationParameters = {}) {
+    this.apiKey = param.apiKey
+    this.username = param.username
+    this.password = param.password
+    this.accessToken = param.accessToken
+    this.basePath = param.basePath
+    this.baseOptions = param.baseOptions
+    this.formDataCtor = param.formDataCtor
+  }
+
+  /**
+   * Check if the given MIME is a JSON MIME.
+   * JSON MIME examples:
+   *   application/json
+   *   application/json; charset=UTF8
+   *   APPLICATION/JSON
+   *   application/vnd.company+json
+   * @param mime - MIME (Multipurpose Internet Mail Extensions)
+   * @return True if the given MIME is JSON, false otherwise.
+   */
+  public isJsonMime(mime: string): boolean {
+    const jsonMime: RegExp = new RegExp('^(application/json|[^;/ \t]+/[^;/ \t]+[+]json)[ \t]*(;.*)?$', 'i')
+    return mime !== null && (jsonMime.test(mime) || mime.toLowerCase() === 'application/json-patch+json')
+  }
+}
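
The matcher accepts both plain and structured-suffix JSON MIME types, for example:

import { Configuration } from './configuration'

const config = new Configuration()
config.isJsonMime('application/json; charset=UTF8') // true
config.isJsonMime('application/vnd.company+json') // true
config.isJsonMime('application/json-patch+json') // true (also explicitly special-cased)
config.isJsonMime('text/html') // false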

+ 16 - 0
tests/network-tests/src/apis/storageNode/index.ts

@@ -0,0 +1,16 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Storage node API
+ * Storage node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+export * from './api'
+export * from './configuration'

+ 54 - 0
tests/network-tests/src/cli/base.ts

@@ -0,0 +1,54 @@
+import path from 'path'
+import { execFile } from 'child_process'
+import { promisify } from 'util'
+import { Sender } from '../sender'
+
+export type CommandResult = { stdout: string; stderr: string; out: string }
+
+export abstract class CLI {
+  protected env: Record<string, string>
+  protected readonly rootPath: string
+  protected readonly binPath: string
+  protected defaultArgs: string[]
+
+  constructor(rootPath: string, defaultEnv: Record<string, string> = {}, defaultArgs: string[] = []) {
+    this.rootPath = rootPath
+    this.binPath = path.resolve(rootPath, './bin/run')
+    this.env = {
+      ...process.env,
+      AUTO_CONFIRM: 'true',
+      FORCE_COLOR: '0',
+      ...defaultEnv,
+    }
+    this.defaultArgs = [...defaultArgs]
+  }
+
+  protected getArgs(customArgs: string[]): string[] {
+    return [...this.defaultArgs, ...customArgs]
+  }
+
+  protected getFlagStringValue(args: string[], flag: string, alias?: string): string | undefined {
+    const flagIndex = args.lastIndexOf(flag)
+    const aliasIndex = alias ? args.lastIndexOf(alias) : -1
+    const flagOrAliasIndex = Math.max(flagIndex, aliasIndex)
+    if (flagOrAliasIndex === -1) {
+      return undefined
+    }
+    const nextArg = args[flagOrAliasIndex + 1]
+    return nextArg
+  }
+
+  async run(command: string, customArgs: string[] = [], lockKeys: string[] = []): Promise<CommandResult> {
+    const pExecFile = promisify(execFile)
+    const { env } = this
+    const { stdout, stderr } = await Sender.asyncLock.acquire(
+      lockKeys.map((k) => `nonce-${k}`),
+      () =>
+        pExecFile(this.binPath, [command, ...this.getArgs(customArgs)], {
+          env,
+          cwd: this.rootPath,
+        })
+    )
+    return { stdout, stderr, out: stdout.trim() }
+  }
+}
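
A hypothetical subclass, purely to illustrate the contract (the CLI root path and lock key are made up):

import { CLI, CommandResult } from './base'

// Wraps an oclif-style CLI whose entrypoint lives at <rootPath>/bin/run.
class ExampleCLI extends CLI {
  constructor() {
    super('/path/to/example-cli', { LOG_LEVEL: 'warn' }, ['--json'])
  }
}

// Executes `bin/run status --json --verbose` with AUTO_CONFIRM=true in the env,
// serialized against other commands holding the same lock key.
async function exampleStatus(): Promise<CommandResult> {
  return new ExampleCLI().run('status', ['--verbose'], ['example-key'])
}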

+ 48 - 0
tests/network-tests/src/cli/distributor.ts

@@ -0,0 +1,48 @@
+import path from 'path'
+import { spawn } from 'child_process'
+import { DistributorNodeConfiguration } from '@joystream/distributor-cli/src/types/generated/ConfigJson'
+import { CLI, CommandResult } from './base'
+import { WorkerId } from '@joystream/types/working-group'
+import { ProcessManager } from './utils'
+import Keyring from '@polkadot/keyring'
+
+const CLI_ROOT_PATH = path.resolve(__dirname, '../../../../distributor-node')
+
+export class DistributorCLI extends CLI {
+  protected keys: string[]
+
+  constructor(keyUris: string[]) {
+    const keys: DistributorNodeConfiguration['keys'] = keyUris.map((suri) => ({
+      suri,
+    })) as DistributorNodeConfiguration['keys']
+    const defaultEnv = {
+      JOYSTREAM_DISTRIBUTOR__KEYS: JSON.stringify(keys),
+    }
+    super(CLI_ROOT_PATH, defaultEnv)
+    const keyring = new Keyring({ type: 'sr25519' })
+    keyUris.forEach((uri) => keyring.addFromUri(uri))
+    this.keys = keyring.getPairs().map((p) => p.address)
+  }
+
+  async run(command: string, customArgs: string[] = [], keyLocks?: string[]): Promise<CommandResult> {
+    return super.run(command, customArgs, keyLocks || this.keys)
+  }
+
+  async spawnServer(
+    operatorId: number | WorkerId,
+    port = 3334,
+    buckets: number[] | 'all' = 'all'
+  ): Promise<ProcessManager> {
+    const { env } = this
+    const serverEnv = {
+      ...env,
+      JOYSTREAM_DISTRIBUTOR__PORT: port.toString(),
+      JOYSTREAM_DISTRIBUTOR__WORKER_ID: operatorId.toString(),
+      JOYSTREAM_DISTRIBUTOR__BUCKETS: buckets === 'all' ? 'all' : JSON.stringify(buckets),
+    }
+    const serverProcess = spawn(this.binPath, ['start'], { env: serverEnv, cwd: this.rootPath })
+    const serverManager = new ProcessManager('Distributor node server', serverProcess, 'stdout')
+    await serverManager.untilOutput(`listening on port ${port}`)
+    return serverManager
+  }
+}
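
A hedged usage sketch: the //Alice key URI, worker id 0 and the port are local-testing assumptions (the commands themselves appear in the initDistributionBucket flow below):

import { DistributorCLI } from './distributor'

async function startDistributorNode(): Promise<void> {
  const cli = new DistributorCLI(['//Alice'])
  await cli.run('leader:set-buckets-per-bag-limit', ['--limit', '10'])
  // Resolves once 'listening on port 3334' appears on stdout.
  const server = await cli.spawnServer(0, 3334, 'all')
  server.expectAlive()
  server.kill()
}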

+ 47 - 0
tests/network-tests/src/cli/joystream.ts

@@ -0,0 +1,47 @@
+import { KeyringPair } from '@polkadot/keyring/types'
+import path from 'path'
+import { CLI, CommandResult } from './base'
+import { TmpFileManager } from './utils'
+import { ChannelInputParameters } from '@joystream/cli/src/Types'
+
+const CLI_ROOT_PATH = path.resolve(__dirname, '../../../../cli')
+
+export class JoystreamCLI extends CLI {
+  protected keys: string[] = []
+  protected tmpFileManager: TmpFileManager
+
+  constructor(tmpFileManager: TmpFileManager) {
+    const defaultEnv = {
+      HOME: tmpFileManager.tmpDataDir,
+    }
+    super(CLI_ROOT_PATH, defaultEnv)
+    this.tmpFileManager = tmpFileManager
+  }
+
+  async init(): Promise<void> {
+    await this.run('api:setUri', [process.env.NODE_URL || 'ws://127.0.0.1:9944'])
+    await this.run('api:setQueryNodeEndpoint', [process.env.QUERY_NODE_URL || 'http://127.0.0.1:8081/graphql'])
+  }
+
+  async importKey(pair: KeyringPair): Promise<void> {
+    const jsonFile = this.tmpFileManager.jsonFile(pair.toJson())
+    await this.run('account:import', [
+      '--backupFilePath',
+      jsonFile,
+      '--name',
+      `Account${this.keys.length}`,
+      '--password',
+      '',
+    ])
+    this.keys.push(pair.address)
+  }
+
+  async run(command: string, customArgs: string[] = [], keyLocks?: string[]): Promise<CommandResult> {
+    return super.run(command, customArgs, keyLocks || this.keys)
+  }
+
+  async createChannel(inputData: ChannelInputParameters, args: string[]): Promise<CommandResult> {
+    const jsonFile = this.tmpFileManager.jsonFile(inputData)
+    return this.run('content:createChannel', ['--input', jsonFile, ...args])
+  }
+}
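
A sketch of the intended call sequence; the //Alice key and the channel input are placeholders (ChannelInputParameters is defined by @joystream/cli):

import { cryptoWaitReady } from '@polkadot/util-crypto'
import { Keyring } from '@polkadot/keyring'
import { TmpFileManager } from './utils'
import { JoystreamCLI } from './joystream'

async function createExampleChannel(): Promise<void> {
  await cryptoWaitReady()
  const tmp = new TmpFileManager()
  const cli = new JoystreamCLI(tmp)
  await cli.init()
  await cli.importKey(new Keyring({ type: 'sr25519' }).createFromUri('//Alice'))
  const { out } = await cli.createChannel({ title: 'Example channel' }, ['--context', 'Member'])
  console.log(out)
}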

+ 66 - 0
tests/network-tests/src/cli/storage.ts

@@ -0,0 +1,66 @@
+import path from 'path'
+import { CLI, CommandResult } from './base'
+import { spawn } from 'child_process'
+import { v4 as uuid } from 'uuid'
+import { WorkerId } from '@joystream/types/working-group'
+import os from 'os'
+import { ProcessManager } from './utils'
+import fs from 'fs'
+import { Keyring } from '@polkadot/keyring'
+
+const CLI_ROOT_PATH = path.resolve(__dirname, '../../../../storage-node')
+
+export class StorageCLI extends CLI {
+  constructor(defaultSuri?: string) {
+    super(CLI_ROOT_PATH, undefined, defaultSuri ? ['--accountUri', defaultSuri] : [])
+  }
+
+  setDefaultSuri(defaultSuri: string): void {
+    this.defaultArgs = ['--accountUri', defaultSuri]
+  }
+
+  async run(command: string, customArgs: string[] = []): Promise<CommandResult> {
+    const args = this.getArgs(customArgs)
+    const accountUri = this.getFlagStringValue(args, '--accountUri', '-y')
+    if (!accountUri) {
+      throw new Error('Missing accountUri')
+    }
+    const accountKey = new Keyring({ type: 'sr25519' }).createFromUri(accountUri).address
+    return super.run(command, args, [accountKey])
+  }
+
+  async spawnServer(
+    operatorId: number | WorkerId,
+    port = 3333,
+    sync = true,
+    syncInterval = 1
+  ): Promise<ProcessManager> {
+    const queryNodeHost = new URL(process.env.QUERY_NODE_URL || '').host
+    const apiUrl = new URL(process.env.NODE_URL || '').toString()
+    const uploadsDir = path.join(os.tmpdir(), uuid())
+    fs.mkdirSync(uploadsDir)
+    const { env } = this
+    const args = [
+      ...this.defaultArgs,
+      '--worker',
+      operatorId.toString(),
+      '--port',
+      port.toString(),
+      '--queryNodeHost',
+      queryNodeHost,
+      '--apiUrl',
+      apiUrl,
+      '--uploads',
+      uploadsDir,
+    ]
+    if (sync) {
+      args.push('--sync')
+      args.push('--syncInterval')
+      args.push(syncInterval.toString())
+    }
+    const serverProcess = spawn(this.binPath, ['server', ...args], { env, cwd: this.rootPath })
+    const serverListener = new ProcessManager('Storage node server', serverProcess, 'stderr')
+    await serverListener.untilOutput('Listening')
+    return serverListener
+  }
+}
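
A usage sketch under the same assumptions the class itself makes: QUERY_NODE_URL and NODE_URL must be set before spawnServer is called, and //Alice stands in for the operator key:

import { StorageCLI } from './storage'

async function startStorageNode(): Promise<void> {
  const cli = new StorageCLI('//Alice') // becomes the default --accountUri for every command
  await cli.run('leader:update-bag-limit', ['--limit', '10'])
  // Waits for 'Listening' on stderr before resolving.
  const server = await cli.spawnServer(0, 3333)
  server.kill()
}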

+ 126 - 0
tests/network-tests/src/cli/utils.ts

@@ -0,0 +1,126 @@
+import fs, { mkdirSync, rmSync } from 'fs'
+import path from 'path'
+import { v4 as uuid } from 'uuid'
+import { ChildProcessWithoutNullStreams } from 'child_process'
+import { Utils } from '../utils'
+import _ from 'lodash'
+import bmp from 'bmp-js'
+import nodeCleanup from 'node-cleanup'
+
+export class TmpFileManager {
+  tmpDataDir: string
+
+  constructor(baseDir?: string) {
+    this.tmpDataDir = path.join(
+      baseDir || process.env.DATA_PATH || path.join(__filename, '../../../data'),
+      'joystream-testing',
+      uuid()
+    )
+    mkdirSync(this.tmpDataDir, { recursive: true })
+    nodeCleanup(() => {
+      rmSync(this.tmpDataDir, { recursive: true, force: true })
+    })
+  }
+
+  public jsonFile(value: unknown): string {
+    const tmpFilePath = path.join(this.tmpDataDir, `${uuid()}.json`)
+    fs.writeFileSync(tmpFilePath, JSON.stringify(value))
+    return tmpFilePath
+  }
+
+  public randomImgFile(width: number, height: number): string {
+    const data = Buffer.from(Array.from({ length: width * height * 3 }, () => _.random(0, 255)))
+    const rawBmp = bmp.encode({ width, height, data })
+    const tmpFilePath = path.join(this.tmpDataDir, `${uuid()}.bmp`)
+    fs.writeFileSync(tmpFilePath, rawBmp.data)
+    return tmpFilePath
+  }
+}
+
+type OutputType = 'stdout' | 'stderr'
+
+export class ProcessManager {
+  private label: string
+  private stdout = ''
+  private stderr = ''
+  private subprocess: ChildProcessWithoutNullStreams
+  private defaultOutput: OutputType
+  private onStdoutListener: (chunk: Uint8Array) => void
+  private onStderrListener: (chunk: Uint8Array) => void
+
+  constructor(
+    label: string,
+    subprocess: ChildProcessWithoutNullStreams,
+    defaultOutput: OutputType = 'stdout',
+    maxOutputSize = 1024 * 1024 * 10
+  ) {
+    this.label = label
+    this.defaultOutput = defaultOutput
+    this.subprocess = subprocess
+    const onDataListener = (outputType: OutputType) => (chunk: Uint8Array) => {
+      const chunkStr = Buffer.from(chunk).toString()
+      this[outputType] += chunkStr
+      if (this[outputType].length > maxOutputSize) {
+        this[outputType] = this[outputType].slice(-maxOutputSize)
+      }
+    }
+    this.onStdoutListener = onDataListener('stdout')
+    this.onStderrListener = onDataListener('stderr')
+
+    subprocess.stdout.on('data', this.onStdoutListener)
+    subprocess.stderr.on('data', this.onStderrListener)
+    nodeCleanup(() => {
+      console.log(this.recentOutput())
+      subprocess.kill()
+    })
+  }
+
+  private recentOutput() {
+    const length = parseInt(process.env.SUBPROCESSES_FINAL_LOG_LENGTH || '20')
+    return (
+      `\n\nLast STDOUT of ${this.label}:\n ${this.stdout.split('\n').slice(-length).join('\n')}\n\n` +
+      `Last STDERR of ${this.label}:\n ${this.stderr.split('\n').slice(-length).join('\n')}\n\n`
+    )
+  }
+
+  kill(): void {
+    this.subprocess.kill()
+  }
+
+  expectAlive(): void {
+    if (this.subprocess.exitCode !== null) {
+      throw new Error(`Process ${this.label} exited unexpectedly with code: ${this.subprocess.exitCode}`)
+    }
+  }
+
+  expectOutput(expected: string, outputType?: OutputType): void {
+    const outT = outputType || this.defaultOutput
+    if (!this[outT].includes(expected)) {
+      throw new Error(`Expected output: "${expected}" missing in ${this.label} process (${outT})`)
+    }
+  }
+
+  async untilOutput(
+    expected: string,
+    outputType?: 'stderr' | 'stdout',
+    failOnExit = true,
+    timeoutMs = 120000,
+    waitMs = 1000
+  ): Promise<void> {
+    const start = Date.now()
+    while (true) {
+      try {
+        this.expectOutput(expected, outputType)
+        return
+      } catch (e) {
+        if (failOnExit) {
+          this.expectAlive()
+        }
+        if (Date.now() - start + waitMs >= timeoutMs) {
+          throw new Error(`untilOutput timeout reached. ${(e as Error).message}`)
+        }
+        await Utils.wait(waitMs)
+      }
+    }
+  }
+}
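
How the two helpers above are typically combined; 'yarn start' and the 'ready' marker are placeholders, not a real service:

import { spawn } from 'child_process'
import { TmpFileManager, ProcessManager } from './utils'

async function example(): Promise<void> {
  const tmp = new TmpFileManager()
  const configPath = tmp.jsonFile({ port: 4000 }) // throwaway file, removed on process exit
  console.log('Wrote', configPath)

  const child = spawn('yarn', ['start'], { cwd: process.cwd() })
  const server = new ProcessManager('example server', child, 'stdout')
  await server.untilOutput('ready') // polls stdout; default timeout is 120s
  server.expectAlive()
  server.kill()
}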

+ 2 - 2
tests/network-tests/src/fixtures/content/activeVideoCounters.ts

@@ -193,7 +193,7 @@ export class ActiveVideoCountersFixture extends BaseQueryNodeFixture {
     this.debug('Checking video categories active video counters (2)')
     await this.assertCounterMatch('videoCategories', videoCategoryIds[1], oneMovedVideoCount)
 
-    /** Giza doesn't support changing channels - uncoment this on later releases where it's \\\
+    /** Giza doesn't support changing channels - uncomment this on later releases where it's supported
 
     // move one video to another channel
 
@@ -251,7 +251,7 @@ export class ActiveVideoCountersFixture extends BaseQueryNodeFixture {
       workerId
     )
 
-    const storageBucketId = this.api.findStorageBucketCreated(createBucketResult.events) as DataObjectId
+    const storageBucketId = this.api.getEvent(createBucketResult.events, 'storage', 'StorageBucketCreated').data[0]
 
     this.debug('Accepting storage bucket invitation')
     await this.api.acceptStorageBucketInvitation(storageGroupWorker.keyringPair.address, workerId, storageBucketId)

+ 1 - 1
tests/network-tests/src/fixtures/membershipModule.ts

@@ -42,7 +42,7 @@ export class BuyMembershipHappyCaseFixture extends BaseFixture {
         )
       )
     )
-      .map(({ events }) => this.api.findMemberRegisteredEvent(events))
+      .map((r) => this.api.findEvent(r, 'members', 'MemberRegistered')?.data[0])
       .filter((id) => id !== undefined) as MemberId[]
 
     this.debug(`Registered ${this.memberIds.length} new members`)

+ 13 - 18
tests/network-tests/src/fixtures/proposalsModule.ts

@@ -68,7 +68,7 @@ export class CreateWorkingGroupLeaderOpeningFixture extends BaseFixture {
       workingGroup: this.workingGroup,
     })
 
-    this.result = this.api.findProposalCreatedEvent(result.events)
+    this.result = this.api.findEvent(result, 'proposalsEngine', 'ProposalCreated')?.data[1]
   }
 }
 
@@ -110,7 +110,7 @@ export class BeginWorkingGroupLeaderApplicationReviewFixture extends BaseFixture
       this.workingGroup
     )
 
-    this.result = this.api.findProposalCreatedEvent(result.events)
+    this.result = this.api.findEvent(result, 'proposalsEngine', 'ProposalCreated')?.data[1]
   }
 }
 
@@ -176,7 +176,7 @@ export class FillLeaderOpeningProposalFixture extends BaseFixture {
       workingGroup: workingGroupString,
     })
 
-    this.result = this.api.findProposalCreatedEvent(result.events)
+    this.result = this.api.findEvent(result, 'proposalsEngine', 'ProposalCreated')?.data[1]
   }
 }
 
@@ -223,7 +223,7 @@ export class TerminateLeaderRoleProposalFixture extends BaseFixture {
       this.slash,
       workingGroupString
     )
-    this.result = this.api.findProposalCreatedEvent(result.events)
+    this.result = this.api.findEvent(result, 'proposalsEngine', 'ProposalCreated')?.data[1]
   }
 }
 
@@ -269,7 +269,7 @@ export class SetLeaderRewardProposalFixture extends BaseFixture {
       workingGroupString
     )
 
-    this.result = this.api.findProposalCreatedEvent(result.events)
+    this.result = this.api.findEvent(result, 'proposalsEngine', 'ProposalCreated')?.data[1]
   }
 }
 
@@ -315,7 +315,7 @@ export class DecreaseLeaderStakeProposalFixture extends BaseFixture {
       workingGroupString
     )
 
-    this.result = this.api.findProposalCreatedEvent(result.events)
+    this.result = this.api.findEvent(result, 'proposalsEngine', 'ProposalCreated')?.data[1]
   }
 }
 
@@ -359,7 +359,7 @@ export class SlashLeaderProposalFixture extends BaseFixture {
       this.slashAmount,
       workingGroupString
     )
-    this.result = this.api.findProposalCreatedEvent(result.events)
+    this.result = this.api.findEvent(result, 'proposalsEngine', 'ProposalCreated')?.data[1]
   }
 }
 
@@ -401,7 +401,7 @@ export class WorkingGroupMintCapacityProposalFixture extends BaseFixture {
       this.mintCapacity,
       workingGroupString
     )
-    this.result = this.api.findProposalCreatedEvent(result.events)
+    this.result = this.api.findEvent(result, 'proposalsEngine', 'ProposalCreated')?.data[1]
   }
 }
 
@@ -471,8 +471,7 @@ export class ElectionParametersProposalFixture extends BaseFixture {
       proposedMinVotingStake
     )
 
-    const proposalNumber = this.api.findProposalCreatedEvent(proposalCreationResult.events) as ProposalId
-    assert.notEqual(proposalNumber, undefined)
+    const proposalNumber = this.api.getEvent(proposalCreationResult, 'proposalsEngine', 'ProposalCreated').data[1]
 
     const approveProposalFixture = new VoteForProposalFixture(this.api, proposalNumber)
     await approveProposalFixture.execute()
@@ -563,8 +562,7 @@ export class SpendingProposalFixture extends BaseFixture {
       this.spendingBalance,
       fundingRecipient
     )
-    const proposalNumber: ProposalId = this.api.findProposalCreatedEvent(result.events) as ProposalId
-    assert.notEqual(proposalNumber, undefined)
+    const proposalNumber = this.api.getEvent(result, 'proposalsEngine', 'ProposalCreated').data[1]
 
     // Approving spending proposal
     const balanceBeforeMinting: BN = await this.api.getBalance(fundingRecipient)
@@ -609,8 +607,7 @@ export class TextProposalFixture extends BaseFixture {
 
     // Proposal creation
     const result = await this.api.proposeText(this.proposer, proposalStake, proposalTitle, description, proposalText)
-    const proposalNumber = this.api.findProposalCreatedEvent(result.events) as ProposalId
-    assert.notEqual(proposalNumber, undefined)
+    const proposalNumber = this.api.getEvent(result, 'proposalsEngine', 'ProposalCreated').data[1]
 
     // Approving text proposal
     const approveProposalFixture = new VoteForProposalFixture(this.api, proposalNumber)
@@ -651,8 +648,7 @@ export class ValidatorCountProposalFixture extends BaseFixture {
       proposalStake,
       this.proposedValidatorCount
     )
-    const proposalNumber: ProposalId = this.api.findProposalCreatedEvent(result.events) as ProposalId
-    assert.notEqual(proposalNumber, undefined)
+    const proposalNumber = this.api.getEvent(result, 'proposalsEngine', 'ProposalCreated').data[1]
 
     // Approving the proposal
     const approveProposalFixture = new VoteForProposalFixture(this.api, proposalNumber)
@@ -700,8 +696,7 @@ export class UpdateRuntimeFixture extends BaseFixture {
       'runtime to test proposal functionality' + uuid().substring(0, 8),
       runtime
     )
-    const proposalNumber: ProposalId = this.api.findProposalCreatedEvent(result.events) as ProposalId
-    assert.notEqual(proposalNumber, undefined)
+    const proposalNumber = this.api.getEvent(result, 'proposalsEngine', 'ProposalCreated').data[1]
 
     // Approving runtime update proposal
     const approveProposalFixture = new VoteForProposalFixture(this.api, proposalNumber)

+ 115 - 15
tests/network-tests/src/fixtures/workingGroupModule.ts

@@ -5,10 +5,11 @@ import { WorkingGroups } from '../WorkingGroups'
 import { KeyringPair } from '@polkadot/keyring/types'
 import { v4 as uuid } from 'uuid'
 import { RewardRelationship } from '@joystream/types/recurring-rewards'
-import { Application, ApplicationIdToWorkerIdMap, Worker, WorkerId } from '@joystream/types/working-group'
+import { Application, Worker, WorkerId } from '@joystream/types/working-group'
 import { Utils } from '../utils'
 import { ApplicationId, Opening as HiringOpening, OpeningId } from '@joystream/types/hiring'
-import { BaseFixture } from '../Fixture'
+import { BaseFixture, FixtureRunner } from '../Fixture'
+import { BuyMembershipHappyCaseFixture } from './membershipModule'
 
 export class AddWorkerOpeningFixture extends BaseFixture {
   private applicationStake: BN
@@ -77,7 +78,7 @@ export class AddWorkerOpeningFixture extends BaseFixture {
     )
 
     // We don't assert, we allow potential failure
-    this.result = this.api.findOpeningAddedEvent(result.events, this.module)
+    this.result = this.api.findEvent(result, this.module, 'OpeningAdded')?.data[0]
   }
 }
 
@@ -129,7 +130,7 @@ export class SudoAddLeaderOpeningFixture extends BaseFixture {
     )
 
     // We don't assert, we allow potential failure
-    this.result = this.api.findOpeningAddedEvent(result.events, this.module)
+    this.result = this.api.findEvent(result, this.module, 'OpeningAdded')?.data[0]
   }
 }
 
@@ -271,7 +272,7 @@ export class BeginApplicationReviewFixture extends BaseFixture {
     // const beginApplicantReviewPromise: Promise<ApplicationId> = this.api.expectApplicationReviewBegan()
     const result = await this.api.beginApplicantReview(leadAccount, this.openingId, this.module)
 
-    assert.notEqual(this.api.findApplicationReviewBeganEvent(result.events, this.module), undefined)
+    this.api.getEvent(result, this.module, 'BeganApplicationReview')
   }
 }
 
@@ -353,11 +354,7 @@ export class FillOpeningFixture extends BaseFixture {
       this.payoutInterval,
       this.module
     )
-    const applicationIdToWorkerIdMap = this.api.findOpeningFilledEvent(
-      result.events,
-      this.module
-    ) as ApplicationIdToWorkerIdMap
-    assert.notEqual(applicationIdToWorkerIdMap, undefined)
+    const applicationIdToWorkerIdMap = this.api.getEvent(result, this.module, 'OpeningFilled').data[1]
 
     this.workerIds = []
     applicationIdToWorkerIdMap.forEach((workerId) => this.workerIds.push(workerId))
@@ -413,11 +410,7 @@ export class SudoFillLeaderOpeningFixture extends BaseFixture {
     )
 
     // Assertions
-    const applicationIdToWorkerIdMap = this.api.findOpeningFilledEvent(
-      result.events,
-      this.module
-    ) as ApplicationIdToWorkerIdMap
-    assert.notEqual(applicationIdToWorkerIdMap, undefined)
+    const applicationIdToWorkerIdMap = this.api.getEvent(result, this.module, 'OpeningFilled').data[1]
     assert.equal(applicationIdToWorkerIdMap.size, 1)
 
     applicationIdToWorkerIdMap.forEach(async (workerId, applicationId) => {
@@ -721,3 +714,110 @@ export class AwaitPayoutFixture extends BaseFixture {
     )
   }
 }
+
+type HireWorkersConfig = {
+  applicationStake: BN
+  roleStake: BN
+  firstRewardInterval: BN
+  rewardInterval: BN
+  payoutAmount: BN
+  unstakingPeriod: BN
+  openingActivationDelay: BN
+}
+
+export class HireWorkersFixture extends BaseFixture {
+  private numberOfWorkers: number
+  private config: HireWorkersConfig
+  private module: WorkingGroups
+  private workerIds: WorkerId[] = []
+
+  constructor(api: Api, numberOfWorkers: number, module: WorkingGroups, config?: Partial<HireWorkersConfig>) {
+    super(api)
+    this.numberOfWorkers = numberOfWorkers
+    this.module = module
+    this.config = {
+      applicationStake: config?.applicationStake || new BN(process.env.WORKING_GROUP_APPLICATION_STAKE!),
+      roleStake: config?.roleStake || new BN(process.env.WORKING_GROUP_ROLE_STAKE!),
+      firstRewardInterval: config?.firstRewardInterval || new BN(process.env.SHORT_FIRST_REWARD_INTERVAL!),
+      rewardInterval: config?.rewardInterval || new BN(process.env.SHORT_REWARD_INTERVAL!),
+      payoutAmount: config?.payoutAmount || new BN(process.env.PAYOUT_AMOUNT!),
+      unstakingPeriod: config?.unstakingPeriod || new BN(process.env.STORAGE_WORKING_GROUP_UNSTAKING_PERIOD!),
+      openingActivationDelay: config?.openingActivationDelay || new BN(0),
+    }
+  }
+
+  public getHiredWorkers(): WorkerId[] {
+    if (!this.executed) {
+      throw new Error('Fixture not yet executed!')
+    }
+    return this.workerIds
+  }
+
+  public async execute(): Promise<void> {
+    const { api, module } = this
+    const {
+      applicationStake,
+      roleStake,
+      openingActivationDelay,
+      unstakingPeriod,
+      firstRewardInterval,
+      rewardInterval,
+      payoutAmount,
+    } = this.config
+
+    const lead = await api.getGroupLead(module)
+    assert(lead)
+
+    const paidTermsId = api.createPaidTermId(new BN(process.env.MEMBERSHIP_PAID_TERMS!))
+    const newMembers = api.createKeyPairs(this.numberOfWorkers).map(({ key }) => key.address)
+
+    const memberSetFixture = new BuyMembershipHappyCaseFixture(api, newMembers, paidTermsId)
+    // Register the new members
+    await new FixtureRunner(memberSetFixture).run()
+    const applicants = newMembers
+
+    const addWorkerOpeningFixture = new AddWorkerOpeningFixture(
+      api,
+      applicationStake,
+      roleStake,
+      openingActivationDelay,
+      unstakingPeriod,
+      module
+    )
+    // Add worker opening
+    await new FixtureRunner(addWorkerOpeningFixture).run()
+
+    // First apply for worker opening
+    const applyForWorkerOpeningFixture = new ApplyForOpeningFixture(
+      api,
+      applicants,
+      applicationStake,
+      roleStake,
+      addWorkerOpeningFixture.getCreatedOpeningId() as OpeningId,
+      module
+    )
+    await new FixtureRunner(applyForWorkerOpeningFixture).run()
+    const applicationIds = applyForWorkerOpeningFixture.getApplicationIds()
+
+    // Begin application review
+    const beginApplicationReviewFixture = new BeginApplicationReviewFixture(
+      api,
+      addWorkerOpeningFixture.getCreatedOpeningId() as OpeningId,
+      module
+    )
+    await new FixtureRunner(beginApplicationReviewFixture).run()
+
+    // Fill worker opening
+    const fillOpeningFixture = new FillOpeningFixture(
+      api,
+      applicationIds,
+      addWorkerOpeningFixture.getCreatedOpeningId() as OpeningId,
+      firstRewardInterval,
+      rewardInterval,
+      payoutAmount,
+      module
+    )
+    await new FixtureRunner(fillOpeningFixture).run()
+    this.workerIds = fillOpeningFixture.getWorkerIds()
+  }
+}
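
A sketch of how the new fixture composes into a flow. The Api import path and the choice of the storage group are assumptions; stakes fall back to the env-derived defaults:

import { Api } from '../Api'
import { FixtureRunner } from '../Fixture'
import { WorkingGroups } from '../WorkingGroups'
import { WorkerId } from '@joystream/types/working-group'
import { HireWorkersFixture } from './workingGroupModule'

// Hires three storage workers end-to-end: memberships, opening, applications, review, fill.
async function hireStorageWorkers(api: Api): Promise<WorkerId[]> {
  const fixture = new HireWorkersFixture(api, 3, WorkingGroups.Storage)
  await new FixtureRunner(fixture).run()
  return fixture.getHiredWorkers()
}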

+ 71 - 0
tests/network-tests/src/flows/clis/createChannel.ts

@@ -0,0 +1,71 @@
+import { FlowProps } from '../../Flow'
+import { extendDebug } from '../../Debugger'
+import { JoystreamCLI } from '../../cli/joystream'
+import { BuyMembershipHappyCaseFixture } from '../../fixtures/membershipModule'
+import { BN } from '@polkadot/util'
+import { FixtureRunner } from '../../Fixture'
+import { TmpFileManager } from '../../cli/utils'
+import { assert } from 'chai'
+import { Utils } from '../../utils'
+import { statSync } from 'fs'
+
+export default async function createChannel({ api, env, query }: FlowProps): Promise<void> {
+  const debug = extendDebug('flow:createChannel')
+  debug('Started')
+
+  // Create channel owner membership
+  const [channelOwnerKeypair] = await api.createKeyPairs(1)
+  const paidTermId = api.createPaidTermId(new BN(+(env.MEMBERSHIP_PAID_TERMS || 0)))
+  const buyMembershipFixture = new BuyMembershipHappyCaseFixture(api, [channelOwnerKeypair.key.address], paidTermId)
+  await new FixtureRunner(buyMembershipFixture).run()
+
+  // Send some funds to pay the deletion_prize
+  const channelOwnerBalance = api.consts.storage.dataObjectDeletionPrize.muln(2)
+  await api.treasuryTransferBalance(channelOwnerKeypair.key.address, channelOwnerBalance)
+
+  // Create Joystream CLI
+  const tmpFileManager = new TmpFileManager()
+  const joystreamCli = new JoystreamCLI(tmpFileManager)
+
+  // Init CLI, import & select channel owner key
+  await joystreamCli.init()
+  await joystreamCli.importKey(channelOwnerKeypair.key)
+
+  // Create channel
+  const avatarPhotoPath = tmpFileManager.randomImgFile(300, 300)
+  const coverPhotoPath = tmpFileManager.randomImgFile(1920, 500)
+  const channelInput = {
+    title: 'Test channel',
+    avatarPhotoPath,
+    coverPhotoPath,
+    description: 'This is a test channel',
+    isPublic: true,
+    language: 'en',
+    rewardAccount: channelOwnerKeypair.key.address,
+  }
+  const { out: createChannelOut } = await joystreamCli.createChannel(channelInput, ['--context', 'Member'])
+
+  const channelIdMatch = /Channel with id ([0-9]+) successfully created/.exec(createChannelOut)
+  if (!channelIdMatch) {
+    throw new Error(`No channel id found in output:\n${createChannelOut}`)
+  }
+  const [, channelId] = channelIdMatch
+
+  await query.tryQueryWithTimeout(
+    () => query.channelById(channelId),
+    (channel) => {
+      Utils.assert(channel, 'Channel not found')
+      assert.equal(channel.title, channelInput.title)
+      assert.equal(channel.description, channelInput.description)
+      assert.equal(channel.isPublic, channelInput.isPublic)
+      assert.equal(channel.language?.iso, channelInput.language)
+      assert.equal(channel.rewardAccount, channelInput.rewardAccount)
+      assert.equal(channel.avatarPhoto?.type.__typename, 'DataObjectTypeChannelAvatar')
+      assert.equal(channel.avatarPhoto?.size, statSync(avatarPhotoPath).size)
+      assert.equal(channel.coverPhoto?.type.__typename, 'DataObjectTypeChannelCoverPhoto')
+      assert.equal(channel.coverPhoto?.size, statSync(coverPhotoPath).size)
+    }
+  )
+
+  debug('Done')
+}

+ 39 - 0
tests/network-tests/src/flows/clis/initDistributionBucket.ts

@@ -0,0 +1,39 @@
+import { FlowProps } from '../../Flow'
+import { extendDebug } from '../../Debugger'
+import { WorkingGroups } from '../../WorkingGroups'
+import { DistributorCLI } from '../../cli/distributor'
+
+export default async function initDistributionBucket({ api }: FlowProps): Promise<void> {
+  const debug = extendDebug('flow:initDistributionBucketViaCLI')
+  debug('Started')
+
+  const leaderId = await api.getLeadWorkerId(WorkingGroups.Distribution)
+  const leader = await api.getGroupLead(WorkingGroups.Distribution)
+  if (!leaderId || !leader) {
+    throw new Error('Active distribution leader is required in this flow!')
+  }
+  const operatorId = leaderId.toString()
+  const leaderSuri = api.getSuri(leader.role_account_id)
+
+  const cli = new DistributorCLI([leaderSuri])
+
+  await cli.run('leader:set-buckets-per-bag-limit', ['--limit', '10'])
+  const { out: familyId } = await cli.run('leader:create-bucket-family')
+  const { out: bucketIndex } = await cli.run('leader:create-bucket', ['--familyId', familyId, '--acceptingBags', 'yes'])
+  const bucketId = `${familyId}:${bucketIndex}`
+  await cli.run('leader:update-bag', ['--bagId', 'static:council', '--familyId', familyId, '--add', bucketIndex])
+  await cli.run('leader:update-dynamic-bag-policy', ['--type', 'Channel', '--policy', `${familyId}:1`])
+  await cli.run('leader:update-bucket-mode', ['--bucketId', bucketId, '--mode', 'on'])
+  await cli.run('leader:invite-bucket-operator', ['--bucketId', bucketId, '--workerId', operatorId])
+  await cli.run('operator:accept-invitation', ['--bucketId', bucketId, '--workerId', operatorId])
+  await cli.run('operator:set-metadata', [
+    '--bucketId',
+    bucketId,
+    '--workerId',
+    operatorId,
+    '--endpoint',
+    'http://localhost:3334',
+  ])
+
+  debug('Done')
+}

+ 53 - 0
tests/network-tests/src/flows/clis/initStorageBucket.ts

@@ -0,0 +1,53 @@
+import { FlowProps } from '../../Flow'
+import { extendDebug } from '../../Debugger'
+import { WorkingGroups } from '../../WorkingGroups'
+import { StorageCLI } from '../../cli/storage'
+
+export default async function initStorageBucket({ api }: FlowProps): Promise<void> {
+  const debug = extendDebug('flow:initStorageBucketViaCLI')
+  debug('Started')
+
+  const leaderId = await api.getLeadWorkerId(WorkingGroups.Storage)
+  const leader = await api.getGroupLead(WorkingGroups.Storage)
+  if (!leaderId || !leader) {
+    throw new Error('Active storage leader is required in this flow!')
+  }
+  const leaderSuri = api.getSuri(leader.role_account_id)
+  const transactorKey = '5DkE5YD8m5Yzno6EH2RTBnH268TDnnibZMEMjxwYemU4XevU' // //Colossus1
+
+  const operatorId = leaderId.toString()
+
+  const cli = new StorageCLI(leaderSuri)
+  await cli.run('leader:update-bag-limit', ['--limit', '10'])
+  await cli.run('leader:update-voucher-limits', ['--objects', '1000', '--size', '10000000000'])
+  const { out: bucketId } = await cli.run('leader:create-bucket', [
+    '--invited',
+    operatorId,
+    '--allow',
+    '--number',
+    '1000',
+    '--size',
+    '10000000000',
+  ])
+  await cli.run('operator:accept-invitation', [
+    '--workerId',
+    operatorId,
+    '--bucketId',
+    bucketId,
+    '--transactorAccountId',
+    transactorKey,
+  ])
+  await cli.run('leader:update-bag', ['--add', bucketId, '--bagId', 'static:council'])
+  await cli.run('leader:update-dynamic-bag-policy', ['--bagType', 'Channel', '--number', '1'])
+  await cli.run('operator:set-metadata', [
+    '--bucketId',
+    bucketId,
+    '--operatorId',
+    operatorId,
+    '--endpoint',
+    'http://localhost:3333',
+  ])
+  await cli.run('leader:update-data-fee', ['-f', '0'])
+
+  debug('Done')
+}

+ 23 - 0
tests/network-tests/src/flows/membership/makeAliceMember.ts

@@ -0,0 +1,23 @@
+import { FlowProps } from '../../Flow'
+import { BuyMembershipHappyCaseFixture } from '../../fixtures/membershipModule'
+import { PaidTermId } from '@joystream/types/members'
+import BN from 'bn.js'
+import { extendDebug } from '../../Debugger'
+import { FixtureRunner } from '../../Fixture'
+
+export default async function makeAliceMember({ api, env }: FlowProps): Promise<void> {
+  const debug = extendDebug('flow:makeAliceMember')
+  debug('Started')
+
+  const paidTerms: PaidTermId = api.createPaidTermId(new BN(+env.MEMBERSHIP_PAID_TERMS!))
+
+  // Assert membership can be bought if sufficient funds are available
+  const happyCaseFixture = new BuyMembershipHappyCaseFixture(
+    api,
+    ['5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY'],
+    paidTerms
+  )
+  await new FixtureRunner(happyCaseFixture).run()
+
+  debug('Done')
+}

+ 6 - 6
tests/network-tests/src/flows/proposals/manageLeaderRole.ts

@@ -15,7 +15,6 @@ import {
 } from '../../fixtures/proposalsModule'
 import { ApplyForOpeningFixture } from '../../fixtures/workingGroupModule'
 import { PaidTermId } from '@joystream/types/members'
-import { OpeningId } from '@joystream/types/hiring'
 import { ProposalId } from '@joystream/types/proposals'
 import { WorkerId } from '@joystream/types/working-group'
 import { assert } from 'chai'
@@ -30,6 +29,9 @@ export default {
   content: async function ({ api, env, lock }: FlowProps): Promise<void> {
     return manageLeaderRole(api, env, WorkingGroups.Content, lock)
   },
+  distribution: async function ({ api, env, lock }: FlowProps): Promise<void> {
+    return manageLeaderRole(api, env, WorkingGroups.Distribution, lock)
+  },
 }
 
 async function manageLeaderRole(api: Api, env: NodeJS.ProcessEnv, group: WorkingGroups, lock: ResourceLocker) {
@@ -84,8 +86,7 @@ async function manageLeaderRole(api: Api, env: NodeJS.ProcessEnv, group: Working
 
   await new FixtureRunner(voteForCreateOpeningProposalFixture).run()
 
-  const openingId = api.findOpeningAddedEvent(voteForCreateOpeningProposalFixture.events, group) as OpeningId
-  assert(openingId)
+  const openingId = api.getEvent(voteForCreateOpeningProposalFixture.events, group, 'OpeningAdded').data[0]
 
   const applyForLeaderOpeningFixture = new ApplyForOpeningFixture(
     api,
@@ -151,9 +152,8 @@ async function manageLeaderRole(api: Api, env: NodeJS.ProcessEnv, group: Working
 
   const leadId = (await api.getLeadWorkerId(group)) as WorkerId
   assert(leadId)
-  const workerId = api.findWorkerRewardAmountUpdatedEvent(voteForeLeaderRewardFixture.events, group, leadId) as WorkerId
-  assert(workerId)
-  assert(leadId!.eq(workerId))
+  const workerId = api.getEvent(voteForeLeaderRewardFixture.events, group, 'WorkerRewardAmountUpdated').data[0]
+  assert(workerId.eq(leadId))
   const rewardRelationship = await api.getWorkerRewardRelationship(leadId!, group)
   assert(rewardRelationship.amount_per_payout.eq(alteredPayoutAmount))
 

+ 3 - 0
tests/network-tests/src/flows/proposals/workingGroupMintCapacityProposal.ts

@@ -20,6 +20,9 @@ export default {
   content: async function ({ api, env, lock }: FlowProps): Promise<void> {
     return workingGroupMintCapactiy(api, env, WorkingGroups.Content, lock)
   },
+  distribution: async function ({ api, env, lock }: FlowProps): Promise<void> {
+    return workingGroupMintCapactiy(api, env, WorkingGroups.Distribution, lock)
+  },
 }
 
 async function workingGroupMintCapactiy(api: Api, env: NodeJS.ProcessEnv, group: WorkingGroups, lock: ResourceLocker) {

+ 227 - 0
tests/network-tests/src/flows/storagev2/initDistribution.ts

@@ -0,0 +1,227 @@
+import { FlowProps } from '../../Flow'
+import { extendDebug } from '../../Debugger'
+import { WorkingGroups } from '../../WorkingGroups'
+import {
+  DistributionBucketFamilyMetadata,
+  DistributionBucketOperatorMetadata,
+  IDistributionBucketFamilyMetadata,
+  IDistributionBucketOperatorMetadata,
+} from '@joystream/metadata-protobuf'
+import { CreateInterface, createType } from '@joystream/types'
+import { BagId, DistributionBucketFamilyId, DynamicBagId, StaticBagId } from '@joystream/types/storage'
+import { SubmittableExtrinsic } from '@polkadot/api/types'
+import _ from 'lodash'
+import { Utils } from '../../utils'
+import { WorkerId } from '@joystream/types/working-group'
+
+type DistributionBucketConfig = {
+  metadata: IDistributionBucketOperatorMetadata
+  staticBags?: CreateInterface<StaticBagId>[]
+  operatorId: number
+}
+
+type DistributionFamilyConfig = {
+  metadata?: IDistributionBucketFamilyMetadata
+  buckets: DistributionBucketConfig[]
+  dynamicBagPolicy: {
+    [K in keyof typeof DynamicBagId.typeDefinitions]?: number
+  }
+}
+
+type InitDistributionConfig = {
+  families: DistributionFamilyConfig[]
+}
+
+export const allStaticBags: CreateInterface<StaticBagId>[] = [
+  'Council',
+  { WorkingGroup: 'Content' },
+  { WorkingGroup: 'Distribution' },
+  { WorkingGroup: 'Gateway' },
+  { WorkingGroup: 'OperationsAlpha' },
+  { WorkingGroup: 'OperationsBeta' },
+  { WorkingGroup: 'OperationsGamma' },
+  { WorkingGroup: 'Storage' },
+]
+
+export const singleBucketConfig: InitDistributionConfig = {
+  families: [
+    {
+      metadata: { region: 'All' },
+      dynamicBagPolicy: {
+        'Channel': 1,
+        'Member': 1,
+      },
+      buckets: [
+        {
+          metadata: { endpoint: process.env.DISTRIBUTOR_1_URL || 'http://localhost:3334' },
+          staticBags: allStaticBags,
+          operatorId: parseInt(process.env.DISTRIBUTOR_1_WORKER_ID || '0'),
+        },
+      ],
+    },
+  ],
+}
+
+export const doubleBucketConfig: InitDistributionConfig = {
+  families: [
+    {
+      metadata: { region: 'Region 1' },
+      dynamicBagPolicy: {
+        'Channel': 1,
+        'Member': 1,
+      },
+      buckets: [
+        {
+          metadata: { endpoint: process.env.DISTRIBUTOR_1_URL || 'http://localhost:3334' },
+          staticBags: allStaticBags,
+          operatorId: parseInt(process.env.DISTRIBUTOR_1_WORKER_ID || '0'),
+        },
+      ],
+    },
+    {
+      metadata: { region: 'Region 2' },
+      dynamicBagPolicy: {
+        'Channel': 1,
+        'Member': 1,
+      },
+      buckets: [
+        {
+          metadata: { endpoint: process.env.DISTRIBUTOR_2_URL || 'http://localhost:3336' },
+          staticBags: allStaticBags,
+          operatorId: parseInt(process.env.DISTRIBUTOR_2_WORKER_ID || '1'),
+        },
+      ],
+    },
+  ],
+}
+
+export default function createFlow({ families }: InitDistributionConfig) {
+  return async function initDistribution({ api }: FlowProps): Promise<void> {
+    const debug = extendDebug('flow:initDistribution')
+    debug('Started')
+
+    // Get working group leaders
+    const distributionLeaderId = await api.getLeadWorkerId(WorkingGroups.Distribution)
+    const distributionLeader = await api.getGroupLead(WorkingGroups.Distribution)
+    if (!distributionLeaderId || !distributionLeader) {
+      throw new Error('Active distributor leader is required in this flow!')
+    }
+
+    const distributionLeaderKey = distributionLeader.role_account_id.toString()
+    const totalBucketsNum = families.reduce((a, b) => a + b.buckets.length, 0)
+
+    // Hire operators
+    // const hireWorkersFixture = new HireWorkersFixture(api, totalBucketsNum, WorkingGroups.Distribution)
+    // await new FixtureRunner(hireWorkersFixture).run()
+    // const operatorIds = hireWorkersFixture.getHiredWorkers()
+
+    const operatorIds = families.reduce(
+      (ids, { buckets }) => ids.concat(buckets.map((b) => createType('WorkerId', b.operatorId))),
+      [] as WorkerId[]
+    )
+    const operatorKeys = await api.getWorkerRoleAccounts(operatorIds, WorkingGroups.Distribution)
+
+    // Create families, set buckets per bag limit
+    const createFamilyTxs = families.map(() => api.tx.storage.createDistributionBucketFamily())
+    const setBucketsPerBagLimitTx = api.tx.storage.updateDistributionBucketsPerBagLimit(totalBucketsNum)
+    const [createFamilyResults] = await Promise.all([
+      api.signAndSendMany(createFamilyTxs, distributionLeaderKey),
+      api.signAndSendMany([setBucketsPerBagLimitTx], distributionLeaderKey),
+    ])
+    const familyIds = createFamilyResults
+      .map((r) => api.getEvent(r, 'storage', 'DistributionBucketFamilyCreated').data[0])
+      .sort((a, b) => a.cmp(b))
+    const familyById = new Map<number, DistributionFamilyConfig>()
+    familyIds.forEach((id, i) => familyById.set(id.toNumber(), families[i]))
+
+    // Create buckets, update families metadata, set dynamic bag policies
+    const createBucketTxs = families.reduce(
+      (txs, { buckets }, familyIndex) =>
+        txs.concat(buckets.map(() => api.tx.storage.createDistributionBucket(familyIds[familyIndex], true))),
+      [] as SubmittableExtrinsic<'promise'>[]
+    )
+    const updateFamilyMetadataTxs = familyIds.map((id, i) => {
+      const metadataBytes = Utils.metadataToBytes(DistributionBucketFamilyMetadata, families[i].metadata)
+      return api.tx.storage.setDistributionBucketFamilyMetadata(id, metadataBytes)
+    })
+    const dynamicBagPolicies = new Map<string, [DistributionBucketFamilyId, number][]>()
+    familyIds.forEach((familyId, index) => {
+      const family = families[index]
+      Object.entries(family.dynamicBagPolicy).forEach(([bagType, bucketsN]) => {
+        const current = dynamicBagPolicies.get(bagType) || []
+        dynamicBagPolicies.set(bagType, [...current, [familyId, bucketsN]])
+      })
+    })
+    const updateDynamicBagPolicyTxs = _.entries(dynamicBagPolicies).map(([bagType, policyEntries]) =>
+      api.tx.storage.updateFamiliesInDynamicBagCreationPolicy(
+        bagType as keyof typeof DynamicBagId.typeDefinitions,
+        createType('BTreeMap<DistributionBucketFamilyId, u32>', new Map(policyEntries))
+      )
+    )
+    const [createBucketResults] = await Promise.all([
+      api.signAndSendMany(createBucketTxs, distributionLeaderKey),
+      api.signAndSendMany(updateFamilyMetadataTxs, distributionLeaderKey),
+      api.signAndSendMany(updateDynamicBagPolicyTxs, distributionLeaderKey),
+    ])
+    const bucketIds = createBucketResults
+      .map((r) => {
+        const [, , bucketId] = api.getEvent(r, 'storage', 'DistributionBucketCreated').data
+        return bucketId
+      })
+      .sort(
+        (a, b) =>
+          a.distribution_bucket_family_id.cmp(b.distribution_bucket_family_id) ||
+          a.distribution_bucket_index.cmp(b.distribution_bucket_index)
+      )
+    const bucketById = new Map<string, DistributionBucketConfig>()
+    bucketIds.forEach((bucketId) => {
+      const familyId = bucketId.distribution_bucket_family_id.toNumber()
+      const bucketIndex = bucketId.distribution_bucket_index.toNumber()
+      const family = familyById.get(familyId)
+      if (!family) {
+        throw new Error(`familyById not found: ${familyId}`)
+      }
+      bucketById.set(bucketId.toString(), family.buckets[bucketIndex])
+    })
+
+    // Invite bucket operators
+    const bucketInviteTxs = bucketIds.map((bucketId, i) =>
+      api.tx.storage.inviteDistributionBucketOperator(bucketId, operatorIds[i])
+    )
+    await api.signAndSendMany(bucketInviteTxs, distributionLeaderKey)
+
+    // Accept invitations
+    const acceptInvitationTxs = bucketIds.map((bucketId, i) =>
+      api.tx.storage.acceptDistributionBucketInvitation(operatorIds[i], bucketId)
+    )
+    await api.signAndSendManyByMany(acceptInvitationTxs, operatorKeys)
+
+    // Bucket metadata and static bags
+    const bucketSetupPromises = _.flatten(
+      bucketIds.map((bucketId, i) => {
+        const operatorId = operatorIds[i]
+        const operatorKey = operatorKeys[i]
+        const bucketConfig = bucketById.get(bucketId.toString())
+        if (!bucketConfig) {
+          throw new Error('Bucket config not found')
+        }
+        const metadataBytes = Utils.metadataToBytes(DistributionBucketOperatorMetadata, bucketConfig.metadata)
+        const setMetaTx = api.tx.storage.setDistributionOperatorMetadata(operatorId, bucketId, metadataBytes)
+        const setMetaPromise = api.signAndSendMany([setMetaTx], operatorKey)
+        const updateBagTxs = (bucketConfig.staticBags || []).map((sBagId) => {
+          return api.tx.storage.updateDistributionBucketsForBag(
+            createType<BagId, 'BagId'>('BagId', { Static: sBagId }),
+            bucketId.distribution_bucket_family_id,
+            createType('BTreeSet<DistributionBucketIndex>', [bucketId.distribution_bucket_index]),
+            createType('BTreeSet<DistributionBucketIndex>', [])
+          )
+        })
+        const updateBagsPromise = api.signAndSendMany(updateBagTxs, distributionLeaderKey)
+        return [updateBagsPromise, setMetaPromise]
+      })
+    )
+    await Promise.all(bucketSetupPromises)
+
+    debug('Done')
+  }
+}

+ 159 - 0
tests/network-tests/src/flows/storagev2/initStorage.ts

@@ -0,0 +1,159 @@
+import { FlowProps } from '../../Flow'
+import { extendDebug } from '../../Debugger'
+import { WorkingGroups } from '../../WorkingGroups'
+import { IStorageBucketOperatorMetadata, StorageBucketOperatorMetadata } from '@joystream/metadata-protobuf'
+import { CreateInterface, createType } from '@joystream/types'
+import { BagId, DynamicBagId, StaticBagId } from '@joystream/types/storage'
+import _ from 'lodash'
+import { Utils } from '../../utils'
+import BN from 'bn.js'
+
+type StorageBucketConfig = {
+  metadata: IStorageBucketOperatorMetadata
+  staticBags?: CreateInterface<StaticBagId>[]
+  storageLimit: BN
+  objectsLimit: number
+  operatorId: number
+  transactorKey: string
+}
+
+type InitStorageConfig = {
+  buckets: StorageBucketConfig[]
+  dynamicBagPolicy: {
+    [K in keyof typeof DynamicBagId.typeDefinitions]?: number
+  }
+}
+
+export const allStaticBags: CreateInterface<StaticBagId>[] = [
+  'Council',
+  { WorkingGroup: 'Content' },
+  { WorkingGroup: 'Distribution' },
+  { WorkingGroup: 'Gateway' },
+  { WorkingGroup: 'OperationsAlpha' },
+  { WorkingGroup: 'OperationsBeta' },
+  { WorkingGroup: 'OperationsGamma' },
+  { WorkingGroup: 'Storage' },
+]
+
+export const singleBucketConfig: InitStorageConfig = {
+  dynamicBagPolicy: {
+    'Channel': 1,
+    'Member': 1,
+  },
+  buckets: [
+    {
+      metadata: { endpoint: process.env.COLOSSUS_1_URL || 'http://localhost:3333' },
+      staticBags: allStaticBags,
+      operatorId: parseInt(process.env.COLOSSUS_1_WORKER_ID || '0'),
+      storageLimit: new BN(1_000_000_000_000),
+      objectsLimit: 1000000000,
+      transactorKey: process.env.COLOSSUS_1_TRANSACTOR_KEY || '5DkE5YD8m5Yzno6EH2RTBnH268TDnnibZMEMjxwYemU4XevU', // //Colossus1
+    },
+  ],
+}
+
+export const doubleBucketConfig: InitStorageConfig = {
+  dynamicBagPolicy: {
+    'Channel': 2,
+    'Member': 2,
+  },
+  buckets: [
+    {
+      metadata: { endpoint: process.env.COLOSSUS_1_URL || 'http://localhost:3333' },
+      staticBags: allStaticBags,
+      operatorId: parseInt(process.env.COLOSSUS_1_WORKER_ID || '0'),
+      storageLimit: new BN(1_000_000_000_000),
+      objectsLimit: 1000000000,
+      transactorKey: process.env.COLOSSUS_1_TRANSACTOR_KEY || '5DkE5YD8m5Yzno6EH2RTBnH268TDnnibZMEMjxwYemU4XevU', // //Colossus1
+    },
+    {
+      metadata: { endpoint: process.env.STORAGE_2_URL || 'http://localhost:3335' },
+      staticBags: allStaticBags,
+      operatorId: parseInt(process.env.STORAGE_2_WORKER_ID || '1'),
+      storageLimit: new BN(1_000_000_000_000),
+      objectsLimit: 1000000000,
+      transactorKey: process.env.COLOSSUS_2_TRANSACTOR_KEY || '5FbzYmQ3HogiEEDSXPYJe58yCcmSh3vsZLodTdBB6YuLDAj7', // //Colossus2
+    },
+  ],
+}
+
+export default function createFlow({ buckets, dynamicBagPolicy }: InitStorageConfig) {
+  return async function initStorage({ api }: FlowProps): Promise<void> {
+    const debug = extendDebug('flow:initStorage')
+    debug('Started')
+
+    // Get working group leaders
+    const storageLeaderId = await api.getLeadWorkerId(WorkingGroups.Storage)
+    const storageLeader = await api.getGroupLead(WorkingGroups.Storage)
+    if (!storageLeaderId || !storageLeader) {
+      throw new Error('Active storage leader is required in this flow!')
+    }
+
+    const storageLeaderKey = storageLeader.role_account_id.toString()
+    const maxStorageLimit = buckets.sort((a, b) => b.storageLimit.cmp(a.storageLimit))[0].storageLimit
+    const maxObjectsLimit = Math.max(...buckets.map((b) => b.objectsLimit))
+
+    // Hire operators
+    // const hireWorkersFixture = new HireWorkersFixture(api, buckets.length, WorkingGroups.Storage)
+    // await new FixtureRunner(hireWorkersFixture).run()
+    // const operatorIds = hireWorkersFixture.getHiredWorkers()
+
+    const operatorIds = buckets.map((b) => createType('WorkerId', b.operatorId))
+    const operatorKeys = await api.getWorkerRoleAccounts(operatorIds, WorkingGroups.Storage)
+
+    // Set global limits and policies
+    const updateDynamicBagPolicyTxs = _.entries(dynamicBagPolicy).map(([bagType, numberOfBuckets]) =>
+      api.tx.storage.updateNumberOfStorageBucketsInDynamicBagCreationPolicy(
+        bagType as keyof typeof DynamicBagId.typeDefinitions,
+        numberOfBuckets
+      )
+    )
+    const setMaxVoucherLimitsTx = api.tx.storage.updateStorageBucketsVoucherMaxLimits(maxStorageLimit, maxObjectsLimit)
+    const setBucketPerBagLimitTx = api.tx.storage.updateStorageBucketsPerBagLimit(Math.max(5, buckets.length))
+
+    await api.signAndSendMany(
+      [...updateDynamicBagPolicyTxs, setMaxVoucherLimitsTx, setBucketPerBagLimitTx],
+      storageLeaderKey
+    )
+
+    // Create buckets
+    const createBucketTxs = buckets.map((b, i) =>
+      api.tx.storage.createStorageBucket(operatorIds[i], true, b.storageLimit, b.objectsLimit)
+    )
+    const createBucketResults = await api.signAndSendMany(createBucketTxs, storageLeaderKey)
+    const bucketById = new Map<number, StorageBucketConfig>()
+    createBucketResults.forEach((res, i) => {
+      const bucketId = api.getEvent(res, 'storage', 'StorageBucketCreated').data[0]
+      bucketById.set(bucketId.toNumber(), buckets[i])
+    })
+
+    // Accept invitations (Map preserves insertion order, so index i matches operatorIds[i])
+    const acceptInvitationTxs = Array.from(bucketById.entries()).map(([bucketId, bucketConfig], i) =>
+      api.tx.storage.acceptStorageBucketInvitation(operatorIds[i], bucketId, bucketConfig.transactorKey)
+    )
+    await api.signAndSendManyByMany(acceptInvitationTxs, operatorKeys)
+
+    // Bucket metadata and static bags
+    const bucketSetupPromises = _.flatten(
+      Array.from(bucketById.entries()).map(([bucketId, bucketConfig], i) => {
+        const operatorId = operatorIds[i]
+        const operatorKey = operatorKeys[i]
+        const metadataBytes = Utils.metadataToBytes(StorageBucketOperatorMetadata, bucketConfig.metadata)
+        const setMetaTx = api.tx.storage.setStorageOperatorMetadata(operatorId, bucketId, metadataBytes)
+        const setMetaPromise = api.signAndSendMany([setMetaTx], operatorKey)
+        const updateBagTxs = (bucketConfig.staticBags || []).map((sBagId) => {
+          return api.tx.storage.updateStorageBucketsForBag(
+            createType<BagId, 'BagId'>('BagId', { Static: sBagId }),
+            createType('BTreeSet<StorageBucketId>', [bucketId]),
+            createType('BTreeSet<StorageBucketId>', [])
+          )
+        })
+        const updateBagsPromise = api.signAndSendMany(updateBagTxs, storageLeaderKey)
+        return [updateBagsPromise, setMetaPromise]
+      })
+    )
+    await Promise.all(bucketSetupPromises)
+
+    debug('Done')
+  }
+}
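
Note on the file above: `createFlow` is a flow factory. It closes over an `InitStorageConfig` and returns the async flow that a test scenario registers as a job. A minimal sketch of wiring the two exported configs into a scenario, assuming the `scenario`/`job` helpers from `tests/network-tests/src/Scenario.ts` (import paths and job labels are illustrative, not part of this diff):

```ts
// Sketch only: scenario()/job() are assumed from src/Scenario.ts;
// relative import paths and job labels are illustrative.
import { scenario } from '../../Scenario'
import createFlow, { singleBucketConfig, doubleBucketConfig } from './initStorage'

scenario(async ({ job }) => {
  // Single bucket: minimal setup with one Colossus node
  job('initialize storage (single bucket)', createFlow(singleBucketConfig))
  // Double bucket: dynamic bag policies of 2 make new channel/member bags
  // replicate across both storage nodes
  // job('initialize storage (double bucket)', createFlow(doubleBucketConfig))
})
```

Since the factory receives the whole config, a run can also construct a custom `InitStorageConfig` (different voucher limits, extra buckets) and pass it to `createFlow` unchanged.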

+ 3 - 0
tests/network-tests/src/flows/workingGroup/manageWorkerAsWorker.ts

@@ -23,6 +23,9 @@ export default {
   content: async function ({ api, env }: FlowProps): Promise<void> {
     return manageWorkerAsWorker(api, env, WorkingGroups.Content)
   },
+  distribution: async function ({ api, env }: FlowProps): Promise<void> {
+    return manageWorkerAsWorker(api, env, WorkingGroups.Distribution)
+  },
 }
 
 // Manage worker as worker

+ 3 - 0
tests/network-tests/src/flows/workingGroup/workerPayout.ts

@@ -29,6 +29,9 @@ export default {
   content: async function ({ api, env, lock }: FlowProps): Promise<void> {
     return workerPayouts(api, env, WorkingGroups.Content, lock)
   },
+  distribution: async function ({ api, env, lock }: FlowProps): Promise<void> {
+    return workerPayouts(api, env, WorkingGroups.Distribution, lock)
+  },
 }
 
 async function workerPayouts(api: Api, env: NodeJS.ProcessEnv, group: WorkingGroups, lock: ResourceLocker) {

+ 158 - 0
tests/network-tests/src/graphql/generated/queries.ts

@@ -0,0 +1,158 @@
+import * as Types from './schema'
+
+import gql from 'graphql-tag'
+type DataObjectTypeFields_DataObjectTypeChannelAvatar_Fragment = {
+  __typename: 'DataObjectTypeChannelAvatar'
+  channel?: Types.Maybe<{ id: string }>
+}
+
+type DataObjectTypeFields_DataObjectTypeChannelCoverPhoto_Fragment = {
+  __typename: 'DataObjectTypeChannelCoverPhoto'
+  channel?: Types.Maybe<{ id: string }>
+}
+
+type DataObjectTypeFields_DataObjectTypeVideoMedia_Fragment = {
+  __typename: 'DataObjectTypeVideoMedia'
+  video?: Types.Maybe<{ id: string }>
+}
+
+type DataObjectTypeFields_DataObjectTypeVideoThumbnail_Fragment = {
+  __typename: 'DataObjectTypeVideoThumbnail'
+  video?: Types.Maybe<{ id: string }>
+}
+
+type DataObjectTypeFields_DataObjectTypeUnknown_Fragment = { __typename: 'DataObjectTypeUnknown' }
+
+export type DataObjectTypeFieldsFragment =
+  | DataObjectTypeFields_DataObjectTypeChannelAvatar_Fragment
+  | DataObjectTypeFields_DataObjectTypeChannelCoverPhoto_Fragment
+  | DataObjectTypeFields_DataObjectTypeVideoMedia_Fragment
+  | DataObjectTypeFields_DataObjectTypeVideoThumbnail_Fragment
+  | DataObjectTypeFields_DataObjectTypeUnknown_Fragment
+
+export type StorageDataObjectFieldsFragment = {
+  id: string
+  ipfsHash: string
+  isAccepted: boolean
+  size: any
+  deletionPrize: any
+  unsetAt?: Types.Maybe<any>
+  storageBagId: string
+  type:
+    | DataObjectTypeFields_DataObjectTypeChannelAvatar_Fragment
+    | DataObjectTypeFields_DataObjectTypeChannelCoverPhoto_Fragment
+    | DataObjectTypeFields_DataObjectTypeVideoMedia_Fragment
+    | DataObjectTypeFields_DataObjectTypeVideoThumbnail_Fragment
+    | DataObjectTypeFields_DataObjectTypeUnknown_Fragment
+}
+
+export type ChannelFieldsFragment = {
+  title?: Types.Maybe<string>
+  description?: Types.Maybe<string>
+  isPublic?: Types.Maybe<boolean>
+  rewardAccount?: Types.Maybe<string>
+  isCensored: boolean
+  language?: Types.Maybe<{ iso: string }>
+  ownerMember?: Types.Maybe<{ id: string }>
+  ownerCuratorGroup?: Types.Maybe<{ id: string }>
+  category?: Types.Maybe<{ name?: Types.Maybe<string> }>
+  avatarPhoto?: Types.Maybe<StorageDataObjectFieldsFragment>
+  coverPhoto?: Types.Maybe<StorageDataObjectFieldsFragment>
+}
+
+export type GetDataObjectsByIdsQueryVariables = Types.Exact<{
+  ids?: Types.Maybe<Array<Types.Scalars['ID']> | Types.Scalars['ID']>
+}>
+
+export type GetDataObjectsByIdsQuery = { storageDataObjects: Array<StorageDataObjectFieldsFragment> }
+
+export type GetChannelByIdQueryVariables = Types.Exact<{
+  id: Types.Scalars['ID']
+}>
+
+export type GetChannelByIdQuery = { channelByUniqueInput?: Types.Maybe<ChannelFieldsFragment> }
+
+export const DataObjectTypeFields = gql`
+  fragment DataObjectTypeFields on DataObjectType {
+    __typename
+    ... on DataObjectTypeChannelAvatar {
+      channel {
+        id
+      }
+    }
+    ... on DataObjectTypeChannelCoverPhoto {
+      channel {
+        id
+      }
+    }
+    ... on DataObjectTypeVideoThumbnail {
+      video {
+        id
+      }
+    }
+    ... on DataObjectTypeVideoMedia {
+      video {
+        id
+      }
+    }
+  }
+`
+export const StorageDataObjectFields = gql`
+  fragment StorageDataObjectFields on StorageDataObject {
+    id
+    ipfsHash
+    isAccepted
+    size
+    type {
+      ...DataObjectTypeFields
+    }
+    deletionPrize
+    unsetAt
+    storageBagId
+  }
+  ${DataObjectTypeFields}
+`
+export const ChannelFields = gql`
+  fragment ChannelFields on Channel {
+    title
+    description
+    isPublic
+    language {
+      iso
+    }
+    rewardAccount
+    isCensored
+    ownerMember {
+      id
+    }
+    ownerCuratorGroup {
+      id
+    }
+    category {
+      name
+    }
+    avatarPhoto {
+      ...StorageDataObjectFields
+    }
+    coverPhoto {
+      ...StorageDataObjectFields
+    }
+  }
+  ${StorageDataObjectFields}
+`
+export const GetDataObjectsByIds = gql`
+  query getDataObjectsByIds($ids: [ID!]) {
+    storageDataObjects(where: { id_in: $ids }) {
+      ...StorageDataObjectFields
+    }
+  }
+  ${StorageDataObjectFields}
+`
+export const GetChannelById = gql`
+  query getChannelById($id: ID!) {
+    channelByUniqueInput(where: { id: $id }) {
+      ...ChannelFields
+    }
+  }
+  ${ChannelFields}
+`
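
The generated documents above are paired with result and variable types, so query-node requests can be typed end to end. A short sketch of executing `GetChannelById` with an Apollo Core client; the client wiring, `cross-fetch` usage, and endpoint URL are illustrative assumptions, not part of this diff:

```ts
// Sketch: typed execution of the generated GetChannelById document.
// Endpoint and client setup below are illustrative assumptions.
import { ApolloClient, HttpLink, InMemoryCache } from '@apollo/client/core'
import fetch from 'cross-fetch'
import { GetChannelById, GetChannelByIdQuery, GetChannelByIdQueryVariables } from './queries'

const client = new ApolloClient({
  link: new HttpLink({ uri: 'http://localhost:8081/graphql', fetch }),
  cache: new InMemoryCache(),
})

async function fetchChannel(id: string): Promise<GetChannelByIdQuery['channelByUniqueInput']> {
  const { data } = await client.query<GetChannelByIdQuery, GetChannelByIdQueryVariables>({
    query: GetChannelById,
    variables: { id },
  })
  // channelByUniqueInput is nullable: null means no channel with this id
  return data.channelByUniqueInput
}
```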

+ 3715 - 0
tests/network-tests/src/graphql/generated/schema.ts

@@ -0,0 +1,3715 @@
+export type Maybe<T> = T | null
+export type Exact<T extends { [key: string]: unknown }> = { [K in keyof T]: T[K] }
+export type MakeOptional<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]?: Maybe<T[SubKey]> }
+export type MakeMaybe<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]: Maybe<T[SubKey]> }
+/** All built-in and custom scalars, mapped to their actual values */
+export type Scalars = {
+  ID: string
+  String: string
+  Boolean: boolean
+  Int: number
+  Float: number
+  /** The javascript `Date` as string. Type represents date and time as the ISO Date string. */
+  DateTime: any
+  /** GraphQL representation of BigInt */
+  BigInt: any
+  /** The `JSONObject` scalar type represents JSON objects as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf). */
+  JSONObject: any
+}
+
+export type BaseGraphQlObject = {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+}
+
+export type BaseModel = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+}
+
+export type BaseModelUuid = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+}
+
+export type BaseWhereInput = {
+  id_eq?: Maybe<Scalars['String']>
+  id_in?: Maybe<Array<Scalars['String']>>
+  createdAt_eq?: Maybe<Scalars['String']>
+  createdAt_lt?: Maybe<Scalars['String']>
+  createdAt_lte?: Maybe<Scalars['String']>
+  createdAt_gt?: Maybe<Scalars['String']>
+  createdAt_gte?: Maybe<Scalars['String']>
+  createdById_eq?: Maybe<Scalars['String']>
+  updatedAt_eq?: Maybe<Scalars['String']>
+  updatedAt_lt?: Maybe<Scalars['String']>
+  updatedAt_lte?: Maybe<Scalars['String']>
+  updatedAt_gt?: Maybe<Scalars['String']>
+  updatedAt_gte?: Maybe<Scalars['String']>
+  updatedById_eq?: Maybe<Scalars['String']>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['String']>
+  deletedAt_lt?: Maybe<Scalars['String']>
+  deletedAt_lte?: Maybe<Scalars['String']>
+  deletedAt_gt?: Maybe<Scalars['String']>
+  deletedAt_gte?: Maybe<Scalars['String']>
+  deletedById_eq?: Maybe<Scalars['String']>
+}
+
+export type Channel = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  ownerMember?: Maybe<Membership>
+  ownerMemberId?: Maybe<Scalars['String']>
+  ownerCuratorGroup?: Maybe<CuratorGroup>
+  ownerCuratorGroupId?: Maybe<Scalars['String']>
+  category?: Maybe<ChannelCategory>
+  categoryId?: Maybe<Scalars['String']>
+  /** Reward account where revenue is sent if set. */
+  rewardAccount?: Maybe<Scalars['String']>
+  /** The title of the Channel */
+  title?: Maybe<Scalars['String']>
+  /** The description of a Channel */
+  description?: Maybe<Scalars['String']>
+  coverPhoto?: Maybe<StorageDataObject>
+  coverPhotoId?: Maybe<Scalars['String']>
+  avatarPhoto?: Maybe<StorageDataObject>
+  avatarPhotoId?: Maybe<Scalars['String']>
+  /** Flag signaling whether a channel is public. */
+  isPublic?: Maybe<Scalars['Boolean']>
+  /** Flag signaling whether a channel is censored. */
+  isCensored: Scalars['Boolean']
+  language?: Maybe<Language>
+  languageId?: Maybe<Scalars['String']>
+  videos: Array<Video>
+  /** Number of the block the channel was created in */
+  createdInBlock: Scalars['Int']
+  collaborators: Array<Membership>
+}
+
+export type ChannelCategoriesByNameFtsOutput = {
+  item: ChannelCategoriesByNameSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type ChannelCategoriesByNameSearchResult = ChannelCategory
+
+/** Category of media channel */
+export type ChannelCategory = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** The name of the category */
+  name?: Maybe<Scalars['String']>
+  channels: Array<Channel>
+  createdInBlock: Scalars['Int']
+}
+
+export type ChannelCategoryConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<ChannelCategoryEdge>
+  pageInfo: PageInfo
+}
+
+export type ChannelCategoryCreateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock: Scalars['Float']
+}
+
+export type ChannelCategoryEdge = {
+  node: ChannelCategory
+  cursor: Scalars['String']
+}
+
+export enum ChannelCategoryOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NameAsc = 'name_ASC',
+  NameDesc = 'name_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type ChannelCategoryUpdateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type ChannelCategoryWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  name_eq?: Maybe<Scalars['String']>
+  name_contains?: Maybe<Scalars['String']>
+  name_startsWith?: Maybe<Scalars['String']>
+  name_endsWith?: Maybe<Scalars['String']>
+  name_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  channels_none?: Maybe<ChannelWhereInput>
+  channels_some?: Maybe<ChannelWhereInput>
+  channels_every?: Maybe<ChannelWhereInput>
+  AND?: Maybe<Array<ChannelCategoryWhereInput>>
+  OR?: Maybe<Array<ChannelCategoryWhereInput>>
+}
+
+export type ChannelCategoryWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type ChannelConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<ChannelEdge>
+  pageInfo: PageInfo
+}
+
+export type ChannelCreateInput = {
+  ownerMember?: Maybe<Scalars['ID']>
+  ownerCuratorGroup?: Maybe<Scalars['ID']>
+  category?: Maybe<Scalars['ID']>
+  rewardAccount?: Maybe<Scalars['String']>
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  coverPhoto?: Maybe<Scalars['ID']>
+  avatarPhoto?: Maybe<Scalars['ID']>
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored: Scalars['Boolean']
+  language?: Maybe<Scalars['ID']>
+  createdInBlock: Scalars['Float']
+}
+
+export type ChannelEdge = {
+  node: Channel
+  cursor: Scalars['String']
+}
+
+export enum ChannelOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  OwnerMemberAsc = 'ownerMember_ASC',
+  OwnerMemberDesc = 'ownerMember_DESC',
+  OwnerCuratorGroupAsc = 'ownerCuratorGroup_ASC',
+  OwnerCuratorGroupDesc = 'ownerCuratorGroup_DESC',
+  CategoryAsc = 'category_ASC',
+  CategoryDesc = 'category_DESC',
+  RewardAccountAsc = 'rewardAccount_ASC',
+  RewardAccountDesc = 'rewardAccount_DESC',
+  TitleAsc = 'title_ASC',
+  TitleDesc = 'title_DESC',
+  DescriptionAsc = 'description_ASC',
+  DescriptionDesc = 'description_DESC',
+  CoverPhotoAsc = 'coverPhoto_ASC',
+  CoverPhotoDesc = 'coverPhoto_DESC',
+  AvatarPhotoAsc = 'avatarPhoto_ASC',
+  AvatarPhotoDesc = 'avatarPhoto_DESC',
+  IsPublicAsc = 'isPublic_ASC',
+  IsPublicDesc = 'isPublic_DESC',
+  IsCensoredAsc = 'isCensored_ASC',
+  IsCensoredDesc = 'isCensored_DESC',
+  LanguageAsc = 'language_ASC',
+  LanguageDesc = 'language_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type ChannelUpdateInput = {
+  ownerMember?: Maybe<Scalars['ID']>
+  ownerCuratorGroup?: Maybe<Scalars['ID']>
+  category?: Maybe<Scalars['ID']>
+  rewardAccount?: Maybe<Scalars['String']>
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  coverPhoto?: Maybe<Scalars['ID']>
+  avatarPhoto?: Maybe<Scalars['ID']>
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored?: Maybe<Scalars['Boolean']>
+  language?: Maybe<Scalars['ID']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type ChannelWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  rewardAccount_eq?: Maybe<Scalars['String']>
+  rewardAccount_contains?: Maybe<Scalars['String']>
+  rewardAccount_startsWith?: Maybe<Scalars['String']>
+  rewardAccount_endsWith?: Maybe<Scalars['String']>
+  rewardAccount_in?: Maybe<Array<Scalars['String']>>
+  title_eq?: Maybe<Scalars['String']>
+  title_contains?: Maybe<Scalars['String']>
+  title_startsWith?: Maybe<Scalars['String']>
+  title_endsWith?: Maybe<Scalars['String']>
+  title_in?: Maybe<Array<Scalars['String']>>
+  description_eq?: Maybe<Scalars['String']>
+  description_contains?: Maybe<Scalars['String']>
+  description_startsWith?: Maybe<Scalars['String']>
+  description_endsWith?: Maybe<Scalars['String']>
+  description_in?: Maybe<Array<Scalars['String']>>
+  isPublic_eq?: Maybe<Scalars['Boolean']>
+  isPublic_in?: Maybe<Array<Scalars['Boolean']>>
+  isCensored_eq?: Maybe<Scalars['Boolean']>
+  isCensored_in?: Maybe<Array<Scalars['Boolean']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  ownerMember?: Maybe<MembershipWhereInput>
+  ownerCuratorGroup?: Maybe<CuratorGroupWhereInput>
+  category?: Maybe<ChannelCategoryWhereInput>
+  coverPhoto?: Maybe<StorageDataObjectWhereInput>
+  avatarPhoto?: Maybe<StorageDataObjectWhereInput>
+  language?: Maybe<LanguageWhereInput>
+  videos_none?: Maybe<VideoWhereInput>
+  videos_some?: Maybe<VideoWhereInput>
+  videos_every?: Maybe<VideoWhereInput>
+  collaborators_none?: Maybe<MembershipWhereInput>
+  collaborators_some?: Maybe<MembershipWhereInput>
+  collaborators_every?: Maybe<MembershipWhereInput>
+  AND?: Maybe<Array<ChannelWhereInput>>
+  OR?: Maybe<Array<ChannelWhereInput>>
+}
+
+export type ChannelWhereUniqueInput = {
+  id: Scalars['ID']
+}
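
A note on the filter types that recur throughout this file: every entity gets a `*WhereInput` encoding the query node's filtering DSL, with suffixed operators (`_eq`, `_in`, `_contains`, `_gt`, ...), relation filters (`videos_some`, `videos_none`, `videos_every`), and nestable `AND`/`OR` combinators. A brief sketch of a typed filter built from `ChannelWhereInput` (field values are illustrative):

```ts
// Sketch: composing a typed filter with the generated where-input types.
// Values are illustrative; the shape is enforced by ChannelWhereInput.
import { ChannelWhereInput } from './schema'

const publicRecentChannels: ChannelWhereInput = {
  isPublic_eq: true,
  isCensored_eq: false,
  createdInBlock_gt: 1000,
  videos_some: {}, // relation filter: channels with at least one video
  OR: [{ title_contains: 'Joystream' }, { description_contains: 'Joystream' }],
}
```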
+
+export enum Continent {
+  Af = 'AF',
+  Na = 'NA',
+  Oc = 'OC',
+  An = 'AN',
+  As = 'AS',
+  Eu = 'EU',
+  Sa = 'SA',
+}
+
+export type CuratorGroup = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Curators belonging to this group */
+  curatorIds: Array<Scalars['Int']>
+  /** Whether the group is active */
+  isActive: Scalars['Boolean']
+  channels: Array<Channel>
+}
+
+export type CuratorGroupConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<CuratorGroupEdge>
+  pageInfo: PageInfo
+}
+
+export type CuratorGroupCreateInput = {
+  curatorIds: Array<Scalars['Int']>
+  isActive: Scalars['Boolean']
+}
+
+export type CuratorGroupEdge = {
+  node: CuratorGroup
+  cursor: Scalars['String']
+}
+
+export enum CuratorGroupOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsActiveAsc = 'isActive_ASC',
+  IsActiveDesc = 'isActive_DESC',
+}
+
+export type CuratorGroupUpdateInput = {
+  curatorIds?: Maybe<Array<Scalars['Int']>>
+  isActive?: Maybe<Scalars['Boolean']>
+}
+
+export type CuratorGroupWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  curatorIds_containsAll?: Maybe<Array<Scalars['Int']>>
+  curatorIds_containsNone?: Maybe<Array<Scalars['Int']>>
+  curatorIds_containsAny?: Maybe<Array<Scalars['Int']>>
+  isActive_eq?: Maybe<Scalars['Boolean']>
+  isActive_in?: Maybe<Array<Scalars['Boolean']>>
+  channels_none?: Maybe<ChannelWhereInput>
+  channels_some?: Maybe<ChannelWhereInput>
+  channels_every?: Maybe<ChannelWhereInput>
+  AND?: Maybe<Array<CuratorGroupWhereInput>>
+  OR?: Maybe<Array<CuratorGroupWhereInput>>
+}
+
+export type CuratorGroupWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DataObjectType =
+  | DataObjectTypeChannelAvatar
+  | DataObjectTypeChannelCoverPhoto
+  | DataObjectTypeVideoMedia
+  | DataObjectTypeVideoThumbnail
+  | DataObjectTypeUnknown
+
+export type DataObjectTypeChannelAvatar = {
+  /** Related channel entity */
+  channel?: Maybe<Channel>
+}
+
+export type DataObjectTypeChannelCoverPhoto = {
+  /** Related channel entity */
+  channel?: Maybe<Channel>
+}
+
+export type DataObjectTypeUnknown = {
+  phantom?: Maybe<Scalars['Int']>
+}
+
+export type DataObjectTypeVideoMedia = {
+  /** Related video entity */
+  video?: Maybe<Video>
+}
+
+export type DataObjectTypeVideoThumbnail = {
+  /** Related video entity */
+  video?: Maybe<Video>
+}
+
+export type DeleteResponse = {
+  id: Scalars['ID']
+}
+
+export type DistributionBucket = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  family: DistributionBucketFamily
+  familyId: Scalars['String']
+  /** Bucket index within the family */
+  bucketIndex: Scalars['Int']
+  operators: Array<DistributionBucketOperator>
+  /** Whether the bucket is accepting any new bags */
+  acceptingNewBags: Scalars['Boolean']
+  /** Whether the bucket is currently distributing content */
+  distributing: Scalars['Boolean']
+  bags: Array<StorageBag>
+}
+
+export type DistributionBucketConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketCreateInput = {
+  family: Scalars['ID']
+  bucketIndex: Scalars['Float']
+  acceptingNewBags: Scalars['Boolean']
+  distributing: Scalars['Boolean']
+}
+
+export type DistributionBucketEdge = {
+  node: DistributionBucket
+  cursor: Scalars['String']
+}
+
+export type DistributionBucketFamily = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  metadata?: Maybe<DistributionBucketFamilyMetadata>
+  metadataId?: Maybe<Scalars['String']>
+  buckets: Array<DistributionBucket>
+}
+
+export type DistributionBucketFamilyConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketFamilyEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketFamilyCreateInput = {
+  metadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketFamilyEdge = {
+  node: DistributionBucketFamily
+  cursor: Scalars['String']
+}
+
+export type DistributionBucketFamilyGeographicArea = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Geographical area (continent / country / subdivision) */
+  area: GeographicalArea
+  distributionBucketFamilyMetadata: DistributionBucketFamilyMetadata
+  distributionBucketFamilyMetadataId: Scalars['String']
+}
+
+export type DistributionBucketFamilyGeographicAreaConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketFamilyGeographicAreaEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketFamilyGeographicAreaCreateInput = {
+  area: Scalars['JSONObject']
+  distributionBucketFamilyMetadata: Scalars['ID']
+}
+
+export type DistributionBucketFamilyGeographicAreaEdge = {
+  node: DistributionBucketFamilyGeographicArea
+  cursor: Scalars['String']
+}
+
+export enum DistributionBucketFamilyGeographicAreaOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  DistributionBucketFamilyMetadataAsc = 'distributionBucketFamilyMetadata_ASC',
+  DistributionBucketFamilyMetadataDesc = 'distributionBucketFamilyMetadata_DESC',
+}
+
+export type DistributionBucketFamilyGeographicAreaUpdateInput = {
+  area?: Maybe<Scalars['JSONObject']>
+  distributionBucketFamilyMetadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketFamilyGeographicAreaWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  area_json?: Maybe<Scalars['JSONObject']>
+  distributionBucketFamilyMetadata?: Maybe<DistributionBucketFamilyMetadataWhereInput>
+  AND?: Maybe<Array<DistributionBucketFamilyGeographicAreaWhereInput>>
+  OR?: Maybe<Array<DistributionBucketFamilyGeographicAreaWhereInput>>
+}
+
+export type DistributionBucketFamilyGeographicAreaWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DistributionBucketFamilyMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Name of the geographical region covered by the family (e.g. us-east-1) */
+  region?: Maybe<Scalars['String']>
+  /** Optional, more specific description of the region covered by the family */
+  description?: Maybe<Scalars['String']>
+  areas: Array<DistributionBucketFamilyGeographicArea>
+  /** List of targets (hosts/IPs) best suited for latency measurements for the family */
+  latencyTestTargets?: Maybe<Array<Scalars['String']>>
+  distributionbucketfamilymetadata?: Maybe<Array<DistributionBucketFamily>>
+}
+
+export type DistributionBucketFamilyMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketFamilyMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketFamilyMetadataCreateInput = {
+  region?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  latencyTestTargets?: Maybe<Array<Scalars['String']>>
+}
+
+export type DistributionBucketFamilyMetadataEdge = {
+  node: DistributionBucketFamilyMetadata
+  cursor: Scalars['String']
+}
+
+export enum DistributionBucketFamilyMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  RegionAsc = 'region_ASC',
+  RegionDesc = 'region_DESC',
+  DescriptionAsc = 'description_ASC',
+  DescriptionDesc = 'description_DESC',
+}
+
+export type DistributionBucketFamilyMetadataUpdateInput = {
+  region?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  latencyTestTargets?: Maybe<Array<Scalars['String']>>
+}
+
+export type DistributionBucketFamilyMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  region_eq?: Maybe<Scalars['String']>
+  region_contains?: Maybe<Scalars['String']>
+  region_startsWith?: Maybe<Scalars['String']>
+  region_endsWith?: Maybe<Scalars['String']>
+  region_in?: Maybe<Array<Scalars['String']>>
+  description_eq?: Maybe<Scalars['String']>
+  description_contains?: Maybe<Scalars['String']>
+  description_startsWith?: Maybe<Scalars['String']>
+  description_endsWith?: Maybe<Scalars['String']>
+  description_in?: Maybe<Array<Scalars['String']>>
+  latencyTestTargets_containsAll?: Maybe<Array<Scalars['String']>>
+  latencyTestTargets_containsNone?: Maybe<Array<Scalars['String']>>
+  latencyTestTargets_containsAny?: Maybe<Array<Scalars['String']>>
+  areas_none?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>
+  areas_some?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>
+  areas_every?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>
+  distributionbucketfamilymetadata_none?: Maybe<DistributionBucketFamilyWhereInput>
+  distributionbucketfamilymetadata_some?: Maybe<DistributionBucketFamilyWhereInput>
+  distributionbucketfamilymetadata_every?: Maybe<DistributionBucketFamilyWhereInput>
+  AND?: Maybe<Array<DistributionBucketFamilyMetadataWhereInput>>
+  OR?: Maybe<Array<DistributionBucketFamilyMetadataWhereInput>>
+}
+
+export type DistributionBucketFamilyMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum DistributionBucketFamilyOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  MetadataAsc = 'metadata_ASC',
+  MetadataDesc = 'metadata_DESC',
+}
+
+export type DistributionBucketFamilyUpdateInput = {
+  metadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketFamilyWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  metadata?: Maybe<DistributionBucketFamilyMetadataWhereInput>
+  buckets_none?: Maybe<DistributionBucketWhereInput>
+  buckets_some?: Maybe<DistributionBucketWhereInput>
+  buckets_every?: Maybe<DistributionBucketWhereInput>
+  AND?: Maybe<Array<DistributionBucketFamilyWhereInput>>
+  OR?: Maybe<Array<DistributionBucketFamilyWhereInput>>
+}
+
+export type DistributionBucketFamilyWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DistributionBucketOperator = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  distributionBucket: DistributionBucket
+  distributionBucketId: Scalars['String']
+  /** ID of the distribution group worker */
+  workerId: Scalars['Int']
+  /** Current operator status */
+  status: DistributionBucketOperatorStatus
+  metadata?: Maybe<DistributionBucketOperatorMetadata>
+  metadataId?: Maybe<Scalars['String']>
+}
+
+export type DistributionBucketOperatorConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketOperatorEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketOperatorCreateInput = {
+  distributionBucket: Scalars['ID']
+  workerId: Scalars['Float']
+  status: DistributionBucketOperatorStatus
+  metadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketOperatorEdge = {
+  node: DistributionBucketOperator
+  cursor: Scalars['String']
+}
+
+export type DistributionBucketOperatorMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Root distributor node API endpoint */
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<NodeLocationMetadata>
+  nodeLocationId?: Maybe<Scalars['String']>
+  /** Additional information about the node/operator */
+  extra?: Maybe<Scalars['String']>
+  distributionbucketoperatormetadata?: Maybe<Array<DistributionBucketOperator>>
+}
+
+export type DistributionBucketOperatorMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketOperatorMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketOperatorMetadataCreateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<Scalars['ID']>
+  extra?: Maybe<Scalars['String']>
+}
+
+export type DistributionBucketOperatorMetadataEdge = {
+  node: DistributionBucketOperatorMetadata
+  cursor: Scalars['String']
+}
+
+export enum DistributionBucketOperatorMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NodeEndpointAsc = 'nodeEndpoint_ASC',
+  NodeEndpointDesc = 'nodeEndpoint_DESC',
+  NodeLocationAsc = 'nodeLocation_ASC',
+  NodeLocationDesc = 'nodeLocation_DESC',
+  ExtraAsc = 'extra_ASC',
+  ExtraDesc = 'extra_DESC',
+}
+
+export type DistributionBucketOperatorMetadataUpdateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<Scalars['ID']>
+  extra?: Maybe<Scalars['String']>
+}
+
+export type DistributionBucketOperatorMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  nodeEndpoint_eq?: Maybe<Scalars['String']>
+  nodeEndpoint_contains?: Maybe<Scalars['String']>
+  nodeEndpoint_startsWith?: Maybe<Scalars['String']>
+  nodeEndpoint_endsWith?: Maybe<Scalars['String']>
+  nodeEndpoint_in?: Maybe<Array<Scalars['String']>>
+  extra_eq?: Maybe<Scalars['String']>
+  extra_contains?: Maybe<Scalars['String']>
+  extra_startsWith?: Maybe<Scalars['String']>
+  extra_endsWith?: Maybe<Scalars['String']>
+  extra_in?: Maybe<Array<Scalars['String']>>
+  nodeLocation?: Maybe<NodeLocationMetadataWhereInput>
+  distributionbucketoperatormetadata_none?: Maybe<DistributionBucketOperatorWhereInput>
+  distributionbucketoperatormetadata_some?: Maybe<DistributionBucketOperatorWhereInput>
+  distributionbucketoperatormetadata_every?: Maybe<DistributionBucketOperatorWhereInput>
+  AND?: Maybe<Array<DistributionBucketOperatorMetadataWhereInput>>
+  OR?: Maybe<Array<DistributionBucketOperatorMetadataWhereInput>>
+}
+
+export type DistributionBucketOperatorMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum DistributionBucketOperatorOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  DistributionBucketAsc = 'distributionBucket_ASC',
+  DistributionBucketDesc = 'distributionBucket_DESC',
+  WorkerIdAsc = 'workerId_ASC',
+  WorkerIdDesc = 'workerId_DESC',
+  StatusAsc = 'status_ASC',
+  StatusDesc = 'status_DESC',
+  MetadataAsc = 'metadata_ASC',
+  MetadataDesc = 'metadata_DESC',
+}
+
+export enum DistributionBucketOperatorStatus {
+  Invited = 'INVITED',
+  Active = 'ACTIVE',
+}
+
+export type DistributionBucketOperatorUpdateInput = {
+  distributionBucket?: Maybe<Scalars['ID']>
+  workerId?: Maybe<Scalars['Float']>
+  status?: Maybe<DistributionBucketOperatorStatus>
+  metadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketOperatorWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  workerId_eq?: Maybe<Scalars['Int']>
+  workerId_gt?: Maybe<Scalars['Int']>
+  workerId_gte?: Maybe<Scalars['Int']>
+  workerId_lt?: Maybe<Scalars['Int']>
+  workerId_lte?: Maybe<Scalars['Int']>
+  workerId_in?: Maybe<Array<Scalars['Int']>>
+  status_eq?: Maybe<DistributionBucketOperatorStatus>
+  status_in?: Maybe<Array<DistributionBucketOperatorStatus>>
+  distributionBucket?: Maybe<DistributionBucketWhereInput>
+  metadata?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  AND?: Maybe<Array<DistributionBucketOperatorWhereInput>>
+  OR?: Maybe<Array<DistributionBucketOperatorWhereInput>>
+}
+
+export type DistributionBucketOperatorWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum DistributionBucketOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  FamilyAsc = 'family_ASC',
+  FamilyDesc = 'family_DESC',
+  BucketIndexAsc = 'bucketIndex_ASC',
+  BucketIndexDesc = 'bucketIndex_DESC',
+  AcceptingNewBagsAsc = 'acceptingNewBags_ASC',
+  AcceptingNewBagsDesc = 'acceptingNewBags_DESC',
+  DistributingAsc = 'distributing_ASC',
+  DistributingDesc = 'distributing_DESC',
+}
+
+export type DistributionBucketUpdateInput = {
+  family?: Maybe<Scalars['ID']>
+  bucketIndex?: Maybe<Scalars['Float']>
+  acceptingNewBags?: Maybe<Scalars['Boolean']>
+  distributing?: Maybe<Scalars['Boolean']>
+}
+
+export type DistributionBucketWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  bucketIndex_eq?: Maybe<Scalars['Int']>
+  bucketIndex_gt?: Maybe<Scalars['Int']>
+  bucketIndex_gte?: Maybe<Scalars['Int']>
+  bucketIndex_lt?: Maybe<Scalars['Int']>
+  bucketIndex_lte?: Maybe<Scalars['Int']>
+  bucketIndex_in?: Maybe<Array<Scalars['Int']>>
+  acceptingNewBags_eq?: Maybe<Scalars['Boolean']>
+  acceptingNewBags_in?: Maybe<Array<Scalars['Boolean']>>
+  distributing_eq?: Maybe<Scalars['Boolean']>
+  distributing_in?: Maybe<Array<Scalars['Boolean']>>
+  family?: Maybe<DistributionBucketFamilyWhereInput>
+  operators_none?: Maybe<DistributionBucketOperatorWhereInput>
+  operators_some?: Maybe<DistributionBucketOperatorWhereInput>
+  operators_every?: Maybe<DistributionBucketOperatorWhereInput>
+  bags_none?: Maybe<StorageBagWhereInput>
+  bags_some?: Maybe<StorageBagWhereInput>
+  bags_every?: Maybe<StorageBagWhereInput>
+  AND?: Maybe<Array<DistributionBucketWhereInput>>
+  OR?: Maybe<Array<DistributionBucketWhereInput>>
+}
+
+export type DistributionBucketWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type GeoCoordinates = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  latitude: Scalars['Float']
+  longitude: Scalars['Float']
+  nodelocationmetadatacoordinates?: Maybe<Array<NodeLocationMetadata>>
+}
+
+export type GeoCoordinatesConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<GeoCoordinatesEdge>
+  pageInfo: PageInfo
+}
+
+export type GeoCoordinatesCreateInput = {
+  latitude: Scalars['Float']
+  longitude: Scalars['Float']
+}
+
+export type GeoCoordinatesEdge = {
+  node: GeoCoordinates
+  cursor: Scalars['String']
+}
+
+export enum GeoCoordinatesOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  LatitudeAsc = 'latitude_ASC',
+  LatitudeDesc = 'latitude_DESC',
+  LongitudeAsc = 'longitude_ASC',
+  LongitudeDesc = 'longitude_DESC',
+}
+
+export type GeoCoordinatesUpdateInput = {
+  latitude?: Maybe<Scalars['Float']>
+  longitude?: Maybe<Scalars['Float']>
+}
+
+export type GeoCoordinatesWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  latitude_eq?: Maybe<Scalars['Float']>
+  latitude_gt?: Maybe<Scalars['Float']>
+  latitude_gte?: Maybe<Scalars['Float']>
+  latitude_lt?: Maybe<Scalars['Float']>
+  latitude_lte?: Maybe<Scalars['Float']>
+  latitude_in?: Maybe<Array<Scalars['Float']>>
+  longitude_eq?: Maybe<Scalars['Float']>
+  longitude_gt?: Maybe<Scalars['Float']>
+  longitude_gte?: Maybe<Scalars['Float']>
+  longitude_lt?: Maybe<Scalars['Float']>
+  longitude_lte?: Maybe<Scalars['Float']>
+  longitude_in?: Maybe<Array<Scalars['Float']>>
+  nodelocationmetadatacoordinates_none?: Maybe<NodeLocationMetadataWhereInput>
+  nodelocationmetadatacoordinates_some?: Maybe<NodeLocationMetadataWhereInput>
+  nodelocationmetadatacoordinates_every?: Maybe<NodeLocationMetadataWhereInput>
+  AND?: Maybe<Array<GeoCoordinatesWhereInput>>
+  OR?: Maybe<Array<GeoCoordinatesWhereInput>>
+}
+
+export type GeoCoordinatesWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type GeographicalArea = GeographicalAreaContinent | GeographicalAreaCountry | GeographicalAreaSubdivistion
+
+export type GeographicalAreaContinent = {
+  code?: Maybe<Continent>
+}
+
+export type GeographicalAreaContinentCreateInput = {
+  code?: Maybe<Continent>
+}
+
+export type GeographicalAreaContinentUpdateInput = {
+  code?: Maybe<Continent>
+}
+
+export type GeographicalAreaContinentWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  code_eq?: Maybe<Continent>
+  code_in?: Maybe<Array<Continent>>
+  AND?: Maybe<Array<GeographicalAreaContinentWhereInput>>
+  OR?: Maybe<Array<GeographicalAreaContinentWhereInput>>
+}
+
+export type GeographicalAreaContinentWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type GeographicalAreaCountry = {
+  /** ISO 3166-1 alpha-2 country code */
+  code?: Maybe<Scalars['String']>
+}
+
+export type GeographicalAreaSubdivistion = {
+  /** ISO 3166-2 subdivision code */
+  code?: Maybe<Scalars['String']>
+}
+
+export type Language = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Language identifier ISO 639-1 */
+  iso: Scalars['String']
+  createdInBlock: Scalars['Int']
+  channellanguage?: Maybe<Array<Channel>>
+  videolanguage?: Maybe<Array<Video>>
+}
+
+export type LanguageConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<LanguageEdge>
+  pageInfo: PageInfo
+}
+
+export type LanguageCreateInput = {
+  iso: Scalars['String']
+  createdInBlock: Scalars['Float']
+}
+
+export type LanguageEdge = {
+  node: Language
+  cursor: Scalars['String']
+}
+
+export enum LanguageOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsoAsc = 'iso_ASC',
+  IsoDesc = 'iso_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type LanguageUpdateInput = {
+  iso?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type LanguageWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  iso_eq?: Maybe<Scalars['String']>
+  iso_contains?: Maybe<Scalars['String']>
+  iso_startsWith?: Maybe<Scalars['String']>
+  iso_endsWith?: Maybe<Scalars['String']>
+  iso_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  channellanguage_none?: Maybe<ChannelWhereInput>
+  channellanguage_some?: Maybe<ChannelWhereInput>
+  channellanguage_every?: Maybe<ChannelWhereInput>
+  videolanguage_none?: Maybe<VideoWhereInput>
+  videolanguage_some?: Maybe<VideoWhereInput>
+  videolanguage_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<LanguageWhereInput>>
+  OR?: Maybe<Array<LanguageWhereInput>>
+}
+
+export type LanguageWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type License = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** License code defined by Joystream */
+  code?: Maybe<Scalars['Int']>
+  /** Attribution (if required by the license) */
+  attribution?: Maybe<Scalars['String']>
+  /** Custom license content */
+  customText?: Maybe<Scalars['String']>
+  videolicense?: Maybe<Array<Video>>
+}
+
+export type LicenseConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<LicenseEdge>
+  pageInfo: PageInfo
+}
+
+export type LicenseCreateInput = {
+  code?: Maybe<Scalars['Float']>
+  attribution?: Maybe<Scalars['String']>
+  customText?: Maybe<Scalars['String']>
+}
+
+export type LicenseEdge = {
+  node: License
+  cursor: Scalars['String']
+}
+
+export enum LicenseOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CodeAsc = 'code_ASC',
+  CodeDesc = 'code_DESC',
+  AttributionAsc = 'attribution_ASC',
+  AttributionDesc = 'attribution_DESC',
+  CustomTextAsc = 'customText_ASC',
+  CustomTextDesc = 'customText_DESC',
+}
+
+export type LicenseUpdateInput = {
+  code?: Maybe<Scalars['Float']>
+  attribution?: Maybe<Scalars['String']>
+  customText?: Maybe<Scalars['String']>
+}
+
+export type LicenseWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  code_eq?: Maybe<Scalars['Int']>
+  code_gt?: Maybe<Scalars['Int']>
+  code_gte?: Maybe<Scalars['Int']>
+  code_lt?: Maybe<Scalars['Int']>
+  code_lte?: Maybe<Scalars['Int']>
+  code_in?: Maybe<Array<Scalars['Int']>>
+  attribution_eq?: Maybe<Scalars['String']>
+  attribution_contains?: Maybe<Scalars['String']>
+  attribution_startsWith?: Maybe<Scalars['String']>
+  attribution_endsWith?: Maybe<Scalars['String']>
+  attribution_in?: Maybe<Array<Scalars['String']>>
+  customText_eq?: Maybe<Scalars['String']>
+  customText_contains?: Maybe<Scalars['String']>
+  customText_startsWith?: Maybe<Scalars['String']>
+  customText_endsWith?: Maybe<Scalars['String']>
+  customText_in?: Maybe<Array<Scalars['String']>>
+  videolicense_none?: Maybe<VideoWhereInput>
+  videolicense_some?: Maybe<VideoWhereInput>
+  videolicense_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<LicenseWhereInput>>
+  OR?: Maybe<Array<LicenseWhereInput>>
+}
+
+export type LicenseWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type MembersByHandleFtsOutput = {
+  item: MembersByHandleSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type MembersByHandleSearchResult = Membership
+
+/** Stored information about a registered user */
+export type Membership = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** The unique handle chosen by the member */
+  handle: Scalars['String']
+  /** A URL to the member's avatar image */
+  avatarUri?: Maybe<Scalars['String']>
+  /** Short text chosen by the member to share information about themselves */
+  about?: Maybe<Scalars['String']>
+  /** Member's controller account id */
+  controllerAccount: Scalars['String']
+  /** Member's root account id */
+  rootAccount: Scalars['String']
+  /** Block number when the member was registered */
+  createdInBlock: Scalars['Int']
+  /** How the member was registered */
+  entry: MembershipEntryMethod
+  /** The type of subscription the member has purchased, if any */
+  subscription?: Maybe<Scalars['Int']>
+  channels: Array<Channel>
+  collaboratorInChannels: Array<Channel>
+}
+
+export type MembershipConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<MembershipEdge>
+  pageInfo: PageInfo
+}
+
+export type MembershipCreateInput = {
+  handle: Scalars['String']
+  avatarUri?: Maybe<Scalars['String']>
+  about?: Maybe<Scalars['String']>
+  controllerAccount: Scalars['String']
+  rootAccount: Scalars['String']
+  createdInBlock: Scalars['Float']
+  entry: MembershipEntryMethod
+  subscription?: Maybe<Scalars['Float']>
+}
+
+export type MembershipEdge = {
+  node: Membership
+  cursor: Scalars['String']
+}
+
+export enum MembershipEntryMethod {
+  Paid = 'PAID',
+  Screening = 'SCREENING',
+  Genesis = 'GENESIS',
+}
+
+export enum MembershipOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  HandleAsc = 'handle_ASC',
+  HandleDesc = 'handle_DESC',
+  AvatarUriAsc = 'avatarUri_ASC',
+  AvatarUriDesc = 'avatarUri_DESC',
+  AboutAsc = 'about_ASC',
+  AboutDesc = 'about_DESC',
+  ControllerAccountAsc = 'controllerAccount_ASC',
+  ControllerAccountDesc = 'controllerAccount_DESC',
+  RootAccountAsc = 'rootAccount_ASC',
+  RootAccountDesc = 'rootAccount_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+  EntryAsc = 'entry_ASC',
+  EntryDesc = 'entry_DESC',
+  SubscriptionAsc = 'subscription_ASC',
+  SubscriptionDesc = 'subscription_DESC',
+}
+
+export type MembershipUpdateInput = {
+  handle?: Maybe<Scalars['String']>
+  avatarUri?: Maybe<Scalars['String']>
+  about?: Maybe<Scalars['String']>
+  controllerAccount?: Maybe<Scalars['String']>
+  rootAccount?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+  entry?: Maybe<MembershipEntryMethod>
+  subscription?: Maybe<Scalars['Float']>
+}
+
+export type MembershipWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  handle_eq?: Maybe<Scalars['String']>
+  handle_contains?: Maybe<Scalars['String']>
+  handle_startsWith?: Maybe<Scalars['String']>
+  handle_endsWith?: Maybe<Scalars['String']>
+  handle_in?: Maybe<Array<Scalars['String']>>
+  avatarUri_eq?: Maybe<Scalars['String']>
+  avatarUri_contains?: Maybe<Scalars['String']>
+  avatarUri_startsWith?: Maybe<Scalars['String']>
+  avatarUri_endsWith?: Maybe<Scalars['String']>
+  avatarUri_in?: Maybe<Array<Scalars['String']>>
+  about_eq?: Maybe<Scalars['String']>
+  about_contains?: Maybe<Scalars['String']>
+  about_startsWith?: Maybe<Scalars['String']>
+  about_endsWith?: Maybe<Scalars['String']>
+  about_in?: Maybe<Array<Scalars['String']>>
+  controllerAccount_eq?: Maybe<Scalars['String']>
+  controllerAccount_contains?: Maybe<Scalars['String']>
+  controllerAccount_startsWith?: Maybe<Scalars['String']>
+  controllerAccount_endsWith?: Maybe<Scalars['String']>
+  controllerAccount_in?: Maybe<Array<Scalars['String']>>
+  rootAccount_eq?: Maybe<Scalars['String']>
+  rootAccount_contains?: Maybe<Scalars['String']>
+  rootAccount_startsWith?: Maybe<Scalars['String']>
+  rootAccount_endsWith?: Maybe<Scalars['String']>
+  rootAccount_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  entry_eq?: Maybe<MembershipEntryMethod>
+  entry_in?: Maybe<Array<MembershipEntryMethod>>
+  subscription_eq?: Maybe<Scalars['Int']>
+  subscription_gt?: Maybe<Scalars['Int']>
+  subscription_gte?: Maybe<Scalars['Int']>
+  subscription_lt?: Maybe<Scalars['Int']>
+  subscription_lte?: Maybe<Scalars['Int']>
+  subscription_in?: Maybe<Array<Scalars['Int']>>
+  channels_none?: Maybe<ChannelWhereInput>
+  channels_some?: Maybe<ChannelWhereInput>
+  channels_every?: Maybe<ChannelWhereInput>
+  collaboratorInChannels_none?: Maybe<ChannelWhereInput>
+  collaboratorInChannels_some?: Maybe<ChannelWhereInput>
+  collaboratorInChannels_every?: Maybe<ChannelWhereInput>
+  AND?: Maybe<Array<MembershipWhereInput>>
+  OR?: Maybe<Array<MembershipWhereInput>>
+}
+
+export type MembershipWhereUniqueInput = {
+  id?: Maybe<Scalars['ID']>
+  handle?: Maybe<Scalars['String']>
+}
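+
+// Editor's note (illustrative sketch, not generated code): the generated
+// where-inputs compose with AND/OR. For example, members registered after
+// block 100 whose handle contains 'joy', or any member that entered at
+// genesis:
+export const exampleMembershipFilter: MembershipWhereInput = {
+  OR: [
+    { AND: [{ createdInBlock_gt: 100 }, { handle_contains: 'joy' }] },
+    { entry_eq: MembershipEntryMethod.Genesis },
+  ],
+}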
+
+export type NodeLocationMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** ISO 3166-1 alpha-2 country code (2 letters) */
+  countryCode?: Maybe<Scalars['String']>
+  /** City name */
+  city?: Maybe<Scalars['String']>
+  coordinates?: Maybe<GeoCoordinates>
+  coordinatesId?: Maybe<Scalars['String']>
+  distributionbucketoperatormetadatanodeLocation?: Maybe<Array<DistributionBucketOperatorMetadata>>
+  storagebucketoperatormetadatanodeLocation?: Maybe<Array<StorageBucketOperatorMetadata>>
+}
+
+export type NodeLocationMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<NodeLocationMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type NodeLocationMetadataCreateInput = {
+  countryCode?: Maybe<Scalars['String']>
+  city?: Maybe<Scalars['String']>
+  coordinates?: Maybe<Scalars['ID']>
+}
+
+export type NodeLocationMetadataEdge = {
+  node: NodeLocationMetadata
+  cursor: Scalars['String']
+}
+
+export enum NodeLocationMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CountryCodeAsc = 'countryCode_ASC',
+  CountryCodeDesc = 'countryCode_DESC',
+  CityAsc = 'city_ASC',
+  CityDesc = 'city_DESC',
+  CoordinatesAsc = 'coordinates_ASC',
+  CoordinatesDesc = 'coordinates_DESC',
+}
+
+export type NodeLocationMetadataUpdateInput = {
+  countryCode?: Maybe<Scalars['String']>
+  city?: Maybe<Scalars['String']>
+  coordinates?: Maybe<Scalars['ID']>
+}
+
+export type NodeLocationMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  countryCode_eq?: Maybe<Scalars['String']>
+  countryCode_contains?: Maybe<Scalars['String']>
+  countryCode_startsWith?: Maybe<Scalars['String']>
+  countryCode_endsWith?: Maybe<Scalars['String']>
+  countryCode_in?: Maybe<Array<Scalars['String']>>
+  city_eq?: Maybe<Scalars['String']>
+  city_contains?: Maybe<Scalars['String']>
+  city_startsWith?: Maybe<Scalars['String']>
+  city_endsWith?: Maybe<Scalars['String']>
+  city_in?: Maybe<Array<Scalars['String']>>
+  coordinates?: Maybe<GeoCoordinatesWhereInput>
+  distributionbucketoperatormetadatanodeLocation_none?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  distributionbucketoperatormetadatanodeLocation_some?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  distributionbucketoperatormetadatanodeLocation_every?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  storagebucketoperatormetadatanodeLocation_none?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  storagebucketoperatormetadatanodeLocation_some?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  storagebucketoperatormetadatanodeLocation_every?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  AND?: Maybe<Array<NodeLocationMetadataWhereInput>>
+  OR?: Maybe<Array<NodeLocationMetadataWhereInput>>
+}
+
+export type NodeLocationMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type PageInfo = {
+  hasNextPage: Scalars['Boolean']
+  hasPreviousPage: Scalars['Boolean']
+  startCursor?: Maybe<Scalars['String']>
+  endCursor?: Maybe<Scalars['String']>
+}
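+
+// Editor's note (illustrative sketch, not generated code): the *Connection,
+// *Edge and PageInfo types above follow the Relay cursor-pagination
+// convention. A generic paginator over any such connection could look like
+// this; `fetchPage` is a hypothetical transport that returns one page.
+export async function* paginate<T>(
+  fetchPage: (after?: string) => Promise<{ edges: Array<{ node: T; cursor: string }>; pageInfo: PageInfo }>
+): AsyncGenerator<T> {
+  let after: string | undefined
+  for (;;) {
+    const { edges, pageInfo } = await fetchPage(after)
+    for (const edge of edges) yield edge.node
+    if (!pageInfo.hasNextPage || !pageInfo.endCursor) break
+    after = pageInfo.endCursor
+  }
+}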
+
+export type ProcessorState = {
+  lastCompleteBlock: Scalars['Float']
+  lastProcessedEvent: Scalars['String']
+  indexerHead: Scalars['Float']
+  chainHead: Scalars['Float']
+}
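+
+// Editor's note (illustrative sketch, not generated code): ProcessorState
+// describes how far the query node has caught up with the chain; a simple
+// health check can compare the heads.
+export function processorLag(state: ProcessorState): { indexerBehindChain: number; processorBehindIndexer: number } {
+  return {
+    indexerBehindChain: state.chainHead - state.indexerHead,
+    processorBehindIndexer: state.indexerHead - state.lastCompleteBlock,
+  }
+}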
+
+export type Query = {
+  channelCategories: Array<ChannelCategory>
+  channelCategoryByUniqueInput?: Maybe<ChannelCategory>
+  channelCategoriesConnection: ChannelCategoryConnection
+  channels: Array<Channel>
+  channelByUniqueInput?: Maybe<Channel>
+  channelsConnection: ChannelConnection
+  curatorGroups: Array<CuratorGroup>
+  curatorGroupByUniqueInput?: Maybe<CuratorGroup>
+  curatorGroupsConnection: CuratorGroupConnection
+  distributionBucketFamilyGeographicAreas: Array<DistributionBucketFamilyGeographicArea>
+  distributionBucketFamilyGeographicAreaByUniqueInput?: Maybe<DistributionBucketFamilyGeographicArea>
+  distributionBucketFamilyGeographicAreasConnection: DistributionBucketFamilyGeographicAreaConnection
+  distributionBucketFamilyMetadata: Array<DistributionBucketFamilyMetadata>
+  distributionBucketFamilyMetadataByUniqueInput?: Maybe<DistributionBucketFamilyMetadata>
+  distributionBucketFamilyMetadataConnection: DistributionBucketFamilyMetadataConnection
+  distributionBucketFamilies: Array<DistributionBucketFamily>
+  distributionBucketFamilyByUniqueInput?: Maybe<DistributionBucketFamily>
+  distributionBucketFamiliesConnection: DistributionBucketFamilyConnection
+  distributionBucketOperatorMetadata: Array<DistributionBucketOperatorMetadata>
+  distributionBucketOperatorMetadataByUniqueInput?: Maybe<DistributionBucketOperatorMetadata>
+  distributionBucketOperatorMetadataConnection: DistributionBucketOperatorMetadataConnection
+  distributionBucketOperators: Array<DistributionBucketOperator>
+  distributionBucketOperatorByUniqueInput?: Maybe<DistributionBucketOperator>
+  distributionBucketOperatorsConnection: DistributionBucketOperatorConnection
+  distributionBuckets: Array<DistributionBucket>
+  distributionBucketByUniqueInput?: Maybe<DistributionBucket>
+  distributionBucketsConnection: DistributionBucketConnection
+  geoCoordinates: Array<GeoCoordinates>
+  geoCoordinatesByUniqueInput?: Maybe<GeoCoordinates>
+  geoCoordinatesConnection: GeoCoordinatesConnection
+  languages: Array<Language>
+  languageByUniqueInput?: Maybe<Language>
+  languagesConnection: LanguageConnection
+  licenses: Array<License>
+  licenseByUniqueInput?: Maybe<License>
+  licensesConnection: LicenseConnection
+  memberships: Array<Membership>
+  membershipByUniqueInput?: Maybe<Membership>
+  membershipsConnection: MembershipConnection
+  nodeLocationMetadata: Array<NodeLocationMetadata>
+  nodeLocationMetadataByUniqueInput?: Maybe<NodeLocationMetadata>
+  nodeLocationMetadataConnection: NodeLocationMetadataConnection
+  channelCategoriesByName: Array<ChannelCategoriesByNameFtsOutput>
+  membersByHandle: Array<MembersByHandleFtsOutput>
+  search: Array<SearchFtsOutput>
+  videoCategoriesByName: Array<VideoCategoriesByNameFtsOutput>
+  storageBags: Array<StorageBag>
+  storageBagByUniqueInput?: Maybe<StorageBag>
+  storageBagsConnection: StorageBagConnection
+  storageBucketOperatorMetadata: Array<StorageBucketOperatorMetadata>
+  storageBucketOperatorMetadataByUniqueInput?: Maybe<StorageBucketOperatorMetadata>
+  storageBucketOperatorMetadataConnection: StorageBucketOperatorMetadataConnection
+  storageBuckets: Array<StorageBucket>
+  storageBucketByUniqueInput?: Maybe<StorageBucket>
+  storageBucketsConnection: StorageBucketConnection
+  storageDataObjects: Array<StorageDataObject>
+  storageDataObjectByUniqueInput?: Maybe<StorageDataObject>
+  storageDataObjectsConnection: StorageDataObjectConnection
+  storageSystemParameters: Array<StorageSystemParameters>
+  storageSystemParametersByUniqueInput?: Maybe<StorageSystemParameters>
+  storageSystemParametersConnection: StorageSystemParametersConnection
+  videoCategories: Array<VideoCategory>
+  videoCategoryByUniqueInput?: Maybe<VideoCategory>
+  videoCategoriesConnection: VideoCategoryConnection
+  videoMediaEncodings: Array<VideoMediaEncoding>
+  videoMediaEncodingByUniqueInput?: Maybe<VideoMediaEncoding>
+  videoMediaEncodingsConnection: VideoMediaEncodingConnection
+  videoMediaMetadata: Array<VideoMediaMetadata>
+  videoMediaMetadataByUniqueInput?: Maybe<VideoMediaMetadata>
+  videoMediaMetadataConnection: VideoMediaMetadataConnection
+  videos: Array<Video>
+  videoByUniqueInput?: Maybe<Video>
+  videosConnection: VideoConnection
+  workers: Array<Worker>
+  workerByUniqueInput?: Maybe<Worker>
+  workersConnection: WorkerConnection
+}
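+
+// Editor's note (illustrative sketch, not generated code): one way to call a
+// root field of the Query type above with plain fetch. The endpoint URL and
+// the variable nullability in the query document are assumptions.
+export async function fetchMemberships(endpoint: string): Promise<Membership[]> {
+  const query = `query ($where: MembershipWhereInput, $orderBy: [MembershipOrderByInput!]) {
+    memberships(where: $where, orderBy: $orderBy, limit: 5) { id handle createdInBlock }
+  }`
+  // Enum members serialize to their string values, e.g. 'createdInBlock_DESC'
+  const variables: { where: MembershipWhereInput; orderBy: MembershipOrderByInput[] } = {
+    where: { handle_startsWith: 'alice' },
+    orderBy: [MembershipOrderByInput.CreatedInBlockDesc],
+  }
+  const response = await fetch(endpoint, {
+    method: 'POST',
+    headers: { 'Content-Type': 'application/json' },
+    body: JSON.stringify({ query, variables }),
+  })
+  const { data } = await response.json()
+  return data.memberships as Membership[]
+}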
+
+export type QueryChannelCategoriesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<ChannelCategoryWhereInput>
+  orderBy?: Maybe<Array<ChannelCategoryOrderByInput>>
+}
+
+export type QueryChannelCategoryByUniqueInputArgs = {
+  where: ChannelCategoryWhereUniqueInput
+}
+
+export type QueryChannelCategoriesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<ChannelCategoryWhereInput>
+  orderBy?: Maybe<Array<ChannelCategoryOrderByInput>>
+}
+
+export type QueryChannelsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<ChannelWhereInput>
+  orderBy?: Maybe<Array<ChannelOrderByInput>>
+}
+
+export type QueryChannelByUniqueInputArgs = {
+  where: ChannelWhereUniqueInput
+}
+
+export type QueryChannelsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<ChannelWhereInput>
+  orderBy?: Maybe<Array<ChannelOrderByInput>>
+}
+
+export type QueryCuratorGroupsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<CuratorGroupWhereInput>
+  orderBy?: Maybe<Array<CuratorGroupOrderByInput>>
+}
+
+export type QueryCuratorGroupByUniqueInputArgs = {
+  where: CuratorGroupWhereUniqueInput
+}
+
+export type QueryCuratorGroupsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<CuratorGroupWhereInput>
+  orderBy?: Maybe<Array<CuratorGroupOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyGeographicAreasArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyGeographicAreaOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyGeographicAreaByUniqueInputArgs = {
+  where: DistributionBucketFamilyGeographicAreaWhereUniqueInput
+}
+
+export type QueryDistributionBucketFamilyGeographicAreasConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyGeographicAreaOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketFamilyMetadataWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyMetadataOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyMetadataByUniqueInputArgs = {
+  where: DistributionBucketFamilyMetadataWhereUniqueInput
+}
+
+export type QueryDistributionBucketFamilyMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketFamilyMetadataWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyMetadataOrderByInput>>
+}
+
+export type QueryDistributionBucketFamiliesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketFamilyWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyByUniqueInputArgs = {
+  where: DistributionBucketFamilyWhereUniqueInput
+}
+
+export type QueryDistributionBucketFamiliesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketFamilyWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyOrderByInput>>
+}
+
+export type QueryDistributionBucketOperatorMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOperatorMetadataOrderByInput>>
+}
+
+export type QueryDistributionBucketOperatorMetadataByUniqueInputArgs = {
+  where: DistributionBucketOperatorMetadataWhereUniqueInput
+}
+
+export type QueryDistributionBucketOperatorMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOperatorMetadataOrderByInput>>
+}
+
+export type QueryDistributionBucketOperatorsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketOperatorWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOperatorOrderByInput>>
+}
+
+export type QueryDistributionBucketOperatorByUniqueInputArgs = {
+  where: DistributionBucketOperatorWhereUniqueInput
+}
+
+export type QueryDistributionBucketOperatorsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketOperatorWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOperatorOrderByInput>>
+}
+
+export type QueryDistributionBucketsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOrderByInput>>
+}
+
+export type QueryDistributionBucketByUniqueInputArgs = {
+  where: DistributionBucketWhereUniqueInput
+}
+
+export type QueryDistributionBucketsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOrderByInput>>
+}
+
+export type QueryGeoCoordinatesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<GeoCoordinatesWhereInput>
+  orderBy?: Maybe<Array<GeoCoordinatesOrderByInput>>
+}
+
+export type QueryGeoCoordinatesByUniqueInputArgs = {
+  where: GeoCoordinatesWhereUniqueInput
+}
+
+export type QueryGeoCoordinatesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<GeoCoordinatesWhereInput>
+  orderBy?: Maybe<Array<GeoCoordinatesOrderByInput>>
+}
+
+export type QueryLanguagesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<LanguageWhereInput>
+  orderBy?: Maybe<Array<LanguageOrderByInput>>
+}
+
+export type QueryLanguageByUniqueInputArgs = {
+  where: LanguageWhereUniqueInput
+}
+
+export type QueryLanguagesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<LanguageWhereInput>
+  orderBy?: Maybe<Array<LanguageOrderByInput>>
+}
+
+export type QueryLicensesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<LicenseWhereInput>
+  orderBy?: Maybe<Array<LicenseOrderByInput>>
+}
+
+export type QueryLicenseByUniqueInputArgs = {
+  where: LicenseWhereUniqueInput
+}
+
+export type QueryLicensesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<LicenseWhereInput>
+  orderBy?: Maybe<Array<LicenseOrderByInput>>
+}
+
+export type QueryMembershipsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<MembershipWhereInput>
+  orderBy?: Maybe<Array<MembershipOrderByInput>>
+}
+
+export type QueryMembershipByUniqueInputArgs = {
+  where: MembershipWhereUniqueInput
+}
+
+export type QueryMembershipsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<MembershipWhereInput>
+  orderBy?: Maybe<Array<MembershipOrderByInput>>
+}
+
+export type QueryNodeLocationMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<NodeLocationMetadataWhereInput>
+  orderBy?: Maybe<Array<NodeLocationMetadataOrderByInput>>
+}
+
+export type QueryNodeLocationMetadataByUniqueInputArgs = {
+  where: NodeLocationMetadataWhereUniqueInput
+}
+
+export type QueryNodeLocationMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<NodeLocationMetadataWhereInput>
+  orderBy?: Maybe<Array<NodeLocationMetadataOrderByInput>>
+}
+
+export type QueryChannelCategoriesByNameArgs = {
+  whereChannelCategory?: Maybe<ChannelCategoryWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
+
+export type QueryMembersByHandleArgs = {
+  whereMembership?: Maybe<MembershipWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
+
+export type QuerySearchArgs = {
+  whereVideo?: Maybe<VideoWhereInput>
+  whereChannel?: Maybe<ChannelWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
+
+export type QueryVideoCategoriesByNameArgs = {
+  whereVideoCategory?: Maybe<VideoCategoryWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
+
+export type QueryStorageBagsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageBagWhereInput>
+  orderBy?: Maybe<Array<StorageBagOrderByInput>>
+}
+
+export type QueryStorageBagByUniqueInputArgs = {
+  where: StorageBagWhereUniqueInput
+}
+
+export type QueryStorageBagsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageBagWhereInput>
+  orderBy?: Maybe<Array<StorageBagOrderByInput>>
+}
+
+export type QueryStorageBucketOperatorMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  orderBy?: Maybe<Array<StorageBucketOperatorMetadataOrderByInput>>
+}
+
+export type QueryStorageBucketOperatorMetadataByUniqueInputArgs = {
+  where: StorageBucketOperatorMetadataWhereUniqueInput
+}
+
+export type QueryStorageBucketOperatorMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  orderBy?: Maybe<Array<StorageBucketOperatorMetadataOrderByInput>>
+}
+
+export type QueryStorageBucketsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageBucketWhereInput>
+  orderBy?: Maybe<Array<StorageBucketOrderByInput>>
+}
+
+export type QueryStorageBucketByUniqueInputArgs = {
+  where: StorageBucketWhereUniqueInput
+}
+
+export type QueryStorageBucketsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageBucketWhereInput>
+  orderBy?: Maybe<Array<StorageBucketOrderByInput>>
+}
+
+export type QueryStorageDataObjectsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageDataObjectWhereInput>
+  orderBy?: Maybe<Array<StorageDataObjectOrderByInput>>
+}
+
+export type QueryStorageDataObjectByUniqueInputArgs = {
+  where: StorageDataObjectWhereUniqueInput
+}
+
+export type QueryStorageDataObjectsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageDataObjectWhereInput>
+  orderBy?: Maybe<Array<StorageDataObjectOrderByInput>>
+}
+
+export type QueryStorageSystemParametersArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageSystemParametersWhereInput>
+  orderBy?: Maybe<Array<StorageSystemParametersOrderByInput>>
+}
+
+export type QueryStorageSystemParametersByUniqueInputArgs = {
+  where: StorageSystemParametersWhereUniqueInput
+}
+
+export type QueryStorageSystemParametersConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageSystemParametersWhereInput>
+  orderBy?: Maybe<Array<StorageSystemParametersOrderByInput>>
+}
+
+export type QueryVideoCategoriesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoCategoryWhereInput>
+  orderBy?: Maybe<Array<VideoCategoryOrderByInput>>
+}
+
+export type QueryVideoCategoryByUniqueInputArgs = {
+  where: VideoCategoryWhereUniqueInput
+}
+
+export type QueryVideoCategoriesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoCategoryWhereInput>
+  orderBy?: Maybe<Array<VideoCategoryOrderByInput>>
+}
+
+export type QueryVideoMediaEncodingsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoMediaEncodingWhereInput>
+  orderBy?: Maybe<Array<VideoMediaEncodingOrderByInput>>
+}
+
+export type QueryVideoMediaEncodingByUniqueInputArgs = {
+  where: VideoMediaEncodingWhereUniqueInput
+}
+
+export type QueryVideoMediaEncodingsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoMediaEncodingWhereInput>
+  orderBy?: Maybe<Array<VideoMediaEncodingOrderByInput>>
+}
+
+export type QueryVideoMediaMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoMediaMetadataWhereInput>
+  orderBy?: Maybe<Array<VideoMediaMetadataOrderByInput>>
+}
+
+export type QueryVideoMediaMetadataByUniqueInputArgs = {
+  where: VideoMediaMetadataWhereUniqueInput
+}
+
+export type QueryVideoMediaMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoMediaMetadataWhereInput>
+  orderBy?: Maybe<Array<VideoMediaMetadataOrderByInput>>
+}
+
+export type QueryVideosArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoWhereInput>
+  orderBy?: Maybe<Array<VideoOrderByInput>>
+}
+
+export type QueryVideoByUniqueInputArgs = {
+  where: VideoWhereUniqueInput
+}
+
+export type QueryVideosConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoWhereInput>
+  orderBy?: Maybe<Array<VideoOrderByInput>>
+}
+
+export type QueryWorkersArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<WorkerWhereInput>
+  orderBy?: Maybe<Array<WorkerOrderByInput>>
+}
+
+export type QueryWorkerByUniqueInputArgs = {
+  where: WorkerWhereUniqueInput
+}
+
+export type QueryWorkersConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<WorkerWhereInput>
+  orderBy?: Maybe<Array<WorkerOrderByInput>>
+}
+
+export type SearchFtsOutput = {
+  item: SearchSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type SearchSearchResult = Channel | Video
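+
+// Editor's note (illustrative sketch, not generated code): full-text search
+// returns a mix of channels and videos; the isTypeOf discriminator on
+// SearchFtsOutput tells them apart without inspecting the item itself.
+export function formatSearchHit(hit: SearchFtsOutput): string {
+  return `${hit.isTypeOf} (rank ${hit.rank}): ${hit.highlight}`
+}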
+
+export type StandardDeleteResponse = {
+  id: Scalars['ID']
+}
+
+export type StorageBag = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  objects: Array<StorageDataObject>
+  storageBuckets: Array<StorageBucket>
+  distributionBuckets: Array<DistributionBucket>
+  /** Owner of the storage bag */
+  owner: StorageBagOwner
+}
+
+export type StorageBagConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageBagEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageBagCreateInput = {
+  owner: Scalars['JSONObject']
+}
+
+export type StorageBagEdge = {
+  node: StorageBag
+  cursor: Scalars['String']
+}
+
+export enum StorageBagOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+}
+
+export type StorageBagOwner =
+  | StorageBagOwnerCouncil
+  | StorageBagOwnerWorkingGroup
+  | StorageBagOwnerMember
+  | StorageBagOwnerChannel
+  | StorageBagOwnerDao
+
+export type StorageBagOwnerChannel = {
+  channelId?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerCouncil = {
+  phantom?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerDao = {
+  daoId?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerMember = {
+  memberId?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerWorkingGroup = {
+  workingGroupId?: Maybe<Scalars['String']>
+}
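+
+// Editor's note (illustrative sketch, not generated code): the owner variants
+// above share no discriminator field, so property checks are one way to
+// narrow the StorageBagOwner union in TypeScript.
+export function describeBagOwner(owner: StorageBagOwner): string {
+  if ('channelId' in owner) return `channel ${owner.channelId}`
+  if ('memberId' in owner) return `member ${owner.memberId}`
+  if ('workingGroupId' in owner) return `working group ${owner.workingGroupId}`
+  if ('daoId' in owner) return `DAO ${owner.daoId}`
+  return 'council'
+}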
+
+export type StorageBagUpdateInput = {
+  owner?: Maybe<Scalars['JSONObject']>
+}
+
+export type StorageBagWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  owner_json?: Maybe<Scalars['JSONObject']>
+  objects_none?: Maybe<StorageDataObjectWhereInput>
+  objects_some?: Maybe<StorageDataObjectWhereInput>
+  objects_every?: Maybe<StorageDataObjectWhereInput>
+  storageBuckets_none?: Maybe<StorageBucketWhereInput>
+  storageBuckets_some?: Maybe<StorageBucketWhereInput>
+  storageBuckets_every?: Maybe<StorageBucketWhereInput>
+  distributionBuckets_none?: Maybe<DistributionBucketWhereInput>
+  distributionBuckets_some?: Maybe<DistributionBucketWhereInput>
+  distributionBuckets_every?: Maybe<DistributionBucketWhereInput>
+  AND?: Maybe<Array<StorageBagWhereInput>>
+  OR?: Maybe<Array<StorageBagWhereInput>>
+}
+
+export type StorageBagWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBucket = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Current bucket operator status */
+  operatorStatus: StorageBucketOperatorStatus
+  operatorMetadata?: Maybe<StorageBucketOperatorMetadata>
+  operatorMetadataId?: Maybe<Scalars['String']>
+  /** Whether the bucket is accepting any new storage bags */
+  acceptingNewBags: Scalars['Boolean']
+  bags: Array<StorageBag>
+  /** Bucket's data object size limit in bytes */
+  dataObjectsSizeLimit: Scalars['BigInt']
+  /** Bucket's data object count limit */
+  dataObjectCountLimit: Scalars['BigInt']
+  /** Number of assigned data objects */
+  dataObjectsCount: Scalars['BigInt']
+  /** Total size of assigned data objects */
+  dataObjectsSize: Scalars['BigInt']
+}
+
+export type StorageBucketConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageBucketEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageBucketCreateInput = {
+  operatorStatus: Scalars['JSONObject']
+  operatorMetadata?: Maybe<Scalars['ID']>
+  acceptingNewBags: Scalars['Boolean']
+  dataObjectsSizeLimit: Scalars['String']
+  dataObjectCountLimit: Scalars['String']
+  dataObjectsCount: Scalars['String']
+  dataObjectsSize: Scalars['String']
+}
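+
+// Editor's note (illustrative sketch, not generated code): fields typed as
+// Scalars['BigInt'] on the entity are accepted as strings by the generated
+// create/update inputs. The operatorStatus value is a JSONObject whose exact
+// layout is an assumption here.
+export const exampleBucketInput: StorageBucketCreateInput = {
+  operatorStatus: { storageBucketOperatorStatusMissing: {} }, // assumed JSON encoding of the status union
+  acceptingNewBags: true,
+  dataObjectsSizeLimit: '1000000000', // BigInt values travel as strings
+  dataObjectCountLimit: '1000',
+  dataObjectsCount: '0',
+  dataObjectsSize: '0',
+}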
+
+export type StorageBucketEdge = {
+  node: StorageBucket
+  cursor: Scalars['String']
+}
+
+export type StorageBucketOperatorMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Root node endpoint */
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<NodeLocationMetadata>
+  nodeLocationId?: Maybe<Scalars['String']>
+  /** Additional information about the node/operator */
+  extra?: Maybe<Scalars['String']>
+  storagebucketoperatorMetadata?: Maybe<Array<StorageBucket>>
+}
+
+export type StorageBucketOperatorMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageBucketOperatorMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageBucketOperatorMetadataCreateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<Scalars['ID']>
+  extra?: Maybe<Scalars['String']>
+}
+
+export type StorageBucketOperatorMetadataEdge = {
+  node: StorageBucketOperatorMetadata
+  cursor: Scalars['String']
+}
+
+export enum StorageBucketOperatorMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NodeEndpointAsc = 'nodeEndpoint_ASC',
+  NodeEndpointDesc = 'nodeEndpoint_DESC',
+  NodeLocationAsc = 'nodeLocation_ASC',
+  NodeLocationDesc = 'nodeLocation_DESC',
+  ExtraAsc = 'extra_ASC',
+  ExtraDesc = 'extra_DESC',
+}
+
+export type StorageBucketOperatorMetadataUpdateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<Scalars['ID']>
+  extra?: Maybe<Scalars['String']>
+}
+
+export type StorageBucketOperatorMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  nodeEndpoint_eq?: Maybe<Scalars['String']>
+  nodeEndpoint_contains?: Maybe<Scalars['String']>
+  nodeEndpoint_startsWith?: Maybe<Scalars['String']>
+  nodeEndpoint_endsWith?: Maybe<Scalars['String']>
+  nodeEndpoint_in?: Maybe<Array<Scalars['String']>>
+  extra_eq?: Maybe<Scalars['String']>
+  extra_contains?: Maybe<Scalars['String']>
+  extra_startsWith?: Maybe<Scalars['String']>
+  extra_endsWith?: Maybe<Scalars['String']>
+  extra_in?: Maybe<Array<Scalars['String']>>
+  nodeLocation?: Maybe<NodeLocationMetadataWhereInput>
+  storagebucketoperatorMetadata_none?: Maybe<StorageBucketWhereInput>
+  storagebucketoperatorMetadata_some?: Maybe<StorageBucketWhereInput>
+  storagebucketoperatorMetadata_every?: Maybe<StorageBucketWhereInput>
+  AND?: Maybe<Array<StorageBucketOperatorMetadataWhereInput>>
+  OR?: Maybe<Array<StorageBucketOperatorMetadataWhereInput>>
+}
+
+export type StorageBucketOperatorMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBucketOperatorStatus =
+  | StorageBucketOperatorStatusMissing
+  | StorageBucketOperatorStatusInvited
+  | StorageBucketOperatorStatusActive
+
+export type StorageBucketOperatorStatusActive = {
+  workerId: Scalars['Int']
+  transactorAccountId: Scalars['String']
+}
+
+export type StorageBucketOperatorStatusInvited = {
+  workerId: Scalars['Int']
+}
+
+export type StorageBucketOperatorStatusMissing = {
+  phantom?: Maybe<Scalars['Int']>
+}
+
+export enum StorageBucketOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  OperatorMetadataAsc = 'operatorMetadata_ASC',
+  OperatorMetadataDesc = 'operatorMetadata_DESC',
+  AcceptingNewBagsAsc = 'acceptingNewBags_ASC',
+  AcceptingNewBagsDesc = 'acceptingNewBags_DESC',
+  DataObjectsSizeLimitAsc = 'dataObjectsSizeLimit_ASC',
+  DataObjectsSizeLimitDesc = 'dataObjectsSizeLimit_DESC',
+  DataObjectCountLimitAsc = 'dataObjectCountLimit_ASC',
+  DataObjectCountLimitDesc = 'dataObjectCountLimit_DESC',
+  DataObjectsCountAsc = 'dataObjectsCount_ASC',
+  DataObjectsCountDesc = 'dataObjectsCount_DESC',
+  DataObjectsSizeAsc = 'dataObjectsSize_ASC',
+  DataObjectsSizeDesc = 'dataObjectsSize_DESC',
+}
+
+export type StorageBucketUpdateInput = {
+  operatorStatus?: Maybe<Scalars['JSONObject']>
+  operatorMetadata?: Maybe<Scalars['ID']>
+  acceptingNewBags?: Maybe<Scalars['Boolean']>
+  dataObjectsSizeLimit?: Maybe<Scalars['String']>
+  dataObjectCountLimit?: Maybe<Scalars['String']>
+  dataObjectsCount?: Maybe<Scalars['String']>
+  dataObjectsSize?: Maybe<Scalars['String']>
+}
+
+export type StorageBucketWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  operatorStatus_json?: Maybe<Scalars['JSONObject']>
+  acceptingNewBags_eq?: Maybe<Scalars['Boolean']>
+  acceptingNewBags_in?: Maybe<Array<Scalars['Boolean']>>
+  dataObjectsSizeLimit_eq?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_gt?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_gte?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_lt?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_lte?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_in?: Maybe<Array<Scalars['BigInt']>>
+  dataObjectCountLimit_eq?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_gt?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_gte?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_lt?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_lte?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_in?: Maybe<Array<Scalars['BigInt']>>
+  dataObjectsCount_eq?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_gt?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_gte?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_lt?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_lte?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_in?: Maybe<Array<Scalars['BigInt']>>
+  dataObjectsSize_eq?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_gt?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_gte?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_lt?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_lte?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_in?: Maybe<Array<Scalars['BigInt']>>
+  operatorMetadata?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  bags_none?: Maybe<StorageBagWhereInput>
+  bags_some?: Maybe<StorageBagWhereInput>
+  bags_every?: Maybe<StorageBagWhereInput>
+  AND?: Maybe<Array<StorageBucketWhereInput>>
+  OR?: Maybe<Array<StorageBucketWhereInput>>
+}
+
+export type StorageBucketWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageDataObject = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Whether the data object was uploaded and accepted by the storage provider */
+  isAccepted: Scalars['Boolean']
+  /** Data object size in bytes */
+  size: Scalars['BigInt']
+  storageBag: StorageBag
+  storageBagId: Scalars['String']
+  /** IPFS content hash */
+  ipfsHash: Scalars['String']
+  /** The type of the asset that the data object represents (if known) */
+  type: DataObjectType
+  /** Prize for removing the data object */
+  deletionPrize: Scalars['BigInt']
+  /** If the object is no longer used as an asset, the time at which it was unset (if known) */
+  unsetAt?: Maybe<Scalars['DateTime']>
+  channelcoverPhoto?: Maybe<Array<Channel>>
+  channelavatarPhoto?: Maybe<Array<Channel>>
+  videothumbnailPhoto?: Maybe<Array<Video>>
+  videomedia?: Maybe<Array<Video>>
+}
+
+export type StorageDataObjectConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageDataObjectEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageDataObjectCreateInput = {
+  isAccepted: Scalars['Boolean']
+  size: Scalars['String']
+  storageBag: Scalars['ID']
+  ipfsHash: Scalars['String']
+  type: Scalars['JSONObject']
+  deletionPrize: Scalars['String']
+  unsetAt?: Maybe<Scalars['DateTime']>
+}
+
+export type StorageDataObjectEdge = {
+  node: StorageDataObject
+  cursor: Scalars['String']
+}
+
+export enum StorageDataObjectOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsAcceptedAsc = 'isAccepted_ASC',
+  IsAcceptedDesc = 'isAccepted_DESC',
+  SizeAsc = 'size_ASC',
+  SizeDesc = 'size_DESC',
+  StorageBagAsc = 'storageBag_ASC',
+  StorageBagDesc = 'storageBag_DESC',
+  IpfsHashAsc = 'ipfsHash_ASC',
+  IpfsHashDesc = 'ipfsHash_DESC',
+  DeletionPrizeAsc = 'deletionPrize_ASC',
+  DeletionPrizeDesc = 'deletionPrize_DESC',
+  UnsetAtAsc = 'unsetAt_ASC',
+  UnsetAtDesc = 'unsetAt_DESC',
+}
+
+export type StorageDataObjectUpdateInput = {
+  isAccepted?: Maybe<Scalars['Boolean']>
+  size?: Maybe<Scalars['String']>
+  storageBag?: Maybe<Scalars['ID']>
+  ipfsHash?: Maybe<Scalars['String']>
+  type?: Maybe<Scalars['JSONObject']>
+  deletionPrize?: Maybe<Scalars['String']>
+  unsetAt?: Maybe<Scalars['DateTime']>
+}
+
+export type StorageDataObjectWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  isAccepted_eq?: Maybe<Scalars['Boolean']>
+  isAccepted_in?: Maybe<Array<Scalars['Boolean']>>
+  size_eq?: Maybe<Scalars['BigInt']>
+  size_gt?: Maybe<Scalars['BigInt']>
+  size_gte?: Maybe<Scalars['BigInt']>
+  size_lt?: Maybe<Scalars['BigInt']>
+  size_lte?: Maybe<Scalars['BigInt']>
+  size_in?: Maybe<Array<Scalars['BigInt']>>
+  ipfsHash_eq?: Maybe<Scalars['String']>
+  ipfsHash_contains?: Maybe<Scalars['String']>
+  ipfsHash_startsWith?: Maybe<Scalars['String']>
+  ipfsHash_endsWith?: Maybe<Scalars['String']>
+  ipfsHash_in?: Maybe<Array<Scalars['String']>>
+  type_json?: Maybe<Scalars['JSONObject']>
+  deletionPrize_eq?: Maybe<Scalars['BigInt']>
+  deletionPrize_gt?: Maybe<Scalars['BigInt']>
+  deletionPrize_gte?: Maybe<Scalars['BigInt']>
+  deletionPrize_lt?: Maybe<Scalars['BigInt']>
+  deletionPrize_lte?: Maybe<Scalars['BigInt']>
+  deletionPrize_in?: Maybe<Array<Scalars['BigInt']>>
+  unsetAt_eq?: Maybe<Scalars['DateTime']>
+  unsetAt_lt?: Maybe<Scalars['DateTime']>
+  unsetAt_lte?: Maybe<Scalars['DateTime']>
+  unsetAt_gt?: Maybe<Scalars['DateTime']>
+  unsetAt_gte?: Maybe<Scalars['DateTime']>
+  storageBag?: Maybe<StorageBagWhereInput>
+  channelcoverPhoto_none?: Maybe<ChannelWhereInput>
+  channelcoverPhoto_some?: Maybe<ChannelWhereInput>
+  channelcoverPhoto_every?: Maybe<ChannelWhereInput>
+  channelavatarPhoto_none?: Maybe<ChannelWhereInput>
+  channelavatarPhoto_some?: Maybe<ChannelWhereInput>
+  channelavatarPhoto_every?: Maybe<ChannelWhereInput>
+  videothumbnailPhoto_none?: Maybe<VideoWhereInput>
+  videothumbnailPhoto_some?: Maybe<VideoWhereInput>
+  videothumbnailPhoto_every?: Maybe<VideoWhereInput>
+  videomedia_none?: Maybe<VideoWhereInput>
+  videomedia_some?: Maybe<VideoWhereInput>
+  videomedia_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<StorageDataObjectWhereInput>>
+  OR?: Maybe<Array<StorageDataObjectWhereInput>>
+}
+
+export type StorageDataObjectWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+/** Global storage system parameters */
+export type StorageSystemParameters = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Blacklisted content hashes */
+  blacklist: Array<Scalars['String']>
+  /** How many buckets can be assigned to store a bag */
+  storageBucketsPerBagLimit: Scalars['Int']
+  /** How many buckets can be assigned to distribute a bag */
+  distributionBucketsPerBagLimit: Scalars['Int']
+  /** Whether the uploading is globally blocked */
+  uploadingBlocked: Scalars['Boolean']
+  /** Additional fee for storing 1 MB of data */
+  dataObjectFeePerMb: Scalars['BigInt']
+  /** Global max. number of objects a storage bucket can store (can be further limited by the provider) */
+  storageBucketMaxObjectsCountLimit: Scalars['BigInt']
+  /** Global max. size of objects a storage bucket can store (can be further limited by the provider) */
+  storageBucketMaxObjectsSizeLimit: Scalars['BigInt']
+  /** ID to be assigned to the next data object created */
+  nextDataObjectId: Scalars['BigInt']
+}
+
+export type StorageSystemParametersConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageSystemParametersEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageSystemParametersCreateInput = {
+  blacklist: Array<Scalars['String']>
+  storageBucketsPerBagLimit: Scalars['Float']
+  distributionBucketsPerBagLimit: Scalars['Float']
+  uploadingBlocked: Scalars['Boolean']
+  dataObjectFeePerMb: Scalars['String']
+  storageBucketMaxObjectsCountLimit: Scalars['String']
+  storageBucketMaxObjectsSizeLimit: Scalars['String']
+  nextDataObjectId: Scalars['String']
+}
+
+export type StorageSystemParametersEdge = {
+  node: StorageSystemParameters
+  cursor: Scalars['String']
+}
+
+export enum StorageSystemParametersOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  StorageBucketsPerBagLimitAsc = 'storageBucketsPerBagLimit_ASC',
+  StorageBucketsPerBagLimitDesc = 'storageBucketsPerBagLimit_DESC',
+  DistributionBucketsPerBagLimitAsc = 'distributionBucketsPerBagLimit_ASC',
+  DistributionBucketsPerBagLimitDesc = 'distributionBucketsPerBagLimit_DESC',
+  UploadingBlockedAsc = 'uploadingBlocked_ASC',
+  UploadingBlockedDesc = 'uploadingBlocked_DESC',
+  DataObjectFeePerMbAsc = 'dataObjectFeePerMb_ASC',
+  DataObjectFeePerMbDesc = 'dataObjectFeePerMb_DESC',
+  StorageBucketMaxObjectsCountLimitAsc = 'storageBucketMaxObjectsCountLimit_ASC',
+  StorageBucketMaxObjectsCountLimitDesc = 'storageBucketMaxObjectsCountLimit_DESC',
+  StorageBucketMaxObjectsSizeLimitAsc = 'storageBucketMaxObjectsSizeLimit_ASC',
+  StorageBucketMaxObjectsSizeLimitDesc = 'storageBucketMaxObjectsSizeLimit_DESC',
+  NextDataObjectIdAsc = 'nextDataObjectId_ASC',
+  NextDataObjectIdDesc = 'nextDataObjectId_DESC',
+}
+
+export type StorageSystemParametersUpdateInput = {
+  blacklist?: Maybe<Array<Scalars['String']>>
+  storageBucketsPerBagLimit?: Maybe<Scalars['Float']>
+  distributionBucketsPerBagLimit?: Maybe<Scalars['Float']>
+  uploadingBlocked?: Maybe<Scalars['Boolean']>
+  dataObjectFeePerMb?: Maybe<Scalars['String']>
+  storageBucketMaxObjectsCountLimit?: Maybe<Scalars['String']>
+  storageBucketMaxObjectsSizeLimit?: Maybe<Scalars['String']>
+  nextDataObjectId?: Maybe<Scalars['String']>
+}
+
+export type StorageSystemParametersWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  blacklist_containsAll?: Maybe<Array<Scalars['String']>>
+  blacklist_containsNone?: Maybe<Array<Scalars['String']>>
+  blacklist_containsAny?: Maybe<Array<Scalars['String']>>
+  storageBucketsPerBagLimit_eq?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_gt?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_gte?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_lt?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_lte?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_in?: Maybe<Array<Scalars['Int']>>
+  distributionBucketsPerBagLimit_eq?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_gt?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_gte?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_lt?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_lte?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_in?: Maybe<Array<Scalars['Int']>>
+  uploadingBlocked_eq?: Maybe<Scalars['Boolean']>
+  uploadingBlocked_in?: Maybe<Array<Scalars['Boolean']>>
+  dataObjectFeePerMb_eq?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_gt?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_gte?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_lt?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_lte?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_in?: Maybe<Array<Scalars['BigInt']>>
+  storageBucketMaxObjectsCountLimit_eq?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_gt?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_gte?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_lt?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_lte?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_in?: Maybe<Array<Scalars['BigInt']>>
+  storageBucketMaxObjectsSizeLimit_eq?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_gt?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_gte?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_lt?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_lte?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_in?: Maybe<Array<Scalars['BigInt']>>
+  nextDataObjectId_eq?: Maybe<Scalars['BigInt']>
+  nextDataObjectId_gt?: Maybe<Scalars['BigInt']>
+  nextDataObjectId_gte?: Maybe<Scalars['BigInt']>
+  nextDataObjectId_lt?: Maybe<Scalars['BigInt']>
+  nextDataObjectId_lte?: Maybe<Scalars['BigInt']>
+  nextDataObjectId_in?: Maybe<Array<Scalars['BigInt']>>
+  AND?: Maybe<Array<StorageSystemParametersWhereInput>>
+  OR?: Maybe<Array<StorageSystemParametersWhereInput>>
+}
+
+export type StorageSystemParametersWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type Subscription = {
+  stateSubscription: ProcessorState
+}
+
+export type Video = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  channel: Channel
+  channelId: Scalars['String']
+  category?: Maybe<VideoCategory>
+  categoryId?: Maybe<Scalars['String']>
+  /** The title of the video */
+  title?: Maybe<Scalars['String']>
+  /** The description of the Video */
+  description?: Maybe<Scalars['String']>
+  /** Video duration in seconds */
+  duration?: Maybe<Scalars['Int']>
+  thumbnailPhoto?: Maybe<StorageDataObject>
+  thumbnailPhotoId?: Maybe<Scalars['String']>
+  language?: Maybe<Language>
+  languageId?: Maybe<Scalars['String']>
+  /** Whether the Video contains marketing */
+  hasMarketing?: Maybe<Scalars['Boolean']>
+  /** If the Video was published on another platform before being published on Joystream - the original publication date */
+  publishedBeforeJoystream?: Maybe<Scalars['DateTime']>
+  /** Whether the Video is supposed to be publicly displayed */
+  isPublic?: Maybe<Scalars['Boolean']>
+  /** Flag signaling whether a video is censored. */
+  isCensored: Scalars['Boolean']
+  /** Whether the Video contains explicit material. */
+  isExplicit?: Maybe<Scalars['Boolean']>
+  license?: Maybe<License>
+  licenseId?: Maybe<Scalars['String']>
+  media?: Maybe<StorageDataObject>
+  mediaId?: Maybe<Scalars['String']>
+  mediaMetadata?: Maybe<VideoMediaMetadata>
+  mediaMetadataId?: Maybe<Scalars['String']>
+  createdInBlock: Scalars['Int']
+  /** Whether the video is featured */
+  isFeatured: Scalars['Boolean']
+}
+
+export type VideoCategoriesByNameFtsOutput = {
+  item: VideoCategoriesByNameSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type VideoCategoriesByNameSearchResult = VideoCategory
+
+export type VideoCategory = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** The name of the category */
+  name?: Maybe<Scalars['String']>
+  videos: Array<Video>
+  createdInBlock: Scalars['Int']
+}
+
+export type VideoCategoryConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoCategoryEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoCategoryCreateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock: Scalars['Float']
+}
+
+export type VideoCategoryEdge = {
+  node: VideoCategory
+  cursor: Scalars['String']
+}
+
+export enum VideoCategoryOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NameAsc = 'name_ASC',
+  NameDesc = 'name_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type VideoCategoryUpdateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type VideoCategoryWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  name_eq?: Maybe<Scalars['String']>
+  name_contains?: Maybe<Scalars['String']>
+  name_startsWith?: Maybe<Scalars['String']>
+  name_endsWith?: Maybe<Scalars['String']>
+  name_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  videos_none?: Maybe<VideoWhereInput>
+  videos_some?: Maybe<VideoWhereInput>
+  videos_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<VideoCategoryWhereInput>>
+  OR?: Maybe<Array<VideoCategoryWhereInput>>
+}
+
+export type VideoCategoryWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type VideoConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoCreateInput = {
+  channel: Scalars['ID']
+  category?: Maybe<Scalars['ID']>
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  duration?: Maybe<Scalars['Float']>
+  thumbnailPhoto?: Maybe<Scalars['ID']>
+  language?: Maybe<Scalars['ID']>
+  hasMarketing?: Maybe<Scalars['Boolean']>
+  publishedBeforeJoystream?: Maybe<Scalars['DateTime']>
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored: Scalars['Boolean']
+  isExplicit?: Maybe<Scalars['Boolean']>
+  license?: Maybe<Scalars['ID']>
+  media?: Maybe<Scalars['ID']>
+  mediaMetadata?: Maybe<Scalars['ID']>
+  createdInBlock: Scalars['Float']
+  isFeatured: Scalars['Boolean']
+}
+
+export type VideoEdge = {
+  node: Video
+  cursor: Scalars['String']
+}
+
+export type VideoMediaEncoding = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Encoding of the video media object */
+  codecName?: Maybe<Scalars['String']>
+  /** Media container format */
+  container?: Maybe<Scalars['String']>
+  /** Content MIME type */
+  mimeMediaType?: Maybe<Scalars['String']>
+  videomediametadataencoding?: Maybe<Array<VideoMediaMetadata>>
+}
+
+export type VideoMediaEncodingConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoMediaEncodingEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoMediaEncodingCreateInput = {
+  codecName?: Maybe<Scalars['String']>
+  container?: Maybe<Scalars['String']>
+  mimeMediaType?: Maybe<Scalars['String']>
+}
+
+export type VideoMediaEncodingEdge = {
+  node: VideoMediaEncoding
+  cursor: Scalars['String']
+}
+
+export enum VideoMediaEncodingOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CodecNameAsc = 'codecName_ASC',
+  CodecNameDesc = 'codecName_DESC',
+  ContainerAsc = 'container_ASC',
+  ContainerDesc = 'container_DESC',
+  MimeMediaTypeAsc = 'mimeMediaType_ASC',
+  MimeMediaTypeDesc = 'mimeMediaType_DESC',
+}
+
+export type VideoMediaEncodingUpdateInput = {
+  codecName?: Maybe<Scalars['String']>
+  container?: Maybe<Scalars['String']>
+  mimeMediaType?: Maybe<Scalars['String']>
+}
+
+export type VideoMediaEncodingWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  codecName_eq?: Maybe<Scalars['String']>
+  codecName_contains?: Maybe<Scalars['String']>
+  codecName_startsWith?: Maybe<Scalars['String']>
+  codecName_endsWith?: Maybe<Scalars['String']>
+  codecName_in?: Maybe<Array<Scalars['String']>>
+  container_eq?: Maybe<Scalars['String']>
+  container_contains?: Maybe<Scalars['String']>
+  container_startsWith?: Maybe<Scalars['String']>
+  container_endsWith?: Maybe<Scalars['String']>
+  container_in?: Maybe<Array<Scalars['String']>>
+  mimeMediaType_eq?: Maybe<Scalars['String']>
+  mimeMediaType_contains?: Maybe<Scalars['String']>
+  mimeMediaType_startsWith?: Maybe<Scalars['String']>
+  mimeMediaType_endsWith?: Maybe<Scalars['String']>
+  mimeMediaType_in?: Maybe<Array<Scalars['String']>>
+  videomediametadataencoding_none?: Maybe<VideoMediaMetadataWhereInput>
+  videomediametadataencoding_some?: Maybe<VideoMediaMetadataWhereInput>
+  videomediametadataencoding_every?: Maybe<VideoMediaMetadataWhereInput>
+  AND?: Maybe<Array<VideoMediaEncodingWhereInput>>
+  OR?: Maybe<Array<VideoMediaEncodingWhereInput>>
+}
+
+export type VideoMediaEncodingWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type VideoMediaMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  encoding?: Maybe<VideoMediaEncoding>
+  encodingId?: Maybe<Scalars['String']>
+  /** Video media width in pixels */
+  pixelWidth?: Maybe<Scalars['Int']>
+  /** Video media height in pixels */
+  pixelHeight?: Maybe<Scalars['Int']>
+  /** Video media size in bytes */
+  size?: Maybe<Scalars['BigInt']>
+  video?: Maybe<Video>
+  createdInBlock: Scalars['Int']
+}
+
+export type VideoMediaMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoMediaMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoMediaMetadataCreateInput = {
+  encoding?: Maybe<Scalars['ID']>
+  pixelWidth?: Maybe<Scalars['Float']>
+  pixelHeight?: Maybe<Scalars['Float']>
+  size?: Maybe<Scalars['String']>
+  createdInBlock: Scalars['Float']
+}
+
+export type VideoMediaMetadataEdge = {
+  node: VideoMediaMetadata
+  cursor: Scalars['String']
+}
+
+export enum VideoMediaMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  EncodingAsc = 'encoding_ASC',
+  EncodingDesc = 'encoding_DESC',
+  PixelWidthAsc = 'pixelWidth_ASC',
+  PixelWidthDesc = 'pixelWidth_DESC',
+  PixelHeightAsc = 'pixelHeight_ASC',
+  PixelHeightDesc = 'pixelHeight_DESC',
+  SizeAsc = 'size_ASC',
+  SizeDesc = 'size_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type VideoMediaMetadataUpdateInput = {
+  encoding?: Maybe<Scalars['ID']>
+  pixelWidth?: Maybe<Scalars['Float']>
+  pixelHeight?: Maybe<Scalars['Float']>
+  size?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type VideoMediaMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  pixelWidth_eq?: Maybe<Scalars['Int']>
+  pixelWidth_gt?: Maybe<Scalars['Int']>
+  pixelWidth_gte?: Maybe<Scalars['Int']>
+  pixelWidth_lt?: Maybe<Scalars['Int']>
+  pixelWidth_lte?: Maybe<Scalars['Int']>
+  pixelWidth_in?: Maybe<Array<Scalars['Int']>>
+  pixelHeight_eq?: Maybe<Scalars['Int']>
+  pixelHeight_gt?: Maybe<Scalars['Int']>
+  pixelHeight_gte?: Maybe<Scalars['Int']>
+  pixelHeight_lt?: Maybe<Scalars['Int']>
+  pixelHeight_lte?: Maybe<Scalars['Int']>
+  pixelHeight_in?: Maybe<Array<Scalars['Int']>>
+  size_eq?: Maybe<Scalars['BigInt']>
+  size_gt?: Maybe<Scalars['BigInt']>
+  size_gte?: Maybe<Scalars['BigInt']>
+  size_lt?: Maybe<Scalars['BigInt']>
+  size_lte?: Maybe<Scalars['BigInt']>
+  size_in?: Maybe<Array<Scalars['BigInt']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  encoding?: Maybe<VideoMediaEncodingWhereInput>
+  video?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<VideoMediaMetadataWhereInput>>
+  OR?: Maybe<Array<VideoMediaMetadataWhereInput>>
+}
+
+export type VideoMediaMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum VideoOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  ChannelAsc = 'channel_ASC',
+  ChannelDesc = 'channel_DESC',
+  CategoryAsc = 'category_ASC',
+  CategoryDesc = 'category_DESC',
+  TitleAsc = 'title_ASC',
+  TitleDesc = 'title_DESC',
+  DescriptionAsc = 'description_ASC',
+  DescriptionDesc = 'description_DESC',
+  DurationAsc = 'duration_ASC',
+  DurationDesc = 'duration_DESC',
+  ThumbnailPhotoAsc = 'thumbnailPhoto_ASC',
+  ThumbnailPhotoDesc = 'thumbnailPhoto_DESC',
+  LanguageAsc = 'language_ASC',
+  LanguageDesc = 'language_DESC',
+  HasMarketingAsc = 'hasMarketing_ASC',
+  HasMarketingDesc = 'hasMarketing_DESC',
+  PublishedBeforeJoystreamAsc = 'publishedBeforeJoystream_ASC',
+  PublishedBeforeJoystreamDesc = 'publishedBeforeJoystream_DESC',
+  IsPublicAsc = 'isPublic_ASC',
+  IsPublicDesc = 'isPublic_DESC',
+  IsCensoredAsc = 'isCensored_ASC',
+  IsCensoredDesc = 'isCensored_DESC',
+  IsExplicitAsc = 'isExplicit_ASC',
+  IsExplicitDesc = 'isExplicit_DESC',
+  LicenseAsc = 'license_ASC',
+  LicenseDesc = 'license_DESC',
+  MediaAsc = 'media_ASC',
+  MediaDesc = 'media_DESC',
+  MediaMetadataAsc = 'mediaMetadata_ASC',
+  MediaMetadataDesc = 'mediaMetadata_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+  IsFeaturedAsc = 'isFeatured_ASC',
+  IsFeaturedDesc = 'isFeatured_DESC',
+}
+
+export type VideoUpdateInput = {
+  channel?: Maybe<Scalars['ID']>
+  category?: Maybe<Scalars['ID']>
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  duration?: Maybe<Scalars['Float']>
+  thumbnailPhoto?: Maybe<Scalars['ID']>
+  language?: Maybe<Scalars['ID']>
+  hasMarketing?: Maybe<Scalars['Boolean']>
+  publishedBeforeJoystream?: Maybe<Scalars['DateTime']>
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored?: Maybe<Scalars['Boolean']>
+  isExplicit?: Maybe<Scalars['Boolean']>
+  license?: Maybe<Scalars['ID']>
+  media?: Maybe<Scalars['ID']>
+  mediaMetadata?: Maybe<Scalars['ID']>
+  createdInBlock?: Maybe<Scalars['Float']>
+  isFeatured?: Maybe<Scalars['Boolean']>
+}
+
+export type VideoWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  title_eq?: Maybe<Scalars['String']>
+  title_contains?: Maybe<Scalars['String']>
+  title_startsWith?: Maybe<Scalars['String']>
+  title_endsWith?: Maybe<Scalars['String']>
+  title_in?: Maybe<Array<Scalars['String']>>
+  description_eq?: Maybe<Scalars['String']>
+  description_contains?: Maybe<Scalars['String']>
+  description_startsWith?: Maybe<Scalars['String']>
+  description_endsWith?: Maybe<Scalars['String']>
+  description_in?: Maybe<Array<Scalars['String']>>
+  duration_eq?: Maybe<Scalars['Int']>
+  duration_gt?: Maybe<Scalars['Int']>
+  duration_gte?: Maybe<Scalars['Int']>
+  duration_lt?: Maybe<Scalars['Int']>
+  duration_lte?: Maybe<Scalars['Int']>
+  duration_in?: Maybe<Array<Scalars['Int']>>
+  hasMarketing_eq?: Maybe<Scalars['Boolean']>
+  hasMarketing_in?: Maybe<Array<Scalars['Boolean']>>
+  publishedBeforeJoystream_eq?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_lt?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_lte?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_gt?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_gte?: Maybe<Scalars['DateTime']>
+  isPublic_eq?: Maybe<Scalars['Boolean']>
+  isPublic_in?: Maybe<Array<Scalars['Boolean']>>
+  isCensored_eq?: Maybe<Scalars['Boolean']>
+  isCensored_in?: Maybe<Array<Scalars['Boolean']>>
+  isExplicit_eq?: Maybe<Scalars['Boolean']>
+  isExplicit_in?: Maybe<Array<Scalars['Boolean']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  isFeatured_eq?: Maybe<Scalars['Boolean']>
+  isFeatured_in?: Maybe<Array<Scalars['Boolean']>>
+  channel?: Maybe<ChannelWhereInput>
+  category?: Maybe<VideoCategoryWhereInput>
+  thumbnailPhoto?: Maybe<StorageDataObjectWhereInput>
+  language?: Maybe<LanguageWhereInput>
+  license?: Maybe<LicenseWhereInput>
+  media?: Maybe<StorageDataObjectWhereInput>
+  mediaMetadata?: Maybe<VideoMediaMetadataWhereInput>
+  AND?: Maybe<Array<VideoWhereInput>>
+  OR?: Maybe<Array<VideoWhereInput>>
+}
+
+export type VideoWhereUniqueInput = {
+  id: Scalars['ID']
+}
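
All of the video filters above compose freely with `AND`/`OR`. A minimal sketch, not part of this diff, using only fields defined in the generated input; the import path is assumed:

```ts
import { VideoOrderByInput, VideoWhereInput } from './schema' // assumed path

// Public, non-censored videos of at least a minute that mention "storage"
const where: VideoWhereInput = {
  isPublic_eq: true,
  isCensored_eq: false,
  duration_gte: 60,
  OR: [{ title_contains: 'storage' }, { description_contains: 'storage' }],
}
const orderBy = VideoOrderByInput.CreatedAtDesc
```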
+
+export type Worker = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Whether the worker is still active */
+  isActive: Scalars['Boolean']
+  /** Runtime identifier */
+  workerId: Scalars['String']
+  /** Associated working group */
+  type: WorkerType
+  /** Custom metadata set by the provider */
+  metadata?: Maybe<Scalars['String']>
+}
+
+export type WorkerConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<WorkerEdge>
+  pageInfo: PageInfo
+}
+
+export type WorkerCreateInput = {
+  isActive: Scalars['Boolean']
+  workerId: Scalars['String']
+  type: WorkerType
+  metadata?: Maybe<Scalars['String']>
+}
+
+export type WorkerEdge = {
+  node: Worker
+  cursor: Scalars['String']
+}
+
+export enum WorkerOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsActiveAsc = 'isActive_ASC',
+  IsActiveDesc = 'isActive_DESC',
+  WorkerIdAsc = 'workerId_ASC',
+  WorkerIdDesc = 'workerId_DESC',
+  TypeAsc = 'type_ASC',
+  TypeDesc = 'type_DESC',
+  MetadataAsc = 'metadata_ASC',
+  MetadataDesc = 'metadata_DESC',
+}
+
+export enum WorkerType {
+  Gateway = 'GATEWAY',
+  Storage = 'STORAGE',
+}
+
+export type WorkerUpdateInput = {
+  isActive?: Maybe<Scalars['Boolean']>
+  workerId?: Maybe<Scalars['String']>
+  type?: Maybe<WorkerType>
+  metadata?: Maybe<Scalars['String']>
+}
+
+export type WorkerWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  isActive_eq?: Maybe<Scalars['Boolean']>
+  isActive_in?: Maybe<Array<Scalars['Boolean']>>
+  workerId_eq?: Maybe<Scalars['String']>
+  workerId_contains?: Maybe<Scalars['String']>
+  workerId_startsWith?: Maybe<Scalars['String']>
+  workerId_endsWith?: Maybe<Scalars['String']>
+  workerId_in?: Maybe<Array<Scalars['String']>>
+  type_eq?: Maybe<WorkerType>
+  type_in?: Maybe<Array<WorkerType>>
+  metadata_eq?: Maybe<Scalars['String']>
+  metadata_contains?: Maybe<Scalars['String']>
+  metadata_startsWith?: Maybe<Scalars['String']>
+  metadata_endsWith?: Maybe<Scalars['String']>
+  metadata_in?: Maybe<Array<Scalars['String']>>
+  AND?: Maybe<Array<WorkerWhereInput>>
+  OR?: Maybe<Array<WorkerWhereInput>>
+}
+
+export type WorkerWhereUniqueInput = {
+  id: Scalars['ID']
+}
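
A minimal sketch, not part of this diff, of how the worker inputs above would be used to select active storage providers; the import path is assumed:

```ts
import { WorkerOrderByInput, WorkerType, WorkerWhereInput } from './schema' // assumed path

const activeStorageWorkers: WorkerWhereInput = {
  type_eq: WorkerType.Storage,
  isActive_eq: true,
}
const orderBy = WorkerOrderByInput.WorkerIdAsc
```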

+ 74 - 0
tests/network-tests/src/graphql/queries/storagev2.graphql

@@ -0,0 +1,74 @@
+fragment DataObjectTypeFields on DataObjectType {
+  __typename
+  ... on DataObjectTypeChannelAvatar {
+    channel {
+      id
+    }
+  }
+  ... on DataObjectTypeChannelCoverPhoto {
+    channel {
+      id
+    }
+  }
+  ... on DataObjectTypeVideoThumbnail {
+    video {
+      id
+    }
+  }
+  ... on DataObjectTypeVideoMedia {
+    video {
+      id
+    }
+  }
+}
+
+fragment StorageDataObjectFields on StorageDataObject {
+  id
+  ipfsHash
+  isAccepted
+  size
+  type {
+    ...DataObjectTypeFields
+  }
+  deletionPrize
+  unsetAt
+  storageBagId
+}
+
+fragment ChannelFields on Channel {
+  title
+  description
+  isPublic
+  language {
+    iso
+  }
+  rewardAccount
+  isCensored
+  ownerMember {
+    id
+  }
+  ownerCuratorGroup {
+    id
+  }
+  category {
+    name
+  }
+  avatarPhoto {
+    ...StorageDataObjectFields
+  }
+  coverPhoto {
+    ...StorageDataObjectFields
+  }
+}
+
+query getDataObjectsByIds($ids: [ID!]) {
+  storageDataObjects(where: { id_in: $ids }) {
+    ...StorageDataObjectFields
+  }
+}
+
+query getChannelById($id: ID!) {
+  channelByUniqueInput(where: { id: $id }) {
+    ...ChannelFields
+  }
+}
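
A minimal sketch, not part of this diff, of executing `getDataObjectsByIds` outside the test framework. Using graphql-request and a local query-node endpoint are both assumptions; the tests themselves go through their own QueryNodeApi wrapper:

```ts
import { gql, request } from 'graphql-request'

const QUERY_NODE_URL = 'http://localhost:8081/graphql' // assumed local endpoint

// Inlined subset of the query above (fragments omitted for brevity)
const getDataObjectsByIds = gql`
  query getDataObjectsByIds($ids: [ID!]) {
    storageDataObjects(where: { id_in: $ids }) {
      id
      ipfsHash
      isAccepted
      size
    }
  }
`

async function fetchDataObjects(ids: string[]): Promise<unknown> {
  return request(QUERY_NODE_URL, getDataObjectsByIds, { ids })
}
```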

+ 18 - 24
tests/network-tests/src/scenarios/full.ts → tests/network-tests/src/scenarios/combined.ts

@@ -1,50 +1,44 @@
 import creatingMemberships from '../flows/membership/creatingMemberships'
-import councilSetup from '../flows/council/setup'
 import leaderSetup from '../flows/workingGroup/leaderSetup'
-import electionParametersProposal from '../flows/proposals/electionParametersProposal'
-import manageLeaderRole from '../flows/proposals/manageLeaderRole'
-import spendingProposal from '../flows/proposals/spendingProposal'
-import textProposal from '../flows/proposals/textProposal'
-import validatorCountProposal from '../flows/proposals/validatorCountProposal'
-import wgMintCapacityProposal from '../flows/proposals/workingGroupMintCapacityProposal'
 import atLeastValueBug from '../flows/workingGroup/atLeastValueBug'
 import { manageWorkerFlow } from '../flows/workingGroup/manageWorkerAsLead'
 import manageWorkerAsWorker from '../flows/workingGroup/manageWorkerAsWorker'
 import workerPayout from '../flows/workingGroup/workerPayout'
+import initDistributionBucket from '../flows/clis/initDistributionBucket'
+import initStorageBucket from '../flows/clis/initStorageBucket'
+import createChannel from '../flows/clis/createChannel'
 import { scenario } from '../Scenario'
 import { WorkingGroups } from '../WorkingGroups'
 
 scenario(async ({ job }) => {
+  // These tests assume:
+  // - storage setup (including hired lead)
+  // - existing council
   job('creating members', creatingMemberships)
 
-  const councilJob = job('council setup', councilSetup)
-
-  const proposalsJob = job('proposals', [
-    electionParametersProposal,
-    spendingProposal,
-    textProposal,
-    validatorCountProposal,
-    wgMintCapacityProposal.storage,
-    wgMintCapacityProposal.content,
-    manageLeaderRole.storage,
-    manageLeaderRole.content,
-  ]).requires(councilJob)
-
   const leadSetupJob = job('setup leads', [
-    leaderSetup(WorkingGroups.Storage),
-    leaderSetup(WorkingGroups.Content),
-  ]).after(proposalsJob)
+    leaderSetup(WorkingGroups.Storage, true),
+    leaderSetup(WorkingGroups.Content, true),
+    leaderSetup(WorkingGroups.Distribution, true),
+  ])
 
   // Testing this bug on a single working-group instance is sufficient
   job('at least value bug', atLeastValueBug).requires(leadSetupJob)
 
   // tests minting payouts (requires council to set mint capacity)
-  job('worker payouts', [workerPayout.storage, workerPayout.content]).requires(leadSetupJob).requires(councilJob)
+  job('worker payouts', [workerPayout.storage, workerPayout.content, workerPayout.distribution]).requires(leadSetupJob)
 
   job('working group tests', [
     manageWorkerFlow(WorkingGroups.Storage),
     manageWorkerAsWorker.storage,
     manageWorkerFlow(WorkingGroups.Content),
     manageWorkerAsWorker.content,
+    manageWorkerFlow(WorkingGroups.Distribution),
+    manageWorkerAsWorker.distribution,
   ]).requires(leadSetupJob)
+
+  const createChannelJob = job('create channel via CLI', createChannel)
+  job('init storage and distribution buckets via CLI', [initDistributionBucket, initStorageBucket]).after(
+    createChannelJob
+  )
 })

+ 16 - 0
tests/network-tests/src/scenarios/giza-issue-reproduction-setup.ts

@@ -0,0 +1,16 @@
+import makeAliceMember from '../flows/membership/makeAliceMember'
+import leaderSetup from '../flows/workingGroup/leaderSetup'
+import { hireWorkersFlow } from '../flows/workingGroup/manageWorkerAsLead'
+import updateAccountsFlow from '../misc/updateAllWorkerRoleAccountsFlow'
+import initStorage, { doubleBucketConfig as storageConfig } from '../flows/storagev2/initStorage'
+import { WorkingGroups } from '../WorkingGroups'
+import { scenario } from '../Scenario'
+
+scenario(async ({ job }) => {
+  job('Make Alice a member', makeAliceMember)
+
+  const leads = job('Set Storage Lead', leaderSetup(WorkingGroups.Storage))
+  const workers = job('Hire Storage Worker', hireWorkersFlow(WorkingGroups.Storage, 1)).after(leads)
+  const updateWorkerAccounts = job('Update worker accounts', updateAccountsFlow).after(workers)
+  job('initialize storage system (2 buckets)', initStorage(storageConfig)).requires(updateWorkerAccounts)
+})

+ 16 - 0
tests/network-tests/src/scenarios/init-storage-and-distribution.ts

@@ -0,0 +1,16 @@
+import leaderSetup from '../flows/workingGroup/leaderSetup'
+import initStorage, { singleBucketConfig as defaultStorageConfig } from '../flows/storagev2/initStorage'
+import initDistribution, { singleBucketConfig as defaultDistributionConfig } from '../flows/storagev2/initDistribution'
+import { scenario } from '../Scenario'
+import { WorkingGroups } from '../WorkingGroups'
+import updateAccountsFlow from '../misc/updateAllWorkerRoleAccountsFlow'
+
+scenario(async ({ job }) => {
+  const setupLead = job('setup leads', [
+    leaderSetup(WorkingGroups.Distribution, true),
+    leaderSetup(WorkingGroups.Storage, true),
+  ])
+  const updateWorkerAccounts = job('Update worker accounts', updateAccountsFlow).after(setupLead)
+  job('initialize storage system', initStorage(defaultStorageConfig)).after(updateWorkerAccounts)
+  job('initialize distribution system', initDistribution(defaultDistributionConfig)).after(updateWorkerAccounts)
+})

+ 28 - 0
tests/network-tests/src/scenarios/proposals.ts

@@ -0,0 +1,28 @@
+import creatingMemberships from '../flows/membership/creatingMemberships'
+import councilSetup from '../flows/council/setup'
+import electionParametersProposal from '../flows/proposals/electionParametersProposal'
+import manageLeaderRole from '../flows/proposals/manageLeaderRole'
+import spendingProposal from '../flows/proposals/spendingProposal'
+import textProposal from '../flows/proposals/textProposal'
+import validatorCountProposal from '../flows/proposals/validatorCountProposal'
+import wgMintCapacityProposal from '../flows/proposals/workingGroupMintCapacityProposal'
+import { scenario } from '../Scenario'
+
+scenario(async ({ job }) => {
+  job('creating members', creatingMemberships)
+
+  const councilJob = job('council setup', councilSetup)
+
+  job('proposals', [
+    electionParametersProposal,
+    spendingProposal,
+    textProposal,
+    validatorCountProposal,
+    wgMintCapacityProposal.storage,
+    wgMintCapacityProposal.content,
+    wgMintCapacityProposal.distribution,
+    manageLeaderRole.storage,
+    manageLeaderRole.content,
+    manageLeaderRole.distribution,
+  ]).requires(councilJob)
+})

+ 7 - 0
tests/network-tests/src/scenarios/setup-new-chain.ts

@@ -2,6 +2,8 @@ import assignCouncil from '../flows/council/assign'
 import leaderSetup from '../flows/workingGroup/leaderSetup'
 import mockContentFlow from '../misc/mockContentFlow'
 import updateAccountsFlow from '../misc/updateAllWorkerRoleAccountsFlow'
+import initStorage, { singleBucketConfig as defaultStorageConfig } from '../flows/storagev2/initStorage'
+import initDistribution, { singleBucketConfig as defaultDistributionConfig } from '../flows/storagev2/initDistribution'
 import { AllWorkingGroups } from '../WorkingGroups'
 import { scenario } from '../Scenario'
 
@@ -16,6 +18,11 @@ scenario(async ({ job }) => {
 
   const updateWorkerAccounts = job('Update worker accounts', updateAccountsFlow).after(leads)
 
+  if (!process.env.SKIP_STORAGE_AND_DISTRIBUTION) {
+    job('initialize storage system', initStorage(defaultStorageConfig)).requires(updateWorkerAccounts)
+    job('initialize distribution system', initDistribution(defaultDistributionConfig)).requires(updateWorkerAccounts)
+  }
+
   // Create some mock content in content directory - without assets or any real metadata
   job('Create Mock Content', mockContentFlow).after(updateWorkerAccounts)
 

+ 2 - 2
tests/network-tests/src/sender.ts

@@ -17,7 +17,7 @@ export enum LogLevel {
 
 export class Sender {
   private readonly api: ApiPromise
-  private static readonly asyncLock: AsyncLock = new AsyncLock({ maxPending: 2048 })
+  static readonly asyncLock: AsyncLock = new AsyncLock({ maxPending: 2048 })
   private readonly keyring: Keyring
   private readonly debug: Debugger.Debugger
   private logs: LogLevel = LogLevel.None
@@ -114,7 +114,7 @@ export class Sender {
     // Instead use a single lock for all calls, to force all transactions to be submitted in the same
     // order signAndSend was called in. Otherwise the chance of race conditions increases;
     // such races happened in rare cases and have led some tests to fail occasionally in the past
-    await Sender.asyncLock.acquire('tx-queue', async () => {
+    await Sender.asyncLock.acquire(['tx-queue', `nonce-${account.toString()}`], async () => {
       const nonce = await this.api.rpc.system.accountNextIndex(senderKeyPair.address)
       const signedTx = tx.sign(senderKeyPair, { nonce })
       sentTx = signedTx.toHuman()
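
The `async-lock` package accepts an array of keys and holds all of them for the duration of the callback. A minimal sketch, not part of this diff, of the pattern the hunk above introduces:

```ts
import AsyncLock from 'async-lock'

const lock = new AsyncLock({ maxPending: 2048 })

// Acquiring both keys serializes submissions globally ('tx-queue') and,
// in addition, keys the critical section by sender account, so the nonce
// read and the signed submission cannot interleave for the same account
async function submit(account: string, send: () => Promise<void>): Promise<void> {
  await lock.acquire(['tx-queue', `nonce-${account}`], async () => {
    await send()
  })
}
```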

+ 19 - 0
tests/network-tests/src/utils.ts

@@ -5,6 +5,10 @@ import BN from 'bn.js'
 import fs from 'fs'
 import { decodeAddress } from '@polkadot/keyring'
 import { Seat } from '@joystream/types/council'
+import { metaToObject } from '@joystream/metadata-protobuf/utils'
+import { AnyMetadataClass, DecodedMetadataObject } from '@joystream/metadata-protobuf/types'
+import { createType } from '@joystream/types'
+import { Bytes } from '@polkadot/types'
 
 export class Utils {
   private static LENGTH_ADDRESS = 32 + 1 // publicKey + prefix
@@ -50,4 +54,19 @@ export class Utils {
   public static camelToSnakeCase(key: string): string {
     return key.replace(/[A-Z]/g, (letter) => `_${letter.toLowerCase()}`)
   }
+
+  public static metadataToBytes<T>(metaClass: AnyMetadataClass<T>, obj: T): Bytes {
+    return createType('Bytes', '0x' + Buffer.from(metaClass.encode(obj).finish()).toString('hex'))
+  }
+
+  public static metadataFromBytes<T>(metaClass: AnyMetadataClass<T>, bytes: Bytes): DecodedMetadataObject<T> {
+    // We use `toObject()` to get rid of .prototype defaults for optional fields
+    return metaToObject(metaClass, metaClass.decode(bytes.toU8a(true)))
+  }
+
+  public static assert(condition: any, msg?: string): asserts condition {
+    if (!condition) {
+      throw new Error(msg || 'Assertion failed')
+    }
+  }
 }
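
A minimal sketch, not part of this diff, round-tripping metadata through the two helpers added above. `MembershipMetadata` is one of the classes exported by `@joystream/metadata-protobuf`; picking it here is illustrative:

```ts
import { MembershipMetadata } from '@joystream/metadata-protobuf'
import { Utils } from './utils' // the class extended above; path assumed

const bytes = Utils.metadataToBytes(MembershipMetadata, { name: 'alice', about: 'test member' })
const decoded = Utils.metadataFromBytes(MembershipMetadata, bytes)
Utils.assert(decoded.name === 'alice', 'round trip should preserve fields')
```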

+ 1 - 1
types/package.json

@@ -65,6 +65,6 @@
   "homepage": "https://github.com/Joystream/joystream",
   "volta": {
     "node": "14.16.1",
-    "yarn": "1.22.4"
+    "yarn": "1.22.15"
   }
 }

+ 1 - 1
utils/migration-scripts/src/logging.ts

@@ -10,7 +10,7 @@ winston.addColors(colors)
 
 export function createLogger(label: string): Logger {
   return winston.createLogger({
-    level: 'debug',
+    level: process.env.DEBUG ? 'debug' : 'info',
     transports: [new winston.transports.Console()],
     defaultMeta: { label },
     format: winston.format.combine(
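
Usage stays the same; only verbosity changes. A minimal sketch, not part of this diff (only the presence of `DEBUG` in the environment matters, not its value):

```ts
// Run with e.g. `DEBUG=1 node lib/script.js` to see debug output
const logger = createLogger('AssetsManager') // the function defined above
logger.debug('emitted only when DEBUG is set')
logger.info('always emitted')
```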

+ 2 - 0
utils/migration-scripts/src/sumer-giza/AssetsManager.ts

@@ -202,9 +202,11 @@ export class AssetsManager {
     let lastError: Error | undefined
     for (const endpoint of endpoints) {
       try {
+        this.logger.debug(`Trying to fetch asset ${contentId} from ${endpoint}...`)
         const tmpAssetPath = await this.fetchAsset(endpoint, contentId, expectedSize)
         return tmpAssetPath
       } catch (e) {
+        this.logger.debug(`Fetching ${contentId} from ${endpoint} failed: ${(e as Error).message}`)
         lastError = e as Error
         continue
       }

+ 1 - 0
utils/migration-scripts/src/sumer-giza/ContentMigration.ts

@@ -55,6 +55,7 @@ export class ContentMigration {
     const forcedChannelOwner = await this.getForcedChannelOwner()
     const assetsManager = await AssetsManager.create({
       api,
+      queryNodeApi,
       config,
     })
     const { idsMap: channelsMap, videoIds } = await new ChannelMigration({

+ 12 - 0
yarn.lock

@@ -5695,6 +5695,13 @@
   resolved "https://registry.yarnpkg.com/@types/bluebird/-/bluebird-3.5.36.tgz#00d9301d4dc35c2f6465a8aec634bb533674c652"
   integrity sha512-HBNx4lhkxN7bx6P0++W8E289foSu8kO8GCk2unhuVggO+cE7rh9DhZUyPhUxNRG9m+5B5BTKxZQ5ZP92x/mx9Q==
 
+"@types/bmp-js@^0.1.0":
+  version "0.1.0"
+  resolved "https://registry.yarnpkg.com/@types/bmp-js/-/bmp-js-0.1.0.tgz#301afe2bb3ac7ef0f18465966e4166f0491b3332"
+  integrity sha512-uMU85ROcmlY1f4mVPTlNodRXa6Z5f0AIxvv5b0pvjty3KNg7ljf5lNSspHgaF6iFDCiGpLQmJna+VwEpUC9TyA==
+  dependencies:
+    "@types/node" "*"
+
 "@types/bn.js@^4.11.5", "@types/bn.js@^4.11.6":
   version "4.11.6"
   resolved "https://registry.yarnpkg.com/@types/bn.js/-/bn.js-4.11.6.tgz#c306c70d9358aaea33cd4eda092a742b9505967c"
@@ -9527,6 +9534,11 @@ bluebird@~3.4.1:
   resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.4.7.tgz#f72d760be09b7f76d08ed8fae98b289a8d05fab3"
   integrity sha1-9y12C+Cbf3bQjtj66Ysomo0F+rM=
 
+bmp-js@^0.1.0:
+  version "0.1.0"
+  resolved "https://registry.yarnpkg.com/bmp-js/-/bmp-js-0.1.0.tgz#e05a63f796a6c1ff25f4771ec7adadc148c07233"
+  integrity sha1-4Fpj95amwf8l9Hcex62twUjAcjM=
+
 bn.js@4.12.0, bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.11.8, bn.js@^4.11.9, bn.js@^4.12.0, bn.js@^5.0.0, bn.js@^5.1.1, bn.js@^5.1.2, bn.js@^5.1.3, bn.js@^5.2.0:
   version "4.12.0"
   resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.12.0.tgz#775b3f278efbb9718eec7361f483fb36fbbfea88"

Some files were not shown because too many files changed in this diff