
Merge branch 'giza' into storage_node_v2_file_logging

Leszek Wiesner · 3 years ago · commit e46741824e
45 changed files with 6093 additions and 555 deletions
  1. Cargo.lock (+3 -3)
  2. chain-metadata.json (+0 -0)
  3. node/Cargo.toml (+1 -1)
  4. package.json (+1 -0)
  5. query-node/codegen/package.json (+2 -2)
  6. query-node/codegen/yarn.lock (+242 -240)
  7. query-node/mappings/package.json (+2 -2)
  8. query-node/package.json (+1 -1)
  9. runtime-modules/content/src/lib.rs (+156 -86)
  10. runtime-modules/content/src/tests/channels.rs (+66 -6)
  11. runtime-modules/content/src/tests/mock.rs (+60 -2)
  12. runtime-modules/content/src/tests/videos.rs (+17 -24)
  13. runtime-modules/storage/src/lib.rs (+336 -125)
  14. runtime-modules/storage/src/random_buckets/storage_bucket_picker.rs (+32 -7)
  15. runtime-modules/storage/src/tests/fixtures.rs (+74 -0)
  16. runtime-modules/storage/src/tests/mod.rs (+408 -14)
  17. runtime/Cargo.toml (+1 -1)
  18. runtime/src/lib.rs (+3 -3)
  19. utils/chain-spec-builder/Cargo.toml (+1 -1)
  20. utils/migration-scripts/.eslintignore (+1 -0)
  21. utils/migration-scripts/.gitignore (+9 -0)
  22. utils/migration-scripts/.prettierignore (+2 -0)
  23. utils/migration-scripts/README.md (+116 -0)
  24. utils/migration-scripts/bin/run (+3 -0)
  25. utils/migration-scripts/bin/run.cmd (+3 -0)
  26. utils/migration-scripts/package.json (+93 -0)
  27. utils/migration-scripts/src/RuntimeApi.ts (+142 -0)
  28. utils/migration-scripts/src/commands/sumer-giza/migrateContent.ts (+73 -0)
  29. utils/migration-scripts/src/commands/sumer-giza/retryFailedUploads.ts (+51 -0)
  30. utils/migration-scripts/src/index.ts (+1 -0)
  31. utils/migration-scripts/src/sumer-giza/AssetsManager.ts (+308 -0)
  32. utils/migration-scripts/src/sumer-giza/AssetsMigration.ts (+40 -0)
  33. utils/migration-scripts/src/sumer-giza/BaseMigration.ts (+121 -0)
  34. utils/migration-scripts/src/sumer-giza/ChannelsMigration.ts (+162 -0)
  35. utils/migration-scripts/src/sumer-giza/ContentHash.ts (+22 -0)
  36. utils/migration-scripts/src/sumer-giza/ContentMigration.ts (+69 -0)
  37. utils/migration-scripts/src/sumer-giza/ImageResizer.ts (+30 -0)
  38. utils/migration-scripts/src/sumer-giza/VideosMigration.ts (+192 -0)
  39. utils/migration-scripts/src/sumer-giza/sumer-query-node/api.ts (+120 -0)
  40. utils/migration-scripts/src/sumer-giza/sumer-query-node/codegen.yml (+33 -0)
  41. utils/migration-scripts/src/sumer-giza/sumer-query-node/generated/queries.ts (+229 -0)
  42. utils/migration-scripts/src/sumer-giza/sumer-query-node/generated/schema.ts (+2565 -0)
  43. utils/migration-scripts/src/sumer-giza/sumer-query-node/queries/queries.graphql (+122 -0)
  44. utils/migration-scripts/tsconfig.json (+21 -0)
  45. yarn.lock (+159 -37)

Cargo.lock (+3 -3)

@@ -731,7 +731,7 @@ dependencies = [
 
 [[package]]
 name = "chain-spec-builder"
-version = "3.1.1"
+version = "3.2.0"
 dependencies = [
  "ansi_term 0.12.1",
  "enum-utils",
@@ -2332,7 +2332,7 @@ dependencies = [
 
 [[package]]
 name = "joystream-node"
-version = "5.7.0"
+version = "5.10.0"
 dependencies = [
  "frame-benchmarking",
  "frame-benchmarking-cli",
@@ -2393,7 +2393,7 @@ dependencies = [
 
 [[package]]
 name = "joystream-node-runtime"
-version = "9.9.0"
+version = "9.10.0"
 dependencies = [
  "frame-benchmarking",
  "frame-executive",

chain-metadata.json (+0 -0)
File diff suppressed because it is too large


node/Cargo.toml (+1 -1)

@@ -3,7 +3,7 @@ authors = ['Joystream contributors']
 build = 'build.rs'
 edition = '2018'
 name = 'joystream-node'
-version = '5.7.0'
+version = '5.10.0'
 default-run = "joystream-node"
 
 [[bin]]

package.json (+1 -0)

@@ -24,6 +24,7 @@
     "pioneer",
     "pioneer/packages/*",
     "utils/api-scripts",
+    "utils/migration-scripts",
     "query-node",
     "query-node/mappings",
     "query-node/generated/graphql-server",

query-node/codegen/package.json (+2 -2)

@@ -8,7 +8,7 @@
     "postinstall": "cd .. && yarn workspace query-node-mappings postHydraCLIInstall"
   },
   "dependencies": {
-    "@joystream/hydra-cli": "3.1.0-alpha.13",
-    "@joystream/hydra-typegen": "3.1.0-alpha.13"
+    "@joystream/hydra-cli": "3.1.0-alpha.16",
+    "@joystream/hydra-typegen": "3.1.0-alpha.16"
   }
 }

query-node/codegen/yarn.lock (+242 -240)

@@ -33,9 +33,9 @@
   dependencies:
     xss "^1.0.8"
 
-"@apollographql/graphql-playground-react@https://github.com/Joystream/graphql-playground/releases/download/query-templates%401.7.27/graphql-playground-react-v1.7.27.tgz":
-  version "1.7.27"
-  resolved "https://github.com/Joystream/graphql-playground/releases/download/query-templates%401.7.27/graphql-playground-react-v1.7.27.tgz#f29765a3a182204bf2bb166a3ed10c7273637af9"
+"@apollographql/graphql-playground-react@https://github.com/Joystream/graphql-playground/releases/download/joystream%401.7.28/graphql-playground-react-v1.7.28.tgz":
+  version "1.7.28"
+  resolved "https://github.com/Joystream/graphql-playground/releases/download/joystream%401.7.28/graphql-playground-react-v1.7.28.tgz#24c9c54e14ae0ba13c894738b4b87301f5801b26"
   dependencies:
     "@types/lru-cache" "^4.1.1"
     apollo-link "^1.2.13"
@@ -205,10 +205,10 @@
   dependencies:
     regenerator-runtime "^0.13.4"
 
-"@babel/runtime@^7.14.6", "@babel/runtime@^7.15.3":
-  version "7.15.4"
-  resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.15.4.tgz#fd17d16bfdf878e6dd02d19753a39fa8a8d9c84a"
-  integrity sha512-99catp6bHCaxr4sJ/DbTGgHS4+Rs2RVd2g7iOap6SLGPDknRK9ztKNsE/Fg6QhSeh1FGE5f6gHGQmvvn3I3xhw==
+"@babel/runtime@^7.16.3":
+  version "7.16.7"
+  resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.16.7.tgz#03ff99f64106588c9c403c6ecb8c3bafbbdff1fa"
+  integrity sha512-9E9FJowqAsytyOY6LG+1KuueckRL+aQW+mKvXRXnuFGyRAyepJPmEo9vgMfXUA6O9u3IeEdv9MAkppFcaQwogQ==
   dependencies:
     regenerator-runtime "^0.13.4"
 
@@ -306,15 +306,15 @@
   resolved "https://registry.yarnpkg.com/@josephg/resolvable/-/resolvable-1.0.1.tgz#69bc4db754d79e1a2f17a650d3466e038d94a5eb"
   integrity sha512-CtzORUwWTTOTqfVtHaKRJ0I1kNQd1bpn3sUh8I3nJDVY+5/M/Oe1DnEWzPQvqq/xPIIkzzzIP7mfCoAjFRvDhg==
 
-"@joystream/hydra-cli@3.1.0-alpha.13":
-  version "3.1.0-alpha.13"
-  resolved "https://registry.yarnpkg.com/@joystream/hydra-cli/-/hydra-cli-3.1.0-alpha.13.tgz#230485159e285f303757443e173d87fbe97f2835"
-  integrity sha512-hSUaSDRTHg8Y2atiRTl810aiscIKkdSEHUVlsfMb1fD7n9vCAX7hel2oUyfPMoW6NpnQaptkOtVinaLyAr/bkg==
+"@joystream/hydra-cli@3.1.0-alpha.16":
+  version "3.1.0-alpha.16"
+  resolved "https://registry.yarnpkg.com/@joystream/hydra-cli/-/hydra-cli-3.1.0-alpha.16.tgz#3bebe326b2ae6ad96b821797ca699c581217ac45"
+  integrity sha512-2Dq5fBqJWdqE0OhvI/kBA0i3gngnDmd0AaSFhJ03LE3mKTvzhapaOyWmEgO9vqQCSopSi0wWorizzksnu2+GQw==
   dependencies:
     "@inquirer/input" "^0.0.13-alpha.0"
     "@inquirer/password" "^0.0.12-alpha.0"
     "@inquirer/select" "^0.0.13-alpha.0"
-    "@joystream/warthog" "^2.40.0"
+    "@joystream/warthog" "~2.41.2"
     "@oclif/command" "^1.5.20"
     "@oclif/config" "^1"
     "@oclif/errors" "^1.3.3"
@@ -342,15 +342,15 @@
     pluralize "^8.0.0"
     tslib "1.11.2"
 
-"@joystream/hydra-typegen@3.1.0-alpha.13":
-  version "3.1.0-alpha.13"
-  resolved "https://registry.yarnpkg.com/@joystream/hydra-typegen/-/hydra-typegen-3.1.0-alpha.13.tgz#cb19dbe4b496a1b003b6c0a663ffa961743a07ca"
-  integrity sha512-ayIYrPc7ofQEsRIKL71Hvdm8/tqFNo4s1WwjwW7xAScTqIjimgG4y/3OjQbsgXzcLB03E4UOE0ECLwqzoYDrug==
+"@joystream/hydra-typegen@3.1.0-alpha.16":
+  version "3.1.0-alpha.16"
+  resolved "https://registry.yarnpkg.com/@joystream/hydra-typegen/-/hydra-typegen-3.1.0-alpha.16.tgz#5756b714767be8f3b237dba270386113c64b1245"
+  integrity sha512-ik1iegF7qZXeumsJ8baeff5VAxgrc6+yyRIZNFgWrCRDVEnP613XNFpUIcKzuXme7BhCVeaY5ynLaQUtU6lcUw==
   dependencies:
     "@oclif/command" "^1.8.0"
     "@oclif/config" "^1"
     "@oclif/errors" "^1.3.3"
-    "@polkadot/api" "4.16.2"
+    "@polkadot/api" "5.9.1"
     debug "^4.3.1"
     handlebars "^4.7.6"
     lodash "^4.17.20"
@@ -358,12 +358,12 @@
     yaml "^1.10.0"
     yaml-validator "^3.0.0"
 
-"@joystream/warthog@^2.40.0":
-  version "2.40.0"
-  resolved "https://registry.yarnpkg.com/@joystream/warthog/-/warthog-2.40.0.tgz#6384803b0326dd43b554aac65c68838249f1119e"
-  integrity sha512-fNlN0rzCPWvt1lrBXz24UFdwMMJBrrGPB1ObruQXJXTbZeZ+OuqIJLCCw2j+JjeT/Tl569VM4/S69jA+usCfng==
+"@joystream/warthog@~2.41.2":
+  version "2.41.2"
+  resolved "https://registry.yarnpkg.com/@joystream/warthog/-/warthog-2.41.2.tgz#6d3cf5c977320d1c77be518e848e011a9699b22d"
+  integrity sha512-1w6aT5P3xiI/HaTtqJrVj4Yp1/gxG8cGTeYgzlwr3iq8J11skwE4rLCHQucHfVueyBX49AaqWrhl+wI2ACqk4Q==
   dependencies:
-    "@apollographql/graphql-playground-react" "https://github.com/Joystream/graphql-playground/releases/download/query-templates%401.7.27/graphql-playground-react-v1.7.27.tgz"
+    "@apollographql/graphql-playground-react" "https://github.com/Joystream/graphql-playground/releases/download/joystream%401.7.28/graphql-playground-react-v1.7.28.tgz"
     "@types/app-root-path" "^1.2.4"
     "@types/bn.js" "^4.11.6"
     "@types/caller" "^1.0.0"
@@ -422,7 +422,7 @@
     typedi "^0.8.0"
     typeorm "0.2.37"
     typeorm-typedi-extensions "^0.4.1"
-    typescript "^3.9.7"
+    typescript "^4.4"
 
 "@nodelib/fs.scandir@2.1.5":
   version "2.1.5"
@@ -560,226 +560,204 @@
   resolved "https://registry.yarnpkg.com/@oclif/screen/-/screen-1.0.4.tgz#b740f68609dfae8aa71c3a6cab15d816407ba493"
   integrity sha512-60CHpq+eqnTxLZQ4PGHYNwUX572hgpMHGPtTWMjdTMsAvlm69lZV/4ly6O3sAYkomo4NggGcomrDpBe34rxUqw==
 
-"@polkadot/api-derive@4.16.2":
-  version "4.16.2"
-  resolved "https://registry.yarnpkg.com/@polkadot/api-derive/-/api-derive-4.16.2.tgz#8ed97fec7965a1be1c5d87a3639752d5cdfdbc8a"
-  integrity sha512-xRAIGoeULK+E7uep5D0eDUN6m0KcMV4eOPkmvyfp7ndxfaf94ydfEOw+QemrnT1T/chA/qq96EYvuBe3lv5w1Q==
-  dependencies:
-    "@babel/runtime" "^7.14.6"
-    "@polkadot/api" "4.16.2"
-    "@polkadot/rpc-core" "4.16.2"
-    "@polkadot/types" "4.16.2"
-    "@polkadot/util" "^6.10.1"
-    "@polkadot/util-crypto" "^6.10.1"
-    "@polkadot/x-rxjs" "^6.10.1"
-
-"@polkadot/api@4.16.2":
-  version "4.16.2"
-  resolved "https://registry.yarnpkg.com/@polkadot/api/-/api-4.16.2.tgz#361fbeb690d8b646387e9f8bec22929aca09d691"
-  integrity sha512-x+fWc7mE3ZuGxoFCTf/Tnv0z7rDTM198M9LnWUJdadyNT3QAtE+Cjgo1bCrroTnuD3whd0jhFLfLQCwz95RrwA==
-  dependencies:
-    "@babel/runtime" "^7.14.6"
-    "@polkadot/api-derive" "4.16.2"
-    "@polkadot/keyring" "^6.10.1"
-    "@polkadot/metadata" "4.16.2"
-    "@polkadot/rpc-core" "4.16.2"
-    "@polkadot/rpc-provider" "4.16.2"
-    "@polkadot/types" "4.16.2"
-    "@polkadot/types-known" "4.16.2"
-    "@polkadot/util" "^6.10.1"
-    "@polkadot/util-crypto" "^6.10.1"
-    "@polkadot/x-rxjs" "^6.10.1"
+"@polkadot/api-derive@5.9.1":
+  version "5.9.1"
+  resolved "https://registry.yarnpkg.com/@polkadot/api-derive/-/api-derive-5.9.1.tgz#5937069920ded1439e6672b9d6be1072421b256b"
+  integrity sha512-iMrVKnYIS3UQciDlFqww6AFyXgG+iN8UqWu8QbTuZecri3qrSmM3Nn8Jkvju3meZIacwWIMSmBcnj8+zef3rkQ==
+  dependencies:
+    "@babel/runtime" "^7.15.4"
+    "@polkadot/api" "5.9.1"
+    "@polkadot/rpc-core" "5.9.1"
+    "@polkadot/types" "5.9.1"
+    "@polkadot/util" "^7.3.1"
+    "@polkadot/util-crypto" "^7.3.1"
+    rxjs "^7.3.0"
+
+"@polkadot/api@5.9.1":
+  version "5.9.1"
+  resolved "https://registry.yarnpkg.com/@polkadot/api/-/api-5.9.1.tgz#ce314cc34f0a47098d039db7b9036bb491c2898c"
+  integrity sha512-POpIXn/Ao+NLB0uMldXdXU44dVbRr6+6Ax77Z0R285M8Z2EiF5jl2K3SPvlowLo4SntxiCSaHQxCekYhUcJKlw==
+  dependencies:
+    "@babel/runtime" "^7.15.4"
+    "@polkadot/api-derive" "5.9.1"
+    "@polkadot/keyring" "^7.3.1"
+    "@polkadot/rpc-core" "5.9.1"
+    "@polkadot/rpc-provider" "5.9.1"
+    "@polkadot/types" "5.9.1"
+    "@polkadot/types-known" "5.9.1"
+    "@polkadot/util" "^7.3.1"
+    "@polkadot/util-crypto" "^7.3.1"
     eventemitter3 "^4.0.7"
+    rxjs "^7.3.0"
+
+"@polkadot/keyring@^7.3.1":
+  version "7.9.2"
+  resolved "https://registry.yarnpkg.com/@polkadot/keyring/-/keyring-7.9.2.tgz#1f5bf6b7bdb5942d275aebf72d4ed98abe874fa8"
+  integrity sha512-6UGoIxhiTyISkYEZhUbCPpgVxaneIfb/DBVlHtbvaABc8Mqh1KuqcTIq19Mh9wXlBuijl25rw4lUASrE/9sBqg==
+  dependencies:
+    "@babel/runtime" "^7.16.3"
+    "@polkadot/util" "7.9.2"
+    "@polkadot/util-crypto" "7.9.2"
+
+"@polkadot/networks@7.9.2", "@polkadot/networks@^7.3.1":
+  version "7.9.2"
+  resolved "https://registry.yarnpkg.com/@polkadot/networks/-/networks-7.9.2.tgz#03e3f3ac6bdea177517436537826055df60bcb9a"
+  integrity sha512-4obI1RdW5/7TFwbwKA9oqw8aggVZ65JAUvIFMd2YmMC2T4+NiZLnok0WhRkhZkUnqjLIHXYNwq7Ho1i39dte0g==
+  dependencies:
+    "@babel/runtime" "^7.16.3"
+
+"@polkadot/rpc-core@5.9.1":
+  version "5.9.1"
+  resolved "https://registry.yarnpkg.com/@polkadot/rpc-core/-/rpc-core-5.9.1.tgz#68e2a2ea18c15aa15743e7487a407fdd65d1d900"
+  integrity sha512-5fXiICAcjp7ow81DnIl2Dq/xuCtJUqyjJkxe9jNHJWBluBxOouqYDb8bYPPGSdckiaVyYe0l8lA9fBUFMdEt6w==
+  dependencies:
+    "@babel/runtime" "^7.15.4"
+    "@polkadot/rpc-provider" "5.9.1"
+    "@polkadot/types" "5.9.1"
+    "@polkadot/util" "^7.3.1"
+    rxjs "^7.3.0"
 
-"@polkadot/keyring@^6.10.1":
-  version "6.11.1"
-  resolved "https://registry.yarnpkg.com/@polkadot/keyring/-/keyring-6.11.1.tgz#2510c349c965c74cc2f108f114f1048856940604"
-  integrity sha512-rW8INl7pO6Dmaffd6Df1yAYCRWa2RmWQ0LGfJeA/M6seVIkI6J3opZqAd4q2Op+h9a7z4TESQGk8yggOEL+Csg==
-  dependencies:
-    "@babel/runtime" "^7.14.6"
-    "@polkadot/util" "6.11.1"
-    "@polkadot/util-crypto" "6.11.1"
-
-"@polkadot/metadata@4.16.2":
-  version "4.16.2"
-  resolved "https://registry.yarnpkg.com/@polkadot/metadata/-/metadata-4.16.2.tgz#2a90c9e6ac500ee1b176a5e0e08b64c8d7bf5458"
-  integrity sha512-wx5DwAxV8zEDQzgdeDFRRlDb89CqmgY/eKusvMgzRuLG5Z4Hu4jxQ6LnBsjVmA70BBhgs+uAuJ7mzY76OO4wDw==
-  dependencies:
-    "@babel/runtime" "^7.14.6"
-    "@polkadot/types" "4.16.2"
-    "@polkadot/types-known" "4.16.2"
-    "@polkadot/util" "^6.10.1"
-    "@polkadot/util-crypto" "^6.10.1"
-
-"@polkadot/networks@6.11.1", "@polkadot/networks@^6.10.1":
-  version "6.11.1"
-  resolved "https://registry.yarnpkg.com/@polkadot/networks/-/networks-6.11.1.tgz#8fd189593f6ee4f8bf64378d0aaae09e39a37d35"
-  integrity sha512-0C6Ha2kvr42se3Gevx6UhHzv3KnPHML0N73Amjwvdr4y0HLZ1Nfw+vcm5yqpz5gpiehqz97XqFrsPRauYdcksQ==
-  dependencies:
-    "@babel/runtime" "^7.14.6"
-
-"@polkadot/rpc-core@4.16.2":
-  version "4.16.2"
-  resolved "https://registry.yarnpkg.com/@polkadot/rpc-core/-/rpc-core-4.16.2.tgz#a839407a1c00048a10ed711ad3dd1b52f8fd20cc"
-  integrity sha512-NAMkN5rtccLL7G0aeMqxx/R38exkJ/xVNEZh9Y/okw8w0iOCnZk72ge9ABkd/SJbLxm6l+5c87cTXUK77r1zTQ==
-  dependencies:
-    "@babel/runtime" "^7.14.6"
-    "@polkadot/metadata" "4.16.2"
-    "@polkadot/rpc-provider" "4.16.2"
-    "@polkadot/types" "4.16.2"
-    "@polkadot/util" "^6.10.1"
-    "@polkadot/x-rxjs" "^6.10.1"
-
-"@polkadot/rpc-provider@4.16.2":
-  version "4.16.2"
-  resolved "https://registry.yarnpkg.com/@polkadot/rpc-provider/-/rpc-provider-4.16.2.tgz#73a0b6818ec57d10b735b1e471eb7d88dd8a39db"
-  integrity sha512-aAq3mHkgHziQrZQdNuxGSrkKKksA8Kk0N8WWsW1DZOkjt7rlF3vdmCguHTPlOzO4NHmeDsGVlGGBzjOza8QNbA==
-  dependencies:
-    "@babel/runtime" "^7.14.6"
-    "@polkadot/types" "4.16.2"
-    "@polkadot/util" "^6.10.1"
-    "@polkadot/util-crypto" "^6.10.1"
-    "@polkadot/x-fetch" "^6.10.1"
-    "@polkadot/x-global" "^6.10.1"
-    "@polkadot/x-ws" "^6.10.1"
+"@polkadot/rpc-provider@5.9.1":
+  version "5.9.1"
+  resolved "https://registry.yarnpkg.com/@polkadot/rpc-provider/-/rpc-provider-5.9.1.tgz#8e67769c05ba71ecf4f5bc0c5a60eb9afc699167"
+  integrity sha512-9zamxfnsY7iCswXIK22W0Ji1XHLprm97js3WLw3lP2hr/uSim4Cv4y07zY/z4dDQyF0gJtjKwR27Wo9CZqdr6A==
+  dependencies:
+    "@babel/runtime" "^7.15.4"
+    "@polkadot/types" "5.9.1"
+    "@polkadot/util" "^7.3.1"
+    "@polkadot/util-crypto" "^7.3.1"
+    "@polkadot/x-fetch" "^7.3.1"
+    "@polkadot/x-global" "^7.3.1"
+    "@polkadot/x-ws" "^7.3.1"
     eventemitter3 "^4.0.7"
 
-"@polkadot/types-known@4.16.2":
-  version "4.16.2"
-  resolved "https://registry.yarnpkg.com/@polkadot/types-known/-/types-known-4.16.2.tgz#94e54adb3ba767342f9aed226eb4aa973520b911"
-  integrity sha512-ydeS1SnO25O//TThzUBYjthCOH3h70j1IRVQ+CPVhVbZJoMRr47hIysFTBjyxyKVTQtj20vniZV8+qq6oiWggA==
-  dependencies:
-    "@babel/runtime" "^7.14.6"
-    "@polkadot/networks" "^6.10.1"
-    "@polkadot/types" "4.16.2"
-    "@polkadot/util" "^6.10.1"
-
-"@polkadot/types@4.16.2":
-  version "4.16.2"
-  resolved "https://registry.yarnpkg.com/@polkadot/types/-/types-4.16.2.tgz#06dfedf19a50d659863c068ba1444efbc214c302"
-  integrity sha512-JSIvVKIBhRHCswDPYMoy4TLvR9O1NT5mqyIBoLjNKur0WShLk1jVtiyKbU+2/AuCbM1nehiWagmAlWmMFNaDMw==
-  dependencies:
-    "@babel/runtime" "^7.14.6"
-    "@polkadot/metadata" "4.16.2"
-    "@polkadot/util" "^6.10.1"
-    "@polkadot/util-crypto" "^6.10.1"
-    "@polkadot/x-rxjs" "^6.10.1"
-
-"@polkadot/util-crypto@6.11.1", "@polkadot/util-crypto@^6.10.1":
-  version "6.11.1"
-  resolved "https://registry.yarnpkg.com/@polkadot/util-crypto/-/util-crypto-6.11.1.tgz#7a36acf5c8bf52541609ec0b0b2a69af295d652e"
-  integrity sha512-fWA1Nz17FxWJslweZS4l0Uo30WXb5mYV1KEACVzM+BSZAvG5eoiOAYX6VYZjyw6/7u53XKrWQlD83iPsg3KvZw==
-  dependencies:
-    "@babel/runtime" "^7.14.6"
-    "@polkadot/networks" "6.11.1"
-    "@polkadot/util" "6.11.1"
-    "@polkadot/wasm-crypto" "^4.0.2"
-    "@polkadot/x-randomvalues" "6.11.1"
-    base-x "^3.0.8"
-    base64-js "^1.5.1"
+"@polkadot/types-known@5.9.1":
+  version "5.9.1"
+  resolved "https://registry.yarnpkg.com/@polkadot/types-known/-/types-known-5.9.1.tgz#e52fc7b803bc7cb3f41028f88963deb4ccee40af"
+  integrity sha512-7lpLuIVGaKziQRzPMnTxyjlYy3spL6WqUg3CcEzmJUKQeUonHglOliQh8JSSz1bcP+YuNHGXK1cKsTjHb+GYxA==
+  dependencies:
+    "@babel/runtime" "^7.15.4"
+    "@polkadot/networks" "^7.3.1"
+    "@polkadot/types" "5.9.1"
+    "@polkadot/util" "^7.3.1"
+
+"@polkadot/types@5.9.1":
+  version "5.9.1"
+  resolved "https://registry.yarnpkg.com/@polkadot/types/-/types-5.9.1.tgz#74cf4695795f2aa365ff85d3873e22c430100bc9"
+  integrity sha512-30vcSlNBxPyWYZaxKDr/BoMhfLCRKB265XxpnnNJmbdZZsL+N4Zp2mJR9/UbA6ypmJBkUjD7b1s9AYsLwUs+8w==
+  dependencies:
+    "@babel/runtime" "^7.15.4"
+    "@polkadot/util" "^7.3.1"
+    "@polkadot/util-crypto" "^7.3.1"
+    rxjs "^7.3.0"
+
+"@polkadot/util-crypto@7.9.2", "@polkadot/util-crypto@^7.3.1":
+  version "7.9.2"
+  resolved "https://registry.yarnpkg.com/@polkadot/util-crypto/-/util-crypto-7.9.2.tgz#cdc336f92a6bc3d40c5a23734e1974fb777817f0"
+  integrity sha512-nNwqUwP44eCH9jKKcPie+IHLKkg9LMe6H7hXo91hy3AtoslnNrT51tP3uAm5yllhLvswJfnAgnlHq7ybCgqeFw==
+  dependencies:
+    "@babel/runtime" "^7.16.3"
+    "@polkadot/networks" "7.9.2"
+    "@polkadot/util" "7.9.2"
+    "@polkadot/wasm-crypto" "^4.4.1"
+    "@polkadot/x-randomvalues" "7.9.2"
     blakejs "^1.1.1"
-    bn.js "^4.11.9"
+    bn.js "^4.12.0"
     create-hash "^1.2.0"
+    ed2curve "^0.3.0"
     elliptic "^6.5.4"
     hash.js "^1.1.7"
     js-sha3 "^0.8.0"
+    micro-base "^0.9.0"
     scryptsy "^2.1.0"
     tweetnacl "^1.0.3"
     xxhashjs "^0.2.2"
 
-"@polkadot/util@6.11.1", "@polkadot/util@^6.10.1":
-  version "6.11.1"
-  resolved "https://registry.yarnpkg.com/@polkadot/util/-/util-6.11.1.tgz#8950b038ba3e6ebfc0a7ff47feeb972e81b2626c"
-  integrity sha512-TEdCetr9rsdUfJZqQgX/vxLuV4XU8KMoKBMJdx+JuQ5EWemIdQkEtMBdL8k8udNGbgSNiYFA6rPppATeIxAScg==
+"@polkadot/util@7.9.2", "@polkadot/util@^7.3.1":
+  version "7.9.2"
+  resolved "https://registry.yarnpkg.com/@polkadot/util/-/util-7.9.2.tgz#567ac659516d6b685ed7e796919901d92e5cbe6b"
+  integrity sha512-6ABY6ErgkCsM4C6+X+AJSY4pBGwbKlHZmUtHftaiTvbaj4XuA4nTo3GU28jw8wY0Jh2cJZJvt6/BJ5GVkm5tBA==
   dependencies:
-    "@babel/runtime" "^7.14.6"
-    "@polkadot/x-textdecoder" "6.11.1"
-    "@polkadot/x-textencoder" "6.11.1"
+    "@babel/runtime" "^7.16.3"
+    "@polkadot/x-textdecoder" "7.9.2"
+    "@polkadot/x-textencoder" "7.9.2"
     "@types/bn.js" "^4.11.6"
-    bn.js "^4.11.9"
-    camelcase "^5.3.1"
+    bn.js "^4.12.0"
+    camelcase "^6.2.1"
     ip-regex "^4.3.0"
 
-"@polkadot/wasm-crypto-asmjs@^4.2.1":
-  version "4.2.1"
-  resolved "https://registry.yarnpkg.com/@polkadot/wasm-crypto-asmjs/-/wasm-crypto-asmjs-4.2.1.tgz#6b7eae1c011709f8042dfd30872a5fc5e9e021c0"
-  integrity sha512-ON9EBpTNDCI3QRUmuQJIegYoAcwvxDaNNA7uwKTaEEStu8LjCIbQxbt4WbOBYWI0PoUpl4iIluXdT3XZ3V3jXA==
-  dependencies:
-    "@babel/runtime" "^7.15.3"
-
-"@polkadot/wasm-crypto-wasm@^4.2.1":
-  version "4.2.1"
-  resolved "https://registry.yarnpkg.com/@polkadot/wasm-crypto-wasm/-/wasm-crypto-wasm-4.2.1.tgz#2a86f9b405e7195c3f523798c6ce4afffd19737e"
-  integrity sha512-Rs2CKiR4D+2hKzmKBfPNYxcd2E8NfLWia0av4fgicjT9YsWIWOGQUi9AtSOfazPOR9FrjxKJy+chQxAkcfKMnQ==
-  dependencies:
-    "@babel/runtime" "^7.15.3"
-
-"@polkadot/wasm-crypto@^4.0.2":
-  version "4.2.1"
-  resolved "https://registry.yarnpkg.com/@polkadot/wasm-crypto/-/wasm-crypto-4.2.1.tgz#4d09402f5ac71a90962fb58cbe4b1707772a4fb6"
-  integrity sha512-C/A/QnemOilRTLnM0LfhPY2N/x3ZFd1ihm9sXYyuh98CxtekSVYI9h4IJ5Jrgz5imSUHgvt9oJLqJ5GbWQV/Zg==
-  dependencies:
-    "@babel/runtime" "^7.15.3"
-    "@polkadot/wasm-crypto-asmjs" "^4.2.1"
-    "@polkadot/wasm-crypto-wasm" "^4.2.1"
-
-"@polkadot/x-fetch@^6.10.1":
-  version "6.11.1"
-  resolved "https://registry.yarnpkg.com/@polkadot/x-fetch/-/x-fetch-6.11.1.tgz#97d44d78ef0285eec6f6dbc4006302308ec8e24c"
-  integrity sha512-qJyLLnm+4SQEZ002UDz2wWnXbnnH84rIS0mLKZ5k82H4lMYY+PQflvzv6sbu463e/lgiEao+6zvWS6DSKv1Yog==
-  dependencies:
-    "@babel/runtime" "^7.14.6"
-    "@polkadot/x-global" "6.11.1"
-    "@types/node-fetch" "^2.5.10"
-    node-fetch "^2.6.1"
-
-"@polkadot/x-global@6.11.1", "@polkadot/x-global@^6.10.1":
-  version "6.11.1"
-  resolved "https://registry.yarnpkg.com/@polkadot/x-global/-/x-global-6.11.1.tgz#c292b3825fea60e9b33fff1790323fc57de1ca5d"
-  integrity sha512-lsBK/e4KbjfieyRmnPs7bTiGbP/6EoCZz7rqD/voNS5qsJAaXgB9LR+ilubun9gK/TDpebyxgO+J19OBiQPIRw==
-  dependencies:
-    "@babel/runtime" "^7.14.6"
-
-"@polkadot/x-randomvalues@6.11.1":
-  version "6.11.1"
-  resolved "https://registry.yarnpkg.com/@polkadot/x-randomvalues/-/x-randomvalues-6.11.1.tgz#f006fa250c8e82c92ccb769976a45a8e7f3df28b"
-  integrity sha512-2MfUfGZSOkuPt7GF5OJkPDbl4yORI64SUuKM25EGrJ22o1UyoBnPOClm9eYujLMD6BfDZRM/7bQqqoLW+NuHVw==
-  dependencies:
-    "@babel/runtime" "^7.14.6"
-    "@polkadot/x-global" "6.11.1"
-
-"@polkadot/x-rxjs@^6.10.1":
-  version "6.11.1"
-  resolved "https://registry.yarnpkg.com/@polkadot/x-rxjs/-/x-rxjs-6.11.1.tgz#5454708b61da70eea05708611d9148fce9372498"
-  integrity sha512-zIciEmij7SUuXXg9g/683Irx6GogxivrQS2pgBir2DI/YZq+um52+Dqg1mqsEZt74N4KMTMnzAZAP6LJOBOMww==
-  dependencies:
-    "@babel/runtime" "^7.14.6"
-    rxjs "^6.6.7"
-
-"@polkadot/x-textdecoder@6.11.1":
-  version "6.11.1"
-  resolved "https://registry.yarnpkg.com/@polkadot/x-textdecoder/-/x-textdecoder-6.11.1.tgz#6cc314645681cc4639085c03b65328671c7f182c"
-  integrity sha512-DI1Ym2lyDSS/UhnTT2e9WutukevFZ0WGpzj4eotuG2BTHN3e21uYtYTt24SlyRNMrWJf5+TkZItmZeqs1nwAfQ==
+"@polkadot/wasm-crypto-asmjs@^4.5.1":
+  version "4.5.1"
+  resolved "https://registry.yarnpkg.com/@polkadot/wasm-crypto-asmjs/-/wasm-crypto-asmjs-4.5.1.tgz#e1025a49e106db11d1187caf65f56c960ea2ad2b"
+  integrity sha512-DOdRiWhxVvmqTvp+E9z1j+Yr0zDOGsDvqnT/eNw0Dl1FVUOImsEa7FKns/urASmcxCVEE1jtUWSnij29jrORMQ==
   dependencies:
-    "@babel/runtime" "^7.14.6"
-    "@polkadot/x-global" "6.11.1"
+    "@babel/runtime" "^7.16.3"
 
-"@polkadot/x-textencoder@6.11.1":
-  version "6.11.1"
-  resolved "https://registry.yarnpkg.com/@polkadot/x-textencoder/-/x-textencoder-6.11.1.tgz#73e89da5b91954ae380042c19314c90472f59d9e"
-  integrity sha512-8ipjWdEuqFo+R4Nxsc3/WW9CSEiprX4XU91a37ZyRVC4e9R1bmvClrpXmRQLVcAQyhRvG8DKOOtWbz8xM+oXKg==
+"@polkadot/wasm-crypto-wasm@^4.5.1":
+  version "4.5.1"
+  resolved "https://registry.yarnpkg.com/@polkadot/wasm-crypto-wasm/-/wasm-crypto-wasm-4.5.1.tgz#063a58ff7ddd939b7886a6a238109a8d2c416e46"
+  integrity sha512-hPwke85HxpgG/RAlwdCE8u5w7bThvWg399mlB+XjogXMxOUWBZSgq2XYbgzROUXx27inK9nStF4Pnc4zJnqs9A==
   dependencies:
-    "@babel/runtime" "^7.14.6"
-    "@polkadot/x-global" "6.11.1"
+    "@babel/runtime" "^7.16.3"
 
-"@polkadot/x-ws@^6.10.1":
-  version "6.11.1"
-  resolved "https://registry.yarnpkg.com/@polkadot/x-ws/-/x-ws-6.11.1.tgz#338adc7309e3a8e660fce8eb42f975426da48d10"
-  integrity sha512-GNu4ywrMlVi0QF6QSpKwYWMK6JRK+kadgN/zEhMoH1z5h8LwpqDLv128j5WspWbQti2teCQtridjf7t2Lzoe8Q==
-  dependencies:
-    "@babel/runtime" "^7.14.6"
-    "@polkadot/x-global" "6.11.1"
-    "@types/websocket" "^1.0.3"
+"@polkadot/wasm-crypto@^4.4.1":
+  version "4.5.1"
+  resolved "https://registry.yarnpkg.com/@polkadot/wasm-crypto/-/wasm-crypto-4.5.1.tgz#e1ac6d846a0ad8e991cec128994524183ef6e8fd"
+  integrity sha512-Cr21ais3Kq3aedIHZ3J1tjgeD/+K8FCiwEawr0oRywNBSJR8wyuZMePs4swR/6xm8wbBkpqoBVHz/UQHqqQJmA==
+  dependencies:
+    "@babel/runtime" "^7.16.3"
+    "@polkadot/wasm-crypto-asmjs" "^4.5.1"
+    "@polkadot/wasm-crypto-wasm" "^4.5.1"
+
+"@polkadot/x-fetch@^7.3.1":
+  version "7.9.2"
+  resolved "https://registry.yarnpkg.com/@polkadot/x-fetch/-/x-fetch-7.9.2.tgz#fe943be5854f7355630388b1b5d2bb52f1a3afb2"
+  integrity sha512-zutLkFJVaLVpY3cIGYJD0AReLfAnPr2J82Ca4pvy/BxqwwGYuGLcn36A4m6nliGBP2lcH4oYY+mcCqIwoPWQUQ==
+  dependencies:
+    "@babel/runtime" "^7.16.3"
+    "@polkadot/x-global" "7.9.2"
+    "@types/node-fetch" "^2.5.12"
+    node-fetch "^2.6.6"
+
+"@polkadot/x-global@7.9.2", "@polkadot/x-global@^7.3.1":
+  version "7.9.2"
+  resolved "https://registry.yarnpkg.com/@polkadot/x-global/-/x-global-7.9.2.tgz#b272b0a3bedaad3bcbf075ec4682abe68cf2a850"
+  integrity sha512-JX5CrGWckHf1P9xKXq4vQCAuMUbL81l2hOWX7xeP8nv4caHEpmf5T1wD1iMdQBL5PFifo6Pg0V6/oZBB+bts7A==
+  dependencies:
+    "@babel/runtime" "^7.16.3"
+
+"@polkadot/x-randomvalues@7.9.2":
+  version "7.9.2"
+  resolved "https://registry.yarnpkg.com/@polkadot/x-randomvalues/-/x-randomvalues-7.9.2.tgz#0c9bb7b48a0791c2a32e9605a31a5ce56fee621d"
+  integrity sha512-svQfG31yCXf6yVyIgP0NgCzEy7oc3Lw054ZspkaqjOivxYdrXaf5w3JSSUyM/MRjI2+nk+B/EyJoMYcfSwTfsQ==
+  dependencies:
+    "@babel/runtime" "^7.16.3"
+    "@polkadot/x-global" "7.9.2"
+
+"@polkadot/x-textdecoder@7.9.2":
+  version "7.9.2"
+  resolved "https://registry.yarnpkg.com/@polkadot/x-textdecoder/-/x-textdecoder-7.9.2.tgz#a78548e33efeb3a25f761fec9787b2bcae7f0608"
+  integrity sha512-wfwbSHXPhrOAl12QvlIOGNkMH/N/h8PId2ytIjvM/8zPPFB5Il6DWSFLtVapOGEpIFjEWbd5t8Td4pHBVXIEbg==
+  dependencies:
+    "@babel/runtime" "^7.16.3"
+    "@polkadot/x-global" "7.9.2"
+
+"@polkadot/x-textencoder@7.9.2":
+  version "7.9.2"
+  resolved "https://registry.yarnpkg.com/@polkadot/x-textencoder/-/x-textencoder-7.9.2.tgz#b32bfd6fbff8587c56452f58252a52d62bbcd5b9"
+  integrity sha512-A19wwYINuZwU2dUyQ/mMzB0ISjyfc4cISfL4zCMUAVgj7xVoXMYV2GfjNdMpA8Wsjch3su6pxLbtJ2wU03sRTQ==
+  dependencies:
+    "@babel/runtime" "^7.16.3"
+    "@polkadot/x-global" "7.9.2"
+
+"@polkadot/x-ws@^7.3.1":
+  version "7.9.2"
+  resolved "https://registry.yarnpkg.com/@polkadot/x-ws/-/x-ws-7.9.2.tgz#016df26fa829b74f8b1e31a1dcd6e34256c1231f"
+  integrity sha512-+yppMsZtvDztVOSmkqAQuhR6TfV1Axa6ergAsWb52DrfXvFP5geqtARsI6ZdDgMsE3qHSVQTcJz8vgNOr5+ztQ==
+  dependencies:
+    "@babel/runtime" "^7.16.3"
+    "@polkadot/x-global" "7.9.2"
+    "@types/websocket" "^1.0.4"
     websocket "^1.0.34"
 
 "@protobufjs/aspromise@^1.1.1", "@protobufjs/aspromise@^1.1.2":
@@ -1164,7 +1142,7 @@
   resolved "https://registry.yarnpkg.com/@types/node-emoji/-/node-emoji-1.8.1.tgz#689cb74fdf6e84309bcafce93a135dfecd01de3f"
   integrity sha512-0fRfA90FWm6KJfw6P9QGyo0HDTCmthZ7cWaBQndITlaWLTZ6njRyKwrwpzpg+n6kBXBIGKeUHEQuBx7bphGJkA==
 
-"@types/node-fetch@^2.5.10":
+"@types/node-fetch@^2.5.12":
   version "2.5.12"
   resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.5.12.tgz#8a6f779b1d4e60b7a57fb6fd48d84fb545b9cc66"
   integrity sha512-MKgC4dlq4kKNa/mYrwpKfzQMB5X3ee5U6fSprkKpToBqBmX4nFZL9cW5jl6sWn+xpRJ7ypWh2yyqqr8UUCstSw==
@@ -1280,7 +1258,7 @@
   resolved "https://registry.yarnpkg.com/@types/validator/-/validator-13.6.3.tgz#31ca2e997bf13a0fffca30a25747d5b9f7dbb7de"
   integrity sha512-fWG42pMJOL4jKsDDZZREnXLjc3UE0R8LOJfARWYg6U966rxDT7TYejYzLnUF5cvSObGg34nd0+H2wHHU5Omdfw==
 
-"@types/websocket@^1.0.3":
+"@types/websocket@^1.0.4":
   version "1.0.4"
   resolved "https://registry.yarnpkg.com/@types/websocket/-/websocket-1.0.4.tgz#1dc497280d8049a5450854dd698ee7e6ea9e60b8"
   integrity sha512-qn1LkcFEKK8RPp459jkjzsfpbsx36BBt3oC3pITYtkoBw/aVX+EZFa5j3ThCRTNpLFvIMr5dSTD4RaMdilIOpA==
@@ -1706,14 +1684,7 @@ balanced-match@^1.0.0:
   resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee"
   integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==
 
-base-x@^3.0.8:
-  version "3.0.8"
-  resolved "https://registry.yarnpkg.com/base-x/-/base-x-3.0.8.tgz#1e1106c2537f0162e8b52474a557ebb09000018d"
-  integrity sha512-Rl/1AWP4J/zRrk54hhlxH4drNxPJXYUaKffODVI53/dAsV4t9fBxyxYKAVPU1XBHxYwOWP9h9H0hM2MVw4YfJA==
-  dependencies:
-    safe-buffer "^5.0.1"
-
-base64-js@^1.3.1, base64-js@^1.5.1:
+base64-js@^1.3.1:
   version "1.5.1"
   resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a"
   integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==
@@ -1738,7 +1709,7 @@ bluebird@^3.3.5, bluebird@^3.5.5:
   resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f"
   integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==
 
-bn.js@^4.11.9:
+bn.js@^4.11.9, bn.js@^4.12.0:
   version "4.12.0"
   resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.12.0.tgz#775b3f278efbb9718eec7361f483fb36fbbfea88"
   integrity sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==
@@ -1861,11 +1832,16 @@ camelcase@^3.0.0:
   resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-3.0.0.tgz#32fc4b9fcdaf845fcdf7e73bb97cac2261f0ab0a"
   integrity sha1-MvxLn82vhF/N9+c7uXysImHwqwo=
 
-camelcase@^5.0.0, camelcase@^5.3.1:
+camelcase@^5.0.0:
   version "5.3.1"
   resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320"
   integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==
 
+camelcase@^6.2.1:
+  version "6.3.0"
+  resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a"
+  integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==
+
 camelize@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/camelize/-/camelize-1.0.0.tgz#164a5483e630fa4321e5af07020e531831b2609b"
@@ -2491,6 +2467,13 @@ duplexer@^0.1.1:
   resolved "https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6"
   integrity sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==
 
+ed2curve@^0.3.0:
+  version "0.3.0"
+  resolved "https://registry.yarnpkg.com/ed2curve/-/ed2curve-0.3.0.tgz#322b575152a45305429d546b071823a93129a05d"
+  integrity sha512-8w2fmmq3hv9rCrcI7g9hms2pMunQr1JINfcjwR9tAyZqhtyaMN991lF/ZfHfr5tzZQ8c7y7aBgZbjfbd0fjFwQ==
+  dependencies:
+    tweetnacl "1.x.x"
+
 ee-first@1.1.1:
   version "1.1.1"
   resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d"
@@ -4213,6 +4196,11 @@ methods@~1.1.2:
   resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee"
   integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=
 
+micro-base@^0.9.0:
+  version "0.9.0"
+  resolved "https://registry.yarnpkg.com/micro-base/-/micro-base-0.9.0.tgz#09cfe20285bec0ea97f41dc3d10e3fba3d0266ee"
+  integrity sha512-4+tOMKidYT5nQ6/UNmYrGVO5PMcnJdfuR4NC8HK8s2H61B4itOhA9yrsjBdqGV7ecdtej36x3YSIfPLRmPrspg==
+
 micromatch@^4.0.4:
   version "4.0.4"
   resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.4.tgz#896d519dfe9db25fce94ceb7a500919bf881ebf9"
@@ -4373,6 +4361,13 @@ node-fetch@^2.6.1:
   dependencies:
     whatwg-url "^5.0.0"
 
+node-fetch@^2.6.6:
+  version "2.6.6"
+  resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.6.tgz#1751a7c01834e8e1697758732e9efb6eeadfaf89"
+  integrity sha512-Z8/6vRlTUChSdIgMa51jxQ4lrw/Jy5SOW10ObaA47/RElsAN2c5Pn8bTgFGWn/ibwzXTE8qwr1Yzx28vsecXEA==
+  dependencies:
+    whatwg-url "^5.0.0"
+
 node-fingerprint@0.0.2:
   version "0.0.2"
   resolved "https://registry.yarnpkg.com/node-fingerprint/-/node-fingerprint-0.0.2.tgz#31cbabeb71a67ae7dd5a7dc042e51c3c75868501"
@@ -5352,13 +5347,20 @@ run-parallel@^1.1.9:
   dependencies:
     queue-microtask "^1.2.2"
 
-rxjs@^6.3.3, rxjs@^6.5.1, rxjs@^6.6.7:
+rxjs@^6.3.3, rxjs@^6.5.1:
   version "6.6.7"
   resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.7.tgz#90ac018acabf491bf65044235d5863c4dab804c9"
   integrity sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ==
   dependencies:
     tslib "^1.9.0"
 
+rxjs@^7.3.0:
+  version "7.5.1"
+  resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.5.1.tgz#af73df343cbcab37628197f43ea0c8256f54b157"
+  integrity sha512-KExVEeZWxMZnZhUZtsJcFwz8IvPvgu4G2Z2QyqjZQzUGr32KDYuSxrEYO4w3tFFNbfLozcrKUTvTPi+E9ywJkQ==
+  dependencies:
+    tslib "^2.1.0"
+
 safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1:
   version "5.1.2"
   resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
@@ -5927,7 +5929,7 @@ tunnel-agent@^0.6.0:
   dependencies:
     safe-buffer "^5.0.1"
 
-tweetnacl@^1.0.3:
+tweetnacl@1.x.x, tweetnacl@^1.0.3:
   version "1.0.3"
   resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-1.0.3.tgz#ac0af71680458d8a6378d0d0d050ab1407d35596"
   integrity sha512-6rt+RN7aOi1nGMyC4Xa5DdYiukl2UWCbcJft7YhxReBGQD7OAM8Pbxw6YMo4r2diNEA8FEmu32YOn9rhaiE5yw==
@@ -6041,10 +6043,10 @@ typescript-tuple@^2.2.1:
   dependencies:
     typescript-compare "^0.0.2"
 
-typescript@^3.9.7:
-  version "3.9.10"
-  resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.9.10.tgz#70f3910ac7a51ed6bef79da7800690b19bf778b8"
-  integrity sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==
+typescript@^4.4:
+  version "4.5.4"
+  resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.5.4.tgz#a17d3a0263bf5c8723b9c52f43c5084edf13c2e8"
+  integrity sha512-VgYs2A2QIRuGphtzFV7aQJduJ2gyfTljngLzjpfW9FoYZF6xuw1W0vW9ghCKLfcWrCFxK81CSGRAvS1pn4fIUg==
 
 uc.micro@^1.0.1, uc.micro@^1.0.5:
   version "1.0.6"

query-node/mappings/package.json (+2 -2)

@@ -20,8 +20,8 @@
   },
   "dependencies": {
     "@polkadot/types": "5.9.1",
-    "@joystream/hydra-common": "3.1.0-alpha.13",
-    "@joystream/hydra-db-utils": "3.1.0-alpha.13",
+    "@joystream/hydra-common": "3.1.0-alpha.16",
+    "@joystream/hydra-db-utils": "3.1.0-alpha.16",
     "@joystream/metadata-protobuf": "^1.0.0",
     "@joystream/sumer-types": "npm:@joystream/types@^0.16.0",
     "@joystream/types": "^0.17.0",

query-node/package.json (+1 -1)

@@ -41,7 +41,7 @@
     "tslib": "^2.0.0",
     "@types/bn.js": "^4.11.6",
     "bn.js": "^5.1.2",
-    "@joystream/hydra-processor": "3.1.0-alpha.13"
+    "@joystream/hydra-processor": "3.1.0-alpha.16"
   },
   "volta": {
 		"extends": "../package.json"

runtime-modules/content/src/lib.rs (+156 -86)

@@ -586,7 +586,6 @@ decl_module! {
             // Ensure given origin is lead
             ensure_lead_auth_success::<T>(&sender)?;
 
-
             // Ensure curator group under provided curator_group_id already exist, retrieve corresponding one
             let curator_group = Self::ensure_curator_group_exists(&curator_group_id)?;
 
@@ -644,7 +643,6 @@ decl_module! {
             Self::deposit_event(RawEvent::CuratorRemoved(curator_group_id, curator_id));
         }
 
-        // TODO: Add Option<reward_account> to ChannelCreationParameters ?
         #[weight = 10_000_000] // TODO: adjust weight
         pub fn create_channel(
             origin,
@@ -668,27 +666,66 @@ decl_module! {
             // ensure collaborator member ids are valid
             Self::validate_collaborator_set(&params.collaborators)?;
 
+            let upload_params = params.assets.as_ref().map(|assets| {
+                Self::construct_upload_parameters(
+                    assets,
+                    &channel_id,
+                    &sender
+                )});
+
+            let channel_bag_id = Self::bag_id_for_channel(&channel_id);
+
+            let deletion_prize = storage::DynamicBagDeletionPrize::<T> {
+                prize: Zero::zero(), // put 0 for Giza release
+                account_id: sender.clone(),
+            };
+
+            if Storage::<T>::ensure_bag_exists(&channel_bag_id).is_err() {
+                if let Some(params) = upload_params.clone() {
+                    Storage::<T>::can_create_dynamic_bag_with_objects_constraints(
+                        &DynamicBagIdType::<T::MemberId, T::ChannelId>::Channel(channel_id),
+                        &Some(deletion_prize.clone()),
+                        &params
+                    )?;
+                } else {
+                    Storage::<T>::can_create_dynamic_bag(
+                        &DynamicBagIdType::<T::MemberId, T::ChannelId>::Channel(channel_id),
+                        &Some(deletion_prize.clone()),
+                    )?;
+                }
+            }
+
             //
             // == MUTATION SAFE ==
             //
 
-            // upload to storage
-            if let Some(upload_assets) = params.assets.as_ref() {
-                Self::upload_assets_to_storage(
-                    upload_assets,
-                    &channel_id,
-                    &sender,
+            if Storage::<T>::ensure_bag_exists(&channel_bag_id).is_err() {
+                if let Some(params) = upload_params.clone() {
+                    Storage::<T>::create_dynamic_bag_with_objects_constraints(
+                        DynamicBagIdType::<T::MemberId, T::ChannelId>::Channel(channel_id),
+                        Some(deletion_prize),
+                        params,
                 )?;
+                // create_dynamic_bag_with_objects with its can* guard ensures that this invocation succeeds
+                } else {
+                    Storage::<T>::create_dynamic_bag(
+                        DynamicBagIdType::<T::MemberId, T::ChannelId>::Channel(channel_id),
+                        Some(deletion_prize),
+                    )?;
+                }
+            }
+
+            // this will not fail because can_create_dynamic_bag_with_objects_constraints also checks the upload conditions
+            if let Some(params) = upload_params.clone() {
+                Storage::<T>::upload_data_objects(params)?;
             }
 
             // Only increment next channel id if adding content was successful
             NextChannelId::<T>::mutate(|id| *id += T::ChannelId::one());
 
-
             // channel creation
             let channel: Channel<T> = ChannelRecord {
                 owner: channel_owner,
-                // a newly create channel has zero videos ??
                 num_videos: 0u64,
                 is_censored: false,
                 reward_account: params.reward_account.clone(),
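
The rewritten create_channel above makes the pallet's check-then-commit discipline explicit: every fallible storage call now has a matching can_* pre-check that runs before the "== MUTATION SAFE ==" marker, so a failed upload can no longer leave a half-created channel or a dangling dynamic bag behind (the new test at the bottom of channels.rs covers exactly this). Below is a minimal sketch of the pattern, with placeholder functions standing in for pairs such as can_create_dynamic_bag / create_dynamic_bag; it is illustrative only, not pallet code:

    // Two-phase extrinsic body: check everything, then mutate.
    fn checked_extrinsic(bag_exists: bool) -> Result<(), &'static str> {
        // Phase 1: run every fallible check up front; nothing has been
        // written yet, so an early return leaves state untouched.
        if !bag_exists {
            can_create_bag()?;
        }
        can_upload()?;

        //
        // == MUTATION SAFE ==
        //

        // Phase 2: mutations only; the phase-1 checks guarantee success.
        if !bag_exists {
            create_bag().expect("verified by can_create_bag");
        }
        upload().expect("verified by can_upload");
        Ok(())
    }

    // Placeholder check/commit pairs (always succeed in this sketch).
    fn can_create_bag() -> Result<(), &'static str> { Ok(()) }
    fn create_bag() -> Result<(), &'static str> { Ok(()) }
    fn can_upload() -> Result<(), &'static str> { Ok(()) }
    fn upload() -> Result<(), &'static str> { Ok(()) }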
@@ -735,27 +772,50 @@ decl_module! {
                 channel.collaborators = new_collabs.clone();
             }
 
+            if let Some(upload_assets) = params.assets_to_upload.as_ref() {
+                let params = Self::construct_upload_parameters(
+                    upload_assets,
+                    &channel_id,
+                    &sender
+                );
+
+                Storage::<T>::can_upload_data_objects(&params)?;
+            }
+
+            if !params.assets_to_remove.is_empty() {
+                Storage::<T>::can_delete_data_objects(
+                    &Self::bag_id_for_channel(&channel_id),
+                    &params.assets_to_remove
+                )?;
+            }
+
             //
             // == MUTATION SAFE ==
             //
 
-            // upload assets to storage
             if let Some(upload_assets) = params.assets_to_upload.as_ref() {
-                Self::upload_assets_to_storage(
+                let params = Self::construct_upload_parameters(
                     upload_assets,
                     &channel_id,
-                    &sender,
-                )?;
+                    &sender
+                );
+
+                Storage::<T>::upload_data_objects(params.clone())?;
             }
 
-            // remove eassets from storage
-            Self::remove_assets_from_storage(&params.assets_to_remove, &channel_id, &sender)?;
+            if !params.assets_to_remove.is_empty() {
+                Storage::<T>::delete_data_objects(
+                    sender,
+                    Self::bag_id_for_channel(&channel_id),
+                    params.assets_to_remove.clone()
+                )?;
+            }
 
             // Update the channel
             ChannelById::<T>::insert(channel_id, channel.clone());
 
             Self::deposit_event(RawEvent::ChannelUpdated(actor, channel_id, channel, params));
-}
+        }
 
         // extrinsics for channel deletion
         #[weight = 10_000_000] // TODO: adjust weight
@@ -791,19 +851,36 @@ decl_module! {
                     Error::<T>::InvalidBagSizeSpecified
                 );
 
+                // construct collection of assets to be removed
+                let assets_to_remove = T::DataObjectStorage::get_data_objects_id(&bag_id);
+
+                if !assets_to_remove.is_empty() {
+                    Storage::<T>::can_delete_dynamic_bag_with_objects(
+                        &dyn_bag,
+                    )?;
+
+                    Storage::<T>::can_delete_data_objects(
+                        &bag_id,
+                        &assets_to_remove,
+                    )?;
+                } else {
+                    Storage::<T>::can_delete_dynamic_bag(
+                        &dyn_bag,
+                    )?;
+                }
+
                 //
                 // == MUTATION SAFE ==
                 //
 
-                // construct collection of assets to be removed
-                let assets_to_remove = T::DataObjectStorage::get_data_objects_id(&bag_id);
-
                 // remove specified assets from storage
-                Self::remove_assets_from_storage(
-                    &assets_to_remove,
-                    &channel_id,
-                    &sender,
-                )?;
+                if !assets_to_remove.is_empty() {
+                    Storage::<T>::delete_data_objects(
+                        sender.clone(),
+                        Self::bag_id_for_channel(&channel_id),
+                        assets_to_remove.clone(),
+                    )?;
+                }
 
                 // delete channel dynamic bag
                 Storage::<T>::delete_dynamic_bag(
@@ -850,8 +927,6 @@ decl_module! {
                 channel.is_censored = is_censored
             });
 
-            // TODO: unset the reward account ? so no revenue can be earned for censored channels?
-
             Self::deposit_event(RawEvent::ChannelCensorshipStatusUpdated(actor, channel_id, is_censored, rationale));
         }
 
@@ -968,13 +1043,14 @@ decl_module! {
             // == MUTATION SAFE ==
             //
 
-            // upload to storage
+            // upload to storage: the check is performed beforehand in the extrinsic, so storage state is not endangered
             if let Some(upload_assets) = params.assets.as_ref() {
-                Self::upload_assets_to_storage(
+                let params = Self::construct_upload_parameters(
                     upload_assets,
                     &channel_id,
-                    &sender,
-                )?;
+                    &sender
+                );
+                Storage::<T>::upload_data_objects(params)?;
             }
 
             // create the video struct
@@ -1024,19 +1100,40 @@ decl_module! {
                 &channel,
             )?;
 
+            if let Some(upload_assets) = params.assets_to_upload.as_ref() {
+                let params = Self::construct_upload_parameters(
+                    upload_assets,
+                    &channel_id,
+                    &sender
+                );
+                Storage::<T>::can_upload_data_objects(&params)?;
+            }
+
+            if !params.assets_to_remove.is_empty() {
+                Storage::<T>::can_delete_data_objects(
+                    &Self::bag_id_for_channel(&channel_id),
+                    &params.assets_to_remove,
+                )?;
+            }
+
             //
             // == MUTATION SAFE ==
             //
 
-            // remove specified assets from channel bag in storage
-            Self::remove_assets_from_storage(&params.assets_to_remove, &channel_id, &sender)?;
-
-            // atomically upload to storage and return the # of uploaded assets
             if let Some(upload_assets) = params.assets_to_upload.as_ref() {
-                Self::upload_assets_to_storage(
+                let params = Self::construct_upload_parameters(
                     upload_assets,
                     &channel_id,
-                    &sender,
+                    &sender
+                );
+                Storage::<T>::upload_data_objects(params)?;
+            }
+
+            if !params.assets_to_remove.is_empty() {
+                Storage::<T>::delete_data_objects(
+                    sender,
+                    Self::bag_id_for_channel(&channel_id),
+                    params.assets_to_remove.clone(),
                 )?;
             }
 
@@ -1069,12 +1166,24 @@ decl_module! {
             // ensure video can be removed
             Self::ensure_video_can_be_removed(&video)?;
 
+            if !assets_to_remove.is_empty() {
+                Storage::<T>::can_delete_data_objects(
+                    &Self::bag_id_for_channel(&channel_id),
+                    &assets_to_remove,
+                )?;
+            }
+
             //
             // == MUTATION SAFE ==
             //
 
-            // remove specified assets from channel bag in storage
-            Self::remove_assets_from_storage(&assets_to_remove, &channel_id, &sender)?;
+            if !assets_to_remove.is_empty() {
+                Storage::<T>::delete_data_objects(
+                    sender,
+                    Self::bag_id_for_channel(&channel_id),
+                    assets_to_remove.clone()
+                )?;
+            }
 
             // Remove video
             VideoById::<T>::remove(video_id);
@@ -1369,28 +1478,6 @@ impl<T: Trait> Module<T> {
         Ok(VideoCategoryById::<T>::get(video_category_id))
     }
 
-    fn pick_upload_parameters_from_assets(
-        assets: &StorageAssets<T>,
-        channel_id: &T::ChannelId,
-        prize_source_account: &T::AccountId,
-    ) -> UploadParameters<T> {
-        // dynamic bag for a media object
-        let dyn_bag = DynamicBagIdType::<T::MemberId, T::ChannelId>::Channel(*channel_id);
-        let bag_id = BagIdType::from(dyn_bag.clone());
-
-        if T::DataObjectStorage::ensure_bag_exists(&bag_id).is_err() {
-            // create_dynamic_bag checks automatically satifsfied with None as second parameter
-            Storage::<T>::create_dynamic_bag(dyn_bag, None).unwrap();
-        }
-
-        UploadParametersRecord {
-            bag_id,
-            object_creation_list: assets.object_creation_list.clone(),
-            deletion_prize_source_account_id: prize_source_account.clone(),
-            expected_data_size_fee: assets.expected_data_size_fee,
-        }
-    }
-
     fn actor_to_channel_owner(
         actor: &ContentActor<T::CuratorGroupId, T::CuratorId, T::MemberId>,
     ) -> ActorToChannelOwnerResult<T> {
@@ -1414,35 +1501,18 @@ impl<T: Trait> Module<T> {
         Err(Error::<T>::FeatureNotImplemented.into())
     }
 
-    fn upload_assets_to_storage(
+    // construct parameters for an upload to storage
+    fn construct_upload_parameters(
         assets: &StorageAssets<T>,
         channel_id: &T::ChannelId,
         prize_source_account: &T::AccountId,
-    ) -> DispatchResult {
-        // construct upload params
-        let upload_params =
-            Self::pick_upload_parameters_from_assets(assets, channel_id, prize_source_account);
-
-        // attempt to upload objects att
-        Storage::<T>::upload_data_objects(upload_params.clone())?;
-
-        Ok(())
-    }
-
-    fn remove_assets_from_storage(
-        assets: &BTreeSet<DataObjectId<T>>,
-        channel_id: &T::ChannelId,
-        prize_source_account: &T::AccountId,
-    ) -> DispatchResult {
-        // remove assets if any
-        if !assets.is_empty() {
-            Storage::<T>::delete_data_objects(
-                prize_source_account.clone(),
-                Self::bag_id_for_channel(&channel_id),
-                assets.clone(),
-            )?;
+    ) -> UploadParameters<T> {
+        UploadParameters::<T> {
+            bag_id: Self::bag_id_for_channel(channel_id),
+            object_creation_list: assets.object_creation_list.clone(),
+            deletion_prize_source_account_id: prize_source_account.clone(),
+            expected_data_size_fee: assets.expected_data_size_fee,
         }
-        Ok(())
     }
 
     fn validate_collaborator_set(collaborators: &BTreeSet<T::MemberId>) -> DispatchResult {
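
The pure helper above replaces the side-effecting pick_upload_parameters_from_assets, which created the dynamic bag on the fly and unwrapped the result; bag creation, pre-checks, and uploads are now explicit at every call site. A hedged sketch of the resulting call-site shape, reusing only names visible in this diff (the surrounding extrinsic plumbing is omitted):

    // Call-site fragment (illustrative): the helper only builds the
    // UploadParameters struct and performs no storage mutation itself.
    let params = Self::construct_upload_parameters(upload_assets, &channel_id, &sender);

    // Fallible pre-check, placed before the mutation-safe section...
    Storage::<T>::can_upload_data_objects(&params)?;

    // == MUTATION SAFE ==

    // ...then the upload itself, which the pre-check guarantees succeeds.
    Storage::<T>::upload_data_objects(params)?;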

runtime-modules/content/src/tests/channels.rs (+66 -6)

@@ -12,10 +12,12 @@ fn successful_channel_deletion() {
         // Run to block one to see emitted events
         run_to_block(1);
 
+        create_initial_storage_buckets();
+
        // create an account with enough balance
         let _ = balances::Module::<Test>::deposit_creating(
             &FIRST_MEMBER_ORIGIN,
-            <Test as balances::Trait>::Balance::from(100u32),
+            <Test as balances::Trait>::Balance::from(INITIAL_BALANCE),
         );
 
         // 3 assets added at creation
@@ -66,7 +68,7 @@ fn successful_channel_deletion() {
             FIRST_MEMBER_ORIGIN,
             ContentActor::Member(FIRST_MEMBER_ID),
             channel_id,
-            3u64,
+            3u64, // now assets are 0
             Ok(()),
         );
 
@@ -83,11 +85,12 @@ fn successful_channel_deletion() {
             },
             Ok(()),
         );
+
         delete_channel_mock(
             FIRST_MEMBER_ORIGIN,
             ContentActor::Member(FIRST_MEMBER_ID),
             empty_channel_id,
-            43u64, // this param will be discarded if channel has no assets
+            0u64,
             Ok(()),
         );
     })
@@ -99,10 +102,11 @@ fn successful_channel_assets_deletion() {
         // Run to block one to see emitted events
         run_to_block(1);
 
+        create_initial_storage_buckets();
        // create an account with enough balance
         let _ = balances::Module::<Test>::deposit_creating(
             &FIRST_MEMBER_ORIGIN,
-            <Test as balances::Trait>::Balance::from(100u32),
+            <Test as balances::Trait>::Balance::from(INITIAL_BALANCE),
         );
 
         // 3 assets
@@ -163,10 +167,12 @@ fn succesful_channel_update() {
         // Run to block one to see emitted events
         run_to_block(1);
 
+        create_initial_storage_buckets();
+
        // create an account with enough balance
         let _ = balances::Module::<Test>::deposit_creating(
             &FIRST_MEMBER_ORIGIN,
-            <Test as balances::Trait>::Balance::from(100u32),
+            <Test as balances::Trait>::Balance::from(INITIAL_BALANCE),
         );
 
         // 2 + 1 assets to be uploaded
@@ -254,10 +260,12 @@ fn succesful_channel_creation() {
         // Run to block one to see emitted events
         run_to_block(1);
 
+        create_initial_storage_buckets();
+
        // create an account with enough balance
         let _ = balances::Module::<Test>::deposit_creating(
             &FIRST_MEMBER_ORIGIN,
-            <Test as balances::Trait>::Balance::from(100u32),
+            <Test as balances::Trait>::Balance::from(INITIAL_BALANCE),
         );
 
         // 3 assets to be uploaded
@@ -297,6 +305,7 @@ fn succesful_channel_creation() {
 #[test]
 fn lead_cannot_create_channel() {
     with_default_mock_builder(|| {
+        create_initial_storage_buckets();
         assert_err!(
             Content::create_channel(
                 Origin::signed(LEAD_ORIGIN),
@@ -438,6 +447,7 @@ fn invalid_member_cannot_create_channel() {
         // Run to block one to see emitted events
         run_to_block(1);
 
+        create_initial_storage_buckets();
         // Not a member
         create_channel_mock(
             FIRST_MEMBER_ORIGIN,
@@ -459,6 +469,7 @@ fn invalid_member_cannot_update_channel() {
         // Run to block one to see emitted events
         run_to_block(1);
 
+        create_initial_storage_buckets();
         create_channel_mock(
             FIRST_MEMBER_ORIGIN,
             ContentActor::Member(FIRST_MEMBER_ID),
@@ -493,6 +504,8 @@ fn invalid_member_cannot_delete_channel() {
         // Run to block one to see emitted events
         run_to_block(1);
 
+        create_initial_storage_buckets();
+
         create_channel_mock(
             FIRST_MEMBER_ORIGIN,
             ContentActor::Member(FIRST_MEMBER_ID),
@@ -523,6 +536,8 @@ fn non_authorized_collaborators_cannot_update_channel() {
 
         helper_init_accounts(vec![FIRST_MEMBER_ORIGIN, COLLABORATOR_MEMBER_ORIGIN]);
 
+        create_initial_storage_buckets();
+
         // create channel
         create_channel_mock(
             FIRST_MEMBER_ORIGIN,
@@ -598,6 +613,7 @@ fn authorized_collaborators_can_update_channel() {
 
         helper_init_accounts(vec![FIRST_MEMBER_ORIGIN, COLLABORATOR_MEMBER_ORIGIN]);
 
+        create_initial_storage_buckets();
         // create channel
         create_channel_mock(
             FIRST_MEMBER_ORIGIN,
@@ -749,3 +765,47 @@ fn channel_censoring() {
         ));
     })
 }
+
+#[test]
+fn channel_creation_doesnt_leave_bags_dangling() {
+    with_default_mock_builder(|| {
+        // in order to emit events
+        run_to_block(1);
+
+        create_initial_storage_buckets();
+        // number of assets big enough to make upload_data_objects throw
+        let asset_num = 100_000usize;
+        let mut object_creation_list =
+            Vec::<DataObjectCreationParameters>::with_capacity(asset_num);
+        for _i in 0..asset_num {
+            object_creation_list.push(DataObjectCreationParameters {
+                size: 1_000_000, // size big enough to make upload_data_objects throw
+                ipfs_content_id: b"test".to_vec(),
+            });
+        }
+
+        let assets = StorageAssetsRecord {
+            object_creation_list,
+            expected_data_size_fee: storage::DataObjectPerMegabyteFee::<Test>::get(),
+        };
+
+        let channel_id = NextChannelId::<Test>::get();
+        // create channel
+        create_channel_mock(
+            FIRST_MEMBER_ORIGIN,
+            ContentActor::Member(FIRST_MEMBER_ID),
+            ChannelCreationParametersRecord {
+                assets: Some(assets),
+                meta: Some(vec![]),
+                reward_account: None,
+                collaborators: BTreeSet::new(),
+            },
+            Err(storage::Error::<Test>::MaxDataObjectSizeExceeded.into()),
+        );
+
+        // ensure that no bags are left dangling
+        let dyn_bag = DynamicBagIdType::<MemberId, ChannelId>::Channel(channel_id);
+        let bag_id = storage::BagIdType::from(dyn_bag.clone());
+        assert!(<Test as Trait>::DataObjectStorage::ensure_bag_exists(&bag_id).is_err());
+    })
+}

runtime-modules/content/src/tests/mock.rs (+60 -2)

@@ -52,7 +52,7 @@ pub const COLLABORATOR_MEMBER_ID: MemberId = 9;
 
 /// Constants
 // initial balance for an account
-pub const INIT_BALANCE: u32 = 500;
+pub const INITIAL_BALANCE: u32 = 1_000_000;
 
 impl_outer_origin! {
     pub enum Origin for Test {}
@@ -658,7 +658,65 @@ pub fn helper_init_accounts(accounts: Vec<u64>) {
     for acc in accounts.iter() {
         let _ = balances::Module::<Test>::deposit_creating(
             acc,
-            <Test as balances::Trait>::Balance::from(INIT_BALANCE),
+            <Test as balances::Trait>::Balance::from(INITIAL_BALANCE),
         );
     }
 }
+
+pub fn create_initial_storage_buckets() {
+    // first set limits
+    assert_eq!(
+        Storage::<Test>::update_storage_buckets_voucher_max_limits(
+            Origin::signed(STORAGE_WG_LEADER_ACCOUNT_ID),
+            400,
+            40
+        ),
+        Ok(())
+    );
+
+    // create bucket(s)
+    assert_eq!(
+        Storage::<Test>::create_storage_bucket(
+            Origin::signed(STORAGE_WG_LEADER_ACCOUNT_ID),
+            None,
+            true,
+            100,
+            10,
+        ),
+        Ok(())
+    );
+}
+
+pub fn create_channel_with_bag() {
+    // 3 assets added at creation
+    let assets = StorageAssetsRecord {
+        object_creation_list: vec![
+            DataObjectCreationParameters {
+                size: 3,
+                ipfs_content_id: b"first".to_vec(),
+            },
+            DataObjectCreationParameters {
+                size: 3,
+                ipfs_content_id: b"second".to_vec(),
+            },
+            DataObjectCreationParameters {
+                size: 3,
+                ipfs_content_id: b"third".to_vec(),
+            },
+        ],
+        expected_data_size_fee: storage::DataObjectPerMegabyteFee::<Test>::get(),
+    };
+
+    // create channel
+    create_channel_mock(
+        FIRST_MEMBER_ORIGIN,
+        ContentActor::Member(FIRST_MEMBER_ID),
+        ChannelCreationParametersRecord {
+            assets: Some(assets),
+            meta: None,
+            reward_account: None,
+            collaborators: BTreeSet::new(),
+        },
+        Ok(()),
+    );
+}

+ 17 - 24
runtime-modules/content/src/tests/videos.rs

@@ -28,25 +28,19 @@ fn video_creation_successful() {
     with_default_mock_builder(|| {
         run_to_block(1);
 
+        create_initial_storage_buckets();
+
         // deposit initial balance
         let _ = balances::Module::<Test>::deposit_creating(
             &FIRST_MEMBER_ORIGIN,
-            <Test as balances::Trait>::Balance::from(100u32),
+            <Test as balances::Trait>::Balance::from(INITIAL_BALANCE),
         );
 
         let channel_id = NextChannelId::<Test>::get();
 
-        create_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            ChannelCreationParametersRecord {
-                assets: None,
-                meta: None,
-                reward_account: None,
-                collaborators: BTreeSet::new(),
-            },
-            Ok(()),
-        );
+        create_channel_with_bag();
 
         let params = VideoCreationParametersRecord {
             assets: Some(StorageAssetsRecord {
@@ -84,24 +78,15 @@ fn video_update_successful() {
     with_default_mock_builder(|| {
         run_to_block(1);
 
+        create_initial_storage_buckets();
         let _ = balances::Module::<Test>::deposit_creating(
             &FIRST_MEMBER_ORIGIN,
-            <Test as balances::Trait>::Balance::from(100u32),
+            <Test as balances::Trait>::Balance::from(INITIAL_BALANCE),
         );
 
         let channel_id = NextChannelId::<Test>::get();
 
-        create_channel_mock(
-            FIRST_MEMBER_ORIGIN,
-            ContentActor::Member(FIRST_MEMBER_ID),
-            ChannelCreationParametersRecord {
-                assets: None,
-                meta: None,
-                reward_account: None,
-                collaborators: BTreeSet::new(),
-            },
-            Ok(()),
-        );
+        create_channel_with_bag();
 
         // create video with 3 assets
         let params = VideoCreationParametersRecord {
@@ -413,6 +398,8 @@ fn non_authorized_collaborators_cannot_add_video() {
         // Run to block one to see emitted events
         run_to_block(1);
 
+        create_initial_storage_buckets();
+
         helper_init_accounts(vec![FIRST_MEMBER_ORIGIN, COLLABORATOR_MEMBER_ORIGIN]);
 
         // create channel
@@ -449,6 +436,7 @@ fn non_authorized_collaborators_cannot_update_video() {
 
         helper_init_accounts(vec![FIRST_MEMBER_ORIGIN, COLLABORATOR_MEMBER_ORIGIN]);
 
+        create_initial_storage_buckets();
         // create channel
         create_channel_mock(
             FIRST_MEMBER_ORIGIN,
@@ -498,6 +486,7 @@ fn non_authorized_collaborators_cannot_delete_video() {
 
         helper_init_accounts(vec![FIRST_MEMBER_ORIGIN, COLLABORATOR_MEMBER_ORIGIN]);
 
+        create_initial_storage_buckets();
         // create channel
         create_channel_mock(
             FIRST_MEMBER_ORIGIN,
@@ -546,6 +535,7 @@ fn authorized_collaborators_can_add_video() {
 
         helper_init_accounts(vec![FIRST_MEMBER_ORIGIN, COLLABORATOR_MEMBER_ORIGIN]);
 
+        create_initial_storage_buckets();
         // create channel
         create_channel_mock(
             FIRST_MEMBER_ORIGIN,
@@ -582,6 +572,8 @@ fn authorized_collaborators_can_update_video() {
 
         helper_init_accounts(vec![FIRST_MEMBER_ORIGIN, COLLABORATOR_MEMBER_ORIGIN]);
 
+        create_initial_storage_buckets();
+
         // create channel
         create_channel_mock(
             FIRST_MEMBER_ORIGIN,
@@ -633,6 +625,7 @@ fn authorized_collaborators_can_delete_video() {
 
         helper_init_accounts(vec![FIRST_MEMBER_ORIGIN, COLLABORATOR_MEMBER_ORIGIN]);
 
+        create_initial_storage_buckets();
         // create channel
         create_channel_mock(
             FIRST_MEMBER_ORIGIN,

+ 336 - 125
runtime-modules/storage/src/lib.rs

@@ -97,6 +97,10 @@
 //! - delete_dynamic_bag
 //! - can_create_dynamic_bag
 //! - create_dynamic_bag
+//! - can_create_dynamic_bag_with_objects_constraints
+//! - create_dynamic_bag_with_objects_constraints
+//! - can_delete_dynamic_bag_with_objects
 //!
 //! ### Pallet constants
 //! - DataObjectDeletionPrize
@@ -118,7 +122,6 @@
 #![allow(clippy::unused_unit)]
 // needed for step iteration over DataObjectId range
 #![feature(step_trait)]
-
 #[cfg(test)]
 mod tests;
 
@@ -200,6 +203,9 @@ pub trait DataObjectStorage<T: Trait> {
     /// Validates `delete_dynamic_bag` parameters and conditions.
     fn can_delete_dynamic_bag(bag_id: &DynamicBagId<T>) -> DispatchResult;
 
+    /// Validates `delete_dynamic_bag` parameters without requiring the bag to be empty.
+    fn can_delete_dynamic_bag_with_objects(bag_id: &DynamicBagId<T>) -> DispatchResult;
+
     /// Creates dynamic bag. The caller should provide the `BagId`.
     fn create_dynamic_bag(
         bag_id: DynamicBagId<T>,
@@ -212,6 +218,20 @@ pub trait DataObjectStorage<T: Trait> {
         deletion_prize: &Option<DynamicBagDeletionPrize<T>>,
     ) -> DispatchResult;
 
+    /// Same as `create_dynamic_bag`, but validates the caller-provided objects/data as well.
+    fn create_dynamic_bag_with_objects_constraints(
+        bag_id: DynamicBagId<T>,
+        deletion_prize: Option<DynamicBagDeletionPrize<T>>,
+        params: UploadParameters<T>,
+    ) -> DispatchResult;
+
+    /// Same as `can_create_dynamic_bag`, but also validates the caller-provided objects/data.
+    fn can_create_dynamic_bag_with_objects_constraints(
+        bag_id: &DynamicBagId<T>,
+        deletion_prize: &Option<DynamicBagDeletionPrize<T>>,
+        params: &UploadParameters<T>,
+    ) -> DispatchResult;
+
     /// Checks if a bag exists and returns it. Static bags always exist.
     fn ensure_bag_exists(bag_id: &BagId<T>) -> Result<Bag<T>, DispatchError>;
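The two `*_with_objects_constraints` trait methods are meant to be used as a pair: the `can_*` variant dry-runs the same validation as the mutating variant, including the new check that at least one storage bucket can absorb the objects described in the upload parameters. A minimal sketch of a hypothetical caller (the wrapper function itself is illustrative and not part of this change):

```rust
// Hypothetical helper, not part of this diff: dry-run the validation,
// then create the bag with the object constraints applied.
fn create_bag_with_objects_checked<T: Trait>(
    bag_id: DynamicBagId<T>,
    deletion_prize: Option<DynamicBagDeletionPrize<T>>,
    params: UploadParameters<T>,
) -> DispatchResult {
    // Validates balances, object limits, and storage bucket capacity
    // without mutating any state.
    <Module<T> as DataObjectStorage<T>>::can_create_dynamic_bag_with_objects_constraints(
        &bag_id,
        &deletion_prize,
        &params,
    )?;
    // Performs the same validation again, then creates the bag with
    // buckets selected to fit the pending upload.
    <Module<T> as DataObjectStorage<T>>::create_dynamic_bag_with_objects_constraints(
        bag_id,
        deletion_prize,
        params,
    )
}
```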
 
@@ -464,6 +484,12 @@ pub type WorkerId<T> = <T as common::MembershipTypes>::ActorId;
 /// Balance alias for `balances` module.
 pub type BalanceOf<T> = <T as balances::Trait>::Balance;
 
+/// Type alias for the pair of storage and distribution bucket ID sets.
+pub type BucketPair<T> = (
+    BTreeSet<<T as Trait>::StorageBucketId>,
+    BTreeSet<DistributionBucketId<T>>,
+);
+
 /// The fundamental concept in the system, which represents single static binary object in the
 /// system. The main goal of the system is to retain an index of all such objects, including who
 /// owns them, and information about what actors are currently tasked with storing and distributing
@@ -1438,6 +1464,9 @@ decl_error! {
         /// Max data object size exceeded.
         MaxDataObjectSizeExceeded,
 
+        /// The dynamic bag deletion prize account and the upload fee source account differ.
+        AccountsNotCoherent,
+
         /// Invalid transactor account ID for this bucket.
         InvalidTransactorAccount,
     }
@@ -2496,46 +2525,8 @@ impl<T: Trait> DataObjectStorage<T> for Module<T> {
 
     fn upload_data_objects(params: UploadParameters<T>) -> DispatchResult {
         let bag = Self::ensure_bag_exists(&params.bag_id)?;
-
         let bag_change = Self::validate_upload_data_objects_parameters(&params)?;
-
-        //
-        // == MUTATION SAFE ==
-        //
-
-        let data = Self::create_data_objects(params.object_creation_list.clone());
-
-        <StorageTreasury<T>>::deposit(
-            &params.deletion_prize_source_account_id,
-            bag_change.total_deletion_prize,
-        )?;
-
-        Self::slash_data_size_fee(
-            &params.deletion_prize_source_account_id,
-            bag_change.voucher_update.objects_total_size,
-        );
-
-        // Save next object id.
-        <NextDataObjectId<T>>::put(data.next_data_object_id);
-
-        // Insert new objects.
-        for (data_object_id, data_object) in data.data_objects_map.iter() {
-            DataObjectsById::<T>::insert(&params.bag_id, &data_object_id, data_object);
-        }
-
-        Self::change_storage_bucket_vouchers_for_bag(
-            &params.bag_id,
-            &bag,
-            &bag_change.voucher_update,
-            OperationType::Increase,
-        );
-
-        Self::deposit_event(RawEvent::DataObjectsUploaded(
-            data.data_objects_map.keys().cloned().collect(),
-            params,
-            T::DataObjectDeletionPrize::get(),
-        ));
-
+        Self::upload_data_objects_inner(&params, &bag_change, &bag)?;
         Ok(())
     }
 
@@ -2632,15 +2623,20 @@ impl<T: Trait> DataObjectStorage<T> for Module<T> {
         Ok(())
     }
 
-    fn can_delete_dynamic_bag(bag_id: &DynamicBagId<T>) -> DispatchResult {
-        Self::validate_delete_dynamic_bag_params(bag_id).map(|_| ())
+    fn can_delete_dynamic_bag(dynamic_bag_id: &DynamicBagId<T>) -> DispatchResult {
+        Self::validate_delete_dynamic_bag_params(dynamic_bag_id, false).map(|_| ())
+    }
+
+    fn can_delete_dynamic_bag_with_objects(dynamic_bag_id: &DynamicBagId<T>) -> DispatchResult {
+        Self::validate_delete_dynamic_bag_params(dynamic_bag_id, true).map(|_| ())
     }
 
     fn delete_dynamic_bag(
         deletion_prize_account_id: T::AccountId,
         dynamic_bag_id: DynamicBagId<T>,
     ) -> DispatchResult {
-        let deletion_prize = Self::validate_delete_dynamic_bag_params(&dynamic_bag_id)?;
+        // this variant requires the bag to be empty before deletion
+        let deletion_prize = Self::validate_delete_dynamic_bag_params(&dynamic_bag_id, false)?;
 
         let bag_id: BagId<T> = dynamic_bag_id.clone().into();
 
@@ -2666,21 +2662,100 @@ impl<T: Trait> DataObjectStorage<T> for Module<T> {
         dynamic_bag_id: DynamicBagId<T>,
         deletion_prize: Option<DynamicBagDeletionPrize<T>>,
     ) -> DispatchResult {
-        Self::validate_create_dynamic_bag_params(&dynamic_bag_id, &deletion_prize)?;
+        // validate params and get storage & distribution buckets
+        let bag_change =
+            Self::validate_create_dynamic_bag_params(&dynamic_bag_id, &deletion_prize, &None)?;
+
+        let (storage_bucket_ids, distribution_bucket_ids) =
+            Self::pick_buckets_for_bag(dynamic_bag_id.clone(), &bag_change)?;
+
+        //
+        // == MUTATION SAFE ==
+        //
+
+        Self::create_dynamic_bag_inner(
+            &dynamic_bag_id,
+            &deletion_prize,
+            &storage_bucket_ids,
+            &distribution_bucket_ids,
+        )?;
+        Ok(())
+    }
+
+    fn create_dynamic_bag_with_objects_constraints(
+        dynamic_bag_id: DynamicBagId<T>,
+        deletion_prize: Option<DynamicBagDeletionPrize<T>>,
+        params: UploadParameters<T>,
+    ) -> DispatchResult {
+        let bag_change = Self::validate_create_dynamic_bag_params(
+            &dynamic_bag_id,
+            &deletion_prize,
+            &Some(params),
+        )?;
+
+        let (storage_bucket_ids, distribution_bucket_ids) =
+            Self::pick_buckets_for_bag(dynamic_bag_id.clone(), &bag_change)?;
 
         //
         // == MUTATION SAFE ==
         //
+        Self::create_dynamic_bag_inner(
+            &dynamic_bag_id,
+            &deletion_prize,
+            &storage_bucket_ids,
+            &distribution_bucket_ids,
+        )?;
+        Ok(())
+    }
+
+    fn can_create_dynamic_bag(
+        bag_id: &DynamicBagId<T>,
+        deletion_prize: &Option<DynamicBagDeletionPrize<T>>,
+    ) -> DispatchResult {
+        Self::validate_create_dynamic_bag_params(bag_id, deletion_prize, &None).map(|_| ())
+    }
+
+    fn can_create_dynamic_bag_with_objects_constraints(
+        dynamic_bag_id: &DynamicBagId<T>,
+        deletion_prize: &Option<DynamicBagDeletionPrize<T>>,
+        params: &UploadParameters<T>,
+    ) -> DispatchResult {
+        let bag_change = Self::validate_create_dynamic_bag_params(
+            dynamic_bag_id,
+            deletion_prize,
+            &Some(params.clone()),
+        )?;
+
+        Self::pick_buckets_for_bag(dynamic_bag_id.clone(), &bag_change).map(|_| ())
+    }
+
+    fn ensure_bag_exists(bag_id: &BagId<T>) -> Result<Bag<T>, DispatchError> {
+        Self::ensure_bag_exists(bag_id)
+    }
+
+    fn get_data_objects_id(bag_id: &BagId<T>) -> BTreeSet<T::DataObjectId> {
+        DataObjectsById::<T>::iter_prefix(&bag_id)
+            .map(|x| x.0)
+            .collect()
+    }
+}
+
+impl<T: Trait> Module<T> {
+    // dynamic bag creation logic
+    fn create_dynamic_bag_inner(
+        dynamic_bag_id: &DynamicBagId<T>,
+        deletion_prize: &Option<DynamicBagDeletionPrize<T>>,
+        storage_buckets: &BTreeSet<T::StorageBucketId>,
+        distribution_buckets: &BTreeSet<DistributionBucketId<T>>,
+    ) -> DispatchResult {
+        //
+        // == MUTATION SAFE ==
+        //
 
         if let Some(deletion_prize) = deletion_prize.clone() {
             <StorageTreasury<T>>::deposit(&deletion_prize.account_id, deletion_prize.prize)?;
         }
 
-        let bag_type: DynamicBagType = dynamic_bag_id.clone().into();
-
-        let storage_buckets = Self::pick_storage_buckets_for_dynamic_bag(bag_type);
-        let distribution_buckets = Self::pick_distribution_buckets_for_dynamic_bag(bag_type);
-
         let bag = Bag::<T> {
             stored_by: storage_buckets.clone(),
             deletion_prize: deletion_prize.clone().map(|dp| dp.prize),
@@ -2693,34 +2768,60 @@ impl<T: Trait> DataObjectStorage<T> for Module<T> {
         <Bags<T>>::insert(&bag_id, bag);
 
         Self::deposit_event(RawEvent::DynamicBagCreated(
-            dynamic_bag_id,
-            deletion_prize,
-            storage_buckets,
-            distribution_buckets,
+            dynamic_bag_id.clone(),
+            deletion_prize.clone(),
+            storage_buckets.clone(),
+            distribution_buckets.clone(),
         ));
 
         Ok(())
     }
 
-    fn can_create_dynamic_bag(
-        bag_id: &DynamicBagId<T>,
-        deletion_prize: &Option<DynamicBagDeletionPrize<T>>,
+    fn upload_data_objects_inner(
+        params: &UploadParameters<T>,
+        bag_change: &BagUpdate<BalanceOf<T>>,
+        bag: &Bag<T>,
     ) -> DispatchResult {
-        Self::validate_create_dynamic_bag_params(bag_id, deletion_prize)
-    }
+        let data = Self::create_data_objects(params.object_creation_list.clone());
 
-    fn ensure_bag_exists(bag_id: &BagId<T>) -> Result<Bag<T>, DispatchError> {
-        Self::ensure_bag_exists(bag_id)
-    }
+        //
+        // == MUTATION SAFE ==
+        //
 
-    fn get_data_objects_id(bag_id: &BagId<T>) -> BTreeSet<T::DataObjectId> {
-        DataObjectsById::<T>::iter_prefix(&bag_id)
-            .map(|x| x.0)
-            .collect()
+        <StorageTreasury<T>>::deposit(
+            &params.deletion_prize_source_account_id,
+            bag_change.total_deletion_prize,
+        )?;
+
+        Self::slash_data_size_fee(
+            &params.deletion_prize_source_account_id,
+            bag_change.voucher_update.objects_total_size,
+        );
+
+        // Save next object id.
+        <NextDataObjectId<T>>::put(data.next_data_object_id);
+
+        // Insert new objects.
+        for (data_object_id, data_object) in data.data_objects_map.iter() {
+            DataObjectsById::<T>::insert(&params.bag_id, &data_object_id, data_object);
+        }
+
+        Self::change_storage_bucket_vouchers_for_bag(
+            &params.bag_id,
+            &bag,
+            &bag_change.voucher_update,
+            OperationType::Increase,
+        );
+
+        Self::deposit_event(RawEvent::DataObjectsUploaded(
+            data.data_objects_map.keys().cloned().collect(),
+            params.clone(),
+            T::DataObjectDeletionPrize::get(),
+        ));
+
+        Ok(())
     }
-}
 
-impl<T: Trait> Module<T> {
     // Increment distribution family number in the storage.
     fn increment_distribution_family_number() {
         DistributionBucketFamilyNumber::put(Self::distribution_bucket_family_number() + 1);
@@ -2737,43 +2838,95 @@ impl<T: Trait> Module<T> {
     fn validate_create_dynamic_bag_params(
         dynamic_bag_id: &DynamicBagId<T>,
         deletion_prize: &Option<DynamicBagDeletionPrize<T>>,
-    ) -> DispatchResult {
+        upload_params: &Option<UploadParameters<T>>,
+    ) -> Result<Option<BagUpdate<BalanceOf<T>>>, DispatchError> {
         let bag_id: BagId<T> = dynamic_bag_id.clone().into();
-
         ensure!(
             !<Bags<T>>::contains_key(bag_id),
             Error::<T>::DynamicBagExists
         );
 
-        if let Some(deletion_prize) = deletion_prize {
-            ensure!(
-                Balances::<T>::usable_balance(&deletion_prize.account_id) >= deletion_prize.prize,
-                Error::<T>::InsufficientBalance
-            );
-        }
+        // validate upload parameters explicitly when objects are provided
+        let bag_change = upload_params
+            .as_ref()
+            .map(|params| {
+                // ensure the deletion prize account matches the upload fee source account
+                if let Some(deletion_prize) = deletion_prize {
+                    ensure!(
+                        params.deletion_prize_source_account_id == deletion_prize.account_id,
+                        Error::<T>::AccountsNotCoherent,
+                    );
+                }
+                Self::validate_bag_change(params)
+            })
+            .transpose()?;
+
+        // check that the account balance covers the deletion prize plus upload fees
+        let total_upload_fee = deletion_prize
+            .as_ref()
+            .map_or(Zero::zero(), |del_prize| del_prize.prize)
+            .saturating_add(bag_change.as_ref().map_or(Zero::zero(), |bag_change| {
+                Self::compute_upload_fees(bag_change)
+            }));
+
+        Self::ensure_sufficient_balance_for_upload(
+            deletion_prize
+                .as_ref()
+                .map(|deletion_prize| deletion_prize.account_id.clone()),
+            total_upload_fee,
+        )?;
 
+        Ok(bag_change)
+    }
+
+    fn ensure_sufficient_balance_for_upload(
+        deletion_prize_source_account_id: Option<T::AccountId>,
+        required_balance: BalanceOf<T>,
+    ) -> DispatchResult {
+        let usable_balance = deletion_prize_source_account_id.map_or(Zero::zero(), |account_id| {
+            Balances::<T>::usable_balance(account_id)
+        });
+
+        ensure!(
+            usable_balance >= required_balance,
+            Error::<T>::InsufficientBalance
+        );
         Ok(())
     }
 
     // Validates dynamic bag deletion params and conditions. Returns bag's deletion prize.
     fn validate_delete_dynamic_bag_params(
         dynamic_bag_id: &DynamicBagId<T>,
+        with_objects: bool,
     ) -> Result<Option<BalanceOf<T>>, DispatchError> {
         Self::ensure_dynamic_bag_exists(dynamic_bag_id)?;
 
         let dynamic_bag = Self::dynamic_bag(dynamic_bag_id);
 
-        ensure!(
-            dynamic_bag.objects_number == 0,
-            Error::<T>::CannotDeleteNonEmptyDynamicBag
-        );
-
-        if let Some(deletion_prize) = dynamic_bag.deletion_prize {
+        // deletion prize = bag.deletion_prize + total object deletion prizes (if any)
+        let deletion_prize = if !with_objects {
             ensure!(
-                <StorageTreasury<T>>::usable_balance() >= deletion_prize,
-                Error::<T>::InsufficientTreasuryBalance
+                dynamic_bag.objects_number == 0,
+                Error::<T>::CannotDeleteNonEmptyDynamicBag
             );
-        }
+            dynamic_bag.deletion_prize.unwrap_or_else(Zero::zero)
+        } else {
+            let bag_id: BagId<T> = dynamic_bag_id.clone().into();
+            let objects_del_prize = <DataObjectsById<T>>::iter_prefix(bag_id)
+                .fold(BalanceOf::<T>::zero(), |acc, (_, data_object)| {
+                    acc.saturating_add(data_object.deletion_prize)
+                });
+
+            dynamic_bag
+                .deletion_prize
+                .unwrap_or_else(Zero::zero)
+                .saturating_add(objects_del_prize)
+        };
+
+        ensure!(
+            <StorageTreasury<T>>::usable_balance() >= deletion_prize,
+            Error::<T>::InsufficientTreasuryBalance
+        );
 
         Ok(dynamic_bag.deletion_prize)
     }
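When `with_objects` is true, the required treasury balance is therefore the bag's own deletion prize plus the sum of the per-object deletion prizes. A toy model of this branch with plain `u64` balances (illustrative only; the pallet works with `BalanceOf<T>` and iterates storage):

```rust
// Simplified model of the deletion-prize computation above.
fn required_treasury_balance(
    bag_prize: u64,
    object_prizes: &[u64],
    with_objects: bool,
) -> Result<u64, &'static str> {
    if !with_objects {
        // deleting "without objects" requires an empty bag
        if !object_prizes.is_empty() {
            return Err("CannotDeleteNonEmptyDynamicBag");
        }
        return Ok(bag_prize);
    }
    // bag prize + accumulated per-object deletion prizes
    Ok(object_prizes
        .iter()
        .fold(bag_prize, |acc, prize| acc.saturating_add(*prize)))
}

fn main() {
    // A bag with prize 100 holding two objects with prize 20 each can only
    // be deleted "with objects" if the treasury holds at least 140.
    assert_eq!(required_treasury_balance(100, &[20, 20], true), Ok(140));
    assert!(required_treasury_balance(100, &[20, 20], false).is_err());
}
```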
@@ -3153,6 +3306,37 @@ impl<T: Trait> Module<T> {
         }
     }
 
+    // Validates upload parameters and conditions (like global uploading block).
+    // Returns voucher update parameters for the storage buckets.
+    fn validate_upload_data_objects_parameters(
+        params: &UploadParameters<T>,
+    ) -> Result<BagUpdate<BalanceOf<T>>, DispatchError> {
+        let bag_change = Self::validate_bag_change(params)?;
+        Self::ensure_sufficient_balance_for_upload(
+            Some(params.deletion_prize_source_account_id.clone()),
+            Self::compute_upload_fees(&bag_change),
+        )?;
+        Self::ensure_upload_bag_validity(&params.bag_id, &bag_change.voucher_update)?;
+        Ok(bag_change)
+    }
+
+    // construct bag change after validating the inputs
+    fn validate_bag_change(
+        params: &UploadParameters<T>,
+    ) -> Result<BagUpdate<BalanceOf<T>>, DispatchError> {
+        Self::check_global_uploading_block()?;
+
+        Self::ensure_objects_creation_list_validity(&params.object_creation_list)?;
+
+        let bag_change = Self::construct_bag_change(&params.object_creation_list)?;
+
+        ensure!(
+            params.expected_data_size_fee == Self::data_object_per_mega_byte_fee(),
+            Error::<T>::DataSizeFeeChanged
+        );
+        Ok(bag_change)
+    }
+
     // Validates `delete_data_objects` parameters.
     // Returns voucher update for an affected bag.
     fn validate_delete_data_objects_params(
@@ -3189,39 +3373,33 @@ impl<T: Trait> Module<T> {
         Ok(bag_change)
     }
 
-    // Validates upload parameters and conditions (like global uploading block).
-    // Returns voucher update parameters for the storage buckets.
-    fn validate_upload_data_objects_parameters(
-        params: &UploadParameters<T>,
-    ) -> Result<BagUpdate<BalanceOf<T>>, DispatchError> {
-        // Check global uploading block.
-        ensure!(!Self::uploading_blocked(), Error::<T>::UploadingBlocked);
-
-        // Check object creation list validity.
-        ensure!(
-            !params.object_creation_list.is_empty(),
-            Error::<T>::NoObjectsOnUpload
-        );
+    fn ensure_upload_bag_validity(
+        bag_id: &BagId<T>,
+        voucher_update: &VoucherUpdate,
+    ) -> DispatchResult {
+        let bag = Self::ensure_bag_exists(bag_id)?;
+        // Check buckets.
+        Self::check_bag_for_buckets_overflow(&bag, voucher_update)?;
+        Ok(())
+    }
 
-        // Check data objects' max size.
-        ensure!(
-            params
-                .object_creation_list
-                .iter()
-                .all(|obj| obj.size <= T::MaxDataObjectSize::get()),
-            Error::<T>::MaxDataObjectSizeExceeded
-        );
+    fn compute_upload_fees(bag_change: &BagUpdate<BalanceOf<T>>) -> BalanceOf<T> {
+        let size_fee =
+            Self::calculate_data_storage_fee(bag_change.voucher_update.objects_total_size);
 
-        let bag = Self::ensure_bag_exists(&params.bag_id)?;
+        bag_change.total_deletion_prize.saturating_add(size_fee)
+    }
 
-        // Check data size fee change.
-        ensure!(
-            params.expected_data_size_fee == Self::data_object_per_mega_byte_fee(),
-            Error::<T>::DataSizeFeeChanged
-        );
+    // Check global uploading block.
+    fn check_global_uploading_block() -> DispatchResult {
+        ensure!(!Self::uploading_blocked(), Error::<T>::UploadingBlocked);
+        Ok(())
+    }
 
-        let bag_change = params
-            .object_creation_list
+    fn construct_bag_change(
+        object_creation_list: &[DataObjectCreationParameters],
+    ) -> Result<BagUpdate<BalanceOf<T>>, DispatchError> {
+        let bag_change = object_creation_list
             .iter()
             .try_fold::<_, _, Result<_, DispatchError>>(
                 BagUpdate::default(),
@@ -3248,19 +3426,27 @@ impl<T: Trait> Module<T> {
                 },
             )?;
 
-        let size_fee =
-            Self::calculate_data_storage_fee(bag_change.voucher_update.objects_total_size);
-        let usable_balance =
-            Balances::<T>::usable_balance(&params.deletion_prize_source_account_id);
-
-        // Check account balance to satisfy deletion prize and storage fee.
-        let total_fee = bag_change.total_deletion_prize + size_fee;
-        ensure!(usable_balance >= total_fee, Error::<T>::InsufficientBalance);
+        Ok(bag_change)
+    }
 
-        // Check buckets.
-        Self::check_bag_for_buckets_overflow(&bag, &bag_change.voucher_update)?;
+    // Validates the object creation list.
+    fn ensure_objects_creation_list_validity(
+        object_creation_list: &[DataObjectCreationParameters],
+    ) -> DispatchResult {
+        // Check object creation list is not empty
+        ensure!(
+            !object_creation_list.is_empty(),
+            Error::<T>::NoObjectsOnUpload
+        );
 
-        Ok(bag_change)
+        // Check data objects' max size.
+        ensure!(
+            object_creation_list
+                .iter()
+                .all(|obj| obj.size <= T::MaxDataObjectSize::get()),
+            Error::<T>::MaxDataObjectSizeExceeded
+        );
+        Ok(())
     }
 
     // Iterates through buckets in the bag. Verifies voucher parameters to fit the new limits:
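After this refactoring the total charge for an upload is computed in one place: `compute_upload_fees` adds the accumulated deletion prizes to the size-based storage fee. A back-of-the-envelope model with plain `u64` balances; the per-started-megabyte rounding is an assumption, since `calculate_data_storage_fee` is not shown in this diff:

```rust
// Illustrative model of compute_upload_fees. ASSUMPTION: the size fee is
// charged per started megabyte; the real fee function may round differently.
const MB: u64 = 1024 * 1024;

fn upload_fee(object_sizes: &[u64], per_object_prize: u64, fee_per_mb: u64) -> u64 {
    let total_size: u64 = object_sizes.iter().sum();
    let total_prize = per_object_prize.saturating_mul(object_sizes.len() as u64);
    let megabytes = (total_size + MB - 1) / MB; // round up (assumed)
    total_prize.saturating_add(megabytes.saturating_mul(fee_per_mb))
}

fn main() {
    // Two 1 MiB objects, deletion prize 10 each, fee 5 per megabyte:
    // 2 * 10 + 2 * 5 = 30 in total.
    assert_eq!(upload_fee(&[MB, MB], 10, 5), 30);
}
```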
@@ -3324,11 +3510,36 @@ impl<T: Trait> Module<T> {
         }
     }
 
+    // Helper: picks storage and distribution buckets for a dynamic bag.
+    fn pick_buckets_for_bag(
+        dynamic_bag_id: DynamicBagId<T>,
+        bag_change: &Option<BagUpdate<BalanceOf<T>>>,
+    ) -> Result<BucketPair<T>, DispatchError> {
+        let bag_type: DynamicBagType = dynamic_bag_id.into();
+
+        let storage_bucket_ids = Self::pick_storage_buckets_for_dynamic_bag(
+            bag_type,
+            bag_change.map(|bag_change| bag_change.voucher_update),
+        );
+
+        let distribution_bucket_ids = Self::pick_distribution_buckets_for_dynamic_bag(bag_type);
+
+        if bag_change.is_some() {
+            ensure!(
+                !storage_bucket_ids.is_empty(),
+                Error::<T>::StorageBucketIdCollectionsAreEmpty
+            );
+        }
+
+        Ok((storage_bucket_ids, distribution_bucket_ids))
+    }
+
     // Selects storage bucket ID sets to assign to the dynamic bag.
     pub(crate) fn pick_storage_buckets_for_dynamic_bag(
         bag_type: DynamicBagType,
+        voucher_update: Option<VoucherUpdate>,
     ) -> BTreeSet<T::StorageBucketId> {
-        StorageBucketPicker::<T>::pick_storage_buckets(bag_type)
+        StorageBucketPicker::<T>::pick_storage_buckets(bag_type, voucher_update)
     }
 
     // Selects distributed bucket ID sets to assign to the dynamic bag.

+ 32 - 7
runtime-modules/storage/src/random_buckets/storage_bucket_picker.rs

@@ -5,7 +5,7 @@ use sp_std::collections::btree_set::BTreeSet;
 use sp_std::marker::PhantomData;
 
 pub(crate) use super::{RandomBucketIdIterator, SequentialBucketIdIterator};
-use crate::{DynamicBagType, Module, Trait};
+use crate::{DynamicBagType, Module, Trait, VoucherUpdate};
 
 // Generates storage bucket IDs to assign to a new dynamic bag.
 pub(crate) struct StorageBucketPicker<T> {
@@ -19,7 +19,10 @@ impl<T: Trait> StorageBucketPicker<T> {
     // The function filters deleted buckets and disabled buckets (accepting_new_bags == false)
     // Total number of possible IDs is limited by the dynamic bag settings.
     // Returns an accumulated bucket ID set or an empty set.
-    pub(crate) fn pick_storage_buckets(bag_type: DynamicBagType) -> BTreeSet<T::StorageBucketId> {
+    pub(crate) fn pick_storage_buckets(
+        bag_type: DynamicBagType,
+        voucher_update: Option<VoucherUpdate>,
+    ) -> BTreeSet<T::StorageBucketId> {
         let creation_policy = Module::<T>::get_dynamic_bag_creation_policy(bag_type);
 
         if creation_policy.no_storage_buckets_required() {
@@ -28,6 +31,7 @@ impl<T: Trait> StorageBucketPicker<T> {
 
         let required_bucket_num = creation_policy.number_of_storage_buckets as usize;
 
+        // TODO: the selection algorithm will change: https://github.com/Joystream/joystream/issues/2904
         // Storage bucket IDs accumulator.
         let bucket_ids_cell = RefCell::new(BTreeSet::new());
         let next_storage_bucket_id = Module::<T>::next_storage_bucket_id();
@@ -35,7 +39,7 @@ impl<T: Trait> StorageBucketPicker<T> {
             .chain(SequentialBucketIdIterator::<T, T::StorageBucketId>::new(
                 next_storage_bucket_id,
             ))
-            .filter(Self::check_storage_bucket_is_valid_for_bag_assigning)
+            .filter(|id| Self::check_storage_bucket_is_valid_for_bag_assigning(id, &voucher_update))
             .filter(|bucket_id| {
                 let bucket_ids = bucket_ids_cell.borrow();
 
@@ -55,11 +59,32 @@ impl<T: Trait> StorageBucketPicker<T> {
     // Verifies storage bucket ID (non-deleted and accepting new bags).
     pub(crate) fn check_storage_bucket_is_valid_for_bag_assigning(
         bucket_id: &T::StorageBucketId,
+        voucher_update: &Option<VoucherUpdate>,
     ) -> bool {
         // Check bucket for existence (return false if not). Check `accepting_new_bags`.
-        Module::<T>::ensure_storage_bucket_exists(bucket_id)
-            .ok()
-            .map(|bucket| bucket.accepting_new_bags)
-            .unwrap_or(false)
+        let bucket = Module::<T>::ensure_storage_bucket_exists(bucket_id).ok();
+
+        // check that bucket is accepting new bags
+        let accepting_bags = bucket
+            .as_ref()
+            .map_or(false, |bucket| bucket.accepting_new_bags);
+
+        // check that the bucket has enough room for the new objects' count and total size
+        let limits_sufficient = bucket.as_ref().map_or(false, |bucket| {
+            voucher_update.map_or(true, |voucher_update| {
+                let num_objects_enough = bucket.voucher.objects_limit
+                    >= bucket
+                        .voucher
+                        .objects_used
+                        .saturating_add(voucher_update.objects_number);
+                let size_enough = bucket.voucher.size_limit
+                    >= bucket
+                        .voucher
+                        .size_used
+                        .saturating_add(voucher_update.objects_total_size);
+                size_enough && num_objects_enough
+            })
+        });
+        accepting_bags && limits_sufficient
     }
 }
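The extended filter admits a bucket only when, on top of `accepting_new_bags`, both voucher headrooms can absorb the pending upload. A self-contained sketch of that predicate, with numbers mirroring the failing case in the tests further below:

```rust
// Simplified model of the new voucher check in
// check_storage_bucket_is_valid_for_bag_assigning (plain u64 fields
// instead of the pallet's voucher types).
struct Voucher {
    size_limit: u64,
    size_used: u64,
    objects_limit: u64,
    objects_used: u64,
}

fn fits(v: &Voucher, add_objects: u64, add_size: u64) -> bool {
    v.objects_limit >= v.objects_used.saturating_add(add_objects)
        && v.size_limit >= v.size_used.saturating_add(add_size)
}

fn main() {
    // A bucket with objects_limit 1 cannot take an upload of 3 objects...
    let small = Voucher { size_limit: 10, size_used: 0, objects_limit: 1, objects_used: 0 };
    assert!(!fits(&small, 3, 3));
    // ...but becomes eligible once its limits are raised, as the tests do.
    let large = Voucher { size_limit: 100, size_used: 0, objects_limit: 10, objects_used: 0 };
    assert!(fits(&large, 3, 3));
}
```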

+ 74 - 0
runtime-modules/storage/src/tests/fixtures.rs

@@ -805,6 +805,28 @@ impl DeleteDynamicBagFixture {
     }
 }
 
+pub struct CanDeleteDynamicBagWithObjectsFixture {
+    bag_id: DynamicBagId<Test>,
+}
+
+impl CanDeleteDynamicBagWithObjectsFixture {
+    pub fn default() -> Self {
+        Self {
+            bag_id: Default::default(),
+        }
+    }
+
+    pub fn with_bag_id(self, bag_id: DynamicBagId<Test>) -> Self {
+        Self { bag_id, ..self }
+    }
+
+    pub fn call_and_assert(&self, expected_result: DispatchResult) {
+        let actual_result = Storage::can_delete_dynamic_bag_with_objects(&self.bag_id);
+
+        assert_eq!(actual_result, expected_result);
+    }
+}
+
 pub struct DeleteStorageBucketFixture {
     origin: RawOrigin<u64>,
     storage_bucket_id: u64,
@@ -1128,6 +1150,58 @@ impl CreateDynamicBagFixture {
     }
 }
 
+pub struct CreateDynamicBagWithObjectsFixture {
+    bag_id: DynamicBagId<Test>,
+    deletion_prize: Option<DynamicBagDeletionPrize<Test>>,
+    upload_parameters: UploadParameters<Test>,
+}
+
+impl CreateDynamicBagWithObjectsFixture {
+    pub fn default() -> Self {
+        Self {
+            bag_id: Default::default(),
+            deletion_prize: Default::default(),
+            upload_parameters: Default::default(),
+        }
+    }
+
+    pub fn with_bag_id(self, bag_id: DynamicBagId<Test>) -> Self {
+        Self { bag_id, ..self }
+    }
+
+    pub fn with_deletion_prize(
+        self,
+        deletion_prize: Option<DynamicBagDeletionPrize<Test>>,
+    ) -> Self {
+        Self {
+            deletion_prize,
+            ..self
+        }
+    }
+
+    pub fn with_objects(self, upload_parameters: UploadParameters<Test>) -> Self {
+        Self {
+            upload_parameters,
+            ..self
+        }
+    }
+
+    pub fn call_and_assert(&self, expected_result: DispatchResult) {
+        let actual_result = Storage::create_dynamic_bag_with_objects_constraints(
+            self.bag_id.clone(),
+            self.deletion_prize.clone(),
+            self.upload_parameters.clone(),
+        );
+
+        assert_eq!(actual_result, expected_result);
+
+        if actual_result.is_ok() {
+            let bag_id: BagId<Test> = self.bag_id.clone().into();
+            assert!(<crate::Bags<Test>>::contains_key(&bag_id));
+        }
+    }
+}
+
 pub struct UpdateNumberOfStorageBucketsInDynamicBagCreationPolicyFixture {
     origin: RawOrigin<u64>,
     new_storage_buckets_number: u64,

+ 408 - 14
runtime-modules/storage/src/tests/mod.rs

@@ -3052,9 +3052,13 @@ fn set_max_voucher_limits() {
     let new_size_limit = 100;
     let new_objects_limit = 1;
 
+    set_max_voucher_limits_with_params(new_size_limit, new_objects_limit);
+}
+
+fn set_max_voucher_limits_with_params(size_limit: u64, objects_limit: u64) {
     UpdateStorageBucketsVoucherMaxLimitsFixture::default()
-        .with_new_objects_size_limit(new_size_limit)
-        .with_new_objects_number_limit(new_objects_limit)
+        .with_new_objects_size_limit(size_limit)
+        .with_new_objects_number_limit(objects_limit)
         .call_and_assert(Ok(()));
 }
 
@@ -3208,20 +3212,23 @@ fn test_storage_bucket_picking_for_bag_non_random() {
 
         let initial_buckets_number = InitialStorageBucketsNumberForDynamicBag::get();
         // No buckets
-        let bucket_ids = Storage::pick_storage_buckets_for_dynamic_bag(DynamicBagType::Member);
+        let bucket_ids =
+            Storage::pick_storage_buckets_for_dynamic_bag(DynamicBagType::Member, None);
         assert_eq!(bucket_ids, BTreeSet::new());
 
         // Less than the initial buckets number
         let buckets_number = initial_buckets_number - 1;
         let created_buckets = create_storage_buckets(buckets_number);
-        let bucket_ids = Storage::pick_storage_buckets_for_dynamic_bag(DynamicBagType::Member);
+        let bucket_ids =
+            Storage::pick_storage_buckets_for_dynamic_bag(DynamicBagType::Member, None);
 
         assert_eq!(bucket_ids, created_buckets);
 
         // More than the initial buckets number
         let buckets_number = 5;
         create_storage_buckets(buckets_number);
-        let bucket_ids = Storage::pick_storage_buckets_for_dynamic_bag(DynamicBagType::Member);
+        let bucket_ids =
+            Storage::pick_storage_buckets_for_dynamic_bag(DynamicBagType::Member, None);
 
         assert_eq!(
             bucket_ids,
@@ -3232,7 +3239,8 @@ fn test_storage_bucket_picking_for_bag_non_random() {
         let removed_bucket_id = 1;
         <crate::StorageBucketById<Test>>::remove(removed_bucket_id);
 
-        let bucket_ids = Storage::pick_storage_buckets_for_dynamic_bag(DynamicBagType::Member);
+        let bucket_ids =
+            Storage::pick_storage_buckets_for_dynamic_bag(DynamicBagType::Member, None);
 
         let mut expected_ids =
             BTreeSet::from_iter((0u64..(initial_buckets_number + 1)).into_iter());
@@ -3246,7 +3254,8 @@ fn test_storage_bucket_picking_for_bag_non_random() {
             bucket.accepting_new_bags = false;
         });
 
-        let bucket_ids = Storage::pick_storage_buckets_for_dynamic_bag(DynamicBagType::Member);
+        let bucket_ids =
+            Storage::pick_storage_buckets_for_dynamic_bag(DynamicBagType::Member, None);
 
         let mut expected_ids =
             BTreeSet::from_iter((0u64..(initial_buckets_number + 2)).into_iter());
@@ -3261,7 +3270,8 @@ fn test_storage_bucket_picking_for_bag_non_random() {
             DynamicBagCreationPolicy::default(),
         );
 
-        let bucket_ids = Storage::pick_storage_buckets_for_dynamic_bag(DynamicBagType::Member);
+        let bucket_ids =
+            Storage::pick_storage_buckets_for_dynamic_bag(DynamicBagType::Member, None);
         assert_eq!(bucket_ids, BTreeSet::new());
     });
 }
@@ -3275,20 +3285,23 @@ fn test_storage_bucket_picking_for_bag_with_randomness() {
 
         let initial_buckets_number = InitialStorageBucketsNumberForDynamicBag::get();
         // No buckets
-        let bucket_ids = Storage::pick_storage_buckets_for_dynamic_bag(DynamicBagType::Member);
+        let bucket_ids =
+            Storage::pick_storage_buckets_for_dynamic_bag(DynamicBagType::Member, None);
         assert_eq!(bucket_ids, BTreeSet::new());
 
         // Less than the initial buckets number
         let buckets_number = initial_buckets_number - 1;
         let created_buckets = create_storage_buckets(buckets_number);
-        let bucket_ids = Storage::pick_storage_buckets_for_dynamic_bag(DynamicBagType::Member);
+        let bucket_ids =
+            Storage::pick_storage_buckets_for_dynamic_bag(DynamicBagType::Member, None);
 
         assert_eq!(bucket_ids, created_buckets);
 
         // More than the initial buckets number
         let buckets_number = 5;
         create_storage_buckets(buckets_number);
-        let bucket_ids = Storage::pick_storage_buckets_for_dynamic_bag(DynamicBagType::Member);
+        let bucket_ids =
+            Storage::pick_storage_buckets_for_dynamic_bag(DynamicBagType::Member, None);
 
         let sequential_random_ids = BTreeSet::from_iter((0u64..initial_buckets_number).into_iter());
 
@@ -3305,7 +3318,8 @@ fn test_storage_bucket_picking_for_bag_with_randomness() {
         let removed_bucket_id = bucket_ids.iter().next().unwrap();
         <crate::StorageBucketById<Test>>::remove(removed_bucket_id);
 
-        let bucket_ids = Storage::pick_storage_buckets_for_dynamic_bag(DynamicBagType::Member);
+        let bucket_ids =
+            Storage::pick_storage_buckets_for_dynamic_bag(DynamicBagType::Member, None);
         // Check number of generated IDs
         assert_eq!(initial_buckets_number, bucket_ids.len() as u64);
         // Check that IDs are within possible range.
@@ -3321,7 +3335,8 @@ fn test_storage_bucket_picking_for_bag_with_randomness() {
             bucket.accepting_new_bags = false;
         });
 
-        let bucket_ids = Storage::pick_storage_buckets_for_dynamic_bag(DynamicBagType::Member);
+        let bucket_ids =
+            Storage::pick_storage_buckets_for_dynamic_bag(DynamicBagType::Member, None);
 
         let mut expected_ids =
             BTreeSet::from_iter((0u64..(initial_buckets_number + 2)).into_iter());
@@ -3343,7 +3358,8 @@ fn test_storage_bucket_picking_for_bag_with_randomness() {
             DynamicBagCreationPolicy::default(),
         );
 
-        let bucket_ids = Storage::pick_storage_buckets_for_dynamic_bag(DynamicBagType::Member);
+        let bucket_ids =
+            Storage::pick_storage_buckets_for_dynamic_bag(DynamicBagType::Member, None);
         assert_eq!(bucket_ids, BTreeSet::new());
     });
 }
@@ -3390,6 +3406,14 @@ fn create_storage_buckets(buckets_number: u64) -> BTreeSet<u64> {
     let objects_limit = 1;
     let size_limit = 100;
 
+    create_storage_buckets_with_limits(buckets_number, size_limit, objects_limit)
+}
+
+fn create_storage_buckets_with_limits(
+    buckets_number: u64,
+    size_limit: u64,
+    objects_limit: u64,
+) -> BTreeSet<u64> {
     let mut bucket_ids = BTreeSet::new();
 
     for _ in 0..buckets_number {
@@ -5042,3 +5066,373 @@ fn set_distribution_bucket_family_metadata_fails_with_invalid_distribution_bucke
             ));
     });
 }
+
+#[test]
+fn create_dynamic_bag_with_objects_succeeds() {
+    build_test_externalities().execute_with(|| {
+        let starting_block = 1;
+        run_to_block(starting_block);
+
+        let dynamic_bag_id = DynamicBagId::<Test>::Member(DEFAULT_MEMBER_ID);
+
+        create_storage_buckets(10);
+
+        let deletion_prize_value = 100;
+        let deletion_prize_account_id = DEFAULT_MEMBER_ACCOUNT_ID;
+        let initial_balance = 10000;
+        increase_account_balance(&deletion_prize_account_id, initial_balance);
+
+        let deletion_prize = Some(DynamicBagDeletionPrize::<Test> {
+            prize: deletion_prize_value,
+            account_id: deletion_prize_account_id,
+        });
+
+        let upload_parameters = UploadParameters::<Test> {
+            bag_id: BagId::<Test>::from(dynamic_bag_id.clone()),
+            object_creation_list: create_single_data_object(),
+            deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
+            expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
+        };
+
+        // pre-check balances
+        assert_eq!(
+            Balances::usable_balance(&DEFAULT_MEMBER_ACCOUNT_ID),
+            initial_balance
+        );
+        assert_eq!(
+            Balances::usable_balance(&<StorageTreasury<Test>>::module_account_id()),
+            0
+        );
+
+        CreateDynamicBagWithObjectsFixture::default()
+            .with_bag_id(dynamic_bag_id.clone())
+            .with_deletion_prize(deletion_prize.clone())
+            .with_objects(upload_parameters)
+            .call_and_assert(Ok(()));
+
+        let bag = Storage::dynamic_bag(&dynamic_bag_id);
+
+        // Check that IDs are within possible range.
+        assert!(bag
+            .stored_by
+            .iter()
+            .all(|id| { *id < Storage::next_storage_bucket_id() }));
+
+        let creation_policy =
+            Storage::get_dynamic_bag_creation_policy(dynamic_bag_id.clone().into());
+        assert_eq!(
+            bag.stored_by.len(),
+            creation_policy.number_of_storage_buckets as usize
+        );
+
+        assert_eq!(bag.deletion_prize.unwrap(), deletion_prize_value);
+
+        // post-check balances
+        assert_eq!(
+            Balances::usable_balance(&DEFAULT_MEMBER_ACCOUNT_ID),
+            initial_balance - deletion_prize_value
+        );
+        assert_eq!(
+            Balances::usable_balance(&<StorageTreasury<Test>>::module_account_id()),
+            deletion_prize_value
+        );
+
+        EventFixture::assert_last_crate_event(RawEvent::DynamicBagCreated(
+            dynamic_bag_id,
+            deletion_prize,
+            BTreeSet::from_iter(bag.stored_by),
+            BTreeSet::from_iter(bag.distributed_by),
+        ));
+    });
+}
+
+#[test]
+fn create_dynamic_bag_with_objects_fails_with_no_bucket_available_with_sufficient_objects_limit() {
+    build_test_externalities().execute_with(|| {
+        let starting_block = 1;
+        run_to_block(starting_block);
+
+        // set max voucher limits: size 100, object number 20
+        set_max_voucher_limits_with_params(100, 20);
+
+        let dynamic_bag_id = DynamicBagId::<Test>::Member(DEFAULT_MEMBER_ID);
+
+        // create 10 buckets each with size limit 10 and num object limit 1
+        create_storage_buckets_with_limits(10, 10, 1);
+
+        let deletion_prize_value = 100;
+        let deletion_prize_account_id = DEFAULT_MEMBER_ACCOUNT_ID;
+        let initial_balance = 10000;
+        increase_account_balance(&deletion_prize_account_id, initial_balance);
+
+        let deletion_prize = Some(DynamicBagDeletionPrize::<Test> {
+            prize: deletion_prize_value,
+            account_id: deletion_prize_account_id,
+        });
+
+        let upload_parameters = UploadParameters::<Test> {
+            bag_id: BagId::<Test>::from(dynamic_bag_id.clone()),
+            object_creation_list: create_data_object_candidates(1, 3),
+            deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
+            expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
+        };
+
+        // pre-check balances
+        assert_eq!(
+            Balances::usable_balance(&DEFAULT_MEMBER_ACCOUNT_ID),
+            initial_balance
+        );
+        assert_eq!(
+            Balances::usable_balance(&<StorageTreasury<Test>>::module_account_id()),
+            0
+        );
+
+        // this fails because the upload has 3 objects while each bucket's objects limit is 1
+        CreateDynamicBagWithObjectsFixture::default()
+            .with_bag_id(dynamic_bag_id.clone())
+            .with_deletion_prize(deletion_prize.clone())
+            .with_objects(upload_parameters.clone())
+            .call_and_assert(Err(Error::<Test>::StorageBucketIdCollectionsAreEmpty.into()));
+
+        // raise one bucket's voucher limits to be large enough and retry
+        let new_objects_number_limit = 10;
+        let new_objects_size_limit = 100;
+        let bucket_id_to_enlarge = 1;
+
+        SetStorageBucketVoucherLimitsFixture::default()
+            .with_origin(RawOrigin::Signed(STORAGE_WG_LEADER_ACCOUNT_ID))
+            .with_storage_bucket_id(bucket_id_to_enlarge)
+            .with_new_objects_number_limit(new_objects_number_limit)
+            .with_new_objects_size_limit(new_objects_size_limit)
+            .call_and_assert(Ok(()));
+
+        // this succeeds now
+        CreateDynamicBagWithObjectsFixture::default()
+            .with_bag_id(dynamic_bag_id.clone())
+            .with_deletion_prize(deletion_prize.clone())
+            .with_objects(upload_parameters)
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn create_dynamic_bag_with_objects_fails_with_no_bucket_available_with_sufficient_size_limit() {
+    build_test_externalities().execute_with(|| {
+        let starting_block = 1;
+        run_to_block(starting_block);
+
+        // set max voucher limits: size 100, object number 20
+        set_max_voucher_limits_with_params(100, 20);
+
+        let dynamic_bag_id = DynamicBagId::<Test>::Member(DEFAULT_MEMBER_ID);
+
+        // create 10 buckets each with size limit 1 and num object limit 10
+        create_storage_buckets_with_limits(10, 1, 10);
+
+        let deletion_prize_value = 100;
+        let deletion_prize_account_id = DEFAULT_MEMBER_ACCOUNT_ID;
+        let initial_balance = 10000;
+        increase_account_balance(&deletion_prize_account_id, initial_balance);
+
+        let deletion_prize = Some(DynamicBagDeletionPrize::<Test> {
+            prize: deletion_prize_value,
+            account_id: deletion_prize_account_id,
+        });
+
+        // try uploading 3 objects, each exceeding the bucket size limit
+        let upload_parameters = UploadParameters::<Test> {
+            bag_id: BagId::<Test>::from(dynamic_bag_id.clone()),
+            object_creation_list: create_data_object_candidates(1, 3),
+            deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
+            expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
+        };
+
+        // pre-check balances
+        assert_eq!(
+            Balances::usable_balance(&DEFAULT_MEMBER_ACCOUNT_ID),
+            initial_balance
+        );
+        assert_eq!(
+            Balances::usable_balance(&<StorageTreasury<Test>>::module_account_id()),
+            0
+        );
+
+        CreateDynamicBagWithObjectsFixture::default()
+            .with_bag_id(dynamic_bag_id.clone())
+            .with_deletion_prize(deletion_prize.clone())
+            .with_objects(upload_parameters.clone())
+            .call_and_assert(Err(Error::<Test>::StorageBucketIdCollectionsAreEmpty.into()));
+
+        // raise one bucket's voucher limits to be large enough and retry
+        let new_objects_number_limit = 10;
+        let new_objects_size_limit = 100;
+        let bucket_id_to_enlarge = 1;
+
+        SetStorageBucketVoucherLimitsFixture::default()
+            .with_origin(RawOrigin::Signed(STORAGE_WG_LEADER_ACCOUNT_ID))
+            .with_storage_bucket_id(bucket_id_to_enlarge)
+            .with_new_objects_number_limit(new_objects_number_limit)
+            .with_new_objects_size_limit(new_objects_size_limit)
+            .call_and_assert(Ok(()));
+
+        CreateDynamicBagWithObjectsFixture::default()
+            .with_bag_id(dynamic_bag_id.clone())
+            .with_deletion_prize(deletion_prize.clone())
+            .with_objects(upload_parameters)
+            .call_and_assert(Ok(()));
+    })
+}
+
+#[test]
+fn create_dynamic_bag_with_objects_fails_with_insufficient_balance() {
+    build_test_externalities().execute_with(|| {
+        let starting_block = 1;
+        run_to_block(starting_block);
+
+        // set max voucher limits: size 100, object number 20
+        set_max_voucher_limits_with_params(100, 20);
+        // create 3 buckets with size limit 10 and objects limit 3
+        create_storage_buckets_with_limits(3, 10, 3);
+
+        let dynamic_bag_id = DynamicBagId::<Test>::Member(DEFAULT_MEMBER_ID);
+
+        let deletion_prize_value = 100;
+        let deletion_prize_account_id = DEFAULT_MEMBER_ACCOUNT_ID;
+        let initial_balance = 100; // just enough for the deletion prize
+        increase_account_balance(&deletion_prize_account_id, initial_balance);
+
+        let deletion_prize = Some(DynamicBagDeletionPrize::<Test> {
+            prize: deletion_prize_value,
+            account_id: deletion_prize_account_id,
+        });
+
+        // try uploading objects whose fees exceed the remaining balance
+        let data_objects = create_data_object_candidates(1, 3);
+        let upload_parameters = UploadParameters::<Test> {
+            bag_id: BagId::<Test>::from(dynamic_bag_id.clone()),
+            object_creation_list: data_objects.clone(),
+            deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
+            expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
+        };
+
+        // pre-check balances
+        assert_eq!(
+            Balances::usable_balance(&DEFAULT_MEMBER_ACCOUNT_ID),
+            initial_balance
+        );
+        assert_eq!(
+            Balances::usable_balance(&<StorageTreasury<Test>>::module_account_id()),
+            0
+        );
+
+        CreateDynamicBagWithObjectsFixture::default()
+            .with_bag_id(dynamic_bag_id.clone())
+            .with_deletion_prize(deletion_prize.clone())
+            .with_objects(upload_parameters.clone())
+            .call_and_assert(Err(Error::<Test>::InsufficientBalance.into()));
+    })
+}
+
+#[test]
+fn can_delete_dynamic_bags_with_objects_succeeds() {
+    build_test_externalities().execute_with(|| {
+        let starting_block = 1;
+        run_to_block(starting_block);
+
+        let dynamic_bag_id = DynamicBagId::<Test>::Member(DEFAULT_MEMBER_ID);
+
+        create_storage_buckets(10);
+
+        let deletion_prize_value = 100;
+        let deletion_prize_account_id = DEFAULT_MEMBER_ACCOUNT_ID;
+        let initial_balance = 10000;
+        increase_account_balance(&deletion_prize_account_id, initial_balance);
+
+        let deletion_prize = Some(DynamicBagDeletionPrize::<Test> {
+            prize: deletion_prize_value,
+            account_id: deletion_prize_account_id,
+        });
+
+        let upload_parameters = UploadParameters::<Test> {
+            bag_id: BagId::<Test>::from(dynamic_bag_id.clone()),
+            object_creation_list: create_single_data_object(),
+            deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
+            expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
+        };
+
+        // pre-check balances
+        assert_eq!(
+            Balances::usable_balance(&DEFAULT_MEMBER_ACCOUNT_ID),
+            initial_balance
+        );
+        assert_eq!(
+            Balances::usable_balance(&<StorageTreasury<Test>>::module_account_id()),
+            0
+        );
+
+        CreateDynamicBagWithObjectsFixture::default()
+            .with_bag_id(dynamic_bag_id.clone())
+            .with_deletion_prize(deletion_prize.clone())
+            .with_objects(upload_parameters)
+            .call_and_assert(Ok(()));
+
+        CanDeleteDynamicBagWithObjectsFixture::default()
+            .with_bag_id(dynamic_bag_id.clone())
+            .call_and_assert(Ok(()));
+    });
+}
+
+#[test]
+fn cannot_delete_dynamic_bags_with_objects_with_insufficient_treasury_balance() {
+    build_test_externalities().execute_with(|| {
+        let starting_block = 1;
+        run_to_block(starting_block);
+
+        let dynamic_bag_id = DynamicBagId::<Test>::Member(DEFAULT_MEMBER_ID);
+
+        create_storage_buckets(10);
+
+        let deletion_prize_value = 100;
+        let deletion_prize_account_id = DEFAULT_MEMBER_ACCOUNT_ID;
+        let initial_balance = 10000;
+        increase_account_balance(&deletion_prize_account_id, initial_balance);
+
+        let deletion_prize = Some(DynamicBagDeletionPrize::<Test> {
+            prize: deletion_prize_value,
+            account_id: deletion_prize_account_id,
+        });
+
+        let upload_parameters = UploadParameters::<Test> {
+            bag_id: BagId::<Test>::from(dynamic_bag_id.clone()),
+            object_creation_list: create_single_data_object(),
+            deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
+            expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
+        };
+
+        // pre-check balances
+        assert_eq!(
+            Balances::usable_balance(&DEFAULT_MEMBER_ACCOUNT_ID),
+            initial_balance
+        );
+        assert_eq!(
+            Balances::usable_balance(&<StorageTreasury<Test>>::module_account_id()),
+            0
+        );
+
+        CreateDynamicBagWithObjectsFixture::default()
+            .with_bag_id(dynamic_bag_id.clone())
+            .with_deletion_prize(deletion_prize.clone())
+            .with_objects(upload_parameters)
+            .call_and_assert(Ok(()));
+
+        // Slash the module account so the treasury can no longer cover the deletion prize
+        let _ = Balances::slash(
+            &<StorageTreasury<Test>>::module_account_id(),
+            deletion_prize_value,
+        );
+
+        CanDeleteDynamicBagWithObjectsFixture::default()
+            .with_bag_id(dynamic_bag_id.clone())
+            .call_and_assert(Err(Error::<Test>::InsufficientTreasuryBalance.into()));
+    });
+}

+ 1 - 1
runtime/Cargo.toml

@@ -4,7 +4,7 @@ edition = '2018'
 name = 'joystream-node-runtime'
 # Follow convention: https://github.com/Joystream/substrate-runtime-joystream/issues/1
 # {Authoring}.{Spec}.{Impl} of the RuntimeVersion
-version = '9.9.0'
+version = '9.10.0'
 
 [dependencies]
 # Third-party dependencies

+ 3 - 3
runtime/src/lib.rs

@@ -83,7 +83,7 @@ pub const VERSION: RuntimeVersion = RuntimeVersion {
     spec_name: create_runtime_str!("joystream-node"),
     impl_name: create_runtime_str!("joystream-node"),
     authoring_version: 9,
-    spec_version: 9,
+    spec_version: 10,
     impl_version: 0,
     apis: crate::runtime_api::EXPORTED_RUNTIME_API_VERSIONS,
     transaction_version: 1,
@@ -340,8 +340,8 @@ impl pallet_session::historical::Trait for Runtime {
 pallet_staking_reward_curve::build! {
     const REWARD_CURVE: PiecewiseLinear<'static> = curve!(
         min_inflation: 0_050_000,
-        max_inflation: 0_750_000,
-        ideal_stake: 0_300_000,
+        max_inflation: 0_150_000,
+        ideal_stake: 0_250_000,
         falloff: 0_050_000,
         max_piece_count: 100,
         test_precision: 0_005_000,

+ 1 - 1
utils/chain-spec-builder/Cargo.toml

@@ -3,7 +3,7 @@ authors = ['Joystream contributors']
 build = 'build.rs'
 edition = '2018'
 name = 'chain-spec-builder'
-version = '3.1.1'
+version = '3.2.0'
 
 [dependencies]
 ansi_term = "0.12.1"

+ 1 - 0
utils/migration-scripts/.eslintignore

@@ -0,0 +1 @@
+src/sumer-giza/sumer-query-node/generated

+ 9 - 0
utils/migration-scripts/.gitignore

@@ -0,0 +1,9 @@
+*-debug.log
+*-error.log
+/.nyc_output
+/dist
+/lib
+/package-lock.json
+/tmp
+node_modules
+results

+ 2 - 0
utils/migration-scripts/.prettierignore

@@ -0,0 +1,2 @@
+results
+src/sumer-giza/sumer-query-node/generated

+ 116 - 0
utils/migration-scripts/README.md

@@ -0,0 +1,116 @@
+migrations
+==========
+
+Joystream migration scripts
+
+[![oclif](https://img.shields.io/badge/cli-oclif-brightgreen.svg)](https://oclif.io)
+[![Version](https://img.shields.io/npm/v/migrations.svg)](https://npmjs.org/package/migrations)
+[![Downloads/week](https://img.shields.io/npm/dw/migrations.svg)](https://npmjs.org/package/migrations)
+[![License](https://img.shields.io/npm/l/migrations.svg)](https://github.com/Joystream/joystream/blob/master/package.json)
+
+<!-- toc -->
+* [Usage](#usage)
+* [Commands](#commands)
+<!-- tocstop -->
+# Usage
+<!-- usage -->
+```sh-session
+$ npm install -g migration-scripts
+$ migration-scripts COMMAND
+running command...
+$ migration-scripts (-v|--version|version)
+migration-scripts/0.1.0 linux-x64 node-v14.16.1
+$ migration-scripts --help [COMMAND]
+USAGE
+  $ migration-scripts COMMAND
+...
+```
+<!-- usagestop -->
+# Commands
+<!-- commands -->
+* [`migration-scripts help [COMMAND]`](#migration-scripts-help-command)
+* [`migration-scripts sumer-giza:migrateContent`](#migration-scripts-sumer-gizamigratecontent)
+* [`migration-scripts sumer-giza:retryFailedUploads`](#migration-scripts-sumer-gizaretryfaileduploads)
+
+## `migration-scripts help [COMMAND]`
+
+display help for migration-scripts
+
+```
+USAGE
+  $ migration-scripts help [COMMAND]
+
+ARGUMENTS
+  COMMAND  command to show help for
+
+OPTIONS
+  --all  see all commands in CLI
+```
+
+_See code: [@oclif/plugin-help](https://github.com/oclif/plugin-help/blob/v3.2.3/src/commands/help.ts)_
+
+## `migration-scripts sumer-giza:migrateContent`
+
+```
+USAGE
+  $ migration-scripts sumer-giza:migrateContent
+
+OPTIONS
+  -c, --channelIds=channelIds                                  (required) Channel ids to migrate
+  --channelBatchSize=channelBatchSize                          [default: 20] Channel batch size
+
+  --dataDir=dataDir                                            [default: /tmp/joystream/sumer-giza-migration] Directory
+                                                               for storing data objects to upload
+
+  --forceChannelOwnerMemberId=forceChannelOwnerMemberId        Can be used to force a specific channel owner for all
+                                                               channels; makes it easy to test the script in a dev environment
+
+  --migrationStatePath=migrationStatePath                      [default:
+                                                               /home/leszek/projects/joystream/joystream-ws-2/utils/migr
+                                                               ation-scripts/results/sumer-giza] Path to migration
+                                                               results directory
+
+  --preferredDownloadSpEndpoints=preferredDownloadSpEndpoints  [default: https://storage-1.joystream.org/storage]
+                                                               Preferred storage node endpoints when downloading data
+                                                               objects
+
+  --queryNodeUri=queryNodeUri                                  [default: https://hydra.joystream.org/graphql] Query node
+                                                               uri
+
+  --sudoUri=sudoUri                                            [default: //Alice] Sudo key Substrate uri
+
+  --uploadSpBucketId=uploadSpBucketId                          [default: 0] Giza storage bucket id
+
+  --uploadSpEndpoint=uploadSpEndpoint                          [default: http://localhost:3333] Giza storage node
+                                                               endpoint to use for uploading
+
+  --videoBatchSize=videoBatchSize                              [default: 20] Video batch size
+
+  --wsProviderEndpointUri=wsProviderEndpointUri                [default: ws://localhost:9944] WS provider endpoint uri
+                                                               (Giza)
+```
+
+_See code: [src/commands/sumer-giza/migrateContent.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/sumer-giza/migrateContent.ts)_
+
+## `migration-scripts sumer-giza:retryFailedUploads`
+
+```
+USAGE
+  $ migration-scripts sumer-giza:retryFailedUploads
+
+OPTIONS
+  -f, --failedUploadsPath=failedUploadsPath      (required) Path to failed uploads file
+
+  --dataDir=dataDir                              [default: /tmp/joystream/sumer-giza-migration] Directory where data
+                                                 objects to upload are stored
+
+  --uploadSpBucketId=uploadSpBucketId            [default: 0] Giza storage bucket id
+
+  --uploadSpEndpoint=uploadSpEndpoint            [default: http://localhost:3333] Giza storage node endpoint to use for
+                                                 uploading
+
+  --wsProviderEndpointUri=wsProviderEndpointUri  [default: ws://localhost:9944] WS provider endpoint uri (Giza)
+```
+
+_See code: [src/commands/sumer-giza/retryFailedUploads.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/sumer-giza/retryFailedUploads.ts)_
+<!-- commandsstop -->

+ 3 - 0
utils/migration-scripts/bin/run

@@ -0,0 +1,3 @@
+#!/usr/bin/env node
+
+require('@oclif/command').run().then(require('@oclif/command/flush')).catch(require('@oclif/errors/handle'))

+ 3 - 0
utils/migration-scripts/bin/run.cmd

@@ -0,0 +1,3 @@
+@echo off
+
+node "%~dp0\run" %*

+ 93 - 0
utils/migration-scripts/package.json

@@ -0,0 +1,93 @@
+{
+  "name": "migration-scripts",
+  "description": "Joystream migration scripts",
+  "version": "0.1.0",
+  "author": "Joystream contributors",
+  "bin": {
+    "migration-scripts": "./bin/run"
+  },
+  "bugs": "https://github.com/Joystream/joystream/issues",
+  "dependencies": {
+    "@oclif/command": "^1",
+    "@oclif/config": "^1",
+    "@oclif/plugin-help": "^3.2.3",
+    "tslib": "^1",
+    "@joystream/types": "^0.17.0",
+    "@polkadot/api": "5.9.1",
+    "@polkadot/types": "5.9.1",
+    "@polkadot/keyring": "7.3.1",
+    "@polkadot/util": "7.3.1",
+    "@polkadot/util-crypto": "7.3.1",
+    "@apollo/client": "^3.2.5",
+    "cross-fetch": "^3.1.4",
+    "lodash": "^4.17.21",
+    "url-join": "^4.0.1",
+    "@types/url-join": "^4.0.1",
+    "axios": "^0.24.0",
+    "blake3": "^2.1.4",
+    "multihashes": "^4.0.3",
+    "moment": "^2.29.1",
+    "sharp": "^0.29.2",
+    "@types/sharp": "^0.29.2",
+    "form-data": "^4.0.0",
+    "node-cleanup": "^2.1.2",
+    "@types/node-cleanup": "^2.1.2"
+  },
+  "devDependencies": {
+    "@graphql-codegen/cli": "^1.21.4",
+    "@graphql-codegen/import-types-preset": "^1.18.1",
+    "@graphql-codegen/typescript": "^1.22.0",
+    "@graphql-codegen/typescript-document-nodes": "^1.17.11",
+    "@graphql-codegen/typescript-operations": "^1.17.16",
+    "@oclif/dev-cli": "^1",
+    "@types/node": "^14",
+    "globby": "^10",
+    "ts-node": "^8",
+    "typescript": "^3.3"
+  },
+  "engines": {
+    "node": ">=14.0.0",
+    "yarn": "^1.22.0"
+  },
+  "volta": {
+    "node": "14.16.1",
+    "yarn": "1.22.4"
+  },
+  "files": [
+    "/bin",
+    "/lib",
+    "/npm-shrinkwrap.json",
+    "/oclif.manifest.json"
+  ],
+  "homepage": "https://github.com/Joystream/joystream",
+  "keywords": [
+    "oclif"
+  ],
+  "license": "GPL-3.0-only",
+  "main": "lib/index.js",
+  "oclif": {
+    "commands": "./lib/commands",
+    "bin": "migration-scripts",
+    "plugins": [
+      "@oclif/plugin-help"
+    ],
+    "topics": {
+      "sumer-giza": {
+        "description": "Sumer-to-Giza migration scripts"
+      }
+    }
+  },
+  "repository": "Joystream/joystream",
+  "scripts": {
+    "postpack": "rm -f oclif.manifest.json",
+    "prepack": "rm -rf lib && tsc -b && oclif-dev manifest && oclif-dev readme",
+    "test": "echo NO TESTS",
+    "version": "oclif-dev readme && git add README.md",
+    "build": "tsc --build tsconfig.json",
+    "lint": "eslint ./src --ext .ts",
+    "checks": "tsc --noEmit --pretty && prettier ./ --check && yarn lint",
+    "format": "prettier ./ --write",
+    "generate:types:graphql": "yarn graphql-codegen -c ./src/sumer-giza/sumer-query-node/codegen.yml"
+  },
+  "types": "lib/index.d.ts"
+}

+ 142 - 0
utils/migration-scripts/src/RuntimeApi.ts

@@ -0,0 +1,142 @@
+import { types } from '@joystream/types'
+import { ApiPromise, SubmittableResult } from '@polkadot/api'
+import { SubmittableExtrinsic, AugmentedEvent, ApiOptions, AugmentedQuery } from '@polkadot/api/types'
+import { KeyringPair } from '@polkadot/keyring/types'
+import { Call } from '@polkadot/types/interfaces'
+import { Codec, IEvent } from '@polkadot/types/types'
+import { DispatchError } from '@polkadot/types/interfaces/system'
+import { UInt } from '@polkadot/types'
+import { Observable } from 'rxjs'
+import BN from 'bn.js'
+
+export class ExtrinsicFailedError extends Error {}
+
+// Joystream runtime API utility class, based on the distributor node CLI / Joystream CLI implementations
+
+type EventSection = keyof ApiPromise['events'] & string
+type EventMethod<Section extends EventSection> = keyof ApiPromise['events'][Section] & string
+type EventType<
+  Section extends EventSection,
+  Method extends EventMethod<Section>
+> = ApiPromise['events'][Section][Method] extends AugmentedEvent<'promise', infer T> ? IEvent<T> : never
+
+export class RuntimeApi extends ApiPromise {
+  constructor(options: Omit<ApiOptions, 'types'>) {
+    super({ ...options, types })
+  }
+
+  public findEvent<S extends EventSection, M extends EventMethod<S>>(
+    result: SubmittableResult,
+    section: S,
+    method: M
+  ): EventType<S, M> | undefined {
+    return result.findRecord(section, method)?.event as EventType<S, M> | undefined
+  }
+
+  public getEvent<S extends EventSection, M extends EventMethod<S>>(
+    result: SubmittableResult,
+    section: S,
+    method: M
+  ): EventType<S, M> {
+    const event = this.findEvent(result, section, method)
+    if (!event) {
+      throw new Error(`Cannot find expected ${section}.${method} event in result: ${JSON.stringify(result.toHuman())}`)
+    }
+    return event
+  }
+
+  public findEvents<S extends EventSection, M extends EventMethod<S>>(
+    result: SubmittableResult,
+    section: S,
+    method: M,
+    expectedCount?: number
+  ): EventType<S, M>[] {
+    const events = result.filterRecords(section, method).map((r) => r.event)
+    if (expectedCount && events.length !== expectedCount) {
+      throw new Error(
+        `Unexpected count of ${section}.${method} events in result: ${JSON.stringify(result.toHuman())}. ` +
+          `Expected: ${expectedCount}, Got: ${events.length}`
+      )
+    }
+    return (events.sort((a, b) => new BN(a.index).cmp(new BN(b.index))) as unknown) as EventType<S, M>[]
+  }
+
+  private formatDispatchError(err: DispatchError): string {
+    try {
+      const { name, docs } = this.registry.findMetaError(err.asModule)
+      return `${name} (${docs.join(', ')})`
+    } catch (e) {
+      return err.toString()
+    }
+  }
+
+  async entriesByIds<IDType extends UInt, ValueType extends Codec>(
+    apiMethod: AugmentedQuery<'promise', (key: IDType) => Observable<ValueType>, [IDType]>
+  ): Promise<[IDType, ValueType][]> {
+    const entries: [IDType, ValueType][] = (await apiMethod.entries()).map(([storageKey, value]) => [
+      storageKey.args[0] as IDType,
+      value,
+    ])
+
+    return entries.sort((a, b) => a[0].toNumber() - b[0].toNumber())
+  }
+
+  sendExtrinsic(keyPair: KeyringPair, tx: SubmittableExtrinsic<'promise'>): Promise<SubmittableResult> {
+    let txName = `${tx.method.section}.${tx.method.method}`
+    if (txName === 'sudo.sudo') {
+      const innerCall = tx.args[0] as Call
+      txName = `sudo.sudo(${innerCall.section}.${innerCall.method})`
+    }
+    console.log(`Sending ${txName} extrinsic from ${keyPair.address}`)
+    return new Promise((resolve, reject) => {
+      let unsubscribe: () => void
+      tx.signAndSend(keyPair, {}, (result) => {
+        if (!result || !result.status) {
+          return
+        }
+
+        if (result.status.isInBlock) {
+          unsubscribe()
+          result.events
+            .filter(({ event }) => event.section === 'system')
+            .forEach(({ event }) => {
+              if (event.method === 'ExtrinsicFailed') {
+                const dispatchError = event.data[0] as DispatchError
+                reject(
+                  new ExtrinsicFailedError(`Extrinsic execution error: ${this.formatDispatchError(dispatchError)}`)
+                )
+              } else if (event.method === 'ExtrinsicSuccess') {
+                if (txName === 'sudo.sudo') {
+                  const sudidEvent = this.getEvent(result, 'sudo', 'Sudid')
+                  const [dispatchResult] = sudidEvent.data
+                  if (dispatchResult.isErr) {
+                    return reject(
+                      new ExtrinsicFailedError(
+                        `Sudo extrinsic execution error! ${this.formatDispatchError(dispatchResult.asErr)}`
+                      )
+                    )
+                  }
+                }
+
+                if (txName === 'sudo.sudoAs') {
+                  const sudoAsDoneEvent = this.getEvent(result, 'sudo', 'SudoAsDone')
+                  const [sudoAsDone] = sudoAsDoneEvent.data
+                  if (sudoAsDone.isFalse) {
+                    return reject(new ExtrinsicFailedError(`SudoAs failed!`))
+                  }
+                }
+
+                resolve(result)
+              }
+            })
+        } else if (result.isError) {
+          reject(new ExtrinsicFailedError('Extrinsic execution error!'))
+        }
+      })
+        .then((unsubFunc) => (unsubscribe = unsubFunc))
+        .catch((e) =>
+          reject(new ExtrinsicFailedError(`Cannot send the extrinsic: ${e.message ? e.message : JSON.stringify(e)}`))
+        )
+    })
+  }
+}
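
A minimal usage sketch for `RuntimeApi.sendExtrinsic` (assumes a local node at `ws://localhost:9944` and the `//Alice` dev key; illustration only, not part of the diff):

```ts
import { WsProvider } from '@polkadot/rpc-provider'
import { Keyring } from '@polkadot/keyring'
import { RuntimeApi } from './RuntimeApi'

async function main(): Promise<void> {
  const api = new RuntimeApi({ provider: new WsProvider('ws://localhost:9944') })
  await api.isReadyOrError
  // WASM crypto is initialized once the api is ready, so sr25519 keys can be derived now
  const alice = new Keyring({ type: 'sr25519' }).createFromUri('//Alice')
  // Resolves once the extrinsic is in a block and no ExtrinsicFailed event was emitted
  const result = await api.sendExtrinsic(alice, api.tx.balances.transfer(alice.address, 1_000))
  console.log(`Included in block: ${result.status.asInBlock.toHex()}`)
}

main().catch(console.error)
```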

+ 73 - 0
utils/migration-scripts/src/commands/sumer-giza/migrateContent.ts

@@ -0,0 +1,73 @@
+import { Command, flags } from '@oclif/command'
+import path from 'path'
+import os from 'os'
+import { ContentMigration } from '../../sumer-giza/ContentMigration'
+
+export class MigrateContentCommand extends Command {
+  static flags = {
+    queryNodeUri: flags.string({
+      description: 'Query node uri',
+      default: 'https://hydra.joystream.org/graphql',
+    }),
+    wsProviderEndpointUri: flags.string({
+      description: 'WS provider endpoint uri (Giza)',
+      default: 'ws://localhost:9944',
+    }),
+    sudoUri: flags.string({
+      description: 'Sudo key Substrate uri',
+      default: '//Alice',
+    }),
+    channelIds: flags.integer({
+      char: 'c',
+      multiple: true,
+      description: 'Channel ids to migrate',
+      required: true,
+    }),
+    dataDir: flags.string({
+      description: 'Directory for storing data objects to upload',
+      default: path.join(os.tmpdir(), 'joystream/sumer-giza-migration'),
+    }),
+    channelBatchSize: flags.integer({
+      description: 'Channel batch size',
+      default: 20,
+    }),
+    videoBatchSize: flags.integer({
+      description: 'Video batch size',
+      default: 20,
+    }),
+    forceChannelOwnerMemberId: flags.integer({
+      description:
+        'Can be used to force a specific channel owner for all channels; makes it easy to test the script in a dev environment',
+      required: false,
+    }),
+    preferredDownloadSpEndpoints: flags.string({
+      multiple: true,
+      description: 'Preferred storage node endpoints when downloading data objects',
+      default: ['https://storage-1.joystream.org/storage'],
+    }),
+    uploadSpEndpoint: flags.string({
+      description: 'Giza storage node endpoint to use for uploading',
+      default: 'http://localhost:3333',
+    }),
+    uploadSpBucketId: flags.integer({
+      description: 'Giza storage bucket id',
+      default: 0,
+    }),
+    migrationStatePath: flags.string({
+      description: 'Path to migration results directory',
+      default: path.join(__dirname, '../../../results/sumer-giza'),
+    }),
+  }
+
+  async run(): Promise<void> {
+    const opts = this.parse(MigrateContentCommand).flags
+    try {
+      const migration = new ContentMigration(opts)
+      await migration.run()
+    } catch (e) {
+      console.error(e)
+      this.exit(-1)
+    }
+    this.exit(0)
+  }
+}

+ 51 - 0
utils/migration-scripts/src/commands/sumer-giza/retryFailedUploads.ts

@@ -0,0 +1,51 @@
+import { Command, flags } from '@oclif/command'
+import path from 'path'
+import os from 'os'
+import { WsProvider } from '@polkadot/rpc-provider'
+import { RuntimeApi } from '../../RuntimeApi'
+import { AssetsManager } from '../../sumer-giza/AssetsManager'
+
+export class RetryFailedUploadsCommand extends Command {
+  static flags = {
+    wsProviderEndpointUri: flags.string({
+      description: 'WS provider endpoint uri (Giza)',
+      default: 'ws://localhost:9944',
+    }),
+    dataDir: flags.string({
+      description: 'Directory where data objects to upload are stored',
+      default: path.join(os.tmpdir(), 'joystream/sumer-giza-migration'),
+    }),
+    uploadSpEndpoint: flags.string({
+      description: 'Giza storage node endpoint to use for uploading',
+      default: 'http://localhost:3333',
+    }),
+    uploadSpBucketId: flags.integer({
+      description: 'Giza storage bucket id',
+      default: 0,
+    }),
+    failedUploadsPath: flags.string({
+      char: 'f',
+      description: 'Path to failed uploads file',
+      required: true,
+    }),
+  }
+
+  async run(): Promise<void> {
+    const opts = this.parse(RetryFailedUploadsCommand).flags
+    try {
+      const provider = new WsProvider(opts.wsProviderEndpointUri)
+      const api = new RuntimeApi({ provider })
+      await api.isReadyOrError
+      const assetsManager = await AssetsManager.create({
+        api,
+        config: opts,
+      })
+      assetsManager.loadQueue(opts.failedUploadsPath)
+      await assetsManager.processQueuedUploads()
+    } catch (e) {
+      console.error(e)
+      this.exit(-1)
+    }
+    this.exit(0)
+  }
+}

+ 1 - 0
utils/migration-scripts/src/index.ts

@@ -0,0 +1 @@
+export { run } from '@oclif/command'

+ 308 - 0
utils/migration-scripts/src/sumer-giza/AssetsManager.ts

@@ -0,0 +1,308 @@
+import { DataObjectFieldsFragment } from './sumer-query-node/generated/queries'
+import BN from 'bn.js'
+import urljoin from 'url-join'
+import axios from 'axios'
+import fs from 'fs'
+import path from 'path'
+import { BagId, DataObjectCreationParameters, DataObjectId, UploadParameters } from '@joystream/types/storage'
+import { IEvent } from '@polkadot/types/types'
+import { Vec } from '@polkadot/types'
+import { Balance } from '@polkadot/types/interfaces'
+import FormData from 'form-data'
+import { ImageResizer } from './ImageResizer'
+import { QueryNodeApi } from './sumer-query-node/api'
+import { RuntimeApi } from '../RuntimeApi'
+import { ContentHash } from './ContentHash'
+import { promisify } from 'util'
+import { createType } from '@joystream/types'
+import { Readable, pipeline } from 'stream'
+import _ from 'lodash'
+
+export type AssetsManagerConfig = {
+  preferredDownloadSpEndpoints?: string[]
+  uploadSpBucketId: number
+  uploadSpEndpoint: string
+  dataDir: string
+}
+
+export type AssetsManagerParams = {
+  api: RuntimeApi
+  queryNodeApi?: QueryNodeApi
+  config: AssetsManagerConfig
+}
+
+export type AssetsManagerLoadableParams = {
+  dataObjectFeePerMB: BN
+  sumerStorageProviderEndpoints: string[]
+}
+
+export type AssetsToPrepare = {
+  [name: string]: {
+    data?: DataObjectFieldsFragment
+    targetSize?: [number, number]
+  }
+}
+
+export type PreparedAsset = {
+  params: DataObjectCreationParameters
+  index: number
+}
+
+export class AssetsManager {
+  private api: RuntimeApi
+  private config: AssetsManagerConfig
+  public readonly dataObjectFeePerMB: BN
+  private sumerStorageProviderEndpoints: string[]
+  private resizer: ImageResizer
+  private queuedUploads: Set<string>
+  private isQueueProcessing = false
+
+  public get queueSize(): number {
+    return this.queuedUploads.size
+  }
+
+  public static async create(params: AssetsManagerParams): Promise<AssetsManager> {
+    const { api } = params
+    const dataObjectFeePerMB = await api.query.storage.dataObjectPerMegabyteFee()
+    const sumerStorageProviderEndpoints = params.queryNodeApi
+      ? await AssetsManager.getSumerStorageProviderEndpoints(params.queryNodeApi)
+      : []
+    return new AssetsManager(params, { dataObjectFeePerMB, sumerStorageProviderEndpoints })
+  }
+
+  private constructor(params: AssetsManagerParams, loadableParams: AssetsManagerLoadableParams) {
+    const { api, config } = params
+    const { dataObjectFeePerMB, sumerStorageProviderEndpoints } = loadableParams
+    this.dataObjectFeePerMB = dataObjectFeePerMB
+    this.sumerStorageProviderEndpoints = sumerStorageProviderEndpoints
+    this.api = api
+    this.config = config
+    this.resizer = new ImageResizer()
+    this.queuedUploads = new Set()
+    fs.mkdirSync(this.tmpAssetPath(''), { recursive: true })
+    fs.mkdirSync(this.assetPath(''), { recursive: true })
+  }
+
+  private static async getSumerStorageProviderEndpoints(queryNodeApi: QueryNodeApi): Promise<string[]> {
+    const endpoints: string[] = []
+    const workers = await queryNodeApi.getStorageWorkers()
+    workers.forEach((w) => w.metadata && endpoints.push(w.metadata))
+    return endpoints
+  }
+
+  private tmpAssetPath(contentId: string): string {
+    return path.join(this.config.dataDir, 'tmp', contentId)
+  }
+
+  private assetPath(contentHash: string): string {
+    return path.join(this.config.dataDir, contentHash)
+  }
+
+  public calcDataObjectsFee(params: DataObjectCreationParameters[]): BN {
+    const { dataObjectFeePerMB, api } = this
+    const deletionPrize = api.consts.storage.dataObjectDeletionPrize
+    const totalSize = params
+      .reduce((a, b) => {
+        return a.add(b.getField('size'))
+      }, new BN(0))
+      .toNumber()
+    const totalStorageFee = dataObjectFeePerMB.muln(Math.ceil(totalSize / 1024 / 1024))
+    const totalDeletionPrize = deletionPrize.muln(params.length)
+    return totalStorageFee.add(totalDeletionPrize)
+  }
+
+  private async prepareAsset(
+    data: DataObjectFieldsFragment,
+    targetSize?: [number, number]
+  ): Promise<DataObjectCreationParameters | undefined> {
+    if (data.liaisonJudgement !== 'ACCEPTED') {
+      console.error(
+        `Data object ${data.joystreamContentId} has invalid liaison judgement: ${data.liaisonJudgement}. Skipping...`
+      )
+      return
+    }
+    let objectSize = new BN(data.size).toNumber()
+    let path: string
+    try {
+      path = await this.fetchAssetWithRetry(data.joystreamContentId, objectSize)
+    } catch (e) {
+      console.error(`Data object ${data.joystreamContentId} was not fetched: ${(e as Error).message}`)
+      return
+    }
+    if (targetSize) {
+      try {
+        await this.resizer.resize(path, targetSize)
+        // Re-establish the object size after resizing
+        objectSize = fs.statSync(path).size
+      } catch (e) {
+        console.error(
+          `Could not resize image ${path} to target size ${targetSize[0]}/${targetSize[1]}: ${(e as Error).message}`
+        )
+      }
+    }
+    const hash = await this.calcContentHash(path)
+    // Move asset to final path
+    fs.renameSync(path, this.assetPath(hash))
+    return createType<DataObjectCreationParameters, 'DataObjectCreationParameters'>('DataObjectCreationParameters', {
+      ipfsContentId: hash,
+      size: objectSize,
+    })
+  }
+
+  public async prepareAssets<T extends AssetsToPrepare>(
+    assetsToPrepare: T
+  ): Promise<{ [K in keyof T]?: PreparedAsset }> {
+    const preparedAssets: { [K in keyof T]?: PreparedAsset } = {}
+    let assetIndex = 0
+    await Promise.all(
+      Object.entries(assetsToPrepare).map(async ([assetName, { data, targetSize }]) => {
+        if (!data) {
+          return
+        }
+        const params = await this.prepareAsset(data, targetSize)
+        if (!params) {
+          return
+        }
+        preparedAssets[assetName as keyof T] = { params, index: assetIndex++ }
+      })
+    )
+    return preparedAssets
+  }
+
+  private calcContentHash(assetPath: string): Promise<string> {
+    return new Promise<string>((resolve, reject) => {
+      const fReadStream = fs.createReadStream(assetPath)
+      const hash = new ContentHash()
+      fReadStream.on('data', (chunk) => hash.update(chunk))
+      fReadStream.on('end', () => resolve(hash.digest()))
+      fReadStream.on('error', (err) => reject(err))
+    })
+  }
+
+  private async fetchAsset(endpoint: string, contentId: string, expectedSize: number): Promise<string> {
+    const assetEndpoint = urljoin(endpoint, `asset/v0/${contentId}`)
+    const response = await axios.get<Readable>(assetEndpoint, { responseType: 'stream', timeout: 5000 })
+    const pipe = promisify(pipeline)
+    const destPath = this.tmpAssetPath(contentId)
+    const fWriteStream = fs.createWriteStream(destPath)
+    await pipe(response.data, fWriteStream)
+    const { size } = fs.statSync(destPath)
+    if (size !== expectedSize) {
+      throw new Error('Invalid file size')
+    }
+    return destPath
+  }
+
+  private async fetchAssetWithRetry(contentId: string, expectedSize: number): Promise<string> {
+    const preferredDownloadSpEndpoints = this.config.preferredDownloadSpEndpoints || []
+    const alternativeEndpoints = _.difference(this.sumerStorageProviderEndpoints, preferredDownloadSpEndpoints)
+    const endpoints = _.shuffle(preferredDownloadSpEndpoints).concat(_.shuffle(alternativeEndpoints))
+    let lastError: Error | undefined
+    for (const endpoint of endpoints) {
+      try {
+        const tmpAssetPath = await this.fetchAsset(endpoint, contentId, expectedSize)
+        return tmpAssetPath
+      } catch (e) {
+        lastError = e as Error
+        continue
+      }
+    }
+    throw new Error(
+      `Could not fetch asset ${contentId} from any provider. Last error: ${
+        lastError && this.reqErrorMessage(lastError)
+      }`
+    )
+  }
+
+  private reqErrorMessage(e: unknown): string {
+    if (axios.isAxiosError(e)) {
+      return e.response ? JSON.stringify(e.response.data) : e.message
+    }
+    return e instanceof Error ? e.message : JSON.stringify(e)
+  }
+
+  private async uploadDataObject(bagId: string, dataObjectId: number): Promise<void> {
+    const {
+      config: { uploadSpBucketId, uploadSpEndpoint },
+    } = this
+    const dataObject = await this.api.query.storage.dataObjectsById(
+      { Dynamic: { Channel: bagId.split(':')[2] } },
+      dataObjectId
+    )
+    const dataPath = this.assetPath(Buffer.from(dataObject.ipfsContentId.toHex().replace('0x', ''), 'hex').toString())
+    if (!fs.existsSync(dataPath)) {
+      throw new Error(`Cannot upload object: ${dataObjectId}: ${dataPath} not found`)
+    }
+
+    const fileStream = fs.createReadStream(dataPath)
+    const formData = new FormData()
+    formData.append('dataObjectId', dataObjectId)
+    formData.append('storageBucketId', uploadSpBucketId)
+    formData.append('bagId', bagId)
+    formData.append('file', fileStream, { filename: path.basename(dataPath) })
+    let uploadSuccessful: boolean
+    try {
+      await axios({
+        method: 'POST',
+        url: urljoin(uploadSpEndpoint, 'api/v1/files'),
+        data: formData,
+        maxBodyLength: Infinity,
+        headers: {
+          'content-type': 'multipart/form-data',
+          ...formData.getHeaders(),
+        },
+      })
+      uploadSuccessful = true
+    } catch (e) {
+      uploadSuccessful = false
+      const msg = this.reqErrorMessage(e)
+      console.error(`Upload of object ${dataObjectId} to ${uploadSpEndpoint} failed: ${msg}`)
+    }
+
+    if (uploadSuccessful) {
+      // Remove asset from queuedUploads and temporary storage
+      this.queuedUploads.delete(`${bagId}|${dataObjectId}`)
+      try {
+        fs.rmSync(dataPath)
+      } catch (e) {
+        console.error(`Could not remove file "${dataPath}" after successful upload...`)
+      }
+    }
+  }
+
+  public async processQueuedUploads(): Promise<void> {
+    if (this.isQueueProcessing) {
+      throw new Error('Uploads queue is already being processed!')
+    }
+    this.isQueueProcessing = true
+    console.log(`Uploading ${this.queueSize} data objects...`)
+    await Promise.all(
+      Array.from(this.queuedUploads).map((queuedUpload) => {
+        const [bagId, objectId] = queuedUpload.split('|')
+        return this.uploadDataObject(bagId, parseInt(objectId))
+      })
+    )
+    this.isQueueProcessing = false
+  }
+
+  public loadQueue(queueFilePath: string): void {
+    const queue: string[] = JSON.parse(fs.readFileSync(queueFilePath).toString())
+    this.queuedUploads = new Set(queue)
+  }
+
+  public saveQueue(queueFilePath: string): void {
+    fs.writeFileSync(queueFilePath, JSON.stringify(Array.from(this.queuedUploads)))
+  }
+
+  private queueUpload(bagId: BagId, objectId: DataObjectId): void {
+    const bagIdStr = `dynamic:channel:${bagId.asType('Dynamic').asType('Channel').toString()}`
+    this.queuedUploads.add(`${bagIdStr}|${objectId.toString()}`)
+  }
+
+  public async uploadFromEvents(events: IEvent<[Vec<DataObjectId>, UploadParameters, Balance]>[]): Promise<void> {
+    events.forEach(({ data: [objectIds, uploadParams] }) => {
+      objectIds.forEach((objectId) => this.queueUpload(uploadParams.bagId, objectId))
+    })
+    await this.processQueuedUploads()
+  }
+}
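
The fee charged for a batch of new data objects is `dataObjectFeePerMB * ceil(totalSize / MiB) + deletionPrize * objectCount`, as computed by `calcDataObjectsFee` above. A worked sketch with made-up chain values (illustration only):

```ts
import BN from 'bn.js'

// Made-up sample values; the real ones are read from
// storage.dataObjectPerMegabyteFee and consts.storage.dataObjectDeletionPrize
const dataObjectFeePerMB = new BN(10)
const deletionPrize = new BN(100)
const objectSizes = [1_500_000, 3_000_000] // bytes

const totalSize = objectSizes.reduce((a, b) => a + b, 0) // 4_500_000
const totalStorageFee = dataObjectFeePerMB.muln(Math.ceil(totalSize / 1024 / 1024)) // 10 * 5 = 50
const totalDeletionPrize = deletionPrize.muln(objectSizes.length) // 100 * 2 = 200
console.log(totalStorageFee.add(totalDeletionPrize).toString()) // 250
```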

+ 40 - 0
utils/migration-scripts/src/sumer-giza/AssetsMigration.ts

@@ -0,0 +1,40 @@
+import { BaseMigration, BaseMigrationConfig, BaseMigrationParams, MigrationResult } from './BaseMigration'
+import { AssetsManager, AssetsManagerConfig } from './AssetsManager'
+
+export type AssetsMigrationConfig = BaseMigrationConfig & AssetsManagerConfig
+
+export type AssetsMigrationParams = BaseMigrationParams & {
+  config: AssetsMigrationConfig
+}
+
+export abstract class AssetsMigration extends BaseMigration {
+  protected config: AssetsMigrationConfig
+  protected assetsManager!: AssetsManager
+
+  public constructor({ api, queryNodeApi, config }: AssetsMigrationParams) {
+    super({ api, queryNodeApi, config })
+    this.config = config
+  }
+
+  public async init(): Promise<void> {
+    await super.init()
+    this.assetsManager = await AssetsManager.create({
+      api: this.api,
+      queryNodeApi: this.queryNodeApi,
+      config: this.config,
+    })
+  }
+
+  public abstract run(): Promise<MigrationResult>
+
+  protected saveMigrationState(): void {
+    super.saveMigrationState()
+    if (this.assetsManager.queueSize) {
+      const failedUploadsFilePath = this.getMigrationStateFilePath().replace(
+        '.json',
+        `FailedUploads_${Date.now()}.json`
+      )
+      this.assetsManager.saveQueue(failedUploadsFilePath)
+    }
+  }
+}

+ 121 - 0
utils/migration-scripts/src/sumer-giza/BaseMigration.ts

@@ -0,0 +1,121 @@
+import { SubmittableResult } from '@polkadot/api'
+import { KeyringPair } from '@polkadot/keyring/types'
+import { QueryNodeApi } from './sumer-query-node/api'
+import { RuntimeApi } from '../RuntimeApi'
+import { Keyring } from '@polkadot/keyring'
+import path from 'path'
+import nodeCleanup from 'node-cleanup'
+import _ from 'lodash'
+import fs from 'fs'
+
+export type MigrationResult = {
+  idsMap: Map<number, number>
+  failedMigrations: number[]
+}
+
+export type MigrationStateJson = {
+  idsMapEntries: [number, number][]
+  failedMigrations: number[]
+}
+
+export type BaseMigrationConfig = {
+  migrationStatePath: string
+  sudoUri: string
+}
+
+export type BaseMigrationParams = {
+  api: RuntimeApi
+  queryNodeApi: QueryNodeApi
+  config: BaseMigrationConfig
+}
+
+export abstract class BaseMigration {
+  abstract readonly name: string
+  protected api: RuntimeApi
+  protected queryNodeApi: QueryNodeApi
+  protected sudo!: KeyringPair
+  protected config: BaseMigrationConfig
+  protected failedMigrations: Set<number>
+  protected idsMap: Map<number, number>
+
+  public constructor({ api, queryNodeApi, config }: BaseMigrationParams) {
+    this.api = api
+    this.queryNodeApi = queryNodeApi
+    this.config = config
+    this.failedMigrations = new Set()
+    this.idsMap = new Map()
+    fs.mkdirSync(config.migrationStatePath, { recursive: true })
+  }
+
+  protected getMigrationStateFilePath(): string {
+    const { migrationStatePath } = this.config
+    return path.join(migrationStatePath, `${_.camelCase(this.name)}.json`)
+  }
+
+  public async init(): Promise<void> {
+    this.loadMigrationState()
+    nodeCleanup(() => this.saveMigrationState())
+    await this.loadSudoKey()
+  }
+
+  public abstract run(): Promise<MigrationResult>
+
+  protected getMigrationStateJson(): MigrationStateJson {
+    return {
+      idsMapEntries: Array.from(this.idsMap.entries()),
+      failedMigrations: Array.from(this.failedMigrations),
+    }
+  }
+
+  protected loadMigrationState(): void {
+    const stateFilePath = this.getMigrationStateFilePath()
+    if (fs.existsSync(stateFilePath)) {
+      const migrationStateJson = fs.readFileSync(stateFilePath).toString()
+      const migrationState: MigrationStateJson = JSON.parse(migrationStateJson)
+      this.idsMap = new Map(migrationState.idsMapEntries)
+    }
+  }
+
+  protected saveMigrationState(): void {
+    const stateFilePath = this.getMigrationStateFilePath()
+    const migrationState = this.getMigrationStateJson()
+    fs.writeFileSync(stateFilePath, JSON.stringify(migrationState, undefined, 2))
+  }
+
+  private async loadSudoKey() {
+    const { sudoUri } = this.config
+    const keyring = new Keyring({ type: 'sr25519' })
+    this.sudo = keyring.createFromUri(sudoUri)
+    const sudoKey = await this.api.query.sudo.key()
+    if (sudoKey.toString() !== this.sudo.address) {
+      throw new Error(`Invalid sudo key! Expected: ${sudoKey.toString()}, Got: ${this.sudo.address}`)
+    }
+  }
+
+  protected extractFailedSudoAsMigrations<T extends { id: string }>(result: SubmittableResult, batch: T[]): void {
+    const { api } = this
+    const sudoAsDoneEvents = api.findEvents(result, 'sudo', 'SudoAsDone')
+    if (sudoAsDoneEvents.length !== batch.length) {
+      throw new Error(`Could not extract failed migrations from: ${JSON.stringify(result.toHuman())}`)
+    }
+    const failedIds: number[] = []
+    sudoAsDoneEvents.forEach(({ data: [sudoAsDone] }, i) => {
+      if (sudoAsDone.isFalse) {
+        const id = parseInt(batch[i].id)
+        failedIds.push(id)
+        this.failedMigrations.add(id)
+      }
+    })
+    if (failedIds.length) {
+      console.error(`Failed to migrate:`, failedIds)
+    }
+  }
+
+  public getResult(): MigrationResult {
+    const { idsMap, failedMigrations } = this
+    return {
+      idsMap: new Map(idsMap.entries()),
+      failedMigrations: Array.from(failedMigrations),
+    }
+  }
+}
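
For reference, a hypothetical example of the state file written by `saveMigrationState` (the ids are invented; the shape follows `MigrationStateJson`):

```ts
import { MigrationStateJson } from './BaseMigration'

// Hypothetical contents of results/sumer-giza/channelsMigration.json
const exampleState: MigrationStateJson = {
  idsMapEntries: [
    [3, 1], // sumer channel 3 was recreated as giza channel 1
    [7, 2],
  ],
  failedMigrations: [12], // sumer channel 12 could not be migrated
}

console.log(JSON.stringify(exampleState, undefined, 2))
```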

+ 162 - 0
utils/migration-scripts/src/sumer-giza/ChannelsMigration.ts

@@ -0,0 +1,162 @@
+import { AssetsMigration, AssetsMigrationConfig, AssetsMigrationParams } from './AssetsMigration'
+import { ChannelMetadata } from '@joystream/metadata-protobuf'
+import { ChannelFieldsFragment } from './sumer-query-node/generated/queries'
+import { createType } from '@joystream/types'
+import Long from 'long'
+import { ChannelCreationParameters } from '@joystream/types/content'
+import { CHANNEL_AVATAR_TARGET_SIZE, CHANNEL_COVER_TARGET_SIZE } from './ImageResizer'
+import { ChannelId } from '@joystream/types/common'
+import _ from 'lodash'
+import { MigrationResult } from './BaseMigration'
+
+export type ChannelsMigrationConfig = AssetsMigrationConfig & {
+  channelIds: number[]
+  channelBatchSize: number
+  forceChannelOwnerMemberId: number | undefined
+}
+
+export type ChannelsMigrationParams = AssetsMigrationParams & {
+  config: ChannelsMigrationConfig
+  forcedChannelOwner: { id: string; controllerAccount: string } | undefined
+}
+
+export type ChannelsMigrationResult = MigrationResult & {
+  videoIds: number[]
+}
+
+export class ChannelMigration extends AssetsMigration {
+  name = 'Channels migration'
+  protected config: ChannelsMigrationConfig
+  protected videoIds: number[] = []
+  protected forcedChannelOwner: { id: string; controllerAccount: string } | undefined
+
+  public constructor(params: ChannelsMigrationParams) {
+    super(params)
+    this.config = params.config
+    this.forcedChannelOwner = params.forcedChannelOwner
+  }
+
+  private getChannelOwnerMember({ id, ownerMember }: ChannelFieldsFragment) {
+    if (!ownerMember) {
+      throw new Error(`Channel ownerMember missing: ${id}. Only member-owned channels are supported!`)
+    }
+
+    if (this.forcedChannelOwner) {
+      return this.forcedChannelOwner
+    }
+
+    return ownerMember
+  }
+
+  public async run(): Promise<ChannelsMigrationResult> {
+    await this.init()
+    const {
+      api,
+      config: { channelIds, channelBatchSize },
+    } = this
+    const ids = channelIds.sort((a, b) => a - b)
+    while (ids.length) {
+      const idsBatch = ids.splice(0, channelBatchSize)
+      console.log(`Fetching a batch of ${idsBatch.length} channels...`)
+      const channelsBatch = (await this.queryNodeApi.getChannelsByIds(idsBatch)).sort(
+        (a, b) => parseInt(a.id) - parseInt(b.id)
+      )
+      if (channelsBatch.length < idsBatch.length) {
+        console.error(
+          `Some channels could not be found: ${_.difference(
+            idsBatch,
+            channelsBatch.map((c) => parseInt(c.id))
+          )}`
+        )
+      }
+      const channelsToMigrate = channelsBatch.filter((c) => !this.idsMap.has(parseInt(c.id)))
+      if (channelsToMigrate.length < channelsBatch.length) {
+        console.log(
+          `${channelsToMigrate.length ? 'Some' : 'All'} channels in this batch were already migrated ` +
+            `(${channelsBatch.length - channelsToMigrate.length}/${channelsBatch.length})`
+        )
+      }
+      if (channelsToMigrate.length) {
+        const txs = _.flatten(await Promise.all(channelsToMigrate.map((c) => this.prepareChannel(c))))
+        const result = await api.sendExtrinsic(this.sudo, api.tx.utility.batch(txs))
+        const channelCreatedEvents = api.findEvents(result, 'content', 'ChannelCreated')
+        const newChannelIds: ChannelId[] = channelCreatedEvents.map((e) => e.data[1])
+        if (channelCreatedEvents.length !== channelsToMigrate.length) {
+          this.extractFailedSudoAsMigrations(result, channelsToMigrate)
+        }
+        const dataObjectsUploadedEvents = api.findEvents(result, 'storage', 'DataObjectsUploaded')
+        const newChannelMapEntries: [number, number][] = []
+        let newChannelIdIndex = 0
+        channelsToMigrate.forEach(({ id }) => {
+          if (this.failedMigrations.has(parseInt(id))) {
+            return
+          }
+          const newChannelId = newChannelIds[newChannelIdIndex++].toNumber()
+          this.idsMap.set(parseInt(id), newChannelId)
+          newChannelMapEntries.push([parseInt(id), newChannelId])
+        })
+        if (newChannelMapEntries.length) {
+          console.log('Channel map entries added!', newChannelMapEntries)
+          await this.assetsManager.uploadFromEvents(dataObjectsUploadedEvents)
+        }
+      }
+      const videoIdsToMigrate: number[] = channelsBatch.reduce<number[]>(
+        (res, { id, videos }) => (this.idsMap.has(parseInt(id)) ? res.concat(videos.map((v) => parseInt(v.id))) : res),
+        []
+      )
+      this.videoIds = this.videoIds.concat(videoIdsToMigrate)
+      if (videoIdsToMigrate.length) {
+        console.log(`Added ${videoIdsToMigrate.length} video ids to the list of videos to migrate`)
+      }
+    }
+    return {
+      ...this.getResult(),
+      videoIds: [...this.videoIds],
+    }
+  }
+
+  private async prepareChannel(channel: ChannelFieldsFragment) {
+    const { api } = this
+    const { avatarPhotoDataObject, coverPhotoDataObject, title, description, categoryId, isPublic, language } = channel
+
+    const ownerMember = this.getChannelOwnerMember(channel)
+
+    const assetsToPrepare = {
+      avatar: { data: avatarPhotoDataObject || undefined, targetSize: CHANNEL_AVATAR_TARGET_SIZE },
+      coverPhoto: { data: coverPhotoDataObject || undefined, targetSize: CHANNEL_COVER_TARGET_SIZE },
+    }
+    const preparedAssets = await this.assetsManager.prepareAssets(assetsToPrepare)
+    const meta = new ChannelMetadata({
+      title,
+      description,
+      category: categoryId ? Long.fromString(categoryId) : undefined,
+      avatarPhoto: preparedAssets.avatar?.index,
+      coverPhoto: preparedAssets.coverPhoto?.index,
+      isPublic,
+      language: language?.iso,
+    })
+    const assetsParams = Object.values(preparedAssets)
+      .sort((a, b) => a.index - b.index)
+      .map((a) => a.params)
+    const channelCreationParams = createType<ChannelCreationParameters, 'ChannelCreationParameters'>(
+      'ChannelCreationParameters',
+      {
+        assets: assetsParams.length
+          ? {
+              object_creation_list: assetsParams,
+              expected_data_size_fee: this.assetsManager.dataObjectFeePerMB,
+            }
+          : null,
+        meta: `0x${Buffer.from(ChannelMetadata.encode(meta).finish()).toString('hex')}`,
+      }
+    )
+    const feesToCover = this.assetsManager.calcDataObjectsFee(assetsParams)
+    return [
+      api.tx.balances.transferKeepAlive(ownerMember.controllerAccount, feesToCover),
+      api.tx.sudo.sudoAs(
+        ownerMember.controllerAccount,
+        api.tx.content.createChannel({ Member: ownerMember.id }, channelCreationParams)
+      ),
+    ]
+  }
+}

+ 22 - 0
utils/migration-scripts/src/sumer-giza/ContentHash.ts

@@ -0,0 +1,22 @@
+import { createHash, HashInput, NodeHash } from 'blake3'
+import { HashReader } from 'blake3/dist/wasm/nodejs'
+import { toB58String, encode } from 'multihashes'
+
+// Based on distributor node's implementation
+export class ContentHash {
+  private hash: NodeHash<HashReader>
+  public static readonly algorithm = 'blake3'
+
+  constructor() {
+    this.hash = createHash()
+  }
+
+  update(data: HashInput): this {
+    this.hash.update(data)
+    return this
+  }
+
+  digest(): string {
+    return toB58String(encode(this.hash.digest(), ContentHash.algorithm))
+  }
+}
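
A short usage sketch, hashing a file chunk-by-chunk the same way `AssetsManager.calcContentHash` does (the file path is hypothetical):

```ts
import fs from 'fs'
import { ContentHash } from './ContentHash'

const hash = new ContentHash()
fs.createReadStream('/tmp/example-asset')
  .on('data', (chunk) => hash.update(chunk))
  .on('end', () => console.log(hash.digest())) // base58-encoded blake3 multihash
```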

+ 69 - 0
utils/migration-scripts/src/sumer-giza/ContentMigration.ts

@@ -0,0 +1,69 @@
+import { WsProvider } from '@polkadot/api'
+import { QueryNodeApi } from './sumer-query-node/api'
+import { RuntimeApi } from '../RuntimeApi'
+import { VideosMigration } from './VideosMigration'
+import { ChannelMigration } from './ChannelsMigration'
+
+export type ContentMigrationConfig = {
+  queryNodeUri: string
+  wsProviderEndpointUri: string
+  sudoUri: string
+  channelIds: number[]
+  dataDir: string
+  channelBatchSize: number
+  videoBatchSize: number
+  forceChannelOwnerMemberId: number | undefined
+  preferredDownloadSpEndpoints?: string[]
+  uploadSpBucketId: number
+  uploadSpEndpoint: string
+  migrationStatePath: string
+}
+
+export class ContentMigration {
+  private api: RuntimeApi
+  private queryNodeApi: QueryNodeApi
+  private config: ContentMigrationConfig
+
+  constructor(config: ContentMigrationConfig) {
+    const { queryNodeUri, wsProviderEndpointUri } = config
+    const provider = new WsProvider(wsProviderEndpointUri)
+    this.api = new RuntimeApi({ provider })
+    this.queryNodeApi = new QueryNodeApi(queryNodeUri)
+    this.config = config
+  }
+
+  private async getForcedChannelOwner(): Promise<{ id: string; controllerAccount: string } | undefined> {
+    const { forceChannelOwnerMemberId } = this.config
+    if (forceChannelOwnerMemberId) {
+      const ownerMember = await this.api.query.members.membershipById(forceChannelOwnerMemberId)
+      if (ownerMember.isEmpty) {
+        throw new Error(`Membership by id ${forceChannelOwnerMemberId} not found!`)
+      }
+      return {
+        id: forceChannelOwnerMemberId.toString(),
+        controllerAccount: ownerMember.controller_account.toString(),
+      }
+    }
+    return undefined
+  }
+
+  public async run(): Promise<void> {
+    const { api, queryNodeApi, config } = this
+    await this.api.isReadyOrError
+    const forcedChannelOwner = await this.getForcedChannelOwner()
+    const { idsMap: channelsMap, videoIds } = await new ChannelMigration({
+      api,
+      queryNodeApi,
+      config,
+      forcedChannelOwner,
+    }).run()
+    await new VideosMigration({
+      api,
+      queryNodeApi,
+      config,
+      channelsMap,
+      videoIds,
+      forcedChannelOwner,
+    }).run()
+  }
+}

+ 30 - 0
utils/migration-scripts/src/sumer-giza/ImageResizer.ts

@@ -0,0 +1,30 @@
+import sharp from 'sharp'
+import fs from 'fs'
+
+export const CHANNEL_AVATAR_TARGET_SIZE: [number, number] = [256, 256]
+export const VIDEO_THUMB_TARGET_SIZE: [number, number] = [640, 360]
+export const CHANNEL_COVER_TARGET_SIZE: [number, number] = [1920, 480]
+
+export class ImageResizer {
+  resize(imagePath: string, target: [number, number]): Promise<void> {
+    return new Promise((resolve, reject) => {
+      const [width, height] = target
+      const targetPath = `${imagePath}-resized`
+      sharp(imagePath)
+        .resize({
+          width,
+          height,
+          fit: 'outside',
+        })
+        .extract({ left: 0, top: 0, width, height })
+        .webp()
+        .toFile(targetPath, (err) => {
+          if (err) {
+            return reject(err)
+          }
+          fs.renameSync(targetPath, imagePath)
+          resolve()
+        })
+    })
+  }
+}
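
A usage sketch (the file path is hypothetical): resize a channel cover in place to the migration's 1920x480 target. Note that `resize` overwrites the original file with the cropped webp output:

```ts
import { ImageResizer, CHANNEL_COVER_TARGET_SIZE } from './ImageResizer'

new ImageResizer()
  .resize('/tmp/cover.png', CHANNEL_COVER_TARGET_SIZE)
  .then(() => console.log('Cover resized, cropped and converted to webp in place'))
  .catch(console.error)
```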

+ 192 - 0
utils/migration-scripts/src/sumer-giza/VideosMigration.ts

@@ -0,0 +1,192 @@
+import { VideoMetadata } from '@joystream/metadata-protobuf'
+import { VideoFieldsFragment } from './sumer-query-node/generated/queries'
+import _ from 'lodash'
+import { createType } from '@joystream/types'
+import Long from 'long'
+import { VideoCreationParameters, VideoId } from '@joystream/types/content'
+import moment from 'moment'
+import { VIDEO_THUMB_TARGET_SIZE } from './ImageResizer'
+import { AssetsMigration, AssetsMigrationConfig, AssetsMigrationParams } from './AssetsMigration'
+import { MigrationResult } from './BaseMigration'
+
+export type VideosMigrationConfig = AssetsMigrationConfig & {
+  videoBatchSize: number
+}
+
+export type VideosMigrationParams = AssetsMigrationParams & {
+  config: VideosMigrationConfig
+  videoIds: number[]
+  channelsMap: Map<number, number>
+  forcedChannelOwner: { id: string; controllerAccount: string } | undefined
+}
+
+export class VideosMigration extends AssetsMigration {
+  name = 'Videos migration'
+  protected config: VideosMigrationConfig
+  protected channelsMap: Map<number, number>
+  protected videoIds: number[]
+  protected forcedChannelOwner: { id: string; controllerAccount: string } | undefined
+
+  public constructor({ api, queryNodeApi, config, videoIds, channelsMap, forcedChannelOwner }: VideosMigrationParams) {
+    super({ api, queryNodeApi, config })
+    this.config = config
+    this.channelsMap = channelsMap
+    this.videoIds = videoIds
+    this.forcedChannelOwner = forcedChannelOwner
+  }
+
+  private getNewChannelId(oldChannelId: number): number {
+    const newChannelId = this.channelsMap.get(oldChannelId)
+    if (!newChannelId) {
+      throw new Error(`Missing new channel id for channel ${oldChannelId} in the channelsMap!`)
+    }
+    return newChannelId
+  }
+
+  public async run(): Promise<MigrationResult> {
+    await this.init()
+    const {
+      api,
+      videoIds,
+      config: { videoBatchSize },
+    } = this
+    const idsToMigrate = videoIds.filter((id) => !this.idsMap.has(id)).sort((a, b) => a - b)
+    if (idsToMigrate.length < videoIds.length) {
+      const alreadyMigratedVideosNum = videoIds.length - idsToMigrate.length
+      console.log(
+        (idsToMigrate.length ? `${alreadyMigratedVideosNum}/${videoIds.length}` : 'All') +
+          ' videos already migrated, skipping...'
+      )
+    }
+    while (idsToMigrate.length) {
+      const idsBatch = idsToMigrate.splice(0, videoBatchSize)
+      console.log(`Fetching a batch of ${idsBatch.length} videos...`)
+      const videosBatch = (await this.queryNodeApi.getVideosByIds(idsBatch)).sort(
+        (a, b) => parseInt(a.id) - parseInt(b.id)
+      )
+      if (videosBatch.length < idsBatch.length) {
+        console.error(
+          `Some videos could not be found: ${_.difference(
+            idsBatch,
+            videosBatch.map((v) => parseInt(v.id))
+          )}`
+        )
+      }
+      const txs = _.flatten(await Promise.all(videosBatch.map((v) => this.prepareVideo(v))))
+      const result = await api.sendExtrinsic(this.sudo, api.tx.utility.batch(txs))
+      const videoCreatedEvents = api.findEvents(result, 'content', 'VideoCreated')
+      const newVideoIds: VideoId[] = videoCreatedEvents.map((e) => e.data[2])
+      if (videoCreatedEvents.length !== videosBatch.length) {
+        this.extractFailedSudoAsMigrations(result, videosBatch)
+      }
+
+      const dataObjectsUploadedEvents = api.findEvents(result, 'storage', 'DataObjectsUploaded')
+      const newVideoMapEntries: [number, number][] = []
+      let newVideoIdIndex = 0
+      videosBatch.forEach(({ id }) => {
+        if (this.failedMigrations.has(parseInt(id))) {
+          return
+        }
+        const newVideoId = newVideoIds[newVideoIdIndex++].toNumber()
+        this.idsMap.set(parseInt(id), newVideoId)
+        newVideoMapEntries.push([parseInt(id), newVideoId])
+      })
+      if (newVideoMapEntries.length) {
+        console.log('Video map entries added!', newVideoMapEntries)
+        await this.assetsManager.uploadFromEvents(dataObjectsUploadedEvents)
+      }
+    }
+    return this.getResult()
+  }
+
+  private getVideoData(video: VideoFieldsFragment) {
+    const { id, channel } = video
+
+    if (!channel) {
+      throw new Error(`Channel data missing for video: ${id}`)
+    }
+
+    if (!channel.ownerMember) {
+      throw new Error(`Channel ownerMember missing for video ${id}`)
+    }
+
+    let { ownerMember } = channel
+    if (this.forcedChannelOwner) {
+      ownerMember = this.forcedChannelOwner
+    }
+
+    return { ...video, channel: { ...channel, ownerMember } }
+  }
+
+  private async prepareVideo(video: VideoFieldsFragment) {
+    const { api } = this
+
+    const {
+      categoryId,
+      description,
+      duration,
+      hasMarketing,
+      isExplicit,
+      isPublic,
+      language,
+      license,
+      mediaDataObject,
+      mediaMetadata,
+      publishedBeforeJoystream,
+      thumbnailPhotoDataObject,
+      title,
+      channel: { ownerMember, id: oldChannelId },
+    } = this.getVideoData(video)
+
+    const channelId = this.getNewChannelId(parseInt(oldChannelId))
+
+    const assetsToPrepare = {
+      thumbnail: { data: thumbnailPhotoDataObject || undefined, targetSize: VIDEO_THUMB_TARGET_SIZE },
+      video: { data: mediaDataObject || undefined },
+    }
+    const preparedAssets = await this.assetsManager.prepareAssets(assetsToPrepare)
+    const meta = new VideoMetadata({
+      title,
+      description,
+      category: categoryId ? Long.fromString(categoryId) : undefined,
+      duration,
+      hasMarketing,
+      isExplicit,
+      isPublic,
+      language: language?.iso,
+      license: license,
+      mediaPixelHeight: mediaMetadata?.pixelHeight,
+      mediaPixelWidth: mediaMetadata?.pixelWidth,
+      mediaType: mediaMetadata?.encoding,
+      publishedBeforeJoystream: {
+        isPublished: !!publishedBeforeJoystream,
+        date: moment(publishedBeforeJoystream).format('YYYY-MM-DD'),
+      },
+      thumbnailPhoto: preparedAssets.thumbnail?.index,
+      video: preparedAssets.video?.index,
+    })
+    const assetsParams = Object.values(preparedAssets)
+      .sort((a, b) => a.index - b.index)
+      .map((a) => a.params)
+    const videoCreationParams = createType<VideoCreationParameters, 'VideoCreationParameters'>(
+      'VideoCreationParameters',
+      {
+        assets: assetsParams.length
+          ? {
+              object_creation_list: assetsParams,
+              expected_data_size_fee: this.assetsManager.dataObjectFeePerMB,
+            }
+          : null,
+        meta: `0x${Buffer.from(VideoMetadata.encode(meta).finish()).toString('hex')}`,
+      }
+    )
+    const feesToCover = this.assetsManager.calcDataObjectsFee(assetsParams)
+    return [
+      api.tx.balances.transferKeepAlive(ownerMember.controllerAccount, feesToCover),
+      api.tx.sudo.sudoAs(
+        ownerMember.controllerAccount,
+        api.tx.content.createVideo({ Member: ownerMember.id }, channelId, videoCreationParams)
+      ),
+    ]
+  }
+}

+ 120 - 0
utils/migration-scripts/src/sumer-giza/sumer-query-node/api.ts

@@ -0,0 +1,120 @@
+import {
+  ApolloClient,
+  NormalizedCacheObject,
+  HttpLink,
+  InMemoryCache,
+  DocumentNode,
+  isApolloError,
+  ApolloQueryResult,
+} from '@apollo/client/core'
+import fetch from 'cross-fetch'
+import {
+  ChannelCategoryFieldsFragment,
+  ChannelFieldsFragment,
+  GetChannelsByIds,
+  GetChannelsByIdsQuery,
+  GetChannelsByIdsQueryVariables,
+  GetChannelsCategories,
+  GetChannelsCategoriesQuery,
+  GetChannelsCategoriesQueryVariables,
+  GetStorageWorkers,
+  GetStorageWorkersQuery,
+  GetStorageWorkersQueryVariables,
+  GetVideoCategories,
+  GetVideoCategoriesQuery,
+  GetVideoCategoriesQueryVariables,
+  GetVideosByIds,
+  GetVideosByIdsQuery,
+  GetVideosByIdsQueryVariables,
+  VideoCategoryFieldsFragment,
+  VideoFieldsFragment,
+  WorkerFieldsFragment,
+} from './generated/queries'
+
+export class QueryNodeApi {
+  private endpoint: string
+  private apolloClient: ApolloClient<NormalizedCacheObject>
+  private retryAttempts: number
+  private retryIntervalMs: number
+
+  public constructor(endpoint: string, retryAttempts = 5, retryIntervalMs = 5000) {
+    this.endpoint = endpoint
+    this.retryAttempts = retryAttempts
+    this.retryIntervalMs = retryIntervalMs
+    this.apolloClient = new ApolloClient({
+      link: new HttpLink({ uri: endpoint, fetch }),
+      cache: new InMemoryCache(),
+      defaultOptions: { query: { fetchPolicy: 'no-cache', errorPolicy: 'all' } },
+    })
+  }
+
+  private async query<T>(queryFunc: () => Promise<ApolloQueryResult<T>>): Promise<ApolloQueryResult<T>> {
+    let attempts = 0
+    while (true) {
+      try {
+        const result = await queryFunc()
+        return result
+      } catch (e) {
+        if (e instanceof Error && isApolloError(e) && e.networkError) {
+          console.error(`Query node (${this.endpoint}) network error: ${e.networkError.message}`)
+          if (attempts++ > this.retryAttempts) {
+            throw new Error(`Maximum number of query retry attempts reached for ${this.endpoint}`)
+          }
+          console.log(`Retrying in ${this.retryIntervalMs}ms...`)
+          await new Promise((resolve) => setTimeout(resolve, this.retryIntervalMs))
+        } else {
+          throw e
+        }
+      }
+    }
+  }
+
+  // Query-node: get multiple entities
+  protected async multipleEntitiesQuery<
+    QueryT extends { [k: string]: unknown[] },
+    VariablesT extends Record<string, unknown>
+  >(query: DocumentNode, variables: VariablesT, resultKey: keyof QueryT): Promise<QueryT[keyof QueryT]> {
+    const q = this.query<QueryT>(() => this.apolloClient.query<QueryT, VariablesT>({ query, variables }))
+    return (await q).data[resultKey]
+  }
+
+  public getChannelCategories(): Promise<ChannelCategoryFieldsFragment[]> {
+    return this.multipleEntitiesQuery<GetChannelsCategoriesQuery, GetChannelsCategoriesQueryVariables>(
+      GetChannelsCategories,
+      {},
+      'channelCategories'
+    )
+  }
+
+  public getVideoCategories(): Promise<VideoCategoryFieldsFragment[]> {
+    return this.multipleEntitiesQuery<GetVideoCategoriesQuery, GetVideoCategoriesQueryVariables>(
+      GetVideoCategories,
+      {},
+      'videoCategories'
+    )
+  }
+
+  public getChannelsByIds(channelIds: string[] | number[]): Promise<ChannelFieldsFragment[]> {
+    return this.multipleEntitiesQuery<GetChannelsByIdsQuery, GetChannelsByIdsQueryVariables>(
+      GetChannelsByIds,
+      { ids: channelIds.map((id) => id.toString()) },
+      'channels'
+    )
+  }
+
+  public getVideosByIds(videoIds: string[] | number[]): Promise<VideoFieldsFragment[]> {
+    return this.multipleEntitiesQuery<GetVideosByIdsQuery, GetVideosByIdsQueryVariables>(
+      GetVideosByIds,
+      { ids: videoIds.map((id) => id.toString()) },
+      'videos'
+    )
+  }
+
+  public getStorageWorkers(): Promise<WorkerFieldsFragment[]> {
+    return this.multipleEntitiesQuery<GetStorageWorkersQuery, GetStorageWorkersQueryVariables>(
+      GetStorageWorkers,
+      {},
+      'workers'
+    )
+  }
+}
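
A minimal usage sketch of the client above, assuming a reachable query-node endpoint (the URL and ids below are illustrative only):

import { QueryNodeApi } from './api'

async function listChannels(): Promise<void> {
  // Hypothetical endpoint; substitute the actual sumer query-node URL.
  const api = new QueryNodeApi('https://example-query-node/graphql')
  const channels = await api.getChannelsByIds([1, 2, 3])
  console.log(`Fetched ${channels.length} channels`)
}

listChannels().catch(console.error)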

+ 33 - 0
utils/migration-scripts/src/sumer-giza/sumer-query-node/codegen.yml

@@ -0,0 +1,33 @@
+# Paths are relative to the root migration-scripts directory
+overwrite: true
+
+schema: https://hydra.joystream.org/graphql
+
+documents:
+  - 'src/sumer-giza/sumer-query-node/queries/*.graphql'
+
+config:
+  scalars:
+    Date: Date
+  preResolveTypes: true # avoid using Pick
+  skipTypename: true # skip __typename field in typings unless it's part of the query
+
+generates:
+  src/sumer-giza/sumer-query-node/generated/schema.ts:
+    hooks:
+      afterOneFileWrite:
+        - prettier --write
+        - eslint --fix
+    plugins:
+      - typescript
+  src/sumer-giza/sumer-query-node/generated/queries.ts:
+    preset: import-types
+    presetConfig:
+      typesPath: ./schema
+    hooks:
+      afterOneFileWrite:
+        - prettier --write
+        - eslint --fix
+    plugins:
+      - typescript-operations
+      - typescript-document-nodes
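
The two files under generates: can be rebuilt with the standard @graphql-codegen/cli runner (for example `graphql-codegen --config src/sumer-giza/sumer-query-node/codegen.yml` from the package root; the exact package script is not shown in this diff). The import-types preset keeps queries.ts free of duplicated type definitions by importing them from ./schema.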

+ 229 - 0
utils/migration-scripts/src/sumer-giza/sumer-query-node/generated/queries.ts

@@ -0,0 +1,229 @@
+import * as Types from './schema'
+
+import gql from 'graphql-tag'
+export type VideoCategoryFieldsFragment = { id: string; name?: Types.Maybe<string> }
+
+export type ChannelCategoryFieldsFragment = { id: string; name?: Types.Maybe<string> }
+
+export type DataObjectFieldsFragment = {
+  id: string
+  joystreamContentId: string
+  size: number
+  liaisonJudgement: Types.LiaisonJudgement
+}
+
+export type VideoFieldsFragment = {
+  id: string
+  categoryId?: Types.Maybe<string>
+  title?: Types.Maybe<string>
+  description?: Types.Maybe<string>
+  duration?: Types.Maybe<number>
+  hasMarketing?: Types.Maybe<boolean>
+  publishedBeforeJoystream?: Types.Maybe<any>
+  isPublic?: Types.Maybe<boolean>
+  isCensored: boolean
+  isExplicit?: Types.Maybe<boolean>
+  isFeatured: boolean
+  thumbnailPhotoDataObject?: Types.Maybe<DataObjectFieldsFragment>
+  language?: Types.Maybe<{ iso: string }>
+  license?: Types.Maybe<{
+    code?: Types.Maybe<number>
+    attribution?: Types.Maybe<string>
+    customText?: Types.Maybe<string>
+  }>
+  mediaDataObject?: Types.Maybe<DataObjectFieldsFragment>
+  mediaMetadata?: Types.Maybe<{
+    pixelWidth?: Types.Maybe<number>
+    pixelHeight?: Types.Maybe<number>
+    size?: Types.Maybe<number>
+    encoding?: Types.Maybe<{
+      codecName?: Types.Maybe<string>
+      container?: Types.Maybe<string>
+      mimeMediaType?: Types.Maybe<string>
+    }>
+  }>
+  channel?: Types.Maybe<{ id: string; ownerMember?: Types.Maybe<{ id: string; controllerAccount: string }> }>
+}
+
+export type ChannelFieldsFragment = {
+  id: string
+  categoryId?: Types.Maybe<string>
+  rewardAccount?: Types.Maybe<string>
+  title?: Types.Maybe<string>
+  description?: Types.Maybe<string>
+  isPublic?: Types.Maybe<boolean>
+  isCensored: boolean
+  ownerMember?: Types.Maybe<{ id: string; controllerAccount: string }>
+  coverPhotoDataObject?: Types.Maybe<DataObjectFieldsFragment>
+  avatarPhotoDataObject?: Types.Maybe<DataObjectFieldsFragment>
+  language?: Types.Maybe<{ iso: string }>
+  videos: Array<{ id: string }>
+}
+
+export type WorkerFieldsFragment = { id: string; metadata?: Types.Maybe<string> }
+
+export type GetChannelsByIdsQueryVariables = Types.Exact<{
+  ids?: Types.Maybe<Array<Types.Scalars['ID']> | Types.Scalars['ID']>
+}>
+
+export type GetChannelsByIdsQuery = { channels: Array<ChannelFieldsFragment> }
+
+export type GetVideosByIdsQueryVariables = Types.Exact<{
+  ids?: Types.Maybe<Array<Types.Scalars['ID']> | Types.Scalars['ID']>
+}>
+
+export type GetVideosByIdsQuery = { videos: Array<VideoFieldsFragment> }
+
+export type GetVideoCategoriesQueryVariables = Types.Exact<{ [key: string]: never }>
+
+export type GetVideoCategoriesQuery = { videoCategories: Array<VideoCategoryFieldsFragment> }
+
+export type GetChannelsCategoriesQueryVariables = Types.Exact<{ [key: string]: never }>
+
+export type GetChannelsCategoriesQuery = { channelCategories: Array<ChannelCategoryFieldsFragment> }
+
+export type GetStorageWorkersQueryVariables = Types.Exact<{ [key: string]: never }>
+
+export type GetStorageWorkersQuery = { workers: Array<WorkerFieldsFragment> }
+
+export const VideoCategoryFields = gql`
+  fragment VideoCategoryFields on VideoCategory {
+    id
+    name
+  }
+`
+export const ChannelCategoryFields = gql`
+  fragment ChannelCategoryFields on ChannelCategory {
+    id
+    name
+  }
+`
+export const DataObjectFields = gql`
+  fragment DataObjectFields on DataObject {
+    id
+    joystreamContentId
+    size
+    liaisonJudgement
+  }
+`
+export const VideoFields = gql`
+  fragment VideoFields on Video {
+    id
+    categoryId
+    title
+    description
+    duration
+    thumbnailPhotoDataObject {
+      ...DataObjectFields
+    }
+    language {
+      iso
+    }
+    hasMarketing
+    publishedBeforeJoystream
+    isPublic
+    isCensored
+    isExplicit
+    license {
+      code
+      attribution
+      customText
+    }
+    mediaDataObject {
+      ...DataObjectFields
+    }
+    mediaMetadata {
+      encoding {
+        codecName
+        container
+        mimeMediaType
+      }
+      pixelWidth
+      pixelHeight
+      size
+    }
+    isFeatured
+    channel {
+      id
+      ownerMember {
+        id
+        controllerAccount
+      }
+    }
+  }
+  ${DataObjectFields}
+`
+export const ChannelFields = gql`
+  fragment ChannelFields on Channel {
+    id
+    ownerMember {
+      id
+      controllerAccount
+    }
+    categoryId
+    rewardAccount
+    title
+    description
+    coverPhotoDataObject {
+      ...DataObjectFields
+    }
+    avatarPhotoDataObject {
+      ...DataObjectFields
+    }
+    isPublic
+    isCensored
+    language {
+      iso
+    }
+    videos {
+      id
+    }
+  }
+  ${DataObjectFields}
+`
+export const WorkerFields = gql`
+  fragment WorkerFields on Worker {
+    id
+    metadata
+  }
+`
+export const GetChannelsByIds = gql`
+  query getChannelsByIds($ids: [ID!]) {
+    channels(where: { id_in: $ids }, limit: 1000) {
+      ...ChannelFields
+    }
+  }
+  ${ChannelFields}
+`
+export const GetVideosByIds = gql`
+  query getVideosByIds($ids: [ID!]) {
+    videos(where: { id_in: $ids }, limit: 1000) {
+      ...VideoFields
+    }
+  }
+  ${VideoFields}
+`
+export const GetVideoCategories = gql`
+  query getVideoCategories {
+    videoCategories {
+      ...VideoCategoryFields
+    }
+  }
+  ${VideoCategoryFields}
+`
+export const GetChannelsCategories = gql`
+  query getChannelsCategories {
+    channelCategories {
+      ...ChannelCategoryFields
+    }
+  }
+  ${ChannelCategoryFields}
+`
+export const GetStorageWorkers = gql`
+  query getStorageWorkers {
+    workers(where: { type_eq: STORAGE }) {
+      ...WorkerFields
+    }
+  }
+  ${WorkerFields}
+`
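
A sketch of how these typed documents can be used with an ApolloClient directly, outside the QueryNodeApi wrapper (the endpoint is a placeholder and the ids are illustrative):

import { ApolloClient, HttpLink, InMemoryCache } from '@apollo/client/core'
import fetch from 'cross-fetch'
import { GetVideosByIds, GetVideosByIdsQuery, GetVideosByIdsQueryVariables } from './queries'

const client = new ApolloClient({
  link: new HttpLink({ uri: 'https://example-query-node/graphql', fetch }),
  cache: new InMemoryCache(),
})

// The generated types check both the variables and the result shape at compile time.
client
  .query<GetVideosByIdsQuery, GetVideosByIdsQueryVariables>({
    query: GetVideosByIds,
    variables: { ids: ['1', '2'] },
  })
  .then(({ data }) => console.log(data.videos.map((v) => v.title)))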

+ 2565 - 0
utils/migration-scripts/src/sumer-giza/sumer-query-node/generated/schema.ts

@@ -0,0 +1,2565 @@
+export type Maybe<T> = T | null
+export type Exact<T extends { [key: string]: unknown }> = { [K in keyof T]: T[K] }
+export type MakeOptional<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]?: Maybe<T[SubKey]> }
+export type MakeMaybe<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]: Maybe<T[SubKey]> }
+/** All built-in and custom scalars, mapped to their actual values */
+export type Scalars = {
+  ID: string
+  String: string
+  Boolean: boolean
+  Int: number
+  Float: number
+  /** The javascript `Date` as string. Type represents date and time as the ISO Date string. */
+  DateTime: any
+  /** The `JSONObject` scalar type represents JSON objects as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf). */
+  JSONObject: any
+}
+
+export enum AssetAvailability {
+  Accepted = 'ACCEPTED',
+  Pending = 'PENDING',
+  Invalid = 'INVALID',
+}
+
+export type BaseGraphQlObject = {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+}
+
+export type BaseModel = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+}
+
+export type BaseModelUuid = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+}
+
+export type BaseWhereInput = {
+  id_eq?: Maybe<Scalars['String']>
+  id_in?: Maybe<Array<Scalars['String']>>
+  createdAt_eq?: Maybe<Scalars['String']>
+  createdAt_lt?: Maybe<Scalars['String']>
+  createdAt_lte?: Maybe<Scalars['String']>
+  createdAt_gt?: Maybe<Scalars['String']>
+  createdAt_gte?: Maybe<Scalars['String']>
+  createdById_eq?: Maybe<Scalars['String']>
+  updatedAt_eq?: Maybe<Scalars['String']>
+  updatedAt_lt?: Maybe<Scalars['String']>
+  updatedAt_lte?: Maybe<Scalars['String']>
+  updatedAt_gt?: Maybe<Scalars['String']>
+  updatedAt_gte?: Maybe<Scalars['String']>
+  updatedById_eq?: Maybe<Scalars['String']>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['String']>
+  deletedAt_lt?: Maybe<Scalars['String']>
+  deletedAt_lte?: Maybe<Scalars['String']>
+  deletedAt_gt?: Maybe<Scalars['String']>
+  deletedAt_gte?: Maybe<Scalars['String']>
+  deletedById_eq?: Maybe<Scalars['String']>
+}
+
+export type Channel = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  ownerMember?: Maybe<Membership>
+  ownerMemberId?: Maybe<Scalars['String']>
+  ownerCuratorGroup?: Maybe<CuratorGroup>
+  ownerCuratorGroupId?: Maybe<Scalars['String']>
+  category?: Maybe<ChannelCategory>
+  categoryId?: Maybe<Scalars['String']>
+  /** Reward account where revenue is sent if set. */
+  rewardAccount?: Maybe<Scalars['String']>
+  /** The title of the Channel */
+  title?: Maybe<Scalars['String']>
+  /** The description of a Channel */
+  description?: Maybe<Scalars['String']>
+  coverPhotoDataObject?: Maybe<DataObject>
+  coverPhotoDataObjectId?: Maybe<Scalars['String']>
+  /** URLs where the asset content can be accessed (if any) */
+  coverPhotoUrls: Array<Scalars['String']>
+  /** Availability meta information */
+  coverPhotoAvailability: AssetAvailability
+  avatarPhotoDataObject?: Maybe<DataObject>
+  avatarPhotoDataObjectId?: Maybe<Scalars['String']>
+  /** URLs where the asset content can be accessed (if any) */
+  avatarPhotoUrls: Array<Scalars['String']>
+  /** Availability meta information */
+  avatarPhotoAvailability: AssetAvailability
+  /** Flag signaling whether a channel is public. */
+  isPublic?: Maybe<Scalars['Boolean']>
+  /** Flag signaling whether a channel is censored. */
+  isCensored: Scalars['Boolean']
+  language?: Maybe<Language>
+  languageId?: Maybe<Scalars['String']>
+  videos: Array<Video>
+  createdInBlock: Scalars['Int']
+}
+
+export type ChannelCategoriesByNameFtsOutput = {
+  item: ChannelCategoriesByNameSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type ChannelCategoriesByNameSearchResult = ChannelCategory
+
+/** Category of media channel */
+export type ChannelCategory = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** The name of the category */
+  name?: Maybe<Scalars['String']>
+  channels: Array<Channel>
+  createdInBlock: Scalars['Int']
+}
+
+export type ChannelCategoryConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<ChannelCategoryEdge>
+  pageInfo: PageInfo
+}
+
+export type ChannelCategoryCreateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock: Scalars['Float']
+}
+
+export type ChannelCategoryEdge = {
+  node: ChannelCategory
+  cursor: Scalars['String']
+}
+
+export enum ChannelCategoryOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NameAsc = 'name_ASC',
+  NameDesc = 'name_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type ChannelCategoryUpdateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type ChannelCategoryWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  name_eq?: Maybe<Scalars['String']>
+  name_contains?: Maybe<Scalars['String']>
+  name_startsWith?: Maybe<Scalars['String']>
+  name_endsWith?: Maybe<Scalars['String']>
+  name_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  channels_none?: Maybe<ChannelWhereInput>
+  channels_some?: Maybe<ChannelWhereInput>
+  channels_every?: Maybe<ChannelWhereInput>
+  AND?: Maybe<Array<ChannelCategoryWhereInput>>
+  OR?: Maybe<Array<ChannelCategoryWhereInput>>
+}
+
+export type ChannelCategoryWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type ChannelConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<ChannelEdge>
+  pageInfo: PageInfo
+}
+
+export type ChannelCreateInput = {
+  ownerMember?: Maybe<Scalars['ID']>
+  ownerMemberId?: Maybe<Scalars['ID']>
+  ownerCuratorGroup?: Maybe<Scalars['ID']>
+  ownerCuratorGroupId?: Maybe<Scalars['ID']>
+  category?: Maybe<Scalars['ID']>
+  categoryId?: Maybe<Scalars['ID']>
+  rewardAccount?: Maybe<Scalars['String']>
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  coverPhotoDataObject?: Maybe<Scalars['ID']>
+  coverPhotoDataObjectId?: Maybe<Scalars['ID']>
+  coverPhotoUrls: Array<Scalars['String']>
+  coverPhotoAvailability: AssetAvailability
+  avatarPhotoDataObject?: Maybe<Scalars['ID']>
+  avatarPhotoDataObjectId?: Maybe<Scalars['ID']>
+  avatarPhotoUrls: Array<Scalars['String']>
+  avatarPhotoAvailability: AssetAvailability
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored: Scalars['Boolean']
+  language?: Maybe<Scalars['ID']>
+  languageId?: Maybe<Scalars['ID']>
+  createdInBlock: Scalars['Float']
+}
+
+export type ChannelEdge = {
+  node: Channel
+  cursor: Scalars['String']
+}
+
+export enum ChannelOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  OwnerMemberAsc = 'ownerMember_ASC',
+  OwnerMemberDesc = 'ownerMember_DESC',
+  OwnerMemberIdAsc = 'ownerMemberId_ASC',
+  OwnerMemberIdDesc = 'ownerMemberId_DESC',
+  OwnerCuratorGroupAsc = 'ownerCuratorGroup_ASC',
+  OwnerCuratorGroupDesc = 'ownerCuratorGroup_DESC',
+  OwnerCuratorGroupIdAsc = 'ownerCuratorGroupId_ASC',
+  OwnerCuratorGroupIdDesc = 'ownerCuratorGroupId_DESC',
+  CategoryAsc = 'category_ASC',
+  CategoryDesc = 'category_DESC',
+  CategoryIdAsc = 'categoryId_ASC',
+  CategoryIdDesc = 'categoryId_DESC',
+  RewardAccountAsc = 'rewardAccount_ASC',
+  RewardAccountDesc = 'rewardAccount_DESC',
+  TitleAsc = 'title_ASC',
+  TitleDesc = 'title_DESC',
+  DescriptionAsc = 'description_ASC',
+  DescriptionDesc = 'description_DESC',
+  CoverPhotoDataObjectAsc = 'coverPhotoDataObject_ASC',
+  CoverPhotoDataObjectDesc = 'coverPhotoDataObject_DESC',
+  CoverPhotoDataObjectIdAsc = 'coverPhotoDataObjectId_ASC',
+  CoverPhotoDataObjectIdDesc = 'coverPhotoDataObjectId_DESC',
+  CoverPhotoAvailabilityAsc = 'coverPhotoAvailability_ASC',
+  CoverPhotoAvailabilityDesc = 'coverPhotoAvailability_DESC',
+  AvatarPhotoDataObjectAsc = 'avatarPhotoDataObject_ASC',
+  AvatarPhotoDataObjectDesc = 'avatarPhotoDataObject_DESC',
+  AvatarPhotoDataObjectIdAsc = 'avatarPhotoDataObjectId_ASC',
+  AvatarPhotoDataObjectIdDesc = 'avatarPhotoDataObjectId_DESC',
+  AvatarPhotoAvailabilityAsc = 'avatarPhotoAvailability_ASC',
+  AvatarPhotoAvailabilityDesc = 'avatarPhotoAvailability_DESC',
+  IsPublicAsc = 'isPublic_ASC',
+  IsPublicDesc = 'isPublic_DESC',
+  IsCensoredAsc = 'isCensored_ASC',
+  IsCensoredDesc = 'isCensored_DESC',
+  LanguageAsc = 'language_ASC',
+  LanguageDesc = 'language_DESC',
+  LanguageIdAsc = 'languageId_ASC',
+  LanguageIdDesc = 'languageId_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type ChannelUpdateInput = {
+  ownerMember?: Maybe<Scalars['ID']>
+  ownerMemberId?: Maybe<Scalars['ID']>
+  ownerCuratorGroup?: Maybe<Scalars['ID']>
+  ownerCuratorGroupId?: Maybe<Scalars['ID']>
+  category?: Maybe<Scalars['ID']>
+  categoryId?: Maybe<Scalars['ID']>
+  rewardAccount?: Maybe<Scalars['String']>
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  coverPhotoDataObject?: Maybe<Scalars['ID']>
+  coverPhotoDataObjectId?: Maybe<Scalars['ID']>
+  coverPhotoUrls?: Maybe<Array<Scalars['String']>>
+  coverPhotoAvailability?: Maybe<AssetAvailability>
+  avatarPhotoDataObject?: Maybe<Scalars['ID']>
+  avatarPhotoDataObjectId?: Maybe<Scalars['ID']>
+  avatarPhotoUrls?: Maybe<Array<Scalars['String']>>
+  avatarPhotoAvailability?: Maybe<AssetAvailability>
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored?: Maybe<Scalars['Boolean']>
+  language?: Maybe<Scalars['ID']>
+  languageId?: Maybe<Scalars['ID']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type ChannelWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  ownerMemberId_eq?: Maybe<Scalars['ID']>
+  ownerMemberId_in?: Maybe<Array<Scalars['ID']>>
+  ownerCuratorGroupId_eq?: Maybe<Scalars['ID']>
+  ownerCuratorGroupId_in?: Maybe<Array<Scalars['ID']>>
+  categoryId_eq?: Maybe<Scalars['ID']>
+  categoryId_in?: Maybe<Array<Scalars['ID']>>
+  rewardAccount_eq?: Maybe<Scalars['String']>
+  rewardAccount_contains?: Maybe<Scalars['String']>
+  rewardAccount_startsWith?: Maybe<Scalars['String']>
+  rewardAccount_endsWith?: Maybe<Scalars['String']>
+  rewardAccount_in?: Maybe<Array<Scalars['String']>>
+  title_eq?: Maybe<Scalars['String']>
+  title_contains?: Maybe<Scalars['String']>
+  title_startsWith?: Maybe<Scalars['String']>
+  title_endsWith?: Maybe<Scalars['String']>
+  title_in?: Maybe<Array<Scalars['String']>>
+  description_eq?: Maybe<Scalars['String']>
+  description_contains?: Maybe<Scalars['String']>
+  description_startsWith?: Maybe<Scalars['String']>
+  description_endsWith?: Maybe<Scalars['String']>
+  description_in?: Maybe<Array<Scalars['String']>>
+  coverPhotoDataObjectId_eq?: Maybe<Scalars['ID']>
+  coverPhotoDataObjectId_in?: Maybe<Array<Scalars['ID']>>
+  coverPhotoUrls_containsAll?: Maybe<Array<Scalars['String']>>
+  coverPhotoUrls_containsNone?: Maybe<Array<Scalars['String']>>
+  coverPhotoUrls_containsAny?: Maybe<Array<Scalars['String']>>
+  coverPhotoAvailability_eq?: Maybe<AssetAvailability>
+  coverPhotoAvailability_in?: Maybe<Array<AssetAvailability>>
+  avatarPhotoDataObjectId_eq?: Maybe<Scalars['ID']>
+  avatarPhotoDataObjectId_in?: Maybe<Array<Scalars['ID']>>
+  avatarPhotoUrls_containsAll?: Maybe<Array<Scalars['String']>>
+  avatarPhotoUrls_containsNone?: Maybe<Array<Scalars['String']>>
+  avatarPhotoUrls_containsAny?: Maybe<Array<Scalars['String']>>
+  avatarPhotoAvailability_eq?: Maybe<AssetAvailability>
+  avatarPhotoAvailability_in?: Maybe<Array<AssetAvailability>>
+  isPublic_eq?: Maybe<Scalars['Boolean']>
+  isPublic_in?: Maybe<Array<Scalars['Boolean']>>
+  isCensored_eq?: Maybe<Scalars['Boolean']>
+  isCensored_in?: Maybe<Array<Scalars['Boolean']>>
+  languageId_eq?: Maybe<Scalars['ID']>
+  languageId_in?: Maybe<Array<Scalars['ID']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  ownerMember?: Maybe<MembershipWhereInput>
+  ownerCuratorGroup?: Maybe<CuratorGroupWhereInput>
+  category?: Maybe<ChannelCategoryWhereInput>
+  coverPhotoDataObject?: Maybe<DataObjectWhereInput>
+  avatarPhotoDataObject?: Maybe<DataObjectWhereInput>
+  language?: Maybe<LanguageWhereInput>
+  videos_none?: Maybe<VideoWhereInput>
+  videos_some?: Maybe<VideoWhereInput>
+  videos_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<ChannelWhereInput>>
+  OR?: Maybe<Array<ChannelWhereInput>>
+}
+
+export type ChannelWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type CuratorGroup = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Curators belonging to this group */
+  curatorIds: Array<Scalars['Int']>
+  /** Is group active or not */
+  isActive: Scalars['Boolean']
+  channels: Array<Channel>
+}
+
+export type CuratorGroupConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<CuratorGroupEdge>
+  pageInfo: PageInfo
+}
+
+export type CuratorGroupCreateInput = {
+  curatorIds: Array<Scalars['Int']>
+  isActive: Scalars['Boolean']
+}
+
+export type CuratorGroupEdge = {
+  node: CuratorGroup
+  cursor: Scalars['String']
+}
+
+export enum CuratorGroupOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsActiveAsc = 'isActive_ASC',
+  IsActiveDesc = 'isActive_DESC',
+}
+
+export type CuratorGroupUpdateInput = {
+  curatorIds?: Maybe<Array<Scalars['Int']>>
+  isActive?: Maybe<Scalars['Boolean']>
+}
+
+export type CuratorGroupWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  curatorIds_containsAll?: Maybe<Array<Scalars['Int']>>
+  curatorIds_containsNone?: Maybe<Array<Scalars['Int']>>
+  curatorIds_containsAny?: Maybe<Array<Scalars['Int']>>
+  isActive_eq?: Maybe<Scalars['Boolean']>
+  isActive_in?: Maybe<Array<Scalars['Boolean']>>
+  channels_none?: Maybe<ChannelWhereInput>
+  channels_some?: Maybe<ChannelWhereInput>
+  channels_every?: Maybe<ChannelWhereInput>
+  AND?: Maybe<Array<CuratorGroupWhereInput>>
+  OR?: Maybe<Array<CuratorGroupWhereInput>>
+}
+
+export type CuratorGroupWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+/** Manages content ids, type and storage provider decision about it */
+export type DataObject = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Content owner */
+  owner: DataObjectOwner
+  /** Content added at */
+  createdInBlock: Scalars['Int']
+  /** Content type id */
+  typeId: Scalars['Int']
+  /** Content size in bytes */
+  size: Scalars['Float']
+  liaison?: Maybe<Worker>
+  liaisonId?: Maybe<Scalars['String']>
+  /** Storage provider as liaison judgment */
+  liaisonJudgement: LiaisonJudgement
+  /** IPFS content id */
+  ipfsContentId: Scalars['String']
+  /** Joystream runtime content */
+  joystreamContentId: Scalars['String']
+  channelcoverPhotoDataObject?: Maybe<Array<Channel>>
+  channelavatarPhotoDataObject?: Maybe<Array<Channel>>
+  videothumbnailPhotoDataObject?: Maybe<Array<Video>>
+  videomediaDataObject?: Maybe<Array<Video>>
+}
+
+export type DataObjectConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DataObjectEdge>
+  pageInfo: PageInfo
+}
+
+export type DataObjectCreateInput = {
+  owner: Scalars['JSONObject']
+  createdInBlock: Scalars['Float']
+  typeId: Scalars['Float']
+  size: Scalars['Float']
+  liaison?: Maybe<Scalars['ID']>
+  liaisonId?: Maybe<Scalars['ID']>
+  liaisonJudgement: LiaisonJudgement
+  ipfsContentId: Scalars['String']
+  joystreamContentId: Scalars['String']
+}
+
+export type DataObjectEdge = {
+  node: DataObject
+  cursor: Scalars['String']
+}
+
+export enum DataObjectOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+  TypeIdAsc = 'typeId_ASC',
+  TypeIdDesc = 'typeId_DESC',
+  SizeAsc = 'size_ASC',
+  SizeDesc = 'size_DESC',
+  LiaisonAsc = 'liaison_ASC',
+  LiaisonDesc = 'liaison_DESC',
+  LiaisonIdAsc = 'liaisonId_ASC',
+  LiaisonIdDesc = 'liaisonId_DESC',
+  LiaisonJudgementAsc = 'liaisonJudgement_ASC',
+  LiaisonJudgementDesc = 'liaisonJudgement_DESC',
+  IpfsContentIdAsc = 'ipfsContentId_ASC',
+  IpfsContentIdDesc = 'ipfsContentId_DESC',
+  JoystreamContentIdAsc = 'joystreamContentId_ASC',
+  JoystreamContentIdDesc = 'joystreamContentId_DESC',
+}
+
+export type DataObjectOwner =
+  | DataObjectOwnerMember
+  | DataObjectOwnerChannel
+  | DataObjectOwnerDao
+  | DataObjectOwnerCouncil
+  | DataObjectOwnerWorkingGroup
+
+export type DataObjectOwnerChannel = {
+  /** Channel identifier */
+  channel: Scalars['Int']
+  /** Variant needs to have at least one property. This value is not used. */
+  dummy?: Maybe<Scalars['Int']>
+}
+
+export type DataObjectOwnerChannelCreateInput = {
+  channel: Scalars['Float']
+  dummy?: Maybe<Scalars['Float']>
+}
+
+export type DataObjectOwnerChannelUpdateInput = {
+  channel?: Maybe<Scalars['Float']>
+  dummy?: Maybe<Scalars['Float']>
+}
+
+export type DataObjectOwnerChannelWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  channel_eq?: Maybe<Scalars['Int']>
+  channel_gt?: Maybe<Scalars['Int']>
+  channel_gte?: Maybe<Scalars['Int']>
+  channel_lt?: Maybe<Scalars['Int']>
+  channel_lte?: Maybe<Scalars['Int']>
+  channel_in?: Maybe<Array<Scalars['Int']>>
+  dummy_eq?: Maybe<Scalars['Int']>
+  dummy_gt?: Maybe<Scalars['Int']>
+  dummy_gte?: Maybe<Scalars['Int']>
+  dummy_lt?: Maybe<Scalars['Int']>
+  dummy_lte?: Maybe<Scalars['Int']>
+  dummy_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<DataObjectOwnerChannelWhereInput>>
+  OR?: Maybe<Array<DataObjectOwnerChannelWhereInput>>
+}
+
+export type DataObjectOwnerChannelWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DataObjectOwnerCouncil = {
+  /** Variant needs to have at least one property. This value is not used. */
+  dummy?: Maybe<Scalars['Int']>
+}
+
+export type DataObjectOwnerCouncilCreateInput = {
+  dummy?: Maybe<Scalars['Float']>
+}
+
+export type DataObjectOwnerCouncilUpdateInput = {
+  dummy?: Maybe<Scalars['Float']>
+}
+
+export type DataObjectOwnerCouncilWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  dummy_eq?: Maybe<Scalars['Int']>
+  dummy_gt?: Maybe<Scalars['Int']>
+  dummy_gte?: Maybe<Scalars['Int']>
+  dummy_lt?: Maybe<Scalars['Int']>
+  dummy_lte?: Maybe<Scalars['Int']>
+  dummy_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<DataObjectOwnerCouncilWhereInput>>
+  OR?: Maybe<Array<DataObjectOwnerCouncilWhereInput>>
+}
+
+export type DataObjectOwnerCouncilWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DataObjectOwnerDao = {
+  /** DAO identifier */
+  dao: Scalars['Int']
+}
+
+export type DataObjectOwnerDaoCreateInput = {
+  dao: Scalars['Float']
+}
+
+export type DataObjectOwnerDaoUpdateInput = {
+  dao?: Maybe<Scalars['Float']>
+}
+
+export type DataObjectOwnerDaoWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  dao_eq?: Maybe<Scalars['Int']>
+  dao_gt?: Maybe<Scalars['Int']>
+  dao_gte?: Maybe<Scalars['Int']>
+  dao_lt?: Maybe<Scalars['Int']>
+  dao_lte?: Maybe<Scalars['Int']>
+  dao_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<DataObjectOwnerDaoWhereInput>>
+  OR?: Maybe<Array<DataObjectOwnerDaoWhereInput>>
+}
+
+export type DataObjectOwnerDaoWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DataObjectOwnerMember = {
+  /** Member identifier */
+  member: Scalars['Int']
+  /** Variant needs to have at least one property. This value is not used. */
+  dummy?: Maybe<Scalars['Int']>
+}
+
+export type DataObjectOwnerMemberCreateInput = {
+  member: Scalars['Float']
+  dummy?: Maybe<Scalars['Float']>
+}
+
+export type DataObjectOwnerMemberUpdateInput = {
+  member?: Maybe<Scalars['Float']>
+  dummy?: Maybe<Scalars['Float']>
+}
+
+export type DataObjectOwnerMemberWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  member_eq?: Maybe<Scalars['Int']>
+  member_gt?: Maybe<Scalars['Int']>
+  member_gte?: Maybe<Scalars['Int']>
+  member_lt?: Maybe<Scalars['Int']>
+  member_lte?: Maybe<Scalars['Int']>
+  member_in?: Maybe<Array<Scalars['Int']>>
+  dummy_eq?: Maybe<Scalars['Int']>
+  dummy_gt?: Maybe<Scalars['Int']>
+  dummy_gte?: Maybe<Scalars['Int']>
+  dummy_lt?: Maybe<Scalars['Int']>
+  dummy_lte?: Maybe<Scalars['Int']>
+  dummy_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<DataObjectOwnerMemberWhereInput>>
+  OR?: Maybe<Array<DataObjectOwnerMemberWhereInput>>
+}
+
+export type DataObjectOwnerMemberWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DataObjectOwnerWorkingGroup = {
+  /** Working group identifier */
+  workingGroup: Scalars['Int']
+}
+
+export type DataObjectOwnerWorkingGroupCreateInput = {
+  workingGroup: Scalars['Float']
+}
+
+export type DataObjectOwnerWorkingGroupUpdateInput = {
+  workingGroup?: Maybe<Scalars['Float']>
+}
+
+export type DataObjectOwnerWorkingGroupWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  workingGroup_eq?: Maybe<Scalars['Int']>
+  workingGroup_gt?: Maybe<Scalars['Int']>
+  workingGroup_gte?: Maybe<Scalars['Int']>
+  workingGroup_lt?: Maybe<Scalars['Int']>
+  workingGroup_lte?: Maybe<Scalars['Int']>
+  workingGroup_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<DataObjectOwnerWorkingGroupWhereInput>>
+  OR?: Maybe<Array<DataObjectOwnerWorkingGroupWhereInput>>
+}
+
+export type DataObjectOwnerWorkingGroupWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DataObjectUpdateInput = {
+  owner?: Maybe<Scalars['JSONObject']>
+  createdInBlock?: Maybe<Scalars['Float']>
+  typeId?: Maybe<Scalars['Float']>
+  size?: Maybe<Scalars['Float']>
+  liaison?: Maybe<Scalars['ID']>
+  liaisonId?: Maybe<Scalars['ID']>
+  liaisonJudgement?: Maybe<LiaisonJudgement>
+  ipfsContentId?: Maybe<Scalars['String']>
+  joystreamContentId?: Maybe<Scalars['String']>
+}
+
+export type DataObjectWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  owner_json?: Maybe<Scalars['JSONObject']>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  typeId_eq?: Maybe<Scalars['Int']>
+  typeId_gt?: Maybe<Scalars['Int']>
+  typeId_gte?: Maybe<Scalars['Int']>
+  typeId_lt?: Maybe<Scalars['Int']>
+  typeId_lte?: Maybe<Scalars['Int']>
+  typeId_in?: Maybe<Array<Scalars['Int']>>
+  size_eq?: Maybe<Scalars['Float']>
+  size_gt?: Maybe<Scalars['Float']>
+  size_gte?: Maybe<Scalars['Float']>
+  size_lt?: Maybe<Scalars['Float']>
+  size_lte?: Maybe<Scalars['Float']>
+  size_in?: Maybe<Array<Scalars['Float']>>
+  liaisonId_eq?: Maybe<Scalars['ID']>
+  liaisonId_in?: Maybe<Array<Scalars['ID']>>
+  liaisonJudgement_eq?: Maybe<LiaisonJudgement>
+  liaisonJudgement_in?: Maybe<Array<LiaisonJudgement>>
+  ipfsContentId_eq?: Maybe<Scalars['String']>
+  ipfsContentId_contains?: Maybe<Scalars['String']>
+  ipfsContentId_startsWith?: Maybe<Scalars['String']>
+  ipfsContentId_endsWith?: Maybe<Scalars['String']>
+  ipfsContentId_in?: Maybe<Array<Scalars['String']>>
+  joystreamContentId_eq?: Maybe<Scalars['String']>
+  joystreamContentId_contains?: Maybe<Scalars['String']>
+  joystreamContentId_startsWith?: Maybe<Scalars['String']>
+  joystreamContentId_endsWith?: Maybe<Scalars['String']>
+  joystreamContentId_in?: Maybe<Array<Scalars['String']>>
+  liaison?: Maybe<WorkerWhereInput>
+  channelcoverPhotoDataObject_none?: Maybe<ChannelWhereInput>
+  channelcoverPhotoDataObject_some?: Maybe<ChannelWhereInput>
+  channelcoverPhotoDataObject_every?: Maybe<ChannelWhereInput>
+  channelavatarPhotoDataObject_none?: Maybe<ChannelWhereInput>
+  channelavatarPhotoDataObject_some?: Maybe<ChannelWhereInput>
+  channelavatarPhotoDataObject_every?: Maybe<ChannelWhereInput>
+  videothumbnailPhotoDataObject_none?: Maybe<VideoWhereInput>
+  videothumbnailPhotoDataObject_some?: Maybe<VideoWhereInput>
+  videothumbnailPhotoDataObject_every?: Maybe<VideoWhereInput>
+  videomediaDataObject_none?: Maybe<VideoWhereInput>
+  videomediaDataObject_some?: Maybe<VideoWhereInput>
+  videomediaDataObject_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<DataObjectWhereInput>>
+  OR?: Maybe<Array<DataObjectWhereInput>>
+}
+
+export type DataObjectWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DeleteResponse = {
+  id: Scalars['ID']
+}
+
+export type Language = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Language identifier ISO 639-1 */
+  iso: Scalars['String']
+  createdInBlock: Scalars['Int']
+  channellanguage?: Maybe<Array<Channel>>
+  videolanguage?: Maybe<Array<Video>>
+}
+
+export type LanguageConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<LanguageEdge>
+  pageInfo: PageInfo
+}
+
+export type LanguageCreateInput = {
+  iso: Scalars['String']
+  createdInBlock: Scalars['Float']
+}
+
+export type LanguageEdge = {
+  node: Language
+  cursor: Scalars['String']
+}
+
+export enum LanguageOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsoAsc = 'iso_ASC',
+  IsoDesc = 'iso_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type LanguageUpdateInput = {
+  iso?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type LanguageWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  iso_eq?: Maybe<Scalars['String']>
+  iso_contains?: Maybe<Scalars['String']>
+  iso_startsWith?: Maybe<Scalars['String']>
+  iso_endsWith?: Maybe<Scalars['String']>
+  iso_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  channellanguage_none?: Maybe<ChannelWhereInput>
+  channellanguage_some?: Maybe<ChannelWhereInput>
+  channellanguage_every?: Maybe<ChannelWhereInput>
+  videolanguage_none?: Maybe<VideoWhereInput>
+  videolanguage_some?: Maybe<VideoWhereInput>
+  videolanguage_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<LanguageWhereInput>>
+  OR?: Maybe<Array<LanguageWhereInput>>
+}
+
+export type LanguageWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum LiaisonJudgement {
+  Pending = 'PENDING',
+  Accepted = 'ACCEPTED',
+}
+
+export type License = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** License code defined by Joystream */
+  code?: Maybe<Scalars['Int']>
+  /** Attribution (if required by the license) */
+  attribution?: Maybe<Scalars['String']>
+  /** Custom license content */
+  customText?: Maybe<Scalars['String']>
+  videolicense?: Maybe<Array<Video>>
+}
+
+export type LicenseConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<LicenseEdge>
+  pageInfo: PageInfo
+}
+
+export type LicenseCreateInput = {
+  code?: Maybe<Scalars['Float']>
+  attribution?: Maybe<Scalars['String']>
+  customText?: Maybe<Scalars['String']>
+}
+
+export type LicenseEdge = {
+  node: License
+  cursor: Scalars['String']
+}
+
+export enum LicenseOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CodeAsc = 'code_ASC',
+  CodeDesc = 'code_DESC',
+  AttributionAsc = 'attribution_ASC',
+  AttributionDesc = 'attribution_DESC',
+  CustomTextAsc = 'customText_ASC',
+  CustomTextDesc = 'customText_DESC',
+}
+
+export type LicenseUpdateInput = {
+  code?: Maybe<Scalars['Float']>
+  attribution?: Maybe<Scalars['String']>
+  customText?: Maybe<Scalars['String']>
+}
+
+export type LicenseWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  code_eq?: Maybe<Scalars['Int']>
+  code_gt?: Maybe<Scalars['Int']>
+  code_gte?: Maybe<Scalars['Int']>
+  code_lt?: Maybe<Scalars['Int']>
+  code_lte?: Maybe<Scalars['Int']>
+  code_in?: Maybe<Array<Scalars['Int']>>
+  attribution_eq?: Maybe<Scalars['String']>
+  attribution_contains?: Maybe<Scalars['String']>
+  attribution_startsWith?: Maybe<Scalars['String']>
+  attribution_endsWith?: Maybe<Scalars['String']>
+  attribution_in?: Maybe<Array<Scalars['String']>>
+  customText_eq?: Maybe<Scalars['String']>
+  customText_contains?: Maybe<Scalars['String']>
+  customText_startsWith?: Maybe<Scalars['String']>
+  customText_endsWith?: Maybe<Scalars['String']>
+  customText_in?: Maybe<Array<Scalars['String']>>
+  videolicense_none?: Maybe<VideoWhereInput>
+  videolicense_some?: Maybe<VideoWhereInput>
+  videolicense_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<LicenseWhereInput>>
+  OR?: Maybe<Array<LicenseWhereInput>>
+}
+
+export type LicenseWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type MembersByHandleFtsOutput = {
+  item: MembersByHandleSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type MembersByHandleSearchResult = Membership
+
+/** Stored information about a registered user */
+export type Membership = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** The unique handle chosen by member */
+  handle: Scalars['String']
+  /** A Url to member's Avatar image */
+  avatarUri?: Maybe<Scalars['String']>
+  /** Short text chosen by member to share information about themselves */
+  about?: Maybe<Scalars['String']>
+  /** Member's controller account id */
+  controllerAccount: Scalars['String']
+  /** Member's root account id */
+  rootAccount: Scalars['String']
+  /** Blocknumber when member was registered */
+  createdInBlock: Scalars['Int']
+  /** How the member was registered */
+  entry: MembershipEntryMethod
+  /** The type of subscription the member has purchased if any. */
+  subscription?: Maybe<Scalars['Int']>
+  channels: Array<Channel>
+}
+
+export type MembershipConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<MembershipEdge>
+  pageInfo: PageInfo
+}
+
+export type MembershipCreateInput = {
+  handle: Scalars['String']
+  avatarUri?: Maybe<Scalars['String']>
+  about?: Maybe<Scalars['String']>
+  controllerAccount: Scalars['String']
+  rootAccount: Scalars['String']
+  createdInBlock: Scalars['Float']
+  entry: MembershipEntryMethod
+  subscription?: Maybe<Scalars['Float']>
+}
+
+export type MembershipEdge = {
+  node: Membership
+  cursor: Scalars['String']
+}
+
+export enum MembershipEntryMethod {
+  Paid = 'PAID',
+  Screening = 'SCREENING',
+  Genesis = 'GENESIS',
+}
+
+export enum MembershipOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  HandleAsc = 'handle_ASC',
+  HandleDesc = 'handle_DESC',
+  AvatarUriAsc = 'avatarUri_ASC',
+  AvatarUriDesc = 'avatarUri_DESC',
+  AboutAsc = 'about_ASC',
+  AboutDesc = 'about_DESC',
+  ControllerAccountAsc = 'controllerAccount_ASC',
+  ControllerAccountDesc = 'controllerAccount_DESC',
+  RootAccountAsc = 'rootAccount_ASC',
+  RootAccountDesc = 'rootAccount_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+  EntryAsc = 'entry_ASC',
+  EntryDesc = 'entry_DESC',
+  SubscriptionAsc = 'subscription_ASC',
+  SubscriptionDesc = 'subscription_DESC',
+}
+
+export type MembershipUpdateInput = {
+  handle?: Maybe<Scalars['String']>
+  avatarUri?: Maybe<Scalars['String']>
+  about?: Maybe<Scalars['String']>
+  controllerAccount?: Maybe<Scalars['String']>
+  rootAccount?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+  entry?: Maybe<MembershipEntryMethod>
+  subscription?: Maybe<Scalars['Float']>
+}
+
+export type MembershipWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  handle_eq?: Maybe<Scalars['String']>
+  handle_contains?: Maybe<Scalars['String']>
+  handle_startsWith?: Maybe<Scalars['String']>
+  handle_endsWith?: Maybe<Scalars['String']>
+  handle_in?: Maybe<Array<Scalars['String']>>
+  avatarUri_eq?: Maybe<Scalars['String']>
+  avatarUri_contains?: Maybe<Scalars['String']>
+  avatarUri_startsWith?: Maybe<Scalars['String']>
+  avatarUri_endsWith?: Maybe<Scalars['String']>
+  avatarUri_in?: Maybe<Array<Scalars['String']>>
+  about_eq?: Maybe<Scalars['String']>
+  about_contains?: Maybe<Scalars['String']>
+  about_startsWith?: Maybe<Scalars['String']>
+  about_endsWith?: Maybe<Scalars['String']>
+  about_in?: Maybe<Array<Scalars['String']>>
+  controllerAccount_eq?: Maybe<Scalars['String']>
+  controllerAccount_contains?: Maybe<Scalars['String']>
+  controllerAccount_startsWith?: Maybe<Scalars['String']>
+  controllerAccount_endsWith?: Maybe<Scalars['String']>
+  controllerAccount_in?: Maybe<Array<Scalars['String']>>
+  rootAccount_eq?: Maybe<Scalars['String']>
+  rootAccount_contains?: Maybe<Scalars['String']>
+  rootAccount_startsWith?: Maybe<Scalars['String']>
+  rootAccount_endsWith?: Maybe<Scalars['String']>
+  rootAccount_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  entry_eq?: Maybe<MembershipEntryMethod>
+  entry_in?: Maybe<Array<MembershipEntryMethod>>
+  subscription_eq?: Maybe<Scalars['Int']>
+  subscription_gt?: Maybe<Scalars['Int']>
+  subscription_gte?: Maybe<Scalars['Int']>
+  subscription_lt?: Maybe<Scalars['Int']>
+  subscription_lte?: Maybe<Scalars['Int']>
+  subscription_in?: Maybe<Array<Scalars['Int']>>
+  channels_none?: Maybe<ChannelWhereInput>
+  channels_some?: Maybe<ChannelWhereInput>
+  channels_every?: Maybe<ChannelWhereInput>
+  AND?: Maybe<Array<MembershipWhereInput>>
+  OR?: Maybe<Array<MembershipWhereInput>>
+}
+
+export type MembershipWhereUniqueInput = {
+  id?: Maybe<Scalars['ID']>
+  handle?: Maybe<Scalars['String']>
+}
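
The generated *WhereInput / *WhereUniqueInput pairs follow a fixed pattern: scalar fields fan out into `_eq`/`_in`/range/substring variants, relations get `_none`/`_some`/`_every`, and filters compose through `AND`/`OR`. A minimal sketch of building one (TypeScript, using only types from this generated file; the concrete filter values are illustrative, not project data):

// Sketch: a composed membership filter. All fields are optional (Maybe),
// so a partial object like this type-checks against the generated input.
const recentAliceOrBob: MembershipWhereInput = {
  createdInBlock_gte: 100_000,
  OR: [{ handle_startsWith: 'alice' }, { handle_contains: 'bob' }],
}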
+
+export type NextEntityId = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Next deterministic id for entities without a custom id */
+  nextId: Scalars['Float']
+}
+
+export type NextEntityIdConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<NextEntityIdEdge>
+  pageInfo: PageInfo
+}
+
+export type NextEntityIdCreateInput = {
+  nextId: Scalars['Float']
+}
+
+export type NextEntityIdEdge = {
+  node: NextEntityId
+  cursor: Scalars['String']
+}
+
+export enum NextEntityIdOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NextIdAsc = 'nextId_ASC',
+  NextIdDesc = 'nextId_DESC',
+}
+
+export type NextEntityIdUpdateInput = {
+  nextId?: Maybe<Scalars['Float']>
+}
+
+export type NextEntityIdWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  nextId_eq?: Maybe<Scalars['Float']>
+  nextId_gt?: Maybe<Scalars['Float']>
+  nextId_gte?: Maybe<Scalars['Float']>
+  nextId_lt?: Maybe<Scalars['Float']>
+  nextId_lte?: Maybe<Scalars['Float']>
+  nextId_in?: Maybe<Array<Scalars['Float']>>
+  AND?: Maybe<Array<NextEntityIdWhereInput>>
+  OR?: Maybe<Array<NextEntityIdWhereInput>>
+}
+
+export type NextEntityIdWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type PageInfo = {
+  hasNextPage: Scalars['Boolean']
+  hasPreviousPage: Scalars['Boolean']
+  startCursor?: Maybe<Scalars['String']>
+  endCursor?: Maybe<Scalars['String']>
+}
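
The *Connection / *Edge / PageInfo types implement Relay-style cursor pagination: request a page with `first`/`after`, then feed `pageInfo.endCursor` back as `after` while `hasNextPage` holds. A sketch of draining a connection, assuming a hypothetical `fetchPage` callback that executes e.g. `membershipsConnection` and returns the `MembershipConnection` type generated earlier in this file:

// Drains a cursor-paginated connection. `fetchPage` is an assumed helper,
// not part of the generated code.
async function fetchAll(
  fetchPage: (first: number, after?: string) => Promise<MembershipConnection>
): Promise<Membership[]> {
  const nodes: Membership[] = []
  let after: string | undefined
  let hasNext = true
  while (hasNext) {
    const page = await fetchPage(1000, after)
    nodes.push(...page.edges.map((e) => e.node))
    hasNext = page.pageInfo.hasNextPage
    after = page.pageInfo.endCursor ?? undefined
  }
  return nodes
}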
+
+export type ProcessorState = {
+  lastCompleteBlock: Scalars['Float']
+  lastProcessedEvent: Scalars['String']
+  indexerHead: Scalars['Float']
+  chainHead: Scalars['Float']
+}
+
+export type Query = {
+  channelCategories: Array<ChannelCategory>
+  channelCategoryByUniqueInput?: Maybe<ChannelCategory>
+  channelCategoriesConnection: ChannelCategoryConnection
+  channels: Array<Channel>
+  channelByUniqueInput?: Maybe<Channel>
+  channelsConnection: ChannelConnection
+  curatorGroups: Array<CuratorGroup>
+  curatorGroupByUniqueInput?: Maybe<CuratorGroup>
+  curatorGroupsConnection: CuratorGroupConnection
+  dataObjects: Array<DataObject>
+  dataObjectByUniqueInput?: Maybe<DataObject>
+  dataObjectsConnection: DataObjectConnection
+  languages: Array<Language>
+  languageByUniqueInput?: Maybe<Language>
+  languagesConnection: LanguageConnection
+  licenses: Array<License>
+  licenseByUniqueInput?: Maybe<License>
+  licensesConnection: LicenseConnection
+  memberships: Array<Membership>
+  membershipByUniqueInput?: Maybe<Membership>
+  membershipsConnection: MembershipConnection
+  nextEntityIds: Array<NextEntityId>
+  nextEntityIdByUniqueInput?: Maybe<NextEntityId>
+  nextEntityIdsConnection: NextEntityIdConnection
+  channelCategoriesByName: Array<ChannelCategoriesByNameFtsOutput>
+  membersByHandle: Array<MembersByHandleFtsOutput>
+  search: Array<SearchFtsOutput>
+  videoCategoriesByName: Array<VideoCategoriesByNameFtsOutput>
+  videoCategories: Array<VideoCategory>
+  videoCategoryByUniqueInput?: Maybe<VideoCategory>
+  videoCategoriesConnection: VideoCategoryConnection
+  videoMediaEncodings: Array<VideoMediaEncoding>
+  videoMediaEncodingByUniqueInput?: Maybe<VideoMediaEncoding>
+  videoMediaEncodingsConnection: VideoMediaEncodingConnection
+  videoMediaMetadata: Array<VideoMediaMetadata>
+  videoMediaMetadataByUniqueInput?: Maybe<VideoMediaMetadata>
+  videoMediaMetadataConnection: VideoMediaMetadataConnection
+  videos: Array<Video>
+  videoByUniqueInput?: Maybe<Video>
+  videosConnection: VideoConnection
+  workers: Array<Worker>
+  workerByUniqueInput?: Maybe<Worker>
+  workersConnection: WorkerConnection
+}
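
Every entity exposes the same three roots (plural list, `...ByUniqueInput`, `...Connection`), plus the four full-text search queries. The generated `Query...Args` types below keep even hand-written request code well-typed; a sketch, where `execute` is a declared stand-in for whatever GraphQL transport the migration scripts actually use:

// `execute` is an assumed generic request function, declared only to show
// how the generated args types constrain a call site.
declare function execute<TArgs, TData>(query: string, args: TArgs): Promise<TData>

async function getMemberByHandle(handle: string): Promise<Membership | null> {
  // MembershipWhereUniqueInput accepts either `id` or `handle`:
  const args: QueryMembershipByUniqueInputArgs = { where: { handle } }
  const data = await execute<QueryMembershipByUniqueInputArgs, { membershipByUniqueInput?: Membership | null }>(
    'query ($where: MembershipWhereUniqueInput!) { membershipByUniqueInput(where: $where) { id handle } }',
    args
  )
  return data.membershipByUniqueInput ?? null
}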
+
+export type QueryChannelCategoriesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<ChannelCategoryWhereInput>
+  orderBy?: Maybe<Array<ChannelCategoryOrderByInput>>
+}
+
+export type QueryChannelCategoryByUniqueInputArgs = {
+  where: ChannelCategoryWhereUniqueInput
+}
+
+export type QueryChannelCategoriesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<ChannelCategoryWhereInput>
+  orderBy?: Maybe<Array<ChannelCategoryOrderByInput>>
+}
+
+export type QueryChannelsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<ChannelWhereInput>
+  orderBy?: Maybe<Array<ChannelOrderByInput>>
+}
+
+export type QueryChannelByUniqueInputArgs = {
+  where: ChannelWhereUniqueInput
+}
+
+export type QueryChannelsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<ChannelWhereInput>
+  orderBy?: Maybe<Array<ChannelOrderByInput>>
+}
+
+export type QueryCuratorGroupsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<CuratorGroupWhereInput>
+  orderBy?: Maybe<Array<CuratorGroupOrderByInput>>
+}
+
+export type QueryCuratorGroupByUniqueInputArgs = {
+  where: CuratorGroupWhereUniqueInput
+}
+
+export type QueryCuratorGroupsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<CuratorGroupWhereInput>
+  orderBy?: Maybe<Array<CuratorGroupOrderByInput>>
+}
+
+export type QueryDataObjectsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DataObjectWhereInput>
+  orderBy?: Maybe<Array<DataObjectOrderByInput>>
+}
+
+export type QueryDataObjectByUniqueInputArgs = {
+  where: DataObjectWhereUniqueInput
+}
+
+export type QueryDataObjectsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DataObjectWhereInput>
+  orderBy?: Maybe<Array<DataObjectOrderByInput>>
+}
+
+export type QueryLanguagesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<LanguageWhereInput>
+  orderBy?: Maybe<Array<LanguageOrderByInput>>
+}
+
+export type QueryLanguageByUniqueInputArgs = {
+  where: LanguageWhereUniqueInput
+}
+
+export type QueryLanguagesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<LanguageWhereInput>
+  orderBy?: Maybe<Array<LanguageOrderByInput>>
+}
+
+export type QueryLicensesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<LicenseWhereInput>
+  orderBy?: Maybe<Array<LicenseOrderByInput>>
+}
+
+export type QueryLicenseByUniqueInputArgs = {
+  where: LicenseWhereUniqueInput
+}
+
+export type QueryLicensesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<LicenseWhereInput>
+  orderBy?: Maybe<Array<LicenseOrderByInput>>
+}
+
+export type QueryMembershipsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<MembershipWhereInput>
+  orderBy?: Maybe<Array<MembershipOrderByInput>>
+}
+
+export type QueryMembershipByUniqueInputArgs = {
+  where: MembershipWhereUniqueInput
+}
+
+export type QueryMembershipsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<MembershipWhereInput>
+  orderBy?: Maybe<Array<MembershipOrderByInput>>
+}
+
+export type QueryNextEntityIdsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<NextEntityIdWhereInput>
+  orderBy?: Maybe<Array<NextEntityIdOrderByInput>>
+}
+
+export type QueryNextEntityIdByUniqueInputArgs = {
+  where: NextEntityIdWhereUniqueInput
+}
+
+export type QueryNextEntityIdsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<NextEntityIdWhereInput>
+  orderBy?: Maybe<Array<NextEntityIdOrderByInput>>
+}
+
+export type QueryChannelCategoriesByNameArgs = {
+  whereChannelCategory?: Maybe<ChannelCategoryWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
+
+export type QueryMembersByHandleArgs = {
+  whereMembership?: Maybe<MembershipWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
+
+export type QuerySearchArgs = {
+  whereVideo?: Maybe<VideoWhereInput>
+  whereChannel?: Maybe<ChannelWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
+
+export type QueryVideoCategoriesByNameArgs = {
+  whereVideoCategory?: Maybe<VideoCategoryWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
+
+export type QueryVideoCategoriesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoCategoryWhereInput>
+  orderBy?: Maybe<Array<VideoCategoryOrderByInput>>
+}
+
+export type QueryVideoCategoryByUniqueInputArgs = {
+  where: VideoCategoryWhereUniqueInput
+}
+
+export type QueryVideoCategoriesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoCategoryWhereInput>
+  orderBy?: Maybe<Array<VideoCategoryOrderByInput>>
+}
+
+export type QueryVideoMediaEncodingsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoMediaEncodingWhereInput>
+  orderBy?: Maybe<Array<VideoMediaEncodingOrderByInput>>
+}
+
+export type QueryVideoMediaEncodingByUniqueInputArgs = {
+  where: VideoMediaEncodingWhereUniqueInput
+}
+
+export type QueryVideoMediaEncodingsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoMediaEncodingWhereInput>
+  orderBy?: Maybe<Array<VideoMediaEncodingOrderByInput>>
+}
+
+export type QueryVideoMediaMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoMediaMetadataWhereInput>
+  orderBy?: Maybe<Array<VideoMediaMetadataOrderByInput>>
+}
+
+export type QueryVideoMediaMetadataByUniqueInputArgs = {
+  where: VideoMediaMetadataWhereUniqueInput
+}
+
+export type QueryVideoMediaMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoMediaMetadataWhereInput>
+  orderBy?: Maybe<Array<VideoMediaMetadataOrderByInput>>
+}
+
+export type QueryVideosArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoWhereInput>
+  orderBy?: Maybe<Array<VideoOrderByInput>>
+}
+
+export type QueryVideoByUniqueInputArgs = {
+  where: VideoWhereUniqueInput
+}
+
+export type QueryVideosConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoWhereInput>
+  orderBy?: Maybe<Array<VideoOrderByInput>>
+}
+
+export type QueryWorkersArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<WorkerWhereInput>
+  orderBy?: Maybe<Array<WorkerOrderByInput>>
+}
+
+export type QueryWorkerByUniqueInputArgs = {
+  where: WorkerWhereUniqueInput
+}
+
+export type QueryWorkersConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<WorkerWhereInput>
+  orderBy?: Maybe<Array<WorkerOrderByInput>>
+}
+
+export type SearchFtsOutput = {
+  item: SearchSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type SearchSearchResult = Channel | Video
+
+export type StandardDeleteResponse = {
+  id: Scalars['ID']
+}
+
+export type Subscription = {
+  stateSubscription: ProcessorState
+}
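
`ProcessorState`, delivered through the `stateSubscription` subscription, is how a consumer can tell whether the query node has caught up with the chain before trusting its data. A sketch of a readiness check (the lag threshold is illustrative, not a project constant):

// Assumes `state` was received from stateSubscription.
function isQueryNodeCaughtUp(state: ProcessorState, maxLagBlocks = 10): boolean {
  return state.chainHead - state.lastCompleteBlock <= maxLagBlocks
}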
+
+export type Video = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  channel?: Maybe<Channel>
+  channelId?: Maybe<Scalars['String']>
+  category?: Maybe<VideoCategory>
+  categoryId?: Maybe<Scalars['String']>
+  /** The title of the video */
+  title?: Maybe<Scalars['String']>
+  /** The description of the Video */
+  description?: Maybe<Scalars['String']>
+  /** Video duration in seconds */
+  duration?: Maybe<Scalars['Int']>
+  thumbnailPhotoDataObject?: Maybe<DataObject>
+  thumbnailPhotoDataObjectId?: Maybe<Scalars['String']>
+  /** URLs where the asset content can be accessed (if any) */
+  thumbnailPhotoUrls: Array<Scalars['String']>
+  /** Availability meta information */
+  thumbnailPhotoAvailability: AssetAvailability
+  language?: Maybe<Language>
+  languageId?: Maybe<Scalars['String']>
+  /** Whether or not the Video contains marketing */
+  hasMarketing?: Maybe<Scalars['Boolean']>
+  /** If the Video was published on another platform before being published on Joystream - the original publication date */
+  publishedBeforeJoystream?: Maybe<Scalars['DateTime']>
+  /** Whether the Video is supposed to be publicly displayed */
+  isPublic?: Maybe<Scalars['Boolean']>
+  /** Flag signaling whether a video is censored. */
+  isCensored: Scalars['Boolean']
+  /** Whether the Video contains explicit material. */
+  isExplicit?: Maybe<Scalars['Boolean']>
+  license?: Maybe<License>
+  licenseId?: Maybe<Scalars['String']>
+  mediaDataObject?: Maybe<DataObject>
+  mediaDataObjectId?: Maybe<Scalars['String']>
+  /** URLs where the asset content can be accessed (if any) */
+  mediaUrls: Array<Scalars['String']>
+  /** Availability meta information */
+  mediaAvailability: AssetAvailability
+  mediaMetadata?: Maybe<VideoMediaMetadata>
+  mediaMetadataId?: Maybe<Scalars['String']>
+  createdInBlock: Scalars['Int']
+  /** Is video featured or not */
+  isFeatured: Scalars['Boolean']
+}
+
+export type VideoCategoriesByNameFtsOutput = {
+  item: VideoCategoriesByNameSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type VideoCategoriesByNameSearchResult = VideoCategory
+
+export type VideoCategory = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** The name of the category */
+  name?: Maybe<Scalars['String']>
+  videos: Array<Video>
+  createdInBlock: Scalars['Int']
+}
+
+export type VideoCategoryConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoCategoryEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoCategoryCreateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock: Scalars['Float']
+}
+
+export type VideoCategoryEdge = {
+  node: VideoCategory
+  cursor: Scalars['String']
+}
+
+export enum VideoCategoryOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NameAsc = 'name_ASC',
+  NameDesc = 'name_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type VideoCategoryUpdateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type VideoCategoryWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  name_eq?: Maybe<Scalars['String']>
+  name_contains?: Maybe<Scalars['String']>
+  name_startsWith?: Maybe<Scalars['String']>
+  name_endsWith?: Maybe<Scalars['String']>
+  name_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  videos_none?: Maybe<VideoWhereInput>
+  videos_some?: Maybe<VideoWhereInput>
+  videos_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<VideoCategoryWhereInput>>
+  OR?: Maybe<Array<VideoCategoryWhereInput>>
+}
+
+export type VideoCategoryWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type VideoConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoCreateInput = {
+  channel?: Maybe<Scalars['ID']>
+  channelId?: Maybe<Scalars['ID']>
+  category?: Maybe<Scalars['ID']>
+  categoryId?: Maybe<Scalars['ID']>
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  duration?: Maybe<Scalars['Float']>
+  thumbnailPhotoDataObject?: Maybe<Scalars['ID']>
+  thumbnailPhotoDataObjectId?: Maybe<Scalars['ID']>
+  thumbnailPhotoUrls: Array<Scalars['String']>
+  thumbnailPhotoAvailability: AssetAvailability
+  language?: Maybe<Scalars['ID']>
+  languageId?: Maybe<Scalars['ID']>
+  hasMarketing?: Maybe<Scalars['Boolean']>
+  publishedBeforeJoystream?: Maybe<Scalars['DateTime']>
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored: Scalars['Boolean']
+  isExplicit?: Maybe<Scalars['Boolean']>
+  license?: Maybe<Scalars['ID']>
+  licenseId?: Maybe<Scalars['ID']>
+  mediaDataObject?: Maybe<Scalars['ID']>
+  mediaDataObjectId?: Maybe<Scalars['ID']>
+  mediaUrls: Array<Scalars['String']>
+  mediaAvailability: AssetAvailability
+  mediaMetadata?: Maybe<Scalars['ID']>
+  mediaMetadataId?: Maybe<Scalars['ID']>
+  createdInBlock: Scalars['Float']
+  isFeatured: Scalars['Boolean']
+}
+
+export type VideoEdge = {
+  node: Video
+  cursor: Scalars['String']
+}
+
+export type VideoMediaEncoding = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Encoding of the video media object */
+  codecName?: Maybe<Scalars['String']>
+  /** Media container format */
+  container?: Maybe<Scalars['String']>
+  /** Content MIME type */
+  mimeMediaType?: Maybe<Scalars['String']>
+  videomediametadataencoding?: Maybe<Array<VideoMediaMetadata>>
+}
+
+export type VideoMediaEncodingConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoMediaEncodingEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoMediaEncodingCreateInput = {
+  codecName?: Maybe<Scalars['String']>
+  container?: Maybe<Scalars['String']>
+  mimeMediaType?: Maybe<Scalars['String']>
+}
+
+export type VideoMediaEncodingEdge = {
+  node: VideoMediaEncoding
+  cursor: Scalars['String']
+}
+
+export enum VideoMediaEncodingOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CodecNameAsc = 'codecName_ASC',
+  CodecNameDesc = 'codecName_DESC',
+  ContainerAsc = 'container_ASC',
+  ContainerDesc = 'container_DESC',
+  MimeMediaTypeAsc = 'mimeMediaType_ASC',
+  MimeMediaTypeDesc = 'mimeMediaType_DESC',
+}
+
+export type VideoMediaEncodingUpdateInput = {
+  codecName?: Maybe<Scalars['String']>
+  container?: Maybe<Scalars['String']>
+  mimeMediaType?: Maybe<Scalars['String']>
+}
+
+export type VideoMediaEncodingWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  codecName_eq?: Maybe<Scalars['String']>
+  codecName_contains?: Maybe<Scalars['String']>
+  codecName_startsWith?: Maybe<Scalars['String']>
+  codecName_endsWith?: Maybe<Scalars['String']>
+  codecName_in?: Maybe<Array<Scalars['String']>>
+  container_eq?: Maybe<Scalars['String']>
+  container_contains?: Maybe<Scalars['String']>
+  container_startsWith?: Maybe<Scalars['String']>
+  container_endsWith?: Maybe<Scalars['String']>
+  container_in?: Maybe<Array<Scalars['String']>>
+  mimeMediaType_eq?: Maybe<Scalars['String']>
+  mimeMediaType_contains?: Maybe<Scalars['String']>
+  mimeMediaType_startsWith?: Maybe<Scalars['String']>
+  mimeMediaType_endsWith?: Maybe<Scalars['String']>
+  mimeMediaType_in?: Maybe<Array<Scalars['String']>>
+  videomediametadataencoding_none?: Maybe<VideoMediaMetadataWhereInput>
+  videomediametadataencoding_some?: Maybe<VideoMediaMetadataWhereInput>
+  videomediametadataencoding_every?: Maybe<VideoMediaMetadataWhereInput>
+  AND?: Maybe<Array<VideoMediaEncodingWhereInput>>
+  OR?: Maybe<Array<VideoMediaEncodingWhereInput>>
+}
+
+export type VideoMediaEncodingWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type VideoMediaMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  encoding?: Maybe<VideoMediaEncoding>
+  encodingId?: Maybe<Scalars['String']>
+  /** Video media width in pixels */
+  pixelWidth?: Maybe<Scalars['Int']>
+  /** Video media height in pixels */
+  pixelHeight?: Maybe<Scalars['Int']>
+  /** Video media size in bytes */
+  size?: Maybe<Scalars['Float']>
+  video?: Maybe<Video>
+  createdInBlock: Scalars['Int']
+}
+
+export type VideoMediaMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoMediaMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoMediaMetadataCreateInput = {
+  encoding?: Maybe<Scalars['ID']>
+  encodingId?: Maybe<Scalars['ID']>
+  pixelWidth?: Maybe<Scalars['Float']>
+  pixelHeight?: Maybe<Scalars['Float']>
+  size?: Maybe<Scalars['Float']>
+  createdInBlock: Scalars['Float']
+}
+
+export type VideoMediaMetadataEdge = {
+  node: VideoMediaMetadata
+  cursor: Scalars['String']
+}
+
+export enum VideoMediaMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  EncodingAsc = 'encoding_ASC',
+  EncodingDesc = 'encoding_DESC',
+  EncodingIdAsc = 'encodingId_ASC',
+  EncodingIdDesc = 'encodingId_DESC',
+  PixelWidthAsc = 'pixelWidth_ASC',
+  PixelWidthDesc = 'pixelWidth_DESC',
+  PixelHeightAsc = 'pixelHeight_ASC',
+  PixelHeightDesc = 'pixelHeight_DESC',
+  SizeAsc = 'size_ASC',
+  SizeDesc = 'size_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type VideoMediaMetadataUpdateInput = {
+  encoding?: Maybe<Scalars['ID']>
+  encodingId?: Maybe<Scalars['ID']>
+  pixelWidth?: Maybe<Scalars['Float']>
+  pixelHeight?: Maybe<Scalars['Float']>
+  size?: Maybe<Scalars['Float']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type VideoMediaMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  encodingId_eq?: Maybe<Scalars['ID']>
+  encodingId_in?: Maybe<Array<Scalars['ID']>>
+  pixelWidth_eq?: Maybe<Scalars['Int']>
+  pixelWidth_gt?: Maybe<Scalars['Int']>
+  pixelWidth_gte?: Maybe<Scalars['Int']>
+  pixelWidth_lt?: Maybe<Scalars['Int']>
+  pixelWidth_lte?: Maybe<Scalars['Int']>
+  pixelWidth_in?: Maybe<Array<Scalars['Int']>>
+  pixelHeight_eq?: Maybe<Scalars['Int']>
+  pixelHeight_gt?: Maybe<Scalars['Int']>
+  pixelHeight_gte?: Maybe<Scalars['Int']>
+  pixelHeight_lt?: Maybe<Scalars['Int']>
+  pixelHeight_lte?: Maybe<Scalars['Int']>
+  pixelHeight_in?: Maybe<Array<Scalars['Int']>>
+  size_eq?: Maybe<Scalars['Float']>
+  size_gt?: Maybe<Scalars['Float']>
+  size_gte?: Maybe<Scalars['Float']>
+  size_lt?: Maybe<Scalars['Float']>
+  size_lte?: Maybe<Scalars['Float']>
+  size_in?: Maybe<Array<Scalars['Float']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  encoding?: Maybe<VideoMediaEncodingWhereInput>
+  video?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<VideoMediaMetadataWhereInput>>
+  OR?: Maybe<Array<VideoMediaMetadataWhereInput>>
+}
+
+export type VideoMediaMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum VideoOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  ChannelAsc = 'channel_ASC',
+  ChannelDesc = 'channel_DESC',
+  ChannelIdAsc = 'channelId_ASC',
+  ChannelIdDesc = 'channelId_DESC',
+  CategoryAsc = 'category_ASC',
+  CategoryDesc = 'category_DESC',
+  CategoryIdAsc = 'categoryId_ASC',
+  CategoryIdDesc = 'categoryId_DESC',
+  TitleAsc = 'title_ASC',
+  TitleDesc = 'title_DESC',
+  DescriptionAsc = 'description_ASC',
+  DescriptionDesc = 'description_DESC',
+  DurationAsc = 'duration_ASC',
+  DurationDesc = 'duration_DESC',
+  ThumbnailPhotoDataObjectAsc = 'thumbnailPhotoDataObject_ASC',
+  ThumbnailPhotoDataObjectDesc = 'thumbnailPhotoDataObject_DESC',
+  ThumbnailPhotoDataObjectIdAsc = 'thumbnailPhotoDataObjectId_ASC',
+  ThumbnailPhotoDataObjectIdDesc = 'thumbnailPhotoDataObjectId_DESC',
+  ThumbnailPhotoAvailabilityAsc = 'thumbnailPhotoAvailability_ASC',
+  ThumbnailPhotoAvailabilityDesc = 'thumbnailPhotoAvailability_DESC',
+  LanguageAsc = 'language_ASC',
+  LanguageDesc = 'language_DESC',
+  LanguageIdAsc = 'languageId_ASC',
+  LanguageIdDesc = 'languageId_DESC',
+  HasMarketingAsc = 'hasMarketing_ASC',
+  HasMarketingDesc = 'hasMarketing_DESC',
+  PublishedBeforeJoystreamAsc = 'publishedBeforeJoystream_ASC',
+  PublishedBeforeJoystreamDesc = 'publishedBeforeJoystream_DESC',
+  IsPublicAsc = 'isPublic_ASC',
+  IsPublicDesc = 'isPublic_DESC',
+  IsCensoredAsc = 'isCensored_ASC',
+  IsCensoredDesc = 'isCensored_DESC',
+  IsExplicitAsc = 'isExplicit_ASC',
+  IsExplicitDesc = 'isExplicit_DESC',
+  LicenseAsc = 'license_ASC',
+  LicenseDesc = 'license_DESC',
+  LicenseIdAsc = 'licenseId_ASC',
+  LicenseIdDesc = 'licenseId_DESC',
+  MediaDataObjectAsc = 'mediaDataObject_ASC',
+  MediaDataObjectDesc = 'mediaDataObject_DESC',
+  MediaDataObjectIdAsc = 'mediaDataObjectId_ASC',
+  MediaDataObjectIdDesc = 'mediaDataObjectId_DESC',
+  MediaAvailabilityAsc = 'mediaAvailability_ASC',
+  MediaAvailabilityDesc = 'mediaAvailability_DESC',
+  MediaMetadataAsc = 'mediaMetadata_ASC',
+  MediaMetadataDesc = 'mediaMetadata_DESC',
+  MediaMetadataIdAsc = 'mediaMetadataId_ASC',
+  MediaMetadataIdDesc = 'mediaMetadataId_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+  IsFeaturedAsc = 'isFeatured_ASC',
+  IsFeaturedDesc = 'isFeatured_DESC',
+}
+
+export type VideoUpdateInput = {
+  channel?: Maybe<Scalars['ID']>
+  channelId?: Maybe<Scalars['ID']>
+  category?: Maybe<Scalars['ID']>
+  categoryId?: Maybe<Scalars['ID']>
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  duration?: Maybe<Scalars['Float']>
+  thumbnailPhotoDataObject?: Maybe<Scalars['ID']>
+  thumbnailPhotoDataObjectId?: Maybe<Scalars['ID']>
+  thumbnailPhotoUrls?: Maybe<Array<Scalars['String']>>
+  thumbnailPhotoAvailability?: Maybe<AssetAvailability>
+  language?: Maybe<Scalars['ID']>
+  languageId?: Maybe<Scalars['ID']>
+  hasMarketing?: Maybe<Scalars['Boolean']>
+  publishedBeforeJoystream?: Maybe<Scalars['DateTime']>
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored?: Maybe<Scalars['Boolean']>
+  isExplicit?: Maybe<Scalars['Boolean']>
+  license?: Maybe<Scalars['ID']>
+  licenseId?: Maybe<Scalars['ID']>
+  mediaDataObject?: Maybe<Scalars['ID']>
+  mediaDataObjectId?: Maybe<Scalars['ID']>
+  mediaUrls?: Maybe<Array<Scalars['String']>>
+  mediaAvailability?: Maybe<AssetAvailability>
+  mediaMetadata?: Maybe<Scalars['ID']>
+  mediaMetadataId?: Maybe<Scalars['ID']>
+  createdInBlock?: Maybe<Scalars['Float']>
+  isFeatured?: Maybe<Scalars['Boolean']>
+}
+
+export type VideoWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  channelId_eq?: Maybe<Scalars['ID']>
+  channelId_in?: Maybe<Array<Scalars['ID']>>
+  categoryId_eq?: Maybe<Scalars['ID']>
+  categoryId_in?: Maybe<Array<Scalars['ID']>>
+  title_eq?: Maybe<Scalars['String']>
+  title_contains?: Maybe<Scalars['String']>
+  title_startsWith?: Maybe<Scalars['String']>
+  title_endsWith?: Maybe<Scalars['String']>
+  title_in?: Maybe<Array<Scalars['String']>>
+  description_eq?: Maybe<Scalars['String']>
+  description_contains?: Maybe<Scalars['String']>
+  description_startsWith?: Maybe<Scalars['String']>
+  description_endsWith?: Maybe<Scalars['String']>
+  description_in?: Maybe<Array<Scalars['String']>>
+  duration_eq?: Maybe<Scalars['Int']>
+  duration_gt?: Maybe<Scalars['Int']>
+  duration_gte?: Maybe<Scalars['Int']>
+  duration_lt?: Maybe<Scalars['Int']>
+  duration_lte?: Maybe<Scalars['Int']>
+  duration_in?: Maybe<Array<Scalars['Int']>>
+  thumbnailPhotoDataObjectId_eq?: Maybe<Scalars['ID']>
+  thumbnailPhotoDataObjectId_in?: Maybe<Array<Scalars['ID']>>
+  thumbnailPhotoUrls_containsAll?: Maybe<Array<Scalars['String']>>
+  thumbnailPhotoUrls_containsNone?: Maybe<Array<Scalars['String']>>
+  thumbnailPhotoUrls_containsAny?: Maybe<Array<Scalars['String']>>
+  thumbnailPhotoAvailability_eq?: Maybe<AssetAvailability>
+  thumbnailPhotoAvailability_in?: Maybe<Array<AssetAvailability>>
+  languageId_eq?: Maybe<Scalars['ID']>
+  languageId_in?: Maybe<Array<Scalars['ID']>>
+  hasMarketing_eq?: Maybe<Scalars['Boolean']>
+  hasMarketing_in?: Maybe<Array<Scalars['Boolean']>>
+  publishedBeforeJoystream_eq?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_lt?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_lte?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_gt?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_gte?: Maybe<Scalars['DateTime']>
+  isPublic_eq?: Maybe<Scalars['Boolean']>
+  isPublic_in?: Maybe<Array<Scalars['Boolean']>>
+  isCensored_eq?: Maybe<Scalars['Boolean']>
+  isCensored_in?: Maybe<Array<Scalars['Boolean']>>
+  isExplicit_eq?: Maybe<Scalars['Boolean']>
+  isExplicit_in?: Maybe<Array<Scalars['Boolean']>>
+  licenseId_eq?: Maybe<Scalars['ID']>
+  licenseId_in?: Maybe<Array<Scalars['ID']>>
+  mediaDataObjectId_eq?: Maybe<Scalars['ID']>
+  mediaDataObjectId_in?: Maybe<Array<Scalars['ID']>>
+  mediaUrls_containsAll?: Maybe<Array<Scalars['String']>>
+  mediaUrls_containsNone?: Maybe<Array<Scalars['String']>>
+  mediaUrls_containsAny?: Maybe<Array<Scalars['String']>>
+  mediaAvailability_eq?: Maybe<AssetAvailability>
+  mediaAvailability_in?: Maybe<Array<AssetAvailability>>
+  mediaMetadataId_eq?: Maybe<Scalars['ID']>
+  mediaMetadataId_in?: Maybe<Array<Scalars['ID']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  isFeatured_eq?: Maybe<Scalars['Boolean']>
+  isFeatured_in?: Maybe<Array<Scalars['Boolean']>>
+  channel?: Maybe<ChannelWhereInput>
+  category?: Maybe<VideoCategoryWhereInput>
+  thumbnailPhotoDataObject?: Maybe<DataObjectWhereInput>
+  language?: Maybe<LanguageWhereInput>
+  license?: Maybe<LicenseWhereInput>
+  mediaDataObject?: Maybe<DataObjectWhereInput>
+  mediaMetadata?: Maybe<VideoMediaMetadataWhereInput>
+  AND?: Maybe<Array<VideoWhereInput>>
+  OR?: Maybe<Array<VideoWhereInput>>
+}
+
+export type VideoWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type Worker = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Whether the worker is still active */
+  isActive: Scalars['Boolean']
+  /** Runtime identifier */
+  workerId: Scalars['String']
+  /** Associated working group */
+  type: WorkerType
+  /** Custom metadata set by the provider */
+  metadata?: Maybe<Scalars['String']>
+  dataObjects: Array<DataObject>
+}
+
+export type WorkerConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<WorkerEdge>
+  pageInfo: PageInfo
+}
+
+export type WorkerCreateInput = {
+  isActive: Scalars['Boolean']
+  workerId: Scalars['String']
+  type: WorkerType
+  metadata?: Maybe<Scalars['String']>
+}
+
+export type WorkerEdge = {
+  node: Worker
+  cursor: Scalars['String']
+}
+
+export enum WorkerOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsActiveAsc = 'isActive_ASC',
+  IsActiveDesc = 'isActive_DESC',
+  WorkerIdAsc = 'workerId_ASC',
+  WorkerIdDesc = 'workerId_DESC',
+  TypeAsc = 'type_ASC',
+  TypeDesc = 'type_DESC',
+  MetadataAsc = 'metadata_ASC',
+  MetadataDesc = 'metadata_DESC',
+}
+
+export enum WorkerType {
+  Gateway = 'GATEWAY',
+  Storage = 'STORAGE',
+}
+
+export type WorkerUpdateInput = {
+  isActive?: Maybe<Scalars['Boolean']>
+  workerId?: Maybe<Scalars['String']>
+  type?: Maybe<WorkerType>
+  metadata?: Maybe<Scalars['String']>
+}
+
+export type WorkerWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  isActive_eq?: Maybe<Scalars['Boolean']>
+  isActive_in?: Maybe<Array<Scalars['Boolean']>>
+  workerId_eq?: Maybe<Scalars['String']>
+  workerId_contains?: Maybe<Scalars['String']>
+  workerId_startsWith?: Maybe<Scalars['String']>
+  workerId_endsWith?: Maybe<Scalars['String']>
+  workerId_in?: Maybe<Array<Scalars['String']>>
+  type_eq?: Maybe<WorkerType>
+  type_in?: Maybe<Array<WorkerType>>
+  metadata_eq?: Maybe<Scalars['String']>
+  metadata_contains?: Maybe<Scalars['String']>
+  metadata_startsWith?: Maybe<Scalars['String']>
+  metadata_endsWith?: Maybe<Scalars['String']>
+  metadata_in?: Maybe<Array<Scalars['String']>>
+  dataObjects_none?: Maybe<DataObjectWhereInput>
+  dataObjects_some?: Maybe<DataObjectWhereInput>
+  dataObjects_every?: Maybe<DataObjectWhereInput>
+  AND?: Maybe<Array<WorkerWhereInput>>
+  OR?: Maybe<Array<WorkerWhereInput>>
+}
+
+export type WorkerWhereUniqueInput = {
+  id: Scalars['ID']
+}

+ 122 - 0
utils/migration-scripts/src/sumer-giza/sumer-query-node/queries/queries.graphql

@@ -0,0 +1,122 @@
+fragment VideoCategoryFields on VideoCategory {
+  id
+  name
+}
+
+fragment ChannelCategoryFields on ChannelCategory {
+  id
+  name
+}
+
+fragment DataObjectFields on DataObject {
+  id
+  joystreamContentId
+  size
+  liaisonJudgement
+}
+
+fragment VideoFields on Video {
+  id
+  categoryId
+  title
+  description
+  duration
+  thumbnailPhotoDataObject {
+    ...DataObjectFields
+  }
+  language {
+    iso
+  }
+  hasMarketing
+  publishedBeforeJoystream
+  isPublic
+  isCensored
+  isExplicit
+  license {
+    code
+    attribution
+    customText
+  }
+  mediaDataObject {
+    ...DataObjectFields
+  }
+  mediaMetadata {
+    encoding {
+      codecName
+      container
+      mimeMediaType
+    }
+    pixelWidth
+    pixelHeight
+    size
+  }
+  isFeatured
+  channel {
+    id
+    ownerMember {
+      id
+      controllerAccount
+    }
+  }
+}
+
+fragment ChannelFields on Channel {
+  id
+  ownerMember {
+    id
+    controllerAccount
+  }
+  categoryId
+  rewardAccount
+  title
+  description
+  coverPhotoDataObject {
+    ...DataObjectFields
+  }
+  avatarPhotoDataObject {
+    ...DataObjectFields
+  }
+  isPublic
+  isCensored
+  language {
+    iso
+  }
+  videos {
+    id
+  }
+}
+
+fragment WorkerFields on Worker {
+  id
+  metadata
+}
+
+query getChannelsByIds($ids: [ID!]) {
+  channels(where: { id_in: $ids }, limit: 1000) {
+    ...ChannelFields
+  }
+}
+
+query getVideosByIds($ids: [ID!]) {
+  videos(where: { id_in: $ids }, limit: 1000) {
+    ...VideoFields
+  }
+}
+
+query getVideoCategories {
+  videoCategories {
+    ...VideoCategoryFields
+  }
+}
+
+query getChannelsCategories {
+  channelCategories {
+    ...ChannelCategoryFields
+  }
+}
+
+query getStorageWorkers {
+  workers(where: { type_eq: STORAGE }) {
+    ...WorkerFields
+  }
+}
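
Each batched query above filters by `id_in` with a hard `limit: 1000`, so callers have to chunk larger id sets. A sketch of executing `getVideosByIds` with a generic client — `graphql-request` and the endpoint URL are assumptions here, standing in for whatever transport the migration scripts' api wrapper uses:

import { GraphQLClient } from 'graphql-request'

// Endpoint is an assumption; substitute the real sumer query node URL.
const client = new GraphQLClient('http://localhost:8081/graphql')

async function getVideosByIds(ids: string[]): Promise<{ id: string; title?: string | null }[]> {
  const query = `
    query getVideosByIds($ids: [ID!]) {
      videos(where: { id_in: $ids }, limit: 1000) {
        id
        title
      }
    }
  `
  // Batches of more than 1000 ids must be split before calling this.
  const data = await client.request<{ videos: { id: string; title?: string | null }[] }>(query, { ids })
  return data.videos
}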

+ 21 - 0
utils/migration-scripts/tsconfig.json

@@ -0,0 +1,21 @@
+{
+  "compilerOptions": {
+    "declaration": true,
+    "importHelpers": true,
+    "module": "commonjs",
+    "outDir": "lib",
+    "rootDir": "src",
+    "strict": true,
+    "target": "es2017",
+    "esModuleInterop": true,
+    "types": ["node"],
+    "noUnusedLocals": true,
+    "baseUrl": "./",
+    "paths": {
+      "@polkadot/types/augment": ["../../types/augment-codec/augment-types.ts"],
+      "@polkadot/api/augment": ["../../types/augment-codec/augment-api.ts"]
+    },
+    "skipLibCheck": true
+  },
+  "include": ["src/**/*"]
+}
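
The `paths` entries redirect the polkadot.js augmentation modules to the repo's own generated definitions, so the compiler types `api.query`/`api.tx` against the Joystream runtime rather than stock Substrate metadata. A sketch of the side-effect imports this mapping enables (the WebSocket endpoint is an assumption):

// These imports resolve, via the tsconfig `paths` above, to the local
// augment-codec files rather than the published defaults:
import '@polkadot/api/augment'
import '@polkadot/types/augment'
import { ApiPromise, WsProvider } from '@polkadot/api'

async function connect(endpoint = 'ws://localhost:9944'): Promise<ApiPromise> {
  return ApiPromise.create({ provider: new WsProvider(endpoint) })
}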

+ 159 - 37
yarn.lock

@@ -3035,19 +3035,19 @@
   resolved "https://registry.yarnpkg.com/@josephg/resolvable/-/resolvable-1.0.1.tgz#69bc4db754d79e1a2f17a650d3466e038d94a5eb"
   integrity sha512-CtzORUwWTTOTqfVtHaKRJ0I1kNQd1bpn3sUh8I3nJDVY+5/M/Oe1DnEWzPQvqq/xPIIkzzzIP7mfCoAjFRvDhg==
 
-"@joystream/hydra-common@3.1.0-alpha.13", "@joystream/hydra-common@^3.1.0-alpha.13":
-  version "3.1.0-alpha.13"
-  resolved "https://registry.yarnpkg.com/@joystream/hydra-common/-/hydra-common-3.1.0-alpha.13.tgz#90292e8aa7cd79014f01faa21a193d39ff403522"
-  integrity sha512-867WpaNRp8qoBakOSPNCtM3KiLipYDk92mWC659qFCG3bjLLTJmnv4TEpfvUMF28oteeis0KxSutu5k5GLao7A==
+"@joystream/hydra-common@3.1.0-alpha.16", "@joystream/hydra-common@^3.1.0-alpha.16":
+  version "3.1.0-alpha.16"
+  resolved "https://registry.yarnpkg.com/@joystream/hydra-common/-/hydra-common-3.1.0-alpha.16.tgz#ae9c140d863c82f7b42e9265fe644125d1ec6cf9"
+  integrity sha512-XG/UvwCBvjjftbyemWGFUzvyB+gYHCZVHtbfthZaaPnq50L+wyCnjWpiFF/PFT3I8cMwj3HT6i8G2lQPdxRVfw==
   dependencies:
     bn.js "^5.1.3"
 
-"@joystream/hydra-db-utils@3.1.0-alpha.13", "@joystream/hydra-db-utils@^3.1.0-alpha.13":
-  version "3.1.0-alpha.13"
-  resolved "https://registry.yarnpkg.com/@joystream/hydra-db-utils/-/hydra-db-utils-3.1.0-alpha.13.tgz#b023a081c63009d7d5524b2fcf6256249c2b55d1"
-  integrity sha512-VbDfDpuKt1Fn2644eGmxoRiS7OhRMJLoSEhbSAbtowhfDPAvWXHpYrM9Hbex+hiqVXF+UENry+wvA6d2rYc8bA==
+"@joystream/hydra-db-utils@3.1.0-alpha.16", "@joystream/hydra-db-utils@^3.1.0-alpha.16":
+  version "3.1.0-alpha.16"
+  resolved "https://registry.yarnpkg.com/@joystream/hydra-db-utils/-/hydra-db-utils-3.1.0-alpha.16.tgz#fbed064cc47f232fb38bb48db03b82bdc357b2c9"
+  integrity sha512-4ABM4ZQ3/ZIDHe0g3u/FrPgef8wnH0dpfTnYx4qtepbr2iHHxW6b4k038Ubpuo0mvJRnnabE0+q4mPYsBGuwoQ==
   dependencies:
-    "@joystream/hydra-common" "^3.1.0-alpha.13"
+    "@joystream/hydra-common" "^3.1.0-alpha.16"
     "@types/ioredis" "^4.17.4"
     bn.js "^5.1.3"
     ioredis "^4.17.3"
@@ -3055,13 +3055,13 @@
     shortid "^2.2.16"
     typeorm "^0.2.25"
 
-"@joystream/hydra-processor@3.1.0-alpha.13":
-  version "3.1.0-alpha.13"
-  resolved "https://registry.yarnpkg.com/@joystream/hydra-processor/-/hydra-processor-3.1.0-alpha.13.tgz#c41687a6aa4ebcc163ada4184ed56110988a3e50"
-  integrity sha512-8VJXb0sJbwUapvgNT39UnSvnOaCw+zZw8vS/K3Qcl9vm1MHlS6Jwxkmkkwrn/mrfVNrwPBYCtjYAFLivUzhIsg==
+"@joystream/hydra-processor@3.1.0-alpha.16":
+  version "3.1.0-alpha.16"
+  resolved "https://registry.yarnpkg.com/@joystream/hydra-processor/-/hydra-processor-3.1.0-alpha.16.tgz#c1f5d6e879d1b39c41706b7dc41069a1e1dcd78a"
+  integrity sha512-hUzl2oR2FCvNd1/no+lX6EPdRNCSZChaAQT8lHBb296Ol/fxCmgW3Faudyrv289Zj3yQNP4EaHCrPHBhvpPG4Q==
   dependencies:
-    "@joystream/hydra-common" "^3.1.0-alpha.13"
-    "@joystream/hydra-db-utils" "^3.1.0-alpha.13"
+    "@joystream/hydra-common" "^3.1.0-alpha.16"
+    "@joystream/hydra-db-utils" "^3.1.0-alpha.16"
     "@oclif/command" "^1.8.0"
     "@oclif/config" "^1"
     "@oclif/errors" "^1.3.3"
@@ -3110,7 +3110,7 @@
     lodash "^4.17.15"
     moment "^2.24.0"
 
-"@joystream/warthog@2.39.0", "@joystream/warthog@^2.40.0":
+"@joystream/warthog@2.39.0", "@joystream/warthog@~2.41.2":
   version "2.39.0"
   resolved "https://registry.yarnpkg.com/@joystream/warthog/-/warthog-2.39.0.tgz#3587b94953aed929bff809a7ba763d495e03170c"
   integrity sha512-gwZ8oBqcN7Xez8BfBDeDIyMhZ7VcL2paMuj0n3qOplyH+sxsBwgBemDzV6RThmAGi3GOhVVQJqOMq3w6siWqzA==
@@ -4118,6 +4118,22 @@
     widest-line "^3.1.0"
     wrap-ansi "^4.0.0"
 
+"@oclif/plugin-help@^3.2.3":
+  version "3.2.3"
+  resolved "https://registry.yarnpkg.com/@oclif/plugin-help/-/plugin-help-3.2.3.tgz#cd24010e7eb326782843d3aa6d6b5a4affebb2c3"
+  integrity sha512-l2Pd0lbOMq4u/7xsl9hqISFqyR9gWEz/8+05xmrXFr67jXyS6EUCQB+mFBa0wepltrmJu0sAFg9AvA2mLaMMqQ==
+  dependencies:
+    "@oclif/command" "^1.5.20"
+    "@oclif/config" "^1.15.1"
+    "@oclif/errors" "^1.2.2"
+    chalk "^4.1.0"
+    indent-string "^4.0.0"
+    lodash.template "^4.4.0"
+    string-width "^4.2.0"
+    strip-ansi "^6.0.0"
+    widest-line "^3.1.0"
+    wrap-ansi "^4.0.0"
+
 "@oclif/plugin-not-found@^1.2.4":
   version "1.2.4"
   resolved "https://registry.yarnpkg.com/@oclif/plugin-not-found/-/plugin-not-found-1.2.4.tgz#160108c82f0aa10f4fb52cee4e0135af34b7220b"
@@ -5509,9 +5525,9 @@
     "@types/babel__traverse" "*"
 
 "@types/babel__core@^7.1.0":
-  version "7.1.16"
-  resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.1.16.tgz#bc12c74b7d65e82d29876b5d0baf5c625ac58702"
-  integrity sha512-EAEHtisTMM+KaKwfWdC3oyllIqswlznXCIVCt7/oRNrh+DhgT4UEBNC/jlADNjvw7UnfbcdkGQcPVZ1xYiLcrQ==
+  version "7.1.18"
+  resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.1.18.tgz#1a29abcc411a9c05e2094c98f9a1b7da6cdf49f8"
+  integrity sha512-S7unDjm/C7z2A2R9NzfKCK1I+BAALDtxEmsJBwlB3EzNfb929ykjL++1CK9LO++EIp2fQrC8O+BwjKvz6UeDyQ==
   dependencies:
     "@babel/parser" "^7.1.0"
     "@babel/types" "^7.0.0"
@@ -6089,6 +6105,11 @@
   dependencies:
     node-cache "*"
 
+"@types/node-cleanup@^2.1.2":
+  version "2.1.2"
+  resolved "https://registry.yarnpkg.com/@types/node-cleanup/-/node-cleanup-2.1.2.tgz#545c6909b864df699d46f53ae8d59cabdcb51665"
+  integrity sha512-HTksao/sZs9nqxKD/vWOR3WxSrQsyJlBPEFFCgq9lMmhRsuQF+2p6hy+7FaCYn6lOeiDc3ywI8jDQ2bz5y6m8w==
+
 "@types/node-emoji@^1.8.1":
   version "1.8.1"
   resolved "https://registry.yarnpkg.com/@types/node-emoji/-/node-emoji-1.8.1.tgz#689cb74fdf6e84309bcafce93a135dfecd01de3f"
@@ -6153,6 +6174,11 @@
   resolved "https://registry.yarnpkg.com/@types/node/-/node-12.20.11.tgz#980832cd56efafff8c18aa148c4085eb02a483f4"
   integrity sha512-gema+apZ6qLQK7k7F0dGkGCWQYsL0qqKORWOQO6tq46q+x+1C0vbOiOqOwRVlh4RAdbQwV/j/ryr3u5NOG1fPQ==
 
+"@types/node@^14":
+  version "14.17.32"
+  resolved "https://registry.yarnpkg.com/@types/node/-/node-14.17.32.tgz#2ca61c9ef8c77f6fa1733be9e623ceb0d372ad96"
+  integrity sha512-JcII3D5/OapPGx+eJ+Ik1SQGyt6WvuqdRfh9jUwL6/iHGjmyOriBDciBUu7lEIBTL2ijxwrR70WUnw5AEDmFvQ==
+
 "@types/node@^9.6.4":
   version "9.6.61"
   resolved "https://registry.yarnpkg.com/@types/node/-/node-9.6.61.tgz#29f124eddd41c4c74281bd0b455d689109fc2a2d"
@@ -6417,6 +6443,13 @@
     "@types/mime" "^1"
     "@types/node" "*"
 
+"@types/sharp@^0.29.2":
+  version "0.29.2"
+  resolved "https://registry.yarnpkg.com/@types/sharp/-/sharp-0.29.2.tgz#b4e932e982e258d1013236c8b4bcc14f9883c9a3"
+  integrity sha512-tIbMvtPa8kMyFMKNhpsPT1HO3CgXLuiCAA8bxHAGAZLyALpYvYc4hUu3pu0+3oExQA5LwvHrWp+OilgXCYVQgg==
+  dependencies:
+    "@types/node" "*"
+
 "@types/shortid@^0.0.29":
   version "0.0.29"
   resolved "https://registry.yarnpkg.com/@types/shortid/-/shortid-0.0.29.tgz#8093ee0416a6e2bf2aa6338109114b3fbffa0e9b"
@@ -8469,9 +8502,9 @@ aws-credstash@^3.0.0:
     debug "^4.3.1"
 
 aws-sdk@^2.567.0:
-  version "2.984.0"
-  resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.984.0.tgz#fee0e73d63826a413cc7053c5daeb518d3261561"
-  integrity sha512-wFwNKhlO03V7UnpIge2qT/gYOMvGUlmVuFgF2gQRIkt6lWYvnf8/QDTCKZLhGBpC8/mml10m0CM3khMNwU1KVQ==
+  version "2.1049.0"
+  resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.1049.0.tgz#8146dcdf3a1ab603e50ff961169ee8abc537d48e"
+  integrity sha512-+wls9iNlotMeoZepwgR0yPzXsjXzr2ijoi5ERmsPWfMTFMHkm6INndBtSkm6fpu/NZnl+7EaPPES2yhaqnhoJg==
   dependencies:
     buffer "4.9.2"
     events "1.1.1"
@@ -8508,6 +8541,13 @@ axios@^0.21.1:
   dependencies:
     follow-redirects "^1.10.0"
 
+axios@^0.24.0:
+  version "0.24.0"
+  resolved "https://registry.yarnpkg.com/axios/-/axios-0.24.0.tgz#804e6fa1e4b9c5288501dd9dff56a7a0940d20d6"
+  integrity sha512-Q6cWsys88HoPgAaFAVUb0WpPk0O8iTeisR9IMqy9G8AbO4NlpVknrnQS03zzF9PGAWgO3cgletO3VjV/P7VztA==
+  dependencies:
+    follow-redirects "^1.14.4"
+
 babel-code-frame@^6.22.0:
   version "6.26.0"
   resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b"
@@ -10795,7 +10835,7 @@ color-name@^1.0.0, color-name@~1.1.4:
   resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2"
   integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==
 
-color-string@^1.5.2:
+color-string@^1.5.2, color-string@^1.6.0:
   version "1.6.0"
   resolved "https://registry.yarnpkg.com/color-string/-/color-string-1.6.0.tgz#c3915f61fe267672cb7e1e064c9d692219f6c312"
   integrity sha512-c/hGS+kRWJutUBEngKKmk4iH3sD59MBkoxVapS/0wgpCz2u7XsNloxknyvBhzwEs1IbV36D9PwqLPJ2DTu3vMA==
@@ -10827,6 +10867,14 @@ color@^3.0.0, color@^3.1.2:
     color-convert "^1.9.1"
     color-string "^1.5.4"
 
+color@^4.0.1:
+  version "4.0.1"
+  resolved "https://registry.yarnpkg.com/color/-/color-4.0.1.tgz#21df44cd10245a91b1ccf5ba031609b0e10e7d67"
+  integrity sha512-rpZjOKN5O7naJxkH2Rx1sZzzBgaiWECc6BYXjeCE6kF0kcASJYbUq02u7JqIHwCb/j3NhV+QhRL2683aICeGZA==
+  dependencies:
+    color-convert "^2.0.1"
+    color-string "^1.6.0"
+
 colorette@^1.2.1, colorette@^1.2.2:
   version "1.2.2"
   resolved "https://registry.yarnpkg.com/colorette/-/colorette-1.2.2.tgz#cbcc79d5e99caea2dbf10eb3a26fd8b3e6acfa94"
@@ -14723,6 +14771,11 @@ follow-redirects@^1.0.0, follow-redirects@^1.10.0:
   resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.14.0.tgz#f5d260f95c5f8c105894491feee5dc8993b402fe"
   integrity sha512-0vRwd7RKQBTt+mgu87mtYeofLFZpTas2S9zY+jIeuLJMNvudIgF52nr19q40HOwH5RrhWIPuj9puybzSJiRrVg==
 
+follow-redirects@^1.14.4:
+  version "1.14.4"
+  resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.14.4.tgz#838fdf48a8bbdd79e52ee51fb1c94e3ed98b9379"
+  integrity sha512-zwGkiSXC1MUJG/qmeIFH2HBJx9u0V46QGUe3YR1fXG8bXQxq7fLj0RjLZQ5nubr9qNJUZrH+xUcwXEoXNpfS+g==
+
 for-each@^0.3.3:
   version "0.3.3"
   resolved "https://registry.yarnpkg.com/for-each/-/for-each-0.3.3.tgz#69b447e88a0a5d32c3e7084f3f1710034b21376e"
@@ -17493,6 +17546,13 @@ is-core-module@^2.2.0:
   dependencies:
     has "^1.0.3"
 
+is-core-module@^2.8.0:
+  version "2.8.0"
+  resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.8.0.tgz#0321336c3d0925e497fd97f5d95cb114a5ccd548"
+  integrity sha512-vd15qHsaqrRL7dtH6QNuy0ndJmRDrS9HAM1CAiSifNUFv4x1a0CCVsj18hJ1mShxIG6T2i1sO78MkP56r0nYRw==
+  dependencies:
+    has "^1.0.3"
+
 is-data-descriptor@^0.1.4:
   version "0.1.4"
   resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56"
@@ -21455,7 +21515,7 @@ module-lookup-amd@^6.1.0:
     requirejs "^2.3.5"
     requirejs-config-file "^3.1.1"
 
-moment@^2.10.2, moment@^2.11.2, moment@^2.22.1, moment@^2.24.0:
+moment@^2.10.2, moment@^2.11.2, moment@^2.22.1, moment@^2.24.0, moment@^2.29.1:
   version "2.29.1"
   resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.1.tgz#b2be769fa31940be9eeea6469c075e35006fa3d3"
   integrity sha512-kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ==
@@ -21911,6 +21971,11 @@ node-addon-api@^3.0.2:
   resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-3.1.0.tgz#98b21931557466c6729e51cb77cd39c965f42239"
   integrity "sha1-mLIZMVV0ZsZynlHLd805yWX0Ijk= sha512-flmrDNB06LIl5lywUz7YlNGZH/5p0M7W28k8hzd9Lshtdh1wshD2Y+U4h9LD6KObOy1f+fEVdgprPrEymjM5uw=="
 
+node-addon-api@^4.2.0:
+  version "4.2.0"
+  resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-4.2.0.tgz#117cbb5a959dff0992e1c586ae0393573e4d2a87"
+  integrity sha512-eazsqzwG2lskuzBqCGPi7Ac2UgOoMz8JVOXVhTvvPDYhthvNpefx8jWD8Np7Gv+2Sz0FlPWZk0nJV0z598Wn8Q==
+
 node-cache@*, node-cache@^5.1.2:
   version "5.1.2"
   resolved "https://registry.yarnpkg.com/node-cache/-/node-cache-5.1.2.tgz#f264dc2ccad0a780e76253a694e9fd0ed19c398d"
@@ -21918,6 +21983,11 @@ node-cache@*, node-cache@^5.1.2:
   dependencies:
     clone "2.x"
 
+node-cleanup@^2.1.2:
+  version "2.1.2"
+  resolved "https://registry.yarnpkg.com/node-cleanup/-/node-cleanup-2.1.2.tgz#7ac19abd297e09a7f72a71545d951b517e4dde2c"
+  integrity sha1-esGavSl+Caf3KnFUXZUbUX5N3iw=
+
 node-dir@^0.1.10:
   version "0.1.17"
   resolved "https://registry.yarnpkg.com/node-dir/-/node-dir-0.1.17.tgz#5f5665d93351335caabef8f1c554516cf5f1e4e5"
@@ -23320,6 +23390,11 @@ path-parse@^1.0.6:
   resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c"
   integrity sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==
 
+path-parse@^1.0.7:
+  version "1.0.7"
+  resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735"
+  integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==
+
 path-root-regex@^0.1.0:
   version "0.1.2"
   resolved "https://registry.yarnpkg.com/path-root-regex/-/path-root-regex-0.1.2.tgz#bfccdc8df5b12dc52c8b43ec38d18d72c04ba96d"
@@ -24154,6 +24229,25 @@ prebuild-install@^6.0.0:
     tar-fs "^2.0.0"
     tunnel-agent "^0.6.0"
 
+prebuild-install@^6.1.4:
+  version "6.1.4"
+  resolved "https://registry.yarnpkg.com/prebuild-install/-/prebuild-install-6.1.4.tgz#ae3c0142ad611d58570b89af4986088a4937e00f"
+  integrity sha512-Z4vpywnK1lBg+zdPCVCsKq0xO66eEV9rWo2zrROGGiRS4JtueBOdlB1FnY8lcy7JsUud/Q3ijUxyWN26Ika0vQ==
+  dependencies:
+    detect-libc "^1.0.3"
+    expand-template "^2.0.3"
+    github-from-package "0.0.0"
+    minimist "^1.2.3"
+    mkdirp-classic "^0.5.3"
+    napi-build-utils "^1.0.1"
+    node-abi "^2.21.0"
+    npmlog "^4.0.1"
+    pump "^3.0.0"
+    rc "^1.2.7"
+    simple-get "^3.0.3"
+    tar-fs "^2.0.0"
+    tunnel-agent "^0.6.0"
+
 precinct@^6.3.1:
   version "6.3.1"
   resolved "https://registry.yarnpkg.com/precinct/-/precinct-6.3.1.tgz#8ad735a8afdfc48b56ed39c9ad3bf999b6b928dc"
@@ -26337,7 +26431,16 @@ resolve@1.1.7:
   resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b"
   integrity sha1-IDEU2CrSxe2ejgQRs5ModeiJ6Xs=
 
-resolve@1.x, resolve@^1.0.0, resolve@^1.1.6, resolve@^1.1.7, resolve@^1.10.0, resolve@^1.10.1, resolve@^1.11.0, resolve@^1.11.1, resolve@^1.12.0, resolve@^1.13.1, resolve@^1.14.2, resolve@^1.17.0, resolve@^1.18.1, resolve@^1.2.0, resolve@^1.20.0, resolve@^1.8.1:
+resolve@1.x, resolve@^1.0.0:
+  version "1.21.0"
+  resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.21.0.tgz#b51adc97f3472e6a5cf4444d34bc9d6b9037591f"
+  integrity sha512-3wCbTpk5WJlyE4mSOtDLhqQmGFi0/TD9VPwmiolnk8U0wRgMEktqCXd3vy5buTO3tljvalNvKrjHEfrd2WpEKA==
+  dependencies:
+    is-core-module "^2.8.0"
+    path-parse "^1.0.7"
+    supports-preserve-symlinks-flag "^1.0.0"
+
+resolve@^1.1.6, resolve@^1.1.7, resolve@^1.10.0, resolve@^1.10.1, resolve@^1.11.0, resolve@^1.11.1, resolve@^1.12.0, resolve@^1.13.1, resolve@^1.14.2, resolve@^1.17.0, resolve@^1.18.1, resolve@^1.2.0, resolve@^1.20.0, resolve@^1.8.1:
   version "1.20.0"
   resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975"
   integrity sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==
@@ -26930,6 +27033,20 @@ shallowequal@^1.0.1, shallowequal@^1.1.0:
   resolved "https://registry.yarnpkg.com/shallowequal/-/shallowequal-1.1.0.tgz#188d521de95b9087404fd4dcb68b13df0ae4e7f8"
   integrity sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ==
 
+sharp@^0.29.2:
+  version "0.29.2"
+  resolved "https://registry.yarnpkg.com/sharp/-/sharp-0.29.2.tgz#e8c003cd9cb321585b32dbda6eed3baa7d6f2308"
+  integrity sha512-XWRdiYLIJ3tDUejRyG24KERnJzMfIoyiJBntd2S6/uj3NEeNgRFRLgiBlvPxMa8aml14dKKD98yHinSNKp1xzQ==
+  dependencies:
+    color "^4.0.1"
+    detect-libc "^1.0.3"
+    node-addon-api "^4.2.0"
+    prebuild-install "^6.1.4"
+    semver "^7.3.5"
+    simple-get "^3.1.0"
+    tar-fs "^2.1.1"
+    tunnel-agent "^0.6.0"
+
 shebang-command@^1.2.0:
   version "1.2.0"
   resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea"
@@ -27030,7 +27147,7 @@ simple-get@^2.7.0:
     once "^1.3.1"
     simple-concat "^1.0.0"
 
-simple-get@^3.0.3:
+simple-get@^3.0.3, simple-get@^3.1.0:
   version "3.1.0"
   resolved "https://registry.yarnpkg.com/simple-get/-/simple-get-3.1.0.tgz#b45be062435e50d159540b576202ceec40b9c6b3"
   integrity "sha1-tFvgYkNeUNFZVAtXYgLO7EC5xrM= sha512-bCR6cP+aTdScaQCnQKbPKtJOKDp/hj9EDLJo3Nw4y1QksqaovlW/bnptB6/c1e+qmNIDHRK+oXFDdEqBT8WzUA=="
@@ -27305,18 +27422,18 @@ source-map-resolve@^0.5.0, source-map-resolve@^0.5.2:
     source-map-url "^0.4.0"
     urix "^0.1.0"
 
-source-map-support@^0.5.12, source-map-support@^0.5.16, source-map-support@^0.5.6, source-map-support@~0.5.12:
-  version "0.5.19"
-  resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.19.tgz#a98b62f86dcaf4f67399648c085291ab9e8fed61"
-  integrity sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==
+source-map-support@^0.5.12, source-map-support@^0.5.17:
+  version "0.5.21"
+  resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f"
+  integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==
   dependencies:
     buffer-from "^1.0.0"
     source-map "^0.6.0"
 
-source-map-support@^0.5.17:
-  version "0.5.21"
-  resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f"
-  integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==
+source-map-support@^0.5.16, source-map-support@^0.5.6, source-map-support@~0.5.12:
+  version "0.5.19"
+  resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.19.tgz#a98b62f86dcaf4f67399648c085291ab9e8fed61"
+  integrity sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==
   dependencies:
     buffer-from "^1.0.0"
     source-map "^0.6.0"
@@ -28251,6 +28368,11 @@ supports-hyperlinks@^2.0.0, supports-hyperlinks@^2.1.0:
     has-flag "^4.0.0"
     supports-color "^7.0.0"
 
+supports-preserve-symlinks-flag@^1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09"
+  integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==
+
 svg-parser@^2.0.0:
   version "2.0.4"
   resolved "https://registry.yarnpkg.com/svg-parser/-/svg-parser-2.0.4.tgz#fdc2e29e13951736140b76cb122c8ee6630eb6b5"
@@ -28380,7 +28502,7 @@ tar-fs@^1.13.0:
     pump "^1.0.0"
     tar-stream "^1.1.2"
 
-tar-fs@^2.0.0:
+tar-fs@^2.0.0, tar-fs@^2.1.1:
   version "2.1.1"
   resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.1.tgz#489a15ab85f1f0befabb370b7de4f9eb5cbe8784"
   integrity sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng==
@@ -29130,7 +29252,7 @@ ts-node@^7.0.1:
     source-map-support "^0.5.6"
     yn "^2.0.0"
 
-ts-node@^8.10:
+ts-node@^8, ts-node@^8.10:
   version "8.10.2"
   resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-8.10.2.tgz#eee03764633b1234ddd37f8db9ec10b75ec7fb8d"
   integrity sha512-ISJJGgkIpDdBhWVu3jufsWpK3Rzo7bdiIXJjQc0ynKxVOVcg2oIrf2H2cejminGrptVc6q6/uynAHNCuWGbpVA==
@@ -29460,7 +29582,7 @@ typescript-tuple@^2.2.1:
   dependencies:
     typescript-compare "^0.0.2"
 
-typescript@2.2.2, typescript@^3.0.3, typescript@^3.8.3, typescript@^3.9.5, typescript@^3.9.7, typescript@^4.0.3, typescript@^4.4.3:
+typescript@2.2.2, typescript@^3.0.3, typescript@^3.3, typescript@^3.8.3, typescript@^3.9.5, typescript@^3.9.7, typescript@^4.0.3, typescript@^4.4, typescript@^4.4.3:
   version "4.4.3"
   resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.3.tgz#bdc5407caa2b109efd4f82fe130656f977a29324"
   integrity sha512-4xfscpisVgqqDfPaJo5vkd+Qd/ItkoagnHpufr+i2QCHBsNYp+G7UAoyFl8aPtx879u38wPV65rZ8qbGZijalA==
