
Merge branch 'giza' into distribution_bucket_content_fix

Ignazio Bovo · 3 years ago · commit 00902348af
100 changed files with 6859 additions and 1701 deletions
  1. .env (+0 -3)
  2. chain-metadata.json (+0 -0)
  3. cli/README.md (+267 -144)
  4. cli/codegen.yml (+32 -0)
  5. cli/examples/content/CreateVideo.json (+1 -1)
  6. cli/package.json (+17 -5)
  7. cli/scripts/content-test.sh (+62 -0)
  8. cli/src/Api.ts (+128 -156)
  9. cli/src/ExitCodes.ts (+2 -1)
  10. cli/src/QueryNodeApi.ts (+125 -0)
  11. cli/src/Types.ts (+89 -73)
  12. cli/src/base/AccountsCommandBase.ts (+241 -132)
  13. cli/src/base/ApiCommandBase.ts (+193 -69)
  14. cli/src/base/ContentDirectoryCommandBase.ts (+117 -52)
  15. cli/src/base/DefaultCommandBase.ts (+16 -0)
  16. cli/src/base/StateAwareCommandBase.ts (+5 -5)
  17. cli/src/base/UploadCommandBase.ts (+201 -80)
  18. cli/src/base/WorkingGroupsCommandBase.ts (+19 -53)
  19. cli/src/commands/account/choose.ts (+0 -48)
  20. cli/src/commands/account/create.ts (+16 -39)
  21. cli/src/commands/account/current.ts (+0 -40)
  22. cli/src/commands/account/export.ts (+34 -31)
  23. cli/src/commands/account/forget.ts (+4 -10)
  24. cli/src/commands/account/import.ts (+56 -33)
  25. cli/src/commands/account/info.ts (+56 -0)
  26. cli/src/commands/account/list.ts (+26 -0)
  27. cli/src/commands/account/transferTokens.ts (+34 -51)
  28. cli/src/commands/api/getQueryNodeEndpoint.ts (+11 -0)
  29. cli/src/commands/api/inspect.ts (+9 -10)
  30. cli/src/commands/api/setQueryNodeEndpoint.ts (+36 -0)
  31. cli/src/commands/api/setUri.ts (+3 -6)
  32. cli/src/commands/content/addCuratorToGroup.ts (+6 -5)
  33. cli/src/commands/content/channel.ts (+7 -13)
  34. cli/src/commands/content/channels.ts (+4 -3)
  35. cli/src/commands/content/createChannel.ts (+49 -32)
  36. cli/src/commands/content/createChannelCategory.ts (+13 -14)
  37. cli/src/commands/content/createCuratorGroup.ts (+3 -5)
  38. cli/src/commands/content/createVideo.ts (+56 -47)
  39. cli/src/commands/content/createVideoCategory.ts (+13 -14)
  40. cli/src/commands/content/curatorGroup.ts (+2 -2)
  41. cli/src/commands/content/curatorGroups.ts (+1 -1)
  42. cli/src/commands/content/deleteChannel.ts (+101 -0)
  43. cli/src/commands/content/deleteChannelCategory.ts (+6 -6)
  44. cli/src/commands/content/deleteVideo.ts (+80 -0)
  45. cli/src/commands/content/deleteVideoCategory.ts (+6 -6)
  46. cli/src/commands/content/removeChannelAssets.ts (+40 -0)
  47. cli/src/commands/content/removeCuratorFromGroup.ts (+7 -6)
  48. cli/src/commands/content/reuploadAssets.ts (+16 -9)
  49. cli/src/commands/content/setCuratorGroupStatus.ts (+6 -5)
  50. cli/src/commands/content/setFeaturedVideos.ts (+3 -6)
  51. cli/src/commands/content/updateChannel.ts (+90 -24)
  52. cli/src/commands/content/updateChannelCategory.ts (+8 -11)
  53. cli/src/commands/content/updateChannelCensorshipStatus.ts (+6 -8)
  54. cli/src/commands/content/updateVideo.ts (+77 -25)
  55. cli/src/commands/content/updateVideoCategory.ts (+8 -11)
  56. cli/src/commands/content/updateVideoCensorshipStatus.ts (+6 -8)
  57. cli/src/commands/content/video.ts (+2 -2)
  58. cli/src/commands/content/videos.ts (+4 -6)
  59. cli/src/commands/council/info.ts (+1 -1)
  60. cli/src/commands/working-groups/createOpening.ts (+24 -20)
  61. cli/src/commands/working-groups/decreaseWorkerStake.ts (+8 -6)
  62. cli/src/commands/working-groups/evictWorker.ts (+10 -12)
  63. cli/src/commands/working-groups/fillOpening.ts (+9 -11)
  64. cli/src/commands/working-groups/increaseStake.ts (+8 -6)
  65. cli/src/commands/working-groups/leaveRole.ts (+13 -7)
  66. cli/src/commands/working-groups/slashWorker.ts (+8 -6)
  67. cli/src/commands/working-groups/startAcceptingApplications.ts (+8 -6)
  68. cli/src/commands/working-groups/startReviewPeriod.ts (+8 -6)
  69. cli/src/commands/working-groups/terminateApplication.ts (+8 -6)
  70. cli/src/commands/working-groups/updateRewardAccount.ts (+16 -18)
  71. cli/src/commands/working-groups/updateRoleAccount.ts (+17 -34)
  72. cli/src/commands/working-groups/updateRoleStorage.ts (+8 -11)
  73. cli/src/commands/working-groups/updateWorkerReward.ts (+12 -13)
  74. cli/src/graphql/generated/queries.ts (+120 -0)
  75. cli/src/graphql/generated/schema.ts (+3714 -0)
  76. cli/src/graphql/queries/storage.graphql (+65 -0)
  77. cli/src/helpers/JsonSchemaPrompt.ts (+2 -4)
  78. cli/src/helpers/display.ts (+5 -5)
  79. cli/src/helpers/serialization.ts (+13 -89)
  80. cli/src/helpers/validation.ts (+9 -2)
  81. cli/src/json-schemas/Assets.schema.json (+0 -22)
  82. cli/src/schemas/ContentDirectory.ts (+8 -1)
  83. cli/src/schemas/json/Assets.schema.json (+34 -0)
  84. cli/src/schemas/json/WorkingGroupOpening.schema.json (+0 -0)
  85. cli/src/schemas/typings/Assets.schema.d.ts (+30 -0)
  86. cli/src/schemas/typings/WorkingGroupOpening.schema.d.ts (+0 -0)
  87. cli/tsconfig.json (+2 -1)
  88. colossus.Dockerfile (+25 -6)
  89. docker-compose.yml (+15 -25)
  90. package.json (+1 -1)
  91. query-node/mappings/content/channel.ts (+10 -6)
  92. query-node/mappings/storage/index.ts (+41 -25)
  93. query-node/mappings/storage/utils.ts (+28 -3)
  94. query-node/schemas/content.graphql (+5 -3)
  95. query-node/schemas/membership.graphql (+4 -0)
  96. query-node/schemas/storage.graphql (+5 -1)
  97. runtime-modules/storage/src/lib.rs (+79 -32)
  98. runtime-modules/storage/src/tests/fixtures.rs (+18 -1)
  99. runtime-modules/storage/src/tests/mod.rs (+47 -0)
  100. storage-node-v2/.eslintignore (+1 -0)

+ 0 - 3
.env

@@ -14,9 +14,6 @@ DB_PORT=5432
 DEBUG=index-builder:*
 TYPEORM_LOGGING=error
 
-DEBUG=index-builder:*
-TYPEORM_LOGGING=error
-
 ###########################
 #    Indexer options      #
 ###########################

File diff suppressed because it is too large
+ 0 - 0
chain-metadata.json


+ 267 - 144
cli/README.md

@@ -9,35 +9,13 @@ Command Line Interface for Joystream community and governance activities
 [![License](https://img.shields.io/npm/l/@joystream/cli.svg)](https://github.com/Joystream/joystream/blob/master/cli/package.json)
 
 <!-- toc -->
-* [Development](#development)
 * [Usage](#usage)
+* [Development](#development)
 * [First steps](#first-steps)
+* [Useful environment settings](#useful-environment-settings)
 * [Commands](#commands)
-* [Environment variables](#environment-variables)
 <!-- tocstop -->
 
-# Development
-<!-- development -->
-To run a command in developemnt environment (without installing the package):
-
-1. Navigate into the CLI root directory
-1. Execute any command like this:
-
-    ```
-        $ ./bin/run COMMAND
-    ```
-
-Alternatively:
-
-1. Navigate into the CLI root directory
-1. Execute `yarn link` (if that doesn't work, consider `sudo yarn link`)
-1. Execute command from any location like this:
-
-    ```
-        $ joystream-cli COMMAND
-    ```
-<!-- developmentstop -->
-
 # Usage
 <!-- usage -->
 ```sh-session
@@ -45,7 +23,7 @@ $ npm install -g @joystream/cli
 $ joystream-cli COMMAND
 running command...
 $ joystream-cli (-v|--version|version)
-@joystream/cli/0.5.1 linux-x64 node-v14.16.1
+@joystream/cli/0.6.0 linux-x64 node-v14.18.0
 $ joystream-cli --help [COMMAND]
 USAGE
   $ joystream-cli COMMAND
@@ -53,28 +31,56 @@ USAGE
 ```
 <!-- usagestop -->
 
+# Development
+<!-- development -->
+To run a command in the development environment (from the root of the [Joystream monorepo](https://github.com/Joystream/joystream), without installing the package):
+
+```shell
+  $ yarn && yarn workspace @joystream/types build && yarn workspace @joystream/metadata-protobuf build
+  $ ./cli/bin/run COMMAND # OR:
+  $ yarn joystream-cli COMMAND
+```
+
+Alternatively:
+
+```shell
+  $ yarn workspace @joystream/cli link
+  $ joystream-cli COMMAND
+```
+<!-- developmentstop -->
+
+
 # First steps
 <!-- first-steps -->
 When using the CLI for the first time there are a few common steps you might want to take in order to configure the CLI:
 
-1. Set the correct node endpoint. You can do this by executing `api:setUri` or any command that requires an api connection. To verify the current endpoint you can execute `api:getUri`.
-1. In order to use the accounts/keys that you may already have access to within Pioneer, you need to dowload the backup json file(s) ([https://testnet.joystream.org/#/accounts](https://testnet.joystream.org/#/accounts)) and import them into the CLI by executing `account:import /path/to/backup.json`.
-1. By executing `account:choose` you can choose one of the imported accounts, that will then serve as context for the next commands (you can check currently selected account using `account:info`). If you just want to use the development _Alice_ or _Bob_ account, you can access them without importing by providing an additional flag: `account:choose --showSpecial`.
-1. The context should now be fully set up! Feel free to use the `--help` flag to investigate the available commands or take a look at the sections below.
-1. You may also find it useful to get the first part of the command (before the colon) autocompleted when you press `[Tab]` while typing the name in the console. Executing `autocomplete` command will provide the instructions on how to set this up (see documentation below).
+1. Set the correct Joystream node websocket endpoint. You can do this by executing [`api:setUri`](#joystream-cli-apiseturi-uri) and choosing one of the suggested endpoints or providing your own url. To verify the currently used Joystream node websocket endpoint you can execute [`api:getUri`](#joystream-cli-apigeturi).
+2. Set the Joystream query node endpoint. This is optional, but some commands (for example: [`content:createChannel`](#joystream-cli-contentcreatechannel)) will require a connection to the query node in order to fetch the data they need to complete the requested operations (i.e. [`content:createChannel`](#joystream-cli-contentcreatechannel) will need to fetch the available storage node endpoints in order to upload the channel assets). In order to do that, execute [`api:setQueryNodeEndpoint`](#joystream-cli-apisetquerynodeendpoint-endpoint) and choose one of the suggested endpoints or provide your own url. You can use [`api:getQueryNodeEndpoint`](#joystream-cli-apigetquerynodeendpoint) any time to verify the currently set endpoint.
+3. In order to use your existing keys within the CLI, you can import them using the [`account:import`](#joystream-cli-accountimport) command. You can provide json backup files exported from Pioneer or the Polkadot{.js} extension as input. You can also use raw mnemonic or seed phrases. See the [`account:import` command documentation](#joystream-cli-accountimport) for the full list of supported inputs.
+  The key to sign the transaction(s) with will be determined based on the permissions required by the command you execute. For example, if you execute [`working-groups:updateRewardAccount --group storageProviders`](#joystream-cli-working-groupsupdaterewardaccount-address), the CLI will look for a storage provider role key among your available keys. If multiple execution contexts are available, the CLI will prompt you to choose the desired one.
+4. **Optionally:** You may also find it useful to get the first part of the command (before the colon) autocompleted when you press `[Tab]` while typing the command name in the console. Executing the [`autocomplete`](#joystream-cli-autocomplete-shell) command will provide instructions on how to set this up.
+5. That's it! The CLI should now be fully set up! Feel free to use the `--help` flag to investigate the available commands or take a look at the commands documentation below.
 <!-- first-steps -->
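For illustration, a minimal first-run session could look like the following sketch (the endpoints, backup path and account name are placeholders, and interactive prompts are omitted):

```shell
# Point the CLI at a Joystream node and (optionally) a query node - example endpoints only
$ joystream-cli api:setUri ws://localhost:9944
$ joystream-cli api:setQueryNodeEndpoint http://localhost:8081/graphql
# Verify the configured endpoints
$ joystream-cli api:getUri
$ joystream-cli api:getQueryNodeEndpoint
# Import an existing key from a JSON backup exported from Pioneer or Polkadot{.js}
$ joystream-cli account:import --backupFilePath /path/to/backup.json --name my-key
```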
 
+# Useful environment settings
+<!-- env -->
+- `FORCE_COLOR=0` - disables output coloring. This will make the output easier to parse in case it's redirected to a file or used within a script.
+- `AUTO_CONFIRM=true` - this will make the CLI skip asking for any confirmations (can be useful when creating bash scripts).
+<!-- envstop -->
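For example, a script could combine both settings for a single non-interactive invocation (the chosen command and output file below are only illustrative):

```shell
# No colors, no confirmation prompts - suitable for scripted runs
$ FORCE_COLOR=0 AUTO_CONFIRM=true joystream-cli content:curatorGroups > curator-groups.txt
```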
+
 # Commands
 <!-- commands -->
-* [`joystream-cli account:choose`](#joystream-cli-accountchoose)
-* [`joystream-cli account:create NAME`](#joystream-cli-accountcreate-name)
-* [`joystream-cli account:current`](#joystream-cli-accountcurrent)
-* [`joystream-cli account:export PATH`](#joystream-cli-accountexport-path)
+* [`joystream-cli account:create`](#joystream-cli-accountcreate)
+* [`joystream-cli account:export DESTPATH`](#joystream-cli-accountexport-destpath)
 * [`joystream-cli account:forget`](#joystream-cli-accountforget)
-* [`joystream-cli account:import BACKUPFILEPATH`](#joystream-cli-accountimport-backupfilepath)
-* [`joystream-cli account:transferTokens RECIPIENT AMOUNT`](#joystream-cli-accounttransfertokens-recipient-amount)
+* [`joystream-cli account:import`](#joystream-cli-accountimport)
+* [`joystream-cli account:info [ADDRESS]`](#joystream-cli-accountinfo-address)
+* [`joystream-cli account:list`](#joystream-cli-accountlist)
+* [`joystream-cli account:transferTokens`](#joystream-cli-accounttransfertokens)
+* [`joystream-cli api:getQueryNodeEndpoint`](#joystream-cli-apigetquerynodeendpoint)
 * [`joystream-cli api:getUri`](#joystream-cli-apigeturi)
 * [`joystream-cli api:inspect`](#joystream-cli-apiinspect)
+* [`joystream-cli api:setQueryNodeEndpoint [ENDPOINT]`](#joystream-cli-apisetquerynodeendpoint-endpoint)
 * [`joystream-cli api:setUri [URI]`](#joystream-cli-apiseturi-uri)
 * [`joystream-cli autocomplete [SHELL]`](#joystream-cli-autocomplete-shell)
 * [`joystream-cli content:addCuratorToGroup [GROUPID] [CURATORID]`](#joystream-cli-contentaddcuratortogroup-groupid-curatorid)
@@ -87,8 +93,11 @@ When using the CLI for the first time there are a few common steps you might wan
 * [`joystream-cli content:createVideoCategory`](#joystream-cli-contentcreatevideocategory)
 * [`joystream-cli content:curatorGroup ID`](#joystream-cli-contentcuratorgroup-id)
 * [`joystream-cli content:curatorGroups`](#joystream-cli-contentcuratorgroups)
+* [`joystream-cli content:deleteChannel`](#joystream-cli-contentdeletechannel)
 * [`joystream-cli content:deleteChannelCategory CHANNELCATEGORYID`](#joystream-cli-contentdeletechannelcategory-channelcategoryid)
+* [`joystream-cli content:deleteVideo`](#joystream-cli-contentdeletevideo)
 * [`joystream-cli content:deleteVideoCategory VIDEOCATEGORYID`](#joystream-cli-contentdeletevideocategory-videocategoryid)
+* [`joystream-cli content:removeChannelAssets`](#joystream-cli-contentremovechannelassets)
 * [`joystream-cli content:removeCuratorFromGroup [GROUPID] [CURATORID]`](#joystream-cli-contentremovecuratorfromgroup-groupid-curatorid)
 * [`joystream-cli content:reuploadAssets`](#joystream-cli-contentreuploadassets)
 * [`joystream-cli content:setCuratorGroupStatus [ID] [STATUS]`](#joystream-cli-contentsetcuratorgroupstatus-id-status)
@@ -118,111 +127,129 @@ When using the CLI for the first time there are a few common steps you might wan
 * [`joystream-cli working-groups:startAcceptingApplications WGOPENINGID`](#joystream-cli-working-groupsstartacceptingapplications-wgopeningid)
 * [`joystream-cli working-groups:startReviewPeriod WGOPENINGID`](#joystream-cli-working-groupsstartreviewperiod-wgopeningid)
 * [`joystream-cli working-groups:terminateApplication WGAPPLICATIONID`](#joystream-cli-working-groupsterminateapplication-wgapplicationid)
-* [`joystream-cli working-groups:updateRewardAccount [ACCOUNTADDRESS]`](#joystream-cli-working-groupsupdaterewardaccount-accountaddress)
-* [`joystream-cli working-groups:updateRoleAccount [ACCOUNTADDRESS]`](#joystream-cli-working-groupsupdateroleaccount-accountaddress)
+* [`joystream-cli working-groups:updateRewardAccount [ADDRESS]`](#joystream-cli-working-groupsupdaterewardaccount-address)
+* [`joystream-cli working-groups:updateRoleAccount [ADDRESS]`](#joystream-cli-working-groupsupdateroleaccount-address)
 * [`joystream-cli working-groups:updateRoleStorage STORAGE`](#joystream-cli-working-groupsupdaterolestorage-storage)
 * [`joystream-cli working-groups:updateWorkerReward WORKERID`](#joystream-cli-working-groupsupdateworkerreward-workerid)
 
-## `joystream-cli account:choose`
+## `joystream-cli account:create`
 
-Choose default account to use in the CLI
+Create a new account
 
 ```
 USAGE
-  $ joystream-cli account:choose
+  $ joystream-cli account:create
 
 OPTIONS
-  -S, --showSpecial      Whether to show special (DEV chain) accounts
-  -a, --address=address  Select account by address (if available)
+  --name=name               Account name
+  --type=(sr25519|ed25519)  Account type (defaults to sr25519)
 ```
 
-_See code: [src/commands/account/choose.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/account/choose.ts)_
+_See code: [src/commands/account/create.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/account/create.ts)_
 
-## `joystream-cli account:create NAME`
+## `joystream-cli account:export DESTPATH`
 
-Create new account
+Export account(s) to given location
 
 ```
 USAGE
-  $ joystream-cli account:create NAME
+  $ joystream-cli account:export DESTPATH
 
 ARGUMENTS
-  NAME  Account name
+  DESTPATH  Path where the exported files should be placed
+
+OPTIONS
+  -a, --all        If provided, exports all existing accounts into "exported_accounts" folder inside given path
+  -n, --name=name  Name of the account to export
 ```
 
-_See code: [src/commands/account/create.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/account/create.ts)_
+_See code: [src/commands/account/export.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/account/export.ts)_
 
-## `joystream-cli account:current`
+## `joystream-cli account:forget`
 
-Display information about currently choosen default account
+Forget (remove) account from the list of available accounts
 
 ```
 USAGE
-  $ joystream-cli account:current
+  $ joystream-cli account:forget
+```
+
+_See code: [src/commands/account/forget.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/account/forget.ts)_
+
+## `joystream-cli account:import`
+
+Import account using mnemonic phrase, seed, suri or json backup file
 
-ALIASES
-  $ joystream-cli account:info
-  $ joystream-cli account:default
 ```
+USAGE
+  $ joystream-cli account:import
 
-_See code: [src/commands/account/current.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/account/current.ts)_
+OPTIONS
+  --backupFilePath=backupFilePath  Path to account backup JSON file
+  --mnemonic=mnemonic              Mnemonic phrase
+  --name=name                      Account name
+  --password=password              Account password
+  --seed=seed                      Secret seed
+  --suri=suri                      Substrate uri
+  --type=(sr25519|ed25519)         Account type (defaults to sr25519)
+```
 
-## `joystream-cli account:export PATH`
+_See code: [src/commands/account/import.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/account/import.ts)_
 
-Export account(s) to given location
+## `joystream-cli account:info [ADDRESS]`
+
+Display detailed information about specified account
 
 ```
 USAGE
-  $ joystream-cli account:export PATH
+  $ joystream-cli account:info [ADDRESS]
 
 ARGUMENTS
-  PATH  Path where the exported files should be placed
+  ADDRESS  An address to inspect (can also be provided interactively)
 
-OPTIONS
-  -a, --all  If provided, exports all existing accounts into "exported_accounts" folder inside given path
+ALIASES
+  $ joystream-cli account:inspect
 ```
 
-_See code: [src/commands/account/export.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/account/export.ts)_
+_See code: [src/commands/account/info.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/account/info.ts)_
 
-## `joystream-cli account:forget`
+## `joystream-cli account:list`
 
-Forget (remove) account from the list of available accounts
+List all available accounts
 
 ```
 USAGE
-  $ joystream-cli account:forget
+  $ joystream-cli account:list
 ```
 
-_See code: [src/commands/account/forget.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/account/forget.ts)_
+_See code: [src/commands/account/list.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/account/list.ts)_
 
-## `joystream-cli account:import BACKUPFILEPATH`
+## `joystream-cli account:transferTokens`
 
-Import account using JSON backup file
+Transfer tokens from any of the available accounts
 
 ```
 USAGE
-  $ joystream-cli account:import BACKUPFILEPATH
+  $ joystream-cli account:transferTokens
 
-ARGUMENTS
-  BACKUPFILEPATH  Path to account backup JSON file
+OPTIONS
+  --amount=amount  (required) Amount of tokens to transfer
+  --from=from      Address of the sender (can also be provided interactively)
+  --to=to          Address of the recipient (can also be provided interactively)
 ```
 
-_See code: [src/commands/account/import.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/account/import.ts)_
+_See code: [src/commands/account/transferTokens.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/account/transferTokens.ts)_
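A hypothetical non-interactive transfer, with placeholder addresses:

```shell
# Send 100 tokens from an imported sender key to the recipient
$ joystream-cli account:transferTokens --from <SENDER_ADDRESS> --to <RECIPIENT_ADDRESS> --amount 100
```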
 
-## `joystream-cli account:transferTokens RECIPIENT AMOUNT`
+## `joystream-cli api:getQueryNodeEndpoint`
 
-Transfer tokens from currently choosen account
+Get current query node endpoint
 
 ```
 USAGE
-  $ joystream-cli account:transferTokens RECIPIENT AMOUNT
-
-ARGUMENTS
-  RECIPIENT  Address of the transfer recipient
-  AMOUNT     Amount of tokens to transfer
+  $ joystream-cli api:getQueryNodeEndpoint
 ```
 
-_See code: [src/commands/account/transferTokens.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/account/transferTokens.ts)_
+_See code: [src/commands/api/getQueryNodeEndpoint.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/api/getQueryNodeEndpoint.ts)_
 
 ## `joystream-cli api:getUri`
 
@@ -279,6 +306,20 @@ EXAMPLES
 
 _See code: [src/commands/api/inspect.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/api/inspect.ts)_
 
+## `joystream-cli api:setQueryNodeEndpoint [ENDPOINT]`
+
+Set query node endpoint
+
+```
+USAGE
+  $ joystream-cli api:setQueryNodeEndpoint [ENDPOINT]
+
+ARGUMENTS
+  ENDPOINT  Query node endpoint for the CLI to use
+```
+
+_See code: [src/commands/api/setQueryNodeEndpoint.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/api/setQueryNodeEndpoint.ts)_
+
 ## `joystream-cli api:setUri [URI]`
 
 Set api WS provider uri
@@ -409,8 +450,9 @@ USAGE
   $ joystream-cli content:createVideo
 
 OPTIONS
-  -c, --channelId=channelId  (required) ID of the Channel
-  -i, --input=input          (required) Path to JSON file to use as input
+  -c, --channelId=channelId       (required) ID of the Channel
+  -i, --input=input               (required) Path to JSON file to use as input
+  --context=(Owner|Collaborator)  Actor context to execute the command in (Owner/Collaborator)
 ```
 
 _See code: [src/commands/content/createVideo.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/content/createVideo.ts)_
@@ -455,6 +497,21 @@ USAGE
 
 _See code: [src/commands/content/curatorGroups.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/content/curatorGroups.ts)_
 
+## `joystream-cli content:deleteChannel`
+
+Delete the channel and optionally all associated data objects.
+
+```
+USAGE
+  $ joystream-cli content:deleteChannel
+
+OPTIONS
+  -c, --channelId=channelId  (required) ID of the Channel
+  -f, --force                Force-remove all associated channel data objects
+```
+
+_See code: [src/commands/content/deleteChannel.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/content/deleteChannel.ts)_
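For example, the cleanup step in `cli/scripts/content-test.sh` (added further down in this diff) removes a channel together with its data objects like this:

```shell
# Force-remove channel 1 and all of its associated data objects
$ joystream-cli content:deleteChannel -c 1 -f
```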
+
 ## `joystream-cli content:deleteChannelCategory CHANNELCATEGORYID`
 
 Delete channel category.
@@ -472,6 +529,22 @@ OPTIONS
 
 _See code: [src/commands/content/deleteChannelCategory.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/content/deleteChannelCategory.ts)_
 
+## `joystream-cli content:deleteVideo`
+
+Delete the video and optionally all associated data objects.
+
+```
+USAGE
+  $ joystream-cli content:deleteVideo
+
+OPTIONS
+  -f, --force                     Force-remove all associated video data objects
+  -v, --videoId=videoId           (required) ID of the Video
+  --context=(Owner|Collaborator)  Actor context to execute the command in (Owner/Collaborator)
+```
+
+_See code: [src/commands/content/deleteVideo.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/content/deleteVideo.ts)_
+
 ## `joystream-cli content:deleteVideoCategory VIDEOCATEGORYID`
 
 Delete video category.
@@ -489,6 +562,22 @@ OPTIONS
 
 _See code: [src/commands/content/deleteVideoCategory.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/content/deleteVideoCategory.ts)_
 
+## `joystream-cli content:removeChannelAssets`
+
+Remove data objects associated with the channel or any of its videos.
+
+```
+USAGE
+  $ joystream-cli content:removeChannelAssets
+
+OPTIONS
+  -c, --channelId=channelId       (required) ID of the Channel
+  -o, --objectId=objectId         (required) ID of an object to remove
+  --context=(Owner|Collaborator)  Actor context to execute the command in (Owner/Collaborator)
+```
+
+_See code: [src/commands/content/removeChannelAssets.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/content/removeChannelAssets.ts)_
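For example (mirroring the usage in `cli/scripts/content-test.sh` below; the `--context` flag is optional and shown only for illustration):

```shell
# Remove data object 0 belonging to channel 1, acting as the channel owner
$ joystream-cli content:removeChannelAssets -c 1 -o 0 --context Owner
```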
+
 ## `joystream-cli content:removeCuratorFromGroup [GROUPID] [CURATORID]`
 
 Remove Curator from Curator Group.
@@ -559,7 +648,8 @@ ARGUMENTS
   CHANNELID  ID of the Channel
 
 OPTIONS
-  -i, --input=input  (required) Path to JSON file to use as input
+  -i, --input=input               (required) Path to JSON file to use as input
+  --context=(Owner|Collaborator)  Actor context to execute the command in (Owner/Collaborator)
 ```
 
 _See code: [src/commands/content/updateChannel.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/content/updateChannel.ts)_
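A sketch of how the new `--context` flag combines with the existing input-file flow (the input file and channel ID match the example scripts in this diff):

```shell
# Update channel 1 from a JSON input file, acting in the Owner context
$ joystream-cli content:updateChannel -i ./examples/content/UpdateChannel.json 1 --context Owner
```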
@@ -612,7 +702,8 @@ ARGUMENTS
   VIDEOID  ID of the Video
 
 OPTIONS
-  -i, --input=input  (required) Path to JSON file to use as input
+  -i, --input=input               (required) Path to JSON file to use as input
+  --context=(Owner|Collaborator)  Actor context to execute the command in (Owner/Collaborator)
 ```
 
 _See code: [src/commands/content/updateVideo.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/content/updateVideo.ts)_
@@ -721,8 +812,10 @@ ARGUMENTS
   WGAPPLICATIONID  Working Group Application ID
 
 OPTIONS
-  -g, --group=(storageProviders|curators|operations)  The working group context in which the command should be executed
-                                                      Available values are: storageProviders, curators, operations.
+  -g, --group=(storageProviders|curators|operationsAlpha|operationsBeta|operationsGamma|gateway|distributors)
+      The working group context in which the command should be executed
+      Available values are: storageProviders, curators, operationsAlpha, operationsBeta, operationsGamma, gateway, 
+      distributors.
 ```
 
 _See code: [src/commands/working-groups/application.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/working-groups/application.ts)_
@@ -736,21 +829,23 @@ USAGE
   $ joystream-cli working-groups:createOpening
 
 OPTIONS
-  -e, --edit                                          If provided along with --input - launches in edit mode allowing to
-                                                      modify the input before sending the exstinsic
+  -e, --edit
+      If provided along with --input - launches in edit mode allowing to modify the input before sending the extrinsic
 
-  -g, --group=(storageProviders|curators|operations)  The working group context in which the command should be executed
-                                                      Available values are: storageProviders, curators, operations.
+  -g, --group=(storageProviders|curators|operationsAlpha|operationsBeta|operationsGamma|gateway|distributors)
+      The working group context in which the command should be executed
+      Available values are: storageProviders, curators, operationsAlpha, operationsBeta, operationsGamma, gateway, 
+      distributors.
 
-  -i, --input=input                                   Path to JSON file to use as input (if not specified - the input
-                                                      can be provided interactively)
+  -i, --input=input
+      Path to JSON file to use as input (if not specified - the input can be provided interactively)
 
-  -o, --output=output                                 Path to the file where the output JSON should be saved (this
-                                                      output can be then reused as input)
+  -o, --output=output
+      Path to the file where the output JSON should be saved (this output can be then reused as input)
 
-  --dryRun                                            If provided along with --output - skips sending the actual
-                                                      extrinsic(can be used to generate a "draft" which can be provided
-                                                      as input later)
+  --dryRun
+      If provided along with --output - skips sending the actual extrinsic (can be used to generate a "draft" which can be 
+      provided as input later)
 ```
 
 _See code: [src/commands/working-groups/createOpening.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/working-groups/createOpening.ts)_
@@ -767,8 +862,10 @@ ARGUMENTS
   WORKERID  Worker ID
 
 OPTIONS
-  -g, --group=(storageProviders|curators|operations)  The working group context in which the command should be executed
-                                                      Available values are: storageProviders, curators, operations.
+  -g, --group=(storageProviders|curators|operationsAlpha|operationsBeta|operationsGamma|gateway|distributors)
+      The working group context in which the command should be executed
+      Available values are: storageProviders, curators, operationsAlpha, operationsBeta, operationsGamma, gateway, 
+      distributors.
 ```
 
 _See code: [src/commands/working-groups/decreaseWorkerStake.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/working-groups/decreaseWorkerStake.ts)_
@@ -785,8 +882,10 @@ ARGUMENTS
   WORKERID  Worker ID
 
 OPTIONS
-  -g, --group=(storageProviders|curators|operations)  The working group context in which the command should be executed
-                                                      Available values are: storageProviders, curators, operations.
+  -g, --group=(storageProviders|curators|operationsAlpha|operationsBeta|operationsGamma|gateway|distributors)
+      The working group context in which the command should be executed
+      Available values are: storageProviders, curators, operationsAlpha, operationsBeta, operationsGamma, gateway, 
+      distributors.
 ```
 
 _See code: [src/commands/working-groups/evictWorker.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/working-groups/evictWorker.ts)_
@@ -803,8 +902,10 @@ ARGUMENTS
   WGOPENINGID  Working Group Opening ID
 
 OPTIONS
-  -g, --group=(storageProviders|curators|operations)  The working group context in which the command should be executed
-                                                      Available values are: storageProviders, curators, operations.
+  -g, --group=(storageProviders|curators|operationsAlpha|operationsBeta|operationsGamma|gateway|distributors)
+      The working group context in which the command should be executed
+      Available values are: storageProviders, curators, operationsAlpha, operationsBeta, operationsGamma, gateway, 
+      distributors.
 ```
 
 _See code: [src/commands/working-groups/fillOpening.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/working-groups/fillOpening.ts)_
@@ -818,8 +919,10 @@ USAGE
   $ joystream-cli working-groups:increaseStake
 
 OPTIONS
-  -g, --group=(storageProviders|curators|operations)  The working group context in which the command should be executed
-                                                      Available values are: storageProviders, curators, operations.
+  -g, --group=(storageProviders|curators|operationsAlpha|operationsBeta|operationsGamma|gateway|distributors)
+      The working group context in which the command should be executed
+      Available values are: storageProviders, curators, operationsAlpha, operationsBeta, operationsGamma, gateway, 
+      distributors.
 ```
 
 _See code: [src/commands/working-groups/increaseStake.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/working-groups/increaseStake.ts)_
@@ -833,8 +936,10 @@ USAGE
   $ joystream-cli working-groups:leaveRole
 
 OPTIONS
-  -g, --group=(storageProviders|curators|operations)  The working group context in which the command should be executed
-                                                      Available values are: storageProviders, curators, operations.
+  -g, --group=(storageProviders|curators|operationsAlpha|operationsBeta|operationsGamma|gateway|distributors)
+      The working group context in which the command should be executed
+      Available values are: storageProviders, curators, operationsAlpha, operationsBeta, operationsGamma, gateway, 
+      distributors.
 ```
 
 _See code: [src/commands/working-groups/leaveRole.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/working-groups/leaveRole.ts)_
@@ -851,8 +956,10 @@ ARGUMENTS
   WGOPENINGID  Working Group Opening ID
 
 OPTIONS
-  -g, --group=(storageProviders|curators|operations)  The working group context in which the command should be executed
-                                                      Available values are: storageProviders, curators, operations.
+  -g, --group=(storageProviders|curators|operationsAlpha|operationsBeta|operationsGamma|gateway|distributors)
+      The working group context in which the command should be executed
+      Available values are: storageProviders, curators, operationsAlpha, operationsBeta, operationsGamma, gateway, 
+      distributors.
 ```
 
 _See code: [src/commands/working-groups/opening.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/working-groups/opening.ts)_
@@ -866,8 +973,10 @@ USAGE
   $ joystream-cli working-groups:openings
 
 OPTIONS
-  -g, --group=(storageProviders|curators|operations)  The working group context in which the command should be executed
-                                                      Available values are: storageProviders, curators, operations.
+  -g, --group=(storageProviders|curators|operationsAlpha|operationsBeta|operationsGamma|gateway|distributors)
+      The working group context in which the command should be executed
+      Available values are: storageProviders, curators, operationsAlpha, operationsBeta, operationsGamma, gateway, 
+      distributors.
 ```
 
 _See code: [src/commands/working-groups/openings.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/working-groups/openings.ts)_
@@ -881,8 +990,10 @@ USAGE
   $ joystream-cli working-groups:overview
 
 OPTIONS
-  -g, --group=(storageProviders|curators|operations)  The working group context in which the command should be executed
-                                                      Available values are: storageProviders, curators, operations.
+  -g, --group=(storageProviders|curators|operationsAlpha|operationsBeta|operationsGamma|gateway|distributors)
+      The working group context in which the command should be executed
+      Available values are: storageProviders, curators, operationsAlpha, operationsBeta, operationsGamma, gateway, 
+      distributors.
 ```
 
 _See code: [src/commands/working-groups/overview.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/working-groups/overview.ts)_
@@ -896,8 +1007,10 @@ USAGE
   $ joystream-cli working-groups:setDefaultGroup
 
 OPTIONS
-  -g, --group=(storageProviders|curators|operations)  The working group context in which the command should be executed
-                                                      Available values are: storageProviders, curators, operations.
+  -g, --group=(storageProviders|curators|operationsAlpha|operationsBeta|operationsGamma|gateway|distributors)
+      The working group context in which the command should be executed
+      Available values are: storageProviders, curators, operationsAlpha, operationsBeta, operationsGamma, gateway, 
+      distributors.
 ```
 
 _See code: [src/commands/working-groups/setDefaultGroup.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/working-groups/setDefaultGroup.ts)_
@@ -914,8 +1027,10 @@ ARGUMENTS
   WORKERID  Worker ID
 
 OPTIONS
-  -g, --group=(storageProviders|curators|operations)  The working group context in which the command should be executed
-                                                      Available values are: storageProviders, curators, operations.
+  -g, --group=(storageProviders|curators|operationsAlpha|operationsBeta|operationsGamma|gateway|distributors)
+      The working group context in which the command should be executed
+      Available values are: storageProviders, curators, operationsAlpha, operationsBeta, operationsGamma, gateway, 
+      distributors.
 ```
 
 _See code: [src/commands/working-groups/slashWorker.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/working-groups/slashWorker.ts)_
@@ -932,8 +1047,10 @@ ARGUMENTS
   WGOPENINGID  Working Group Opening ID
 
 OPTIONS
-  -g, --group=(storageProviders|curators|operations)  The working group context in which the command should be executed
-                                                      Available values are: storageProviders, curators, operations.
+  -g, --group=(storageProviders|curators|operationsAlpha|operationsBeta|operationsGamma|gateway|distributors)
+      The working group context in which the command should be executed
+      Available values are: storageProviders, curators, operationsAlpha, operationsBeta, operationsGamma, gateway, 
+      distributors.
 ```
 
 _See code: [src/commands/working-groups/startAcceptingApplications.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/working-groups/startAcceptingApplications.ts)_
@@ -950,8 +1067,10 @@ ARGUMENTS
   WGOPENINGID  Working Group Opening ID
 
 OPTIONS
-  -g, --group=(storageProviders|curators|operations)  The working group context in which the command should be executed
-                                                      Available values are: storageProviders, curators, operations.
+  -g, --group=(storageProviders|curators|operationsAlpha|operationsBeta|operationsGamma|gateway|distributors)
+      The working group context in which the command should be executed
+      Available values are: storageProviders, curators, operationsAlpha, operationsBeta, operationsGamma, gateway, 
+      distributors.
 ```
 
 _See code: [src/commands/working-groups/startReviewPeriod.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/working-groups/startReviewPeriod.ts)_
@@ -968,44 +1087,50 @@ ARGUMENTS
   WGAPPLICATIONID  Working Group Application ID
 
 OPTIONS
-  -g, --group=(storageProviders|curators|operations)  The working group context in which the command should be executed
-                                                      Available values are: storageProviders, curators, operations.
+  -g, --group=(storageProviders|curators|operationsAlpha|operationsBeta|operationsGamma|gateway|distributors)
+      The working group context in which the command should be executed
+      Available values are: storageProviders, curators, operationsAlpha, operationsBeta, operationsGamma, gateway, 
+      distributors.
 ```
 
 _See code: [src/commands/working-groups/terminateApplication.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/working-groups/terminateApplication.ts)_
 
-## `joystream-cli working-groups:updateRewardAccount [ACCOUNTADDRESS]`
+## `joystream-cli working-groups:updateRewardAccount [ADDRESS]`
 
 Updates the worker/lead reward account (requires current role account to be selected)
 
 ```
 USAGE
-  $ joystream-cli working-groups:updateRewardAccount [ACCOUNTADDRESS]
+  $ joystream-cli working-groups:updateRewardAccount [ADDRESS]
 
 ARGUMENTS
-  ACCOUNTADDRESS  New reward account address (if omitted, one of the existing CLI accounts can be selected)
+  ADDRESS  New reward account address (if omitted, can be provided interactively)
 
 OPTIONS
-  -g, --group=(storageProviders|curators|operations)  The working group context in which the command should be executed
-                                                      Available values are: storageProviders, curators, operations.
+  -g, --group=(storageProviders|curators|operationsAlpha|operationsBeta|operationsGamma|gateway|distributors)
+      The working group context in which the command should be executed
+      Available values are: storageProviders, curators, operationsAlpha, operationsBeta, operationsGamma, gateway, 
+      distributors.
 ```
 
 _See code: [src/commands/working-groups/updateRewardAccount.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/working-groups/updateRewardAccount.ts)_
 
-## `joystream-cli working-groups:updateRoleAccount [ACCOUNTADDRESS]`
+## `joystream-cli working-groups:updateRoleAccount [ADDRESS]`
 
 Updates the worker/lead role account. Requires member controller account to be selected
 
 ```
 USAGE
-  $ joystream-cli working-groups:updateRoleAccount [ACCOUNTADDRESS]
+  $ joystream-cli working-groups:updateRoleAccount [ADDRESS]
 
 ARGUMENTS
-  ACCOUNTADDRESS  New role account address (if omitted, one of the existing CLI accounts can be selected)
+  ADDRESS  New role account address (if omitted, can be provided interactively)
 
 OPTIONS
-  -g, --group=(storageProviders|curators|operations)  The working group context in which the command should be executed
-                                                      Available values are: storageProviders, curators, operations.
+  -g, --group=(storageProviders|curators|operationsAlpha|operationsBeta|operationsGamma|gateway|distributors)
+      The working group context in which the command should be executed
+      Available values are: storageProviders, curators, operationsAlpha, operationsBeta, operationsGamma, gateway, 
+      distributors.
 ```
 
 _See code: [src/commands/working-groups/updateRoleAccount.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/working-groups/updateRoleAccount.ts)_
@@ -1022,8 +1147,10 @@ ARGUMENTS
   STORAGE  Worker storage
 
 OPTIONS
-  -g, --group=(storageProviders|curators|operations)  The working group context in which the command should be executed
-                                                      Available values are: storageProviders, curators, operations.
+  -g, --group=(storageProviders|curators|operationsAlpha|operationsBeta|operationsGamma|gateway|distributors)
+      The working group context in which the command should be executed
+      Available values are: storageProviders, curators, operationsAlpha, operationsBeta, operationsGamma, gateway, 
+      distributors.
 ```
 
 _See code: [src/commands/working-groups/updateRoleStorage.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/working-groups/updateRoleStorage.ts)_
@@ -1040,15 +1167,11 @@ ARGUMENTS
   WORKERID  Worker ID
 
 OPTIONS
-  -g, --group=(storageProviders|curators|operations)  The working group context in which the command should be executed
-                                                      Available values are: storageProviders, curators, operations.
+  -g, --group=(storageProviders|curators|operationsAlpha|operationsBeta|operationsGamma|gateway|distributors)
+      The working group context in which the command should be executed
+      Available values are: storageProviders, curators, operationsAlpha, operationsBeta, operationsGamma, gateway, 
+      distributors.
 ```
 
 _See code: [src/commands/working-groups/updateWorkerReward.ts](https://github.com/Joystream/joystream/blob/master/cli/src/commands/working-groups/updateWorkerReward.ts)_
 <!-- commandsstop -->
-
-# Environment variables
-<!-- env -->
-- `FORCE_COLOR` - can be set to `0` to disable output coloring
-- `AUTO_CONFIRM` - can be set to `1` or `true` to skip any required confirmations (can be useful for creating bash scripts)
-<!-- envstop -->

+ 32 - 0
cli/codegen.yml

@@ -0,0 +1,32 @@
+overwrite: true
+
+schema: '../query-node/generated/graphql-server/generated/schema.graphql'
+
+documents:
+  - './src/graphql/queries/*.graphql'
+
+config:
+  scalars:
+    Date: Date
+  preResolveTypes: true # avoid using Pick
+  skipTypename: true # skip __typename field in typings unless it's part of the query
+
+generates:
+  src/graphql/generated/schema.ts:
+    hooks:
+      afterOneFileWrite:
+        - prettier --write
+        - eslint --fix
+    plugins:
+      - typescript
+  src/graphql/generated/queries.ts:
+    preset: import-types
+    presetConfig:
+      typesPath: ./schema
+    hooks:
+      afterOneFileWrite:
+        - prettier --write
+        - eslint --fix
+    plugins:
+      - typescript-operations
+      - typescript-document-nodes

+ 1 - 1
cli/examples/content/CreateVideo.json

@@ -7,7 +7,7 @@
   "hasMarketing": false,
   "isPublic": true,
   "isExplicit": false,
-  "personsList": [],
+  "persons": [],
   "category": 1,
   "license": {
     "code": 1001,

+ 17 - 5
cli/package.json

@@ -1,7 +1,7 @@
 {
   "name": "@joystream/cli",
   "description": "Command Line Interface for Joystream community and governance activities",
-  "version": "0.5.1",
+  "version": "0.6.0",
   "author": "Leszek Wiesner",
   "bin": {
     "joystream-cli": "./bin/run"
@@ -11,7 +11,7 @@
     "@apidevtools/json-schema-ref-parser": "^9.0.6",
     "@ffprobe-installer/ffprobe": "^1.1.0",
     "@joystream/metadata-protobuf": "^1.0.0",
-    "@joystream/types": "^0.16.1",
+    "@joystream/types": "^0.17.0",
     "@oclif/command": "^1.5.19",
     "@oclif/config": "^1.14.0",
     "@oclif/plugin-autocomplete": "^0.2.0",
@@ -43,7 +43,12 @@
     "moment": "^2.24.0",
     "proper-lockfile": "^4.1.1",
     "slug": "^2.1.1",
-    "tslib": "^1.11.1"
+    "tslib": "^1.11.1",
+    "blake3": "^2.1.4",
+    "multihashes": "^4.0.3",
+    "@apollo/client": "^3.2.5",
+    "cross-fetch": "^3.0.6",
+    "form-data": "^4.0.0"
   },
   "devDependencies": {
     "@oclif/dev-cli": "^1.22.2",
@@ -61,7 +66,12 @@
     "mocha": "^5.2.0",
     "nyc": "^14.1.1",
     "ts-node": "^10.2.1",
-    "typescript": "^4.4.3"
+    "typescript": "^4.4.3",
+    "@graphql-codegen/cli": "^1.21.4",
+    "@graphql-codegen/typescript": "^1.22.0",
+    "@graphql-codegen/import-types-preset": "^1.18.1",
+    "@graphql-codegen/typescript-operations": "^1.17.16",
+    "@graphql-codegen/typescript-document-nodes": "^1.17.11"
   },
   "engines": {
     "node": ">=14.0.0",
@@ -126,7 +136,9 @@
     "lint": "eslint ./src --ext .ts",
     "checks": "tsc --noEmit --pretty && prettier ./ --check && yarn lint",
     "format": "prettier ./ --write",
-    "generate:schema-typings": "rm -rf ./src/json-schemas/typings && json2ts -i ./src/json-schemas/ -o ./src/json-schemas/typings/"
+    "generate:schema-typings": "rm -rf ./src/schemas/typings && json2ts -i ./src/schemas/json/ -o ./src/schemas/typings/ && prettier ./src/schemas/typings/ --write",
+    "generate:graphql-typings": "graphql-codegen",
+    "generate:all": "yarn generate:schema-typings && yarn generate:graphql-typings"
   },
   "types": "lib/index.d.ts",
   "volta": {

+ 62 - 0
cli/scripts/content-test.sh

@@ -0,0 +1,62 @@
+#!/usr/bin/env bash
+set -e
+
+SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")"
+cd $SCRIPT_PATH
+
+echo "{}" > ~/tmp/empty.json
+
+export AUTO_CONFIRM=true
+
+# Init content lead
+yarn workspace api-scripts initialize-content-lead
+# Test create/update/remove category
+yarn joystream-cli content:createVideoCategory -i ./examples/content/CreateCategory.json
+yarn joystream-cli content:createVideoCategory -i ./examples/content/CreateCategory.json
+yarn joystream-cli content:createVideoCategory -i ./examples/content/CreateCategory.json
+yarn joystream-cli content:createChannelCategory -i ./examples/content/CreateCategory.json
+yarn joystream-cli content:createChannelCategory -i ./examples/content/CreateCategory.json
+yarn joystream-cli content:createChannelCategory -i ./examples/content/CreateCategory.json
+yarn joystream-cli content:updateVideoCategory -i ./examples/content/UpdateCategory.json 2
+yarn joystream-cli content:updateChannelCategory -i ./examples/content/UpdateCategory.json 2
+yarn joystream-cli content:deleteChannelCategory 3
+yarn joystream-cli content:deleteVideoCategory 3
+# Group 1 - a valid group
+yarn joystream-cli content:createCuratorGroup
+yarn joystream-cli content:setCuratorGroupStatus 1 1
+yarn joystream-cli content:addCuratorToGroup 1 0
+# Group 2 - test removeCuratorFromGroup
+yarn joystream-cli content:createCuratorGroup
+yarn joystream-cli content:addCuratorToGroup 2 0
+yarn joystream-cli content:removeCuratorFromGroup 2 0
+# Create/update channel
+yarn joystream-cli content:createChannel -i ./examples/content/CreateChannel.json --context Member || true
+yarn joystream-cli content:createChannel -i ./examples/content/CreateChannel.json --context Curator || true
+yarn joystream-cli content:createChannel -i ~/tmp/empty.json --context Member || true
+yarn joystream-cli content:updateChannel -i ./examples/content/UpdateChannel.json 1 || true
+# Create/update video
+yarn joystream-cli content:createVideo -i ./examples/content/CreateVideo.json -c 1 || true
+yarn joystream-cli content:createVideo -i ./examples/content/CreateVideo.json -c 2 || true
+yarn joystream-cli content:createVideo -i ~/tmp/empty.json -c 2 || true
+yarn joystream-cli content:updateVideo -i ./examples/content/UpdateVideo.json 1 || true
+# Set featured videos
+yarn joystream-cli content:setFeaturedVideos 1,2
+yarn joystream-cli content:setFeaturedVideos 2,3
+# Update channel censorship status
+yarn joystream-cli content:updateChannelCensorshipStatus 1 1 --rationale "Test"
+yarn joystream-cli content:updateVideoCensorshipStatus 1 1 --rationale "Test"
+# Display-only commands
+yarn joystream-cli content:videos
+yarn joystream-cli content:video 1
+yarn joystream-cli content:channels
+yarn joystream-cli content:channel 1
+yarn joystream-cli content:curatorGroups
+yarn joystream-cli content:curatorGroup 1
+# Remove videos/channels/assets
+yarn joystream-cli content:removeChannelAssets -c 1 -o 0
+yarn joystream-cli content:deleteVideo -v 1 -f
+yarn joystream-cli content:deleteVideo -v 2 -f
+yarn joystream-cli content:deleteVideo -v 3 -f
+yarn joystream-cli content:deleteChannel -c 1 -f
+yarn joystream-cli content:deleteChannel -c 2 -f
+yarn joystream-cli content:deleteChannel -c 3 -f

+ 128 - 156
cli/src/Api.ts

@@ -1,17 +1,14 @@
 import BN from 'bn.js'
-import { types } from '@joystream/types/'
+import { createType, types } from '@joystream/types/'
 import { ApiPromise, WsProvider } from '@polkadot/api'
-import { QueryableStorageMultiArg, SubmittableExtrinsic, QueryableStorageEntry } from '@polkadot/api/types'
+import { AugmentedQuery, SubmittableExtrinsic } from '@polkadot/api/types'
 import { formatBalance } from '@polkadot/util'
-import { Balance, Moment, BlockNumber } from '@polkadot/types/interfaces'
+import { Balance, BlockNumber } from '@polkadot/types/interfaces'
 import { KeyringPair } from '@polkadot/keyring/types'
-import { Codec, CodecArg } from '@polkadot/types/types'
-import { Option, Vec, UInt, Bytes } from '@polkadot/types'
+import { Codec } from '@polkadot/types/types'
+import { Option, UInt } from '@polkadot/types'
 import {
   AccountSummary,
-  CouncilInfoObj,
-  CouncilInfoTuple,
-  createCouncilInfoObj,
   WorkingGroups,
   Reward,
   GroupMember,
@@ -22,61 +19,50 @@ import {
   openingPolicyUnstakingPeriodsKeys,
   UnstakingPeriods,
   StakingPolicyUnstakingPeriodKey,
+  UnaugmentedApiPromise,
+  CouncilInfo,
 } from './Types'
 import { DeriveBalancesAll } from '@polkadot/api-derive/types'
 import { CLIError } from '@oclif/errors'
-import ExitCodes from './ExitCodes'
-import {
-  Worker,
-  WorkerId,
-  RoleStakeProfile,
-  Opening as WGOpening,
-  Application as WGApplication,
-  StorageProviderId,
-} from '@joystream/types/working-group'
-import {
-  Opening,
-  Application,
-  OpeningStage,
-  ApplicationStageKeys,
-  ApplicationId,
-  OpeningId,
-  StakingPolicy,
-} from '@joystream/types/hiring'
+import { Worker, WorkerId, RoleStakeProfile, Application as WGApplication } from '@joystream/types/working-group'
+import { Opening, Application, OpeningStage, ApplicationId, OpeningId, StakingPolicy } from '@joystream/types/hiring'
 import { MemberId, Membership } from '@joystream/types/members'
-import { RewardRelationship, RewardRelationshipId } from '@joystream/types/recurring-rewards'
-import { Stake, StakeId } from '@joystream/types/stake'
+import { RewardRelationshipId } from '@joystream/types/recurring-rewards'
+import { StakeId } from '@joystream/types/stake'
 
-import { InputValidationLengthConstraint, ChannelId, Url } from '@joystream/types/common'
+import { InputValidationLengthConstraint, ChannelId } from '@joystream/types/common'
 import {
   CuratorGroup,
   CuratorGroupId,
   Channel,
   Video,
   VideoId,
-  ChannelCategory,
-  VideoCategory,
   ChannelCategoryId,
   VideoCategoryId,
 } from '@joystream/types/content'
-import { ContentId, DataObject } from '@joystream/types/storage'
-import _ from 'lodash'
+import { Observable } from 'rxjs'
+import { BagId, DataObject, DataObjectId } from '@joystream/types/storage'
 
 export const DEFAULT_API_URI = 'ws://localhost:9944/'
 
 // Mapping of working group to api module
-export const apiModuleByGroup: { [key in WorkingGroups]: string } = {
+export const apiModuleByGroup = {
   [WorkingGroups.StorageProviders]: 'storageWorkingGroup',
-  [WorkingGroups.Curators]: 'contentDirectoryWorkingGroup',
-  [WorkingGroups.Operations]: 'operationsWorkingGroup',
+  [WorkingGroups.Curators]: 'contentWorkingGroup',
+  [WorkingGroups.OperationsAlpha]: 'operationsWorkingGroupAlpha',
+  [WorkingGroups.OperationsBeta]: 'operationsWorkingGroupBeta',
+  [WorkingGroups.OperationsGamma]: 'operationsWorkingGroupGamma',
   [WorkingGroups.Gateway]: 'gatewayWorkingGroup',
-}
+  [WorkingGroups.Distribution]: 'distributionWorkingGroup',
+} as const
 
 // Api wrapper for handling most common api calls and allowing easy API implementation switch in the future
 export default class Api {
   private _api: ApiPromise
+  public isDevelopment = false
 
-  private constructor(originalApi: ApiPromise) {
+  private constructor(originalApi: ApiPromise, isDevelopment: boolean) {
+    this.isDevelopment = isDevelopment
     this._api = originalApi
   }
 
@@ -84,16 +70,18 @@ export default class Api {
     return this._api
   }
 
-  private static async initApi(
-    apiUri: string = DEFAULT_API_URI,
-    metadataCache: Record<string, any>
-  ): Promise<ApiPromise> {
+  // Get api for use-cases where no type augmentations are desirable
+  public getUnaugmentedApi(): UnaugmentedApiPromise {
+    return (this._api as unknown) as UnaugmentedApiPromise
+  }
+
+  private static async initApi(apiUri: string = DEFAULT_API_URI, metadataCache: Record<string, any>) {
     const wsProvider: WsProvider = new WsProvider(apiUri)
     const api = new ApiPromise({ provider: wsProvider, types, metadata: metadataCache })
     await api.isReadyOrError
 
     // Initializing some api params based on pioneer/packages/react-api/Api.tsx
-    const [properties] = await Promise.all([api.rpc.system.properties()])
+    const [properties, chainType] = await Promise.all([api.rpc.system.properties(), api.rpc.system.chainType()])
 
     const tokenSymbol = properties.tokenSymbol.unwrap()[0].toString()
     const tokenDecimals = properties.tokenDecimals.unwrap()[0].toNumber()
@@ -104,29 +92,12 @@ export default class Api {
       unit: tokenSymbol,
     })
 
-    return api
-  }
-
-  static async create(apiUri: string = DEFAULT_API_URI, metadataCache: Record<string, any>): Promise<Api> {
-    const originalApi: ApiPromise = await Api.initApi(apiUri, metadataCache)
-    return new Api(originalApi)
+    return { api, properties, chainType }
   }
 
-  private queryMultiOnce(queries: Parameters<typeof ApiPromise.prototype.queryMulti>[0]): Promise<Codec[]> {
-    return new Promise((resolve, reject) => {
-      let unsub: () => void
-      this._api
-        .queryMulti(queries, (res) => {
-          // unsub should already be set at this point
-          if (!unsub) {
-            reject(new CLIError('API queryMulti issue - unsub method not set!', { exit: ExitCodes.ApiError }))
-          }
-          unsub()
-          resolve(res)
-        })
-        .then((unsubscribe) => (unsub = unsubscribe))
-        .catch((e) => reject(e))
-    })
+  static async create(apiUri = DEFAULT_API_URI, metadataCache: Record<string, any>): Promise<Api> {
+    const { api, chainType } = await Api.initApi(apiUri, metadataCache)
+    return new Api(api, chainType.isDevelopment || chainType.isLocal)
   }
 
   async bestNumber(): Promise<number> {
@@ -150,25 +121,51 @@ export default class Api {
     return { balances }
   }
 
-  async getCouncilInfo(): Promise<CouncilInfoObj> {
-    const queries: { [P in keyof CouncilInfoObj]: QueryableStorageMultiArg<'promise'> } = {
-      activeCouncil: this._api.query.council.activeCouncil,
-      termEndsAt: this._api.query.council.termEndsAt,
-      autoStart: this._api.query.councilElection.autoStart,
-      newTermDuration: this._api.query.councilElection.newTermDuration,
-      candidacyLimit: this._api.query.councilElection.candidacyLimit,
-      councilSize: this._api.query.councilElection.councilSize,
-      minCouncilStake: this._api.query.councilElection.minCouncilStake,
-      minVotingStake: this._api.query.councilElection.minVotingStake,
-      announcingPeriod: this._api.query.councilElection.announcingPeriod,
-      votingPeriod: this._api.query.councilElection.votingPeriod,
-      revealingPeriod: this._api.query.councilElection.revealingPeriod,
-      round: this._api.query.councilElection.round,
-      stage: this._api.query.councilElection.stage,
+  async getCouncilInfo(): Promise<CouncilInfo> {
+    const [
+      activeCouncil,
+      termEndsAt,
+      autoStart,
+      newTermDuration,
+      candidacyLimit,
+      councilSize,
+      minCouncilStake,
+      minVotingStake,
+      announcingPeriod,
+      votingPeriod,
+    ] = await Promise.all([
+      this._api.query.council.activeCouncil(),
+      this._api.query.council.termEndsAt(),
+      this._api.query.councilElection.autoStart(),
+      this._api.query.councilElection.newTermDuration(),
+      this._api.query.councilElection.candidacyLimit(),
+      this._api.query.councilElection.councilSize(),
+      this._api.query.councilElection.minCouncilStake(),
+      this._api.query.councilElection.minVotingStake(),
+      this._api.query.councilElection.announcingPeriod(),
+      this._api.query.councilElection.votingPeriod(),
+    ])
+    // The Promise.all typings only support tuples of up to 10 elements, so the remaining queries are split out
+    const [revealingPeriod, round, stage] = await Promise.all([
+      this._api.query.councilElection.revealingPeriod(),
+      this._api.query.councilElection.round(),
+      this._api.query.councilElection.stage(),
+    ])
+    return {
+      activeCouncil,
+      termEndsAt,
+      autoStart,
+      newTermDuration,
+      candidacyLimit,
+      councilSize,
+      minCouncilStake,
+      minVotingStake,
+      announcingPeriod,
+      votingPeriod,
+      revealingPeriod,
+      round,
+      stage,
     }
-    const results: CouncilInfoTuple = (await this.queryMultiOnce(Object.values(queries))) as CouncilInfoTuple
-
-    return createCouncilInfoObj(...results)
   }
 
   async estimateFee(account: KeyringPair, tx: SubmittableExtrinsic<'promise'>): Promise<Balance> {
@@ -184,12 +181,10 @@ export default class Api {
   // TODO: This is a lot of repeated logic from "/pioneer/joy-utils/transport"
   // It will be refactored to "joystream-js" soon
   async entriesByIds<IDType extends UInt, ValueType extends Codec>(
-    apiMethod: QueryableStorageEntry<'promise'>,
-    firstKey?: CodecArg // First key in case of double maps
+    apiMethod: AugmentedQuery<'promise', (key: IDType) => Observable<ValueType>, [IDType]>
   ): Promise<[IDType, ValueType][]> {
-    const entries: [IDType, ValueType][] = (await apiMethod.entries<ValueType>(firstKey)).map(([storageKey, value]) => [
-      // If double-map (first key is provided), we map entries by second key
-      storageKey.args[firstKey !== undefined ? 1 : 0] as IDType,
+    const entries: [IDType, ValueType][] = (await apiMethod.entries()).map(([storageKey, value]) => [
+      storageKey.args[0] as IDType,
       value,
     ])
 
@@ -203,7 +198,7 @@ export default class Api {
   }
 
   protected async blockTimestamp(height: number): Promise<Date> {
-    const blockTime = (await this._api.query.timestamp.now.at(await this.blockHash(height))) as Moment
+    const blockTime = await this._api.query.timestamp.now.at(await this.blockHash(height))
 
     return new Date(blockTime.toNumber())
   }
@@ -214,14 +209,14 @@ export default class Api {
   }
 
   protected async membershipById(memberId: MemberId): Promise<Membership | null> {
-    const profile = (await this._api.query.members.membershipById(memberId)) as Membership
+    const profile = await this._api.query.members.membershipById(memberId)
 
     // Can't just use profile.isEmpty, because profile.suspended is a Bool (whose isEmpty method always returns false)
     return profile.handle.isEmpty ? null : profile
   }
 
   async groupLead(group: WorkingGroups): Promise<GroupMember | null> {
-    const optLeadId = (await this.workingGroupApiQuery(group).currentLead()) as Option<WorkerId>
+    const optLeadId = await this.workingGroupApiQuery(group).currentLead()
 
     if (!optLeadId.isSome) {
       return null
@@ -234,7 +229,7 @@ export default class Api {
   }
 
   protected async stakeValue(stakeId: StakeId): Promise<Balance> {
-    const stake = await this._api.query.stake.stakes<Stake>(stakeId)
+    const stake = await this._api.query.stake.stakes(stakeId)
     return stake.value
   }
 
@@ -243,9 +238,7 @@ export default class Api {
   }
 
   protected async workerReward(relationshipId: RewardRelationshipId): Promise<Reward> {
-    const rewardRelationship = await this._api.query.recurringRewards.rewardRelationships<RewardRelationship>(
-      relationshipId
-    )
+    const rewardRelationship = await this._api.query.recurringRewards.rewardRelationships(relationshipId)
 
     return {
       totalRecieved: rewardRelationship.total_reward_received,
@@ -286,14 +279,14 @@ export default class Api {
   }
 
   async workerByWorkerId(group: WorkingGroups, workerId: number): Promise<Worker> {
-    const nextId = await this.workingGroupApiQuery(group).nextWorkerId<WorkerId>()
+    const nextId = await this.workingGroupApiQuery(group).nextWorkerId()
 
     // This is chain-specific, but if the next id is still 0, it means no workers have been added yet
     if (workerId < 0 || workerId >= nextId.toNumber()) {
       throw new CLIError('Invalid worker id!')
     }
 
-    const worker = await this.workingGroupApiQuery(group).workerById<Worker>(workerId)
+    const worker = await this.workingGroupApiQuery(group).workerById(workerId)
 
     if (worker.isEmpty) {
       throw new CLIError('This worker is not active anymore')
@@ -302,7 +295,7 @@ export default class Api {
     return worker
   }
 
-  async groupMember(group: WorkingGroups, workerId: number) {
+  async groupMember(group: WorkingGroups, workerId: number): Promise<GroupMember> {
     const worker = await this.workerByWorkerId(group, workerId)
     return await this.parseGroupMember(this._api.createType('WorkerId', workerId), worker)
   }
@@ -318,12 +311,12 @@ export default class Api {
   }
 
   groupWorkers(group: WorkingGroups): Promise<[WorkerId, Worker][]> {
-    return this.entriesByIds<WorkerId, Worker>(this.workingGroupApiQuery(group).workerById)
+    return this.entriesByIds(this.workingGroupApiQuery(group).workerById)
   }
 
   async openingsByGroup(group: WorkingGroups): Promise<GroupOpening[]> {
     let openings: GroupOpening[] = []
-    const nextId = await this.workingGroupApiQuery(group).nextOpeningId<OpeningId>()
+    const nextId = await this.workingGroupApiQuery(group).nextOpeningId()
 
     // This is chain-specific, but if the next id is still 0, it means no openings have been added yet
     if (!nextId.eq(0)) {
@@ -335,23 +328,23 @@ export default class Api {
   }
 
   protected async hiringOpeningById(id: number | OpeningId): Promise<Opening> {
-    const result = await this._api.query.hiring.openingById<Opening>(id)
+    const result = await this._api.query.hiring.openingById(id)
     return result
   }
 
   protected async hiringApplicationById(id: number | ApplicationId): Promise<Application> {
-    const result = await this._api.query.hiring.applicationById<Application>(id)
+    const result = await this._api.query.hiring.applicationById(id)
     return result
   }
 
   async wgApplicationById(group: WorkingGroups, wgApplicationId: number): Promise<WGApplication> {
-    const nextAppId = await this.workingGroupApiQuery(group).nextApplicationId<ApplicationId>()
+    const nextAppId = await this.workingGroupApiQuery(group).nextApplicationId()
 
     if (wgApplicationId < 0 || wgApplicationId >= nextAppId.toNumber()) {
       throw new CLIError('Invalid working group application ID!')
     }
 
-    const result = await this.workingGroupApiQuery(group).applicationById<WGApplication>(wgApplicationId)
+    const result = await this.workingGroupApiQuery(group).applicationById(wgApplicationId)
     return result
   }
 
@@ -372,7 +365,7 @@ export default class Api {
         role: roleStakingId.isSome ? (await this.stakeValue(roleStakingId.unwrap())).toNumber() : 0,
       },
       humanReadableText: application.human_readable_text.toString(),
-      stage: application.stage.type as ApplicationStageKeys,
+      stage: application.stage.type,
     }
   }
 
@@ -382,9 +375,7 @@ export default class Api {
   }
 
   protected async groupOpeningApplications(group: WorkingGroups, wgOpeningId: number): Promise<GroupApplication[]> {
-    const wgApplicationEntries = await this.entriesByIds<ApplicationId, WGApplication>(
-      this.workingGroupApiQuery(group).applicationById
-    )
+    const wgApplicationEntries = await this.entriesByIds(this.workingGroupApiQuery(group).applicationById)
 
     return Promise.all(
       wgApplicationEntries
@@ -394,13 +385,13 @@ export default class Api {
   }
 
   async groupOpening(group: WorkingGroups, wgOpeningId: number): Promise<GroupOpening> {
-    const nextId = ((await this.workingGroupApiQuery(group).nextOpeningId()) as OpeningId).toNumber()
+    const nextId = (await this.workingGroupApiQuery(group).nextOpeningId()).toNumber()
 
     if (wgOpeningId < 0 || wgOpeningId >= nextId) {
       throw new CLIError('Invalid working group opening ID!')
     }
 
-    const groupOpening = await this.workingGroupApiQuery(group).openingById<WGOpening>(wgOpeningId)
+    const groupOpening = await this.workingGroupApiQuery(group).openingById(wgOpeningId)
 
     const openingId = groupOpening.hiring_opening_id.toNumber()
     const opening = await this.hiringOpeningById(openingId)
@@ -458,7 +449,7 @@ export default class Api {
     if (stage.isOfType('WaitingToBegin')) {
       const stageData = stage.asType('WaitingToBegin')
       const currentBlockNumber = (await this._api.derive.chain.bestNumber()).toNumber()
-      const expectedBlockTime = (this._api.consts.babe.expectedBlockTime as Moment).toNumber()
+      const expectedBlockTime = this._api.consts.babe.expectedBlockTime.toNumber()
       status = OpeningStatus.WaitingToBegin
       stageBlock = stageData.begins_at_block.toNumber()
       stageDate = new Date(Date.now() + (stageBlock - currentBlockNumber) * expectedBlockTime)
@@ -494,34 +485,34 @@ export default class Api {
   }
 
   async getMemberIdsByControllerAccount(address: string): Promise<MemberId[]> {
-    const ids = await this._api.query.members.memberIdsByControllerAccountId<Vec<MemberId>>(address)
+    const ids = await this._api.query.members.memberIdsByControllerAccountId(address)
     return ids.toArray()
   }
 
   async workerExitRationaleConstraint(group: WorkingGroups): Promise<InputValidationLengthConstraint> {
-    return await this.workingGroupApiQuery(group).workerExitRationaleText<InputValidationLengthConstraint>()
+    return await this.workingGroupApiQuery(group).workerExitRationaleText()
   }
 
   // Content directory
   async availableChannels(): Promise<[ChannelId, Channel][]> {
-    return await this.entriesByIds<ChannelId, Channel>(this._api.query.content.channelById)
+    return await this.entriesByIds(this._api.query.content.channelById)
   }
 
   async availableVideos(): Promise<[VideoId, Video][]> {
-    return await this.entriesByIds<VideoId, Video>(this._api.query.content.videoById)
+    return await this.entriesByIds(this._api.query.content.videoById)
   }
 
   availableCuratorGroups(): Promise<[CuratorGroupId, CuratorGroup][]> {
-    return this.entriesByIds<CuratorGroupId, CuratorGroup>(this._api.query.content.curatorGroupById)
+    return this.entriesByIds(this._api.query.content.curatorGroupById)
   }
 
   async curatorGroupById(id: number): Promise<CuratorGroup | null> {
     const exists = !!(await this._api.query.content.curatorGroupById.size(id)).toNumber()
-    return exists ? await this._api.query.content.curatorGroupById<CuratorGroup>(id) : null
+    return exists ? await this._api.query.content.curatorGroupById(id) : null
   }
 
   async nextCuratorGroupId(): Promise<number> {
-    return (await this._api.query.content.nextCuratorGroupId<CuratorGroupId>()).toNumber()
+    return (await this._api.query.content.nextCuratorGroupId()).toNumber()
   }
 
   async channelById(channelId: ChannelId | number | string): Promise<Channel> {
@@ -530,26 +521,13 @@ export default class Api {
     if (!exists) {
       throw new CLIError(`Channel by id ${channelId.toString()} not found!`)
     }
-    const channel = await this._api.query.content.channelById<Channel>(channelId)
+    const channel = await this._api.query.content.channelById(channelId)
 
     return channel
   }
 
-  async videosByChannelId(channelId: ChannelId | number | string): Promise<[VideoId, Video][]> {
-    const channel = await this.channelById(channelId)
-    if (channel) {
-      return Promise.all(
-        channel.videos.map(
-          async (videoId) => [videoId, await this._api.query.content.videoById<Video>(videoId)] as [VideoId, Video]
-        )
-      )
-    } else {
-      return []
-    }
-  }
-
   async videoById(videoId: VideoId | number | string): Promise<Video> {
-    const video = await this._api.query.content.videoById<Video>(videoId)
+    const video = await this._api.query.content.videoById(videoId)
     if (video.isEmpty) {
       throw new CLIError(`Video by id ${videoId.toString()} not found!`)
     }
@@ -557,45 +535,39 @@ export default class Api {
     return video
   }
 
+  async dataObjectsByIds(bagId: BagId, ids: DataObjectId[]): Promise<DataObject[]> {
+    return this._api.query.storage.dataObjectsById.multi(ids.map((id) => [bagId, id]))
+  }
+
   async channelCategoryIds(): Promise<ChannelCategoryId[]> {
     // There is currently no way to differentiate between a non-existing and an existing category
     // other than fetching all existing category ids (even the .size() trick does not work, as the object is empty)
-    return (
-      await this.entriesByIds<ChannelCategoryId, ChannelCategory>(this._api.query.content.channelCategoryById)
-    ).map(([id]) => id)
+    return (await this.entriesByIds(this._api.query.content.channelCategoryById)).map(([id]) => id)
   }
 
   async videoCategoryIds(): Promise<VideoCategoryId[]> {
     // There is currently no way to differentiate between a non-existing and an existing category
     // other than fetching all existing category ids (even the .size() trick does not work, as the object is empty)
-    return (await this.entriesByIds<VideoCategoryId, VideoCategory>(this._api.query.content.videoCategoryById)).map(
-      ([id]) => id
-    )
+    return (await this.entriesByIds(this._api.query.content.videoCategoryById)).map(([id]) => id)
   }
 
-  async dataObjectsByContentIds(contentIds: ContentId[]): Promise<DataObject[]> {
-    const dataObjects = await this._api.query.dataDirectory.dataByContentId.multi<DataObject>(contentIds)
-    const notFoundIndex = dataObjects.findIndex((o) => o.isEmpty)
-    if (notFoundIndex !== -1) {
-      throw new CLIError(`DataObject not found by id ${contentIds[notFoundIndex].toString()}`)
-    }
-    return dataObjects
+  async dataObjectsInBag(bagId: BagId): Promise<[DataObjectId, DataObject][]> {
+    return (await this._api.query.storage.dataObjectsById.entries(bagId)).map(([{ args: [, dataObjectId] }, value]) => [
+      dataObjectId,
+      value,
+    ])
   }
 
-  async getRandomBootstrapEndpoint(): Promise<string | null> {
-    const endpoints = await this._api.query.discovery.bootstrapEndpoints<Vec<Url>>()
-    const randomEndpoint = _.sample(endpoints.toArray())
-    return randomEndpoint ? randomEndpoint.toString() : null
+  async getMembers(ids: MemberId[] | number[]): Promise<Membership[]> {
+    return this._api.query.members.membershipById.multi(ids)
   }
 
-  async storageProviderEndpoint(storageProviderId: StorageProviderId | number): Promise<string> {
-    const value = await this._api.query.storageWorkingGroup.workerStorage<Bytes>(storageProviderId)
-    return this._api.createType('Text', value).toString()
+  async memberEntriesByIds(ids: MemberId[] | number[]): Promise<[MemberId, Membership][]> {
+    const memberships = await this._api.query.members.membershipById.multi<Membership>(ids)
+    return ids.map((id, i) => [createType('MemberId', id), memberships[i]])
   }
 
-  async allStorageProviderEndpoints(): Promise<string[]> {
-    const workerIds = (await this.groupWorkers(WorkingGroups.StorageProviders)).map(([id]) => id)
-    const workerStorages = await this._api.query.storageWorkingGroup.workerStorage.multi<Bytes>(workerIds)
-    return workerStorages.map((storage) => this._api.createType('Text', storage).toString())
+  allMemberEntries(): Promise<[MemberId, Membership][]> {
+    return this.entriesByIds(this._api.query.members.membershipById)
   }
 }
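
For context (not part of the diff), a minimal sketch of how the reworked Api wrapper could be used on its own; the empty metadata cache, the relative import path and the logging are assumptions made for illustration only:

    import Api, { DEFAULT_API_URI } from './Api'

    // Illustrative sketch only: connect to a local node and read some content-directory state.
    async function main(): Promise<void> {
      // The CLI normally passes a persisted metadata cache; an empty object is assumed here.
      const api = await Api.create(DEFAULT_API_URI, {})

      const channels = await api.availableChannels()
      console.log(`Channels on chain: ${channels.length}`)
      channels.forEach(([id]) => console.log(`Channel #${id.toString()}`))

      console.log(`Next curator group id: ${await api.nextCuratorGroupId()}`)

      await api.getOriginalApi().disconnect()
    }

    main().catch(console.error)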

+ 2 - 1
cli/src/ExitCodes.ts

@@ -11,7 +11,8 @@ enum ExitCodes {
   UnexpectedException = 500,
   FsOperationFailed = 501,
   ApiError = 502,
-  ExternalInfrastructureError = 503,
+  StorageNodeError = 503,
   ActionCurrentlyUnavailable = 504,
+  QueryNodeError = 505,
 }
 export = ExitCodes
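
A hedged illustration of how the renamed and newly added exit codes are raised from command code; the helper names and messages below are placeholders, not code from this diff:

    import { CLIError } from '@oclif/errors'
    import ExitCodes from './ExitCodes'

    // Placeholder helpers, only to show the intended split between storage node and query node failures.
    export function storageNodeFailure(endpoint: string): never {
      throw new CLIError(`Request to storage node ${endpoint} failed`, { exit: ExitCodes.StorageNodeError })
    }

    export function queryNodeFailure(): never {
      throw new CLIError('Query node request failed', { exit: ExitCodes.QueryNodeError })
    }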

+ 125 - 0
cli/src/QueryNodeApi.ts

@@ -0,0 +1,125 @@
+import { StorageNodeInfo } from './Types'
+import {
+  ApolloClient,
+  InMemoryCache,
+  HttpLink,
+  NormalizedCacheObject,
+  DocumentNode,
+  from,
+  ApolloLink,
+} from '@apollo/client/core'
+import { ErrorLink, onError } from '@apollo/client/link/error'
+import { Maybe } from './graphql/generated/schema'
+import {
+  GetStorageNodesInfoByBagId,
+  GetStorageNodesInfoByBagIdQuery,
+  GetStorageNodesInfoByBagIdQueryVariables,
+  DataObjectInfoFragment,
+  GetDataObjectsByBagId,
+  GetDataObjectsByBagIdQuery,
+  GetDataObjectsByBagIdQueryVariables,
+  GetDataObjectsByVideoId,
+  GetDataObjectsByVideoIdQuery,
+  GetDataObjectsByVideoIdQueryVariables,
+  GetDataObjectsByChannelId,
+  GetDataObjectsByChannelIdQuery,
+  GetDataObjectsByChannelIdQueryVariables,
+} from './graphql/generated/queries'
+import { URL } from 'url'
+import fetch from 'cross-fetch'
+
+export default class QueryNodeApi {
+  private _qnClient: ApolloClient<NormalizedCacheObject>
+
+  public constructor(uri?: string, errorHandler?: ErrorLink.ErrorHandler) {
+    const links: ApolloLink[] = []
+    if (errorHandler) {
+      links.push(onError(errorHandler))
+    }
+    links.push(new HttpLink({ uri, fetch }))
+    this._qnClient = new ApolloClient({
+      link: from(links),
+      cache: new InMemoryCache(),
+      defaultOptions: { query: { fetchPolicy: 'no-cache', errorPolicy: 'all' } },
+    })
+  }
+
+  // Get entity by unique input
+  protected async uniqueEntityQuery<
+    QueryT extends { [k: string]: Maybe<Record<string, unknown>> | undefined },
+    VariablesT extends Record<string, unknown>
+  >(
+    query: DocumentNode,
+    variables: VariablesT,
+    resultKey: keyof QueryT
+  ): Promise<Required<QueryT>[keyof QueryT] | null> {
+    return (await this._qnClient.query<QueryT, VariablesT>({ query, variables })).data[resultKey] || null
+  }
+
+  // Get entities by "non-unique" input and return first result
+  protected async firstEntityQuery<
+    QueryT extends { [k: string]: unknown[] },
+    VariablesT extends Record<string, unknown>
+  >(query: DocumentNode, variables: VariablesT, resultKey: keyof QueryT): Promise<QueryT[keyof QueryT][number] | null> {
+    return (await this._qnClient.query<QueryT, VariablesT>({ query, variables })).data[resultKey][0] || null
+  }
+
+  // Get multiple entities
+  protected async multipleEntitiesQuery<
+    QueryT extends { [k: string]: unknown[] },
+    VariablesT extends Record<string, unknown>
+  >(query: DocumentNode, variables: VariablesT, resultKey: keyof QueryT): Promise<QueryT[keyof QueryT]> {
+    return (await this._qnClient.query<QueryT, VariablesT>({ query, variables })).data[resultKey]
+  }
+
+  async dataObjectsByBagId(bagId: string): Promise<DataObjectInfoFragment[]> {
+    return this.multipleEntitiesQuery<GetDataObjectsByBagIdQuery, GetDataObjectsByBagIdQueryVariables>(
+      GetDataObjectsByBagId,
+      { bagId },
+      'storageDataObjects'
+    )
+  }
+
+  async dataObjectsByVideoId(videoId: string): Promise<DataObjectInfoFragment[]> {
+    return this.multipleEntitiesQuery<GetDataObjectsByVideoIdQuery, GetDataObjectsByVideoIdQueryVariables>(
+      GetDataObjectsByVideoId,
+      { videoId },
+      'storageDataObjects'
+    )
+  }
+
+  async dataObjectsByChannelId(channelId: string): Promise<DataObjectInfoFragment[]> {
+    return this.multipleEntitiesQuery<GetDataObjectsByChannelIdQuery, GetDataObjectsByChannelIdQueryVariables>(
+      GetDataObjectsByChannelId,
+      { channelId },
+      'storageDataObjects'
+    )
+  }
+
+  async storageNodesInfoByBagId(bagId: string): Promise<StorageNodeInfo[]> {
+    const result = await this.multipleEntitiesQuery<
+      GetStorageNodesInfoByBagIdQuery,
+      GetStorageNodesInfoByBagIdQueryVariables
+    >(GetStorageNodesInfoByBagId, { bagId }, 'storageBuckets')
+
+    const validNodesInfo: StorageNodeInfo[] = []
+    for (const { operatorMetadata, id } of result) {
+      if (operatorMetadata?.nodeEndpoint) {
+        try {
+          const rootEndpoint = operatorMetadata.nodeEndpoint
+          const apiEndpoint = new URL(
+            'api/v1',
+            rootEndpoint.endsWith('/') ? rootEndpoint : rootEndpoint + '/'
+          ).toString()
+          validNodesInfo.push({
+            apiEndpoint,
+            bucketId: parseInt(id),
+          })
+        } catch (e) {
+          continue
+        }
+      }
+    }
+    return validNodesInfo
+  }
+}
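
A minimal standalone usage sketch of the new query node client; the endpoint and the bag id string are assumptions, not values taken from this diff:

    import QueryNodeApi from './QueryNodeApi'

    // Illustrative only: list active storage node endpoints for a channel bag.
    async function listStorageNodes(): Promise<void> {
      const qnApi = new QueryNodeApi('http://localhost:8081/graphql', (err) =>
        console.warn(`Query node error: ${err.networkError?.message || err.graphQLErrors?.join('\n')}`)
      )

      // 'dynamic:channel:1' follows the bag id string format used by the CLI commands (assumed here).
      const nodes = await qnApi.storageNodesInfoByBagId('dynamic:channel:1')
      nodes.forEach(({ bucketId, apiEndpoint }) => console.log(`Bucket ${bucketId}: ${apiEndpoint}`))
    }

    listStorageNodes().catch(console.error)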

+ 89 - 73
cli/src/Types.ts

@@ -1,23 +1,24 @@
 import BN from 'bn.js'
 import { ElectionStage, Seat } from '@joystream/types/council'
-import { Option } from '@polkadot/types'
+import { Option, bool } from '@polkadot/types'
 import { Codec } from '@polkadot/types/types'
 import { BlockNumber, Balance, AccountId } from '@polkadot/types/interfaces'
 import { DeriveBalancesAll } from '@polkadot/api-derive/types'
 import { KeyringPair } from '@polkadot/keyring/types'
 import { WorkerId, OpeningType } from '@joystream/types/working-group'
 import { Membership, MemberId } from '@joystream/types/members'
-import { Opening, StakingPolicy, ApplicationStageKeys } from '@joystream/types/hiring'
+import { Opening, StakingPolicy, ApplicationStage } from '@joystream/types/hiring'
 import { Validator } from 'inquirer'
+import { ApiPromise } from '@polkadot/api'
+import { SubmittableModuleExtrinsics, QueryableModuleStorage, QueryableModuleConsts } from '@polkadot/api/types'
+import { JSONSchema4 } from 'json-schema'
 import {
-  VideoMetadata,
-  ChannelMetadata,
-  ChannelCategoryMetadata,
-  VideoCategoryMetadata,
-} from '@joystream/content-metadata-protobuf'
-import { ContentId, ContentParameters } from '@joystream/types/storage'
-
-import { JSONSchema7, JSONSchema7Definition } from 'json-schema'
+  IChannelCategoryMetadata,
+  IChannelMetadata,
+  IVideoCategoryMetadata,
+  IVideoMetadata,
+} from '@joystream/metadata-protobuf'
+import { DataObjectCreationParameters } from '@joystream/types/storage'
 
 // KeyringPair type extended with mandatory "meta.name"
 // It's used for accounts/keys management within CLI.
@@ -33,43 +34,21 @@ export type AccountSummary = {
   balances: DeriveBalancesAll
 }
 
-// This function allows us to easily transform the tuple into the object
-// and simplifies the creation of consitent Object and Tuple types (seen below).
-export function createCouncilInfoObj(
-  activeCouncil: Seat[],
-  termEndsAt: BlockNumber,
-  autoStart: boolean,
-  newTermDuration: BN,
-  candidacyLimit: BN,
-  councilSize: BN,
-  minCouncilStake: Balance,
-  minVotingStake: Balance,
-  announcingPeriod: BlockNumber,
-  votingPeriod: BlockNumber,
-  revealingPeriod: BlockNumber,
-  round: BN,
+export type CouncilInfo = {
+  activeCouncil: Seat[]
+  termEndsAt: BlockNumber
+  autoStart: bool
+  newTermDuration: BN
+  candidacyLimit: BN
+  councilSize: BN
+  minCouncilStake: Balance
+  minVotingStake: Balance
+  announcingPeriod: BlockNumber
+  votingPeriod: BlockNumber
+  revealingPeriod: BlockNumber
+  round: BN
   stage: Option<ElectionStage>
-) {
-  return {
-    activeCouncil,
-    termEndsAt,
-    autoStart,
-    newTermDuration,
-    candidacyLimit,
-    councilSize,
-    minCouncilStake,
-    minVotingStake,
-    announcingPeriod,
-    votingPeriod,
-    revealingPeriod,
-    round,
-    stage,
-  }
 }
-// Object/Tuple containing council/councilElection information (council:info).
-// The tuple is useful, because that's how api.queryMulti returns the results.
-export type CouncilInfoTuple = Parameters<typeof createCouncilInfoObj>
-export type CouncilInfoObj = ReturnType<typeof createCouncilInfoObj>
 
 // Object with "name" and "value" properties, used for rendering simple CLI tables like:
 // Total balance:   100 JOY
@@ -80,15 +59,22 @@ export type NameValueObj = { name: string; value: string }
 export enum WorkingGroups {
   StorageProviders = 'storageProviders',
   Curators = 'curators',
-  Operations = 'operations',
+  OperationsAlpha = 'operationsAlpha',
+  OperationsBeta = 'operationsBeta',
+  OperationsGamma = 'operationsGamma',
   Gateway = 'gateway',
+  Distribution = 'distributors',
 }
 
 // Working groups currently available in the CLI
 export const AvailableGroups: readonly WorkingGroups[] = [
   WorkingGroups.StorageProviders,
   WorkingGroups.Curators,
-  WorkingGroups.Operations,
+  WorkingGroups.OperationsAlpha,
+  WorkingGroups.OperationsBeta,
+  WorkingGroups.OperationsGamma,
+  WorkingGroups.Gateway,
+  WorkingGroups.Distribution,
 ] as const
 
 export type Reward = {
@@ -119,7 +105,7 @@ export type GroupApplication = {
     role: number
   }
   humanReadableText: string
-  stage: ApplicationStageKeys
+  stage: keyof ApplicationStage['typeDefinitions']
 }
 
 export enum OpeningStatus {
@@ -206,18 +192,21 @@ export type ApiMethodNamedArg = {
 }
 export type ApiMethodNamedArgs = ApiMethodNamedArg[]
 
-// Content-related
-export enum AssetType {
-  AnyAsset = 1,
+// Api without TypeScript augmentations for "query", "tx" and "consts" (useful when more type flexibility is needed)
+export type UnaugmentedApiPromise = Omit<ApiPromise, 'query' | 'tx' | 'consts'> & {
+  query: { [key: string]: QueryableModuleStorage<'promise'> }
+  tx: { [key: string]: SubmittableModuleExtrinsics<'promise'> }
+  consts: { [key: string]: QueryableModuleConsts }
 }
 
-export type InputAsset = {
+export type AssetToUpload = {
+  dataObjectId: BN
   path: string
-  contentId: ContentId
 }
 
-export type InputAssetDetails = InputAsset & {
-  parameters: ContentParameters
+export type ResolvedAsset = {
+  path: string
+  parameters: DataObjectCreationParameters
 }
 
 export type VideoFFProbeMetadata = {
@@ -234,47 +223,74 @@ export type VideoFileMetadata = VideoFFProbeMetadata & {
   mimeType: string
 }
 
-export type VideoInputParameters = Omit<VideoMetadata.AsObject, 'video' | 'thumbnailPhoto'> & {
+export type VideoInputParameters = Omit<IVideoMetadata, 'video' | 'thumbnailPhoto'> & {
   videoPath?: string
   thumbnailPhotoPath?: string
 }
 
-export type ChannelInputParameters = Omit<ChannelMetadata.AsObject, 'coverPhoto' | 'avatarPhoto'> & {
+export type ChannelInputParameters = Omit<IChannelMetadata, 'coverPhoto' | 'avatarPhoto'> & {
   coverPhotoPath?: string
   avatarPhotoPath?: string
   rewardAccount?: string
+  collaborators?: number[]
 }
 
-export type ChannelCategoryInputParameters = ChannelCategoryMetadata.AsObject
+export type ChannelCategoryInputParameters = IChannelCategoryMetadata
 
-export type VideoCategoryInputParameters = VideoCategoryMetadata.AsObject
+export type VideoCategoryInputParameters = IVideoCategoryMetadata
+
+type AnyNonObject = string | number | boolean | any[] | Long
 
 // JSONSchema utility types
+
+// Based on: https://stackoverflow.com/questions/51465182/how-to-remove-index-signature-using-mapped-types
+type RemoveIndex<T> = {
+  [K in keyof T as string extends K ? never : number extends K ? never : K]: T[K]
+}
+
+type AnyJSONSchema = RemoveIndex<JSONSchema4>
+
 export type JSONTypeName<T> = T extends string
   ? 'string' | ['string', 'null']
   : T extends number
   ? 'number' | ['number', 'null']
-  : T extends any[]
-  ? 'array' | ['array', 'null']
-  : T extends Record<string, unknown>
-  ? 'object' | ['object', 'null']
   : T extends boolean
   ? 'boolean' | ['boolean', 'null']
-  : never
+  : T extends any[]
+  ? 'array' | ['array', 'null']
+  : T extends Long
+  ? 'number' | ['number', 'null']
+  : 'object' | ['object', 'null']
 
-export type PropertySchema<P> = Omit<
-  JSONSchema7Definition & {
-    type: JSONTypeName<P>
-    properties: P extends Record<string, unknown> ? JsonSchemaProperties<P> : never
-  },
-  P extends Record<string, unknown> ? '' : 'properties'
->
+export type PropertySchema<P> = Omit<AnyJSONSchema, 'type' | 'properties'> & {
+  type: JSONTypeName<P>
+} & (P extends AnyNonObject ? { properties?: never } : { properties: JsonSchemaProperties<P> })
 
-export type JsonSchemaProperties<T extends Record<string, unknown>> = {
+export type JsonSchemaProperties<T> = {
   [K in keyof Required<T>]: PropertySchema<Required<T>[K]>
 }
 
-export type JsonSchema<T extends Record<string, unknown>> = JSONSchema7 & {
+export type JsonSchema<T> = Omit<AnyJSONSchema, 'type' | 'properties'> & {
   type: 'object'
   properties: JsonSchemaProperties<T>
 }
+
+// Storage node related types
+
+export type StorageNodeInfo = {
+  bucketId: number
+  apiEndpoint: string
+}
+
+export type TokenRequest = {
+  data: TokenRequestData
+  signature: string
+}
+
+export type TokenRequestData = {
+  memberId: number
+  accountId: string
+  dataObjectId: number
+  storageBucketId: number
+  bagId: string
+}
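
To show what the reworked JSON-schema utility types buy, a hedged sketch with a made-up input type; the compiler now enforces that every property has a schema entry whose "type" matches the TypeScript type:

    import { JsonSchema } from './Types'

    // Hypothetical input shape, used only to demonstrate JsonSchema<T> / PropertySchema<P>.
    type ExampleInput = {
      title: string
      isPublic?: boolean
      tags?: string[]
    }

    export const ExampleInputSchema: JsonSchema<ExampleInput> = {
      type: 'object',
      additionalProperties: false,
      properties: {
        title: { type: 'string' },
        isPublic: { type: 'boolean' },
        tags: { type: 'array', items: { type: 'string' } },
      },
    }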

+ 241 - 132
cli/src/base/AccountsCommandBase.ts

@@ -1,4 +1,4 @@
-import fs from 'fs'
+import fs, { readdirSync } from 'fs'
 import path from 'path'
 import slug from 'slug'
 import inquirer from 'inquirer'
@@ -10,9 +10,20 @@ import { formatBalance } from '@polkadot/util'
 import { NamedKeyringPair } from '../Types'
 import { DeriveBalancesAll } from '@polkadot/api-derive/types'
 import { toFixedLength } from '../helpers/display'
+import { MemberId, Membership } from '@joystream/types/members'
+import { AccountId } from '@polkadot/types/interfaces'
+import { KeyringPair, KeyringInstance, KeyringOptions } from '@polkadot/keyring/types'
+import { KeypairType } from '@polkadot/util-crypto/types'
+import { createTestKeyring } from '@polkadot/keyring/testing'
+import chalk from 'chalk'
+import { mnemonicGenerate } from '@polkadot/util-crypto'
+import { validateAddress } from '../helpers/validation'
 
 const ACCOUNTS_DIRNAME = 'accounts'
-const SPECIAL_ACCOUNT_POSTFIX = '__DEV'
+export const DEFAULT_ACCOUNT_TYPE = 'sr25519'
+export const KEYRING_OPTIONS: KeyringOptions = {
+  type: DEFAULT_ACCOUNT_TYPE,
+}
 
 /**
  * Abstract base class for account-related commands.
@@ -22,16 +33,36 @@ const SPECIAL_ACCOUNT_POSTFIX = '__DEV'
  * Where: APP_DATA_PATH is provided by StateAwareCommandBase and ACCOUNTS_DIRNAME is a const (see above).
  */
 export default abstract class AccountsCommandBase extends ApiCommandBase {
+  private selectedMember: [MemberId, Membership] | undefined
+  private _keyring: KeyringInstance | undefined
+
+  private get keyring(): KeyringInstance {
+    if (!this._keyring) {
+      this.error('Trying to access Keyring before AccountsCommandBase initialization', {
+        exit: ExitCodes.UnexpectedException,
+      })
+    }
+    return this._keyring
+  }
+
+  isKeyAvailable(key: AccountId | string): boolean {
+    return this.keyring.getPairs().some((p) => p.address === key.toString())
+  }
+
   getAccountsDirPath(): string {
     return path.join(this.getAppDataPath(), ACCOUNTS_DIRNAME)
   }
 
-  getAccountFilePath(account: NamedKeyringPair, isSpecial = false): string {
-    return path.join(this.getAccountsDirPath(), this.generateAccountFilename(account, isSpecial))
+  getAccountFileName(accountName: string): string {
+    return `${slug(accountName)}.json`
   }
 
-  generateAccountFilename(account: NamedKeyringPair, isSpecial = false): string {
-    return `${slug(account.meta.name, '_')}__${account.address}${isSpecial ? SPECIAL_ACCOUNT_POSTFIX : ''}.json`
+  getAccountFilePath(accountName: string): string {
+    return path.join(this.getAccountsDirPath(), this.getAccountFileName(accountName))
+  }
+
+  isAccountNameTaken(accountName: string): boolean {
+    return readdirSync(this.getAccountsDirPath()).some((filename) => filename === this.getAccountFileName(accountName))
   }
 
   private initAccountsFs(): void {
@@ -40,23 +71,58 @@ export default abstract class AccountsCommandBase extends ApiCommandBase {
     }
   }
 
-  saveAccount(account: NamedKeyringPair, password: string, isSpecial = false): void {
-    try {
-      const destPath = this.getAccountFilePath(account, isSpecial)
-      fs.writeFileSync(destPath, JSON.stringify(account.toJson(password)))
-    } catch (e) {
-      throw this.createDataWriteError()
+  async createAccount(
+    name?: string,
+    masterKey?: KeyringPair,
+    password?: string,
+    type?: KeypairType
+  ): Promise<NamedKeyringPair> {
+    while (!name || this.isAccountNameTaken(name)) {
+      if (name) {
+        this.warn(`Account ${chalk.magentaBright(name)} already exists... Try a different name`)
+      }
+      name = await this.simplePrompt({ message: 'New account name' })
     }
-  }
 
-  // Add dev "Alice" and "Bob" accounts
-  initSpecialAccounts() {
-    const keyring = new Keyring({ type: 'sr25519' })
-    keyring.addFromUri('//Alice', { name: 'Alice' })
-    keyring.addFromUri('//Bob', { name: 'Bob' })
-    keyring
-      .getPairs()
-      .forEach((pair) => this.saveAccount({ ...pair, meta: { name: pair.meta.name as string } }, '', true))
+    if (!masterKey) {
+      const keyring = new Keyring(KEYRING_OPTIONS)
+      const mnemonic = mnemonicGenerate()
+      keyring.addFromMnemonic(mnemonic, { name, whenCreated: Date.now() }, type)
+      masterKey = keyring.getPairs()[0]
+      this.log(chalk.magentaBright(`${chalk.bold('New account mnemonic: ')}${mnemonic}`))
+    } else {
+      const { address } = masterKey
+      const existingAcc = this.getPairs().find((p) => p.address === address)
+      if (existingAcc) {
+        this.error(`Account with this key already exists (${chalk.magentaBright(existingAcc.meta.name)})`, {
+          exit: ExitCodes.InvalidInput,
+        })
+      }
+      await this.requestPairDecoding(masterKey, 'Current account password')
+      masterKey.meta.name = name
+    }
+
+    while (password === undefined) {
+      password = await this.promptForPassword("Set new account's password")
+      const password2 = await this.promptForPassword("Confirm new account's password")
+
+      if (password !== password2) {
+        this.warn('Passwords are not the same!')
+        password = undefined
+      }
+    }
+    if (!password) {
+      this.warn('Using an empty password is not recommended!')
+    }
+
+    const destPath = this.getAccountFilePath(name)
+    fs.writeFileSync(destPath, JSON.stringify(masterKey.toJson(password)))
+
+    this.keyring.addPair(masterKey)
+
+    this.log(chalk.greenBright(`\nNew account successfully created!`))
+
+    return masterKey as NamedKeyringPair
   }
 
   fetchAccountFromJsonFile(jsonBackupFilePath: string): NamedKeyringPair {
@@ -76,18 +142,20 @@ export default abstract class AccountsCommandBase extends ApiCommandBase {
       throw new CLIError('Provided backup file is not valid', { exit: ExitCodes.InvalidFile })
     }
 
-    // Force some default account name if none is provided in the original backup
     if (!accountJsonObj.meta) accountJsonObj.meta = {}
-    if (!accountJsonObj.meta.name) accountJsonObj.meta.name = 'Unnamed Account'
+    // Normalize the CLI account name based on the file name
+    // (this makes sure getAccountFilePath(name) always points to the correct file, preserving backward compatibility
+    // with older CLI versions)
+    accountJsonObj.meta.name = path.basename(jsonBackupFilePath, '.json')
 
-    const keyring = new Keyring()
+    const keyring = new Keyring(KEYRING_OPTIONS)
     let account: NamedKeyringPair
     try {
       // Try adding and retrieving the keys in order to validate that the backup file is correct
       keyring.addFromJson(accountJsonObj)
       account = keyring.getPair(accountJsonObj.address) as NamedKeyringPair // We can be sure it's named, because we forced it before
     } catch (e) {
-      throw new CLIError('Provided backup file is not valid', { exit: ExitCodes.InvalidFile })
+      throw new CLIError(`Provided backup file is not valid (${(e as Error).message})`, { exit: ExitCodes.InvalidFile })
     }
 
     return account
@@ -103,7 +171,7 @@ export default abstract class AccountsCommandBase extends ApiCommandBase {
     }
   }
 
-  fetchAccounts(includeSpecial = false): NamedKeyringPair[] {
+  fetchAccounts(): NamedKeyringPair[] {
     let files: string[] = []
     const accountDir = this.getAccountsDirPath()
     try {
@@ -116,156 +184,197 @@ export default abstract class AccountsCommandBase extends ApiCommandBase {
     return files
       .map((fileName) => {
         const filePath = path.join(accountDir, fileName)
-        if (!includeSpecial && filePath.includes(SPECIAL_ACCOUNT_POSTFIX + '.')) return null
         return this.fetchAccountOrNullFromFile(filePath)
       })
-      .filter((accObj) => accObj !== null) as NamedKeyringPair[]
+      .filter((account) => account !== null) as NamedKeyringPair[]
   }
 
-  getSelectedAccountFilename(): string {
-    return this.getPreservedState().selectedAccountFilename
+  getPairs(includeDevAccounts = true): NamedKeyringPair[] {
+    return this.keyring.getPairs().filter((p) => includeDevAccounts || !p.meta.isTesting) as NamedKeyringPair[]
   }
 
-  getSelectedAccount(): NamedKeyringPair | null {
-    const selectedAccountFilename = this.getSelectedAccountFilename()
-
-    if (!selectedAccountFilename) {
-      return null
-    }
+  getPair(key: string): NamedKeyringPair {
+    return this.keyring.getPair(key) as NamedKeyringPair
+  }
 
-    const account = this.fetchAccountOrNullFromFile(path.join(this.getAccountsDirPath(), selectedAccountFilename))
+  async getDecodedPair(key: string | AccountId): Promise<NamedKeyringPair> {
+    const pair = this.getPair(key.toString())
 
-    return account
+    return (await this.requestPairDecoding(pair)) as NamedKeyringPair
   }
 
-  // Use when account usage is required in given command
-  async getRequiredSelectedAccount(promptIfMissing = true): Promise<NamedKeyringPair> {
-    let selectedAccount: NamedKeyringPair | null = this.getSelectedAccount()
-    if (!selectedAccount) {
-      if (!promptIfMissing) {
-        this.error('No default account selected! Use account:choose to set the default account.', {
-          exit: ExitCodes.NoAccountSelected,
-        })
-      }
+  async requestPairDecoding(pair: KeyringPair, message?: string): Promise<KeyringPair> {
+    // Skip if pair already unlocked
+    if (!pair.isLocked) {
+      return pair
+    }
 
-      const accounts: NamedKeyringPair[] = this.fetchAccounts()
-      if (!accounts.length) {
-        this.error('No accounts available! Use account:import in order to import accounts into the CLI.', {
-          exit: ExitCodes.NoAccountFound,
-        })
-      }
+    // First - try decoding using empty string
+    try {
+      pair.decodePkcs8('')
+      return pair
+    } catch (e) {
+      // Continue...
+    }
 
-      this.warn('No default account selected!')
-      selectedAccount = await this.promptForAccount(accounts)
-      await this.setSelectedAccount(selectedAccount)
+    let isPassValid = false
+    while (!isPassValid) {
+      try {
+        const password = await this.promptForPassword(
+          message || `Enter ${pair.meta.name ? pair.meta.name : pair.address} account password`
+        )
+        pair.decodePkcs8(password)
+        isPassValid = true
+      } catch (e) {
+        this.warn('Invalid password... Try again.')
+      }
     }
 
-    return selectedAccount
+    return pair
   }
 
-  async setSelectedAccount(account: NamedKeyringPair): Promise<void> {
-    const accountFilename = fs.existsSync(this.getAccountFilePath(account, true))
-      ? this.generateAccountFilename(account, true)
-      : this.generateAccountFilename(account)
-
-    await this.setPreservedState({ selectedAccountFilename: accountFilename })
+  initKeyring(): void {
+    this._keyring = this.getApi().isDevelopment ? createTestKeyring(KEYRING_OPTIONS) : new Keyring(KEYRING_OPTIONS)
+    const accounts = this.fetchAccounts()
+    accounts.forEach((a) => this.keyring.addPair(a))
   }
 
-  async promptForPassword(message = "Your account's password") {
-    const { password } = await inquirer.prompt([{ name: 'password', type: 'password', message }])
+  async promptForPassword(message = "Your account's password"): Promise<string> {
+    const { password } = await inquirer.prompt([
+      {
+        name: 'password',
+        type: 'password',
+        message,
+      },
+    ])
 
     return password
   }
 
-  async requireConfirmation(
-    message = 'Are you sure you want to execute this action?',
-    defaultVal = false
-  ): Promise<void> {
-    if (process.env.AUTO_CONFIRM === 'true' || parseInt(process.env.AUTO_CONFIRM || '')) {
-      return
-    }
-    const { confirmed } = await inquirer.prompt([{ type: 'confirm', name: 'confirmed', message, default: defaultVal }])
-    if (!confirmed) {
-      this.exit(ExitCodes.OK)
-    }
-  }
-
   async promptForAccount(
-    accounts: NamedKeyringPair[],
-    defaultAccount: NamedKeyringPair | null = null,
     message = 'Select an account',
+    createIfUnavailable = true,
+    includeDevAccounts = true,
     showBalances = true
-  ): Promise<NamedKeyringPair> {
-    let balances: DeriveBalancesAll[]
+  ): Promise<string> {
+    const pairs = this.getPairs(includeDevAccounts)
+
+    if (!pairs.length) {
+      this.warn('No accounts available!')
+      if (createIfUnavailable) {
+        await this.requireConfirmation('Do you want to create a new account?', true)
+        pairs.push(await this.createAccount())
+      } else {
+        this.exit()
+      }
+    }
+
+    let balances: DeriveBalancesAll[] = []
     if (showBalances) {
-      balances = await this.getApi().getAccountsBalancesInfo(accounts.map((acc) => acc.address))
+      balances = await this.getApi().getAccountsBalancesInfo(pairs.map((p) => p.address))
     }
-    const longestAccNameLength: number = accounts.reduce((prev, curr) => Math.max(curr.meta.name.length, prev), 0)
-    const accNameColLength: number = Math.min(longestAccNameLength + 1, 20)
-    const { chosenAccountFilename } = await inquirer.prompt([
-      {
-        name: 'chosenAccountFilename',
-        message,
-        type: 'list',
-        choices: accounts.map((account: NamedKeyringPair, i) => ({
-          name:
-            `${toFixedLength(account.meta.name, accNameColLength)} | ` +
-            `${account.address} | ` +
-            ((showBalances || '') &&
-              `${formatBalance(balances[i].availableBalance)} / ` + `${formatBalance(balances[i].votingBalance)}`),
-          value: this.generateAccountFilename(account),
-          short: `${account.meta.name} (${account.address})`,
-        })),
-        default: defaultAccount && this.generateAccountFilename(defaultAccount),
-      },
-    ])
 
-    return accounts.find((acc) => this.generateAccountFilename(acc) === chosenAccountFilename) as NamedKeyringPair
+    const longestNameLen: number = pairs.reduce((prev, curr) => Math.max(curr.meta.name.length, prev), 0)
+    const nameColLength: number = Math.min(longestNameLen + 1, 20)
+    const chosenKey = await this.simplePrompt({
+      message,
+      type: 'list',
+      choices: pairs.map((p, i) => ({
+        name:
+          `${toFixedLength(p.meta.name, nameColLength)} | ` +
+          `${p.address} | ` +
+          ((showBalances || '') &&
+            `${formatBalance(balances[i].availableBalance)} / ` + `${formatBalance(balances[i].votingBalance)}`),
+        value: p.address,
+      })),
+    })
+
+    return chosenKey
   }
 
-  async requestAccountDecoding(account: NamedKeyringPair): Promise<void> {
-    // Skip if account already unlocked
-    if (!account.isLocked) {
-      return
-    }
+  promptForCustomAddress(): Promise<string> {
+    return this.simplePrompt({
+      message: 'Provide custom address',
+      validate: (a) => validateAddress(a),
+    })
+  }
 
-    // First - try decoding using empty string
-    try {
-      account.decodePkcs8('')
-      return
-    } catch (e) {
-      // Continue...
+  async promptForAnyAddress(message = 'Select an address'): Promise<string> {
+    const type: 'available' | 'new' | 'custom' = await this.simplePrompt({
+      message,
+      type: 'list',
+      choices: [
+        { name: 'Available account', value: 'available' },
+        { name: 'New account', value: 'new' },
+        { name: 'Custom address', value: 'custom' },
+      ],
+    })
+
+    if (type === 'available') {
+      return this.promptForAccount()
+    } else if (type === 'new') {
+      return (await this.createAccount()).address
+    } else {
+      return this.promptForCustomAddress()
     }
+  }
 
-    let isPassValid = false
-    while (!isPassValid) {
-      try {
-        const password = await this.promptForPassword()
-        account.decodePkcs8(password)
-        isPassValid = true
-      } catch (e) {
-        this.warn('Invalid password... Try again.')
-      }
+  async getRequiredMemberContext(useSelected = false, allowedIds?: MemberId[]): Promise<[MemberId, Membership]> {
+    if (
+      useSelected &&
+      this.selectedMember &&
+      (!allowedIds || allowedIds.some((id) => id.eq(this.selectedMember?.[0])))
+    ) {
+      return this.selectedMember
     }
-  }
 
-  async getRequiredMemberId(): Promise<number> {
-    const account = await this.getRequiredSelectedAccount()
-    const memberIds = await this.getApi().getMemberIdsByControllerAccount(account.address)
-    if (!memberIds.length) {
-      this.error('Membership required to access this command!', { exit: ExitCodes.AccessDenied })
+    const membersEntries = allowedIds
+      ? await this.getApi().memberEntriesByIds(allowedIds)
+      : await this.getApi().allMemberEntries()
+    const availableMemberships = await Promise.all(
+      membersEntries.filter(([, m]) => this.isKeyAvailable(m.controller_account.toString()))
+    )
+
+    if (!availableMemberships.length) {
+      this.error(
+        `No ${allowedIds ? 'allowed ' : ''}member controller key available!` +
+          (allowedIds ? ` Allowed members: ${allowedIds.join(', ')}.` : ''),
+        {
+          exit: ExitCodes.AccessDenied,
+        }
+      )
+    } else if (availableMemberships.length === 1) {
+      this.selectedMember = availableMemberships[0]
+    } else {
+      this.selectedMember = await this.promptForMember(availableMemberships, 'Choose member context')
     }
 
-    return memberIds[0].toNumber() // FIXME: Temporary solution (just using the first one)
+    return this.selectedMember
+  }
+
+  async promptForMember(
+    availableMemberships: [MemberId, Membership][],
+    message = 'Choose a member'
+  ): Promise<[MemberId, Membership]> {
+    const memberIndex = await this.simplePrompt({
+      type: 'list',
+      message,
+      choices: availableMemberships.map(([, membership], i) => ({
+        name: membership.handle.toString(),
+        value: i,
+      })),
+    })
+
+    return availableMemberships[memberIndex]
   }
 
-  async init() {
+  async init(): Promise<void> {
     await super.init()
     try {
       this.initAccountsFs()
-      this.initSpecialAccounts()
     } catch (e) {
       throw this.createDataDirInitError()
     }
+    await this.initKeyring()
   }
 }
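
For reviewers, a hedged sketch of how a command built on the reworked base class might use the new keyring and member-context helpers; the command itself is hypothetical:

    import AccountsCommandBase from '../base/AccountsCommandBase'

    // Hypothetical command, only to illustrate getRequiredMemberContext / getDecodedPair.
    export default class MemberInfoCommand extends AccountsCommandBase {
      static description = 'Show the selected member context and unlock its controller key'

      async run(): Promise<void> {
        const [memberId, membership] = await this.getRequiredMemberContext()
        const controllerKey = membership.controller_account.toString()

        this.log(`Member #${memberId.toString()} (handle: ${membership.handle.toString()})`)
        this.log(`Controller key available in keyring: ${this.isKeyAvailable(controllerKey)}`)

        // Prompts for the account password if the pair is still locked
        const pair = await this.getDecodedPair(controllerKey)
        this.log(`Unlocked pair: ${pair.meta.name} (${pair.address})`)
      }
    }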

+ 193 - 69
cli/src/base/ApiCommandBase.ts

@@ -2,21 +2,23 @@ import ExitCodes from '../ExitCodes'
 import { CLIError } from '@oclif/errors'
 import StateAwareCommandBase from './StateAwareCommandBase'
 import Api from '../Api'
-import { getTypeDef, Option, Tuple, TypeRegistry } from '@polkadot/types'
-import { Registry, Codec, CodecArg, TypeDef, TypeDefInfo } from '@polkadot/types/types'
-
+import { getTypeDef, Option, Tuple } from '@polkadot/types'
+import { Registry, Codec, TypeDef, TypeDefInfo, IEvent, DetectCodec } from '@polkadot/types/types'
 import { Vec, Struct, Enum } from '@polkadot/types/codec'
 import { ApiPromise, SubmittableResult, WsProvider } from '@polkadot/api'
 import { KeyringPair } from '@polkadot/keyring/types'
 import chalk from 'chalk'
 import { InterfaceTypes } from '@polkadot/types/types/registry'
-import { ApiMethodArg, ApiMethodNamedArgs, ApiParamsOptions, ApiParamOptions } from '../Types'
+import { ApiMethodArg, ApiMethodNamedArgs, ApiParamsOptions, ApiParamOptions, UnaugmentedApiPromise } from '../Types'
 import { createParamOptions } from '../helpers/promptOptions'
-import { SubmittableExtrinsic } from '@polkadot/api/types'
+import { AugmentedSubmittables, SubmittableExtrinsic, AugmentedEvents, AugmentedEvent } from '@polkadot/api/types'
 import { DistinctQuestion } from 'inquirer'
 import { BOOL_PROMPT_OPTIONS } from '../helpers/prompting'
 import { DispatchError } from '@polkadot/types/interfaces/system'
-import { Event } from '@polkadot/types/interfaces'
+import QueryNodeApi from '../QueryNodeApi'
+import { formatBalance } from '@polkadot/util'
+import BN from 'bn.js'
+import _ from 'lodash'
 
 export class ExtrinsicFailedError extends Error {}
 
@@ -24,35 +26,74 @@ export class ExtrinsicFailedError extends Error {}
  * Abstract base class for commands that require access to the API.
  */
 export default abstract class ApiCommandBase extends StateAwareCommandBase {
-  private api: Api | null = null
+  private api: Api | undefined
+  private queryNodeApi: QueryNodeApi | null | undefined
+
+  // Command configuration
+  protected requiresApiConnection = true
+  protected requiresQueryNode = false
 
   getApi(): Api {
-    if (!this.api) throw new CLIError('Tried to get API before initialization.', { exit: ExitCodes.ApiError })
+    if (!this.api) {
+      throw new CLIError('Tried to access API before initialization.', { exit: ExitCodes.ApiError })
+    }
     return this.api
   }
 
-  // Get original api for lower-level api calls
+  getQNApi(): QueryNodeApi {
+    if (this.queryNodeApi === undefined) {
+      throw new CLIError('Tried to access QueryNodeApi before initialization.', { exit: ExitCodes.QueryNodeError })
+    }
+    if (this.queryNodeApi === null) {
+      throw new CLIError('Query node endpoint uri is required in order to run this command!', {
+        exit: ExitCodes.QueryNodeError,
+      })
+    }
+    return this.queryNodeApi
+  }
+
+  // Shortcuts
   getOriginalApi(): ApiPromise {
     return this.getApi().getOriginalApi()
   }
 
+  getUnaugmentedApi(): UnaugmentedApiPromise {
+    return this.getApi().getUnaugmentedApi()
+  }
+
   getTypesRegistry(): Registry {
     return this.getOriginalApi().registry
   }
 
-  createType<K extends keyof InterfaceTypes>(typeName: K, value?: unknown): InterfaceTypes[K] {
-    return this.getOriginalApi().createType(typeName, value)
+  createType<T extends Codec = Codec, TN extends string = string>(typeName: TN, value?: unknown): DetectCodec<T, TN> {
+    return this.getOriginalApi().createType<T, TN>(typeName, value)
+  }
+
+  isQueryNodeUriSet(): boolean {
+    const { queryNodeUri } = this.getPreservedState()
+    return !!queryNodeUri
   }
 
-  async init(skipConnection = false): Promise<void> {
+  async init(): Promise<void> {
     await super.init()
-    if (!skipConnection) {
+    if (this.requiresApiConnection) {
       let apiUri: string = this.getPreservedState().apiUri
+
       if (!apiUri) {
-        this.warn("You haven't provided a node/endpoint for the CLI to connect to yet!")
+        this.warn("You haven't provided a Joystream node websocket api uri for the CLI to connect to yet!")
         apiUri = await this.promptForApiUri()
       }
 
+      let queryNodeUri: string | null | undefined = this.getPreservedState().queryNodeUri
+
+      if (this.requiresQueryNode && !queryNodeUri) {
+        this.warn('Query node endpoint uri is required in order to run this command!')
+        queryNodeUri = await this.promptForQueryNodeUri(true)
+      } else if (queryNodeUri === undefined) {
+        this.warn("You haven't provided a Joystream query node uri for the CLI to connect to yet!")
+        queryNodeUri = await this.promptForQueryNodeUri()
+      }
+
       const { metadataCache } = this.getPreservedState()
       this.api = await Api.create(apiUri, metadataCache)
 
@@ -63,13 +104,19 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
         metadataCache[metadataKey] = await this.getOriginalApi().runtimeMetadata.toJSON()
         await this.setPreservedState({ metadataCache })
       }
+
+      this.queryNodeApi = queryNodeUri
+        ? new QueryNodeApi(queryNodeUri, (err) => {
+            this.warn(`Query node error: ${err.networkError?.message || err.graphQLErrors?.join('\n')}`)
+          })
+        : null
     }
   }
 
   async promptForApiUri(): Promise<string> {
     let selectedNodeUri = await this.simplePrompt({
       type: 'list',
-      message: 'Choose a node/endpoint:',
+      message: 'Choose a node websocket api uri:',
       choices: [
         {
           name: 'Local node (ws://localhost:9944)',
@@ -103,7 +150,53 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
     return selectedNodeUri
   }
 
-  isApiUriValid(uri: string) {
+  async promptForQueryNodeUri(isRequired = false): Promise<string | null> {
+    const choices = [
+      {
+        name: 'Local query node (http://localhost:8081/graphql)',
+        value: 'http://localhost:8081/graphql',
+      },
+      {
+        name: 'Jsgenesis-hosted query node (https://hydra.joystream.org/graphql)',
+        value: 'https://hydra.joystream.org/graphql',
+      },
+      {
+        name: 'Custom endpoint',
+        value: '',
+      },
+    ]
+    if (!isRequired) {
+      choices.push({
+        name: "No endpoint (if you don't use a query node, some features will not be available)",
+        value: 'none',
+      })
+    }
+    let selectedUri: string = await this.simplePrompt({
+      type: 'list',
+      message: 'Choose a query node endpoint:',
+      choices,
+    })
+
+    if (!selectedUri) {
+      do {
+        selectedUri = await this.simplePrompt({
+          type: 'input',
+          message: 'Provide a query node endpoint',
+        })
+        if (!this.isQueryNodeUriValid(selectedUri)) {
+          this.warn('Provided uri seems incorrect! Please try again...')
+        }
+      } while (!this.isQueryNodeUriValid(selectedUri))
+    }
+
+    const queryNodeUri = selectedUri === 'none' ? null : selectedUri
+
+    await this.setPreservedState({ queryNodeUri })
+
+    return queryNodeUri
+  }
+
+  isApiUriValid(uri: string): boolean {
     try {
       // eslint-disable-next-line no-new
       new WsProvider(uri)
@@ -113,15 +206,26 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
     return true
   }
 
+  isQueryNodeUriValid(uri: string): boolean {
+    let url: URL
+    try {
+      url = new URL(uri)
+    } catch (_) {
+      return false
+    }
+
+    return url.protocol === 'http:' || url.protocol === 'https:'
+  }
+
   // This is needed to correctly handle some structs, enums etc.
   // Where the main typeDef doesn't provide enough information
-  protected getRawTypeDef(type: keyof InterfaceTypes) {
+  protected getRawTypeDef(type: keyof InterfaceTypes): TypeDef {
     const instance = this.createType(type)
     return getTypeDef(instance.toRawType())
   }
 
   // Prettifier for type names which are actually JSON strings
-  protected prettifyJsonTypeName(json: string) {
+  protected prettifyJsonTypeName(json: string): string {
     const obj = JSON.parse(json) as { [key: string]: string }
     return (
       '{\n' +
@@ -133,7 +237,7 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
   }
 
   // Get param name based on TypeDef object
-  protected paramName(typeDef: TypeDef) {
+  protected paramName(typeDef: TypeDef): string {
     return chalk.green(
       typeDef.displayName ||
         typeDef.name ||
@@ -180,10 +284,10 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
         createParamOptions(subtype.name, defaultValue?.unwrapOr(undefined))
       )
       this.closeIndentGroup()
-      return this.createType(`Option<${subtype.type}>` as any, value)
+      return this.createType<Option<Codec>>(`Option<${subtype.type}>`, value)
     }
 
-    return this.createType(`Option<${subtype.type}>` as any, null)
+    return this.createType<Option<Codec>>(`Option<${subtype.type}>`, null)
   }
 
   // Prompt for Tuple
@@ -204,7 +308,11 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
     }
     this.closeIndentGroup()
 
-    return new Tuple(this.getTypesRegistry(), subtypes.map((subtype) => subtype.type) as any, result)
+    return new Tuple(
+      this.getTypesRegistry(),
+      subtypes.map((subtype) => subtype.type),
+      result
+    )
   }
 
   // Prompt for Struct
@@ -231,7 +339,7 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
     }
     this.closeIndentGroup()
 
-    return this.createType(structType as any, structValues)
+    return this.createType(structType, structValues)
   }
 
   // Prompt for Vec
@@ -259,7 +367,7 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
     } while (addAnother)
     this.closeIndentGroup()
 
-    return this.createType(`Vec<${subtype.type}>` as any, entries)
+    return this.createType<Vec<Codec>>(`Vec<${subtype.type}>`, entries)
   }
 
   // Prompt for Enum
@@ -284,12 +392,12 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
 
     if (enumSubtype.type !== 'Null') {
       const subtypeOptions = createParamOptions(enumSubtype.name, defaultValue?.value)
-      return this.createType(enumType as any, {
+      return this.createType<Enum>(enumType, {
         [enumSubtype.name!]: await this.promptForParam(enumSubtype.type, subtypeOptions),
       })
     }
 
-    return this.createType(enumType as any, enumSubtype.name)
+    return this.createType<Enum>(enumType, enumSubtype.name)
   }
 
   // Prompt for param based on "paramType" string (ie. Option<MemeberId>)
@@ -325,7 +433,7 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
   }
 
   // More typesafe version
-  async promptForType(type: keyof InterfaceTypes, options?: ApiParamOptions) {
+  async promptForType(type: keyof InterfaceTypes, options?: ApiParamOptions): Promise<Codec> {
     return await this.promptForParam(type, options)
   }
 
@@ -334,7 +442,7 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
     method: string,
     paramsOptions?: ApiParamsOptions
   ): Promise<ApiMethodArg[]> {
-    const extrinsicMethod = this.getOriginalApi().tx[module][method]
+    const extrinsicMethod = (await this.getUnaugmentedApi().tx)[module][method]
     const values: ApiMethodArg[] = []
 
     this.openIndentGroup()
@@ -371,12 +479,8 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
                 let errorMsg = dispatchError.toString()
                 if (dispatchError.isModule) {
                   try {
-                    // Need to assert that registry is of TypeRegistry type, since Registry intefrace
-                    // seems outdated and doesn't include DispatchErrorModule as possible argument for "findMetaError"
-                    const { name, documentation } = (this.getOriginalApi().registry as TypeRegistry).findMetaError(
-                      dispatchError.asModule
-                    )
-                    errorMsg = `${name} (${documentation})`
+                    const { name, docs } = this.getOriginalApi().registry.findMetaError(dispatchError.asModule)
+                    errorMsg = `${name} (${docs.join(', ')})`
                   } catch (e) {
                     // This probably means we don't have this error in the metadata
                     // In this case - continue (we'll just display dispatchError.toString())
@@ -398,20 +502,20 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
     })
   }
 
-  async sendAndFollowTx(
-    account: KeyringPair,
-    tx: SubmittableExtrinsic<'promise'>,
-    warnOnly = false // If specified - only warning will be displayed in case of failure (instead of error beeing thrown)
-  ): Promise<SubmittableResult | false> {
+  async sendAndFollowTx(account: KeyringPair, tx: SubmittableExtrinsic<'promise'>): Promise<SubmittableResult> {
+    // Calculate fee and ask for confirmation
+    const fee = await this.getApi().estimateFee(account, tx)
+
+    await this.requireConfirmation(
+      `Tx fee of ${chalk.cyan(formatBalance(fee))} will be deducted from your account, do you confirm the transfer?`
+    )
+
     try {
       const res = await this.sendExtrinsic(account, tx)
       this.log(chalk.green(`Extrinsic successful!`))
       return res
     } catch (e) {
-      if (e instanceof ExtrinsicFailedError && warnOnly) {
-        this.warn(`Extrinsic failed! ${e.message}`)
-        return false
-      } else if (e instanceof ExtrinsicFailedError) {
+      if (e instanceof ExtrinsicFailedError) {
         throw new CLIError(`Extrinsic failed! ${e.message}`, { exit: ExitCodes.ApiError })
       } else {
         throw e
@@ -419,36 +523,56 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
     }
   }
 
-  async sendAndFollowNamedTx(
+  private humanize(p: unknown): any {
+    if (Array.isArray(p)) {
+      return p.map((v) => this.humanize(v))
+    } else if (typeof p === 'object' && p !== null) {
+      if ((p as any).toHuman) {
+        return (p as Codec).toHuman()
+      } else if (p instanceof BN) {
+        return p.toString()
+      } else {
+        return _.mapValues(p, this.humanize.bind(this))
+      }
+    }
+
+    return p
+  }
+
+  async sendAndFollowNamedTx<
+    Module extends keyof AugmentedSubmittables<'promise'>,
+    Method extends keyof AugmentedSubmittables<'promise'>[Module] & string,
+    Submittable extends AugmentedSubmittables<'promise'>[Module][Method]
+  >(
     account: KeyringPair,
-    module: string,
-    method: string,
-    params: CodecArg[],
-    warnOnly = false
-  ): Promise<SubmittableResult | false> {
-    this.log(chalk.magentaBright(`\nSending ${module}.${method} extrinsic...`))
-    const tx = await this.getOriginalApi().tx[module][method](...params)
-    return await this.sendAndFollowTx(account, tx, warnOnly)
+    module: Module,
+    method: Method,
+    params: Submittable extends (...args: any[]) => any ? Parameters<Submittable> : []
+  ): Promise<SubmittableResult> {
+    this.log(
+      chalk.magentaBright(
+        `\nSending ${module}.${method} extrinsic from ${account.meta.name ? account.meta.name : account.address}...`
+      )
+    )
+    this.log('Tx params:', this.humanize(params))
+    const tx = await this.getUnaugmentedApi().tx[module][method](...params)
+    return this.sendAndFollowTx(account, tx)
   }
 
-  // TODO:
-  // Switch to:
-  // public findEvent<S extends keyof AugmentedEvents<'promise'> & string, M extends keyof AugmentedEvents<'promise'>[S] & string>
-  //          (result: SubmittableResult, section: S, method: M): Event | undefined {
-  // Once augment-api is supported
-  public findEvent(result: SubmittableResult, section: string, method: string): Event | undefined {
-    return result.findRecord(section, method)?.event
+  public findEvent<
+    S extends keyof AugmentedEvents<'promise'> & string,
+    M extends keyof AugmentedEvents<'promise'>[S] & string,
+    EventType = AugmentedEvents<'promise'>[S][M] extends AugmentedEvent<'promise', infer T> ? IEvent<T> : never
+  >(result: SubmittableResult, section: S, method: M): EventType | undefined {
+    return result.findRecord(section, method)?.event as EventType | undefined
   }
 
-  async buildAndSendExtrinsic(
-    account: KeyringPair,
-    module: string,
-    method: string,
-    paramsOptions?: ApiParamsOptions,
-    warnOnly = false // If specified - only warning will be displayed (instead of error beeing thrown)
-  ): Promise<ApiMethodArg[]> {
+  async buildAndSendExtrinsic<
+    Module extends keyof AugmentedSubmittables<'promise'>,
+    Method extends keyof AugmentedSubmittables<'promise'>[Module] & string
+  >(account: KeyringPair, module: Module, method: Method, paramsOptions?: ApiParamsOptions): Promise<ApiMethodArg[]> {
     const params = await this.promptForExtrinsicParams(module, method, paramsOptions)
-    await this.sendAndFollowNamedTx(account, module, method, params, warnOnly)
+    await this.sendAndFollowNamedTx(account, module, method, params as any)
 
     return params
   }
@@ -456,7 +580,7 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
   extrinsicArgsFromDraft(module: string, method: string, draftFilePath: string): ApiMethodNamedArgs {
     let draftJSONObj
     const parsedArgs: ApiMethodNamedArgs = []
-    const extrinsicMethod = this.getOriginalApi().tx[module][method]
+    const extrinsicMethod = this.getUnaugmentedApi().tx[module][method]
     try {
       // eslint-disable-next-line @typescript-eslint/no-var-requires
       draftJSONObj = require(draftFilePath)
@@ -470,7 +594,7 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
       const argName = arg.name.toString()
       const argType = arg.type.toString()
       try {
-        parsedArgs.push({ name: argName, value: this.createType(argType as any, draftJSONObj[parseInt(index)]) })
+        parsedArgs.push({ name: argName, value: this.createType(argType, draftJSONObj[parseInt(index)]) })
       } catch (e) {
         throw new CLIError(`Couldn't parse ${argName} value from draft at ${draftFilePath}!`, {
           exit: ExitCodes.InvalidFile,

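As a side note on the reworked ApiCommandBase above: the persisted `queryNodeUri` is deliberately a tri-state value, and `init()` only prompts when the command actually needs a query node or the user has never been asked. Below is a minimal restatement of that logic outside of any CLI class; the `shouldPromptForQueryNode` helper is illustrative only and not part of this diff.

// Tri-state semantics of the preserved queryNodeUri (see init() above):
//   undefined -> the user was never asked; prompt, offering a "none" option
//   null      -> the user explicitly opted out; query-node features stay disabled
//   string    -> endpoint used to construct QueryNodeApi
type QueryNodeUriSetting = string | null | undefined

function shouldPromptForQueryNode(uri: QueryNodeUriSetting, requiresQueryNode: boolean): boolean {
  // Prompt when a query node is required but missing, or when it was simply never configured
  return (requiresQueryNode && !uri) || uri === undefined
}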
+ 117 - 52
cli/src/base/ContentDirectoryCommandBase.ts

@@ -6,59 +6,50 @@ import { CLIError } from '@oclif/errors'
 import { RolesCommandBase } from './WorkingGroupsCommandBase'
 import { createType } from '@joystream/types'
 import { flags } from '@oclif/command'
+import { MemberId } from '@joystream/types/members'
 
-// TODO: Rework the contexts
-
-const CONTEXTS = ['Member', 'Curator', 'Lead'] as const
-const OWNER_CONTEXTS = ['Member', 'Curator'] as const
+const CHANNEL_CREATION_CONTEXTS = ['Member', 'Curator'] as const
 const CATEGORIES_CONTEXTS = ['Lead', 'Curator'] as const
+const CHANNEL_MANAGEMENT_CONTEXTS = ['Owner', 'Collaborator'] as const
 
-type Context = typeof CONTEXTS[number]
-type OwnerContext = typeof OWNER_CONTEXTS[number]
+type ChannelManagementContext = typeof CHANNEL_MANAGEMENT_CONTEXTS[number]
+type ChannelCreationContext = typeof CHANNEL_CREATION_CONTEXTS[number]
 type CategoriesContext = typeof CATEGORIES_CONTEXTS[number]
 
 /**
  * Abstract base class for commands related to content directory
  */
 export default abstract class ContentDirectoryCommandBase extends RolesCommandBase {
-  group = WorkingGroups.Curators // override group for RolesCommandBase
-
-  static contextFlag = flags.enum({
-    name: 'context',
+  static channelCreationContextFlag = flags.enum({
     required: false,
-    description: `Actor context to execute the command in (${CONTEXTS.join('/')})`,
-    options: [...CONTEXTS],
+    description: `Actor context to execute the command in (${CHANNEL_CREATION_CONTEXTS.join('/')})`,
+    options: [...CHANNEL_CREATION_CONTEXTS],
   })
 
-  static ownerContextFlag = flags.enum({
-    name: 'ownerContext',
+  static channelManagementContextFlag = flags.enum({
     required: false,
-    description: `Actor context to execute the command in (${OWNER_CONTEXTS.join('/')})`,
-    options: [...OWNER_CONTEXTS],
+    description: `Actor context to execute the command in (${CHANNEL_MANAGEMENT_CONTEXTS.join('/')})`,
+    options: [...CHANNEL_MANAGEMENT_CONTEXTS],
   })
 
   static categoriesContextFlag = flags.enum({
-    name: 'categoriesContext',
     required: false,
     description: `Actor context to execute the command in (${CATEGORIES_CONTEXTS.join('/')})`,
     options: [...CATEGORIES_CONTEXTS],
   })
 
-  async promptForContext(message = 'Choose in which context you wish to execute the command'): Promise<Context> {
-    return this.simplePrompt({
-      message,
-      type: 'list',
-      choices: CONTEXTS.map((c) => ({ name: c, value: c })),
-    })
+  async init(): Promise<void> {
+    await super.init()
+    this.group = WorkingGroups.Curators // override group for RolesCommandBase
   }
 
-  async promptForOwnerContext(
+  async promptForChannelCreationContext(
     message = 'Choose in which context you wish to execute the command'
-  ): Promise<OwnerContext> {
+  ): Promise<ChannelCreationContext> {
     return this.simplePrompt({
       message,
       type: 'list',
-      choices: OWNER_CONTEXTS.map((c) => ({ name: c, value: c })),
+      choices: CHANNEL_CREATION_CONTEXTS.map((c) => ({ name: c, value: c })),
     })
   }
 
@@ -74,35 +65,90 @@ export default abstract class ContentDirectoryCommandBase extends RolesCommandBa
 
   // Use when lead access is required in given command
   async requireLead(): Promise<void> {
-    await this.getRequiredLead()
+    await this.getRequiredLeadContext()
   }
 
-  async getCurationActorByChannel(channel: Channel): Promise<ContentActor> {
-    return channel.owner.isOfType('Curators') ? await this.getActor('Lead') : await this.getActor('Curator')
+  getCurationActorByChannel(channel: Channel): Promise<[ContentActor, string]> {
+    return channel.owner.isOfType('Curators') ? this.getContentActor('Lead') : this.getContentActor('Curator')
   }
 
-  async getChannelOwnerActor(channel: Channel): Promise<ContentActor> {
+  async getChannelOwnerActor(channel: Channel): Promise<[ContentActor, string]> {
     if (channel.owner.isOfType('Curators')) {
       try {
-        return await this.getActor('Lead')
+        return this.getContentActor('Lead')
       } catch (e) {
-        return await this.getCuratorContext(channel.owner.asType('Curators'))
+        return this.getCuratorContext(channel.owner.asType('Curators'))
       }
     } else {
-      return await this.getActor('Member')
+      const [id, membership] = await this.getRequiredMemberContext(false, [channel.owner.asType('Member')])
+      return [
+        createType<ContentActor, 'ContentActor'>('ContentActor', { Member: id }),
+        membership.controller_account.toString(),
+      ]
     }
   }
 
-  async getCategoryManagementActor(): Promise<ContentActor> {
+  async getChannelCollaboratorActor(channel: Channel): Promise<[ContentActor, string]> {
+    const [id, membership] = await this.getRequiredMemberContext(false, Array.from(channel.collaborators))
+    return [
+      createType<ContentActor, 'ContentActor'>('ContentActor', { Collaborator: id }),
+      membership.controller_account.toString(),
+    ]
+  }
+
+  async getChannelManagementActor(
+    channel: Channel,
+    context: ChannelManagementContext
+  ): Promise<[ContentActor, string]> {
+    if (context && context === 'Owner') {
+      return this.getChannelOwnerActor(channel)
+    }
+    if (context && context === 'Collaborator') {
+      return this.getChannelCollaboratorActor(channel)
+    }
+
+    // Context not set - derive
+
+    try {
+      const owner = await this.getChannelOwnerActor(channel)
+      this.log('Derived context: Channel owner')
+      return owner
+    } catch (e) {
+      // continue
+    }
+
     try {
-      return await this.getActor('Lead')
+      const collaborator = await this.getChannelCollaboratorActor(channel)
+      this.log('Derived context: Channel collaborator')
+      return collaborator
     } catch (e) {
-      return await this.getActor('Curator')
+      // continue
     }
+
+    this.error('No account found with access to manage the provided channel', { exit: ExitCodes.AccessDenied })
   }
 
-  async getCuratorContext(requiredGroupId?: CuratorGroupId): Promise<ContentActor> {
-    const curator = await this.getRequiredWorker()
+  async getCategoryManagementActor(): Promise<[ContentActor, string]> {
+    try {
+      const lead = await this.getContentActor('Lead')
+      this.log('Derived context: Lead')
+      return lead
+    } catch (e) {
+      // continue
+    }
+    try {
+      const curator = await this.getContentActor('Curator')
+      this.log('Derived context: Curator')
+      return curator
+    } catch (e) {
+      // continue
+    }
+
+    this.error('Lead / Curator Group member permissions are required for this action', { exit: ExitCodes.AccessDenied })
+  }
+
+  async getCuratorContext(requiredGroupId?: CuratorGroupId): Promise<[ContentActor, string]> {
+    const curator = await this.getRequiredWorkerContext()
 
     let groupId: number
     if (requiredGroupId) {
@@ -110,7 +156,7 @@ export default abstract class ContentDirectoryCommandBase extends RolesCommandBa
       if (!group.active.valueOf()) {
         this.error(`Curator group ${requiredGroupId.toString()} is no longer active`, { exit: ExitCodes.AccessDenied })
       }
-      if (!group.curators.toArray().some((curatorId) => curatorId.eq(curator.workerId))) {
+      if (!Array.from(group.curators).some((curatorId) => curatorId.eq(curator.workerId))) {
         this.error(`You don't belong to required curator group (ID: ${requiredGroupId.toString()})`, {
           exit: ExitCodes.AccessDenied,
         })
@@ -121,7 +167,7 @@ export default abstract class ContentDirectoryCommandBase extends RolesCommandBa
       const availableGroupIds = groups
         .filter(
           ([, group]) =>
-            group.active.valueOf() && group.curators.toArray().some((curatorId) => curatorId.eq(curator.workerId))
+            group.active.valueOf() && Array.from(group.curators).some((curatorId) => curatorId.eq(curator.workerId))
         )
         .map(([id]) => id)
 
@@ -134,7 +180,10 @@ export default abstract class ContentDirectoryCommandBase extends RolesCommandBa
       }
     }
 
-    return createType('ContentActor', { Curator: [groupId, curator.workerId.toNumber()] })
+    return [
+      createType<ContentActor, 'ContentActor'>('ContentActor', { Curator: [groupId, curator.workerId.toNumber()] }),
+      curator.roleAccount.toString(),
+    ]
   }
 
   private async curatorGroupChoices(ids?: CuratorGroupId[]) {
@@ -145,7 +194,7 @@ export default abstract class ContentDirectoryCommandBase extends RolesCommandBa
         name:
           `Group ${id.toString()} (` +
           `${group.active.valueOf() ? 'Active' : 'Inactive'}, ` +
-          `${group.curators.toArray().length} member(s)), `,
+          `${Array.from(group.curators).length} member(s)), `,
         value: id.toNumber(),
       }))
   }
@@ -226,19 +275,35 @@ export default abstract class ContentDirectoryCommandBase extends RolesCommandBa
     return group
   }
 
-  async getActor(context: typeof CONTEXTS[number]) {
-    let actor: ContentActor
+  async getContentActor(
+    context: Exclude<keyof typeof ContentActor.typeDefinitions, 'Collaborator'>
+  ): Promise<[ContentActor, string]> {
     if (context === 'Member') {
-      const memberId = await this.getRequiredMemberId()
-      actor = this.createType('ContentActor', { Member: memberId })
-    } else if (context === 'Curator') {
-      actor = await this.getCuratorContext()
-    } else {
-      await this.getRequiredLead()
+      const [id, membership] = await this.getRequiredMemberContext()
+      return [
+        createType<ContentActor, 'ContentActor'>('ContentActor', { Member: id }),
+        membership.controller_account.toString(),
+      ]
+    }
+
+    if (context === 'Curator') {
+      return this.getCuratorContext()
+    }
 
-      actor = this.createType('ContentActor', { Lead: null })
+    if (context === 'Lead') {
+      const lead = await this.getRequiredLeadContext()
+      return [createType<ContentActor, 'ContentActor'>('ContentActor', { Lead: null }), lead.roleAccount.toString()]
     }
 
-    return actor
+    throw new Error(`Unrecognized context: ${context}`)
+  }
+
+  async validateCollaborators(collaborators: number[] | MemberId[]): Promise<void> {
+    const collaboratorMembers = await this.getApi().getMembers(collaborators)
+    if (collaboratorMembers.length < collaborators.length || collaboratorMembers.some((m) => m.isEmpty)) {
+      this.error(`Invalid collaborator set! All collaborators must be existing members.`, {
+        exit: ExitCodes.InvalidInput,
+      })
+    }
   }
 }

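For orientation, the actor helpers above now return a `[ContentActor, string]` tuple, pairing the actor to embed in the extrinsic with the address expected to sign it. A hedged sketch of how a channel-update command might consume it follows; `UpdateChannelCommand` and `channel` are placeholders, not code from this diff.

// context comes from channelManagementContextFlag ('Owner' | 'Collaborator' | undefined);
// when undefined, getChannelManagementActor() derives it from the available keys.
const { context } = this.parse(UpdateChannelCommand).flags
const [actor, address] = await this.getChannelManagementActor(channel, context)
// `actor` goes into the extrinsic arguments; `address` identifies the signing key
// (owner/collaborator controller account, or the curator/lead role account).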
+ 16 - 0
cli/src/base/DefaultCommandBase.ts

@@ -38,6 +38,19 @@ export default abstract class DefaultCommandBase extends Command {
     return result
   }
 
+  async requireConfirmation(
+    message = 'Are you sure you want to execute this action?',
+    defaultVal = false
+  ): Promise<void> {
+    if (process.env.AUTO_CONFIRM === 'true' || parseInt(process.env.AUTO_CONFIRM || '')) {
+      return
+    }
+    const { confirmed } = await inquirer.prompt([{ type: 'confirm', name: 'confirmed', message, default: defaultVal }])
+    if (!confirmed) {
+      this.exit(ExitCodes.OK)
+    }
+  }
+
   private jsonPrettyIndented(line: string) {
     return `${this.jsonPrettyIdent}${line}`
   }
@@ -102,6 +115,9 @@ export default abstract class DefaultCommandBase extends Command {
     // called after run and catch regardless of whether or not the command errored
     // We'll force exit here, in case there is no error, to prevent console.log from hanging the process
     if (!err) this.exit(ExitCodes.OK)
+    if (err && process.env.DEBUG === 'true') {
+      console.log(err)
+    }
     super.finally(err)
   }
 

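The new `requireConfirmation` helper in DefaultCommandBase is what the fee and storage-cost prompts elsewhere in this diff build on, and it can be bypassed for scripted runs. A small usage sketch; the command invocation shown in the comment is illustrative only.

// Inside any command inheriting from DefaultCommandBase:
await this.requireConfirmation('This will submit an extrinsic. Continue?')
// Prompts are skipped entirely when AUTO_CONFIRM is 'true' or a non-zero integer, e.g.:
//   AUTO_CONFIRM=true joystream-cli content:createChannel ...
// Setting DEBUG=true makes finally() print the full error object on failures.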
+ 5 - 5
cli/src/base/StateAwareCommandBase.ts

@@ -10,16 +10,16 @@ import { WorkingGroups } from '../Types'
 
 // Type for the state object (which is preserved as json in the state file)
 type StateObject = {
-  selectedAccountFilename: string
   apiUri: string
+  queryNodeUri: string | null | undefined
   defaultWorkingGroup: WorkingGroups
   metadataCache: Record<string, any>
 }
 
 // State object default values
 const DEFAULT_STATE: StateObject = {
-  selectedAccountFilename: '',
   apiUri: '',
+  queryNodeUri: undefined,
   defaultWorkingGroup: WorkingGroups.StorageProviders,
   metadataCache: {},
 }
@@ -88,10 +88,10 @@ export default abstract class StateAwareCommandBase extends DefaultCommandBase {
 
   private initStateFs(): void {
     if (!fs.existsSync(this.getAppDataPath())) {
-      fs.mkdirSync(this.getAppDataPath())
+      fs.mkdirSync(this.getAppDataPath(), { recursive: true })
     }
     if (!fs.existsSync(this.getStateFilePath())) {
-      fs.writeFileSync(this.getStateFilePath(), JSON.stringify(DEFAULT_STATE))
+      fs.writeFileSync(this.getStateFilePath(), JSON.stringify(DEFAULT_STATE, null, 4))
     }
   }
 
@@ -117,7 +117,7 @@ export default abstract class StateAwareCommandBase extends DefaultCommandBase {
     const oldState: StateObject = this.getPreservedState()
     const newState: StateObject = { ...oldState, ...modifiedState }
     try {
-      fs.writeFileSync(stateFilePath, JSON.stringify(newState))
+      fs.writeFileSync(stateFilePath, JSON.stringify(newState, null, 4))
     } catch (e) {
       await unlock()
       throw this.createDataWriteError()

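Since the state file is now pretty-printed, a fresh install's state should look roughly like the object below (illustrative values only). Note that `JSON.stringify` drops `undefined` properties, so `queryNodeUri` is simply absent from the file until the user picks or declines an endpoint, and reads back as `undefined`.

const freshState: StateObject = {
  apiUri: '', // filled in after the first node prompt
  queryNodeUri: undefined, // omitted from the JSON file until explicitly set; null means "opted out"
  defaultWorkingGroup: WorkingGroups.StorageProviders,
  metadataCache: {},
}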
+ 201 - 80
cli/src/base/UploadCommandBase.ts

@@ -1,18 +1,33 @@
 import ContentDirectoryCommandBase from './ContentDirectoryCommandBase'
-import { VideoFFProbeMetadata, VideoFileMetadata, AssetType, InputAsset, InputAssetDetails } from '../Types'
-import { ContentId, ContentParameters } from '@joystream/types/storage'
+import {
+  AssetToUpload,
+  ResolvedAsset,
+  StorageNodeInfo,
+  TokenRequest,
+  TokenRequestData,
+  VideoFFProbeMetadata,
+  VideoFileMetadata,
+} from '../Types'
 import { MultiBar, Options, SingleBar } from 'cli-progress'
-import { Assets } from '../json-schemas/typings/Assets.schema'
 import ExitCodes from '../ExitCodes'
-import ipfsHash from 'ipfs-only-hash'
 import fs from 'fs'
 import _ from 'lodash'
-import axios, { AxiosRequestConfig } from 'axios'
+import axios from 'axios'
 import ffprobeInstaller from '@ffprobe-installer/ffprobe'
 import ffmpeg from 'fluent-ffmpeg'
 import path from 'path'
-import chalk from 'chalk'
 import mimeTypes from 'mime-types'
+import { Assets } from '../schemas/typings/Assets.schema'
+import chalk from 'chalk'
+import { DataObjectCreationParameters } from '@joystream/types/storage'
+import { createHash } from 'blake3'
+import * as multihash from 'multihashes'
+import { u8aToHex, formatBalance } from '@polkadot/util'
+import { KeyringPair } from '@polkadot/keyring/types'
+import FormData from 'form-data'
+import BN from 'bn.js'
+import { createTypeFromConstructor } from '@joystream/types'
+import { StorageAssets } from '@joystream/types/content'
 
 ffmpeg.setFfprobePath(ffprobeInstaller.path)
 
@@ -21,19 +36,19 @@ ffmpeg.setFfprobePath(ffprobeInstaller.path)
  */
 export default abstract class UploadCommandBase extends ContentDirectoryCommandBase {
   private fileSizeCache: Map<string, number> = new Map<string, number>()
+  private maxFileSize: undefined | BN = undefined
   private progressBarOptions: Options = {
+    noTTYOutput: true,
     format: `{barTitle} | {bar} | {value}/{total} KB processed`,
   }
 
+  protected requiresQueryNode = true
+
   getFileSize(path: string): number {
     const cachedSize = this.fileSizeCache.get(path)
     return cachedSize !== undefined ? cachedSize : fs.statSync(path).size
   }
 
-  normalizeEndpoint(endpoint: string) {
-    return endpoint.endsWith('/') ? endpoint : endpoint + '/'
-  }
-
   createReadStreamWithProgressBar(
     filePath: string,
     barTitle: string,
@@ -49,9 +64,13 @@ export default abstract class UploadCommandBase extends ContentDirectoryCommandB
     let processedKB = 0
     const fileSizeKB = Math.ceil(fileSize / 1024)
     const progress = multiBar
-      ? multiBar.create(fileSizeKB, processedKB, { barTitle })
+      ? (multiBar.create(fileSizeKB, processedKB, { barTitle }) as SingleBar | undefined)
       : new SingleBar(this.progressBarOptions)
 
+    if (!progress) {
+      throw new Error('Provided multibar does not support noTTY mode!')
+    }
+
     progress.start(fileSizeKB, processedKB, { barTitle })
     return {
       fileStream: fs
@@ -103,7 +122,7 @@ export default abstract class UploadCommandBase extends ContentDirectoryCommandB
     try {
       ffProbeMetadata = await this.getVideoFFProbeMetadata(filePath)
     } catch (e) {
-      const message = e.message || e
+      const message = e instanceof Error ? e.message : e
       this.warn(`Failed to get video metadata via ffprobe (${message})`)
     }
 
@@ -118,114 +137,183 @@ export default abstract class UploadCommandBase extends ContentDirectoryCommandB
     }
   }
 
-  async calculateFileIpfsHash(filePath: string): Promise<string> {
+  async calculateFileHash(filePath: string): Promise<string> {
     const { fileStream } = this.createReadStreamWithProgressBar(filePath, 'Calculating file hash')
-    const hash: string = await ipfsHash.of(fileStream)
-
-    return hash
+    let blake3Hash: Uint8Array
+    return new Promise<string>((resolve, reject) => {
+      fileStream
+        .pipe(createHash())
+        .on('data', (data) => (blake3Hash = data))
+        .on('end', () => resolve(multihash.toB58String(multihash.encode(blake3Hash, 'blake3'))))
+        .on('error', (err) => reject(err))
+    })
   }
 
-  validateFile(filePath: string): void {
+  async validateFile(filePath: string): Promise<void> {
     // Basic file validation
     if (!fs.existsSync(filePath)) {
       this.error(`${filePath} - file does not exist under provided path!`, { exit: ExitCodes.FileNotFound })
     }
+    if (!this.maxFileSize) {
+      this.maxFileSize = await this.getOriginalApi().consts.storage.maxDataObjectSize
+    }
+    if (this.maxFileSize.ltn(this.getFileSize(filePath))) {
+      this.error(`${filePath} - file is too big. Max file size is ${this.maxFileSize.toString()} bytes`)
+    }
   }
 
-  assetUrl(endpointRoot: string, contentId: ContentId): string {
-    // This will also make sure the resulting url is a valid url
-    return new URL(`asset/v0/${contentId.encode()}`, this.normalizeEndpoint(endpointRoot)).toString()
-  }
-
-  async getRandomProviderEndpoint(): Promise<string | null> {
-    const endpoints = _.shuffle(await this.getApi().allStorageProviderEndpoints())
-    for (const endpoint of endpoints) {
-      try {
-        const url = new URL('swagger.json', this.normalizeEndpoint(endpoint)).toString()
-        await axios.head(url)
-        return endpoint
-      } catch (e) {
-        continue
+  async getRandomActiveStorageNodeInfo(bagId: string, retryTime = 6, retryCount = 5): Promise<StorageNodeInfo | null> {
+    for (let i = 0; i <= retryCount; ++i) {
+      const nodesInfo = _.shuffle(await this.getQNApi().storageNodesInfoByBagId(bagId))
+      for (const info of nodesInfo) {
+        try {
+          await axios.get(info.apiEndpoint + '/version', {
+            headers: {
+              connection: 'close',
+            },
+          })
+          return info
+        } catch (err) {
+          continue
+        }
+      }
+      if (i !== retryCount) {
+        this.log(`No storage provider can serve the request yet, retrying in ${retryTime}s (${i + 1}/${retryCount})...`)
+        await new Promise((resolve) => setTimeout(resolve, retryTime * 1000))
       }
     }
 
     return null
   }
 
-  async generateContentParameters(filePath: string, type: AssetType): Promise<ContentParameters> {
-    return this.createType('ContentParameters', {
-      content_id: ContentId.generate(this.getTypesRegistry()),
-      type_id: type,
+  async generateDataObjectParameters(filePath: string): Promise<DataObjectCreationParameters> {
+    return createTypeFromConstructor(DataObjectCreationParameters, {
       size: this.getFileSize(filePath),
-      ipfs_content_id: await this.calculateFileIpfsHash(filePath),
+      ipfsContentId: await this.calculateFileHash(filePath),
     })
   }
 
-  async prepareInputAssets(paths: string[], basePath?: string): Promise<InputAssetDetails[]> {
-    // Resolve assets
-    if (basePath) {
-      paths = paths.map((p) => basePath && path.resolve(path.dirname(basePath), p))
+  async resolveAndValidateAssets<T extends Record<string, string | null | undefined>>(
+    paths: T,
+    basePath: string
+  ): Promise<[ResolvedAsset[], { [K in keyof T]?: number }]> {
+    const assetIndices: { [K in keyof T]?: number } = {}
+    const resolvedAssets: ResolvedAsset[] = []
+    for (let [assetKey, assetPath] of Object.entries(paths)) {
+      const assetType = assetKey as keyof T
+      if (!assetPath) {
+        assetIndices[assetType] = undefined
+        continue
+      }
+      if (basePath) {
+        assetPath = path.resolve(path.dirname(basePath), assetPath)
+      }
+      await this.validateFile(assetPath)
+      const parameters = await this.generateDataObjectParameters(assetPath)
+      assetIndices[assetType] = resolvedAssets.length
+      resolvedAssets.push({
+        path: assetPath,
+        parameters,
+      })
+    }
+    return [resolvedAssets, assetIndices]
+  }
+
+  async getStorageNodeUploadToken(
+    storageNodeInfo: StorageNodeInfo,
+    account: KeyringPair,
+    memberId: number,
+    objectId: BN,
+    bagId: string
+  ): Promise<string> {
+    const data: TokenRequestData = {
+      storageBucketId: storageNodeInfo.bucketId,
+      accountId: account.address,
+      bagId,
+      memberId,
+      dataObjectId: objectId.toNumber(),
+    }
+    const message = JSON.stringify(data)
+    const signature = u8aToHex(account.sign(message))
+    const postData: TokenRequest = { data, signature }
+    const {
+      data: { token },
+    } = await axios.post(`${storageNodeInfo.apiEndpoint}/authToken`, postData)
+    if (!token) {
+      this.error('Received empty token from the storage node!', { exit: ExitCodes.StorageNodeError })
     }
-    // Validate assets
-    paths.forEach((p) => this.validateFile(p))
 
-    // Return data
-    return await Promise.all(
-      paths.map(async (path) => {
-        const parameters = await this.generateContentParameters(path, AssetType.AnyAsset)
-        return {
-          path,
-          contentId: parameters.content_id,
-          parameters,
-        }
-      })
-    )
+    return token
   }
 
-  async uploadAsset(contentId: ContentId, filePath: string, endpoint?: string, multiBar?: MultiBar): Promise<void> {
-    const providerEndpoint = endpoint || (await this.getRandomProviderEndpoint())
-    if (!providerEndpoint) {
-      this.error('No active provider found!', { exit: ExitCodes.ActionCurrentlyUnavailable })
+  async uploadAsset(
+    account: KeyringPair,
+    memberId: number,
+    objectId: BN,
+    bagId: string,
+    filePath: string,
+    storageNode?: StorageNodeInfo,
+    multiBar?: MultiBar
+  ): Promise<void> {
+    const storageNodeInfo = storageNode || (await this.getRandomActiveStorageNodeInfo(bagId))
+    if (!storageNodeInfo) {
+      this.error('No active storage node found!', { exit: ExitCodes.ActionCurrentlyUnavailable })
     }
-    const uploadUrl = this.assetUrl(providerEndpoint, contentId)
-    const fileSize = this.getFileSize(filePath)
+    this.log(`Chosen storage node endpoint: ${storageNodeInfo.apiEndpoint}`)
     const { fileStream, progressBar } = this.createReadStreamWithProgressBar(
       filePath,
-      `Uploading ${contentId.encode()}`,
+      `Uploading ${filePath}`,
       multiBar
     )
     fileStream.on('end', () => {
       // Temporarily disabled because with Promise.all it breaks the UI
       // cli.action.start('Waiting for the file to be processed...')
     })
-
+    const formData = new FormData()
+    formData.append('dataObjectId', objectId.toString())
+    formData.append('storageBucketId', storageNodeInfo.bucketId)
+    formData.append('bagId', bagId)
+    formData.append('file', fileStream, {
+      filename: path.basename(filePath),
+      filepath: filePath,
+      knownLength: this.getFileSize(filePath),
+    })
+    this.log(`Uploading object ${objectId.toString()} (${filePath})`)
     try {
-      const config: AxiosRequestConfig = {
+      await axios.post(`${storageNodeInfo.apiEndpoint}/files`, formData, {
+        maxBodyLength: Infinity,
+        maxContentLength: Infinity,
         headers: {
-          'Content-Type': '', // https://github.com/Joystream/storage-node-joystream/issues/16
-          'Content-Length': fileSize.toString(),
+          'content-type': 'multipart/form-data',
+          ...formData.getHeaders(),
         },
-        maxBodyLength: fileSize,
-      }
-      await axios.put(uploadUrl, fileStream, config)
+      })
     } catch (e) {
       progressBar.stop()
-      const msg = (e.response && e.response.data && e.response.data.message) || e.message || e
-      this.error(`Unexpected error when trying to upload a file: ${msg}`, {
-        exit: ExitCodes.ExternalInfrastructureError,
-      })
+      if (axios.isAxiosError(e)) {
+        const msg = e.response && e.response.data ? JSON.stringify(e.response.data) : e.message
+        this.error(`Unexpected error when trying to upload a file: ${msg}`, {
+          exit: ExitCodes.StorageNodeError,
+        })
+      } else {
+        throw e
+      }
     }
   }
 
   async uploadAssets(
-    assets: InputAsset[],
+    account: KeyringPair,
+    memberId: number,
+    bagId: string,
+    assets: AssetToUpload[],
     inputFilePath: string,
     outputFilePostfix = '__rejectedContent'
   ): Promise<void> {
-    const endpoint = await this.getRandomProviderEndpoint()
-    if (!endpoint) {
+    const storageNodeInfo = await this.getRandomActiveStorageNodeInfo(bagId)
+    if (!storageNodeInfo) {
       this.warn('No storage provider is currently available!')
       this.handleRejectedUploads(
+        bagId,
         assets,
         assets.map(() => false),
         inputFilePath,
@@ -234,34 +322,67 @@ export default abstract class UploadCommandBase extends ContentDirectoryCommandB
       this.exit(ExitCodes.ActionCurrentlyUnavailable)
     }
     const multiBar = new MultiBar(this.progressBarOptions)
+    const errors: [string, string][] = []
     // Workaround replacement for Promise.allSettled (which is only available in ES2020)
     const results = await Promise.all(
       assets.map(async (a) => {
         try {
-          await this.uploadAsset(a.contentId, a.path, endpoint, multiBar)
+          await this.uploadAsset(account, memberId, a.dataObjectId, bagId, a.path, storageNodeInfo, multiBar)
           return true
         } catch (e) {
+          errors.push([a.dataObjectId.toString(), e instanceof Error ? e.message : 'Unknown error'])
           return false
         }
       })
     )
-    this.handleRejectedUploads(assets, results, inputFilePath, outputFilePostfix)
+    errors.forEach(([objectId, message]) => this.warn(`Upload of object ${objectId} failed: ${message}`))
+    this.handleRejectedUploads(bagId, assets, results, inputFilePath, outputFilePostfix)
     multiBar.stop()
   }
 
+  async prepareAssetsForExtrinsic(resolvedAssets: ResolvedAsset[]): Promise<StorageAssets | undefined> {
+    const feePerMB = await this.getOriginalApi().query.storage.dataObjectPerMegabyteFee()
+    const { dataObjectDeletionPrize } = this.getOriginalApi().consts.storage
+    if (resolvedAssets.length) {
+      const totalBytes = resolvedAssets
+        .reduce((a, b) => {
+          return a.add(b.parameters.getField('size'))
+        }, new BN(0))
+        .toNumber()
+      const totalStorageFee = feePerMB.muln(Math.ceil(totalBytes / 1024 / 1024))
+      const totalDeletionPrize = dataObjectDeletionPrize.muln(resolvedAssets.length)
+      await this.requireConfirmation(
+        `Some additional costs will be associated with this operation:\n` +
+          `Total data storage fee: ${chalk.cyan(formatBalance(totalStorageFee))}\n` +
+          `Total deletion prize: ${chalk.cyan(
+            formatBalance(totalDeletionPrize)
+          )} (recoverable on data object(s) removal)\n` +
+          `Are you sure you want to continue?`
+      )
+      return createTypeFromConstructor(StorageAssets, {
+        expected_data_size_fee: feePerMB,
+        object_creation_list: resolvedAssets.map((a) => a.parameters),
+      })
+    }
+
+    return undefined
+  }
+
   private handleRejectedUploads(
-    assets: InputAsset[],
+    bagId: string,
+    assets: AssetToUpload[],
     results: boolean[],
     inputFilePath: string,
     outputFilePostfix: string
   ): void {
     // Try to save rejected contentIds and paths for reupload purposes
-    const rejectedAssetsOutput: Assets = []
+    const rejectedAssetsOutput: Assets = { bagId, assets: [] }
     results.forEach(
       (r, i) =>
-        r === false && rejectedAssetsOutput.push({ contentId: assets[i].contentId.encode(), path: assets[i].path })
+        r === false &&
+        rejectedAssetsOutput.assets.push({ objectId: assets[i].dataObjectId.toString(), path: assets[i].path })
     )
-    if (rejectedAssetsOutput.length) {
+    if (rejectedAssetsOutput.assets.length) {
       this.warn(
         `Some assets were not uploaded successfully. Try reuploading them with ${chalk.magentaBright(
           'content:reuploadAssets'

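The IPFS-hash based content ids are replaced above by blake3 multihashes. Here is a standalone restatement of the hashing step in `calculateFileHash`, assuming the same `blake3` and `multihashes` packages imported at the top of the file.

import fs from 'fs'
import { createHash } from 'blake3'
import * as multihash from 'multihashes'

// Computes the base58-encoded blake3 multihash of a file, as expected in
// DataObjectCreationParameters.ipfsContentId.
function fileMultihash(filePath: string): Promise<string> {
  return new Promise((resolve, reject) => {
    let digest: Uint8Array
    fs.createReadStream(filePath)
      .pipe(createHash())
      .on('data', (chunk) => (digest = chunk))
      .on('end', () => resolve(multihash.toB58String(multihash.encode(digest, 'blake3'))))
      .on('error', reject)
  })
}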
+ 19 - 53
cli/src/base/WorkingGroupsCommandBase.ts

@@ -1,38 +1,27 @@
 import ExitCodes from '../ExitCodes'
 import AccountsCommandBase from './AccountsCommandBase'
 import { flags } from '@oclif/command'
-import {
-  WorkingGroups,
-  AvailableGroups,
-  NamedKeyringPair,
-  GroupMember,
-  GroupOpening,
-  OpeningStatus,
-  GroupApplication,
-} from '../Types'
+import { WorkingGroups, AvailableGroups, GroupMember, GroupOpening, OpeningStatus, GroupApplication } from '../Types'
 import _ from 'lodash'
 import { ApplicationStageKeys } from '@joystream/types/hiring'
 import chalk from 'chalk'
-import { IConfig } from '@oclif/config'
 
 /**
  * Abstract base class for commands that need to use gates based on user's roles
  */
 export abstract class RolesCommandBase extends AccountsCommandBase {
-  group: WorkingGroups
+  group!: WorkingGroups
 
-  constructor(argv: string[], config: IConfig) {
-    super(argv, config)
-    // Can be modified by child class constructor
+  async init(): Promise<void> {
+    await super.init()
     this.group = this.getPreservedState().defaultWorkingGroup
   }
 
   // Use when lead access is required in given command
-  async getRequiredLead(): Promise<GroupMember> {
-    const selectedAccount: NamedKeyringPair = await this.getRequiredSelectedAccount()
+  async getRequiredLeadContext(): Promise<GroupMember> {
     const lead = await this.getApi().groupLead(this.group)
 
-    if (!lead || lead.roleAccount.toString() !== selectedAccount.address) {
+    if (!lead || !this.isKeyAvailable(lead.roleAccount)) {
       this.error(`${_.startCase(this.group)} Group Lead access required for this command!`, {
         exit: ExitCodes.AccessDenied,
       })
@@ -42,38 +31,22 @@ export abstract class RolesCommandBase extends AccountsCommandBase {
   }
 
   // Use when worker access is required in given command
-  async getRequiredWorker(): Promise<GroupMember> {
-    const selectedAccount: NamedKeyringPair = await this.getRequiredSelectedAccount()
+  async getRequiredWorkerContext(expectedKeyType: 'Role' | 'MemberController' = 'Role'): Promise<GroupMember> {
     const groupMembers = await this.getApi().groupMembers(this.group)
-    const groupMembersByAccount = groupMembers.filter((m) => m.roleAccount.toString() === selectedAccount.address)
-
-    if (!groupMembersByAccount.length) {
-      this.error(`${_.startCase(this.group)} Group Worker access required for this command!`, {
-        exit: ExitCodes.AccessDenied,
-      })
-    } else if (groupMembersByAccount.length === 1) {
-      return groupMembersByAccount[0]
-    } else {
-      return await this.promptForWorker(groupMembersByAccount)
-    }
-  }
-
-  // Use when member controller access is required, but one of the associated roles is expected to be selected
-  async getRequiredWorkerByMemberController(): Promise<GroupMember> {
-    const selectedAccount: NamedKeyringPair = await this.getRequiredSelectedAccount()
-    const memberIds = await this.getApi().getMemberIdsByControllerAccount(selectedAccount.address)
-    const controlledWorkers = (await this.getApi().groupMembers(this.group)).filter((groupMember) =>
-      memberIds.some((memberId) => groupMember.memberId.eq(memberId))
+    const availableGroupMemberContexts = groupMembers.filter((m) =>
+      expectedKeyType === 'Role'
+        ? this.isKeyAvailable(m.roleAccount.toString())
+        : this.isKeyAvailable(m.profile.controller_account.toString())
     )
 
-    if (!controlledWorkers.length) {
-      this.error(`Member controller account with some associated ${this.group} group roles needs to be selected!`, {
+    if (!availableGroupMemberContexts.length) {
+      this.error(`No ${_.startCase(this.group)} Group Worker ${_.startCase(expectedKeyType)} key available!`, {
         exit: ExitCodes.AccessDenied,
       })
-    } else if (controlledWorkers.length === 1) {
-      return controlledWorkers[0]
+    } else if (availableGroupMemberContexts.length === 1) {
+      return availableGroupMemberContexts[0]
     } else {
-      return await this.promptForWorker(controlledWorkers)
+      return await this.promptForWorker(availableGroupMemberContexts)
     }
   }
 
@@ -95,13 +68,6 @@ export abstract class RolesCommandBase extends AccountsCommandBase {
  * Abstract base class for commands directly related to working groups
  */
 export default abstract class WorkingGroupsCommandBase extends RolesCommandBase {
-  group: WorkingGroups
-
-  constructor(argv: string[], config: IConfig) {
-    super(argv, config)
-    this.group = this.getPreservedState().defaultWorkingGroup
-  }
-
   static flags = {
     group: flags.enum({
       char: 'g',
@@ -167,7 +133,7 @@ export default abstract class WorkingGroupsCommandBase extends RolesCommandBase
     return application
   }
 
-  async getWorkerForLeadAction(id: number, requireStakeProfile = false) {
+  async getWorkerForLeadAction(id: number, requireStakeProfile = false): Promise<GroupMember> {
     const groupMember = await this.getApi().groupMember(this.group, id)
     const groupLead = await this.getApi().groupLead(this.group)
 
@@ -184,11 +150,11 @@ export default abstract class WorkingGroupsCommandBase extends RolesCommandBase
 
   // Helper for better TS handling.
   // We could also use some magic with conditional types instead, but those don't seem be very well supported yet.
-  async getWorkerWithStakeForLeadAction(id: number) {
+  async getWorkerWithStakeForLeadAction(id: number): Promise<GroupMember & Required<Pick<GroupMember, 'stake'>>> {
     return (await this.getWorkerForLeadAction(id, true)) as GroupMember & Required<Pick<GroupMember, 'stake'>>
   }
 
-  async init() {
+  async init(): Promise<void> {
     await super.init()
     const { flags } = this.parse(this.constructor as typeof WorkingGroupsCommandBase)
     if (flags.group) {

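With the "selected account" concept gone, role context in RolesCommandBase is now derived from which keys are present in the CLI keyring. A short sketch of how a command picks a worker context under the new model, restating the logic above:

// Signs with the worker's role key by default:
const worker = await this.getRequiredWorkerContext()
// Or require the member controller key instead (e.g. for member-signed extrinsics):
const asMember = await this.getRequiredWorkerContext('MemberController')
// If several matching workers have available keys, the user is prompted to pick one.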
+ 0 - 48
cli/src/commands/account/choose.ts

@@ -1,48 +0,0 @@
-import AccountsCommandBase from '../../base/AccountsCommandBase'
-import chalk from 'chalk'
-import ExitCodes from '../../ExitCodes'
-import { NamedKeyringPair } from '../../Types'
-import { flags } from '@oclif/command'
-
-export default class AccountChoose extends AccountsCommandBase {
-  static description = 'Choose default account to use in the CLI'
-  static flags = {
-    showSpecial: flags.boolean({
-      description: 'Whether to show special (DEV chain) accounts',
-      char: 'S',
-      required: false,
-    }),
-    address: flags.string({
-      description: 'Select account by address (if available)',
-      char: 'a',
-      required: false,
-    }),
-  }
-
-  async run() {
-    const { showSpecial, address } = this.parse(AccountChoose).flags
-    const accounts: NamedKeyringPair[] = this.fetchAccounts(!!address || showSpecial)
-    const selectedAccount: NamedKeyringPair | null = this.getSelectedAccount()
-
-    this.log(chalk.magentaBright(`Found ${accounts.length} existing accounts...\n`))
-
-    if (accounts.length === 0) {
-      this.warn('No account to choose from. Add accont using account:import or account:create.')
-      this.exit(ExitCodes.NoAccountFound)
-    }
-
-    let choosenAccount: NamedKeyringPair
-    if (address) {
-      const matchingAccount = accounts.find((a) => a.address === address)
-      if (!matchingAccount) {
-        this.error(`No matching account found by address: ${address}`, { exit: ExitCodes.InvalidInput })
-      }
-      choosenAccount = matchingAccount
-    } else {
-      choosenAccount = await this.promptForAccount(accounts, selectedAccount)
-    }
-
-    await this.setSelectedAccount(choosenAccount)
-    this.log(chalk.greenBright(`\nAccount switched to ${chalk.magentaBright(choosenAccount.address)}!`))
-  }
-}

+ 16 - 39
cli/src/commands/account/create.ts

@@ -1,47 +1,24 @@
-import chalk from 'chalk'
-import ExitCodes from '../../ExitCodes'
-import AccountsCommandBase from '../../base/AccountsCommandBase'
-import { Keyring } from '@polkadot/api'
-import { mnemonicGenerate } from '@polkadot/util-crypto'
-import { NamedKeyringPair } from '../../Types'
-
-type AccountCreateArgs = {
-  name: string
-}
+import AccountsCommandBase, { DEFAULT_ACCOUNT_TYPE } from '../../base/AccountsCommandBase'
+import { KeypairType } from '@polkadot/util-crypto/types'
+import { flags } from '@oclif/command'
 
 export default class AccountCreate extends AccountsCommandBase {
-  static description = 'Create new account'
+  static description = 'Create a new account'
 
-  static args = [
-    {
-      name: 'name',
-      required: true,
+  static flags = {
+    name: flags.string({
+      required: false,
       description: 'Account name',
-    },
-  ]
-
-  validatePass(password: string, password2: string): void {
-    if (password !== password2) this.error('Passwords are not the same!', { exit: ExitCodes.InvalidInput })
-    if (!password) this.error("You didn't provide a password", { exit: ExitCodes.InvalidInput })
+    }),
+    type: flags.enum<KeypairType>({
+      required: false,
+      description: `Account type (defaults to ${DEFAULT_ACCOUNT_TYPE})`,
+      options: ['sr25519', 'ed25519'],
+    }),
   }
 
-  async run() {
-    const args: AccountCreateArgs = this.parse(AccountCreate).args as AccountCreateArgs
-    const keyring: Keyring = new Keyring()
-    const mnemonic: string = mnemonicGenerate()
-
-    keyring.addFromMnemonic(mnemonic, { name: args.name, whenCreated: Date.now() })
-    const keys: NamedKeyringPair = keyring.pairs[0] as NamedKeyringPair // We assigned the name above
-
-    const password = await this.promptForPassword("Set your account's password")
-    const password2 = await this.promptForPassword('Confirm your password')
-
-    this.validatePass(password, password2)
-
-    this.saveAccount(keys, password)
-
-    this.log(chalk.greenBright(`\nAccount successfully created!`))
-    this.log(chalk.magentaBright(`${chalk.bold('Name:    ')}${args.name}`))
-    this.log(chalk.magentaBright(`${chalk.bold('Address: ')}${keys.address}`))
+  async run(): Promise<void> {
+    const { name, type } = this.parse(AccountCreate).flags
+    await this.createAccount(name, undefined, undefined, type)
   }
 }

+ 0 - 40
cli/src/commands/account/current.ts

@@ -1,40 +0,0 @@
-import AccountsCommandBase from '../../base/AccountsCommandBase'
-import { AccountSummary, NameValueObj, NamedKeyringPair } from '../../Types'
-import { displayHeader, displayNameValueTable } from '../../helpers/display'
-import { formatBalance } from '@polkadot/util'
-import moment from 'moment'
-
-export default class AccountCurrent extends AccountsCommandBase {
-  static description = 'Display information about currently choosen default account'
-  static aliases = ['account:info', 'account:default']
-
-  async run() {
-    const currentAccount: NamedKeyringPair = await this.getRequiredSelectedAccount(false)
-    const summary: AccountSummary = await this.getApi().getAccountSummary(currentAccount.address)
-
-    displayHeader('Account information')
-    const creationDate: string = currentAccount.meta.whenCreated
-      ? moment(currentAccount.meta.whenCreated as string | number).format('YYYY-MM-DD HH:mm:ss')
-      : '?'
-    const accountRows: NameValueObj[] = [
-      { name: 'Account name:', value: currentAccount.meta.name },
-      { name: 'Address:', value: currentAccount.address },
-      { name: 'Created:', value: creationDate },
-    ]
-    displayNameValueTable(accountRows)
-
-    displayHeader('Balances')
-    const balances = summary.balances
-    const balancesRows: NameValueObj[] = [
-      { name: 'Total balance:', value: formatBalance(balances.votingBalance) },
-      { name: 'Transferable balance:', value: formatBalance(balances.availableBalance) },
-    ]
-    if (balances.lockedBalance.gtn(0)) {
-      balancesRows.push({ name: 'Locked balance:', value: formatBalance(balances.lockedBalance) })
-    }
-    if (balances.reservedBalance.gtn(0)) {
-      balancesRows.push({ name: 'Reserved balance:', value: formatBalance(balances.reservedBalance) })
-    }
-    displayNameValueTable(balancesRows)
-  }
-}

+ 34 - 31
cli/src/commands/account/export.ts

@@ -4,10 +4,8 @@ import path from 'path'
 import ExitCodes from '../../ExitCodes'
 import AccountsCommandBase from '../../base/AccountsCommandBase'
 import { flags } from '@oclif/command'
-import { NamedKeyringPair } from '../../Types'
 
-type AccountExportFlags = { all: boolean }
-type AccountExportArgs = { path: string }
+type AccountExportArgs = { destPath: string }
 
 export default class AccountExport extends AccountsCommandBase {
   static description = 'Export account(s) to given location'
@@ -15,22 +13,30 @@ export default class AccountExport extends AccountsCommandBase {
 
   static args = [
     {
-      name: 'path',
+      name: 'destPath',
       required: true,
       description: 'Path where the exported files should be placed',
     },
   ]
 
   static flags = {
+    name: flags.string({
+      char: 'n',
+      description: 'Name of the account to export',
+      required: false,
+      exclusive: ['all'],
+    }),
     all: flags.boolean({
       char: 'a',
       description: `If provided, exports all existing accounts into "${AccountExport.MULTI_EXPORT_FOLDER_NAME}" folder inside given path`,
+      required: false,
+      exclusive: ['name'],
     }),
   }
 
-  exportAccount(account: NamedKeyringPair, destPath: string): string {
-    const sourceFilePath: string = this.getAccountFilePath(account)
-    const destFilePath: string = path.join(destPath, this.generateAccountFilename(account))
+  exportAccount(name: string, destPath: string): string {
+    const sourceFilePath: string = this.getAccountFilePath(name)
+    const destFilePath: string = path.join(destPath, this.getAccountFileName(name))
     try {
       fs.copyFileSync(sourceFilePath, destFilePath)
     } catch (e) {
@@ -42,35 +48,32 @@ export default class AccountExport extends AccountsCommandBase {
     return destFilePath
   }
 
-  async run() {
-    const args: AccountExportArgs = this.parse(AccountExport).args as AccountExportArgs
-    const flags: AccountExportFlags = this.parse(AccountExport).flags as AccountExportFlags
-    const accounts: NamedKeyringPair[] = this.fetchAccounts()
-
-    if (!accounts.length) {
-      this.error('No accounts found!', { exit: ExitCodes.NoAccountFound })
-    }
+  async run(): Promise<void> {
+    const { destPath } = this.parse(AccountExport).args as AccountExportArgs
+    let { name, all } = this.parse(AccountExport).flags
+    const accounts = this.fetchAccounts()
 
-    if (flags.all) {
-      const destPath: string = path.join(args.path, AccountExport.MULTI_EXPORT_FOLDER_NAME)
+    if (all) {
+      const exportPath: string = path.join(destPath, AccountExport.MULTI_EXPORT_FOLDER_NAME)
       try {
-        if (!fs.existsSync(destPath)) fs.mkdirSync(destPath)
+        if (!fs.existsSync(exportPath)) {
+          fs.mkdirSync(exportPath, { recursive: true })
+        }
       } catch (e) {
-        this.error(`Failed to create the export folder (${destPath})`, { exit: ExitCodes.FsOperationFailed })
+        this.error(`Failed to create the export folder (${exportPath})`, { exit: ExitCodes.FsOperationFailed })
       }
-      for (const account of accounts) this.exportAccount(account, destPath)
-      this.log(
-        chalk.greenBright(`All accounts successfully exported successfully to: ${chalk.magentaBright(destPath)}!`)
-      )
+      for (const acc of accounts) {
+        this.exportAccount(acc.meta.name, exportPath)
+      }
+      this.log(chalk.greenBright(`All accounts successfully exported to: ${chalk.magentaBright(exportPath)}!`))
     } else {
-      const destPath: string = args.path
-      const choosenAccount: NamedKeyringPair = await this.promptForAccount(
-        accounts,
-        null,
-        'Select an account to export'
-      )
-      const exportedFilePath: string = this.exportAccount(choosenAccount, destPath)
-      this.log(chalk.greenBright(`Account successfully exported to: ${chalk.magentaBright(exportedFilePath)}`))
+      if (!name) {
+        const key = await this.promptForAccount('Select an account to export', false, false)
+        const { meta } = this.getPair(key)
+        name = meta.name
+      }
+      const exportedFilePath: string = this.exportAccount(name, destPath)
+      this.log(chalk.greenBright(`Account successfully exported to: ${chalk.magentaBright(exportedFilePath)}`))
     }
   }
 }

+ 4 - 10
cli/src/commands/account/forget.ts

@@ -2,22 +2,16 @@ import fs from 'fs'
 import chalk from 'chalk'
 import ExitCodes from '../../ExitCodes'
 import AccountsCommandBase from '../../base/AccountsCommandBase'
-import { NamedKeyringPair } from '../../Types'
 
 export default class AccountForget extends AccountsCommandBase {
   static description = 'Forget (remove) account from the list of available accounts'
 
-  async run() {
-    const accounts: NamedKeyringPair[] = this.fetchAccounts()
+  async run(): Promise<void> {
+    const selectedKey = await this.promptForAccount('Select an account to forget', false, false)
+    await this.requireConfirmation('Are you sure you want to PERMANENTLY FORGET this account?')
 
-    if (!accounts.length) {
-      this.error('No accounts found!', { exit: ExitCodes.NoAccountFound })
-    }
-
-    const choosenAccount: NamedKeyringPair = await this.promptForAccount(accounts, null, 'Select an account to forget')
-    await this.requireConfirmation('Are you sure you want this account to be forgotten?')
+    const accountFilePath = this.getAccountFilePath(this.getPair(selectedKey).meta.name)
 
-    const accountFilePath: string = this.getAccountFilePath(choosenAccount)
     try {
       fs.unlinkSync(accountFilePath)
     } catch (e) {

+ 56 - 33
cli/src/commands/account/import.ts

@@ -1,44 +1,67 @@
-import fs from 'fs'
-import chalk from 'chalk'
-import path from 'path'
-import ExitCodes from '../../ExitCodes'
-import AccountsCommandBase from '../../base/AccountsCommandBase'
-import { NamedKeyringPair } from '../../Types'
-
-type AccountImportArgs = {
-  backupFilePath: string
-}
+import AccountsCommandBase, { DEFAULT_ACCOUNT_TYPE, KEYRING_OPTIONS } from '../../base/AccountsCommandBase'
+import { flags } from '@oclif/command'
+import Keyring from '@polkadot/keyring'
+import { KeypairType } from '@polkadot/util-crypto/types'
 
 export default class AccountImport extends AccountsCommandBase {
-  static description = 'Import account using JSON backup file'
+  static description = 'Import account using mnemonic phrase, seed, suri or json backup file'
 
-  static args = [
-    {
-      name: 'backupFilePath',
-      required: true,
+  static flags = {
+    name: flags.string({
+      required: false,
+      description: 'Account name',
+    }),
+    mnemonic: flags.string({
+      required: false,
+      description: 'Mnemonic phrase',
+      exclusive: ['backupFilePath', 'seed', 'suri'],
+    }),
+    seed: flags.string({
+      required: false,
+      description: 'Secret seed',
+      exclusive: ['backupFilePath', 'mnemonic', 'suri'],
+    }),
+    backupFilePath: flags.string({
+      required: false,
       description: 'Path to account backup JSON file',
-    },
-  ]
+      exclusive: ['mnemonic', 'seed', 'suri'],
+    }),
+    suri: flags.string({
+      required: false,
+      description: 'Substrate uri',
+      exclusive: ['mnemonic', 'seed', 'backupFilePath'],
+    }),
+    type: flags.enum<KeypairType>({
+      required: false,
+      description: `Account type (defaults to ${DEFAULT_ACCOUNT_TYPE})`,
+      options: ['sr25519', 'ed25519'],
+      exclusive: ['backupFilePath'],
+    }),
+    password: flags.string({
+      required: false,
+      description: `Account password`,
+    }),
+  }
 
-  async run() {
-    const args: AccountImportArgs = this.parse(AccountImport).args as AccountImportArgs
-    const backupAcc: NamedKeyringPair = this.fetchAccountFromJsonFile(args.backupFilePath)
-    const accountName: string = backupAcc.meta.name
-    const accountAddress: string = backupAcc.address
+  async run(): Promise<void> {
+    const { name, mnemonic, seed, backupFilePath, suri, type, password } = this.parse(AccountImport).flags
 
-    const sourcePath: string = args.backupFilePath
-    const destPath: string = path.join(this.getAccountsDirPath(), this.generateAccountFilename(backupAcc))
+    const keyring = new Keyring(KEYRING_OPTIONS)
 
-    try {
-      fs.copyFileSync(sourcePath, destPath)
-    } catch (e) {
-      this.error('Unexpected error while trying to copy input file! Permissions issue?', {
-        exit: ExitCodes.FsOperationFailed,
-      })
+    if (mnemonic) {
+      keyring.addFromMnemonic(mnemonic, {}, type)
+    } else if (seed) {
+      keyring.addFromSeed(Buffer.from(seed), {}, type)
+    } else if (suri) {
+      keyring.addFromUri(suri, {}, type)
+    } else if (backupFilePath) {
+      const pair = this.fetchAccountFromJsonFile(backupFilePath)
+      keyring.addPair(pair)
+    } else {
+      this._help()
+      return
     }
 
-    this.log(chalk.bold.greenBright(`ACCOUNT IMPORTED SUCCESSFULLY!`))
-    this.log(chalk.bold.magentaBright(`NAME:    `), accountName)
-    this.log(chalk.bold.magentaBright(`ADDRESS: `), accountAddress)
+    await this.createAccount(name, keyring.getPairs()[0], password)
   }
 }

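For context, a minimal sketch (not part of this change) of the @polkadot/keyring calls the new account:import flags map onto; the keyring options and inputs below are illustrative, not the CLI's actual defaults:

import Keyring from '@polkadot/keyring'

const keyring = new Keyring({ type: 'sr25519' }) // illustrative options

// --mnemonic: derive the pair from a BIP39 phrase (Substrate dev phrase shown here)
keyring.addFromMnemonic('bottom drive obey lake curtain smoke basket hold race lonely fit walk', {}, 'sr25519')
// --suri: a mnemonic/seed with an optional derivation path and password
keyring.addFromUri('//Alice', {}, 'sr25519')
// --seed: raw 32-byte secret seed (the command above wraps the flag value with Buffer.from)
keyring.addFromSeed(Buffer.from('0'.repeat(32)), {}, 'sr25519')

console.log(keyring.getPairs().map((p) => p.address))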
+ 56 - 0
cli/src/commands/account/info.ts

@@ -0,0 +1,56 @@
+import AccountsCommandBase from '../../base/AccountsCommandBase'
+import ExitCodes from '../../ExitCodes'
+import { validateAddress } from '../../helpers/validation'
+import { NameValueObj } from '../../Types'
+import { displayHeader, displayNameValueTable } from '../../helpers/display'
+import { formatBalance } from '@polkadot/util'
+import moment from 'moment'
+
+export default class AccountInfo extends AccountsCommandBase {
+  static description = 'Display detailed information about specified account'
+  static aliases = ['account:inspect']
+  static args = [
+    { name: 'address', required: false, description: 'An address to inspect (can also be provided interactively)' },
+  ]
+
+  async run(): Promise<void> {
+    let { address } = this.parse(AccountInfo).args
+
+    if (!address) {
+      address = await this.promptForAnyAddress()
+    } else if (validateAddress(address) !== true) {
+      this.error('Invalid address', { exit: ExitCodes.InvalidInput })
+    }
+
+    const summary = await this.getApi().getAccountSummary(address)
+
+    displayHeader('Account information')
+    const accountRows: NameValueObj[] = [{ name: 'Address:', value: address }]
+    if (this.isKeyAvailable(address)) {
+      const pair = this.getPair(address)
+      accountRows.push({ name: 'Account name', value: pair.meta.name })
+      accountRows.push({ name: 'Type', value: pair.type })
+      const creationDate = pair.meta.whenCreated
+        ? moment(pair.meta.whenCreated as string | number).format('YYYY-MM-DD HH:mm:ss')
+        : null
+      if (creationDate) {
+        accountRows.push({ name: 'Creation date', value: creationDate })
+      }
+    }
+    displayNameValueTable(accountRows)
+
+    displayHeader('Balances')
+    const balances = summary.balances
+    const balancesRows: NameValueObj[] = [
+      { name: 'Total balance:', value: formatBalance(balances.votingBalance) },
+      { name: 'Transferable balance:', value: formatBalance(balances.availableBalance) },
+    ]
+    if (balances.lockedBalance.gtn(0)) {
+      balancesRows.push({ name: 'Locked balance:', value: formatBalance(balances.lockedBalance) })
+    }
+    if (balances.reservedBalance.gtn(0)) {
+      balancesRows.push({ name: 'Reserved balance:', value: formatBalance(balances.reservedBalance) })
+    }
+    displayNameValueTable(balancesRows)
+  }
+}

+ 26 - 0
cli/src/commands/account/list.ts

@@ -0,0 +1,26 @@
+import AccountsCommandBase from '../../base/AccountsCommandBase'
+import { displayTable } from '../../helpers/display'
+import { formatBalance } from '@polkadot/util'
+
+export default class AccountList extends AccountsCommandBase {
+  static description = 'List all available accounts'
+
+  async run(): Promise<void> {
+    const pairs = this.getPairs()
+    const balances = await this.getApi().getAccountsBalancesInfo(pairs.map((p) => p.address))
+
+    if (pairs.length) {
+      displayTable(
+        pairs.map((p, i) => ({
+          'Name': p.meta.name,
+          'Address': p.address,
+          'Available balance': formatBalance(balances[i].availableBalance),
+          'Total balance': formatBalance(balances[i].votingBalance),
+        })),
+        3
+      )
+    } else {
+      this.log('No accounts available!')
+    }
+  }
+}

+ 34 - 51
cli/src/commands/account/transferTokens.ts

@@ -1,67 +1,50 @@
+import { flags } from '@oclif/command'
 import BN from 'bn.js'
 import AccountsCommandBase from '../../base/AccountsCommandBase'
-import chalk from 'chalk'
 import ExitCodes from '../../ExitCodes'
-import { formatBalance } from '@polkadot/util'
-import { Hash } from '@polkadot/types/interfaces'
-import { NamedKeyringPair } from '../../Types'
-import { checkBalance, validateAddress } from '../../helpers/validation'
-
-type AccountTransferArgs = {
-  recipient: string
-  amount: string
-}
+import { checkBalance, isValidBalance, validateAddress } from '../../helpers/validation'
 
 export default class AccountTransferTokens extends AccountsCommandBase {
-  static description = 'Transfer tokens from currently choosen account'
-
-  static args = [
-    {
-      name: 'recipient',
-      required: true,
-      description: 'Address of the transfer recipient',
-    },
-    {
-      name: 'amount',
+  static description = 'Transfer tokens from any of the available accounts'
+
+  static flags = {
+    from: flags.string({
+      required: false,
+      description: 'Address of the sender (can also be provided interactively)',
+    }),
+    to: flags.string({
+      required: false,
+      description: 'Address of the recipient (can also be provided interactively)',
+    }),
+    amount: flags.string({
       required: true,
       description: 'Amount of tokens to transfer',
-    },
-  ]
+    }),
+  }
 
-  async run() {
-    const args: AccountTransferArgs = this.parse(AccountTransferTokens).args as AccountTransferArgs
-    const selectedAccount: NamedKeyringPair = await this.getRequiredSelectedAccount()
-    const amountBN: BN = new BN(args.amount)
+  async run(): Promise<void> {
+    let { from, to, amount } = this.parse(AccountTransferTokens).flags
 
-    // Initial validation
-    validateAddress(args.recipient, 'Invalid recipient address')
-    const accBalances = (await this.getApi().getAccountsBalancesInfo([selectedAccount.address]))[0]
-    checkBalance(accBalances, amountBN)
-
-    await this.requestAccountDecoding(selectedAccount)
+    if (!isValidBalance(amount)) {
+      this.error('Invalid transfer amount', { exit: ExitCodes.InvalidInput })
+    }
 
-    this.log(chalk.magentaBright('Estimating fee...'))
-    const tx = await this.getApi().createTransferTx(args.recipient, amountBN)
-    let estimatedFee: BN
-    try {
-      estimatedFee = await this.getApi().estimateFee(selectedAccount, tx)
-    } catch (e) {
-      this.error('Could not estimate the fee.', { exit: ExitCodes.UnexpectedException })
+    // Initial validation
+    if (!from) {
+      from = await this.promptForAccount('Select sender account')
+    } else if (!this.isKeyAvailable(from)) {
+      this.error('Sender key not available', { exit: ExitCodes.InvalidInput })
     }
-    const totalAmount: BN = amountBN.add(estimatedFee)
-    this.log(chalk.magentaBright('Estimated fee:', formatBalance(estimatedFee)))
-    this.log(chalk.magentaBright('Total transfer amount:', formatBalance(totalAmount)))
 
-    checkBalance(accBalances, totalAmount)
+    if (!to) {
+      to = await this.promptForAnyAddress('Select recipient')
+    } else if (validateAddress(to) !== true) {
+      this.error('Invalid recipient address', { exit: ExitCodes.InvalidInput })
+    }
 
-    await this.requireConfirmation('Do you confirm the transfer?')
+    const accBalances = (await this.getApi().getAccountsBalancesInfo([from]))[0]
+    checkBalance(accBalances, new BN(amount))
 
-    try {
-      const txHash: Hash = await tx.signAndSend(selectedAccount)
-      this.log(chalk.greenBright('Transaction successfully sent!'))
-      this.log(chalk.magentaBright('Hash:', txHash.toString()))
-    } catch (e) {
-      this.error('Could not send the transaction.', { exit: ExitCodes.UnexpectedException })
-    }
+    await this.sendAndFollowNamedTx(await this.getDecodedPair(from), 'balances', 'transferKeepAlive', [to, amount])
   }
 }

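For context, a minimal sketch (not part of this change) of the kind of pre-flight check that checkBalance() stands for before balances.transferKeepAlive is submitted; the balances below are illustrative:

import BN from 'bn.js'
import { formatBalance } from '@polkadot/util'

const availableBalance = new BN('1000000000000') // sender's transferable balance
const amount = new BN('250000000000') // requested transfer amount

if (availableBalance.lt(amount)) {
  // Fail early instead of submitting an extrinsic that cannot succeed
  throw new Error(`Insufficient balance: ${formatBalance(availableBalance)} < ${formatBalance(amount)}`)
}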
+ 11 - 0
cli/src/commands/api/getQueryNodeEndpoint.ts

@@ -0,0 +1,11 @@
+import StateAwareCommandBase from '../../base/StateAwareCommandBase'
+import chalk from 'chalk'
+
+export default class ApiGetQueryNodeEndpoint extends StateAwareCommandBase {
+  static description = 'Get current query node endpoint'
+
+  async run(): Promise<void> {
+    const currentEndpoint: string | null | undefined = this.getPreservedState().queryNodeUri
+    this.log(chalk.green(JSON.stringify(currentEndpoint)))
+  }
+}

+ 9 - 10
cli/src/commands/api/inspect.ts

@@ -1,12 +1,11 @@
 import { flags } from '@oclif/command'
 import { CLIError } from '@oclif/errors'
 import { displayNameValueTable } from '../../helpers/display'
-import { ApiPromise } from '@polkadot/api'
 import { Codec } from '@polkadot/types/types'
-import { ConstantCodec } from '@polkadot/metadata/decorate/types'
+import { ConstantCodec } from '@polkadot/types/metadata/decorate/types'
 import ExitCodes from '../../ExitCodes'
 import chalk from 'chalk'
-import { NameValueObj, ApiMethodArg } from '../../Types'
+import { NameValueObj, ApiMethodArg, UnaugmentedApiPromise } from '../../Types'
 import ApiCommandBase from '../../base/ApiCommandBase'
 
 // Command flags type
@@ -78,21 +77,21 @@ export default class ApiInspect extends ApiCommandBase {
 
   getMethodMeta(apiType: ApiType, apiModule: string, apiMethod: string) {
     if (apiType === 'query') {
-      return this.getOriginalApi().query[apiModule][apiMethod].creator.meta
+      return this.getUnaugmentedApi().query[apiModule][apiMethod].creator.meta
     } else {
       // Currently the only other option is api.consts
-      const method: ConstantCodec = this.getOriginalApi().consts[apiModule][apiMethod] as ConstantCodec
+      const method = this.getUnaugmentedApi().consts[apiModule][apiMethod] as ConstantCodec
       return method.meta
     }
   }
 
   getMethodDescription(apiType: ApiType, apiModule: string, apiMethod: string): string {
-    const description: string = this.getMethodMeta(apiType, apiModule, apiMethod).documentation.join(' ')
+    const description: string = this.getMethodMeta(apiType, apiModule, apiMethod).docs.join(' ')
     return description || 'No description available.'
   }
 
   getQueryMethodParamsTypes(apiModule: string, apiMethod: string): string[] {
-    const method = this.getOriginalApi().query[apiModule][apiMethod]
+    const method = this.getUnaugmentedApi().query[apiModule][apiMethod]
     const { type } = method.creator.meta
     if (type.isDoubleMap) {
       return [type.asDoubleMap.key1.toString(), type.asDoubleMap.key2.toString()]
@@ -105,7 +104,7 @@ export default class ApiInspect extends ApiCommandBase {
 
   getMethodReturnType(apiType: ApiType, apiModule: string, apiMethod: string): string {
     if (apiType === 'query') {
-      const method = this.getOriginalApi().query[apiModule][apiMethod]
+      const method = this.getUnaugmentedApi().query[apiModule][apiMethod]
       const {
         meta: { type, modifier },
       } = method.creator
@@ -126,7 +125,7 @@ export default class ApiInspect extends ApiCommandBase {
   // Validate the flags - throws an error if flags.type, flags.module or flags.method is invalid / does not exist in the api.
   // Returns type, module and method which validity we can be sure about (notice they may still be "undefined" if weren't provided).
   validateFlags(
-    api: ApiPromise,
+    api: UnaugmentedApiPromise,
     flags: ApiInspectFlags
   ): { apiType: ApiType | undefined; apiModule: string | undefined; apiMethod: string | undefined } {
     let apiType: ApiType | undefined
@@ -164,7 +163,7 @@ export default class ApiInspect extends ApiCommandBase {
   }
 
   async run() {
-    const api: ApiPromise = this.getOriginalApi()
+    const api: UnaugmentedApiPromise = this.getUnaugmentedApi()
     const flags: ApiInspectFlags = this.parse(ApiInspect).flags as ApiInspectFlags
     const availableTypes: readonly string[] = TYPES_AVAILABLE
     const { apiType, apiModule, apiMethod } = this.validateFlags(api, flags)

+ 36 - 0
cli/src/commands/api/setQueryNodeEndpoint.ts

@@ -0,0 +1,36 @@
+import chalk from 'chalk'
+import ApiCommandBase from '../../base/ApiCommandBase'
+import ExitCodes from '../../ExitCodes'
+
+type ApiSetQueryNodeEndpointArgs = { endpoint: string }
+
+export default class ApiSetQueryNodeEndpoint extends ApiCommandBase {
+  protected requiresApiConnection = false
+
+  static description = 'Set query node endpoint'
+  static args = [
+    {
+      name: 'endpoint',
+      required: false,
+      description: 'Query node endpoint for the CLI to use',
+    },
+  ]
+
+  async run(): Promise<void> {
+    const { endpoint }: ApiSetQueryNodeEndpointArgs = this.parse(ApiSetQueryNodeEndpoint)
+      .args as ApiSetQueryNodeEndpointArgs
+    let newEndpoint: string | null = null
+    if (endpoint) {
+      if (!this.isQueryNodeUriValid(endpoint)) {
+        this.error('Provided endpoint seems to be incorrect!', { exit: ExitCodes.InvalidInput })
+      }
+      newEndpoint = endpoint
+    } else {
+      newEndpoint = await this.promptForQueryNodeUri()
+    }
+    await this.setPreservedState({ queryNodeUri: newEndpoint })
+    this.log(
+      chalk.greenBright('Query node endpoint successfully changed! New endpoint: ') + chalk.magentaBright(newEndpoint)
+    )
+  }
+}

+ 3 - 6
cli/src/commands/api/setUri.ts

@@ -5,6 +5,8 @@ import ExitCodes from '../../ExitCodes'
 type ApiSetUriArgs = { uri: string }
 
 export default class ApiSetUri extends ApiCommandBase {
+  protected requiresApiConnection = false
+
   static description = 'Set api WS provider uri'
   static args = [
     {
@@ -14,12 +16,7 @@ export default class ApiSetUri extends ApiCommandBase {
     },
   ]
 
-  async init() {
-    // Pass "skipConnection" arg to prevent command from exiting if current api uri is invalid
-    await super.init(true)
-  }
-
-  async run() {
+  async run(): Promise<void> {
     const args: ApiSetUriArgs = this.parse(ApiSetUri).args as ApiSetUriArgs
     let newUri = ''
     if (args.uri) {

+ 6 - 5
cli/src/commands/content/addCuratorToGroup.ts

@@ -16,9 +16,8 @@ export default class AddCuratorToGroupCommand extends ContentDirectoryCommandBas
     },
   ]
 
-  async run() {
-    const account = await this.getRequiredSelectedAccount()
-    await this.requireLead()
+  async run(): Promise<void> {
+    const lead = await this.getRequiredLeadContext()
 
     let { groupId, curatorId } = this.parse(AddCuratorToGroupCommand).args
 
@@ -34,8 +33,10 @@ export default class AddCuratorToGroupCommand extends ContentDirectoryCommandBas
       await this.getCurator(curatorId)
     }
 
-    await this.requestAccountDecoding(account)
-    await this.sendAndFollowNamedTx(account, 'content', 'addCuratorToGroup', [groupId, curatorId])
+    await this.sendAndFollowNamedTx(await this.getDecodedPair(lead.roleAccount), 'content', 'addCuratorToGroup', [
+      groupId,
+      curatorId,
+    ])
 
     console.log(
       chalk.green(

+ 7 - 13
cli/src/commands/content/channel.ts

@@ -11,7 +11,7 @@ export default class ChannelCommand extends ContentDirectoryCommandBase {
     },
   ]
 
-  async run() {
+  async run(): Promise<void> {
     const { channelId } = this.parse(ChannelCommand).args
     const channel = await this.getApi().channelById(channelId)
     if (channel) {
@@ -19,24 +19,18 @@ export default class ChannelCommand extends ContentDirectoryCommandBase {
         'ID': channelId.toString(),
         'Owner': JSON.stringify(channel.owner.toJSON()),
         'IsCensored': channel.is_censored.toString(),
-        'RewardAccount': channel.reward_account ? channel.reward_account.toString() : 'NONE',
+        'RewardAccount': channel.reward_account.unwrapOr('NONE').toString(),
       })
 
       displayHeader(`Media`)
-
       displayCollapsedRow({
-        'NumberOfVideos': channel.videos.length,
-        'NumberOfPlaylists': channel.playlists.length,
-        'NumberOfSeries': channel.series.length,
+        'NumberOfVideos': channel.num_videos.toNumber(),
       })
 
-      displayHeader(`MediaData`)
-
-      displayCollapsedRow({
-        'Videos': JSON.stringify(channel.videos.toJSON()),
-        'Playlists': JSON.stringify(channel.playlists.toJSON()),
-        'Series': JSON.stringify(channel.series.toJSON()),
-      })
+      displayHeader(`Collaborators`)
+      const collaboratorIds = Array.from(channel.collaborators)
+      const collaborators = await this.getApi().getMembers(collaboratorIds)
+      this.log(collaborators.map((c, i) => `${collaboratorIds[i].toString()} (${c.handle.toString()})`).join(', '))
     } else {
       this.error(`Channel not found by channel id: "${channelId}"!`)
     }

+ 4 - 3
cli/src/commands/content/channels.ts

@@ -1,11 +1,11 @@
 import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
 // import chalk from 'chalk'
-import { displayTable } from '../../helpers/display'
+import { displayTable, shortAddress } from '../../helpers/display'
 
 export default class ChannelsCommand extends ContentDirectoryCommandBase {
   static description = 'List existing content directory channels.'
 
-  async run() {
+  async run(): Promise<void> {
     const channels = await this.getApi().availableChannels()
 
     if (channels.length > 0) {
@@ -14,7 +14,8 @@ export default class ChannelsCommand extends ContentDirectoryCommandBase {
           'ID': id.toString(),
           'Owner': JSON.stringify(c.owner.toJSON()),
           'IsCensored': c.is_censored.toString(),
-          'RewardAccount': c.reward_account ? c.reward_account.toString() : 'NONE',
+          'RewardAccount': c.reward_account ? shortAddress(c.reward_account.toString()) : 'NONE',
+          'Collaborators': c.collaborators.size,
         })),
         3
       )

+ 49 - 32
cli/src/commands/content/createChannel.ts

@@ -1,18 +1,19 @@
 import { getInputJson } from '../../helpers/InputOutput'
 import { ChannelInputParameters } from '../../Types'
-import { metadataToBytes, channelMetadataFromInput } from '../../helpers/serialization'
+import { asValidatedMetadata, metadataToBytes } from '../../helpers/serialization'
 import { flags } from '@oclif/command'
-import { CreateInterface } from '@joystream/types'
+import { createType } from '@joystream/types'
 import { ChannelCreationParameters } from '@joystream/types/content'
-import { ChannelInputSchema } from '../../json-schemas/ContentDirectory'
+import { ChannelInputSchema } from '../../schemas/ContentDirectory'
 import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
 import UploadCommandBase from '../../base/UploadCommandBase'
 import chalk from 'chalk'
+import { ChannelMetadata } from '@joystream/metadata-protobuf'
 
 export default class CreateChannelCommand extends UploadCommandBase {
   static description = 'Create channel inside content directory.'
   static flags = {
-    context: ContentDirectoryCommandBase.ownerContextFlag,
+    context: ContentDirectoryCommandBase.channelCreationContextFlag,
     input: flags.string({
       char: 'i',
       required: true,
@@ -20,51 +21,67 @@ export default class CreateChannelCommand extends UploadCommandBase {
     }),
   }
 
-  async run() {
+  async run(): Promise<void> {
     let { context, input } = this.parse(CreateChannelCommand).flags
 
     // Context
     if (!context) {
-      context = await this.promptForOwnerContext()
+      context = await this.promptForChannelCreationContext()
     }
-    const account = await this.getRequiredSelectedAccount()
-    const actor = await this.getActor(context)
-    await this.requestAccountDecoding(account)
+    const [actor, address] = await this.getContentActor(context)
+    const [memberId] = await this.getRequiredMemberContext(true)
+    const keypair = await this.getDecodedPair(address)
 
     const channelInput = await getInputJson<ChannelInputParameters>(input, ChannelInputSchema)
+    const meta = asValidatedMetadata(ChannelMetadata, channelInput)
 
-    const meta = channelMetadataFromInput(channelInput)
-    const { coverPhotoPath, avatarPhotoPath } = channelInput
-    const assetsPaths = [coverPhotoPath, avatarPhotoPath].filter((v) => v !== undefined) as string[]
-    const inputAssets = await this.prepareInputAssets(assetsPaths, input)
-    const assets = inputAssets.map(({ parameters }) => ({ Upload: parameters }))
-    // Set assets indexes in the metadata
-    if (coverPhotoPath) {
-      meta.setCoverPhoto(0)
-    }
-    if (avatarPhotoPath) {
-      meta.setAvatarPhoto(coverPhotoPath ? 1 : 0)
+    if (channelInput.collaborators) {
+      await this.validateCollaborators(channelInput.collaborators)
     }
 
-    const channelCreationParameters: CreateInterface<ChannelCreationParameters> = {
-      assets,
-      meta: metadataToBytes(meta),
-      reward_account: channelInput.rewardAccount,
-    }
+    const { coverPhotoPath, avatarPhotoPath } = channelInput
+    const [resolvedAssets, assetIndices] = await this.resolveAndValidateAssets(
+      { coverPhotoPath, avatarPhotoPath },
+      input
+    )
+    meta.coverPhoto = assetIndices.coverPhotoPath
+    meta.avatarPhoto = assetIndices.avatarPhotoPath
 
-    this.jsonPrettyPrint(JSON.stringify({ assets, metadata: meta.toObject() }))
+    // Prepare and send the extrinsic
+    const assets = await this.prepareAssetsForExtrinsic(resolvedAssets)
+    const channelCreationParameters = createType<ChannelCreationParameters, 'ChannelCreationParameters'>(
+      'ChannelCreationParameters',
+      {
+        assets,
+        meta: metadataToBytes(ChannelMetadata, meta),
+        collaborators: channelInput.collaborators,
+        reward_account: channelInput.rewardAccount,
+      }
+    )
+
+    this.jsonPrettyPrint(JSON.stringify({ assets: assets?.toJSON(), metadata: meta }))
 
     await this.requireConfirmation('Do you confirm the provided input?', true)
 
-    const result = await this.sendAndFollowNamedTx(account, 'content', 'createChannel', [
+    const result = await this.sendAndFollowNamedTx(keypair, 'content', 'createChannel', [
       actor,
       channelCreationParameters,
     ])
-    if (result) {
-      const event = this.findEvent(result, 'content', 'ChannelCreated')
-      this.log(chalk.green(`Channel with id ${chalk.cyanBright(event?.data[1].toString())} successfully created!`))
-    }
 
-    await this.uploadAssets(inputAssets, input)
+    const channelCreatedEvent = this.findEvent(result, 'content', 'ChannelCreated')
+    const channelId = channelCreatedEvent!.data[1]
+    this.log(chalk.green(`Channel with id ${chalk.cyanBright(channelId.toString())} successfully created!`))
+
+    const dataObjectsUploadedEvent = this.findEvent(result, 'storage', 'DataObjectsUploaded')
+    if (dataObjectsUploadedEvent) {
+      const [objectIds] = dataObjectsUploadedEvent.data
+      await this.uploadAssets(
+        keypair,
+        memberId.toNumber(),
+        `dynamic:channel:${channelId.toString()}`,
+        objectIds.map((id, index) => ({ dataObjectId: id, path: resolvedAssets[index].path })),
+        input
+      )
+    }
   }
 }

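For context, a minimal sketch (not part of this change) of the index bookkeeping the new asset flow relies on: each provided path gets a position in the resolved-assets array, and that position is written into the protobuf metadata. The values and the element shape of resolvedAssets are illustrative:

// Input with only a cover photo provided
const channelInput = { coverPhotoPath: './cover.png', avatarPhotoPath: undefined }

// How resolveAndValidateAssets() is used above: an ordered list of resolved assets
// plus a map from input field to its index in that list (undefined when not provided)
const resolvedAssets = [{ path: './cover.png' }]
const assetIndices = { coverPhotoPath: 0, avatarPhotoPath: undefined }

// The indices end up in the on-chain metadata, matching the order of `assets`
const meta = { title: 'My channel', coverPhoto: assetIndices.coverPhotoPath, avatarPhoto: assetIndices.avatarPhotoPath }
console.log(meta)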
+ 13 - 14
cli/src/commands/content/createChannelCategory.ts

@@ -1,12 +1,13 @@
 import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
 import { getInputJson } from '../../helpers/InputOutput'
 import { ChannelCategoryInputParameters } from '../../Types'
-import { channelCategoryMetadataFromInput, metadataToBytes } from '../../helpers/serialization'
+import { asValidatedMetadata, metadataToBytes } from '../../helpers/serialization'
 import { flags } from '@oclif/command'
 import { CreateInterface } from '@joystream/types'
 import { ChannelCategoryCreationParameters } from '@joystream/types/content'
-import { ChannelCategoryInputSchema } from '../../json-schemas/ContentDirectory'
+import { ChannelCategoryInputSchema } from '../../schemas/ContentDirectory'
 import chalk from 'chalk'
+import { ChannelCategoryMetadata } from '@joystream/metadata-protobuf'
 
 export default class CreateChannelCategoryCommand extends ContentDirectoryCommandBase {
   static description = 'Create channel category inside content directory.'
@@ -19,30 +20,28 @@ export default class CreateChannelCategoryCommand extends ContentDirectoryComman
     }),
   }
 
-  async run() {
+  async run(): Promise<void> {
     const { context, input } = this.parse(CreateChannelCategoryCommand).flags
 
-    const currentAccount = await this.getRequiredSelectedAccount()
-    await this.requestAccountDecoding(currentAccount)
-
-    const actor = context ? await this.getActor(context) : await this.getCategoryManagementActor()
+    const [actor, address] = context ? await this.getContentActor(context) : await this.getCategoryManagementActor()
 
     const channelCategoryInput = await getInputJson<ChannelCategoryInputParameters>(input, ChannelCategoryInputSchema)
-
-    const meta = channelCategoryMetadataFromInput(channelCategoryInput)
+    const meta = asValidatedMetadata(ChannelCategoryMetadata, channelCategoryInput)
 
     const channelCategoryCreationParameters: CreateInterface<ChannelCategoryCreationParameters> = {
-      meta: metadataToBytes(meta),
+      meta: metadataToBytes(ChannelCategoryMetadata, meta),
     }
 
     this.jsonPrettyPrint(JSON.stringify(channelCategoryInput))
 
     await this.requireConfirmation('Do you confirm the provided input?', true)
 
-    const result = await this.sendAndFollowNamedTx(currentAccount, 'content', 'createChannelCategory', [
-      actor,
-      channelCategoryCreationParameters,
-    ])
+    const result = await this.sendAndFollowNamedTx(
+      await this.getDecodedPair(address),
+      'content',
+      'createChannelCategory',
+      [actor, channelCategoryCreationParameters]
+    )
 
     if (result) {
       const event = this.findEvent(result, 'content', 'ChannelCategoryCreated')

+ 3 - 5
cli/src/commands/content/createCuratorGroup.ts

@@ -5,12 +5,10 @@ export default class CreateCuratorGroupCommand extends ContentDirectoryCommandBa
   static description = 'Create new Curator Group.'
   static aliases = ['createCuratorGroup']
 
-  async run() {
-    const account = await this.getRequiredSelectedAccount()
-    await this.requireLead()
+  async run(): Promise<void> {
+    const lead = await this.getRequiredLeadContext()
 
-    await this.requestAccountDecoding(account)
-    await this.buildAndSendExtrinsic(account, 'content', 'createCuratorGroup')
+    await this.buildAndSendExtrinsic(await this.getDecodedPair(lead.roleAccount), 'content', 'createCuratorGroup')
 
     const newGroupId = (await this.getApi().nextCuratorGroupId()) - 1
     console.log(chalk.green(`New group successfully created! (ID: ${chalk.magentaBright(newGroupId)})`))

+ 56 - 47
cli/src/commands/content/createVideo.ts

@@ -1,13 +1,14 @@
 import UploadCommandBase from '../../base/UploadCommandBase'
 import { getInputJson } from '../../helpers/InputOutput'
-import { videoMetadataFromInput, metadataToBytes } from '../../helpers/serialization'
+import { asValidatedMetadata, metadataToBytes } from '../../helpers/serialization'
 import { VideoInputParameters, VideoFileMetadata } from '../../Types'
-import { CreateInterface } from '@joystream/types'
+import { createTypeFromConstructor } from '@joystream/types'
 import { flags } from '@oclif/command'
 import { VideoCreationParameters } from '@joystream/types/content'
-import { MediaType, VideoMetadata } from '@joystream/content-metadata-protobuf'
-import { VideoInputSchema } from '../../json-schemas/ContentDirectory'
+import { IVideoMetadata, VideoMetadata } from '@joystream/metadata-protobuf'
+import { VideoInputSchema } from '../../schemas/ContentDirectory'
 import chalk from 'chalk'
+import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
 
 export default class CreateVideoCommand extends UploadCommandBase {
   static description = 'Create video under specific channel inside content directory.'
@@ -22,74 +23,82 @@ export default class CreateVideoCommand extends UploadCommandBase {
       required: true,
       description: 'ID of the Channel',
     }),
+    context: ContentDirectoryCommandBase.channelManagementContextFlag,
   }
 
-  setVideoMetadataDefaults(metadata: VideoMetadata, videoFileMetadata: VideoFileMetadata) {
-    const metaObj = metadata.toObject()
-    metadata.setDuration((metaObj.duration || videoFileMetadata.duration) as number)
-    metadata.setMediaPixelWidth((metaObj.mediaPixelWidth || videoFileMetadata.width) as number)
-    metadata.setMediaPixelHeight((metaObj.mediaPixelHeight || videoFileMetadata.height) as number)
-
-    const fileMediaType = new MediaType()
-    fileMediaType.setCodecName(videoFileMetadata.codecName as string)
-    fileMediaType.setContainer(videoFileMetadata.container)
-    fileMediaType.setMimeMediaType(videoFileMetadata.mimeType)
-    metadata.setMediaType(metadata.getMediaType() || fileMediaType)
+  setVideoMetadataDefaults(metadata: IVideoMetadata, videoFileMetadata: VideoFileMetadata): IVideoMetadata {
+    return {
+      duration: videoFileMetadata.duration,
+      mediaPixelWidth: videoFileMetadata.width,
+      mediaPixelHeight: videoFileMetadata.height,
+      mediaType: {
+        codecName: videoFileMetadata.codecName,
+        container: videoFileMetadata.container,
+        mimeMediaType: videoFileMetadata.mimeType,
+      },
+      ...metadata,
+    }
   }
 
-  async run() {
-    const { input, channelId } = this.parse(CreateVideoCommand).flags
+  async run(): Promise<void> {
+    const { input, channelId, context } = this.parse(CreateVideoCommand).flags
 
     // Get context
-    const account = await this.getRequiredSelectedAccount()
     const channel = await this.getApi().channelById(channelId)
-    const actor = await this.getChannelOwnerActor(channel)
-    await this.requestAccountDecoding(account)
+    const [actor, address] = await this.getChannelManagementActor(channel, context)
+    const [memberId] = await this.getRequiredMemberContext(true)
+    const keypair = await this.getDecodedPair(address)
 
     // Get input from file
     const videoCreationParametersInput = await getInputJson<VideoInputParameters>(input, VideoInputSchema)
-
-    const meta = videoMetadataFromInput(videoCreationParametersInput)
+    let meta = asValidatedMetadata(VideoMetadata, videoCreationParametersInput)
 
     // Assets
     const { videoPath, thumbnailPhotoPath } = videoCreationParametersInput
-    const assetsPaths = [videoPath, thumbnailPhotoPath].filter((a) => a !== undefined) as string[]
-    const inputAssets = await this.prepareInputAssets(assetsPaths, input)
-    const assets = inputAssets.map(({ parameters }) => ({ Upload: parameters }))
-    // Set assets indexes in the metadata
-    if (videoPath) {
-      meta.setVideo(0)
-    }
-    if (thumbnailPhotoPath) {
-      meta.setThumbnailPhoto(videoPath ? 1 : 0)
-    }
+    const [resolvedAssets, assetIndices] = await this.resolveAndValidateAssets({ videoPath, thumbnailPhotoPath }, input)
+    // Set assets indices in the metadata
+    meta.video = assetIndices.videoPath
+    meta.thumbnailPhoto = assetIndices.thumbnailPhotoPath
 
     // Try to get video file metadata
-    const videoFileMetadata = await this.getVideoFileMetadata(inputAssets[0].path)
-    this.log('Video media file parameters established:', videoFileMetadata)
-    this.setVideoMetadataDefaults(meta, videoFileMetadata)
+    if (assetIndices.videoPath !== undefined) {
+      const videoFileMetadata = await this.getVideoFileMetadata(resolvedAssets[assetIndices.videoPath].path)
+      this.log('Video media file parameters established:', videoFileMetadata)
+      meta = this.setVideoMetadataDefaults(meta, videoFileMetadata)
+    }
 
-    // Create final extrinsic params and send the extrinsic
-    const videoCreationParameters: CreateInterface<VideoCreationParameters> = {
+    // Prepare and send the extrinsic
+    const assets = await this.prepareAssetsForExtrinsic(resolvedAssets)
+    const videoCreationParameters = createTypeFromConstructor(VideoCreationParameters, {
       assets,
-      meta: metadataToBytes(meta),
-    }
+      meta: metadataToBytes(VideoMetadata, meta),
+    })
 
-    this.jsonPrettyPrint(JSON.stringify({ assets, metadata: meta.toObject() }))
+    this.jsonPrettyPrint(JSON.stringify({ assets: assets?.toJSON(), metadata: meta }))
 
     await this.requireConfirmation('Do you confirm the provided input?', true)
 
-    const result = await this.sendAndFollowNamedTx(account, 'content', 'createVideo', [
+    const result = await this.sendAndFollowNamedTx(keypair, 'content', 'createVideo', [
       actor,
       channelId,
       videoCreationParameters,
     ])
-    if (result) {
-      const event = this.findEvent(result, 'content', 'VideoCreated')
-      this.log(chalk.green(`Video with id ${chalk.cyanBright(event?.data[2].toString())} successfully created!`))
-    }
 
-    // Upload assets
-    await this.uploadAssets(inputAssets, input)
+    const videoCreatedEvent = this.findEvent(result, 'content', 'VideoCreated')
+    this.log(
+      chalk.green(`Video with id ${chalk.cyanBright(videoCreatedEvent?.data[2].toString())} successfully created!`)
+    )
+
+    const dataObjectsUploadedEvent = this.findEvent(result, 'storage', 'DataObjectsUploaded')
+    if (dataObjectsUploadedEvent) {
+      const [objectIds] = dataObjectsUploadedEvent.data
+      await this.uploadAssets(
+        keypair,
+        memberId.toNumber(),
+        `dynamic:channel:${channelId.toString()}`,
+        objectIds.map((id, index) => ({ dataObjectId: id, path: resolvedAssets[index].path })),
+        input
+      )
+    }
   }
 }

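For context, a small sketch (not part of this change) of why the spread order in the new setVideoMetadataDefaults() lets explicitly provided metadata override values probed from the video file; the numbers are illustrative:

// Defaults derived from the media file
const fromFile = { duration: 120, mediaPixelWidth: 1920, mediaPixelHeight: 1080 }
// Metadata explicitly supplied in the input JSON
const fromInput = { duration: 125 }

// `...metadata` is spread last in setVideoMetadataDefaults(), so explicit values win
const merged = { ...fromFile, ...fromInput }
console.log(merged) // => { duration: 125, mediaPixelWidth: 1920, mediaPixelHeight: 1080 }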
+ 13 - 14
cli/src/commands/content/createVideoCategory.ts

@@ -1,12 +1,13 @@
 import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
 import { getInputJson } from '../../helpers/InputOutput'
 import { VideoCategoryInputParameters } from '../../Types'
-import { metadataToBytes, videoCategoryMetadataFromInput } from '../../helpers/serialization'
+import { asValidatedMetadata, metadataToBytes } from '../../helpers/serialization'
 import { flags } from '@oclif/command'
 import { CreateInterface } from '@joystream/types'
 import { VideoCategoryCreationParameters } from '@joystream/types/content'
-import { VideoCategoryInputSchema } from '../../json-schemas/ContentDirectory'
+import { VideoCategoryInputSchema } from '../../schemas/ContentDirectory'
 import chalk from 'chalk'
+import { VideoCategoryMetadata } from '@joystream/metadata-protobuf'
 
 export default class CreateVideoCategoryCommand extends ContentDirectoryCommandBase {
   static description = 'Create video category inside content directory.'
@@ -19,30 +20,28 @@ export default class CreateVideoCategoryCommand extends ContentDirectoryCommandB
     }),
   }
 
-  async run() {
+  async run(): Promise<void> {
     const { context, input } = this.parse(CreateVideoCategoryCommand).flags
 
-    const currentAccount = await this.getRequiredSelectedAccount()
-    await this.requestAccountDecoding(currentAccount)
-
-    const actor = context ? await this.getActor(context) : await this.getCategoryManagementActor()
+    const [actor, address] = context ? await this.getContentActor(context) : await this.getCategoryManagementActor()
 
     const videoCategoryInput = await getInputJson<VideoCategoryInputParameters>(input, VideoCategoryInputSchema)
-
-    const meta = videoCategoryMetadataFromInput(videoCategoryInput)
+    const meta = asValidatedMetadata(VideoCategoryMetadata, videoCategoryInput)
 
     const videoCategoryCreationParameters: CreateInterface<VideoCategoryCreationParameters> = {
-      meta: metadataToBytes(meta),
+      meta: metadataToBytes(VideoCategoryMetadata, meta),
     }
 
     this.jsonPrettyPrint(JSON.stringify(videoCategoryInput))
 
     await this.requireConfirmation('Do you confirm the provided input?', true)
 
-    const result = await this.sendAndFollowNamedTx(currentAccount, 'content', 'createVideoCategory', [
-      actor,
-      videoCategoryCreationParameters,
-    ])
+    const result = await this.sendAndFollowNamedTx(
+      await this.getDecodedPair(address),
+      'content',
+      'createVideoCategory',
+      [actor, videoCategoryCreationParameters]
+    )
 
     if (result) {
       const event = this.findEvent(result, 'content', 'VideoCategoryCreated')

+ 2 - 2
cli/src/commands/content/curatorGroup.ts

@@ -13,11 +13,11 @@ export default class CuratorGroupCommand extends ContentDirectoryCommandBase {
     },
   ]
 
-  async run() {
+  async run(): Promise<void> {
     const { id } = this.parse(CuratorGroupCommand).args
     const group = await this.getCuratorGroup(id)
     const members = (await this.getApi().groupMembers(WorkingGroups.Curators)).filter((curator) =>
-      group.curators.toArray().some((groupCurator) => groupCurator.eq(curator.workerId))
+      Array.from(group.curators).some((groupCurator) => groupCurator.eq(curator.workerId))
     )
 
     displayCollapsedRow({

+ 1 - 1
cli/src/commands/content/curatorGroups.ts

@@ -13,7 +13,7 @@ export default class CuratorGroupsCommand extends ContentDirectoryCommandBase {
         groups.map(([id, group]) => ({
           'ID': id.toString(),
           'Status': group.active.valueOf() ? 'Active' : 'Inactive',
-          'Members': group.curators.toArray().length,
+          'Members': Array.from(group.curators).length,
         })),
         5
       )

+ 101 - 0
cli/src/commands/content/deleteChannel.ts

@@ -0,0 +1,101 @@
+import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
+import { flags } from '@oclif/command'
+import chalk from 'chalk'
+import { createTypeFromConstructor } from '@joystream/types'
+import { BagId } from '@joystream/types/storage'
+import ExitCodes from '../../ExitCodes'
+import { formatBalance } from '@polkadot/util'
+import BN from 'bn.js'
+
+export default class DeleteChannelCommand extends ContentDirectoryCommandBase {
+  static description = 'Delete the channel and optionally all associated data objects.'
+
+  static flags = {
+    channelId: flags.integer({
+      char: 'c',
+      required: true,
+      description: 'ID of the Channel',
+    }),
+    force: flags.boolean({
+      char: 'f',
+      default: false,
+      description: 'Force-remove all associated channel data objects',
+    }),
+  }
+
+  async getDataObjectsInfoFromQueryNode(channelId: number): Promise<[string, BN][]> {
+    const dataObjects = await this.getQNApi().dataObjectsByBagId(`dynamic:channel:${channelId}`)
+
+    if (dataObjects.length) {
+      this.log('The following data objects are still associated with the channel:')
+      dataObjects.forEach((o) => {
+        let parentStr = ''
+        if ('video' in o.type && o.type.video) {
+          parentStr = ` (video: ${o.type.video.id})`
+        }
+        this.log(`- ${o.id} - ${o.type.__typename}${parentStr}`)
+      })
+    }
+
+    return dataObjects.map((o) => [o.id, new BN(o.deletionPrize)])
+  }
+
+  async getDataObjectsInfoFromChain(channelId: number): Promise<[string, BN][]> {
+    const dataObjects = await this.getApi().dataObjectsInBag(
+      createTypeFromConstructor(BagId, { Dynamic: { Channel: channelId } })
+    )
+
+    if (dataObjects.length) {
+      const dataObjectIds = dataObjects.map(([id]) => id.toString())
+      this.log(`The following data objects are still associated with the channel: ${dataObjectIds.join(', ')}`)
+    }
+
+    return dataObjects.map(([id, o]) => [id.toString(), o.deletion_prize])
+  }
+
+  async run(): Promise<void> {
+    const {
+      flags: { channelId, force },
+    } = this.parse(DeleteChannelCommand)
+    // Context
+    const channel = await this.getApi().channelById(channelId)
+    const [actor, address] = await this.getChannelOwnerActor(channel)
+
+    if (channel.num_videos.toNumber()) {
+      this.error(
+        `This channel still has ${channel.num_videos.toNumber()} associated video(s)!\n` +
+          `Delete the videos first using ${chalk.magentaBright('content:deleteVideo')} command`
+      )
+    }
+
+    const dataObjectsInfo = this.isQueryNodeUriSet()
+      ? await this.getDataObjectsInfoFromQueryNode(channelId)
+      : await this.getDataObjectsInfoFromChain(channelId)
+
+    if (dataObjectsInfo.length) {
+      if (!force) {
+        this.error(`Cannot remove associated data objects unless ${chalk.magentaBright('--force')} flag is used`, {
+          exit: ExitCodes.InvalidInput,
+        })
+      }
+      const deletionPrize = dataObjectsInfo.reduce((sum, [, prize]) => sum.add(prize), new BN(0))
+      this.log(
+        `Data objects deletion prize of ${chalk.cyanBright(
+          formatBalance(deletionPrize)
+        )} will be transferred to ${chalk.magentaBright(address)}`
+      )
+    }
+
+    await this.requireConfirmation(
+      `Are you sure you want to remove channel ${chalk.magentaBright(channelId.toString())}${
+        force ? ' and all associated data objects' : ''
+      }?`
+    )
+
+    await this.sendAndFollowNamedTx(await this.getDecodedPair(address), 'content', 'deleteChannel', [
+      actor,
+      channelId,
+      force ? dataObjectsInfo.length : 0,
+    ])
+  }
+}

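For context, a minimal sketch (not part of this change) of the deletion-prize aggregation used above before asking for confirmation; the object ids and prize values are illustrative:

import BN from 'bn.js'
import { formatBalance } from '@polkadot/util'

// [dataObjectId, deletionPrize] pairs as returned by the query-node / chain helpers
const dataObjectsInfo: [string, BN][] = [
  ['1', new BN('10000000000')],
  ['2', new BN('10000000000')],
]

const deletionPrize = dataObjectsInfo.reduce((sum, [, prize]) => sum.add(prize), new BN(0))
console.log(`Total deletion prize: ${formatBalance(deletionPrize)}`)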
+ 6 - 6
cli/src/commands/content/deleteChannelCategory.ts

@@ -14,7 +14,7 @@ export default class DeleteChannelCategoryCommand extends ContentDirectoryComman
     },
   ]
 
-  async run() {
+  async run(): Promise<void> {
     const { context } = this.parse(DeleteChannelCategoryCommand).flags
 
     const { channelCategoryId } = this.parse(DeleteChannelCategoryCommand).args
@@ -22,12 +22,12 @@ export default class DeleteChannelCategoryCommand extends ContentDirectoryComman
     const channelCategoryIds = await this.getApi().channelCategoryIds()
 
     if (channelCategoryIds.some((id) => id.toString() === channelCategoryId)) {
-      const currentAccount = await this.getRequiredSelectedAccount()
-      await this.requestAccountDecoding(currentAccount)
+      const [actor, address] = context ? await this.getContentActor(context) : await this.getCategoryManagementActor()
 
-      const actor = context ? await this.getActor(context) : await this.getCategoryManagementActor()
-
-      await this.sendAndFollowNamedTx(currentAccount, 'content', 'deleteChannelCategory', [actor, channelCategoryId])
+      await this.sendAndFollowNamedTx(await this.getDecodedPair(address), 'content', 'deleteChannelCategory', [
+        actor,
+        channelCategoryId,
+      ])
     } else {
       this.error('Channel category under given id does not exist...')
     }

+ 80 - 0
cli/src/commands/content/deleteVideo.ts

@@ -0,0 +1,80 @@
+import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
+import { flags } from '@oclif/command'
+import BN from 'bn.js'
+import chalk from 'chalk'
+import { formatBalance } from '@polkadot/util'
+import { createType } from '@joystream/types'
+import ExitCodes from '../../ExitCodes'
+
+export default class DeleteVideoCommand extends ContentDirectoryCommandBase {
+  static description = 'Delete the video and optionally all associated data objects.'
+
+  protected requiresQueryNode = true
+
+  static flags = {
+    videoId: flags.integer({
+      char: 'v',
+      required: true,
+      description: 'ID of the Video',
+    }),
+    force: flags.boolean({
+      char: 'f',
+      default: false,
+      description: 'Force-remove all associated video data objects',
+    }),
+    context: ContentDirectoryCommandBase.channelManagementContextFlag,
+  }
+
+  async getDataObjectsInfo(videoId: number): Promise<[string, BN][]> {
+    const dataObjects = await this.getQNApi().dataObjectsByVideoId(videoId.toString())
+
+    if (dataObjects.length) {
+        this.log('The following data objects are still associated with the video:')
+      dataObjects.forEach((o) => {
+        this.log(`${o.id} - ${o.type.__typename}`)
+      })
+    }
+
+    return dataObjects.map((o) => [o.id, new BN(o.deletionPrize)])
+  }
+
+  async run(): Promise<void> {
+    const {
+      flags: { videoId, force, context },
+    } = this.parse(DeleteVideoCommand)
+    // Context
+    const video = await this.getApi().videoById(videoId)
+    const channel = await this.getApi().channelById(video.in_channel.toNumber())
+    const [actor, address] = await this.getChannelManagementActor(channel, context)
+
+    const dataObjectsInfo = await this.getDataObjectsInfo(videoId)
+    if (dataObjectsInfo.length) {
+      if (!force) {
+        this.error(`Cannot remove associated data objects unless ${chalk.magentaBright('--force')} flag is used`, {
+          exit: ExitCodes.InvalidInput,
+        })
+      }
+      const deletionPrize = dataObjectsInfo.reduce((sum, [, prize]) => sum.add(prize), new BN(0))
+      this.log(
+        `Data objects deletion prize of ${chalk.cyanBright(
+          formatBalance(deletionPrize)
+        )} will be transferred to ${chalk.magentaBright(address)}`
+      )
+    }
+
+    await this.requireConfirmation(
+      `Are you sure you want to remove video ${chalk.magentaBright(videoId)}${
+        force ? ' and all associated data objects' : ''
+      }?`
+    )
+
+    await this.sendAndFollowNamedTx(await this.getDecodedPair(address), 'content', 'deleteVideo', [
+      actor,
+      videoId,
+      createType(
+        'BTreeSet<DataObjectId>',
+        dataObjectsInfo.map(([id]) => id)
+      ),
+    ])
+  }
+}

+ 6 - 6
cli/src/commands/content/deleteVideoCategory.ts

@@ -14,7 +14,7 @@ export default class DeleteVideoCategoryCommand extends ContentDirectoryCommandB
     },
   ]
 
-  async run() {
+  async run(): Promise<void> {
     const { context } = this.parse(DeleteVideoCategoryCommand).flags
 
     const { videoCategoryId } = this.parse(DeleteVideoCategoryCommand).args
@@ -22,12 +22,12 @@ export default class DeleteVideoCategoryCommand extends ContentDirectoryCommandB
     const videoCategoryIds = await this.getApi().videoCategoryIds()
 
     if (videoCategoryIds.some((id) => id.toString() === videoCategoryId)) {
-      const currentAccount = await this.getRequiredSelectedAccount()
-      await this.requestAccountDecoding(currentAccount)
+      const [actor, address] = context ? await this.getContentActor(context) : await this.getCategoryManagementActor()
 
-      const actor = context ? await this.getActor(context) : await this.getCategoryManagementActor()
-
-      await this.sendAndFollowNamedTx(currentAccount, 'content', 'deleteVideoCategory', [actor, videoCategoryId])
+      await this.sendAndFollowNamedTx(await this.getDecodedPair(address), 'content', 'deleteVideoCategory', [
+        actor,
+        videoCategoryId,
+      ])
     } else {
       this.error('Video category under given id does not exist...')
     }

+ 40 - 0
cli/src/commands/content/removeChannelAssets.ts

@@ -0,0 +1,40 @@
+import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
+import { flags } from '@oclif/command'
+import { createType } from '@joystream/types'
+
+export default class RemoveChannelAssetsCommand extends ContentDirectoryCommandBase {
+  static description = 'Remove data objects associated with the channel or any of its videos.'
+
+  static flags = {
+    channelId: flags.integer({
+      char: 'c',
+      required: true,
+      description: 'ID of the Channel',
+    }),
+    objectId: flags.integer({
+      char: 'o',
+      required: true,
+      multiple: true,
+      description: 'ID of an object to remove',
+    }),
+    context: ContentDirectoryCommandBase.channelManagementContextFlag,
+  }
+
+  async run(): Promise<void> {
+    const {
+      flags: { channelId, objectId: objectIds, context },
+    } = this.parse(RemoveChannelAssetsCommand)
+    // Context
+    const channel = await this.getApi().channelById(channelId)
+    const [actor, address] = await this.getChannelManagementActor(channel, context)
+
+    this.jsonPrettyPrint(JSON.stringify({ channelId, assetsToRemove: objectIds }))
+    await this.requireConfirmation('Do you confirm the provided input?', true)
+
+    await this.sendAndFollowNamedTx(await this.getDecodedPair(address), 'content', 'updateChannel', [
+      actor,
+      channelId,
+      { assets_to_remove: createType('BTreeSet<DataObjectId>', objectIds) },
+    ])
+  }
+}

+ 7 - 6
cli/src/commands/content/removeCuratorFromGroup.ts

@@ -16,9 +16,8 @@ export default class RemoveCuratorFromGroupCommand extends ContentDirectoryComma
     },
   ]
 
-  async run() {
-    const account = await this.getRequiredSelectedAccount()
-    await this.requireLead()
+  async run(): Promise<void> {
+    const lead = await this.getRequiredLeadContext()
 
     let { groupId, curatorId } = this.parse(RemoveCuratorFromGroupCommand).args
 
@@ -27,7 +26,7 @@ export default class RemoveCuratorFromGroupCommand extends ContentDirectoryComma
     }
 
     const group = await this.getCuratorGroup(groupId)
-    const groupCuratorIds = group.curators.toArray().map((id) => id.toNumber())
+    const groupCuratorIds = Array.from(group.curators).map((id) => id.toNumber())
 
     if (curatorId === undefined) {
       curatorId = await this.promptForCurator('Choose a Curator to remove', groupCuratorIds)
@@ -38,8 +37,10 @@ export default class RemoveCuratorFromGroupCommand extends ContentDirectoryComma
       await this.getCurator(curatorId)
     }
 
-    await this.requestAccountDecoding(account)
-    await this.sendAndFollowNamedTx(account, 'content', 'removeCuratorFromGroup', [groupId, curatorId])
+    await this.sendAndFollowNamedTx(await this.getDecodedPair(lead.roleAccount), 'content', 'removeCuratorFromGroup', [
+      groupId,
+      curatorId,
+    ])
 
     this.log(
       chalk.green(

+ 16 - 9
cli/src/commands/content/reuploadAssets.ts

@@ -1,9 +1,9 @@
 import UploadCommandBase from '../../base/UploadCommandBase'
 import { getInputJson } from '../../helpers/InputOutput'
-import AssetsSchema from '../../json-schemas/Assets.schema.json'
-import { Assets as AssetsInput } from '../../json-schemas/typings/Assets.schema'
+import AssetsSchema from '../../schemas/json/Assets.schema.json'
+import { Assets as AssetsInput } from '../../schemas/typings/Assets.schema'
 import { flags } from '@oclif/command'
-import { ContentId } from '@joystream/types/storage'
+import BN from 'bn.js'
 
 export default class ReuploadVideoAssetsCommand extends UploadCommandBase {
   static description = 'Allows reuploading assets that were not successfully uploaded during channel/video creation'
@@ -16,21 +16,28 @@ export default class ReuploadVideoAssetsCommand extends UploadCommandBase {
     }),
   }
 
-  async run() {
+  async run(): Promise<void> {
     const { input } = this.parse(ReuploadVideoAssetsCommand).flags
 
     // Get context
-    const account = await this.getRequiredSelectedAccount()
-    await this.requestAccountDecoding(account)
+    const [memberId, membership] = await this.getRequiredMemberContext()
 
     // Get input from file
     const inputData = await getInputJson<AssetsInput>(input, AssetsSchema)
-    const inputAssets = inputData.map(({ contentId, path }) => ({
-      contentId: ContentId.decode(this.getTypesRegistry(), contentId),
+    const { bagId } = inputData
+    const inputAssets = inputData.assets.map(({ objectId, path }) => ({
+      dataObjectId: new BN(objectId),
       path,
     }))
 
     // Upload assets
-    await this.uploadAssets(inputAssets, input, '')
+    await this.uploadAssets(
+      await this.getDecodedPair(membership.controller_account),
+      memberId.toNumber(),
+      bagId,
+      inputAssets,
+      input,
+      ''
+    )
   }
 }

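For context, a minimal sketch (not part of this change) of the reworked Assets input file that content:reuploadAssets now expects: a storage bag id plus (objectId, path) pairs, replacing the old contentId-based entries. The ids and paths are illustrative:

// Shape consumed via getInputJson<AssetsInput>(input, AssetsSchema) above
const assetsInput = {
  bagId: 'dynamic:channel:1',
  assets: [
    { objectId: '0', path: './video.mp4' },
    { objectId: '1', path: './thumbnail.png' },
  ],
}
console.log(assetsInput)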
+ 6 - 5
cli/src/commands/content/setCuratorGroupStatus.ts

@@ -17,9 +17,8 @@ export default class SetCuratorGroupStatusCommand extends ContentDirectoryComman
     },
   ]
 
-  async run() {
-    const account = await this.getRequiredSelectedAccount()
-    await this.requireLead()
+  async run(): Promise<void> {
+    const lead = await this.getRequiredLeadContext()
 
     let { id, status } = this.parse(SetCuratorGroupStatusCommand).args
 
@@ -47,8 +46,10 @@ export default class SetCuratorGroupStatusCommand extends ContentDirectoryComman
       status = !!parseInt(status)
     }
 
-    await this.requestAccountDecoding(account)
-    await this.sendAndFollowNamedTx(account, 'content', 'setCuratorGroupStatus', [id, status])
+    await this.sendAndFollowNamedTx(await this.getDecodedPair(lead.roleAccount), 'content', 'setCuratorGroupStatus', [
+      id,
+      status,
+    ])
 
     console.log(
       chalk.green(

+ 3 - 6
cli/src/commands/content/setFeaturedVideos.ts

@@ -11,15 +11,12 @@ export default class SetFeaturedVideosCommand extends ContentDirectoryCommandBas
     },
   ]
 
-  async run() {
+  async run(): Promise<void> {
     const { featuredVideoIds } = this.parse(SetFeaturedVideosCommand).args
 
-    const currentAccount = await this.getRequiredSelectedAccount()
-    await this.requestAccountDecoding(currentAccount)
+    const [actor, address] = await this.getContentActor('Lead')
 
-    const actor = await this.getActor('Lead')
-
-    await this.sendAndFollowNamedTx(currentAccount, 'content', 'setFeaturedVideos', [
+    await this.sendAndFollowNamedTx(await this.getDecodedPair(address), 'content', 'setFeaturedVideos', [
       actor,
       (featuredVideoIds as string).split(','),
     ])

+ 90 - 24
cli/src/commands/content/updateChannel.ts

@@ -1,15 +1,23 @@
 import { getInputJson } from '../../helpers/InputOutput'
-import { channelMetadataFromInput, metadataToBytes } from '../../helpers/serialization'
+import { asValidatedMetadata, metadataToBytes } from '../../helpers/serialization'
 import { ChannelInputParameters } from '../../Types'
 import { flags } from '@oclif/command'
 import UploadCommandBase from '../../base/UploadCommandBase'
-import { CreateInterface } from '@joystream/types'
+import { CreateInterface, createType } from '@joystream/types'
 import { ChannelUpdateParameters } from '@joystream/types/content'
-import { ChannelInputSchema } from '../../json-schemas/ContentDirectory'
+import { ChannelInputSchema } from '../../schemas/ContentDirectory'
+import { ChannelMetadata } from '@joystream/metadata-protobuf'
+import { DataObjectInfoFragment } from '../../graphql/generated/queries'
+import BN from 'bn.js'
+import { formatBalance } from '@polkadot/util'
+import chalk from 'chalk'
+import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
+import ExitCodes from '../../ExitCodes'
 
 export default class UpdateChannelCommand extends UploadCommandBase {
   static description = 'Update existing content directory channel.'
   static flags = {
+    context: ContentDirectoryCommandBase.channelManagementContextFlag,
     input: flags.string({
       char: 'i',
       required: true,
@@ -38,50 +46,108 @@ export default class UpdateChannelCommand extends UploadCommandBase {
     }
   }
 
-  async run() {
+  async getAssetsToRemove(
+    channelId: number,
+    coverPhotoIndex: number | undefined,
+    avatarPhotoIndex: number | undefined
+  ): Promise<string[]> {
+    let assetsToRemove: DataObjectInfoFragment[] = []
+    if (coverPhotoIndex !== undefined || avatarPhotoIndex !== undefined) {
+      const currentAssets = await this.getQNApi().dataObjectsByChannelId(channelId.toString())
+      const currentCovers = currentAssets.filter((a) => a.type.__typename === 'DataObjectTypeChannelCoverPhoto')
+      const currentAvatars = currentAssets.filter((a) => a.type.__typename === 'DataObjectTypeChannelAvatar')
+      if (currentCovers.length && coverPhotoIndex !== undefined) {
+        assetsToRemove = assetsToRemove.concat(currentCovers)
+      }
+      if (currentAvatars.length && avatarPhotoIndex !== undefined) {
+        assetsToRemove = assetsToRemove.concat(currentAvatars)
+      }
+      if (assetsToRemove.length) {
+        this.log(`\nData objects to be removed due to replacement:`)
+        assetsToRemove.forEach((a) => this.log(`- ${a.id} (${a.type.__typename})`))
+        const totalPrize = assetsToRemove.reduce((sum, { deletionPrize }) => sum.add(new BN(deletionPrize)), new BN(0))
+        this.log(`Total deletion prize: ${chalk.cyanBright(formatBalance(totalPrize))}\n`)
+      }
+    }
+
+    return assetsToRemove.map((a) => a.id)
+  }
+
+  async run(): Promise<void> {
     const {
-      flags: { input },
+      flags: { input, context },
       args: { channelId },
     } = this.parse(UpdateChannelCommand)
 
     // Context
-    const currentAccount = await this.getRequiredSelectedAccount()
     const channel = await this.getApi().channelById(channelId)
-    const actor = await this.getChannelOwnerActor(channel)
-    await this.requestAccountDecoding(currentAccount)
+    const [actor, address] = await this.getChannelManagementActor(channel, context)
+    const [memberId] = await this.getRequiredMemberContext(true)
+    const keypair = await this.getDecodedPair(address)
 
     const channelInput = await getInputJson<ChannelInputParameters>(input, ChannelInputSchema)
+    const meta = asValidatedMetadata(ChannelMetadata, channelInput)
 
-    const meta = channelMetadataFromInput(channelInput)
+    if (channelInput.rewardAccount !== undefined && actor.type === 'Collaborator') {
+      this.error("Collaborators are not allowed to update channel's reward account!", { exit: ExitCodes.AccessDenied })
+    }
 
-    const { coverPhotoPath, avatarPhotoPath, rewardAccount } = channelInput
-    const inputPaths = [coverPhotoPath, avatarPhotoPath].filter((p) => p !== undefined) as string[]
-    const inputAssets = await this.prepareInputAssets(inputPaths, input)
-    const assets = inputAssets.map(({ parameters }) => ({ Upload: parameters }))
-    // Set assets indexes in the metadata
-    if (coverPhotoPath) {
-      meta.setCoverPhoto(0)
+    if (channelInput.collaborators !== undefined && actor.type === 'Collaborator') {
+      this.error("Collaborators are not allowed to update channel's collaborators!", { exit: ExitCodes.AccessDenied })
     }
-    if (avatarPhotoPath) {
-      meta.setAvatarPhoto(coverPhotoPath ? 1 : 0)
+
+    if (channelInput.collaborators) {
+      await this.validateCollaborators(channelInput.collaborators)
     }
 
+    const { coverPhotoPath, avatarPhotoPath, rewardAccount } = channelInput
+    const [resolvedAssets, assetIndices] = await this.resolveAndValidateAssets(
+      { coverPhotoPath, avatarPhotoPath },
+      input
+    )
+    // Set assets indices in the metadata
+    // "undefined" values will be omitted when the metadata is encoded. It's not possible to "unset" an asset this way.
+    meta.coverPhoto = assetIndices.coverPhotoPath
+    meta.avatarPhoto = assetIndices.avatarPhotoPath
+
+    // Prepare and send the extrinsic
+    const assetsToUpload = await this.prepareAssetsForExtrinsic(resolvedAssets)
+    const assetsToRemove = await this.getAssetsToRemove(
+      channelId,
+      assetIndices.coverPhotoPath,
+      assetIndices.avatarPhotoPath
+    )
+
+    const collaborators = createType('Option<BTreeSet<MemberId>>', channelInput.collaborators)
     const channelUpdateParameters: CreateInterface<ChannelUpdateParameters> = {
-      assets,
-      new_meta: metadataToBytes(meta),
+      assets_to_upload: assetsToUpload,
+      assets_to_remove: createType('BTreeSet<DataObjectId>', assetsToRemove),
+      new_meta: metadataToBytes(ChannelMetadata, meta),
       reward_account: this.parseRewardAccountInput(rewardAccount),
+      collaborators,
     }
 
-    this.jsonPrettyPrint(JSON.stringify({ assets, metadata: meta.toObject(), rewardAccount }))
+    this.jsonPrettyPrint(
+      JSON.stringify({ assetsToUpload: assetsToUpload?.toJSON(), assetsToRemove, metadata: meta, rewardAccount })
+    )
 
     await this.requireConfirmation('Do you confirm the provided input?', true)
 
-    await this.sendAndFollowNamedTx(currentAccount, 'content', 'updateChannel', [
+    const result = await this.sendAndFollowNamedTx(keypair, 'content', 'updateChannel', [
       actor,
       channelId,
       channelUpdateParameters,
     ])
-
-    await this.uploadAssets(inputAssets, input)
+    const dataObjectsUploadedEvent = this.findEvent(result, 'storage', 'DataObjectsUploaded')
+    if (dataObjectsUploadedEvent) {
+      const [objectIds] = dataObjectsUploadedEvent.data
+      await this.uploadAssets(
+        keypair,
+        memberId.toNumber(),
+        `dynamic:channel:${channelId.toString()}`,
+        objectIds.map((id, index) => ({ dataObjectId: id, path: resolvedAssets[index].path })),
+        input
+      )
+    }
   }
 }
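
As a rough illustration of the reworked update flow (all values below are assumptions): providing a new cover photo path causes the existing cover object to be collected by getAssetsToRemove(), while the new file is uploaded only after the DataObjectsUploaded event confirms the new object ids.

  // Partial, illustrative update input; the shape mirrors ChannelInputParameters.
  const exampleChannelUpdate = {
    coverPhotoPath: './new-cover.png', // replaces the current cover; avatarPhotoPath is omitted and therefore left untouched
    rewardAccount: '5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY', // illustrative address; rejected in the Collaborator context
    collaborators: [2, 3], // member ids (assumed numeric here); also rejected in the Collaborator context
  }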

+ 8 - 11
cli/src/commands/content/updateChannelCategory.ts

@@ -1,11 +1,12 @@
 import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
 import { getInputJson } from '../../helpers/InputOutput'
 import { ChannelCategoryInputParameters } from '../../Types'
-import { channelCategoryMetadataFromInput, metadataToBytes } from '../../helpers/serialization'
+import { asValidatedMetadata, metadataToBytes } from '../../helpers/serialization'
 import { CreateInterface } from '@joystream/types'
 import { ChannelCategoryUpdateParameters } from '@joystream/types/content'
 import { flags } from '@oclif/command'
-import { ChannelCategoryInputSchema } from '../../json-schemas/ContentDirectory'
+import { ChannelCategoryInputSchema } from '../../schemas/ContentDirectory'
+import { ChannelCategoryMetadata } from '@joystream/metadata-protobuf'
 export default class UpdateChannelCategoryCommand extends ContentDirectoryCommandBase {
   static description = 'Update channel category inside content directory.'
   static flags = {
@@ -25,29 +26,25 @@ export default class UpdateChannelCategoryCommand extends ContentDirectoryComman
     },
   ]
 
-  async run() {
+  async run(): Promise<void> {
     const { context, input } = this.parse(UpdateChannelCategoryCommand).flags
 
     const { channelCategoryId } = this.parse(UpdateChannelCategoryCommand).args
 
-    const currentAccount = await this.getRequiredSelectedAccount()
-    await this.requestAccountDecoding(currentAccount)
-
-    const actor = context ? await this.getActor(context) : await this.getCategoryManagementActor()
+    const [actor, address] = context ? await this.getContentActor(context) : await this.getCategoryManagementActor()
 
     const channelCategoryInput = await getInputJson<ChannelCategoryInputParameters>(input, ChannelCategoryInputSchema)
-
-    const meta = channelCategoryMetadataFromInput(channelCategoryInput)
+    const meta = asValidatedMetadata(ChannelCategoryMetadata, channelCategoryInput)
 
     const channelCategoryUpdateParameters: CreateInterface<ChannelCategoryUpdateParameters> = {
-      new_meta: metadataToBytes(meta),
+      new_meta: metadataToBytes(ChannelCategoryMetadata, meta),
     }
 
     this.jsonPrettyPrint(JSON.stringify(channelCategoryInput))
 
     await this.requireConfirmation('Do you confirm the provided input?', true)
 
-    await this.sendAndFollowNamedTx(currentAccount, 'content', 'updateChannelCategory', [
+    await this.sendAndFollowNamedTx(await this.getDecodedPair(address), 'content', 'updateChannelCategory', [
       actor,
       channelCategoryId,
       channelCategoryUpdateParameters,

+ 6 - 8
cli/src/commands/content/updateChannelCensorshipStatus.ts

@@ -26,18 +26,14 @@ export default class UpdateChannelCensorshipStatusCommand extends ContentDirecto
     },
   ]
 
-  async run() {
+  async run(): Promise<void> {
     let {
       args: { id, status },
       flags: { rationale },
     } = this.parse(UpdateChannelCensorshipStatusCommand)
 
-    const currentAccount = await this.getRequiredSelectedAccount()
-
     const channel = await this.getApi().channelById(id)
-    const actor = await this.getCurationActorByChannel(channel)
-
-    await this.requestAccountDecoding(currentAccount)
+    const [actor, address] = await this.getCurationActorByChannel(channel)
 
     if (status === undefined) {
       status = await this.simplePrompt({
@@ -58,10 +54,12 @@ export default class UpdateChannelCensorshipStatusCommand extends ContentDirecto
     }
 
     if (rationale === undefined) {
-      rationale = await this.simplePrompt({ message: 'Please provide the rationale for updating the status' })
+      rationale = (await this.simplePrompt({
+        message: 'Please provide the rationale for updating the status',
+      })) as string
     }
 
-    await this.sendAndFollowNamedTx(currentAccount, 'content', 'updateChannelCensorshipStatus', [
+    await this.sendAndFollowNamedTx(await this.getDecodedPair(address), 'content', 'updateChannelCensorshipStatus', [
       actor,
       id,
       status,

+ 77 - 25
cli/src/commands/content/updateVideo.ts

@@ -1,11 +1,17 @@
 import { getInputJson } from '../../helpers/InputOutput'
 import { VideoInputParameters } from '../../Types'
-import { metadataToBytes, videoMetadataFromInput } from '../../helpers/serialization'
+import { asValidatedMetadata, metadataToBytes } from '../../helpers/serialization'
 import UploadCommandBase from '../../base/UploadCommandBase'
 import { flags } from '@oclif/command'
-import { CreateInterface } from '@joystream/types'
+import { CreateInterface, createType } from '@joystream/types'
 import { VideoUpdateParameters } from '@joystream/types/content'
-import { VideoInputSchema } from '../../json-schemas/ContentDirectory'
+import { VideoInputSchema } from '../../schemas/ContentDirectory'
+import { VideoMetadata } from '@joystream/metadata-protobuf'
+import { DataObjectInfoFragment } from '../../graphql/generated/queries'
+import BN from 'bn.js'
+import { formatBalance } from '@polkadot/util'
+import chalk from 'chalk'
+import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
 
 export default class UpdateVideoCommand extends UploadCommandBase {
   static description = 'Update video under specific id.'
@@ -15,6 +21,7 @@ export default class UpdateVideoCommand extends UploadCommandBase {
       required: true,
       description: `Path to JSON file to use as input`,
     }),
+    context: ContentDirectoryCommandBase.channelManagementContextFlag,
   }
 
   static args = [
@@ -25,45 +32,90 @@ export default class UpdateVideoCommand extends UploadCommandBase {
     },
   ]
 
-  async run() {
+  async getAssetsToRemove(
+    videoId: number,
+    videoIndex: number | undefined,
+    thumbnailIndex: number | undefined
+  ): Promise<string[]> {
+    let assetsToRemove: DataObjectInfoFragment[] = []
+    if (videoIndex !== undefined || thumbnailIndex !== undefined) {
+      const currentAssets = await this.getQNApi().dataObjectsByVideoId(videoId.toString())
+      const currentThumbs = currentAssets.filter((a) => a.type.__typename === 'DataObjectTypeVideoThumbnail')
+      const currentMedias = currentAssets.filter((a) => a.type.__typename === 'DataObjectTypeVideoMedia')
+      if (currentThumbs.length && thumbnailIndex !== undefined) {
+        assetsToRemove = assetsToRemove.concat(currentThumbs)
+      }
+      if (currentMedias.length && videoIndex !== undefined) {
+        assetsToRemove = assetsToRemove.concat(currentMedias)
+      }
+      if (assetsToRemove.length) {
+        this.log(`\nData objects to be removed due to replacement:`)
+        assetsToRemove.forEach((a) => this.log(`- ${a.id} (${a.type.__typename})`))
+        const totalPrize = assetsToRemove.reduce((sum, { deletionPrize }) => sum.add(new BN(deletionPrize)), new BN(0))
+        this.log(`Total deletion prize: ${chalk.cyanBright(formatBalance(totalPrize))}\n`)
+      }
+    }
+
+    return assetsToRemove.map((a) => a.id)
+  }
+
+  async run(): Promise<void> {
     const {
-      flags: { input },
+      flags: { input, context },
       args: { videoId },
     } = this.parse(UpdateVideoCommand)
 
     // Context
-    const currentAccount = await this.getRequiredSelectedAccount()
     const video = await this.getApi().videoById(videoId)
     const channel = await this.getApi().channelById(video.in_channel.toNumber())
-    const actor = await this.getChannelOwnerActor(channel)
-    await this.requestAccountDecoding(currentAccount)
+    const [actor, address] = await this.getChannelManagementActor(channel, context)
+    const [memberId] = await this.getRequiredMemberContext(true)
+    const keypair = await this.getDecodedPair(address)
 
     const videoInput = await getInputJson<VideoInputParameters>(input, VideoInputSchema)
+    const meta = asValidatedMetadata(VideoMetadata, videoInput)
 
-    const meta = videoMetadataFromInput(videoInput)
     const { videoPath, thumbnailPhotoPath } = videoInput
-    const inputPaths = [videoPath, thumbnailPhotoPath].filter((p) => p !== undefined) as string[]
-    const inputAssets = await this.prepareInputAssets(inputPaths, input)
-    const assets = inputAssets.map(({ parameters }) => ({ Upload: parameters }))
-    // Set assets indexes in the metadata
-    if (videoPath) {
-      meta.setVideo(0)
-    }
-    if (thumbnailPhotoPath) {
-      meta.setThumbnailPhoto(videoPath ? 1 : 0)
-    }
+    const [resolvedAssets, assetIndices] = await this.resolveAndValidateAssets({ videoPath, thumbnailPhotoPath }, input)
+    // Set assets indices in the metadata
+    // "undefined" values will be omitted when the metadata is encoded. It's not possible to "unset" an asset this way.
+    meta.video = assetIndices.videoPath
+    meta.thumbnailPhoto = assetIndices.thumbnailPhotoPath
 
+    // Prepare and send the extrinsic
+    const assetsToUpload = await this.prepareAssetsForExtrinsic(resolvedAssets)
+    const assetsToRemove = await this.getAssetsToRemove(
+      videoId,
+      assetIndices.videoPath,
+      assetIndices.thumbnailPhotoPath
+    )
     const videoUpdateParameters: CreateInterface<VideoUpdateParameters> = {
-      assets,
-      new_meta: metadataToBytes(meta),
+      assets_to_upload: assetsToUpload,
+      new_meta: metadataToBytes(VideoMetadata, meta),
+      assets_to_remove: createType('BTreeSet<DataObjectId>', assetsToRemove),
     }
 
-    this.jsonPrettyPrint(JSON.stringify({ assets, newMetadata: meta.toObject() }))
+    this.jsonPrettyPrint(
+      JSON.stringify({ assetsToUpload: assetsToUpload?.toJSON(), newMetadata: meta, assetsToRemove })
+    )
 
     await this.requireConfirmation('Do you confirm the provided input?', true)
 
-    await this.sendAndFollowNamedTx(currentAccount, 'content', 'updateVideo', [actor, videoId, videoUpdateParameters])
-
-    await this.uploadAssets(inputAssets, input)
+    const result = await this.sendAndFollowNamedTx(keypair, 'content', 'updateVideo', [
+      actor,
+      videoId,
+      videoUpdateParameters,
+    ])
+    const dataObjectsUploadedEvent = this.findEvent(result, 'storage', 'DataObjectsUploaded')
+    if (dataObjectsUploadedEvent) {
+      const [objectIds] = dataObjectsUploadedEvent.data
+      await this.uploadAssets(
+        keypair,
+        memberId.toNumber(),
+        `dynamic:channel:${video.in_channel.toString()}`,
+        objectIds.map((id, index) => ({ dataObjectId: id, path: resolvedAssets[index].path })),
+        input
+      )
+    }
   }
 }
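
The video command follows the same pattern; a minimal illustrative input (values assumed) replaces both assets, and any resulting uploads target the owning channel's dynamic bag (dynamic:channel:<channelId>):

  // Partial, illustrative update input; the shape mirrors VideoInputParameters.
  const exampleVideoUpdate = {
    videoPath: './episode-01-v2.mp4',      // replaces the current media object (queued for removal above)
    thumbnailPhotoPath: './thumb-v2.png',  // replaces the current thumbnail object
  }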

+ 8 - 11
cli/src/commands/content/updateVideoCategory.ts

@@ -1,11 +1,12 @@
 import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
 import { getInputJson } from '../../helpers/InputOutput'
 import { VideoCategoryInputParameters } from '../../Types'
-import { metadataToBytes, videoCategoryMetadataFromInput } from '../../helpers/serialization'
+import { asValidatedMetadata, metadataToBytes } from '../../helpers/serialization'
 import { flags } from '@oclif/command'
 import { CreateInterface } from '@joystream/types'
 import { VideoCategoryUpdateParameters } from '@joystream/types/content'
-import { VideoCategoryInputSchema } from '../../json-schemas/ContentDirectory'
+import { VideoCategoryInputSchema } from '../../schemas/ContentDirectory'
+import { VideoCategoryMetadata } from '@joystream/metadata-protobuf'
 
 export default class UpdateVideoCategoryCommand extends ContentDirectoryCommandBase {
   static description = 'Update video category inside content directory.'
@@ -26,29 +27,25 @@ export default class UpdateVideoCategoryCommand extends ContentDirectoryCommandB
     },
   ]
 
-  async run() {
+  async run(): Promise<void> {
     const { context, input } = this.parse(UpdateVideoCategoryCommand).flags
 
     const { videoCategoryId } = this.parse(UpdateVideoCategoryCommand).args
 
-    const currentAccount = await this.getRequiredSelectedAccount()
-    await this.requestAccountDecoding(currentAccount)
-
-    const actor = context ? await this.getActor(context) : await this.getCategoryManagementActor()
+    const [actor, address] = context ? await this.getContentActor(context) : await this.getCategoryManagementActor()
 
     const videoCategoryInput = await getInputJson<VideoCategoryInputParameters>(input, VideoCategoryInputSchema)
-
-    const meta = videoCategoryMetadataFromInput(videoCategoryInput)
+    const meta = asValidatedMetadata(VideoCategoryMetadata, videoCategoryInput)
 
     const videoCategoryUpdateParameters: CreateInterface<VideoCategoryUpdateParameters> = {
-      new_meta: metadataToBytes(meta),
+      new_meta: metadataToBytes(VideoCategoryMetadata, meta),
     }
 
     this.jsonPrettyPrint(JSON.stringify(videoCategoryInput))
 
     await this.requireConfirmation('Do you confirm the provided input?', true)
 
-    await this.sendAndFollowNamedTx(currentAccount, 'content', 'updateVideoCategory', [
+    await this.sendAndFollowNamedTx(await this.getDecodedPair(address), 'content', 'updateVideoCategory', [
       actor,
       videoCategoryId,
       videoCategoryUpdateParameters,

+ 6 - 8
cli/src/commands/content/updateVideoCensorshipStatus.ts

@@ -26,19 +26,15 @@ export default class UpdateVideoCensorshipStatusCommand extends ContentDirectory
     },
   ]
 
-  async run() {
+  async run(): Promise<void> {
     let {
       args: { id, status },
       flags: { rationale },
     } = this.parse(UpdateVideoCensorshipStatusCommand)
 
-    const currentAccount = await this.getRequiredSelectedAccount()
-
     const video = await this.getApi().videoById(id)
     const channel = await this.getApi().channelById(video.in_channel.toNumber())
-    const actor = await this.getCurationActorByChannel(channel)
-
-    await this.requestAccountDecoding(currentAccount)
+    const [actor, address] = await this.getCurationActorByChannel(channel)
 
     if (status === undefined) {
       status = await this.simplePrompt({
@@ -59,10 +55,12 @@ export default class UpdateVideoCensorshipStatusCommand extends ContentDirectory
     }
 
     if (rationale === undefined) {
-      rationale = await this.simplePrompt({ message: 'Please provide the rationale for updating the status' })
+      rationale = (await this.simplePrompt({
+        message: 'Please provide the rationale for updating the status',
+      })) as string
     }
 
-    await this.sendAndFollowNamedTx(currentAccount, 'content', 'updateVideoCensorshipStatus', [
+    await this.sendAndFollowNamedTx(await this.getDecodedPair(address), 'content', 'updateVideoCensorshipStatus', [
       actor,
       id,
       status,

+ 2 - 2
cli/src/commands/content/video.ts

@@ -11,14 +11,14 @@ export default class VideoCommand extends ContentDirectoryCommandBase {
     },
   ]
 
-  async run() {
+  async run(): Promise<void> {
     const { videoId } = this.parse(VideoCommand).args
     const aVideo = await this.getApi().videoById(videoId)
     if (aVideo) {
       displayCollapsedRow({
         'ID': videoId.toString(),
         'InChannel': aVideo.in_channel.toString(),
-        'InSeries': aVideo.in_series.toString(),
+        'InSeries': aVideo.in_series.unwrapOr('NONE').toString(),
         'IsCensored': aVideo.is_censored.toString(),
       })
     } else {

+ 4 - 6
cli/src/commands/content/videos.ts

@@ -13,14 +13,12 @@ export default class VideosCommand extends ContentDirectoryCommandBase {
     },
   ]
 
-  async run() {
+  async run(): Promise<void> {
     const { channelId } = this.parse(VideosCommand).args
 
-    let videos: [VideoId, Video][]
+    let videos: [VideoId, Video][] = await this.getApi().availableVideos()
     if (channelId) {
-      videos = await this.getApi().videosByChannelId(channelId)
-    } else {
-      videos = await this.getApi().availableVideos()
+      videos = videos.filter(([, v]) => v.in_channel.eqn(parseInt(channelId)))
     }
 
     if (videos.length > 0) {
@@ -28,7 +26,7 @@ export default class VideosCommand extends ContentDirectoryCommandBase {
         videos.map(([id, v]) => ({
           'ID': id.toString(),
           'InChannel': v.in_channel.toString(),
-          'InSeries': v.in_series.toString(),
+          'InSeries': v.in_series.unwrapOr('NONE').toString(),
           'IsCensored': v.is_censored.toString(),
         })),
         3

+ 1 - 1
cli/src/commands/council/info.ts

@@ -1,7 +1,7 @@
 import { ElectionStage } from '@joystream/types/council'
 import { formatNumber, formatBalance } from '@polkadot/util'
 import { BlockNumber } from '@polkadot/types/interfaces'
-import { CouncilInfoObj, NameValueObj } from '../../Types'
+import { CouncilInfo as CouncilInfoObj, NameValueObj } from '../../Types'
 import { displayHeader, displayNameValueTable } from '../../helpers/display'
 import ApiCommandBase from '../../base/ApiCommandBase'
 

+ 24 - 20
cli/src/commands/working-groups/createOpening.ts

@@ -6,14 +6,17 @@ import HRTSchema from '@joystream/types/hiring/schemas/role.schema.json'
 import { GenericJoyStreamRoleSchema as HRTJson } from '@joystream/types/hiring/schemas/role.schema.typings'
 import { JsonSchemaPrompter } from '../../helpers/JsonSchemaPrompt'
 import { JSONSchema } from '@apidevtools/json-schema-ref-parser'
-import WGOpeningSchema from '../../json-schemas/WorkingGroupOpening.schema.json'
-import { WorkingGroupOpening as WGOpeningJson } from '../../json-schemas/typings/WorkingGroupOpening.schema'
+import WGOpeningSchema from '../../schemas/json/WorkingGroupOpening.schema.json'
+import { WorkingGroupOpening as WGOpeningJson } from '../../schemas/typings/WorkingGroupOpening.schema'
 import _ from 'lodash'
 import { IOFlags, getInputJson, ensureOutputFileIsWriteable, saveOutputJsonToFile } from '../../helpers/InputOutput'
 import Ajv from 'ajv'
 import ExitCodes from '../../ExitCodes'
 import { flags } from '@oclif/command'
-import { createType } from '@joystream/types'
+import { CLIError } from '@oclif/errors'
+import { createTypeFromConstructor } from '@joystream/types'
+import { OpeningPolicyCommitment, OpeningType } from '@joystream/types/working-group'
+import { ActivateOpeningAt } from '@joystream/types/hiring'
 
 export default class WorkingGroupsCreateOpening extends WorkingGroupsCommandBase {
   static description = 'Create working group opening (requires lead access)'
@@ -76,10 +79,13 @@ export default class WorkingGroupsCreateOpening extends WorkingGroupsCommandBase
     }
   }
 
-  createTxParams(wgOpeningJson: WGOpeningJson, hrtJson: HRTJson) {
+  createTxParams(
+    wgOpeningJson: WGOpeningJson,
+    hrtJson: HRTJson
+  ): [ActivateOpeningAt, OpeningPolicyCommitment, string, OpeningType] {
     return [
-      wgOpeningJson.activateAt,
-      createType('OpeningPolicyCommitment', {
+      createTypeFromConstructor(ActivateOpeningAt, wgOpeningJson.activateAt),
+      createTypeFromConstructor(OpeningPolicyCommitment, {
         max_review_period_length: wgOpeningJson.maxReviewPeriodLength,
         application_rationing_policy: wgOpeningJson.maxActiveApplicants
           ? { max_active_applicants: wgOpeningJson.maxActiveApplicants }
@@ -100,7 +106,7 @@ export default class WorkingGroupsCreateOpening extends WorkingGroupsCommandBase
         exit_role_stake_unstaking_period: wgOpeningJson.leaveRoleUnstakingPeriod,
       }),
       JSON.stringify(hrtJson),
-      createType('OpeningType', 'Worker'),
+      createTypeFromConstructor(OpeningType, 'Worker'),
     ]
   }
 
@@ -155,11 +161,9 @@ export default class WorkingGroupsCreateOpening extends WorkingGroupsCommandBase
     return [openingJson, hrtJson]
   }
 
-  async run() {
-    const account = await this.getRequiredSelectedAccount()
+  async run(): Promise<void> {
     // lead-only gate
-    const lead = await this.getRequiredLead()
-    await this.requestAccountDecoding(account) // Prompt for password
+    const lead = await this.getRequiredLeadContext()
 
     const {
       flags: { input, output, edit, dryRun },
@@ -210,17 +214,17 @@ export default class WorkingGroupsCreateOpening extends WorkingGroupsCommandBase
 
       // Send the tx
       this.log(chalk.magentaBright('Sending the extrinsic...'))
-      const txSuccess = await this.sendAndFollowTx(
-        account,
-        this.getOriginalApi().tx[apiModuleByGroup[this.group]].addOpening(...txParams),
-        true // warnOnly
-      )
-
-      // Display a success message on success or ask to try again on error
-      if (txSuccess) {
+      try {
+        await this.sendAndFollowTx(
+          await this.getDecodedPair(lead.roleAccount),
+          this.getOriginalApi().tx[apiModuleByGroup[this.group]].addOpening(...txParams)
+        )
         this.log(chalk.green('Opening successfully created!'))
         tryAgain = false
-      } else {
+      } catch (e) {
+        if (e instanceof CLIError) {
+          this.warn(e.message)
+        }
         tryAgain = await this.simplePrompt({ type: 'confirm', message: 'Try again with remembered input?' })
       }
     } while (tryAgain)

+ 8 - 6
cli/src/commands/working-groups/decreaseWorkerStake.ts

@@ -23,12 +23,11 @@ export default class WorkingGroupsDecreaseWorkerStake extends WorkingGroupsComma
     ...WorkingGroupsCommandBase.flags,
   }
 
-  async run() {
+  async run(): Promise<void> {
     const { args } = this.parse(WorkingGroupsDecreaseWorkerStake)
 
-    const account = await this.getRequiredSelectedAccount()
     // Lead-only gate
-    await this.getRequiredLead()
+    const lead = await this.getRequiredLeadContext()
 
     const workerId = parseInt(args.workerId)
     const groupMember = await this.getWorkerWithStakeForLeadAction(workerId)
@@ -40,9 +39,12 @@ export default class WorkingGroupsDecreaseWorkerStake extends WorkingGroupsComma
       createParamOptions('amount', undefined, balanceValidator)
     )) as Balance
 
-    await this.requestAccountDecoding(account)
-
-    await this.sendAndFollowNamedTx(account, apiModuleByGroup[this.group], 'decreaseStake', [workerId, balance])
+    await this.sendAndFollowNamedTx(
+      await this.getDecodedPair(lead.roleAccount),
+      apiModuleByGroup[this.group],
+      'decreaseStake',
+      [workerId, balance]
+    )
 
     this.log(
       chalk.green(

+ 10 - 12
cli/src/commands/working-groups/evictWorker.ts

@@ -3,6 +3,7 @@ import { apiModuleByGroup } from '../../Api'
 import { formatBalance } from '@polkadot/util'
 import chalk from 'chalk'
 import { createParamOptions } from '../../helpers/promptOptions'
+import { Bytes } from '@polkadot/types'
 
 export default class WorkingGroupsEvictWorker extends WorkingGroupsCommandBase {
   static description = 'Evicts given worker. Requires lead access.'
@@ -18,19 +19,17 @@ export default class WorkingGroupsEvictWorker extends WorkingGroupsCommandBase {
     ...WorkingGroupsCommandBase.flags,
   }
 
-  async run() {
+  async run(): Promise<void> {
     const { args } = this.parse(WorkingGroupsEvictWorker)
 
-    const account = await this.getRequiredSelectedAccount()
-    // Lead-only gate
-    await this.getRequiredLead()
+    const lead = await this.getRequiredLeadContext()
 
     const workerId = parseInt(args.workerId)
     // This will also make sure the worker is valid
     const groupMember = await this.getWorkerForLeadAction(workerId)
 
     // TODO: Terminate worker text limits? (minMaxStr)
-    const rationale = await this.promptForParam('Bytes', createParamOptions('rationale'))
+    const rationale = (await this.promptForParam('Bytes', createParamOptions('rationale'))) as Bytes
     const shouldSlash = groupMember.stake
       ? await this.simplePrompt({
           message: `Should the worker stake (${formatBalance(groupMember.stake)}) be slashed?`,
@@ -39,13 +38,12 @@ export default class WorkingGroupsEvictWorker extends WorkingGroupsCommandBase {
         })
       : false
 
-    await this.requestAccountDecoding(account)
-
-    await this.sendAndFollowNamedTx(account, apiModuleByGroup[this.group], 'terminateRole', [
-      workerId,
-      rationale,
-      shouldSlash,
-    ])
+    await this.sendAndFollowNamedTx(
+      await this.getDecodedPair(lead.roleAccount),
+      apiModuleByGroup[this.group],
+      'terminateRole',
+      [workerId, rationale, shouldSlash]
+    )
 
     this.log(chalk.green(`Worker ${chalk.magentaBright(workerId)} has been evicted!`))
     if (shouldSlash) {

+ 9 - 11
cli/src/commands/working-groups/fillOpening.ts

@@ -3,7 +3,7 @@ import { OpeningStatus } from '../../Types'
 import { apiModuleByGroup } from '../../Api'
 import chalk from 'chalk'
 import { createParamOptions } from '../../helpers/promptOptions'
-
+import { createType } from '@joystream/types'
 export default class WorkingGroupsFillOpening extends WorkingGroupsCommandBase {
   static description = "Allows filling working group opening that's currently in review. Requires lead access."
   static args = [
@@ -18,12 +18,11 @@ export default class WorkingGroupsFillOpening extends WorkingGroupsCommandBase {
     ...WorkingGroupsCommandBase.flags,
   }
 
-  async run() {
+  async run(): Promise<void> {
     const { args } = this.parse(WorkingGroupsFillOpening)
 
-    const account = await this.getRequiredSelectedAccount()
     // Lead-only gate
-    await this.getRequiredLead()
+    const lead = await this.getRequiredLeadContext()
 
     const openingId = parseInt(args.wgOpeningId)
     const opening = await this.getOpeningForLeadAction(openingId, OpeningStatus.InReview)
@@ -31,13 +30,12 @@ export default class WorkingGroupsFillOpening extends WorkingGroupsCommandBase {
     const applicationIds = await this.promptForApplicationsToAccept(opening)
     const rewardPolicyOpt = await this.promptForParam(`Option<RewardPolicy>`, createParamOptions('RewardPolicy'))
 
-    await this.requestAccountDecoding(account)
-
-    await this.sendAndFollowNamedTx(account, apiModuleByGroup[this.group], 'fillOpening', [
-      openingId,
-      applicationIds,
-      rewardPolicyOpt,
-    ])
+    await this.sendAndFollowNamedTx(
+      await this.getDecodedPair(lead.roleAccount),
+      apiModuleByGroup[this.group],
+      'fillOpening',
+      [openingId, createType('BTreeSet<ApplicationId>', applicationIds), rewardPolicyOpt]
+    )
 
     this.log(chalk.green(`Opening ${chalk.magentaBright(openingId)} successfully filled!`))
     this.log(

+ 8 - 6
cli/src/commands/working-groups/increaseStake.ts

@@ -13,10 +13,9 @@ export default class WorkingGroupsIncreaseStake extends WorkingGroupsCommandBase
     ...WorkingGroupsCommandBase.flags,
   }
 
-  async run() {
-    const account = await this.getRequiredSelectedAccount()
+  async run(): Promise<void> {
     // Worker-only gate
-    const worker = await this.getRequiredWorker()
+    const worker = await this.getRequiredWorkerContext()
 
     if (!worker.stake) {
       this.error('Cannot increase stake. No associated role stake profile found!', { exit: ExitCodes.InvalidInput })
@@ -28,9 +27,12 @@ export default class WorkingGroupsIncreaseStake extends WorkingGroupsCommandBase
       createParamOptions('amount', undefined, positiveInt())
     )) as Balance
 
-    await this.requestAccountDecoding(account)
-
-    await this.sendAndFollowNamedTx(account, apiModuleByGroup[this.group], 'increaseStake', [worker.workerId, balance])
+    await this.sendAndFollowNamedTx(
+      await this.getDecodedPair(worker.roleAccount),
+      apiModuleByGroup[this.group],
+      'increaseStake',
+      [worker.workerId, balance]
+    )
 
     this.log(
       chalk.green(

+ 13 - 7
cli/src/commands/working-groups/leaveRole.ts

@@ -3,6 +3,7 @@ import { apiModuleByGroup } from '../../Api'
 import { minMaxStr } from '../../validators/common'
 import chalk from 'chalk'
 import { createParamOptions } from '../../helpers/promptOptions'
+import { Bytes } from '@polkadot/types'
 
 export default class WorkingGroupsLeaveRole extends WorkingGroupsCommandBase {
   static description = 'Leave the worker or lead role associated with currently selected account.'
@@ -10,18 +11,23 @@ export default class WorkingGroupsLeaveRole extends WorkingGroupsCommandBase {
     ...WorkingGroupsCommandBase.flags,
   }
 
-  async run() {
-    const account = await this.getRequiredSelectedAccount()
+  async run(): Promise<void> {
     // Worker-only gate
-    const worker = await this.getRequiredWorker()
+    const worker = await this.getRequiredWorkerContext()
 
     const constraint = await this.getApi().workerExitRationaleConstraint(this.group)
     const rationaleValidator = minMaxStr(constraint.min.toNumber(), constraint.max.toNumber())
-    const rationale = await this.promptForParam('Bytes', createParamOptions('rationale', undefined, rationaleValidator))
+    const rationale = (await this.promptForParam(
+      'Bytes',
+      createParamOptions('rationale', undefined, rationaleValidator)
+    )) as Bytes
 
-    await this.requestAccountDecoding(account)
-
-    await this.sendAndFollowNamedTx(account, apiModuleByGroup[this.group], 'leaveRole', [worker.workerId, rationale])
+    await this.sendAndFollowNamedTx(
+      await this.getDecodedPair(worker.roleAccount),
+      apiModuleByGroup[this.group],
+      'leaveRole',
+      [worker.workerId, rationale]
+    )
 
     this.log(chalk.green(`Successfully left the role! (worker id: ${chalk.magentaBright(worker.workerId.toNumber())})`))
   }

+ 8 - 6
cli/src/commands/working-groups/slashWorker.ts

@@ -20,12 +20,11 @@ export default class WorkingGroupsSlashWorker extends WorkingGroupsCommandBase {
     ...WorkingGroupsCommandBase.flags,
   }
 
-  async run() {
+  async run(): Promise<void> {
     const { args } = this.parse(WorkingGroupsSlashWorker)
 
-    const account = await this.getRequiredSelectedAccount()
     // Lead-only gate
-    await this.getRequiredLead()
+    const lead = await this.getRequiredLeadContext()
 
     const workerId = parseInt(args.workerId)
     const groupMember = await this.getWorkerWithStakeForLeadAction(workerId)
@@ -37,9 +36,12 @@ export default class WorkingGroupsSlashWorker extends WorkingGroupsCommandBase {
       createParamOptions('amount', undefined, balanceValidator)
     )) as Balance
 
-    await this.requestAccountDecoding(account)
-
-    await this.sendAndFollowNamedTx(account, apiModuleByGroup[this.group], 'slashStake', [workerId, balance])
+    await this.sendAndFollowNamedTx(
+      await this.getDecodedPair(lead.roleAccount),
+      apiModuleByGroup[this.group],
+      'slashStake',
+      [workerId, balance]
+    )
 
     this.log(
       chalk.green(

+ 8 - 6
cli/src/commands/working-groups/startAcceptingApplications.ts

@@ -17,19 +17,21 @@ export default class WorkingGroupsStartAcceptingApplications extends WorkingGrou
     ...WorkingGroupsCommandBase.flags,
   }
 
-  async run() {
+  async run(): Promise<void> {
     const { args } = this.parse(WorkingGroupsStartAcceptingApplications)
 
-    const account = await this.getRequiredSelectedAccount()
     // Lead-only gate
-    await this.getRequiredLead()
+    const lead = await this.getRequiredLeadContext()
 
     const openingId = parseInt(args.wgOpeningId)
     await this.validateOpeningForLeadAction(openingId, OpeningStatus.WaitingToBegin)
 
-    await this.requestAccountDecoding(account)
-
-    await this.sendAndFollowNamedTx(account, apiModuleByGroup[this.group], 'acceptApplications', [openingId])
+    await this.sendAndFollowNamedTx(
+      await this.getDecodedPair(lead.roleAccount),
+      apiModuleByGroup[this.group],
+      'acceptApplications',
+      [openingId]
+    )
 
     this.log(
       chalk.green(

+ 8 - 6
cli/src/commands/working-groups/startReviewPeriod.ts

@@ -17,19 +17,21 @@ export default class WorkingGroupsStartReviewPeriod extends WorkingGroupsCommand
     ...WorkingGroupsCommandBase.flags,
   }
 
-  async run() {
+  async run(): Promise<void> {
     const { args } = this.parse(WorkingGroupsStartReviewPeriod)
 
-    const account = await this.getRequiredSelectedAccount()
     // Lead-only gate
-    await this.getRequiredLead()
+    const lead = await this.getRequiredLeadContext()
 
     const openingId = parseInt(args.wgOpeningId)
     await this.validateOpeningForLeadAction(openingId, OpeningStatus.AcceptingApplications)
 
-    await this.requestAccountDecoding(account)
-
-    await this.sendAndFollowNamedTx(account, apiModuleByGroup[this.group], 'beginApplicantReview', [openingId])
+    await this.sendAndFollowNamedTx(
+      await this.getDecodedPair(lead.roleAccount),
+      apiModuleByGroup[this.group],
+      'beginApplicantReview',
+      [openingId]
+    )
 
     this.log(
       chalk.green(`Opening ${chalk.magentaBright(openingId)} status changed to: ${chalk.magentaBright('In Review')}`)

+ 8 - 6
cli/src/commands/working-groups/terminateApplication.ts

@@ -17,20 +17,22 @@ export default class WorkingGroupsTerminateApplication extends WorkingGroupsComm
     ...WorkingGroupsCommandBase.flags,
   }
 
-  async run() {
+  async run(): Promise<void> {
     const { args } = this.parse(WorkingGroupsTerminateApplication)
 
-    const account = await this.getRequiredSelectedAccount()
     // Lead-only gate
-    await this.getRequiredLead()
+    const lead = await this.getRequiredLeadContext()
 
     const applicationId = parseInt(args.wgApplicationId)
     // We don't really need the application itself here, so this one is just for validation purposes
     await this.getApplicationForLeadAction(applicationId, ApplicationStageKeys.Active)
 
-    await this.requestAccountDecoding(account)
-
-    await this.sendAndFollowNamedTx(account, apiModuleByGroup[this.group], 'terminateApplication', [applicationId])
+    await this.sendAndFollowNamedTx(
+      await this.getDecodedPair(lead.roleAccount),
+      apiModuleByGroup[this.group],
+      'terminateApplication',
+      [applicationId]
+    )
 
     this.log(chalk.green(`Application ${chalk.magentaBright(applicationId)} has been successfully terminated!`))
   }

+ 16 - 18
cli/src/commands/working-groups/updateRewardAccount.ts

@@ -8,9 +8,9 @@ export default class WorkingGroupsUpdateRewardAccount extends WorkingGroupsComma
   static description = 'Updates the worker/lead reward account (requires current role account to be selected)'
   static args = [
     {
-      name: 'accountAddress',
+      name: 'address',
       required: false,
-      description: 'New reward account address (if omitted, one of the existing CLI accounts can be selected)',
+      description: 'New reward account address (if omitted, can be provided interactively)',
     },
   ]
 
@@ -18,31 +18,29 @@ export default class WorkingGroupsUpdateRewardAccount extends WorkingGroupsComma
     ...WorkingGroupsCommandBase.flags,
   }
 
-  async run() {
-    const { args } = this.parse(WorkingGroupsUpdateRewardAccount)
+  async run(): Promise<void> {
+    let { address } = this.parse(WorkingGroupsUpdateRewardAccount).args
 
-    const account = await this.getRequiredSelectedAccount()
     // Worker-only gate
-    const worker = await this.getRequiredWorker()
+    const worker = await this.getRequiredWorkerContext()
 
     if (!worker.reward) {
       this.error('There is no reward relationship associated with this role!', { exit: ExitCodes.InvalidInput })
     }
 
-    let newRewardAccount: string = args.accountAddress
-    if (!newRewardAccount) {
-      const accounts = await this.fetchAccounts()
-      newRewardAccount = (await this.promptForAccount(accounts, undefined, 'Choose the new reward account')).address
+    if (!address) {
+      address = await this.promptForAnyAddress('Select new reward account')
+    } else if (validateAddress(address) !== true) {
+      this.error('Invalid address', { exit: ExitCodes.InvalidInput })
     }
-    validateAddress(newRewardAccount)
 
-    await this.requestAccountDecoding(account)
+    await this.sendAndFollowNamedTx(
+      await this.getDecodedPair(worker.roleAccount),
+      apiModuleByGroup[this.group],
+      'updateRewardAccount',
+      [worker.workerId, address]
+    )
 
-    await this.sendAndFollowNamedTx(account, apiModuleByGroup[this.group], 'updateRewardAccount', [
-      worker.workerId,
-      newRewardAccount,
-    ])
-
-    this.log(chalk.green(`Successfully updated the reward account to: ${chalk.magentaBright(newRewardAccount)})`))
+    this.log(chalk.green(`Successfully updated the reward account to: ${chalk.magentaBright(address)})`))
   }
 }

+ 17 - 34
cli/src/commands/working-groups/updateRoleAccount.ts

@@ -2,14 +2,15 @@ import WorkingGroupsCommandBase from '../../base/WorkingGroupsCommandBase'
 import { apiModuleByGroup } from '../../Api'
 import { validateAddress } from '../../helpers/validation'
 import chalk from 'chalk'
+import ExitCodes from '../../ExitCodes'
 
 export default class WorkingGroupsUpdateRoleAccount extends WorkingGroupsCommandBase {
   static description = 'Updates the worker/lead role account. Requires member controller account to be selected'
   static args = [
     {
-      name: 'accountAddress',
+      name: 'address',
       required: false,
-      description: 'New role account address (if omitted, one of the existing CLI accounts can be selected)',
+      description: 'New role account address (if omitted, can be provided interactively)',
     },
   ]
 
@@ -17,42 +18,24 @@ export default class WorkingGroupsUpdateRoleAccount extends WorkingGroupsCommand
     ...WorkingGroupsCommandBase.flags,
   }
 
-  async run() {
-    const { args } = this.parse(WorkingGroupsUpdateRoleAccount)
+  async run(): Promise<void> {
+    let { address } = this.parse(WorkingGroupsUpdateRoleAccount).args
 
-    const account = await this.getRequiredSelectedAccount()
-    const worker = await this.getRequiredWorkerByMemberController()
+    const worker = await this.getRequiredWorkerContext('MemberController')
 
-    const cliAccounts = await this.fetchAccounts()
-    let newRoleAccount: string = args.accountAddress
-    if (!newRoleAccount) {
-      newRoleAccount = (await this.promptForAccount(cliAccounts, undefined, 'Choose the new role account')).address
+    if (!address) {
+      address = await this.promptForAnyAddress('Select new role account')
+    } else if (validateAddress(address) !== true) {
+      this.error('Invalid address', { exit: ExitCodes.InvalidInput })
     }
-    validateAddress(newRoleAccount)
 
-    await this.requestAccountDecoding(account)
+    await this.sendAndFollowNamedTx(
+      await this.getDecodedPair(worker.profile.controller_account),
+      apiModuleByGroup[this.group],
+      'updateRoleAccount',
+      [worker.workerId, address]
+    )
 
-    await this.sendAndFollowNamedTx(account, apiModuleByGroup[this.group], 'updateRoleAccount', [
-      worker.workerId,
-      newRoleAccount,
-    ])
-
-    this.log(chalk.green(`Successfully updated the role account to: ${chalk.magentaBright(newRoleAccount)})`))
-
-    const matchingAccount = cliAccounts.find((account) => account.address === newRoleAccount)
-    if (matchingAccount) {
-      const switchAccount = await this.simplePrompt({
-        type: 'confirm',
-        message: 'Do you want to switch the currenly selected CLI account to the new role account?',
-        default: false,
-      })
-      if (switchAccount) {
-        await this.setSelectedAccount(matchingAccount)
-        this.log(
-          chalk.green('Account switched to: ') +
-            chalk.magentaBright(`${matchingAccount.meta.name} (${matchingAccount.address})`)
-        )
-      }
-    }
+    this.log(chalk.green(`Successfully updated the role account to: ${chalk.magentaBright(address)})`))
   }
 }

+ 8 - 11
cli/src/commands/working-groups/updateRoleStorage.ts

@@ -16,20 +16,17 @@ export default class WorkingGroupsUpdateRoleStorage extends WorkingGroupsCommand
     ...WorkingGroupsCommandBase.flags,
   }
 
-  async run() {
+  async run(): Promise<void> {
     const { storage } = this.parse(WorkingGroupsUpdateRoleStorage).args
 
-    const account = await this.getRequiredSelectedAccount()
+    const worker = await this.getRequiredWorkerContext()
 
-    // Worker-only gate
-    const worker = await this.getRequiredWorker()
-
-    await this.requestAccountDecoding(account)
-
-    await this.sendAndFollowNamedTx(account, apiModuleByGroup[this.group], 'updateRoleStorage', [
-      worker.workerId,
-      storage,
-    ])
+    await this.sendAndFollowNamedTx(
+      await this.getDecodedPair(worker.roleAccount),
+      apiModuleByGroup[this.group],
+      'updateRoleStorage',
+      [worker.workerId, storage]
+    )
 
     this.log(chalk.green(`Successfully updated the associated worker storage to: ${chalk.magentaBright(storage)})`))
   }

+ 12 - 13
cli/src/commands/working-groups/updateWorkerReward.ts

@@ -6,6 +6,7 @@ import { Reward } from '../../Types'
 import { positiveInt } from '../../validators/common'
 import { createParamOptions } from '../../helpers/promptOptions'
 import ExitCodes from '../../ExitCodes'
+import { BalanceOfMint } from '@joystream/types/mint'
 
 export default class WorkingGroupsUpdateWorkerReward extends WorkingGroupsCommandBase {
   static description = "Change given worker's reward (amount only). Requires lead access."
@@ -21,7 +22,7 @@ export default class WorkingGroupsUpdateWorkerReward extends WorkingGroupsComman
     ...WorkingGroupsCommandBase.flags,
   }
 
-  formatReward(reward?: Reward) {
+  formatReward(reward?: Reward): string {
     return reward
       ? formatBalance(reward.value) +
           (reward.interval ? ` / ${reward.interval} block(s)` : '') +
@@ -29,12 +30,10 @@ export default class WorkingGroupsUpdateWorkerReward extends WorkingGroupsComman
       : 'NONE'
   }
 
-  async run() {
+  async run(): Promise<void> {
     const { args } = this.parse(WorkingGroupsUpdateWorkerReward)
 
-    const account = await this.getRequiredSelectedAccount()
-    // Lead-only gate
-    await this.getRequiredLead()
+    const lead = await this.getRequiredLeadContext()
 
     const workerId = parseInt(args.workerId)
     // This will also make sure the worker is valid
@@ -48,17 +47,17 @@ export default class WorkingGroupsUpdateWorkerReward extends WorkingGroupsComman
 
     console.log(chalk.magentaBright(`Current worker reward: ${this.formatReward(reward)}`))
 
-    const newRewardValue = await this.promptForParam(
+    const newRewardValue = (await this.promptForParam(
       'BalanceOfMint',
       createParamOptions('new_amount', undefined, positiveInt())
-    )
-
-    await this.requestAccountDecoding(account)
+    )) as BalanceOfMint
 
-    await this.sendAndFollowNamedTx(account, apiModuleByGroup[this.group], 'updateRewardAmount', [
-      workerId,
-      newRewardValue,
-    ])
+    await this.sendAndFollowNamedTx(
+      await this.getDecodedPair(lead.roleAccount),
+      apiModuleByGroup[this.group],
+      'updateRewardAmount',
+      [workerId, newRewardValue]
+    )
 
     const updatedGroupMember = await this.getApi().groupMember(this.group, workerId)
     this.log(chalk.green(`Worker ${chalk.magentaBright(workerId)} reward has been updated!`))

+ 120 - 0
cli/src/graphql/generated/queries.ts

@@ -0,0 +1,120 @@
+import * as Types from './schema'
+
+import gql from 'graphql-tag'
+export type StorageNodeInfoFragment = {
+  id: string
+  operatorMetadata?: Types.Maybe<{ nodeEndpoint?: Types.Maybe<string> }>
+}
+
+export type GetStorageNodesInfoByBagIdQueryVariables = Types.Exact<{
+  bagId?: Types.Maybe<Types.Scalars['ID']>
+}>
+
+export type GetStorageNodesInfoByBagIdQuery = { storageBuckets: Array<StorageNodeInfoFragment> }
+
+export type DataObjectInfoFragment = {
+  id: string
+  size: any
+  deletionPrize: any
+  type:
+    | { __typename: 'DataObjectTypeChannelAvatar'; channel?: Types.Maybe<{ id: string }> }
+    | { __typename: 'DataObjectTypeChannelCoverPhoto'; channel?: Types.Maybe<{ id: string }> }
+    | { __typename: 'DataObjectTypeVideoMedia'; video?: Types.Maybe<{ id: string }> }
+    | { __typename: 'DataObjectTypeVideoThumbnail'; video?: Types.Maybe<{ id: string }> }
+    | { __typename: 'DataObjectTypeUnknown' }
+}
+
+export type GetDataObjectsByBagIdQueryVariables = Types.Exact<{
+  bagId?: Types.Maybe<Types.Scalars['ID']>
+}>
+
+export type GetDataObjectsByBagIdQuery = { storageDataObjects: Array<DataObjectInfoFragment> }
+
+export type GetDataObjectsByChannelIdQueryVariables = Types.Exact<{
+  channelId?: Types.Maybe<Types.Scalars['ID']>
+}>
+
+export type GetDataObjectsByChannelIdQuery = { storageDataObjects: Array<DataObjectInfoFragment> }
+
+export type GetDataObjectsByVideoIdQueryVariables = Types.Exact<{
+  videoId?: Types.Maybe<Types.Scalars['ID']>
+}>
+
+export type GetDataObjectsByVideoIdQuery = { storageDataObjects: Array<DataObjectInfoFragment> }
+
+export const StorageNodeInfo = gql`
+  fragment StorageNodeInfo on StorageBucket {
+    id
+    operatorMetadata {
+      nodeEndpoint
+    }
+  }
+`
+export const DataObjectInfo = gql`
+  fragment DataObjectInfo on StorageDataObject {
+    id
+    size
+    deletionPrize
+    type {
+      __typename
+      ... on DataObjectTypeVideoMedia {
+        video {
+          id
+        }
+      }
+      ... on DataObjectTypeVideoThumbnail {
+        video {
+          id
+        }
+      }
+      ... on DataObjectTypeChannelAvatar {
+        channel {
+          id
+        }
+      }
+      ... on DataObjectTypeChannelCoverPhoto {
+        channel {
+          id
+        }
+      }
+    }
+  }
+`
+export const GetStorageNodesInfoByBagId = gql`
+  query getStorageNodesInfoByBagId($bagId: ID) {
+    storageBuckets(
+      where: {
+        operatorStatus_json: { isTypeOf_eq: "StorageBucketOperatorStatusActive" }
+        bags_some: { id_eq: $bagId }
+        operatorMetadata: { nodeEndpoint_contains: "http" }
+      }
+    ) {
+      ...StorageNodeInfo
+    }
+  }
+  ${StorageNodeInfo}
+`
+export const GetDataObjectsByBagId = gql`
+  query getDataObjectsByBagId($bagId: ID) {
+    storageDataObjects(where: { storageBag: { id_eq: $bagId } }) {
+      ...DataObjectInfo
+    }
+  }
+  ${DataObjectInfo}
+`
+export const GetDataObjectsByChannelId = gql`
+  query getDataObjectsByChannelId($channelId: ID) {
+    storageDataObjects(where: { type_json: { channelId_eq: $channelId } }) {
+      ...DataObjectInfo
+    }
+  }
+  ${DataObjectInfo}
+`
+export const GetDataObjectsByVideoId = gql`
+  query getDataObjectsByVideoId($videoId: ID) {
+    storageDataObjects(where: { type_json: { videoId_eq: $videoId } }) {
+      ...DataObjectInfo
+    }
+  }
+  ${DataObjectInfo}
+`
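
These generated documents back the query-node lookups used by the commands above (dataObjectsByChannelId, dataObjectsByVideoId); below is a minimal direct-usage sketch, assuming an Apollo client and a local query-node endpoint (both assumptions, not how the CLI itself wires this up):

  import { ApolloClient, HttpLink, InMemoryCache, NormalizedCacheObject } from '@apollo/client'
  import fetch from 'cross-fetch'
  import {
    GetDataObjectsByChannelId,
    GetDataObjectsByChannelIdQuery,
    GetDataObjectsByChannelIdQueryVariables,
  } from './queries'

  // Fetch all data objects associated with a channel (endpoint URL is an assumption).
  async function fetchChannelDataObjects(channelId: string): Promise<GetDataObjectsByChannelIdQuery> {
    const client: ApolloClient<NormalizedCacheObject> = new ApolloClient({
      link: new HttpLink({ uri: 'http://localhost:8081/graphql', fetch }),
      cache: new InMemoryCache(),
    })
    const { data } = await client.query<GetDataObjectsByChannelIdQuery, GetDataObjectsByChannelIdQueryVariables>({
      query: GetDataObjectsByChannelId,
      variables: { channelId },
    })
    return data
  }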

+ 3714 - 0
cli/src/graphql/generated/schema.ts

@@ -0,0 +1,3714 @@
+export type Maybe<T> = T | null
+export type Exact<T extends { [key: string]: unknown }> = { [K in keyof T]: T[K] }
+export type MakeOptional<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]?: Maybe<T[SubKey]> }
+export type MakeMaybe<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]: Maybe<T[SubKey]> }
+/** All built-in and custom scalars, mapped to their actual values */
+export type Scalars = {
+  ID: string
+  String: string
+  Boolean: boolean
+  Int: number
+  Float: number
+  /** The javascript `Date` as string. Type represents date and time as the ISO Date string. */
+  DateTime: any
+  /** GraphQL representation of BigInt */
+  BigInt: any
+  /** The `JSONObject` scalar type represents JSON objects as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf). */
+  JSONObject: any
+}
+
+export type BaseGraphQlObject = {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+}
+
+export type BaseModel = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+}
+
+export type BaseModelUuid = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+}
+
+export type BaseWhereInput = {
+  id_eq?: Maybe<Scalars['String']>
+  id_in?: Maybe<Array<Scalars['String']>>
+  createdAt_eq?: Maybe<Scalars['String']>
+  createdAt_lt?: Maybe<Scalars['String']>
+  createdAt_lte?: Maybe<Scalars['String']>
+  createdAt_gt?: Maybe<Scalars['String']>
+  createdAt_gte?: Maybe<Scalars['String']>
+  createdById_eq?: Maybe<Scalars['String']>
+  updatedAt_eq?: Maybe<Scalars['String']>
+  updatedAt_lt?: Maybe<Scalars['String']>
+  updatedAt_lte?: Maybe<Scalars['String']>
+  updatedAt_gt?: Maybe<Scalars['String']>
+  updatedAt_gte?: Maybe<Scalars['String']>
+  updatedById_eq?: Maybe<Scalars['String']>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['String']>
+  deletedAt_lt?: Maybe<Scalars['String']>
+  deletedAt_lte?: Maybe<Scalars['String']>
+  deletedAt_gt?: Maybe<Scalars['String']>
+  deletedAt_gte?: Maybe<Scalars['String']>
+  deletedById_eq?: Maybe<Scalars['String']>
+}
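Note: every *WhereInput generated below follows the same filter-suffix convention (_eq, _gt/_lt, _in, _contains, ...). A small illustration against the base type, with made-up values; timestamps are passed as ISO strings:

const recentlyUpdated: BaseWhereInput = {
  // records updated after a given point in time, restricted to a made-up id set
  updatedAt_gt: '2021-10-01T00:00:00.000Z',
  id_in: ['1', '2', '3'],
}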
+
+export type Channel = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  ownerMember?: Maybe<Membership>
+  ownerMemberId?: Maybe<Scalars['String']>
+  ownerCuratorGroup?: Maybe<CuratorGroup>
+  ownerCuratorGroupId?: Maybe<Scalars['String']>
+  category?: Maybe<ChannelCategory>
+  categoryId?: Maybe<Scalars['String']>
+  /** Reward account where revenue is sent if set. */
+  rewardAccount?: Maybe<Scalars['String']>
+  /** The title of the Channel */
+  title?: Maybe<Scalars['String']>
+  /** The description of a Channel */
+  description?: Maybe<Scalars['String']>
+  coverPhoto?: Maybe<StorageDataObject>
+  coverPhotoId?: Maybe<Scalars['String']>
+  avatarPhoto?: Maybe<StorageDataObject>
+  avatarPhotoId?: Maybe<Scalars['String']>
+  /** Flag signaling whether a channel is public. */
+  isPublic?: Maybe<Scalars['Boolean']>
+  /** Flag signaling whether a channel is censored. */
+  isCensored: Scalars['Boolean']
+  language?: Maybe<Language>
+  languageId?: Maybe<Scalars['String']>
+  videos: Array<Video>
+  /** Number of the block the channel was created in */
+  createdInBlock: Scalars['Int']
+  collaborators: Array<Membership>
+}
+
+export type ChannelCategoriesByNameFtsOutput = {
+  item: ChannelCategoriesByNameSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type ChannelCategoriesByNameSearchResult = ChannelCategory
+
+/** Category of media channel */
+export type ChannelCategory = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** The name of the category */
+  name?: Maybe<Scalars['String']>
+  channels: Array<Channel>
+  createdInBlock: Scalars['Int']
+}
+
+export type ChannelCategoryConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<ChannelCategoryEdge>
+  pageInfo: PageInfo
+}
+
+export type ChannelCategoryCreateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock: Scalars['Float']
+}
+
+export type ChannelCategoryEdge = {
+  node: ChannelCategory
+  cursor: Scalars['String']
+}
+
+export enum ChannelCategoryOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NameAsc = 'name_ASC',
+  NameDesc = 'name_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type ChannelCategoryUpdateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type ChannelCategoryWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  name_eq?: Maybe<Scalars['String']>
+  name_contains?: Maybe<Scalars['String']>
+  name_startsWith?: Maybe<Scalars['String']>
+  name_endsWith?: Maybe<Scalars['String']>
+  name_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  channels_none?: Maybe<ChannelWhereInput>
+  channels_some?: Maybe<ChannelWhereInput>
+  channels_every?: Maybe<ChannelWhereInput>
+  AND?: Maybe<Array<ChannelCategoryWhereInput>>
+  OR?: Maybe<Array<ChannelCategoryWhereInput>>
+}
+
+export type ChannelCategoryWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type ChannelConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<ChannelEdge>
+  pageInfo: PageInfo
+}
+
+export type ChannelCreateInput = {
+  ownerMember?: Maybe<Scalars['ID']>
+  ownerCuratorGroup?: Maybe<Scalars['ID']>
+  category?: Maybe<Scalars['ID']>
+  rewardAccount?: Maybe<Scalars['String']>
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  coverPhoto?: Maybe<Scalars['ID']>
+  avatarPhoto?: Maybe<Scalars['ID']>
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored: Scalars['Boolean']
+  language?: Maybe<Scalars['ID']>
+  createdInBlock: Scalars['Float']
+}
+
+export type ChannelEdge = {
+  node: Channel
+  cursor: Scalars['String']
+}
+
+export enum ChannelOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  OwnerMemberAsc = 'ownerMember_ASC',
+  OwnerMemberDesc = 'ownerMember_DESC',
+  OwnerCuratorGroupAsc = 'ownerCuratorGroup_ASC',
+  OwnerCuratorGroupDesc = 'ownerCuratorGroup_DESC',
+  CategoryAsc = 'category_ASC',
+  CategoryDesc = 'category_DESC',
+  RewardAccountAsc = 'rewardAccount_ASC',
+  RewardAccountDesc = 'rewardAccount_DESC',
+  TitleAsc = 'title_ASC',
+  TitleDesc = 'title_DESC',
+  DescriptionAsc = 'description_ASC',
+  DescriptionDesc = 'description_DESC',
+  CoverPhotoAsc = 'coverPhoto_ASC',
+  CoverPhotoDesc = 'coverPhoto_DESC',
+  AvatarPhotoAsc = 'avatarPhoto_ASC',
+  AvatarPhotoDesc = 'avatarPhoto_DESC',
+  IsPublicAsc = 'isPublic_ASC',
+  IsPublicDesc = 'isPublic_DESC',
+  IsCensoredAsc = 'isCensored_ASC',
+  IsCensoredDesc = 'isCensored_DESC',
+  LanguageAsc = 'language_ASC',
+  LanguageDesc = 'language_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type ChannelUpdateInput = {
+  ownerMember?: Maybe<Scalars['ID']>
+  ownerCuratorGroup?: Maybe<Scalars['ID']>
+  category?: Maybe<Scalars['ID']>
+  rewardAccount?: Maybe<Scalars['String']>
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  coverPhoto?: Maybe<Scalars['ID']>
+  avatarPhoto?: Maybe<Scalars['ID']>
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored?: Maybe<Scalars['Boolean']>
+  language?: Maybe<Scalars['ID']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type ChannelWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  rewardAccount_eq?: Maybe<Scalars['String']>
+  rewardAccount_contains?: Maybe<Scalars['String']>
+  rewardAccount_startsWith?: Maybe<Scalars['String']>
+  rewardAccount_endsWith?: Maybe<Scalars['String']>
+  rewardAccount_in?: Maybe<Array<Scalars['String']>>
+  title_eq?: Maybe<Scalars['String']>
+  title_contains?: Maybe<Scalars['String']>
+  title_startsWith?: Maybe<Scalars['String']>
+  title_endsWith?: Maybe<Scalars['String']>
+  title_in?: Maybe<Array<Scalars['String']>>
+  description_eq?: Maybe<Scalars['String']>
+  description_contains?: Maybe<Scalars['String']>
+  description_startsWith?: Maybe<Scalars['String']>
+  description_endsWith?: Maybe<Scalars['String']>
+  description_in?: Maybe<Array<Scalars['String']>>
+  isPublic_eq?: Maybe<Scalars['Boolean']>
+  isPublic_in?: Maybe<Array<Scalars['Boolean']>>
+  isCensored_eq?: Maybe<Scalars['Boolean']>
+  isCensored_in?: Maybe<Array<Scalars['Boolean']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  ownerMember?: Maybe<MembershipWhereInput>
+  ownerCuratorGroup?: Maybe<CuratorGroupWhereInput>
+  category?: Maybe<ChannelCategoryWhereInput>
+  coverPhoto?: Maybe<StorageDataObjectWhereInput>
+  avatarPhoto?: Maybe<StorageDataObjectWhereInput>
+  language?: Maybe<LanguageWhereInput>
+  videos_none?: Maybe<VideoWhereInput>
+  videos_some?: Maybe<VideoWhereInput>
+  videos_every?: Maybe<VideoWhereInput>
+  collaborators_none?: Maybe<MembershipWhereInput>
+  collaborators_some?: Maybe<MembershipWhereInput>
+  collaborators_every?: Maybe<MembershipWhereInput>
+  AND?: Maybe<Array<ChannelWhereInput>>
+  OR?: Maybe<Array<ChannelWhereInput>>
+}
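Note: ChannelWhereInput mixes scalar filters with nested relation filters (category, language, videos_*, collaborators_*). An illustrative filter value, type-checked against the declarations above; all concrete values are made up:

const channelFilter: ChannelWhereInput = {
  isPublic_eq: true,
  isCensored_eq: false,
  title_contains: 'joystream',
  category: { name_eq: 'Gaming' }, // nested ChannelCategoryWhereInput
  language: { iso_eq: 'en' }, // nested LanguageWhereInput
}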
+
+export type ChannelWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum Continent {
+  Af = 'AF',
+  Na = 'NA',
+  Oc = 'OC',
+  An = 'AN',
+  As = 'AS',
+  Eu = 'EU',
+  Sa = 'SA',
+}
+
+export type CuratorGroup = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Curators belonging to this group */
+  curatorIds: Array<Scalars['Int']>
+  /** Is group active or not */
+  isActive: Scalars['Boolean']
+  channels: Array<Channel>
+}
+
+export type CuratorGroupConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<CuratorGroupEdge>
+  pageInfo: PageInfo
+}
+
+export type CuratorGroupCreateInput = {
+  curatorIds: Array<Scalars['Int']>
+  isActive: Scalars['Boolean']
+}
+
+export type CuratorGroupEdge = {
+  node: CuratorGroup
+  cursor: Scalars['String']
+}
+
+export enum CuratorGroupOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsActiveAsc = 'isActive_ASC',
+  IsActiveDesc = 'isActive_DESC',
+}
+
+export type CuratorGroupUpdateInput = {
+  curatorIds?: Maybe<Array<Scalars['Int']>>
+  isActive?: Maybe<Scalars['Boolean']>
+}
+
+export type CuratorGroupWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  curatorIds_containsAll?: Maybe<Array<Scalars['Int']>>
+  curatorIds_containsNone?: Maybe<Array<Scalars['Int']>>
+  curatorIds_containsAny?: Maybe<Array<Scalars['Int']>>
+  isActive_eq?: Maybe<Scalars['Boolean']>
+  isActive_in?: Maybe<Array<Scalars['Boolean']>>
+  channels_none?: Maybe<ChannelWhereInput>
+  channels_some?: Maybe<ChannelWhereInput>
+  channels_every?: Maybe<ChannelWhereInput>
+  AND?: Maybe<Array<CuratorGroupWhereInput>>
+  OR?: Maybe<Array<CuratorGroupWhereInput>>
+}
+
+export type CuratorGroupWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DataObjectType =
+  | DataObjectTypeChannelAvatar
+  | DataObjectTypeChannelCoverPhoto
+  | DataObjectTypeVideoMedia
+  | DataObjectTypeVideoThumbnail
+  | DataObjectTypeUnknown
+
+export type DataObjectTypeChannelAvatar = {
+  /** Related channel entity */
+  channel?: Maybe<Channel>
+}
+
+export type DataObjectTypeChannelCoverPhoto = {
+  /** Related channel entity */
+  channel?: Maybe<Channel>
+}
+
+export type DataObjectTypeUnknown = {
+  phantom?: Maybe<Scalars['Int']>
+}
+
+export type DataObjectTypeVideoMedia = {
+  /** Related video entity */
+  video?: Maybe<Video>
+}
+
+export type DataObjectTypeVideoThumbnail = {
+  /** Related video entity */
+  video?: Maybe<Video>
+}
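Note: DataObjectType is a plain union, so consumers have to narrow it themselves. A sketch of two hypothetical type guards; at runtime the 'video' / 'channel' key is only present if it was selected in the query:

function isVideoObjectType(t: DataObjectType): t is DataObjectTypeVideoMedia | DataObjectTypeVideoThumbnail {
  // Both video-related members expose a `video` field; the other members do not.
  return 'video' in t
}

function isChannelObjectType(t: DataObjectType): t is DataObjectTypeChannelAvatar | DataObjectTypeChannelCoverPhoto {
  return 'channel' in t
}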
+
+export type DeleteResponse = {
+  id: Scalars['ID']
+}
+
+export type DistributionBucket = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  family: DistributionBucketFamily
+  familyId: Scalars['String']
+  /** Bucket index within the family */
+  bucketIndex: Scalars['Int']
+  operators: Array<DistributionBucketOperator>
+  /** Whether the bucket is accepting any new bags */
+  acceptingNewBags: Scalars['Boolean']
+  /** Whether the bucket is currently distributing content */
+  distributing: Scalars['Boolean']
+  bags: Array<StorageBag>
+}
+
+export type DistributionBucketConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketCreateInput = {
+  family: Scalars['ID']
+  bucketIndex: Scalars['Float']
+  acceptingNewBags: Scalars['Boolean']
+  distributing: Scalars['Boolean']
+}
+
+export type DistributionBucketEdge = {
+  node: DistributionBucket
+  cursor: Scalars['String']
+}
+
+export type DistributionBucketFamily = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  metadata?: Maybe<DistributionBucketFamilyMetadata>
+  metadataId?: Maybe<Scalars['String']>
+  buckets: Array<DistributionBucket>
+}
+
+export type DistributionBucketFamilyConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketFamilyEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketFamilyCreateInput = {
+  metadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketFamilyEdge = {
+  node: DistributionBucketFamily
+  cursor: Scalars['String']
+}
+
+export type DistributionBucketFamilyGeographicArea = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Geographical area (continent / country / subdivision) */
+  area: GeographicalArea
+  distributionBucketFamilyMetadata: DistributionBucketFamilyMetadata
+  distributionBucketFamilyMetadataId: Scalars['String']
+}
+
+export type DistributionBucketFamilyGeographicAreaConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketFamilyGeographicAreaEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketFamilyGeographicAreaCreateInput = {
+  area: Scalars['JSONObject']
+  distributionBucketFamilyMetadata: Scalars['ID']
+}
+
+export type DistributionBucketFamilyGeographicAreaEdge = {
+  node: DistributionBucketFamilyGeographicArea
+  cursor: Scalars['String']
+}
+
+export enum DistributionBucketFamilyGeographicAreaOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  DistributionBucketFamilyMetadataAsc = 'distributionBucketFamilyMetadata_ASC',
+  DistributionBucketFamilyMetadataDesc = 'distributionBucketFamilyMetadata_DESC',
+}
+
+export type DistributionBucketFamilyGeographicAreaUpdateInput = {
+  area?: Maybe<Scalars['JSONObject']>
+  distributionBucketFamilyMetadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketFamilyGeographicAreaWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  area_json?: Maybe<Scalars['JSONObject']>
+  distributionBucketFamilyMetadata?: Maybe<DistributionBucketFamilyMetadataWhereInput>
+  AND?: Maybe<Array<DistributionBucketFamilyGeographicAreaWhereInput>>
+  OR?: Maybe<Array<DistributionBucketFamilyGeographicAreaWhereInput>>
+}
+
+export type DistributionBucketFamilyGeographicAreaWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DistributionBucketFamilyMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Name of the geographical region covered by the family (e.g. us-east-1) */
+  region?: Maybe<Scalars['String']>
+  /** Optional, more specific description of the region covered by the family */
+  description?: Maybe<Scalars['String']>
+  areas: Array<DistributionBucketFamilyGeographicArea>
+  /** List of targets (hosts/IPs) best suited for latency measurements for the family */
+  latencyTestTargets?: Maybe<Array<Scalars['String']>>
+  distributionbucketfamilymetadata?: Maybe<Array<DistributionBucketFamily>>
+}
+
+export type DistributionBucketFamilyMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketFamilyMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketFamilyMetadataCreateInput = {
+  region?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  latencyTestTargets?: Maybe<Array<Scalars['String']>>
+}
+
+export type DistributionBucketFamilyMetadataEdge = {
+  node: DistributionBucketFamilyMetadata
+  cursor: Scalars['String']
+}
+
+export enum DistributionBucketFamilyMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  RegionAsc = 'region_ASC',
+  RegionDesc = 'region_DESC',
+  DescriptionAsc = 'description_ASC',
+  DescriptionDesc = 'description_DESC',
+}
+
+export type DistributionBucketFamilyMetadataUpdateInput = {
+  region?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  latencyTestTargets?: Maybe<Array<Scalars['String']>>
+}
+
+export type DistributionBucketFamilyMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  region_eq?: Maybe<Scalars['String']>
+  region_contains?: Maybe<Scalars['String']>
+  region_startsWith?: Maybe<Scalars['String']>
+  region_endsWith?: Maybe<Scalars['String']>
+  region_in?: Maybe<Array<Scalars['String']>>
+  description_eq?: Maybe<Scalars['String']>
+  description_contains?: Maybe<Scalars['String']>
+  description_startsWith?: Maybe<Scalars['String']>
+  description_endsWith?: Maybe<Scalars['String']>
+  description_in?: Maybe<Array<Scalars['String']>>
+  latencyTestTargets_containsAll?: Maybe<Array<Scalars['String']>>
+  latencyTestTargets_containsNone?: Maybe<Array<Scalars['String']>>
+  latencyTestTargets_containsAny?: Maybe<Array<Scalars['String']>>
+  areas_none?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>
+  areas_some?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>
+  areas_every?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>
+  distributionbucketfamilymetadata_none?: Maybe<DistributionBucketFamilyWhereInput>
+  distributionbucketfamilymetadata_some?: Maybe<DistributionBucketFamilyWhereInput>
+  distributionbucketfamilymetadata_every?: Maybe<DistributionBucketFamilyWhereInput>
+  AND?: Maybe<Array<DistributionBucketFamilyMetadataWhereInput>>
+  OR?: Maybe<Array<DistributionBucketFamilyMetadataWhereInput>>
+}
+
+export type DistributionBucketFamilyMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum DistributionBucketFamilyOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  MetadataAsc = 'metadata_ASC',
+  MetadataDesc = 'metadata_DESC',
+}
+
+export type DistributionBucketFamilyUpdateInput = {
+  metadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketFamilyWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  metadata?: Maybe<DistributionBucketFamilyMetadataWhereInput>
+  buckets_none?: Maybe<DistributionBucketWhereInput>
+  buckets_some?: Maybe<DistributionBucketWhereInput>
+  buckets_every?: Maybe<DistributionBucketWhereInput>
+  AND?: Maybe<Array<DistributionBucketFamilyWhereInput>>
+  OR?: Maybe<Array<DistributionBucketFamilyWhereInput>>
+}
+
+export type DistributionBucketFamilyWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DistributionBucketOperator = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  distributionBucket: DistributionBucket
+  distributionBucketId: Scalars['String']
+  /** ID of the distribution group worker */
+  workerId: Scalars['Int']
+  /** Current operator status */
+  status: DistributionBucketOperatorStatus
+  metadata?: Maybe<DistributionBucketOperatorMetadata>
+  metadataId?: Maybe<Scalars['String']>
+}
+
+export type DistributionBucketOperatorConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketOperatorEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketOperatorCreateInput = {
+  distributionBucket: Scalars['ID']
+  workerId: Scalars['Float']
+  status: DistributionBucketOperatorStatus
+  metadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketOperatorEdge = {
+  node: DistributionBucketOperator
+  cursor: Scalars['String']
+}
+
+export type DistributionBucketOperatorMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Root distributor node api endpoint */
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<NodeLocationMetadata>
+  nodeLocationId?: Maybe<Scalars['String']>
+  /** Additional information about the node/operator */
+  extra?: Maybe<Scalars['String']>
+  distributionbucketoperatormetadata?: Maybe<Array<DistributionBucketOperator>>
+}
+
+export type DistributionBucketOperatorMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketOperatorMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketOperatorMetadataCreateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<Scalars['ID']>
+  extra?: Maybe<Scalars['String']>
+}
+
+export type DistributionBucketOperatorMetadataEdge = {
+  node: DistributionBucketOperatorMetadata
+  cursor: Scalars['String']
+}
+
+export enum DistributionBucketOperatorMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NodeEndpointAsc = 'nodeEndpoint_ASC',
+  NodeEndpointDesc = 'nodeEndpoint_DESC',
+  NodeLocationAsc = 'nodeLocation_ASC',
+  NodeLocationDesc = 'nodeLocation_DESC',
+  ExtraAsc = 'extra_ASC',
+  ExtraDesc = 'extra_DESC',
+}
+
+export type DistributionBucketOperatorMetadataUpdateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<Scalars['ID']>
+  extra?: Maybe<Scalars['String']>
+}
+
+export type DistributionBucketOperatorMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  nodeEndpoint_eq?: Maybe<Scalars['String']>
+  nodeEndpoint_contains?: Maybe<Scalars['String']>
+  nodeEndpoint_startsWith?: Maybe<Scalars['String']>
+  nodeEndpoint_endsWith?: Maybe<Scalars['String']>
+  nodeEndpoint_in?: Maybe<Array<Scalars['String']>>
+  extra_eq?: Maybe<Scalars['String']>
+  extra_contains?: Maybe<Scalars['String']>
+  extra_startsWith?: Maybe<Scalars['String']>
+  extra_endsWith?: Maybe<Scalars['String']>
+  extra_in?: Maybe<Array<Scalars['String']>>
+  nodeLocation?: Maybe<NodeLocationMetadataWhereInput>
+  distributionbucketoperatormetadata_none?: Maybe<DistributionBucketOperatorWhereInput>
+  distributionbucketoperatormetadata_some?: Maybe<DistributionBucketOperatorWhereInput>
+  distributionbucketoperatormetadata_every?: Maybe<DistributionBucketOperatorWhereInput>
+  AND?: Maybe<Array<DistributionBucketOperatorMetadataWhereInput>>
+  OR?: Maybe<Array<DistributionBucketOperatorMetadataWhereInput>>
+}
+
+export type DistributionBucketOperatorMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum DistributionBucketOperatorOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  DistributionBucketAsc = 'distributionBucket_ASC',
+  DistributionBucketDesc = 'distributionBucket_DESC',
+  WorkerIdAsc = 'workerId_ASC',
+  WorkerIdDesc = 'workerId_DESC',
+  StatusAsc = 'status_ASC',
+  StatusDesc = 'status_DESC',
+  MetadataAsc = 'metadata_ASC',
+  MetadataDesc = 'metadata_DESC',
+}
+
+export enum DistributionBucketOperatorStatus {
+  Invited = 'INVITED',
+  Active = 'ACTIVE',
+}
+
+export type DistributionBucketOperatorUpdateInput = {
+  distributionBucket?: Maybe<Scalars['ID']>
+  workerId?: Maybe<Scalars['Float']>
+  status?: Maybe<DistributionBucketOperatorStatus>
+  metadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketOperatorWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  workerId_eq?: Maybe<Scalars['Int']>
+  workerId_gt?: Maybe<Scalars['Int']>
+  workerId_gte?: Maybe<Scalars['Int']>
+  workerId_lt?: Maybe<Scalars['Int']>
+  workerId_lte?: Maybe<Scalars['Int']>
+  workerId_in?: Maybe<Array<Scalars['Int']>>
+  status_eq?: Maybe<DistributionBucketOperatorStatus>
+  status_in?: Maybe<Array<DistributionBucketOperatorStatus>>
+  distributionBucket?: Maybe<DistributionBucketWhereInput>
+  metadata?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  AND?: Maybe<Array<DistributionBucketOperatorWhereInput>>
+  OR?: Maybe<Array<DistributionBucketOperatorWhereInput>>
+}
+
+export type DistributionBucketOperatorWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum DistributionBucketOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  FamilyAsc = 'family_ASC',
+  FamilyDesc = 'family_DESC',
+  BucketIndexAsc = 'bucketIndex_ASC',
+  BucketIndexDesc = 'bucketIndex_DESC',
+  AcceptingNewBagsAsc = 'acceptingNewBags_ASC',
+  AcceptingNewBagsDesc = 'acceptingNewBags_DESC',
+  DistributingAsc = 'distributing_ASC',
+  DistributingDesc = 'distributing_DESC',
+}
+
+export type DistributionBucketUpdateInput = {
+  family?: Maybe<Scalars['ID']>
+  bucketIndex?: Maybe<Scalars['Float']>
+  acceptingNewBags?: Maybe<Scalars['Boolean']>
+  distributing?: Maybe<Scalars['Boolean']>
+}
+
+export type DistributionBucketWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  bucketIndex_eq?: Maybe<Scalars['Int']>
+  bucketIndex_gt?: Maybe<Scalars['Int']>
+  bucketIndex_gte?: Maybe<Scalars['Int']>
+  bucketIndex_lt?: Maybe<Scalars['Int']>
+  bucketIndex_lte?: Maybe<Scalars['Int']>
+  bucketIndex_in?: Maybe<Array<Scalars['Int']>>
+  acceptingNewBags_eq?: Maybe<Scalars['Boolean']>
+  acceptingNewBags_in?: Maybe<Array<Scalars['Boolean']>>
+  distributing_eq?: Maybe<Scalars['Boolean']>
+  distributing_in?: Maybe<Array<Scalars['Boolean']>>
+  family?: Maybe<DistributionBucketFamilyWhereInput>
+  operators_none?: Maybe<DistributionBucketOperatorWhereInput>
+  operators_some?: Maybe<DistributionBucketOperatorWhereInput>
+  operators_every?: Maybe<DistributionBucketOperatorWhereInput>
+  bags_none?: Maybe<StorageBagWhereInput>
+  bags_some?: Maybe<StorageBagWhereInput>
+  bags_every?: Maybe<StorageBagWhereInput>
+  AND?: Maybe<Array<DistributionBucketWhereInput>>
+  OR?: Maybe<Array<DistributionBucketWhereInput>>
+}
+
+export type DistributionBucketWhereUniqueInput = {
+  id: Scalars['ID']
+}
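Note: an illustrative DistributionBucketWhereInput value, type-checked against the declarations above with made-up values: buckets that are distributing, still accepting new bags, and backed by at least one active operator whose advertised endpoint looks like an HTTP URL (the same nodeEndpoint_contains pattern used in the operator query earlier in this diff):

const activeBucketFilter: DistributionBucketWhereInput = {
  distributing_eq: true,
  acceptingNewBags_eq: true,
  operators_some: {
    status_eq: DistributionBucketOperatorStatus.Active,
    metadata: { nodeEndpoint_contains: 'http' },
  },
}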
+
+export type GeoCoordinates = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  latitude: Scalars['Float']
+  longitude: Scalars['Float']
+  nodelocationmetadatacoordinates?: Maybe<Array<NodeLocationMetadata>>
+}
+
+export type GeoCoordinatesConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<GeoCoordinatesEdge>
+  pageInfo: PageInfo
+}
+
+export type GeoCoordinatesCreateInput = {
+  latitude: Scalars['Float']
+  longitude: Scalars['Float']
+}
+
+export type GeoCoordinatesEdge = {
+  node: GeoCoordinates
+  cursor: Scalars['String']
+}
+
+export enum GeoCoordinatesOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  LatitudeAsc = 'latitude_ASC',
+  LatitudeDesc = 'latitude_DESC',
+  LongitudeAsc = 'longitude_ASC',
+  LongitudeDesc = 'longitude_DESC',
+}
+
+export type GeoCoordinatesUpdateInput = {
+  latitude?: Maybe<Scalars['Float']>
+  longitude?: Maybe<Scalars['Float']>
+}
+
+export type GeoCoordinatesWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  latitude_eq?: Maybe<Scalars['Float']>
+  latitude_gt?: Maybe<Scalars['Float']>
+  latitude_gte?: Maybe<Scalars['Float']>
+  latitude_lt?: Maybe<Scalars['Float']>
+  latitude_lte?: Maybe<Scalars['Float']>
+  latitude_in?: Maybe<Array<Scalars['Float']>>
+  longitude_eq?: Maybe<Scalars['Float']>
+  longitude_gt?: Maybe<Scalars['Float']>
+  longitude_gte?: Maybe<Scalars['Float']>
+  longitude_lt?: Maybe<Scalars['Float']>
+  longitude_lte?: Maybe<Scalars['Float']>
+  longitude_in?: Maybe<Array<Scalars['Float']>>
+  nodelocationmetadatacoordinates_none?: Maybe<NodeLocationMetadataWhereInput>
+  nodelocationmetadatacoordinates_some?: Maybe<NodeLocationMetadataWhereInput>
+  nodelocationmetadatacoordinates_every?: Maybe<NodeLocationMetadataWhereInput>
+  AND?: Maybe<Array<GeoCoordinatesWhereInput>>
+  OR?: Maybe<Array<GeoCoordinatesWhereInput>>
+}
+
+export type GeoCoordinatesWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type GeographicalArea = GeographicalAreaContinent | GeographicalAreaCountry | GeographicalAreaSubdivistion
+
+export type GeographicalAreaContinent = {
+  code?: Maybe<Continent>
+}
+
+export type GeographicalAreaContinentCreateInput = {
+  code?: Maybe<Continent>
+}
+
+export type GeographicalAreaContinentUpdateInput = {
+  code?: Maybe<Continent>
+}
+
+export type GeographicalAreaContinentWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  code_eq?: Maybe<Continent>
+  code_in?: Maybe<Array<Continent>>
+  AND?: Maybe<Array<GeographicalAreaContinentWhereInput>>
+  OR?: Maybe<Array<GeographicalAreaContinentWhereInput>>
+}
+
+export type GeographicalAreaContinentWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type GeographicalAreaCountry = {
+  /** ISO 3166-1 alpha-2 country code */
+  code?: Maybe<Scalars['String']>
+}
+
+export type GeographicalAreaSubdivistion = {
+  /** ISO 3166-2 subdivision code */
+  code?: Maybe<Scalars['String']>
+}
+
+export type Language = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Language identifier ISO 639-1 */
+  iso: Scalars['String']
+  createdInBlock: Scalars['Int']
+  channellanguage?: Maybe<Array<Channel>>
+  videolanguage?: Maybe<Array<Video>>
+}
+
+export type LanguageConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<LanguageEdge>
+  pageInfo: PageInfo
+}
+
+export type LanguageCreateInput = {
+  iso: Scalars['String']
+  createdInBlock: Scalars['Float']
+}
+
+export type LanguageEdge = {
+  node: Language
+  cursor: Scalars['String']
+}
+
+export enum LanguageOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsoAsc = 'iso_ASC',
+  IsoDesc = 'iso_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type LanguageUpdateInput = {
+  iso?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type LanguageWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  iso_eq?: Maybe<Scalars['String']>
+  iso_contains?: Maybe<Scalars['String']>
+  iso_startsWith?: Maybe<Scalars['String']>
+  iso_endsWith?: Maybe<Scalars['String']>
+  iso_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  channellanguage_none?: Maybe<ChannelWhereInput>
+  channellanguage_some?: Maybe<ChannelWhereInput>
+  channellanguage_every?: Maybe<ChannelWhereInput>
+  videolanguage_none?: Maybe<VideoWhereInput>
+  videolanguage_some?: Maybe<VideoWhereInput>
+  videolanguage_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<LanguageWhereInput>>
+  OR?: Maybe<Array<LanguageWhereInput>>
+}
+
+export type LanguageWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type License = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** License code defined by Joystream */
+  code?: Maybe<Scalars['Int']>
+  /** Attribution (if required by the license) */
+  attribution?: Maybe<Scalars['String']>
+  /** Custom license content */
+  customText?: Maybe<Scalars['String']>
+  videolicense?: Maybe<Array<Video>>
+}
+
+export type LicenseConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<LicenseEdge>
+  pageInfo: PageInfo
+}
+
+export type LicenseCreateInput = {
+  code?: Maybe<Scalars['Float']>
+  attribution?: Maybe<Scalars['String']>
+  customText?: Maybe<Scalars['String']>
+}
+
+export type LicenseEdge = {
+  node: License
+  cursor: Scalars['String']
+}
+
+export enum LicenseOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CodeAsc = 'code_ASC',
+  CodeDesc = 'code_DESC',
+  AttributionAsc = 'attribution_ASC',
+  AttributionDesc = 'attribution_DESC',
+  CustomTextAsc = 'customText_ASC',
+  CustomTextDesc = 'customText_DESC',
+}
+
+export type LicenseUpdateInput = {
+  code?: Maybe<Scalars['Float']>
+  attribution?: Maybe<Scalars['String']>
+  customText?: Maybe<Scalars['String']>
+}
+
+export type LicenseWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  code_eq?: Maybe<Scalars['Int']>
+  code_gt?: Maybe<Scalars['Int']>
+  code_gte?: Maybe<Scalars['Int']>
+  code_lt?: Maybe<Scalars['Int']>
+  code_lte?: Maybe<Scalars['Int']>
+  code_in?: Maybe<Array<Scalars['Int']>>
+  attribution_eq?: Maybe<Scalars['String']>
+  attribution_contains?: Maybe<Scalars['String']>
+  attribution_startsWith?: Maybe<Scalars['String']>
+  attribution_endsWith?: Maybe<Scalars['String']>
+  attribution_in?: Maybe<Array<Scalars['String']>>
+  customText_eq?: Maybe<Scalars['String']>
+  customText_contains?: Maybe<Scalars['String']>
+  customText_startsWith?: Maybe<Scalars['String']>
+  customText_endsWith?: Maybe<Scalars['String']>
+  customText_in?: Maybe<Array<Scalars['String']>>
+  videolicense_none?: Maybe<VideoWhereInput>
+  videolicense_some?: Maybe<VideoWhereInput>
+  videolicense_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<LicenseWhereInput>>
+  OR?: Maybe<Array<LicenseWhereInput>>
+}
+
+export type LicenseWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type MembersByHandleFtsOutput = {
+  item: MembersByHandleSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type MembersByHandleSearchResult = Membership
+
+/** Stored information about a registered user */
+export type Membership = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** The unique handle chosen by member */
+  handle: Scalars['String']
+  /** A URL to the member's avatar image */
+  avatarUri?: Maybe<Scalars['String']>
+  /** Short text chosen by member to share information about themselves */
+  about?: Maybe<Scalars['String']>
+  /** Member's controller account id */
+  controllerAccount: Scalars['String']
+  /** Member's root account id */
+  rootAccount: Scalars['String']
+  /** Block number when the member was registered */
+  createdInBlock: Scalars['Int']
+  /** How the member was registered */
+  entry: MembershipEntryMethod
+  /** The type of subscription the member has purchased if any. */
+  subscription?: Maybe<Scalars['Int']>
+  channels: Array<Channel>
+  collaboratorInChannels: Array<Channel>
+}
+
+export type MembershipConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<MembershipEdge>
+  pageInfo: PageInfo
+}
+
+export type MembershipCreateInput = {
+  handle: Scalars['String']
+  avatarUri?: Maybe<Scalars['String']>
+  about?: Maybe<Scalars['String']>
+  controllerAccount: Scalars['String']
+  rootAccount: Scalars['String']
+  createdInBlock: Scalars['Float']
+  entry: MembershipEntryMethod
+  subscription?: Maybe<Scalars['Float']>
+}
+
+export type MembershipEdge = {
+  node: Membership
+  cursor: Scalars['String']
+}
+
+export enum MembershipEntryMethod {
+  Paid = 'PAID',
+  Screening = 'SCREENING',
+  Genesis = 'GENESIS',
+}
+
+export enum MembershipOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  HandleAsc = 'handle_ASC',
+  HandleDesc = 'handle_DESC',
+  AvatarUriAsc = 'avatarUri_ASC',
+  AvatarUriDesc = 'avatarUri_DESC',
+  AboutAsc = 'about_ASC',
+  AboutDesc = 'about_DESC',
+  ControllerAccountAsc = 'controllerAccount_ASC',
+  ControllerAccountDesc = 'controllerAccount_DESC',
+  RootAccountAsc = 'rootAccount_ASC',
+  RootAccountDesc = 'rootAccount_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+  EntryAsc = 'entry_ASC',
+  EntryDesc = 'entry_DESC',
+  SubscriptionAsc = 'subscription_ASC',
+  SubscriptionDesc = 'subscription_DESC',
+}
+
+export type MembershipUpdateInput = {
+  handle?: Maybe<Scalars['String']>
+  avatarUri?: Maybe<Scalars['String']>
+  about?: Maybe<Scalars['String']>
+  controllerAccount?: Maybe<Scalars['String']>
+  rootAccount?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+  entry?: Maybe<MembershipEntryMethod>
+  subscription?: Maybe<Scalars['Float']>
+}
+
+export type MembershipWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  handle_eq?: Maybe<Scalars['String']>
+  handle_contains?: Maybe<Scalars['String']>
+  handle_startsWith?: Maybe<Scalars['String']>
+  handle_endsWith?: Maybe<Scalars['String']>
+  handle_in?: Maybe<Array<Scalars['String']>>
+  avatarUri_eq?: Maybe<Scalars['String']>
+  avatarUri_contains?: Maybe<Scalars['String']>
+  avatarUri_startsWith?: Maybe<Scalars['String']>
+  avatarUri_endsWith?: Maybe<Scalars['String']>
+  avatarUri_in?: Maybe<Array<Scalars['String']>>
+  about_eq?: Maybe<Scalars['String']>
+  about_contains?: Maybe<Scalars['String']>
+  about_startsWith?: Maybe<Scalars['String']>
+  about_endsWith?: Maybe<Scalars['String']>
+  about_in?: Maybe<Array<Scalars['String']>>
+  controllerAccount_eq?: Maybe<Scalars['String']>
+  controllerAccount_contains?: Maybe<Scalars['String']>
+  controllerAccount_startsWith?: Maybe<Scalars['String']>
+  controllerAccount_endsWith?: Maybe<Scalars['String']>
+  controllerAccount_in?: Maybe<Array<Scalars['String']>>
+  rootAccount_eq?: Maybe<Scalars['String']>
+  rootAccount_contains?: Maybe<Scalars['String']>
+  rootAccount_startsWith?: Maybe<Scalars['String']>
+  rootAccount_endsWith?: Maybe<Scalars['String']>
+  rootAccount_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  entry_eq?: Maybe<MembershipEntryMethod>
+  entry_in?: Maybe<Array<MembershipEntryMethod>>
+  subscription_eq?: Maybe<Scalars['Int']>
+  subscription_gt?: Maybe<Scalars['Int']>
+  subscription_gte?: Maybe<Scalars['Int']>
+  subscription_lt?: Maybe<Scalars['Int']>
+  subscription_lte?: Maybe<Scalars['Int']>
+  subscription_in?: Maybe<Array<Scalars['Int']>>
+  channels_none?: Maybe<ChannelWhereInput>
+  channels_some?: Maybe<ChannelWhereInput>
+  channels_every?: Maybe<ChannelWhereInput>
+  collaboratorInChannels_none?: Maybe<ChannelWhereInput>
+  collaboratorInChannels_some?: Maybe<ChannelWhereInput>
+  collaboratorInChannels_every?: Maybe<ChannelWhereInput>
+  AND?: Maybe<Array<MembershipWhereInput>>
+  OR?: Maybe<Array<MembershipWhereInput>>
+}
+
+export type MembershipWhereUniqueInput = {
+  id?: Maybe<Scalars['ID']>
+  handle?: Maybe<Scalars['String']>
+}
+
+export type NodeLocationMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** ISO 3166-1 alpha-2 country code (2 letters) */
+  countryCode?: Maybe<Scalars['String']>
+  /** City name */
+  city?: Maybe<Scalars['String']>
+  coordinates?: Maybe<GeoCoordinates>
+  coordinatesId?: Maybe<Scalars['String']>
+  distributionbucketoperatormetadatanodeLocation?: Maybe<Array<DistributionBucketOperatorMetadata>>
+  storagebucketoperatormetadatanodeLocation?: Maybe<Array<StorageBucketOperatorMetadata>>
+}
+
+export type NodeLocationMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<NodeLocationMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type NodeLocationMetadataCreateInput = {
+  countryCode?: Maybe<Scalars['String']>
+  city?: Maybe<Scalars['String']>
+  coordinates?: Maybe<Scalars['ID']>
+}
+
+export type NodeLocationMetadataEdge = {
+  node: NodeLocationMetadata
+  cursor: Scalars['String']
+}
+
+export enum NodeLocationMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CountryCodeAsc = 'countryCode_ASC',
+  CountryCodeDesc = 'countryCode_DESC',
+  CityAsc = 'city_ASC',
+  CityDesc = 'city_DESC',
+  CoordinatesAsc = 'coordinates_ASC',
+  CoordinatesDesc = 'coordinates_DESC',
+}
+
+export type NodeLocationMetadataUpdateInput = {
+  countryCode?: Maybe<Scalars['String']>
+  city?: Maybe<Scalars['String']>
+  coordinates?: Maybe<Scalars['ID']>
+}
+
+export type NodeLocationMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  countryCode_eq?: Maybe<Scalars['String']>
+  countryCode_contains?: Maybe<Scalars['String']>
+  countryCode_startsWith?: Maybe<Scalars['String']>
+  countryCode_endsWith?: Maybe<Scalars['String']>
+  countryCode_in?: Maybe<Array<Scalars['String']>>
+  city_eq?: Maybe<Scalars['String']>
+  city_contains?: Maybe<Scalars['String']>
+  city_startsWith?: Maybe<Scalars['String']>
+  city_endsWith?: Maybe<Scalars['String']>
+  city_in?: Maybe<Array<Scalars['String']>>
+  coordinates?: Maybe<GeoCoordinatesWhereInput>
+  distributionbucketoperatormetadatanodeLocation_none?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  distributionbucketoperatormetadatanodeLocation_some?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  distributionbucketoperatormetadatanodeLocation_every?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  storagebucketoperatormetadatanodeLocation_none?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  storagebucketoperatormetadatanodeLocation_some?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  storagebucketoperatormetadatanodeLocation_every?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  AND?: Maybe<Array<NodeLocationMetadataWhereInput>>
+  OR?: Maybe<Array<NodeLocationMetadataWhereInput>>
+}
+
+export type NodeLocationMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type PageInfo = {
+  hasNextPage: Scalars['Boolean']
+  hasPreviousPage: Scalars['Boolean']
+  startCursor?: Maybe<Scalars['String']>
+  endCursor?: Maybe<Scalars['String']>
+}
+
+export type ProcessorState = {
+  lastCompleteBlock: Scalars['Float']
+  lastProcessedEvent: Scalars['String']
+  indexerHead: Scalars['Float']
+  chainHead: Scalars['Float']
+}
+
+export type Query = {
+  channelCategories: Array<ChannelCategory>
+  channelCategoryByUniqueInput?: Maybe<ChannelCategory>
+  channelCategoriesConnection: ChannelCategoryConnection
+  channels: Array<Channel>
+  channelByUniqueInput?: Maybe<Channel>
+  channelsConnection: ChannelConnection
+  curatorGroups: Array<CuratorGroup>
+  curatorGroupByUniqueInput?: Maybe<CuratorGroup>
+  curatorGroupsConnection: CuratorGroupConnection
+  distributionBucketFamilyGeographicAreas: Array<DistributionBucketFamilyGeographicArea>
+  distributionBucketFamilyGeographicAreaByUniqueInput?: Maybe<DistributionBucketFamilyGeographicArea>
+  distributionBucketFamilyGeographicAreasConnection: DistributionBucketFamilyGeographicAreaConnection
+  distributionBucketFamilyMetadata: Array<DistributionBucketFamilyMetadata>
+  distributionBucketFamilyMetadataByUniqueInput?: Maybe<DistributionBucketFamilyMetadata>
+  distributionBucketFamilyMetadataConnection: DistributionBucketFamilyMetadataConnection
+  distributionBucketFamilies: Array<DistributionBucketFamily>
+  distributionBucketFamilyByUniqueInput?: Maybe<DistributionBucketFamily>
+  distributionBucketFamiliesConnection: DistributionBucketFamilyConnection
+  distributionBucketOperatorMetadata: Array<DistributionBucketOperatorMetadata>
+  distributionBucketOperatorMetadataByUniqueInput?: Maybe<DistributionBucketOperatorMetadata>
+  distributionBucketOperatorMetadataConnection: DistributionBucketOperatorMetadataConnection
+  distributionBucketOperators: Array<DistributionBucketOperator>
+  distributionBucketOperatorByUniqueInput?: Maybe<DistributionBucketOperator>
+  distributionBucketOperatorsConnection: DistributionBucketOperatorConnection
+  distributionBuckets: Array<DistributionBucket>
+  distributionBucketByUniqueInput?: Maybe<DistributionBucket>
+  distributionBucketsConnection: DistributionBucketConnection
+  geoCoordinates: Array<GeoCoordinates>
+  geoCoordinatesByUniqueInput?: Maybe<GeoCoordinates>
+  geoCoordinatesConnection: GeoCoordinatesConnection
+  languages: Array<Language>
+  languageByUniqueInput?: Maybe<Language>
+  languagesConnection: LanguageConnection
+  licenses: Array<License>
+  licenseByUniqueInput?: Maybe<License>
+  licensesConnection: LicenseConnection
+  memberships: Array<Membership>
+  membershipByUniqueInput?: Maybe<Membership>
+  membershipsConnection: MembershipConnection
+  nodeLocationMetadata: Array<NodeLocationMetadata>
+  nodeLocationMetadataByUniqueInput?: Maybe<NodeLocationMetadata>
+  nodeLocationMetadataConnection: NodeLocationMetadataConnection
+  channelCategoriesByName: Array<ChannelCategoriesByNameFtsOutput>
+  membersByHandle: Array<MembersByHandleFtsOutput>
+  search: Array<SearchFtsOutput>
+  videoCategoriesByName: Array<VideoCategoriesByNameFtsOutput>
+  storageBags: Array<StorageBag>
+  storageBagByUniqueInput?: Maybe<StorageBag>
+  storageBagsConnection: StorageBagConnection
+  storageBucketOperatorMetadata: Array<StorageBucketOperatorMetadata>
+  storageBucketOperatorMetadataByUniqueInput?: Maybe<StorageBucketOperatorMetadata>
+  storageBucketOperatorMetadataConnection: StorageBucketOperatorMetadataConnection
+  storageBuckets: Array<StorageBucket>
+  storageBucketByUniqueInput?: Maybe<StorageBucket>
+  storageBucketsConnection: StorageBucketConnection
+  storageDataObjects: Array<StorageDataObject>
+  storageDataObjectByUniqueInput?: Maybe<StorageDataObject>
+  storageDataObjectsConnection: StorageDataObjectConnection
+  storageSystemParameters: Array<StorageSystemParameters>
+  storageSystemParametersByUniqueInput?: Maybe<StorageSystemParameters>
+  storageSystemParametersConnection: StorageSystemParametersConnection
+  videoCategories: Array<VideoCategory>
+  videoCategoryByUniqueInput?: Maybe<VideoCategory>
+  videoCategoriesConnection: VideoCategoryConnection
+  videoMediaEncodings: Array<VideoMediaEncoding>
+  videoMediaEncodingByUniqueInput?: Maybe<VideoMediaEncoding>
+  videoMediaEncodingsConnection: VideoMediaEncodingConnection
+  videoMediaMetadata: Array<VideoMediaMetadata>
+  videoMediaMetadataByUniqueInput?: Maybe<VideoMediaMetadata>
+  videoMediaMetadataConnection: VideoMediaMetadataConnection
+  videos: Array<Video>
+  videoByUniqueInput?: Maybe<Video>
+  videosConnection: VideoConnection
+  workers: Array<Worker>
+  workerByUniqueInput?: Maybe<Worker>
+  workersConnection: WorkerConnection
+}
+
+export type QueryChannelCategoriesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<ChannelCategoryWhereInput>
+  orderBy?: Maybe<Array<ChannelCategoryOrderByInput>>
+}
+
+export type QueryChannelCategoryByUniqueInputArgs = {
+  where: ChannelCategoryWhereUniqueInput
+}
+
+export type QueryChannelCategoriesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<ChannelCategoryWhereInput>
+  orderBy?: Maybe<Array<ChannelCategoryOrderByInput>>
+}
+
+export type QueryChannelsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<ChannelWhereInput>
+  orderBy?: Maybe<Array<ChannelOrderByInput>>
+}
+
+export type QueryChannelByUniqueInputArgs = {
+  where: ChannelWhereUniqueInput
+}
+
+export type QueryChannelsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<ChannelWhereInput>
+  orderBy?: Maybe<Array<ChannelOrderByInput>>
+}
+
+export type QueryCuratorGroupsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<CuratorGroupWhereInput>
+  orderBy?: Maybe<Array<CuratorGroupOrderByInput>>
+}
+
+export type QueryCuratorGroupByUniqueInputArgs = {
+  where: CuratorGroupWhereUniqueInput
+}
+
+export type QueryCuratorGroupsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<CuratorGroupWhereInput>
+  orderBy?: Maybe<Array<CuratorGroupOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyGeographicAreasArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyGeographicAreaOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyGeographicAreaByUniqueInputArgs = {
+  where: DistributionBucketFamilyGeographicAreaWhereUniqueInput
+}
+
+export type QueryDistributionBucketFamilyGeographicAreasConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyGeographicAreaOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketFamilyMetadataWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyMetadataOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyMetadataByUniqueInputArgs = {
+  where: DistributionBucketFamilyMetadataWhereUniqueInput
+}
+
+export type QueryDistributionBucketFamilyMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketFamilyMetadataWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyMetadataOrderByInput>>
+}
+
+export type QueryDistributionBucketFamiliesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketFamilyWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyByUniqueInputArgs = {
+  where: DistributionBucketFamilyWhereUniqueInput
+}
+
+export type QueryDistributionBucketFamiliesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketFamilyWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyOrderByInput>>
+}
+
+export type QueryDistributionBucketOperatorMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOperatorMetadataOrderByInput>>
+}
+
+export type QueryDistributionBucketOperatorMetadataByUniqueInputArgs = {
+  where: DistributionBucketOperatorMetadataWhereUniqueInput
+}
+
+export type QueryDistributionBucketOperatorMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOperatorMetadataOrderByInput>>
+}
+
+export type QueryDistributionBucketOperatorsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketOperatorWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOperatorOrderByInput>>
+}
+
+export type QueryDistributionBucketOperatorByUniqueInputArgs = {
+  where: DistributionBucketOperatorWhereUniqueInput
+}
+
+export type QueryDistributionBucketOperatorsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketOperatorWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOperatorOrderByInput>>
+}
+
+export type QueryDistributionBucketsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOrderByInput>>
+}
+
+export type QueryDistributionBucketByUniqueInputArgs = {
+  where: DistributionBucketWhereUniqueInput
+}
+
+export type QueryDistributionBucketsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOrderByInput>>
+}
+
+export type QueryGeoCoordinatesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<GeoCoordinatesWhereInput>
+  orderBy?: Maybe<Array<GeoCoordinatesOrderByInput>>
+}
+
+export type QueryGeoCoordinatesByUniqueInputArgs = {
+  where: GeoCoordinatesWhereUniqueInput
+}
+
+export type QueryGeoCoordinatesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<GeoCoordinatesWhereInput>
+  orderBy?: Maybe<Array<GeoCoordinatesOrderByInput>>
+}
+
+export type QueryLanguagesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<LanguageWhereInput>
+  orderBy?: Maybe<Array<LanguageOrderByInput>>
+}
+
+export type QueryLanguageByUniqueInputArgs = {
+  where: LanguageWhereUniqueInput
+}
+
+export type QueryLanguagesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<LanguageWhereInput>
+  orderBy?: Maybe<Array<LanguageOrderByInput>>
+}
+
+export type QueryLicensesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<LicenseWhereInput>
+  orderBy?: Maybe<Array<LicenseOrderByInput>>
+}
+
+export type QueryLicenseByUniqueInputArgs = {
+  where: LicenseWhereUniqueInput
+}
+
+export type QueryLicensesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<LicenseWhereInput>
+  orderBy?: Maybe<Array<LicenseOrderByInput>>
+}
+
+export type QueryMembershipsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<MembershipWhereInput>
+  orderBy?: Maybe<Array<MembershipOrderByInput>>
+}
+
+export type QueryMembershipByUniqueInputArgs = {
+  where: MembershipWhereUniqueInput
+}
+
+export type QueryMembershipsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<MembershipWhereInput>
+  orderBy?: Maybe<Array<MembershipOrderByInput>>
+}
+
+export type QueryNodeLocationMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<NodeLocationMetadataWhereInput>
+  orderBy?: Maybe<Array<NodeLocationMetadataOrderByInput>>
+}
+
+export type QueryNodeLocationMetadataByUniqueInputArgs = {
+  where: NodeLocationMetadataWhereUniqueInput
+}
+
+export type QueryNodeLocationMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<NodeLocationMetadataWhereInput>
+  orderBy?: Maybe<Array<NodeLocationMetadataOrderByInput>>
+}
+
+export type QueryChannelCategoriesByNameArgs = {
+  whereChannelCategory?: Maybe<ChannelCategoryWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
+
+export type QueryMembersByHandleArgs = {
+  whereMembership?: Maybe<MembershipWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
+
+export type QuerySearchArgs = {
+  whereVideo?: Maybe<VideoWhereInput>
+  whereChannel?: Maybe<ChannelWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
+
+export type QueryVideoCategoriesByNameArgs = {
+  whereVideoCategory?: Maybe<VideoCategoryWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
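+
+/*
+ * Illustrative sketch only: using the full-text search entry point declared above.
+ * `search` takes the mandatory `text` argument plus optional skip/limit and where
+ * filters, and returns SearchFtsOutput items (defined further below); only the scalar
+ * output fields are selected here to keep the example self-contained.
+ */
+export const EXAMPLE_SEARCH_QUERY = `
+  query {
+    search(text: "joystream", limit: 10) {
+      isTypeOf
+      rank
+      highlight
+    }
+  }
+`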
+
+export type QueryStorageBagsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageBagWhereInput>
+  orderBy?: Maybe<Array<StorageBagOrderByInput>>
+}
+
+export type QueryStorageBagByUniqueInputArgs = {
+  where: StorageBagWhereUniqueInput
+}
+
+export type QueryStorageBagsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageBagWhereInput>
+  orderBy?: Maybe<Array<StorageBagOrderByInput>>
+}
+
+export type QueryStorageBucketOperatorMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  orderBy?: Maybe<Array<StorageBucketOperatorMetadataOrderByInput>>
+}
+
+export type QueryStorageBucketOperatorMetadataByUniqueInputArgs = {
+  where: StorageBucketOperatorMetadataWhereUniqueInput
+}
+
+export type QueryStorageBucketOperatorMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  orderBy?: Maybe<Array<StorageBucketOperatorMetadataOrderByInput>>
+}
+
+export type QueryStorageBucketsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageBucketWhereInput>
+  orderBy?: Maybe<Array<StorageBucketOrderByInput>>
+}
+
+export type QueryStorageBucketByUniqueInputArgs = {
+  where: StorageBucketWhereUniqueInput
+}
+
+export type QueryStorageBucketsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageBucketWhereInput>
+  orderBy?: Maybe<Array<StorageBucketOrderByInput>>
+}
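+
+/*
+ * Illustrative sketch only: Relay-style cursor pagination over storageBucketsConnection,
+ * combining the connection arguments above with the StorageBucketConnection, StorageBucketEdge
+ * and PageInfo shapes defined elsewhere in this file. The operation and variable names are
+ * arbitrary; feeding `pageInfo.endCursor` back in as `after` is left to the caller.
+ */
+export const EXAMPLE_STORAGE_BUCKETS_PAGE_QUERY = `
+  query StorageBucketsPage($first: Int, $after: String) {
+    storageBucketsConnection(first: $first, after: $after, orderBy: [createdAt_ASC]) {
+      totalCount
+      edges {
+        cursor
+        node {
+          id
+          acceptingNewBags
+        }
+      }
+      pageInfo {
+        hasNextPage
+        endCursor
+      }
+    }
+  }
+`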
+
+export type QueryStorageDataObjectsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageDataObjectWhereInput>
+  orderBy?: Maybe<Array<StorageDataObjectOrderByInput>>
+}
+
+export type QueryStorageDataObjectByUniqueInputArgs = {
+  where: StorageDataObjectWhereUniqueInput
+}
+
+export type QueryStorageDataObjectsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageDataObjectWhereInput>
+  orderBy?: Maybe<Array<StorageDataObjectOrderByInput>>
+}
+
+export type QueryStorageSystemParametersArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageSystemParametersWhereInput>
+  orderBy?: Maybe<Array<StorageSystemParametersOrderByInput>>
+}
+
+export type QueryStorageSystemParametersByUniqueInputArgs = {
+  where: StorageSystemParametersWhereUniqueInput
+}
+
+export type QueryStorageSystemParametersConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageSystemParametersWhereInput>
+  orderBy?: Maybe<Array<StorageSystemParametersOrderByInput>>
+}
+
+export type QueryVideoCategoriesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoCategoryWhereInput>
+  orderBy?: Maybe<Array<VideoCategoryOrderByInput>>
+}
+
+export type QueryVideoCategoryByUniqueInputArgs = {
+  where: VideoCategoryWhereUniqueInput
+}
+
+export type QueryVideoCategoriesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoCategoryWhereInput>
+  orderBy?: Maybe<Array<VideoCategoryOrderByInput>>
+}
+
+export type QueryVideoMediaEncodingsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoMediaEncodingWhereInput>
+  orderBy?: Maybe<Array<VideoMediaEncodingOrderByInput>>
+}
+
+export type QueryVideoMediaEncodingByUniqueInputArgs = {
+  where: VideoMediaEncodingWhereUniqueInput
+}
+
+export type QueryVideoMediaEncodingsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoMediaEncodingWhereInput>
+  orderBy?: Maybe<Array<VideoMediaEncodingOrderByInput>>
+}
+
+export type QueryVideoMediaMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoMediaMetadataWhereInput>
+  orderBy?: Maybe<Array<VideoMediaMetadataOrderByInput>>
+}
+
+export type QueryVideoMediaMetadataByUniqueInputArgs = {
+  where: VideoMediaMetadataWhereUniqueInput
+}
+
+export type QueryVideoMediaMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoMediaMetadataWhereInput>
+  orderBy?: Maybe<Array<VideoMediaMetadataOrderByInput>>
+}
+
+export type QueryVideosArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoWhereInput>
+  orderBy?: Maybe<Array<VideoOrderByInput>>
+}
+
+export type QueryVideoByUniqueInputArgs = {
+  where: VideoWhereUniqueInput
+}
+
+export type QueryVideosConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoWhereInput>
+  orderBy?: Maybe<Array<VideoOrderByInput>>
+}
+
+export type QueryWorkersArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<WorkerWhereInput>
+  orderBy?: Maybe<Array<WorkerOrderByInput>>
+}
+
+export type QueryWorkerByUniqueInputArgs = {
+  where: WorkerWhereUniqueInput
+}
+
+export type QueryWorkersConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<WorkerWhereInput>
+  orderBy?: Maybe<Array<WorkerOrderByInput>>
+}
+
+export type SearchFtsOutput = {
+  item: SearchSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type SearchSearchResult = Channel | Video
+
+export type StandardDeleteResponse = {
+  id: Scalars['ID']
+}
+
+export type StorageBag = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  objects: Array<StorageDataObject>
+  storageBuckets: Array<StorageBucket>
+  distributionBuckets: Array<DistributionBucket>
+  /** Owner of the storage bag */
+  owner: StorageBagOwner
+}
+
+export type StorageBagConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageBagEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageBagCreateInput = {
+  owner: Scalars['JSONObject']
+}
+
+export type StorageBagEdge = {
+  node: StorageBag
+  cursor: Scalars['String']
+}
+
+export enum StorageBagOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+}
+
+export type StorageBagOwner =
+  | StorageBagOwnerCouncil
+  | StorageBagOwnerWorkingGroup
+  | StorageBagOwnerMember
+  | StorageBagOwnerChannel
+  | StorageBagOwnerDao
+
+export type StorageBagOwnerChannel = {
+  channelId?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerCouncil = {
+  phantom?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerDao = {
+  daoId?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerMember = {
+  memberId?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerWorkingGroup = {
+  workingGroupId?: Maybe<Scalars['String']>
+}
+
+export type StorageBagUpdateInput = {
+  owner?: Maybe<Scalars['JSONObject']>
+}
+
+export type StorageBagWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  owner_json?: Maybe<Scalars['JSONObject']>
+  objects_none?: Maybe<StorageDataObjectWhereInput>
+  objects_some?: Maybe<StorageDataObjectWhereInput>
+  objects_every?: Maybe<StorageDataObjectWhereInput>
+  storageBuckets_none?: Maybe<StorageBucketWhereInput>
+  storageBuckets_some?: Maybe<StorageBucketWhereInput>
+  storageBuckets_every?: Maybe<StorageBucketWhereInput>
+  distributionBuckets_none?: Maybe<DistributionBucketWhereInput>
+  distributionBuckets_some?: Maybe<DistributionBucketWhereInput>
+  distributionBuckets_every?: Maybe<DistributionBucketWhereInput>
+  AND?: Maybe<Array<StorageBagWhereInput>>
+  OR?: Maybe<Array<StorageBagWhereInput>>
+}
+
+export type StorageBagWhereUniqueInput = {
+  id: Scalars['ID']
+}
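+
+/*
+ * Illustrative sketch only: filtering storage bags through a relation filter from
+ * StorageBagWhereInput above. `objects_some` takes a StorageDataObjectWhereInput, so
+ * this asks for bags containing at least one accepted data object; the nested selection
+ * uses fields declared on StorageBag and StorageDataObject in this file.
+ */
+export const EXAMPLE_STORAGE_BAGS_QUERY = `
+  query {
+    storageBags(where: { objects_some: { isAccepted_eq: true } }, limit: 20) {
+      id
+      objects {
+        id
+        ipfsHash
+        size
+      }
+    }
+  }
+`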
+
+export type StorageBucket = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Current bucket operator status */
+  operatorStatus: StorageBucketOperatorStatus
+  operatorMetadata?: Maybe<StorageBucketOperatorMetadata>
+  operatorMetadataId?: Maybe<Scalars['String']>
+  /** Whether the bucket is accepting any new storage bags */
+  acceptingNewBags: Scalars['Boolean']
+  bags: Array<StorageBag>
+  /** Bucket's data object size limit in bytes */
+  dataObjectsSizeLimit: Scalars['BigInt']
+  /** Bucket's data object count limit */
+  dataObjectCountLimit: Scalars['BigInt']
+  /** Number of assigned data objects */
+  dataObjectsCount: Scalars['BigInt']
+  /** Total size of assigned data objects */
+  dataObjectsSize: Scalars['BigInt']
+}
+
+export type StorageBucketConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageBucketEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageBucketCreateInput = {
+  operatorStatus: Scalars['JSONObject']
+  operatorMetadata?: Maybe<Scalars['ID']>
+  acceptingNewBags: Scalars['Boolean']
+  dataObjectsSizeLimit: Scalars['String']
+  dataObjectCountLimit: Scalars['String']
+  dataObjectsCount: Scalars['String']
+  dataObjectsSize: Scalars['String']
+}
+
+export type StorageBucketEdge = {
+  node: StorageBucket
+  cursor: Scalars['String']
+}
+
+export type StorageBucketOperatorMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Root node endpoint */
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<NodeLocationMetadata>
+  nodeLocationId?: Maybe<Scalars['String']>
+  /** Additional information about the node/operator */
+  extra?: Maybe<Scalars['String']>
+  storagebucketoperatorMetadata?: Maybe<Array<StorageBucket>>
+}
+
+export type StorageBucketOperatorMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageBucketOperatorMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageBucketOperatorMetadataCreateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<Scalars['ID']>
+  extra?: Maybe<Scalars['String']>
+}
+
+export type StorageBucketOperatorMetadataEdge = {
+  node: StorageBucketOperatorMetadata
+  cursor: Scalars['String']
+}
+
+export enum StorageBucketOperatorMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NodeEndpointAsc = 'nodeEndpoint_ASC',
+  NodeEndpointDesc = 'nodeEndpoint_DESC',
+  NodeLocationAsc = 'nodeLocation_ASC',
+  NodeLocationDesc = 'nodeLocation_DESC',
+  ExtraAsc = 'extra_ASC',
+  ExtraDesc = 'extra_DESC',
+}
+
+export type StorageBucketOperatorMetadataUpdateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<Scalars['ID']>
+  extra?: Maybe<Scalars['String']>
+}
+
+export type StorageBucketOperatorMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  nodeEndpoint_eq?: Maybe<Scalars['String']>
+  nodeEndpoint_contains?: Maybe<Scalars['String']>
+  nodeEndpoint_startsWith?: Maybe<Scalars['String']>
+  nodeEndpoint_endsWith?: Maybe<Scalars['String']>
+  nodeEndpoint_in?: Maybe<Array<Scalars['String']>>
+  extra_eq?: Maybe<Scalars['String']>
+  extra_contains?: Maybe<Scalars['String']>
+  extra_startsWith?: Maybe<Scalars['String']>
+  extra_endsWith?: Maybe<Scalars['String']>
+  extra_in?: Maybe<Array<Scalars['String']>>
+  nodeLocation?: Maybe<NodeLocationMetadataWhereInput>
+  storagebucketoperatorMetadata_none?: Maybe<StorageBucketWhereInput>
+  storagebucketoperatorMetadata_some?: Maybe<StorageBucketWhereInput>
+  storagebucketoperatorMetadata_every?: Maybe<StorageBucketWhereInput>
+  AND?: Maybe<Array<StorageBucketOperatorMetadataWhereInput>>
+  OR?: Maybe<Array<StorageBucketOperatorMetadataWhereInput>>
+}
+
+export type StorageBucketOperatorMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBucketOperatorStatus =
+  | StorageBucketOperatorStatusMissing
+  | StorageBucketOperatorStatusInvited
+  | StorageBucketOperatorStatusActive
+
+export type StorageBucketOperatorStatusActive = {
+  workerId: Scalars['Int']
+}
+
+export type StorageBucketOperatorStatusInvited = {
+  workerId: Scalars['Int']
+}
+
+export type StorageBucketOperatorStatusMissing = {
+  phantom?: Maybe<Scalars['Int']>
+}
+
+export enum StorageBucketOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  OperatorMetadataAsc = 'operatorMetadata_ASC',
+  OperatorMetadataDesc = 'operatorMetadata_DESC',
+  AcceptingNewBagsAsc = 'acceptingNewBags_ASC',
+  AcceptingNewBagsDesc = 'acceptingNewBags_DESC',
+  DataObjectsSizeLimitAsc = 'dataObjectsSizeLimit_ASC',
+  DataObjectsSizeLimitDesc = 'dataObjectsSizeLimit_DESC',
+  DataObjectCountLimitAsc = 'dataObjectCountLimit_ASC',
+  DataObjectCountLimitDesc = 'dataObjectCountLimit_DESC',
+  DataObjectsCountAsc = 'dataObjectsCount_ASC',
+  DataObjectsCountDesc = 'dataObjectsCount_DESC',
+  DataObjectsSizeAsc = 'dataObjectsSize_ASC',
+  DataObjectsSizeDesc = 'dataObjectsSize_DESC',
+}
+
+export type StorageBucketUpdateInput = {
+  operatorStatus?: Maybe<Scalars['JSONObject']>
+  operatorMetadata?: Maybe<Scalars['ID']>
+  acceptingNewBags?: Maybe<Scalars['Boolean']>
+  dataObjectsSizeLimit?: Maybe<Scalars['String']>
+  dataObjectCountLimit?: Maybe<Scalars['String']>
+  dataObjectsCount?: Maybe<Scalars['String']>
+  dataObjectsSize?: Maybe<Scalars['String']>
+}
+
+export type StorageBucketWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  operatorStatus_json?: Maybe<Scalars['JSONObject']>
+  acceptingNewBags_eq?: Maybe<Scalars['Boolean']>
+  acceptingNewBags_in?: Maybe<Array<Scalars['Boolean']>>
+  dataObjectsSizeLimit_eq?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_gt?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_gte?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_lt?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_lte?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_in?: Maybe<Array<Scalars['BigInt']>>
+  dataObjectCountLimit_eq?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_gt?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_gte?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_lt?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_lte?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_in?: Maybe<Array<Scalars['BigInt']>>
+  dataObjectsCount_eq?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_gt?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_gte?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_lt?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_lte?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_in?: Maybe<Array<Scalars['BigInt']>>
+  dataObjectsSize_eq?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_gt?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_gte?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_lt?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_lte?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_in?: Maybe<Array<Scalars['BigInt']>>
+  operatorMetadata?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  bags_none?: Maybe<StorageBagWhereInput>
+  bags_some?: Maybe<StorageBagWhereInput>
+  bags_every?: Maybe<StorageBagWhereInput>
+  AND?: Maybe<Array<StorageBucketWhereInput>>
+  OR?: Maybe<Array<StorageBucketWhereInput>>
+}
+
+export type StorageBucketWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageDataObject = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Whether the data object was uploaded and accepted by the storage provider */
+  isAccepted: Scalars['Boolean']
+  /** Data object size in bytes */
+  size: Scalars['BigInt']
+  storageBag: StorageBag
+  storageBagId: Scalars['String']
+  /** IPFS content hash */
+  ipfsHash: Scalars['String']
+  /** The type of the asset that the data object represents (if known) */
+  type: DataObjectType
+  /** Prize for removing the data object */
+  deletionPrize: Scalars['BigInt']
+  /** If the object is no longer used as an asset - the time at which it was unset (if known) */
+  unsetAt?: Maybe<Scalars['DateTime']>
+  channelcoverPhoto?: Maybe<Array<Channel>>
+  channelavatarPhoto?: Maybe<Array<Channel>>
+  videothumbnailPhoto?: Maybe<Array<Video>>
+  videomedia?: Maybe<Array<Video>>
+}
+
+export type StorageDataObjectConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageDataObjectEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageDataObjectCreateInput = {
+  isAccepted: Scalars['Boolean']
+  size: Scalars['String']
+  storageBag: Scalars['ID']
+  ipfsHash: Scalars['String']
+  type: Scalars['JSONObject']
+  deletionPrize: Scalars['String']
+  unsetAt?: Maybe<Scalars['DateTime']>
+}
+
+export type StorageDataObjectEdge = {
+  node: StorageDataObject
+  cursor: Scalars['String']
+}
+
+export enum StorageDataObjectOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsAcceptedAsc = 'isAccepted_ASC',
+  IsAcceptedDesc = 'isAccepted_DESC',
+  SizeAsc = 'size_ASC',
+  SizeDesc = 'size_DESC',
+  StorageBagAsc = 'storageBag_ASC',
+  StorageBagDesc = 'storageBag_DESC',
+  IpfsHashAsc = 'ipfsHash_ASC',
+  IpfsHashDesc = 'ipfsHash_DESC',
+  DeletionPrizeAsc = 'deletionPrize_ASC',
+  DeletionPrizeDesc = 'deletionPrize_DESC',
+  UnsetAtAsc = 'unsetAt_ASC',
+  UnsetAtDesc = 'unsetAt_DESC',
+}
+
+export type StorageDataObjectUpdateInput = {
+  isAccepted?: Maybe<Scalars['Boolean']>
+  size?: Maybe<Scalars['String']>
+  storageBag?: Maybe<Scalars['ID']>
+  ipfsHash?: Maybe<Scalars['String']>
+  type?: Maybe<Scalars['JSONObject']>
+  deletionPrize?: Maybe<Scalars['String']>
+  unsetAt?: Maybe<Scalars['DateTime']>
+}
+
+export type StorageDataObjectWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  isAccepted_eq?: Maybe<Scalars['Boolean']>
+  isAccepted_in?: Maybe<Array<Scalars['Boolean']>>
+  size_eq?: Maybe<Scalars['BigInt']>
+  size_gt?: Maybe<Scalars['BigInt']>
+  size_gte?: Maybe<Scalars['BigInt']>
+  size_lt?: Maybe<Scalars['BigInt']>
+  size_lte?: Maybe<Scalars['BigInt']>
+  size_in?: Maybe<Array<Scalars['BigInt']>>
+  ipfsHash_eq?: Maybe<Scalars['String']>
+  ipfsHash_contains?: Maybe<Scalars['String']>
+  ipfsHash_startsWith?: Maybe<Scalars['String']>
+  ipfsHash_endsWith?: Maybe<Scalars['String']>
+  ipfsHash_in?: Maybe<Array<Scalars['String']>>
+  type_json?: Maybe<Scalars['JSONObject']>
+  deletionPrize_eq?: Maybe<Scalars['BigInt']>
+  deletionPrize_gt?: Maybe<Scalars['BigInt']>
+  deletionPrize_gte?: Maybe<Scalars['BigInt']>
+  deletionPrize_lt?: Maybe<Scalars['BigInt']>
+  deletionPrize_lte?: Maybe<Scalars['BigInt']>
+  deletionPrize_in?: Maybe<Array<Scalars['BigInt']>>
+  unsetAt_eq?: Maybe<Scalars['DateTime']>
+  unsetAt_lt?: Maybe<Scalars['DateTime']>
+  unsetAt_lte?: Maybe<Scalars['DateTime']>
+  unsetAt_gt?: Maybe<Scalars['DateTime']>
+  unsetAt_gte?: Maybe<Scalars['DateTime']>
+  storageBag?: Maybe<StorageBagWhereInput>
+  channelcoverPhoto_none?: Maybe<ChannelWhereInput>
+  channelcoverPhoto_some?: Maybe<ChannelWhereInput>
+  channelcoverPhoto_every?: Maybe<ChannelWhereInput>
+  channelavatarPhoto_none?: Maybe<ChannelWhereInput>
+  channelavatarPhoto_some?: Maybe<ChannelWhereInput>
+  channelavatarPhoto_every?: Maybe<ChannelWhereInput>
+  videothumbnailPhoto_none?: Maybe<VideoWhereInput>
+  videothumbnailPhoto_some?: Maybe<VideoWhereInput>
+  videothumbnailPhoto_every?: Maybe<VideoWhereInput>
+  videomedia_none?: Maybe<VideoWhereInput>
+  videomedia_some?: Maybe<VideoWhereInput>
+  videomedia_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<StorageDataObjectWhereInput>>
+  OR?: Maybe<Array<StorageDataObjectWhereInput>>
+}
+
+export type StorageDataObjectWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+/** Global storage system parameters */
+export type StorageSystemParameters = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Blacklisted content hashes */
+  blacklist: Array<Scalars['String']>
+  /** How many buckets can be assigned to store a bag */
+  storageBucketsPerBagLimit: Scalars['Int']
+  /** How many buckets can be assigned to distribute a bag */
+  distributionBucketsPerBagLimit: Scalars['Int']
+  /** Whether the uploading is globally blocked */
+  uploadingBlocked: Scalars['Boolean']
+  /** Additional fee for storing 1 MB of data */
+  dataObjectFeePerMb: Scalars['BigInt']
+  /** Global max. number of objects a storage bucket can store (can also be further limited by the provider) */
+  storageBucketMaxObjectsCountLimit: Scalars['BigInt']
+  /** Global max. size of objects a storage bucket can store (can also be further limited by the provider) */
+  storageBucketMaxObjectsSizeLimit: Scalars['BigInt']
+  /** ID that will be assigned to the next data object created */
+  nextDataObjectId: Scalars['BigInt']
+}
+
+export type StorageSystemParametersConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageSystemParametersEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageSystemParametersCreateInput = {
+  blacklist: Array<Scalars['String']>
+  storageBucketsPerBagLimit: Scalars['Float']
+  distributionBucketsPerBagLimit: Scalars['Float']
+  uploadingBlocked: Scalars['Boolean']
+  dataObjectFeePerMb: Scalars['String']
+  storageBucketMaxObjectsCountLimit: Scalars['String']
+  storageBucketMaxObjectsSizeLimit: Scalars['String']
+  nextDataObjectId: Scalars['String']
+}
+
+export type StorageSystemParametersEdge = {
+  node: StorageSystemParameters
+  cursor: Scalars['String']
+}
+
+export enum StorageSystemParametersOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  StorageBucketsPerBagLimitAsc = 'storageBucketsPerBagLimit_ASC',
+  StorageBucketsPerBagLimitDesc = 'storageBucketsPerBagLimit_DESC',
+  DistributionBucketsPerBagLimitAsc = 'distributionBucketsPerBagLimit_ASC',
+  DistributionBucketsPerBagLimitDesc = 'distributionBucketsPerBagLimit_DESC',
+  UploadingBlockedAsc = 'uploadingBlocked_ASC',
+  UploadingBlockedDesc = 'uploadingBlocked_DESC',
+  DataObjectFeePerMbAsc = 'dataObjectFeePerMb_ASC',
+  DataObjectFeePerMbDesc = 'dataObjectFeePerMb_DESC',
+  StorageBucketMaxObjectsCountLimitAsc = 'storageBucketMaxObjectsCountLimit_ASC',
+  StorageBucketMaxObjectsCountLimitDesc = 'storageBucketMaxObjectsCountLimit_DESC',
+  StorageBucketMaxObjectsSizeLimitAsc = 'storageBucketMaxObjectsSizeLimit_ASC',
+  StorageBucketMaxObjectsSizeLimitDesc = 'storageBucketMaxObjectsSizeLimit_DESC',
+  NextDataObjectIdAsc = 'nextDataObjectId_ASC',
+  NextDataObjectIdDesc = 'nextDataObjectId_DESC',
+}
+
+export type StorageSystemParametersUpdateInput = {
+  blacklist?: Maybe<Array<Scalars['String']>>
+  storageBucketsPerBagLimit?: Maybe<Scalars['Float']>
+  distributionBucketsPerBagLimit?: Maybe<Scalars['Float']>
+  uploadingBlocked?: Maybe<Scalars['Boolean']>
+  dataObjectFeePerMb?: Maybe<Scalars['String']>
+  storageBucketMaxObjectsCountLimit?: Maybe<Scalars['String']>
+  storageBucketMaxObjectsSizeLimit?: Maybe<Scalars['String']>
+  nextDataObjectId?: Maybe<Scalars['String']>
+}
+
+export type StorageSystemParametersWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  blacklist_containsAll?: Maybe<Array<Scalars['String']>>
+  blacklist_containsNone?: Maybe<Array<Scalars['String']>>
+  blacklist_containsAny?: Maybe<Array<Scalars['String']>>
+  storageBucketsPerBagLimit_eq?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_gt?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_gte?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_lt?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_lte?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_in?: Maybe<Array<Scalars['Int']>>
+  distributionBucketsPerBagLimit_eq?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_gt?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_gte?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_lt?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_lte?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_in?: Maybe<Array<Scalars['Int']>>
+  uploadingBlocked_eq?: Maybe<Scalars['Boolean']>
+  uploadingBlocked_in?: Maybe<Array<Scalars['Boolean']>>
+  dataObjectFeePerMb_eq?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_gt?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_gte?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_lt?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_lte?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_in?: Maybe<Array<Scalars['BigInt']>>
+  storageBucketMaxObjectsCountLimit_eq?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_gt?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_gte?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_lt?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_lte?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_in?: Maybe<Array<Scalars['BigInt']>>
+  storageBucketMaxObjectsSizeLimit_eq?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_gt?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_gte?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_lt?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_lte?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_in?: Maybe<Array<Scalars['BigInt']>>
+  nextDataObjectId_eq?: Maybe<Scalars['BigInt']>
+  nextDataObjectId_gt?: Maybe<Scalars['BigInt']>
+  nextDataObjectId_gte?: Maybe<Scalars['BigInt']>
+  nextDataObjectId_lt?: Maybe<Scalars['BigInt']>
+  nextDataObjectId_lte?: Maybe<Scalars['BigInt']>
+  nextDataObjectId_in?: Maybe<Array<Scalars['BigInt']>>
+  AND?: Maybe<Array<StorageSystemParametersWhereInput>>
+  OR?: Maybe<Array<StorageSystemParametersWhereInput>>
+}
+
+export type StorageSystemParametersWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type Subscription = {
+  stateSubscription: ProcessorState
+}
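+
+/*
+ * Illustrative sketch only: the single subscription exposed above streams ProcessorState,
+ * which is useful for checking how far the query node lags behind the chain
+ * (chainHead vs indexerHead vs lastCompleteBlock). The field names come from the
+ * ProcessorState type declared earlier in this file.
+ */
+export const EXAMPLE_STATE_SUBSCRIPTION = `
+  subscription {
+    stateSubscription {
+      lastCompleteBlock
+      lastProcessedEvent
+      indexerHead
+      chainHead
+    }
+  }
+`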
+
+export type Video = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  channel: Channel
+  channelId: Scalars['String']
+  category?: Maybe<VideoCategory>
+  categoryId?: Maybe<Scalars['String']>
+  /** The title of the video */
+  title?: Maybe<Scalars['String']>
+  /** The description of the Video */
+  description?: Maybe<Scalars['String']>
+  /** Video duration in seconds */
+  duration?: Maybe<Scalars['Int']>
+  thumbnailPhoto?: Maybe<StorageDataObject>
+  thumbnailPhotoId?: Maybe<Scalars['String']>
+  language?: Maybe<Language>
+  languageId?: Maybe<Scalars['String']>
+  /** Whether or not the video contains marketing */
+  hasMarketing?: Maybe<Scalars['Boolean']>
+  /** If the video was published on another platform before being published on Joystream - the original publication date */
+  publishedBeforeJoystream?: Maybe<Scalars['DateTime']>
+  /** Whether the video is supposed to be publicly displayed */
+  isPublic?: Maybe<Scalars['Boolean']>
+  /** Flag signaling whether a video is censored. */
+  isCensored: Scalars['Boolean']
+  /** Whether the Video contains explicit material. */
+  isExplicit?: Maybe<Scalars['Boolean']>
+  license?: Maybe<License>
+  licenseId?: Maybe<Scalars['String']>
+  media?: Maybe<StorageDataObject>
+  mediaId?: Maybe<Scalars['String']>
+  mediaMetadata?: Maybe<VideoMediaMetadata>
+  mediaMetadataId?: Maybe<Scalars['String']>
+  createdInBlock: Scalars['Int']
+  /** Whether the video is featured */
+  isFeatured: Scalars['Boolean']
+}
+
+export type VideoCategoriesByNameFtsOutput = {
+  item: VideoCategoriesByNameSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type VideoCategoriesByNameSearchResult = VideoCategory
+
+export type VideoCategory = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** The name of the category */
+  name?: Maybe<Scalars['String']>
+  videos: Array<Video>
+  createdInBlock: Scalars['Int']
+}
+
+export type VideoCategoryConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoCategoryEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoCategoryCreateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock: Scalars['Float']
+}
+
+export type VideoCategoryEdge = {
+  node: VideoCategory
+  cursor: Scalars['String']
+}
+
+export enum VideoCategoryOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NameAsc = 'name_ASC',
+  NameDesc = 'name_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type VideoCategoryUpdateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type VideoCategoryWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  name_eq?: Maybe<Scalars['String']>
+  name_contains?: Maybe<Scalars['String']>
+  name_startsWith?: Maybe<Scalars['String']>
+  name_endsWith?: Maybe<Scalars['String']>
+  name_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  videos_none?: Maybe<VideoWhereInput>
+  videos_some?: Maybe<VideoWhereInput>
+  videos_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<VideoCategoryWhereInput>>
+  OR?: Maybe<Array<VideoCategoryWhereInput>>
+}
+
+export type VideoCategoryWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type VideoConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoCreateInput = {
+  channel: Scalars['ID']
+  category?: Maybe<Scalars['ID']>
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  duration?: Maybe<Scalars['Float']>
+  thumbnailPhoto?: Maybe<Scalars['ID']>
+  language?: Maybe<Scalars['ID']>
+  hasMarketing?: Maybe<Scalars['Boolean']>
+  publishedBeforeJoystream?: Maybe<Scalars['DateTime']>
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored: Scalars['Boolean']
+  isExplicit?: Maybe<Scalars['Boolean']>
+  license?: Maybe<Scalars['ID']>
+  media?: Maybe<Scalars['ID']>
+  mediaMetadata?: Maybe<Scalars['ID']>
+  createdInBlock: Scalars['Float']
+  isFeatured: Scalars['Boolean']
+}
+
+export type VideoEdge = {
+  node: Video
+  cursor: Scalars['String']
+}
+
+export type VideoMediaEncoding = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Encoding of the video media object */
+  codecName?: Maybe<Scalars['String']>
+  /** Media container format */
+  container?: Maybe<Scalars['String']>
+  /** Content MIME type */
+  mimeMediaType?: Maybe<Scalars['String']>
+  videomediametadataencoding?: Maybe<Array<VideoMediaMetadata>>
+}
+
+export type VideoMediaEncodingConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoMediaEncodingEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoMediaEncodingCreateInput = {
+  codecName?: Maybe<Scalars['String']>
+  container?: Maybe<Scalars['String']>
+  mimeMediaType?: Maybe<Scalars['String']>
+}
+
+export type VideoMediaEncodingEdge = {
+  node: VideoMediaEncoding
+  cursor: Scalars['String']
+}
+
+export enum VideoMediaEncodingOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CodecNameAsc = 'codecName_ASC',
+  CodecNameDesc = 'codecName_DESC',
+  ContainerAsc = 'container_ASC',
+  ContainerDesc = 'container_DESC',
+  MimeMediaTypeAsc = 'mimeMediaType_ASC',
+  MimeMediaTypeDesc = 'mimeMediaType_DESC',
+}
+
+export type VideoMediaEncodingUpdateInput = {
+  codecName?: Maybe<Scalars['String']>
+  container?: Maybe<Scalars['String']>
+  mimeMediaType?: Maybe<Scalars['String']>
+}
+
+export type VideoMediaEncodingWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  codecName_eq?: Maybe<Scalars['String']>
+  codecName_contains?: Maybe<Scalars['String']>
+  codecName_startsWith?: Maybe<Scalars['String']>
+  codecName_endsWith?: Maybe<Scalars['String']>
+  codecName_in?: Maybe<Array<Scalars['String']>>
+  container_eq?: Maybe<Scalars['String']>
+  container_contains?: Maybe<Scalars['String']>
+  container_startsWith?: Maybe<Scalars['String']>
+  container_endsWith?: Maybe<Scalars['String']>
+  container_in?: Maybe<Array<Scalars['String']>>
+  mimeMediaType_eq?: Maybe<Scalars['String']>
+  mimeMediaType_contains?: Maybe<Scalars['String']>
+  mimeMediaType_startsWith?: Maybe<Scalars['String']>
+  mimeMediaType_endsWith?: Maybe<Scalars['String']>
+  mimeMediaType_in?: Maybe<Array<Scalars['String']>>
+  videomediametadataencoding_none?: Maybe<VideoMediaMetadataWhereInput>
+  videomediametadataencoding_some?: Maybe<VideoMediaMetadataWhereInput>
+  videomediametadataencoding_every?: Maybe<VideoMediaMetadataWhereInput>
+  AND?: Maybe<Array<VideoMediaEncodingWhereInput>>
+  OR?: Maybe<Array<VideoMediaEncodingWhereInput>>
+}
+
+export type VideoMediaEncodingWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type VideoMediaMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  encoding?: Maybe<VideoMediaEncoding>
+  encodingId?: Maybe<Scalars['String']>
+  /** Video media width in pixels */
+  pixelWidth?: Maybe<Scalars['Int']>
+  /** Video media height in pixels */
+  pixelHeight?: Maybe<Scalars['Int']>
+  /** Video media size in bytes */
+  size?: Maybe<Scalars['BigInt']>
+  video?: Maybe<Video>
+  createdInBlock: Scalars['Int']
+}
+
+export type VideoMediaMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoMediaMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoMediaMetadataCreateInput = {
+  encoding?: Maybe<Scalars['ID']>
+  pixelWidth?: Maybe<Scalars['Float']>
+  pixelHeight?: Maybe<Scalars['Float']>
+  size?: Maybe<Scalars['String']>
+  createdInBlock: Scalars['Float']
+}
+
+export type VideoMediaMetadataEdge = {
+  node: VideoMediaMetadata
+  cursor: Scalars['String']
+}
+
+export enum VideoMediaMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  EncodingAsc = 'encoding_ASC',
+  EncodingDesc = 'encoding_DESC',
+  PixelWidthAsc = 'pixelWidth_ASC',
+  PixelWidthDesc = 'pixelWidth_DESC',
+  PixelHeightAsc = 'pixelHeight_ASC',
+  PixelHeightDesc = 'pixelHeight_DESC',
+  SizeAsc = 'size_ASC',
+  SizeDesc = 'size_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type VideoMediaMetadataUpdateInput = {
+  encoding?: Maybe<Scalars['ID']>
+  pixelWidth?: Maybe<Scalars['Float']>
+  pixelHeight?: Maybe<Scalars['Float']>
+  size?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type VideoMediaMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  pixelWidth_eq?: Maybe<Scalars['Int']>
+  pixelWidth_gt?: Maybe<Scalars['Int']>
+  pixelWidth_gte?: Maybe<Scalars['Int']>
+  pixelWidth_lt?: Maybe<Scalars['Int']>
+  pixelWidth_lte?: Maybe<Scalars['Int']>
+  pixelWidth_in?: Maybe<Array<Scalars['Int']>>
+  pixelHeight_eq?: Maybe<Scalars['Int']>
+  pixelHeight_gt?: Maybe<Scalars['Int']>
+  pixelHeight_gte?: Maybe<Scalars['Int']>
+  pixelHeight_lt?: Maybe<Scalars['Int']>
+  pixelHeight_lte?: Maybe<Scalars['Int']>
+  pixelHeight_in?: Maybe<Array<Scalars['Int']>>
+  size_eq?: Maybe<Scalars['BigInt']>
+  size_gt?: Maybe<Scalars['BigInt']>
+  size_gte?: Maybe<Scalars['BigInt']>
+  size_lt?: Maybe<Scalars['BigInt']>
+  size_lte?: Maybe<Scalars['BigInt']>
+  size_in?: Maybe<Array<Scalars['BigInt']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  encoding?: Maybe<VideoMediaEncodingWhereInput>
+  video?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<VideoMediaMetadataWhereInput>>
+  OR?: Maybe<Array<VideoMediaMetadataWhereInput>>
+}
+
+export type VideoMediaMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum VideoOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  ChannelAsc = 'channel_ASC',
+  ChannelDesc = 'channel_DESC',
+  CategoryAsc = 'category_ASC',
+  CategoryDesc = 'category_DESC',
+  TitleAsc = 'title_ASC',
+  TitleDesc = 'title_DESC',
+  DescriptionAsc = 'description_ASC',
+  DescriptionDesc = 'description_DESC',
+  DurationAsc = 'duration_ASC',
+  DurationDesc = 'duration_DESC',
+  ThumbnailPhotoAsc = 'thumbnailPhoto_ASC',
+  ThumbnailPhotoDesc = 'thumbnailPhoto_DESC',
+  LanguageAsc = 'language_ASC',
+  LanguageDesc = 'language_DESC',
+  HasMarketingAsc = 'hasMarketing_ASC',
+  HasMarketingDesc = 'hasMarketing_DESC',
+  PublishedBeforeJoystreamAsc = 'publishedBeforeJoystream_ASC',
+  PublishedBeforeJoystreamDesc = 'publishedBeforeJoystream_DESC',
+  IsPublicAsc = 'isPublic_ASC',
+  IsPublicDesc = 'isPublic_DESC',
+  IsCensoredAsc = 'isCensored_ASC',
+  IsCensoredDesc = 'isCensored_DESC',
+  IsExplicitAsc = 'isExplicit_ASC',
+  IsExplicitDesc = 'isExplicit_DESC',
+  LicenseAsc = 'license_ASC',
+  LicenseDesc = 'license_DESC',
+  MediaAsc = 'media_ASC',
+  MediaDesc = 'media_DESC',
+  MediaMetadataAsc = 'mediaMetadata_ASC',
+  MediaMetadataDesc = 'mediaMetadata_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+  IsFeaturedAsc = 'isFeatured_ASC',
+  IsFeaturedDesc = 'isFeatured_DESC',
+}
+
+export type VideoUpdateInput = {
+  channel?: Maybe<Scalars['ID']>
+  category?: Maybe<Scalars['ID']>
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  duration?: Maybe<Scalars['Float']>
+  thumbnailPhoto?: Maybe<Scalars['ID']>
+  language?: Maybe<Scalars['ID']>
+  hasMarketing?: Maybe<Scalars['Boolean']>
+  publishedBeforeJoystream?: Maybe<Scalars['DateTime']>
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored?: Maybe<Scalars['Boolean']>
+  isExplicit?: Maybe<Scalars['Boolean']>
+  license?: Maybe<Scalars['ID']>
+  media?: Maybe<Scalars['ID']>
+  mediaMetadata?: Maybe<Scalars['ID']>
+  createdInBlock?: Maybe<Scalars['Float']>
+  isFeatured?: Maybe<Scalars['Boolean']>
+}
+
+export type VideoWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  title_eq?: Maybe<Scalars['String']>
+  title_contains?: Maybe<Scalars['String']>
+  title_startsWith?: Maybe<Scalars['String']>
+  title_endsWith?: Maybe<Scalars['String']>
+  title_in?: Maybe<Array<Scalars['String']>>
+  description_eq?: Maybe<Scalars['String']>
+  description_contains?: Maybe<Scalars['String']>
+  description_startsWith?: Maybe<Scalars['String']>
+  description_endsWith?: Maybe<Scalars['String']>
+  description_in?: Maybe<Array<Scalars['String']>>
+  duration_eq?: Maybe<Scalars['Int']>
+  duration_gt?: Maybe<Scalars['Int']>
+  duration_gte?: Maybe<Scalars['Int']>
+  duration_lt?: Maybe<Scalars['Int']>
+  duration_lte?: Maybe<Scalars['Int']>
+  duration_in?: Maybe<Array<Scalars['Int']>>
+  hasMarketing_eq?: Maybe<Scalars['Boolean']>
+  hasMarketing_in?: Maybe<Array<Scalars['Boolean']>>
+  publishedBeforeJoystream_eq?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_lt?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_lte?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_gt?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_gte?: Maybe<Scalars['DateTime']>
+  isPublic_eq?: Maybe<Scalars['Boolean']>
+  isPublic_in?: Maybe<Array<Scalars['Boolean']>>
+  isCensored_eq?: Maybe<Scalars['Boolean']>
+  isCensored_in?: Maybe<Array<Scalars['Boolean']>>
+  isExplicit_eq?: Maybe<Scalars['Boolean']>
+  isExplicit_in?: Maybe<Array<Scalars['Boolean']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  isFeatured_eq?: Maybe<Scalars['Boolean']>
+  isFeatured_in?: Maybe<Array<Scalars['Boolean']>>
+  channel?: Maybe<ChannelWhereInput>
+  category?: Maybe<VideoCategoryWhereInput>
+  thumbnailPhoto?: Maybe<StorageDataObjectWhereInput>
+  language?: Maybe<LanguageWhereInput>
+  license?: Maybe<LicenseWhereInput>
+  media?: Maybe<StorageDataObjectWhereInput>
+  mediaMetadata?: Maybe<VideoMediaMetadataWhereInput>
+  AND?: Maybe<Array<VideoWhereInput>>
+  OR?: Maybe<Array<VideoWhereInput>>
+}
+
+export type VideoWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type Worker = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Whether the worker is still active */
+  isActive: Scalars['Boolean']
+  /** Runtime identifier */
+  workerId: Scalars['String']
+  /** Associated working group */
+  type: WorkerType
+  /** Custom metadata set by provider */
+  metadata?: Maybe<Scalars['String']>
+}
+
+export type WorkerConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<WorkerEdge>
+  pageInfo: PageInfo
+}
+
+export type WorkerCreateInput = {
+  isActive: Scalars['Boolean']
+  workerId: Scalars['String']
+  type: WorkerType
+  metadata?: Maybe<Scalars['String']>
+}
+
+export type WorkerEdge = {
+  node: Worker
+  cursor: Scalars['String']
+}
+
+export enum WorkerOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsActiveAsc = 'isActive_ASC',
+  IsActiveDesc = 'isActive_DESC',
+  WorkerIdAsc = 'workerId_ASC',
+  WorkerIdDesc = 'workerId_DESC',
+  TypeAsc = 'type_ASC',
+  TypeDesc = 'type_DESC',
+  MetadataAsc = 'metadata_ASC',
+  MetadataDesc = 'metadata_DESC',
+}
+
+export enum WorkerType {
+  Gateway = 'GATEWAY',
+  Storage = 'STORAGE',
+}
+
+export type WorkerUpdateInput = {
+  isActive?: Maybe<Scalars['Boolean']>
+  workerId?: Maybe<Scalars['String']>
+  type?: Maybe<WorkerType>
+  metadata?: Maybe<Scalars['String']>
+}
+
+export type WorkerWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  isActive_eq?: Maybe<Scalars['Boolean']>
+  isActive_in?: Maybe<Array<Scalars['Boolean']>>
+  workerId_eq?: Maybe<Scalars['String']>
+  workerId_contains?: Maybe<Scalars['String']>
+  workerId_startsWith?: Maybe<Scalars['String']>
+  workerId_endsWith?: Maybe<Scalars['String']>
+  workerId_in?: Maybe<Array<Scalars['String']>>
+  type_eq?: Maybe<WorkerType>
+  type_in?: Maybe<Array<WorkerType>>
+  metadata_eq?: Maybe<Scalars['String']>
+  metadata_contains?: Maybe<Scalars['String']>
+  metadata_startsWith?: Maybe<Scalars['String']>
+  metadata_endsWith?: Maybe<Scalars['String']>
+  metadata_in?: Maybe<Array<Scalars['String']>>
+  AND?: Maybe<Array<WorkerWhereInput>>
+  OR?: Maybe<Array<WorkerWhereInput>>
+}
+
+export type WorkerWhereUniqueInput = {
+  id: Scalars['ID']
+}

+ 65 - 0
cli/src/graphql/queries/storage.graphql

@@ -0,0 +1,65 @@
+fragment StorageNodeInfo on StorageBucket {
+  id
+  operatorMetadata {
+    nodeEndpoint
+  }
+}
+
+query getStorageNodesInfoByBagId($bagId: ID) {
+  storageBuckets(
+    where: {
+      operatorStatus_json: { isTypeOf_eq: "StorageBucketOperatorStatusActive" }
+      bags_some: { id_eq: $bagId }
+      operatorMetadata: { nodeEndpoint_contains: "http" }
+    }
+  ) {
+    ...StorageNodeInfo
+  }
+}
+
+fragment DataObjectInfo on StorageDataObject {
+  id
+  size
+  deletionPrize
+  type {
+    __typename
+    ... on DataObjectTypeVideoMedia {
+      video {
+        id
+      }
+    }
+    ... on DataObjectTypeVideoThumbnail {
+      video {
+        id
+      }
+    }
+    ... on DataObjectTypeChannelAvatar {
+      channel {
+        id
+      }
+    }
+    ... on DataObjectTypeChannelCoverPhoto {
+      channel {
+        id
+      }
+    }
+  }
+}
+
+query getDataObjectsByBagId($bagId: ID) {
+  storageDataObjects(where: { storageBag: { id_eq: $bagId } }) {
+    ...DataObjectInfo
+  }
+}
+
+query getDataObjectsByChannelId($channelId: ID) {
+  storageDataObjects(where: { type_json: { channelId_eq: $channelId } }) {
+    ...DataObjectInfo
+  }
+}
+
+query getDataObjectsByVideoId($videoId: ID) {
+  storageDataObjects(where: { type_json: { videoId_eq: $videoId } }) {
+    ...DataObjectInfo
+  }
+}
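
These queries back the CLI's storage-node discovery and asset bookkeeping. Below is a minimal sketch of how the `getStorageNodesInfoByBagId` query could be executed with a plain `graphql-request` client; the query-node endpoint URL and the hand-written result type are assumptions for illustration only (the CLI itself goes through its codegen'd `QueryNodeApi`).

```ts
// Sketch only: resolve active storage node endpoints for a given bag.
// Endpoint URL and result typing are assumptions, not part of this change.
import { GraphQLClient, gql } from 'graphql-request'

interface StorageNodeInfo {
  id: string
  operatorMetadata?: { nodeEndpoint?: string | null } | null
}

const client = new GraphQLClient('http://localhost:8081/graphql') // assumed local query-node

export async function getStorageNodesInfoByBagId(bagId: string): Promise<StorageNodeInfo[]> {
  const query = gql`
    query getStorageNodesInfoByBagId($bagId: ID) {
      storageBuckets(
        where: {
          operatorStatus_json: { isTypeOf_eq: "StorageBucketOperatorStatusActive" }
          bags_some: { id_eq: $bagId }
          operatorMetadata: { nodeEndpoint_contains: "http" }
        }
      ) {
        id
        operatorMetadata {
          nodeEndpoint
        }
      }
    }
  `
  const { storageBuckets } = await client.request<{ storageBuckets: StorageNodeInfo[] }>(query, { bagId })
  return storageBuckets
}
```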

+ 2 - 4
cli/src/helpers/JsonSchemaPrompt.ts

@@ -129,15 +129,13 @@ export class JsonSchemaPrompter<JsonResult> {
           confirmed = await this.inquirerSinglePrompt({
             message: `Do you want to provide optional ${chalk.greenBright(objectPropertyPath)}?`,
             type: 'confirm',
-            default:
-              _.get(this.filledObject, objectPropertyPath) !== undefined &&
-              _.get(this.filledObject, objectPropertyPath) !== null,
+            default: _.get(this.filledObject, objectPropertyPath) !== undefined,
           })
         }
         if (confirmed) {
           value[pName] = await this.prompt(pSchema, objectPropertyPath)
         } else {
-          _.set(this.filledObject, objectPropertyPath, null)
+          _.set(this.filledObject, objectPropertyPath, undefined)
         }
       }
       return value

+ 5 - 5
cli/src/helpers/display.ts

@@ -3,7 +3,7 @@ import chalk from 'chalk'
 import { NameValueObj } from '../Types'
 import { AccountId } from '@polkadot/types/interfaces'
 
-export function displayHeader(caption: string, placeholderSign = '_', size = 50) {
+export function displayHeader(caption: string, placeholderSign = '_', size = 50): void {
   const singsPerSide: number = Math.floor((size - (caption.length + 2)) / 2)
   let finalStr = ''
   for (let i = 0; i < singsPerSide; ++i) finalStr += placeholderSign
@@ -13,7 +13,7 @@ export function displayHeader(caption: string, placeholderSign = '_', size = 50)
   process.stdout.write('\n' + chalk.bold.blueBright(finalStr) + '\n\n')
 }
 
-export function displayNameValueTable(rows: NameValueObj[]) {
+export function displayNameValueTable(rows: NameValueObj[]): void {
   cli.table(
     rows,
     {
@@ -24,7 +24,7 @@ export function displayNameValueTable(rows: NameValueObj[]) {
   )
 }
 
-export function displayCollapsedRow(row: { [k: string]: string | number }) {
+export function displayCollapsedRow(row: { [k: string]: string | number }): void {
   const collapsedRow: NameValueObj[] = Object.keys(row).map((name) => ({
     name,
     value: typeof row[name] === 'string' ? (row[name] as string) : row[name].toString(),
@@ -33,11 +33,11 @@ export function displayCollapsedRow(row: { [k: string]: string | number }) {
   displayNameValueTable(collapsedRow)
 }
 
-export function displayCollapsedTable(rows: { [k: string]: string | number }[]) {
+export function displayCollapsedTable(rows: { [k: string]: string | number }[]): void {
   for (const row of rows) displayCollapsedRow(row)
 }
 
-export function displayTable(rows: { [k: string]: string | number }[], cellHorizontalPadding = 0) {
+export function displayTable(rows: { [k: string]: string | number }[], cellHorizontalPadding = 0): void {
   if (!rows.length) {
     return
   }

+ 13 - 89
cli/src/helpers/serialization.ts

@@ -1,98 +1,22 @@
-import {
-  VideoMetadata,
-  PublishedBeforeJoystream,
-  License,
-  MediaType,
-  ChannelMetadata,
-  ChannelCategoryMetadata,
-  VideoCategoryMetadata,
-} from '@joystream/content-metadata-protobuf'
-import {
-  ChannelCategoryInputParameters,
-  ChannelInputParameters,
-  VideoCategoryInputParameters,
-  VideoInputParameters,
-} from '../Types'
+import { AnyMetadataClass, DecodedMetadataObject } from '@joystream/metadata-protobuf/types'
 import { Bytes } from '@polkadot/types/primitive'
 import { createType } from '@joystream/types'
+import { CLIError } from '@oclif/errors'
+import ExitCodes from '../ExitCodes'
+import { metaToObject } from '@joystream/metadata-protobuf/utils'
 
-type AnyMetadata = {
-  serializeBinary(): Uint8Array
+export function metadataToBytes<T>(metaClass: AnyMetadataClass<T>, obj: T): Bytes {
+  return createType('Bytes', '0x' + Buffer.from(metaClass.encode(obj).finish()).toString('hex'))
 }
 
-export function metadataToBytes(metadata: AnyMetadata): Bytes {
-  const bytes = createType('Bytes', '0x' + Buffer.from(metadata.serializeBinary()).toString('hex'))
-  console.log('Metadata as Bytes:', bytes.toString())
-  return bytes
+export function metadataFromBytes<T>(metaClass: AnyMetadataClass<T>, bytes: Bytes): DecodedMetadataObject<T> {
+  return metaToObject(metaClass, metaClass.decode(bytes.toU8a(true)))
 }
 
-// TODO: If "fromObject()" was generated for the protobuffs we could avoid having to create separate converters for each metadata
-
-export function videoMetadataFromInput(videoParametersInput: VideoInputParameters): VideoMetadata {
-  const videoMetadata = new VideoMetadata()
-  videoMetadata.setTitle(videoParametersInput.title as string)
-  videoMetadata.setDescription(videoParametersInput.description as string)
-  videoMetadata.setDuration(videoParametersInput.duration as number)
-  videoMetadata.setMediaPixelHeight(videoParametersInput.mediaPixelHeight as number)
-  videoMetadata.setMediaPixelWidth(videoParametersInput.mediaPixelWidth as number)
-  videoMetadata.setLanguage(videoParametersInput.language as string)
-  videoMetadata.setHasMarketing(videoParametersInput.hasMarketing as boolean)
-  videoMetadata.setIsPublic(videoParametersInput.isPublic as boolean)
-  videoMetadata.setIsExplicit(videoParametersInput.isExplicit as boolean)
-  videoMetadata.setPersonsList(videoParametersInput.personsList as number[])
-  videoMetadata.setCategory(videoParametersInput.category as number)
-
-  if (videoParametersInput.mediaType) {
-    const mediaType = new MediaType()
-    mediaType.setCodecName(videoParametersInput.mediaType.codecName as string)
-    mediaType.setContainer(videoParametersInput.mediaType.container as string)
-    mediaType.setMimeMediaType(videoParametersInput.mediaType.mimeMediaType as string)
-    videoMetadata.setMediaType(mediaType)
-  }
-
-  if (videoParametersInput.publishedBeforeJoystream) {
-    const publishedBeforeJoystream = new PublishedBeforeJoystream()
-    publishedBeforeJoystream.setIsPublished(videoParametersInput.publishedBeforeJoystream.isPublished as boolean)
-    publishedBeforeJoystream.setDate(videoParametersInput.publishedBeforeJoystream.date as string)
-    videoMetadata.setPublishedBeforeJoystream(publishedBeforeJoystream)
+export function asValidatedMetadata<T>(metaClass: AnyMetadataClass<T>, anyObject: any): T {
+  const error = metaClass.verify(anyObject)
+  if (error) {
+    throw new CLIError(`Invalid metadata: ${error}`, { exit: ExitCodes.InvalidInput })
   }
-
-  if (videoParametersInput.license) {
-    const license = new License()
-    license.setCode(videoParametersInput.license.code as number)
-    license.setAttribution(videoParametersInput.license.attribution as string)
-    license.setCustomText(videoParametersInput.license.customText as string)
-    videoMetadata.setLicense(license)
-  }
-
-  return videoMetadata
-}
-
-export function channelMetadataFromInput(channelParametersInput: ChannelInputParameters): ChannelMetadata {
-  const channelMetadata = new ChannelMetadata()
-  channelMetadata.setTitle(channelParametersInput.title as string)
-  channelMetadata.setDescription(channelParametersInput.description as string)
-  channelMetadata.setIsPublic(channelParametersInput.isPublic as boolean)
-  channelMetadata.setLanguage(channelParametersInput.language as string)
-  channelMetadata.setCategory(channelParametersInput.category as number)
-
-  return channelMetadata
-}
-
-export function channelCategoryMetadataFromInput(
-  channelCategoryParametersInput: ChannelCategoryInputParameters
-): ChannelCategoryMetadata {
-  const channelCategoryMetadata = new ChannelCategoryMetadata()
-  channelCategoryMetadata.setName(channelCategoryParametersInput.name as string)
-
-  return channelCategoryMetadata
-}
-
-export function videoCategoryMetadataFromInput(
-  videoCategoryParametersInput: VideoCategoryInputParameters
-): VideoCategoryMetadata {
-  const videoCategoryMetadata = new VideoCategoryMetadata()
-  videoCategoryMetadata.setName(videoCategoryParametersInput.name as string)
-
-  return videoCategoryMetadata
+  return { ...anyObject } as T
 }
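
The rewritten serialization helpers are generic over any `@joystream/metadata-protobuf` class. A hedged usage sketch follows, assuming a sibling relative import and an illustrative `ChannelMetadata` input object:

```ts
// Sketch under assumptions: validate a plain input object against a metadata
// class, serialize it to Bytes for an extrinsic, then decode it back.
import { ChannelMetadata } from '@joystream/metadata-protobuf'
import { asValidatedMetadata, metadataToBytes, metadataFromBytes } from './serialization'

const input = { title: 'My channel', description: 'Test channel', isPublic: true }

// Throws a CLIError (ExitCodes.InvalidInput) if ChannelMetadata.verify() reports an error
const meta = asValidatedMetadata(ChannelMetadata, input)
const bytes = metadataToBytes(ChannelMetadata, meta)

// Round-trip back into a decoded plain object, e.g. when inspecting on-chain data
const decoded = metadataFromBytes(ChannelMetadata, bytes)
console.log(decoded.title)
```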

+ 9 - 2
cli/src/helpers/validation.ts

@@ -4,12 +4,14 @@ import { decodeAddress } from '@polkadot/util-crypto'
 import { DeriveBalancesAll } from '@polkadot/api-derive/types'
 import { CLIError } from '@oclif/errors'
 
-export function validateAddress(address: string, errorMessage = 'Invalid address'): void {
+export function validateAddress(address: string, errorMessage = 'Invalid address'): string | true {
   try {
     decodeAddress(address)
   } catch (e) {
-    throw new CLIError(errorMessage, { exit: ExitCodes.InvalidInput })
+    return errorMessage
   }
+
+  return true
 }
 
 export function checkBalance(accBalances: DeriveBalancesAll, requiredBalance: BN): void {
@@ -17,3 +19,8 @@ export function checkBalance(accBalances: DeriveBalancesAll, requiredBalance: BN
     throw new CLIError('Not enough balance available', { exit: ExitCodes.InvalidInput })
   }
 }
+
+// We assume the balance to be u128, which exceeds JavaScript's safe integer range
+export function isValidBalance(balance: string): boolean {
+  return /^[1-9][0-9]{0,37}$/.test(balance)
+}
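
`validateAddress` now returns `string | true` instead of throwing, which makes it usable directly as a prompt `validate` callback, and `isValidBalance` accepts only plain positive integers of at most 38 digits, keeping the value safely within u128. A small sketch with made-up values and an assumed relative import:

```ts
// Illustrative values only; the relative import path is an assumption.
import { validateAddress, isValidBalance } from './validation'

console.log(isValidBalance('1000000')) // true
console.log(isValidBalance('0'))       // false - zero and leading zeros are rejected
console.log(isValidBalance('12.5'))    // false - only plain positive integers pass

// Usable as a prompt validator: returns true or an error message string
const validate = (input: string): string | true => validateAddress(input, 'Invalid Joystream address')
```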

+ 0 - 22
cli/src/json-schemas/Assets.schema.json

@@ -1,22 +0,0 @@
-{
-  "$schema": "http://json-schema.org/draft-07/schema",
-  "$id": "https://joystream.org/Assets.schema.json",
-  "title": "Assets",
-  "description": "List of assets to upload/reupload",
-  "type": "array",
-  "items": {
-    "type": "object",
-    "required": ["contentId", "path"],
-    "additionalProperties": false,
-    "properties": {
-      "contentId": {
-        "type": "string",
-        "description": "Already existing ContentID"
-      },
-      "path": {
-        "type": "string",
-        "description": "Path to the content file (relative to input json file)"
-      }
-    }
-  }
-}

+ 8 - 1
cli/src/json-schemas/ContentDirectory.ts → cli/src/schemas/ContentDirectory.ts

@@ -30,6 +30,13 @@ export const ChannelInputSchema: JsonSchema<ChannelInputParameters> = {
     coverPhotoPath: { type: 'string' },
     avatarPhotoPath: { type: 'string' },
     rewardAccount: { type: ['string', 'null'] },
+    collaborators: {
+      type: ['array', 'null'],
+      items: {
+        type: 'integer',
+        min: 0,
+      },
+    },
   },
 }
 
@@ -74,7 +81,7 @@ export const VideoInputSchema: JsonSchema<VideoInputParameters> = {
         },
       },
     },
-    personsList: { type: 'array' },
+    persons: { type: 'array' },
     publishedBeforeJoystream: {
       type: 'object',
       properties: {

+ 34 - 0
cli/src/schemas/json/Assets.schema.json

@@ -0,0 +1,34 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema",
+  "$id": "https://joystream.org/Assets.schema.json",
+  "title": "Assets",
+  "description": "List of assets to upload/reupload",
+  "type": "object",
+  "required": ["bagId", "assets"],
+  "properties": {
+    "bagId": {
+      "type": "string",
+      "description": "Target bag id"
+    },
+    "assets": {
+      "type": "array",
+      "description": "List of assets to upload",
+      "items": {
+        "type": "object",
+        "required": ["objectId", "path"],
+        "additionalProperties": false,
+        "properties": {
+          "objectId": {
+            "type": "string",
+            "description": "Already existing data object ID",
+            "pattern": "[0-9]+"
+          },
+          "path": {
+            "type": "string",
+            "description": "Path to the content file (relative to input json file)"
+          }
+        }
+      }
+    }
+  }
+}

+ 0 - 0
cli/src/json-schemas/WorkingGroupOpening.schema.json → cli/src/schemas/json/WorkingGroupOpening.schema.json


+ 30 - 0
cli/src/schemas/typings/Assets.schema.d.ts

@@ -0,0 +1,30 @@
+/* tslint:disable */
+/**
+ * This file was automatically generated by json-schema-to-typescript.
+ * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file,
+ * and run json-schema-to-typescript to regenerate this file.
+ */
+
+/**
+ * List of assets to upload/reupload
+ */
+export interface Assets {
+  /**
+   * Target bag id
+   */
+  bagId: string
+  /**
+   * List of assets to upload
+   */
+  assets: {
+    /**
+     * Already existing data object ID
+     */
+    objectId: string
+    /**
+     * Path to the content file (relative to input json file)
+     */
+    path: string
+  }[]
+  [k: string]: unknown
+}
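
With the schema now keyed by `bagId`, a reupload input file takes the shape below. This sketch just types an example object with the generated interface; the bag id, object ids, paths and the import path are all illustrative assumptions.

```ts
// Example input shaped by the generated Assets interface (all values illustrative)
import { Assets } from './typings/Assets.schema'

const reuploadInput: Assets = {
  bagId: 'dynamic:channel:1',
  assets: [
    { objectId: '0', path: './coverPhoto.png' },
    { objectId: '1', path: './avatarPhoto.png' },
  ],
}
```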

+ 0 - 0
cli/src/json-schemas/typings/WorkingGroupOpening.schema.d.ts → cli/src/schemas/typings/WorkingGroupOpening.schema.d.ts


+ 2 - 1
cli/tsconfig.json

@@ -12,7 +12,8 @@
     "noUnusedLocals": true,
     "baseUrl": ".",
     "paths": {
-      "@polkadot/types/augment": ["../types/augment-codec/augment-types.ts"]
+      "@polkadot/types/augment": ["../types/augment-codec/augment-types.ts"],
+      "@polkadot/api/augment": ["../types/augment-codec/augment-api.ts"]
     },
     "resolveJsonModule": true,
     "skipLibCheck": true

+ 25 - 6
colossus.Dockerfile

@@ -2,15 +2,34 @@ FROM --platform=linux/x86-64 node:14 as builder
 
 WORKDIR /joystream
 COPY . /joystream
-RUN  rm -fr /joystream/pioneer
-
-EXPOSE 3001
 
 RUN yarn --frozen-lockfile
 
 RUN yarn workspace @joystream/types build
-RUN yarn workspace storage-node build
+RUN yarn workspace @joystream/metadata-protobuf build
+RUN yarn workspace storage-node-v2 build
+
+# Use these volumes to persist uploaded data and to pass in the keyfile.
+VOLUME ["/data", "/keystore"]
+
+# Required variables
+ENV WS_PROVIDER_ENDPOINT_URI=ws://not-set
+ENV COLOSSUS_PORT=3333
+ENV QUERY_NODE_ENDPOINT=http://not-set/graphql
+ENV WORKER_ID=not-set
+# - set external key file using the `/keystore` volume
+ENV ACCOUNT_KEYFILE=
+ENV ACCOUNT_PWD=
+# Optional variables
+ENV SYNC_INTERVAL=1
+ENV ELASTIC_SEARCH_ENDPOINT=
+# warn, error, debug, info
+ENV ELASTIC_LOG_LEVEL=debug
+# - overrides account key file
+ENV ACCOUNT_URI=
 
-RUN yarn
+# Colossus node port
+EXPOSE ${COLOSSUS_PORT}
 
-ENTRYPOINT yarn colossus --dev --ws-provider $WS_PROVIDER_ENDPOINT_URI
+WORKDIR /joystream/storage-node-v2
+ENTRYPOINT yarn storage-node server --queryNodeEndpoint ${QUERY_NODE_ENDPOINT} --port ${COLOSSUS_PORT} --uploads /data --worker ${WORKER_ID} --apiUrl ${WS_PROVIDER_ENDPOINT_URI} --sync --syncInterval=${SYNC_INTERVAL} --keyFile=${ACCOUNT_KEYFILE} --elasticSearchEndpoint=${ELASTIC_SEARCH_ENDPOINT}

+ 15 - 25
docker-compose.yml

@@ -18,39 +18,29 @@ services:
       - "127.0.0.1:9944:9944"
       - "127.0.0.1:9933:9933"
 
-  ipfs:
-    image: ipfs/go-ipfs:latest
-    ports:
-      - '127.0.0.1:5001:5001'
-      - '127.0.0.1:8080:8080'
-    volumes:
-      - /data/ipfs
-    entrypoint: ''
-    command: |
-      /bin/sh -c "
-        set -e
-        /usr/local/bin/start_ipfs config profile apply lowpower
-        /usr/local/bin/start_ipfs config --json Gateway.PublicGateways '{\"localhost\": null }'
-        /sbin/tini -- /usr/local/bin/start_ipfs daemon --migrate=true
-      "
-
   colossus:
-    image: joystream/apps
+    image: joystream/colossus:latest
     restart: on-failure
-    depends_on:
-      - "ipfs"
     build:
       context: .
-      dockerfile: apps.Dockerfile
+      dockerfile: colossus.Dockerfile
+    depends_on:
+      - graphql-server-mnt
+    volumes:
+      - /data
+      - /keystore
+    ports:
+      - '127.0.0.1:3333:3333'
     env_file:
       # relative to working directory where docker-compose was run from
       - .env
-    ports:
-      - '127.0.0.1:3001:3001'
-    command: colossus --dev --ws-provider ${WS_PROVIDER_ENDPOINT_URI} --ipfs-host ipfs
     environment:
-      - DEBUG=*
-
+      - COLOSSUS_PORT=3333
+      - QUERY_NODE_ENDPOINT=http://graphql-server-mnt:${GRAPHQL_SERVER_PORT}/graphql
+      - WORKER_ID=0
+      - ACCOUNT_URI=//Alice
+      # enable ElasticSearch server
+      # - ELASTIC_SEARCH_ENDPOINT=host.docker.internal:9200
   db:
     image: postgres:12
     restart: always

+ 1 - 1
package.json

@@ -69,7 +69,7 @@
     "yarn": "^1.22.0"
   },
   "volta": {
-    "node": "14.16.1",
+    "node": "14.18.0",
     "yarn": "1.22.4"
   }
 }

+ 10 - 6
query-node/mappings/content/channel.ts

@@ -4,7 +4,7 @@ eslint-disable @typescript-eslint/naming-convention
 import { EventContext, StoreContext } from '@joystream/hydra-common'
 import { Content } from '../generated/types'
 import { convertContentActorToChannelOwner, processChannelMetadata } from './utils'
-import { Channel, ChannelCategory, StorageDataObject } from 'query-node/dist/model'
+import { Channel, ChannelCategory, StorageDataObject, Membership } from 'query-node/dist/model'
 import { deserializeMetadata, inconsistentState, logger } from '../common'
 import { ChannelCategoryMetadata, ChannelMetadata } from '@joystream/metadata-protobuf'
 import { integrateMeta } from '@joystream/metadata-protobuf/utils'
@@ -14,9 +14,7 @@ import { removeDataObject } from '../storage/utils'
 export async function content_ChannelCreated(ctx: EventContext & StoreContext): Promise<void> {
   const { store, event } = ctx
   // read event data
-  const [contentActor, channelId, runtimeChannel, channelCreationParameters] = new Content.ChannelCreatedEvent(
-    event
-  ).params
+  const [contentActor, channelId, , channelCreationParameters] = new Content.ChannelCreatedEvent(event).params
 
   // create entity
   const channel = new Channel({
@@ -26,12 +24,14 @@ export async function content_ChannelCreated(ctx: EventContext & StoreContext):
     videos: [],
     createdInBlock: event.blockNumber,
     rewardAccount: channelCreationParameters.reward_account.unwrapOr(undefined)?.toString(),
-    deletionPrizeDestAccount: runtimeChannel.deletion_prize_source_account_id.toString(),
     // fill in auto-generated fields
     createdAt: new Date(event.blockTimestamp),
     updatedAt: new Date(event.blockTimestamp),
     // prepare channel owner (handles fields `ownerMember` and `ownerCuratorGroup`)
     ...(await convertContentActorToChannelOwner(store, contentActor)),
+    collaborators: Array.from(channelCreationParameters.collaborators).map(
+      (id) => new Membership({ id: id.toString() })
+    ),
   })
 
   // deserialize & process metadata
@@ -76,13 +76,17 @@ export async function content_ChannelUpdated(ctx: EventContext & StoreContext):
 
   // prepare changed reward account
   const newRewardAccount = channelUpdateParameters.reward_account.unwrapOr(null)
-
   // reward account change happened?
   if (newRewardAccount) {
     // this will change the `channel`!
     channel.rewardAccount = newRewardAccount.unwrapOr(undefined)?.toString()
   }
 
+  const newCollaborators = channelUpdateParameters.collaborators.unwrapOr(undefined)
+  if (newCollaborators) {
+    channel.collaborators = Array.from(newCollaborators).map((id) => new Membership({ id: id.toString() }))
+  }
+
   // set last update time
   channel.updatedAt = new Date(event.blockTimestamp)
 

+ 41 - 25
query-node/mappings/storage/index.ts

@@ -38,6 +38,9 @@ import {
   getDynamicBag,
   getDistributionBucketFamilyWithMetadata,
   getDistributionBucketOperatorWithMetadata,
+  distributionBucketId,
+  distributionOperatorId,
+  distributionBucketIdByFamilyAndIndex,
 } from './utils'
 
 // STORAGE BUCKETS
@@ -92,10 +95,11 @@ export async function storage_StorageBucketInvitationAccepted({
   event,
   store,
 }: EventContext & StoreContext): Promise<void> {
-  const [bucketId, workerId] = new Storage.StorageBucketInvitationAcceptedEvent(event).params
+  const [bucketId, workerId, transactorAccountId] = new Storage.StorageBucketInvitationAcceptedEvent(event).params
   const storageBucket = await getById(store, StorageBucket, bucketId.toString())
   const operatorStatus = new StorageBucketOperatorStatusActive()
   operatorStatus.workerId = workerId.toNumber()
+  operatorStatus.transactorAccountId = transactorAccountId.toString()
   storageBucket.operatorStatus = operatorStatus
   await store.save<StorageBucket>(storageBucket)
 }
@@ -201,7 +205,7 @@ export async function storage_DynamicBagCreated({ event, store }: EventContext &
     owner: getDynamicBagOwner(bagId),
     storageBuckets: Array.from(storageBucketIdsSet).map((id) => new StorageBucket({ id: id.toString() })),
     distributionBuckets: Array.from(distributionBucketIdsSet).map(
-      (id) => new DistributionBucket({ id: id.toString() })
+      (id) => new DistributionBucket({ id: distributionBucketId(id) })
     ),
   })
   await store.save<StorageBag>(storageBag)
@@ -295,7 +299,8 @@ export async function storage_DistributionBucketCreated({ event, store }: EventC
 
   const family = await getById(store, DistributionBucketFamily, familyId.toString())
   const bucket = new DistributionBucket({
-    id: bucketId.toString(),
+    id: distributionBucketId(bucketId),
+    bucketIndex: bucketId.distribution_bucket_index.toNumber(),
     acceptingNewBags: acceptingNewBags.valueOf(),
     distributing: true, // Runtime default
     family,
@@ -308,28 +313,30 @@ export async function storage_DistributionBucketStatusUpdated({
   event,
   store,
 }: EventContext & StoreContext): Promise<void> {
-  const [, bucketId, acceptingNewBags] = new Storage.DistributionBucketStatusUpdatedEvent(event).params
+  const [bucketId, acceptingNewBags] = new Storage.DistributionBucketStatusUpdatedEvent(event).params
 
-  const bucket = await getById(store, DistributionBucket, bucketId.toString())
+  const bucket = await getById(store, DistributionBucket, distributionBucketId(bucketId))
   bucket.acceptingNewBags = acceptingNewBags.valueOf()
 
   await store.save<DistributionBucket>(bucket)
 }
 
 export async function storage_DistributionBucketDeleted({ event, store }: EventContext & StoreContext): Promise<void> {
-  const [, bucketId] = new Storage.DistributionBucketDeletedEvent(event).params
+  const [bucketId] = new Storage.DistributionBucketDeletedEvent(event).params
   // TODO: Cascade remove on db level (would require changes in Hydra / committing autogenerated files)
   const distributionBucket = await store.get(DistributionBucket, {
-    where: { id: bucketId.toString() },
+    where: { id: distributionBucketId(bucketId) },
     relations: ['bags', 'bags.distributionBuckets'],
   })
   if (!distributionBucket) {
-    inconsistentState(`Distribution bucket by id ${bucketId.toString()} not found!`)
+    inconsistentState(`Distribution bucket by id ${distributionBucketId(bucketId)} not found!`)
   }
   // Remove relations
   await Promise.all(
     (distributionBucket.bags || []).map((bag) => {
-      bag.distributionBuckets = (bag.distributionBuckets || []).filter((bucket) => bucket.id !== bucketId.toString())
+      bag.distributionBuckets = (bag.distributionBuckets || []).filter(
+        (bucket) => bucket.id !== distributionBucketId(bucketId)
+      )
       return store.save<StorageBag>(bag)
     })
   )
@@ -340,11 +347,20 @@ export async function storage_DistributionBucketsUpdatedForBag({
   event,
   store,
 }: EventContext & StoreContext): Promise<void> {
-  const [bagId, , addedBucketsSet, removedBucketsSet] = new Storage.DistributionBucketsUpdatedForBagEvent(event).params
+  const [
+    bagId,
+    familyId,
+    addedBucketsIndices,
+    removedBucketsIndices,
+  ] = new Storage.DistributionBucketsUpdatedForBagEvent(event).params
   // Get or create bag
   const storageBag = await getBag(store, bagId, ['distributionBuckets'])
-  const removedBucketsIds = Array.from(removedBucketsSet).map((id) => id.toString())
-  const addedBucketsIds = Array.from(addedBucketsSet).map((id) => id.toString())
+  const removedBucketsIds = Array.from(removedBucketsIndices).map((bucketIndex) =>
+    distributionBucketIdByFamilyAndIndex(familyId, bucketIndex)
+  )
+  const addedBucketsIds = Array.from(addedBucketsIndices).map((bucketIndex) =>
+    distributionBucketIdByFamilyAndIndex(familyId, bucketIndex)
+  )
   storageBag.distributionBuckets = (storageBag.distributionBuckets || [])
     .filter((bucket) => !removedBucketsIds.includes(bucket.id))
     .concat(addedBucketsIds.map((id) => new DistributionBucket({ id })))
@@ -355,9 +371,9 @@ export async function storage_DistributionBucketModeUpdated({
   event,
   store,
 }: EventContext & StoreContext): Promise<void> {
-  const [, bucketId, distributing] = new Storage.DistributionBucketModeUpdatedEvent(event).params
+  const [bucketId, distributing] = new Storage.DistributionBucketModeUpdatedEvent(event).params
 
-  const bucket = await getById(store, DistributionBucket, bucketId.toString())
+  const bucket = await getById(store, DistributionBucket, distributionBucketId(bucketId))
   bucket.distributing = distributing.valueOf()
 
   await store.save<DistributionBucket>(bucket)
@@ -367,11 +383,11 @@ export async function storage_DistributionBucketOperatorInvited({
   event,
   store,
 }: EventContext & StoreContext): Promise<void> {
-  const [, bucketId, workerId] = new Storage.DistributionBucketOperatorInvitedEvent(event).params
+  const [bucketId, workerId] = new Storage.DistributionBucketOperatorInvitedEvent(event).params
 
-  const bucket = await getById(store, DistributionBucket, bucketId.toString())
+  const bucket = await getById(store, DistributionBucket, distributionBucketId(bucketId))
   const invitedOperator = new DistributionBucketOperator({
-    id: `${bucketId}-${workerId}`,
+    id: distributionOperatorId(bucketId, workerId),
     distributionBucket: bucket,
     status: DistributionBucketOperatorStatus.INVITED,
     workerId: workerId.toNumber(),
@@ -384,9 +400,9 @@ export async function storage_DistributionBucketInvitationCancelled({
   event,
   store,
 }: EventContext & StoreContext): Promise<void> {
-  const [, bucketId, workerId] = new Storage.DistributionBucketOperatorInvitedEvent(event).params
+  const [bucketId, workerId] = new Storage.DistributionBucketOperatorInvitedEvent(event).params
 
-  const invitedOperator = await getById(store, DistributionBucketOperator, `${bucketId}-${workerId}`)
+  const invitedOperator = await getById(store, DistributionBucketOperator, distributionOperatorId(bucketId, workerId))
 
   await store.remove<DistributionBucketOperator>(invitedOperator)
 }
@@ -395,9 +411,9 @@ export async function storage_DistributionBucketInvitationAccepted({
   event,
   store,
 }: EventContext & StoreContext): Promise<void> {
-  const [workerId, , bucketId] = new Storage.DistributionBucketInvitationAcceptedEvent(event).params
+  const [workerId, bucketId] = new Storage.DistributionBucketInvitationAcceptedEvent(event).params
 
-  const invitedOperator = await getById(store, DistributionBucketOperator, `${bucketId}-${workerId}`)
+  const invitedOperator = await getById(store, DistributionBucketOperator, distributionOperatorId(bucketId, workerId))
   invitedOperator.status = DistributionBucketOperatorStatus.ACTIVE
 
   await store.save<DistributionBucketOperator>(invitedOperator)
@@ -407,9 +423,9 @@ export async function storage_DistributionBucketMetadataSet({
   event,
   store,
 }: EventContext & StoreContext): Promise<void> {
-  const [workerId, , bucketId, metadataBytes] = new Storage.DistributionBucketMetadataSetEvent(event).params
+  const [workerId, bucketId, metadataBytes] = new Storage.DistributionBucketMetadataSetEvent(event).params
 
-  const operator = await getDistributionBucketOperatorWithMetadata(store, `${bucketId}-${workerId}`)
+  const operator = await getDistributionBucketOperatorWithMetadata(store, distributionOperatorId(bucketId, workerId))
   operator.metadata = await processDistributionOperatorMetadata(store, operator.metadata, metadataBytes)
 
   await store.save<DistributionBucketOperator>(operator)
@@ -419,11 +435,11 @@ export async function storage_DistributionBucketOperatorRemoved({
   event,
   store,
 }: EventContext & StoreContext): Promise<void> {
-  const [, bucketId, workerId] = new Storage.DistributionBucketOperatorRemovedEvent(event).params
+  const [bucketId, workerId] = new Storage.DistributionBucketOperatorRemovedEvent(event).params
 
   // TODO: Cascade remove on db level (would require changes in Hydra / committing autogenerated files)
 
-  const operator = await getDistributionBucketOperatorWithMetadata(store, `${bucketId}-${workerId}`)
+  const operator = await getDistributionBucketOperatorWithMetadata(store, distributionOperatorId(bucketId, workerId))
   await store.remove<DistributionBucketOperator>(operator)
   if (operator.metadata) {
     await store.remove<DistributionBucketOperatorMetadata>(operator.metadata)

+ 28 - 3
query-node/mappings/storage/utils.ts

@@ -23,7 +23,16 @@ import { unsetAssetRelations } from '../content/utils'
 
 import { BTreeSet } from '@polkadot/types'
 import _ from 'lodash'
-import { DataObjectId, BagId, DynamicBagId, StaticBagId } from '@joystream/types/augment/all'
+import {
+  DataObjectId,
+  BagId,
+  DynamicBagId,
+  StaticBagId,
+  DistributionBucketId,
+  DistributionBucketFamilyId,
+  DistributionBucketIndex,
+  WorkerId,
+} from '@joystream/types/augment/all'
 import { Balance } from '@polkadot/types/interfaces'
 
 export async function getDataObjectsInBag(
@@ -60,7 +69,7 @@ export function getStaticBagOwner(bagId: StaticBagId): typeof StorageBagOwner {
   }
 }
 
-export function getDynamicBagOwner(bagId: DynamicBagId) {
+export function getDynamicBagOwner(bagId: DynamicBagId): typeof StorageBagOwner {
   if (bagId.isChannel) {
     const owner = new StorageBagOwnerChannel()
     owner.channelId = bagId.asChannel.toNumber()
@@ -94,7 +103,7 @@ export function getDynamicBagId(bagId: DynamicBagId): string {
   }
 }
 
-export function getBagId(bagId: BagId) {
+export function getBagId(bagId: BagId): string {
   return bagId.isStatic ? getStaticBagId(bagId.asStatic) : getDynamicBagId(bagId.asDynamic)
 }
 
@@ -239,3 +248,19 @@ export async function removeDataObject(store: DatabaseManager, object: StorageDa
   await unsetAssetRelations(store, object)
   await store.remove<StorageDataObject>(object)
 }
+
+export function distributionBucketId(runtimeBucketId: DistributionBucketId): string {
+  const { distribution_bucket_family_id: familyId, distribution_bucket_index: bucketIndex } = runtimeBucketId
+  return distributionBucketIdByFamilyAndIndex(familyId, bucketIndex)
+}
+
+export function distributionBucketIdByFamilyAndIndex(
+  familyId: DistributionBucketFamilyId,
+  bucketIndex: DistributionBucketIndex
+): string {
+  return `${familyId.toString()}:${bucketIndex.toString()}`
+}
+
+export function distributionOperatorId(bucketId: DistributionBucketId, workerId: WorkerId): string {
+  return `${distributionBucketId(bucketId)}-${workerId.toString()}`
+}
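
The new helpers make the composite runtime `DistributionBucketId` (family id plus per-family bucket index) explicit in query-node entity ids, so buckets from different families can no longer collide on a bare index. A plain-string illustration of the resulting id scheme, with made-up numbers:

```ts
// Values are made up; the real helpers operate on runtime types, not plain numbers.
const familyId = 2
const bucketIndex = 7
const workerId = 5

const bucketEntityId = `${familyId}:${bucketIndex}`      // '2:7'   - distributionBucketIdByFamilyAndIndex
const operatorEntityId = `${bucketEntityId}-${workerId}` // '2:7-5' - distributionOperatorId

console.log(bucketEntityId, operatorEntityId)
```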

+ 5 - 3
query-node/schemas/content.graphql

@@ -35,9 +35,6 @@ type Channel @entity {
   "Reward account where revenue is sent if set."
   rewardAccount: String
 
-  "Destination account for the prize associated with channel deletion"
-  deletionPrizeDestAccount: String!
-
   "The title of the Channel"
   title: String @fulltext(query: "search")
 
@@ -61,9 +58,14 @@ type Channel @entity {
   "The primary langauge of the channel's content"
   language: Language
 
+  "List of videos that belong to the channel"
   videos: [Video!]! @derivedFrom(field: "channel")
 
+  "Number of the block the channel was created in"
   createdInBlock: Int!
+
+  "List of channel collaborators (members)"
+  collaborators: [Membership!]
 }
 
 type CuratorGroup @entity {

+ 4 - 0
query-node/schemas/membership.graphql

@@ -33,5 +33,9 @@ type Membership @entity {
   "The type of subscription the member has purchased if any."
   subscription: Int
 
+  "List of channels the member owns"
   channels: [Channel!]! @derivedFrom(field: "ownerMember")
+
+  "List of channels the member has collaborator access to"
+  collaboratorInChannels: [Channel!] @derivedFrom(field: "collaborators")
 }
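
Since `collaborators` is now a relation on `Channel`, the derived `collaboratorInChannels` field lets a client list the channels a member can collaborate on. A hedged sketch against the query-node GraphQL API; the endpoint and the Hydra-style `membershipByUniqueInput` query name are assumptions, not confirmed by this change.

```ts
// Sketch only: endpoint and query name are assumptions based on Hydra's usual codegen.
import { GraphQLClient, gql } from 'graphql-request'

const client = new GraphQLClient('http://localhost:8081/graphql')

export async function collaboratorChannelIds(memberId: string): Promise<string[]> {
  const query = gql`
    query ($memberId: ID!) {
      membershipByUniqueInput(where: { id: $memberId }) {
        collaboratorInChannels {
          id
        }
      }
    }
  `
  const data = await client.request<{
    membershipByUniqueInput?: { collaboratorInChannels?: { id: string }[] | null } | null
  }>(query, { memberId })
  return (data.membershipByUniqueInput?.collaboratorInChannels ?? []).map((c) => c.id)
}
```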

+ 5 - 1
query-node/schemas/storage.graphql

@@ -35,6 +35,7 @@ type StorageBucketOperatorStatusInvited @variant {
 
 type StorageBucketOperatorStatusActive @variant {
   workerId: Int!
+  transactorAccountId: String!
 }
 
 union StorageBucketOperatorStatus = StorageBucketOperatorStatusMissing | StorageBucketOperatorStatusInvited | StorageBucketOperatorStatusActive
@@ -273,12 +274,15 @@ type DistributionBucketOperator @entity {
 }
 
 type DistributionBucket @entity {
-  "Runtime bucket id"
+  "Runtime bucket id in {familyId}:{bucketIndex} format"
   id: ID!
 
   "Distribution family the bucket is part of"
   family: DistributionBucketFamily!
 
+  "Bucket index within the family"
+  bucketIndex: Int!
+
   "Distribution bucket operators (either active or invited)"
   operators: [DistributionBucketOperator!] @derivedFrom(field: "distributionBucket")
 

+ 79 - 32
runtime-modules/storage/src/lib.rs

@@ -137,7 +137,7 @@ use frame_support::traits::{Currency, ExistenceRequirement, Get, Randomness};
 use frame_support::{
     decl_error, decl_event, decl_module, decl_storage, ensure, IterableStorageDoubleMap, Parameter,
 };
-use frame_system::ensure_root;
+use frame_system::{ensure_root, ensure_signed};
 #[cfg(feature = "std")]
 use serde::{Deserialize, Serialize};
 use sp_arithmetic::traits::{BaseArithmetic, One, Zero};
@@ -789,7 +789,7 @@ impl VoucherUpdate {
 /// Defines the storage bucket connection to the storage operator (storage WG worker).
 #[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
 #[derive(Encode, Decode, Clone, PartialEq, Eq, Debug)]
-pub enum StorageBucketOperatorStatus<WorkerId> {
+pub enum StorageBucketOperatorStatus<WorkerId, AccountId> {
     /// No connection.
     Missing,
 
@@ -797,22 +797,25 @@ pub enum StorageBucketOperatorStatus<WorkerId> {
     InvitedStorageWorker(WorkerId),
 
     /// Storage operator accepted the invitation.
-    StorageWorker(WorkerId),
+    StorageWorker(WorkerId, AccountId),
 }
 
-impl<WorkerId> Default for StorageBucketOperatorStatus<WorkerId> {
+impl<WorkerId, AccountId> Default for StorageBucketOperatorStatus<WorkerId, AccountId> {
     fn default() -> Self {
         Self::Missing
     }
 }
 
+/// Type alias for the StorageBucketRecord.
+pub type StorageBucket<T> = StorageBucketRecord<WorkerId<T>, <T as frame_system::Trait>::AccountId>;
+
 /// A commitment to hold some set of bags for long term storage. A bucket may have a bucket
 /// operator, which is a single worker in the storage working group.
 #[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
 #[derive(Encode, Decode, Default, Clone, PartialEq, Eq, Debug)]
-pub struct StorageBucket<WorkerId> {
+pub struct StorageBucketRecord<WorkerId, AccountId> {
     /// Current storage operator status.
-    pub operator_status: StorageBucketOperatorStatus<WorkerId>,
+    pub operator_status: StorageBucketOperatorStatus<WorkerId, AccountId>,
 
     /// Defines whether the bucket accepts new bags.
     pub accepting_new_bags: bool,
@@ -948,7 +951,7 @@ decl_storage! {
 
         /// Storage buckets.
         pub StorageBucketById get (fn storage_bucket_by_id): map hasher(blake2_128_concat)
-            T::StorageBucketId => StorageBucket<WorkerId<T>>;
+            T::StorageBucketId => StorageBucket<T>;
 
         /// Blacklisted data object hashes.
         pub Blacklist get (fn blacklist): map hasher(blake2_128_concat) Cid => ();
@@ -1030,7 +1033,8 @@ decl_event! {
         /// Params
         /// - storage bucket ID
         /// - invited worker ID
-        StorageBucketInvitationAccepted(StorageBucketId, WorkerId),
+        /// - transactor account ID
+        StorageBucketInvitationAccepted(StorageBucketId, WorkerId, AccountId),
 
         /// Emits on updating storage buckets for bag.
         /// Params
@@ -1463,6 +1467,8 @@ decl_error! {
         /// Different Accounts for dynamic bag deletion prize and upload fees
         AccountsNotCoherent,
 
+        /// Invalid transactor account ID for this bucket.
+        InvalidTransactorAccount,
     }
 }
 
@@ -1701,7 +1707,7 @@ decl_module! {
                 .map(StorageBucketOperatorStatus::InvitedStorageWorker)
                 .unwrap_or(StorageBucketOperatorStatus::Missing);
 
-            let storage_bucket = StorageBucket {
+            let storage_bucket = StorageBucket::<T> {
                 operator_status,
                 accepting_new_bags,
                 voucher,
@@ -1918,11 +1924,14 @@ decl_module! {
         // ===== Storage Operator actions =====
 
         /// Accept the storage bucket invitation. An invitation must match the worker_id parameter.
+        /// An additional account ID (transactor) is provided and will be used when accepting
+        /// data objects, in order to prevent transaction nonce collisions.
         #[weight = 10_000_000] // TODO: adjust weight
         pub fn accept_storage_bucket_invitation(
             origin,
             worker_id: WorkerId<T>,
-            storage_bucket_id: T::StorageBucketId
+            storage_bucket_id: T::StorageBucketId,
+            transactor_account_id: T::AccountId,
         ) {
             T::ensure_storage_worker_origin(origin, worker_id)?;
 
@@ -1935,11 +1944,19 @@ decl_module! {
             //
 
             <StorageBucketById<T>>::mutate(storage_bucket_id, |bucket| {
-                bucket.operator_status = StorageBucketOperatorStatus::StorageWorker(worker_id);
+                bucket.operator_status =
+                    StorageBucketOperatorStatus::StorageWorker(
+                        worker_id,
+                        transactor_account_id.clone()
+                );
             });
 
             Self::deposit_event(
-                RawEvent::StorageBucketInvitationAccepted(storage_bucket_id, worker_id)
+                RawEvent::StorageBucketInvitationAccepted(
+                    storage_bucket_id,
+                    worker_id,
+                    transactor_account_id
+                )
             );
         }
 
@@ -1975,11 +1992,11 @@ decl_module! {
             bag_id: BagId<T>,
             data_objects: BTreeSet<T::DataObjectId>,
         ) {
-            T::ensure_storage_worker_origin(origin, worker_id)?;
+            let transactor_account_id = ensure_signed(origin)?;
 
             let bucket = Self::ensure_storage_bucket_exists(&storage_bucket_id)?;
 
-            Self::ensure_bucket_invitation_accepted(&bucket, worker_id)?;
+            Self::ensure_bucket_transactor_access(&bucket, worker_id, transactor_account_id)?;
 
             Self::ensure_bag_exists(&bag_id)?;
 
@@ -2276,9 +2293,11 @@ decl_module! {
             // == MUTATION SAFE ==
             //
 
-            DynamicBagCreationPolicies::<T>::mutate(dynamic_bag_type, |creation_policy| {
-                creation_policy.families = families.clone();
-            });
+            // If no policy exists yet, start from the default one, so the default storage bucket number is preserved.
+            let mut new_policy = Self::get_dynamic_bag_creation_policy(dynamic_bag_type);
+            new_policy.families = families.clone();
+
+            DynamicBagCreationPolicies::<T>::insert(dynamic_bag_type, new_policy);
 
             Self::deposit_event(
                 RawEvent::FamiliesInDynamicBagCreationPolicyUpdated(
@@ -2912,7 +2931,7 @@ impl<T: Trait> Module<T> {
     // Returns the StorageBucket object or error.
     fn ensure_storage_bucket_exists(
         storage_bucket_id: &T::StorageBucketId,
-    ) -> Result<StorageBucket<WorkerId<T>>, Error<T>> {
+    ) -> Result<StorageBucket<T>, Error<T>> {
         ensure!(
             <StorageBucketById<T>>::contains_key(storage_bucket_id),
             Error::<T>::StorageBucketDoesntExist
@@ -2924,14 +2943,14 @@ impl<T: Trait> Module<T> {
     // Ensures the correct invitation for the storage bucket and storage provider. Storage provider
     // must be invited.
     fn ensure_bucket_storage_provider_invitation_status(
-        bucket: &StorageBucket<WorkerId<T>>,
+        bucket: &StorageBucket<T>,
         worker_id: WorkerId<T>,
     ) -> DispatchResult {
         match bucket.operator_status {
             StorageBucketOperatorStatus::Missing => {
                 Err(Error::<T>::NoStorageBucketInvitation.into())
             }
-            StorageBucketOperatorStatus::StorageWorker(_) => {
+            StorageBucketOperatorStatus::StorageWorker(..) => {
                 Err(Error::<T>::StorageProviderAlreadySet.into())
             }
             StorageBucketOperatorStatus::InvitedStorageWorker(invited_worker_id) => {
@@ -2948,9 +2967,9 @@ impl<T: Trait> Module<T> {
     // Ensures the correct invitation for the storage bucket and storage provider for removal.
     // Must be invited storage provider.
     fn ensure_bucket_storage_provider_invitation_status_for_removal(
-        bucket: &StorageBucket<WorkerId<T>>,
+        bucket: &StorageBucket<T>,
     ) -> DispatchResult {
-        if let StorageBucketOperatorStatus::StorageWorker(_) = bucket.operator_status {
+        if let StorageBucketOperatorStatus::StorageWorker(..) = bucket.operator_status {
             Ok(())
         } else {
             Err(Error::<T>::StorageProviderMustBeSet.into())
@@ -2958,14 +2977,12 @@ impl<T: Trait> Module<T> {
     }
 
     // Ensures the correct invitation for the storage bucket and storage provider. Must be pending.
-    fn ensure_bucket_pending_invitation_status(
-        bucket: &StorageBucket<WorkerId<T>>,
-    ) -> DispatchResult {
+    fn ensure_bucket_pending_invitation_status(bucket: &StorageBucket<T>) -> DispatchResult {
         match bucket.operator_status {
             StorageBucketOperatorStatus::Missing => {
                 Err(Error::<T>::NoStorageBucketInvitation.into())
             }
-            StorageBucketOperatorStatus::StorageWorker(_) => {
+            StorageBucketOperatorStatus::StorageWorker(..) => {
                 Err(Error::<T>::StorageProviderAlreadySet.into())
             }
             StorageBucketOperatorStatus::InvitedStorageWorker(_) => Ok(()),
@@ -2973,12 +2990,10 @@ impl<T: Trait> Module<T> {
     }
 
     // Ensures the missing invitation for the storage bucket and storage provider.
-    fn ensure_bucket_missing_invitation_status(
-        bucket: &StorageBucket<WorkerId<T>>,
-    ) -> DispatchResult {
+    fn ensure_bucket_missing_invitation_status(bucket: &StorageBucket<T>) -> DispatchResult {
         match bucket.operator_status {
             StorageBucketOperatorStatus::Missing => Ok(()),
-            StorageBucketOperatorStatus::StorageWorker(_) => {
+            StorageBucketOperatorStatus::StorageWorker(..) => {
                 Err(Error::<T>::StorageProviderAlreadySet.into())
             }
             StorageBucketOperatorStatus::InvitedStorageWorker(_) => {
@@ -2989,7 +3004,7 @@ impl<T: Trait> Module<T> {
 
     // Ensures correct storage provider for the storage bucket.
     fn ensure_bucket_invitation_accepted(
-        bucket: &StorageBucket<WorkerId<T>>,
+        bucket: &StorageBucket<T>,
         worker_id: WorkerId<T>,
     ) -> DispatchResult {
         match bucket.operator_status {
@@ -2999,12 +3014,44 @@ impl<T: Trait> Module<T> {
             StorageBucketOperatorStatus::InvitedStorageWorker(_) => {
                 Err(Error::<T>::InvalidStorageProvider.into())
             }
-            StorageBucketOperatorStatus::StorageWorker(invited_worker_id) => {
+            StorageBucketOperatorStatus::StorageWorker(invited_worker_id, _) => {
+                ensure!(
+                    worker_id == invited_worker_id,
+                    Error::<T>::InvalidStorageProvider
+                );
+
+                Ok(())
+            }
+        }
+    }
+
+    // Ensures the correct storage provider and transactor account for the storage bucket.
+    fn ensure_bucket_transactor_access(
+        bucket: &StorageBucket<T>,
+        worker_id: WorkerId<T>,
+        transactor_account_id: T::AccountId,
+    ) -> DispatchResult {
+        match bucket.operator_status.clone() {
+            StorageBucketOperatorStatus::Missing => {
+                Err(Error::<T>::StorageProviderMustBeSet.into())
+            }
+            StorageBucketOperatorStatus::InvitedStorageWorker(_) => {
+                Err(Error::<T>::InvalidStorageProvider.into())
+            }
+            StorageBucketOperatorStatus::StorageWorker(
+                invited_worker_id,
+                bucket_transactor_account_id,
+            ) => {
                 ensure!(
                     worker_id == invited_worker_id,
                     Error::<T>::InvalidStorageProvider
                 );
 
+                ensure!(
+                    transactor_account_id == bucket_transactor_account_id,
+                    Error::<T>::InvalidTransactorAccount
+                );
+
                 Ok(())
             }
         }
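
For readers skimming the diff: the check added above is a two-step match. The caller's worker ID must equal the bucket operator's, and the signing account must equal the transactor registered when the invitation was accepted. Below is a minimal standalone sketch of that logic, with the pallet's generic WorkerId<T> and T::AccountId reduced to plain u64; it is illustrative only, not the runtime code.

// Illustrative sketch: simplified stand-ins for the pallet's generic types.
type WorkerId = u64;
type AccountId = u64;

#[derive(Clone, PartialEq, Debug)]
enum OperatorStatus {
    Missing,
    InvitedStorageWorker(WorkerId),
    // Once the invitation is accepted, the bucket stores both the operator's worker ID
    // and the dedicated transactor account that signs data object calls.
    StorageWorker(WorkerId, AccountId),
}

#[derive(Debug, PartialEq)]
enum CheckError {
    StorageProviderMustBeSet,
    InvalidStorageProvider,
    InvalidTransactorAccount,
}

// Mirrors the two-step logic of `ensure_bucket_transactor_access` above:
// the worker ID must match the operator, and the signer must match the transactor.
fn ensure_bucket_transactor_access(
    status: &OperatorStatus,
    worker_id: WorkerId,
    transactor: AccountId,
) -> Result<(), CheckError> {
    match status {
        OperatorStatus::Missing => Err(CheckError::StorageProviderMustBeSet),
        OperatorStatus::InvitedStorageWorker(_) => Err(CheckError::InvalidStorageProvider),
        OperatorStatus::StorageWorker(operator_id, bucket_transactor) => {
            if *operator_id != worker_id {
                return Err(CheckError::InvalidStorageProvider);
            }
            if *bucket_transactor != transactor {
                return Err(CheckError::InvalidTransactorAccount);
            }
            Ok(())
        }
    }
}

fn main() {
    let status = OperatorStatus::StorageWorker(7, 1000);
    assert_eq!(ensure_bucket_transactor_access(&status, 7, 1000), Ok(()));
    assert_eq!(
        ensure_bucket_transactor_access(&status, 7, 9999),
        Err(CheckError::InvalidTransactorAccount)
    );
}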

+ 18 - 1
runtime-modules/storage/src/tests/fixtures.rs

@@ -188,6 +188,7 @@ pub struct AcceptStorageBucketInvitationFixture {
     origin: RawOrigin<u64>,
     worker_id: u64,
     storage_bucket_id: u64,
+    transactor_account_id: u64,
 }
 
 impl AcceptStorageBucketInvitationFixture {
@@ -196,6 +197,7 @@ impl AcceptStorageBucketInvitationFixture {
             origin: RawOrigin::Signed(DEFAULT_ACCOUNT_ID),
             worker_id: DEFAULT_WORKER_ID,
             storage_bucket_id: Default::default(),
+            transactor_account_id: DEFAULT_ACCOUNT_ID,
         }
     }
 
@@ -206,6 +208,12 @@ impl AcceptStorageBucketInvitationFixture {
     pub fn with_worker_id(self, worker_id: u64) -> Self {
         Self { worker_id, ..self }
     }
+    pub fn with_transactor_account_id(self, transactor_account_id: u64) -> Self {
+        Self {
+            transactor_account_id,
+            ..self
+        }
+    }
 
     pub fn with_storage_bucket_id(self, storage_bucket_id: u64) -> Self {
         Self {
@@ -221,6 +229,7 @@ impl AcceptStorageBucketInvitationFixture {
             self.origin.clone().into(),
             self.worker_id,
             self.storage_bucket_id,
+            self.transactor_account_id,
         );
 
         assert_eq!(actual_result, expected_result);
@@ -229,7 +238,10 @@ impl AcceptStorageBucketInvitationFixture {
         if actual_result.is_ok() {
             assert_eq!(
                 new_bucket.operator_status,
-                StorageBucketOperatorStatus::StorageWorker(self.worker_id)
+                StorageBucketOperatorStatus::StorageWorker(
+                    self.worker_id,
+                    self.transactor_account_id
+                )
             );
         } else {
             assert_eq!(old_bucket, new_bucket);
@@ -1686,6 +1698,11 @@ impl UpdateFamiliesInDynamicBagCreationPolicyFixture {
         assert_eq!(actual_result, expected_result);
 
         let new_policy = Storage::get_dynamic_bag_creation_policy(self.dynamic_bag_type);
+        assert_eq!(
+            old_policy.number_of_storage_buckets,
+            new_policy.number_of_storage_buckets
+        );
+
         if actual_result.is_ok() {
             assert_eq!(new_policy.families, self.families);
         } else {
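
Aside on the `number_of_storage_buckets` assertion above: `update_families_in_dynamic_bag_creation_policy` now reads the current policy via `get_dynamic_bag_creation_policy` (falling back to the default) before writing it back with `insert`, so the default bucket number is preserved even when no policy was stored yet. A minimal sketch of that get-modify-insert pattern follows, with simplified types and a plain HashMap standing in for the storage map; the default value of 5 and the Vec<u64> families field are assumptions made for the sketch.

use std::collections::HashMap;

// Illustrative stand-ins; the real pallet keys by DynamicBagType, stores a map of
// distribution bucket families, and takes the default bucket number from runtime config.
#[derive(Clone, Debug, PartialEq)]
struct DynamicBagCreationPolicy {
    number_of_storage_buckets: u64,
    families: Vec<u64>,
}

impl Default for DynamicBagCreationPolicy {
    fn default() -> Self {
        Self {
            // Assumed default for the sketch only.
            number_of_storage_buckets: 5,
            families: Vec::new(),
        }
    }
}

// Read the stored policy (or fall back to the default), overwrite only `families`,
// and write the whole record back - the get + insert pattern used in the diff.
fn update_families(
    storage: &mut HashMap<&'static str, DynamicBagCreationPolicy>,
    bag_type: &'static str,
    families: Vec<u64>,
) {
    let mut policy = storage.get(bag_type).cloned().unwrap_or_default();
    policy.families = families;
    storage.insert(bag_type, policy);
}

fn main() {
    let mut storage = HashMap::new();
    update_families(&mut storage, "Member", vec![1, 2]);
    // The default bucket number survives even though no policy was stored before,
    // which is exactly what the new fixture assertion checks.
    assert_eq!(storage["Member"].number_of_storage_buckets, 5);
    assert_eq!(storage["Member"].families, vec![1, 2]);
}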

+ 47 - 0
runtime-modules/storage/src/tests/mod.rs

@@ -156,6 +156,7 @@ fn accept_storage_bucket_invitation_succeeded() {
 
         let storage_provider_id = DEFAULT_STORAGE_PROVIDER_ID;
         let invite_worker = Some(storage_provider_id);
+        let transactor_id = DEFAULT_STORAGE_PROVIDER_ACCOUNT_ID;
 
         let bucket_id = CreateStorageBucketFixture::default()
             .with_origin(RawOrigin::Signed(STORAGE_WG_LEADER_ACCOUNT_ID))
@@ -167,11 +168,13 @@ fn accept_storage_bucket_invitation_succeeded() {
             .with_origin(RawOrigin::Signed(DEFAULT_STORAGE_PROVIDER_ACCOUNT_ID))
             .with_storage_bucket_id(bucket_id)
             .with_worker_id(storage_provider_id)
+            .with_transactor_account_id(transactor_id)
             .call_and_assert(Ok(()));
 
         EventFixture::assert_last_crate_event(RawEvent::StorageBucketInvitationAccepted(
             bucket_id,
             storage_provider_id,
+            transactor_id,
         ));
     });
 }
@@ -1327,6 +1330,7 @@ fn accept_pending_data_objects_fails_with_unrelated_storage_bucket() {
 
         AcceptStorageBucketInvitationFixture::default()
             .with_origin(RawOrigin::Signed(DEFAULT_STORAGE_PROVIDER_ACCOUNT_ID))
+            .with_transactor_account_id(DEFAULT_STORAGE_PROVIDER_ACCOUNT_ID)
             .with_storage_bucket_id(bucket_id)
             .with_worker_id(storage_provider_id)
             .call_and_assert(Ok(()));
@@ -1373,6 +1377,7 @@ fn accept_pending_data_objects_fails_with_non_existing_dynamic_bag() {
 
         AcceptStorageBucketInvitationFixture::default()
             .with_origin(RawOrigin::Signed(DEFAULT_STORAGE_PROVIDER_ACCOUNT_ID))
+            .with_transactor_account_id(DEFAULT_STORAGE_PROVIDER_ACCOUNT_ID)
             .with_storage_bucket_id(bucket_id)
             .with_worker_id(storage_provider_id)
             .call_and_assert(Ok(()));
@@ -1397,6 +1402,46 @@ fn accept_pending_data_objects_fails_with_non_existing_dynamic_bag() {
     });
 }
 
+#[test]
+fn accept_pending_data_objects_fails_with_invalid_transactor_account_id() {
+    build_test_externalities().execute_with(|| {
+        let storage_provider_id = DEFAULT_STORAGE_PROVIDER_ID;
+        let invite_worker = Some(storage_provider_id);
+        let transactor_account_id = 11111;
+
+        let bucket_id = CreateStorageBucketFixture::default()
+            .with_origin(RawOrigin::Signed(STORAGE_WG_LEADER_ACCOUNT_ID))
+            .with_invite_worker(invite_worker)
+            .call_and_assert(Ok(()))
+            .unwrap();
+
+        AcceptStorageBucketInvitationFixture::default()
+            .with_origin(RawOrigin::Signed(DEFAULT_STORAGE_PROVIDER_ACCOUNT_ID))
+            .with_transactor_account_id(transactor_account_id)
+            .with_storage_bucket_id(bucket_id)
+            .with_worker_id(storage_provider_id)
+            .call_and_assert(Ok(()));
+
+        let initial_balance = 1000;
+        increase_account_balance(&DEFAULT_MEMBER_ACCOUNT_ID, initial_balance);
+
+        let dynamic_bag_id = DynamicBagId::<Test>::Member(DEFAULT_MEMBER_ID);
+        let bag_id = BagId::<Test>::Dynamic(dynamic_bag_id.clone());
+
+        let data_object_id = 0;
+
+        let data_object_ids = BTreeSet::from_iter(vec![data_object_id]);
+
+        AcceptPendingDataObjectsFixture::default()
+            .with_origin(RawOrigin::Signed(DEFAULT_STORAGE_PROVIDER_ACCOUNT_ID))
+            .with_worker_id(storage_provider_id)
+            .with_storage_bucket_id(bucket_id)
+            .with_bag_id(bag_id.clone())
+            .with_data_object_ids(data_object_ids)
+            .call_and_assert(Err(Error::<Test>::InvalidTransactorAccount.into()));
+    });
+}
+
 #[test]
 fn accept_pending_data_objects_succeeded_with_dynamic_bag() {
     build_test_externalities().execute_with(|| {
@@ -1417,6 +1462,7 @@ fn accept_pending_data_objects_succeeded_with_dynamic_bag() {
 
         AcceptStorageBucketInvitationFixture::default()
             .with_origin(RawOrigin::Signed(DEFAULT_STORAGE_PROVIDER_ACCOUNT_ID))
+            .with_transactor_account_id(DEFAULT_STORAGE_PROVIDER_ACCOUNT_ID)
             .with_storage_bucket_id(bucket_id)
             .with_worker_id(storage_provider_id)
             .call_and_assert(Ok(()));
@@ -2482,6 +2528,7 @@ fn create_storage_bucket_and_assign_to_bag(
     if let Some(storage_provider_id) = storage_provider_id {
         AcceptStorageBucketInvitationFixture::default()
             .with_origin(RawOrigin::Signed(DEFAULT_STORAGE_PROVIDER_ACCOUNT_ID))
+            .with_transactor_account_id(DEFAULT_STORAGE_PROVIDER_ACCOUNT_ID)
             .with_storage_bucket_id(bucket_id)
             .with_worker_id(storage_provider_id)
             .call_and_assert(Ok(()));
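
For orientation, a hypothetical happy-path variant of the fixture flow exercised above, where the transactor account is a dedicated key distinct from the provider's role account. The dynamic bag creation and data object upload steps are elided, and the account value 10_000 is made up, so treat this as a sketch of the intended call sequence rather than a runnable test.

// Hypothetical sketch only; prerequisites (dynamic bag creation, object upload) are elided.
let storage_provider_id = DEFAULT_STORAGE_PROVIDER_ID;
let transactor_account_id = 10_000; // dedicated transactor key, distinct from the role account

let bucket_id = CreateStorageBucketFixture::default()
    .with_origin(RawOrigin::Signed(STORAGE_WG_LEADER_ACCOUNT_ID))
    .with_invite_worker(Some(storage_provider_id))
    .call_and_assert(Ok(()))
    .unwrap();

// The provider's role key accepts the invitation and registers the transactor key.
AcceptStorageBucketInvitationFixture::default()
    .with_origin(RawOrigin::Signed(DEFAULT_STORAGE_PROVIDER_ACCOUNT_ID))
    .with_worker_id(storage_provider_id)
    .with_storage_bucket_id(bucket_id)
    .with_transactor_account_id(transactor_account_id)
    .call_and_assert(Ok(()));

let dynamic_bag_id = DynamicBagId::<Test>::Member(DEFAULT_MEMBER_ID);
let bag_id = BagId::<Test>::Dynamic(dynamic_bag_id.clone());
let data_object_ids = BTreeSet::from_iter(vec![0]);

// ... create the dynamic bag and upload the data object here (elided) ...

// `accept_pending_data_objects` is signed by the transactor key, so the new
// `ensure_bucket_transactor_access` check passes without touching the role key's nonce.
AcceptPendingDataObjectsFixture::default()
    .with_origin(RawOrigin::Signed(transactor_account_id))
    .with_worker_id(storage_provider_id)
    .with_storage_bucket_id(bucket_id)
    .with_bag_id(bag_id.clone())
    .with_data_object_ids(data_object_ids)
    .call_and_assert(Ok(()));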

+ 1 - 0
storage-node-v2/.eslintignore

@@ -1,2 +1,3 @@
 /lib
 .eslintrc.js
+**/generated/*

Some files were not shown because too many files changed in this diff