Address https://github.com/Joystream/joystream/pull/2582 review comments

Leszek Wiesner 3 years ago
parent commit d42f98bc02
42 changed files with 1118 additions and 564 deletions
  1. distributor-node/.eslintignore (+1 -0)
  2. distributor-node/docs/commands/dev.md (+0 -4)
  3. distributor-node/docs/commands/help.md (+1 -3)
  4. distributor-node/docs/commands/leader.md (+0 -32)
  5. distributor-node/docs/commands/operator.md (+0 -6)
  6. distributor-node/docs/commands/start.md (+0 -2)
  7. distributor-node/docs/schema/definition.md (+1 -1)
  8. distributor-node/package.json (+5 -1)
  9. distributor-node/scripts/init-bucket.sh (+3 -0)
  10. distributor-node/scripts/test-commands.sh (+5 -2)
  11. distributor-node/src/app/index.ts (+6 -6)
  12. distributor-node/src/command-base/accounts.ts (+4 -4)
  13. distributor-node/src/command-base/default.ts (+5 -5)
  14. distributor-node/src/commands/dev/batchUpload.ts (+5 -6)
  15. distributor-node/src/schemas/configSchema.ts (+1 -1)
  16. distributor-node/src/services/cache/StateCacheService.ts (+2 -11)
  17. distributor-node/src/services/content/ContentService.ts (+57 -14)
  18. distributor-node/src/services/crypto/ContentHash.ts (+21 -0)
  19. distributor-node/src/services/httpApi/HttpApiService.ts (+2 -2)
  20. distributor-node/src/services/httpApi/controllers/public.ts (+67 -65)
  21. distributor-node/src/services/logging/LoggingService.ts (+1 -0)
  22. distributor-node/src/services/networking/NetworkingService.ts (+46 -37)
  23. distributor-node/src/services/networking/query-node/api.ts (+50 -3)
  24. distributor-node/src/services/networking/query-node/generated/queries.ts (+60 -20)
  25. distributor-node/src/services/networking/query-node/generated/schema.ts (+436 -146)
  26. distributor-node/src/services/networking/query-node/queries/queries.graphql (+41 -19)
  27. distributor-node/src/services/networking/runtime/api.ts (+36 -15)
  28. distributor-node/src/services/parsers/BagIdParserService.ts (+70 -26)
  29. distributor-node/src/services/parsers/ConfigParserService.ts (+72 -33)
  30. distributor-node/src/services/parsers/errors.ts (+21 -11)
  31. distributor-node/src/services/validation/ValidationService.ts (+14 -8)
  32. distributor-node/src/types/content.ts (+81 -0)
  33. distributor-node/src/types/generated/ConfigJson.d.ts (+1 -1)
  34. distributor-node/src/types/index.ts (+1 -2)
  35. distributor-node/src/types/networking.ts (+0 -10)
  36. distributor-node/src/types/storage.ts (+0 -21)
  37. distributor-node/test/commands/hello.test.ts (+0 -17)
  38. distributor-node/test/mocha.opts (+0 -5)
  39. distributor-node/test/tsconfig.json (+0 -7)
  40. distributor-node/tsconfig.json (+0 -2)
  41. docker-compose.yml (+1 -1)
  42. yarn.lock (+1 -15)

+ 1 - 0
distributor-node/.eslintignore

@@ -1 +1,2 @@
 src/types/generated
+src/services/networking/query-node/generated

+ 0 - 4
distributor-node/docs/commands/dev.md

@@ -9,8 +9,6 @@ Developer utility commands
 ## `joystream-distributor dev:batchUpload`
 
 ```
-undefined
-
 USAGE
   $ joystream-distributor dev:batchUpload
 
@@ -33,8 +31,6 @@ _See code: [src/commands/dev/batchUpload.ts](https://github.com/Joystream/joystr
 Initialize development environment. Sets Alice as distributor working group leader.
 
 ```
-Initialize development environment. Sets Alice as distributor working group leader.
-
 USAGE
   $ joystream-distributor dev:init
 

+ 1 - 3
distributor-node/docs/commands/help.md

@@ -10,8 +10,6 @@ display help for joystream-distributor
 display help for joystream-distributor
 
 ```
-display help for <%= config.bin %>
-
 USAGE
   $ joystream-distributor help [COMMAND]
 
@@ -22,4 +20,4 @@ OPTIONS
   --all  see all commands in CLI
 ```
 
-_See code: [@oclif/plugin-help](https://github.com/oclif/plugin-help/blob/v2.2.3/src/commands/help.ts)_
+_See code: [@oclif/plugin-help](https://github.com/oclif/plugin-help/blob/v3.2.2/src/commands/help.ts)_

+ 0 - 32
distributor-node/docs/commands/leader.md

@@ -22,9 +22,6 @@ Commands for performing Distribution Working Group leader on-chain duties (like
 Cancel pending distribution bucket operator invitation.
 
 ```
-Cancel pending distribution bucket operator invitation.
-  Requires distribution working group leader permissions.
-
 USAGE
   $ joystream-distributor leader:cancel-invitation
 
@@ -51,8 +48,6 @@ _See code: [src/commands/leader/cancel-invitation.ts](https://github.com/Joystre
 Create new distribution bucket. Requires distribution working group leader permissions.
 
 ```
-Create new distribution bucket. Requires distribution working group leader permissions.
-
 USAGE
   $ joystream-distributor leader:create-bucket
 
@@ -74,8 +69,6 @@ _See code: [src/commands/leader/create-bucket.ts](https://github.com/Joystream/j
 Create new distribution bucket family. Requires distribution working group leader permissions.
 
 ```
-Create new distribution bucket family. Requires distribution working group leader permissions.
-
 USAGE
   $ joystream-distributor leader:create-bucket-family
 
@@ -93,8 +86,6 @@ _See code: [src/commands/leader/create-bucket-family.ts](https://github.com/Joys
 Delete distribution bucket. The bucket must have no operators. Requires distribution working group leader permissions.
 
 ```
-Delete distribution bucket. The bucket must have no operators. Requires distribution working group leader permissions.
-
 USAGE
   $ joystream-distributor leader:delete-bucket
 
@@ -116,8 +107,6 @@ _See code: [src/commands/leader/delete-bucket.ts](https://github.com/Joystream/j
 Delete distribution bucket family. Requires distribution working group leader permissions.
 
 ```
-Delete distribution bucket family. Requires distribution working group leader permissions.
-
 USAGE
   $ joystream-distributor leader:delete-bucket-family
 
@@ -137,10 +126,6 @@ _See code: [src/commands/leader/delete-bucket-family.ts](https://github.com/Joys
 Invite distribution bucket operator (distribution group worker).
 
 ```
-Invite distribution bucket operator (distribution group worker).
-  The specified bucket must not have any operator currently.
-  Requires distribution working group leader permissions.
-
 USAGE
   $ joystream-distributor leader:invite-bucket-operator
 
@@ -168,9 +153,6 @@ _See code: [src/commands/leader/invite-bucket-operator.ts](https://github.com/Jo
 Remove distribution bucket operator (distribution group worker).
 
 ```
-Remove distribution bucket operator (distribution group worker).
-  Requires distribution working group leader permissions.
-
 USAGE
   $ joystream-distributor leader:remove-bucket-operator
 
@@ -198,9 +180,6 @@ _See code: [src/commands/leader/remove-bucket-operator.ts](https://github.com/Jo
 Set/update distribution bucket family metadata.
 
 ```
-Set/update distribution bucket family metadata.
-  Requires distribution working group leader permissions.
-
 USAGE
   $ joystream-distributor leader:set-bucket-family-metadata
 
@@ -225,8 +204,6 @@ _See code: [src/commands/leader/set-bucket-family-metadata.ts](https://github.co
 Set max. distribution buckets per bag limit. Requires distribution working group leader permissions.
 
 ```
-Set max. distribution buckets per bag limit. Requires distribution working group leader permissions.
-
 USAGE
   $ joystream-distributor leader:set-buckets-per-bag-limit
 
@@ -246,8 +223,6 @@ _See code: [src/commands/leader/set-buckets-per-bag-limit.ts](https://github.com
 Add/remove distribution buckets from a bag.
 
 ```
-Add/remove distribution buckets from a bag.
-
 USAGE
   $ joystream-distributor leader:update-bag
 
@@ -291,8 +266,6 @@ _See code: [src/commands/leader/update-bag.ts](https://github.com/Joystream/joys
 Update distribution bucket mode ("distributing" flag). Requires distribution working group leader permissions.
 
 ```
-Update distribution bucket mode ("distributing" flag). Requires distribution working group leader permissions.
-
 USAGE
   $ joystream-distributor leader:update-bucket-mode
 
@@ -316,8 +289,6 @@ _See code: [src/commands/leader/update-bucket-mode.ts](https://github.com/Joystr
 Update distribution bucket status ("acceptingNewBags" flag). Requires distribution working group leader permissions.
 
 ```
-Update distribution bucket status ("acceptingNewBags" flag). Requires distribution working group leader permissions.
-
 USAGE
   $ joystream-distributor leader:update-bucket-status
 
@@ -340,9 +311,6 @@ _See code: [src/commands/leader/update-bucket-status.ts](https://github.com/Joys
 Update dynamic bag creation policy (number of buckets by family that should store given dynamic bag type).
 
 ```
-Update dynamic bag creation policy (number of buckets by family that should store given dynamic bag type).
-    Requires distribution working group leader permissions.
-
 USAGE
   $ joystream-distributor leader:update-dynamic-bag-policy
 

+ 0 - 6
distributor-node/docs/commands/operator.md

@@ -11,9 +11,6 @@ Commands for performing node operator (Distribution Working Group worker) on-cha
 Accept pending distribution bucket operator invitation.
 
 ```
-Accept pending distribution bucket operator invitation.
-  Requires the invited distribution group worker role key.
-
 USAGE
   $ joystream-distributor operator:accept-invitation
 
@@ -40,9 +37,6 @@ _See code: [src/commands/operator/accept-invitation.ts](https://github.com/Joyst
 Set/update distribution bucket operator metadata.
 
 ```
-Set/update distribution bucket operator metadata.
-  Requires active distribution bucket operator worker role key.
-
 USAGE
   $ joystream-distributor operator:set-metadata
 

+ 0 - 2
distributor-node/docs/commands/start.md

@@ -10,8 +10,6 @@ Start the node
 Start the node
 
 ```
-Start the node
-
 USAGE
   $ joystream-distributor start
 

+ 1 - 1
distributor-node/docs/schema/definition.md

@@ -131,7 +131,7 @@ Specifies how often periodic tasks (for example cache cleanup) are executed by t
 
 ## port
 
-Distributor node http server port
+Distributor node http api port
 
 `port`
 

+ 5 - 1
distributor-node/package.json

@@ -13,7 +13,7 @@
     "@elastic/ecs-winston-format": "^1.1.0",
     "@elastic/ecs-winston-format": "^1.1.0",
     "@oclif/command": "^1",
     "@oclif/command": "^1",
     "@oclif/config": "^1",
     "@oclif/config": "^1",
-    "@oclif/plugin-help": "^2",
+    "@oclif/plugin-help": "^3",
     "@apollo/client": "^3.2.5",
     "@apollo/client": "^3.2.5",
     "graphql": "^14.7.0",
     "graphql": "^14.7.0",
     "winston": "^3.3.3",
     "winston": "^3.3.3",
@@ -76,6 +76,10 @@
   "engines": {
   "engines": {
     "node": ">=14.16.1"
     "node": ">=14.16.1"
   },
   },
+  "volta": {
+    "node": "14.16.1",
+    "yarn": "1.22.4"
+  },
   "files": [
   "files": [
     "/bin",
     "/bin",
     "/lib",
     "/lib",

+ 3 - 0
distributor-node/scripts/init-bucket.sh

@@ -15,3 +15,6 @@ ${CLI} leader:update-bag -b static:council -f ${FAMILY_ID} -a ${BUCKET_ID}
 ${CLI} leader:update-bucket-mode -f ${FAMILY_ID} -B ${BUCKET_ID} --mode on
 ${CLI} leader:invite-bucket-operator -f ${FAMILY_ID} -B ${BUCKET_ID} -w 0
 ${CLI} operator:accept-invitation -f ${FAMILY_ID} -B ${BUCKET_ID} -w 0
+${CLI} operator:set-metadata -f ${FAMILY_ID} -B ${BUCKET_ID} -w 0 -e http://localhost:3334
+${CLI} leader:update-dynamic-bag-policy -t Channel -p ${FAMILY_ID}:1
+${CLI} leader:update-dynamic-bag-policy -t Member -p ${FAMILY_ID}:1

+ 5 - 2
distributor-node/scripts/test-commands.sh

@@ -21,6 +21,7 @@ ${CLI} leader:update-bag -b static:wg:gateway -f ${FAMILY_ID} -a ${BUCKET_ID}
 ${CLI} leader:update-bag -b static:wg:distribution -f ${FAMILY_ID} -a ${BUCKET_ID}
 ${CLI} leader:update-bucket-status -f ${FAMILY_ID} -B ${BUCKET_ID}  --acceptingBags yes
 ${CLI} leader:update-bucket-mode -f ${FAMILY_ID} -B ${BUCKET_ID} --mode on
+${CLI} leader:update-dynamic-bag-policy -t Channel -p ${FAMILY_ID}:5
 ${CLI} leader:update-dynamic-bag-policy -t Member -p ${FAMILY_ID}:5
 ${CLI} leader:update-dynamic-bag-policy -t Member
 ${CLI} leader:invite-bucket-operator -f ${FAMILY_ID} -B ${BUCKET_ID} -w 0
@@ -28,11 +29,13 @@ ${CLI} leader:cancel-invitation -f ${FAMILY_ID} -B ${BUCKET_ID} -w 0
 ${CLI} leader:invite-bucket-operator -f ${FAMILY_ID} -B ${BUCKET_ID} -w 0
 ${CLI} operator:accept-invitation -f ${FAMILY_ID} -B ${BUCKET_ID} -w 0
 ${CLI} operator:set-metadata -f ${FAMILY_ID} -B ${BUCKET_ID} -w 0 -i ./data/operator-metadata.json
-${CLI} leader:remove-bucket-operator -f ${FAMILY_ID} -B ${BUCKET_ID} -w 0
 ${CLI} leader:set-bucket-family-metadata -f ${FAMILY_ID} -i ./data/family-metadata.json
 
-# Deletion commands tested separately, since bucket operator removal is not yet supported
+# Deletion commands tested separately
 FAMILY_TO_DELETE_ID=`${CLI} leader:create-bucket-family`
 BUCKET_TO_DELETE_ID=`${CLI} leader:create-bucket -f ${FAMILY_TO_DELETE_ID} -a yes`
+${CLI} leader:invite-bucket-operator -f ${FAMILY_TO_DELETE_ID} -B ${BUCKET_TO_DELETE_ID} -w 0
+${CLI} operator:accept-invitation -f ${FAMILY_TO_DELETE_ID} -B ${BUCKET_TO_DELETE_ID} -w 0
+${CLI} leader:remove-bucket-operator -f ${FAMILY_TO_DELETE_ID} -B ${BUCKET_TO_DELETE_ID} -w 0
 ${CLI} leader:delete-bucket -f ${FAMILY_TO_DELETE_ID} -B ${BUCKET_TO_DELETE_ID}
 ${CLI} leader:delete-bucket-family -f ${FAMILY_TO_DELETE_ID}

+ 6 - 6
distributor-node/src/app/index.ts

@@ -3,7 +3,7 @@ import { NetworkingService } from '../services/networking'
 import { LoggingService } from '../services/logging'
 import { StateCacheService } from '../services/cache/StateCacheService'
 import { ContentService } from '../services/content/ContentService'
-import { ServerService } from '../services/server/ServerService'
+import { HttpApiService } from '../services/httpApi/HttpApiService'
 import { Logger } from 'winston'
 import fs from 'fs'
 import nodeCleanup from 'node-cleanup'
@@ -14,7 +14,7 @@ export class App {
   private content: ContentService
   private stateCache: StateCacheService
   private networking: NetworkingService
-  private server: ServerService
+  private httpApi: HttpApiService
   private logging: LoggingService
   private logger: Logger
   private intervals: AppIntervals | undefined
@@ -25,7 +25,7 @@ export class App {
     this.stateCache = new StateCacheService(config, this.logging)
     this.networking = new NetworkingService(config, this.stateCache, this.logging)
     this.content = new ContentService(config, this.logging, this.networking, this.stateCache)
-    this.server = new ServerService(config, this.stateCache, this.content, this.logging, this.networking)
+    this.httpApi = new HttpApiService(config, this.stateCache, this.content, this.logging, this.networking)
     this.logger = this.logging.createLogger('App')
   }
 
@@ -79,7 +79,7 @@ export class App {
       this.stateCache.load()
       await this.content.startupInit()
       this.setIntervals()
-      this.server.start()
+      this.httpApi.start()
     } catch (err) {
       this.logger.error('Node initialization failed!', { err })
       process.exit(-1)
@@ -125,8 +125,8 @@ export class App {
     this.logger.info('Exiting...')
     // Clear intervals
     this.clearIntervals()
-    // Stop the server
-    this.server.stop()
+    // Stop the http api
+    this.httpApi.stop()
     // Save cache
     this.stateCache.saveSync()
     if (signal) {

+ 4 - 4
distributor-node/src/command-base/accounts.ts

@@ -1,7 +1,7 @@
 import ApiCommandBase from './api'
 import { AccountId } from '@polkadot/types/interfaces'
 import { Keyring } from '@polkadot/api'
-import { KeyringInstance, KeyringOptions, KeyringPair } from '@polkadot/keyring/types'
+import { KeyringInstance, KeyringOptions, KeyringPair, KeyringPair$Json } from '@polkadot/keyring/types'
 import { CLIError } from '@oclif/errors'
 import ExitCodes from './ExitCodes'
 import fs from 'fs'
@@ -30,7 +30,7 @@ export default abstract class AccountsCommandBase extends ApiCommandBase {
         exit: ExitCodes.InvalidFile,
       })
     }
-    let accountJsonObj: any
+    let accountJsonObj: unknown
     try {
       accountJsonObj = require(jsonBackupFilePath)
     } catch (e) {
@@ -48,8 +48,8 @@ export default abstract class AccountsCommandBase extends ApiCommandBase {
     let account: KeyringPair
     try {
       // Try adding and retrieving the keys in order to validate that the backup file is correct
-      keyring.addFromJson(accountJsonObj)
-      account = keyring.getPair(accountJsonObj.address)
+      keyring.addFromJson(accountJsonObj as KeyringPair$Json)
+      account = keyring.getPair((accountJsonObj as KeyringPair$Json).address)
     } catch (e) {
       throw new CLIError(`Keypair backup json file is not valid: ${jsonBackupFilePath}`, {
         exit: ExitCodes.InvalidFile,

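The `any` → `unknown` change above still relies on `as KeyringPair$Json` casts at the call sites. A runtime type guard could narrow the parsed backup file without unchecked casts; a minimal sketch (the guard name and the exact fields checked are illustrative assumptions, not part of this commit):

```typescript
import { KeyringPair$Json } from '@polkadot/keyring/types'

// Hypothetical guard: verifies the fields the keyring relies on,
// so `accountJsonObj` could be narrowed without an `as` cast.
function isKeyringPairJson(obj: unknown): obj is KeyringPair$Json {
  if (typeof obj !== 'object' || obj === null) {
    return false
  }
  const candidate = obj as Record<string, unknown>
  return typeof candidate.address === 'string' && typeof candidate.encoded === 'string'
}
```
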
+ 5 - 5
distributor-node/src/command-base/default.ts

@@ -22,8 +22,8 @@ export const flags = {
   }),
   bagId: oclifFlags.build({
     parse: (value: string) => {
-      const parser = new BagIdParserService()
-      return parser.parseBagId(value)
+      const parser = new BagIdParserService(value)
+      return parser.parse()
     },
     description: `Bag ID. Format: {bag_type}:{sub_type}:{id}.
     - Bag types: 'static', 'dynamic'
@@ -62,7 +62,7 @@ export default abstract class DefaultCommandBase extends Command {
   async init(): Promise<void> {
     const { configPath, yes } = this.parse(this.constructor as typeof DefaultCommandBase).flags
     const configParser = new ConfigParserService()
-    this.appConfig = configParser.loadConfing(configPath) as ReadonlyConfig
+    this.appConfig = configParser.loadConfig(configPath) as ReadonlyConfig
     this.logging = LoggingService.withCLIConfig()
     this.logger = this.logging.createLogger('CLI')
     this.autoConfirm = !!(process.env.AUTO_CONFIRM === 'true' || parseInt(process.env.AUTO_CONFIRM || '') || yes)
@@ -89,11 +89,11 @@ export default abstract class DefaultCommandBase extends Command {
     }
   }
 
-  async finally(err: any): Promise<void> {
+  async finally(err: unknown): Promise<void> {
     if (!err) this.exit(ExitCodes.OK)
     if (process.env.DEBUG === 'true') {
       console.error(err)
     }
-    super.finally(err)
+    super.finally(err as Error)
   }
 }

+ 5 - 6
distributor-node/src/commands/dev/batchUpload.ts

@@ -1,18 +1,17 @@
 import AccountsCommandBase from '../../command-base/accounts'
 import DefaultCommandBase, { flags } from '../../command-base/default'
-import { hash } from 'blake3'
 import { FilesApi, Configuration, TokenRequest } from '../../services/networking/storage-node/generated'
 import { u8aToHex } from '@polkadot/util'
-import * as multihash from 'multihashes'
 import FormData from 'form-data'
 import imgGen from 'js-image-generator'
 import { SubmittableExtrinsic } from '@polkadot/api/types'
 import { BagIdParserService } from '../../services/parsers/BagIdParserService'
 import axios from 'axios'
+import { ContentHash } from '../../services/crypto/ContentHash'
 
 async function generateRandomImage(): Promise<Buffer> {
   return new Promise((resolve, reject) => {
-    imgGen.generateImage(10, 10, 80, function (err: any, image: any) {
+    imgGen.generateImage(10, 10, 80, function (err: unknown, image: { data: Buffer }) {
       if (err) {
         reject(err)
       } else {
@@ -61,7 +60,7 @@ export default class DevBatchUpload extends AccountsCommandBase {
       const batch: [SubmittableExtrinsic<'promise'>, Buffer][] = []
       for (let j = 0; j < batchSize; ++j) {
         const dataObject = await generateRandomImage()
-        const dataHash = multihash.toB58String(multihash.encode(hash(dataObject) as Buffer, 'blake3'))
+        const dataHash = new ContentHash().update(dataObject).digest()
         batch.push([
           api.tx.sudo.sudo(
             api.tx.storage.sudoUploadDataObjects({
@@ -73,7 +72,7 @@ export default class DevBatchUpload extends AccountsCommandBase {
                 },
               ],
               expectedDataSizeFee: dataFee,
-              bagId: new BagIdParserService().parseBagId(bagId),
+              bagId: new BagIdParserService(bagId).parse(),
             })
           ),
           dataObject,
@@ -102,7 +101,7 @@ export default class DevBatchUpload extends AccountsCommandBase {
             signature,
           })
           if (!token) {
-            throw new Error('Recieved empty token!')
+            throw new Error('Received empty token!')
           }
 
           const formData = new FormData()

+ 1 - 1
distributor-node/src/schemas/configSchema.ts

@@ -144,7 +144,7 @@ export const configSchema: JSONSchema4 = {
         },
       },
     },
-    port: { description: 'Distributor node http server port', type: 'integer', minimum: 0 },
+    port: { description: 'Distributor node http api port', type: 'integer', minimum: 0 },
     keys: {
       description: 'Specifies the keys available within distributor node CLI.',
       type: 'array',

+ 2 - 11
distributor-node/src/services/cache/StateCacheService.ts

@@ -1,23 +1,14 @@
 import { Logger } from 'winston'
-import { ReadonlyConfig, StorageNodeDownloadResponse } from '../../types'
+import { PendingDownloadData, PendingDownloadStatus, ReadonlyConfig, StorageNodeDownloadResponse } from '../../types'
 import { LoggingService } from '../logging'
 import _ from 'lodash'
 import fs from 'fs'
 
 // LRU-SP cache parameters
 // Since size is in KB, these parameters should be enough for grouping objects of size up to 2^24 KB = 16 GB
-// TODO: Intoduce MAX_CACHED_ITEM_SIZE and skip caching for large objects entirely? (ie. 10 GB objects)
 export const CACHE_GROUP_LOG_BASE = 2
 export const CACHE_GROUPS_COUNT = 24
 
-type PendingDownloadStatus = 'Waiting' | 'LookingForSource' | 'Downloading'
-
-export interface PendingDownloadData {
-  objectSize: number
-  status: PendingDownloadStatus
-  promise: Promise<StorageNodeDownloadResponse>
-}
-
 export interface StorageNodeEndpointData {
   last10ResponseTimes: number[]
 }
@@ -155,7 +146,7 @@ export class StateCacheService {
     promise: Promise<StorageNodeDownloadResponse>
   ): PendingDownloadData {
     const pendingDownload: PendingDownloadData = {
-      status: 'Waiting',
+      status: PendingDownloadStatus.Waiting,
       objectSize,
       promise,
     }

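With `CACHE_GROUP_LOG_BASE = 2` and `CACHE_GROUPS_COUNT = 24`, objects are grouped by the logarithm of their size in KB, so the top group covers sizes up to 2^24 KB = 16 GB, as the comment above states. A sketch of the grouping those constants imply (the actual grouping code is outside this diff, so the function below is an assumption):

```typescript
export const CACHE_GROUP_LOG_BASE = 2
export const CACHE_GROUPS_COUNT = 24

// Maps an object size (in KB) to its LRU-SP cache group:
// group i is meant to hold objects of size in [base^i, base^(i+1)) KB.
function cacheGroup(objectSizeKB: number): number {
  const group = Math.floor(Math.log(Math.max(objectSizeKB, 1)) / Math.log(CACHE_GROUP_LOG_BASE))
  return Math.min(group, CACHE_GROUPS_COUNT - 1)
}
```
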
+ 57 - 14
distributor-node/src/services/content/ContentService.ts

@@ -1,5 +1,5 @@
 import fs from 'fs'
-import { ReadonlyConfig } from '../../types'
+import { ObjectStatus, ObjectStatusType, ReadonlyConfig } from '../../types'
 import { StateCacheService } from '../cache/StateCacheService'
 import { LoggingService } from '../logging'
 import { Logger } from 'winston'
@@ -7,10 +7,11 @@ import { FileContinousReadStream, FileContinousReadStreamOptions } from './FileC
 import FileType from 'file-type'
 import { Readable, pipeline } from 'stream'
 import { NetworkingService } from '../networking'
-import { createHash } from 'blake3'
-import * as multihash from 'multihashes'
+import { ContentHash } from '../crypto/ContentHash'
+import readChunk from 'read-chunk'
 
 export const DEFAULT_CONTENT_TYPE = 'application/octet-stream'
+export const MIME_TYPE_DETECTION_CHUNK_SIZE = 4100
 
 export class ContentService {
   private config: ReadonlyConfig
@@ -159,8 +160,19 @@ export class ContentService {
   }
 
   public async detectMimeType(objectId: string): Promise<string> {
-    const result = await FileType.fromFile(this.path(objectId))
-    return result?.mime || DEFAULT_CONTENT_TYPE
+    const objectPath = this.path(objectId)
+    try {
+      const buffer = await readChunk(objectPath, 0, MIME_TYPE_DETECTION_CHUNK_SIZE)
+      const result = await FileType.fromBuffer(buffer)
+      return result?.mime || DEFAULT_CONTENT_TYPE
+    } catch (err) {
+      this.logger.error(`Error while trying to detect object mimeType: ${err instanceof Error ? err.message : err}`, {
+        err,
+        objectId,
+        objectPath,
+      })
+      return DEFAULT_CONTENT_TYPE
+    }
   }
 
   private async evictCacheUntilFreeSpaceReached(targetFreeSpace: number): Promise<void> {
@@ -207,17 +219,17 @@ export class ContentService {
     return new Promise<void>((resolve, reject) => {
       const fileStream = this.createWriteStream(objectId)
 
-      let bytesRecieved = 0
-      const hash = createHash()
+      let bytesReceived = 0
+      const hash = new ContentHash()
 
       pipeline(dataStream, fileStream, async (err) => {
         const { bytesWritten } = fileStream
-        const finalHash = multihash.toB58String(multihash.encode(hash.digest(), 'blake3'))
+        const finalHash = hash.digest()
         const logMetadata = {
           objectId,
           expectedSize,
           expectedHash,
-          bytesRecieved,
+          bytesReceived,
           bytesWritten,
         }
         if (err) {
@@ -230,8 +242,8 @@ export class ContentService {
           return
         }
 
-        if (bytesWritten !== bytesRecieved || bytesWritten !== expectedSize) {
-          this.logger.error('Content rejected: Bytes written/recieved/expected mismatch!', {
+        if (bytesWritten !== bytesReceived || bytesWritten !== expectedSize) {
+          this.logger.error('Content rejected: Bytes written/received/expected mismatch!', {
             ...logMetadata,
           })
           this.drop(objectId)
@@ -257,13 +269,44 @@ export class ContentService {
       })
 
       dataStream.on('data', (chunk) => {
-        bytesRecieved += chunk.length
+        if (dataStream.destroyed) {
+          return
+        }
+        bytesReceived += chunk.length
         hash.update(chunk)
 
-        if (bytesRecieved > expectedSize) {
-          dataStream.destroy(new Error('Unexpected content size: Too much data recieved from source!'))
+        if (bytesReceived > expectedSize) {
+          dataStream.destroy(new Error('Unexpected content size: Too much data received from source!'))
         }
       })
     })
   }
+
+  public async objectStatus(objectId: string): Promise<ObjectStatus> {
+    const pendingDownload = this.stateCache.getPendingDownload(objectId)
+
+    if (!pendingDownload && this.exists(objectId)) {
+      return { type: ObjectStatusType.Available, path: this.path(objectId) }
+    }
+
+    if (pendingDownload) {
+      return { type: ObjectStatusType.PendingDownload, pendingDownloadData: pendingDownload }
+    }
+
+    const objectInfo = await this.networking.dataObjectInfo(objectId)
+    if (!objectInfo.exists) {
+      return { type: ObjectStatusType.NotFound }
+    }
+
+    if (!objectInfo.isSupported) {
+      return { type: ObjectStatusType.NotSupported }
+    }
+
+    const { data: objectData } = objectInfo
+    if (!objectData) {
+      throw new Error('Missing data object data')
+    }
+
+    return { type: ObjectStatusType.Missing, objectData }
+  }
 }

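The new `objectStatus` method returns a discriminated union that the HTTP controllers switch over below. The union itself lives in the added `src/types/content.ts` (+81 lines, not shown in this excerpt); based on the variants constructed above, its shape is presumably close to:

```typescript
// Assumed shape, reconstructed from the objectStatus() return values;
// the real definitions (incl. PendingDownloadData and DataObjectData)
// are in the added src/types/content.ts.
export enum ObjectStatusType {
  Available = 'Available',
  PendingDownload = 'PendingDownload',
  NotFound = 'NotFound',
  NotSupported = 'NotSupported',
  Missing = 'Missing',
}

export type ObjectStatus =
  | { type: ObjectStatusType.Available; path: string }
  | { type: ObjectStatusType.PendingDownload; pendingDownloadData: PendingDownloadData }
  | { type: ObjectStatusType.NotFound }
  | { type: ObjectStatusType.NotSupported }
  | { type: ObjectStatusType.Missing; objectData: DataObjectData }
```
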
+ 21 - 0
distributor-node/src/services/crypto/ContentHash.ts

@@ -0,0 +1,21 @@
+import { createHash, HashInput, NodeHash } from 'blake3'
+import { HashReader } from 'blake3/dist/wasm/nodejs'
+import { toB58String, encode } from 'multihashes'
+
+export class ContentHash {
+  private hash: NodeHash<HashReader>
+  public static readonly algorithm = 'blake3'
+
+  constructor() {
+    this.hash = createHash()
+  }
+
+  update(data: HashInput): this {
+    this.hash.update(data)
+    return this
+  }
+
+  digest(): string {
+    return toB58String(encode(this.hash.digest(), ContentHash.algorithm))
+  }
+}

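`ContentHash` centralizes the `blake3` + `multihashes` calls previously inlined in `ContentService` and `dev:batchUpload`. A usage sketch matching the call sites in this commit (the buffers are placeholder data):

```typescript
import { ContentHash } from './ContentHash'

// One-shot hashing, as in dev:batchUpload:
const dataHash = new ContentHash().update(Buffer.from('example content')).digest()

// Incremental hashing, as in ContentService's download pipeline:
const hash = new ContentHash()
for (const chunk of [Buffer.from('part 1'), Buffer.from('part 2')]) {
  hash.update(chunk)
}
const finalHash = hash.digest() // base58-encoded blake3 multihash
```
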
+ 2 - 2
distributor-node/src/services/server/ServerService.ts → distributor-node/src/services/httpApi/HttpApiService.ts

@@ -5,7 +5,7 @@ import * as OpenApiValidator from 'express-openapi-validator'
 import { HttpError } from 'express-openapi-validator/dist/framework/types'
 import { ReadonlyConfig } from '../../types/config'
 import expressWinston from 'express-winston'
-import { LoggingService } from '../../services/logging'
+import { LoggingService } from '../logging'
 import { PublicApiController } from './controllers/public'
 import { StateCacheService } from '../cache/StateCacheService'
 import { NetworkingService } from '../networking'
@@ -15,7 +15,7 @@ import { Server } from 'http'
 
 const OPENAPI_SPEC_PATH = path.join(__dirname, '../../api-spec/openapi.yml')
 
-export class ServerService {
+export class HttpApiService {
   private config: ReadonlyConfig
   private logger: Logger
   private expressApp: express.Application

+ 67 - 65
distributor-node/src/services/server/controllers/public.ts → distributor-node/src/services/httpApi/controllers/public.ts

@@ -1,13 +1,13 @@
 import * as express from 'express'
 import { Logger } from 'winston'
 import send from 'send'
-import { StateCacheService } from '../../../services/cache/StateCacheService'
-import { NetworkingService } from '../../../services/networking'
+import { StateCacheService } from '../../cache/StateCacheService'
+import { NetworkingService } from '../../networking'
 import { AssetRouteParams, BucketsResponse, ErrorResponse, StatusResponse } from '../../../types/api'
 import { LoggingService } from '../../logging'
 import { ContentService, DEFAULT_CONTENT_TYPE } from '../../content/ContentService'
 import proxy from 'express-http-proxy'
-import { ReadonlyConfig } from '../../../types'
+import { DataObjectData, ObjectStatusType, ReadonlyConfig } from '../../../types'
 
 const CACHED_MAX_AGE = 31536000
 const PENDING_MAX_AGE = 180
@@ -33,6 +33,30 @@ export class PublicApiController {
     this.content = content
   }
 
+  private createErrorResponse(message: string, type?: string): ErrorResponse {
+    return { type, message }
+  }
+
+  private async serveMissingAsset(
+    req: express.Request<AssetRouteParams>,
+    res: express.Response,
+    next: express.NextFunction,
+    objectData: DataObjectData
+  ): Promise<void> {
+    const { objectId, size, contentHash } = objectData
+
+    const downloadResponse = await this.networking.downloadDataObject({ objectData })
+
+    if (downloadResponse) {
+      // Note: Await will only wait until the file is created, so we may serve the response from it
+      await this.content.handleNewContent(objectId, size, contentHash, downloadResponse.data)
+      res.setHeader('x-cache', 'miss')
+    } else {
+      res.setHeader('x-cache', 'pending')
+    }
+    return this.servePendingDownloadAsset(req, res, next, objectId)
+  }
+
   private serveAssetFromFilesystem(
     req: express.Request<AssetRouteParams>,
     res: express.Response,
@@ -85,7 +109,7 @@
 
     const { promise, objectSize } = pendingDownload
     const response = await promise
-    const source = new URL(response.config.url!)
+    const source = new URL(response.config.url || '')
     const contentType = response.headers['content-type'] || DEFAULT_CONTENT_TYPE
     res.setHeader('content-type', contentType)
     // Allow caching pendingDownload response only for very short period of time and require revalidation,
@@ -139,36 +163,39 @@ export class PublicApiController {
   }
 
   public async assetHead(req: express.Request<AssetRouteParams>, res: express.Response): Promise<void> {
-    const objectId = req.params.objectId
-    const pendingDownload = this.stateCache.getPendingDownload(objectId)
+    const { objectId } = req.params
+    const objectStatus = await this.content.objectStatus(objectId)
 
     res.setHeader('timing-allow-origin', '*')
     res.setHeader('accept-ranges', 'bytes')
     res.setHeader('content-disposition', 'inline')
 
-    if (!pendingDownload && this.content.exists(objectId)) {
-      res.status(200)
-      res.setHeader('x-cache', 'hit')
-      res.setHeader('cache-control', `max-age=${CACHED_MAX_AGE}`)
-      res.setHeader('content-type', this.stateCache.getContentMimeType(objectId) || DEFAULT_CONTENT_TYPE)
-      res.setHeader('content-length', this.content.fileSize(objectId))
-    } else if (pendingDownload) {
-      res.status(200)
-      res.setHeader('x-cache', 'pending')
-      res.setHeader('cache-control', `max-age=${PENDING_MAX_AGE}, must-revalidate`)
-      res.setHeader('content-length', pendingDownload.objectSize)
-    } else {
-      const objectInfo = await this.networking.dataObjectInfo(objectId)
-      if (!objectInfo.exists) {
+    switch (objectStatus.type) {
+      case ObjectStatusType.Available:
+        res.status(200)
+        res.setHeader('x-cache', 'hit')
+        res.setHeader('cache-control', `max-age=${CACHED_MAX_AGE}`)
+        res.setHeader('content-type', this.stateCache.getContentMimeType(objectId) || DEFAULT_CONTENT_TYPE)
+        res.setHeader('content-length', this.content.fileSize(objectId))
+        break
+      case ObjectStatusType.PendingDownload:
+        res.status(200)
+        res.setHeader('x-cache', 'pending')
+        res.setHeader('cache-control', `max-age=${PENDING_MAX_AGE}, must-revalidate`)
+        res.setHeader('content-length', objectStatus.pendingDownloadData.objectSize)
+        break
+      case ObjectStatusType.NotFound:
         res.status(404)
-      } else if (!objectInfo.isSupported) {
+        break
+      case ObjectStatusType.NotSupported:
         res.status(421)
-      } else {
+        break
+      case ObjectStatusType.Missing:
         res.status(200)
         res.setHeader('x-cache', 'miss')
         res.setHeader('cache-control', `max-age=${PENDING_MAX_AGE}, must-revalidate`)
-        res.setHeader('content-length', objectInfo.data?.size || 0)
-      }
+        res.setHeader('content-length', objectStatus.objectData.size)
+        break
     }
 
     res.send()
@@ -179,55 +206,30 @@ export class PublicApiController {
     res: express.Response,
     next: express.NextFunction
   ): Promise<void> {
-    const objectId = req.params.objectId
-    const pendingDownload = this.stateCache.getPendingDownload(objectId)
+    const { objectId } = req.params
+    const objectStatus = await this.content.objectStatus(objectId)
 
     this.logger.verbose('Data object requested', {
       objectId,
-      status: pendingDownload && pendingDownload.status,
+      objectStatus,
     })
 
     res.setHeader('timing-allow-origin', '*')
 
-    if (!pendingDownload && this.content.exists(objectId)) {
-      this.logger.verbose('Requested file found in filesystem', { path: this.content.path(objectId) })
-      return this.serveAssetFromFilesystem(req, res, next, objectId)
-    } else if (pendingDownload) {
-      this.logger.verbose('Requested file is in pending download state', { path: this.content.path(objectId) })
-      res.setHeader('x-cache', 'pending')
-      return this.servePendingDownloadAsset(req, res, next, objectId)
-    } else {
-      this.logger.verbose('Requested file not found in filesystem')
-      const objectInfo = await this.networking.dataObjectInfo(objectId)
-      if (!objectInfo.exists) {
-        const errorRes: ErrorResponse = {
-          message: 'Data object does not exist',
-        }
-        res.status(404).json(errorRes)
-      } else if (!objectInfo.isSupported) {
-        const errorRes: ErrorResponse = {
-          message: 'Data object not served by this node',
-        }
-        res.status(421).json(errorRes)
-        // TODO: Try to direct to a node that supports it?
-      } else {
-        const { data: objectData } = objectInfo
-        if (!objectData) {
-          throw new Error('Missing data object data')
-        }
-        const { size, contentHash } = objectData
-
-        const downloadResponse = await this.networking.downloadDataObject({ objectData })
-
-        if (downloadResponse) {
-          // Note: Await will only wait unil the file is created, so we may serve the response from it
-          await this.content.handleNewContent(objectId, size, contentHash, downloadResponse.data)
-          res.setHeader('x-cache', 'miss')
-        } else {
-          res.setHeader('x-cache', 'pending')
-        }
+    switch (objectStatus.type) {
+      case ObjectStatusType.Available:
+        return this.serveAssetFromFilesystem(req, res, next, objectId)
+      case ObjectStatusType.PendingDownload:
+        res.setHeader('x-cache', 'pending')
         return this.servePendingDownloadAsset(req, res, next, objectId)
-      }
+      case ObjectStatusType.NotFound:
+        res.status(404).json(this.createErrorResponse('Data object does not exist'))
+        return
+      case ObjectStatusType.NotSupported:
+        res.status(421).json(this.createErrorResponse('Data object not served by this node'))
+        return
+      case ObjectStatusType.Missing:
+        return this.serveMissingAsset(req, res, next, objectStatus.objectData)
     }
   }
 

+ 1 - 0
distributor-node/src/services/logging/LoggingService.ts

@@ -66,6 +66,7 @@ export class LoggingService {
         throw new Error('config.endpoints.elasticSearch must be provided when elasticSearch logging is enabled!')
       }
       esTransport = new ElasticsearchTransport({
+        index: 'distributor-node',
         level: config.log.elastic,
         format: winston.format.combine(pauseFormat({ id: 'es' }), escFormat()),
         flushInterval: 5000,

+ 46 - 37
distributor-node/src/services/networking/NetworkingService.ts

@@ -3,7 +3,7 @@ import { QueryNodeApi } from './query-node/api'
 import { Logger } from 'winston'
 import { LoggingService } from '../logging'
 import { StorageNodeApi } from './storage-node/api'
-import { PendingDownloadData, StateCacheService } from '../cache/StateCacheService'
+import { StateCacheService } from '../cache/StateCacheService'
 import { DataObjectDetailsFragment } from './query-node/generated/queries'
 import axios, { AxiosRequestConfig } from 'axios'
 import {
@@ -13,6 +13,8 @@ import {
   DataObjectInfo,
   StorageNodeDownloadResponse,
   DownloadData,
+  PendingDownloadData,
+  PendingDownloadStatus,
 } from '../../types'
 import queue from 'queue'
 import { DistributionBucketOperatorStatus } from './query-node/generated/schema'
@@ -27,7 +29,6 @@ export const MAX_CONCURRENT_RESPONSE_TIME_CHECKS = 10
 export class NetworkingService {
   private config: ReadonlyConfig
   private queryNodeApi: QueryNodeApi
-  // private runtimeApi: RuntimeApi
   private logging: LoggingService
   private stateCache: StateCacheService
   private logger: Logger
@@ -49,7 +50,6 @@ export class NetworkingService {
     this.stateCache = stateCache
     this.logger = logging.createLogger('NetworkingManager')
     this.queryNodeApi = new QueryNodeApi(config.endpoints.queryNode, this.logging)
-    // this.runtimeApi = new RuntimeApi(config.endpoints.substrateNode)
     void this.checkActiveStorageNodeEndpoints()
     // Queues
     this.testLatencyQueue = queue({ concurrency: MAX_CONCURRENT_RESPONSE_TIME_CHECKS, autostart: true }).on(
@@ -93,14 +93,10 @@
 
   private prepareStorageNodeEndpoints(details: DataObjectDetailsFragment) {
     const endpointsData = details.storageBag.storageAssignments
-      .filter(
-        (a) =>
-          a.storageBucket.operatorStatus.__typename === 'StorageBucketOperatorStatusActive' &&
-          a.storageBucket.operatorMetadata?.nodeEndpoint
-      )
+      .filter((a) => a.storageBucket.operatorStatus.__typename === 'StorageBucketOperatorStatusActive')
       .map((a) => {
-        const rootEndpoint = a.storageBucket.operatorMetadata!.nodeEndpoint!
-        const apiEndpoint = this.getApiEndpoint(rootEndpoint)
+        const rootEndpoint = a.storageBucket.operatorMetadata?.nodeEndpoint
+        const apiEndpoint = rootEndpoint ? this.getApiEndpoint(rootEndpoint) : ''
         return {
           bucketId: a.storageBucket.id,
           endpoint: apiEndpoint,
@@ -116,32 +112,45 @@ export class NetworkingService {
     }
   }
 
+  private getDataObjectActiveDistributorsSet(objectDetails: DataObjectDetailsFragment): Set<number> {
+    const activeDistributorsSet = new Set<number>()
+    const { distirbutionAssignments } = objectDetails.storageBag
+    const distributionBuckets = distirbutionAssignments.map((a) => a.distributionBucket)
+    for (const bucket of distributionBuckets) {
+      for (const operator of bucket.operators) {
+        if (operator.status === DistributionBucketOperatorStatus.Active) {
+          activeDistributorsSet.add(operator.workerId)
+        }
+      }
+    }
+    return activeDistributorsSet
+  }
+
   public async dataObjectInfo(objectId: string): Promise<DataObjectInfo> {
     const details = await this.queryNodeApi.getDataObjectDetails(objectId)
-    return {
-      exists: !!details,
-      isSupported:
-        (this.config.buckets === 'all' &&
-          details?.storageBag.distirbutionAssignments.some((d) =>
-            d.distributionBucket.operators.some(
-              (o) => o.workerId === this.config.workerId && o.status === DistributionBucketOperatorStatus.Active
-            )
-          )) ||
-        (Array.isArray(this.config.buckets) &&
-          this.config.buckets.some((bucketId) =>
-            details?.storageBag.distirbutionAssignments
-              .map((a) => a.distributionBucket.id)
-              .includes(bucketId.toString())
-          )),
-      data: details
-        ? {
-            objectId,
-            accessPoints: this.parseDataObjectAccessPoints(details),
-            contentHash: details.ipfsHash,
-            size: parseInt(details.size),
-          }
-        : undefined,
+    let exists = false
+    let isSupported = false
+    let data: DataObjectData | undefined
+    if (details) {
+      exists = true
+      if (this.config.buckets === 'all') {
+        const distributors = this.getDataObjectActiveDistributorsSet(details)
+        isSupported = distributors.has(this.config.workerId)
+      } else {
+        const supportedBucketIds = this.config.buckets.map((id) => id.toString())
+        isSupported = details.storageBag.distirbutionAssignments.some((a) =>
+          supportedBucketIds.includes(a.distributionBucket.id)
+        )
+      }
+      data = {
+        objectId,
+        accessPoints: this.parseDataObjectAccessPoints(details),
+        contentHash: details.ipfsHash,
+        size: parseInt(details.size),
+      }
     }
+
+    return { exists, isSupported, data }
   }
 
   private sortEndpointsByMeanResponseTime(endpoints: string[]) {
@@ -164,7 +173,7 @@ export class NetworkingService {
       startAt,
     } = downloadData
 
-    pendingDownload.status = 'LookingForSource'
+    pendingDownload.status = PendingDownloadStatus.LookingForSource
 
     return new Promise<void>((resolve, reject) => {
       // Handlers:
@@ -176,7 +185,7 @@ export class NetworkingService {
 
 
       const sourceFound = (response: StorageNodeDownloadResponse) => {
       const sourceFound = (response: StorageNodeDownloadResponse) => {
         this.logger.info('Download source chosen', { objectId, source: response.config.url })
         this.logger.info('Download source chosen', { objectId, source: response.config.url })
-        pendingDownload.status = 'Downloading'
+        pendingDownload.status = PendingDownloadStatus.Downloading
         onSourceFound(response)
         onSourceFound(response)
       }
       }
 
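The string literals are replaced here by members of a `PendingDownloadStatus` enum whose definition lives in the new `src/types/content.ts` (see the file list above). The sketch below is therefore an assumption covering only the members used in this diff:

```ts
// Assumed shape; the authoritative definition is in src/types/content.ts.
enum PendingDownloadStatus {
  LookingForSource = 'LookingForSource',
  Downloading = 'Downloading',
  // ...further lifecycle states omitted here
}
```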
 
@@ -211,7 +220,7 @@ export class NetworkingService {
          const api = new StorageNodeApi(endpoint, this.logging)
          const available = await api.isObjectAvailable(objectId)
          if (!available) {
-            throw new Error('Not avilable')
+            throw new Error('Not available')
           }
           return endpoint
         })
@@ -308,7 +317,7 @@ export class NetworkingService {
       const endpoints = this.filterStorageNodeEndpoints(
         activeStorageOperators.map(({ id, operatorMetadata }) => ({
           bucketId: id,
-          endpoint: this.getApiEndpoint(operatorMetadata!.nodeEndpoint!),
+          endpoint: operatorMetadata?.nodeEndpoint ? this.getApiEndpoint(operatorMetadata.nodeEndpoint) : '',
         }))
       )
       this.logger.verbose('Checking nearby storage nodes...', { validEndpointsCount: endpoints.length })

+ 50 - 3
distributor-node/src/services/networking/query-node/api.ts

@@ -22,6 +22,23 @@ import {
 } from './generated/queries'
 import { Maybe } from './generated/schema'
 
+const MAX_RESULTS_PER_QUERY = 1000
+
+type PaginationQueryVariables = {
+  limit: number
+  lastCursor?: Maybe<string>
+}
+
+type PaginationQueryResult<T = unknown> = {
+  edges: { node: T }[]
+  pageInfo: {
+    hasNextPage: boolean
+    endCursor?: Maybe<string>
+  }
+}
+
+type CustomVariables<T> = Omit<T, keyof PaginationQueryVariables>
+
 export class QueryNodeApi {
   private apolloClient: ApolloClient<NormalizedCacheObject>
   private logger: Logger
@@ -68,6 +85,35 @@ export class QueryNodeApi {
     return (await this.apolloClient.query<QueryT, VariablesT>({ query, variables })).data[resultKey]
   }
 
+  protected async multipleEntitiesWithPagination<
+    NodeT,
+    QueryT extends { [k: string]: PaginationQueryResult<NodeT> },
+    CustomVariablesT extends Record<string, unknown>
+  >(
+    query: DocumentNode,
+    variables: CustomVariablesT,
+    resultKey: keyof QueryT,
+    itemsPerPage = MAX_RESULTS_PER_QUERY
+  ): Promise<NodeT[]> {
+    let hasNextPage = true
+    let results: NodeT[] = []
+    let lastCursor: string | undefined
+    while (hasNextPage) {
+      const paginationVariables = { limit: itemsPerPage, lastCursor }
+      const queryVariables = { ...variables, ...paginationVariables }
+      const page = (
+        await this.apolloClient.query<QueryT, PaginationQueryVariables & CustomVariablesT>({
+          query,
+          variables: queryVariables,
+        })
+      ).data[resultKey]
+      results = results.concat(page.edges.map((e) => e.node))
+      hasNextPage = page.pageInfo.hasNextPage
+      lastCursor = page.pageInfo.endCursor || undefined
+    }
+    return results
+  }
+
   public getDataObjectDetails(objectId: string): Promise<DataObjectDetailsFragment | null> {
     return this.uniqueEntityQuery<GetDataObjectDetailsQuery, GetDataObjectDetailsQueryVariables>(
       GetDataObjectDetails,
@@ -93,9 +139,10 @@ export class QueryNodeApi {
   }
 
   public getActiveStorageBucketOperatorsData(): Promise<StorageBucketOperatorFieldsFragment[]> {
-    return this.multipleEntitiesQuery<
+    return this.multipleEntitiesWithPagination<
+      StorageBucketOperatorFieldsFragment,
       GetActiveStorageBucketOperatorsDataQuery,
       GetActiveStorageBucketOperatorsDataQuery,
-      GetActiveStorageBucketOperatorsDataQueryVariables
-    >(GetActiveStorageBucketOperatorsData, {}, 'storageBuckets')
+      CustomVariables<GetActiveStorageBucketOperatorsDataQueryVariables>
+    >(GetActiveStorageBucketOperatorsData, {}, 'storageBucketsConnection')
   }
 }

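The `multipleEntitiesWithPagination` helper added above is the standard Relay-connection pagination loop. A self-contained sketch of the same pattern, decoupled from Apollo (the `fetchPage` callback is hypothetical, standing in for the `apolloClient.query` call):

```ts
// Generic Relay-style pagination: request pages until hasNextPage is false.
type Page<T> = { edges: { node: T }[]; pageInfo: { hasNextPage: boolean; endCursor?: string | null } }

async function fetchAll<T>(fetchPage: (limit: number, after?: string) => Promise<Page<T>>, limit = 1000): Promise<T[]> {
  const results: T[] = []
  let after: string | undefined
  let hasNextPage = true
  while (hasNextPage) {
    const page = await fetchPage(limit, after)
    // Collect this page's nodes, then follow the cursor.
    results.push(...page.edges.map((e) => e.node))
    hasNextPage = page.pageInfo.hasNextPage
    after = page.pageInfo.endCursor || undefined
  }
  return results
}
```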
+ 60 - 20
distributor-node/src/services/networking/query-node/generated/queries.ts

@@ -1,7 +1,13 @@
 import * as Types from './schema';
 
 import gql from 'graphql-tag';
-export type DataObjectDetailsFragment = { id: string, size: any, ipfsHash: string, isAccepted: boolean, storageBag: { storageAssignments: Array<{ storageBucket: { id: string, operatorMetadata?: Types.Maybe<{ nodeEndpoint?: Types.Maybe<string> }>, operatorStatus: { __typename: 'StorageBucketOperatorStatusMissing' } | { __typename: 'StorageBucketOperatorStatusInvited' } | { __typename: 'StorageBucketOperatorStatusActive' } } }>, distirbutionAssignments: Array<{ distributionBucket: { id: string, operators: Array<{ workerId: number, status: Types.DistributionBucketOperatorStatus }> } }> } };
+export type DistributionBucketOperatorDetailsFragment = { workerId: number, status: Types.DistributionBucketOperatorStatus };
+
+export type DistributionBucketDetailsFragment = { id: string, operators: Array<DistributionBucketOperatorDetailsFragment> };
+
+export type StorageBucketDetailsFragment = { id: string, operatorMetadata?: Types.Maybe<{ nodeEndpoint?: Types.Maybe<string> }>, operatorStatus: { __typename: 'StorageBucketOperatorStatusMissing' } | { __typename: 'StorageBucketOperatorStatusInvited' } | { __typename: 'StorageBucketOperatorStatusActive' } };
+
+export type DataObjectDetailsFragment = { id: string, size: any, ipfsHash: string, isAccepted: boolean, storageBag: { storageAssignments: Array<{ storageBucket: StorageBucketDetailsFragment }>, distirbutionAssignments: Array<{ distributionBucket: DistributionBucketDetailsFragment }> } };
 
 
 export type GetDataObjectDetailsQueryVariables = Types.Exact<{
   id: Types.Scalars['ID'];
@@ -28,11 +34,41 @@ export type GetDistributionBucketsWithObjectsByWorkerIdQuery = { distributionBuc
 
 
 export type StorageBucketOperatorFieldsFragment = { id: string, operatorMetadata?: Types.Maybe<{ nodeEndpoint?: Types.Maybe<string> }> };
 
-export type GetActiveStorageBucketOperatorsDataQueryVariables = Types.Exact<{ [key: string]: never; }>;
+export type StorageBucketsConnectionFieldsFragment = { edges: Array<{ node: StorageBucketOperatorFieldsFragment }>, pageInfo: { hasNextPage: boolean, endCursor?: Types.Maybe<string> } };
+
+export type GetActiveStorageBucketOperatorsDataQueryVariables = Types.Exact<{
+  limit: Types.Scalars['Int'];
+  lastCursor?: Types.Maybe<Types.Scalars['String']>;
+}>;
 
 
 
 
-export type GetActiveStorageBucketOperatorsDataQuery = { storageBuckets: Array<StorageBucketOperatorFieldsFragment> };
+export type GetActiveStorageBucketOperatorsDataQuery = { storageBucketsConnection: StorageBucketsConnectionFieldsFragment };
 
 
+export const StorageBucketDetails = gql`
+    fragment StorageBucketDetails on StorageBucket {
+  id
+  operatorMetadata {
+    nodeEndpoint
+  }
+  operatorStatus {
+    __typename
+  }
+}
+    `;
+export const DistributionBucketOperatorDetails = gql`
+    fragment DistributionBucketOperatorDetails on DistributionBucketOperator {
+  workerId
+  status
+}
+    `;
+export const DistributionBucketDetails = gql`
+    fragment DistributionBucketDetails on DistributionBucket {
+  id
+  operators {
+    ...DistributionBucketOperatorDetails
+  }
+}
+    ${DistributionBucketOperatorDetails}`;
 export const DataObjectDetails = gql`
 export const DataObjectDetails = gql`
     fragment DataObjectDetails on StorageDataObject {
   id
   storageBag {
   storageBag {
     storageAssignments {
       storageBucket {
-        id
-        operatorMetadata {
-          nodeEndpoint
-        }
-        operatorStatus {
-          __typename
-        }
+        ...StorageBucketDetails
       }
     }
     distirbutionAssignments {
       distributionBucket {
-        id
-        operators {
-          workerId
-          status
-        }
+        ...DistributionBucketDetails
       }
     }
   }
 }
-    `;
+    ${StorageBucketDetails}
+${DistributionBucketDetails}`;
 export const DistirubtionBucketWithObjects = gql`
     fragment DistirubtionBucketWithObjects on DistributionBucket {
   id
@@ -85,6 +112,19 @@ export const StorageBucketOperatorFields = gql`
   }
 }
     `;
+export const StorageBucketsConnectionFields = gql`
+    fragment StorageBucketsConnectionFields on StorageBucketConnection {
+  edges {
+    node {
+      ...StorageBucketOperatorFields
+    }
+  }
+  pageInfo {
+    hasNextPage
+    endCursor
+  }
+}
+    ${StorageBucketOperatorFields}`;
 export const GetDataObjectDetails = gql`
     query getDataObjectDetails($id: ID!) {
   storageDataObjectByUniqueInput(where: {id: $id}) {
@@ -107,9 +147,9 @@ export const GetDistributionBucketsWithObjectsByWorkerId = gql`
 }
     ${DistirubtionBucketWithObjects}`;
 export const GetActiveStorageBucketOperatorsData = gql`
-    query getActiveStorageBucketOperatorsData {
-  storageBuckets(where: {operatorStatus_json: {isTypeOf_eq: "StorageBucketOperatorStatusActive"}, operatorMetadata: {nodeEndpoint_contains: "http"}}, limit: 9999) {
-    ...StorageBucketOperatorFields
+    query getActiveStorageBucketOperatorsData($limit: Int!, $lastCursor: String) {
+  storageBucketsConnection(first: $limit, after: $lastCursor, where: {operatorStatus_json: {isTypeOf_eq: "StorageBucketOperatorStatusActive"}}) {
+    ...StorageBucketsConnectionFields
   }
 }
-    ${StorageBucketOperatorFields}`;
+    ${StorageBucketsConnectionFields}`;

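For readers unfamiliar with the `graphql-tag` fragment interpolation used above: embedding one document inside another appends the fragment definition to the final query document, which is exactly what the generated `${StorageBucketDetails}` and `${DistributionBucketDetails}` lines do. A toy illustration (the query and its field names are illustrative, not taken from this codebase):

```ts
import gql from 'graphql-tag'

// A reusable fragment document...
const OperatorFields = gql`
  fragment OperatorFields on DistributionBucketOperator {
    workerId
    status
  }
`

// ...interpolated into a query, so the server receives both the query
// and the fragment definition it references.
const GetOperators = gql`
  query getOperators($id: ID!) {
    distributionBucketByUniqueInput(where: { id: $id }) {
      operators {
        ...OperatorFields
      }
    }
  }
  ${OperatorFields}
`
```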
+ 436 - 146
distributor-node/src/services/networking/query-node/generated/schema.ts

@@ -11,119 +11,10 @@ export type Scalars = {
   Float: number;
   /** The javascript `Date` as string. Type represents date and time as the ISO Date string. */
   DateTime: any;
-  /** The `JSONObject` scalar type represents JSON objects as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf). */
-  JSONObject: any;
   /** GraphQL representation of BigInt */
   BigInt: any;
-};
-
-export type Asset = AssetExternal | AssetJoystreamStorage | AssetNone;
-
-export type AssetExternal = {
-  /** JSON array of the urls */
-  urls: Scalars['String'];
-};
-
-export type AssetExternalCreateInput = {
-  urls: Scalars['String'];
-};
-
-export type AssetExternalUpdateInput = {
-  urls?: Maybe<Scalars['String']>;
-};
-
-export type AssetExternalWhereInput = {
-  id_eq?: Maybe<Scalars['ID']>;
-  id_in?: Maybe<Array<Scalars['ID']>>;
-  createdAt_eq?: Maybe<Scalars['DateTime']>;
-  createdAt_lt?: Maybe<Scalars['DateTime']>;
-  createdAt_lte?: Maybe<Scalars['DateTime']>;
-  createdAt_gt?: Maybe<Scalars['DateTime']>;
-  createdAt_gte?: Maybe<Scalars['DateTime']>;
-  createdById_eq?: Maybe<Scalars['ID']>;
-  createdById_in?: Maybe<Array<Scalars['ID']>>;
-  updatedAt_eq?: Maybe<Scalars['DateTime']>;
-  updatedAt_lt?: Maybe<Scalars['DateTime']>;
-  updatedAt_lte?: Maybe<Scalars['DateTime']>;
-  updatedAt_gt?: Maybe<Scalars['DateTime']>;
-  updatedAt_gte?: Maybe<Scalars['DateTime']>;
-  updatedById_eq?: Maybe<Scalars['ID']>;
-  updatedById_in?: Maybe<Array<Scalars['ID']>>;
-  deletedAt_all?: Maybe<Scalars['Boolean']>;
-  deletedAt_eq?: Maybe<Scalars['DateTime']>;
-  deletedAt_lt?: Maybe<Scalars['DateTime']>;
-  deletedAt_lte?: Maybe<Scalars['DateTime']>;
-  deletedAt_gt?: Maybe<Scalars['DateTime']>;
-  deletedAt_gte?: Maybe<Scalars['DateTime']>;
-  deletedById_eq?: Maybe<Scalars['ID']>;
-  deletedById_in?: Maybe<Array<Scalars['ID']>>;
-  urls_eq?: Maybe<Scalars['String']>;
-  urls_contains?: Maybe<Scalars['String']>;
-  urls_startsWith?: Maybe<Scalars['String']>;
-  urls_endsWith?: Maybe<Scalars['String']>;
-  urls_in?: Maybe<Array<Scalars['String']>>;
-  AND?: Maybe<Array<AssetExternalWhereInput>>;
-  OR?: Maybe<Array<AssetExternalWhereInput>>;
-};
-
-export type AssetExternalWhereUniqueInput = {
-  id: Scalars['ID'];
-};
-
-export type AssetJoystreamStorage = {
-  /** Related data object */
-  dataObject?: Maybe<StorageDataObject>;
-};
-
-export type AssetNone = {
-  phantom?: Maybe<Scalars['Int']>;
-};
-
-export type AssetNoneCreateInput = {
-  phantom?: Maybe<Scalars['Float']>;
-};
-
-export type AssetNoneUpdateInput = {
-  phantom?: Maybe<Scalars['Float']>;
-};
-
-export type AssetNoneWhereInput = {
-  id_eq?: Maybe<Scalars['ID']>;
-  id_in?: Maybe<Array<Scalars['ID']>>;
-  createdAt_eq?: Maybe<Scalars['DateTime']>;
-  createdAt_lt?: Maybe<Scalars['DateTime']>;
-  createdAt_lte?: Maybe<Scalars['DateTime']>;
-  createdAt_gt?: Maybe<Scalars['DateTime']>;
-  createdAt_gte?: Maybe<Scalars['DateTime']>;
-  createdById_eq?: Maybe<Scalars['ID']>;
-  createdById_in?: Maybe<Array<Scalars['ID']>>;
-  updatedAt_eq?: Maybe<Scalars['DateTime']>;
-  updatedAt_lt?: Maybe<Scalars['DateTime']>;
-  updatedAt_lte?: Maybe<Scalars['DateTime']>;
-  updatedAt_gt?: Maybe<Scalars['DateTime']>;
-  updatedAt_gte?: Maybe<Scalars['DateTime']>;
-  updatedById_eq?: Maybe<Scalars['ID']>;
-  updatedById_in?: Maybe<Array<Scalars['ID']>>;
-  deletedAt_all?: Maybe<Scalars['Boolean']>;
-  deletedAt_eq?: Maybe<Scalars['DateTime']>;
-  deletedAt_lt?: Maybe<Scalars['DateTime']>;
-  deletedAt_lte?: Maybe<Scalars['DateTime']>;
-  deletedAt_gt?: Maybe<Scalars['DateTime']>;
-  deletedAt_gte?: Maybe<Scalars['DateTime']>;
-  deletedById_eq?: Maybe<Scalars['ID']>;
-  deletedById_in?: Maybe<Array<Scalars['ID']>>;
-  phantom_eq?: Maybe<Scalars['Int']>;
-  phantom_gt?: Maybe<Scalars['Int']>;
-  phantom_gte?: Maybe<Scalars['Int']>;
-  phantom_lt?: Maybe<Scalars['Int']>;
-  phantom_lte?: Maybe<Scalars['Int']>;
-  phantom_in?: Maybe<Array<Scalars['Int']>>;
-  AND?: Maybe<Array<AssetNoneWhereInput>>;
-  OR?: Maybe<Array<AssetNoneWhereInput>>;
-};
-
-export type AssetNoneWhereUniqueInput = {
-  id: Scalars['ID'];
+  /** The `JSONObject` scalar type represents JSON objects as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf). */
+  JSONObject: any;
 };
 
 export type BaseGraphQlObject = {
@@ -207,10 +98,10 @@ export type Channel = BaseGraphQlObject & {
   title?: Maybe<Scalars['String']>;
   /** The description of a Channel */
   description?: Maybe<Scalars['String']>;
-  /** Channel's cover (background) photo asset. Recommended ratio: 16:9. */
-  coverPhoto: Asset;
-  /** Channel's avatar photo asset. */
-  avatarPhoto: Asset;
+  coverPhoto?: Maybe<StorageDataObject>;
+  coverPhotoId?: Maybe<Scalars['String']>;
+  avatarPhoto?: Maybe<StorageDataObject>;
+  avatarPhotoId?: Maybe<Scalars['String']>;
   /** Flag signaling whether a channel is public. */
   isPublic?: Maybe<Scalars['Boolean']>;
   /** Flag signaling whether a channel is censored. */
@@ -341,8 +232,8 @@ export type ChannelCreateInput = {
   deletionPrizeDestAccount: Scalars['String'];
   title?: Maybe<Scalars['String']>;
   description?: Maybe<Scalars['String']>;
-  coverPhoto: Scalars['JSONObject'];
-  avatarPhoto: Scalars['JSONObject'];
+  coverPhoto?: Maybe<Scalars['ID']>;
+  avatarPhoto?: Maybe<Scalars['ID']>;
   isPublic?: Maybe<Scalars['Boolean']>;
   isCensored: Scalars['Boolean'];
   language?: Maybe<Scalars['ID']>;
@@ -375,6 +266,10 @@ export enum ChannelOrderByInput {
   TitleDesc = 'title_DESC',
   DescriptionAsc = 'description_ASC',
   DescriptionDesc = 'description_DESC',
+  CoverPhotoAsc = 'coverPhoto_ASC',
+  CoverPhotoDesc = 'coverPhoto_DESC',
+  AvatarPhotoAsc = 'avatarPhoto_ASC',
+  AvatarPhotoDesc = 'avatarPhoto_DESC',
   IsPublicAsc = 'isPublic_ASC',
   IsPublicDesc = 'isPublic_DESC',
   IsCensoredAsc = 'isCensored_ASC',
@@ -393,8 +288,8 @@ export type ChannelUpdateInput = {
   deletionPrizeDestAccount?: Maybe<Scalars['String']>;
   title?: Maybe<Scalars['String']>;
   description?: Maybe<Scalars['String']>;
-  coverPhoto?: Maybe<Scalars['JSONObject']>;
-  avatarPhoto?: Maybe<Scalars['JSONObject']>;
+  coverPhoto?: Maybe<Scalars['ID']>;
+  avatarPhoto?: Maybe<Scalars['ID']>;
   isPublic?: Maybe<Scalars['Boolean']>;
   isCensored?: Maybe<Scalars['Boolean']>;
   language?: Maybe<Scalars['ID']>;
@@ -452,8 +347,10 @@ export type ChannelWhereInput = {
   description_startsWith?: Maybe<Scalars['String']>;
   description_endsWith?: Maybe<Scalars['String']>;
   description_in?: Maybe<Array<Scalars['String']>>;
-  coverPhoto_json?: Maybe<Scalars['JSONObject']>;
-  avatarPhoto_json?: Maybe<Scalars['JSONObject']>;
+  coverPhoto_eq?: Maybe<Scalars['ID']>;
+  coverPhoto_in?: Maybe<Array<Scalars['ID']>>;
+  avatarPhoto_eq?: Maybe<Scalars['ID']>;
+  avatarPhoto_in?: Maybe<Array<Scalars['ID']>>;
   isPublic_eq?: Maybe<Scalars['Boolean']>;
   isPublic_in?: Maybe<Array<Scalars['Boolean']>>;
   isCensored_eq?: Maybe<Scalars['Boolean']>;
@@ -469,6 +366,8 @@ export type ChannelWhereInput = {
   ownerMember?: Maybe<MembershipWhereInput>;
   ownerCuratorGroup?: Maybe<CuratorGroupWhereInput>;
   category?: Maybe<ChannelCategoryWhereInput>;
+  coverPhoto?: Maybe<StorageDataObjectWhereInput>;
+  avatarPhoto?: Maybe<StorageDataObjectWhereInput>;
   language?: Maybe<LanguageWhereInput>;
   videos_none?: Maybe<VideoWhereInput>;
   videos_some?: Maybe<VideoWhereInput>;
@@ -481,6 +380,16 @@ export type ChannelWhereUniqueInput = {
   id: Scalars['ID'];
 };
 
+export enum Continent {
+  Af = 'AF',
+  Na = 'NA',
+  Oc = 'OC',
+  An = 'AN',
+  As = 'AS',
+  Eu = 'EU',
+  Sa = 'SA'
+}
+
 export type CuratorGroup = BaseGraphQlObject & {
   id: Scalars['ID'];
   createdAt: Scalars['DateTime'];
@@ -567,6 +476,79 @@ export type CuratorGroupWhereUniqueInput = {
   id: Scalars['ID'];
 };
 
+export type DataObjectType = DataObjectTypeChannelAvatar | DataObjectTypeChannelCoverPhoto | DataObjectTypeVideoMedia | DataObjectTypeVideoThumbnail | DataObjectTypeUnknown;
+
+export type DataObjectTypeChannelAvatar = {
+  /** Related channel entity */
+  channel?: Maybe<Channel>;
+};
+
+export type DataObjectTypeChannelCoverPhoto = {
+  /** Related channel entity */
+  channel?: Maybe<Channel>;
+};
+
+export type DataObjectTypeUnknown = {
+  phantom?: Maybe<Scalars['Int']>;
+};
+
+export type DataObjectTypeUnknownCreateInput = {
+  phantom?: Maybe<Scalars['Float']>;
+};
+
+export type DataObjectTypeUnknownUpdateInput = {
+  phantom?: Maybe<Scalars['Float']>;
+};
+
+export type DataObjectTypeUnknownWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  phantom_eq?: Maybe<Scalars['Int']>;
+  phantom_gt?: Maybe<Scalars['Int']>;
+  phantom_gte?: Maybe<Scalars['Int']>;
+  phantom_lt?: Maybe<Scalars['Int']>;
+  phantom_lte?: Maybe<Scalars['Int']>;
+  phantom_in?: Maybe<Array<Scalars['Int']>>;
+  AND?: Maybe<Array<DataObjectTypeUnknownWhereInput>>;
+  OR?: Maybe<Array<DataObjectTypeUnknownWhereInput>>;
+};
+
+export type DataObjectTypeUnknownWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type DataObjectTypeVideoMedia = {
+  /** Related video entity */
+  video?: Maybe<Video>;
+};
+
+export type DataObjectTypeVideoThumbnail = {
+  /** Related video entity */
+  video?: Maybe<Video>;
+};
+
 
 
 export type DeleteResponse = {
   id: Scalars['ID'];
@@ -637,6 +619,90 @@ export type DistributionBucketFamilyEdge = {
   cursor: Scalars['String'];
 };
 
 
+export type DistributionBucketFamilyGeographicArea = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** Geographical area (continent / country / subdivision) */
+  area: GeographicalArea;
+  distributionBucketFamilyMetadata: DistributionBucketFamilyMetadata;
+  distributionBucketFamilyMetadataId: Scalars['String'];
+};
+
+export type DistributionBucketFamilyGeographicAreaConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<DistributionBucketFamilyGeographicAreaEdge>;
+  pageInfo: PageInfo;
+};
+
+export type DistributionBucketFamilyGeographicAreaCreateInput = {
+  area: Scalars['JSONObject'];
+  distributionBucketFamilyMetadata: Scalars['ID'];
+};
+
+export type DistributionBucketFamilyGeographicAreaEdge = {
+  node: DistributionBucketFamilyGeographicArea;
+  cursor: Scalars['String'];
+};
+
+export enum DistributionBucketFamilyGeographicAreaOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  DistributionBucketFamilyMetadataAsc = 'distributionBucketFamilyMetadata_ASC',
+  DistributionBucketFamilyMetadataDesc = 'distributionBucketFamilyMetadata_DESC'
+}
+
+export type DistributionBucketFamilyGeographicAreaUpdateInput = {
+  area?: Maybe<Scalars['JSONObject']>;
+  distributionBucketFamilyMetadata?: Maybe<Scalars['ID']>;
+};
+
+export type DistributionBucketFamilyGeographicAreaWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  area_json?: Maybe<Scalars['JSONObject']>;
+  distributionBucketFamilyMetadata_eq?: Maybe<Scalars['ID']>;
+  distributionBucketFamilyMetadata_in?: Maybe<Array<Scalars['ID']>>;
+  distributionBucketFamilyMetadata?: Maybe<DistributionBucketFamilyMetadataWhereInput>;
+  AND?: Maybe<Array<DistributionBucketFamilyGeographicAreaWhereInput>>;
+  OR?: Maybe<Array<DistributionBucketFamilyGeographicAreaWhereInput>>;
+};
+
+export type DistributionBucketFamilyGeographicAreaWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
 export type DistributionBucketFamilyMetadata = BaseGraphQlObject & {
   id: Scalars['ID'];
   createdAt: Scalars['DateTime'];
@@ -650,7 +716,9 @@ export type DistributionBucketFamilyMetadata = BaseGraphQlObject & {
   region?: Maybe<Scalars['String']>;
   /** Optional, more specific description of the region covered by the family */
   description?: Maybe<Scalars['String']>;
-  boundary: Array<GeoCoordinates>;
+  areas: Array<DistributionBucketFamilyGeographicArea>;
+  /** List of targets (hosts/ips) best suited for latency measurements for the family */
+  latencyTestTargets?: Maybe<Array<Scalars['String']>>;
   distributionbucketfamilymetadata?: Maybe<Array<DistributionBucketFamily>>;
 };
 
 
@@ -663,6 +731,7 @@ export type DistributionBucketFamilyMetadataConnection = {
 export type DistributionBucketFamilyMetadataCreateInput = {
   region?: Maybe<Scalars['String']>;
   description?: Maybe<Scalars['String']>;
+  latencyTestTargets?: Maybe<Array<Scalars['String']>>;
 };
 
 export type DistributionBucketFamilyMetadataEdge = {
@@ -686,6 +755,7 @@ export enum DistributionBucketFamilyMetadataOrderByInput {
 export type DistributionBucketFamilyMetadataUpdateInput = {
   region?: Maybe<Scalars['String']>;
   description?: Maybe<Scalars['String']>;
+  latencyTestTargets?: Maybe<Array<Scalars['String']>>;
 };
 
 
 export type DistributionBucketFamilyMetadataWhereInput = {
 export type DistributionBucketFamilyMetadataWhereInput = {
@@ -723,9 +793,9 @@ export type DistributionBucketFamilyMetadataWhereInput = {
   description_startsWith?: Maybe<Scalars['String']>;
   description_endsWith?: Maybe<Scalars['String']>;
   description_in?: Maybe<Array<Scalars['String']>>;
-  boundary_none?: Maybe<GeoCoordinatesWhereInput>;
-  boundary_some?: Maybe<GeoCoordinatesWhereInput>;
-  boundary_every?: Maybe<GeoCoordinatesWhereInput>;
+  areas_none?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>;
+  areas_some?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>;
+  areas_every?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>;
   distributionbucketfamilymetadata_none?: Maybe<DistributionBucketFamilyWhereInput>;
   distributionbucketfamilymetadata_some?: Maybe<DistributionBucketFamilyWhereInput>;
   distributionbucketfamilymetadata_every?: Maybe<DistributionBucketFamilyWhereInput>;
@@ -1087,8 +1157,6 @@ export type GeoCoordinates = BaseGraphQlObject & {
   version: Scalars['Int'];
   latitude: Scalars['Float'];
   longitude: Scalars['Float'];
-  boundarySourceBucketFamilyMeta?: Maybe<DistributionBucketFamilyMetadata>;
-  boundarySourceBucketFamilyMetaId?: Maybe<Scalars['String']>;
   nodelocationmetadatacoordinates?: Maybe<Array<NodeLocationMetadata>>;
 };
 
 
@@ -1101,7 +1169,6 @@ export type GeoCoordinatesConnection = {
 export type GeoCoordinatesCreateInput = {
   latitude: Scalars['Float'];
   longitude: Scalars['Float'];
-  boundarySourceBucketFamilyMeta?: Maybe<Scalars['ID']>;
 };
 
 export type GeoCoordinatesEdge = {
@@ -1119,15 +1186,12 @@ export enum GeoCoordinatesOrderByInput {
   LatitudeAsc = 'latitude_ASC',
   LatitudeDesc = 'latitude_DESC',
   LongitudeAsc = 'longitude_ASC',
-  LongitudeDesc = 'longitude_DESC',
-  BoundarySourceBucketFamilyMetaAsc = 'boundarySourceBucketFamilyMeta_ASC',
-  BoundarySourceBucketFamilyMetaDesc = 'boundarySourceBucketFamilyMeta_DESC'
+  LongitudeDesc = 'longitude_DESC'
 }
 
 
 export type GeoCoordinatesUpdateInput = {
   latitude?: Maybe<Scalars['Float']>;
   longitude?: Maybe<Scalars['Float']>;
-  boundarySourceBucketFamilyMeta?: Maybe<Scalars['ID']>;
 };
 
 export type GeoCoordinatesWhereInput = {
@@ -1167,9 +1231,6 @@ export type GeoCoordinatesWhereInput = {
   longitude_lt?: Maybe<Scalars['Float']>;
   longitude_lte?: Maybe<Scalars['Float']>;
   longitude_in?: Maybe<Array<Scalars['Float']>>;
-  boundarySourceBucketFamilyMeta_eq?: Maybe<Scalars['ID']>;
-  boundarySourceBucketFamilyMeta_in?: Maybe<Array<Scalars['ID']>>;
-  boundarySourceBucketFamilyMeta?: Maybe<DistributionBucketFamilyMetadataWhereInput>;
   nodelocationmetadatacoordinates_none?: Maybe<NodeLocationMetadataWhereInput>;
   nodelocationmetadatacoordinates_some?: Maybe<NodeLocationMetadataWhereInput>;
   nodelocationmetadatacoordinates_every?: Maybe<NodeLocationMetadataWhereInput>;
@@ -1181,6 +1242,157 @@ export type GeoCoordinatesWhereUniqueInput = {
   id: Scalars['ID'];
 };
 
 
+export type GeographicalArea = GeographicalAreaContinent | GeographicalAreaCountry | GeographicalAreaSubdivistion;
+
+export type GeographicalAreaContinent = {
+  code?: Maybe<Continent>;
+};
+
+export type GeographicalAreaContinentCreateInput = {
+  code?: Maybe<Continent>;
+};
+
+export type GeographicalAreaContinentUpdateInput = {
+  code?: Maybe<Continent>;
+};
+
+export type GeographicalAreaContinentWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  code_eq?: Maybe<Continent>;
+  code_in?: Maybe<Array<Continent>>;
+  AND?: Maybe<Array<GeographicalAreaContinentWhereInput>>;
+  OR?: Maybe<Array<GeographicalAreaContinentWhereInput>>;
+};
+
+export type GeographicalAreaContinentWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type GeographicalAreaCountry = {
+  /** ISO 3166-1 alpha-2 country code */
+  code?: Maybe<Scalars['String']>;
+};
+
+export type GeographicalAreaCountryCreateInput = {
+  code?: Maybe<Scalars['String']>;
+};
+
+export type GeographicalAreaCountryUpdateInput = {
+  code?: Maybe<Scalars['String']>;
+};
+
+export type GeographicalAreaCountryWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  code_eq?: Maybe<Scalars['String']>;
+  code_contains?: Maybe<Scalars['String']>;
+  code_startsWith?: Maybe<Scalars['String']>;
+  code_endsWith?: Maybe<Scalars['String']>;
+  code_in?: Maybe<Array<Scalars['String']>>;
+  AND?: Maybe<Array<GeographicalAreaCountryWhereInput>>;
+  OR?: Maybe<Array<GeographicalAreaCountryWhereInput>>;
+};
+
+export type GeographicalAreaCountryWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type GeographicalAreaSubdivistion = {
+  /** ISO 3166-2 subdivision code */
+  code?: Maybe<Scalars['String']>;
+};
+
+export type GeographicalAreaSubdivistionCreateInput = {
+  code?: Maybe<Scalars['String']>;
+};
+
+export type GeographicalAreaSubdivistionUpdateInput = {
+  code?: Maybe<Scalars['String']>;
+};
+
+export type GeographicalAreaSubdivistionWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  code_eq?: Maybe<Scalars['String']>;
+  code_contains?: Maybe<Scalars['String']>;
+  code_startsWith?: Maybe<Scalars['String']>;
+  code_endsWith?: Maybe<Scalars['String']>;
+  code_in?: Maybe<Array<Scalars['String']>>;
+  AND?: Maybe<Array<GeographicalAreaSubdivistionWhereInput>>;
+  OR?: Maybe<Array<GeographicalAreaSubdivistionWhereInput>>;
+};
+
+export type GeographicalAreaSubdivistionWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
 
 
 export type Language = BaseGraphQlObject & {
   id: Scalars['ID'];
@@ -1701,6 +1913,9 @@ export type Query = {
   curatorGroups: Array<CuratorGroup>;
   curatorGroupByUniqueInput?: Maybe<CuratorGroup>;
   curatorGroupsConnection: CuratorGroupConnection;
+  distributionBucketFamilyGeographicAreas: Array<DistributionBucketFamilyGeographicArea>;
+  distributionBucketFamilyGeographicAreaByUniqueInput?: Maybe<DistributionBucketFamilyGeographicArea>;
+  distributionBucketFamilyGeographicAreasConnection: DistributionBucketFamilyGeographicAreaConnection;
   distributionBucketFamilyMetadata: Array<DistributionBucketFamilyMetadata>;
   distributionBucketFamilyMetadataByUniqueInput?: Maybe<DistributionBucketFamilyMetadata>;
   distributionBucketFamilyMetadataConnection: DistributionBucketFamilyMetadataConnection;
@@ -1843,6 +2058,29 @@ export type QueryCuratorGroupsConnectionArgs = {
 };
 
 
 
 
+export type QueryDistributionBucketFamilyGeographicAreasArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>;
+  orderBy?: Maybe<Array<DistributionBucketFamilyGeographicAreaOrderByInput>>;
+};
+
+
+export type QueryDistributionBucketFamilyGeographicAreaByUniqueInputArgs = {
+  where: DistributionBucketFamilyGeographicAreaWhereUniqueInput;
+};
+
+
+export type QueryDistributionBucketFamilyGeographicAreasConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>;
+  orderBy?: Maybe<Array<DistributionBucketFamilyGeographicAreaOrderByInput>>;
+};
+
+
 export type QueryDistributionBucketFamilyMetadataArgs = {
   offset?: Maybe<Scalars['Int']>;
   limit?: Maybe<Scalars['Int']>;
@@ -3370,6 +3608,16 @@ export type StorageDataObject = BaseGraphQlObject & {
   storageBagId: Scalars['String'];
   /** IPFS content hash */
   ipfsHash: Scalars['String'];
+  /** The type of the asset that the data object represents (if known) */
+  type: DataObjectType;
+  /** Prize for removing the data object */
+  deletionPrize: Scalars['BigInt'];
+  /** If the object is no longer used as an asset - the time at which it was unset (if known) */
+  unsetAt?: Maybe<Scalars['DateTime']>;
+  channelcoverPhoto?: Maybe<Array<Channel>>;
+  channelavatarPhoto?: Maybe<Array<Channel>>;
+  videothumbnailPhoto?: Maybe<Array<Video>>;
+  videomedia?: Maybe<Array<Video>>;
 };
 };
 
 
 export type StorageDataObjectConnection = {
 export type StorageDataObjectConnection = {
@@ -3383,6 +3631,9 @@ export type StorageDataObjectCreateInput = {
   size: Scalars['BigInt'];
   size: Scalars['BigInt'];
   storageBag: Scalars['ID'];
   storageBag: Scalars['ID'];
   ipfsHash: Scalars['String'];
   ipfsHash: Scalars['String'];
+  type: Scalars['JSONObject'];
+  deletionPrize: Scalars['BigInt'];
+  unsetAt?: Maybe<Scalars['DateTime']>;
 };
 };
 
 
 export type StorageDataObjectEdge = {
 export type StorageDataObjectEdge = {
@@ -3404,7 +3655,11 @@ export enum StorageDataObjectOrderByInput {
   StorageBagAsc = 'storageBag_ASC',
   StorageBagDesc = 'storageBag_DESC',
   IpfsHashAsc = 'ipfsHash_ASC',
-  IpfsHashDesc = 'ipfsHash_DESC'
+  IpfsHashDesc = 'ipfsHash_DESC',
+  DeletionPrizeAsc = 'deletionPrize_ASC',
+  DeletionPrizeDesc = 'deletionPrize_DESC',
+  UnsetAtAsc = 'unsetAt_ASC',
+  UnsetAtDesc = 'unsetAt_DESC'
 }
 
 
 export type StorageDataObjectUpdateInput = {
@@ -3412,6 +3667,9 @@ export type StorageDataObjectUpdateInput = {
   size?: Maybe<Scalars['BigInt']>;
   storageBag?: Maybe<Scalars['ID']>;
   ipfsHash?: Maybe<Scalars['String']>;
+  type?: Maybe<Scalars['JSONObject']>;
+  deletionPrize?: Maybe<Scalars['BigInt']>;
+  unsetAt?: Maybe<Scalars['DateTime']>;
 };
 
 export type StorageDataObjectWhereInput = {
@@ -3454,7 +3712,31 @@ export type StorageDataObjectWhereInput = {
   ipfsHash_startsWith?: Maybe<Scalars['String']>;
   ipfsHash_endsWith?: Maybe<Scalars['String']>;
   ipfsHash_in?: Maybe<Array<Scalars['String']>>;
+  type_json?: Maybe<Scalars['JSONObject']>;
+  deletionPrize_eq?: Maybe<Scalars['BigInt']>;
+  deletionPrize_gt?: Maybe<Scalars['BigInt']>;
+  deletionPrize_gte?: Maybe<Scalars['BigInt']>;
+  deletionPrize_lt?: Maybe<Scalars['BigInt']>;
+  deletionPrize_lte?: Maybe<Scalars['BigInt']>;
+  deletionPrize_in?: Maybe<Array<Scalars['BigInt']>>;
+  unsetAt_eq?: Maybe<Scalars['DateTime']>;
+  unsetAt_lt?: Maybe<Scalars['DateTime']>;
+  unsetAt_lte?: Maybe<Scalars['DateTime']>;
+  unsetAt_gt?: Maybe<Scalars['DateTime']>;
+  unsetAt_gte?: Maybe<Scalars['DateTime']>;
   storageBag?: Maybe<StorageBagWhereInput>;
+  channelcoverPhoto_none?: Maybe<ChannelWhereInput>;
+  channelcoverPhoto_some?: Maybe<ChannelWhereInput>;
+  channelcoverPhoto_every?: Maybe<ChannelWhereInput>;
+  channelavatarPhoto_none?: Maybe<ChannelWhereInput>;
+  channelavatarPhoto_some?: Maybe<ChannelWhereInput>;
+  channelavatarPhoto_every?: Maybe<ChannelWhereInput>;
+  videothumbnailPhoto_none?: Maybe<VideoWhereInput>;
+  videothumbnailPhoto_some?: Maybe<VideoWhereInput>;
+  videothumbnailPhoto_every?: Maybe<VideoWhereInput>;
+  videomedia_none?: Maybe<VideoWhereInput>;
+  videomedia_some?: Maybe<VideoWhereInput>;
+  videomedia_every?: Maybe<VideoWhereInput>;
   AND?: Maybe<Array<StorageDataObjectWhereInput>>;
   OR?: Maybe<Array<StorageDataObjectWhereInput>>;
 };
@@ -3641,8 +3923,8 @@ export type Video = BaseGraphQlObject & {
   description?: Maybe<Scalars['String']>;
   /** Video duration in seconds */
   duration?: Maybe<Scalars['Int']>;
-  /** Video thumbnail asset (recommended ratio: 16:9) */
-  thumbnailPhoto: Asset;
+  thumbnailPhoto?: Maybe<StorageDataObject>;
+  thumbnailPhotoId?: Maybe<Scalars['String']>;
   language?: Maybe<Language>;
   languageId?: Maybe<Scalars['String']>;
   /** Whether or not Video contains marketing */
@@ -3657,8 +3939,8 @@ export type Video = BaseGraphQlObject & {
   isExplicit?: Maybe<Scalars['Boolean']>;
   license?: Maybe<License>;
   licenseId?: Maybe<Scalars['String']>;
-  /** Video media asset */
-  media: Asset;
+  media?: Maybe<StorageDataObject>;
+  mediaId?: Maybe<Scalars['String']>;
   mediaMetadata?: Maybe<VideoMediaMetadata>;
   mediaMetadataId?: Maybe<Scalars['String']>;
   createdInBlock: Scalars['Int'];
@@ -3783,7 +4065,7 @@ export type VideoCreateInput = {
   title?: Maybe<Scalars['String']>;
   description?: Maybe<Scalars['String']>;
   duration?: Maybe<Scalars['Float']>;
-  thumbnailPhoto: Scalars['JSONObject'];
+  thumbnailPhoto?: Maybe<Scalars['ID']>;
   language?: Maybe<Scalars['ID']>;
   hasMarketing?: Maybe<Scalars['Boolean']>;
   publishedBeforeJoystream?: Maybe<Scalars['DateTime']>;
@@ -3791,7 +4073,7 @@ export type VideoCreateInput = {
   isCensored: Scalars['Boolean'];
   isExplicit?: Maybe<Scalars['Boolean']>;
   license?: Maybe<Scalars['ID']>;
-  media: Scalars['JSONObject'];
+  media?: Maybe<Scalars['ID']>;
   mediaMetadata?: Maybe<Scalars['ID']>;
   createdInBlock: Scalars['Float'];
   isFeatured: Scalars['Boolean'];
@@ -4054,6 +4336,8 @@ export enum VideoOrderByInput {
   DescriptionDesc = 'description_DESC',
   DurationAsc = 'duration_ASC',
   DurationDesc = 'duration_DESC',
+  ThumbnailPhotoAsc = 'thumbnailPhoto_ASC',
+  ThumbnailPhotoDesc = 'thumbnailPhoto_DESC',
   LanguageAsc = 'language_ASC',
   LanguageDesc = 'language_DESC',
   HasMarketingAsc = 'hasMarketing_ASC',
@@ -4068,6 +4352,8 @@ export enum VideoOrderByInput {
   IsExplicitDesc = 'isExplicit_DESC',
   LicenseAsc = 'license_ASC',
   LicenseDesc = 'license_DESC',
+  MediaAsc = 'media_ASC',
+  MediaDesc = 'media_DESC',
   MediaMetadataAsc = 'mediaMetadata_ASC',
   MediaMetadataDesc = 'mediaMetadata_DESC',
   CreatedInBlockAsc = 'createdInBlock_ASC',
@@ -4082,7 +4368,7 @@ export type VideoUpdateInput = {
   title?: Maybe<Scalars['String']>;
   description?: Maybe<Scalars['String']>;
   duration?: Maybe<Scalars['Float']>;
-  thumbnailPhoto?: Maybe<Scalars['JSONObject']>;
+  thumbnailPhoto?: Maybe<Scalars['ID']>;
   language?: Maybe<Scalars['ID']>;
   hasMarketing?: Maybe<Scalars['Boolean']>;
   publishedBeforeJoystream?: Maybe<Scalars['DateTime']>;
@@ -4090,7 +4376,7 @@ export type VideoUpdateInput = {
   isCensored?: Maybe<Scalars['Boolean']>;
   isExplicit?: Maybe<Scalars['Boolean']>;
   license?: Maybe<Scalars['ID']>;
-  media?: Maybe<Scalars['JSONObject']>;
+  media?: Maybe<Scalars['ID']>;
   mediaMetadata?: Maybe<Scalars['ID']>;
   createdInBlock?: Maybe<Scalars['Float']>;
   isFeatured?: Maybe<Scalars['Boolean']>;
@@ -4141,7 +4427,8 @@ export type VideoWhereInput = {
   duration_lt?: Maybe<Scalars['Int']>;
   duration_lte?: Maybe<Scalars['Int']>;
   duration_in?: Maybe<Array<Scalars['Int']>>;
-  thumbnailPhoto_json?: Maybe<Scalars['JSONObject']>;
+  thumbnailPhoto_eq?: Maybe<Scalars['ID']>;
+  thumbnailPhoto_in?: Maybe<Array<Scalars['ID']>>;
   language_eq?: Maybe<Scalars['ID']>;
   language_in?: Maybe<Array<Scalars['ID']>>;
   hasMarketing_eq?: Maybe<Scalars['Boolean']>;
@@ -4159,7 +4446,8 @@ export type VideoWhereInput = {
   isExplicit_in?: Maybe<Array<Scalars['Boolean']>>;
   license_eq?: Maybe<Scalars['ID']>;
   license_in?: Maybe<Array<Scalars['ID']>>;
-  media_json?: Maybe<Scalars['JSONObject']>;
+  media_eq?: Maybe<Scalars['ID']>;
+  media_in?: Maybe<Array<Scalars['ID']>>;
   mediaMetadata_eq?: Maybe<Scalars['ID']>;
   mediaMetadata_in?: Maybe<Array<Scalars['ID']>>;
   createdInBlock_eq?: Maybe<Scalars['Int']>;
@@ -4172,8 +4460,10 @@ export type VideoWhereInput = {
   isFeatured_in?: Maybe<Array<Scalars['Boolean']>>;
   channel?: Maybe<ChannelWhereInput>;
   category?: Maybe<VideoCategoryWhereInput>;
+  thumbnailPhoto?: Maybe<StorageDataObjectWhereInput>;
   language?: Maybe<LanguageWhereInput>;
   license?: Maybe<LicenseWhereInput>;
+  media?: Maybe<StorageDataObjectWhereInput>;
   mediaMetadata?: Maybe<VideoMediaMetadataWhereInput>;
   AND?: Maybe<Array<VideoWhereInput>>;
   OR?: Maybe<Array<VideoWhereInput>>;

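One practical effect of replacing the JSON `Asset` unions with `StorageDataObject` relations, visible in the generated types above: asset fields become filterable and sortable like any other relation. A hedged sketch using only fields introduced in this diff (the concrete values are illustrative):

```ts
import { VideoWhereInput, VideoOrderByInput } from './generated/schema'

// Videos whose media asset is a specific data object, with thumbnails
// matching an IPFS-hash prefix; media_eq and the nested filters replace
// the old media_json / thumbnailPhoto_json lookups.
const where: VideoWhereInput = {
  media_eq: '42',
  thumbnailPhoto: { ipfsHash_startsWith: 'Qm' },
}
const orderBy = [VideoOrderByInput.MediaDesc]
```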
+ 41 - 19
distributor-node/src/services/networking/query-node/queries/queries.graphql

@@ -1,3 +1,25 @@
+fragment DistributionBucketOperatorDetails on DistributionBucketOperator {
+  workerId
+  status
+}
+
+fragment DistributionBucketDetails on DistributionBucket {
+  id
+  operators {
+    ...DistributionBucketOperatorDetails
+  }
+}
+
+fragment StorageBucketDetails on StorageBucket {
+  id
+  operatorMetadata {
+    nodeEndpoint
+  }
+  operatorStatus {
+    __typename
+  }
+}
+
 fragment DataObjectDetails on StorageDataObject {
   id
   size
@@ -6,22 +28,12 @@ fragment DataObjectDetails on StorageDataObject {
   storageBag {
     storageAssignments {
       storageBucket {
-        id
-        operatorMetadata {
-          nodeEndpoint
-        }
-        operatorStatus {
-          __typename
-        }
+        ...StorageBucketDetails
       }
       }
     }
     distirbutionAssignments {
       distributionBucket {
-        operators {
-          workerId
-          status
-        }
+        ...DistributionBucketDetails
       }
       }
     }
   }
   }
   }
 }
 
-query getActiveStorageBucketOperatorsData {
-  storageBuckets(
-    where: {
-      operatorStatus_json: { isTypeOf_eq: "StorageBucketOperatorStatusActive" }
-      operatorMetadata: { nodeEndpoint_contains: "http" }
+fragment StorageBucketsConnectionFields on StorageBucketConnection {
+  edges {
+    node {
+      ...StorageBucketOperatorFields
     }
     }
+  }
+  pageInfo {
+    hasNextPage
+    endCursor
+  }
+}
+
+query getActiveStorageBucketOperatorsData($limit: Int!, $lastCursor: String) {
+  storageBucketsConnection(
+    first: $limit
+    after: $lastCursor
+    where: { operatorStatus_json: { isTypeOf_eq: "StorageBucketOperatorStatusActive" } }
   ) {
   ) {
+    ...StorageBucketsConnectionFields
   }
   }
 }
+ 36 - 15
distributor-node/src/services/networking/runtime/api.ts

@@ -2,7 +2,7 @@ import { types } from '@joystream/types/'
 import { ApiPromise, WsProvider, SubmittableResult } from '@polkadot/api'
 import { ApiPromise, WsProvider, SubmittableResult } from '@polkadot/api'
 import { SubmittableExtrinsic, AugmentedEvent } from '@polkadot/api/types'
 import { KeyringPair } from '@polkadot/keyring/types'
+import { Balance, Call } from '@polkadot/types/interfaces'
 import { formatBalance } from '@polkadot/util'
 import { formatBalance } from '@polkadot/util'
 import { IEvent } from '@polkadot/types/types'
 import { DispatchError } from '@polkadot/types/interfaces/system'
   static async create(
   static async create(
     logging: LoggingService,
     apiUri: string,
+    metadataCache?: Record<string, string>
   ): Promise<RuntimeApi> {
   ): Promise<RuntimeApi> {
     const { api, chainType } = await RuntimeApi.initApi(apiUri, metadataCache)
     return new RuntimeApi(logging, api, chainType.isDevelopment || chainType.isLocal)
   }
 
-  private static async initApi(apiUri: string, metadataCache?: Record<string, any>) {
+  private static async initApi(apiUri: string, metadataCache?: Record<string, string>) {
     const wsProvider: WsProvider = new WsProvider(apiUri)
     const api = await ApiPromise.create({ provider: wsProvider, types, metadata: metadataCache })
 
@@ -100,8 +100,22 @@
     return (event as unknown) as EventType
   }
 
+  private formatDispatchError(err: DispatchError): string {
+    try {
+      const { name, docs } = this._api.registry.findMetaError(err.asModule)
+      return `${name} (${docs.join(', ')})`
+    } catch (e) {
+      return err.toString()
+    }
+  }
+
   sendExtrinsic(keyPair: KeyringPair, tx: SubmittableExtrinsic<'promise'>): Promise<SubmittableResult> {
-    this.logger.info(`Sending ${tx.method.section}.${tx.method.method} extrinsic from ${keyPair.address}`)
+    let txName = `${tx.method.section}.${tx.method.method}`
+    if (txName === 'sudo.sudo') {
+      const innerCall = tx.args[0] as Call
+      txName = `sudo.sudo(${innerCall.section}.${innerCall.method})`
+    }
+    this.logger.info(`Sending ${txName} extrinsic from ${keyPair.address}`)
     return new Promise((resolve, reject) => {
       let unsubscribe: () => void
       tx.signAndSend(keyPair, {}, (result) => {
@@ -117,19 +131,26 @@ export class RuntimeApi {
             .forEach(({ event }) => {
               if (event.method === 'ExtrinsicFailed') {
                 const dispatchError = event.data[0] as DispatchError
-                let errorMsg = dispatchError.toString()
-                if (dispatchError.isModule) {
-                  try {
-                    const { name, docs } = this._api.registry.findMetaError(dispatchError.asModule)
-                    errorMsg = `${name} (${docs.join(', ')})`
-                  } catch (e) {
-                    // This probably means we don't have this error in the metadata
-                    // In this case - continue (we'll just display dispatchError.toString())
+                reject(
+                  new ExtrinsicFailedError(`Extrinsic execution error: ${this.formatDispatchError(dispatchError)}`)
+                )
+              } else if (event.method === 'ExtrinsicSuccess') {
+                const sudidEvent = this.findEvent(result, 'sudo', 'Sudid')
+
+                if (sudidEvent) {
+                  const [dispatchResult] = sudidEvent.data
+                  if (dispatchResult.isOk) {
+                    resolve(result)
+                  } else {
+                    reject(
+                      new ExtrinsicFailedError(
+                        `Sudo extrinsic execution error! ${this.formatDispatchError(dispatchResult.asErr)}`
+                      )
+                    )
                   }
+                } else {
+                  resolve(result)
                 }
-                reject(new ExtrinsicFailedError(`Extrinsic execution error: ${errorMsg}`))
-              } else if (event.method === 'ExtrinsicSuccess') {
-                resolve(result)
               }
             })
         } else if (result.isError) {
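
Note: the `ExtrinsicSuccess` branch now inspects the `sudo.Sudid` event, because a `sudo.sudo(...)` extrinsic reports success even when the wrapped call fails; the failure only surfaces in the `Sudid` event's `DispatchResult`. A usage sketch under the assumption that `RuntimeApi` and `ExtrinsicFailedError` are both exported from this module:

```ts
import { KeyringPair } from '@polkadot/keyring/types'
import { SubmittableExtrinsic } from '@polkadot/api/types'
// Assumption: both names are exported here (ExtrinsicFailedError is referenced above)
import { RuntimeApi, ExtrinsicFailedError } from './api'

async function sendAndReport(
  runtimeApi: RuntimeApi,
  keyPair: KeyringPair,
  tx: SubmittableExtrinsic<'promise'>
): Promise<void> {
  try {
    // Resolves on ExtrinsicSuccess and, for sudo.sudo(...), only on Sudid(Ok)
    await runtimeApi.sendExtrinsic(keyPair, tx)
  } catch (e) {
    if (e instanceof ExtrinsicFailedError) {
      // Covers both ExtrinsicFailed and a failed inner sudo call (Sudid(Err))
      console.error(e.message)
      return
    }
    throw e
  }
}
```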

+ 70 - 26
distributor-node/src/services/parsers/BagIdParserService.ts

@@ -1,57 +1,101 @@
-import { BagId, StaticBagId } from '@joystream/types/storage'
+import { BagId, DynamicBagTypeKey } from '@joystream/types/storage'
 import { createType } from '@joystream/types'
 import { WorkingGroup, WorkingGroupKey } from '@joystream/types/common'
+import { CLIError } from '@oclif/errors'
+import ExitCodes from '../../command-base/ExitCodes'
 
 export class BagIdParserService {
-  public parseBagId(bagId: string): BagId {
+  private bagId: string
+  private bagIdParts: [string, string, string?]
+
+  public constructor(bagId: string) {
+    this.bagId = bagId
     const bagIdParts = bagId.toLowerCase().split(':')
 
     if (bagIdParts.length > 3 || bagIdParts.length < 2) {
-      throw new Error(`Invalid bagId: ${bagId}`)
+      this.invalidBagId()
     }
 
-    if (bagIdParts[0] === 'static') {
-      return this.parseStaticBagId(bagId, bagIdParts)
+    const [bagType, bagSubtype, bagOptIdentifier] = bagIdParts
+    this.bagIdParts = [bagType, bagSubtype, bagOptIdentifier]
+  }
+
+  private invalidBagId(reason?: string): never {
+    throw new CLIError(`Invalid bagId: ${this.bagId}.${reason ? ` ${reason}` : ''}`, {
+      exit: ExitCodes.InvalidInput,
+    })
+  }
+
+  public parse(): BagId {
+    const [bagType] = this.bagIdParts
+    if (bagType === 'static') {
+      return this.parseStaticBagId()
     }
 
-    if (bagIdParts[0] === 'dynamic') {
+    if (bagType === 'dynamic') {
       return this.parseDynamicBagId()
     }
 
-    throw new Error(`Invalid bagId: ${bagId}`)
+    this.invalidBagId(`Unrecognized bag type: ${bagType}.`)
   }
 
-  public parseStaticBagId(bagId: string, bagIdParts: string[]): BagId {
+  private parseStaticBagId(): BagId {
+    const [, staticBagType, optGroupName] = this.bagIdParts
     // Try to construct static council bag ID.
-    if (bagIdParts[1] === 'council') {
-      if (bagIdParts.length === 2) {
-        const staticBagId = createType<StaticBagId, 'StaticBagId'>('StaticBagId', 'Council')
-        const constructedBagId = createType<BagId, 'BagId'>('BagId', {
-          'Static': staticBagId,
+    if (staticBagType === 'council') {
+      if (optGroupName === undefined) {
+        return createType<BagId, 'BagId'>('BagId', {
+          'Static': 'Council',
         })
-
-        return constructedBagId
       }
+
+      this.invalidBagId(`Unexpected identifier after "static:council": ${optGroupName}.`)
     }
 
     // Try to construct static working group bag ID.
-    if (bagIdParts[1] === 'wg' && bagIdParts.length === 3) {
-      const groups = Object.keys(WorkingGroup.typeDefinitions) as WorkingGroupKey[]
-      const inputGroup = bagIdParts[2]
+    if (staticBagType === 'wg') {
+      if (optGroupName) {
+        const groups = Object.keys(WorkingGroup.typeDefinitions) as WorkingGroupKey[]
 
-      if (groups.find((g) => g.toLocaleLowerCase() === inputGroup)) {
-        return createType<BagId, 'BagId'>('BagId', {
-          Static: {
-            WorkingGroup: inputGroup as WorkingGroupKey,
-          },
-        })
+        if (groups.find((g) => g.toLowerCase() === optGroupName)) {
+          return createType<BagId, 'BagId'>('BagId', {
+            Static: {
+              WorkingGroup: optGroupName as WorkingGroupKey,
+            },
+          })
+        }
+        this.invalidBagId(`Unrecognized working group name: ${optGroupName}`)
        }
+
+      this.invalidBagId(`Missing working group name.`)
     }
 
-    throw new Error(`Invalid bagId: ${bagId}`)
+    this.invalidBagId(`Unrecognized static bag type: ${staticBagType}.`)
   }
 
   public parseDynamicBagId(): BagId {
-    throw new Error('Function not implemented.')
+    const [, dynamicBagType, entityIdStr] = this.bagIdParts
+    if (entityIdStr) {
+      const entityId = parseInt(entityIdStr)
+
+      // Verify successful entity ID parsing
+      if (!isNaN(entityId)) {
+        const resultByType: { [key in DynamicBagTypeKey]: BagId } = {
+          Member: createType<BagId, 'BagId'>('BagId', { Dynamic: { Member: entityId } }),
+          Channel: createType<BagId, 'BagId'>('BagId', { Dynamic: { Channel: entityId } }),
+        }
+
+        for (const [type, result] of Object.entries(resultByType)) {
+          if (type.toLowerCase() === dynamicBagType) {
+            return result
+          }
+        }
+
+        this.invalidBagId(`Unrecognized dynamic bag type: ${dynamicBagType}.`)
+      }
+      this.invalidBagId(`Invalid entity id: ${entityIdStr}.`)
+    }
+
+    this.invalidBagId(`Missing entity id.`)
   }
 }
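
Note: the parser is now constructed per bag ID string, and `parse()` raises a `CLIError` with `ExitCodes.InvalidInput` plus a reason on malformed input, rather than a bare `Error`. A usage sketch; the group name `storage` is only an example of a valid `WorkingGroupKey`:

```ts
import { BagId } from '@joystream/types/storage'
import { BagIdParserService } from './BagIdParserService'

// Accepted formats (case-insensitive):
//   static:council
//   static:wg:<workingGroupName>
//   dynamic:<member|channel>:<entityId>
const councilBag: BagId = new BagIdParserService('static:council').parse()
const wgBag: BagId = new BagIdParserService('static:wg:storage').parse()
const channelBag: BagId = new BagIdParserService('dynamic:channel:7').parse()
// Malformed input now throws a CLIError with ExitCodes.InvalidInput and a reason,
// e.g. new BagIdParserService('static:wg').parse() -> "Missing working group name."
```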

+ 72 - 33
distributor-node/src/services/parsers/ConfigParserService.ts

@@ -1,11 +1,11 @@
-import { ValidationService } from '../validation/ValidationService'
+import { ValidationError, ValidationService } from '../validation/ValidationService'
 import { Config } from '../../types'
 import fs from 'fs'
 import path from 'path'
 import YAML from 'yaml'
 import _ from 'lodash'
 import configSchema, { bytesizeUnits } from '../../schemas/configSchema'
-import { JSONSchema4 } from 'json-schema'
+import { JSONSchema4, JSONSchema4TypeName } from 'json-schema'
 
 const MIN_CACHE_SIZE = 20 * Math.pow(1024, 3)
 
@@ -36,51 +36,90 @@ export class ConfigParserService {
   }
 
   private schemaTypeOf(schema: JSONSchema4, path: string[]): JSONSchema4['type'] {
-    if (path.length === 0) {
-      return undefined
-    }
     if (schema.properties && schema.properties[path[0]]) {
       const item = schema.properties[path[0]]
-      if (item.type === 'object') {
+      if (path.length > 1) {
         return this.schemaTypeOf(item, path.slice(1))
-      } else {
-        return item.type
+      }
+      if (item.oneOf) {
+        const validTypesSet = new Set<JSONSchema4TypeName>()
+        item.oneOf.forEach(
+          (s) =>
+            Array.isArray(s.type)
+              ? s.type.forEach((t) => validTypesSet.add(t))
+              : s.type
+              ? validTypesSet.add(s.type)
+              : undefined // do nothing
+        )
+        return Array.from(validTypesSet)
+      }
+      return item.type
+    }
+  }
+
+  private setConfigEnvValue(
+    config: Record<string, unknown>,
+    path: string[],
+    envKey: string,
+    envValue: string | undefined
+  ) {
+    const schemaType = this.schemaTypeOf(configSchema, path)
+    const possibleTypes = Array.isArray(schemaType) ? schemaType : [schemaType]
+
+    for (const i in possibleTypes) {
+      try {
+        switch (possibleTypes[i]) {
+          case undefined:
+            // Invalid key - skip
+            break
+          case 'integer':
+            _.set(config, path, parseInt(envValue || ''))
+            break
+          case 'number':
+            _.set(config, path, parseFloat(envValue || ''))
+            break
+          case 'boolean':
+            _.set(config, path, !!envValue)
+            break
+          case 'array':
+          case 'object':
+            try {
+              const parsed = JSON.parse(envValue || 'undefined')
+              _.set(config, path, parsed)
+            } catch (e) {
+              throw new ValidationError(`Invalid env value of ${envKey}: Not a valid JSON`, null)
+            }
+            break
+          default:
+            _.set(config, path, envValue)
+        }
+        const errors = this.validator.errorsByProperty('Config', path.join('.'), config)
+        if (errors) {
+          throw new ValidationError(`Invalid env value of ${envKey}`, errors)
+        }
+        return
+      } catch (e) {
+        // Only throw if there are no more possible types to test against
+        if (parseInt(i) === possibleTypes.length - 1) {
+          throw e
+        }
       }
     }
   }
 
   private mergeEnvConfigWith(config: Record<string, unknown>) {
     Object.entries(process.env)
-      .filter(([k]) => k.startsWith('JOYSTREAM_DISTRIBUTOR__'))
-      .forEach(([k, v]) => {
-        const path = k
+      .filter(([envKey]) => envKey.startsWith('JOYSTREAM_DISTRIBUTOR__'))
+      .forEach(([envKey, envValue]) => {
+        const configPath = envKey
           .replace('JOYSTREAM_DISTRIBUTOR__', '')
           .split('__')
-          .map((k) => _.camelCase(k))
-
-        const valueType = this.schemaTypeOf(configSchema, path)
-        if (valueType === undefined) {
-          // Invalid key - skip
-        } else if (valueType === 'integer') {
-          _.set(config, path, parseInt(v || ''))
-        } else if (valueType === 'number') {
-          _.set(config, path, parseFloat(v || ''))
-        } else if (valueType === 'boolean') {
-          _.set(config, path, !!v)
-        } else if (valueType === 'array') {
-          try {
-            const parsed = JSON.parse(v || 'undefined')
-            _.set(config, path, parsed)
-          } catch (e) {
-            throw new Error(`Env value ${k} is not a valid JSON array`)
-          }
-        } else {
-          _.set(config, path, v)
-        }
+          .map((key) => _.camelCase(key))
+        this.setConfigEnvValue(config, configPath, envKey, envValue)
       })
   }
 
-  public loadConfing(configPath: string): Config {
+  public loadConfig(configPath: string): Config {
     let inputConfig: Record<string, unknown> = {}
     // Try to load config from file if exists
     if (fs.existsSync(configPath)) {
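
Note: `setConfigEnvValue` now consults the schema's `oneOf` variants, trying each declared type in turn and keeping the first coercion that passes per-property validation. The env-key-to-config-path mapping itself is unchanged; the sketch below replays it with an illustrative key (the exact config paths depend on `configSchema`):

```ts
import _ from 'lodash'

// Illustrative key; any JOYSTREAM_DISTRIBUTOR__-prefixed variable follows the same rules
const envKey = 'JOYSTREAM_DISTRIBUTOR__LIMITS__STORAGE'
const envValue = '50G'

// Double underscores delimit nesting; each segment is camelCased
const configPath = envKey
  .replace('JOYSTREAM_DISTRIBUTOR__', '')
  .split('__')
  .map((k) => _.camelCase(k)) // -> ['limits', 'storage']

const config: Record<string, unknown> = {}
_.set(config, configPath, envValue)
// -> { limits: { storage: '50G' } }
```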

+ 21 - 11
distributor-node/src/services/parsers/errors.ts

@@ -1,14 +1,24 @@
-import { AxiosError } from 'axios'
+import { AxiosError, AxiosResponse } from 'axios'
 
-export function parseAxiosError(e: AxiosError) {
-  return {
-    message: e.message,
-    stack: e.stack,
-    response: {
-      data: e.response?.data,
-      status: e.response?.status,
-      statusText: e.response?.statusText,
-      headers: e.response?.headers,
-    },
+type ParsedAxiosErrorResponse = Pick<AxiosResponse, 'data' | 'status' | 'statusText' | 'headers'>
+
+type ParsedAxiosError = Pick<AxiosError, 'message' | 'stack'> & {
+  response?: ParsedAxiosErrorResponse
+}
+
+export function parseAxiosError({ message, stack, response }: AxiosError): ParsedAxiosError {
+  const parsedError: ParsedAxiosError = {
+    message,
+    stack,
+  }
+  if (response) {
+    const { data, status, statusText, headers } = response
+    parsedError.response = {
+      data,
+      status,
+      statusText,
+      headers,
+    }
   }
+  return parsedError
 }
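
Note: `parseAxiosError` now returns a typed, serialization-safe object and omits `response` entirely when no response was received (e.g. a connection error), instead of emitting a shell of `undefined` fields. A sketch of the call-site shape, assuming an axios version that ships the `isAxiosError` type guard:

```ts
import axios from 'axios'
import { parseAxiosError } from './errors'

async function probeEndpoint(endpoint: string): Promise<void> {
  try {
    await axios.get(endpoint)
  } catch (e) {
    if (axios.isAxiosError(e)) {
      // Plain, serializable object; `response` is present only if one was received
      console.error(JSON.stringify(parseAxiosError(e), null, 2))
      return
    }
    throw e
  }
}
```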

+ 14 - 8
distributor-node/src/services/validation/ValidationService.ts

@@ -1,12 +1,16 @@
 import Ajv from 'ajv'
 import { SchemaKey, schemas, TypeBySchemaKey } from '../../schemas'
 
-class ValidationError extends Error {
-  public readonly errors: string[]
+export class ValidationError extends Error {
+  public readonly errors: Ajv['errors']
+  public readonly errorMessages: string[]
 
-  public constructor(message: string, errors: string[]) {
-    super(`${message}\n\n${errors.join('\n')}`)
+  public constructor(message: string, errors: Ajv['errors']) {
+    const errorMessages: string[] = []
+    errors?.forEach((e) => errorMessages.push(`${e.dataPath}: ${e.message} (${JSON.stringify(e.params)})`))
+    super(`${message}\n\n${errorMessages.join('\n')}`)
     this.errors = errors
+    this.errorMessages = errorMessages
   }
 }
 
@@ -20,11 +24,13 @@ export class ValidationService {
   validate<SK extends SchemaKey>(schemaKey: SK, input: unknown): TypeBySchemaKey<SK> {
     const valid = this.ajv.validate(schemaKey, input) as boolean
     if (!valid) {
-      throw new ValidationError(
-        `${schemaKey} is not valid`,
-        this.ajv.errors?.map((e) => `${e.dataPath}: ${e.message} (${JSON.stringify(e.params)})`) || []
-      )
+      throw new ValidationError(`${schemaKey} is not valid`, this.ajv.errors)
     }
     return input as TypeBySchemaKey<SK>
   }
+
+  errorsByProperty<T>(schemaKey: SchemaKey, path: string, input: T): Ajv['errors'] {
+    this.ajv.validate(schemaKey, input)
+    return this.ajv.errors?.filter((e) => e.dataPath === `/${path}` || e.dataPath.startsWith(`/${path}/`))
+  }
 }
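
Note: the new `errorsByProperty` helper validates the whole input but returns only the Ajv errors scoped to one property path, which is what lets `setConfigEnvValue` above reject a single bad env override without re-reporting unrelated config problems. A sketch; the zero-argument constructor and the `limits.storage` path are assumptions for illustration:

```ts
import { ValidationService } from './ValidationService'

// Assumption: the constructor registers all schemas (including 'Config') with Ajv
const validator = new ValidationService()

const candidateConfig: Record<string, unknown> = {
  limits: { storage: -1 }, // deliberately invalid value, for illustration only
}

// Validates the whole object, but returns only errors under the given property path
const errors = validator.errorsByProperty('Config', 'limits.storage', candidateConfig)
if (errors?.length) {
  console.error(errors.map((e) => `${e.dataPath}: ${e.message}`).join('\n'))
}
```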

+ 81 - 0
distributor-node/src/types/content.ts

@@ -0,0 +1,81 @@
+import { AxiosResponse } from 'axios'
+import { Readable } from 'stream'
+
+export type StorageNodeEndpointData = {
+  bucketId: string
+  endpoint: string
+}
+
+export type DataObjectAccessPoints = {
+  storageNodes: StorageNodeEndpointData[]
+}
+
+export type DataObjectData = {
+  objectId: string
+  size: number
+  contentHash: string
+  accessPoints?: DataObjectAccessPoints
+}
+
+export type StorageNodeDownloadResponse = AxiosResponse<Readable>
+
+export type DownloadData = {
+  startAt?: number
+  objectData: DataObjectData
+}
+
+export type DataObjectInfo = {
+  exists: boolean
+  isSupported: boolean
+  data?: DataObjectData
+}
+
+export enum PendingDownloadStatus {
+  Waiting = 'Waiting',
+  LookingForSource = 'LookingForSource',
+  Downloading = 'Downloading',
+}
+
+export type PendingDownloadData = {
+  objectSize: number
+  status: PendingDownloadStatus
+  promise: Promise<StorageNodeDownloadResponse>
+}
+
+export enum ObjectStatusType {
+  Available = 'Available',
+  PendingDownload = 'PendingDownload',
+  NotFound = 'NotFound',
+  NotSupported = 'NotSupported',
+  Missing = 'Missing',
+}
+
+export type ObjectStatusAvailable = {
+  type: ObjectStatusType.Available
+  path: string
+}
+
+export type ObjectStatusPendingDownload = {
+  type: ObjectStatusType.PendingDownload
+  pendingDownloadData: PendingDownloadData
+}
+
+export type ObjectStatusNotFound = {
+  type: ObjectStatusType.NotFound
+}
+
+export type ObjectStatusNotSupported = {
+  type: ObjectStatusType.NotSupported
+}
+
+export type ObjectStatusMissing = {
+  type: ObjectStatusType.Missing
+  objectData: DataObjectData
+}
+
+export type ObjectStatus =
+  | ObjectStatusAvailable
+  | ObjectStatusPendingDownload
+  | ObjectStatusNotFound
+  | ObjectStatusNotSupported
+  | ObjectStatusMissing
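
Note: `ObjectStatus` is a discriminated union keyed on `type`, so consumers such as the public API controller can switch exhaustively and have the compiler flag any unhandled variant. A sketch:

```ts
import { ObjectStatus, ObjectStatusType } from './content'

function describeStatus(status: ObjectStatus): string {
  switch (status.type) {
    case ObjectStatusType.Available:
      return `served from ${status.path}`
    case ObjectStatusType.PendingDownload:
      return `download in progress (${status.pendingDownloadData.status})`
    case ObjectStatusType.NotFound:
      return 'no such data object'
    case ObjectStatusType.NotSupported:
      return 'not distributed by this node'
    case ObjectStatusType.Missing:
      return `accepted but not fetched yet (size: ${status.objectData.size})`
    default: {
      // Exhaustiveness check: fails to compile if a variant is unhandled
      const exhaustive: never = status
      return exhaustive
    }
  }
}
```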

+ 1 - 1
distributor-node/src/types/generated/ConfigJson.d.ts

@@ -112,7 +112,7 @@ export interface DistributorNodeConfiguration {
     cacheCleanup: number
   }
   /**
-   * Distributor node http server port
+   * Distributor node http api port
    */
   port: number
   /**

+ 1 - 2
distributor-node/src/types/index.ts

@@ -1,5 +1,4 @@
 export * from './api'
 export * from './common'
 export * from './config'
-export * from './storage'
-export * from './networking'
+export * from './content'

+ 0 - 10
distributor-node/src/types/networking.ts

@@ -1,10 +0,0 @@
-import { AxiosResponse } from 'axios'
-import { Readable } from 'stream'
-import { DataObjectData } from './storage'
-
-export type StorageNodeDownloadResponse = AxiosResponse<Readable>
-
-export type DownloadData = {
-  startAt?: number
-  objectData: DataObjectData
-}

+ 0 - 21
distributor-node/src/types/storage.ts

@@ -1,21 +0,0 @@
-export type StorageNodeEndpointData = {
-  bucketId: string
-  endpoint: string
-}
-
-export type DataObjectAccessPoints = {
-  storageNodes: StorageNodeEndpointData[]
-}
-
-export type DataObjectData = {
-  objectId: string
-  size: number
-  contentHash: string
-  accessPoints?: DataObjectAccessPoints
-}
-
-export type DataObjectInfo = {
-  exists: boolean
-  isSupported: boolean
-  data?: DataObjectData
-}

+ 0 - 17
distributor-node/test/commands/hello.test.ts

@@ -1,17 +0,0 @@
-import { expect, test } from '@oclif/test'
-
-describe('hello', () => {
-  test
-    .stdout()
-    .command(['hello'])
-    .it('runs hello', (ctx) => {
-      expect(ctx.stdout).to.contain('hello world')
-    })
-
-  test
-    .stdout()
-    .command(['hello', '--name', 'jeff'])
-    .it('runs hello --name jeff', (ctx) => {
-      expect(ctx.stdout).to.contain('hello jeff')
-    })
-})

+ 0 - 5
distributor-node/test/mocha.opts

@@ -1,5 +0,0 @@
---require ts-node/register
---watch-extensions ts
---recursive
---reporter spec
---timeout 5000

+ 0 - 7
distributor-node/test/tsconfig.json

@@ -1,7 +0,0 @@
-{
-  "extends": "../tsconfig",
-  "compilerOptions": {
-    "noEmit": true
-  },
-  "references": [{ "path": ".." }]
-}

+ 0 - 2
distributor-node/tsconfig.json

@@ -13,8 +13,6 @@
     "types" : [ "node", "mocha" ],
     "resolveJsonModule": true,
     "strictNullChecks": true,
-    "noUnusedLocals": false, // FIXME: Temporarly disabled during initial development
-    "noUnusedParameters": false, // FIXME: Temporarly disabled during initial development
     "paths": {
       "@polkadot/types/augment": ["../types/augment-codec/augment-types.ts"],
       "@polkadot/api/augment": ["../types/augment-codec/augment-api.ts"]

+ 1 - 1
docker-compose.yml

@@ -76,7 +76,7 @@ services:
     #   JOYSTREAM_DISTRIBUTOR__LOG__ELASTIC: "off"
     #   JOYSTREAM_DISTRIBUTOR__LIMITS__STORAGE: 50G
     #   JOYSTREAM_DISTRIBUTOR__PORT: 1234
-    #   JOYSTREAM_DISTRIBUTOR__KEYS="[{\"suri\":\"//Bob\"}]"
+    #   JOYSTREAM_DISTRIBUTOR__KEYS: "[{\"suri\":\"//Bob\"}]"
     #   JOYSTREAM_DISTRIBUTOR__BUCKETS: "[1,2]"
     #   JOYSTREAM_DISTRIBUTOR__WORKER_ID: 0
     command: ["start"]

+ 1 - 15
yarn.lock

@@ -3873,20 +3873,6 @@
     fs-extra "^9.0.1"
     moment "^2.22.1"
 
-"@oclif/plugin-help@^2":
-  version "2.2.3"
-  resolved "https://registry.yarnpkg.com/@oclif/plugin-help/-/plugin-help-2.2.3.tgz#b993041e92047f0e1762668aab04d6738ac06767"
-  integrity sha512-bGHUdo5e7DjPJ0vTeRBMIrfqTRDBfyR5w0MP41u0n3r7YG5p14lvMmiCXxi6WDaP2Hw5nqx3PnkAIntCKZZN7g==
-  dependencies:
-    "@oclif/command" "^1.5.13"
-    chalk "^2.4.1"
-    indent-string "^4.0.0"
-    lodash.template "^4.4.0"
-    string-width "^3.0.0"
-    strip-ansi "^5.0.0"
-    widest-line "^2.0.1"
-    wrap-ansi "^4.0.0"
-
 "@oclif/plugin-help@^3", "@oclif/plugin-help@^3.2.0", "@oclif/plugin-help@^3.2.2":
   version "3.2.2"
   resolved "https://registry.yarnpkg.com/@oclif/plugin-help/-/plugin-help-3.2.2.tgz#063ee08cee556573a5198fbdfdaa32796deba0ed"
@@ -32060,7 +32046,7 @@ wide-align@1.1.3, wide-align@^1.1.0:
   dependencies:
     string-width "^1.0.2 || 2"
 
-widest-line@^2.0.0, widest-line@^2.0.1:
+widest-line@^2.0.0:
   version "2.0.1"
   resolved "https://registry.yarnpkg.com/widest-line/-/widest-line-2.0.1.tgz#7438764730ec7ef4381ce4df82fb98a53142a3fc"
   integrity sha512-Ba5m9/Fa4Xt9eb2ELXt77JxVDV8w7qQrH0zS/TWSJdLyAwQjWoOzpzj5lwVftDz6n/EOu3tNACS84v509qwnJA==