
Merge pull request #3239 from Lezek123/olympia-migration-scripts

Olympia: Update migration scripts
Mokhtar Naamani 3 years ago
commit b023b94af0
26 changed files with 6518 additions and 3 deletions
  1. utils/migration-scripts/README.md (+123 -2)
  2. utils/migration-scripts/package.json (+2 -1)
  3. utils/migration-scripts/src/commands/giza-olympia/createContentDirectorySnapshot.ts (+32 -0)
  4. utils/migration-scripts/src/commands/giza-olympia/createMembershipsSnapshot.ts (+32 -0)
  5. utils/migration-scripts/src/commands/giza-olympia/fetchAllObjects.ts (+42 -0)
  6. utils/migration-scripts/src/commands/giza-olympia/migrateContent.ts (+77 -0)
  7. utils/migration-scripts/src/commands/giza-olympia/migrateMembers.ts (+51 -0)
  8. utils/migration-scripts/src/giza-olympia/AssetsBase.ts (+94 -0)
  9. utils/migration-scripts/src/giza-olympia/BaseMigration.ts (+194 -0)
  10. utils/migration-scripts/src/giza-olympia/CategoryMigration.ts (+21 -0)
  11. utils/migration-scripts/src/giza-olympia/ChannelCategoriesMigration.ts (+78 -0)
  12. utils/migration-scripts/src/giza-olympia/ChannelsMigration.ts (+199 -0)
  13. utils/migration-scripts/src/giza-olympia/ContentHash.ts (+22 -0)
  14. utils/migration-scripts/src/giza-olympia/ContentMigration.ts (+99 -0)
  15. utils/migration-scripts/src/giza-olympia/DownloadManager.ts (+158 -0)
  16. utils/migration-scripts/src/giza-olympia/MembershipMigration.ts (+102 -0)
  17. utils/migration-scripts/src/giza-olympia/SnapshotManager.ts (+106 -0)
  18. utils/migration-scripts/src/giza-olympia/UploadManager.ts (+209 -0)
  19. utils/migration-scripts/src/giza-olympia/UploadMigration.ts (+20 -0)
  20. utils/migration-scripts/src/giza-olympia/VideoCategoriesMigration.ts (+78 -0)
  21. utils/migration-scripts/src/giza-olympia/VideosMigration.ts (+225 -0)
  22. utils/migration-scripts/src/giza-olympia/giza-query-node/api.ts (+212 -0)
  23. utils/migration-scripts/src/giza-olympia/giza-query-node/codegen.yml (+33 -0)
  24. utils/migration-scripts/src/giza-olympia/giza-query-node/generated/queries.ts (+386 -0)
  25. utils/migration-scripts/src/giza-olympia/giza-query-node/generated/schema.ts (+3715 -0)
  26. utils/migration-scripts/src/giza-olympia/giza-query-node/queries/queries.graphql (+208 -0)

+ 123 - 2
utils/migration-scripts/README.md

@@ -19,7 +19,7 @@ $ npm install -g migration-scripts
 $ migration-scripts COMMAND
 running command...
 $ migration-scripts (-v|--version|version)
-migration-scripts/0.1.0 linux-x64 node-v14.16.1
+migration-scripts/0.1.0 linux-x64 node-v14.18.0
 $ migration-scripts --help [COMMAND]
 USAGE
   $ migration-scripts COMMAND
@@ -28,10 +28,128 @@ USAGE
 <!-- usagestop -->
 # Commands
 <!-- commands -->
+* [`migration-scripts giza-olympia:createContentDirectorySnapshot`](#migration-scripts-giza-olympiacreatecontentdirectorysnapshot)
+* [`migration-scripts giza-olympia:createMembershipsSnapshot`](#migration-scripts-giza-olympiacreatemembershipssnapshot)
+* [`migration-scripts giza-olympia:fetchAllObjects`](#migration-scripts-giza-olympiafetchallobjects)
+* [`migration-scripts giza-olympia:migrateContent`](#migration-scripts-giza-olympiamigratecontent)
+* [`migration-scripts giza-olympia:migrateMembers`](#migration-scripts-giza-olympiamigratemembers)
 * [`migration-scripts help [COMMAND]`](#migration-scripts-help-command)
 * [`migration-scripts sumer-giza:migrateContent`](#migration-scripts-sumer-gizamigratecontent)
 * [`migration-scripts sumer-giza:retryFailedUploads`](#migration-scripts-sumer-gizaretryfaileduploads)
 
+## `migration-scripts giza-olympia:createContentDirectorySnapshot`
+
+```
+USAGE
+  $ migration-scripts giza-olympia:createContentDirectorySnapshot
+
+OPTIONS
+  -o, --output=output          Output file path
+  --queryNodeUri=queryNodeUri  [default: https://hydra.joystream.org/graphql] Giza query node uri
+```
+
+_See code: [src/commands/giza-olympia/createContentDirectorySnapshot.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/giza-olympia/createContentDirectorySnapshot.ts)_
+
+## `migration-scripts giza-olympia:createMembershipsSnapshot`
+
+```
+USAGE
+  $ migration-scripts giza-olympia:createMembershipsSnapshot
+
+OPTIONS
+  -o, --output=output          Output file path
+  --queryNodeUri=queryNodeUri  [default: https://hydra.joystream.org/graphql] Giza query node uri
+```
+
+_See code: [src/commands/giza-olympia/createMembershipsSnapshot.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/giza-olympia/createMembershipsSnapshot.ts)_
+
+## `migration-scripts giza-olympia:fetchAllObjects`
+
+```
+USAGE
+  $ migration-scripts giza-olympia:fetchAllObjects
+
+OPTIONS
+  --continously                      Whether the script should run continuously
+  --dataDir=dataDir                  [default: /tmp/joystream/giza-olympia-migration] Directory for storing data objects
+
+  --idleTime=idleTime                [default: 300] Time (in seconds) to remain idle in case no new data objects were
+                                     found
+
+  --objectsPerBatch=objectsPerBatch  [default: 20] Max. number of storage objects to fetch simultaneously
+
+  --queryNodeUri=queryNodeUri        [default: https://hydra.joystream.org/graphql] Giza query node uri
+```
+
+_See code: [src/commands/giza-olympia/fetchAllObjects.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/giza-olympia/fetchAllObjects.ts)_
+
+## `migration-scripts giza-olympia:migrateContent`
+
+```
+USAGE
+  $ migration-scripts giza-olympia:migrateContent
+
+OPTIONS
+  -c, --channelIds=channelIds                                      (required) Channel ids to migrate
+  --channelBatchSize=channelBatchSize                              [default: 20] Channel batch size
+
+  --dataDir=dataDir                                                (required) Directory where data objects to upload are
+                                                                   stored
+
+  --excludeVideoIds=excludeVideoIds                                [default: ] Video ids to exclude from migration
+
+  --forceChannelOwnerMemberId=forceChannelOwnerMemberId            Can be used to force a specific channel owner for all
+                                                                   channels, making it easy to test the script in a dev
+                                                                   environment
+
+  --membershipsMigrationResultPath=membershipsMigrationResultPath  (required) JSON artifact produced by membership
+                                                                   migration
+
+  --migrationStatePath=migrationStatePath                          [default:
+                                                                   /home/leszek/projects/joystream/joystream-ws-2/utils/
+                                                                   migration-scripts/results/giza-olympia] Path to
+                                                                   migration results directory
+
+  --snapshotFilePath=snapshotFilePath                              (required) Path to giza content directory snapshot
+                                                                   (json)
+
+  --sudoUri=sudoUri                                                [default: //Alice] Sudo key Substrate uri
+
+  --uploadSpBucketId=uploadSpBucketId                              [default: 0] Olympia storage bucket id
+
+  --uploadSpEndpoint=uploadSpEndpoint                              [default: http://localhost:3333] Olympia storage node
+                                                                   endpoint to use for uploading
+
+  --videoBatchSize=videoBatchSize                                  [default: 20] Video batch size
+
+  --wsProviderEndpointUri=wsProviderEndpointUri                    [default: ws://localhost:9944] WS provider endpoint
+                                                                   uri (Olympia)
+```
+
+_See code: [src/commands/giza-olympia/migrateContent.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/giza-olympia/migrateContent.ts)_
+
+## `migration-scripts giza-olympia:migrateMembers`
+
+```
+USAGE
+  $ migration-scripts giza-olympia:migrateMembers
+
+OPTIONS
+  --batchSize=batchSize                          [default: 100] Members batch size
+
+  --migrationStatePath=migrationStatePath        [default:
+                                                 /home/leszek/projects/joystream/joystream-ws-2/utils/migration-scripts/
+                                                 results/giza-olympia] Path to migration results directory
+
+  --snapshotFilePath=snapshotFilePath            (required) Path to giza memberships snapshot (json)
+
+  --sudoUri=sudoUri                              [default: //Alice] Sudo key Substrate uri
+
+  --wsProviderEndpointUri=wsProviderEndpointUri  [default: ws://localhost:9944] WS provider endpoint uri (Olympia)
+```
+
+_See code: [src/commands/giza-olympia/migrateMembers.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/giza-olympia/migrateMembers.ts)_
+
 ## `migration-scripts help [COMMAND]`
 
 display help for migration-scripts
@@ -62,8 +180,11 @@ OPTIONS
   --dataDir=dataDir                                            [default: /tmp/joystream/sumer-giza-migration] Directory
                                                                for storing data objects to upload
 
+  --excludeVideoIds=excludeVideoIds                            [default: ] Video ids to exclude from migration
+
   --forceChannelOwnerMemberId=forceChannelOwnerMemberId        Can be used to force a specific channel owner for all
-                                                               channels, allowing to test the script in dev environment
+                                                               channels, allowing to easily test the script in dev
+                                                               environment
 
   --migrationStatePath=migrationStatePath                      [default:
                                                                /home/leszek/projects/joystream/joystream-ws-2/utils/migr
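
The command reference above is generated by oclif from the flag definitions added later in this diff. For orientation, a possible end-to-end invocation order for the new `giza-olympia` commands could look like the following (a sketch only: output file names, channel ids and the membership-migration result path are illustrative, and the default query-node / WS endpoints are assumed):

```
$ migration-scripts giza-olympia:createMembershipsSnapshot -o members-snapshot.json
$ migration-scripts giza-olympia:createContentDirectorySnapshot -o content-snapshot.json
$ migration-scripts giza-olympia:fetchAllObjects --dataDir /tmp/joystream/giza-olympia-migration
$ migration-scripts giza-olympia:migrateMembers --snapshotFilePath members-snapshot.json
$ migration-scripts giza-olympia:migrateContent \
    -c 1 -c 2 \
    --snapshotFilePath content-snapshot.json \
    --membershipsMigrationResultPath ./results/giza-olympia/membershipMigration.json \
    --dataDir /tmp/joystream/giza-olympia-migration
```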

+ 2 - 1
utils/migration-scripts/package.json

@@ -89,7 +89,8 @@
     "lint": "eslint ./src --ext .ts",
     "checks": "tsc --noEmit --pretty && prettier ./ --check && yarn lint",
     "format": "prettier ./ --write",
-    "generate:types:graphql": "yarn graphql-codegen -c ./src/sumer-giza/sumer-query-node/codegen.yml"
+    "sumer-giza:generate:types:graphql": "yarn graphql-codegen -c ./src/sumer-giza/sumer-query-node/codegen.yml",
+    "giza-olympia:generate:types:graphql": "yarn graphql-codegen -c ./src/giza-olympia/giza-query-node/codegen.yml"
   },
   "types": "lib/index.d.ts"
 }
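
Usage note: with the script rename above, each query-node flavour now has its own codegen entry point, so the generated GraphQL types can be rebuilt independently (standard yarn script invocation, run from `utils/migration-scripts`):

```
$ yarn sumer-giza:generate:types:graphql
$ yarn giza-olympia:generate:types:graphql
```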

+ 32 - 0
utils/migration-scripts/src/commands/giza-olympia/createContentDirectorySnapshot.ts

@@ -0,0 +1,32 @@
+import { Command, flags } from '@oclif/command'
+import { writeFileSync } from 'fs'
+import { QueryNodeApi } from '../../giza-olympia/giza-query-node/api'
+import { SnapshotManager } from '../../giza-olympia/SnapshotManager'
+
+export class CreateContentDirectorySnapshotCommand extends Command {
+  static flags = {
+    queryNodeUri: flags.string({
+      description: 'Giza query node uri',
+      default: 'https://hydra.joystream.org/graphql',
+    }),
+    output: flags.string({
+      char: 'o',
+      required: false,
+      description: 'Output file path',
+    }),
+  }
+
+  async run(): Promise<void> {
+    const { queryNodeUri, output } = this.parse(CreateContentDirectorySnapshotCommand).flags
+    const queryNodeApi = new QueryNodeApi(queryNodeUri)
+
+    const snapshotManager = new SnapshotManager({ queryNodeApi })
+    const snapshot = await snapshotManager.createContentDirectorySnapshot()
+    if (output) {
+      writeFileSync(output, JSON.stringify(snapshot, null, 2))
+    } else {
+      this.log(JSON.stringify(snapshot, null, 2))
+    }
+    this.exit(0)
+  }
+}

+ 32 - 0
utils/migration-scripts/src/commands/giza-olympia/createMembershipsSnapshot.ts

@@ -0,0 +1,32 @@
+import { Command, flags } from '@oclif/command'
+import { writeFileSync } from 'fs'
+import { QueryNodeApi } from '../../giza-olympia/giza-query-node/api'
+import { SnapshotManager } from '../../giza-olympia/SnapshotManager'
+
+export class CreateMembershipsSnapshotCommand extends Command {
+  static flags = {
+    queryNodeUri: flags.string({
+      description: 'Giza query node uri',
+      default: 'https://hydra.joystream.org/graphql',
+    }),
+    output: flags.string({
+      char: 'o',
+      required: false,
+      description: 'Output file path',
+    }),
+  }
+
+  async run(): Promise<void> {
+    const { queryNodeUri, output } = this.parse(CreateMembershipsSnapshotCommand).flags
+    const queryNodeApi = new QueryNodeApi(queryNodeUri)
+
+    const snapshotManager = new SnapshotManager({ queryNodeApi })
+    const snapshot = await snapshotManager.createMembershipsSnapshot()
+    if (output) {
+      writeFileSync(output, JSON.stringify(snapshot, null, 2))
+    } else {
+      this.log(JSON.stringify(snapshot, null, 2))
+    }
+    this.exit(0)
+  }
+}

+ 42 - 0
utils/migration-scripts/src/commands/giza-olympia/fetchAllObjects.ts

@@ -0,0 +1,42 @@
+import { Command, flags } from '@oclif/command'
+import path from 'path'
+import os from 'os'
+import { QueryNodeApi } from '../../giza-olympia/giza-query-node/api'
+import { DownloadManager } from '../../giza-olympia/DownloadManager'
+
+export class FetchAllObjectsCommand extends Command {
+  static flags = {
+    queryNodeUri: flags.string({
+      description: 'Giza query node uri',
+      default: 'https://hydra.joystream.org/graphql',
+    }),
+    dataDir: flags.string({
+      description: 'Directory for storing data objects',
+      default: path.join(os.tmpdir(), 'joystream/giza-olympia-migration'),
+    }),
+    continously: flags.boolean({
+      description: 'Whether the script should run continuously',
+      default: true,
+    }),
+    objectsPerBatch: flags.integer({
+      required: false,
+      description: 'Max. number of storage objects to fetch simultaneously',
+      default: 20,
+    }),
+    idleTime: flags.integer({
+      required: false,
+      description: 'Time (in seconds) to remain idle in case no new data objects were found',
+      default: 300,
+      dependsOn: ['continously'],
+    }),
+  }
+
+  async run(): Promise<void> {
+    const opts = this.parse(FetchAllObjectsCommand).flags
+    const queryNodeApi = new QueryNodeApi(opts.queryNodeUri)
+
+    const downloadManager = new DownloadManager({ queryNodeApi, config: opts })
+    await downloadManager.fetchAllDataObjects(undefined, opts.continously, opts.idleTime)
+    this.exit(0)
+  }
+}

+ 77 - 0
utils/migration-scripts/src/commands/giza-olympia/migrateContent.ts

@@ -0,0 +1,77 @@
+import { Command, flags } from '@oclif/command'
+import path from 'path'
+import { ContentMigration } from '../../giza-olympia/ContentMigration'
+
+export class MigrateContentCommand extends Command {
+  static flags = {
+    snapshotFilePath: flags.string({
+      required: true,
+      description: 'Path to giza content directory snapshot (json)',
+    }),
+    membershipsMigrationResultPath: flags.string({
+      required: true,
+      description: 'JSON artifact produced by membership migration',
+    }),
+    wsProviderEndpointUri: flags.string({
+      description: 'WS provider endpoint uri (Olympia)',
+      default: 'ws://localhost:9944',
+    }),
+    sudoUri: flags.string({
+      description: 'Sudo key Substrate uri',
+      default: '//Alice',
+    }),
+    channelIds: flags.integer({
+      char: 'c',
+      multiple: true,
+      description: 'Channel ids to migrate',
+      required: true,
+    }),
+    dataDir: flags.string({
+      required: true,
+      description: 'Directory where data objects to upload are stored',
+    }),
+    channelBatchSize: flags.integer({
+      description: 'Channel batch size',
+      default: 20,
+    }),
+    videoBatchSize: flags.integer({
+      description: 'Video batch size',
+      default: 20,
+    }),
+    forceChannelOwnerMemberId: flags.integer({
+      description:
+        'Can be used to force a specific channel owner for all channels, making it easy to test the script in a dev environment',
+      required: false,
+    }),
+    uploadSpEndpoint: flags.string({
+      description: 'Olympia storage node endpoint to use for uploading',
+      default: 'http://localhost:3333',
+    }),
+    uploadSpBucketId: flags.integer({
+      description: 'Olympia storage bucket id',
+      default: 0,
+    }),
+    migrationStatePath: flags.string({
+      description: 'Path to migration results directory',
+      default: path.join(__dirname, '../../../results/giza-olympia'),
+    }),
+    excludeVideoIds: flags.integer({
+      multiple: true,
+      description: 'Video ids to exclude from migration',
+      required: false,
+      default: [],
+    }),
+  }
+
+  async run(): Promise<void> {
+    const opts = this.parse(MigrateContentCommand).flags
+    try {
+      const migration = new ContentMigration(opts)
+      await migration.run()
+    } catch (e) {
+      console.error(e)
+      this.exit(-1)
+    }
+    this.exit(0)
+  }
+}

+ 51 - 0
utils/migration-scripts/src/commands/giza-olympia/migrateMembers.ts

@@ -0,0 +1,51 @@
+import { Command, flags } from '@oclif/command'
+import { WsProvider } from '@polkadot/api'
+import { readFileSync } from 'fs'
+import path from 'path'
+import { MembershipMigration } from '../../giza-olympia/MembershipMigration'
+import { MembershipsSnapshot } from '../../giza-olympia/SnapshotManager'
+import { RuntimeApi } from '../../RuntimeApi'
+
+export class MigrateMembersCommand extends Command {
+  static flags = {
+    snapshotFilePath: flags.string({
+      required: true,
+      description: 'Path to giza memberships snapshot (json)',
+    }),
+    wsProviderEndpointUri: flags.string({
+      description: 'WS provider endpoint uri (Olympia)',
+      default: 'ws://localhost:9944',
+    }),
+    sudoUri: flags.string({
+      description: 'Sudo key Substrate uri',
+      default: '//Alice',
+    }),
+    batchSize: flags.integer({
+      description: 'Members batch size',
+      default: 100,
+    }),
+    migrationStatePath: flags.string({
+      description: 'Path to migration results directory',
+      default: path.join(__dirname, '../../../results/giza-olympia'),
+    }),
+  }
+
+  async run(): Promise<void> {
+    const opts = this.parse(MigrateMembersCommand).flags
+    try {
+      const api = new RuntimeApi({ provider: new WsProvider(opts.wsProviderEndpointUri) })
+      await api.isReadyOrError
+      const snapshot = JSON.parse(readFileSync(opts.snapshotFilePath).toString()) as MembershipsSnapshot
+      const migration = new MembershipMigration({
+        api,
+        snapshot,
+        config: opts,
+      })
+      await migration.run()
+    } catch (e) {
+      console.error(e)
+      this.exit(-1)
+    }
+    this.exit(0)
+  }
+}

+ 94 - 0
utils/migration-scripts/src/giza-olympia/AssetsBase.ts

@@ -0,0 +1,94 @@
+import axios from 'axios'
+import stringify from 'fast-safe-stringify'
+import { createReadStream, existsSync, statSync, mkdirSync } from 'fs'
+import path from 'path'
+import { Readable } from 'stream'
+import { Logger } from 'winston'
+import { createLogger } from '../logging'
+import { ContentHash } from './ContentHash'
+import { StorageDataObjectFieldsFragment } from './giza-query-node/generated/queries'
+
+export type AssetsBaseConfig = {
+  dataDir: string
+}
+
+export type AssetsBaseParams = {
+  config: AssetsBaseConfig
+}
+
+export abstract class AssetsBase {
+  protected config: AssetsBaseConfig
+  protected logger: Logger
+
+  protected constructor(params: AssetsBaseParams) {
+    const { config } = params
+    this.config = config
+    this.logger = createLogger('Assets Base')
+    mkdirSync(this.tmpAssetPath(''), { recursive: true })
+    mkdirSync(this.assetPath(''), { recursive: true })
+  }
+
+  protected tmpAssetPath(dataObjectId: string): string {
+    return path.join(this.config.dataDir, 'tmp', dataObjectId)
+  }
+
+  protected assetPath(contentHash: string): string {
+    return path.join(this.config.dataDir, 'objects', contentHash)
+  }
+
+  protected calcContentHash(assetPath: string): Promise<string> {
+    return new Promise<string>((resolve, reject) => {
+      const fReadStream = createReadStream(assetPath)
+      const hash = new ContentHash()
+      fReadStream.on('data', (chunk) => hash.update(chunk))
+      fReadStream.on('end', () => resolve(hash.digest()))
+      fReadStream.on('error', (err) => reject(err))
+    })
+  }
+
+  protected async isAssetMissing(dataObject: StorageDataObjectFieldsFragment): Promise<boolean> {
+    const assetPath = this.assetPath(dataObject.ipfsHash)
+    if (!existsSync(assetPath)) {
+      this.logger.debug(`isAssetMissing: ${assetPath} not found`)
+      return true
+    }
+    const { size } = statSync(assetPath)
+    if (size.toString() !== dataObject.size) {
+      this.logger.debug(`isAssetMissing: Unexpected size (expected: ${dataObject.size}, got: ${size.toString()})`)
+      return true
+    }
+    const hash = await this.calcContentHash(assetPath)
+    if (hash !== dataObject.ipfsHash) {
+      this.logger.debug(`isAssetMissing: Unexpected hash (expected: ${dataObject.ipfsHash}, got: ${hash})`)
+      return true
+    }
+    return false
+  }
+
+  private streamToString(stream: Readable): Promise<string> {
+    const chunks: Uint8Array[] = []
+    return new Promise<string>((resolve, reject) => {
+      stream.on('data', (chunk) => chunks.push(Buffer.from(chunk)))
+      stream.on('error', (err) => reject(err))
+      stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8')))
+    })
+  }
+
+  protected async reqErrorMessage(e: unknown): Promise<string> {
+    if (axios.isAxiosError(e)) {
+      let msg = e.message
+      if (e.response && typeof e.response.data === 'string') {
+        msg += `: ${e.response.data}`
+      }
+      if (e.response && e.response.data && e.response.data.message) {
+        msg += `: ${e.response.data.message}`
+      }
+      if (e.response && e.response.data && e.response.data instanceof Readable) {
+        msg += `: ${await this.streamToString(e.response.data)}`
+      }
+
+      return msg
+    }
+    return e instanceof Error ? e.message : stringify(e)
+  }
+}

+ 194 - 0
utils/migration-scripts/src/giza-olympia/BaseMigration.ts

@@ -0,0 +1,194 @@
+import { SubmittableResult } from '@polkadot/api'
+import { KeyringPair } from '@polkadot/keyring/types'
+import { RuntimeApi } from '../RuntimeApi'
+import { Keyring } from '@polkadot/keyring'
+import { Logger } from 'winston'
+import path from 'path'
+import nodeCleanup from 'node-cleanup'
+import _ from 'lodash'
+import fs from 'fs'
+import { SubmittableExtrinsic } from '@polkadot/api/types'
+
+export type MigrationResult = {
+  idsMap: Map<number, number>
+  failedMigrations: number[]
+}
+
+export type MigrationStateJson = {
+  idsMapEntries: [number, number][]
+  failedMigrations: number[]
+}
+
+export type BaseMigrationConfig = {
+  migrationStatePath: string
+  sudoUri: string
+}
+
+export type BaseMigrationParams<T> = {
+  api: RuntimeApi
+  snapshot: T
+  config: BaseMigrationConfig
+}
+
+export abstract class BaseMigration<T> {
+  abstract readonly name: string
+  protected api: RuntimeApi
+  protected sudo!: KeyringPair
+  protected config: BaseMigrationConfig
+  protected snapshot: T
+  protected failedMigrations: Set<number>
+  protected idsMap: Map<number, number>
+  protected pendingMigrationIteration: Promise<void> | undefined
+  protected abstract logger: Logger
+
+  public constructor({ api, config, snapshot }: BaseMigrationParams<T>) {
+    this.api = api
+    this.config = config
+    this.failedMigrations = new Set()
+    this.idsMap = new Map()
+    this.snapshot = snapshot
+    fs.mkdirSync(config.migrationStatePath, { recursive: true })
+  }
+
+  protected getMigrationStateFilePath(): string {
+    const { migrationStatePath } = this.config
+    return path.join(migrationStatePath, `${_.camelCase(this.name)}.json`)
+  }
+
+  public async init(): Promise<void> {
+    this.loadMigrationState()
+    nodeCleanup(this.onExit.bind(this))
+    await this.loadSudoKey()
+  }
+
+  public abstract run(): Promise<MigrationResult>
+
+  protected abstract migrateBatch(batchTx: SubmittableExtrinsic<'promise'>, batch: { id: string }[]): Promise<void>
+
+  protected getMigrationStateJson(): MigrationStateJson {
+    return {
+      idsMapEntries: Array.from(this.idsMap.entries()),
+      failedMigrations: Array.from(this.failedMigrations),
+    }
+  }
+
+  protected loadMigrationState(): void {
+    const stateFilePath = this.getMigrationStateFilePath()
+    if (fs.existsSync(stateFilePath)) {
+      const migrationStateJson = fs.readFileSync(stateFilePath).toString()
+      const migrationState: MigrationStateJson = JSON.parse(migrationStateJson)
+      this.idsMap = new Map(migrationState.idsMapEntries)
+    }
+  }
+
+  protected onExit(exitCode: number | null, signal: string | null): void | false {
+    nodeCleanup.uninstall() // don't call cleanup handler again
+    this.logger.info('Exiting...')
+    if (signal && this.pendingMigrationIteration) {
+      this.logger.info('Waiting for currently pending migration iteration to finalize...')
+      this.pendingMigrationIteration.then(() => {
+        this.saveMigrationState(true)
+        this.logger.info('Done.')
+        process.kill(process.pid, signal)
+      })
+      return false
+    } else {
+      this.saveMigrationState(true)
+      this.logger.info('Done.')
+    }
+  }
+
+  protected saveMigrationState(isExitting: boolean): void {
+    this.logger.info(`Saving ${isExitting ? 'final' : 'intermediate'} migration state...`)
+    const stateFilePath = this.getMigrationStateFilePath()
+    const migrationState = this.getMigrationStateJson()
+    fs.writeFileSync(stateFilePath, JSON.stringify(migrationState, undefined, 2))
+  }
+
+  private async loadSudoKey() {
+    const { sudoUri } = this.config
+    const keyring = new Keyring({ type: 'sr25519' })
+    this.sudo = keyring.createFromUri(sudoUri)
+    const sudoKey = await this.api.query.sudo.key()
+    if (sudoKey.toString() !== this.sudo.address) {
+      throw new Error(`Invalid sudo key! Expected: ${sudoKey.toString()}, Got: ${this.sudo.address}`)
+    }
+  }
+
+  protected async executeBatchMigration<T extends { id: string }>(
+    batchTx: SubmittableExtrinsic<'promise'>,
+    batch: T[]
+  ): Promise<void> {
+    this.pendingMigrationIteration = (async () => {
+      await this.migrateBatch(batchTx, batch)
+      this.saveMigrationState(false)
+    })()
+    await this.pendingMigrationIteration
+    this.pendingMigrationIteration = undefined
+  }
+
+  /**
+   * Extract failed migrations (entity ids) from batch transaction result.
+   * Assumptions:
+   * - Each entity is migrated with a constant number of calls (2 by default: balances.transferKeepAlive and sudo.sudoAs)
+   * - Ordering of the entities in the `batch` array matches the ordering of the batched calls through which they are migrated
+   * - If `usesSudoAs===true`: Last call for each entity is always sudo.sudoAs
+   * - If `usesSudoAs===true`: There is only one sudo.sudoAs call per entity
+   *
+   * Entity migration is considered failed if the last call (per entity) failed or was not executed at all, regardless of
+   * the result of any of the previous calls associated with that entity migration.
+   * (For example, it does not matter whether balances.transferKeepAlive failed and interrupted the batch, or
+   * balances.transferKeepAlive succeeded but sudo.sudoAs failed - in both cases the migration is considered failed
+   * and should be fully re-executed on the next script run.)
+   */
+  protected extractFailedMigrations<T extends { id: string }>(
+    result: SubmittableResult,
+    batch: T[],
+    callsPerEntity = 2,
+    usesSudoAs = true
+  ): void {
+    const { api } = this
+    const batchInterruptedEvent = api.findEvent(result, 'utility', 'BatchInterrupted')
+    const numberOfSuccesfulCalls = batchInterruptedEvent
+      ? batchInterruptedEvent.data[0].toNumber()
+      : callsPerEntity * batch.length
+    const numberOfMigratedEntites = Math.floor(numberOfSuccesfulCalls / callsPerEntity)
+
+    const sudoAsDoneEvents = api.findEvents(result, 'sudo', 'SudoAsDone')
+    if (usesSudoAs && sudoAsDoneEvents.length !== numberOfMigratedEntites) {
+      throw new Error(
+        `Unexpected number of SudoAsDone events (expected: ${numberOfMigratedEntites}, got: ${sudoAsDoneEvents.length})! ` +
+          `Could not extract failed migrations from: ${JSON.stringify(result.toHuman())}`
+      )
+    }
+
+    const failedIds: number[] = []
+    batch.forEach((entity, i) => {
+      const entityId = parseInt(entity.id)
+      if (i >= numberOfMigratedEntites || (usesSudoAs && sudoAsDoneEvents[i].data[0].isFalse)) {
+        failedIds.push(entityId)
+        this.failedMigrations.add(entityId)
+      }
+    })
+
+    if (batchInterruptedEvent) {
+      this.logger.error(
+        `Batch interrupted at call ${numberOfSuccesfulCalls}: ${this.api.formatDispatchError(
+          batchInterruptedEvent.data[1]
+        )}`
+      )
+    }
+
+    if (failedIds.length) {
+      this.logger.error(`Failed to migrate:`, { failedIds })
+    }
+  }
+
+  public getResult(): MigrationResult {
+    const { idsMap, failedMigrations } = this
+    return {
+      idsMap: new Map(idsMap.entries()),
+      failedMigrations: Array.from(failedMigrations),
+    }
+  }
+}
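
To make the index arithmetic in `extractFailedMigrations` above easier to follow, here is a minimal standalone sketch (a hypothetical helper, not part of the migration scripts) of how a `utility.BatchInterrupted` call index maps to failed entity ids when every entity is migrated with a fixed number of calls. The real method additionally inspects `sudo.SudoAsDone` events to catch entities whose `sudoAs` call executed but failed internally.

```ts
// Hypothetical illustration of the arithmetic used by extractFailedMigrations.
// Each entity occupies `callsPerEntity` consecutive calls inside the utility.batch tx.
function failedEntityIds(
  batchIds: number[], // entity ids, ordered like their calls in the batch
  interruptedAtCall?: number, // call index from utility.BatchInterrupted (undefined = no interruption)
  callsPerEntity = 2
): number[] {
  const successfulCalls = interruptedAtCall ?? callsPerEntity * batchIds.length
  const migratedEntities = Math.floor(successfulCalls / callsPerEntity)
  // An entity whose last call was never executed counts as a failed migration
  return batchIds.filter((_, i) => i >= migratedEntities)
}

// Example: 3 entities, 2 calls each, batch interrupted at call index 3
// => floor(3 / 2) = 1 entity fully migrated, so the entities at positions 1 and 2 failed
failedEntityIds([101, 102, 103], 3) // [102, 103]
```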

+ 21 - 0
utils/migration-scripts/src/giza-olympia/CategoryMigration.ts

@@ -0,0 +1,21 @@
+import { BaseMigration } from './BaseMigration'
+import { ContentDirectorySnapshot } from './SnapshotManager'
+
+export abstract class CategoryMigration extends BaseMigration<ContentDirectorySnapshot> {
+  protected contentLeadKey!: string
+
+  public async init(): Promise<void> {
+    await super.init()
+    await this.loadContentLeadKey()
+  }
+
+  private async loadContentLeadKey(): Promise<void> {
+    const { api } = this
+    const leadId = await api.query.contentWorkingGroup.currentLead()
+    if (!leadId.isSome) {
+      throw new Error('ContentWorkingGroup lead must be set!')
+    }
+    const leadWorker = await api.query.contentWorkingGroup.workerById(leadId.unwrap())
+    this.contentLeadKey = leadWorker.role_account_id.toString()
+  }
+}

+ 78 - 0
utils/migration-scripts/src/giza-olympia/ChannelCategoriesMigration.ts

@@ -0,0 +1,78 @@
+import { ChannelCategoryMetadata } from '@joystream/metadata-protobuf'
+import { ChannelCategoryId } from '@joystream/types/content'
+import { SubmittableExtrinsic } from '@polkadot/api/types'
+import { ISubmittableResult } from '@polkadot/types/types'
+import { Logger } from 'winston'
+import { createLogger } from '../logging'
+import { BaseMigrationParams, MigrationResult } from './BaseMigration'
+import { CategoryMigration } from './CategoryMigration'
+import { ContentDirectorySnapshot } from './SnapshotManager'
+
+export class ChannelCategoriesMigration extends CategoryMigration {
+  name = 'Channel categories migration'
+  protected logger: Logger
+
+  public constructor(params: BaseMigrationParams<ContentDirectorySnapshot>) {
+    super(params)
+    this.logger = createLogger(this.name)
+  }
+
+  protected async migrateBatch(
+    batchTx: SubmittableExtrinsic<'promise', ISubmittableResult>,
+    batch: { id: string }[]
+  ): Promise<void> {
+    const { api } = this
+    const result = await api.sendExtrinsic(this.sudo, batchTx)
+    const categoryCreatedEvents = api.findEvents(result, 'content', 'ChannelCategoryCreated')
+    const createdCategoryIds: ChannelCategoryId[] = categoryCreatedEvents.map((e) => e.data[0])
+
+    if (createdCategoryIds.length !== batch.length) {
+      this.extractFailedMigrations(result, batch)
+    }
+
+    let newCategoryIndex = 0
+    batch.forEach((c) => {
+      if (this.failedMigrations.has(parseInt(c.id))) {
+        return
+      }
+      const newCategoryId = createdCategoryIds[newCategoryIndex++]
+      this.idsMap.set(parseInt(c.id), newCategoryId.toNumber())
+    })
+    this.logger.info(`Channel categories map created!`, this.idsMap.entries())
+    if (this.failedMigrations.size) {
+      throw new Error(`Failed to create some channel categories: ${Array.from(this.failedMigrations).join(', ')}`)
+    }
+    this.logger.info(`All channel categories successfully migrated!`)
+  }
+
+  public async run(): Promise<MigrationResult> {
+    await this.init()
+    const { api } = this
+    const allCategories = this.snapshot.channelCategories
+    const categoriesToMigrate = allCategories.filter((c) => !this.idsMap.has(parseInt(c.id)))
+
+    if (!categoriesToMigrate.length) {
+      this.logger.info('All channel categories already migrated, skipping...')
+      return this.getResult()
+    }
+
+    this.logger.info(`Migrating ${categoriesToMigrate.length} channel categories...`)
+    const txs = categoriesToMigrate
+      .sort((a, b) => parseInt(a.id) - parseInt(b.id))
+      .map((c) => {
+        const meta = new ChannelCategoryMetadata({ name: c.name })
+        const metaBytes = '0x' + Buffer.from(ChannelCategoryMetadata.encode(meta).finish()).toString('hex')
+        return api.tx.sudo.sudoAs(
+          this.contentLeadKey,
+          api.tx.content.createChannelCategory('Lead', {
+            meta: metaBytes,
+          })
+        )
+      })
+
+    const batchTx = api.tx.utility.batch(txs)
+    await this.migrateBatch(batchTx, categoriesToMigrate)
+
+    return this.getResult()
+  }
+}

+ 199 - 0
utils/migration-scripts/src/giza-olympia/ChannelsMigration.ts

@@ -0,0 +1,199 @@
+import { UploadMigration, UploadMigrationConfig, UploadMigrationParams } from './UploadMigration'
+import { ChannelMetadata } from '@joystream/metadata-protobuf'
+import { ChannelFieldsFragment } from './giza-query-node/generated/queries'
+import { createType } from '@joystream/types'
+import Long from 'long'
+import { ChannelCreationParameters } from '@joystream/types/content'
+import { ChannelId } from '@joystream/types/common'
+import _ from 'lodash'
+import { MigrationResult } from './BaseMigration'
+import { Logger } from 'winston'
+import { createLogger } from '../logging'
+import { SubmittableExtrinsic } from '@polkadot/api/types'
+
+export type ChannelsMigrationConfig = UploadMigrationConfig & {
+  channelIds: number[]
+  channelBatchSize: number
+  forceChannelOwnerMemberId: number | undefined
+  excludeVideoIds: number[]
+}
+
+export type ChannelsMigrationParams = UploadMigrationParams & {
+  config: ChannelsMigrationConfig
+  forcedChannelOwner: { id: string; controllerAccount: string } | undefined
+  categoriesMap: Map<number, number>
+  membershipsMap: Map<number, number>
+}
+
+export type ChannelsMigrationResult = MigrationResult & {
+  videoIds: number[]
+}
+
+export class ChannelMigration extends UploadMigration {
+  name = 'Channels migration'
+  protected config: ChannelsMigrationConfig
+  protected categoriesMap: Map<number, number>
+  protected membershipsMap: Map<number, number>
+  protected videoIds: number[] = []
+  protected forcedChannelOwner: { id: string; controllerAccount: string } | undefined
+  protected logger: Logger
+
+  public constructor(params: ChannelsMigrationParams) {
+    super(params)
+    this.config = params.config
+    this.forcedChannelOwner = params.forcedChannelOwner
+    this.categoriesMap = params.categoriesMap
+    this.membershipsMap = params.membershipsMap
+    this.logger = createLogger(this.name)
+  }
+
+  private getNewCategoryId(oldCategoryId: string | null | undefined): Long | undefined {
+    if (typeof oldCategoryId !== 'string') {
+      return undefined
+    }
+    const newCategoryId = this.categoriesMap.get(parseInt(oldCategoryId))
+    return newCategoryId ? Long.fromNumber(newCategoryId) : undefined
+  }
+
+  private getChannelOwnerMember({
+    id,
+    ownerMember,
+  }: ChannelFieldsFragment): Exclude<ChannelFieldsFragment['ownerMember'], null | undefined> {
+    if (!ownerMember) {
+      throw new Error(`Channel ownerMember missing: ${id}. Only member-owned channels are supported!`)
+    }
+
+    if (this.forcedChannelOwner) {
+      return this.forcedChannelOwner
+    }
+
+    const newMemberId = this.membershipsMap.get(parseInt(ownerMember.id))
+    if (newMemberId === undefined) {
+      throw new Error(`Missing member ${ownerMember.id} (owner of channel ${id}) in the memberships map!`)
+    }
+
+    return { ...ownerMember, id: newMemberId.toString() }
+  }
+
+  protected async migrateBatch(tx: SubmittableExtrinsic<'promise'>, channels: ChannelFieldsFragment[]): Promise<void> {
+    const { api } = this
+    const result = await api.sendExtrinsic(this.sudo, tx)
+    const channelCreatedEvents = api.findEvents(result, 'content', 'ChannelCreated')
+    const newChannelIds: ChannelId[] = channelCreatedEvents.map((e) => e.data[1])
+    if (channelCreatedEvents.length !== channels.length) {
+      this.extractFailedMigrations(result, channels)
+    }
+    const newChannelMapEntries: [number, number][] = []
+    let newChannelIdIndex = 0
+    channels.forEach(({ id }) => {
+      if (this.failedMigrations.has(parseInt(id))) {
+        return
+      }
+      const newChannelId = newChannelIds[newChannelIdIndex++].toNumber()
+      this.idsMap.set(parseInt(id), newChannelId)
+      newChannelMapEntries.push([parseInt(id), newChannelId])
+    })
+    if (newChannelMapEntries.length) {
+      this.logger.info('Channel map entries added!', { newChannelMapEntries })
+      const dataObjectsUploadedEvents = this.api.findEvents(result, 'storage', 'DataObjectsUploaded')
+      this.uploadManager.queueUploadsFromEvents(dataObjectsUploadedEvents)
+    }
+  }
+
+  public async run(): Promise<ChannelsMigrationResult> {
+    await this.init()
+    const {
+      api,
+      config: { channelIds, channelBatchSize },
+    } = this
+    const ids = channelIds.sort((a, b) => a - b)
+    while (ids.length) {
+      const idsBatch = ids.splice(0, channelBatchSize)
+      this.logger.info(`Preparing a batch of ${idsBatch.length} channels...`)
+      const channelsBatch = this.snapshot.channels.filter((c) => idsBatch.includes(parseInt(c.id)))
+      if (channelsBatch.length < idsBatch.length) {
+        this.logger.warn(
+          `Some channels could not be found: ${_.difference(
+            idsBatch,
+            channelsBatch.map((c) => parseInt(c.id))
+          )}`
+        )
+      }
+      const channelsToMigrate = channelsBatch.filter((c) => !this.idsMap.has(parseInt(c.id)))
+      if (channelsToMigrate.length < channelsBatch.length) {
+        this.logger.info(
+          `${channelsToMigrate.length ? 'Some' : 'All'} channels in this batch were already migrated ` +
+            `(${channelsBatch.length - channelsToMigrate.length}/${channelsBatch.length})`
+        )
+      }
+      if (channelsToMigrate.length) {
+        const calls = _.flatten(await Promise.all(channelsToMigrate.map((c) => this.prepareChannel(c))))
+        const batchTx = api.tx.utility.batch(calls)
+        await this.executeBatchMigration(batchTx, channelsToMigrate)
+        await this.uploadManager.processQueuedUploads()
+      }
+      const videoIdsToMigrate: number[] = channelsBatch.reduce<number[]>(
+        (res, { id, videos }) =>
+          this.idsMap.has(parseInt(id))
+            ? res.concat(videos.map((v) => parseInt(v.id)).filter((id) => !this.config.excludeVideoIds.includes(id)))
+            : res,
+        []
+      )
+      this.videoIds = this.videoIds.concat(videoIdsToMigrate)
+      if (videoIdsToMigrate.length) {
+        this.logger.info(`Added ${videoIdsToMigrate.length} video ids to the list of videos to migrate`)
+      }
+    }
+    return {
+      ...this.getResult(),
+      videoIds: [...this.videoIds],
+    }
+  }
+
+  private async prepareChannel(channel: ChannelFieldsFragment) {
+    const { api } = this
+    const { avatarPhoto, coverPhoto, title, description, categoryId, isPublic, language, collaborators } = channel
+
+    const ownerMember = this.getChannelOwnerMember(channel)
+
+    const assetsToPrepare = {
+      avatar: avatarPhoto || undefined,
+      coverPhoto: coverPhoto || undefined,
+    }
+    const preparedAssets = await this.uploadManager.prepareAssets(assetsToPrepare)
+    const meta = new ChannelMetadata({
+      title,
+      description,
+      category: this.getNewCategoryId(categoryId),
+      avatarPhoto: preparedAssets.avatar?.index,
+      coverPhoto: preparedAssets.coverPhoto?.index,
+      isPublic,
+      language: language?.iso,
+    })
+    const assetsParams = Object.values(preparedAssets)
+      .sort((a, b) => a.index - b.index)
+      .map((a) => a.params)
+    const channelCreationParams = createType<ChannelCreationParameters, 'ChannelCreationParameters'>(
+      'ChannelCreationParameters',
+      {
+        assets: assetsParams.length
+          ? {
+              object_creation_list: assetsParams,
+              expected_data_size_fee: this.uploadManager.dataObjectFeePerMB,
+            }
+          : null,
+        meta: `0x${Buffer.from(ChannelMetadata.encode(meta).finish()).toString('hex')}`,
+        collaborators: collaborators.map(({ id }) => parseInt(id)),
+        reward_account: channel.rewardAccount,
+      }
+    )
+    const feesToCover = this.uploadManager.calcDataObjectsFee(assetsParams)
+    return [
+      api.tx.balances.transferKeepAlive(ownerMember.controllerAccount, feesToCover),
+      api.tx.sudo.sudoAs(
+        ownerMember.controllerAccount,
+        api.tx.content.createChannel({ Member: ownerMember.id }, channelCreationParams)
+      ),
+    ]
+  }
+}

+ 22 - 0
utils/migration-scripts/src/giza-olympia/ContentHash.ts

@@ -0,0 +1,22 @@
+import { createHash, HashInput, NodeHash } from 'blake3'
+import { HashReader } from 'blake3/dist/wasm/nodejs'
+import { toB58String, encode } from 'multihashes'
+
+// Based on distributor node's implementation
+export class ContentHash {
+  private hash: NodeHash<HashReader>
+  public static readonly algorithm = 'blake3'
+
+  constructor() {
+    this.hash = createHash()
+  }
+
+  update(data: HashInput): this {
+    this.hash.update(data)
+    return this
+  }
+
+  digest(): string {
+    return toB58String(encode(this.hash.digest(), ContentHash.algorithm))
+  }
+}
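
As a quick orientation on how this class is used elsewhere in the PR (a sketch, assuming the same `blake3` / `multihashes` packages imported above): file chunks are fed in via `update()`, and the final digest is a base58-encoded blake3 multihash, which is what `AssetsBase.calcContentHash` compares against a data object's `ipfsHash`.

```ts
import { ContentHash } from './ContentHash'

// Stream-friendly usage: feed chunks as they arrive, read the digest once at the end
const hash = new ContentHash()
hash.update(Buffer.from('first chunk'))
hash.update(Buffer.from('second chunk'))
const multihash = hash.digest() // e.g. compared to StorageDataObject.ipfsHash
```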

+ 99 - 0
utils/migration-scripts/src/giza-olympia/ContentMigration.ts

@@ -0,0 +1,99 @@
+import { WsProvider } from '@polkadot/api'
+import { RuntimeApi } from '../RuntimeApi'
+import { VideosMigration } from './VideosMigration'
+import { ChannelMigration } from './ChannelsMigration'
+import { UploadManager } from './UploadManager'
+import { ChannelCategoriesMigration } from './ChannelCategoriesMigration'
+import { VideoCategoriesMigration } from './VideoCategoriesMigration'
+import { ContentDirectorySnapshot } from './SnapshotManager'
+import { readFileSync } from 'fs'
+import { MigrationStateJson } from './BaseMigration'
+
+export type ContentMigrationConfig = {
+  wsProviderEndpointUri: string
+  sudoUri: string
+  channelIds: number[]
+  dataDir: string
+  channelBatchSize: number
+  videoBatchSize: number
+  forceChannelOwnerMemberId: number | undefined
+  preferredDownloadSpEndpoints?: string[]
+  uploadSpBucketId: number
+  uploadSpEndpoint: string
+  migrationStatePath: string
+  excludeVideoIds: number[]
+  snapshotFilePath: string
+  membershipsMigrationResultPath: string
+}
+
+export class ContentMigration {
+  private api: RuntimeApi
+  private config: ContentMigrationConfig
+
+  constructor(config: ContentMigrationConfig) {
+    const { wsProviderEndpointUri } = config
+    const provider = new WsProvider(wsProviderEndpointUri)
+    this.api = new RuntimeApi({ provider })
+    this.config = config
+  }
+
+  private async getForcedChannelOwner(): Promise<{ id: string; controllerAccount: string } | undefined> {
+    const { forceChannelOwnerMemberId } = this.config
+    if (forceChannelOwnerMemberId !== undefined) {
+      const ownerMember = await this.api.query.members.membershipById(forceChannelOwnerMemberId)
+      if (ownerMember.isEmpty) {
+        throw new Error(`Membership by id ${forceChannelOwnerMemberId} not found!`)
+      }
+      return {
+        id: forceChannelOwnerMemberId.toString(),
+        controllerAccount: ownerMember.controller_account.toString(),
+      }
+    }
+    return undefined
+  }
+
+  private loadSnapshot(): ContentDirectorySnapshot {
+    const snapshotJson = readFileSync(this.config.snapshotFilePath).toString()
+    return JSON.parse(snapshotJson) as ContentDirectorySnapshot
+  }
+
+  private loadMembershipsMap(): Map<number, number> {
+    const resultJson = readFileSync(this.config.membershipsMigrationResultPath).toString()
+    const mapEntries = (JSON.parse(resultJson) as MigrationStateJson).idsMapEntries
+    return new Map<number, number>(mapEntries)
+  }
+
+  public async run(): Promise<void> {
+    const { api, config } = this
+    await this.api.isReadyOrError
+    const snapshot = this.loadSnapshot()
+    const membershipsMap = this.loadMembershipsMap()
+    const { idsMap: channelCategoriesMap } = await new ChannelCategoriesMigration({ api, config, snapshot }).run()
+    const { idsMap: videoCategoriesMap } = await new VideoCategoriesMigration({ api, config, snapshot }).run()
+    const forcedChannelOwner = await this.getForcedChannelOwner()
+    const uploadManager = await UploadManager.create({
+      api,
+      config,
+    })
+    const { idsMap: channelsMap, videoIds } = await new ChannelMigration({
+      api,
+      config,
+      snapshot,
+      forcedChannelOwner,
+      uploadManager,
+      categoriesMap: channelCategoriesMap,
+      membershipsMap,
+    }).run()
+    await new VideosMigration({
+      api,
+      config,
+      snapshot,
+      channelsMap,
+      videoIds,
+      forcedChannelOwner,
+      uploadManager,
+      categoriesMap: videoCategoriesMap,
+      membershipsMap,
+    }).run()
+  }
+}

+ 158 - 0
utils/migration-scripts/src/giza-olympia/DownloadManager.ts

@@ -0,0 +1,158 @@
+import { Logger } from 'winston'
+import { createLogger } from '../logging'
+import { MAX_RESULTS_PER_QUERY, QueryNodeApi } from './giza-query-node/api'
+import { createWriteStream, renameSync, statSync } from 'fs'
+import axios from 'axios'
+import {
+  StorageDataObjectConnectionFieldsFragment,
+  StorageDataObjectFieldsFragment,
+} from './giza-query-node/generated/queries'
+import urljoin from 'url-join'
+import { pipeline, Readable } from 'stream'
+import { promisify } from 'util'
+import { DistributionBucketOperatorStatus } from './giza-query-node/generated/schema'
+import _ from 'lodash'
+import { AssetsBase } from './AssetsBase'
+import moment from 'moment'
+
+export type DownloadManagerConfig = {
+  objectsPerBatch: number
+  dataDir: string
+}
+
+export type DownloadManagerParams = {
+  config: DownloadManagerConfig
+  queryNodeApi: QueryNodeApi
+}
+
+export class DownloadManager extends AssetsBase {
+  name = 'Download Manager'
+  protected logger: Logger
+  protected config: DownloadManagerConfig
+  protected queryNodeApi: QueryNodeApi
+
+  public constructor(params: DownloadManagerParams) {
+    super(params)
+    this.config = params.config
+    this.queryNodeApi = params.queryNodeApi
+    this.logger = createLogger(this.name)
+  }
+
+  private async fetchAsset(dataObject: StorageDataObjectFieldsFragment, endpoint: string): Promise<void> {
+    const assetEndpoint = urljoin(endpoint, `api/v1/assets/${dataObject.id}`)
+    const response = await axios.get<Readable>(assetEndpoint, { responseType: 'stream', timeout: 5000 })
+    const pipe = promisify(pipeline)
+    const destPath = this.tmpAssetPath(dataObject.id)
+    const fWriteStream = createWriteStream(destPath)
+    await pipe(response.data, fWriteStream)
+    const { size } = statSync(destPath)
+    if (size !== parseInt(dataObject.size)) {
+      throw new Error('Invalid file size')
+    }
+    const hash = await this.calcContentHash(destPath)
+    if (hash !== dataObject.ipfsHash) {
+      throw new Error('Invalid file hash')
+    }
+    renameSync(destPath, this.assetPath(dataObject.ipfsHash))
+  }
+
+  private async fetchAssetWithRetry(
+    dataObject: StorageDataObjectFieldsFragment,
+    distributors: Map<string, string[]>
+  ): Promise<void> {
+    const endpoints = distributors.get(dataObject.storageBagId) || []
+    let lastError: Error | undefined
+    for (const endpoint of endpoints) {
+      try {
+        this.logger.debug(`Trying to fetch data object ${dataObject.id} (${dataObject.ipfsHash}) from ${endpoint}...`)
+        await this.fetchAsset(dataObject, endpoint)
+        return
+      } catch (e) {
+        this.logger.debug(
+          `Fetching object ${dataObject.id} (${dataObject.ipfsHash}) from ${endpoint} failed: ${(e as Error).message}`
+        )
+        lastError = e as Error
+        continue
+      }
+    }
+    this.logger.error(
+      `Could not fetch data object ${dataObject.id} (${dataObject.ipfsHash}) from any distributor. Last error: ${
+        lastError && (await this.reqErrorMessage(lastError))
+      }`
+    )
+  }
+
+  private async getDistributors(dataObjects: StorageDataObjectFieldsFragment[]): Promise<Map<string, string[]>> {
+    this.logger.info(`Fetching the distributors for ${dataObjects.length} data objects...`)
+    const bagIds = _.uniq(dataObjects.map((o) => o.storageBagId))
+    const buckets = await this.queryNodeApi.getDistributorsByBagIds(bagIds)
+    this.logger.info(`Fetched the data of ${buckets.length} unique distribution buckets`)
+
+    const endpointsByBagId = new Map<string, string[]>()
+    for (const bucket of buckets) {
+      for (const operator of bucket.operators) {
+        if (operator.status === DistributionBucketOperatorStatus.Active && operator.metadata?.nodeEndpoint) {
+          for (const bag of bucket.bags) {
+            const currEndpoints = endpointsByBagId.get(bag.id) || []
+            endpointsByBagId.set(bag.id, [...currEndpoints, operator.metadata.nodeEndpoint])
+          }
+        }
+      }
+    }
+
+    return endpointsByBagId
+  }
+
+  protected async fetchIfMissing(
+    dataObject: StorageDataObjectFieldsFragment,
+    knownDistributors: Map<string, string[]>
+  ): Promise<boolean> {
+    const missing = await this.isAssetMissing(dataObject)
+    if (missing) {
+      this.logger.debug(`Object ${dataObject.ipfsHash} missing, fetching...`)
+      await this.fetchAssetWithRetry(dataObject, knownDistributors)
+      return true
+    }
+
+    this.logger.debug(`Object ${dataObject.ipfsHash} already exists, skipping...`)
+    return false
+  }
+
+  public async fetchAllDataObjects(updatedAfter?: Date, continously = false, idleTimeSec?: number): Promise<void> {
+    do {
+      let currentPage: StorageDataObjectConnectionFieldsFragment | undefined
+      let lastObjectUpdatedAt: Date | undefined
+      this.logger.info(
+        `Fetching all data objects${updatedAfter ? ` updated after ${updatedAfter.toISOString()}` : ''}...`
+      )
+      do {
+        this.logger.info(`Fetching a page of up to ${MAX_RESULTS_PER_QUERY} data objects from the query node...`)
+        currentPage = await this.queryNodeApi.getStorageDataObjectsPage(
+          updatedAfter,
+          MAX_RESULTS_PER_QUERY,
+          currentPage?.pageInfo.endCursor || undefined
+        )
+        const objects = currentPage.edges.map((e) => e.node)
+        const maxUpdatedAt = _.maxBy(objects, (o) => moment(o.updatedAt).unix())?.updatedAt
+        lastObjectUpdatedAt = maxUpdatedAt ? new Date(maxUpdatedAt) : undefined
+        this.logger.info(`Fetched ${objects.length} data object rows`)
+        if (objects.length) {
+          const distributors = await this.getDistributors(objects)
+          while (objects.length) {
+            const batch = objects.splice(0, this.config.objectsPerBatch)
+            this.logger.info(`Processing a batch of ${batch.length} data objects...`)
+            const results = await Promise.all(batch.map((o) => this.fetchIfMissing(o, distributors)))
+            const downloadedObjectsLength = results.reduce((a, b) => a + (b ? 1 : 0), 0)
+            this.logger.info(`Downloaded ${downloadedObjectsLength} new data objects...`)
+          }
+        }
+      } while (currentPage.pageInfo.hasNextPage)
+      if (continously && idleTimeSec) {
+        this.logger.info(`Waiting ${idleTimeSec} seconds...`)
+        await new Promise((resolve) => setTimeout(resolve, 1000 * idleTimeSec))
+      }
+      updatedAfter = updatedAfter || lastObjectUpdatedAt
+      // eslint-disable-next-line no-unmodified-loop-condition
+    } while (continously)
+  }
+}

+ 102 - 0
utils/migration-scripts/src/giza-olympia/MembershipMigration.ts

@@ -0,0 +1,102 @@
+import { MembershipMetadata } from '@joystream/metadata-protobuf'
+import { MembershipFieldsFragment } from './giza-query-node/generated/queries'
+import { createType } from '@joystream/types'
+import { MemberId } from '@joystream/types/common'
+import { BaseMigration, BaseMigrationConfig, BaseMigrationParams, MigrationResult } from './BaseMigration'
+import { Logger } from 'winston'
+import { createLogger } from '../logging'
+import { SubmittableExtrinsic } from '@polkadot/api/types'
+import { MembershipsSnapshot } from './SnapshotManager'
+import { BuyMembershipParameters } from '@joystream/types/members'
+
+export type MembershipMigrationConfig = BaseMigrationConfig & {
+  batchSize: number
+}
+
+export type MembershipMigrationParams = BaseMigrationParams<MembershipsSnapshot> & {
+  config: MembershipMigrationConfig
+}
+
+export class MembershipMigration extends BaseMigration<MembershipsSnapshot> {
+  name = 'Membership migration'
+  protected config: MembershipMigrationConfig
+  protected logger: Logger
+
+  public constructor(params: MembershipMigrationParams) {
+    super(params)
+    this.config = params.config
+    this.logger = createLogger(this.name)
+  }
+
+  protected async migrateBatch(
+    tx: SubmittableExtrinsic<'promise'>,
+    members: MembershipFieldsFragment[]
+  ): Promise<void> {
+    const { api } = this
+    const result = await api.sendExtrinsic(this.sudo, tx)
+    const membershipBoughtEvents = api.findEvents(result, 'members', 'MembershipBought')
+    const newMemberIds: MemberId[] = membershipBoughtEvents.map((e) => e.data[0])
+    if (membershipBoughtEvents.length !== members.length) {
+      this.extractFailedMigrations(result, members, 1, false)
+    }
+    const newMembersMapEntries: [number, number][] = []
+    let newMemberIdIndex = 0
+    members.forEach(({ id }) => {
+      if (this.failedMigrations.has(parseInt(id))) {
+        return
+      }
+      const newMemberId = newMemberIds[newMemberIdIndex++].toNumber()
+      this.idsMap.set(parseInt(id), newMemberId)
+      newMembersMapEntries.push([parseInt(id), newMemberId])
+    })
+    if (newMembersMapEntries.length) {
+      this.logger.info('Members map entries added!', { newMembersMapEntries })
+    }
+  }
+
+  public async run(): Promise<MigrationResult> {
+    await this.init()
+    const {
+      api,
+      config: { batchSize },
+    } = this
+    let membersBatch: MembershipFieldsFragment[] = []
+    while ((membersBatch = this.snapshot.members.splice(0, batchSize)).length) {
+      this.logger.info(`Preparing a batch of ${membersBatch.length} members...`)
+      const membersToMigrate = membersBatch.filter((m) => !this.idsMap.has(parseInt(m.id)))
+      if (membersToMigrate.length < membersBatch.length) {
+        this.logger.info(
+          `${membersToMigrate.length ? 'Some' : 'All'} members in this batch were already migrated ` +
+            `(${membersBatch.length - membersToMigrate.length}/${membersBatch.length})`
+        )
+      }
+      if (membersToMigrate.length) {
+        const calls = await Promise.all(membersToMigrate.map((m) => this.prepareMember(m)))
+        const batchTx = api.tx.utility.batch(calls)
+        await this.executeBatchMigration(batchTx, membersToMigrate)
+      }
+    }
+    return this.getResult()
+  }
+
+  private async prepareMember(member: MembershipFieldsFragment) {
+    const { api } = this
+    const { handle, rootAccount, controllerAccount, about, avatarUri } = member
+
+    const meta = new MembershipMetadata({
+      about,
+      avatarUri,
+    })
+    const buyMembershipParams = createType<BuyMembershipParameters, 'BuyMembershipParameters'>(
+      'BuyMembershipParameters',
+      {
+        handle,
+        controller_account: controllerAccount,
+        root_account: rootAccount,
+        metadata: `0x${Buffer.from(MembershipMetadata.encode(meta).finish()).toString('hex')}`,
+      }
+    )
+
+    return api.tx.members.buyMembership(buyMembershipParams)
+  }
+}
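
As a reference for readers of this diff (not part of the changeset): the sketch below restates the metadata encoding performed by `prepareMember` above, where a `MembershipMetadata` protobuf message is serialized and hex-prefixed before being passed to `members.buyMembership`. The helper name and its placement are hypothetical.

```ts
// Illustrative sketch only (not part of this changeset): mirrors the metadata
// encoding used in prepareMember() above. The helper name is hypothetical.
import { MembershipMetadata } from '@joystream/metadata-protobuf'

export function membershipMetadataHex(about?: string | null, avatarUri?: string | null): string {
  const meta = new MembershipMetadata({ about, avatarUri })
  // Serialize the protobuf message and prefix with 0x, as expected by buyMembership
  return `0x${Buffer.from(MembershipMetadata.encode(meta).finish()).toString('hex')}`
}
```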

+ 106 - 0
utils/migration-scripts/src/giza-olympia/SnapshotManager.ts

@@ -0,0 +1,106 @@
+import { Logger } from 'winston'
+import { createLogger } from '../logging'
+import { MAX_RESULTS_PER_QUERY, QueryNodeApi } from './giza-query-node/api'
+import {
+  ChannelCategoryFieldsFragment,
+  ChannelConnectionFieldsFragment,
+  ChannelFieldsFragment,
+  MembershipConnectionFieldsFragment,
+  MembershipFieldsFragment,
+  VideoCategoryFieldsFragment,
+  VideoConnectionFieldsFragment,
+  VideoFieldsFragment,
+} from './giza-query-node/generated/queries'
+
+export type SnapshotManagerParams = {
+  queryNodeApi: QueryNodeApi
+}
+
+export type ContentDirectorySnapshot = {
+  channelCategories: ChannelCategoryFieldsFragment[]
+  videoCategories: VideoCategoryFieldsFragment[]
+  channels: ChannelFieldsFragment[]
+  videos: VideoFieldsFragment[]
+}
+
+export type MembershipsSnapshot = {
+  members: MembershipFieldsFragment[]
+}
+
+export class SnapshotManager {
+  name = 'Snapshot Manager'
+  protected logger: Logger
+  protected queryNodeApi: QueryNodeApi
+
+  public constructor(params: SnapshotManagerParams) {
+    this.queryNodeApi = params.queryNodeApi
+    this.logger = createLogger(this.name)
+  }
+
+  private sortEntitiesByIds<T extends { id: string }>(entities: T[]): T[] {
+    return entities.sort((a, b) => parseInt(a.id) - parseInt(b.id))
+  }
+
+  public async getChannels(): Promise<ChannelFieldsFragment[]> {
+    let lastCursor: string | undefined
+    let currentPage: ChannelConnectionFieldsFragment
+    let channels: ChannelFieldsFragment[] = []
+    do {
+      this.logger.info(`Fetching a page of up to ${MAX_RESULTS_PER_QUERY} channels...`)
+      currentPage = await this.queryNodeApi.getChannelsPage(lastCursor)
+      channels = channels.concat(currentPage.edges.map((e) => e.node))
+      this.logger.info(`Total ${channels.length} channels fetched`)
+      lastCursor = currentPage.pageInfo.endCursor || undefined
+    } while (currentPage.pageInfo.hasNextPage)
+    this.logger.info('Finished fetching channels')
+
+    return this.sortEntitiesByIds(channels)
+  }
+
+  public async getVideos(): Promise<VideoFieldsFragment[]> {
+    let lastCursor: string | undefined
+    let currentPage: VideoConnectionFieldsFragment
+    let videos: VideoFieldsFragment[] = []
+    do {
+      this.logger.info(`Fetching a page of up to ${MAX_RESULTS_PER_QUERY} videos...`)
+      currentPage = await this.queryNodeApi.getVideosPage(lastCursor)
+      videos = videos.concat(currentPage.edges.map((e) => e.node))
+      this.logger.info(`Total ${videos.length} videos fetched`)
+      lastCursor = currentPage.pageInfo.endCursor || undefined
+    } while (currentPage.pageInfo.hasNextPage)
+    this.logger.info('Finished fetching videos')
+
+    return this.sortEntitiesByIds(videos)
+  }
+
+  public async getMemberships(): Promise<MembershipFieldsFragment[]> {
+    let lastCursor: string | undefined
+    let currentPage: MembershipConnectionFieldsFragment
+    let members: MembershipFieldsFragment[] = []
+    do {
+      this.logger.info(`Fetching a page of up to ${MAX_RESULTS_PER_QUERY} members...`)
+      currentPage = await this.queryNodeApi.getMembershipsPage(lastCursor)
+      members = members.concat(currentPage.edges.map((e) => e.node))
+      this.logger.info(`Total ${members.length} members fetched`)
+      lastCursor = currentPage.pageInfo.endCursor || undefined
+    } while (currentPage.pageInfo.hasNextPage)
+    this.logger.info('Finished fetching members')
+
+    return this.sortEntitiesByIds(members)
+  }
+
+  public async createContentDirectorySnapshot(): Promise<ContentDirectorySnapshot> {
+    const channelCategories = this.sortEntitiesByIds(await this.queryNodeApi.getChannelCategories())
+    const videoCategories = this.sortEntitiesByIds(await this.queryNodeApi.getVideoCategories())
+    const channels = await this.getChannels()
+    const videos = await this.getVideos()
+    return { channelCategories, videoCategories, videos, channels }
+  }
+
+  public async createMembershipsSnapshot(): Promise<MembershipsSnapshot> {
+    const members = await this.getMemberships()
+    return {
+      members,
+    }
+  }
+}
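
For orientation, a minimal usage sketch (not part of the changeset) showing how a memberships snapshot could be created and persisted with the classes added above. The query-node endpoint and output path are placeholders.

```ts
// Minimal usage sketch (not part of this changeset). The endpoint and output
// path are placeholders; point them at a real Giza query node and location.
import fs from 'fs'
import { QueryNodeApi } from './giza-query-node/api'
import { SnapshotManager } from './SnapshotManager'

async function main(): Promise<void> {
  const queryNodeApi = new QueryNodeApi('http://localhost:8081/graphql')
  const snapshotManager = new SnapshotManager({ queryNodeApi })
  // Fetches all memberships page-by-page and sorts them by id
  const snapshot = await snapshotManager.createMembershipsSnapshot()
  fs.writeFileSync('./membershipsSnapshot.json', JSON.stringify(snapshot))
}

main().catch((e) => {
  console.error(e)
  process.exit(1)
})
```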

+ 209 - 0
utils/migration-scripts/src/giza-olympia/UploadManager.ts

@@ -0,0 +1,209 @@
+import BN from 'bn.js'
+import urljoin from 'url-join'
+import axios from 'axios'
+import fs from 'fs'
+import path from 'path'
+import { BagId, DataObjectCreationParameters, DataObjectId, UploadParameters } from '@joystream/types/storage'
+import { IEvent } from '@polkadot/types/types'
+import { Bytes, Vec } from '@polkadot/types'
+import { Balance } from '@polkadot/types/interfaces'
+import FormData from 'form-data'
+import { RuntimeApi } from '../RuntimeApi'
+import { Logger } from 'winston'
+import { createLogger } from '../logging'
+import { AssetsBase } from './AssetsBase'
+import { StorageDataObjectFieldsFragment } from './giza-query-node/generated/queries'
+import { createType } from '@joystream/types'
+
+export type UploadManagerConfig = {
+  uploadSpBucketId: number
+  uploadSpEndpoint: string
+  dataDir: string
+  migrationStatePath: string
+}
+
+export type UploadManagerParams = {
+  api: RuntimeApi
+  config: UploadManagerConfig
+}
+
+export type UploadManagerLoadableParams = {
+  dataObjectFeePerMB: BN
+}
+
+export type AssetsToPrepare = {
+  [name: string]: StorageDataObjectFieldsFragment | undefined
+}
+
+export type PreparedAsset = {
+  params: DataObjectCreationParameters
+  index: number
+}
+
+export class UploadManager extends AssetsBase {
+  private api: RuntimeApi
+  protected config: UploadManagerConfig
+  public readonly dataObjectFeePerMB: BN
+  private queuedUploads: Set<string>
+  private isQueueProcessing = false
+  private queueFilePath: string
+  protected logger: Logger
+
+  public get queueSize(): number {
+    return this.queuedUploads.size
+  }
+
+  public static async create(params: UploadManagerParams): Promise<UploadManager> {
+    const { api } = params
+    const dataObjectFeePerMB = await api.query.storage.dataObjectPerMegabyteFee()
+    return new UploadManager(params, { dataObjectFeePerMB })
+  }
+
+  private constructor(params: UploadManagerParams, loadableParams: UploadManagerLoadableParams) {
+    super(params)
+    const { api, config } = params
+    const { dataObjectFeePerMB } = loadableParams
+    this.dataObjectFeePerMB = dataObjectFeePerMB
+    this.api = api
+    this.config = config
+    this.queuedUploads = new Set()
+    this.logger = createLogger('Upload Manager')
+    this.queueFilePath = path.join(this.config.migrationStatePath, `unprocessedUploads_${Date.now()}.json`)
+    this.logger.info(`Failed/pending uploads will be saved to ${this.queueFilePath}`)
+  }
+
+  public calcDataObjectsFee(params: DataObjectCreationParameters[]): BN {
+    const { dataObjectFeePerMB, api } = this
+    const deletionPrize = api.consts.storage.dataObjectDeletionPrize
+    const totalSize = params
+      .reduce((a, b) => {
+        return a.add(b.getField('size'))
+      }, new BN(0))
+      .toNumber()
+    const totalStorageFee = dataObjectFeePerMB.muln(Math.ceil(totalSize / 1024 / 1024))
+    const totalDeletionPrize = deletionPrize.muln(params.length)
+    return totalStorageFee.add(totalDeletionPrize)
+  }
+
+  private async uploadDataObject(bagId: string, dataObjectId: number, contentHash: string): Promise<void> {
+    const {
+      config: { uploadSpBucketId, uploadSpEndpoint },
+    } = this
+    const dataPath = this.assetPath(contentHash)
+    if (!fs.existsSync(dataPath)) {
+      throw new Error(`Cannot upload object: ${dataObjectId} (${contentHash}): ${dataPath} not found`)
+    }
+
+    const fileStream = fs.createReadStream(dataPath)
+    const formData = new FormData()
+    formData.append('dataObjectId', dataObjectId)
+    formData.append('storageBucketId', uploadSpBucketId)
+    formData.append('bagId', bagId)
+    formData.append('file', fileStream, { filename: path.basename(dataPath) })
+    let uploadSuccessful: boolean
+    try {
+      await axios({
+        method: 'POST',
+        url: urljoin(uploadSpEndpoint, 'api/v1/files'),
+        data: formData,
+        maxBodyLength: Infinity,
+        headers: {
+          'content-type': 'multipart/form-data',
+          ...formData.getHeaders(),
+        },
+      })
+      uploadSuccessful = true
+    } catch (e) {
+      uploadSuccessful = false
+      const msg = await this.reqErrorMessage(e)
+      this.logger.error(`Upload of object ${dataObjectId} (${contentHash}) to ${uploadSpEndpoint} failed: ${msg}`)
+    }
+
+    if (uploadSuccessful) {
+      this.finalizeUpload(bagId, dataObjectId, contentHash)
+    }
+  }
+
+  public async processQueuedUploads(): Promise<void> {
+    if (this.isQueueProcessing) {
+      throw new Error('Uploads queue is already being processed!')
+    }
+    this.isQueueProcessing = true
+    this.logger.info(`Uploading ${this.queueSize} data objects...`)
+    await Promise.all(
+      Array.from(this.queuedUploads).map((queuedUpload) => {
+        const [bagId, objectId, contentHash] = queuedUpload.split('|')
+        return this.uploadDataObject(bagId, parseInt(objectId), contentHash)
+      })
+    )
+    this.isQueueProcessing = false
+  }
+
+  public loadQueue(queueFilePath: string): void {
+    const queue: string[] = JSON.parse(fs.readFileSync(queueFilePath).toString())
+    this.queuedUploads = new Set(queue)
+  }
+
+  public saveQueue(): void {
+    fs.writeFileSync(this.queueFilePath, JSON.stringify(Array.from(this.queuedUploads)))
+    this.logger.debug(`${this.queueFilePath} updated`, { queueSize: this.queuedUploads.size })
+  }
+
+  private queueUpload(bagId: BagId, objectId: DataObjectId, contentHash: Bytes): void {
+    const bagIdStr = `dynamic:channel:${bagId.asType('Dynamic').asType('Channel').toString()}`
+    const contentHashStr = Buffer.from(contentHash.toU8a(true)).toString()
+    this.queuedUploads.add(`${bagIdStr}|${objectId.toString()}|${contentHashStr}`)
+    this.saveQueue()
+  }
+
+  private finalizeUpload(bagId: string, dataObjectId: number, contentHash: string) {
+    this.queuedUploads.delete(`${bagId}|${dataObjectId}|${contentHash}`)
+    this.saveQueue()
+  }
+
+  public queueUploadsFromEvents(events: IEvent<[Vec<DataObjectId>, UploadParameters, Balance]>[]): void {
+    let queuedUploads = 0
+    events.forEach(({ data: [objectIds, uploadParams] }) => {
+      objectIds.forEach((objectId, i) => {
+        this.queueUpload(uploadParams.bagId, objectId, uploadParams.objectCreationList[i].ipfsContentId)
+        ++queuedUploads
+      })
+    })
+    this.logger.info(`Added ${queuedUploads} new data object uploads to the upload queue`)
+  }
+
+  private async prepareAsset(
+    dataObject: StorageDataObjectFieldsFragment
+  ): Promise<DataObjectCreationParameters | undefined> {
+    if (await this.isAssetMissing(dataObject)) {
+      this.logger.warn(
+        `Data object ${dataObject.id} (${dataObject.ipfsHash}) missing in the data directory! Skipping...`
+      )
+      return undefined
+    }
+    return createType<DataObjectCreationParameters, 'DataObjectCreationParameters'>('DataObjectCreationParameters', {
+      ipfsContentId: dataObject.ipfsHash,
+      size: dataObject.size,
+    })
+  }
+
+  public async prepareAssets<T extends AssetsToPrepare>(
+    assetsToPrepare: T
+  ): Promise<{ [K in keyof T]?: PreparedAsset }> {
+    const preparedAssets: { [K in keyof T]?: PreparedAsset } = {}
+    let assetIndex = 0
+    await Promise.all(
+      Object.entries(assetsToPrepare).map(async ([assetName, dataObject]) => {
+        if (!dataObject) {
+          return
+        }
+        const params = await this.prepareAsset(dataObject)
+        if (!params) {
+          return
+        }
+        preparedAssets[assetName as keyof T] = { params, index: assetIndex++ }
+      })
+    )
+    return preparedAssets
+  }
+}
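
To make the fee logic in `calcDataObjectsFee` above easier to follow: the storage fee is charged per started MiB of the combined object size, and a fixed deletion prize is added per data object. The sketch below restates that formula on plain inputs and is illustrative only; the function name is hypothetical.

```ts
// Illustrative restatement of calcDataObjectsFee() above (not part of the diff):
// fee = feePerMB * ceil(totalSizeBytes / 1 MiB) + deletionPrize * objectCount
import BN from 'bn.js'

export function expectedDataObjectsFee(
  dataObjectFeePerMB: BN,
  dataObjectDeletionPrize: BN,
  objectSizesInBytes: number[]
): BN {
  const totalSize = objectSizesInBytes.reduce((sum, size) => sum + size, 0)
  const totalStorageFee = dataObjectFeePerMB.muln(Math.ceil(totalSize / 1024 / 1024))
  const totalDeletionPrize = dataObjectDeletionPrize.muln(objectSizesInBytes.length)
  return totalStorageFee.add(totalDeletionPrize)
}
```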

+ 20 - 0
utils/migration-scripts/src/giza-olympia/UploadMigration.ts

@@ -0,0 +1,20 @@
+import { BaseMigration, BaseMigrationConfig, BaseMigrationParams } from './BaseMigration'
+import { UploadManager } from './UploadManager'
+import { ContentDirectorySnapshot } from './SnapshotManager'
+
+export type UploadMigrationConfig = BaseMigrationConfig
+
+export type UploadMigrationParams = BaseMigrationParams<ContentDirectorySnapshot> & {
+  uploadManager: UploadManager
+}
+
+export abstract class UploadMigration extends BaseMigration<ContentDirectorySnapshot> {
+  protected config: UploadMigrationConfig
+  protected uploadManager: UploadManager
+
+  public constructor({ api, snapshot, config, uploadManager }: UploadMigrationParams) {
+    super({ api, snapshot, config })
+    this.config = config
+    this.uploadManager = uploadManager
+  }
+}

+ 78 - 0
utils/migration-scripts/src/giza-olympia/VideoCategoriesMigration.ts

@@ -0,0 +1,78 @@
+import { VideoCategoryMetadata } from '@joystream/metadata-protobuf'
+import { VideoCategoryId } from '@joystream/types/content'
+import { SubmittableExtrinsic } from '@polkadot/api/types'
+import { ISubmittableResult } from '@polkadot/types/types'
+import { Logger } from 'winston'
+import { createLogger } from '../logging'
+import { BaseMigrationParams, MigrationResult } from './BaseMigration'
+import { CategoryMigration } from './CategoryMigration'
+import { ContentDirectorySnapshot } from './SnapshotManager'
+
+export class VideoCategoriesMigration extends CategoryMigration {
+  name = 'Video categories migration'
+  protected logger: Logger
+
+  public constructor(params: BaseMigrationParams<ContentDirectorySnapshot>) {
+    super(params)
+    this.logger = createLogger(this.name)
+  }
+
+  protected async migrateBatch(
+    batchTx: SubmittableExtrinsic<'promise', ISubmittableResult>,
+    batch: { id: string }[]
+  ): Promise<void> {
+    const { api } = this
+    const result = await api.sendExtrinsic(this.sudo, batchTx)
+    const categoryCreatedEvents = api.findEvents(result, 'content', 'VideoCategoryCreated')
+    const createdCategoryIds: VideoCategoryId[] = categoryCreatedEvents.map((e) => e.data[1])
+
+    if (createdCategoryIds.length !== batch.length) {
+      this.extractFailedMigrations(result, batch)
+    }
+
+    let newCategoryIndex = 0
+    batch.forEach((c) => {
+      if (this.failedMigrations.has(parseInt(c.id))) {
+        return
+      }
+      const newCategoryId = createdCategoryIds[newCategoryIndex++]
+      this.idsMap.set(parseInt(c.id), newCategoryId.toNumber())
+    })
+    this.logger.info(`Video categories map created!`, Array.from(this.idsMap.entries()))
+    if (this.failedMigrations.size) {
+      throw new Error(`Failed to create some video categories: ${Array.from(this.failedMigrations).join(', ')}`)
+    }
+    this.logger.info(`All video categories successfully migrated!`)
+  }
+
+  public async run(): Promise<MigrationResult> {
+    await this.init()
+    const { api } = this
+    const allCategories = this.snapshot.videoCategories
+    const categoriesToMigrate = allCategories.filter((c) => !this.idsMap.has(parseInt(c.id)))
+
+    if (!categoriesToMigrate.length) {
+      this.logger.info('All video categories already migrated, skipping...')
+      return this.getResult()
+    }
+
+    this.logger.info(`Migrating ${categoriesToMigrate.length} video categories...`)
+    const txs = categoriesToMigrate
+      .sort((a, b) => parseInt(a.id) - parseInt(b.id))
+      .map((c) => {
+        const meta = new VideoCategoryMetadata({ name: c.name })
+        const metaBytes = '0x' + Buffer.from(VideoCategoryMetadata.encode(meta).finish()).toString('hex')
+        return api.tx.sudo.sudoAs(
+          this.contentLeadKey,
+          api.tx.content.createVideoCategory('Lead', {
+            meta: metaBytes,
+          })
+        )
+      })
+
+    const batchTx = api.tx.utility.batch(txs)
+    await this.migrateBatch(batchTx, categoriesToMigrate)
+
+    return this.getResult()
+  }
+}

+ 225 - 0
utils/migration-scripts/src/giza-olympia/VideosMigration.ts

@@ -0,0 +1,225 @@
+import { VideoMetadata } from '@joystream/metadata-protobuf'
+import { VideoFieldsFragment } from './giza-query-node/generated/queries'
+import _ from 'lodash'
+import { createType } from '@joystream/types'
+import Long from 'long'
+import { VideoCreationParameters, VideoId } from '@joystream/types/content'
+import moment from 'moment'
+import { UploadMigration, UploadMigrationConfig, UploadMigrationParams } from './UploadMigration'
+import { MigrationResult } from './BaseMigration'
+import { Logger } from 'winston'
+import { createLogger } from '../logging'
+import { SubmittableExtrinsic } from '@polkadot/api/types'
+
+export type VideosMigrationConfig = UploadMigrationConfig & {
+  videoBatchSize: number
+}
+
+export type VideosMigrationParams = UploadMigrationParams & {
+  config: VideosMigrationConfig
+  videoIds: number[]
+  channelsMap: Map<number, number>
+  forcedChannelOwner: { id: string; controllerAccount: string } | undefined
+  categoriesMap: Map<number, number>
+  membershipsMap: Map<number, number>
+}
+
+export class VideosMigration extends UploadMigration {
+  name = 'Videos migration'
+  protected config: VideosMigrationConfig
+  protected categoriesMap: Map<number, number>
+  protected channelsMap: Map<number, number>
+  protected membershipsMap: Map<number, number>
+  protected videoIds: number[]
+  protected forcedChannelOwner: { id: string; controllerAccount: string } | undefined
+  protected logger: Logger
+
+  public constructor(params: VideosMigrationParams) {
+    super(params)
+    this.config = params.config
+    this.channelsMap = params.channelsMap
+    this.videoIds = params.videoIds
+    this.forcedChannelOwner = params.forcedChannelOwner
+    this.categoriesMap = params.categoriesMap
+    this.membershipsMap = params.membershipsMap
+    this.logger = createLogger(this.name)
+  }
+
+  private getNewCategoryId(oldCategoryId: string | null | undefined): Long | undefined {
+    if (typeof oldCategoryId !== 'string') {
+      return undefined
+    }
+    const newCategoryId = this.categoriesMap.get(parseInt(oldCategoryId))
+    return newCategoryId ? Long.fromNumber(newCategoryId) : undefined
+  }
+
+  private getNewChannelId(oldChannelId: number): number {
+    const newChannelId = this.channelsMap.get(oldChannelId)
+    if (!newChannelId) {
+      throw new Error(`Missing new channel id for channel ${oldChannelId} in the channelsMap!`)
+    }
+    return newChannelId
+  }
+
+  protected async migrateBatch(tx: SubmittableExtrinsic<'promise'>, videos: VideoFieldsFragment[]): Promise<void> {
+    const { api } = this
+    const result = await api.sendExtrinsic(this.sudo, tx)
+    const videoCreatedEvents = api.findEvents(result, 'content', 'VideoCreated')
+    const newVideoIds: VideoId[] = videoCreatedEvents.map((e) => e.data[2])
+    if (videoCreatedEvents.length !== videos.length) {
+      this.extractFailedMigrations(result, videos)
+    }
+    const newVideoMapEntries: [number, number][] = []
+    let newVideoIdIndex = 0
+    videos.forEach(({ id }) => {
+      if (this.failedMigrations.has(parseInt(id))) {
+        return
+      }
+      const newVideoId = newVideoIds[newVideoIdIndex++].toNumber()
+      this.idsMap.set(parseInt(id), newVideoId)
+      newVideoMapEntries.push([parseInt(id), newVideoId])
+    })
+    if (newVideoMapEntries.length) {
+      this.logger.info('Video map entries added!', { newVideoMapEntries })
+      const dataObjectsUploadedEvents = api.findEvents(result, 'storage', 'DataObjectsUploaded')
+      this.uploadManager.queueUploadsFromEvents(dataObjectsUploadedEvents)
+    }
+  }
+
+  public async run(): Promise<MigrationResult> {
+    await this.init()
+    const {
+      api,
+      videoIds,
+      config: { videoBatchSize },
+    } = this
+    const idsToMigrate = videoIds.filter((id) => !this.idsMap.has(id)).sort((a, b) => a - b)
+    if (idsToMigrate.length < videoIds.length) {
+      const alreadyMigratedVideosNum = videoIds.length - idsToMigrate.length
+      this.logger.info(
+        (idsToMigrate.length ? `${alreadyMigratedVideosNum}/${videoIds.length}` : 'All') +
+          ' videos already migrated, skipping...'
+      )
+    }
+    while (idsToMigrate.length) {
+      const idsBatch = idsToMigrate.splice(0, videoBatchSize)
+      this.logger.info(`Preparing a batch of ${idsBatch.length} videos...`)
+      const videosBatch = this.snapshot.videos.filter((v) => idsBatch.includes(parseInt(v.id)))
+      if (videosBatch.length < idsBatch.length) {
+        this.logger.warn(
+          `Some videos were not found: ${_.difference(
+            idsBatch,
+            videosBatch.map((v) => parseInt(v.id))
+          )}`
+        )
+      }
+      const calls = _.flatten(await Promise.all(videosBatch.map((v) => this.prepareVideo(v))))
+      const batchTx = api.tx.utility.batch(calls)
+      await this.executeBatchMigration(batchTx, videosBatch)
+      await this.uploadManager.processQueuedUploads()
+    }
+    return this.getResult()
+  }
+
+  private getChannelOwner({
+    id,
+    ownerMember,
+  }: VideoFieldsFragment['channel']): Exclude<VideoFieldsFragment['channel']['ownerMember'], null | undefined> {
+    if (!ownerMember) {
+      throw new Error(`Channel ownerMember missing for channel ${id}`)
+    }
+
+    if (this.forcedChannelOwner) {
+      return this.forcedChannelOwner
+    }
+
+    const newMemberId = this.membershipsMap.get(parseInt(ownerMember.id))
+    if (newMemberId === undefined) {
+      throw new Error(`Missing member ${ownerMember.id} (owner of channel ${id}) in the memberships map!`)
+    }
+
+    return { ...ownerMember, id: newMemberId.toString() }
+  }
+
+  private getVideoData(video: VideoFieldsFragment) {
+    const { id, channel } = video
+
+    if (!channel) {
+      throw new Error(`Channel data missing for video: ${id}`)
+    }
+
+    return { ...video, channel: { ...channel, ownerMember: this.getChannelOwner(channel) } }
+  }
+
+  private async prepareVideo(video: VideoFieldsFragment) {
+    const { api } = this
+
+    const {
+      categoryId,
+      description,
+      duration,
+      hasMarketing,
+      isExplicit,
+      isPublic,
+      language,
+      license,
+      media,
+      mediaMetadata,
+      publishedBeforeJoystream,
+      thumbnailPhoto,
+      title,
+      channel: { ownerMember, id: oldChannelId },
+    } = this.getVideoData(video)
+
+    const channelId = this.getNewChannelId(parseInt(oldChannelId))
+
+    const assetsToPrepare = {
+      thumbnail: thumbnailPhoto || undefined,
+      video: media || undefined,
+    }
+    const preparedAssets = await this.uploadManager.prepareAssets(assetsToPrepare)
+    const meta = new VideoMetadata({
+      title,
+      description,
+      category: this.getNewCategoryId(categoryId),
+      duration,
+      hasMarketing,
+      isExplicit,
+      isPublic,
+      language: language?.iso,
+      license: license,
+      mediaPixelHeight: mediaMetadata?.pixelHeight,
+      mediaPixelWidth: mediaMetadata?.pixelWidth,
+      mediaType: mediaMetadata?.encoding,
+      publishedBeforeJoystream: {
+        isPublished: !!publishedBeforeJoystream,
+        date: moment(publishedBeforeJoystream).format('YYYY-MM-DD'),
+      },
+      thumbnailPhoto: preparedAssets.thumbnail?.index,
+      video: preparedAssets.video?.index,
+    })
+    const assetsParams = Object.values(preparedAssets)
+      .sort((a, b) => a.index - b.index)
+      .map((a) => a.params)
+    const videoCreationParams = createType<VideoCreationParameters, 'VideoCreationParameters'>(
+      'VideoCreationParameters',
+      {
+        assets: assetsParams.length
+          ? {
+              object_creation_list: assetsParams,
+              expected_data_size_fee: this.uploadManager.dataObjectFeePerMB,
+            }
+          : null,
+        meta: `0x${Buffer.from(VideoMetadata.encode(meta).finish()).toString('hex')}`,
+      }
+    )
+    const feesToCover = this.uploadManager.calcDataObjectsFee(assetsParams)
+    return [
+      api.tx.balances.transferKeepAlive(ownerMember.controllerAccount, feesToCover),
+      api.tx.sudo.sudoAs(
+        ownerMember.controllerAccount,
+        api.tx.content.createVideo({ Member: ownerMember.id }, channelId, videoCreationParams)
+      ),
+    ]
+  }
+}

+ 212 - 0
utils/migration-scripts/src/giza-olympia/giza-query-node/api.ts

@@ -0,0 +1,212 @@
+import {
+  ApolloClient,
+  NormalizedCacheObject,
+  HttpLink,
+  InMemoryCache,
+  DocumentNode,
+  isApolloError,
+  ApolloQueryResult,
+} from '@apollo/client/core'
+import { disableFragmentWarnings } from 'graphql-tag'
+import fetch from 'cross-fetch'
+import {
+  ChannelCategoryFieldsFragment,
+  GetChannelsCategories,
+  GetChannelsCategoriesQuery,
+  GetChannelsCategoriesQueryVariables,
+  GetVideoCategories,
+  GetVideoCategoriesQuery,
+  GetVideoCategoriesQueryVariables,
+  VideoCategoryFieldsFragment,
+  GetDataObjectsPage,
+  GetDataObjectsPageQuery,
+  GetDataObjectsPageQueryVariables,
+  StorageDataObjectConnectionFieldsFragment,
+  DistributionBucketFieldsFragment,
+  GetDistributorsByBagIdsQuery,
+  GetDistributorsByBagIdsQueryVariables,
+  GetDistributorsByBagIds,
+  ChannelConnectionFieldsFragment,
+  GetChannelsPageQuery,
+  GetChannelsPageQueryVariables,
+  GetChannelsPage,
+  VideoConnectionFieldsFragment,
+  GetVideosPageQuery,
+  GetVideosPageQueryVariables,
+  GetVideosPage,
+  MembershipConnectionFieldsFragment,
+  GetMembershipsPageQuery,
+  GetMembershipsPageQueryVariables,
+  GetMembershipsPage,
+} from './generated/queries'
+import { Logger } from 'winston'
+import { createLogger } from '../../logging'
+import { Maybe } from '../../sumer-giza/sumer-query-node/generated/schema'
+
+disableFragmentWarnings()
+
+export const MAX_RESULTS_PER_QUERY = 1000
+
+export class QueryNodeApi {
+  private endpoint: string
+  private apolloClient: ApolloClient<NormalizedCacheObject>
+  private retryAttempts: number
+  private retryIntervalMs: number
+  private logger: Logger
+
+  public constructor(endpoint: string, retryAttempts = 5, retryIntervalMs = 5000) {
+    this.endpoint = endpoint
+    this.retryAttempts = retryAttempts
+    this.retryIntervalMs = retryIntervalMs
+    this.apolloClient = new ApolloClient({
+      link: new HttpLink({ uri: endpoint, fetch }),
+      cache: new InMemoryCache({ addTypename: false }),
+      defaultOptions: { query: { fetchPolicy: 'no-cache', errorPolicy: 'all' } },
+    })
+    this.logger = createLogger('Query Node Api')
+  }
+
+  private async query<T>(queryFunc: () => Promise<ApolloQueryResult<T>>): Promise<ApolloQueryResult<T>> {
+    let attempts = 0
+    while (true) {
+      try {
+        const result = await queryFunc()
+        return result
+      } catch (e) {
+        if (e instanceof Error && isApolloError(e) && e.networkError) {
+          this.logger.error(`${this.endpoint} network error: ${e.networkError.message}`)
+          if (attempts++ > this.retryAttempts) {
+            throw new Error(`Maximum number of query retry attempts reached for ${this.endpoint}`)
+          }
+          this.logger.info(`Retrying in ${this.retryIntervalMs}ms...`)
+          await new Promise((resolve) => setTimeout(resolve, this.retryIntervalMs))
+        } else {
+          throw e
+        }
+      }
+    }
+  }
+
+  // Get entity by unique input
+  protected async uniqueEntityQuery<
+    QueryT extends { [k: string]: Maybe<Record<string, unknown>> | undefined },
+    VariablesT extends Record<string, unknown>
+  >(
+    query: DocumentNode,
+    variables: VariablesT,
+    resultKey: keyof QueryT
+  ): Promise<Required<QueryT>[keyof QueryT] | null> {
+    return (await this.apolloClient.query<QueryT, VariablesT>({ query, variables })).data[resultKey] || null
+  }
+
+  // Query-node: get multiple entities
+  protected async multipleEntitiesQuery<
+    QueryT extends { [k: string]: unknown[] },
+    VariablesT extends Record<string, unknown>
+  >(query: DocumentNode, variables: VariablesT, resultKey: keyof QueryT): Promise<QueryT[keyof QueryT]> {
+    const q = this.query<QueryT>(() => this.apolloClient.query<QueryT, VariablesT>({ query, variables }))
+    return (await q).data[resultKey]
+  }
+
+  public getChannelCategories(): Promise<ChannelCategoryFieldsFragment[]> {
+    return this.multipleEntitiesQuery<GetChannelsCategoriesQuery, GetChannelsCategoriesQueryVariables>(
+      GetChannelsCategories,
+      {},
+      'channelCategories'
+    )
+  }
+
+  public getVideoCategories(): Promise<VideoCategoryFieldsFragment[]> {
+    return this.multipleEntitiesQuery<GetVideoCategoriesQuery, GetVideoCategoriesQueryVariables>(
+      GetVideoCategories,
+      {},
+      'videoCategories'
+    )
+  }
+
+  public async getChannelsPage(
+    lastCursor?: string,
+    limit: number = MAX_RESULTS_PER_QUERY
+  ): Promise<ChannelConnectionFieldsFragment> {
+    const conn = await this.uniqueEntityQuery<GetChannelsPageQuery, GetChannelsPageQueryVariables>(
+      GetChannelsPage,
+      {
+        limit,
+        lastCursor,
+      },
+      'channelsConnection'
+    )
+    if (!conn) {
+      throw new Error('Cannot get channelsConnection!')
+    }
+
+    return conn
+  }
+
+  public async getVideosPage(
+    lastCursor?: string,
+    limit: number = MAX_RESULTS_PER_QUERY
+  ): Promise<VideoConnectionFieldsFragment> {
+    const conn = await this.uniqueEntityQuery<GetVideosPageQuery, GetVideosPageQueryVariables>(
+      GetVideosPage,
+      {
+        limit,
+        lastCursor,
+      },
+      'videosConnection'
+    )
+    if (!conn) {
+      throw new Error('Cannot get videosConnection!')
+    }
+
+    return conn
+  }
+
+  public async getStorageDataObjectsPage(
+    updatedAfter?: Date,
+    limit: number = MAX_RESULTS_PER_QUERY,
+    lastCursor?: string
+  ): Promise<StorageDataObjectConnectionFieldsFragment> {
+    const conn = await this.uniqueEntityQuery<GetDataObjectsPageQuery, GetDataObjectsPageQueryVariables>(
+      GetDataObjectsPage,
+      {
+        updatedAfter,
+        limit,
+        lastCursor,
+      },
+      'storageDataObjectsConnection'
+    )
+    if (!conn) {
+      throw new Error('Cannot get storageDataObjectsConnection!')
+    }
+
+    return conn
+  }
+
+  public async getDistributorsByBagIds(bagIds: string[]): Promise<DistributionBucketFieldsFragment[]> {
+    return this.multipleEntitiesQuery<GetDistributorsByBagIdsQuery, GetDistributorsByBagIdsQueryVariables>(
+      GetDistributorsByBagIds,
+      { ids: bagIds },
+      'distributionBuckets'
+    )
+  }
+
+  public async getMembershipsPage(
+    lastCursor?: string,
+    limit: number = MAX_RESULTS_PER_QUERY
+  ): Promise<MembershipConnectionFieldsFragment> {
+    const conn = await this.uniqueEntityQuery<GetMembershipsPageQuery, GetMembershipsPageQueryVariables>(
+      GetMembershipsPage,
+      {
+        limit,
+        lastCursor,
+      },
+      'membershipsConnection'
+    )
+    if (!conn) {
+      throw new Error('Cannot get membershipsConnection!')
+    }
+
+    return conn
+  }
+}
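
As an illustration of the cursor-based pagination exposed by the `get*Page` methods above (the same pattern `SnapshotManager` relies on), the sketch below drains all channel pages. It is not part of the changeset, and the import paths assume the sketch sits next to `api.ts`.

```ts
// Illustrative sketch (not part of this changeset): drain all channel pages
// using the cursor-based connection API exposed above.
import { QueryNodeApi } from './api'
import { ChannelFieldsFragment } from './generated/queries'

export async function fetchAllChannels(endpoint: string): Promise<ChannelFieldsFragment[]> {
  const api = new QueryNodeApi(endpoint)
  const channels: ChannelFieldsFragment[] = []
  let lastCursor: string | undefined
  let hasNextPage = true
  while (hasNextPage) {
    const page = await api.getChannelsPage(lastCursor)
    channels.push(...page.edges.map((e) => e.node))
    lastCursor = page.pageInfo.endCursor || undefined
    hasNextPage = page.pageInfo.hasNextPage
  }
  return channels
}
```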

+ 33 - 0
utils/migration-scripts/src/giza-olympia/giza-query-node/codegen.yml

@@ -0,0 +1,33 @@
+# Paths are relative to the migration-scripts root directory
+overwrite: true
+
+schema: https://hydra.joystream.org/graphql
+
+documents:
+  - 'src/giza-olympia/giza-query-node/queries/*.graphql'
+
+config:
+  scalars:
+    Date: Date
+  preResolveTypes: true # avoid using Pick
+  skipTypename: true # skip __typename field in typings unless it's part of the query
+
+generates:
+  src/giza-olympia/giza-query-node/generated/schema.ts:
+    hooks:
+      afterOneFileWrite:
+        - prettier --write
+        - eslint --fix
+    plugins:
+      - typescript
+  src/giza-olympia/giza-query-node/generated/queries.ts:
+    preset: import-types
+    presetConfig:
+      typesPath: ./schema
+    hooks:
+      afterOneFileWrite:
+        - prettier --write
+        - eslint --fix
+    plugins:
+      - typescript-operations
+      - typescript-document-nodes

+ 386 - 0
utils/migration-scripts/src/giza-olympia/giza-query-node/generated/queries.ts

@@ -0,0 +1,386 @@
+import * as Types from './schema'
+
+import gql from 'graphql-tag'
+export type VideoCategoryFieldsFragment = { id: string; name?: Types.Maybe<string> }
+
+export type ChannelCategoryFieldsFragment = { id: string; name?: Types.Maybe<string> }
+
+export type StorageDataObjectFieldsFragment = {
+  id: string
+  updatedAt?: Types.Maybe<any>
+  ipfsHash: string
+  isAccepted: boolean
+  size: any
+  storageBagId: string
+}
+
+export type StorageDataObjectConnectionFieldsFragment = {
+  edges: Array<{ node: StorageDataObjectFieldsFragment }>
+  pageInfo: { hasNextPage: boolean; endCursor?: Types.Maybe<string> }
+}
+
+export type VideoFieldsFragment = {
+  id: string
+  categoryId?: Types.Maybe<string>
+  title?: Types.Maybe<string>
+  description?: Types.Maybe<string>
+  duration?: Types.Maybe<number>
+  hasMarketing?: Types.Maybe<boolean>
+  publishedBeforeJoystream?: Types.Maybe<any>
+  isPublic?: Types.Maybe<boolean>
+  isCensored: boolean
+  isExplicit?: Types.Maybe<boolean>
+  isFeatured: boolean
+  thumbnailPhoto?: Types.Maybe<StorageDataObjectFieldsFragment>
+  language?: Types.Maybe<{ iso: string }>
+  license?: Types.Maybe<{
+    code?: Types.Maybe<number>
+    attribution?: Types.Maybe<string>
+    customText?: Types.Maybe<string>
+  }>
+  media?: Types.Maybe<StorageDataObjectFieldsFragment>
+  mediaMetadata?: Types.Maybe<{
+    pixelWidth?: Types.Maybe<number>
+    pixelHeight?: Types.Maybe<number>
+    size?: Types.Maybe<any>
+    encoding?: Types.Maybe<{
+      codecName?: Types.Maybe<string>
+      container?: Types.Maybe<string>
+      mimeMediaType?: Types.Maybe<string>
+    }>
+  }>
+  channel: { id: string; ownerMember?: Types.Maybe<{ id: string; controllerAccount: string }> }
+}
+
+export type VideoConnectionFieldsFragment = {
+  edges: Array<{ node: VideoFieldsFragment }>
+  pageInfo: { hasNextPage: boolean; endCursor?: Types.Maybe<string> }
+}
+
+export type ChannelFieldsFragment = {
+  id: string
+  categoryId?: Types.Maybe<string>
+  rewardAccount?: Types.Maybe<string>
+  title?: Types.Maybe<string>
+  description?: Types.Maybe<string>
+  isPublic?: Types.Maybe<boolean>
+  isCensored: boolean
+  ownerMember?: Types.Maybe<{ id: string; controllerAccount: string }>
+  coverPhoto?: Types.Maybe<StorageDataObjectFieldsFragment>
+  avatarPhoto?: Types.Maybe<StorageDataObjectFieldsFragment>
+  language?: Types.Maybe<{ iso: string }>
+  videos: Array<{ id: string }>
+  collaborators: Array<{ id: string }>
+}
+
+export type ChannelConnectionFieldsFragment = {
+  edges: Array<{ node: ChannelFieldsFragment }>
+  pageInfo: { hasNextPage: boolean; endCursor?: Types.Maybe<string> }
+}
+
+export type DistributionBucketFieldsFragment = {
+  distributing: boolean
+  bags: Array<{ id: string }>
+  operators: Array<{
+    status: Types.DistributionBucketOperatorStatus
+    metadata?: Types.Maybe<{ nodeEndpoint?: Types.Maybe<string> }>
+  }>
+}
+
+export type MembershipFieldsFragment = {
+  id: string
+  handle: string
+  avatarUri?: Types.Maybe<string>
+  about?: Types.Maybe<string>
+  controllerAccount: string
+  rootAccount: string
+}
+
+export type MembershipConnectionFieldsFragment = {
+  edges: Array<{ node: MembershipFieldsFragment }>
+  pageInfo: { hasNextPage: boolean; endCursor?: Types.Maybe<string> }
+}
+
+export type GetVideoCategoriesQueryVariables = Types.Exact<{ [key: string]: never }>
+
+export type GetVideoCategoriesQuery = { videoCategories: Array<VideoCategoryFieldsFragment> }
+
+export type GetChannelsCategoriesQueryVariables = Types.Exact<{ [key: string]: never }>
+
+export type GetChannelsCategoriesQuery = { channelCategories: Array<ChannelCategoryFieldsFragment> }
+
+export type GetDistributorsByBagIdsQueryVariables = Types.Exact<{
+  ids?: Types.Maybe<Array<Types.Scalars['ID']> | Types.Scalars['ID']>
+}>
+
+export type GetDistributorsByBagIdsQuery = { distributionBuckets: Array<DistributionBucketFieldsFragment> }
+
+export type GetDataObjectsPageQueryVariables = Types.Exact<{
+  updatedAfter?: Types.Maybe<Types.Scalars['DateTime']>
+  limit: Types.Scalars['Int']
+  lastCursor?: Types.Maybe<Types.Scalars['String']>
+}>
+
+export type GetDataObjectsPageQuery = { storageDataObjectsConnection: StorageDataObjectConnectionFieldsFragment }
+
+export type GetChannelsPageQueryVariables = Types.Exact<{
+  limit: Types.Scalars['Int']
+  lastCursor?: Types.Maybe<Types.Scalars['String']>
+}>
+
+export type GetChannelsPageQuery = { channelsConnection: ChannelConnectionFieldsFragment }
+
+export type GetVideosPageQueryVariables = Types.Exact<{
+  limit: Types.Scalars['Int']
+  lastCursor?: Types.Maybe<Types.Scalars['String']>
+}>
+
+export type GetVideosPageQuery = { videosConnection: VideoConnectionFieldsFragment }
+
+export type GetMembershipsPageQueryVariables = Types.Exact<{
+  limit: Types.Scalars['Int']
+  lastCursor?: Types.Maybe<Types.Scalars['String']>
+}>
+
+export type GetMembershipsPageQuery = { membershipsConnection: MembershipConnectionFieldsFragment }
+
+export const VideoCategoryFields = gql`
+  fragment VideoCategoryFields on VideoCategory {
+    id
+    name
+  }
+`
+export const ChannelCategoryFields = gql`
+  fragment ChannelCategoryFields on ChannelCategory {
+    id
+    name
+  }
+`
+export const StorageDataObjectFields = gql`
+  fragment StorageDataObjectFields on StorageDataObject {
+    id
+    updatedAt
+    ipfsHash
+    isAccepted
+    size
+    storageBagId
+  }
+`
+export const StorageDataObjectConnectionFields = gql`
+  fragment StorageDataObjectConnectionFields on StorageDataObjectConnection {
+    edges {
+      node {
+        ...StorageDataObjectFields
+      }
+    }
+    pageInfo {
+      hasNextPage
+      endCursor
+    }
+  }
+  ${StorageDataObjectFields}
+`
+export const VideoFields = gql`
+  fragment VideoFields on Video {
+    id
+    categoryId
+    title
+    description
+    duration
+    thumbnailPhoto {
+      ...StorageDataObjectFields
+    }
+    language {
+      iso
+    }
+    hasMarketing
+    publishedBeforeJoystream
+    isPublic
+    isCensored
+    isExplicit
+    license {
+      code
+      attribution
+      customText
+    }
+    media {
+      ...StorageDataObjectFields
+    }
+    mediaMetadata {
+      encoding {
+        codecName
+        container
+        mimeMediaType
+      }
+      pixelWidth
+      pixelHeight
+      size
+    }
+    isFeatured
+    channel {
+      id
+      ownerMember {
+        id
+        controllerAccount
+      }
+    }
+  }
+  ${StorageDataObjectFields}
+`
+export const VideoConnectionFields = gql`
+  fragment VideoConnectionFields on VideoConnection {
+    edges {
+      node {
+        ...VideoFields
+      }
+    }
+    pageInfo {
+      hasNextPage
+      endCursor
+    }
+  }
+  ${VideoFields}
+`
+export const ChannelFields = gql`
+  fragment ChannelFields on Channel {
+    id
+    ownerMember {
+      id
+      controllerAccount
+    }
+    categoryId
+    rewardAccount
+    title
+    description
+    coverPhoto {
+      ...StorageDataObjectFields
+    }
+    avatarPhoto {
+      ...StorageDataObjectFields
+    }
+    isPublic
+    isCensored
+    language {
+      iso
+    }
+    videos {
+      id
+    }
+    collaborators {
+      id
+    }
+  }
+  ${StorageDataObjectFields}
+`
+export const ChannelConnectionFields = gql`
+  fragment ChannelConnectionFields on ChannelConnection {
+    edges {
+      node {
+        ...ChannelFields
+      }
+    }
+    pageInfo {
+      hasNextPage
+      endCursor
+    }
+  }
+  ${ChannelFields}
+`
+export const DistributionBucketFields = gql`
+  fragment DistributionBucketFields on DistributionBucket {
+    distributing
+    bags {
+      id
+    }
+    operators {
+      status
+      metadata {
+        nodeEndpoint
+      }
+    }
+  }
+`
+export const MembershipFields = gql`
+  fragment MembershipFields on Membership {
+    id
+    handle
+    avatarUri
+    about
+    controllerAccount
+    rootAccount
+  }
+`
+export const MembershipConnectionFields = gql`
+  fragment MembershipConnectionFields on MembershipConnection {
+    edges {
+      node {
+        ...MembershipFields
+      }
+    }
+    pageInfo {
+      hasNextPage
+      endCursor
+    }
+  }
+  ${MembershipFields}
+`
+export const GetVideoCategories = gql`
+  query getVideoCategories {
+    videoCategories {
+      ...VideoCategoryFields
+    }
+  }
+  ${VideoCategoryFields}
+`
+export const GetChannelsCategories = gql`
+  query getChannelsCategories {
+    channelCategories {
+      ...ChannelCategoryFields
+    }
+  }
+  ${ChannelCategoryFields}
+`
+export const GetDistributorsByBagIds = gql`
+  query getDistributorsByBagIds($ids: [ID!]) {
+    distributionBuckets(where: { bags_some: { id_in: $ids }, distributing_eq: true }) {
+      ...DistributionBucketFields
+    }
+  }
+  ${DistributionBucketFields}
+`
+export const GetDataObjectsPage = gql`
+  query getDataObjectsPage($updatedAfter: DateTime, $limit: Int!, $lastCursor: String) {
+    storageDataObjectsConnection(
+      where: { updatedAt_gt: $updatedAfter, isAccepted_eq: true }
+      first: $limit
+      after: $lastCursor
+    ) {
+      ...StorageDataObjectConnectionFields
+    }
+  }
+  ${StorageDataObjectConnectionFields}
+`
+export const GetChannelsPage = gql`
+  query getChannelsPage($limit: Int!, $lastCursor: String) {
+    channelsConnection(first: $limit, after: $lastCursor) {
+      ...ChannelConnectionFields
+    }
+  }
+  ${ChannelConnectionFields}
+`
+export const GetVideosPage = gql`
+  query getVideosPage($limit: Int!, $lastCursor: String) {
+    videosConnection(first: $limit, after: $lastCursor) {
+      ...VideoConnectionFields
+    }
+  }
+  ${VideoConnectionFields}
+`
+export const GetMembershipsPage = gql`
+  query getMembershipsPage($limit: Int!, $lastCursor: String) {
+    membershipsConnection(first: $limit, after: $lastCursor) {
+      ...MembershipConnectionFields
+    }
+  }
+  ${MembershipConnectionFields}
+`

+ 3715 - 0
utils/migration-scripts/src/giza-olympia/giza-query-node/generated/schema.ts

@@ -0,0 +1,3715 @@
+export type Maybe<T> = T | null
+export type Exact<T extends { [key: string]: unknown }> = { [K in keyof T]: T[K] }
+export type MakeOptional<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]?: Maybe<T[SubKey]> }
+export type MakeMaybe<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]: Maybe<T[SubKey]> }
+/** All built-in and custom scalars, mapped to their actual values */
+export type Scalars = {
+  ID: string
+  String: string
+  Boolean: boolean
+  Int: number
+  Float: number
+  /** The javascript `Date` as string. Type represents date and time as the ISO Date string. */
+  DateTime: any
+  /** GraphQL representation of BigInt */
+  BigInt: any
+  /** The `JSONObject` scalar type represents JSON objects as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf). */
+  JSONObject: any
+}
+
+export type BaseGraphQlObject = {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+}
+
+export type BaseModel = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+}
+
+export type BaseModelUuid = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+}
+
+export type BaseWhereInput = {
+  id_eq?: Maybe<Scalars['String']>
+  id_in?: Maybe<Array<Scalars['String']>>
+  createdAt_eq?: Maybe<Scalars['String']>
+  createdAt_lt?: Maybe<Scalars['String']>
+  createdAt_lte?: Maybe<Scalars['String']>
+  createdAt_gt?: Maybe<Scalars['String']>
+  createdAt_gte?: Maybe<Scalars['String']>
+  createdById_eq?: Maybe<Scalars['String']>
+  updatedAt_eq?: Maybe<Scalars['String']>
+  updatedAt_lt?: Maybe<Scalars['String']>
+  updatedAt_lte?: Maybe<Scalars['String']>
+  updatedAt_gt?: Maybe<Scalars['String']>
+  updatedAt_gte?: Maybe<Scalars['String']>
+  updatedById_eq?: Maybe<Scalars['String']>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['String']>
+  deletedAt_lt?: Maybe<Scalars['String']>
+  deletedAt_lte?: Maybe<Scalars['String']>
+  deletedAt_gt?: Maybe<Scalars['String']>
+  deletedAt_gte?: Maybe<Scalars['String']>
+  deletedById_eq?: Maybe<Scalars['String']>
+}
+
+export type Channel = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  ownerMember?: Maybe<Membership>
+  ownerMemberId?: Maybe<Scalars['String']>
+  ownerCuratorGroup?: Maybe<CuratorGroup>
+  ownerCuratorGroupId?: Maybe<Scalars['String']>
+  category?: Maybe<ChannelCategory>
+  categoryId?: Maybe<Scalars['String']>
+  /** Reward account where revenue is sent if set. */
+  rewardAccount?: Maybe<Scalars['String']>
+  /** The title of the Channel */
+  title?: Maybe<Scalars['String']>
+  /** The description of a Channel */
+  description?: Maybe<Scalars['String']>
+  coverPhoto?: Maybe<StorageDataObject>
+  coverPhotoId?: Maybe<Scalars['String']>
+  avatarPhoto?: Maybe<StorageDataObject>
+  avatarPhotoId?: Maybe<Scalars['String']>
+  /** Flag signaling whether a channel is public. */
+  isPublic?: Maybe<Scalars['Boolean']>
+  /** Flag signaling whether a channel is censored. */
+  isCensored: Scalars['Boolean']
+  language?: Maybe<Language>
+  languageId?: Maybe<Scalars['String']>
+  videos: Array<Video>
+  /** Number of the block the channel was created in */
+  createdInBlock: Scalars['Int']
+  collaborators: Array<Membership>
+}
+
+export type ChannelCategoriesByNameFtsOutput = {
+  item: ChannelCategoriesByNameSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type ChannelCategoriesByNameSearchResult = ChannelCategory
+
+/** Category of media channel */
+export type ChannelCategory = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** The name of the category */
+  name?: Maybe<Scalars['String']>
+  channels: Array<Channel>
+  createdInBlock: Scalars['Int']
+}
+
+export type ChannelCategoryConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<ChannelCategoryEdge>
+  pageInfo: PageInfo
+}
+
+export type ChannelCategoryCreateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock: Scalars['Float']
+}
+
+export type ChannelCategoryEdge = {
+  node: ChannelCategory
+  cursor: Scalars['String']
+}
+
+export enum ChannelCategoryOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NameAsc = 'name_ASC',
+  NameDesc = 'name_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type ChannelCategoryUpdateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type ChannelCategoryWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  name_eq?: Maybe<Scalars['String']>
+  name_contains?: Maybe<Scalars['String']>
+  name_startsWith?: Maybe<Scalars['String']>
+  name_endsWith?: Maybe<Scalars['String']>
+  name_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  channels_none?: Maybe<ChannelWhereInput>
+  channels_some?: Maybe<ChannelWhereInput>
+  channels_every?: Maybe<ChannelWhereInput>
+  AND?: Maybe<Array<ChannelCategoryWhereInput>>
+  OR?: Maybe<Array<ChannelCategoryWhereInput>>
+}
+
+export type ChannelCategoryWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type ChannelConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<ChannelEdge>
+  pageInfo: PageInfo
+}
+
+export type ChannelCreateInput = {
+  ownerMember?: Maybe<Scalars['ID']>
+  ownerCuratorGroup?: Maybe<Scalars['ID']>
+  category?: Maybe<Scalars['ID']>
+  rewardAccount?: Maybe<Scalars['String']>
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  coverPhoto?: Maybe<Scalars['ID']>
+  avatarPhoto?: Maybe<Scalars['ID']>
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored: Scalars['Boolean']
+  language?: Maybe<Scalars['ID']>
+  createdInBlock: Scalars['Float']
+}
+
+export type ChannelEdge = {
+  node: Channel
+  cursor: Scalars['String']
+}
+
+export enum ChannelOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  OwnerMemberAsc = 'ownerMember_ASC',
+  OwnerMemberDesc = 'ownerMember_DESC',
+  OwnerCuratorGroupAsc = 'ownerCuratorGroup_ASC',
+  OwnerCuratorGroupDesc = 'ownerCuratorGroup_DESC',
+  CategoryAsc = 'category_ASC',
+  CategoryDesc = 'category_DESC',
+  RewardAccountAsc = 'rewardAccount_ASC',
+  RewardAccountDesc = 'rewardAccount_DESC',
+  TitleAsc = 'title_ASC',
+  TitleDesc = 'title_DESC',
+  DescriptionAsc = 'description_ASC',
+  DescriptionDesc = 'description_DESC',
+  CoverPhotoAsc = 'coverPhoto_ASC',
+  CoverPhotoDesc = 'coverPhoto_DESC',
+  AvatarPhotoAsc = 'avatarPhoto_ASC',
+  AvatarPhotoDesc = 'avatarPhoto_DESC',
+  IsPublicAsc = 'isPublic_ASC',
+  IsPublicDesc = 'isPublic_DESC',
+  IsCensoredAsc = 'isCensored_ASC',
+  IsCensoredDesc = 'isCensored_DESC',
+  LanguageAsc = 'language_ASC',
+  LanguageDesc = 'language_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type ChannelUpdateInput = {
+  ownerMember?: Maybe<Scalars['ID']>
+  ownerCuratorGroup?: Maybe<Scalars['ID']>
+  category?: Maybe<Scalars['ID']>
+  rewardAccount?: Maybe<Scalars['String']>
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  coverPhoto?: Maybe<Scalars['ID']>
+  avatarPhoto?: Maybe<Scalars['ID']>
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored?: Maybe<Scalars['Boolean']>
+  language?: Maybe<Scalars['ID']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type ChannelWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  rewardAccount_eq?: Maybe<Scalars['String']>
+  rewardAccount_contains?: Maybe<Scalars['String']>
+  rewardAccount_startsWith?: Maybe<Scalars['String']>
+  rewardAccount_endsWith?: Maybe<Scalars['String']>
+  rewardAccount_in?: Maybe<Array<Scalars['String']>>
+  title_eq?: Maybe<Scalars['String']>
+  title_contains?: Maybe<Scalars['String']>
+  title_startsWith?: Maybe<Scalars['String']>
+  title_endsWith?: Maybe<Scalars['String']>
+  title_in?: Maybe<Array<Scalars['String']>>
+  description_eq?: Maybe<Scalars['String']>
+  description_contains?: Maybe<Scalars['String']>
+  description_startsWith?: Maybe<Scalars['String']>
+  description_endsWith?: Maybe<Scalars['String']>
+  description_in?: Maybe<Array<Scalars['String']>>
+  isPublic_eq?: Maybe<Scalars['Boolean']>
+  isPublic_in?: Maybe<Array<Scalars['Boolean']>>
+  isCensored_eq?: Maybe<Scalars['Boolean']>
+  isCensored_in?: Maybe<Array<Scalars['Boolean']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  ownerMember?: Maybe<MembershipWhereInput>
+  ownerCuratorGroup?: Maybe<CuratorGroupWhereInput>
+  category?: Maybe<ChannelCategoryWhereInput>
+  coverPhoto?: Maybe<StorageDataObjectWhereInput>
+  avatarPhoto?: Maybe<StorageDataObjectWhereInput>
+  language?: Maybe<LanguageWhereInput>
+  videos_none?: Maybe<VideoWhereInput>
+  videos_some?: Maybe<VideoWhereInput>
+  videos_every?: Maybe<VideoWhereInput>
+  collaborators_none?: Maybe<MembershipWhereInput>
+  collaborators_some?: Maybe<MembershipWhereInput>
+  collaborators_every?: Maybe<MembershipWhereInput>
+  AND?: Maybe<Array<ChannelWhereInput>>
+  OR?: Maybe<Array<ChannelWhereInput>>
+}
+
+export type ChannelWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum Continent {
+  Af = 'AF',
+  Na = 'NA',
+  Oc = 'OC',
+  An = 'AN',
+  As = 'AS',
+  Eu = 'EU',
+  Sa = 'SA',
+}
+
+export type CuratorGroup = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Curators belonging to this group */
+  curatorIds: Array<Scalars['Int']>
+  /** Is group active or not */
+  isActive: Scalars['Boolean']
+  channels: Array<Channel>
+}
+
+export type CuratorGroupConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<CuratorGroupEdge>
+  pageInfo: PageInfo
+}
+
+export type CuratorGroupCreateInput = {
+  curatorIds: Array<Scalars['Int']>
+  isActive: Scalars['Boolean']
+}
+
+export type CuratorGroupEdge = {
+  node: CuratorGroup
+  cursor: Scalars['String']
+}
+
+export enum CuratorGroupOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsActiveAsc = 'isActive_ASC',
+  IsActiveDesc = 'isActive_DESC',
+}
+
+export type CuratorGroupUpdateInput = {
+  curatorIds?: Maybe<Array<Scalars['Int']>>
+  isActive?: Maybe<Scalars['Boolean']>
+}
+
+export type CuratorGroupWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  curatorIds_containsAll?: Maybe<Array<Scalars['Int']>>
+  curatorIds_containsNone?: Maybe<Array<Scalars['Int']>>
+  curatorIds_containsAny?: Maybe<Array<Scalars['Int']>>
+  isActive_eq?: Maybe<Scalars['Boolean']>
+  isActive_in?: Maybe<Array<Scalars['Boolean']>>
+  channels_none?: Maybe<ChannelWhereInput>
+  channels_some?: Maybe<ChannelWhereInput>
+  channels_every?: Maybe<ChannelWhereInput>
+  AND?: Maybe<Array<CuratorGroupWhereInput>>
+  OR?: Maybe<Array<CuratorGroupWhereInput>>
+}
+
+export type CuratorGroupWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DataObjectType =
+  | DataObjectTypeChannelAvatar
+  | DataObjectTypeChannelCoverPhoto
+  | DataObjectTypeVideoMedia
+  | DataObjectTypeVideoThumbnail
+  | DataObjectTypeUnknown
+
+export type DataObjectTypeChannelAvatar = {
+  /** Related channel entity */
+  channel?: Maybe<Channel>
+}
+
+export type DataObjectTypeChannelCoverPhoto = {
+  /** Related channel entity */
+  channel?: Maybe<Channel>
+}
+
+export type DataObjectTypeUnknown = {
+  phantom?: Maybe<Scalars['Int']>
+}
+
+export type DataObjectTypeVideoMedia = {
+  /** Related video entity */
+  video?: Maybe<Video>
+}
+
+export type DataObjectTypeVideoThumbnail = {
+  /** Related video entity */
+  video?: Maybe<Video>
+}
+
+export type DeleteResponse = {
+  id: Scalars['ID']
+}
+
+export type DistributionBucket = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  family: DistributionBucketFamily
+  familyId: Scalars['String']
+  /** Bucket index within the family */
+  bucketIndex: Scalars['Int']
+  operators: Array<DistributionBucketOperator>
+  /** Whether the bucket is accepting any new bags */
+  acceptingNewBags: Scalars['Boolean']
+  /** Whether the bucket is currently distributing content */
+  distributing: Scalars['Boolean']
+  bags: Array<StorageBag>
+}
+
+export type DistributionBucketConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketCreateInput = {
+  family: Scalars['ID']
+  bucketIndex: Scalars['Float']
+  acceptingNewBags: Scalars['Boolean']
+  distributing: Scalars['Boolean']
+}
+
+export type DistributionBucketEdge = {
+  node: DistributionBucket
+  cursor: Scalars['String']
+}
+
+export type DistributionBucketFamily = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  metadata?: Maybe<DistributionBucketFamilyMetadata>
+  metadataId?: Maybe<Scalars['String']>
+  buckets: Array<DistributionBucket>
+}
+
+export type DistributionBucketFamilyConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketFamilyEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketFamilyCreateInput = {
+  metadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketFamilyEdge = {
+  node: DistributionBucketFamily
+  cursor: Scalars['String']
+}
+
+export type DistributionBucketFamilyGeographicArea = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Geographical area (continent / country / subdivision) */
+  area: GeographicalArea
+  distributionBucketFamilyMetadata: DistributionBucketFamilyMetadata
+  distributionBucketFamilyMetadataId: Scalars['String']
+}
+
+export type DistributionBucketFamilyGeographicAreaConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketFamilyGeographicAreaEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketFamilyGeographicAreaCreateInput = {
+  area: Scalars['JSONObject']
+  distributionBucketFamilyMetadata: Scalars['ID']
+}
+
+export type DistributionBucketFamilyGeographicAreaEdge = {
+  node: DistributionBucketFamilyGeographicArea
+  cursor: Scalars['String']
+}
+
+export enum DistributionBucketFamilyGeographicAreaOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  DistributionBucketFamilyMetadataAsc = 'distributionBucketFamilyMetadata_ASC',
+  DistributionBucketFamilyMetadataDesc = 'distributionBucketFamilyMetadata_DESC',
+}
+
+export type DistributionBucketFamilyGeographicAreaUpdateInput = {
+  area?: Maybe<Scalars['JSONObject']>
+  distributionBucketFamilyMetadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketFamilyGeographicAreaWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  area_json?: Maybe<Scalars['JSONObject']>
+  distributionBucketFamilyMetadata?: Maybe<DistributionBucketFamilyMetadataWhereInput>
+  AND?: Maybe<Array<DistributionBucketFamilyGeographicAreaWhereInput>>
+  OR?: Maybe<Array<DistributionBucketFamilyGeographicAreaWhereInput>>
+}
+
+export type DistributionBucketFamilyGeographicAreaWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DistributionBucketFamilyMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Name of the geographical region covered by the family (e.g. us-east-1) */
+  region?: Maybe<Scalars['String']>
+  /** Optional, more specific description of the region covered by the family */
+  description?: Maybe<Scalars['String']>
+  areas: Array<DistributionBucketFamilyGeographicArea>
+  /** List of targets (hosts/IPs) best suited for latency measurements for the family */
+  latencyTestTargets?: Maybe<Array<Scalars['String']>>
+  distributionbucketfamilymetadata?: Maybe<Array<DistributionBucketFamily>>
+}
+
+export type DistributionBucketFamilyMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketFamilyMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketFamilyMetadataCreateInput = {
+  region?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  latencyTestTargets?: Maybe<Array<Scalars['String']>>
+}
+
+export type DistributionBucketFamilyMetadataEdge = {
+  node: DistributionBucketFamilyMetadata
+  cursor: Scalars['String']
+}
+
+export enum DistributionBucketFamilyMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  RegionAsc = 'region_ASC',
+  RegionDesc = 'region_DESC',
+  DescriptionAsc = 'description_ASC',
+  DescriptionDesc = 'description_DESC',
+}
+
+export type DistributionBucketFamilyMetadataUpdateInput = {
+  region?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  latencyTestTargets?: Maybe<Array<Scalars['String']>>
+}
+
+export type DistributionBucketFamilyMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  region_eq?: Maybe<Scalars['String']>
+  region_contains?: Maybe<Scalars['String']>
+  region_startsWith?: Maybe<Scalars['String']>
+  region_endsWith?: Maybe<Scalars['String']>
+  region_in?: Maybe<Array<Scalars['String']>>
+  description_eq?: Maybe<Scalars['String']>
+  description_contains?: Maybe<Scalars['String']>
+  description_startsWith?: Maybe<Scalars['String']>
+  description_endsWith?: Maybe<Scalars['String']>
+  description_in?: Maybe<Array<Scalars['String']>>
+  latencyTestTargets_containsAll?: Maybe<Array<Scalars['String']>>
+  latencyTestTargets_containsNone?: Maybe<Array<Scalars['String']>>
+  latencyTestTargets_containsAny?: Maybe<Array<Scalars['String']>>
+  areas_none?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>
+  areas_some?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>
+  areas_every?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>
+  distributionbucketfamilymetadata_none?: Maybe<DistributionBucketFamilyWhereInput>
+  distributionbucketfamilymetadata_some?: Maybe<DistributionBucketFamilyWhereInput>
+  distributionbucketfamilymetadata_every?: Maybe<DistributionBucketFamilyWhereInput>
+  AND?: Maybe<Array<DistributionBucketFamilyMetadataWhereInput>>
+  OR?: Maybe<Array<DistributionBucketFamilyMetadataWhereInput>>
+}
+
+export type DistributionBucketFamilyMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum DistributionBucketFamilyOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  MetadataAsc = 'metadata_ASC',
+  MetadataDesc = 'metadata_DESC',
+}
+
+export type DistributionBucketFamilyUpdateInput = {
+  metadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketFamilyWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  metadata?: Maybe<DistributionBucketFamilyMetadataWhereInput>
+  buckets_none?: Maybe<DistributionBucketWhereInput>
+  buckets_some?: Maybe<DistributionBucketWhereInput>
+  buckets_every?: Maybe<DistributionBucketWhereInput>
+  AND?: Maybe<Array<DistributionBucketFamilyWhereInput>>
+  OR?: Maybe<Array<DistributionBucketFamilyWhereInput>>
+}
+
+export type DistributionBucketFamilyWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DistributionBucketOperator = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  distributionBucket: DistributionBucket
+  distributionBucketId: Scalars['String']
+  /** ID of the distribution group worker */
+  workerId: Scalars['Int']
+  /** Current operator status */
+  status: DistributionBucketOperatorStatus
+  metadata?: Maybe<DistributionBucketOperatorMetadata>
+  metadataId?: Maybe<Scalars['String']>
+}
+
+export type DistributionBucketOperatorConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketOperatorEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketOperatorCreateInput = {
+  distributionBucket: Scalars['ID']
+  workerId: Scalars['Float']
+  status: DistributionBucketOperatorStatus
+  metadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketOperatorEdge = {
+  node: DistributionBucketOperator
+  cursor: Scalars['String']
+}
+
+export type DistributionBucketOperatorMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Root distributor node API endpoint */
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<NodeLocationMetadata>
+  nodeLocationId?: Maybe<Scalars['String']>
+  /** Additional information about the node/operator */
+  extra?: Maybe<Scalars['String']>
+  distributionbucketoperatormetadata?: Maybe<Array<DistributionBucketOperator>>
+}
+
+export type DistributionBucketOperatorMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketOperatorMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketOperatorMetadataCreateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<Scalars['ID']>
+  extra?: Maybe<Scalars['String']>
+}
+
+export type DistributionBucketOperatorMetadataEdge = {
+  node: DistributionBucketOperatorMetadata
+  cursor: Scalars['String']
+}
+
+export enum DistributionBucketOperatorMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NodeEndpointAsc = 'nodeEndpoint_ASC',
+  NodeEndpointDesc = 'nodeEndpoint_DESC',
+  NodeLocationAsc = 'nodeLocation_ASC',
+  NodeLocationDesc = 'nodeLocation_DESC',
+  ExtraAsc = 'extra_ASC',
+  ExtraDesc = 'extra_DESC',
+}
+
+export type DistributionBucketOperatorMetadataUpdateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<Scalars['ID']>
+  extra?: Maybe<Scalars['String']>
+}
+
+export type DistributionBucketOperatorMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  nodeEndpoint_eq?: Maybe<Scalars['String']>
+  nodeEndpoint_contains?: Maybe<Scalars['String']>
+  nodeEndpoint_startsWith?: Maybe<Scalars['String']>
+  nodeEndpoint_endsWith?: Maybe<Scalars['String']>
+  nodeEndpoint_in?: Maybe<Array<Scalars['String']>>
+  extra_eq?: Maybe<Scalars['String']>
+  extra_contains?: Maybe<Scalars['String']>
+  extra_startsWith?: Maybe<Scalars['String']>
+  extra_endsWith?: Maybe<Scalars['String']>
+  extra_in?: Maybe<Array<Scalars['String']>>
+  nodeLocation?: Maybe<NodeLocationMetadataWhereInput>
+  distributionbucketoperatormetadata_none?: Maybe<DistributionBucketOperatorWhereInput>
+  distributionbucketoperatormetadata_some?: Maybe<DistributionBucketOperatorWhereInput>
+  distributionbucketoperatormetadata_every?: Maybe<DistributionBucketOperatorWhereInput>
+  AND?: Maybe<Array<DistributionBucketOperatorMetadataWhereInput>>
+  OR?: Maybe<Array<DistributionBucketOperatorMetadataWhereInput>>
+}
+
+export type DistributionBucketOperatorMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum DistributionBucketOperatorOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  DistributionBucketAsc = 'distributionBucket_ASC',
+  DistributionBucketDesc = 'distributionBucket_DESC',
+  WorkerIdAsc = 'workerId_ASC',
+  WorkerIdDesc = 'workerId_DESC',
+  StatusAsc = 'status_ASC',
+  StatusDesc = 'status_DESC',
+  MetadataAsc = 'metadata_ASC',
+  MetadataDesc = 'metadata_DESC',
+}
+
+export enum DistributionBucketOperatorStatus {
+  Invited = 'INVITED',
+  Active = 'ACTIVE',
+}
+
+export type DistributionBucketOperatorUpdateInput = {
+  distributionBucket?: Maybe<Scalars['ID']>
+  workerId?: Maybe<Scalars['Float']>
+  status?: Maybe<DistributionBucketOperatorStatus>
+  metadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketOperatorWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  workerId_eq?: Maybe<Scalars['Int']>
+  workerId_gt?: Maybe<Scalars['Int']>
+  workerId_gte?: Maybe<Scalars['Int']>
+  workerId_lt?: Maybe<Scalars['Int']>
+  workerId_lte?: Maybe<Scalars['Int']>
+  workerId_in?: Maybe<Array<Scalars['Int']>>
+  status_eq?: Maybe<DistributionBucketOperatorStatus>
+  status_in?: Maybe<Array<DistributionBucketOperatorStatus>>
+  distributionBucket?: Maybe<DistributionBucketWhereInput>
+  metadata?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  AND?: Maybe<Array<DistributionBucketOperatorWhereInput>>
+  OR?: Maybe<Array<DistributionBucketOperatorWhereInput>>
+}
+
+export type DistributionBucketOperatorWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum DistributionBucketOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  FamilyAsc = 'family_ASC',
+  FamilyDesc = 'family_DESC',
+  BucketIndexAsc = 'bucketIndex_ASC',
+  BucketIndexDesc = 'bucketIndex_DESC',
+  AcceptingNewBagsAsc = 'acceptingNewBags_ASC',
+  AcceptingNewBagsDesc = 'acceptingNewBags_DESC',
+  DistributingAsc = 'distributing_ASC',
+  DistributingDesc = 'distributing_DESC',
+}
+
+export type DistributionBucketUpdateInput = {
+  family?: Maybe<Scalars['ID']>
+  bucketIndex?: Maybe<Scalars['Float']>
+  acceptingNewBags?: Maybe<Scalars['Boolean']>
+  distributing?: Maybe<Scalars['Boolean']>
+}
+
+export type DistributionBucketWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  bucketIndex_eq?: Maybe<Scalars['Int']>
+  bucketIndex_gt?: Maybe<Scalars['Int']>
+  bucketIndex_gte?: Maybe<Scalars['Int']>
+  bucketIndex_lt?: Maybe<Scalars['Int']>
+  bucketIndex_lte?: Maybe<Scalars['Int']>
+  bucketIndex_in?: Maybe<Array<Scalars['Int']>>
+  acceptingNewBags_eq?: Maybe<Scalars['Boolean']>
+  acceptingNewBags_in?: Maybe<Array<Scalars['Boolean']>>
+  distributing_eq?: Maybe<Scalars['Boolean']>
+  distributing_in?: Maybe<Array<Scalars['Boolean']>>
+  family?: Maybe<DistributionBucketFamilyWhereInput>
+  operators_none?: Maybe<DistributionBucketOperatorWhereInput>
+  operators_some?: Maybe<DistributionBucketOperatorWhereInput>
+  operators_every?: Maybe<DistributionBucketOperatorWhereInput>
+  bags_none?: Maybe<StorageBagWhereInput>
+  bags_some?: Maybe<StorageBagWhereInput>
+  bags_every?: Maybe<StorageBagWhereInput>
+  AND?: Maybe<Array<DistributionBucketWhereInput>>
+  OR?: Maybe<Array<DistributionBucketWhereInput>>
+}
+
+export type DistributionBucketWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type GeoCoordinates = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  latitude: Scalars['Float']
+  longitude: Scalars['Float']
+  nodelocationmetadatacoordinates?: Maybe<Array<NodeLocationMetadata>>
+}
+
+export type GeoCoordinatesConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<GeoCoordinatesEdge>
+  pageInfo: PageInfo
+}
+
+export type GeoCoordinatesCreateInput = {
+  latitude: Scalars['Float']
+  longitude: Scalars['Float']
+}
+
+export type GeoCoordinatesEdge = {
+  node: GeoCoordinates
+  cursor: Scalars['String']
+}
+
+export enum GeoCoordinatesOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  LatitudeAsc = 'latitude_ASC',
+  LatitudeDesc = 'latitude_DESC',
+  LongitudeAsc = 'longitude_ASC',
+  LongitudeDesc = 'longitude_DESC',
+}
+
+export type GeoCoordinatesUpdateInput = {
+  latitude?: Maybe<Scalars['Float']>
+  longitude?: Maybe<Scalars['Float']>
+}
+
+export type GeoCoordinatesWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  latitude_eq?: Maybe<Scalars['Float']>
+  latitude_gt?: Maybe<Scalars['Float']>
+  latitude_gte?: Maybe<Scalars['Float']>
+  latitude_lt?: Maybe<Scalars['Float']>
+  latitude_lte?: Maybe<Scalars['Float']>
+  latitude_in?: Maybe<Array<Scalars['Float']>>
+  longitude_eq?: Maybe<Scalars['Float']>
+  longitude_gt?: Maybe<Scalars['Float']>
+  longitude_gte?: Maybe<Scalars['Float']>
+  longitude_lt?: Maybe<Scalars['Float']>
+  longitude_lte?: Maybe<Scalars['Float']>
+  longitude_in?: Maybe<Array<Scalars['Float']>>
+  nodelocationmetadatacoordinates_none?: Maybe<NodeLocationMetadataWhereInput>
+  nodelocationmetadatacoordinates_some?: Maybe<NodeLocationMetadataWhereInput>
+  nodelocationmetadatacoordinates_every?: Maybe<NodeLocationMetadataWhereInput>
+  AND?: Maybe<Array<GeoCoordinatesWhereInput>>
+  OR?: Maybe<Array<GeoCoordinatesWhereInput>>
+}
+
+export type GeoCoordinatesWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type GeographicalArea = GeographicalAreaContinent | GeographicalAreaCountry | GeographicalAreaSubdivistion
+
+export type GeographicalAreaContinent = {
+  code?: Maybe<Continent>
+}
+
+export type GeographicalAreaContinentCreateInput = {
+  code?: Maybe<Continent>
+}
+
+export type GeographicalAreaContinentUpdateInput = {
+  code?: Maybe<Continent>
+}
+
+export type GeographicalAreaContinentWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  code_eq?: Maybe<Continent>
+  code_in?: Maybe<Array<Continent>>
+  AND?: Maybe<Array<GeographicalAreaContinentWhereInput>>
+  OR?: Maybe<Array<GeographicalAreaContinentWhereInput>>
+}
+
+export type GeographicalAreaContinentWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type GeographicalAreaCountry = {
+  /** ISO 3166-1 alpha-2 country code */
+  code?: Maybe<Scalars['String']>
+}
+
+export type GeographicalAreaSubdivistion = {
+  /** ISO 3166-2 subdivision code */
+  code?: Maybe<Scalars['String']>
+}
+
+export type Language = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** ISO 639-1 language identifier */
+  iso: Scalars['String']
+  createdInBlock: Scalars['Int']
+  channellanguage?: Maybe<Array<Channel>>
+  videolanguage?: Maybe<Array<Video>>
+}
+
+export type LanguageConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<LanguageEdge>
+  pageInfo: PageInfo
+}
+
+export type LanguageCreateInput = {
+  iso: Scalars['String']
+  createdInBlock: Scalars['Float']
+}
+
+export type LanguageEdge = {
+  node: Language
+  cursor: Scalars['String']
+}
+
+export enum LanguageOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsoAsc = 'iso_ASC',
+  IsoDesc = 'iso_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type LanguageUpdateInput = {
+  iso?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type LanguageWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  iso_eq?: Maybe<Scalars['String']>
+  iso_contains?: Maybe<Scalars['String']>
+  iso_startsWith?: Maybe<Scalars['String']>
+  iso_endsWith?: Maybe<Scalars['String']>
+  iso_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  channellanguage_none?: Maybe<ChannelWhereInput>
+  channellanguage_some?: Maybe<ChannelWhereInput>
+  channellanguage_every?: Maybe<ChannelWhereInput>
+  videolanguage_none?: Maybe<VideoWhereInput>
+  videolanguage_some?: Maybe<VideoWhereInput>
+  videolanguage_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<LanguageWhereInput>>
+  OR?: Maybe<Array<LanguageWhereInput>>
+}
+
+export type LanguageWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type License = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** License code defined by Joystream */
+  code?: Maybe<Scalars['Int']>
+  /** Attribution (if required by the license) */
+  attribution?: Maybe<Scalars['String']>
+  /** Custom license content */
+  customText?: Maybe<Scalars['String']>
+  videolicense?: Maybe<Array<Video>>
+}
+
+export type LicenseConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<LicenseEdge>
+  pageInfo: PageInfo
+}
+
+export type LicenseCreateInput = {
+  code?: Maybe<Scalars['Float']>
+  attribution?: Maybe<Scalars['String']>
+  customText?: Maybe<Scalars['String']>
+}
+
+export type LicenseEdge = {
+  node: License
+  cursor: Scalars['String']
+}
+
+export enum LicenseOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CodeAsc = 'code_ASC',
+  CodeDesc = 'code_DESC',
+  AttributionAsc = 'attribution_ASC',
+  AttributionDesc = 'attribution_DESC',
+  CustomTextAsc = 'customText_ASC',
+  CustomTextDesc = 'customText_DESC',
+}
+
+export type LicenseUpdateInput = {
+  code?: Maybe<Scalars['Float']>
+  attribution?: Maybe<Scalars['String']>
+  customText?: Maybe<Scalars['String']>
+}
+
+export type LicenseWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  code_eq?: Maybe<Scalars['Int']>
+  code_gt?: Maybe<Scalars['Int']>
+  code_gte?: Maybe<Scalars['Int']>
+  code_lt?: Maybe<Scalars['Int']>
+  code_lte?: Maybe<Scalars['Int']>
+  code_in?: Maybe<Array<Scalars['Int']>>
+  attribution_eq?: Maybe<Scalars['String']>
+  attribution_contains?: Maybe<Scalars['String']>
+  attribution_startsWith?: Maybe<Scalars['String']>
+  attribution_endsWith?: Maybe<Scalars['String']>
+  attribution_in?: Maybe<Array<Scalars['String']>>
+  customText_eq?: Maybe<Scalars['String']>
+  customText_contains?: Maybe<Scalars['String']>
+  customText_startsWith?: Maybe<Scalars['String']>
+  customText_endsWith?: Maybe<Scalars['String']>
+  customText_in?: Maybe<Array<Scalars['String']>>
+  videolicense_none?: Maybe<VideoWhereInput>
+  videolicense_some?: Maybe<VideoWhereInput>
+  videolicense_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<LicenseWhereInput>>
+  OR?: Maybe<Array<LicenseWhereInput>>
+}
+
+export type LicenseWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type MembersByHandleFtsOutput = {
+  item: MembersByHandleSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type MembersByHandleSearchResult = Membership
+
+/** Stored information about a registered user */
+export type Membership = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** The unique handle chosen by the member */
+  handle: Scalars['String']
+  /** A URL to the member's avatar image */
+  avatarUri?: Maybe<Scalars['String']>
+  /** Short text chosen by the member to share information about themselves */
+  about?: Maybe<Scalars['String']>
+  /** Member's controller account id */
+  controllerAccount: Scalars['String']
+  /** Member's root account id */
+  rootAccount: Scalars['String']
+  /** Block number at which the member was registered */
+  createdInBlock: Scalars['Int']
+  /** How the member was registered */
+  entry: MembershipEntryMethod
+  /** The type of subscription the member has purchased, if any */
+  subscription?: Maybe<Scalars['Int']>
+  channels: Array<Channel>
+  collaboratorInChannels: Array<Channel>
+}
+
+export type MembershipConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<MembershipEdge>
+  pageInfo: PageInfo
+}
+
+export type MembershipCreateInput = {
+  handle: Scalars['String']
+  avatarUri?: Maybe<Scalars['String']>
+  about?: Maybe<Scalars['String']>
+  controllerAccount: Scalars['String']
+  rootAccount: Scalars['String']
+  createdInBlock: Scalars['Float']
+  entry: MembershipEntryMethod
+  subscription?: Maybe<Scalars['Float']>
+}
+
+export type MembershipEdge = {
+  node: Membership
+  cursor: Scalars['String']
+}
+
+export enum MembershipEntryMethod {
+  Paid = 'PAID',
+  Screening = 'SCREENING',
+  Genesis = 'GENESIS',
+}
+
+export enum MembershipOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  HandleAsc = 'handle_ASC',
+  HandleDesc = 'handle_DESC',
+  AvatarUriAsc = 'avatarUri_ASC',
+  AvatarUriDesc = 'avatarUri_DESC',
+  AboutAsc = 'about_ASC',
+  AboutDesc = 'about_DESC',
+  ControllerAccountAsc = 'controllerAccount_ASC',
+  ControllerAccountDesc = 'controllerAccount_DESC',
+  RootAccountAsc = 'rootAccount_ASC',
+  RootAccountDesc = 'rootAccount_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+  EntryAsc = 'entry_ASC',
+  EntryDesc = 'entry_DESC',
+  SubscriptionAsc = 'subscription_ASC',
+  SubscriptionDesc = 'subscription_DESC',
+}
+
+export type MembershipUpdateInput = {
+  handle?: Maybe<Scalars['String']>
+  avatarUri?: Maybe<Scalars['String']>
+  about?: Maybe<Scalars['String']>
+  controllerAccount?: Maybe<Scalars['String']>
+  rootAccount?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+  entry?: Maybe<MembershipEntryMethod>
+  subscription?: Maybe<Scalars['Float']>
+}
+
+export type MembershipWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  handle_eq?: Maybe<Scalars['String']>
+  handle_contains?: Maybe<Scalars['String']>
+  handle_startsWith?: Maybe<Scalars['String']>
+  handle_endsWith?: Maybe<Scalars['String']>
+  handle_in?: Maybe<Array<Scalars['String']>>
+  avatarUri_eq?: Maybe<Scalars['String']>
+  avatarUri_contains?: Maybe<Scalars['String']>
+  avatarUri_startsWith?: Maybe<Scalars['String']>
+  avatarUri_endsWith?: Maybe<Scalars['String']>
+  avatarUri_in?: Maybe<Array<Scalars['String']>>
+  about_eq?: Maybe<Scalars['String']>
+  about_contains?: Maybe<Scalars['String']>
+  about_startsWith?: Maybe<Scalars['String']>
+  about_endsWith?: Maybe<Scalars['String']>
+  about_in?: Maybe<Array<Scalars['String']>>
+  controllerAccount_eq?: Maybe<Scalars['String']>
+  controllerAccount_contains?: Maybe<Scalars['String']>
+  controllerAccount_startsWith?: Maybe<Scalars['String']>
+  controllerAccount_endsWith?: Maybe<Scalars['String']>
+  controllerAccount_in?: Maybe<Array<Scalars['String']>>
+  rootAccount_eq?: Maybe<Scalars['String']>
+  rootAccount_contains?: Maybe<Scalars['String']>
+  rootAccount_startsWith?: Maybe<Scalars['String']>
+  rootAccount_endsWith?: Maybe<Scalars['String']>
+  rootAccount_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  entry_eq?: Maybe<MembershipEntryMethod>
+  entry_in?: Maybe<Array<MembershipEntryMethod>>
+  subscription_eq?: Maybe<Scalars['Int']>
+  subscription_gt?: Maybe<Scalars['Int']>
+  subscription_gte?: Maybe<Scalars['Int']>
+  subscription_lt?: Maybe<Scalars['Int']>
+  subscription_lte?: Maybe<Scalars['Int']>
+  subscription_in?: Maybe<Array<Scalars['Int']>>
+  channels_none?: Maybe<ChannelWhereInput>
+  channels_some?: Maybe<ChannelWhereInput>
+  channels_every?: Maybe<ChannelWhereInput>
+  collaboratorInChannels_none?: Maybe<ChannelWhereInput>
+  collaboratorInChannels_some?: Maybe<ChannelWhereInput>
+  collaboratorInChannels_every?: Maybe<ChannelWhereInput>
+  AND?: Maybe<Array<MembershipWhereInput>>
+  OR?: Maybe<Array<MembershipWhereInput>>
+}
+
+export type MembershipWhereUniqueInput = {
+  id?: Maybe<Scalars['ID']>
+  handle?: Maybe<Scalars['String']>
+}
+
+export type NodeLocationMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** ISO 3166-1 alpha-2 country code (2 letters) */
+  countryCode?: Maybe<Scalars['String']>
+  /** City name */
+  city?: Maybe<Scalars['String']>
+  coordinates?: Maybe<GeoCoordinates>
+  coordinatesId?: Maybe<Scalars['String']>
+  distributionbucketoperatormetadatanodeLocation?: Maybe<Array<DistributionBucketOperatorMetadata>>
+  storagebucketoperatormetadatanodeLocation?: Maybe<Array<StorageBucketOperatorMetadata>>
+}
+
+export type NodeLocationMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<NodeLocationMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type NodeLocationMetadataCreateInput = {
+  countryCode?: Maybe<Scalars['String']>
+  city?: Maybe<Scalars['String']>
+  coordinates?: Maybe<Scalars['ID']>
+}
+
+export type NodeLocationMetadataEdge = {
+  node: NodeLocationMetadata
+  cursor: Scalars['String']
+}
+
+export enum NodeLocationMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CountryCodeAsc = 'countryCode_ASC',
+  CountryCodeDesc = 'countryCode_DESC',
+  CityAsc = 'city_ASC',
+  CityDesc = 'city_DESC',
+  CoordinatesAsc = 'coordinates_ASC',
+  CoordinatesDesc = 'coordinates_DESC',
+}
+
+export type NodeLocationMetadataUpdateInput = {
+  countryCode?: Maybe<Scalars['String']>
+  city?: Maybe<Scalars['String']>
+  coordinates?: Maybe<Scalars['ID']>
+}
+
+export type NodeLocationMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  countryCode_eq?: Maybe<Scalars['String']>
+  countryCode_contains?: Maybe<Scalars['String']>
+  countryCode_startsWith?: Maybe<Scalars['String']>
+  countryCode_endsWith?: Maybe<Scalars['String']>
+  countryCode_in?: Maybe<Array<Scalars['String']>>
+  city_eq?: Maybe<Scalars['String']>
+  city_contains?: Maybe<Scalars['String']>
+  city_startsWith?: Maybe<Scalars['String']>
+  city_endsWith?: Maybe<Scalars['String']>
+  city_in?: Maybe<Array<Scalars['String']>>
+  coordinates?: Maybe<GeoCoordinatesWhereInput>
+  distributionbucketoperatormetadatanodeLocation_none?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  distributionbucketoperatormetadatanodeLocation_some?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  distributionbucketoperatormetadatanodeLocation_every?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  storagebucketoperatormetadatanodeLocation_none?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  storagebucketoperatormetadatanodeLocation_some?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  storagebucketoperatormetadatanodeLocation_every?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  AND?: Maybe<Array<NodeLocationMetadataWhereInput>>
+  OR?: Maybe<Array<NodeLocationMetadataWhereInput>>
+}
+
+export type NodeLocationMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type PageInfo = {
+  hasNextPage: Scalars['Boolean']
+  hasPreviousPage: Scalars['Boolean']
+  startCursor?: Maybe<Scalars['String']>
+  endCursor?: Maybe<Scalars['String']>
+}
+
+export type ProcessorState = {
+  lastCompleteBlock: Scalars['Float']
+  lastProcessedEvent: Scalars['String']
+  indexerHead: Scalars['Float']
+  chainHead: Scalars['Float']
+}
+
+export type Query = {
+  channelCategories: Array<ChannelCategory>
+  channelCategoryByUniqueInput?: Maybe<ChannelCategory>
+  channelCategoriesConnection: ChannelCategoryConnection
+  channels: Array<Channel>
+  channelByUniqueInput?: Maybe<Channel>
+  channelsConnection: ChannelConnection
+  curatorGroups: Array<CuratorGroup>
+  curatorGroupByUniqueInput?: Maybe<CuratorGroup>
+  curatorGroupsConnection: CuratorGroupConnection
+  distributionBucketFamilyGeographicAreas: Array<DistributionBucketFamilyGeographicArea>
+  distributionBucketFamilyGeographicAreaByUniqueInput?: Maybe<DistributionBucketFamilyGeographicArea>
+  distributionBucketFamilyGeographicAreasConnection: DistributionBucketFamilyGeographicAreaConnection
+  distributionBucketFamilyMetadata: Array<DistributionBucketFamilyMetadata>
+  distributionBucketFamilyMetadataByUniqueInput?: Maybe<DistributionBucketFamilyMetadata>
+  distributionBucketFamilyMetadataConnection: DistributionBucketFamilyMetadataConnection
+  distributionBucketFamilies: Array<DistributionBucketFamily>
+  distributionBucketFamilyByUniqueInput?: Maybe<DistributionBucketFamily>
+  distributionBucketFamiliesConnection: DistributionBucketFamilyConnection
+  distributionBucketOperatorMetadata: Array<DistributionBucketOperatorMetadata>
+  distributionBucketOperatorMetadataByUniqueInput?: Maybe<DistributionBucketOperatorMetadata>
+  distributionBucketOperatorMetadataConnection: DistributionBucketOperatorMetadataConnection
+  distributionBucketOperators: Array<DistributionBucketOperator>
+  distributionBucketOperatorByUniqueInput?: Maybe<DistributionBucketOperator>
+  distributionBucketOperatorsConnection: DistributionBucketOperatorConnection
+  distributionBuckets: Array<DistributionBucket>
+  distributionBucketByUniqueInput?: Maybe<DistributionBucket>
+  distributionBucketsConnection: DistributionBucketConnection
+  geoCoordinates: Array<GeoCoordinates>
+  geoCoordinatesByUniqueInput?: Maybe<GeoCoordinates>
+  geoCoordinatesConnection: GeoCoordinatesConnection
+  languages: Array<Language>
+  languageByUniqueInput?: Maybe<Language>
+  languagesConnection: LanguageConnection
+  licenses: Array<License>
+  licenseByUniqueInput?: Maybe<License>
+  licensesConnection: LicenseConnection
+  memberships: Array<Membership>
+  membershipByUniqueInput?: Maybe<Membership>
+  membershipsConnection: MembershipConnection
+  nodeLocationMetadata: Array<NodeLocationMetadata>
+  nodeLocationMetadataByUniqueInput?: Maybe<NodeLocationMetadata>
+  nodeLocationMetadataConnection: NodeLocationMetadataConnection
+  channelCategoriesByName: Array<ChannelCategoriesByNameFtsOutput>
+  membersByHandle: Array<MembersByHandleFtsOutput>
+  search: Array<SearchFtsOutput>
+  videoCategoriesByName: Array<VideoCategoriesByNameFtsOutput>
+  storageBags: Array<StorageBag>
+  storageBagByUniqueInput?: Maybe<StorageBag>
+  storageBagsConnection: StorageBagConnection
+  storageBucketOperatorMetadata: Array<StorageBucketOperatorMetadata>
+  storageBucketOperatorMetadataByUniqueInput?: Maybe<StorageBucketOperatorMetadata>
+  storageBucketOperatorMetadataConnection: StorageBucketOperatorMetadataConnection
+  storageBuckets: Array<StorageBucket>
+  storageBucketByUniqueInput?: Maybe<StorageBucket>
+  storageBucketsConnection: StorageBucketConnection
+  storageDataObjects: Array<StorageDataObject>
+  storageDataObjectByUniqueInput?: Maybe<StorageDataObject>
+  storageDataObjectsConnection: StorageDataObjectConnection
+  storageSystemParameters: Array<StorageSystemParameters>
+  storageSystemParametersByUniqueInput?: Maybe<StorageSystemParameters>
+  storageSystemParametersConnection: StorageSystemParametersConnection
+  videoCategories: Array<VideoCategory>
+  videoCategoryByUniqueInput?: Maybe<VideoCategory>
+  videoCategoriesConnection: VideoCategoryConnection
+  videoMediaEncodings: Array<VideoMediaEncoding>
+  videoMediaEncodingByUniqueInput?: Maybe<VideoMediaEncoding>
+  videoMediaEncodingsConnection: VideoMediaEncodingConnection
+  videoMediaMetadata: Array<VideoMediaMetadata>
+  videoMediaMetadataByUniqueInput?: Maybe<VideoMediaMetadata>
+  videoMediaMetadataConnection: VideoMediaMetadataConnection
+  videos: Array<Video>
+  videoByUniqueInput?: Maybe<Video>
+  videosConnection: VideoConnection
+  workers: Array<Worker>
+  workerByUniqueInput?: Maybe<Worker>
+  workersConnection: WorkerConnection
+}
+
+export type QueryChannelCategoriesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<ChannelCategoryWhereInput>
+  orderBy?: Maybe<Array<ChannelCategoryOrderByInput>>
+}
+
+export type QueryChannelCategoryByUniqueInputArgs = {
+  where: ChannelCategoryWhereUniqueInput
+}
+
+export type QueryChannelCategoriesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<ChannelCategoryWhereInput>
+  orderBy?: Maybe<Array<ChannelCategoryOrderByInput>>
+}
+
+export type QueryChannelsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<ChannelWhereInput>
+  orderBy?: Maybe<Array<ChannelOrderByInput>>
+}
+
+export type QueryChannelByUniqueInputArgs = {
+  where: ChannelWhereUniqueInput
+}
+
+export type QueryChannelsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<ChannelWhereInput>
+  orderBy?: Maybe<Array<ChannelOrderByInput>>
+}
+
+export type QueryCuratorGroupsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<CuratorGroupWhereInput>
+  orderBy?: Maybe<Array<CuratorGroupOrderByInput>>
+}
+
+export type QueryCuratorGroupByUniqueInputArgs = {
+  where: CuratorGroupWhereUniqueInput
+}
+
+export type QueryCuratorGroupsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<CuratorGroupWhereInput>
+  orderBy?: Maybe<Array<CuratorGroupOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyGeographicAreasArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyGeographicAreaOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyGeographicAreaByUniqueInputArgs = {
+  where: DistributionBucketFamilyGeographicAreaWhereUniqueInput
+}
+
+export type QueryDistributionBucketFamilyGeographicAreasConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyGeographicAreaOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketFamilyMetadataWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyMetadataOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyMetadataByUniqueInputArgs = {
+  where: DistributionBucketFamilyMetadataWhereUniqueInput
+}
+
+export type QueryDistributionBucketFamilyMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketFamilyMetadataWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyMetadataOrderByInput>>
+}
+
+export type QueryDistributionBucketFamiliesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketFamilyWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyByUniqueInputArgs = {
+  where: DistributionBucketFamilyWhereUniqueInput
+}
+
+export type QueryDistributionBucketFamiliesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketFamilyWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyOrderByInput>>
+}
+
+export type QueryDistributionBucketOperatorMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOperatorMetadataOrderByInput>>
+}
+
+export type QueryDistributionBucketOperatorMetadataByUniqueInputArgs = {
+  where: DistributionBucketOperatorMetadataWhereUniqueInput
+}
+
+export type QueryDistributionBucketOperatorMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOperatorMetadataOrderByInput>>
+}
+
+export type QueryDistributionBucketOperatorsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketOperatorWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOperatorOrderByInput>>
+}
+
+export type QueryDistributionBucketOperatorByUniqueInputArgs = {
+  where: DistributionBucketOperatorWhereUniqueInput
+}
+
+export type QueryDistributionBucketOperatorsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketOperatorWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOperatorOrderByInput>>
+}
+
+export type QueryDistributionBucketsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOrderByInput>>
+}
+
+export type QueryDistributionBucketByUniqueInputArgs = {
+  where: DistributionBucketWhereUniqueInput
+}
+
+export type QueryDistributionBucketsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOrderByInput>>
+}
+
+export type QueryGeoCoordinatesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<GeoCoordinatesWhereInput>
+  orderBy?: Maybe<Array<GeoCoordinatesOrderByInput>>
+}
+
+export type QueryGeoCoordinatesByUniqueInputArgs = {
+  where: GeoCoordinatesWhereUniqueInput
+}
+
+export type QueryGeoCoordinatesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<GeoCoordinatesWhereInput>
+  orderBy?: Maybe<Array<GeoCoordinatesOrderByInput>>
+}
+
+export type QueryLanguagesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<LanguageWhereInput>
+  orderBy?: Maybe<Array<LanguageOrderByInput>>
+}
+
+export type QueryLanguageByUniqueInputArgs = {
+  where: LanguageWhereUniqueInput
+}
+
+export type QueryLanguagesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<LanguageWhereInput>
+  orderBy?: Maybe<Array<LanguageOrderByInput>>
+}
+
+export type QueryLicensesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<LicenseWhereInput>
+  orderBy?: Maybe<Array<LicenseOrderByInput>>
+}
+
+export type QueryLicenseByUniqueInputArgs = {
+  where: LicenseWhereUniqueInput
+}
+
+export type QueryLicensesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<LicenseWhereInput>
+  orderBy?: Maybe<Array<LicenseOrderByInput>>
+}
+
+export type QueryMembershipsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<MembershipWhereInput>
+  orderBy?: Maybe<Array<MembershipOrderByInput>>
+}
+
+export type QueryMembershipByUniqueInputArgs = {
+  where: MembershipWhereUniqueInput
+}
+
+export type QueryMembershipsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<MembershipWhereInput>
+  orderBy?: Maybe<Array<MembershipOrderByInput>>
+}
+
+export type QueryNodeLocationMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<NodeLocationMetadataWhereInput>
+  orderBy?: Maybe<Array<NodeLocationMetadataOrderByInput>>
+}
+
+export type QueryNodeLocationMetadataByUniqueInputArgs = {
+  where: NodeLocationMetadataWhereUniqueInput
+}
+
+export type QueryNodeLocationMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<NodeLocationMetadataWhereInput>
+  orderBy?: Maybe<Array<NodeLocationMetadataOrderByInput>>
+}
+
+export type QueryChannelCategoriesByNameArgs = {
+  whereChannelCategory?: Maybe<ChannelCategoryWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
+
+export type QueryMembersByHandleArgs = {
+  whereMembership?: Maybe<MembershipWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
+
+export type QuerySearchArgs = {
+  whereVideo?: Maybe<VideoWhereInput>
+  whereChannel?: Maybe<ChannelWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
+
+export type QueryVideoCategoriesByNameArgs = {
+  whereVideoCategory?: Maybe<VideoCategoryWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
+
+export type QueryStorageBagsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageBagWhereInput>
+  orderBy?: Maybe<Array<StorageBagOrderByInput>>
+}
+
+export type QueryStorageBagByUniqueInputArgs = {
+  where: StorageBagWhereUniqueInput
+}
+
+export type QueryStorageBagsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageBagWhereInput>
+  orderBy?: Maybe<Array<StorageBagOrderByInput>>
+}
+
+export type QueryStorageBucketOperatorMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  orderBy?: Maybe<Array<StorageBucketOperatorMetadataOrderByInput>>
+}
+
+export type QueryStorageBucketOperatorMetadataByUniqueInputArgs = {
+  where: StorageBucketOperatorMetadataWhereUniqueInput
+}
+
+export type QueryStorageBucketOperatorMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  orderBy?: Maybe<Array<StorageBucketOperatorMetadataOrderByInput>>
+}
+
+export type QueryStorageBucketsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageBucketWhereInput>
+  orderBy?: Maybe<Array<StorageBucketOrderByInput>>
+}
+
+export type QueryStorageBucketByUniqueInputArgs = {
+  where: StorageBucketWhereUniqueInput
+}
+
+export type QueryStorageBucketsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageBucketWhereInput>
+  orderBy?: Maybe<Array<StorageBucketOrderByInput>>
+}
+
+export type QueryStorageDataObjectsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageDataObjectWhereInput>
+  orderBy?: Maybe<Array<StorageDataObjectOrderByInput>>
+}
+
+export type QueryStorageDataObjectByUniqueInputArgs = {
+  where: StorageDataObjectWhereUniqueInput
+}
+
+export type QueryStorageDataObjectsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageDataObjectWhereInput>
+  orderBy?: Maybe<Array<StorageDataObjectOrderByInput>>
+}
+
+export type QueryStorageSystemParametersArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageSystemParametersWhereInput>
+  orderBy?: Maybe<Array<StorageSystemParametersOrderByInput>>
+}
+
+export type QueryStorageSystemParametersByUniqueInputArgs = {
+  where: StorageSystemParametersWhereUniqueInput
+}
+
+export type QueryStorageSystemParametersConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageSystemParametersWhereInput>
+  orderBy?: Maybe<Array<StorageSystemParametersOrderByInput>>
+}
+
+export type QueryVideoCategoriesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoCategoryWhereInput>
+  orderBy?: Maybe<Array<VideoCategoryOrderByInput>>
+}
+
+export type QueryVideoCategoryByUniqueInputArgs = {
+  where: VideoCategoryWhereUniqueInput
+}
+
+export type QueryVideoCategoriesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoCategoryWhereInput>
+  orderBy?: Maybe<Array<VideoCategoryOrderByInput>>
+}
+
+export type QueryVideoMediaEncodingsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoMediaEncodingWhereInput>
+  orderBy?: Maybe<Array<VideoMediaEncodingOrderByInput>>
+}
+
+export type QueryVideoMediaEncodingByUniqueInputArgs = {
+  where: VideoMediaEncodingWhereUniqueInput
+}
+
+export type QueryVideoMediaEncodingsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoMediaEncodingWhereInput>
+  orderBy?: Maybe<Array<VideoMediaEncodingOrderByInput>>
+}
+
+export type QueryVideoMediaMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoMediaMetadataWhereInput>
+  orderBy?: Maybe<Array<VideoMediaMetadataOrderByInput>>
+}
+
+export type QueryVideoMediaMetadataByUniqueInputArgs = {
+  where: VideoMediaMetadataWhereUniqueInput
+}
+
+export type QueryVideoMediaMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoMediaMetadataWhereInput>
+  orderBy?: Maybe<Array<VideoMediaMetadataOrderByInput>>
+}
+
+export type QueryVideosArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoWhereInput>
+  orderBy?: Maybe<Array<VideoOrderByInput>>
+}
+
+export type QueryVideoByUniqueInputArgs = {
+  where: VideoWhereUniqueInput
+}
+
+export type QueryVideosConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoWhereInput>
+  orderBy?: Maybe<Array<VideoOrderByInput>>
+}
+
+export type QueryWorkersArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<WorkerWhereInput>
+  orderBy?: Maybe<Array<WorkerOrderByInput>>
+}
+
+export type QueryWorkerByUniqueInputArgs = {
+  where: WorkerWhereUniqueInput
+}
+
+export type QueryWorkersConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<WorkerWhereInput>
+  orderBy?: Maybe<Array<WorkerOrderByInput>>
+}
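
The query argument types above all follow the same two shapes: an offset/limit form (`Query...Args`) and a Relay-style connection form (`Query...ConnectionArgs`) paged with `first`/`after` cursors. Below is a minimal sketch of how a migration script could drain one of these connections, assuming a caller-supplied request function and the usual `pageInfo` shape (`hasNextPage`, `endCursor`); the import path and helper names are illustrative, not part of the generated code:

```ts
import { QueryChannelsConnectionArgs } from './generated/schema' // illustrative path

// Assumed page shape returned by the caller's request function
type Page<T> = {
  edges: { node: T; cursor: string }[]
  pageInfo: { hasNextPage: boolean; endCursor?: string | null }
}

// Generic cursor pager: keep requesting `first` items after the last cursor
// until `hasNextPage` is false, collecting the nodes along the way.
async function fetchAll<T>(
  fetchPage: (args: QueryChannelsConnectionArgs) => Promise<Page<T>>,
  pageSize = 1000
): Promise<T[]> {
  const nodes: T[] = []
  let after: string | undefined
  let hasNextPage = true
  while (hasNextPage) {
    const page = await fetchPage({ first: pageSize, after })
    nodes.push(...page.edges.map((e) => e.node))
    after = page.pageInfo.endCursor ?? undefined
    hasNextPage = page.pageInfo.hasNextPage
  }
  return nodes
}
```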
+
+export type SearchFtsOutput = {
+  item: SearchSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type SearchSearchResult = Channel | Video
+
+export type StandardDeleteResponse = {
+  id: Scalars['ID']
+}
+
+export type StorageBag = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  objects: Array<StorageDataObject>
+  storageBuckets: Array<StorageBucket>
+  distributionBuckets: Array<DistributionBucket>
+  /** Owner of the storage bag */
+  owner: StorageBagOwner
+}
+
+export type StorageBagConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageBagEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageBagCreateInput = {
+  owner: Scalars['JSONObject']
+}
+
+export type StorageBagEdge = {
+  node: StorageBag
+  cursor: Scalars['String']
+}
+
+export enum StorageBagOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+}
+
+export type StorageBagOwner =
+  | StorageBagOwnerCouncil
+  | StorageBagOwnerWorkingGroup
+  | StorageBagOwnerMember
+  | StorageBagOwnerChannel
+  | StorageBagOwnerDao
+
+export type StorageBagOwnerChannel = {
+  channelId?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerCouncil = {
+  phantom?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerDao = {
+  daoId?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerMember = {
+  memberId?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerWorkingGroup = {
+  workingGroupId?: Maybe<Scalars['String']>
+}
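
`StorageBagOwner` is emitted as a plain union of the five variant types above, with no shared discriminant field in this generated output, so consumers have to narrow by the variant-specific property. A sketch, with an illustrative function name and import path:

```ts
import { StorageBagOwner } from './generated/schema' // illustrative path

// Narrow the StorageBagOwner union by checking which variant-specific
// property is declared, and return a human-readable owner label.
function describeBagOwner(owner: StorageBagOwner): string {
  if ('channelId' in owner) return `channel:${owner.channelId ?? 'unknown'}`
  if ('memberId' in owner) return `member:${owner.memberId ?? 'unknown'}`
  if ('workingGroupId' in owner) return `workingGroup:${owner.workingGroupId ?? 'unknown'}`
  if ('daoId' in owner) return `dao:${owner.daoId ?? 'unknown'}`
  return 'council'
}
```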
+
+export type StorageBagUpdateInput = {
+  owner?: Maybe<Scalars['JSONObject']>
+}
+
+export type StorageBagWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  owner_json?: Maybe<Scalars['JSONObject']>
+  objects_none?: Maybe<StorageDataObjectWhereInput>
+  objects_some?: Maybe<StorageDataObjectWhereInput>
+  objects_every?: Maybe<StorageDataObjectWhereInput>
+  storageBuckets_none?: Maybe<StorageBucketWhereInput>
+  storageBuckets_some?: Maybe<StorageBucketWhereInput>
+  storageBuckets_every?: Maybe<StorageBucketWhereInput>
+  distributionBuckets_none?: Maybe<DistributionBucketWhereInput>
+  distributionBuckets_some?: Maybe<DistributionBucketWhereInput>
+  distributionBuckets_every?: Maybe<DistributionBucketWhereInput>
+  AND?: Maybe<Array<StorageBagWhereInput>>
+  OR?: Maybe<Array<StorageBagWhereInput>>
+}
+
+export type StorageBagWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBucket = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Current bucket operator status */
+  operatorStatus: StorageBucketOperatorStatus
+  operatorMetadata?: Maybe<StorageBucketOperatorMetadata>
+  operatorMetadataId?: Maybe<Scalars['String']>
+  /** Whether the bucket is accepting any new storage bags */
+  acceptingNewBags: Scalars['Boolean']
+  bags: Array<StorageBag>
+  /** Bucket's data object size limit in bytes */
+  dataObjectsSizeLimit: Scalars['BigInt']
+  /** Bucket's data object count limit */
+  dataObjectCountLimit: Scalars['BigInt']
+  /** Number of assigned data objects */
+  dataObjectsCount: Scalars['BigInt']
+  /** Total size of assigned data objects */
+  dataObjectsSize: Scalars['BigInt']
+}
+
+export type StorageBucketConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageBucketEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageBucketCreateInput = {
+  operatorStatus: Scalars['JSONObject']
+  operatorMetadata?: Maybe<Scalars['ID']>
+  acceptingNewBags: Scalars['Boolean']
+  dataObjectsSizeLimit: Scalars['String']
+  dataObjectCountLimit: Scalars['String']
+  dataObjectsCount: Scalars['String']
+  dataObjectsSize: Scalars['String']
+}
+
+export type StorageBucketEdge = {
+  node: StorageBucket
+  cursor: Scalars['String']
+}
+
+export type StorageBucketOperatorMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Root node endpoint */
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<NodeLocationMetadata>
+  nodeLocationId?: Maybe<Scalars['String']>
+  /** Additional information about the node/operator */
+  extra?: Maybe<Scalars['String']>
+  storagebucketoperatorMetadata?: Maybe<Array<StorageBucket>>
+}
+
+export type StorageBucketOperatorMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageBucketOperatorMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageBucketOperatorMetadataCreateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<Scalars['ID']>
+  extra?: Maybe<Scalars['String']>
+}
+
+export type StorageBucketOperatorMetadataEdge = {
+  node: StorageBucketOperatorMetadata
+  cursor: Scalars['String']
+}
+
+export enum StorageBucketOperatorMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NodeEndpointAsc = 'nodeEndpoint_ASC',
+  NodeEndpointDesc = 'nodeEndpoint_DESC',
+  NodeLocationAsc = 'nodeLocation_ASC',
+  NodeLocationDesc = 'nodeLocation_DESC',
+  ExtraAsc = 'extra_ASC',
+  ExtraDesc = 'extra_DESC',
+}
+
+export type StorageBucketOperatorMetadataUpdateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<Scalars['ID']>
+  extra?: Maybe<Scalars['String']>
+}
+
+export type StorageBucketOperatorMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  nodeEndpoint_eq?: Maybe<Scalars['String']>
+  nodeEndpoint_contains?: Maybe<Scalars['String']>
+  nodeEndpoint_startsWith?: Maybe<Scalars['String']>
+  nodeEndpoint_endsWith?: Maybe<Scalars['String']>
+  nodeEndpoint_in?: Maybe<Array<Scalars['String']>>
+  extra_eq?: Maybe<Scalars['String']>
+  extra_contains?: Maybe<Scalars['String']>
+  extra_startsWith?: Maybe<Scalars['String']>
+  extra_endsWith?: Maybe<Scalars['String']>
+  extra_in?: Maybe<Array<Scalars['String']>>
+  nodeLocation?: Maybe<NodeLocationMetadataWhereInput>
+  storagebucketoperatorMetadata_none?: Maybe<StorageBucketWhereInput>
+  storagebucketoperatorMetadata_some?: Maybe<StorageBucketWhereInput>
+  storagebucketoperatorMetadata_every?: Maybe<StorageBucketWhereInput>
+  AND?: Maybe<Array<StorageBucketOperatorMetadataWhereInput>>
+  OR?: Maybe<Array<StorageBucketOperatorMetadataWhereInput>>
+}
+
+export type StorageBucketOperatorMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBucketOperatorStatus =
+  | StorageBucketOperatorStatusMissing
+  | StorageBucketOperatorStatusInvited
+  | StorageBucketOperatorStatusActive
+
+export type StorageBucketOperatorStatusActive = {
+  workerId: Scalars['Int']
+  transactorAccountId: Scalars['String']
+}
+
+export type StorageBucketOperatorStatusInvited = {
+  workerId: Scalars['Int']
+}
+
+export type StorageBucketOperatorStatusMissing = {
+  phantom?: Maybe<Scalars['Int']>
+}
+
+export enum StorageBucketOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  OperatorMetadataAsc = 'operatorMetadata_ASC',
+  OperatorMetadataDesc = 'operatorMetadata_DESC',
+  AcceptingNewBagsAsc = 'acceptingNewBags_ASC',
+  AcceptingNewBagsDesc = 'acceptingNewBags_DESC',
+  DataObjectsSizeLimitAsc = 'dataObjectsSizeLimit_ASC',
+  DataObjectsSizeLimitDesc = 'dataObjectsSizeLimit_DESC',
+  DataObjectCountLimitAsc = 'dataObjectCountLimit_ASC',
+  DataObjectCountLimitDesc = 'dataObjectCountLimit_DESC',
+  DataObjectsCountAsc = 'dataObjectsCount_ASC',
+  DataObjectsCountDesc = 'dataObjectsCount_DESC',
+  DataObjectsSizeAsc = 'dataObjectsSize_ASC',
+  DataObjectsSizeDesc = 'dataObjectsSize_DESC',
+}
+
+export type StorageBucketUpdateInput = {
+  operatorStatus?: Maybe<Scalars['JSONObject']>
+  operatorMetadata?: Maybe<Scalars['ID']>
+  acceptingNewBags?: Maybe<Scalars['Boolean']>
+  dataObjectsSizeLimit?: Maybe<Scalars['String']>
+  dataObjectCountLimit?: Maybe<Scalars['String']>
+  dataObjectsCount?: Maybe<Scalars['String']>
+  dataObjectsSize?: Maybe<Scalars['String']>
+}
+
+export type StorageBucketWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  operatorStatus_json?: Maybe<Scalars['JSONObject']>
+  acceptingNewBags_eq?: Maybe<Scalars['Boolean']>
+  acceptingNewBags_in?: Maybe<Array<Scalars['Boolean']>>
+  dataObjectsSizeLimit_eq?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_gt?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_gte?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_lt?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_lte?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_in?: Maybe<Array<Scalars['BigInt']>>
+  dataObjectCountLimit_eq?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_gt?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_gte?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_lt?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_lte?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_in?: Maybe<Array<Scalars['BigInt']>>
+  dataObjectsCount_eq?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_gt?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_gte?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_lt?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_lte?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_in?: Maybe<Array<Scalars['BigInt']>>
+  dataObjectsSize_eq?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_gt?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_gte?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_lt?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_lte?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_in?: Maybe<Array<Scalars['BigInt']>>
+  operatorMetadata?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  bags_none?: Maybe<StorageBagWhereInput>
+  bags_some?: Maybe<StorageBagWhereInput>
+  bags_every?: Maybe<StorageBagWhereInput>
+  AND?: Maybe<Array<StorageBucketWhereInput>>
+  OR?: Maybe<Array<StorageBucketWhereInput>>
+}
+
+export type StorageBucketWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageDataObject = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Whether the data object was uploaded and accepted by the storage provider */
+  isAccepted: Scalars['Boolean']
+  /** Data object size in bytes */
+  size: Scalars['BigInt']
+  storageBag: StorageBag
+  storageBagId: Scalars['String']
+  /** IPFS content hash */
+  ipfsHash: Scalars['String']
+  /** The type of the asset that the data object represents (if known) */
+  type: DataObjectType
+  /** Prize for removing the data object */
+  deletionPrize: Scalars['BigInt']
+  /** If the object is no longer used as an asset - the time at which it was unset (if known) */
+  unsetAt?: Maybe<Scalars['DateTime']>
+  channelcoverPhoto?: Maybe<Array<Channel>>
+  channelavatarPhoto?: Maybe<Array<Channel>>
+  videothumbnailPhoto?: Maybe<Array<Video>>
+  videomedia?: Maybe<Array<Video>>
+}
+
+export type StorageDataObjectConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageDataObjectEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageDataObjectCreateInput = {
+  isAccepted: Scalars['Boolean']
+  size: Scalars['String']
+  storageBag: Scalars['ID']
+  ipfsHash: Scalars['String']
+  type: Scalars['JSONObject']
+  deletionPrize: Scalars['String']
+  unsetAt?: Maybe<Scalars['DateTime']>
+}
+
+export type StorageDataObjectEdge = {
+  node: StorageDataObject
+  cursor: Scalars['String']
+}
+
+export enum StorageDataObjectOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsAcceptedAsc = 'isAccepted_ASC',
+  IsAcceptedDesc = 'isAccepted_DESC',
+  SizeAsc = 'size_ASC',
+  SizeDesc = 'size_DESC',
+  StorageBagAsc = 'storageBag_ASC',
+  StorageBagDesc = 'storageBag_DESC',
+  IpfsHashAsc = 'ipfsHash_ASC',
+  IpfsHashDesc = 'ipfsHash_DESC',
+  DeletionPrizeAsc = 'deletionPrize_ASC',
+  DeletionPrizeDesc = 'deletionPrize_DESC',
+  UnsetAtAsc = 'unsetAt_ASC',
+  UnsetAtDesc = 'unsetAt_DESC',
+}
+
+export type StorageDataObjectUpdateInput = {
+  isAccepted?: Maybe<Scalars['Boolean']>
+  size?: Maybe<Scalars['String']>
+  storageBag?: Maybe<Scalars['ID']>
+  ipfsHash?: Maybe<Scalars['String']>
+  type?: Maybe<Scalars['JSONObject']>
+  deletionPrize?: Maybe<Scalars['String']>
+  unsetAt?: Maybe<Scalars['DateTime']>
+}
+
+export type StorageDataObjectWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  isAccepted_eq?: Maybe<Scalars['Boolean']>
+  isAccepted_in?: Maybe<Array<Scalars['Boolean']>>
+  size_eq?: Maybe<Scalars['BigInt']>
+  size_gt?: Maybe<Scalars['BigInt']>
+  size_gte?: Maybe<Scalars['BigInt']>
+  size_lt?: Maybe<Scalars['BigInt']>
+  size_lte?: Maybe<Scalars['BigInt']>
+  size_in?: Maybe<Array<Scalars['BigInt']>>
+  ipfsHash_eq?: Maybe<Scalars['String']>
+  ipfsHash_contains?: Maybe<Scalars['String']>
+  ipfsHash_startsWith?: Maybe<Scalars['String']>
+  ipfsHash_endsWith?: Maybe<Scalars['String']>
+  ipfsHash_in?: Maybe<Array<Scalars['String']>>
+  type_json?: Maybe<Scalars['JSONObject']>
+  deletionPrize_eq?: Maybe<Scalars['BigInt']>
+  deletionPrize_gt?: Maybe<Scalars['BigInt']>
+  deletionPrize_gte?: Maybe<Scalars['BigInt']>
+  deletionPrize_lt?: Maybe<Scalars['BigInt']>
+  deletionPrize_lte?: Maybe<Scalars['BigInt']>
+  deletionPrize_in?: Maybe<Array<Scalars['BigInt']>>
+  unsetAt_eq?: Maybe<Scalars['DateTime']>
+  unsetAt_lt?: Maybe<Scalars['DateTime']>
+  unsetAt_lte?: Maybe<Scalars['DateTime']>
+  unsetAt_gt?: Maybe<Scalars['DateTime']>
+  unsetAt_gte?: Maybe<Scalars['DateTime']>
+  storageBag?: Maybe<StorageBagWhereInput>
+  channelcoverPhoto_none?: Maybe<ChannelWhereInput>
+  channelcoverPhoto_some?: Maybe<ChannelWhereInput>
+  channelcoverPhoto_every?: Maybe<ChannelWhereInput>
+  channelavatarPhoto_none?: Maybe<ChannelWhereInput>
+  channelavatarPhoto_some?: Maybe<ChannelWhereInput>
+  channelavatarPhoto_every?: Maybe<ChannelWhereInput>
+  videothumbnailPhoto_none?: Maybe<VideoWhereInput>
+  videothumbnailPhoto_some?: Maybe<VideoWhereInput>
+  videothumbnailPhoto_every?: Maybe<VideoWhereInput>
+  videomedia_none?: Maybe<VideoWhereInput>
+  videomedia_some?: Maybe<VideoWhereInput>
+  videomedia_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<StorageDataObjectWhereInput>>
+  OR?: Maybe<Array<StorageDataObjectWhereInput>>
+}
+
+export type StorageDataObjectWhereUniqueInput = {
+  id: Scalars['ID']
+}
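
The `StorageDataObjectWhereInput` above combines scalar filters (`_eq`, `_in`, range suffixes) with nested relation filters such as `storageBag`. A small illustrative helper, assuming the import path, that selects accepted objects belonging to a given set of bags:

```ts
import { StorageDataObjectWhereInput } from './generated/schema' // illustrative path

// Build a where-input matching accepted data objects in any of the given bags.
function acceptedObjectsInBags(bagIds: string[]): StorageDataObjectWhereInput {
  return {
    isAccepted_eq: true,
    storageBag: { id_in: bagIds },
  }
}
```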
+
+/** Global storage system parameters */
+export type StorageSystemParameters = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Blacklisted content hashes */
+  blacklist: Array<Scalars['String']>
+  /** How many buckets can be assigned to store a bag */
+  storageBucketsPerBagLimit: Scalars['Int']
+  /** How many buckets can be assigned to distribute a bag */
+  distributionBucketsPerBagLimit: Scalars['Int']
+  /** Whether the uploading is globally blocked */
+  uploadingBlocked: Scalars['Boolean']
+  /** Additional fee for storing 1 MB of data */
+  dataObjectFeePerMb: Scalars['BigInt']
+  /** Global max. number of objects a storage bucket can store (can also be further limited by the provider) */
+  storageBucketMaxObjectsCountLimit: Scalars['BigInt']
+  /** Global max. size of objects a storage bucket can store (can also be further limited by the provider) */
+  storageBucketMaxObjectsSizeLimit: Scalars['BigInt']
+  /** ID of the next data object when created */
+  nextDataObjectId: Scalars['BigInt']
+}
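
`StorageSystemParameters` exposes the global limits and the `dataObjectFeePerMb` fee that an uploader would factor into its cost estimate. A rough sketch only, assuming the `BigInt` scalar arrives as a decimal string and that the fee is charged per started mebibyte (both are assumptions, not something this diff confirms):

```ts
const MEBIBYTE = 1n << 20n

// Estimate the total per-megabyte storage fee for a batch of data objects.
// Assumption: `dataObjectFeePerMb` is a decimal string; rounding is per
// started mebibyte for illustration only.
function estimateDataObjectsFee(dataObjectFeePerMb: string, objectSizes: bigint[]): bigint {
  const feePerMb = BigInt(dataObjectFeePerMb)
  return objectSizes.reduce(
    (total, size) => total + ((size + MEBIBYTE - 1n) / MEBIBYTE) * feePerMb,
    0n
  )
}

// Example: a 1 MiB object and a 2.5 MiB object at a fee of 10 units per MB
// estimateDataObjectsFee('10', [1n << 20n, (5n * (1n << 20n)) / 2n]) === 40n
```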
+
+export type StorageSystemParametersConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageSystemParametersEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageSystemParametersCreateInput = {
+  blacklist: Array<Scalars['String']>
+  storageBucketsPerBagLimit: Scalars['Float']
+  distributionBucketsPerBagLimit: Scalars['Float']
+  uploadingBlocked: Scalars['Boolean']
+  dataObjectFeePerMb: Scalars['String']
+  storageBucketMaxObjectsCountLimit: Scalars['String']
+  storageBucketMaxObjectsSizeLimit: Scalars['String']
+  nextDataObjectId: Scalars['String']
+}
+
+export type StorageSystemParametersEdge = {
+  node: StorageSystemParameters
+  cursor: Scalars['String']
+}
+
+export enum StorageSystemParametersOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  StorageBucketsPerBagLimitAsc = 'storageBucketsPerBagLimit_ASC',
+  StorageBucketsPerBagLimitDesc = 'storageBucketsPerBagLimit_DESC',
+  DistributionBucketsPerBagLimitAsc = 'distributionBucketsPerBagLimit_ASC',
+  DistributionBucketsPerBagLimitDesc = 'distributionBucketsPerBagLimit_DESC',
+  UploadingBlockedAsc = 'uploadingBlocked_ASC',
+  UploadingBlockedDesc = 'uploadingBlocked_DESC',
+  DataObjectFeePerMbAsc = 'dataObjectFeePerMb_ASC',
+  DataObjectFeePerMbDesc = 'dataObjectFeePerMb_DESC',
+  StorageBucketMaxObjectsCountLimitAsc = 'storageBucketMaxObjectsCountLimit_ASC',
+  StorageBucketMaxObjectsCountLimitDesc = 'storageBucketMaxObjectsCountLimit_DESC',
+  StorageBucketMaxObjectsSizeLimitAsc = 'storageBucketMaxObjectsSizeLimit_ASC',
+  StorageBucketMaxObjectsSizeLimitDesc = 'storageBucketMaxObjectsSizeLimit_DESC',
+  NextDataObjectIdAsc = 'nextDataObjectId_ASC',
+  NextDataObjectIdDesc = 'nextDataObjectId_DESC',
+}
+
+export type StorageSystemParametersUpdateInput = {
+  blacklist?: Maybe<Array<Scalars['String']>>
+  storageBucketsPerBagLimit?: Maybe<Scalars['Float']>
+  distributionBucketsPerBagLimit?: Maybe<Scalars['Float']>
+  uploadingBlocked?: Maybe<Scalars['Boolean']>
+  dataObjectFeePerMb?: Maybe<Scalars['String']>
+  storageBucketMaxObjectsCountLimit?: Maybe<Scalars['String']>
+  storageBucketMaxObjectsSizeLimit?: Maybe<Scalars['String']>
+  nextDataObjectId?: Maybe<Scalars['String']>
+}
+
+export type StorageSystemParametersWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  blacklist_containsAll?: Maybe<Array<Scalars['String']>>
+  blacklist_containsNone?: Maybe<Array<Scalars['String']>>
+  blacklist_containsAny?: Maybe<Array<Scalars['String']>>
+  storageBucketsPerBagLimit_eq?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_gt?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_gte?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_lt?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_lte?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_in?: Maybe<Array<Scalars['Int']>>
+  distributionBucketsPerBagLimit_eq?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_gt?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_gte?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_lt?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_lte?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_in?: Maybe<Array<Scalars['Int']>>
+  uploadingBlocked_eq?: Maybe<Scalars['Boolean']>
+  uploadingBlocked_in?: Maybe<Array<Scalars['Boolean']>>
+  dataObjectFeePerMb_eq?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_gt?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_gte?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_lt?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_lte?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_in?: Maybe<Array<Scalars['BigInt']>>
+  storageBucketMaxObjectsCountLimit_eq?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_gt?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_gte?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_lt?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_lte?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_in?: Maybe<Array<Scalars['BigInt']>>
+  storageBucketMaxObjectsSizeLimit_eq?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_gt?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_gte?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_lt?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_lte?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_in?: Maybe<Array<Scalars['BigInt']>>
+  nextDataObjectId_eq?: Maybe<Scalars['BigInt']>
+  nextDataObjectId_gt?: Maybe<Scalars['BigInt']>
+  nextDataObjectId_gte?: Maybe<Scalars['BigInt']>
+  nextDataObjectId_lt?: Maybe<Scalars['BigInt']>
+  nextDataObjectId_lte?: Maybe<Scalars['BigInt']>
+  nextDataObjectId_in?: Maybe<Array<Scalars['BigInt']>>
+  AND?: Maybe<Array<StorageSystemParametersWhereInput>>
+  OR?: Maybe<Array<StorageSystemParametersWhereInput>>
+}
+
+export type StorageSystemParametersWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type Subscription = {
+  stateSubscription: ProcessorState
+}
+
+export type Video = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  channel: Channel
+  channelId: Scalars['String']
+  category?: Maybe<VideoCategory>
+  categoryId?: Maybe<Scalars['String']>
+  /** The title of the video */
+  title?: Maybe<Scalars['String']>
+  /** The description of the Video */
+  description?: Maybe<Scalars['String']>
+  /** Video duration in seconds */
+  duration?: Maybe<Scalars['Int']>
+  thumbnailPhoto?: Maybe<StorageDataObject>
+  thumbnailPhotoId?: Maybe<Scalars['String']>
+  language?: Maybe<Language>
+  languageId?: Maybe<Scalars['String']>
+  /** Whether or not Video contains marketing */
+  hasMarketing?: Maybe<Scalars['Boolean']>
+  /** If the Video was published on another platform before being published on Joystream - the original publication date */
+  publishedBeforeJoystream?: Maybe<Scalars['DateTime']>
+  /** Whether the Video is supposed to be publicly displayed */
+  isPublic?: Maybe<Scalars['Boolean']>
+  /** Flag signaling whether a video is censored. */
+  isCensored: Scalars['Boolean']
+  /** Whether the Video contains explicit material. */
+  isExplicit?: Maybe<Scalars['Boolean']>
+  license?: Maybe<License>
+  licenseId?: Maybe<Scalars['String']>
+  media?: Maybe<StorageDataObject>
+  mediaId?: Maybe<Scalars['String']>
+  mediaMetadata?: Maybe<VideoMediaMetadata>
+  mediaMetadataId?: Maybe<Scalars['String']>
+  createdInBlock: Scalars['Int']
+  /** Is video featured or not */
+  isFeatured: Scalars['Boolean']
+}
+
+export type VideoCategoriesByNameFtsOutput = {
+  item: VideoCategoriesByNameSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type VideoCategoriesByNameSearchResult = VideoCategory
+
+export type VideoCategory = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** The name of the category */
+  name?: Maybe<Scalars['String']>
+  videos: Array<Video>
+  createdInBlock: Scalars['Int']
+}
+
+export type VideoCategoryConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoCategoryEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoCategoryCreateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock: Scalars['Float']
+}
+
+export type VideoCategoryEdge = {
+  node: VideoCategory
+  cursor: Scalars['String']
+}
+
+export enum VideoCategoryOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NameAsc = 'name_ASC',
+  NameDesc = 'name_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type VideoCategoryUpdateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type VideoCategoryWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  name_eq?: Maybe<Scalars['String']>
+  name_contains?: Maybe<Scalars['String']>
+  name_startsWith?: Maybe<Scalars['String']>
+  name_endsWith?: Maybe<Scalars['String']>
+  name_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  videos_none?: Maybe<VideoWhereInput>
+  videos_some?: Maybe<VideoWhereInput>
+  videos_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<VideoCategoryWhereInput>>
+  OR?: Maybe<Array<VideoCategoryWhereInput>>
+}
+
+export type VideoCategoryWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type VideoConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoCreateInput = {
+  channel: Scalars['ID']
+  category?: Maybe<Scalars['ID']>
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  duration?: Maybe<Scalars['Float']>
+  thumbnailPhoto?: Maybe<Scalars['ID']>
+  language?: Maybe<Scalars['ID']>
+  hasMarketing?: Maybe<Scalars['Boolean']>
+  publishedBeforeJoystream?: Maybe<Scalars['DateTime']>
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored: Scalars['Boolean']
+  isExplicit?: Maybe<Scalars['Boolean']>
+  license?: Maybe<Scalars['ID']>
+  media?: Maybe<Scalars['ID']>
+  mediaMetadata?: Maybe<Scalars['ID']>
+  createdInBlock: Scalars['Float']
+  isFeatured: Scalars['Boolean']
+}
+
+export type VideoEdge = {
+  node: Video
+  cursor: Scalars['String']
+}
+
+export type VideoMediaEncoding = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Encoding of the video media object */
+  codecName?: Maybe<Scalars['String']>
+  /** Media container format */
+  container?: Maybe<Scalars['String']>
+  /** Content MIME type */
+  mimeMediaType?: Maybe<Scalars['String']>
+  videomediametadataencoding?: Maybe<Array<VideoMediaMetadata>>
+}
+
+export type VideoMediaEncodingConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoMediaEncodingEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoMediaEncodingCreateInput = {
+  codecName?: Maybe<Scalars['String']>
+  container?: Maybe<Scalars['String']>
+  mimeMediaType?: Maybe<Scalars['String']>
+}
+
+export type VideoMediaEncodingEdge = {
+  node: VideoMediaEncoding
+  cursor: Scalars['String']
+}
+
+export enum VideoMediaEncodingOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CodecNameAsc = 'codecName_ASC',
+  CodecNameDesc = 'codecName_DESC',
+  ContainerAsc = 'container_ASC',
+  ContainerDesc = 'container_DESC',
+  MimeMediaTypeAsc = 'mimeMediaType_ASC',
+  MimeMediaTypeDesc = 'mimeMediaType_DESC',
+}
+
+export type VideoMediaEncodingUpdateInput = {
+  codecName?: Maybe<Scalars['String']>
+  container?: Maybe<Scalars['String']>
+  mimeMediaType?: Maybe<Scalars['String']>
+}
+
+export type VideoMediaEncodingWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  codecName_eq?: Maybe<Scalars['String']>
+  codecName_contains?: Maybe<Scalars['String']>
+  codecName_startsWith?: Maybe<Scalars['String']>
+  codecName_endsWith?: Maybe<Scalars['String']>
+  codecName_in?: Maybe<Array<Scalars['String']>>
+  container_eq?: Maybe<Scalars['String']>
+  container_contains?: Maybe<Scalars['String']>
+  container_startsWith?: Maybe<Scalars['String']>
+  container_endsWith?: Maybe<Scalars['String']>
+  container_in?: Maybe<Array<Scalars['String']>>
+  mimeMediaType_eq?: Maybe<Scalars['String']>
+  mimeMediaType_contains?: Maybe<Scalars['String']>
+  mimeMediaType_startsWith?: Maybe<Scalars['String']>
+  mimeMediaType_endsWith?: Maybe<Scalars['String']>
+  mimeMediaType_in?: Maybe<Array<Scalars['String']>>
+  videomediametadataencoding_none?: Maybe<VideoMediaMetadataWhereInput>
+  videomediametadataencoding_some?: Maybe<VideoMediaMetadataWhereInput>
+  videomediametadataencoding_every?: Maybe<VideoMediaMetadataWhereInput>
+  AND?: Maybe<Array<VideoMediaEncodingWhereInput>>
+  OR?: Maybe<Array<VideoMediaEncodingWhereInput>>
+}
+
+export type VideoMediaEncodingWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type VideoMediaMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  encoding?: Maybe<VideoMediaEncoding>
+  encodingId?: Maybe<Scalars['String']>
+  /** Video media width in pixels */
+  pixelWidth?: Maybe<Scalars['Int']>
+  /** Video media height in pixels */
+  pixelHeight?: Maybe<Scalars['Int']>
+  /** Video media size in bytes */
+  size?: Maybe<Scalars['BigInt']>
+  video?: Maybe<Video>
+  createdInBlock: Scalars['Int']
+}
+
+export type VideoMediaMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoMediaMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoMediaMetadataCreateInput = {
+  encoding?: Maybe<Scalars['ID']>
+  pixelWidth?: Maybe<Scalars['Float']>
+  pixelHeight?: Maybe<Scalars['Float']>
+  size?: Maybe<Scalars['String']>
+  createdInBlock: Scalars['Float']
+}
+
+export type VideoMediaMetadataEdge = {
+  node: VideoMediaMetadata
+  cursor: Scalars['String']
+}
+
+export enum VideoMediaMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  EncodingAsc = 'encoding_ASC',
+  EncodingDesc = 'encoding_DESC',
+  PixelWidthAsc = 'pixelWidth_ASC',
+  PixelWidthDesc = 'pixelWidth_DESC',
+  PixelHeightAsc = 'pixelHeight_ASC',
+  PixelHeightDesc = 'pixelHeight_DESC',
+  SizeAsc = 'size_ASC',
+  SizeDesc = 'size_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type VideoMediaMetadataUpdateInput = {
+  encoding?: Maybe<Scalars['ID']>
+  pixelWidth?: Maybe<Scalars['Float']>
+  pixelHeight?: Maybe<Scalars['Float']>
+  size?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type VideoMediaMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  pixelWidth_eq?: Maybe<Scalars['Int']>
+  pixelWidth_gt?: Maybe<Scalars['Int']>
+  pixelWidth_gte?: Maybe<Scalars['Int']>
+  pixelWidth_lt?: Maybe<Scalars['Int']>
+  pixelWidth_lte?: Maybe<Scalars['Int']>
+  pixelWidth_in?: Maybe<Array<Scalars['Int']>>
+  pixelHeight_eq?: Maybe<Scalars['Int']>
+  pixelHeight_gt?: Maybe<Scalars['Int']>
+  pixelHeight_gte?: Maybe<Scalars['Int']>
+  pixelHeight_lt?: Maybe<Scalars['Int']>
+  pixelHeight_lte?: Maybe<Scalars['Int']>
+  pixelHeight_in?: Maybe<Array<Scalars['Int']>>
+  size_eq?: Maybe<Scalars['BigInt']>
+  size_gt?: Maybe<Scalars['BigInt']>
+  size_gte?: Maybe<Scalars['BigInt']>
+  size_lt?: Maybe<Scalars['BigInt']>
+  size_lte?: Maybe<Scalars['BigInt']>
+  size_in?: Maybe<Array<Scalars['BigInt']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  encoding?: Maybe<VideoMediaEncodingWhereInput>
+  video?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<VideoMediaMetadataWhereInput>>
+  OR?: Maybe<Array<VideoMediaMetadataWhereInput>>
+}
+
+export type VideoMediaMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum VideoOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  ChannelAsc = 'channel_ASC',
+  ChannelDesc = 'channel_DESC',
+  CategoryAsc = 'category_ASC',
+  CategoryDesc = 'category_DESC',
+  TitleAsc = 'title_ASC',
+  TitleDesc = 'title_DESC',
+  DescriptionAsc = 'description_ASC',
+  DescriptionDesc = 'description_DESC',
+  DurationAsc = 'duration_ASC',
+  DurationDesc = 'duration_DESC',
+  ThumbnailPhotoAsc = 'thumbnailPhoto_ASC',
+  ThumbnailPhotoDesc = 'thumbnailPhoto_DESC',
+  LanguageAsc = 'language_ASC',
+  LanguageDesc = 'language_DESC',
+  HasMarketingAsc = 'hasMarketing_ASC',
+  HasMarketingDesc = 'hasMarketing_DESC',
+  PublishedBeforeJoystreamAsc = 'publishedBeforeJoystream_ASC',
+  PublishedBeforeJoystreamDesc = 'publishedBeforeJoystream_DESC',
+  IsPublicAsc = 'isPublic_ASC',
+  IsPublicDesc = 'isPublic_DESC',
+  IsCensoredAsc = 'isCensored_ASC',
+  IsCensoredDesc = 'isCensored_DESC',
+  IsExplicitAsc = 'isExplicit_ASC',
+  IsExplicitDesc = 'isExplicit_DESC',
+  LicenseAsc = 'license_ASC',
+  LicenseDesc = 'license_DESC',
+  MediaAsc = 'media_ASC',
+  MediaDesc = 'media_DESC',
+  MediaMetadataAsc = 'mediaMetadata_ASC',
+  MediaMetadataDesc = 'mediaMetadata_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+  IsFeaturedAsc = 'isFeatured_ASC',
+  IsFeaturedDesc = 'isFeatured_DESC',
+}
+
+export type VideoUpdateInput = {
+  channel?: Maybe<Scalars['ID']>
+  category?: Maybe<Scalars['ID']>
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  duration?: Maybe<Scalars['Float']>
+  thumbnailPhoto?: Maybe<Scalars['ID']>
+  language?: Maybe<Scalars['ID']>
+  hasMarketing?: Maybe<Scalars['Boolean']>
+  publishedBeforeJoystream?: Maybe<Scalars['DateTime']>
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored?: Maybe<Scalars['Boolean']>
+  isExplicit?: Maybe<Scalars['Boolean']>
+  license?: Maybe<Scalars['ID']>
+  media?: Maybe<Scalars['ID']>
+  mediaMetadata?: Maybe<Scalars['ID']>
+  createdInBlock?: Maybe<Scalars['Float']>
+  isFeatured?: Maybe<Scalars['Boolean']>
+}
+
+export type VideoWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  title_eq?: Maybe<Scalars['String']>
+  title_contains?: Maybe<Scalars['String']>
+  title_startsWith?: Maybe<Scalars['String']>
+  title_endsWith?: Maybe<Scalars['String']>
+  title_in?: Maybe<Array<Scalars['String']>>
+  description_eq?: Maybe<Scalars['String']>
+  description_contains?: Maybe<Scalars['String']>
+  description_startsWith?: Maybe<Scalars['String']>
+  description_endsWith?: Maybe<Scalars['String']>
+  description_in?: Maybe<Array<Scalars['String']>>
+  duration_eq?: Maybe<Scalars['Int']>
+  duration_gt?: Maybe<Scalars['Int']>
+  duration_gte?: Maybe<Scalars['Int']>
+  duration_lt?: Maybe<Scalars['Int']>
+  duration_lte?: Maybe<Scalars['Int']>
+  duration_in?: Maybe<Array<Scalars['Int']>>
+  hasMarketing_eq?: Maybe<Scalars['Boolean']>
+  hasMarketing_in?: Maybe<Array<Scalars['Boolean']>>
+  publishedBeforeJoystream_eq?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_lt?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_lte?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_gt?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_gte?: Maybe<Scalars['DateTime']>
+  isPublic_eq?: Maybe<Scalars['Boolean']>
+  isPublic_in?: Maybe<Array<Scalars['Boolean']>>
+  isCensored_eq?: Maybe<Scalars['Boolean']>
+  isCensored_in?: Maybe<Array<Scalars['Boolean']>>
+  isExplicit_eq?: Maybe<Scalars['Boolean']>
+  isExplicit_in?: Maybe<Array<Scalars['Boolean']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  isFeatured_eq?: Maybe<Scalars['Boolean']>
+  isFeatured_in?: Maybe<Array<Scalars['Boolean']>>
+  channel?: Maybe<ChannelWhereInput>
+  category?: Maybe<VideoCategoryWhereInput>
+  thumbnailPhoto?: Maybe<StorageDataObjectWhereInput>
+  language?: Maybe<LanguageWhereInput>
+  license?: Maybe<LicenseWhereInput>
+  media?: Maybe<StorageDataObjectWhereInput>
+  mediaMetadata?: Maybe<VideoMediaMetadataWhereInput>
+  AND?: Maybe<Array<VideoWhereInput>>
+  OR?: Maybe<Array<VideoWhereInput>>
+}
+
+export type VideoWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type Worker = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Whether the worker is still active */
+  isActive: Scalars['Boolean']
+  /** Runtime identifier */
+  workerId: Scalars['String']
+  /** Associated working group */
+  type: WorkerType
+  /** Custom metadata set by provider */
+  metadata?: Maybe<Scalars['String']>
+}
+
+export type WorkerConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<WorkerEdge>
+  pageInfo: PageInfo
+}
+
+export type WorkerCreateInput = {
+  isActive: Scalars['Boolean']
+  workerId: Scalars['String']
+  type: WorkerType
+  metadata?: Maybe<Scalars['String']>
+}
+
+export type WorkerEdge = {
+  node: Worker
+  cursor: Scalars['String']
+}
+
+export enum WorkerOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsActiveAsc = 'isActive_ASC',
+  IsActiveDesc = 'isActive_DESC',
+  WorkerIdAsc = 'workerId_ASC',
+  WorkerIdDesc = 'workerId_DESC',
+  TypeAsc = 'type_ASC',
+  TypeDesc = 'type_DESC',
+  MetadataAsc = 'metadata_ASC',
+  MetadataDesc = 'metadata_DESC',
+}
+
+export enum WorkerType {
+  Gateway = 'GATEWAY',
+  Storage = 'STORAGE',
+}
+
+export type WorkerUpdateInput = {
+  isActive?: Maybe<Scalars['Boolean']>
+  workerId?: Maybe<Scalars['String']>
+  type?: Maybe<WorkerType>
+  metadata?: Maybe<Scalars['String']>
+}
+
+export type WorkerWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  isActive_eq?: Maybe<Scalars['Boolean']>
+  isActive_in?: Maybe<Array<Scalars['Boolean']>>
+  workerId_eq?: Maybe<Scalars['String']>
+  workerId_contains?: Maybe<Scalars['String']>
+  workerId_startsWith?: Maybe<Scalars['String']>
+  workerId_endsWith?: Maybe<Scalars['String']>
+  workerId_in?: Maybe<Array<Scalars['String']>>
+  type_eq?: Maybe<WorkerType>
+  type_in?: Maybe<Array<WorkerType>>
+  metadata_eq?: Maybe<Scalars['String']>
+  metadata_contains?: Maybe<Scalars['String']>
+  metadata_startsWith?: Maybe<Scalars['String']>
+  metadata_endsWith?: Maybe<Scalars['String']>
+  metadata_in?: Maybe<Array<Scalars['String']>>
+  AND?: Maybe<Array<WorkerWhereInput>>
+  OR?: Maybe<Array<WorkerWhereInput>>
+}
+
+export type WorkerWhereUniqueInput = {
+  id: Scalars['ID']
+}
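
The generated filter (`*WhereInput`), ordering (`*OrderByInput`) and enum types above are plain TypeScript shapes, so query filters can be composed in a type-checked way before being handed to a GraphQL client. A minimal sketch, assuming only the types shown in this file and a hypothetical module placed next to the generated output (the import path and the concrete filter values are illustrative, not part of the migration scripts):

```ts
import { VideoWhereInput, WorkerType, WorkerWhereInput } from './generated/schema'

// Illustrative filter: public, uncensored videos created after a given block.
const videoFilter: VideoWhereInput = {
  isPublic_eq: true,
  isCensored_eq: false,
  createdInBlock_gt: 100_000,
}

// Illustrative filter: active storage workers (e.g. candidates for serving data objects).
const workerFilter: WorkerWhereInput = {
  isActive_eq: true,
  type_eq: WorkerType.Storage,
}
```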

+ 208 - 0
utils/migration-scripts/src/giza-olympia/giza-query-node/queries/queries.graphql

@@ -0,0 +1,208 @@
+fragment VideoCategoryFields on VideoCategory {
+  id
+  name
+}
+
+fragment ChannelCategoryFields on ChannelCategory {
+  id
+  name
+}
+
+fragment StorageDataObjectFields on StorageDataObject {
+  id
+  updatedAt
+  ipfsHash
+  isAccepted
+  size
+  storageBagId
+}
+
+fragment StorageDataObjectConnectionFields on StorageDataObjectConnection {
+  edges {
+    node {
+      ...StorageDataObjectFields
+    }
+  }
+  pageInfo {
+    hasNextPage
+    endCursor
+  }
+}
+
+fragment VideoFields on Video {
+  id
+  categoryId
+  title
+  description
+  duration
+  thumbnailPhoto {
+    ...StorageDataObjectFields
+  }
+  language {
+    iso
+  }
+  hasMarketing
+  publishedBeforeJoystream
+  isPublic
+  isCensored
+  isExplicit
+  license {
+    code
+    attribution
+    customText
+  }
+  media {
+    ...StorageDataObjectFields
+  }
+  mediaMetadata {
+    encoding {
+      codecName
+      container
+      mimeMediaType
+    }
+    pixelWidth
+    pixelHeight
+    size
+  }
+  isFeatured
+  channel {
+    id
+    ownerMember {
+      id
+      controllerAccount
+    }
+  }
+}
+
+fragment VideoConnectionFields on VideoConnection {
+  edges {
+    node {
+      ...VideoFields
+    }
+  }
+  pageInfo {
+    hasNextPage
+    endCursor
+  }
+}
+
+fragment ChannelFields on Channel {
+  id
+  ownerMember {
+    id
+    controllerAccount
+  }
+  categoryId
+  rewardAccount
+  title
+  description
+  coverPhoto {
+    ...StorageDataObjectFields
+  }
+  avatarPhoto {
+    ...StorageDataObjectFields
+  }
+  isPublic
+  isCensored
+  language {
+    iso
+  }
+  videos {
+    id
+  }
+  collaborators {
+    id
+  }
+}
+
+fragment ChannelConnectionFields on ChannelConnection {
+  edges {
+    node {
+      ...ChannelFields
+    }
+  }
+  pageInfo {
+    hasNextPage
+    endCursor
+  }
+}
+
+fragment DistributionBucketFields on DistributionBucket {
+  distributing
+  bags {
+    id
+  }
+  operators {
+    status
+    metadata {
+      nodeEndpoint
+    }
+  }
+}
+
+fragment MembershipFields on Membership {
+  id
+  handle
+  avatarUri
+  about
+  controllerAccount
+  rootAccount
+}
+
+fragment MembershipConnectionFields on MembershipConnection {
+  edges {
+    node {
+      ...MembershipFields
+    }
+  }
+  pageInfo {
+    hasNextPage
+    endCursor
+  }
+}
+
+query getVideoCategories {
+  videoCategories {
+    ...VideoCategoryFields
+  }
+}
+
+query getChannelsCategories {
+  channelCategories {
+    ...ChannelCategoryFields
+  }
+}
+
+query getDistributorsByBagIds($ids: [ID!]) {
+  distributionBuckets(where: { bags_some: { id_in: $ids }, distributing_eq: true }) {
+    ...DistributionBucketFields
+  }
+}
+
+query getDataObjectsPage($updatedAfter: DateTime, $limit: Int!, $lastCursor: String) {
+  storageDataObjectsConnection(
+    where: { updatedAt_gt: $updatedAfter, isAccepted_eq: true }
+    first: $limit
+    after: $lastCursor
+  ) {
+    ...StorageDataObjectConnectionFields
+  }
+}
+
+query getChannelsPage($limit: Int!, $lastCursor: String) {
+  channelsConnection(first: $limit, after: $lastCursor) {
+    ...ChannelConnectionFields
+  }
+}
+
+query getVideosPage($limit: Int!, $lastCursor: String) {
+  videosConnection(first: $limit, after: $lastCursor) {
+    ...VideoConnectionFields
+  }
+}
+
+query getMembershipsPage($limit: Int!, $lastCursor: String) {
+  membershipsConnection(first: $limit, after: $lastCursor) {
+    ...MembershipConnectionFields
+  }
+}
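
All of the `*Connection` queries above follow the same cursor-pagination pattern: request `first: $limit` after `$lastCursor`, then keep going while `pageInfo.hasNextPage` is true. Below is a minimal sketch of consuming `getVideosPage` this way; it assumes a generic `request` helper that POSTs the operation (together with the fragments it spreads) to the query node and returns the `data` field, and it is not the migration scripts' actual snapshot code:

```ts
type PageInfo = { hasNextPage: boolean; endCursor?: string | null }
type Connection<T> = { edges: { node: T }[]; pageInfo: PageInfo }

// Drains videosConnection page by page using the getVideosPage operation above.
async function fetchAllVideos<T>(
  request: (query: string, variables: Record<string, unknown>) => Promise<{ videosConnection: Connection<T> }>,
  getVideosPageQuery: string, // the getVideosPage operation plus its fragments
  limit = 1000
): Promise<T[]> {
  const videos: T[] = []
  let lastCursor: string | undefined
  let hasNextPage = true
  while (hasNextPage) {
    const { videosConnection } = await request(getVideosPageQuery, { limit, lastCursor })
    videos.push(...videosConnection.edges.map((e) => e.node))
    hasNextPage = videosConnection.pageInfo.hasNextPage
    lastCursor = videosConnection.pageInfo.endCursor ?? undefined
  }
  return videos
}
```

The same loop shape applies to `getChannelsPage`, `getMembershipsPage` and `getDataObjectsPage`; the latter additionally filters on `updatedAt_gt` and `isAccepted_eq`, which is what allows data objects to be fetched incrementally.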