
Integration tests: CLI base utils & tests, bash scripts

Leszek Wiesner, 3 years ago · commit ae97af37f1
32 changed files with 1431 additions and 592 deletions
  1. package.json (+0 -1)
  2. tests/integration-tests/get-host-ip.sh (+15 -0)
  3. tests/integration-tests/package.json (+13 -9)
  4. tests/integration-tests/run-full-tests.sh (+41 -0)
  5. tests/integration-tests/run-proposals-tests.sh (+29 -0)
  6. tests/integration-tests/run-test-node-docker.sh (+78 -0)
  7. tests/integration-tests/src/Api.ts (+4 -0)
  8. tests/integration-tests/src/QueryNodeApi.ts (+12 -0)
  9. tests/integration-tests/src/Scenario.ts (+2 -2)
  10. tests/integration-tests/src/cli/base.ts (+54 -0)
  11. tests/integration-tests/src/cli/distributor.ts (+48 -0)
  12. tests/integration-tests/src/cli/joystream.ts (+47 -0)
  13. tests/integration-tests/src/cli/storage.ts (+66 -0)
  14. tests/integration-tests/src/cli/utils.ts (+126 -0)
  15. tests/integration-tests/src/flows/clis/createChannel.ts (+69 -0)
  16. tests/integration-tests/src/flows/clis/initDistributionBucket.ts (+35 -0)
  17. tests/integration-tests/src/flows/clis/initStorageBucket.ts (+49 -0)
  18. tests/integration-tests/src/flows/proposals/index.ts (+1 -1)
  19. tests/integration-tests/src/flows/storage/initDistribution.ts (+1 -0)
  20. tests/integration-tests/src/flows/storage/initStorage.ts (+1 -0)
  21. tests/integration-tests/src/flows/working-groups/leadOpening.ts (+58 -49)
  22. tests/integration-tests/src/graphql/generated/queries.ts (+141 -0)
  23. tests/integration-tests/src/graphql/generated/schema.ts (+431 -494)
  24. tests/integration-tests/src/graphql/queries/content.graphql (+68 -0)
  25. tests/integration-tests/src/scenarios/forum.ts (+1 -1)
  26. tests/integration-tests/src/scenarios/forumPostDeletionsBug.ts (+1 -1)
  27. tests/integration-tests/src/scenarios/full.ts (+8 -6)
  28. tests/integration-tests/src/scenarios/initStorageAndDistribution.ts (+12 -0)
  29. tests/integration-tests/src/scenarios/setupNewChain.ts (+1 -1)
  30. tests/integration-tests/src/scenarios/workingGroups.ts (+1 -1)
  31. tests/integration-tests/src/sender.ts (+2 -2)
  32. yarn.lock (+16 -24)

+ 0 - 1
package.json

@@ -41,7 +41,6 @@
     "@polkadot/rpc-provider": "5.9.1",
     "@polkadot/x-global": "7.3.1",
     "@polkadot/networks": "7.3.1",
-    "babel-core": "^7.0.0-bridge.0",
     "typescript": "^4.4.3",
     "bn.js": "4.12.0",
     "rxjs": "^7.4.0",

+ 15 - 0
tests/integration-tests/get-host-ip.sh

@@ -0,0 +1,15 @@
+#!/usr/bin/env bash
+set -e
+
+if [[ "$OSTYPE" == "linux-gnu" ]]; then
+    docker network inspect --format='{{range .IPAM.Config}}{{.Gateway}}{{end}}' joystream_default
+elif [[ "$OSTYPE" == "darwin"* ]]; then
+    # Try en0 first; if it's not set, fall back to en1 (wired or wireless)
+    ipconfig getifaddr en0 || ipconfig getifaddr en1
+else
+    echo 'localhost'
+fi
+
+# Some alternative approaches (cross platform)
+# ifconfig | grep -Eo 'inet (addr:)?([0-9]*\.){3}[0-9]*' | grep -Eo '([0-9]*\.){3}[0-9]*' | grep -v '127.0.0.1'
+# ip route | awk '/default/ {print $3}'

+ 13 - 9
tests/integration-tests/package.json

@@ -10,14 +10,14 @@
     "lint": "eslint . --quiet --ext .ts",
     "checks": "tsc --noEmit --pretty && prettier ./ --check && yarn lint",
     "format": "prettier ./ --write",
-    "generate:graphql-types": "graphql-codegen",
-    "generate:all": "yarn generate:graphql-types"
+    "generate:types:graphql": "graphql-codegen",
+    "generate:all": "yarn generate:types:graphql"
   },
   "dependencies": {
     "@apollo/client": "^3.2.5",
     "@joystream/types": "^0.18.0",
-    "@polkadot/api": "5.3.2",
-    "@polkadot/keyring": "^7.1.1",
+    "@polkadot/api": "5.9.1",
+    "@polkadot/keyring": "7.3.1",
     "@types/async-lock": "^1.1.2",
     "@types/bn.js": "^4.11.5",
     "@types/lowdb": "^1.0.9",
@@ -26,16 +26,20 @@
     "cross-fetch": "^3.0.6",
     "dotenv": "^8.2.0",
     "fs": "^0.0.1-security",
-    "uuid": "^7.0.3"
+    "uuid": "^7.0.3",
+    "bmp-js": "^0.1.0",
+    "@types/bmp-js": "^0.1.0",
+    "node-cleanup": "^2.1.2",
+    "graphql": "^14.7.0"
   },
   "devDependencies": {
-    "@polkadot/ts": "^0.4.4",
+    "@polkadot/ts": "^0.4.8",
     "@types/chai": "^4.2.11",
     "@types/uuid": "^7.0.2",
     "chai": "^4.2.0",
-    "prettier": "2.0.2",
-    "ts-node": "^8.8.1",
-    "typescript": "^4.3.5",
+    "prettier": "^2.2.1",
+    "ts-node": "^10.2.1",
+    "typescript": "^4.4.3",
     "@graphql-codegen/cli": "^1.21.4",
     "@graphql-codegen/typescript": "^1.22.0",
     "@graphql-codegen/import-types-preset": "^1.18.1",

+ 41 - 0
tests/integration-tests/run-full-tests.sh

@@ -0,0 +1,41 @@
+#!/usr/bin/env bash
+set -e
+
+SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")"
+cd $SCRIPT_PATH
+
+set -a
+. ../../.env
+set +a
+
+CONTAINER_ID=$(./run-test-node-docker.sh)
+
+
+function cleanup() {
+    docker logs ${CONTAINER_ID} --tail 15
+    docker-compose -f ../../docker-compose.yml down -v
+}
+
+trap cleanup EXIT
+
+sleep 3
+
+# Display runtime version
+yarn workspace api-scripts tsnode-strict src/status.ts | grep Runtime
+
+# Start a query-node
+../../query-node/start.sh
+
+# Setup storage & distribution
+HOST_IP=$(./get-host-ip.sh)
+export COLOSSUS_1_URL="http://${HOST_IP}:3333"
+export COLOSSUS_1_TRANSACTOR_KEY=$(docker run --rm --pull=always docker.io/parity/subkey:2.0.1 inspect ${COLOSSUS_1_TRANSACTOR_URI} --output-type json | jq .ss58Address -r)
+export DISTRIBUTOR_1_URL="http://${HOST_IP}:3334"
+./run-test-scenario.sh initStorageAndDistribution
+
+# Start colossus & argus
+docker-compose -f ../../docker-compose.yml up -d colossus-1
+docker-compose -f ../../docker-compose.yml up -d distributor-1
+
+# Run full tests reusing the existing keys
+REUSE_KEYS=true IGNORE_HIRED_LEADS=true ./run-test-scenario.sh full

+ 29 - 0
tests/integration-tests/run-proposals-tests.sh

@@ -0,0 +1,29 @@
+#!/usr/bin/env bash
+set -e
+
+SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")"
+cd $SCRIPT_PATH
+
+set -a
+. ../../.env
+set +a
+
+export JOYSTREAM_NODE_TAG=$(TEST_NODE=true ../../scripts/runtime-code-shasum.sh)
+
+# Fresh start
+docker-compose -f ../../docker-compose.yml down -v
+
+if [ "${CHAINSPEC_NODE}" == true ]
+then
+  ./run-test-node-docker.sh
+else
+  docker-compose up -d joystream-node
+fi
+
+sleep 3
+
+# Start a query-node
+../../query-node/start.sh
+
+# Run the proposals test scenario
+./run-test-scenario.sh proposals

+ 78 - 0
tests/integration-tests/run-test-node-docker.sh

@@ -0,0 +1,78 @@
+#!/usr/bin/env bash
+set -e
+
+SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")"
+cd $SCRIPT_PATH
+
+# Log only to stderr.
+# The only stdout output from this script should be the container id of the node, printed at the very end.
+
+# Location that will be mounted to /spec in containers
+# This is where the initial members and balances files and generated chainspec files will be located.
+DATA_PATH=$PWD/data
+mkdir -p ${DATA_PATH}
+
+# Initial account balance for sudo account
+SUDO_INITIAL_BALANCE=${SUDO_INITIAL_BALANCE:=100000000}
+SUDO_ACCOUNT_URI=${SUDO_ACCOUNT_URI:="//Alice"}
+SUDO_ACCOUNT=$(docker run --rm --pull=always docker.io/parity/subkey:2.0.1 inspect ${SUDO_ACCOUNT_URI} --output-type json | jq .ss58Address -r)
+
+# Source of funds for all new accounts that are created in the tests.
+TREASURY_INITIAL_BALANCE=${TREASURY_INITIAL_BALANCE:=100000000}
+TREASURY_ACCOUNT_URI=${TREASURY_ACCOUNT_URI:=$SUDO_ACCOUNT_URI}
+TREASURY_ACCOUNT=$(docker run --rm --pull=always docker.io/parity/subkey:2.0.1 inspect ${TREASURY_ACCOUNT_URI} --output-type json | jq .ss58Address -r)
+
+>&2 echo "sudo account from suri: ${SUDO_ACCOUNT}"
+>&2 echo "treasury account from suri: ${TREASURY_ACCOUNT}"
+
+# The docker image tag to use for joystream/node
+RUNTIME=${RUNTIME:=$(TEST_NODE=true ../../scripts/runtime-code-shasum.sh)}
+
+echo "{
+  \"balances\":[
+    [\"$SUDO_ACCOUNT\", $SUDO_INITIAL_BALANCE],
+    [\"$TREASURY_ACCOUNT\", $TREASURY_INITIAL_BALANCE]
+  ]
+}" > ${DATA_PATH}/initial-balances.json
+
+# Remember: if there are initial members at genesis, the query-node needs to be bootstrapped,
+# or any events processed for these members will cause the processor to fail.
+if [ "${MAKE_SUDO_MEMBER}" == true ]
+then
+  echo "
+    [{
+      \"member_id\":0,
+      \"root_account\":\"$SUDO_ACCOUNT\",
+      \"controller_account\":\"$SUDO_ACCOUNT\",
+      \"handle\":\"sudosudo\",
+      \"avatar_uri\":\"https://sudo.com/avatar.png\",
+      \"about\":\"Sudo\",
+      \"registered_at_time\":0
+    }]
+  " > ${DATA_PATH}/initial-members.json
+else
+  echo "[]" > ${DATA_PATH}/initial-members.json
+fi
+
+# Create a chain spec file
+docker run --rm -v ${DATA_PATH}:/spec --entrypoint ./chain-spec-builder joystream/node:${RUNTIME} \
+  new \
+  --authority-seeds Alice \
+  --sudo-account ${SUDO_ACCOUNT} \
+  --deployment dev \
+  --chain-spec-path /spec/chain-spec.json \
+  --initial-balances-path /spec/initial-balances.json \
+  --initial-members-path /spec/initial-members.json
+
+# Convert the chain spec file to a raw chainspec file
+docker run --rm -v ${DATA_PATH}:/spec joystream/node:${RUNTIME} build-spec \
+  --raw --disable-default-bootnode \
+  --chain /spec/chain-spec.json > ${DATA_PATH}/chain-spec-raw.json
+
+# Start a chain with generated chain spec
+export JOYSTREAM_NODE_TAG=${RUNTIME}
+docker-compose -f ../../docker-compose.yml run -d -v ${DATA_PATH}:/spec --name joystream-node \
+  -p 9944:9944 -p 9933:9933 joystream-node \
+  --alice --validator --unsafe-ws-external --unsafe-rpc-external \
+  --rpc-methods Unsafe --rpc-cors=all -l runtime \
+  --chain /spec/chain-spec-raw.json

+ 4 - 0
tests/integration-tests/src/Api.ts

@@ -230,6 +230,10 @@ export class Api {
     return this.sender.signAndSend(tx, sender)
   }
 
+  public getSuri(addr: AccountId | string): string {
+    return this.factory.getSuri(addr)
+  }
+
   public async sendExtrinsicsAndGetResults(
     // Extrinsics can be separated into batches in order to make sure they are processed in the specified order
     txs: SubmittableExtrinsic<'promise'>[] | SubmittableExtrinsic<'promise'>[][],

+ 12 - 0
tests/integration-tests/src/QueryNodeApi.ts

@@ -295,6 +295,10 @@ import {
   GetProposalDiscussionThreadsByIdsQuery,
   GetProposalDiscussionThreadsByIdsQueryVariables,
   GetProposalDiscussionThreadsByIds,
+  GetChannelById,
+  GetChannelByIdQuery,
+  GetChannelByIdQueryVariables,
+  ChannelFieldsFragment,
 } from './graphql/generated/queries'
 import { Maybe } from './graphql/generated/schema'
 import { OperationDefinitionNode } from 'graphql'
@@ -1071,4 +1075,12 @@ export class QueryNodeApi {
       GetProposalDiscussionThreadsByIdsQueryVariables
     >(GetProposalDiscussionThreadsByIds, { ids: ids.map((id) => id.toString()) }, 'proposalDiscussionThreads')
   }
+
+  public async channelById(id: string): Promise<Maybe<ChannelFieldsFragment>> {
+    return this.uniqueEntityQuery<GetChannelByIdQuery, GetChannelByIdQueryVariables>(
+      GetChannelById,
+      { id },
+      'channelByUniqueInput'
+    )
+  }
 }

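The new channelById query is exercised by the createChannel flow further down; in isolation, the polling pattern looks roughly like this (a sketch, assuming a QueryNodeApi instance and the Utils assertion helper used throughout the flows):

```typescript
import { QueryNodeApi } from './QueryNodeApi'
import { Utils } from './utils'

async function waitForChannel(query: QueryNodeApi, channelId: string): Promise<void> {
  // The query node indexes events asynchronously, so poll until the
  // channel shows up (or tryQueryWithTimeout gives up and rethrows)
  await query.tryQueryWithTimeout(
    () => query.channelById(channelId),
    (channel) => {
      Utils.assert(channel, 'Channel not found')
    }
  )
}
```
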
+ 2 - 2
tests/integration-tests/src/Scenario.ts

@@ -9,7 +9,7 @@ import { Job } from './Job'
 import { JobManager } from './JobManager'
 import { ResourceManager } from './Resources'
 import fetch from 'cross-fetch'
-import fs, { readFileSync } from 'fs'
+import fs, { existsSync, readFileSync } from 'fs'
 
 export type ScenarioProps = {
   env: NodeJS.ProcessEnv
@@ -64,7 +64,7 @@ export async function scenario(scene: (props: ScenarioProps) => Promise<void>):
   const reuseKeys = Boolean(env.REUSE_KEYS)
   let startKeyId: number
   let customKeys: string[] = []
-  if (reuseKeys) {
+  if (reuseKeys && existsSync(OUTPUT_FILE_PATH)) {
     const output = JSON.parse(readFileSync(OUTPUT_FILE_PATH).toString()) as TestsOutput
     startKeyId = output.keyIds.final
     customKeys = output.keyIds.custom

+ 54 - 0
tests/integration-tests/src/cli/base.ts

@@ -0,0 +1,54 @@
+import path from 'path'
+import { execFile } from 'child_process'
+import { promisify } from 'util'
+import { Sender } from '../sender'
+
+export type CommandResult = { stdout: string; stderr: string; out: string }
+
+export abstract class CLI {
+  protected env: Record<string, string>
+  protected readonly rootPath: string
+  protected readonly binPath: string
+  protected defaultArgs: string[]
+
+  constructor(rootPath: string, defaultEnv: Record<string, string> = {}, defaultArgs: string[] = []) {
+    this.rootPath = rootPath
+    this.binPath = path.resolve(rootPath, './bin/run')
+    this.env = {
+      ...process.env,
+      AUTO_CONFIRM: 'true',
+      FORCE_COLOR: '0',
+      ...defaultEnv,
+    }
+    this.defaultArgs = [...defaultArgs]
+  }
+
+  protected getArgs(customArgs: string[]): string[] {
+    return [...this.defaultArgs, ...customArgs]
+  }
+
+  protected getFlagStringValue(args: string[], flag: string, alias?: string): string | undefined {
+    const flagIndex = args.lastIndexOf(flag)
+    const aliasIndex = alias ? args.lastIndexOf(alias) : -1
+    const flagOrAliasIndex = Math.max(flagIndex, aliasIndex)
+    if (flagOrAliasIndex === -1) {
+      return undefined
+    }
+    const nextArg = args[flagOrAliasIndex + 1]
+    return nextArg
+  }
+
+  async run(command: string, customArgs: string[] = [], lockKeys: string[] = []): Promise<CommandResult> {
+    const pExecFile = promisify(execFile)
+    const { env } = this
+    const { stdout, stderr } = await Sender.asyncLock.acquire(
+      lockKeys.map((k) => `nonce-${k}`),
+      () =>
+        pExecFile(this.binPath, [command, ...this.getArgs(customArgs)], {
+          env,
+          cwd: this.rootPath,
+        })
+    )
+    return { stdout, stderr, out: stdout.trim() }
+  }
+}

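To make the base class's contract concrete, here is a hypothetical subclass (FaucetCLI, its root path, and its commands are illustrative only; the real subclasses follow in the next files):

```typescript
import path from 'path'
import { CLI, CommandResult } from './base'

// Hypothetical example: the binary is expected at <rootPath>/bin/run,
// as resolved by the CLI base class constructor
const CLI_ROOT_PATH = path.resolve(__dirname, '../../../../faucet')

export class FaucetCLI extends CLI {
  protected senderAddress: string

  constructor(senderAddress: string) {
    // defaultEnv is merged over process.env; defaultArgs are prepended
    // to the per-call args by CLI.getArgs()
    super(CLI_ROOT_PATH, { LOG_LEVEL: 'debug' }, ['--sender', senderAddress])
    this.senderAddress = senderAddress
  }

  async sendTokens(dest: string, amount: number): Promise<CommandResult> {
    // Lock on the sender address so the spawned CLI process and the test
    // runner's own extrinsics don't race for the same account nonce
    // (CLI.run maps each key to a `nonce-<key>` lock shared with Sender)
    return this.run('send', ['--to', dest, '--amount', amount.toString()], [this.senderAddress])
  }
}
```
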
+ 48 - 0
tests/integration-tests/src/cli/distributor.ts

@@ -0,0 +1,48 @@
+import path from 'path'
+import { spawn } from 'child_process'
+import { DistributorNodeConfiguration } from '@joystream/distributor-cli/src/types/generated/ConfigJson'
+import { CLI, CommandResult } from './base'
+import { WorkerId } from '@joystream/types/working-group'
+import { ProcessManager } from './utils'
+import Keyring from '@polkadot/keyring'
+
+const CLI_ROOT_PATH = path.resolve(__dirname, '../../../../distributor-node')
+
+export class DistributorCLI extends CLI {
+  protected keys: string[]
+
+  constructor(keyUris: string[]) {
+    const keys: DistributorNodeConfiguration['keys'] = keyUris.map((suri) => ({
+      suri,
+    })) as DistributorNodeConfiguration['keys']
+    const defaultEnv = {
+      JOYSTREAM_DISTRIBUTOR__KEYS: JSON.stringify(keys),
+    }
+    super(CLI_ROOT_PATH, defaultEnv)
+    const keyring = new Keyring({ type: 'sr25519' })
+    keyUris.forEach((uri) => keyring.addFromUri(uri))
+    this.keys = keyring.getPairs().map((p) => p.address)
+  }
+
+  async run(command: string, customArgs: string[] = [], keyLocks?: string[]): Promise<CommandResult> {
+    return super.run(command, customArgs, keyLocks || this.keys)
+  }
+
+  async spawnServer(
+    operatorId: number | WorkerId,
+    port = 3334,
+    buckets: number[] | 'all' = 'all'
+  ): Promise<ProcessManager> {
+    const { env } = this
+    const serverEnv = {
+      ...env,
+      JOYSTREAM_DISTRIBUTOR__PORT: port.toString(),
+      JOYSTREAM_DISTRIBUTOR__WORKER_ID: operatorId.toString(),
+      JOYSTREAM_DISTRIBUTOR__BUCKETS: buckets === 'all' ? 'all' : JSON.stringify(buckets),
+    }
+    const serverProcess = spawn(this.binPath, ['start'], { env: serverEnv, cwd: this.rootPath })
+    const serverManager = new ProcessManager('Distributor node server', serverProcess, 'stdout')
+    await serverManager.untilOutput(`listening on port ${port}`)
+    return serverManager
+  }
+}

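A usage sketch under stated assumptions ('//Alice' is a placeholder suri; the flows derive the real one via api.getSuri on the lead's role account):

```typescript
import { DistributorCLI } from './distributor'

async function example(): Promise<void> {
  const cli = new DistributorCLI(['//Alice'])

  // Commands are serialized per operator key via DistributorCLI.run above
  await cli.run('leader:set-buckets-per-bag-limit', ['--limit', '10'])

  // spawnServer resolves once the node logs that it is listening on the port
  const server = await cli.spawnServer(0, 3334, 'all')
  server.expectAlive() // throws if the subprocess has already exited
  // ... exercise http://localhost:3334 here ...
  server.kill()
}
```
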
+ 47 - 0
tests/integration-tests/src/cli/joystream.ts

@@ -0,0 +1,47 @@
+import { KeyringPair } from '@polkadot/keyring/types'
+import path from 'path'
+import { CLI, CommandResult } from './base'
+import { TmpFileManager } from './utils'
+import { ChannelCreationInputParameters } from '@joystream/cli/src/Types'
+
+const CLI_ROOT_PATH = path.resolve(__dirname, '../../../../cli')
+
+export class JoystreamCLI extends CLI {
+  protected keys: string[] = []
+  protected tmpFileManager: TmpFileManager
+
+  constructor(tmpFileManager: TmpFileManager) {
+    const defaultEnv = {
+      HOME: tmpFileManager.tmpDataDir,
+    }
+    super(CLI_ROOT_PATH, defaultEnv)
+    this.tmpFileManager = tmpFileManager
+  }
+
+  async init(): Promise<void> {
+    await this.run('api:setUri', [process.env.NODE_URL || 'ws://127.0.0.1:9944'])
+    await this.run('api:setQueryNodeEndpoint', [process.env.QUERY_NODE_URL || 'http://127.0.0.1:8081/graphql'])
+  }
+
+  async importKey(pair: KeyringPair): Promise<void> {
+    const jsonFile = this.tmpFileManager.jsonFile(pair.toJson())
+    await this.run('account:import', [
+      '--backupFilePath',
+      jsonFile,
+      '--name',
+      `Account${this.keys.length}`,
+      '--password',
+      '',
+    ])
+    this.keys.push(pair.address)
+  }
+
+  async run(command: string, customArgs: string[] = [], keyLocks?: string[]): Promise<CommandResult> {
+    return super.run(command, customArgs, keyLocks || this.keys)
+  }
+
+  async createChannel(inputData: ChannelCreationInputParameters, args: string[]): Promise<CommandResult> {
+    const jsonFile = this.tmpFileManager.jsonFile(inputData)
+    return this.run('content:createChannel', ['--input', jsonFile, ...args])
+  }
+}

+ 66 - 0
tests/integration-tests/src/cli/storage.ts

@@ -0,0 +1,66 @@
+import path from 'path'
+import { CLI, CommandResult } from './base'
+import { spawn } from 'child_process'
+import { v4 as uuid } from 'uuid'
+import { WorkerId } from '@joystream/types/working-group'
+import os from 'os'
+import { ProcessManager } from './utils'
+import fs from 'fs'
+import { Keyring } from '@polkadot/keyring'
+
+const CLI_ROOT_PATH = path.resolve(__dirname, '../../../../storage-node')
+
+export class StorageCLI extends CLI {
+  constructor(defaultSuri?: string) {
+    super(CLI_ROOT_PATH, undefined, defaultSuri ? ['--accountUri', defaultSuri] : [])
+  }
+
+  setDefaultSuri(defaultSuri: string): void {
+    this.defaultArgs = ['--accountUri', defaultSuri]
+  }
+
+  async run(command: string, customArgs: string[] = []): Promise<CommandResult> {
+    const args = this.getArgs(customArgs)
+    const accountUri = this.getFlagStringValue(args, '--accountUri', '-y')
+    if (!accountUri) {
+      throw new Error('Missing accountUri')
+    }
+    const accountKey = new Keyring({ type: 'sr25519' }).createFromUri(accountUri).address
+    return super.run(command, args, [accountKey])
+  }
+
+  async spawnServer(
+    operatorId: number | WorkerId,
+    port = 3333,
+    sync = true,
+    syncInterval = 1
+  ): Promise<ProcessManager> {
+    const queryNodeHost = new URL(process.env.QUERY_NODE_URL || '').host
+    const apiUrl = new URL(process.env.NODE_URL || '').toString()
+    const uploadsDir = path.join(os.tmpdir(), uuid())
+    fs.mkdirSync(uploadsDir)
+    const { env } = this
+    const args = [
+      ...this.defaultArgs,
+      '--worker',
+      operatorId.toString(),
+      '--port',
+      port.toString(),
+      '--queryNodeHost',
+      queryNodeHost,
+      '--apiUrl',
+      apiUrl,
+      '--uploads',
+      uploadsDir,
+    ]
+    if (sync) {
+      args.push('--sync')
+      args.push('--syncInterval')
+      args.push(syncInterval.toString())
+    }
+    const serverProcess = spawn(this.binPath, ['server', ...args], { env, cwd: this.rootPath })
+    const serverListener = new ProcessManager('Storage node server', serverProcess, 'stderr')
+    await serverListener.untilOutput('Listening')
+    return serverListener
+  }
+}

+ 126 - 0
tests/integration-tests/src/cli/utils.ts

@@ -0,0 +1,126 @@
+import fs, { mkdirSync, rmSync } from 'fs'
+import path from 'path'
+import { v4 as uuid } from 'uuid'
+import { ChildProcessWithoutNullStreams } from 'child_process'
+import { Utils } from '../utils'
+import _ from 'lodash'
+import bmp from 'bmp-js'
+import nodeCleanup from 'node-cleanup'
+
+export class TmpFileManager {
+  tmpDataDir: string
+
+  constructor(baseDir?: string) {
+    this.tmpDataDir = path.join(
+      baseDir || process.env.DATA_PATH || path.join(__filename, '../../../data'),
+      'joystream-testing',
+      uuid()
+    )
+    mkdirSync(this.tmpDataDir, { recursive: true })
+    nodeCleanup(() => {
+      rmSync(this.tmpDataDir, { recursive: true, force: true })
+    })
+  }
+
+  public jsonFile(value: unknown): string {
+    const tmpFilePath = path.join(this.tmpDataDir, `${uuid()}.json`)
+    fs.writeFileSync(tmpFilePath, JSON.stringify(value))
+    return tmpFilePath
+  }
+
+  public randomImgFile(width: number, height: number): string {
+    const data = Buffer.from(Array.from({ length: width * height * 3 }, () => _.random(0, 255)))
+    const rawBmp = bmp.encode({ width, height, data })
+    const tmpFilePath = path.join(this.tmpDataDir, `${uuid()}.bmp`)
+    fs.writeFileSync(tmpFilePath, rawBmp.data)
+    return tmpFilePath
+  }
+}
+
+type OutputType = 'stdout' | 'stderr'
+
+export class ProcessManager {
+  private label: string
+  private stdout = ''
+  private stderr = ''
+  private subprocess: ChildProcessWithoutNullStreams
+  private defaultOutput: OutputType
+  private onStdoutListener: (chunk: Uint8Array) => void
+  private onStderrListener: (chunk: Uint8Array) => void
+
+  constructor(
+    label: string,
+    subprocess: ChildProcessWithoutNullStreams,
+    defaultOutput: OutputType = 'stdout',
+    maxOutputSize = 1024 * 1024 * 10
+  ) {
+    this.label = label
+    this.defaultOutput = defaultOutput
+    this.subprocess = subprocess
+    const onDataListener = (outputType: OutputType) => (chunk: Uint8Array) => {
+      const chunkStr = Buffer.from(chunk).toString()
+      this[outputType] += chunkStr
+      if (this[outputType].length > maxOutputSize) {
+        this[outputType] = this[outputType].slice(-maxOutputSize)
+      }
+    }
+    this.onStdoutListener = onDataListener('stdout')
+    this.onStderrListener = onDataListener('stderr')
+
+    subprocess.stdout.on('data', this.onStdoutListener)
+    subprocess.stderr.on('data', this.onStderrListener)
+    nodeCleanup(() => {
+      console.log(this.recentOutput())
+      subprocess.kill()
+    })
+  }
+
+  private recentOutput() {
+    const length = parseInt(process.env.SUBPROCESSES_FINAL_LOG_LENGTH || '20')
+    return (
+      `\n\nLast STDOUT of ${this.label}:\n ${this.stdout.split('\n').slice(-length).join('\n')}\n\n` +
+      `Last STDERR of ${this.label}:\n ${this.stderr.split('\n').slice(-length).join('\n')}\n\n`
+    )
+  }
+
+  kill(): void {
+    this.subprocess.kill()
+  }
+
+  expectAlive(): void {
+    if (this.subprocess.exitCode !== null) {
+      throw new Error(`Process ${this.label} exited unexpectedly with code: ${this.subprocess.exitCode}`)
+    }
+  }
+
+  expectOutput(expected: string, outputType?: OutputType): void {
+    const outT = outputType || this.defaultOutput
+    if (!this[outT].includes(expected)) {
+      throw new Error(`Expected output: "${expected}" missing in ${this.label} process (${outT})`)

+    }
+  }
+
+  async untilOutput(
+    expected: string,
+    outputType?: 'stderr' | 'stdout',
+    failOnExit = true,
+    timeoutMs = 120000,
+    waitMs = 1000
+  ): Promise<void> {
+    const start = Date.now()
+    while (true) {
+      try {
+        this.expectOutput(expected, outputType)
+        return
+      } catch (e) {
+        if (failOnExit) {
+          this.expectAlive()
+        }
+        if (Date.now() - start + waitMs >= timeoutMs) {
+          throw new Error(`untilOutput timeout reached. ${(e as Error).message}`)
+        }
+        await Utils.wait(waitMs)
+      }
+    }
+  }
+}

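For orientation, a minimal usage sketch of the two helpers above (the input object and image dimensions are arbitrary):

```typescript
import { TmpFileManager } from './utils'

// All temp files land in a per-run uuid directory, which the nodeCleanup
// handler registered in the constructor removes on process exit
const tmp = new TmpFileManager()

// Serialize arbitrary input as a JSON file, e.g. to pass to a CLI via --input
const inputPath = tmp.jsonFile({ title: 'Test channel', isPublic: true })

// Generate a 300x300 BMP with random pixel data, e.g. for avatar uploads
const avatarPath = tmp.randomImgFile(300, 300)

console.log(inputPath, avatarPath)
```
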
+ 69 - 0
tests/integration-tests/src/flows/clis/createChannel.ts

@@ -0,0 +1,69 @@
+import { FlowProps } from '../../Flow'
+import { extendDebug } from '../../Debugger'
+import { JoystreamCLI } from '../../cli/joystream'
+import { BuyMembershipHappyCaseFixture } from '../../fixtures/membership/BuyMembershipHappyCaseFixture'
+import { FixtureRunner } from '../../Fixture'
+import { TmpFileManager } from '../../cli/utils'
+import { assert } from 'chai'
+import { Utils } from '../../utils'
+import { statSync } from 'fs'
+
+export default async function createChannel({ api, query }: FlowProps): Promise<void> {
+  const debug = extendDebug('flow:createChannel')
+  debug('Started')
+
+  // Create channel owner membership
+  const [channelOwnerKeypair] = await api.createKeyPairs(1)
+  const buyMembershipFixture = new BuyMembershipHappyCaseFixture(api, query, [channelOwnerKeypair.key.address])
+  await new FixtureRunner(buyMembershipFixture).run()
+
+  // Send some funds to pay the deletion_prize and fees
+  const channelOwnerBalance = api.consts.storage.dataObjectDeletionPrize.muln(2)
+  await api.treasuryTransferBalance(channelOwnerKeypair.key.address, channelOwnerBalance)
+
+  // Create Joystream CLI
+  const tmpFileManager = new TmpFileManager()
+  const joystreamCli = new JoystreamCLI(tmpFileManager)
+
+  // Init CLI, import & select channel owner key
+  await joystreamCli.init()
+  await joystreamCli.importKey(channelOwnerKeypair.key)
+
+  // Create channel
+  const avatarPhotoPath = tmpFileManager.randomImgFile(300, 300)
+  const coverPhotoPath = tmpFileManager.randomImgFile(1920, 500)
+  const channelInput = {
+    title: 'Test channel',
+    avatarPhotoPath,
+    coverPhotoPath,
+    description: 'This is a test channel',
+    isPublic: true,
+    language: 'en',
+    rewardAccount: channelOwnerKeypair.key.address,
+  }
+  const { out: createChannelOut } = await joystreamCli.createChannel(channelInput, ['--context', 'Member'])
+
+  const channelIdMatch = /Channel with id ([0-9]+) successfully created/.exec(createChannelOut)
+  if (!channelIdMatch) {
+    throw new Error(`No channel id found in output:\n${createChannelOut}`)
+  }
+  const [, channelId] = channelIdMatch
+
+  await query.tryQueryWithTimeout(
+    () => query.channelById(channelId),
+    (channel) => {
+      Utils.assert(channel, 'Channel not found')
+      assert.equal(channel.title, channelInput.title)
+      assert.equal(channel.description, channelInput.description)
+      assert.equal(channel.isPublic, channelInput.isPublic)
+      assert.equal(channel.language?.iso, channelInput.language)
+      assert.equal(channel.rewardAccount, channelInput.rewardAccount)
+      assert.equal(channel.avatarPhoto?.type.__typename, 'DataObjectTypeChannelAvatar')
+      assert.equal(channel.avatarPhoto?.size, statSync(avatarPhotoPath).size)
+      assert.equal(channel.coverPhoto?.type.__typename, 'DataObjectTypeChannelCoverPhoto')
+      assert.equal(channel.coverPhoto?.size, statSync(coverPhotoPath).size)
+    }
+  )
+
+  debug('Done')
+}

+ 35 - 0
tests/integration-tests/src/flows/clis/initDistributionBucket.ts

@@ -0,0 +1,35 @@
+import { FlowProps } from '../../Flow'
+import { extendDebug } from '../../Debugger'
+import { DistributorCLI } from '../../cli/distributor'
+
+export default async function initDistributionBucket({ api }: FlowProps): Promise<void> {
+  const debug = extendDebug('flow:initDistributionBucketViaCLI')
+  debug('Started')
+
+  const [leaderId, leader] = await api.getLeader('distributionWorkingGroup')
+
+  const operatorId = leaderId.toString()
+  const leaderSuri = api.getSuri(leader.role_account_id)
+
+  const cli = new DistributorCLI([leaderSuri])
+
+  await cli.run('leader:set-buckets-per-bag-limit', ['--limit', '10'])
+  const { out: familyId } = await cli.run('leader:create-bucket-family')
+  const { out: bucketIndex } = await cli.run('leader:create-bucket', ['--familyId', familyId, '--acceptingBags', 'yes'])
+  const bucketId = `${familyId}:${bucketIndex}`
+  await cli.run('leader:update-bag', ['--bagId', 'static:council', '--familyId', familyId, '--add', bucketIndex])
+  await cli.run('leader:update-dynamic-bag-policy', ['--type', 'Channel', '--policy', `${familyId}:1`])
+  await cli.run('leader:update-bucket-mode', ['--bucketId', bucketId, '--mode', 'on'])
+  await cli.run('leader:invite-bucket-operator', ['--bucketId', bucketId, '--workerId', operatorId])
+  await cli.run('operator:accept-invitation', ['--bucketId', bucketId, '--workerId', operatorId])
+  await cli.run('operator:set-metadata', [
+    '--bucketId',
+    bucketId,
+    '--workerId',
+    operatorId,
+    '--endpoint',
+    'http://localhost:3334',
+  ])
+
+  debug('Done')
+}

+ 49 - 0
tests/integration-tests/src/flows/clis/initStorageBucket.ts

@@ -0,0 +1,49 @@
+import { FlowProps } from '../../Flow'
+import { extendDebug } from '../../Debugger'
+import { StorageCLI } from '../../cli/storage'
+
+export default async function initStorageBucket({ api }: FlowProps): Promise<void> {
+  const debug = extendDebug('flow:initStorageBucketViaCLI')
+  debug('Started')
+
+  const [leaderId, leader] = await api.getLeader('storageWorkingGroup')
+
+  const leaderSuri = api.getSuri(leader.role_account_id)
+  const transactorKey = '5DkE5YD8m5Yzno6EH2RTBnH268TDnnibZMEMjxwYemU4XevU' // //Colossus1
+
+  const operatorId = leaderId.toString()
+
+  const cli = new StorageCLI(leaderSuri)
+  await cli.run('leader:update-bag-limit', ['--limit', '10'])
+  await cli.run('leader:update-voucher-limits', ['--objects', '1000', '--size', '10000000000'])
+  const { out: bucketId } = await cli.run('leader:create-bucket', [
+    '--invited',
+    operatorId,
+    '--allow',
+    '--number',
+    '1000',
+    '--size',
+    '10000000000',
+  ])
+  await cli.run('operator:accept-invitation', [
+    '--workerId',
+    operatorId,
+    '--bucketId',
+    bucketId,
+    '--transactorAccountId',
+    transactorKey,
+  ])
+  await cli.run('leader:update-bag', ['--add', bucketId, '--bagId', 'static:council'])
+  await cli.run('leader:update-dynamic-bag-policy', ['--bagType', 'Channel', '--number', '1'])
+  await cli.run('operator:set-metadata', [
+    '--bucketId',
+    bucketId,
+    '--operatorId',
+    operatorId,
+    '--endpoint',
+    'http://localhost:3333',
+  ])
+  await cli.run('leader:update-data-fee', ['-f', '0'])
+
+  debug('Done')
+}

+ 1 - 1
tests/integration-tests/src/flows/proposals/index.ts

@@ -14,7 +14,7 @@ import { AllProposalsOutcomesFixture, TestedProposal } from '../../fixtures/prop
 export default async function creatingProposals({ api, query, lock }: FlowProps): Promise<void> {
   const debug = extendDebug('flow:creating-proposals')
   debug('Started')
-  api.enableDebugTxLogs()
+  api.enableVerboseTxLogs()
 
   debug('Creating test lead openings and applications...')
   const createLeadOpeningsFixture = new CreateOpeningsFixture(

+ 1 - 0
tests/integration-tests/src/flows/storage/initDistribution.ts

@@ -97,6 +97,7 @@ export const doubleBucketConfig: InitDistributionConfig = {
 export default function createFlow({ families }: InitDistributionConfig) {
   return async function initDistribution({ api }: FlowProps): Promise<void> {
     const debug = extendDebug('flow:initDistribution')
+    api.enableDebugTxLogs()
     debug('Started')
 
     // Get working group leaders

+ 1 - 0
tests/integration-tests/src/flows/storage/initStorage.ts

@@ -83,6 +83,7 @@ export const doubleBucketConfig: InitStorageConfig = {
 export default function createFlow({ buckets, dynamicBagPolicy }: InitStorageConfig) {
   return async function initDistribution({ api }: FlowProps): Promise<void> {
     const debug = extendDebug('flow:initStorage')
+    api.enableDebugTxLogs()
     debug('Started')
 
     // Get working group leaders

+ 58 - 49
tests/integration-tests/src/flows/working-groups/leadOpening.ts

@@ -6,65 +6,74 @@ import {
   ApplicantDetails,
   DEFAULT_OPENING_PARAMS,
 } from '../../fixtures/workingGroups'
-
+import { WorkingGroupModuleName } from '../../types'
 import { extendDebug } from '../../Debugger'
 import { FixtureRunner } from '../../Fixture'
 import { AddStakingAccountsHappyCaseFixture, BuyMembershipHappyCaseFixture } from '../../fixtures/membership'
 import { workingGroups } from '../../consts'
 
-export default async function leadOpening({ api, query, env }: FlowProps): Promise<void> {
-  await Promise.all(
-    workingGroups.map(async (group) => {
-      const debug = extendDebug(`flow:lead-opening:${group}`)
-      debug('Started')
-      api.enableDebugTxLogs()
+export default (skipIfAlreadySet = false, groups: WorkingGroupModuleName[] = workingGroups) =>
+  async function leadOpening({ api, query }: FlowProps): Promise<void> {
+    await Promise.all(
+      groups.map(async (group) => {
+        const debug = extendDebug(`flow:lead-opening:${group}`)
+        debug('Started')
+        api.enableDebugTxLogs()
+        const leadId = await api.query[group].currentLead()
+        if (leadId.isSome) {
+          if (skipIfAlreadySet) {
+            debug('Leader already set, skipping...')
+            return
+          }
+          throw new Error('Cannot hire lead - lead already set!')
+        }
 
-      const createOpeningFixture = new CreateOpeningsFixture(api, query, group, undefined, true)
-      const openingRunner = new FixtureRunner(createOpeningFixture)
-      await openingRunner.run()
-      const [openingId] = createOpeningFixture.getCreatedOpeningIds()
-      const { stake: openingStake, metadata: openingMetadata } = DEFAULT_OPENING_PARAMS
+        const createOpeningFixture = new CreateOpeningsFixture(api, query, group, undefined, true)
+        const openingRunner = new FixtureRunner(createOpeningFixture)
+        await openingRunner.run()
+        const [openingId] = createOpeningFixture.getCreatedOpeningIds()
+        const { stake: openingStake, metadata: openingMetadata } = DEFAULT_OPENING_PARAMS
 
-      const [roleAccount, stakingAccount, rewardAccount] = (await api.createKeyPairs(3)).map(({ key }) => key.address)
-      const buyMembershipFixture = new BuyMembershipHappyCaseFixture(api, query, [roleAccount])
-      await new FixtureRunner(buyMembershipFixture).run()
-      const [memberId] = buyMembershipFixture.getCreatedMembers()
+        const [roleAccount, stakingAccount, rewardAccount] = (await api.createKeyPairs(3)).map(({ key }) => key.address)
+        const buyMembershipFixture = new BuyMembershipHappyCaseFixture(api, query, [roleAccount])
+        await new FixtureRunner(buyMembershipFixture).run()
+        const [memberId] = buyMembershipFixture.getCreatedMembers()
 
-      const addStakingAccFixture = new AddStakingAccountsHappyCaseFixture(api, query, [
-        {
-          asMember: memberId,
-          account: stakingAccount,
-        },
-      ])
-      await new FixtureRunner(addStakingAccFixture).run()
-      await api.treasuryTransferBalance(stakingAccount, openingStake)
+        const addStakingAccFixture = new AddStakingAccountsHappyCaseFixture(api, query, [
+          {
+            asMember: memberId,
+            account: stakingAccount,
+          },
+        ])
+        await new FixtureRunner(addStakingAccFixture).run()
+        await api.treasuryTransferBalance(stakingAccount, openingStake)
 
-      const applicantDetails: ApplicantDetails = {
-        memberId,
-        roleAccount,
-        rewardAccount,
-        stakingAccount,
-      }
+        const applicantDetails: ApplicantDetails = {
+          memberId,
+          roleAccount,
+          rewardAccount,
+          stakingAccount,
+        }
 
-      const applyOnOpeningFixture = new ApplyOnOpeningsHappyCaseFixture(api, query, group, [
-        {
-          openingId,
-          openingMetadata,
-          applicants: [applicantDetails],
-        },
-      ])
-      const applicationRunner = new FixtureRunner(applyOnOpeningFixture)
-      await applicationRunner.run()
-      const [applicationId] = applyOnOpeningFixture.getCreatedApplicationsByOpeningId(openingId)
+        const applyOnOpeningFixture = new ApplyOnOpeningsHappyCaseFixture(api, query, group, [
+          {
+            openingId,
+            openingMetadata,
+            applicants: [applicantDetails],
+          },
+        ])
+        const applicationRunner = new FixtureRunner(applyOnOpeningFixture)
+        await applicationRunner.run()
+        const [applicationId] = applyOnOpeningFixture.getCreatedApplicationsByOpeningId(openingId)
 
-      // Run query node checks once this part of the flow is done
-      await Promise.all([openingRunner.runQueryNodeChecks(), applicationRunner.runQueryNodeChecks()])
+        // Run query node checks once this part of the flow is done
+        await Promise.all([openingRunner.runQueryNodeChecks(), applicationRunner.runQueryNodeChecks()])
 
-      // Fill opening
-      const fillOpeningFixture = new FillOpeningsFixture(api, query, group, [openingId], [[applicationId]], true)
-      await new FixtureRunner(fillOpeningFixture).runWithQueryNodeChecks()
+        // Fill opening
+        const fillOpeningFixture = new FillOpeningsFixture(api, query, group, [openingId], [[applicationId]], true)
+        await new FixtureRunner(fillOpeningFixture).runWithQueryNodeChecks()
 
-      debug('Done')
-    })
-  )
-}
+        debug('Done')
+      })
+    )
+  }

+ 141 - 0
tests/integration-tests/src/graphql/generated/queries.ts

@@ -1,6 +1,71 @@
 import * as Types from './schema'
 
 import gql from 'graphql-tag'
+type DataObjectTypeFields_DataObjectTypeChannelAvatar_Fragment = {
+  __typename: 'DataObjectTypeChannelAvatar'
+  channel?: Types.Maybe<{ id: string }>
+}
+
+type DataObjectTypeFields_DataObjectTypeChannelCoverPhoto_Fragment = {
+  __typename: 'DataObjectTypeChannelCoverPhoto'
+  channel?: Types.Maybe<{ id: string }>
+}
+
+type DataObjectTypeFields_DataObjectTypeVideoMedia_Fragment = {
+  __typename: 'DataObjectTypeVideoMedia'
+  video?: Types.Maybe<{ id: string }>
+}
+
+type DataObjectTypeFields_DataObjectTypeVideoThumbnail_Fragment = {
+  __typename: 'DataObjectTypeVideoThumbnail'
+  video?: Types.Maybe<{ id: string }>
+}
+
+type DataObjectTypeFields_DataObjectTypeUnknown_Fragment = { __typename: 'DataObjectTypeUnknown' }
+
+export type DataObjectTypeFieldsFragment =
+  | DataObjectTypeFields_DataObjectTypeChannelAvatar_Fragment
+  | DataObjectTypeFields_DataObjectTypeChannelCoverPhoto_Fragment
+  | DataObjectTypeFields_DataObjectTypeVideoMedia_Fragment
+  | DataObjectTypeFields_DataObjectTypeVideoThumbnail_Fragment
+  | DataObjectTypeFields_DataObjectTypeUnknown_Fragment
+
+export type StorageDataObjectFieldsFragment = {
+  id: string
+  ipfsHash: string
+  isAccepted: boolean
+  size: any
+  deletionPrize: any
+  unsetAt?: Types.Maybe<any>
+  storageBagId: string
+  type:
+    | DataObjectTypeFields_DataObjectTypeChannelAvatar_Fragment
+    | DataObjectTypeFields_DataObjectTypeChannelCoverPhoto_Fragment
+    | DataObjectTypeFields_DataObjectTypeVideoMedia_Fragment
+    | DataObjectTypeFields_DataObjectTypeVideoThumbnail_Fragment
+    | DataObjectTypeFields_DataObjectTypeUnknown_Fragment
+}
+
+export type ChannelFieldsFragment = {
+  title?: Types.Maybe<string>
+  description?: Types.Maybe<string>
+  isPublic?: Types.Maybe<boolean>
+  rewardAccount?: Types.Maybe<string>
+  isCensored: boolean
+  language?: Types.Maybe<{ iso: string }>
+  ownerMember?: Types.Maybe<{ id: string }>
+  ownerCuratorGroup?: Types.Maybe<{ id: string }>
+  category?: Types.Maybe<{ name?: Types.Maybe<string> }>
+  avatarPhoto?: Types.Maybe<StorageDataObjectFieldsFragment>
+  coverPhoto?: Types.Maybe<StorageDataObjectFieldsFragment>
+}
+
+export type GetChannelByIdQueryVariables = Types.Exact<{
+  id: Types.Scalars['ID']
+}>
+
+export type GetChannelByIdQuery = { channelByUniqueInput?: Types.Maybe<ChannelFieldsFragment> }
+
 export type CouncilMemberFieldsFragment = { id: string; member: { id: string } }
 
 export type ElectedCouncilFieldsFragment = { councilMembers: Array<CouncilMemberFieldsFragment> }
@@ -1900,6 +1965,74 @@ export type GetBudgetSpendingEventsByEventIdsQueryVariables = Types.Exact<{
 
 export type GetBudgetSpendingEventsByEventIdsQuery = { budgetSpendingEvents: Array<BudgetSpendingEventFieldsFragment> }
 
+export const DataObjectTypeFields = gql`
+  fragment DataObjectTypeFields on DataObjectType {
+    __typename
+    ... on DataObjectTypeChannelAvatar {
+      channel {
+        id
+      }
+    }
+    ... on DataObjectTypeChannelCoverPhoto {
+      channel {
+        id
+      }
+    }
+    ... on DataObjectTypeVideoThumbnail {
+      video {
+        id
+      }
+    }
+    ... on DataObjectTypeVideoMedia {
+      video {
+        id
+      }
+    }
+  }
+`
+export const StorageDataObjectFields = gql`
+  fragment StorageDataObjectFields on StorageDataObject {
+    id
+    ipfsHash
+    isAccepted
+    size
+    type {
+      ...DataObjectTypeFields
+    }
+    deletionPrize
+    unsetAt
+    storageBagId
+  }
+  ${DataObjectTypeFields}
+`
+export const ChannelFields = gql`
+  fragment ChannelFields on Channel {
+    title
+    description
+    isPublic
+    language {
+      iso
+    }
+    rewardAccount
+    isCensored
+    ownerMember {
+      id
+    }
+    ownerCuratorGroup {
+      id
+    }
+    category {
+      name
+    }
+    avatarPhoto {
+      ...StorageDataObjectFields
+    }
+    coverPhoto {
+      ...StorageDataObjectFields
+    }
+  }
+  ${StorageDataObjectFields}
+`
 export const CouncilMemberFields = gql`
   fragment CouncilMemberFields on CouncilMember {
     id
@@ -3657,6 +3790,14 @@ export const BudgetSpendingEventFields = gql`
     rationale
   }
 `
+export const GetChannelById = gql`
+  query getChannelById($id: ID!) {
+    channelByUniqueInput(where: { id: $id }) {
+      ...ChannelFields
+    }
+  }
+  ${ChannelFields}
+`
 export const GetCurrentCouncilMembers = gql`
   query getCurrentCouncilMembers {
     electedCouncils(where: { endedAtBlock_eq: null }) {

File diff suppressed because it is too large
+ 431 - 494
tests/integration-tests/src/graphql/generated/schema.ts


+ 68 - 0
tests/integration-tests/src/graphql/queries/content.graphql

@@ -0,0 +1,68 @@
+fragment DataObjectTypeFields on DataObjectType {
+  __typename
+  ... on DataObjectTypeChannelAvatar {
+    channel {
+      id
+    }
+  }
+  ... on DataObjectTypeChannelCoverPhoto {
+    channel {
+      id
+    }
+  }
+  ... on DataObjectTypeVideoThumbnail {
+    video {
+      id
+    }
+  }
+  ... on DataObjectTypeVideoMedia {
+    video {
+      id
+    }
+  }
+}
+
+fragment StorageDataObjectFields on StorageDataObject {
+  id
+  ipfsHash
+  isAccepted
+  size
+  type {
+    ...DataObjectTypeFields
+  }
+  deletionPrize
+  unsetAt
+  storageBagId
+}
+
+fragment ChannelFields on Channel {
+  title
+  description
+  isPublic
+  language {
+    iso
+  }
+  rewardAccount
+  isCensored
+  ownerMember {
+    id
+  }
+  ownerCuratorGroup {
+    id
+  }
+  category {
+    name
+  }
+  avatarPhoto {
+    ...StorageDataObjectFields
+  }
+  coverPhoto {
+    ...StorageDataObjectFields
+  }
+}
+
+query getChannelById($id: ID!) {
+  channelByUniqueInput(where: { id: $id }) {
+    ...ChannelFields
+  }
+}

+ 1 - 1
tests/integration-tests/src/scenarios/forum.ts

@@ -8,7 +8,7 @@ import threadTags from '../flows/forum/threadTags'
 import { scenario } from '../Scenario'
 
 scenario(async ({ job }) => {
-  const sudoHireLead = job('hiring working group leads', leadOpening)
+  const sudoHireLead = job('hiring working group leads', leadOpening())
   job('forum categories', categories).requires(sudoHireLead)
   job('forum threads', threads).requires(sudoHireLead)
   job('forum thread tags', threadTags).requires(sudoHireLead)

+ 1 - 1
tests/integration-tests/src/scenarios/forumPostDeletionsBug.ts

@@ -3,6 +3,6 @@ import multiplePostDeletionsBug from '../flows/forum/multiplePostDeletionsBug'
 import { scenario } from '../Scenario'
 
 scenario(async ({ job }) => {
-  const sudoHireLead = job('hiring working group leads', leadOpening)
+  const sudoHireLead = job('hiring working group leads', leadOpening())
   job('forum post deletions bug', multiplePostDeletionsBug).requires(sudoHireLead)
 })

+ 8 - 6
tests/integration-tests/src/scenarios/full.ts

@@ -51,6 +51,10 @@ scenario(async ({ job, env }) => {
   job('transferring invites', transferringInvites).after(membershipSystemJob)
   job('managing staking accounts', managingStakingAccounts).after(membershipSystemJob)
 
+  // Council (should not interrupt proposalsJob!)
+  const secondCouncilJob = job('electing second council', electCouncil).requires(membershipSystemJob)
+  const councilFailuresJob = job('council election failures', failToElect).requires(secondCouncilJob)
+
   // Proposals:
   const proposalsJob = job('proposals & proposal discussion', [
     proposals,
@@ -59,10 +63,12 @@ scenario(async ({ job, env }) => {
     exactExecutionBlock,
     expireProposal,
     proposalsDiscussion,
-  ]).requires(membershipSystemJob)
+  ]).requires(councilFailuresJob)
 
   // Working groups
-  const sudoHireLead = job('sudo lead opening', leadOpening).after(proposalsJob)
+  const sudoHireLead = job('sudo lead opening', leadOpening(process.env.IGNORE_HIRED_LEADS === 'true')).after(
+    proposalsJob
+  )
   job('openings and applications', openingsAndApplications).requires(sudoHireLead)
   job('upcoming openings', upcomingOpenings).requires(sudoHireLead)
   job('group status', groupStatus).requires(sudoHireLead)
@@ -76,8 +82,4 @@ scenario(async ({ job, env }) => {
   job('forum polls', polls).requires(sudoHireLead)
   job('forum posts', posts).requires(sudoHireLead)
   job('forum moderation', moderation).requires(sudoHireLead)
-
-  // Council
-  const secondCouncilJob = job('electing second council', electCouncil).requires(membershipSystemJob)
-  job('council election failures', failToElect).requires(secondCouncilJob)
 })

+ 12 - 0
tests/integration-tests/src/scenarios/initStorageAndDistribution.ts

@@ -0,0 +1,12 @@
+import leaderSetup from '../flows/working-groups/leadOpening'
+import initStorage, { singleBucketConfig as defaultStorageConfig } from '../flows/storage/initStorage'
+import initDistribution, { singleBucketConfig as defaultDistributionConfig } from '../flows/storage/initDistribution'
+import { scenario } from '../Scenario'
+import updateAccountsFlow from '../misc/updateAllWorkerRoleAccountsFlow'
+
+scenario(async ({ job }) => {
+  const setupLead = job('setup leads', leaderSetup(true, ['storageWorkingGroup', 'distributionWorkingGroup']))
+  const updateWorkerAccounts = job('Update worker accounts', updateAccountsFlow).after(setupLead)
+  job('initialize storage system', initStorage(defaultStorageConfig)).after(updateWorkerAccounts)
+  job('initialize distribution system', initDistribution(defaultDistributionConfig)).after(updateWorkerAccounts)
+})

+ 1 - 1
tests/integration-tests/src/scenarios/setupNewChain.ts

@@ -7,7 +7,7 @@ import { scenario } from '../Scenario'
 
 scenario(async ({ job }) => {
   job('Elect Council', electCouncil)
-  const leads = job('Set WorkingGroup Leads', leaderSetup)
+  const leads = job('Set WorkingGroup Leads', leaderSetup())
   const updateWorkerAccounts = job('Update worker accounts', updateAccountsFlow).after(leads)
 
   if (!process.env.SKIP_STORAGE_AND_DISTRIBUTION) {

+ 1 - 1
tests/integration-tests/src/scenarios/workingGroups.ts

@@ -7,7 +7,7 @@ import { scenario } from '../Scenario'
 import groupBudget from '../flows/working-groups/groupBudget'
 
 scenario(async ({ job }) => {
-  const sudoHireLead = job('sudo lead opening', leadOpening)
+  const sudoHireLead = job('sudo lead opening', leadOpening())
   job('openings and applications', openingsAndApplications).requires(sudoHireLead)
   job('upcoming openings', upcomingOpenings).requires(sudoHireLead)
   job('group status', groupStatus).requires(sudoHireLead)

+ 2 - 2
tests/integration-tests/src/sender.ts

@@ -19,7 +19,7 @@ const nonceCacheByAccount = new Map<string, number>()
 
 export class Sender {
   private readonly api: ApiPromise
-  private static readonly asyncLock: AsyncLock = new AsyncLock()
+  static readonly asyncLock: AsyncLock = new AsyncLock()
   private readonly keyring: Keyring
   private readonly debug: Debugger.Debugger
   private logs: LogLevel = LogLevel.None
@@ -128,7 +128,7 @@ export class Sender {
     // Instead use a single lock for all calls, to force all transactions to be submitted in the same order
     // as the calls to signAndSend. Otherwise it raises the chance of race conditions.
     // This happens in rare cases and has led some tests to fail occasionally in the past.
-    await Sender.asyncLock.acquire('tx-queue', async () => {
+    await Sender.asyncLock.acquire(['tx-queue', `nonce-${account.toString()}`], async () => {
       // The node sometimes returns an invalid account nonce at the exact time a new block is produced.
       // For a split second the node will then not take "pending" transactions into account,
       // that's why we must partially rely on the cached nonce.

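Since Sender.asyncLock is now public, the CLI wrappers in src/cli/base.ts acquire the same nonce-&lt;address&gt; keys before spawning a subprocess, so an in-process extrinsic and a CLI command for the same account cannot interleave. A minimal sketch of async-lock's multi-key acquire (the address is a placeholder):

```typescript
import AsyncLock from 'async-lock'

const lock = new AsyncLock()
const address = '5GrwvaEF...' // placeholder account address

async function submit(): Promise<void> {
  // Acquiring an array of keys waits until ALL of them are free, so this
  // callback cannot overlap with any other holder of 'tx-queue' or of
  // this account's nonce lock
  await lock.acquire(['tx-queue', `nonce-${address}`], async () => {
    // sign and send the extrinsic here
  })
}
```
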
+ 16 - 24
yarn.lock

@@ -2389,7 +2389,7 @@
     "@polkadot/util-crypto" "^7.3.1"
     rxjs "^7.3.0"
 
-"@polkadot/api@5.3.2", "@polkadot/api@5.9.1":
+"@polkadot/api@5.9.1":
   version "5.9.1"
   resolved "https://registry.yarnpkg.com/@polkadot/api/-/api-5.9.1.tgz#ce314cc34f0a47098d039db7b9036bb491c2898c"
   integrity sha512-POpIXn/Ao+NLB0uMldXdXU44dVbRr6+6Ax77Z0R285M8Z2EiF5jl2K3SPvlowLo4SntxiCSaHQxCekYhUcJKlw==
@@ -2406,7 +2406,7 @@
     eventemitter3 "^4.0.7"
     rxjs "^7.3.0"
 
-"@polkadot/keyring@7.3.1", "@polkadot/keyring@^7.1.1", "@polkadot/keyring@^7.3.1":
+"@polkadot/keyring@7.3.1", "@polkadot/keyring@^7.3.1":
   version "7.3.1"
   resolved "https://registry.yarnpkg.com/@polkadot/keyring/-/keyring-7.3.1.tgz#bf36115cfb395567bec9cf13c8e3fc0fb39c802a"
   integrity sha512-3lbwIjUql8yjs6AR2fMdCgmTc5D9ne7+y2jqHmGjyzVQFz1w1jiHb+N38L0pwl9/23UxmzC3aVvHLfl3gEGSIQ==
@@ -2447,13 +2447,6 @@
     "@polkadot/x-ws" "^7.3.1"
     eventemitter3 "^4.0.7"
 
-"@polkadot/ts@^0.4.4":
-  version "0.4.4"
-  resolved "https://registry.yarnpkg.com/@polkadot/ts/-/ts-0.4.4.tgz#e86aa47c2bcbc70ac8385b31014c81927c4b0a88"
-  integrity sha512-lzB8lg8GfdJlA7RdeoOJVFopecN4i++JndbUs6jW7AgRz+joeXQIIRomVgCNE52nW1uWpXMELnlvEP812v7sVw==
-  dependencies:
-    "@types/chrome" "^0.0.145"
-
 "@polkadot/ts@^0.4.8":
   version "0.4.8"
   resolved "https://registry.yarnpkg.com/@polkadot/ts/-/ts-0.4.8.tgz#9cc9542e2f9c3b49bf142201f299d17589198c13"
@@ -2905,6 +2898,13 @@
   resolved "https://registry.yarnpkg.com/@types/bluebird/-/bluebird-3.5.36.tgz#00d9301d4dc35c2f6465a8aec634bb533674c652"
   integrity sha512-HBNx4lhkxN7bx6P0++W8E289foSu8kO8GCk2unhuVggO+cE7rh9DhZUyPhUxNRG9m+5B5BTKxZQ5ZP92x/mx9Q==
 
+"@types/bmp-js@^0.1.0":
+  version "0.1.0"
+  resolved "https://registry.yarnpkg.com/@types/bmp-js/-/bmp-js-0.1.0.tgz#301afe2bb3ac7ef0f18465966e4166f0491b3332"
+  integrity sha512-uMU85ROcmlY1f4mVPTlNodRXa6Z5f0AIxvv5b0pvjty3KNg7ljf5lNSspHgaF6iFDCiGpLQmJna+VwEpUC9TyA==
+  dependencies:
+    "@types/node" "*"
+
 "@types/bn.js@^4.11.5", "@types/bn.js@^4.11.6":
   version "4.11.6"
   resolved "https://registry.yarnpkg.com/@types/bn.js/-/bn.js-4.11.6.tgz#c306c70d9358aaea33cd4eda092a742b9505967c"
@@ -2935,14 +2935,6 @@
   resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.2.21.tgz#9f35a5643129df132cf3b5c1ec64046ea1af0650"
   integrity sha512-yd+9qKmJxm496BOV9CMNaey8TWsikaZOwMRwPHQIjcOJM9oV+fi9ZMNw3JsVnbEEbo2gRTDnGEBv8pjyn67hNg==
 
-"@types/chrome@^0.0.145":
-  version "0.0.145"
-  resolved "https://registry.yarnpkg.com/@types/chrome/-/chrome-0.0.145.tgz#6c53ae0af5f25350b07bfd24cf459b5fe65cd9b8"
-  integrity sha512-vLvTMmfc8mvwOZzkmn2UwlWSNu0t0txBkyuIv8NgihRkvFCe6XJX65YZAgAP/RdBit3enhU2GTxCr+prn4uZmA==
-  dependencies:
-    "@types/filesystem" "*"
-    "@types/har-format" "*"
-
 "@types/chrome@^0.0.157":
   version "0.0.157"
   resolved "https://registry.yarnpkg.com/@types/chrome/-/chrome-0.0.157.tgz#5a50bd378f4f632383c6ebbc34c88fb87d501f58"
@@ -4744,11 +4736,6 @@ axios@^0.24.0:
   dependencies:
     follow-redirects "^1.14.4"
 
-babel-core@^7.0.0-bridge.0:
-  version "7.0.0-bridge.0"
-  resolved "https://registry.yarnpkg.com/babel-core/-/babel-core-7.0.0-bridge.0.tgz#95a492ddd90f9b4e9a4a1da14eb335b87b634ece"
-  integrity sha512-poPX9mZH/5CSanm50Q+1toVci6pv5KSRv/5TWCwtzQS5XEwn40BcCrgIeMFWP9CKKIniKXNxoIOnOq4VVlGXhg==
-
 babel-jest@^24.9.0:
   version "24.9.0"
   resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-24.9.0.tgz#3fc327cb8467b89d14d7bc70e315104a783ccd54"
@@ -5053,6 +5040,11 @@ bluebird@~3.4.1:
   resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.4.7.tgz#f72d760be09b7f76d08ed8fae98b289a8d05fab3"
   integrity sha1-9y12C+Cbf3bQjtj66Ysomo0F+rM=
 
+bmp-js@^0.1.0:
+  version "0.1.0"
+  resolved "https://registry.yarnpkg.com/bmp-js/-/bmp-js-0.1.0.tgz#e05a63f796a6c1ff25f4771ec7adadc148c07233"
+  integrity sha1-4Fpj95amwf8l9Hcex62twUjAcjM=
+
 bn.js@4.12.0, bn.js@^4.11.8, bn.js@^4.11.9, bn.js@^4.12.0, bn.js@^5.1.2, bn.js@^5.1.3, bn.js@^5.2.0:
   version "4.12.0"
   resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.12.0.tgz#775b3f278efbb9718eec7361f483fb36fbbfea88"
@@ -17940,7 +17932,7 @@ ts-node@^7.0.1:
     source-map-support "^0.5.6"
     yn "^2.0.0"
 
-ts-node@^8, ts-node@^8.10, ts-node@^8.8.1:
+ts-node@^8, ts-node@^8.10:
   version "8.10.2"
   resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-8.10.2.tgz#eee03764633b1234ddd37f8db9ec10b75ec7fb8d"
   integrity sha512-ISJJGgkIpDdBhWVu3jufsWpK3Rzo7bdiIXJjQc0ynKxVOVcg2oIrf2H2cejminGrptVc6q6/uynAHNCuWGbpVA==
@@ -18199,7 +18191,7 @@ typescript-tuple@^2.2.1:
   dependencies:
     typescript-compare "^0.0.2"
 
-typescript@2.2.2, typescript@^3.0.3, typescript@^3.3, typescript@^3.8.3, typescript@^3.9.5, typescript@^3.9.7, typescript@^4.3.5, typescript@^4.4, typescript@^4.4.3:
+typescript@2.2.2, typescript@^3.0.3, typescript@^3.3, typescript@^3.8.3, typescript@^3.9.5, typescript@^3.9.7, typescript@^4.4, typescript@^4.4.3:
   version "4.4.3"
   resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.3.tgz#bdc5407caa2b109efd4f82fe130656f977a29324"
   integrity sha512-4xfscpisVgqqDfPaJo5vkd+Qd/ItkoagnHpufr+i2QCHBsNYp+G7UAoyFl8aPtx879u38wPV65rZ8qbGZijalA==

Some files were not shown because too many files changed in this diff