
storage-node-v2: Sync branch merge fixes.

Shamil Gadelshin
commit 94eb42ca84

+ 2 - 2
storage-node-v2/package.json

@@ -15,7 +15,7 @@
     "@oclif/plugin-help": "^3",
     "@polkadot/api": "4.2.1",
     "@types/base64url": "^2.0.0",
-    "@types/express": "4.17.13",
+    "@types/express": "4.17.1",
     "@types/file-type": "^10.9.1",
     "@types/lodash": "^4.14.171",
     "@types/multer": "^1.4.5",
@@ -32,7 +32,7 @@
     "blake3": "^2.1.4",
     "cross-fetch": "^3.1.4",
     "express": "4.17.1",
-    "express-openapi-validator": "^4.12.4",
+    "express-openapi-validator": "4.12.4",
     "express-winston": "^4.1.0",
     "fast-folder-size": "^1.4.0",
     "file-type": "^16.5.0",

+ 2 - 5
storage-node-v2/src/command-base/ApiCommandBase.ts

@@ -1,10 +1,6 @@
 import { Command, flags } from '@oclif/command'
 import { createApi } from '../services/runtime/api'
-import {
-  getAccountFromJsonFile,
-  getAlicePair,
-  getAccountFromUri
-} from '../services/runtime/accounts'
+import { getAccountFromJsonFile, getAlicePair, getAccountFromUri } from '../services/runtime/accounts'
 import { parseBagId } from '../services/helpers/bagTypes'
 import { KeyringPair } from '@polkadot/keyring/types'
 import { ApiPromise } from '@polkadot/api'
@@ -40,6 +36,7 @@ export default abstract class ApiCommandBase extends Command {
       char: 'y',
       description:
        'Account URI (optional). Has priority over the keyfile and password flags. Can be overridden by the ACCOUNT_URI environment variable.',
+    }),
   }
 
   static extraFlags = {
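
The restored `}),` closes the `accountUri` flag definition, without which the `flags` object literal does not parse. A minimal sketch of the precedence the flag description promises (the helper is hypothetical, not part of this diff):

```ts
// ACCOUNT_URI environment variable overrides the --accountUri flag, which in
// turn takes priority over the keyfile/password flags.
function resolveAccountUri(flagValue?: string): string | undefined {
  return process.env.ACCOUNT_URI ?? flagValue
}
```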

+ 3 - 11
storage-node-v2/src/commands/dev/sync.ts

@@ -35,8 +35,7 @@ export default class DevSync extends Command {
     dataSourceOperatorHost: flags.string({
       char: 'o',
       required: false,
-      description:
-        'Storage node host and port (e.g.: some.com:8081) to get data from.',
+      description: 'Storage node host and port (e.g.: some.com:8081) to get data from.',
     }),
     uploads: flags.string({
       char: 'd',
@@ -53,18 +52,11 @@ export default class DevSync extends Command {
     const queryNodeHost = flags.queryNodeHost ?? 'localhost:8081'
     const queryNodeUrl = `http://${queryNodeHost}/graphql`
     const syncWorkersNumber = flags.syncWorkersNumber ?? 20
-    const dataSourceOperatorHost =
-      flags.dataSourceOperatorHost ?? 'localhost:3333'
+    const dataSourceOperatorHost = flags.dataSourceOperatorHost ?? 'localhost:3333'
     const operatorUrl = `http://${dataSourceOperatorHost}/`
 
     try {
-      await performSync(
-        flags.workerId,
-        syncWorkersNumber,
-        queryNodeUrl,
-        flags.uploads,
-        operatorUrl
-      )
+      await performSync(flags.workerId, syncWorkersNumber, queryNodeUrl, flags.uploads, operatorUrl)
     } catch (err) {
       logger.error(err)
       logger.error(JSON.stringify(err, null, 2))

+ 1 - 1
storage-node-v2/src/commands/leader/update-bag.ts

@@ -65,7 +65,7 @@ export default class LeaderUpdateBag extends ApiCommandBase {
     const account = this.getAccount(flags)
     const api = await this.getApi()
 
-    const success = await updateStorageBucketsForBag(api, bagId, account, flags.add, flags.remove)
+    const success = await updateStorageBucketsForBag(api, flags.bagId, account, flags.add, flags.remove)
 
     this.exitAfterRuntimeCall(success)
   }

+ 6 - 7
storage-node-v2/src/commands/leader/update-dynamic-bag-policy.ts

@@ -45,13 +45,12 @@ export default class LeaderUpdateDynamicBagPolicy extends ApiCommandBase {
 
     const api = await this.getApi()
     const dynamicBagType = parseDynamicBagType(flags.bagType)
-    const success =
-      await updateNumberOfStorageBucketsInDynamicBagCreationPolicy(
-        api,
-        account,
-        dynamicBagType,
-        newNumber
-      )
+    const success = await updateNumberOfStorageBucketsInDynamicBagCreationPolicy(
+      api,
+      account,
+      dynamicBagType,
+      newNumber
+    )
 
     this.exitAfterRuntimeCall(success)
   }

+ 11 - 42
storage-node-v2/src/commands/server.ts

@@ -68,8 +68,7 @@ export default class Server extends ApiCommandBase {
     }),
     disableUploadAuth: flags.boolean({
       char: 'a',
-      description:
-        'Disable uploading authentication (should be used in testing-context only).',
+      description: 'Disable uploading authentication (should be used in a testing context only).',
       default: false,
     }),
     ...ApiCommandBase.flags,
@@ -101,13 +100,7 @@ export default class Server extends ApiCommandBase {
     if (flags.sync) {
       logger.info(`Synchronization enabled.`)
 
-      runSyncWithInterval(
-        flags.worker,
-        queryNodeUrl,
-        flags.uploads,
-        flags.syncWorkersNumber,
-        flags.syncInterval
-      )
+      runSyncWithInterval(flags.worker, queryNodeUrl, flags.uploads, flags.syncWorkersNumber, flags.syncInterval)
     }
 
     const account = this.getAccount(flags)
@@ -125,8 +118,7 @@ export default class Server extends ApiCommandBase {
         api,
         account,
         workerId,
-		maxFileSize,
-        this.config,
+        maxFileSize,
         uploadsDir: flags.uploads,
         tempDirName,
         process: this.config,
@@ -173,23 +165,12 @@ function runSyncWithInterval(
     logger.info(`Resume syncing....`)
 
     try {
-      await performSync(
-        workerId,
-        syncWorkersNumber,
-        queryNodeUrl,
-        uploadsDirectory
-      )
+      await performSync(workerId, syncWorkersNumber, queryNodeUrl, uploadsDirectory)
     } catch (err) {
       logger.error(`Critical sync error: ${err}`)
     }
 
-    runSyncWithInterval(
-      workerId,
-      queryNodeUrl,
-      uploadsDirectory,
-      syncWorkersNumber,
-      syncIntervalMinutes
-    )
+    runSyncWithInterval(workerId, queryNodeUrl, uploadsDirectory, syncWorkersNumber, syncIntervalMinutes)
   }, 0)
 }
 
@@ -201,10 +182,7 @@ function runSyncWithInterval(
  * @param tempDirName - temporary directory name within the uploading directory
  * @returns void promise.
  */
-async function removeTempDirectory(
-  uploadsDir: string,
-  tempDirName: string
-): Promise<void> {
+async function removeTempDirectory(uploadsDir: string, tempDirName: string): Promise<void> {
   try {
     logger.info(`Removing temp directory ...`)
     const tempFileUploadingDir = path.join(uploadsDir, tempDirName)
@@ -225,23 +203,14 @@ async function removeTempDirectory(
  * @param account - Joystream account KeyringPair
  * @returns void promise.
  */
-async function verifyWorkerId(
-  api: ApiPromise,
-  workerId: number,
-  account: KeyringPair
-): Promise<void> {
+async function verifyWorkerId(api: ApiPromise, workerId: number, account: KeyringPair): Promise<void> {
   // Cast Codec type to Worker type
-  const workerObj = (await api.query.storageWorkingGroup.workerById(
-    workerId
-  )) as unknown
+  const workerObj = (await api.query.storageWorkingGroup.workerById(workerId)) as unknown
   const worker = workerObj as Worker
 
   if (worker.role_account_id.toString() !== account.address) {
-    throw new CLIError(
-      `Provided worker ID doesn't match the Joystream account.`,
-      {
-        exit: ExitCodes.InvalidWorkerId,
-      }
-    )
+    throw new CLIError(`Provided worker ID doesn't match the Joystream account.`, {
+      exit: ExitCodes.InvalidWorkerId,
+    })
   }
 }
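
For context, the `runSyncWithInterval` calls collapsed in this file belong to a self-rescheduling loop rather than a `setInterval` timer, so a slow sync iteration cannot overlap the next one. A minimal standalone sketch under that assumption (the hunk shows a `0` delay, so the real code may sleep inside the callback instead):

```ts
// Reschedule only after the current iteration finishes; errors are logged and
// do not break the loop.
function runSyncLoop(syncIntervalMinutes: number, sync: () => Promise<void>): void {
  setTimeout(async () => {
    try {
      await sync()
    } catch (err) {
      console.error(`Critical sync error: ${err}`)
    }
    runSyncLoop(syncIntervalMinutes, sync)
  }, syncIntervalMinutes * 60 * 1000)
}
```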

+ 4 - 15
storage-node-v2/src/services/helpers/bagTypes.ts

@@ -28,9 +28,7 @@ export class BagIdValidationError extends CLIError {
  * @param bagType - dynamic bag type string
  * @returns The DynamicBagType instance.
  */
-export function parseDynamicBagType(
-  bagType: DynamicBagTypeKey
-): DynamicBagType {
+export function parseDynamicBagType(bagType: DynamicBagTypeKey): DynamicBagType {
   return createJoystreamType('DynamicBagType', bagType)
 }
 
@@ -108,10 +106,7 @@ class BagIdParser {
 
         for (const group of groups) {
           if (group.toLowerCase() === actualGroup) {
-            const workingGroup: WorkingGroup = createJoystreamType(
-              'WorkingGroup',
-              group
-            )
+            const workingGroup: WorkingGroup = createJoystreamType('WorkingGroup', group)
             const staticBagId: Static = createJoystreamType('Static', {
               'WorkingGroup': workingGroup,
             })
@@ -147,10 +142,7 @@ class BagIdParser {
             const dynamic = {} as Record<DynamicBagTypeKey, number>
             dynamic[dynamicBagType as DynamicBagTypeKey] = parsedId
 
-            const dynamicBagId: Dynamic = createJoystreamType(
-              'Dynamic',
-              dynamic
-            )
+            const dynamicBagId: Dynamic = createJoystreamType('Dynamic', dynamic)
             const constructedBagId: BagId = createJoystreamType('BagId', {
               'Dynamic': dynamicBagId,
             })
@@ -168,9 +160,6 @@ class BagIdParser {
 /**
  * Creates Joystream type using type registry.
  */
-function createJoystreamType<T extends keyof InterfaceTypes>(
-  type: T,
-  value: unknown
-): InterfaceTypes[T] {
+function createJoystreamType<T extends keyof InterfaceTypes>(type: T, value: unknown): InterfaceTypes[T] {
   return createType(registry, type, value)
 }
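
The collapsed `createJoystreamType` keeps its generic constraint, which is what gives each call site a precise return type. An illustrative call (the `'Member'` value is a plausible example, not taken from this diff):

```ts
// T is inferred as 'DynamicBagType', so bagType is typed as
// InterfaceTypes['DynamicBagType'] rather than a generic Codec.
const bagType = createJoystreamType('DynamicBagType', 'Member')
```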

+ 4 - 9
storage-node-v2/src/services/logger.ts

@@ -147,6 +147,7 @@ export function createStdConsoleLogger(): winston.Logger {
     transports,
   })
 }
+/**
  * Creates Winston logger with Elastic search.
  *
  * @returns Winston logger
@@ -158,17 +159,13 @@ function createElasticLogger(elasticSearchEndpoint: string): winston.Logger {
   // Formats
   loggerOptions.format = winston.format.combine(
     winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss:ms' }),
-    winston.format.printf(
-      (info) => `${info.timestamp} ${info.level}: ${info.message}`
-    )
+    winston.format.printf((info) => `${info.timestamp} ${info.level}: ${info.message}`)
   )
 
   // Transports
   let transports: transport[] = []
   if (loggerOptions.transports !== undefined) {
-    transports = Array.isArray(loggerOptions.transports)
-      ? loggerOptions.transports
-      : [loggerOptions.transports]
+    transports = Array.isArray(loggerOptions.transports) ? loggerOptions.transports : [loggerOptions.transports]
   }
 
   const esTransport = createElasticTransport(elasticSearchEndpoint)
@@ -202,9 +199,7 @@ export function initElasticLogger(elasticSearchEndpoint: string): void {
  * @param elasticSearchEndpoint - elastic search engine endpoint.
  * @returns elastic search winston transport
  */
-function createElasticTransport(
-  elasticSearchEndpoint: string
-): winston.transport {
+function createElasticTransport(elasticSearchEndpoint: string): winston.transport {
   const esTransportOpts = {
     level: 'debug', // TODO: consider changing to warn
     clientOpts: { node: elasticSearchEndpoint, maxRetries: 5 },

+ 11 - 34
storage-node-v2/src/services/queryNode/api.ts

@@ -1,10 +1,4 @@
-import {
-  ApolloClient,
-  NormalizedCacheObject,
-  HttpLink,
-  InMemoryCache,
-  DocumentNode,
-} from '@apollo/client'
+import { ApolloClient, NormalizedCacheObject, HttpLink, InMemoryCache, DocumentNode } from '@apollo/client'
 import fetch from 'cross-fetch'
 import {
   GetStorageBucketDetails,
@@ -20,11 +14,7 @@ import {
   GetDataObjectDetailsQueryVariables,
   GetDataObjectDetails,
 } from './generated/queries'
-import {
-  Maybe,
-  StorageBucketWhereInput,
-  StorageBagWhereInput,
-} from './generated/schema'
+import { Maybe, StorageBucketWhereInput, StorageBagWhereInput } from './generated/schema'
 
 /**
  * Query node class helper. Encapsulates custom queries.
@@ -74,11 +64,7 @@ export class QueryNodeApi {
   protected async firstEntityQuery<
     QueryT extends { [k: string]: unknown[] },
     VariablesT extends Record<string, unknown>
-  >(
-    query: DocumentNode,
-    variables: VariablesT,
-    resultKey: keyof QueryT
-  ): Promise<QueryT[keyof QueryT][number] | null> {
+  >(query: DocumentNode, variables: VariablesT, resultKey: keyof QueryT): Promise<QueryT[keyof QueryT][number] | null> {
     const result = await this.apolloClient.query<QueryT, VariablesT>({
       query,
       variables,
@@ -100,11 +86,7 @@ export class QueryNodeApi {
   protected async multipleEntitiesQuery<
     QueryT extends { [k: string]: unknown[] },
     VariablesT extends Record<string, unknown>
-  >(
-    query: DocumentNode,
-    variables: VariablesT,
-    resultKey: keyof QueryT
-  ): Promise<QueryT[keyof QueryT] | null> {
+  >(query: DocumentNode, variables: VariablesT, resultKey: keyof QueryT): Promise<QueryT[keyof QueryT] | null> {
     const result = await this.apolloClient.query<QueryT, VariablesT>({
       query,
       variables,
@@ -122,10 +104,7 @@ export class QueryNodeApi {
    * @param offset - starting record of the page
    * @param limit - page size
    */
-  public async getStorageBucketDetails(
-    offset: number,
-    limit: number
-  ): Promise<Array<StorageBucketDetailsFragment>> {
+  public async getStorageBucketDetails(offset: number, limit: number): Promise<Array<StorageBucketDetailsFragment>> {
     const result = await this.multipleEntitiesQuery<
       GetStorageBucketDetailsQuery,
       GetStorageBucketDetailsQueryVariables
@@ -151,10 +130,11 @@ export class QueryNodeApi {
     limit: number
   ): Promise<Array<StorageBagDetailsFragment>> {
     const input: StorageBucketWhereInput = { id_in: bucketIds }
-    const result = await this.multipleEntitiesQuery<
-      GetStorageBagDetailsQuery,
-      GetStorageBagDetailsQueryVariables
-    >(GetStorageBagDetails, { offset, limit, bucketIds: input }, 'storageBags')
+    const result = await this.multipleEntitiesQuery<GetStorageBagDetailsQuery, GetStorageBagDetailsQueryVariables>(
+      GetStorageBagDetails,
+      { offset, limit, bucketIds: input },
+      'storageBags'
+    )
 
     if (result === null) {
       return []
@@ -176,10 +156,7 @@ export class QueryNodeApi {
     limit: number
   ): Promise<Array<DataObjectDetailsFragment>> {
     const input: StorageBagWhereInput = { id_in: bagIds }
-    const result = await this.multipleEntitiesQuery<
-      GetDataObjectDetailsQuery,
-      GetDataObjectDetailsQueryVariables
-    >(
+    const result = await this.multipleEntitiesQuery<GetDataObjectDetailsQuery, GetDataObjectDetailsQueryVariables>(
       GetDataObjectDetails,
       { offset, limit, bagIds: input },
       'storageDataObjects'
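
The reflowed generic helpers keep the constraint that ties a generated query document to its variables and result key. An illustrative caller, assuming an async context (endpoint and values are placeholders):

```ts
// The generics ensure 'storageBags' must be a key of GetStorageBagDetailsQuery,
// so a typo in the result key fails at compile time rather than at runtime.
const api = new QueryNodeApi('http://localhost:8081/graphql')
const bags = await api.getStorageBagsDetails(['1', '2'], 0, 100)
```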

+ 4 - 20
storage-node-v2/src/services/queryNode/generated/queries.ts

@@ -85,32 +85,16 @@ export const GetStorageBucketDetails = gql`
   ${StorageBucketDetails}
 `
 export const GetStorageBagDetails = gql`
-  query getStorageBagDetails(
-    $bucketIds: StorageBucketWhereInput
-    $offset: Int
-    $limit: Int
-  ) {
-    storageBags(
-      offset: $offset
-      limit: $limit
-      where: { storedBy_some: $bucketIds }
-    ) {
+  query getStorageBagDetails($bucketIds: StorageBucketWhereInput, $offset: Int, $limit: Int) {
+    storageBags(offset: $offset, limit: $limit, where: { storedBy_some: $bucketIds }) {
       ...StorageBagDetails
     }
   }
   ${StorageBagDetails}
 `
 export const GetDataObjectDetails = gql`
-  query getDataObjectDetails(
-    $bagIds: StorageBagWhereInput
-    $offset: Int
-    $limit: Int
-  ) {
-    storageDataObjects(
-      offset: $offset
-      limit: $limit
-      where: { storageBag: $bagIds, isAccepted_eq: true }
-    ) {
+  query getDataObjectDetails($bagIds: StorageBagWhereInput, $offset: Int, $limit: Int) {
+    storageDataObjects(offset: $offset, limit: $limit, where: { storageBag: $bagIds, isAccepted_eq: true }) {
       ...DataObjectDetails
     }
   }

+ 2 - 4
storage-node-v2/src/services/queryNode/generated/schema.ts

@@ -2,10 +2,8 @@ export type Maybe<T> = T | null
 export type Exact<T extends { [key: string]: unknown }> = {
   [K in keyof T]: T[K]
 }
-export type MakeOptional<T, K extends keyof T> = Omit<T, K> &
-  { [SubKey in K]?: Maybe<T[SubKey]> }
-export type MakeMaybe<T, K extends keyof T> = Omit<T, K> &
-  { [SubKey in K]: Maybe<T[SubKey]> }
+export type MakeOptional<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]?: Maybe<T[SubKey]> }
+export type MakeMaybe<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]: Maybe<T[SubKey]> }
 /** All built-in and custom scalars, mapped to their actual values */
 export type Scalars = {
   ID: string
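
For reference, the single-line mapped types are behavior-identical to the multi-line originals; an illustrative expansion over a hypothetical `Bag` type:

```ts
type Bag = { id: string; owner: string }
// MakeOptional makes the picked keys optional and nullable:
type BagDraft = MakeOptional<Bag, 'owner'> // { id: string; owner?: Maybe<string> }
// MakeMaybe keeps the keys required but allows null:
type BagRow = MakeMaybe<Bag, 'owner'> // { id: string; owner: Maybe<string> }
```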

+ 4 - 20
storage-node-v2/src/services/queryNode/queries/queries.graphql

@@ -24,16 +24,8 @@ fragment StorageBagDetails on StorageBag {
   }
 }
 
-query getStorageBagDetails(
-  $bucketIds: StorageBucketWhereInput
-  $offset: Int
-  $limit: Int
-) {
-  storageBags(
-    offset: $offset
-    limit: $limit
-    where: { storedBy_some: $bucketIds }
-  ) {
+query getStorageBagDetails($bucketIds: StorageBucketWhereInput, $offset: Int, $limit: Int) {
+  storageBags(offset: $offset, limit: $limit, where: { storedBy_some: $bucketIds }) {
     ...StorageBagDetails
   }
 }
@@ -45,16 +37,8 @@ fragment DataObjectDetails on StorageDataObject {
   }
 }
 
-query getDataObjectDetails(
-  $bagIds: StorageBagWhereInput
-  $offset: Int
-  $limit: Int
-) {
-  storageDataObjects(
-    offset: $offset
-    limit: $limit
-    where: { storageBag: $bagIds, isAccepted_eq: true }
-  ) {
+query getDataObjectDetails($bagIds: StorageBagWhereInput, $offset: Int, $limit: Int) {
+  storageDataObjects(offset: $offset, limit: $limit, where: { storageBag: $bagIds, isAccepted_eq: true }) {
     ...DataObjectDetails
   }
 }

+ 4 - 12
storage-node-v2/src/services/runtime/api.ts

@@ -6,10 +6,7 @@ import { TypeRegistry } from '@polkadot/types'
 import { KeyringPair } from '@polkadot/keyring/types'
 import { SubmittableExtrinsic, AugmentedEvent } from '@polkadot/api/types'
 import { DispatchError, DispatchResult } from '@polkadot/types/interfaces/system'
-import {
-  getTransactionNonce,
-  resetTransactionNonceCache,
-} from './transactionNonceKeeper'
+import { getTransactionNonce, resetTransactionNonceCache } from './transactionNonceKeeper'
 import logger from '../../services/logger'
 import ExitCodes from '../../command-base/ExitCodes'
 import { CLIError } from '@oclif/errors'
@@ -27,16 +24,12 @@ export class ExtrinsicFailedError extends CLIError {}
  */
 export async function createApi(apiUrl: string): Promise<ApiPromise> {
   const provider = new WsProvider(apiUrl)
-  provider.on('error', (err) =>
-    logger.error(`Api provider error: ${err.target?._url}`)
-  )
+  provider.on('error', (err) => logger.error(`Api provider error: ${err.target?._url}`))
 
   const api = new ApiPromise({ provider, types })
   await api.isReadyOrError
 
-  api.on('error', (err) =>
-    logger.error(`Api promise error: ${err.target?._url}`)
-  )
+  api.on('error', (err) => logger.error(`Api promise error: ${err.target?._url}`))
 
   return api
 }
@@ -174,9 +167,8 @@ export async function sendAndFollowNamedTx<T>(
       eventResult = eventParser(result)
     }
     logger.debug(`Extrinsic successful!`)
-  
-    return eventResult
 
+    return eventResult
   } catch (err) {
     await resetTransactionNonceCache()
     throw err
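
The whitespace cleanup above sits in the failure path of `sendAndFollowNamedTx`, which resets the cached nonce before rethrowing. A minimal standalone sketch of that pattern (the wrapper name is hypothetical; `resetTransactionNonceCache` is the import from this diff):

```ts
// On any extrinsic failure, drop the cached nonce so the next transaction
// re-reads it from the node instead of reusing a possibly stale value.
async function withNonceReset<T>(send: () => Promise<T>): Promise<T> {
  try {
    return await send()
  } catch (err) {
    await resetTransactionNonceCache()
    throw err
  }
}
```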

+ 1 - 4
storage-node-v2/src/services/runtime/transactionNonceKeeper.ts

@@ -26,10 +26,7 @@ const lock = new AwaitLock()
  * @returns promise with transaction nonce for a given account.
  *
  */
-export async function getTransactionNonce(
-  api: ApiPromise,
-  account: KeyringPair
-): Promise<Index> {
+export async function getTransactionNonce(api: ApiPromise, account: KeyringPair): Promise<Index> {
   await lock.acquireAsync()
   try {
     let nonce: Index | undefined = nonceCache.get(nonceEntryName)
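
Only the first cache read of `getTransactionNonce` is visible in this hunk. A sketch of the full lock-guarded pattern, assuming the `await-lock` and `node-cache` instances from the surrounding file and the standard `accountNextIndex` RPC (the real keeper may also advance the cached value):

```ts
async function getTransactionNonce(api: ApiPromise, account: KeyringPair): Promise<Index> {
  await lock.acquireAsync() // serialize nonce handouts across concurrent extrinsics
  try {
    let nonce = nonceCache.get<Index>(nonceEntryName)
    if (nonce === undefined) {
      nonce = await api.rpc.system.accountNextIndex(account.address)
      nonceCache.set(nonceEntryName, nonce)
    }
    return nonce
  } finally {
    lock.release()
  }
}
```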

+ 1 - 3
storage-node-v2/src/services/sync/remoteStorageData.ts

@@ -30,9 +30,7 @@ const badOperatorUrls = new NodeCache({
  *
  * @param operatorUrl - remote storage node URL
  */
-export async function getRemoteDataObjects(
-  operatorUrl: string
-): Promise<string[]> {
+export async function getRemoteDataObjects(operatorUrl: string): Promise<string[]> {
   const url = urljoin(operatorUrl, 'api/v1/state/data-objects')
 
   const faultyOperator = badOperatorUrls.has(operatorUrl)

+ 8 - 26
storage-node-v2/src/services/sync/storageObligations.ts

@@ -127,10 +127,7 @@ export async function getStorageObligationsFromRuntime(
  * @param workerId - worker ID
  * @returns storage bucket IDs
  */
-export async function getStorageBucketIdsByWorkerId(
-  queryNodeUrl: string,
-  workerId: number
-): Promise<string[]> {
+export async function getStorageBucketIdsByWorkerId(queryNodeUrl: string, workerId: number): Promise<string[]> {
   const api = new QueryNodeApi(queryNodeUrl)
   const allBuckets = await getAllBuckets(api)
 
@@ -148,10 +145,7 @@ export async function getStorageBucketIdsByWorkerId(
  * @param bagId - bag ID
  * @returns data object IDs
  */
-export async function getDataObjectIDsByBagId(
-  queryNodeUrl: string,
-  bagId: string
-): Promise<string[]> {
+export async function getDataObjectIDsByBagId(queryNodeUrl: string, bagId: string): Promise<string[]> {
   const api = new QueryNodeApi(queryNodeUrl)
   const dataObjects = await getAllAssignedDataObjects(api, [bagId])
 
@@ -164,9 +158,7 @@ export async function getDataObjectIDsByBagId(
  * @param api - initialized QueryNodeApi instance
  * @returns storage buckets data
  */
-async function getAllBuckets(
-  api: QueryNodeApi
-): Promise<StorageBucketDetailsFragment[]> {
+async function getAllBuckets(api: QueryNodeApi): Promise<StorageBucketDetailsFragment[]> {
   return await getAllObjectsWithPaging(
     'all storage buckets',
     async (offset, limit) => await api.getStorageBucketDetails(offset, limit)
@@ -180,14 +172,10 @@ async function getAllBuckets(
  * @param bagIds - assigned storage bags' IDs
  * @returns storage bag data
  */
-async function getAllAssignedDataObjects(
-  api: QueryNodeApi,
-  bagIds: string[]
-): Promise<DataObjectDetailsFragment[]> {
+async function getAllAssignedDataObjects(api: QueryNodeApi, bagIds: string[]): Promise<DataObjectDetailsFragment[]> {
   return await getAllObjectsWithPaging(
     'assigned data objects',
-    async (offset, limit) =>
-      await api.getDataObjectDetails(bagIds, offset, limit)
+    async (offset, limit) => await api.getDataObjectDetails(bagIds, offset, limit)
   )
 }
 
@@ -198,14 +186,10 @@ async function getAllAssignedDataObjects(
  * @param bucketIds - assigned storage provider buckets' IDs
  * @returns storage bag data
  */
-async function getAllAssignedBags(
-  api: QueryNodeApi,
-  bucketIds: string[]
-): Promise<StorageBagDetailsFragment[]> {
+async function getAllAssignedBags(api: QueryNodeApi, bucketIds: string[]): Promise<StorageBagDetailsFragment[]> {
   return await getAllObjectsWithPaging(
     'assigned bags',
-    async (offset, limit) =>
-      await api.getStorageBagsDetails(bucketIds, offset, limit)
+    async (offset, limit) => await api.getStorageBagsDetails(bucketIds, offset, limit)
   )
 }
 
@@ -227,9 +211,7 @@ async function getAllObjectsWithPaging<T>(
 
   let resultPart = []
   do {
-    logger.debug(
-      `Sync - getting ${objectName}: offset = ${offset}, limit = ${limit}`
-    )
+    logger.debug(`Sync - getting ${objectName}: offset = ${offset}, limit = ${limit}`)
     resultPart = await query(offset, limit)
     offset += limit
     result.push(...resultPart)
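
This debug line sits inside the shared paging loop that backs all three `getAll*` helpers. A self-contained sketch of `getAllObjectsWithPaging`, consistent with the visible fragment (the page size is an assumption):

```ts
async function getAllObjectsWithPaging<T>(
  objectName: string,
  query: (offset: number, limit: number) => Promise<T[]>
): Promise<T[]> {
  const result: T[] = []
  const limit = 1000 // assumed page size
  let offset = 0
  let resultPart: T[] = []
  do {
    console.debug(`Sync - getting ${objectName}: offset = ${offset}, limit = ${limit}`)
    resultPart = await query(offset, limit)
    offset += limit
    result.push(...resultPart)
  } while (resultPart.length > 0)
  return result
}
```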

+ 8 - 35
storage-node-v2/src/services/sync/synchronizer.ts

@@ -1,14 +1,6 @@
-import {
-  getStorageObligationsFromRuntime,
-  DataObligations,
-} from './storageObligations'
+import { getStorageObligationsFromRuntime, DataObligations } from './storageObligations'
 import logger from '../../services/logger'
-import {
-  SyncTask,
-  DownloadFileTask,
-  DeleteLocalFileTask,
-  PrepareDownloadFileTask,
-} from './tasks'
+import { SyncTask, DownloadFileTask, DeleteLocalFileTask, PrepareDownloadFileTask } from './tasks'
 import { WorkingStack, TaskProcessorSpawner, TaskSink } from './workingProcess'
 import _ from 'lodash'
 import fs from 'fs'
@@ -50,28 +42,18 @@ export async function performSync(
   logger.debug(`Sync - deleted objects: ${deleted.length}`)
 
   const workingStack = new WorkingStack()
-  const deletedTasks = deleted.map(
-    (fileName) => new DeleteLocalFileTask(uploadDirectory, fileName)
-  )
+  const deletedTasks = deleted.map((fileName) => new DeleteLocalFileTask(uploadDirectory, fileName))
 
   let addedTasks: SyncTask[]
   if (operatorUrl !== null) {
-    addedTasks = await getPrepareDownloadTasks(
-      model,
-      added,
-      uploadDirectory,
-      workingStack
-    )
+    addedTasks = await getPrepareDownloadTasks(model, added, uploadDirectory, workingStack)
   } else {
     addedTasks = await getDownloadTasks(operatorUrl, added, uploadDirectory)
   }
 
   logger.debug(`Sync - started processing...`)
 
-  const processSpawner = new TaskProcessorSpawner(
-    workingStack,
-    asyncWorkersNumber
-  )
+  const processSpawner = new TaskProcessorSpawner(workingStack, asyncWorkersNumber)
 
   await workingStack.add(addedTasks)
   await workingStack.add(deletedTasks)
@@ -138,12 +120,7 @@ async function getPrepareDownloadTasks(
       }
     }
 
-    return new PrepareDownloadFileTask(
-      operatorUrls,
-      cid,
-      uploadDirectory,
-      taskSink
-    )
+    return new PrepareDownloadFileTask(operatorUrls, cid, uploadDirectory, taskSink)
   })
 
   return tasks
@@ -161,9 +138,7 @@ async function getDownloadTasks(
   addedCids: string[],
   uploadDirectory: string
 ): Promise<DownloadFileTask[]> {
-  const addedTasks = addedCids.map(
-    (fileName) => new DownloadFileTask(operatorUrl, fileName, uploadDirectory)
-  )
+  const addedTasks = addedCids.map((fileName) => new DownloadFileTask(operatorUrl, fileName, uploadDirectory))
 
   return addedTasks
 }
@@ -173,9 +148,7 @@ async function getDownloadTasks(
  *
  * @param uploadDirectory - local directory to get file names from
  */
-export async function getLocalDataObjects(
-  uploadDirectory: string
-): Promise<string[]> {
+export async function getLocalDataObjects(uploadDirectory: string): Promise<string[]> {
   const localCids = await getLocalFileNames(uploadDirectory)
 
   return localCids
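
The `added`/`deleted` inputs consumed by these task builders are a set difference between query-node obligations and local files; a sketch using the lodash import from this file (the variable names are assumptions):

```ts
// requiredCids: CIDs the node is obliged to store; localCids: files already on disk.
const added = _.difference(requiredCids, localCids) // schedule downloads
const deleted = _.difference(localCids, requiredCids) // schedule local deletions
```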

+ 5 - 20
storage-node-v2/src/services/sync/tasks.ts

@@ -74,9 +74,7 @@ export class DownloadFileTask implements SyncTask {
       const timeoutMs = 30 * 60 * 1000 // 30 min for large files (~ 10 GB)
       // Casting because of:
       // https://stackoverflow.com/questions/38478034/pipe-superagent-response-to-express-response
-      const request = superagent
-        .get(this.url)
-        .timeout(timeoutMs) as unknown as NodeJS.ReadableStream
+      const request = (superagent.get(this.url).timeout(timeoutMs) as unknown) as NodeJS.ReadableStream
 
      // We create a temp file first to mitigate partial downloads on app (or remote node) crash.
      // These partial downloads will be cleaned up during the next sync iteration.
@@ -106,12 +104,7 @@ export class PrepareDownloadFileTask implements SyncTask {
   taskSink: TaskSink
   uploadsDirectory: string
 
-  constructor(
-    operatorUrlCandidates: string[],
-    cid: string,
-    uploadsDirectory: string,
-    taskSink: TaskSink
-  ) {
+  constructor(operatorUrlCandidates: string[], cid: string, uploadsDirectory: string, taskSink: TaskSink) {
     this.cid = cid
     this.taskSink = taskSink
     this.operatorUrlCandidates = operatorUrlCandidates
@@ -126,9 +119,7 @@ export class PrepareDownloadFileTask implements SyncTask {
    // Create an array of operator URL indices to allow a random URL choice.
    // We cannot use the original array because we shouldn't modify the original data,
    // and cloning it seems like a heavy operation.
-    const operatorUrlIndices: number[] = [
-      ...Array(this.operatorUrlCandidates.length).keys(),
-    ]
+    const operatorUrlIndices: number[] = [...Array(this.operatorUrlCandidates.length).keys()]
 
     while (!_.isEmpty(operatorUrlIndices)) {
       const randomUrlIndex = _.sample(operatorUrlIndices)
@@ -145,16 +136,10 @@ export class PrepareDownloadFileTask implements SyncTask {
 
       try {
         const chosenBaseUrl = randomUrl
-        const remoteOperatorCids: string[] = await getRemoteDataObjects(
-          chosenBaseUrl
-        )
+        const remoteOperatorCids: string[] = await getRemoteDataObjects(chosenBaseUrl)
 
         if (remoteOperatorCids.includes(this.cid)) {
-          const newTask = new DownloadFileTask(
-            chosenBaseUrl,
-            this.cid,
-            this.uploadsDirectory
-          )
+          const newTask = new DownloadFileTask(chosenBaseUrl, this.cid, this.uploadsDirectory)
 
           return this.taskSink.add([newTask])
         }
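
The index array above enables a random, non-repeating URL choice without cloning or mutating the candidate list. A standalone sketch of the whole selection loop (removal of tried indices is assumed; the hunk shows only the sampling):

```ts
import _ from 'lodash'

// Returns candidate URLs in a random trial order; each index is sampled once
// and then removed, leaving the original candidates array untouched.
function shuffleCandidates(operatorUrlCandidates: string[]): string[] {
  const indices = [...Array(operatorUrlCandidates.length).keys()]
  const order: string[] = []
  while (!_.isEmpty(indices)) {
    const idx = _.sample(indices) as number
    _.pull(indices, idx)
    order.push(operatorUrlCandidates[idx])
  }
  return order
}
```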

+ 10 - 18
storage-node-v2/src/services/webApi/app.ts

@@ -3,17 +3,12 @@ import path from 'path'
 import cors from 'cors'
 import { Express, NextFunction } from 'express-serve-static-core'
 import * as OpenApiValidator from 'express-openapi-validator'
-import {
-  HttpError,
-  OpenAPIV3,
-  ValidateSecurityOpts,
-} from 'express-openapi-validator/dist/framework/types'
+import { HttpError, OpenAPIV3, ValidateSecurityOpts } from 'express-openapi-validator/dist/framework/types'
 import { KeyringPair } from '@polkadot/keyring/types'
 import { ApiPromise } from '@polkadot/api'
 import { RequestData, verifyTokenSignature, parseUploadToken, UploadToken } from '../helpers/auth'
 import { checkRemoveNonce } from '../../services/helpers/tokenNonceKeeper'
 import { httpLogger, errorLogger } from '../../services/logger'
-import { getLocalDataObjects } from '../../services/sync/synchronizer'
 
 /**
  * Web application parameters.
@@ -62,9 +57,14 @@ export type AppConfig = {
   enableUploadingAuth: boolean
 
   /**
-   * Runtime API promise
+   * ElasticSearch logging endpoint URL
    */
   elasticSearchEndpoint?: string
+
+  /**
+   * Maximum file size limit for uploads.
+   */
+  maxFileSize: number
 }
 
 /**
@@ -112,14 +112,10 @@ export async function createApp(config: AppConfig): Promise<Express> {
           // For multipart forms, the max number of file fields (Default: Infinity)
           files: 1,
           // For multipart forms, the max file size (in bytes) (Default: Infinity)
-          fileSize: maxFileSize,
+          fileSize: config.maxFileSize,
         },
       },
-      validateSecurity: setupUploadingValidation(
-        config.enableUploadingAuth,
-        config.api,
-        config.account
-      ),
+      validateSecurity: setupUploadingValidation(config.enableUploadingAuth, config.api, config.account),
     })
   ) // Required signature.
 
@@ -197,11 +193,7 @@ type ValidateUploadFunction = (
 function validateUpload(api: ApiPromise, account: KeyringPair): ValidateUploadFunction {
   // We don't use these variables yet.
   /* eslint-disable @typescript-eslint/no-unused-vars */
-  return (
-    req: express.Request,
-    scopes: string[],
-    schema: OpenAPIV3.SecuritySchemeObject
-  ) => {
+  return (req: express.Request, scopes: string[], schema: OpenAPIV3.SecuritySchemeObject) => {
     const tokenString = req.headers['x-api-key'] as string
     const token = parseUploadToken(tokenString)
 

+ 25 - 2
storage-node-v2/src/services/webApi/controllers/common.ts

@@ -1,4 +1,27 @@
 import * as express from 'express'
+import { CLIError } from '@oclif/errors'
+
+/**
+ * Dedicated error for the web api requests.
+ */
+export class WebApiError extends CLIError {
+  httpStatusCode: number
+
+  constructor(err: string, httpStatusCode: number) {
+    super(err)
+
+    this.httpStatusCode = httpStatusCode
+  }
+}
+
+/**
+ * Dedicated server error for the web api requests.
+ */
+export class ServerError extends WebApiError {
+  constructor(err: string) {
+    super(err, 500)
+  }
+}
 
 /**
  * Returns a directory for file uploading from the response.
@@ -12,7 +35,7 @@ export function getUploadsDir(res: express.Response): string {
     return res.locals.uploadsDir
   }
 
-  throw new Error('No upload directory path loaded.')
+  throw new ServerError('No upload directory path loaded.')
 }
 
 /**
@@ -42,7 +65,7 @@ export function getWorkerId(res: express.Response): number {
     return res.locals.workerId
   }
 
-  throw new Error('No Joystream worker ID loaded.')
+  throw new ServerError('No Joystream worker ID loaded.')
 }
 
 /**
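
Centralizing `WebApiError`/`ServerError` in `common.ts` lets every controller map errors to HTTP statuses uniformly. A sketch of how a shared responder (like `sendResponseWithError` in publicApi.ts) can use the hierarchy:

```ts
// Falls back to 500 for plain Errors; WebApiError carries its own status code.
function respondWithError(res: express.Response, err: Error, errorType: string): void {
  const status = err instanceof WebApiError ? err.httpStatusCode : 500
  res.status(status).json({ type: errorType, message: err.toString() })
}
```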

+ 30 - 159
storage-node-v2/src/services/webApi/controllers/publicApi.ts

@@ -8,12 +8,8 @@ import {
   verifyTokenSignature,
 } from '../../helpers/auth'
 import { hashFile } from '../../../services/helpers/hashing'
-import {
-  createNonce,
-  getTokenExpirationTime,
-} from '../../../services/helpers/tokenNonceKeeper'
+import { createNonce, getTokenExpirationTime } from '../../../services/helpers/tokenNonceKeeper'
 import { getFileInfo } from '../../../services/helpers/fileInfo'
-import { parseBagId } from '../../helpers/bagTypes'
 import { BagId } from '@joystream/types/storage'
 import logger from '../../../services/logger'
 import { KeyringPair } from '@polkadot/keyring/types'
@@ -24,33 +20,13 @@ import path from 'path'
 import send from 'send'
 import { CLIError } from '@oclif/errors'
 import { hexToString } from '@polkadot/util'
+import { parseBagId } from '../../helpers/bagTypes'
 import { timeout } from 'promise-timeout'
-import { getUploadsDir, getWorkerId, getQueryNodeUrl } from './common'
+import { getUploadsDir, getWorkerId, getQueryNodeUrl, WebApiError, ServerError } from './common'
 import { getStorageBucketIdsByWorkerId } from '../../../services/sync/storageObligations'
+import { Membership } from '@joystream/types/members'
 const fsPromises = fs.promises
 
-/**
- * Dedicated error for the web api requests.
- */
-export class WebApiError extends CLIError {
-  httpStatusCode: number
-
-  constructor(err: string, httpStatusCode: number) {
-    super(err)
-
-    this.httpStatusCode = httpStatusCode
-  }
-}
-
-/**
- * Dedicated server error for the web api requests.
- */
-export class ServerError extends WebApiError {
-  constructor(err: string) {
-    super(err, 500)
-  }
-}
-
 /**
  * A public endpoint: serves files by CID.
  */
@@ -125,9 +101,11 @@ export async function uploadFile(req: express.Request, res: express.Response): P
       hashFile(fileObj.path),
     ])
 
+    const api = getApi(res)
+    const bagId = parseBagId(uploadRequest.bagId)
     const accepted = await verifyDataObjectInfo(api, bagId, uploadRequest.dataObjectId, fileObj.size, hash)
 
-    // Prepare new file name\
+    // Prepare new file name
     const uploadsDir = getUploadsDir(res)
     const newPath = path.join(uploadsDir, hash)
 
@@ -135,7 +113,6 @@ export async function uploadFile(req: express.Request, res: express.Response): P
     await fsPromises.rename(fileObj.path, newPath)
     cleanupFileName = newPath
 
-    const workerId = getWorkerId(res)
     if (!accepted) {
       await acceptPendingDataObjects(api, bagId, getAccount(res), workerId, uploadRequest.storageBucketId, [
         uploadRequest.dataObjectId,
@@ -201,36 +178,6 @@ function getFileObject(req: express.Request): Express.Multer.File {
   throw new WebApiError('No file uploaded', 400)
 }
 
-/**
- * Returns worker ID from the response.
- *
- * @remarks
- * This is a helper function. It parses the response object for a variable and
- * throws an error on failure.
- */
-function getWorkerId(res: express.Response): number {
-  if (res.locals.workerId || res.locals.workerId === 0) {
-    return res.locals.workerId
-  }
-
-  throw new ServerError('No Joystream worker ID loaded.')
-}
-
-/**
- * Returns a directory for file uploading from the response.
- *
- * @remarks
- * This is a helper function. It parses the response object for a variable and
- * throws an error on failure.
- */
-function getUploadsDir(res: express.Response): string {
-  if (res.locals.uploadsDir) {
-    return res.locals.uploadsDir
-  }
-
-  throw new ServerError('No upload directory path loaded.')
-}
-
 /**
  * Returns a KeyPair instance from the response.
  *
@@ -308,15 +255,11 @@ async function validateTokenRequest(api: ApiPromise, tokenRequest: UploadTokenRe
     throw new WebApiError('Invalid upload token request signature.', 401)
   }
 
-  const membershipPromise = api.query.members.membershipById(
-    tokenRequest.data.memberId
-  )
+  const membershipPromise = api.query.members.membershipById(tokenRequest.data.memberId)
 
   const membership = (await timeout(membershipPromise, 5000)) as Membership
 
-  if (
-    membership.controller_account.toString() !== tokenRequest.data.accountId
-  ) {
+  if (membership.controller_account.toString() !== tokenRequest.data.accountId) {
     throw new Error(`Provided controller account and member id don't match.`)
   }
 }
@@ -402,6 +345,8 @@ function sendResponseWithError(res: express.Response, err: Error, errorType: str
 function isNofileError(err: Error): boolean {
   return err.toString().includes('ENOENT')
 }
+
+/**
  * Get the status code by error.
  *
  * @param err - error
@@ -426,66 +371,11 @@ function getHttpStatusCodeByError(err: Error): number {
 
   return 500
 }
-/**
- * A public endpoint: return all local data objects.
- */
-export async function getAllLocalDataObjects(
-  req: express.Request,
-  res: express.Response
-): Promise<void> {
-  try {
-    const uploadsDir = getUploadsDir(res)
-
-    const cids = await getLocalDataObjects(uploadsDir)
-
-    res.status(200).json(cids)
-  } catch (err) {
-    res.status(500).json({
-      type: 'all_data_objects',
-      message: err.toString(),
-    })
-  }
-}
-
-/**
- * A public endpoint: return local data objects for the bag.
- */
-export async function getLocalDataObjectsByBagId(
-  req: express.Request,
-  res: express.Response
-): Promise<void> {
-  try {
-    const uploadsDir = getUploadsDir(res)
-
-    const workerId = getWorkerId(res)
-    const queryNodeUrl = getQueryNodeUrl(res)
-    const bagId = getBagId(req)
-
-    // TODO: Introduce dedicated QueryNode method.
-    const [cids, obligations] = await Promise.all([
-      getLocalDataObjects(uploadsDir),
-      getStorageObligationsFromRuntime(queryNodeUrl, workerId)])
-
-    const requiredCids = obligations.dataObjects.filter((obj) => obj.bagId == bagId).map((obj) => obj.cid)
-
-    const localDataForBag = _.intersection(cids, requiredCids)
-
-    res.status(200).json(localDataForBag)
-  } catch (err) {
-    res.status(500).json({
-      type: 'data_objects_by_bag',
-      message: err.toString(),
-    })
-  }
-}
 
 /**
  * A public endpoint: return the server version.
  */
-export async function getVersion(
-  req: express.Request,
-  res: express.Response
-): Promise<void> {
+export async function getVersion(req: express.Request, res: express.Response): Promise<void> {
   try {
     const config = getCommandConfig(res)
 
@@ -509,7 +399,9 @@ export async function getVersion(
  * This is a helper function. It parses the response object for a variable and
  * throws an error on failure.
  */
-function getCommandConfig(res: express.Response): {
+function getCommandConfig(
+  res: express.Response
+): {
   version: string
   userAgent: string
 } {
@@ -520,37 +412,6 @@ function getCommandConfig(res: express.Response): {
  throw new Error('No command config loaded.')
 }
 
-/**
- * Returns Bag ID from the request.
- *
- * @remarks
- * This is a helper function. It parses the request object for a variable and
- * throws an error on failure.
- */
- function getBagId(req: express.Request): string {
-  const bagId = req.params.bagId || ''
-  if (bagId.length > 0) {
-    return bagId
-  }
-
-  throw new Error('No bagId provided.')
-}
-
-/**
- * Returns the QueryNode URL from the starting parameters.
- *
- * @remarks
- * This is a helper function. It parses the response object for a variable and
- * throws an error on failure.
- */
- function getQueryNodeUrl(res: express.Response): string {
-  if (res.locals.queryNodeUrl) {
-    return res.locals.queryNodeUrl
-  }
-
-  throw new Error('No Query Node URL loaded.')
-}
-
 /**
  * Validates the storage bucket ID obligations for the worker (storage provider).
  * It throws an error when storage bucket doesn't belong to the worker.
@@ -560,14 +421,24 @@ function getCommandConfig(res: express.Response): {
  * @param bucketId - storage bucket ID
  * @returns void promise.
  */
-async function verifyBucketId(
-  queryNodeUrl: string,
-  workerId: number,
-  bucketId: number
-): Promise<void> {
+async function verifyBucketId(queryNodeUrl: string, workerId: number, bucketId: number): Promise<void> {
   const bucketIds = await getStorageBucketIdsByWorkerId(queryNodeUrl, workerId)
 
   if (!bucketIds.includes(bucketId.toString())) {
     throw new Error('Incorrect storage bucket ID.')
   }
 }
+
+/**
+ * Validates the upload file size. It throws an error when the file size exceeds the limit.
+ *
+ * @param fileSize - upload file size in bytes
+ * @returns void.
+ */
+function verifyFileSize(fileSize: number): void {
+  const MAX_FILE_SIZE = 1000000 // TODO: Get this const from the runtime
+
+  if (fileSize > MAX_FILE_SIZE) {
+    throw new WebApiError('Max file size exceeded.', 400)
+  }
+}
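
Throwing `WebApiError` with an explicit status lets the shared error path answer with 400 instead of a blanket 500. A plausible reconstruction of `getHttpStatusCodeByError` (only the ENOENT check and the 500 fallback are visible in this diff; the `WebApiError` branch is an assumption consistent with the class):

```ts
function getHttpStatusCodeByError(err: Error): number {
  if (isNofileError(err)) {
    return 404 // a missing file maps to Not Found
  }
  if (err instanceof WebApiError) {
    return err.httpStatusCode
  }
  return 500
}
```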

+ 12 - 34
storage-node-v2/src/services/webApi/controllers/stateApi.ts

@@ -2,11 +2,7 @@ import { getLocalDataObjects } from '../../../services/sync/synchronizer'
 import * as express from 'express'
 import _ from 'lodash'
 import { getDataObjectIDsByBagId } from '../../sync/storageObligations'
-import {
-  getUploadsDir,
-  getTempFileUploadingDir,
-  getQueryNodeUrl,
-} from './common'
+import { getUploadsDir, getTempFileUploadingDir, getQueryNodeUrl, WebApiError } from './common'
 import fastFolderSize from 'fast-folder-size'
 import { promisify } from 'util'
 import fs from 'fs'
@@ -26,10 +22,7 @@ const dataCache = new NodeCache({
 /**
  * A public endpoint: return all local data objects.
  */
-export async function getAllLocalDataObjects(
-  req: express.Request,
-  res: express.Response
-): Promise<void> {
+export async function getAllLocalDataObjects(req: express.Request, res: express.Response): Promise<void> {
   try {
     const uploadsDir = getUploadsDir(res)
 
@@ -49,10 +42,7 @@ export async function getAllLocalDataObjects(
  *
  *  @return total size and count of the data objects.
  */
-export async function getLocalDataStats(
-  req: express.Request,
-  res: express.Response
-): Promise<void> {
+export async function getLocalDataStats(req: express.Request, res: express.Response): Promise<void> {
   try {
     const uploadsDir = getUploadsDir(res)
     const tempFileDir = getTempFileUploadingDir(res)
@@ -75,10 +65,7 @@ export async function getLocalDataStats(
       const tempDirStatsPromise = fsPromises.readdir(tempFileDir)
       const tempDirSizePromise = fastFolderSizeAsync(tempFileDir)
 
-      const [tempDirStats, tempSize] = await Promise.all([
-        tempDirStatsPromise,
-        tempDirSizePromise,
-      ])
+      const [tempDirStats, tempSize] = await Promise.all([tempDirStatsPromise, tempDirSizePromise])
 
       tempDirSize = tempSize ?? 0
       tempDownloads = tempDirStats.length
@@ -101,10 +88,7 @@ export async function getLocalDataStats(
 /**
  * A public endpoint: return local data objects for the bag.
  */
-export async function getLocalDataObjectsByBagId(
-  req: express.Request,
-  res: express.Response
-): Promise<void> {
+export async function getLocalDataObjectsByBagId(req: express.Request, res: express.Response): Promise<void> {
   try {
     const uploadsDir = getUploadsDir(res)
 
@@ -130,10 +114,7 @@ export async function getLocalDataObjectsByBagId(
 /**
  * A public endpoint: return the server version.
  */
-export async function getVersion(
-  req: express.Request,
-  res: express.Response
-): Promise<void> {
+export async function getVersion(req: express.Request, res: express.Response): Promise<void> {
   try {
     const config = getCommandConfig(res)
 
@@ -157,7 +138,9 @@ export async function getVersion(
  * This is a helper function. It parses the response object for a variable and
  * throws an error on failure.
  */
-function getCommandConfig(res: express.Response): {
+function getCommandConfig(
+  res: express.Response
+): {
   version: string
   userAgent: string
 } {
@@ -181,7 +164,7 @@ function getBagId(req: express.Request): string {
     return bagId
   }
 
-  throw new Error('No bagId provided.')
+  throw new WebApiError('No bagId provided.', 400)
 }
 
 /**
@@ -189,9 +172,7 @@ function getBagId(req: express.Request): string {
  * obsolete until cache expiration.
  *
  */
-async function getCachedLocalDataObjects(
-  uploadsDir: string
-): Promise<string[]> {
+async function getCachedLocalDataObjects(uploadsDir: string): Promise<string[]> {
   const entryName = 'local_data_object'
 
   if (!dataCache.has(entryName)) {
@@ -207,10 +188,7 @@ async function getCachedLocalDataObjects(
  * obsolete until cache expiration.
  *
  */
-async function getCachedDataObjectsObligations(
-  queryNodeUrl: string,
-  bagId: string
-): Promise<string[]> {
+async function getCachedDataObjectsObligations(queryNodeUrl: string, bagId: string): Promise<string[]> {
   const entryName = 'data_object_obligations'
 
   if (!dataCache.has(entryName)) {
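
Both cache helpers follow the same read-through shape over the module-level `dataCache`. A condensed sketch (the helper name and loader are hypothetical; the TTL comes from the NodeCache options above):

```ts
async function readThrough<T>(entryName: string, load: () => Promise<T>): Promise<T> {
  if (!dataCache.has(entryName)) {
    dataCache.set(entryName, await load()) // cached until the NodeCache TTL expires
  }
  return dataCache.get<T>(entryName) as T
}
```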

+ 5 - 5
types/augment-codec/augment-api-tx.ts

@@ -677,12 +677,12 @@ declare module '@polkadot/api/types/submittable' {
        * Create 'Begin review working group leader applications' proposal type.
        * This proposal uses `begin_applicant_review()` extrinsic from the Joystream `working group` module.
        **/
-      createBeginReviewWorkingGroupLeaderApplicationsProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, openingId: OpeningId | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, OpeningId, WorkingGroup]>;
+      createBeginReviewWorkingGroupLeaderApplicationsProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, openingId: OpeningId | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Reserved' | 'Forum' | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, OpeningId, WorkingGroup]>;
       /**
        * Create 'decrease working group leader stake' proposal type.
        * This proposal uses `decrease_stake()` extrinsic from the `working-group`  module.
        **/
-      createDecreaseWorkingGroupLeaderStakeProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, workerId: WorkerId | AnyNumber | Uint8Array, decreasingStake: BalanceOf | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, WorkerId, BalanceOf, WorkingGroup]>;
+      createDecreaseWorkingGroupLeaderStakeProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, workerId: WorkerId | AnyNumber | Uint8Array, decreasingStake: BalanceOf | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Reserved' | 'Forum' | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, WorkerId, BalanceOf, WorkingGroup]>;
       /**
        * Create 'Fill working group leader opening' proposal type.
        * This proposal uses `fill_opening()` extrinsic from the Joystream `working group` module.
@@ -707,17 +707,17 @@ declare module '@polkadot/api/types/submittable' {
        * Create 'set working group leader reward' proposal type.
        * This proposal uses `update_reward_amount()` extrinsic from the `working-group`  module.
        **/
-      createSetWorkingGroupLeaderRewardProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, workerId: WorkerId | AnyNumber | Uint8Array, rewardAmount: BalanceOfMint | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, WorkerId, BalanceOfMint, WorkingGroup]>;
+      createSetWorkingGroupLeaderRewardProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, workerId: WorkerId | AnyNumber | Uint8Array, rewardAmount: BalanceOfMint | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Reserved' | 'Forum' | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, WorkerId, BalanceOfMint, WorkingGroup]>;
       /**
        * Create 'Set working group mint capacity' proposal type.
        * This proposal uses `set_mint_capacity()` extrinsic from the `working-group`  module.
        **/
-      createSetWorkingGroupMintCapacityProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, mintBalance: BalanceOfMint | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, BalanceOfMint, WorkingGroup]>;
+      createSetWorkingGroupMintCapacityProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, mintBalance: BalanceOfMint | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Reserved' | 'Forum' | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, BalanceOfMint, WorkingGroup]>;
       /**
        * Create 'slash working group leader stake' proposal type.
        * This proposal uses `slash_stake()` extrinsic from the `working-group`  module.
        **/
-      createSlashWorkingGroupLeaderStakeProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, workerId: WorkerId | AnyNumber | Uint8Array, slashingStake: BalanceOf | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, WorkerId, BalanceOf, WorkingGroup]>;
+      createSlashWorkingGroupLeaderStakeProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, workerId: WorkerId | AnyNumber | Uint8Array, slashingStake: BalanceOf | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Reserved' | 'Forum' | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, WorkerId, BalanceOf, WorkingGroup]>;
       /**
        * Create 'Spending' proposal type.
        * This proposal uses `spend_from_council_mint()` extrinsic from the `governance::council`  module.

+ 5 - 5
types/augment/augment-api-tx.ts

@@ -677,12 +677,12 @@ declare module '@polkadot/api/types/submittable' {
        * Create 'Begin review working group leader applications' proposal type.
        * This proposal uses `begin_applicant_review()` extrinsic from the Joystream `working group` module.
        **/
-      createBeginReviewWorkingGroupLeaderApplicationsProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, openingId: OpeningId | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, OpeningId, WorkingGroup]>;
+      createBeginReviewWorkingGroupLeaderApplicationsProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, openingId: OpeningId | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Reserved' | 'Forum' | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, OpeningId, WorkingGroup]>;
       /**
        * Create 'decrease working group leader stake' proposal type.
        * This proposal uses `decrease_stake()` extrinsic from the `working-group`  module.
        **/
-      createDecreaseWorkingGroupLeaderStakeProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, workerId: WorkerId | AnyNumber | Uint8Array, decreasingStake: BalanceOf | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, WorkerId, BalanceOf, WorkingGroup]>;
+      createDecreaseWorkingGroupLeaderStakeProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, workerId: WorkerId | AnyNumber | Uint8Array, decreasingStake: BalanceOf | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Reserved' | 'Forum' | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, WorkerId, BalanceOf, WorkingGroup]>;
       /**
        * Create 'Fill working group leader opening' proposal type.
        * This proposal uses `fill_opening()` extrinsic from the Joystream `working group` module.
@@ -707,17 +707,17 @@ declare module '@polkadot/api/types/submittable' {
        * Create 'set working group leader reward' proposal type.
        * This proposal uses `update_reward_amount()` extrinsic from the `working-group`  module.
        **/
-      createSetWorkingGroupLeaderRewardProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, workerId: WorkerId | AnyNumber | Uint8Array, rewardAmount: BalanceOfMint | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, WorkerId, BalanceOfMint, WorkingGroup]>;
+      createSetWorkingGroupLeaderRewardProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, workerId: WorkerId | AnyNumber | Uint8Array, rewardAmount: BalanceOfMint | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Reserved' | 'Forum' | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, WorkerId, BalanceOfMint, WorkingGroup]>;
       /**
        * Create 'Set working group mint capacity' proposal type.
        * This proposal uses `set_mint_capacity()` extrinsic from the `working-group`  module.
        **/
-      createSetWorkingGroupMintCapacityProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, mintBalance: BalanceOfMint | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, BalanceOfMint, WorkingGroup]>;
+      createSetWorkingGroupMintCapacityProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, mintBalance: BalanceOfMint | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Reserved' | 'Forum' | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, BalanceOfMint, WorkingGroup]>;
       /**
        * Create 'slash working group leader stake' proposal type.
        * This proposal uses `slash_stake()` extrinsic from the `working-group`  module.
        **/
-      createSlashWorkingGroupLeaderStakeProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, workerId: WorkerId | AnyNumber | Uint8Array, slashingStake: BalanceOf | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, WorkerId, BalanceOf, WorkingGroup]>;
+      createSlashWorkingGroupLeaderStakeProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, workerId: WorkerId | AnyNumber | Uint8Array, slashingStake: BalanceOf | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Reserved' | 'Forum' | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, WorkerId, BalanceOf, WorkingGroup]>;
       /**
        * Create 'Spending' proposal type.
        * This proposal uses `spend_from_council_mint()` extrinsic from the `governance::council`  module.

+ 399 - 263
yarn.lock

(File diff suppressed because it is too large.)

Some files were not shown because too many files changed in this diff.