Browse Source

Merge branch 'giza-mappings-update-hydra' into distributor-node

Leszek Wiesner 3 years ago
parent
commit
e44396b941
35 changed files with 1604 additions and 386 deletions
  1. 2 1
      package.json
  2. 6 6
      pioneer/packages/joy-proposals/src/Proposal/Body.tsx
  3. 2 2
      query-node/codegen/package.json
  4. 618 110
      query-node/codegen/yarn.lock
  5. 1 1
      query-node/manifest.yml
  6. 0 0
      query-node/mappings/bootstrap-data/data/channelCategories.json
  7. 1 0
      query-node/mappings/bootstrap-data/data/members.json
  8. 0 0
      query-node/mappings/bootstrap-data/data/storageSystem.json
  9. 1 0
      query-node/mappings/bootstrap-data/data/videoCategories.json
  10. 8 0
      query-node/mappings/bootstrap-data/data/workingGroups.json
  11. 14 0
      query-node/mappings/bootstrap-data/index.ts
  12. 19 0
      query-node/mappings/bootstrap-data/scripts/api.ts
  13. 68 0
      query-node/mappings/bootstrap-data/scripts/fetchCategories.ts
  14. 51 0
      query-node/mappings/bootstrap-data/scripts/fetchMembersData.ts
  15. 64 0
      query-node/mappings/bootstrap-data/scripts/fetchWorkingGroupsData.ts
  16. 51 0
      query-node/mappings/bootstrap-data/types.ts
  17. 98 0
      query-node/mappings/bootstrap.ts
  18. 1 1
      query-node/mappings/common.ts
  19. 0 8
      query-node/mappings/genesis-data/index.ts
  20. 0 20
      query-node/mappings/genesis-data/types.ts
  21. 0 35
      query-node/mappings/genesis.ts
  22. 1 1
      query-node/mappings/index.ts
  23. 11 4
      query-node/mappings/package.json
  24. 35 0
      query-node/mappings/scripts/postInstall.ts
  25. 49 71
      query-node/mappings/storage/index.ts
  26. 8 4
      query-node/mappings/storage/utils.ts
  27. 2 1
      query-node/mappings/tsconfig.json
  28. 9 3
      query-node/mappings/workingGroup.ts
  29. 1 1
      query-node/package.json
  30. 8 38
      query-node/schemas/storage.graphql
  31. 7 11
      runtime-modules/common/src/working_group.rs
  32. 0 2
      types/augment/all/defs.json
  33. 0 2
      types/augment/all/types.ts
  34. 1 8
      types/src/common.ts
  35. 467 56
      yarn.lock

+ 2 - 1
package.json

@@ -51,7 +51,8 @@
     "rxjs": "^7.4.0",
     "typeorm": "0.2.34",
     "pg": "^8.4.0",
-    "chalk": "^4.0.0"
+    "chalk": "^4.0.0",
+    "@joystream/warthog": "2.39.0"
   },
   "devDependencies": {
     "eslint": "^7.25.0",

+ 6 - 6
pioneer/packages/joy-proposals/src/Proposal/Body.tsx

@@ -16,7 +16,7 @@ import { formatBalance } from '@polkadot/util';
 import PromiseComponent from '@polkadot/joy-utils/react/components/PromiseComponent';
 import ReactMarkdown from 'react-markdown';
 import { StakingPolicy } from '@joystream/types/hiring';
-import { WorkingGroup, WorkingGroupKey } from '@joystream/types/common';
+import { WorkingGroup } from '@joystream/types/common';
 import { ApplicationsDetailsByOpening } from '@polkadot/joy-utils/react/components/working-groups/ApplicationDetails';
 import { LeadInfoFromId } from '@polkadot/joy-utils/react/components/working-groups/LeadInfo';
 import { formatReward } from '@polkadot/joy-utils/functions/format';
@@ -269,7 +269,7 @@ const paramParsers: { [k in ProposalType]: (params: SpecificProposalDetails<k>,
         : <ApplicationsDetailsByOpening
           openingId={openingId.toNumber()}
           acceptedIds={[succesfulApplicationId.toNumber()]}
-          group={workingGroup.type as WorkingGroupKey}/>,
+          group={workingGroup.type}/>,
       true
     )
   ],
@@ -280,7 +280,7 @@ const paramParsers: { [k in ProposalType]: (params: SpecificProposalDetails<k>,
       'Lead',
       historical
         ? `#${(leadId as WorkerId).toNumber()}`
-        : <LeadInfoFromId group={(group as WorkingGroup).type as WorkingGroupKey} leadId={(leadId as WorkerId).toNumber()}/>,
+        : <LeadInfoFromId group={(group as WorkingGroup).type} leadId={(leadId as WorkerId).toNumber()}/>,
       true
     )
   ],
@@ -291,7 +291,7 @@ const paramParsers: { [k in ProposalType]: (params: SpecificProposalDetails<k>,
       'Lead',
       historical
         ? `#${(leadId as WorkerId).toNumber()}`
-        : <LeadInfoFromId group={(group as WorkingGroup).type as WorkingGroupKey} leadId={(leadId as WorkerId).toNumber()}/>,
+        : <LeadInfoFromId group={(group as WorkingGroup).type} leadId={(leadId as WorkerId).toNumber()}/>,
       true
     )
   ],
@@ -302,7 +302,7 @@ const paramParsers: { [k in ProposalType]: (params: SpecificProposalDetails<k>,
       'Lead',
       historical
         ? `#${(leadId as WorkerId).toNumber()}`
-        : <LeadInfoFromId group={(group as WorkingGroup).type as WorkingGroupKey} leadId={(leadId as WorkerId).toNumber()}/>,
+        : <LeadInfoFromId group={(group as WorkingGroup).type} leadId={(leadId as WorkerId).toNumber()}/>,
       true
     )
   ],
@@ -321,7 +321,7 @@ const paramParsers: { [k in ProposalType]: (params: SpecificProposalDetails<k>,
         'Lead',
         historical
           ? `#${leadId.toNumber()}`
-          : <LeadInfoFromId group={workingGroup.type as WorkingGroupKey} leadId={leadId.toNumber()}/>,
+          : <LeadInfoFromId group={workingGroup.type} leadId={leadId.toNumber()}/>,
         true
       )
     ];

+ 2 - 2
query-node/codegen/package.json

@@ -5,7 +5,7 @@
   "author": "",
   "license": "ISC",
   "dependencies": {
-    "@joystream/hydra-cli": "3.1.0-alpha.1",
-    "@joystream/hydra-typegen": "3.1.0-alpha.1"
+    "@joystream/hydra-cli": "3.1.0-alpha.13",
+    "@joystream/hydra-typegen": "3.1.0-alpha.13"
   }
 }

File diff suppressed because it is too large
+ 618 - 110
query-node/codegen/yarn.lock


+ 1 - 1
query-node/manifest.yml

@@ -284,7 +284,7 @@ mappings:
     #- extrinsic: Sudo.batchCall
     #  handler: handleSudoCall(DatabaseManager,SubstrateEvent)
   preBlockHooks:
-    - handler: loadGenesisData
+    - handler: bootstrapData
       filter:
         height: "[0,0]" # will be executed only at genesis
   postBlockHooks:

+ 0 - 0
query-node/mappings/genesis-data/members.json → query-node/mappings/bootstrap-data/data/channelCategories.json


+ 1 - 0
query-node/mappings/bootstrap-data/data/members.json

@@ -0,0 +1 @@
+[]

+ 0 - 0
query-node/mappings/genesis-data/storageSystem.json → query-node/mappings/bootstrap-data/data/storageSystem.json


+ 1 - 0
query-node/mappings/bootstrap-data/data/videoCategories.json

@@ -0,0 +1 @@
+[]

+ 8 - 0
query-node/mappings/bootstrap-data/data/workingGroups.json

@@ -0,0 +1,8 @@
+{
+  "STORAGE": {
+    "workers": []
+  },
+  "GATEWAY": {
+    "workers": []
+  }
+}

+ 14 - 0
query-node/mappings/bootstrap-data/index.ts

@@ -0,0 +1,14 @@
+import { MemberJson, StorageSystemJson, WorkingGroupsJson, VideoCategoryJson, ChannelCategoryJson } from './types'
+import storageSystemJson from './data/storageSystem.json'
+import membersJson from './data/members.json'
+import workingGroupsJson from './data/workingGroups.json'
+import channelCategoriesJson from './data/channelCategories.json'
+import videoCategoriesJson from './data/videoCategories.json'
+
+const storageSystemData: StorageSystemJson = storageSystemJson
+const membersData: MemberJson[] = membersJson
+const workingGroupsData: WorkingGroupsJson = workingGroupsJson
+const channelCategoriesData: ChannelCategoryJson[] = channelCategoriesJson
+const videoCategoriesData: VideoCategoryJson[] = videoCategoriesJson
+
+export { storageSystemData, membersData, workingGroupsData, channelCategoriesData, videoCategoriesData }

+ 19 - 0
query-node/mappings/bootstrap-data/scripts/api.ts

@@ -0,0 +1,19 @@
+import { ApiPromise, WsProvider } from '@polkadot/api'
+import types from '@joystream/sumer-types/augment/all/defs.json'
+
+export default async function createApi(): Promise<ApiPromise> {
+  // Get URL to websocket endpoint from environment or connect to local node by default
+  const WS_URL = process.env.WS_PROVIDER_ENDPOINT_URI || 'ws://127.0.0.1:9944'
+
+  // explicitly state what RPC we are connecting to
+  console.error('Connecting to RPC at: ' + WS_URL)
+
+  // Initialise the provider
+  const provider = new WsProvider(WS_URL)
+
+  // Create the API and wait until ready
+  const api = await ApiPromise.create({ provider, types })
+  await api.isReadyOrError
+
+  return api
+}

+ 68 - 0
query-node/mappings/bootstrap-data/scripts/fetchCategories.ts

@@ -0,0 +1,68 @@
+import fs from 'fs'
+import path from 'path'
+import { ApolloClient, InMemoryCache, HttpLink, gql } from '@apollo/client'
+import fetch from 'cross-fetch'
+
+type categoryType = {
+  id: string
+  name: string
+  createdInBlock: number
+  createdAt: Date
+  updatedAt: Date
+}
+
+async function main() {
+  const env = process.env
+  const queryNodeUrl: string = env.QUERY_NODE_URL || 'http://127.0.0.1:8081/graphql'
+
+  console.log(`Connecting to Query Node at: ${queryNodeUrl}`)
+  const queryNodeProvider = new ApolloClient({
+    link: new HttpLink({ uri: queryNodeUrl, fetch }),
+    cache: new InMemoryCache(),
+  })
+
+  const videoCategories = await getCategories(queryNodeProvider, 'videoCategories')
+
+  const channelCategories = await getCategories(queryNodeProvider, 'channelCategories')
+
+  fs.writeFileSync(
+    path.resolve(__dirname, '../data/videoCategories.json'),
+    JSON.stringify(videoCategories, undefined, 4)
+  )
+  fs.writeFileSync(
+    path.resolve(__dirname, '../data/channelCategories.json'),
+    JSON.stringify(channelCategories, undefined, 4)
+  )
+
+  console.log(`${videoCategories.length} video categories exported & saved!`)
+  console.log(`${channelCategories.length} channel categories exported & saved!`)
+}
+
+async function getCategories(queryNodeProvider, categoryType): Promise<Array<categoryType>> {
+  const GET_ALL_CATEGORY_ITEMS = gql`
+    query {
+      ${categoryType} {
+        id
+        name
+        createdInBlock
+        createdAt
+        updatedAt
+      }
+    }
+  `
+  const queryResult = await queryNodeProvider.query({ query: GET_ALL_CATEGORY_ITEMS })
+  const categories = queryResult.data[categoryType].map(({ id, name, createdInBlock, createdAt, updatedAt }) => {
+    return {
+      id,
+      name,
+      createdInBlock,
+      createdAt,
+      updatedAt,
+    }
+  })
+  return categories
+}
+
+main()
+  .then(() => process.exit())
+  .catch(console.error)

+ 51 - 0
query-node/mappings/bootstrap-data/scripts/fetchMembersData.ts

@@ -0,0 +1,51 @@
+import createApi from './api'
+import { ApiPromise } from '@polkadot/api'
+import { MemberId, Membership } from '@joystream/sumer-types/augment/all'
+import { BlockHash } from '@polkadot/types/interfaces'
+import { MemberJson } from '../types'
+import fs from 'fs'
+import path from 'path'
+
+async function main() {
+  const api = await createApi()
+  const blockNumner = parseInt(process.env.AT_BLOCK_NUMBER || '')
+  const hash = process.env.AT_BLOCK_NUMBER ? await api.rpc.chain.getBlockHash(blockNumner) : undefined
+
+  if (!hash) {
+    console.warn('No AT_BLOCK_NUMBER was specified! Exporting from current block...')
+  }
+
+  const members = await getAllMembers(api, hash)
+
+  fs.writeFileSync(path.resolve(__dirname, '../data/members.json'), JSON.stringify(members, undefined, 4))
+  const lastMemberId = Math.max(...members.map((m) => parseInt(m.memberId)))
+  console.log(`${members.length} members exported & saved! Last member id: ${lastMemberId}`)
+
+  await api.disconnect()
+}
+
+async function getAllMembers(api: ApiPromise, hash?: BlockHash): Promise<MemberJson[]> {
+  const memberStorageEntries = hash
+    ? await api.query.members.membershipById.entriesAt(hash)
+    : await api.query.members.membershipById.entries()
+  const memberEntries: [MemberId, Membership][] = memberStorageEntries.map(([{ args: [memberId] }, member]) => [
+    memberId,
+    member,
+  ])
+  const members: MemberJson[] = memberEntries.map(([memberId, member]) => ({
+    memberId: memberId.toString(),
+    rootAccount: member.root_account.toString(),
+    controllerAccount: member.controller_account.toString(),
+    handle: member.handle.toString(),
+    avatarUri: member.avatar_uri.toString(),
+    about: member.about.toString(),
+    registeredAtTime: member.registered_at_time.toNumber(),
+    registeredAtBlock: member.registered_at_block.toNumber(),
+  }))
+
+  return members
+}
+
+main()
+  .then(() => process.exit())
+  .catch(console.error)

+ 64 - 0
query-node/mappings/bootstrap-data/scripts/fetchWorkingGroupsData.ts

@@ -0,0 +1,64 @@
+import createApi from './api'
+import { BlockHash } from '@polkadot/types/interfaces'
+import { ApiPromise } from '@polkadot/api'
+import { WorkerJson, WorkingGroupJson, WorkingGroupsJson } from '../types'
+import fs from 'fs'
+import path from 'path'
+
+export enum WorkingGroups {
+  Storage = 'storageWorkingGroup',
+  Gateway = 'gatewayWorkingGroup',
+}
+
+// export flow
+async function main() {
+  // prepare api connection
+  const api = await createApi()
+
+  const blockNumner = parseInt(process.env.AT_BLOCK_NUMBER || '')
+  const hash = process.env.AT_BLOCK_NUMBER ? await api.rpc.chain.getBlockHash(blockNumner) : undefined
+  const now = new Date()
+
+  // get results for all relevant groups
+  const workingGroups: WorkingGroupsJson = {
+    STORAGE: await getWorkingGroupData(api, WorkingGroups.Storage, hash, now),
+    GATEWAY: await getWorkingGroupData(api, WorkingGroups.Gateway, hash, now),
+  }
+
+  // output results
+  fs.writeFileSync(path.resolve(__dirname, '../data/workingGroups.json'), JSON.stringify(workingGroups, undefined, 4))
+  console.log(`${workingGroups.GATEWAY?.workers.length || 0} GATEWAY workers exported & saved!`)
+  console.log(`${workingGroups.STORAGE?.workers.length || 0} STORAGE workers exported & saved!`)
+
+  // disconnect api
+  api.disconnect()
+}
+
+// retrieves all active workers in working group
+async function getWorkingGroupData(
+  api: ApiPromise,
+  group: WorkingGroups,
+  hash: BlockHash | undefined,
+  now: Date
+): Promise<WorkingGroupJson> {
+  // get working group entries
+  const entries = await (hash ? api.query[group].workerById.entriesAt(hash) : api.query[group].workerById.entries())
+
+  const workers: WorkerJson[] = []
+  entries.forEach(([storageKey]) => {
+    // prepare workerId
+    const workerId = storageKey.args[0]
+    // add record
+    workers.push({
+      workerId: workerId.toString(),
+      // set time of running this script as createdAt
+      createdAt: now.getTime(),
+    })
+  })
+
+  return { workers }
+}
+
+main()
+  .then(() => process.exit())
+  .catch(console.error)

+ 51 - 0
query-node/mappings/bootstrap-data/types.ts

@@ -0,0 +1,51 @@
+export type MemberJson = {
+  memberId: string
+  rootAccount: string
+  controllerAccount: string
+  handle: string
+  about?: string
+  avatarUri?: string
+  registeredAtTime: number
+  registeredAtBlock: number
+}
+
+export type StorageSystemJson = {
+  id: string
+  blacklist: string[]
+  storageBucketsPerBagLimit: number
+  distributionBucketsPerBagLimit: number
+  uploadingBlocked: boolean
+  dataObjectFeePerMb: number | string
+  storageBucketMaxObjectsCountLimit: number | string
+  storageBucketMaxObjectsSizeLimit: number | string
+}
+
+export type WorkerJson = {
+  workerId: string
+  metadata?: string
+  createdAt: number
+}
+
+export type WorkingGroupJson = {
+  workers: WorkerJson[]
+}
+
+export type WorkingGroupsJson = {
+  [group in 'GATEWAY' | 'STORAGE']?: WorkingGroupJson
+}
+
+export type VideoCategoryJson = {
+  id: string
+  name: string
+  createdInBlock: number
+  createdAt: string
+  updatedAt: string
+}
+
+export type ChannelCategoryJson = {
+  id: string
+  name: string
+  createdInBlock: number
+  createdAt: string
+  updatedAt: string
+}

+ 98 - 0
query-node/mappings/bootstrap.ts

@@ -0,0 +1,98 @@
+import { StoreContext } from '@joystream/hydra-common'
+import BN from 'bn.js'
+import {
+  Membership,
+  MembershipEntryMethod,
+  StorageSystemParameters,
+  Worker,
+  WorkerType,
+  ChannelCategory,
+  VideoCategory,
+} from 'query-node/dist/model'
+import { workerEntityId } from './workingGroup'
+import {
+  storageSystemData,
+  membersData,
+  workingGroupsData,
+  videoCategoriesData,
+  channelCategoriesData,
+} from './bootstrap-data'
+
+export async function bootstrapData({ store }: StoreContext): Promise<void> {
+  // Storage system
+  await store.save<StorageSystemParameters>(
+    new StorageSystemParameters({
+      ...storageSystemData,
+      storageBucketMaxObjectsCountLimit: new BN(storageSystemData.storageBucketMaxObjectsCountLimit),
+      storageBucketMaxObjectsSizeLimit: new BN(storageSystemData.storageBucketMaxObjectsSizeLimit),
+      dataObjectFeePerMb: new BN(storageSystemData.dataObjectFeePerMb),
+    })
+  )
+
+  // Members
+  const members = membersData.map(
+    (m) =>
+      new Membership({
+        // main data
+        id: m.memberId,
+        rootAccount: m.rootAccount,
+        controllerAccount: m.controllerAccount,
+        handle: m.handle,
+        about: m.about,
+        avatarUri: m.avatarUri,
+        createdInBlock: m.registeredAtBlock,
+        entry: m.registeredAtBlock === 1 ? MembershipEntryMethod.GENESIS : MembershipEntryMethod.PAID,
+        // fill in auto-generated fields
+        createdAt: new Date(m.registeredAtTime),
+        updatedAt: new Date(m.registeredAtTime),
+      })
+  )
+  await Promise.all(members.map((m) => store.save<Membership>(m)))
+
+  // Workers
+  let workers: Worker[] = []
+  ;(['GATEWAY', 'STORAGE'] as const).map((group) => {
+    const workersJson = workingGroupsData[group]?.workers || []
+    workers = workers.concat(
+      workersJson.map(
+        (w) =>
+          new Worker({
+            id: workerEntityId(WorkerType[group], w.workerId),
+            workerId: w.workerId,
+            isActive: true,
+            type: WorkerType[group],
+            createdAt: new Date(w.createdAt),
+            updatedAt: new Date(w.createdAt),
+            metadata: w.metadata,
+          })
+      )
+    )
+  })
+  await Promise.all(workers.map((w) => store.save<Worker>(w)))
+
+  const channelCategories = channelCategoriesData.map(
+    (m) =>
+      new ChannelCategory({
+        id: m.id,
+        name: m.name,
+        channels: [],
+        createdInBlock: m.createdInBlock,
+        createdAt: new Date(m.createdAt),
+        updatedAt: new Date(m.updatedAt),
+      })
+  )
+  await Promise.all(channelCategories.map((m) => store.save<ChannelCategory>(m)))
+
+  const videoCategories = videoCategoriesData.map(
+    (m) =>
+      new VideoCategory({
+        id: m.id,
+        name: m.name,
+        videos: [],
+        createdInBlock: m.createdInBlock,
+        createdAt: new Date(m.createdAt),
+        updatedAt: new Date(m.updatedAt),
+      })
+  )
+  await Promise.all(videoCategories.map((m) => store.save<VideoCategory>(m)))
+}

+ 1 - 1
query-node/mappings/common.ts

@@ -205,7 +205,7 @@ type EntityClass<T extends BaseModel> = {
   name: string
 }
 
-type RelationsArr<T extends BaseModel> = Exclude<
+export type RelationsArr<T extends BaseModel> = Exclude<
   keyof T & string,
   { [K in keyof T]: T[K] extends BaseModel | undefined ? '' : T[K] extends BaseModel[] | undefined ? '' : K }[keyof T]
 >[]

+ 0 - 8
query-node/mappings/genesis-data/index.ts

@@ -1,8 +0,0 @@
-import { MemberJson, StorageSystemJson } from './types'
-import storageSystemJson from './storageSystem.json'
-import membersJson from './members.json'
-
-const storageSystem: StorageSystemJson = storageSystemJson
-const members: MemberJson[] = membersJson
-
-export { storageSystem, members }

+ 0 - 20
query-node/mappings/genesis-data/types.ts

@@ -1,20 +0,0 @@
-export type MemberJson = {
-  member_id: string
-  root_account: string
-  controller_account: string
-  handle: string
-  about?: string
-  avatar_uri?: string
-  registered_at_time: number
-}
-
-export type StorageSystemJson = {
-  id: string
-  blacklist: string[]
-  storageBucketsPerBagLimit: number
-  distributionBucketsPerBagLimit: number
-  uploadingBlocked: boolean
-  dataObjectFeePerMb: number | string
-  storageBucketMaxObjectsCountLimit: number | string
-  storageBucketMaxObjectsSizeLimit: number | string
-}

+ 0 - 35
query-node/mappings/genesis.ts

@@ -1,35 +0,0 @@
-import { StoreContext } from '@joystream/hydra-common'
-import BN from 'bn.js'
-import { Membership, MembershipEntryMethod, StorageSystemParameters } from 'query-node/dist/model'
-import { storageSystem, members } from './genesis-data'
-
-export async function loadGenesisData({ store }: StoreContext): Promise<void> {
-  // Storage system
-  await store.save<StorageSystemParameters>(
-    new StorageSystemParameters({
-      ...storageSystem,
-      storageBucketMaxObjectsCountLimit: new BN(storageSystem.storageBucketMaxObjectsCountLimit),
-      storageBucketMaxObjectsSizeLimit: new BN(storageSystem.storageBucketMaxObjectsSizeLimit),
-      dataObjectFeePerMb: new BN(storageSystem.dataObjectFeePerMb),
-    })
-  )
-  // Members
-  for (const m of members) {
-    // create new membership
-    const member = new Membership({
-      // main data
-      id: m.member_id,
-      rootAccount: m.root_account,
-      controllerAccount: m.controller_account,
-      handle: m.handle,
-      about: m.about,
-      avatarUri: m.avatar_uri,
-      createdInBlock: 0,
-      entry: MembershipEntryMethod.GENESIS,
-      // fill in auto-generated fields
-      createdAt: new Date(m.registered_at_time),
-      updatedAt: new Date(m.registered_at_time),
-    })
-    await store.save<Membership>(member)
-  }
-}

+ 1 - 1
query-node/mappings/index.ts

@@ -2,4 +2,4 @@ export * from './membership'
 export * from './workingGroup'
 export * from './content'
 export * from './storage'
-export * from './genesis'
+export * from './bootstrap'

+ 11 - 4
query-node/mappings/package.json

@@ -10,15 +10,22 @@
     "clean": "rm -rf lib",
     "lint": "eslint . --quiet --ext .ts",
     "checks": "prettier ./ --check && yarn lint",
-    "format": "prettier ./ --write "
+    "format": "prettier ./ --write ",
+    "postinstall": "yarn ts-node ./scripts/postInstall.ts",
+    "bootstrap-data:fetch:members": "yarn ts-node ./bootstrap-data/scripts/fetchMembersData.ts",
+    "bootstrap-data:fetch:categories": "yarn ts-node ./bootstrap-data/scripts/fetchCategories.ts",
+    "bootstrap-data:fetch:workingGroups": "yarn ts-node ./bootstrap-data/scripts/fetchWorkingGroupsData.ts",
+    "bootstrap-data:fetch": "yarn bootstrap-data:fetch:members && yarn bootstrap-data:fetch:workingGroups && yarn bootstrap-data:fetch:categories"
   },
   "dependencies": {
     "@polkadot/types": "5.9.1",
-    "@joystream/hydra-common": "3.1.0-alpha.1",
-    "@joystream/hydra-db-utils": "3.1.0-alpha.1",
+    "@joystream/hydra-common": "3.1.0-alpha.13",
+    "@joystream/hydra-db-utils": "3.1.0-alpha.13",
     "@joystream/metadata-protobuf": "^1.0.0",
+    "@joystream/sumer-types": "npm:@joystream/types@^0.16.0",
     "@joystream/types": "^0.17.0",
-    "@joystream/warthog": "2.35.0"
+    "@joystream/warthog": "2.39.0",
+    "@apollo/client": "^3.2.5"
   },
   "devDependencies": {
     "prettier": "^2.2.1",

+ 35 - 0
query-node/mappings/scripts/postInstall.ts

@@ -0,0 +1,35 @@
+// A script to be executed post query-node install, that may include workarounds in Hydra node_modules
+import fs from 'fs'
+import path from 'path'
+
+// FIXME: Temporarily remove broken sanitizeNullCharacter call
+const subscribersJsPath = path.resolve(
+  __dirname,
+  '../../../node_modules/@joystream/hydra-processor/lib/db/subscribers.js'
+)
+const subscribersJsContent = fs.readFileSync(subscribersJsPath).toString()
+fs.writeFileSync(
+  subscribersJsPath,
+  subscribersJsContent.replace(/sanitizeNullCharacter\(entity, field\);/g, '//sanitizeNullCharacter(entity, field)')
+)
+
+// FIXME: Temporarily replace broken relations resolution in @joystream/warthog
+const dataLoaderJsPath = path.resolve(
+  __dirname,
+  '../../../node_modules/@joystream/warthog/dist/middleware/DataLoaderMiddleware.js'
+)
+const dataLoaderJsContent = fs.readFileSync(dataLoaderJsPath).toString()
+const dataLoaderJsContentLines = dataLoaderJsContent.split('\n')
+dataLoaderJsContentLines.splice(
+  dataLoaderJsContentLines.findIndex((l) => l.match(/return context\.connection\.relationIdLoader/)),
+  0,
+  `return Promise.all(
+    entities.map(entity => context.connection.relationLoader.load(relation, entity))
+  ).then(function (results) {
+    return results.map(function (related) {
+      return (relation.isManyToOne || relation.isOneToOne) ? related[0] : related
+    })
+  })
+  `
+)
+fs.writeFileSync(dataLoaderJsPath, dataLoaderJsContentLines.join('\n'))

+ 49 - 71
query-node/mappings/storage/index.ts

@@ -18,12 +18,9 @@ import {
   StorageDataObject,
   StorageSystemParameters,
   GeoCoordinates,
-  StorageBagDistributionAssignment,
-  StorageBagStorageAssignment,
 } from 'query-node/dist/model'
 import BN from 'bn.js'
-import { getById } from '../common'
-import { In } from 'typeorm'
+import { getById, inconsistentState } from '../common'
 import {
   processDistributionBucketFamilyMetadata,
   processDistributionOperatorMetadata,
@@ -141,25 +138,15 @@ export async function storage_StorageBucketsUpdatedForBag({
   event,
   store,
 }: EventContext & StoreContext): Promise<void> {
-  const [bagId, addedBucketsIds, removedBucketsIds] = new Storage.StorageBucketsUpdatedForBagEvent(event).params
+  const [bagId, addedBucketsSet, removedBucketsSet] = new Storage.StorageBucketsUpdatedForBagEvent(event).params
   // Get or create bag
-  const storageBag = await getBag(store, bagId)
-  const assignmentsToRemove = await store.getMany(StorageBagStorageAssignment, {
-    where: {
-      storageBag,
-      storageBucket: { id: In(Array.from(removedBucketsIds).map((bucketId) => bucketId.toString())) },
-    },
-  })
-  const assignmentsToAdd = Array.from(addedBucketsIds).map(
-    (bucketId) =>
-      new StorageBagStorageAssignment({
-        id: `${storageBag.id}-${bucketId.toString()}`,
-        storageBag,
-        storageBucket: new StorageBucket({ id: bucketId.toString() }),
-      })
-  )
-  await Promise.all(assignmentsToRemove.map((a) => store.remove<StorageBagStorageAssignment>(a)))
-  await Promise.all(assignmentsToAdd.map((a) => store.save<StorageBagStorageAssignment>(a)))
+  const storageBag = await getBag(store, bagId, ['storageBuckets'])
+  const removedBucketsIds = Array.from(removedBucketsSet).map((id) => id.toString())
+  const addedBucketsIds = Array.from(addedBucketsSet).map((id) => id.toString())
+  storageBag.storageBuckets = (storageBag.storageBuckets || [])
+    .filter((bucket) => !removedBucketsIds.includes(bucket.id))
+    .concat(addedBucketsIds.map((id) => new StorageBucket({ id })))
+  await store.save<StorageBag>(storageBag)
 }
 
 export async function storage_VoucherChanged({ event, store }: EventContext & StoreContext): Promise<void> {
@@ -189,11 +176,21 @@ export async function storage_StorageBucketVoucherLimitsSet({
 export async function storage_StorageBucketDeleted({ event, store }: EventContext & StoreContext): Promise<void> {
   const [bucketId] = new Storage.StorageBucketDeletedEvent(event).params
  // TODO: Cascade remove on db level (would require changes in Hydra / committing autogenerated files)
-  const assignments = await store.getMany(StorageBagStorageAssignment, {
-    where: { storageBucket: { id: bucketId.toString() } },
+  const storageBucket = await store.get(StorageBucket, {
+    where: { id: bucketId.toString() },
+    relations: ['bags', 'bags.storageBuckets'],
   })
-  await Promise.all(assignments.map((a) => store.remove<StorageBagStorageAssignment>(a)))
-  await store.remove<StorageBucket>(new StorageBucket({ id: bucketId.toString() }))
+  if (!storageBucket) {
+    inconsistentState(`Storage bucket by id ${bucketId.toString()} not found!`)
+  }
+  // Remove relations
+  await Promise.all(
+    (storageBucket.bags || []).map((bag) => {
+      bag.storageBuckets = (bag.storageBuckets || []).filter((bucket) => bucket.id !== bucketId.toString())
+      return store.save<StorageBag>(bag)
+    })
+  )
+  await store.remove<StorageBucket>(storageBucket)
 }
 
 // DYNAMIC BAGS
@@ -202,36 +199,17 @@ export async function storage_DynamicBagCreated({ event, store }: EventContext &
   const storageBag = new StorageBag({
     id: getDynamicBagId(bagId),
     owner: getDynamicBagOwner(bagId),
+    storageBuckets: Array.from(storageBucketIdsSet).map((id) => new StorageBucket({ id: id.toString() })),
+    distributionBuckets: Array.from(distributionBucketIdsSet).map(
+      (id) => new DistributionBucket({ id: id.toString() })
+    ),
   })
-  const storageAssignments = Array.from(storageBucketIdsSet).map(
-    (bucketId) =>
-      new StorageBagStorageAssignment({
-        id: `${storageBag.id}-${bucketId.toString()}`,
-        storageBag,
-        storageBucket: new StorageBucket({ id: bucketId.toString() }),
-      })
-  )
-  const distributionAssignments = Array.from(distributionBucketIdsSet).map(
-    (bucketId) =>
-      new StorageBagDistributionAssignment({
-        id: `${storageBag.id}-${bucketId.toString()}`,
-        storageBag,
-        distributionBucket: new DistributionBucket({ id: bucketId.toString() }),
-      })
-  )
   await store.save<StorageBag>(storageBag)
-  await Promise.all(storageAssignments.map((a) => store.save<StorageBagStorageAssignment>(a)))
-  await Promise.all(distributionAssignments.map((a) => store.save<StorageBagDistributionAssignment>(a)))
 }
 
 export async function storage_DynamicBagDeleted({ event, store }: EventContext & StoreContext): Promise<void> {
   const [, bagId] = new Storage.DynamicBagDeletedEvent(event).params
   const storageBag = await getDynamicBag(store, bagId, ['objects'])
-  // TODO: Cascade remove on db level (would require changes in Hydra / comitting autogenerated files)
-  const storageAssignments = await store.getMany(StorageBagStorageAssignment, { where: { storageBag } })
-  const distributionAssignments = await store.getMany(StorageBagDistributionAssignment, { where: { storageBag } })
-  await Promise.all(storageAssignments.map((a) => store.remove<StorageBagStorageAssignment>(a)))
-  await Promise.all(distributionAssignments.map((a) => store.remove<StorageBagDistributionAssignment>(a)))
   await store.remove<StorageBag>(storageBag)
 }
 
@@ -341,36 +319,36 @@ export async function storage_DistributionBucketStatusUpdated({
 export async function storage_DistributionBucketDeleted({ event, store }: EventContext & StoreContext): Promise<void> {
   const [, bucketId] = new Storage.DistributionBucketDeletedEvent(event).params
   // TODO: Cascade remove on db level (would require changes in Hydra / comitting autogenerated files)
-  const assignments = await store.getMany(StorageBagDistributionAssignment, {
-    where: { distributionBucket: { id: bucketId.toString() } },
+  const distributionBucket = await store.get(DistributionBucket, {
+    where: { id: bucketId.toString() },
+    relations: ['bags', 'bags.distributionBuckets'],
   })
-  await Promise.all(assignments.map((a) => store.remove<StorageBagDistributionAssignment>(a)))
-  await store.remove<DistributionBucket>(new DistributionBucket({ id: bucketId.toString() }))
+  if (!distributionBucket) {
+    inconsistentState(`Distribution bucket by id ${bucketId.toString()} not found!`)
+  }
+  // Remove relations
+  await Promise.all(
+    (distributionBucket.bags || []).map((bag) => {
+      bag.distributionBuckets = (bag.distributionBuckets || []).filter((bucket) => bucket.id !== bucketId.toString())
+      return store.save<StorageBag>(bag)
+    })
+  )
+  await store.remove<DistributionBucket>(distributionBucket)
 }
 
 export async function storage_DistributionBucketsUpdatedForBag({
   event,
   store,
 }: EventContext & StoreContext): Promise<void> {
-  const [bagId, , addedBucketsIds, removedBucketsIds] = new Storage.DistributionBucketsUpdatedForBagEvent(event).params
+  const [bagId, , addedBucketsSet, removedBucketsSet] = new Storage.DistributionBucketsUpdatedForBagEvent(event).params
   // Get or create bag
-  const storageBag = await getBag(store, bagId)
-  const assignmentsToRemove = await store.getMany(StorageBagDistributionAssignment, {
-    where: {
-      storageBag,
-      distributionBucket: { id: In(Array.from(removedBucketsIds).map((bucketId) => bucketId.toString())) },
-    },
-  })
-  const assignmentsToAdd = Array.from(addedBucketsIds).map(
-    (bucketId) =>
-      new StorageBagDistributionAssignment({
-        id: `${storageBag.id}-${bucketId.toString()}`,
-        storageBag,
-        distributionBucket: new DistributionBucket({ id: bucketId.toString() }),
-      })
-  )
-  await Promise.all(assignmentsToRemove.map((a) => store.remove<StorageBagDistributionAssignment>(a)))
-  await Promise.all(assignmentsToAdd.map((a) => store.save<StorageBagDistributionAssignment>(a)))
+  const storageBag = await getBag(store, bagId, ['distributionBuckets'])
+  const removedBucketsIds = Array.from(removedBucketsSet).map((id) => id.toString())
+  const addedBucketsIds = Array.from(addedBucketsSet).map((id) => id.toString())
+  storageBag.distributionBuckets = (storageBag.distributionBuckets || [])
+    .filter((bucket) => !removedBucketsIds.includes(bucket.id))
+    .concat(addedBucketsIds.map((id) => new DistributionBucket({ id })))
+  await store.save<StorageBag>(storageBag)
 }
 
 export async function storage_DistributionBucketModeUpdated({

+ 8 - 4
query-node/mappings/storage/utils.ts

@@ -17,7 +17,7 @@ import {
   DistributionBucketFamily,
 } from 'query-node/dist/model'
 import BN from 'bn.js'
-import { bytesToString, inconsistentState, getById } from '../common'
+import { bytesToString, inconsistentState, getById, RelationsArr } from '../common'
 import { In } from 'typeorm'
 import { unsetAssetRelations } from '../content/utils'
 
@@ -101,7 +101,7 @@ export function getBagId(bagId: BagId) {
 export async function getDynamicBag(
   store: DatabaseManager,
   bagId: DynamicBagId,
-  relations?: 'objects'[]
+  relations?: RelationsArr<StorageBag>
 ): Promise<StorageBag> {
   return getById(store, StorageBag, getDynamicBagId(bagId), relations)
 }
@@ -109,7 +109,7 @@ export async function getDynamicBag(
 export async function getStaticBag(
   store: DatabaseManager,
   bagId: StaticBagId,
-  relations?: 'objects'[]
+  relations?: RelationsArr<StorageBag>
 ): Promise<StorageBag> {
   const id = getStaticBagId(bagId)
   const bag = await store.get(StorageBag, { where: { id }, relations })
@@ -125,7 +125,11 @@ export async function getStaticBag(
   return bag
 }
 
-export async function getBag(store: DatabaseManager, bagId: BagId, relations?: 'objects'[]): Promise<StorageBag> {
+export async function getBag(
+  store: DatabaseManager,
+  bagId: BagId,
+  relations?: RelationsArr<StorageBag>
+): Promise<StorageBag> {
   return bagId.isStatic
     ? getStaticBag(store, bagId.asStatic, relations)
     : getDynamicBag(store, bagId.asDynamic, relations)

+ 2 - 1
query-node/mappings/tsconfig.json

@@ -15,7 +15,8 @@
     "resolveJsonModule": true,
     "baseUrl": ".",
     "paths": {
-      "@polkadot/types/augment": ["../../types/augment/augment-types.ts"]
+      "@polkadot/types/augment": ["../../types/augment/augment-types.ts"],
+      "@polkadot/api/augment": ["../../types/augment/augment-api.ts"]
     }
   },
   "include": ["./**/*"]

+ 9 - 3
query-node/mappings/workingGroup.ts

@@ -4,6 +4,10 @@ import { Worker, WorkerType } from 'query-node/dist/model'
 import { StorageWorkingGroup } from './generated/types'
 import { WorkerId } from '@joystream/types/augment'
 
+export function workerEntityId(type: WorkerType, workerId: string | WorkerId): string {
+  return `${type}-${workerId.toString()}`
+}
+
 export async function workingGroup_OpeningFilled({ event, store }: EventContext & StoreContext): Promise<void> {
   const workerType = getWorkerType(event)
   if (!workerType) {
@@ -94,11 +98,13 @@ export async function workingGroup_TerminatedLeader({ event, store }: EventConte
 /// ///////////////// Helpers ////////////////////////////////////////////////////
 
 function getWorkerType(event: SubstrateEvent): WorkerType | null {
-  if (event.section === 'storageWorkingGroup') {
+  // Note: event.section is not available!
+  const [eventSection] = event.name.split('.')
+  if (eventSection === 'storageWorkingGroup') {
     return WorkerType.STORAGE
   }
 
-  if (event.section === 'gatewayWorkingGroup') {
+  if (eventSection === 'gatewayWorkingGroup') {
     return WorkerType.GATEWAY
   }
 
@@ -113,7 +119,7 @@ async function createWorker(
 ): Promise<void> {
   // create entity
   const newWorker = new Worker({
-    id: `${workerType}-${workerId.toString()}`,
+    id: workerEntityId(workerType, workerId),
     workerId: workerId.toString(),
     type: workerType,
     isActive: true,

+ 1 - 1
query-node/package.json

@@ -41,7 +41,7 @@
     "tslib": "^2.0.0",
     "@types/bn.js": "^4.11.6",
     "bn.js": "^5.1.2",
-    "@joystream/hydra-processor": "3.1.0-alpha.1"
+    "@joystream/hydra-processor": "3.1.0-alpha.13"
   },
   "volta": {
 		"extends": "../package.json"

+ 8 - 38
query-node/schemas/storage.graphql

@@ -105,8 +105,8 @@ type StorageBucket @entity {
   "Whether the bucket is accepting any new storage bags"
   acceptingNewBags: Boolean!
 
-  "Assignments to store a bag"
-  bagAssignments: [StorageBagStorageAssignment!] @derivedFrom(field: "storageBucket")
+  "Storage bags assigned to the bucket"
+  bags: [StorageBag!] @derivedFrom(field: "storageBuckets")
 
   "Bucket's data object size limit in bytes"
   dataObjectsSizeLimit: BigInt!
@@ -151,46 +151,16 @@ type StorageBag @entity {
   "Data objects in the bag"
   objects: [StorageDataObject!] @derivedFrom(field: "storageBag")
 
-  "Assignments to a storage bucket"
-  storageAssignments: [StorageBagStorageAssignment!] @derivedFrom(field: "storageBag")
+  "Storage buckets assigned to the bag"
+  storageBuckets: [StorageBucket!]
 
-  "Assignments to a distribution bucket"
-  distirbutionAssignments: [StorageBagDistributionAssignment!] @derivedFrom(field: "storageBag")
+  "Distribution buckets assigned to the bag"
+  distributionBuckets: [DistributionBucket!]
 
   "Owner of the storage bag"
   owner: StorageBagOwner!
 }
 
-type StorageBagStorageAssignment @entity {
-  "{storageBagId-storageBucketId}"
-  id: ID!
-
-  "Storage bag to be stored"
-  storageBag: StorageBag!
-
-  "Storage bucket that should store the bag"
-  storageBucket: StorageBucket!
-
-  # Relationship filtering workaround
-  storageBagId: ID
-  storageBucketId: ID
-}
-
-type StorageBagDistributionAssignment @entity {
-  "{storageBagId-distributionBucketId}"
-  id: ID!
-
-  "Storage bag to be distributed"
-  storageBag: StorageBag!
-
-  "Distribution bucket that should distribute the bag"
-  distributionBucket: DistributionBucket!
-
-  # Relationship filtering workaround
-  storageBagId: ID
-  distributionBucketId: ID
-}
-
 type DataObjectTypeChannelAvatar @variant {
   "Related channel entity"
   channel: Channel!
@@ -318,8 +288,8 @@ type DistributionBucket @entity {
   "Whether the bucket is currently distributing content"
   distributing: Boolean!
 
-  "Assignments to distribute a bag"
-  bagAssignments: [StorageBagDistributionAssignment!] @derivedFrom(field: "distributionBucket")
+  "Storage bags assigned to the bucket"
+  bags: [StorageBag!] @derivedFrom(field: "distributionBuckets")
 }
 
 type DistributionBucketFamily @entity {

+ 7 - 11
runtime-modules/common/src/working_group.rs

@@ -11,28 +11,24 @@ use strum_macros::EnumIter;
 #[cfg_attr(feature = "std", derive(Serialize, Deserialize, EnumIter))]
 #[derive(Encode, Decode, Clone, PartialEq, Eq, Copy, Debug, PartialOrd, Ord)]
 pub enum WorkingGroup {
-    /* Reserved
-        /// Forum working group: working_group::Instance1.
-        Forum,
-    */
     /// Storage working group: working_group::Instance2.
-    Storage = 2isize,
+    Storage,
 
     /// Storage working group: working_group::Instance3.
-    Content = 3isize,
+    Content,
 
     /// Operations working group: working_group::Instance4.
-    OperationsAlpha = 4isize,
+    OperationsAlpha,
 
     /// Gateway working group: working_group::Instance5.
-    Gateway = 5isize,
+    Gateway,
 
     /// Distribution working group: working_group::Instance6.
-    Distribution = 6isize,
+    Distribution,
 
     /// Operations working group: working_group::Instance7.
-    OperationsBeta = 7isize,
+    OperationsBeta,
 
     /// Operations working group: working_group::Instance8.
-    OperationsGamma = 8isize,
+    OperationsGamma,
 }

+ 0 - 2
types/augment/all/defs.json

@@ -82,8 +82,6 @@
     },
     "WorkingGroup": {
         "_enum": [
-            "_Reserved0",
-            "_Reserved1",
             "Storage",
             "Content",
             "OperationsAlpha",

+ 0 - 2
types/augment/all/types.ts

@@ -1400,8 +1400,6 @@ export interface WorkerOf extends Struct {
 
 /** @name WorkingGroup */
 export interface WorkingGroup extends Enum {
-  readonly isReserved0: boolean;
-  readonly isReserved1: boolean;
   readonly isStorage: boolean;
   readonly isContent: boolean;
   readonly isOperationsAlpha: boolean;

+ 1 - 8
types/src/common.ts

@@ -70,10 +70,7 @@ export class InputValidationLengthConstraint
   }
 }
 
-// Reserved keys are not part of the exported definition const, since they are not intented to be used
 export const WorkingGroupDef = {
-  // _Reserved0
-  // _Reserved1
   Storage: Null,
   Content: Null,
   OperationsAlpha: Null,
@@ -83,11 +80,7 @@ export const WorkingGroupDef = {
   OperationsGamma: Null,
 } as const
 export type WorkingGroupKey = keyof typeof WorkingGroupDef
-export class WorkingGroup extends JoyEnum({
-  _Reserved0: Null,
-  _Reserved1: Null,
-  ...WorkingGroupDef,
-}) {}
+export class WorkingGroup extends JoyEnum(WorkingGroupDef) {}
 
 // Temporarly in "common", because used both by /working-group and /content-working-group:
 export type ISlashableTerms = {

File diff suppressed because it is too large
+ 467 - 56
yarn.lock


Some files were not shown because too many files changed in this diff