Browse Source

Merge branch 'giza' into giza_distribution_bucket_changes

Shamil Gadelshin 3 years ago
parent
commit
9b9d7fc26c

+ 3 - 2
package.json

@@ -48,9 +48,10 @@
     "typescript": "^4.4.3",
     "bn.js": "4.12.0",
     "rxjs": "^7.4.0",
-    "typeorm": "^0.2.31",
+    "typeorm": "0.2.34",
     "pg": "^8.4.0",
-    "chalk": "^4.0.0"
+    "chalk": "^4.0.0",
+    "@joystream/warthog": "2.39.0"
   },
   "devDependencies": {
     "eslint": "^7.25.0",

+ 6 - 6
pioneer/packages/joy-proposals/src/Proposal/Body.tsx

@@ -16,7 +16,7 @@ import { formatBalance } from '@polkadot/util';
 import PromiseComponent from '@polkadot/joy-utils/react/components/PromiseComponent';
 import ReactMarkdown from 'react-markdown';
 import { StakingPolicy } from '@joystream/types/hiring';
-import { WorkingGroup, WorkingGroupKey } from '@joystream/types/common';
+import { WorkingGroup } from '@joystream/types/common';
 import { ApplicationsDetailsByOpening } from '@polkadot/joy-utils/react/components/working-groups/ApplicationDetails';
 import { LeadInfoFromId } from '@polkadot/joy-utils/react/components/working-groups/LeadInfo';
 import { formatReward } from '@polkadot/joy-utils/functions/format';
@@ -269,7 +269,7 @@ const paramParsers: { [k in ProposalType]: (params: SpecificProposalDetails<k>,
         : <ApplicationsDetailsByOpening
           openingId={openingId.toNumber()}
           acceptedIds={[succesfulApplicationId.toNumber()]}
-          group={workingGroup.type as WorkingGroupKey}/>,
+          group={workingGroup.type}/>,
       true
     )
   ],
@@ -280,7 +280,7 @@ const paramParsers: { [k in ProposalType]: (params: SpecificProposalDetails<k>,
       'Lead',
       historical
         ? `#${(leadId as WorkerId).toNumber()}`
-        : <LeadInfoFromId group={(group as WorkingGroup).type as WorkingGroupKey} leadId={(leadId as WorkerId).toNumber()}/>,
+        : <LeadInfoFromId group={(group as WorkingGroup).type} leadId={(leadId as WorkerId).toNumber()}/>,
       true
     )
   ],
@@ -291,7 +291,7 @@ const paramParsers: { [k in ProposalType]: (params: SpecificProposalDetails<k>,
       'Lead',
       historical
         ? `#${(leadId as WorkerId).toNumber()}`
-        : <LeadInfoFromId group={(group as WorkingGroup).type as WorkingGroupKey} leadId={(leadId as WorkerId).toNumber()}/>,
+        : <LeadInfoFromId group={(group as WorkingGroup).type} leadId={(leadId as WorkerId).toNumber()}/>,
       true
     )
   ],
@@ -302,7 +302,7 @@ const paramParsers: { [k in ProposalType]: (params: SpecificProposalDetails<k>,
       'Lead',
       historical
         ? `#${(leadId as WorkerId).toNumber()}`
-        : <LeadInfoFromId group={(group as WorkingGroup).type as WorkingGroupKey} leadId={(leadId as WorkerId).toNumber()}/>,
+        : <LeadInfoFromId group={(group as WorkingGroup).type} leadId={(leadId as WorkerId).toNumber()}/>,
       true
     )
   ],
@@ -321,7 +321,7 @@ const paramParsers: { [k in ProposalType]: (params: SpecificProposalDetails<k>,
         'Lead',
         historical
           ? `#${leadId.toNumber()}`
-          : <LeadInfoFromId group={workingGroup.type as WorkingGroupKey} leadId={leadId.toNumber()}/>,
+          : <LeadInfoFromId group={workingGroup.type} leadId={leadId.toNumber()}/>,
         true
       )
     ];

+ 3 - 0
query-node/build.sh

@@ -23,6 +23,9 @@ yarn ts-node --project ./mappings/tsconfig.json ./mappings/scripts/postCodegen.t
 # and are inline with root workspace resolutions
 yarn
 
+# Add missing typeorm binary symlink
+ln -s ../../../../../node_modules/typeorm/cli.js ./generated/graphql-server/node_modules/.bin/typeorm
+
 yarn workspace query-node codegen
 yarn workspace query-node build
 

+ 5 - 2
query-node/codegen/package.json

@@ -4,8 +4,11 @@
   "description": "Hydra codegen tools for Joystream Query Node",
   "author": "",
   "license": "ISC",
+  "scripts": {
+    "postinstall": "cd .. && yarn workspace query-node-mappings postHydraCLIInstall"
+  },
   "dependencies": {
-    "@joystream/hydra-cli": "3.1.0-alpha.1",
-    "@joystream/hydra-typegen": "3.1.0-alpha.1"
+    "@joystream/hydra-cli": "3.1.0-alpha.13",
+    "@joystream/hydra-typegen": "3.1.0-alpha.13"
   }
 }

File diff suppressed because it is too large
+ 618 - 110
query-node/codegen/yarn.lock


+ 4 - 4
query-node/mappings/bootstrap-data/types.ts

@@ -38,14 +38,14 @@ export type VideoCategoryJson = {
   id: string
   name: string
   createdInBlock: number
-  createdAt: Date
-  updatedAt: Date
+  createdAt: string
+  updatedAt: string
 }
 
 export type ChannelCategoryJson = {
   id: string
   name: string
   createdInBlock: number
-  createdAt: Date
-  updatedAt: Date
+  createdAt: string
+  updatedAt: string
 }

+ 1 - 1
query-node/mappings/common.ts

@@ -205,7 +205,7 @@ type EntityClass<T extends BaseModel> = {
   name: string
 }
 
-type RelationsArr<T extends BaseModel> = Exclude<
+export type RelationsArr<T extends BaseModel> = Exclude<
   keyof T & string,
   { [K in keyof T]: T[K] extends BaseModel | undefined ? '' : T[K] extends BaseModel[] | undefined ? '' : K }[keyof T]
 >[]

+ 5 - 3
query-node/mappings/package.json

@@ -11,6 +11,8 @@
     "lint": "eslint . --quiet --ext .ts",
     "checks": "prettier ./ --check && yarn lint",
     "format": "prettier ./ --write ",
+    "postinstall": "yarn ts-node ./scripts/postInstall.ts",
+    "postHydraCLIInstall": "yarn ts-node ./scripts/postHydraCLIInstall.ts",
     "bootstrap-data:fetch:members": "yarn ts-node ./bootstrap-data/scripts/fetchMembersData.ts",
     "bootstrap-data:fetch:categories": "yarn ts-node ./bootstrap-data/scripts/fetchCategories.ts",
     "bootstrap-data:fetch:workingGroups": "yarn ts-node ./bootstrap-data/scripts/fetchWorkingGroupsData.ts",
@@ -18,12 +20,12 @@
   },
   "dependencies": {
     "@polkadot/types": "5.9.1",
-    "@joystream/hydra-common": "3.1.0-alpha.1",
-    "@joystream/hydra-db-utils": "3.1.0-alpha.1",
+    "@joystream/hydra-common": "3.1.0-alpha.13",
+    "@joystream/hydra-db-utils": "3.1.0-alpha.13",
     "@joystream/metadata-protobuf": "^1.0.0",
     "@joystream/sumer-types": "npm:@joystream/types@^0.16.0",
     "@joystream/types": "^0.17.0",
-    "@joystream/warthog": "2.35.0",
+    "@joystream/warthog": "2.39.0",
     "@apollo/client": "^3.2.5"
   },
   "devDependencies": {

+ 23 - 0
query-node/mappings/scripts/postHydraCLIInstall.ts

@@ -0,0 +1,23 @@
+// A script to be executed post hydra-cli install, that may include patches for Hydra CLI
+import path from 'path'
+import { replaceInFile } from './utils'
+
+// FIXME: Temporary fix for missing JOIN and HAVING conditions in search queries (Hydra)
+const searchServiceTemplatePath = path.resolve(
+  __dirname,
+  '../../codegen/node_modules/@joystream/hydra-cli/lib/src/templates/textsearch/service.ts.mst'
+)
+
+replaceInFile({
+  filePath: searchServiceTemplatePath,
+  regex: /queries = queries\.concat\(generateSqlQuery\(repositories\[index\]\.metadata\.tableName, WHERE\)\);/,
+  newContent:
+    'queries = queries.concat(generateSqlQuery(repositories[index].metadata.tableName, qb.createJoinExpression(), WHERE, qb.createHavingExpression()));',
+})
+
+replaceInFile({
+  filePath: searchServiceTemplatePath,
+  regex: /const generateSqlQuery =[\s\S]+\+ where;/,
+  newContent: `const generateSqlQuery = (table: string, joins: string, where: string, having: string) =>
+  \`SELECT '\${table}_' || "\${table}"."id" AS unique_id FROM "\${table}" \` + joins + ' ' + where + ' ' + having;`,
+})

+ 45 - 0
query-node/mappings/scripts/postInstall.ts

@@ -0,0 +1,45 @@
+// A script to be executed post query-node install, that may include workarounds in Hydra node_modules
+import path from 'path'
+import { replaceInFile } from './utils'
+
+// FIXME: Temporarily remove broken sanitizeNullCharacter call
+const subscribersJsPath = path.resolve(
+  __dirname,
+  '../../../node_modules/@joystream/hydra-processor/lib/db/subscribers.js'
+)
+replaceInFile({
+  filePath: subscribersJsPath,
+  regex: /sanitizeNullCharacter\(entity, field\);/g,
+  newContent: '//sanitizeNullCharacter(entity, field)',
+})
+
+// FIXME: Temporarily replace broken relations resolution in @joystream/warthog
+const dataLoaderJsPath = path.resolve(
+  __dirname,
+  '../../../node_modules/@joystream/warthog/dist/middleware/DataLoaderMiddleware.js'
+)
+replaceInFile({
+  filePath: dataLoaderJsPath,
+  regex: /return context\.connection\.relationIdLoader[\s\S]+return group\.related;\s+\}\);\s+\}\)/,
+  newContent: `return Promise.all(
+    entities.map(entity => context.connection.relationLoader.load(relation, entity))
+  ).then(function (results) {
+    return results.map(function (related) {
+      return (relation.isManyToOne || relation.isOneToOne) ? related[0] : related
+    })
+  })`,
+})
+
+// FIXME: Temporary fix for "table name x specified more than once"
+const baseServiceJsPath = path.resolve(__dirname, '../../../node_modules/@joystream/warthog/dist/core/BaseService.js')
+replaceInFile({
+  filePath: baseServiceJsPath,
+  regex: /function common\(parameters, localIdColumn, foreignTableName, foreignColumnMap, foreignColumnName\) \{[^}]+\}/,
+  newContent: `function common(parameters, localIdColumn, foreignTableName, foreignColumnMap, foreignColumnName) {
+    const uuid = require('uuid/v4')
+    const foreignTableAlias = uuid().replace('-', '')
+    var foreingIdColumn = "\\"" + foreignTableAlias + "\\".\\"" + foreignColumnMap[foreignColumnName] + "\\"";
+    parameters.topLevelQb.leftJoin(foreignTableName, foreignTableAlias, localIdColumn + " = " + foreingIdColumn);
+    addWhereCondition(parameters, foreignTableAlias, foreignColumnMap);
+  }`,
+})

+ 19 - 0
query-node/mappings/scripts/utils.ts

@@ -0,0 +1,19 @@
+import fs from 'fs'
+import { blake2AsHex } from '@polkadot/util-crypto'
+
+type ReplaceLinesInFileParams = {
+  filePath: string
+  regex: RegExp
+  newContent: string
+}
+
+export function replaceInFile({ filePath, regex, newContent }: ReplaceLinesInFileParams): void {
+  const paramsHash = blake2AsHex(filePath + '|' + regex.source + '|' + newContent)
+  const startMark = `/* BEGIN REPLACED CONTENT ${paramsHash} */`
+  const endMark = `/* END REPLACED CONTENT ${paramsHash} */`
+  const fileContent = fs.readFileSync(filePath).toString()
+  if (fileContent.includes(startMark)) {
+    return
+  }
+  fs.writeFileSync(filePath, fileContent.replace(regex, `${startMark}\n${newContent}\n${endMark}`))
+}

+ 49 - 71
query-node/mappings/storage/index.ts

@@ -18,12 +18,9 @@ import {
   StorageDataObject,
   StorageSystemParameters,
   GeoCoordinates,
-  StorageBagDistributionAssignment,
-  StorageBagStorageAssignment,
 } from 'query-node/dist/model'
 import BN from 'bn.js'
-import { getById } from '../common'
-import { In } from 'typeorm'
+import { getById, inconsistentState } from '../common'
 import {
   processDistributionBucketFamilyMetadata,
   processDistributionOperatorMetadata,
@@ -141,25 +138,15 @@ export async function storage_StorageBucketsUpdatedForBag({
   event,
   store,
 }: EventContext & StoreContext): Promise<void> {
-  const [bagId, addedBucketsIds, removedBucketsIds] = new Storage.StorageBucketsUpdatedForBagEvent(event).params
+  const [bagId, addedBucketsSet, removedBucketsSet] = new Storage.StorageBucketsUpdatedForBagEvent(event).params
   // Get or create bag
-  const storageBag = await getBag(store, bagId)
-  const assignmentsToRemove = await store.getMany(StorageBagStorageAssignment, {
-    where: {
-      storageBag,
-      storageBucket: { id: In(Array.from(removedBucketsIds).map((bucketId) => bucketId.toString())) },
-    },
-  })
-  const assignmentsToAdd = Array.from(addedBucketsIds).map(
-    (bucketId) =>
-      new StorageBagStorageAssignment({
-        id: `${storageBag.id}-${bucketId.toString()}`,
-        storageBag,
-        storageBucket: new StorageBucket({ id: bucketId.toString() }),
-      })
-  )
-  await Promise.all(assignmentsToRemove.map((a) => store.remove<StorageBagStorageAssignment>(a)))
-  await Promise.all(assignmentsToAdd.map((a) => store.save<StorageBagStorageAssignment>(a)))
+  const storageBag = await getBag(store, bagId, ['storageBuckets'])
+  const removedBucketsIds = Array.from(removedBucketsSet).map((id) => id.toString())
+  const addedBucketsIds = Array.from(addedBucketsSet).map((id) => id.toString())
+  storageBag.storageBuckets = (storageBag.storageBuckets || [])
+    .filter((bucket) => !removedBucketsIds.includes(bucket.id))
+    .concat(addedBucketsIds.map((id) => new StorageBucket({ id })))
+  await store.save<StorageBag>(storageBag)
 }
 
 export async function storage_VoucherChanged({ event, store }: EventContext & StoreContext): Promise<void> {
@@ -189,11 +176,21 @@ export async function storage_StorageBucketVoucherLimitsSet({
 export async function storage_StorageBucketDeleted({ event, store }: EventContext & StoreContext): Promise<void> {
   const [bucketId] = new Storage.StorageBucketDeletedEvent(event).params
   // TODO: Cascade remove on db level (would require changes in Hydra / comitting autogenerated files)
-  const assignments = await store.getMany(StorageBagStorageAssignment, {
-    where: { storageBucket: { id: bucketId.toString() } },
+  const storageBucket = await store.get(StorageBucket, {
+    where: { id: bucketId.toString() },
+    relations: ['bags', 'bags.storageBuckets'],
   })
-  await Promise.all(assignments.map((a) => store.remove<StorageBagStorageAssignment>(a)))
-  await store.remove<StorageBucket>(new StorageBucket({ id: bucketId.toString() }))
+  if (!storageBucket) {
+    inconsistentState(`Storage bucket by id ${bucketId.toString()} not found!`)
+  }
+  // Remove relations
+  await Promise.all(
+    (storageBucket.bags || []).map((bag) => {
+      bag.storageBuckets = (bag.storageBuckets || []).filter((bucket) => bucket.id !== bucketId.toString())
+      return store.save<StorageBag>(bag)
+    })
+  )
+  await store.remove<StorageBucket>(storageBucket)
 }
 
 // DYNAMIC BAGS
@@ -202,36 +199,17 @@ export async function storage_DynamicBagCreated({ event, store }: EventContext &
   const storageBag = new StorageBag({
     id: getDynamicBagId(bagId),
     owner: getDynamicBagOwner(bagId),
+    storageBuckets: Array.from(storageBucketIdsSet).map((id) => new StorageBucket({ id: id.toString() })),
+    distributionBuckets: Array.from(distributionBucketIdsSet).map(
+      (id) => new DistributionBucket({ id: id.toString() })
+    ),
   })
-  const storageAssignments = Array.from(storageBucketIdsSet).map(
-    (bucketId) =>
-      new StorageBagStorageAssignment({
-        id: `${storageBag.id}-${bucketId.toString()}`,
-        storageBag,
-        storageBucket: new StorageBucket({ id: bucketId.toString() }),
-      })
-  )
-  const distributionAssignments = Array.from(distributionBucketIdsSet).map(
-    (bucketId) =>
-      new StorageBagDistributionAssignment({
-        id: `${storageBag.id}-${bucketId.toString()}`,
-        storageBag,
-        distributionBucket: new DistributionBucket({ id: bucketId.toString() }),
-      })
-  )
   await store.save<StorageBag>(storageBag)
-  await Promise.all(storageAssignments.map((a) => store.save<StorageBagStorageAssignment>(a)))
-  await Promise.all(distributionAssignments.map((a) => store.save<StorageBagDistributionAssignment>(a)))
 }
 
 export async function storage_DynamicBagDeleted({ event, store }: EventContext & StoreContext): Promise<void> {
   const [, bagId] = new Storage.DynamicBagDeletedEvent(event).params
   const storageBag = await getDynamicBag(store, bagId, ['objects'])
-  // TODO: Cascade remove on db level (would require changes in Hydra / comitting autogenerated files)
-  const storageAssignments = await store.getMany(StorageBagStorageAssignment, { where: { storageBag } })
-  const distributionAssignments = await store.getMany(StorageBagDistributionAssignment, { where: { storageBag } })
-  await Promise.all(storageAssignments.map((a) => store.remove<StorageBagStorageAssignment>(a)))
-  await Promise.all(distributionAssignments.map((a) => store.remove<StorageBagDistributionAssignment>(a)))
   await store.remove<StorageBag>(storageBag)
 }
 
@@ -341,36 +319,36 @@ export async function storage_DistributionBucketStatusUpdated({
 export async function storage_DistributionBucketDeleted({ event, store }: EventContext & StoreContext): Promise<void> {
   const [, bucketId] = new Storage.DistributionBucketDeletedEvent(event).params
   // TODO: Cascade remove on db level (would require changes in Hydra / comitting autogenerated files)
-  const assignments = await store.getMany(StorageBagDistributionAssignment, {
-    where: { distributionBucket: { id: bucketId.toString() } },
+  const distributionBucket = await store.get(DistributionBucket, {
+    where: { id: bucketId.toString() },
+    relations: ['bags', 'bags.distributionBuckets'],
   })
-  await Promise.all(assignments.map((a) => store.remove<StorageBagDistributionAssignment>(a)))
-  await store.remove<DistributionBucket>(new DistributionBucket({ id: bucketId.toString() }))
+  if (!distributionBucket) {
+    inconsistentState(`Distribution bucket by id ${bucketId.toString()} not found!`)
+  }
+  // Remove relations
+  await Promise.all(
+    (distributionBucket.bags || []).map((bag) => {
+      bag.distributionBuckets = (bag.distributionBuckets || []).filter((bucket) => bucket.id !== bucketId.toString())
+      return store.save<StorageBag>(bag)
+    })
+  )
+  await store.remove<DistributionBucket>(distributionBucket)
 }
 
 export async function storage_DistributionBucketsUpdatedForBag({
   event,
   store,
 }: EventContext & StoreContext): Promise<void> {
-  const [bagId, , addedBucketsIds, removedBucketsIds] = new Storage.DistributionBucketsUpdatedForBagEvent(event).params
+  const [bagId, , addedBucketsSet, removedBucketsSet] = new Storage.DistributionBucketsUpdatedForBagEvent(event).params
   // Get or create bag
-  const storageBag = await getBag(store, bagId)
-  const assignmentsToRemove = await store.getMany(StorageBagDistributionAssignment, {
-    where: {
-      storageBag,
-      distributionBucket: { id: In(Array.from(removedBucketsIds).map((bucketId) => bucketId.toString())) },
-    },
-  })
-  const assignmentsToAdd = Array.from(addedBucketsIds).map(
-    (bucketId) =>
-      new StorageBagDistributionAssignment({
-        id: `${storageBag.id}-${bucketId.toString()}`,
-        storageBag,
-        distributionBucket: new DistributionBucket({ id: bucketId.toString() }),
-      })
-  )
-  await Promise.all(assignmentsToRemove.map((a) => store.remove<StorageBagDistributionAssignment>(a)))
-  await Promise.all(assignmentsToAdd.map((a) => store.save<StorageBagDistributionAssignment>(a)))
+  const storageBag = await getBag(store, bagId, ['distributionBuckets'])
+  const removedBucketsIds = Array.from(removedBucketsSet).map((id) => id.toString())
+  const addedBucketsIds = Array.from(addedBucketsSet).map((id) => id.toString())
+  storageBag.distributionBuckets = (storageBag.distributionBuckets || [])
+    .filter((bucket) => !removedBucketsIds.includes(bucket.id))
+    .concat(addedBucketsIds.map((id) => new DistributionBucket({ id })))
+  await store.save<StorageBag>(storageBag)
 }
 
 export async function storage_DistributionBucketModeUpdated({

+ 8 - 4
query-node/mappings/storage/utils.ts

@@ -17,7 +17,7 @@ import {
   DistributionBucketFamily,
 } from 'query-node/dist/model'
 import BN from 'bn.js'
-import { bytesToString, inconsistentState, getById } from '../common'
+import { bytesToString, inconsistentState, getById, RelationsArr } from '../common'
 import { In } from 'typeorm'
 import { unsetAssetRelations } from '../content/utils'
 
@@ -101,7 +101,7 @@ export function getBagId(bagId: BagId) {
 export async function getDynamicBag(
   store: DatabaseManager,
   bagId: DynamicBagId,
-  relations?: 'objects'[]
+  relations?: RelationsArr<StorageBag>
 ): Promise<StorageBag> {
   return getById(store, StorageBag, getDynamicBagId(bagId), relations)
 }
@@ -109,7 +109,7 @@ export async function getDynamicBag(
 export async function getStaticBag(
   store: DatabaseManager,
   bagId: StaticBagId,
-  relations?: 'objects'[]
+  relations?: RelationsArr<StorageBag>
 ): Promise<StorageBag> {
   const id = getStaticBagId(bagId)
   const bag = await store.get(StorageBag, { where: { id }, relations })
@@ -125,7 +125,11 @@ export async function getStaticBag(
   return bag
 }
 
-export async function getBag(store: DatabaseManager, bagId: BagId, relations?: 'objects'[]): Promise<StorageBag> {
+export async function getBag(
+  store: DatabaseManager,
+  bagId: BagId,
+  relations?: RelationsArr<StorageBag>
+): Promise<StorageBag> {
   return bagId.isStatic
     ? getStaticBag(store, bagId.asStatic, relations)
     : getDynamicBag(store, bagId.asDynamic, relations)

+ 1 - 1
query-node/package.json

@@ -41,7 +41,7 @@
     "tslib": "^2.0.0",
     "@types/bn.js": "^4.11.6",
     "bn.js": "^5.1.2",
-    "@joystream/hydra-processor": "3.1.0-alpha.1"
+    "@joystream/hydra-processor": "3.1.0-alpha.13"
   },
   "volta": {
 		"extends": "../package.json"

+ 8 - 38
query-node/schemas/storage.graphql

@@ -105,8 +105,8 @@ type StorageBucket @entity {
   "Whether the bucket is accepting any new storage bags"
   acceptingNewBags: Boolean!
 
-  "Assignments to store a bag"
-  bagAssignments: [StorageBagStorageAssignment!] @derivedFrom(field: "storageBucket")
+  "Storage bags assigned to the bucket"
+  bags: [StorageBag!] @derivedFrom(field: "storageBuckets")
 
   "Bucket's data object size limit in bytes"
   dataObjectsSizeLimit: BigInt!
@@ -151,46 +151,16 @@ type StorageBag @entity {
   "Data objects in the bag"
   objects: [StorageDataObject!] @derivedFrom(field: "storageBag")
 
-  "Assignments to a storage bucket"
-  storageAssignments: [StorageBagStorageAssignment!] @derivedFrom(field: "storageBag")
+  "Storage buckets assigned to the bag"
+  storageBuckets: [StorageBucket!]
 
-  "Assignments to a distribution bucket"
-  distirbutionAssignments: [StorageBagDistributionAssignment!] @derivedFrom(field: "storageBag")
+  "Distribution buckets assigned to the bag"
+  distributionBuckets: [DistributionBucket!]
 
   "Owner of the storage bag"
   owner: StorageBagOwner!
 }
 
-type StorageBagStorageAssignment @entity {
-  "{storageBagId-storageBucketId}"
-  id: ID!
-
-  "Storage bag to be stored"
-  storageBag: StorageBag!
-
-  "Storage bucket that should store the bag"
-  storageBucket: StorageBucket!
-
-  # Relationship filtering workaround
-  storageBagId: ID
-  storageBucketId: ID
-}
-
-type StorageBagDistributionAssignment @entity {
-  "{storageBagId-distributionBucketId}"
-  id: ID!
-
-  "Storage bag to be distributed"
-  storageBag: StorageBag!
-
-  "Distribution bucket that should distribute the bag"
-  distributionBucket: DistributionBucket!
-
-  # Relationship filtering workaround
-  storageBagId: ID
-  distributionBucketId: ID
-}
-
 type DataObjectTypeChannelAvatar @variant {
   "Related channel entity"
   channel: Channel!
@@ -318,8 +288,8 @@ type DistributionBucket @entity {
   "Whether the bucket is currently distributing content"
   distributing: Boolean!
 
-  "Assignments to distribute a bag"
-  bagAssignments: [StorageBagDistributionAssignment!] @derivedFrom(field: "distributionBucket")
+  "Storage bags assigned to the bucket"
+  bags: [StorageBag!] @derivedFrom(field: "distributionBuckets")
 }
 
 type DistributionBucketFamily @entity {

+ 7 - 11
runtime-modules/common/src/working_group.rs

@@ -11,28 +11,24 @@ use strum_macros::EnumIter;
 #[cfg_attr(feature = "std", derive(Serialize, Deserialize, EnumIter))]
 #[derive(Encode, Decode, Clone, PartialEq, Eq, Copy, Debug, PartialOrd, Ord)]
 pub enum WorkingGroup {
-    /* Reserved
-        /// Forum working group: working_group::Instance1.
-        Forum,
-    */
     /// Storage working group: working_group::Instance2.
-    Storage = 2isize,
+    Storage,
 
     /// Storage working group: working_group::Instance3.
-    Content = 3isize,
+    Content,
 
     /// Operations working group: working_group::Instance4.
-    OperationsAlpha = 4isize,
+    OperationsAlpha,
 
     /// Gateway working group: working_group::Instance5.
-    Gateway = 5isize,
+    Gateway,
 
     /// Distribution working group: working_group::Instance6.
-    Distribution = 6isize,
+    Distribution,
 
     /// Operations working group: working_group::Instance7.
-    OperationsBeta = 7isize,
+    OperationsBeta,
 
     /// Operations working group: working_group::Instance8.
-    OperationsGamma = 8isize,
+    OperationsGamma,
 }

+ 0 - 2
types/augment/all/defs.json

@@ -82,8 +82,6 @@
     },
     "WorkingGroup": {
         "_enum": [
-            "_Reserved0",
-            "_Reserved1",
             "Storage",
             "Content",
             "OperationsAlpha",

+ 0 - 2
types/augment/all/types.ts

@@ -1405,8 +1405,6 @@ export interface WorkerOf extends Struct {
 
 /** @name WorkingGroup */
 export interface WorkingGroup extends Enum {
-  readonly isReserved0: boolean;
-  readonly isReserved1: boolean;
   readonly isStorage: boolean;
   readonly isContent: boolean;
   readonly isOperationsAlpha: boolean;

+ 1 - 8
types/src/common.ts

@@ -70,10 +70,7 @@ export class InputValidationLengthConstraint
   }
 }
 
-// Reserved keys are not part of the exported definition const, since they are not intented to be used
 export const WorkingGroupDef = {
-  // _Reserved0
-  // _Reserved1
   Storage: Null,
   Content: Null,
   OperationsAlpha: Null,
@@ -83,11 +80,7 @@ export const WorkingGroupDef = {
   OperationsGamma: Null,
 } as const
 export type WorkingGroupKey = keyof typeof WorkingGroupDef
-export class WorkingGroup extends JoyEnum({
-  _Reserved0: Null,
-  _Reserved1: Null,
-  ...WorkingGroupDef,
-}) {}
+export class WorkingGroup extends JoyEnum(WorkingGroupDef) {}
 
 // Temporarly in "common", because used both by /working-group and /content-working-group:
 export type ISlashableTerms = {

File diff suppressed because it is too large
+ 477 - 48
yarn.lock


Some files were not shown because too many files changed in this diff