
Merge branch 'storage_v2' into giza-protobuf-and-query-node

Leszek Wiesner committed 3 years ago
commit e4a282916b

+ 0 - 6
.github/workflows/query-node.yml

@@ -17,9 +17,6 @@ jobs:
     - name: checks
       run: |
         yarn install --frozen-lockfile
-        yarn workspace @joystream/types build
-        yarn workspace @joystream/metadata-protobuf build
-        ./query-node/build.sh
         yarn workspace query-node-mappings checks --quiet
 
   query_node_build_osx:
@@ -37,7 +34,4 @@ jobs:
     - name: checks
       run: |
         yarn install --frozen-lockfile --network-timeout 120000
-        yarn workspace @joystream/types build
-        yarn workspace @joystream/metadata-protobuf build
-        ./query-node/build.sh
         yarn workspace query-node-mappings checks --quiet

+ 2 - 2
metadata-protobuf/src/types.ts

@@ -10,8 +10,8 @@ export type AnyMetadataClass<T> = {
   decode(binary: Uint8Array): AnyMessage<T>
   encode(obj: T): { finish(): Uint8Array }
   toObject(obj: AnyMessage<T>, options?: IConversionOptions): Record<string, unknown>
-  verify(message: { [k: string]: any }): null | string
-  fromObject(object: { [k: string]: any }): AnyMessage<T>
+  verify(message: { [k: string]: unknown }): null | string
+  fromObject(object: { [k: string]: unknown }): AnyMessage<T>
 }
 
 export type DecodedMetadataObject<T> = {
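
A side note on the `any` → `unknown` change above: callers can still pass arbitrary objects into `verify`/`fromObject`, but any code that reads values typed `unknown` must narrow them first, so type mistakes surface at compile time. A minimal illustration (hypothetical caller, not part of the commit):

```ts
function describeMessage(message: { [k: string]: unknown }): string {
  const title = message.title
  // With `any`, `title.toUpperCase()` would compile even if `title` were a number.
  // With `unknown`, an explicit narrowing is required first:
  return typeof title === 'string' ? title.toUpperCase() : '<untitled>'
}
```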

+ 1 - 1
metadata-protobuf/src/utils.ts

@@ -6,7 +6,7 @@ export function isSet<T>(v: T | null | undefined): v is T {
   return v !== null && v !== undefined
 }
 
-export function isEmptyObject(object: Record<string, any>): boolean {
+export function isEmptyObject(object: Record<string, unknown>): boolean {
   return Object.keys(object).length === 0
 }
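
With `Record<string, unknown>`, `isEmptyObject` now accepts decoded metadata objects without casts. Usage, for illustration only:

```ts
isEmptyObject({}) // true
isEmptyObject({ a: 1 }) // false
```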
 

+ 3 - 6
query-node/manifest.yml

@@ -34,7 +34,6 @@ typegen:
     - storage.DynamicBagCreated
     - storage.VoucherChanged
     - storage.StorageBucketDeleted
-    - storage.NumberOfStorageBucketsInDynamicBagCreationPolicyUpdated
     - storage.DistributionBucketFamilyCreated
     - storage.DistributionBucketFamilyDeleted
     - storage.DistributionBucketCreated
@@ -43,13 +42,15 @@ typegen:
     - storage.DistributionBucketsUpdatedForBag
     - storage.DistributionBucketsPerBagLimitUpdated
     - storage.DistributionBucketModeUpdated
-    - storage.FamiliesInDynamicBagCreationPolicyUpdated
     - storage.DistributionBucketOperatorInvited
     - storage.DistributionBucketInvitationCancelled
     - storage.DistributionBucketInvitationAccepted
     - storage.DistributionBucketMetadataSet
     - storage.DistributionBucketOperatorRemoved
     - storage.DistributionBucketFamilyMetadataSet
+    # Not required:
+    # - storage.NumberOfStorageBucketsInDynamicBagCreationPolicyUpdated
+    # - storage.FamiliesInDynamicBagCreationPolicyUpdated
 
     # membership
     - members.MemberRegistered
@@ -246,8 +247,6 @@ mappings:
       handler: storage_VoucherChanged
     - event: storage.StorageBucketDeleted
       handler: storage_StorageBucketDeleted
-    - event: storage.NumberOfStorageBucketsInDynamicBagCreationPolicyUpdated
-      handler: storage_NumberOfStorageBucketsInDynamicBagCreationPolicyUpdated
     - event: storage.DistributionBucketFamilyCreated
       handler: storage_DistributionBucketFamilyCreated
     - event: storage.DistributionBucketFamilyDeleted
@@ -264,8 +263,6 @@ mappings:
       handler: storage_DistributionBucketsPerBagLimitUpdated
     - event: storage.DistributionBucketModeUpdated
       handler: storage_DistributionBucketModeUpdated
-    - event: storage.FamiliesInDynamicBagCreationPolicyUpdated
-      handler: storage_FamiliesInDynamicBagCreationPolicyUpdated
     - event: storage.DistributionBucketOperatorInvited
       handler: storage_DistributionBucketOperatorInvited
     - event: storage.DistributionBucketInvitationCancelled

+ 1 - 0
query-node/mappings/.eslintignore

@@ -1 +1,2 @@
 lib/
+generated

+ 1 - 0
query-node/mappings/.prettierignore

@@ -1 +1,2 @@
 lib/
+generated

+ 10 - 1
query-node/mappings/giza/genesis-data/storageSystem.json

@@ -1 +1,10 @@
-{ "blacklist": [] }
+{
+  "id": "1",
+  "blacklist": [],
+  "storageBucketsPerBagLimit": 0,
+  "distributionBucketsPerBagLimit": 0,
+  "uploadingBlocked": false,
+  "dataObjectFeePerMb": 0,
+  "storageBucketMaxObjectsCountLimit": 0,
+  "storageBucketMaxObjectsSizeLimit": 0
+}

+ 7 - 0
query-node/mappings/giza/genesis-data/types.ts

@@ -9,5 +9,12 @@ export type MemberJson = {
 }
 
 export type StorageSystemJson = {
+  id: string
   blacklist: string[]
+  storageBucketsPerBagLimit: number
+  distributionBucketsPerBagLimit: number
+  uploadingBlocked: boolean
+  dataObjectFeePerMb: number | string
+  storageBucketMaxObjectsCountLimit: number | string
+  storageBucketMaxObjectsSizeLimit: number | string
 }

+ 4 - 0
query-node/mappings/giza/genesis.ts

@@ -1,4 +1,5 @@
 import { StoreContext } from '@joystream/hydra-common'
+import BN from 'bn.js'
 import { Membership, MembershipEntryMethod, StorageSystemParameters } from 'query-node/dist/model'
 import { storageSystem, members } from './genesis-data'
 
@@ -7,6 +8,9 @@ export async function loadGenesisData({ store }: StoreContext): Promise<void> {
   await store.save<StorageSystemParameters>(
     new StorageSystemParameters({
       ...storageSystem,
+      storageBucketMaxObjectsCountLimit: new BN(storageSystem.storageBucketMaxObjectsCountLimit),
+      storageBucketMaxObjectsSizeLimit: new BN(storageSystem.storageBucketMaxObjectsSizeLimit),
+      dataObjectFeePerMb: new BN(storageSystem.dataObjectFeePerMb),
     })
   )
   // Members
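
The explicit `new BN(...)` wrapping above matters because these genesis fields are typed `number | string`: u64/u128-sized limits can exceed `Number.MAX_SAFE_INTEGER`, so large values are kept as strings in the JSON and converted before saving. A minimal sketch of the same conversion, assuming `bn.js`:

```ts
import BN from 'bn.js'

// Sketch: convert a JSON-sourced limit (number | string) into a BN.
function toBN(value: number | string): BN {
  // BN parses decimal strings of arbitrary length; plain numbers must stay safe integers.
  if (typeof value === 'number' && !Number.isSafeInteger(value)) {
    throw new Error(`Unsafe integer literal: ${value} (use a string in JSON instead)`)
  }
  return new BN(value)
}

toBN('18446744073709551615') // u64 max: representable as a string, not as a JS number
```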

+ 98 - 69
query-node/mappings/giza/storage/index.ts

@@ -90,9 +90,9 @@ function getDynamicBagOwner(bagId: DynamicBagId) {
 
 function getStaticBagId(bagId: StaticBagId): string {
   if (bagId.isCouncil) {
-    return `CO`
+    return `static:council`
   } else if (bagId.isWorkingGroup) {
-    return `WG-${bagId.asWorkingGroup.type}`
+    return `static:wg:${bagId.asWorkingGroup.type.toLowerCase()}`
   } else {
     throw new Error(`Unexpected static bag type: ${bagId.type}`)
   }
@@ -100,9 +100,9 @@ function getStaticBagId(bagId: StaticBagId): string {
 
 function getDynamicBagId(bagId: DynamicBagId): string {
   if (bagId.isChannel) {
-    return `CH-${bagId.asChannel.toString()}`
+    return `dynamic:channel:${bagId.asChannel.toString()}`
   } else if (bagId.isMember) {
-    return `M-${bagId.asMember.toString()}`
+    return `dynamic:member:${bagId.asMember.toString()}`
   } else {
     throw new Error(`Unexpected dynamic bag type: ${bagId.type}`)
   }
@@ -180,7 +180,16 @@ async function getDistributionBucketFamilyWithMetadata(
   return family
 }
 
-// BUCKETS
+async function getStorageSystem(store: DatabaseManager) {
+  const storageSystem = await store.get(StorageSystemParameters, {})
+  if (!storageSystem) {
+    throw new Error('Storage system entity is missing!')
+  }
+
+  return storageSystem
+}
+
+// STORAGE BUCKETS
 
 export async function storage_StorageBucketCreated({ event, store }: EventContext & StoreContext): Promise<void> {
   const [
@@ -196,6 +205,8 @@ export async function storage_StorageBucketCreated({ event, store }: EventContex
     acceptingNewBags: acceptingNewBags.isTrue,
     dataObjectCountLimit: new BN(dataObjectCountLimit.toString()),
     dataObjectsSizeLimit: new BN(dataObjectSizeLimit.toString()),
+    dataObjectsCount: new BN(0),
+    dataObjectsSize: new BN(0),
   })
   if (invitedWorkerId.isSome) {
     const operatorStatus = new StorageBucketOperatorStatusInvited()
@@ -297,10 +308,33 @@ export async function storage_StorageBucketsUpdatedForBag({
   await Promise.all(assignmentsToAdd.map((a) => store.save<StorageBagStorageAssignment>(a)))
 }
 
+export async function storage_VoucherChanged({ event, store }: EventContext & StoreContext): Promise<void> {
+  const [bucketId, voucher] = new Storage.VoucherChangedEvent(event).params
+  const bucket = await getById(store, StorageBucket, bucketId.toString())
+
+  bucket.dataObjectCountLimit = voucher.objectsLimit
+  bucket.dataObjectsSizeLimit = voucher.sizeLimit
+  bucket.dataObjectsCount = voucher.objectsUsed
+  bucket.dataObjectsSize = voucher.sizeUsed
+
+  await store.save<StorageBucket>(bucket)
+}
+
+export async function storage_StorageBucketVoucherLimitsSet({
+  event,
+  store,
+}: EventContext & StoreContext): Promise<void> {
+  const [bucketId, sizeLimit, countLimit] = new Storage.StorageBucketVoucherLimitsSetEvent(event).params
+  const bucket = await getById(store, StorageBucket, bucketId.toString())
+  bucket.dataObjectsSizeLimit = sizeLimit
+  bucket.dataObjectCountLimit = countLimit
+
+  await store.save<StorageBucket>(bucket)
+}
+
 export async function storage_StorageBucketDeleted({ event, store }: EventContext & StoreContext): Promise<void> {
   const [bucketId] = new Storage.StorageBucketDeletedEvent(event).params
-  // TODO: Delete or just change status?
-  // TODO: Cascade remove on db level?
+  // TODO: Cascade remove on db level (would require changes in Hydra / committing autogenerated files)
   const assignments = await store.getMany(StorageBagStorageAssignment, {
     where: { storageBucket: { id: bucketId.toString() } },
   })
@@ -320,10 +354,8 @@ export async function storage_DynamicBagCreated({ event, store }: EventContext &
 
 export async function storage_DynamicBagDeleted({ event, store }: EventContext & StoreContext): Promise<void> {
   const [, bagId] = new Storage.DynamicBagDeletedEvent(event).params
-  // TODO: Delete or just change status?
-  // TODO: Cascade remove on db level?
   const storageBag = await getDynamicBag(store, bagId, ['objects'])
-  await Promise.all((storageBag.objects || []).map((o) => store.remove<StorageDataObject>(o)))
+  // The bag should already be empty, so no cascade-remove required
   await store.remove<StorageBag>(storageBag)
 }
 
@@ -332,13 +364,12 @@ export async function storage_DynamicBagDeleted({ event, store }: EventContext &
 // Note: "Uploaded" here actually means "created" (the real upload happens later)
 export async function storage_DataObjectsUploaded({ event, store }: EventContext & StoreContext): Promise<void> {
   const [dataObjectIds, uploadParams] = new Storage.DataObjectsUploadedEvent(event).params
-  const { bagId, authenticationKey, objectCreationList } = uploadParams
+  const { bagId, objectCreationList } = uploadParams
   const storageBag = await getBag(store, bagId)
   const dataObjects = dataObjectIds.map((objectId, i) => {
     const objectParams = new DataObjectCreationParameters(registry, objectCreationList[i].toJSON() as any)
     return new StorageDataObject({
       id: objectId.toString(),
-      authenticationKey: bytesToString(authenticationKey),
       isAccepted: false,
       ipfsHash: bytesToString(objectParams.ipfsContentId),
       size: new BN(objectParams.getField('size').toString()),
@@ -354,8 +385,6 @@ export async function storage_PendingDataObjectsAccepted({ event, store }: Event
   await Promise.all(
     dataObjects.map(async (dataObject) => {
       dataObject.isAccepted = true
-      // TODO: Do we still want other storage providers to accept it? How long should the key be valid?
-      // dataObject.authenticationKey = null as any
       await store.save<StorageDataObject>(dataObject)
     })
   )
@@ -376,24 +405,10 @@ export async function storage_DataObjectsMoved({ event, store }: EventContext &
 export async function storage_DataObjectsDeleted({ event, store }: EventContext & StoreContext): Promise<void> {
   const [, bagId, dataObjectIds] = new Storage.DataObjectsDeletedEvent(event).params
   const dataObjects = await getDataObjectsInBag(store, bagId, dataObjectIds)
-  // TODO: Delete them or just change status?
-  // (may not be so optimal if we expect a large amount of data objects)
   await Promise.all(dataObjects.map((o) => store.remove<StorageDataObject>(o)))
 }
 
-// BLACKLIST
-export async function storage_UpdateBlacklist({ event, store }: EventContext & StoreContext): Promise<void> {
-  const [removedContentIds, addedContentIds] = new Storage.UpdateBlacklistEvent(event).params
-  const storageSystem = await store.get(StorageSystemParameters, {})
-  if (!storageSystem) {
-    throw new Error('StorageSystemParameters entity not found!')
-  }
-  storageSystem.blacklist = storageSystem.blacklist
-    .filter((cid) => !Array.from(removedContentIds).some((id) => id.eq(cid)))
-    .concat(Array.from(addedContentIds).map((id) => id.toString()))
-
-  await store.save<StorageSystemParameters>(storageSystem)
-}
+// DISTRIBUTION FAMILY
 
 export async function storage_DistributionBucketFamilyCreated({
   event,
@@ -408,6 +423,18 @@ export async function storage_DistributionBucketFamilyCreated({
   await store.save<DistributionBucketFamily>(family)
 }
 
+export async function storage_DistributionBucketFamilyMetadataSet({
+  event,
+  store,
+}: EventContext & StoreContext): Promise<void> {
+  const [familyId, metadataBytes] = new Storage.DistributionBucketFamilyMetadataSetEvent(event).params
+
+  const family = await getDistributionBucketFamilyWithMetadata(store, familyId.toString())
+  family.metadata = await processDistributionBucketFamilyMetadata(store, family.metadata, metadataBytes)
+
+  await store.save<DistributionBucketFamily>(family)
+}
+
 export async function storage_DistributionBucketFamilyDeleted({
   event,
   store,
@@ -419,6 +446,8 @@ export async function storage_DistributionBucketFamilyDeleted({
   await store.remove(family)
 }
 
+// DISTRIBUTION BUCKET
+
 export async function storage_DistributionBucketCreated({ event, store }: EventContext & StoreContext): Promise<void> {
   const [familyId, acceptingNewBags, bucketId] = new Storage.DistributionBucketCreatedEvent(event).params
 
@@ -447,8 +476,7 @@ export async function storage_DistributionBucketStatusUpdated({
 
 export async function storage_DistributionBucketDeleted({ event, store }: EventContext & StoreContext): Promise<void> {
   const [, bucketId] = new Storage.DistributionBucketDeletedEvent(event).params
-  // TODO: Delete or just change status?
-  // TODO: Cascade remove on db level?
+  // TODO: Cascade remove on db level (would require changes in Hydra / committing autogenerated files)
   const assignments = await store.getMany(StorageBagDistributionAssignment, {
     where: { distributionBucket: { id: bucketId.toString() } },
   })
@@ -551,7 +579,7 @@ export async function storage_DistributionBucketOperatorRemoved({
 }: EventContext & StoreContext): Promise<void> {
   const [, bucketId, workerId] = new Storage.DistributionBucketOperatorRemovedEvent(event).params
 
-  // TODO: Cascade remove
+  // TODO: Cascade remove on db level (would require changes in Hydra / committing autogenerated files)
 
   const operator = await getDistributionBucketOperatorWithMetadata(store, `${bucketId}-${workerId}`)
   await store.remove<DistributionBucketOperator>(operator)
@@ -566,74 +594,75 @@ export async function storage_DistributionBucketOperatorRemoved({
   }
 }
 
-export async function storage_DistributionBucketFamilyMetadataSet({
-  event,
-  store,
-}: EventContext & StoreContext): Promise<void> {
-  const [familyId, metadataBytes] = new Storage.DistributionBucketFamilyMetadataSetEvent(event).params
+// STORAGE SYSTEM GLOBAL PARAMS
 
-  const family = await getDistributionBucketFamilyWithMetadata(store, familyId.toString())
-  family.metadata = await processDistributionBucketFamilyMetadata(store, family.metadata, metadataBytes)
+export async function storage_UpdateBlacklist({ event, store }: EventContext & StoreContext): Promise<void> {
+  const [removedContentIds, addedContentIds] = new Storage.UpdateBlacklistEvent(event).params
+  const storageSystem = await getStorageSystem(store)
+  storageSystem.blacklist = storageSystem.blacklist
+    .filter((cid) => !Array.from(removedContentIds).some((id) => id.eq(cid)))
+    .concat(Array.from(addedContentIds).map((id) => id.toString()))
 
-  await store.save<DistributionBucketFamily>(family)
+  await store.save<StorageSystemParameters>(storageSystem)
 }
 
 export async function storage_DistributionBucketsPerBagLimitUpdated({
   event,
   store,
 }: EventContext & StoreContext): Promise<void> {
-  // To be implemented
+  const [newLimit] = new Storage.DistributionBucketsPerBagLimitUpdatedEvent(event).params
+  const storageSystem = await getStorageSystem(store)
+
+  storageSystem.distributionBucketsPerBagLimit = newLimit.toNumber()
+
+  await store.save<StorageSystemParameters>(storageSystem)
 }
 
-export async function storage_FamiliesInDynamicBagCreationPolicyUpdated({
+export async function storage_StorageBucketsPerBagLimitUpdated({
   event,
   store,
 }: EventContext & StoreContext): Promise<void> {
-  // To be implemented
+  const [newLimit] = new Storage.StorageBucketsPerBagLimitUpdatedEvent(event).params
+  const storageSystem = await getStorageSystem(store)
+
+  storageSystem.storageBucketsPerBagLimit = newLimit.toNumber()
+
+  await store.save<StorageSystemParameters>(storageSystem)
 }
 
-export async function storage_StorageBucketVoucherLimitsSet({
+export async function storage_StorageBucketsVoucherMaxLimitsUpdated({
   event,
   store,
 }: EventContext & StoreContext): Promise<void> {
-  // To be implemented
+  const [sizeLimit, countLimit] = new Storage.StorageBucketsVoucherMaxLimitsUpdatedEvent(event).params
+  const storageSystem = await getStorageSystem(store)
+
+  storageSystem.storageBucketMaxObjectsSizeLimit = sizeLimit
+  storageSystem.storageBucketMaxObjectsCountLimit = countLimit
+
+  await store.save<StorageSystemParameters>(storageSystem)
 }
 
 export async function storage_UploadingBlockStatusUpdated({
   event,
   store,
 }: EventContext & StoreContext): Promise<void> {
-  // To be implemented
-}
+  const [isBlocked] = new Storage.UploadingBlockStatusUpdatedEvent(event).params
+  const storageSystem = await getStorageSystem(store)
 
-export async function storage_DataObjectPerMegabyteFeeUpdated({
-  event,
-  store,
-}: EventContext & StoreContext): Promise<void> {
-  // To be implemented
-}
+  storageSystem.uploadingBlocked = isBlocked.isTrue
 
-export async function storage_StorageBucketsPerBagLimitUpdated({
-  event,
-  store,
-}: EventContext & StoreContext): Promise<void> {
-  // To be implemented
+  await store.save<StorageSystemParameters>(storageSystem)
 }
 
-export async function storage_StorageBucketsVoucherMaxLimitsUpdated({
+export async function storage_DataObjectPerMegabyteFeeUpdated({
   event,
   store,
 }: EventContext & StoreContext): Promise<void> {
-  // To be implemented
-}
+  const [newFee] = new Storage.DataObjectPerMegabyteFeeUpdatedEvent(event).params
+  const storageSystem = await getStorageSystem(store)
 
-export async function storage_VoucherChanged({ event, store }: EventContext & StoreContext): Promise<void> {
-  // To be implemented
-}
+  storageSystem.dataObjectFeePerMb = newFee
 
-export async function storage_NumberOfStorageBucketsInDynamicBagCreationPolicyUpdated({
-  event,
-  store,
-}: EventContext & StoreContext): Promise<void> {
-  // To be implemented
+  await store.save<StorageSystemParameters>(storageSystem)
 }
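
The canonical bag IDs introduced above (`static:council`, `static:wg:<group>`, `dynamic:channel:<id>`, `dynamic:member:<id>`) are plain strings, so query-node consumers can decode them without runtime types. A hypothetical helper (not part of the commit) mirroring `getStaticBagId` / `getDynamicBagId`:

```ts
type ParsedBagId =
  | { kind: 'static'; scope: 'council' }
  | { kind: 'static'; scope: 'wg'; group: string }
  | { kind: 'dynamic'; scope: 'channel' | 'member'; id: string }

function parseCanonicalBagId(bagId: string): ParsedBagId {
  if (bagId === 'static:council') {
    return { kind: 'static', scope: 'council' }
  }
  const [prefix, scope, rest] = bagId.split(':')
  if (prefix === 'static' && scope === 'wg' && rest !== undefined) {
    return { kind: 'static', scope: 'wg', group: rest }
  }
  if (prefix === 'dynamic' && (scope === 'channel' || scope === 'member') && rest !== undefined) {
    return { kind: 'dynamic', scope, id: rest }
  }
  throw new Error(`Unexpected bag ID format: ${bagId}`)
}
```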

+ 1 - 1
query-node/mappings/package.json

@@ -9,7 +9,7 @@
     "copy-types": "cp ../../types/augment/all/defs.json lib/generated/types/typedefs.json",
     "clean": "rm -rf lib",
     "lint": "eslint . --quiet --ext .ts",
-    "checks": "tsc --noEmit --pretty && prettier ./ --check && yarn lint",
+    "checks": "prettier ./ --check && yarn lint",
     "format": "prettier ./ --write "
   },
   "dependencies": {

+ 21 - 20
query-node/schemas/storage.graphql

@@ -3,12 +3,23 @@ type StorageSystemParameters @entity {
   "Blacklisted content hashes"
   blacklist: [String!]
 
-  # TODO: Consider if parameters like:
-  # dataObjectFeePerMB,
-  # storageBucketBagsLimit,
-  # storageBucketsVoucherMaxLimits,
-  # etc.
-  # are needed here (they can be easily queried from the node)
+  "How many buckets can be assigned to store a bag"
+  storageBucketsPerBagLimit: Int!
+
+  "How many buckets can be assigned to distribute a bag"
+  distributionBucketsPerBagLimit: Int!
+
+  "Whether the uploading is globally blocked"
+  uploadingBlocked: Boolean!
+
+  "Additional fee for storing 1 MB of data"
+  dataObjectFeePerMB: BigInt!
+
+  "Global max. number of objects a storage bucket can store (can also be further limitted the provider)"
+  storageBucketMaxObjectsCountLimit: BigInt!
+
+  "Global max. size of objects a storage bucket can store (can also be further limitted the provider)"
+  storageBucketMaxObjectsSizeLimit: BigInt!
 }
 
 type StorageBucketOperatorStatusMissing @variant {
@@ -77,12 +88,11 @@ type StorageBucket @entity {
   "Bucket's data object count limit"
   dataObjectCountLimit: BigInt!
 
-  # TODO: Are those useful for storage node?
-  # "Currently stored (assigned) data objects size"
-  # storedObjectsSize: BigInt!
+  "Number of assigned data objects"
+  dataObjectsCount: BigInt!
 
-  # "Currently stored (assigned) objects count"
-  # storedObjectsCount: BigInt!
+  "Total size of assigned data objects"
+  dataObjectsSize: BigInt!
 }
 
 type StorageBagOwnerCouncil @variant {
@@ -154,23 +164,14 @@ type StorageDataObject @entity {
   "Whether the data object was uploaded and accepted by the storage provider"
   isAccepted: Boolean!
 
-  # TODO: Is this useful for storage node?
-  # "A reward for the data object deletion"
-  # deletionPrize: BigInt!
-
   "Data object size in bytes"
   size: BigInt!
 
   "Storage bag the data object is part of"
   storageBag: StorageBag!
 
-  # TODO: Use "Bytes" for better optimalization?
   "IPFS content hash"
   ipfsHash: String!
-
-  # TODO: Use "Bytes" for better optimalization?
-  "Public key used to authenticate the uploader by the storage provider"
-  authenticationKey: String
 }
 
 type DistributionBucketFamilyMetadata @entity {
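
Since `StorageBucket` now tracks `dataObjectsCount` / `dataObjectsSize` alongside the limits, a client can check voucher headroom locally instead of querying the runtime. A sketch, assuming the generated model exposes the `BigInt!` fields as `BN` values:

```ts
import BN from 'bn.js'

interface BucketVoucher {
  dataObjectsCount: BN
  dataObjectCountLimit: BN
  dataObjectsSize: BN
  dataObjectsSizeLimit: BN
}

// Sketch: can the bucket accept one more object of `size` bytes?
function hasVoucherHeadroom(bucket: BucketVoucher, size: BN): boolean {
  return (
    bucket.dataObjectsCount.lt(bucket.dataObjectCountLimit) &&
    bucket.dataObjectsSize.add(size).lte(bucket.dataObjectsSizeLimit)
  )
}
```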

+ 22 - 3
runtime-modules/storage/src/lib.rs

@@ -319,6 +319,9 @@ pub trait Trait: frame_system::Trait + balances::Trait + membership::Trait {
     /// Max number of pending invitations per distribution bucket.
     type MaxNumberOfPendingInvitationsPerDistributionBucket: Get<u64>;
 
+    /// Max data object size in bytes.
+    type MaxDataObjectSize: Get<u64>;
+
     /// Demand the storage working group leader authorization.
     /// TODO: Refactor after merging with the Olympia release.
     fn ensure_storage_working_group_leader_origin(origin: Self::Origin) -> DispatchResult;
@@ -467,6 +470,9 @@ pub struct DataObject<Balance> {
 
     /// Object size in bytes.
     pub size: u64,
+
+    /// Content identifier presented as IPFS hash.
+    pub ipfs_content_id: Vec<u8>,
 }
 
 /// Type alias for the BagRecord.
@@ -652,9 +658,6 @@ pub type UploadParameters<T> = UploadParametersRecord<
 #[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
 #[derive(Encode, Decode, Default, Clone, PartialEq, Eq, Debug)]
 pub struct UploadParametersRecord<MemberId, ChannelId, AccountId, Balance> {
-    /// Public key used authentication in upload to liaison.
-    pub authentication_key: Vec<u8>,
-
     /// Static or dynamic bag to upload data.
     pub bag_id: BagIdType<MemberId, ChannelId>,
 
@@ -1427,6 +1430,9 @@ decl_error! {
 
         /// Distribution family bound to a bag creation policy.
         DistributionFamilyBoundToBagCreationPolicy,
+
+        /// Max data object size exceeded.
+        MaxDataObjectSizeExceeded,
     }
 }
 
@@ -1477,6 +1483,9 @@ decl_module! {
         const MaxNumberOfPendingInvitationsPerDistributionBucket: u64 =
             T::MaxNumberOfPendingInvitationsPerDistributionBucket::get();
 
+        /// Exports const - max data object size in bytes.
+        const MaxDataObjectSize: u64 = T::MaxDataObjectSize::get();
+
         // ===== Storage Lead actions =====
 
         /// Delete storage bucket. Must be empty. Storage operator must be missing.
@@ -2900,6 +2909,7 @@ impl<T: Trait> Module<T> {
             accepted: false,
             deletion_prize,
             size: obj.size,
+            ipfs_content_id: obj.ipfs_content_id,
         });
 
         let mut next_data_object_id = Self::next_data_object_id();
@@ -3184,6 +3194,15 @@ impl<T: Trait> Module<T> {
             Error::<T>::NoObjectsOnUpload
         );
 
+        // Check data objects' max size.
+        ensure!(
+            params
+                .object_creation_list
+                .iter()
+                .all(|obj| obj.size <= T::MaxDataObjectSize::get()),
+            Error::<T>::MaxDataObjectSizeExceeded
+        );
+
         let bag = Self::ensure_bag_exists(&params.bag_id)?;
 
         let new_objects_number: u64 = params.object_creation_list.len().saturated_into();

+ 2 - 0
runtime-modules/storage/src/tests/mocks.rs

@@ -65,6 +65,7 @@ parameter_types! {
     pub const DefaultChannelDynamicBagNumberOfStorageBuckets: u64 = 4;
     pub const DistributionBucketsPerBagValueConstraint: crate::DistributionBucketsPerBagValueConstraint =
         crate::StorageBucketsPerBagValueConstraint {min: 3, max_min_diff: 7};
+    pub const MaxDataObjectSize: u64 = 400;
 }
 
 pub const STORAGE_WG_LEADER_ACCOUNT_ID: u64 = 100001;
@@ -101,6 +102,7 @@ impl crate::Trait for Test {
     type DistributionBucketsPerBagValueConstraint = DistributionBucketsPerBagValueConstraint;
     type MaxNumberOfPendingInvitationsPerDistributionBucket =
         MaxNumberOfPendingInvitationsPerDistributionBucket;
+    type MaxDataObjectSize = MaxDataObjectSize;
 
     fn ensure_storage_working_group_leader_origin(origin: Self::Origin) -> DispatchResult {
         let account_id = ensure_signed(origin)?;

+ 29 - 33
runtime-modules/storage/src/tests/mod.rs

@@ -24,7 +24,7 @@ use crate::{
 use mocks::{
     build_test_externalities, Balances, DataObjectDeletionPrize,
     DefaultChannelDynamicBagNumberOfStorageBuckets, DefaultMemberDynamicBagNumberOfStorageBuckets,
-    InitialStorageBucketsNumberForDynamicBag, MaxDistributionBucketFamilyNumber,
+    InitialStorageBucketsNumberForDynamicBag, MaxDataObjectSize, MaxDistributionBucketFamilyNumber,
     MaxDistributionBucketNumberPerFamily, MaxNumberOfDataObjectsPerBag, MaxRandomIterationNumber,
     Storage, Test, ANOTHER_DISTRIBUTION_PROVIDER_ID, ANOTHER_STORAGE_PROVIDER_ID,
     DEFAULT_DISTRIBUTION_PROVIDER_ACCOUNT_ID, DEFAULT_DISTRIBUTION_PROVIDER_ID,
@@ -360,7 +360,6 @@ fn update_storage_buckets_for_bags_succeeded_with_voucher_usage() {
 
         let upload_params = UploadParameters::<Test> {
             bag_id: bag_id.clone(),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: object_creation_list.clone(),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -424,7 +423,6 @@ fn update_storage_buckets_for_bags_fails_with_exceeding_the_voucher_objects_numb
 
         let upload_params = UploadParameters::<Test> {
             bag_id: bag_id.clone(),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: object_creation_list.clone(),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -470,7 +468,6 @@ fn update_storage_buckets_for_bags_fails_with_exceeding_the_voucher_objects_tota
 
         let upload_params = UploadParameters::<Test> {
             bag_id: bag_id.clone(),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: object_creation_list.clone(),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -671,7 +668,6 @@ fn upload_succeeded() {
 
         let upload_params = UploadParameters::<Test> {
             bag_id: BagId::<Test>::Static(StaticBagId::Council),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: create_single_data_object(),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -695,6 +691,9 @@ fn upload_succeeded() {
             Storage::data_object_by_id(&bag_id, &data_object_id),
             DataObject {
                 size: upload_params.object_creation_list[0].size,
+                ipfs_content_id: upload_params.object_creation_list[0]
+                    .ipfs_content_id
+                    .clone(),
                 deletion_prize: DataObjectDeletionPrize::get(),
                 accepted: false,
             }
@@ -717,6 +716,28 @@ fn upload_succeeded() {
     });
 }
 
+#[test]
+fn upload_failed_with_exceeding_the_data_object_max_size() {
+    build_test_externalities().execute_with(|| {
+        let initial_balance = 1000;
+        increase_account_balance(&DEFAULT_MEMBER_ACCOUNT_ID, initial_balance);
+
+        let mut data_object_list = create_single_data_object();
+        data_object_list[0].size = MaxDataObjectSize::get() + 1;
+
+        let upload_params = UploadParameters::<Test> {
+            bag_id: BagId::<Test>::Static(StaticBagId::Council),
+            deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
+            object_creation_list: data_object_list,
+            expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
+        };
+
+        UploadFixture::default()
+            .with_params(upload_params.clone())
+            .call_and_assert(Err(Error::<Test>::MaxDataObjectSizeExceeded.into()));
+    });
+}
+
 #[test]
 fn upload_succeeded_with_data_size_fee() {
     build_test_externalities().execute_with(|| {
@@ -732,7 +753,6 @@ fn upload_succeeded_with_data_size_fee() {
 
         let upload_params = UploadParameters::<Test> {
             bag_id: BagId::<Test>::Static(StaticBagId::Council),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: create_single_data_object(),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -768,7 +788,6 @@ fn upload_succeeded_with_active_storage_bucket_having_voucher() {
 
         let upload_params = UploadParameters::<Test> {
             bag_id,
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: object_creation_list.clone(),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -803,7 +822,6 @@ fn upload_fails_with_active_storage_bucket_with_voucher_object_number_limit_exce
 
         let upload_params = UploadParameters::<Test> {
             bag_id,
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: object_creation_list.clone(),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -838,7 +856,6 @@ fn upload_fails_with_active_storage_bucket_with_voucher_object_size_limit_exceed
 
         let upload_params = UploadParameters::<Test> {
             bag_id,
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: object_creation_list.clone(),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -864,7 +881,6 @@ fn upload_succeeded_with_dynamic_bag() {
 
         let upload_params = UploadParameters::<Test> {
             bag_id: BagId::<Test>::Dynamic(dynamic_bag_id.clone()),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: create_single_data_object(),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -888,6 +904,9 @@ fn upload_succeeded_with_dynamic_bag() {
             Storage::data_object_by_id(&bag_id, &data_object_id),
             DataObject {
                 size: upload_params.object_creation_list[0].size,
+                ipfs_content_id: upload_params.object_creation_list[0]
+                    .ipfs_content_id
+                    .clone(),
                 deletion_prize: DataObjectDeletionPrize::get(),
                 accepted: false,
             }
@@ -902,7 +921,6 @@ fn upload_fails_with_non_existent_dynamic_bag() {
 
         let upload_params = UploadParameters::<Test> {
             bag_id: BagId::<Test>::Dynamic(dynamic_bag_id.clone()),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: create_single_data_object(),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -921,7 +939,6 @@ fn upload_succeeded_with_non_empty_bag() {
 
         let upload_params1 = UploadParameters::<Test> {
             bag_id: BagId::<Test>::Static(StaticBagId::Council),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: create_data_object_candidates(1, 2),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -933,7 +950,6 @@ fn upload_succeeded_with_non_empty_bag() {
 
         let upload_params2 = UploadParameters::<Test> {
             bag_id: BagId::<Test>::Static(StaticBagId::Council),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: create_data_object_candidates(3, 2),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -964,7 +980,6 @@ fn upload_fails_with_zero_object_size() {
     build_test_externalities().execute_with(|| {
         let upload_params = UploadParameters::<Test> {
             bag_id: BagId::<Test>::Static(StaticBagId::Council),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: vec![DataObjectCreationParameters {
                 ipfs_content_id: vec![1],
@@ -984,7 +999,6 @@ fn upload_fails_with_empty_object_cid() {
     build_test_externalities().execute_with(|| {
         let upload_params = UploadParameters::<Test> {
             bag_id: BagId::<Test>::Static(StaticBagId::Council),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: vec![DataObjectCreationParameters {
                 ipfs_content_id: Vec::new(),
@@ -1007,7 +1021,6 @@ fn upload_fails_with_max_data_object_size_exceeded() {
 
         let upload_params = UploadParameters::<Test> {
             bag_id: BagId::<Test>::Static(StaticBagId::Council),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: create_data_object_candidates(1, invalid_object_number),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -1024,7 +1037,6 @@ fn upload_fails_with_insufficient_balance_for_deletion_prize() {
     build_test_externalities().execute_with(|| {
         let upload_params = UploadParameters::<Test> {
             bag_id: BagId::<Test>::Static(StaticBagId::Council),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: create_single_data_object(),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -1043,7 +1055,6 @@ fn upload_fails_with_insufficient_balance_for_data_size_fee() {
 
         let upload_params = UploadParameters::<Test> {
             bag_id: BagId::<Test>::Static(StaticBagId::Council),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: create_single_data_object(),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -1078,7 +1089,6 @@ fn upload_fails_with_data_size_fee_changed() {
 
         let upload_params = UploadParameters::<Test> {
             bag_id: BagId::<Test>::Static(StaticBagId::Council),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: create_single_data_object(),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -1105,7 +1115,6 @@ fn upload_failed_with_blocked_uploading() {
 
         let upload_params = UploadParameters::<Test> {
             bag_id: BagId::<Test>::Static(StaticBagId::Council),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: create_single_data_object(),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -1140,7 +1149,6 @@ fn upload_failed_with_blacklisted_data_object() {
 
         let upload_params = UploadParameters::<Test> {
             bag_id: BagId::<Test>::Static(StaticBagId::Council),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list,
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -1283,7 +1291,6 @@ fn accept_pending_data_objects_succeeded() {
 
         let upload_params = UploadParameters::<Test> {
             bag_id: bag_id.clone(),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: create_single_data_object(),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -1348,7 +1355,6 @@ fn accept_pending_data_objects_fails_with_unrelated_storage_bucket() {
 
         let upload_params = UploadParameters::<Test> {
             bag_id: bag_id.clone(),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: create_single_data_object(),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -1443,7 +1449,6 @@ fn accept_pending_data_objects_succeeded_with_dynamic_bag() {
         let bag_id = BagId::<Test>::Dynamic(dynamic_bag_id.clone());
         let upload_params = UploadParameters::<Test> {
             bag_id: bag_id.clone(),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: create_single_data_object(),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -1773,7 +1778,6 @@ fn move_data_objects_succeeded() {
 
         let upload_params = UploadParameters::<Test> {
             bag_id: src_bag_id.clone(),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: create_single_data_object(),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -1873,7 +1877,6 @@ fn move_data_objects_succeeded_having_voucher() {
 
         let upload_params = UploadParameters::<Test> {
             bag_id: src_bag_id.clone(),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: object_creation_list.clone(),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -1948,7 +1951,6 @@ fn move_data_objects_fails_with_exceeding_voucher_object_number_limit() {
 
         let upload_params = UploadParameters::<Test> {
             bag_id: src_bag_id.clone(),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: object_creation_list.clone(),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -2007,7 +2009,6 @@ fn move_data_objects_fails_with_exceeding_voucher_objects_size_limit() {
 
         let upload_params = UploadParameters::<Test> {
             bag_id: src_bag_id.clone(),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: object_creation_list.clone(),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -2086,7 +2087,6 @@ fn delete_data_objects_succeeded() {
 
         let upload_params = UploadParameters::<Test> {
             bag_id: bag_id.clone(),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: create_single_data_object(),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -2186,7 +2186,6 @@ fn delete_data_objects_fails_with_invalid_treasury_balance() {
         let council_bag_id = BagId::<Test>::Static(StaticBagId::Council);
         let upload_params = UploadParameters::<Test> {
             bag_id: council_bag_id.clone(),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: create_single_data_object(),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -2231,7 +2230,6 @@ fn delete_data_objects_succeeded_with_voucher_usage() {
 
         let upload_params = UploadParameters::<Test> {
             bag_id: bag_id.clone(),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: object_creation_list.clone(),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -2657,7 +2655,6 @@ fn delete_storage_bucket_fails_with_non_empty_bucket() {
 
         let upload_params = UploadParameters::<Test> {
             bag_id: bag_id.clone(),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: object_creation_list.clone(),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),
@@ -2844,7 +2841,6 @@ fn storage_bucket_voucher_changed_event_fired() {
 
         let upload_params = UploadParameters::<Test> {
             bag_id: bag_id.clone(),
-            authentication_key: Vec::new(),
             deletion_prize_source_account_id: DEFAULT_MEMBER_ACCOUNT_ID,
             object_creation_list: object_creation_list.clone(),
             expected_data_size_fee: Storage::data_object_per_mega_byte_fee(),

+ 2 - 0
runtime/src/lib.rs

@@ -694,6 +694,7 @@ parameter_types! {
     pub const DefaultChannelDynamicBagNumberOfStorageBuckets: u64 = 4; //TODO: adjust value
     pub const DistributionBucketsPerBagValueConstraint: storage::DistributionBucketsPerBagValueConstraint =
         storage::DistributionBucketsPerBagValueConstraint {min: 3, max_min_diff: 7}; //TODO: adjust value
+    pub const MaxDataObjectSize: u64 = 10 * 1024 * 1024 * 1024; // 10 GiB
 }
 
 impl storage::Trait for Runtime {
@@ -721,6 +722,7 @@ impl storage::Trait for Runtime {
     type DistributionBucketOperatorId = DistributionBucketOperatorId;
     type MaxNumberOfPendingInvitationsPerDistributionBucket =
         MaxNumberOfPendingInvitationsPerDistributionBucket;
+    type MaxDataObjectSize = MaxDataObjectSize;
 
     fn ensure_storage_working_group_leader_origin(origin: Self::Origin) -> DispatchResult {
         StorageWorkingGroup::ensure_origin_is_active_leader(origin)

+ 1 - 1
storage-node-v2/package.json

@@ -1,6 +1,6 @@
 {
   "name": "storage-node-v2",
-  "description": "Jostream storage subsystem.",
+  "description": "Joystream storage subsystem.",
   "version": "0.1.0",
   "author": "Joystream contributors",
   "bin": {

+ 6 - 0
storage-node-v2/src/api-spec/openapi.yaml

@@ -174,6 +174,12 @@ paths:
             application/json:
               schema:
                 $ref: '#/components/schemas/ErrorResponse'
+        401:
+          description: Unauthorized
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/ErrorResponse'
 
 components:
   securitySchemes:

+ 4 - 1
storage-node-v2/src/commands/server.ts

@@ -45,7 +45,10 @@ export default class Server extends ApiCommandBase {
     try {
       const port = flags.port
       const workerId = flags.worker ?? 0
-      const app = await createApp(api, account, workerId, flags.uploads)
+      const maxFileSize = await api.consts.storage.maxDataObjectSize.toNumber()
+      logger.debug(`Max file size runtime parameter: ${maxFileSize}`)
+
+      const app = await createApp(api, account, workerId, flags.uploads, maxFileSize)
       logger.info(`Listening on http://localhost:${port}`)
       app.listen(port)
     } catch (err) {

+ 22 - 6
storage-node-v2/src/services/webApi/app.ts

@@ -17,13 +17,15 @@ import { httpLogger, errorLogger } from '../../services/logger'
  * @param account - KeyringPair instance
  * @param workerId - storage provider ID (worker ID)
  * @param uploadsDir - directory for the file uploading
+ * @param maxFileSize - max allowed file size
  * @returns Express promise.
  */
 export async function createApp(
   api: ApiPromise,
   account: KeyringPair,
   workerId: number,
-  uploadsDir: string
+  uploadsDir: string,
+  maxFileSize: number
 ): Promise<Express> {
   const spec = path.join(__dirname, './../../api-spec/openapi.yaml')
 
@@ -52,7 +54,16 @@ export async function createApp(
         basePath: path.join(__dirname, './controllers'),
         resolver: OpenApiValidator.resolvers.modulePathResolver,
       },
-      fileUploader: { dest: uploadsDir },
+      fileUploader: {
+        dest: uploadsDir,
+        // Busboy library settings
+        limits: {
+          // For multipart forms, the max number of file fields (Default: Infinity)
+          files: 1,
+          // For multipart forms, the max file size (in bytes) (Default: Infinity)
+          fileSize: maxFileSize,
+        },
+      },
       validateSecurity: {
         handlers: {
           UploadAuth: validateUpload(api, account),
@@ -65,12 +76,17 @@ export async function createApp(
 
   /* eslint-disable @typescript-eslint/no-unused-vars */
   app.use((err: Error, req: express.Request, res: express.Response, next: express.NextFunction) => {
-    // Request validation error handler.
+    // Express error handling recommendation:
+    // https://expressjs.com/en/guide/error-handling.html
+    if (res.headersSent) {
+      return next(err)
+    }
+
+    // Request error handler.
     if (err instanceof HttpError) {
-      res.status(err.status).json({
-        type: 'request_validation',
+      res.status(err.status || 500).json({
+        type: 'request_exception',
         message: err.message,
-        errors: err.errors,
       })
     } else {
       res.status(500).json({
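
For context on the `limits` block above: `express-openapi-validator` forwards `fileUploader` options to multer, and multer (via busboy) aborts uploads exceeding `limits.fileSize` with a `LIMIT_FILE_SIZE` error. A sketch of surfacing that case as an HTTP 413, assuming multer is importable directly:

```ts
import express from 'express'
import multer from 'multer'

// Sketch: translate multer's file-size abort into a clean 413 response.
function fileSizeErrorHandler(
  err: Error,
  req: express.Request,
  res: express.Response,
  next: express.NextFunction
): void {
  if (err instanceof multer.MulterError && err.code === 'LIMIT_FILE_SIZE') {
    res.status(413).json({ type: 'file_too_large', message: err.message })
    return
  }
  next(err) // defer everything else to the generic handler
}
```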

+ 47 - 13
storage-node-v2/src/services/webApi/controllers/publicApi.ts

@@ -11,6 +11,7 @@ import { hashFile } from '../../../services/helpers/hashing'
 import { createNonce, getTokenExpirationTime } from '../../../services/helpers/tokenNonceKeeper'
 import { getFileInfo } from '../../../services/helpers/fileInfo'
 import { parseBagId } from '../../helpers/bagTypes'
+import { BagId } from '@joystream/types/storage'
 import logger from '../../../services/logger'
 import { KeyringPair } from '@polkadot/keyring/types'
 import { ApiPromise } from '@polkadot/api'
@@ -19,6 +20,7 @@ import fs from 'fs'
 import path from 'path'
 import send from 'send'
 import { CLIError } from '@oclif/errors'
+import { hexToString } from '@polkadot/util'
 const fsPromises = fs.promises
 
 /**
@@ -107,10 +109,13 @@ export async function uploadFile(req: express.Request, res: express.Response): P
     const fileObj = getFileObject(req)
     cleanupFileName = fileObj.path
 
-    verifyFileSize(fileObj.size)
+    const api = getApi(res)
     await verifyFileMimeType(fileObj.path)
 
     const hash = await hashFile(fileObj.path)
+    const bagId = parseBagId(api, uploadRequest.bagId)
+
+    const accepted = await verifyDataObjectInfo(api, bagId, uploadRequest.dataObjectId, fileObj.size, hash)
 
     // Prepare new file name
     const newPath = fileObj.path.replace(fileObj.filename, hash)
@@ -119,11 +124,16 @@ export async function uploadFile(req: express.Request, res: express.Response): P
     await fsPromises.rename(fileObj.path, newPath)
     cleanupFileName = newPath
 
-    const api = getApi(res)
-    const bagId = parseBagId(api, uploadRequest.bagId)
-    await acceptPendingDataObjects(api, bagId, getAccount(res), getWorkerId(res), uploadRequest.storageBucketId, [
-      uploadRequest.dataObjectId,
-    ])
+    const workerId = getWorkerId(res)
+    if (!accepted) {
+      await acceptPendingDataObjects(api, bagId, getAccount(res), workerId, uploadRequest.storageBucketId, [
+        uploadRequest.dataObjectId,
+      ])
+    } else {
+      logger.warn(
+        `Received already accepted data object. DataObjectId = ${uploadRequest.dataObjectId} WorkerId = ${workerId}`
+      )
+    }
     res.status(201).json({
       id: hash,
     })
@@ -294,17 +304,41 @@ async function validateTokenRequest(api: ApiPromise, tokenRequest: UploadTokenRe
 }
 
 /**
- * Validates file size. It throws an error when file size exceeds the limit
+ * Validates the runtime info for the data object. It verifies contentID,
+ * file size, and 'accepted' status.
  *
- * @param fileSize - runtime API promise
- * @returns void promise.
+ * @param api - runtime API promise
+ * @param bagId - bag ID
+ * @param dataObjectId - data object ID to validate in runtime
+ * @param fileSize - file size to validate
+ * @param hash - file multihash
+ * @returns promise with the 'data object accepted' flag.
  */
-function verifyFileSize(fileSize: number) {
-  const MAX_FILE_SIZE = 1000000 // TODO: Get this const from the runtime
+async function verifyDataObjectInfo(
+  api: ApiPromise,
+  bagId: BagId,
+  dataObjectId: number,
+  fileSize: number,
+  hash: string
+): Promise<boolean> {
+  const dataObject = await api.query.storage.dataObjectsById(bagId, dataObjectId)
+
+  // Cannot get 'size' as a regular property.
+  const dataObjectSize = dataObject.getField('size')
+
+  if (dataObjectSize?.toNumber() !== fileSize) {
+    throw new WebApiError(`File size doesn't match the data object's size for data object ID = ${dataObjectId}`, 400)
+  }
 
-  if (fileSize > MAX_FILE_SIZE) {
-    throw new WebApiError('Max file size exceeded.', 400)
+  const runtimeHash = hexToString(dataObject.ipfsContentId.toString())
+  if (runtimeHash !== hash) {
+    throw new WebApiError(
+      `File multihash doesn't match the data object's ipfsContentId for data object ID = ${dataObjectId}`,
+      400
+    )
   }
+
+  return dataObject.accepted.valueOf()
 }
 
 /**
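
Why `hexToString` in `verifyDataObjectInfo` above: the runtime's `ipfs_content_id` is `Bytes`, and `toString()` on a polkadot-js `Bytes` value yields `0x`-prefixed hex, so the multihash text has to be recovered before comparing it with the locally computed hash. A tiny sketch (hypothetical value):

```ts
import { hexToString } from '@polkadot/util'

// Hex-encoded bytes -> original text; '0x516d58595a' spells "QmXYZ".
console.log(hexToString('0x516d58595a')) // "QmXYZ"
```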

+ 4 - 0
types/augment-codec/augment-api-consts.ts

@@ -193,6 +193,10 @@ declare module '@polkadot/api/types/consts' {
        * Exports const - "Distribution buckets per bag" value constraint.
        **/
       distributionBucketsPerBagValueConstraint: StorageBucketsPerBagValueConstraint & AugmentedConst<ApiType>;
+      /**
+       * Exports const - max data object size in bytes.
+       **/
+      maxDataObjectSize: u64 & AugmentedConst<ApiType>;
       /**
        * Exports const - max allowed distribution bucket family number.
        **/

+ 4 - 0
types/augment-codec/augment-api-errors.ts

@@ -2315,6 +2315,10 @@ declare module '@polkadot/api/types/errors' {
        * Invalid operation with invites: storage provider was already invited.
        **/
       InvitedStorageProvider: AugmentedError<ApiType>;
+      /**
+       * Max data object size exceeded.
+       **/
+      MaxDataObjectSizeExceeded: AugmentedError<ApiType>;
       /**
        * Max distribution bucket family number limit exceeded.
        **/

+ 1 - 1
types/augment-codec/augment-api-tx.ts

@@ -1365,7 +1365,7 @@ declare module '@polkadot/api/types/submittable' {
       /**
        * Upload new data objects. Development mode.
        **/
-      sudoUploadDataObjects: AugmentedSubmittable<(params: UploadParameters | { authenticationKey?: any; bagId?: any; objectCreationList?: any; deletionPrizeSourceAccountId?: any; expectedDataSizeFee?: any } | string | Uint8Array) => SubmittableExtrinsic<ApiType>, [UploadParameters]>;
+      sudoUploadDataObjects: AugmentedSubmittable<(params: UploadParameters | { bagId?: any; objectCreationList?: any; deletionPrizeSourceAccountId?: any; expectedDataSizeFee?: any } | string | Uint8Array) => SubmittableExtrinsic<ApiType>, [UploadParameters]>;
       /**
        * Add and remove hashes to the current blacklist.
        **/

+ 2 - 2
types/augment/all/defs.json

@@ -564,7 +564,6 @@
         }
     },
     "UploadParameters": {
-        "authenticationKey": "Bytes",
         "bagId": "BagId",
         "objectCreationList": "Vec<DataObjectCreationParameters>",
         "deletionPrizeSourceAccountId": "GenericAccountId",
@@ -584,7 +583,8 @@
     "DataObject": {
         "accepted": "bool",
         "deletion_prize": "u128",
-        "size": "u64"
+        "size": "u64",
+        "ipfsContentId": "Bytes"
     },
     "DistributionBucketId": "u64",
     "DistributionBucketFamilyId": "u64",

+ 1 - 1
types/augment/all/types.ts

@@ -366,6 +366,7 @@ export interface DAOId extends u64 {}
 export interface DataObject extends Struct {
   readonly accepted: bool;
   readonly deletion_prize: u128;
+  readonly ipfsContentId: Bytes;
 }
 
 /** @name DataObjectCreationParameters */
@@ -1302,7 +1303,6 @@ export interface UpdatePropertyValuesOperation extends Null {}
 
 /** @name UploadParameters */
 export interface UploadParameters extends Struct {
-  readonly authenticationKey: Bytes;
   readonly bagId: BagId;
   readonly objectCreationList: Vec<DataObjectCreationParameters>;
   readonly deletionPrizeSourceAccountId: GenericAccountId;

+ 4 - 0
types/augment/augment-api-consts.ts

@@ -193,6 +193,10 @@ declare module '@polkadot/api/types/consts' {
        * Exports const - "Distribution buckets per bag" value constraint.
        **/
       distributionBucketsPerBagValueConstraint: StorageBucketsPerBagValueConstraint & AugmentedConst<ApiType>;
+      /**
+       * Exports const - max data object size in bytes.
+       **/
+      maxDataObjectSize: u64 & AugmentedConst<ApiType>;
       /**
        * Exports const - max allowed distribution bucket family number.
        **/

+ 4 - 0
types/augment/augment-api-errors.ts

@@ -2315,6 +2315,10 @@ declare module '@polkadot/api/types/errors' {
        * Invalid operation with invites: storage provider was already invited.
        **/
       InvitedStorageProvider: AugmentedError<ApiType>;
+      /**
+       * Max data object size exceeded.
+       **/
+      MaxDataObjectSizeExceeded: AugmentedError<ApiType>;
       /**
        * Max distribution bucket family number limit exceeded.
        **/

+ 1 - 1
types/augment/augment-api-tx.ts

@@ -1365,7 +1365,7 @@ declare module '@polkadot/api/types/submittable' {
       /**
        * Upload new data objects. Development mode.
        **/
-      sudoUploadDataObjects: AugmentedSubmittable<(params: UploadParameters | { authenticationKey?: any; bagId?: any; objectCreationList?: any; deletionPrizeSourceAccountId?: any; expectedDataSizeFee?: any } | string | Uint8Array) => SubmittableExtrinsic<ApiType>, [UploadParameters]>;
+      sudoUploadDataObjects: AugmentedSubmittable<(params: UploadParameters | { bagId?: any; objectCreationList?: any; deletionPrizeSourceAccountId?: any; expectedDataSizeFee?: any } | string | Uint8Array) => SubmittableExtrinsic<ApiType>, [UploadParameters]>;
       /**
        * Add and remove hashes to the current blacklist.
        **/

+ 2 - 2
types/src/storage.ts

@@ -34,6 +34,7 @@ export type IDataObject = {
   accepted: bool
   deletion_prize: BalanceOf
   size: u64
+  ipfsContentId: Bytes
 }
 
 export class DataObject
@@ -41,6 +42,7 @@ export class DataObject
     accepted: bool,
     deletion_prize: BalanceOf,
     size: u64,
+    ipfsContentId: Bytes,
   })
   implements IDataObject {}
 
@@ -178,7 +180,6 @@ export class DataObjectCreationParameters
   implements IDataObjectCreationParameters {}
 
 export type IUploadParameters = {
-  authenticationKey: Bytes
   bagId: BagId
   objectCreationList: Vec<DataObjectCreationParameters>
   deletionPrizeSourceAccountId: AccountId
@@ -187,7 +188,6 @@ export type IUploadParameters = {
 
 export class UploadParameters
   extends JoyStructDecorated({
-    authenticationKey: Bytes,
     bagId: BagId,
     objectCreationList: Vec.with(DataObjectCreationParameters),
     deletionPrizeSourceAccountId: AccountId,

+ 49 - 16
yarn.lock

@@ -4468,7 +4468,7 @@
   resolved "https://registry.yarnpkg.com/@types/async-lock/-/async-lock-1.1.2.tgz#cbc26a34b11b83b28f7783a843c393b443ef8bef"
   integrity sha512-j9n4bb6RhgFIydBe0+kpjnBPYumDaDyU8zvbWykyVMkku+c2CSu31MZkLeaBfqIwU+XCxlDpYDfyMQRkM0AkeQ==
 
-"@types/babel__core@^7.0.0", "@types/babel__core@^7.1.0", "@types/babel__core@^7.1.7":
+"@types/babel__core@^7.0.0", "@types/babel__core@^7.1.7":
   version "7.1.14"
   resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.1.14.tgz#faaeefc4185ec71c389f4501ee5ec84b170cc402"
   integrity sha512-zGZJzzBUVDo/eV6KgbE0f0ZI7dInEYvo12Rb70uNQDshC3SkRMb67ja0GgRHZgAX3Za6rhaWlvbDO8rrGyAb1g==
@@ -4479,6 +4479,17 @@
     "@types/babel__template" "*"
     "@types/babel__traverse" "*"
 
+"@types/babel__core@^7.1.0":
+  version "7.1.16"
+  resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.1.16.tgz#bc12c74b7d65e82d29876b5d0baf5c625ac58702"
+  integrity sha512-EAEHtisTMM+KaKwfWdC3oyllIqswlznXCIVCt7/oRNrh+DhgT4UEBNC/jlADNjvw7UnfbcdkGQcPVZ1xYiLcrQ==
+  dependencies:
+    "@babel/parser" "^7.1.0"
+    "@babel/types" "^7.0.0"
+    "@types/babel__generator" "*"
+    "@types/babel__template" "*"
+    "@types/babel__traverse" "*"
+
 "@types/babel__generator@*":
   version "7.6.2"
   resolved "https://registry.yarnpkg.com/@types/babel__generator/-/babel__generator-7.6.2.tgz#f3d71178e187858f7c45e30380f8f1b7415a12d8"
@@ -4509,9 +4520,9 @@
     base64url "*"
 
 "@types/bluebird@^3.5.20":
-  version "3.5.35"
-  resolved "https://registry.yarnpkg.com/@types/bluebird/-/bluebird-3.5.35.tgz#3964c48372bf62d60616d8673dd77a9719ebac9b"
-  integrity sha512-2WeeXK7BuQo7yPI4WGOBum90SzF/f8rqlvpaXx4rjeTmNssGRDHWf7fgDUH90xMB3sUOu716fUK5d+OVx0+ncQ==
+  version "3.5.36"
+  resolved "https://registry.yarnpkg.com/@types/bluebird/-/bluebird-3.5.36.tgz#00d9301d4dc35c2f6465a8aec634bb533674c652"
+  integrity sha512-HBNx4lhkxN7bx6P0++W8E289foSu8kO8GCk2unhuVggO+cE7rh9DhZUyPhUxNRG9m+5B5BTKxZQ5ZP92x/mx9Q==
 
 "@types/bn.js@^4.11.5", "@types/bn.js@^4.11.6":
   version "4.11.6"
@@ -4898,9 +4909,9 @@
     pretty-format "^26.0.0"
 
 "@types/js-yaml@^3.11.1":
-  version "3.12.6"
-  resolved "https://registry.yarnpkg.com/@types/js-yaml/-/js-yaml-3.12.6.tgz#7f10c926aa41e189a2755c4c7fcf8e4573bd7ac1"
-  integrity sha512-cK4XqrLvP17X6c0C8n4iTbT59EixqyXL3Fk8/Rsk4dF3oX4dg70gYUXrXVUUHpnsGMPNlTQMqf+TVmNPX6FmSQ==
+  version "3.12.7"
+  resolved "https://registry.yarnpkg.com/@types/js-yaml/-/js-yaml-3.12.7.tgz#330c5d97a3500e9c903210d6e49f02964af04a0e"
+  integrity sha512-S6+8JAYTE1qdsc9HMVsfY7+SgSuUU/Tp6TYTmITW0PZxiyIMvol3Gy//y69Wkhs0ti4py5qgR3uZH6uz/DNzJQ==
 
 "@types/json-schema@^7.0.3", "@types/json-schema@^7.0.4", "@types/json-schema@^7.0.5", "@types/json-schema@^7.0.6":
   version "7.0.7"
@@ -5375,7 +5386,7 @@
 "@types/stack-utils@^1.0.1":
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-1.0.1.tgz#0a851d3bd96498fa25c33ab7278ed3bd65f06c3e"
-  integrity "sha1-CoUdO9lkmPolwzq3J47TvWXwbD4= sha512-l42BggppR6zLmpfU6fq9HEa2oGPEI8yrSPL3GITjfRInppYFahObbIQOQK3UGxEnyQpltZLaPe75046NOZQikw=="
+  integrity sha512-l42BggppR6zLmpfU6fq9HEa2oGPEI8yrSPL3GITjfRInppYFahObbIQOQK3UGxEnyQpltZLaPe75046NOZQikw==
 
 "@types/stack-utils@^2.0.0":
   version "2.0.0"
@@ -6674,7 +6685,7 @@ anymatch@^2.0.0:
     micromatch "^3.1.4"
     normalize-path "^2.1.1"
 
-anymatch@^3.0.3, anymatch@~3.1.1:
+anymatch@^3.0.3, anymatch@~3.1.1, anymatch@~3.1.2:
   version "3.1.2"
   resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716"
   integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==
@@ -7378,9 +7389,9 @@ aws-credstash@^3.0.0:
     debug "^4.3.1"
 
 aws-sdk@^2.567.0:
-  version "2.894.0"
-  resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.894.0.tgz#acf740256ee051ea2c075549f7a10929c324d70a"
-  integrity sha512-qzmxkZ1JOQ/sQPIlxE+aOpJ9OlPq640ab1Ot8p4VIuatkGSEvQvk8nWqAeLyrRwmNQuacGM7xWTI0k9pGhbDXA==
+  version "2.984.0"
+  resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.984.0.tgz#fee0e73d63826a413cc7053c5daeb518d3261561"
+  integrity sha512-wFwNKhlO03V7UnpIge2qT/gYOMvGUlmVuFgF2gQRIkt6lWYvnf8/QDTCKZLhGBpC8/mml10m0CM3khMNwU1KVQ==
   dependencies:
     buffer "4.9.2"
     events "1.1.1"
@@ -8989,7 +9000,7 @@ check-type@^0.4.11:
   dependencies:
     underscore "1.6.0"
 
-chokidar@3.5.1, chokidar@^3.4.0, chokidar@^3.4.1, chokidar@^3.5.1:
+chokidar@3.5.1, chokidar@^3.4.0, chokidar@^3.4.1:
   version "3.5.1"
   resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.1.tgz#ee9ce7bbebd2b79f49f304799d5468e31e14e68a"
   integrity sha512-9+s+Od+W0VJJzawDma/gvBNQqkTiqYTWLuZoyAsivsI4AaWTCzHG06/TMjsf1cYe9Cb97UCEhjz7HvnPk2p/tw==
@@ -9039,6 +9050,21 @@ chokidar@^2.0.3, chokidar@^2.0.4, chokidar@^2.1.8:
   optionalDependencies:
     fsevents "^1.2.7"
 
+chokidar@^3.5.1:
+  version "3.5.2"
+  resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.2.tgz#dba3976fcadb016f66fd365021d91600d01c1e75"
+  integrity sha512-ekGhOnNVPgT77r4K/U3GDhu+FQ2S8TnK/s2KbIGXi0SZWuwkZ2QNyfWdZW+TVfn84DpEP7rLeCt2UI6bJ8GwbQ==
+  dependencies:
+    anymatch "~3.1.2"
+    braces "~3.0.2"
+    glob-parent "~5.1.2"
+    is-binary-path "~2.1.0"
+    is-glob "~4.0.1"
+    normalize-path "~3.0.0"
+    readdirp "~3.6.0"
+  optionalDependencies:
+    fsevents "~2.3.2"
+
 chownr@^1.0.1, chownr@^1.1.1, chownr@^1.1.2:
   version "1.1.4"
   resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b"
@@ -12157,7 +12183,7 @@ escape-string-regexp@^2.0.0:
 escodegen@^1.9.1:
   version "1.14.3"
   resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.14.3.tgz#4e7b81fba61581dc97582ed78cab7f0e8d63f503"
-  integrity "sha1-TnuB+6YVgdyXWC7XjKt/Do1j9QM= sha512-qFcX0XJkdg+PB3xjZZG/wKSuT1PnQWx57+TVSjIMmILd2yC/6ByYElPwJnslDsuWuSAp4AwJGumarAAmJch5Kw=="
+  integrity sha512-qFcX0XJkdg+PB3xjZZG/wKSuT1PnQWx57+TVSjIMmILd2yC/6ByYElPwJnslDsuWuSAp4AwJGumarAAmJch5Kw==
   dependencies:
     esprima "^4.0.1"
     estraverse "^4.2.0"
@@ -13802,7 +13828,7 @@ fsevents@^1.0.0, fsevents@^1.2.7:
     bindings "^1.5.0"
     nan "^2.12.1"
 
-fsevents@^2.1.2, fsevents@~2.3.1:
+fsevents@^2.1.2, fsevents@~2.3.1, fsevents@~2.3.2:
   version "2.3.2"
   resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a"
   integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==
@@ -14131,7 +14157,7 @@ glob-parent@^3.1.0:
     is-glob "^3.1.0"
     path-dirname "^1.0.0"
 
-glob-parent@^5.0.0, glob-parent@^5.1.0, glob-parent@^5.1.1, glob-parent@~5.1.0:
+glob-parent@^5.0.0, glob-parent@^5.1.0, glob-parent@^5.1.1, glob-parent@~5.1.0, glob-parent@~5.1.2:
   version "5.1.2"
   resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4"
   integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==
@@ -24374,6 +24400,13 @@ readdirp@~3.5.0:
   dependencies:
     picomatch "^2.2.1"
 
+readdirp@~3.6.0:
+  version "3.6.0"
+  resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7"
+  integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==
+  dependencies:
+    picomatch "^2.2.1"
+
 realpath-native@^1.1.0:
   version "1.1.0"
   resolved "https://registry.yarnpkg.com/realpath-native/-/realpath-native-1.1.0.tgz#2003294fea23fb0672f2476ebe22fcf498a2d65c"