
storage-node-v2: Improve auth schema.

- update types: add ipfsContentId to DataObject and remove authenticationKey
- add check for data object size, accepted status and multihash
Shamil Gadelshin 3 years ago
commit
afc025867d

+ 1 - 1
storage-node-v2/package.json

@@ -1,6 +1,6 @@
 {
   "name": "storage-node-v2",
-  "description": "Jostream storage subsystem.",
+  "description": "Joystream storage subsystem.",
   "version": "0.1.0",
   "author": "Joystream contributors",
   "bin": {

+ 49 - 7
storage-node-v2/src/services/webApi/controllers/publicApi.ts

@@ -11,6 +11,7 @@ import { hashFile } from '../../../services/helpers/hashing'
 import { createNonce, getTokenExpirationTime } from '../../../services/helpers/tokenNonceKeeper'
 import { getFileInfo } from '../../../services/helpers/fileInfo'
 import { parseBagId } from '../../helpers/bagTypes'
+import { BagId } from '@joystream/types/storage'
 import logger from '../../../services/logger'
 import { KeyringPair } from '@polkadot/keyring/types'
 import { ApiPromise } from '@polkadot/api'
@@ -19,6 +20,7 @@ import fs from 'fs'
 import path from 'path'
 import send from 'send'
 import { CLIError } from '@oclif/errors'
+import { hexToString } from '@polkadot/util'
 const fsPromises = fs.promises
 
 /**
@@ -107,10 +109,14 @@ export async function uploadFile(req: express.Request, res: express.Response): P
     const fileObj = getFileObject(req)
     cleanupFileName = fileObj.path
 
-    verifyFileSize(fileObj.size)
+    const api = getApi(res)
+    await verifyFileSize(api, fileObj.size)
     await verifyFileMimeType(fileObj.path)
 
     const hash = await hashFile(fileObj.path)
+    const bagId = parseBagId(api, uploadRequest.bagId)
+
+    await verifyDataObjectInfo(api, bagId, uploadRequest.dataObjectId, fileObj.size, hash)
 
     // Prepare new file name
     const newPath = fileObj.path.replace(fileObj.filename, hash)
@@ -119,8 +125,6 @@ export async function uploadFile(req: express.Request, res: express.Response): P
     await fsPromises.rename(fileObj.path, newPath)
     cleanupFileName = newPath
 
-    const api = getApi(res)
-    const bagId = parseBagId(api, uploadRequest.bagId)
     await acceptPendingDataObjects(api, bagId, getAccount(res), getWorkerId(res), uploadRequest.storageBucketId, [
       uploadRequest.dataObjectId,
     ])
@@ -296,17 +300,55 @@ async function validateTokenRequest(api: ApiPromise, tokenRequest: UploadTokenRe
 /**
  * Validates file size. It throws an error when file size exceeds the limit
  *
- * @param fileSize - runtime API promise
+ * @param api - runtime API promise
+ * @param fileSize - file size to validate
  * @returns void promise.
  */
-function verifyFileSize(fileSize: number) {
-  const MAX_FILE_SIZE = 1000000 // TODO: Get this const from the runtime
+async function verifyFileSize(api: ApiPromise, fileSize: number) {
+  const maxRuntimeFileSize = await api.consts.storage.maxDataObjectSize.toNumber()
 
-  if (fileSize > MAX_FILE_SIZE) {
+  if (fileSize > maxRuntimeFileSize) {
     throw new WebApiError('Max file size exceeded.', 400)
   }
 }
 
+/**
+ * Validates the runtime info for the data object. It verifies contentID,
+ * file size, and 'accepted' status.
+ *
+ * @param api - runtime API promise
+ * @param bagId - bag ID
+ * @param dataObjectId - data object ID to validate in runtime
+ * @param fileSize - file size to validate
+ * @param hash - file multihash
+ * @returns void promise.
+ */
+async function verifyDataObjectInfo(
+  api: ApiPromise,
+  bagId: BagId,
+  dataObjectId: number,
+  fileSize: number,
+  hash: string
+) {
+  const dataObject = await api.query.storage.dataObjectsById(bagId, dataObjectId)
+
+  if (dataObject.accepted.valueOf()) {
+    throw new WebApiError(`Data object had been already accepted ID = ${dataObjectId}`, 400)
+  }
+
+  if (dataObject.get('size').toNumber() !== fileSize) {
+    throw new WebApiError(`File size doesn't match the data object's size for data object ID = ${dataObjectId}`, 400)
+  }
+
+  const runtimeHash = hexToString(dataObject.ipfsContentId.toString())
+  if (runtimeHash !== hash) {
+    throw new WebApiError(
+      `File multihash doesn't match the data object's ipfsContentId for data object ID = ${dataObjectId}`,
+      400
+    )
+  }
+}
+
 /**
  * Tries to remove file on error. It silences possible IO error and logs it.
  *
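For reference, the checks this commit moves into verifyFileSize and verifyDataObjectInfo can be reproduced client-side. A minimal sketch against a connected ApiPromise, assuming registered Joystream types; checkUploadPreconditions and its arguments are illustrative, not part of the commit:

import { ApiPromise } from '@polkadot/api'
import { hexToString } from '@polkadot/util'
import { BagId } from '@joystream/types/storage'

// Illustrative helper mirroring the server-side validation added here.
async function checkUploadPreconditions(
  api: ApiPromise,
  bagId: BagId,
  dataObjectId: number,
  fileSize: number,
  hash: string
): Promise<void> {
  // File size is now capped by a runtime constant instead of a hardcoded limit.
  const maxSize = api.consts.storage.maxDataObjectSize.toNumber()
  if (fileSize > maxSize) {
    throw new Error('Max file size exceeded.')
  }

  const dataObject: any = await api.query.storage.dataObjectsById(bagId, dataObjectId)

  // The object must still be pending acceptance...
  if (dataObject.accepted.valueOf()) {
    throw new Error(`Data object ${dataObjectId} was already accepted.`)
  }
  // ...its registered size must match the uploaded file...
  if (dataObject.get('size').toNumber() !== fileSize) {
    throw new Error(`File size mismatch for data object ${dataObjectId}.`)
  }
  // ...and its ipfsContentId must match the computed multihash.
  if (hexToString(dataObject.ipfsContentId.toString()) !== hash) {
    throw new Error(`Multihash mismatch for data object ${dataObjectId}.`)
  }
}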

+ 4 - 0
types/augment-codec/augment-api-consts.ts

@@ -193,6 +193,10 @@ declare module '@polkadot/api/types/consts' {
        * Exports const - "Distribution buckets per bag" value constraint.
        **/
       distributionBucketsPerBagValueConstraint: StorageBucketsPerBagValueConstraint & AugmentedConst<ApiType>;
+      /**
+       * Exports const - max data object size in bytes.
+       **/
+      maxDataObjectSize: u64 & AugmentedConst<ApiType>;
       /**
        * Exports const - max allowed distribution bucket family number.
        **/
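With the augmented types above, the new constant is available directly on a typed API instance. A minimal sketch, assuming a local node endpoint and the usual Joystream type registration (both placeholders):

import { ApiPromise, WsProvider } from '@polkadot/api'

async function printMaxDataObjectSize(): Promise<void> {
  // Assumes Joystream types are registered via the 'types' option as usual.
  const api = await ApiPromise.create({ provider: new WsProvider('ws://localhost:9944') })
  // maxDataObjectSize is exposed as u64 & AugmentedConst<ApiType>.
  console.log(`maxDataObjectSize: ${api.consts.storage.maxDataObjectSize.toNumber()} bytes`)
  await api.disconnect()
}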

+ 4 - 0
types/augment-codec/augment-api-errors.ts

@@ -2315,6 +2315,10 @@ declare module '@polkadot/api/types/errors' {
        * Invalid operation with invites: storage provider was already invited.
        **/
       InvitedStorageProvider: AugmentedError<ApiType>;
+      /**
+       * Max data object size exceeded.
+       **/
+      MaxDataObjectSizeExceeded: AugmentedError<ApiType>;
       /**
        * Max distribution bucket family number limit exceeded.
        **/
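A failed extrinsic reports this error through the standard DispatchError path. A sketch of detecting it with stock polkadot-js helpers (the decoding pattern is generic, not specific to this commit):

import { ApiPromise } from '@polkadot/api'
import type { DispatchError } from '@polkadot/types/interfaces'

// True when a DispatchError decodes to storage.MaxDataObjectSizeExceeded.
function isMaxDataObjectSizeExceeded(api: ApiPromise, error: DispatchError): boolean {
  if (!error.isModule) {
    return false
  }
  const { section, name } = api.registry.findMetaError(error.asModule)
  return section === 'storage' && name === 'MaxDataObjectSizeExceeded'
}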

+ 1 - 1
types/augment-codec/augment-api-tx.ts

@@ -1365,7 +1365,7 @@ declare module '@polkadot/api/types/submittable' {
       /**
        * Upload new data objects. Development mode.
        **/
-      sudoUploadDataObjects: AugmentedSubmittable<(params: UploadParameters | { authenticationKey?: any; bagId?: any; objectCreationList?: any; deletionPrizeSourceAccountId?: any; expectedDataSizeFee?: any } | string | Uint8Array) => SubmittableExtrinsic<ApiType>, [UploadParameters]>;
+      sudoUploadDataObjects: AugmentedSubmittable<(params: UploadParameters | { bagId?: any; objectCreationList?: any; deletionPrizeSourceAccountId?: any; expectedDataSizeFee?: any } | string | Uint8Array) => SubmittableExtrinsic<ApiType>, [UploadParameters]>;
       /**
        * Add and remove hashes to the current blacklist.
        **/
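After the change, the object-literal form of the params no longer includes authenticationKey. A sketch of a development-mode upload; the bag, object list, and sudo wrapping are assumptions for illustration (DataObjectCreationParameters fields follow types/src/storage.ts):

import { ApiPromise } from '@polkadot/api'
import { KeyringPair } from '@polkadot/keyring/types'

async function devUpload(api: ApiPromise, sudoPair: KeyringPair): Promise<void> {
  const tx = api.tx.storage.sudoUploadDataObjects({
    bagId: { Static: 'Council' }, // hypothetical static bag
    objectCreationList: [{ size: 1024, ipfsContentId: 'gW8...' }], // placeholder CID
    deletionPrizeSourceAccountId: sudoPair.address,
    expectedDataSizeFee: 0,
  })
  // Development-mode extrinsic; assumed to require root origin.
  await api.tx.sudo.sudo(tx).signAndSend(sudoPair)
}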

+ 2 - 2
types/augment/all/defs.json

@@ -563,7 +563,6 @@
         }
     },
     "UploadParameters": {
-        "authenticationKey": "Bytes",
         "bagId": "BagId",
         "objectCreationList": "Vec<DataObjectCreationParameters>",
         "deletionPrizeSourceAccountId": "GenericAccountId",
@@ -583,7 +582,8 @@
     "DataObject": {
         "accepted": "bool",
         "deletion_prize": "u128",
-        "size": "u64"
+        "size": "u64",
+        "ipfsContentId": "Bytes"
     },
     "DistributionBucketId": "u64",
     "DistributionBucketFamilyId": "u64",

+ 1 - 1
types/augment/all/types.ts

@@ -366,6 +366,7 @@ export interface DAOId extends u64 {}
 export interface DataObject extends Struct {
   readonly accepted: bool;
   readonly deletion_prize: u128;
+  readonly ipfsContentId: Bytes;
 }
 
 /** @name DataObjectCreationParameters */
@@ -1292,7 +1293,6 @@ export interface UpdatePropertyValuesOperation extends Null {}
 
 /** @name UploadParameters */
 export interface UploadParameters extends Struct {
-  readonly authenticationKey: Bytes;
   readonly bagId: BagId;
   readonly objectCreationList: Vec<DataObjectCreationParameters>;
   readonly deletionPrizeSourceAccountId: GenericAccountId;

+ 4 - 0
types/augment/augment-api-consts.ts

@@ -193,6 +193,10 @@ declare module '@polkadot/api/types/consts' {
        * Exports const - "Distribution buckets per bag" value constraint.
        **/
       distributionBucketsPerBagValueConstraint: StorageBucketsPerBagValueConstraint & AugmentedConst<ApiType>;
+      /**
+       * Exports const - max data object size in bytes.
+       **/
+      maxDataObjectSize: u64 & AugmentedConst<ApiType>;
       /**
        * Exports const - max allowed distribution bucket family number.
        **/

+ 4 - 0
types/augment/augment-api-errors.ts

@@ -2315,6 +2315,10 @@ declare module '@polkadot/api/types/errors' {
        * Invalid operation with invites: storage provider was already invited.
        **/
       InvitedStorageProvider: AugmentedError<ApiType>;
+      /**
+       * Max data object size exceeded.
+       **/
+      MaxDataObjectSizeExceeded: AugmentedError<ApiType>;
       /**
        * Max distribution bucket family number limit exceeded.
        **/

+ 1 - 1
types/augment/augment-api-tx.ts

@@ -1365,7 +1365,7 @@ declare module '@polkadot/api/types/submittable' {
       /**
        * Upload new data objects. Development mode.
        **/
-      sudoUploadDataObjects: AugmentedSubmittable<(params: UploadParameters | { authenticationKey?: any; bagId?: any; objectCreationList?: any; deletionPrizeSourceAccountId?: any; expectedDataSizeFee?: any } | string | Uint8Array) => SubmittableExtrinsic<ApiType>, [UploadParameters]>;
+      sudoUploadDataObjects: AugmentedSubmittable<(params: UploadParameters | { bagId?: any; objectCreationList?: any; deletionPrizeSourceAccountId?: any; expectedDataSizeFee?: any } | string | Uint8Array) => SubmittableExtrinsic<ApiType>, [UploadParameters]>;
       /**
        * Add and remove hashes to the current blacklist.
        **/

+ 2 - 2
types/src/storage.ts

@@ -23,6 +23,7 @@ export type IDataObject = {
   accepted: bool
   deletion_prize: BalanceOf
   size: u64
+  ipfsContentId: Bytes
 }
 
 export class DataObject
@@ -30,6 +31,7 @@ export class DataObject
     accepted: bool,
     deletion_prize: BalanceOf,
     size: u64,
+    ipfsContentId: Bytes,
   })
   implements IDataObject {}
 
@@ -164,7 +166,6 @@ export class DataObjectCreationParameters
   implements IDataObjectCreationParameters {}
 
 export type IUploadParameters = {
-  authenticationKey: Bytes
   bagId: BagId
   objectCreationList: Vec<DataObjectCreationParameters>
   deletionPrizeSourceAccountId: AccountId
@@ -173,7 +174,6 @@ export type IUploadParameters = {
 
 export class UploadParameters
   extends JoyStructDecorated({
-    authenticationKey: Bytes,
     bagId: BagId,
     objectCreationList: Vec.with(DataObjectCreationParameters),
     deletionPrizeSourceAccountId: AccountId,
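Correspondingly, constructing an UploadParameters value drops the authenticationKey field. A sketch via the registry, assuming registered Joystream types; all values are placeholders:

import { ApiPromise } from '@polkadot/api'

function buildUploadParameters(api: ApiPromise, account: string): unknown {
  // The struct now starts at bagId; authenticationKey is gone.
  return api.createType('UploadParameters', {
    bagId: { Static: 'Council' }, // hypothetical static bag
    objectCreationList: [{ size: 1024, ipfsContentId: 'gW8...' }], // placeholder
    deletionPrizeSourceAccountId: account,
    expectedDataSizeFee: 0,
  })
}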