Parcourir la source

Query node: Initial Storage v2 mappings

Leszek Wiesner il y a 3 ans
Parent
commit
a7d7dad7ad

+ 48 - 2
query-node/manifest.yml

@@ -49,8 +49,54 @@ mappings:
   imports:
     - mappings/lib/generated/types
   eventHandlers:
-    # - event: storage.StorageBucketCreated
-    #   handler: storage_StorageBucketCreated
+    - event: storage.StorageBucketCreated
+      handler: storage_StorageBucketCreated
+    - event: storage.StorageBucketInvitationAccepted
+      handler: storage_StorageBucketInvitationAccepted
+    - event: storage.StorageBucketsUpdatedForBag
+      handler: storage_StorageBucketsUpdatedForBag
+    - event: storage.DataObjectdUploaded
+      handler: storage_DataObjectdUploaded
+    - event: storage.StorageOperatorMetadataSet
+      handler: storage_StorageOperatorMetadataSet
+    - event: storage.StorageBucketVoucherLimitsSet
+      handler: storage_StorageBucketVoucherLimitsSet
+    - event: storage.PendingDataObjectsAccepted
+      handler: storage_PendingDataObjectsAccepted
+    - event: storage.StorageBucketInvitationCancelled
+      handler: storage_StorageBucketInvitationCancelled
+    - event: storage.StorageBucketOperatorInvited
+      handler: storage_StorageBucketOperatorInvited
+    - event: storage.StorageBucketOperatorRemoved
+      handler: storage_StorageBucketOperatorRemoved
+    - event: storage.UploadingBlockStatusUpdated
+      handler: storage_UploadingBlockStatusUpdated
+    - event: storage.DataObjectPerMegabyteFeeUpdated
+      handler: storage_DataObjectPerMegabyteFeeUpdated
+    - event: storage.StorageBucketsPerBagLimitUpdated
+      handler: storage_StorageBucketsPerBagLimitUpdated
+    - event: storage.StorageBucketsVoucherMaxLimitsUpdated
+      handler: storage_StorageBucketsVoucherMaxLimitsUpdated
+    - event: storage.DataObjectsMoved
+      handler: storage_DataObjectsMoved
+    - event: storage.DataObjectsDeleted
+      handler: storage_DataObjectsDeleted
+    - event: storage.StorageBucketStatusUpdated
+      handler: storage_StorageBucketStatusUpdated
+    - event: storage.UpdateBlacklist
+      handler: storage_UpdateBlacklist
+    - event: storage.DynamicBagDeleted
+      handler: storage_DynamicBagDeleted
+    - event: storage.DynamicBagCreated
+      handler: storage_DynamicBagCreated
+    - event: storage.DeletionPrizeChanged
+      handler: storage_DeletionPrizeChanged
+    - event: storage.VoucherChanged
+      handler: storage_VoucherChanged
+    - event: storage.StorageBucketDeleted
+      handler: storage_StorageBucketDeleted
+    - event: storage.NumberOfStorageBucketsInDynamicBagCreationPolicyUpdated
+      handler: storage_NumberOfStorageBucketsInDynamicBagCreationPolicyUpdated
   extrinsicHandlers:
     # infer defaults here
     #- extrinsic: Balances.Transfer

+ 37 - 0
query-node/mappings/common.ts

@@ -0,0 +1,37 @@
+import { DatabaseManager } from '@joystream/hydra-common'
+import { BaseModel } from '@joystream/warthog'
+import { WorkingGroup } from '@joystream/types/augment/all'
+
+type EntityClass<T extends BaseModel> = {
+  new (): T
+  name: string
+}
+
+export async function getById<T extends BaseModel>(
+  store: DatabaseManager,
+  entityClass: EntityClass<T>,
+  id: string,
+  relations?: Exclude<
+    keyof T & string,
+    { [K in keyof T]: T[K] extends BaseModel | undefined ? '' : T[K] extends BaseModel[] | undefined ? '' : K }[keyof T]
+  >[]
+): Promise<T> {
+  const result = await store.get(entityClass, { where: { id }, relations })
+  if (!result) {
+    throw new Error(`Expected ${entityClass.name} not found by ID: ${id}`)
+  }
+
+  return result
+}
+
+export type WorkingGroupModuleName = 'storageWorkingGroup' | 'contentDirectoryWorkingGroup'
+
+export function getWorkingGroupModuleName(group: WorkingGroup): WorkingGroupModuleName {
+  if (group.isContent) {
+    return 'contentDirectoryWorkingGroup'
+  } else if (group.isStorage) {
+    return 'storageWorkingGroup'
+  }
+
+  throw new Error(`Unsupported working group encountered: ${group.type}`)
+}

+ 3 - 0
query-node/mappings/genesis-data/index.ts

@@ -0,0 +1,3 @@
+import storageSystem from './storageSystem.json'
+
+export { storageSystem }

+ 1 - 0
query-node/mappings/genesis-data/storageSystem.json

@@ -0,0 +1 @@
+{ "blacklist": [] }

+ 12 - 0
query-node/mappings/genesis.ts

@@ -0,0 +1,12 @@
+import { StoreContext } from '@joystream/hydra-common'
+import { StorageSystemParameters } from 'query-node/dist/model'
+import { storageSystem } from './genesis-data'
+
+export async function loadGenesisData({ store }: StoreContext): Promise<void> {
+  // Storage system
+  await store.save<StorageSystemParameters>(
+    new StorageSystemParameters({
+      ...storageSystem,
+    })
+  )
+}

+ 1 - 0
query-node/mappings/index.ts

@@ -1 +1,2 @@
+export * from './genesis'
 export * from './storage'

+ 257 - 17
query-node/mappings/storage.ts

@@ -1,92 +1,332 @@
 /*
 eslint-disable @typescript-eslint/naming-convention
 */
-import { EventContext, StoreContext } from '@joystream/hydra-common'
+import { DatabaseManager, EventContext, StoreContext } from '@joystream/hydra-common'
+import { Storage } from './generated/types/storage'
+import {
+  StorageBag,
+  StorageBagOwner,
+  StorageBagOwnerChannel,
+  StorageBagOwnerCouncil,
+  StorageBagOwnerMember,
+  StorageBagOwnerWorkingGroup,
+  StorageBucket,
+  StorageBucketOperatorStatusActive,
+  StorageBucketOperatorStatusInvited,
+  StorageBucketOperatorStatusMissing,
+  StorageDataObject,
+  StorageSystemParameters,
+} from 'query-node/dist/model'
+import BN from 'bn.js'
+import { getById, getWorkingGroupModuleName } from './common'
+import { BTreeSet } from '@polkadot/types'
+import { DataObjectCreationParameters } from '@joystream/types/storage'
+import { registry } from '@joystream/types'
+import { In } from 'typeorm'
+import _ from 'lodash'
+import { DataObjectId, BagId, DynamicBagId, StaticBagId } from '@joystream/types/augment/all'
+
+async function getDataObjectsInBag(
+  store: DatabaseManager,
+  bagId: BagId,
+  dataObjectIds: BTreeSet<DataObjectId>
+): Promise<StorageDataObject[]> {
+  const dataObjects = await store.getMany(StorageDataObject, {
+    where: {
+      id: In(Array.from(dataObjectIds).map((id) => id.toString())),
+      storageBag: { id: getBagId(bagId) },
+    },
+  })
+  if (dataObjects.length !== Array.from(dataObjectIds).length) {
+    throw new Error(
+      `Missing data objects: ${_.difference(
+        Array.from(dataObjectIds).map((id) => id.toString()),
+        dataObjects.map((o) => o.id)
+      )} in bag ${getBagId(bagId)}`
+    )
+  }
+  return dataObjects
+}
+
+function getStaticBagOwner(bagId: StaticBagId): typeof StorageBagOwner {
+  if (bagId.isCouncil) {
+    return new StorageBagOwnerCouncil()
+  } else if (bagId.isWorkingGroup) {
+    const owner = new StorageBagOwnerWorkingGroup()
+    owner.workingGroupId = getWorkingGroupModuleName(bagId.asWorkingGroup)
+    return owner
+  } else {
+    throw new Error(`Unexpected static bag type: ${bagId.type}`)
+  }
+}
+
+// Maps a runtime DynamicBagId to the corresponding owner variant entity.
+// Return type matches getStaticBagOwner for a consistent variant-union contract.
+function getDynamicBagOwner(bagId: DynamicBagId): typeof StorageBagOwner {
+  if (bagId.isChannel) {
+    const owner = new StorageBagOwnerChannel()
+    owner.channelId = bagId.asChannel.toNumber()
+    return owner
+  } else if (bagId.isMember) {
+    const owner = new StorageBagOwnerMember()
+    owner.memberId = bagId.asMember.toNumber()
+    return owner
+  } else {
+    throw new Error(`Unexpected dynamic bag type: ${bagId.type}`)
+  }
+}
+
+function getStaticBagId(bagId: StaticBagId): string {
+  if (bagId.isCouncil) {
+    return `CO`
+  } else if (bagId.isWorkingGroup) {
+    return `WG-${bagId.asWorkingGroup.type}`
+  } else {
+    throw new Error(`Unexpected static bag type: ${bagId.type}`)
+  }
+}
+
+function getDynamicBagId(bagId: DynamicBagId): string {
+  if (bagId.isChannel) {
+    return `CH-${bagId.asChannel.toString()}`
+  } else if (bagId.isMember) {
+    return `M-${bagId.asMember.toString()}`
+  } else {
+    throw new Error(`Unexpected dynamic bag type: ${bagId.type}`)
+  }
+}
+
+function getBagId(bagId: BagId) {
+  return bagId.isStatic ? getStaticBagId(bagId.asStatic) : getDynamicBagId(bagId.asDynamic)
+}
+
+async function getDynamicBag(
+  store: DatabaseManager,
+  bagId: DynamicBagId,
+  relations?: ('storedBy' | 'objects')[]
+): Promise<StorageBag> {
+  return getById(store, StorageBag, getDynamicBagId(bagId), relations)
+}
+
+async function getStaticBag(
+  store: DatabaseManager,
+  bagId: StaticBagId,
+  relations?: ('storedBy' | 'objects')[]
+): Promise<StorageBag> {
+  const id = getStaticBagId(bagId)
+  const bag = await store.get(StorageBag, { where: { id }, relations })
+  if (!bag) {
+    console.log(`Creating new static bag: ${id}`)
+    const newBag = new StorageBag({
+      id,
+      owner: getStaticBagOwner(bagId),
+    })
+    await store.save<StorageBag>(newBag)
+    return newBag
+  }
+  return bag
+}
+
+// Resolves either a static or dynamic bag. `relations` is forwarded to the
+// underlying getters, which both accept 'storedBy' | 'objects' — keep this
+// signature equally wide so callers can eagerly load objects through getBag too.
+async function getBag(
+  store: DatabaseManager,
+  bagId: BagId,
+  relations?: ('storedBy' | 'objects')[]
+): Promise<StorageBag> {
+  return bagId.isStatic
+    ? getStaticBag(store, bagId.asStatic, relations)
+    : getDynamicBag(store, bagId.asDynamic, relations)
+}
 
 // BUCKETS
 
 export async function storage_StorageBucketCreated({ event, store }: EventContext & StoreContext): Promise<void> {
-  // To be implemented
+  const [
+    bucketId,
+    invitedWorkerId,
+    acceptingNewBags,
+    dataObjectSizeLimit,
+    dataObjectCountLimit,
+  ] = new Storage.StorageBucketCreatedEvent(event).params
+
+  const storageBucket = new StorageBucket({
+    id: bucketId.toString(),
+    acceptingNewBags: acceptingNewBags.isTrue,
+    dataObjectCountLimit: new BN(dataObjectCountLimit.toString()),
+    dataObjectsSizeLimit: new BN(dataObjectSizeLimit.toString()),
+  })
+  if (invitedWorkerId.isSome) {
+    const operatorStatus = new StorageBucketOperatorStatusInvited()
+    operatorStatus.workerId = invitedWorkerId.unwrap().toNumber()
+    storageBucket.operatorStatus = operatorStatus
+  } else {
+    storageBucket.operatorStatus = new StorageBucketOperatorStatusMissing()
+  }
+  await store.save<StorageBucket>(storageBucket)
 }
 
 export async function storage_StorageOperatorMetadataSet({ event, store }: EventContext & StoreContext): Promise<void> {
-  // To be implemented
+  const [bucketId, , metadataBytes] = new Storage.StorageOperatorMetadataSetEvent(event).params
+  const storageBucket = await getById(store, StorageBucket, bucketId.toString())
+  storageBucket.operatorMetadata = Buffer.from(metadataBytes.toU8a(true))
+  await store.save<StorageBucket>(storageBucket)
 }
 
 export async function storage_StorageBucketStatusUpdated({ event, store }: EventContext & StoreContext): Promise<void> {
-  // To be implemented
+  const [bucketId, , acceptingNewBags] = new Storage.StorageBucketStatusUpdatedEvent(event).params
+
+  const storageBucket = await getById(store, StorageBucket, bucketId.toString())
+  storageBucket.acceptingNewBags = acceptingNewBags.isTrue
+  await store.save<StorageBucket>(storageBucket)
 }
 
 export async function storage_StorageBucketInvitationAccepted({
   event,
   store,
 }: EventContext & StoreContext): Promise<void> {
-  // To be implemented
+  const [bucketId, workerId] = new Storage.StorageBucketInvitationAcceptedEvent(event).params
+  const storageBucket = await getById(store, StorageBucket, bucketId.toString())
+  const operatorStatus = new StorageBucketOperatorStatusActive()
+  operatorStatus.workerId = workerId.toNumber()
+  storageBucket.operatorStatus = operatorStatus
+  await store.save<StorageBucket>(storageBucket)
 }
 
 export async function storage_StorageBucketInvitationCancelled({
   event,
   store,
 }: EventContext & StoreContext): Promise<void> {
-  // To be implemented
+  const [bucketId] = new Storage.StorageBucketInvitationCancelledEvent(event).params
+  const storageBucket = await getById(store, StorageBucket, bucketId.toString())
+  const operatorStatus = new StorageBucketOperatorStatusMissing()
+  storageBucket.operatorStatus = operatorStatus
+  await store.save<StorageBucket>(storageBucket)
 }
 
 export async function storage_StorageBucketOperatorInvited({
   event,
   store,
 }: EventContext & StoreContext): Promise<void> {
-  // To be implemented
+  const [bucketId, workerId] = new Storage.StorageBucketOperatorInvitedEvent(event).params
+  const storageBucket = await getById(store, StorageBucket, bucketId.toString())
+  const operatorStatus = new StorageBucketOperatorStatusInvited()
+  operatorStatus.workerId = workerId.toNumber()
+  storageBucket.operatorStatus = operatorStatus
+  await store.save<StorageBucket>(storageBucket)
 }
 
 export async function storage_StorageBucketOperatorRemoved({
   event,
   store,
 }: EventContext & StoreContext): Promise<void> {
-  // To be implemented
+  const [bucketId] = new Storage.StorageBucketInvitationCancelledEvent(event).params
+  const storageBucket = await getById(store, StorageBucket, bucketId.toString())
+  const operatorStatus = new StorageBucketOperatorStatusMissing()
+  storageBucket.operatorStatus = operatorStatus
+  await store.save<StorageBucket>(storageBucket)
 }
 
 export async function storage_StorageBucketsUpdatedForBag({
   event,
   store,
 }: EventContext & StoreContext): Promise<void> {
-  // To be implemented
+  const [bagId, addedBucketsIds, removedBucketsIds] = new Storage.StorageBucketsUpdatedForBagEvent(event).params
+  const storageBag = await getBag(store, bagId, ['storedBy'])
+  storageBag.storedBy = (storageBag.storedBy || [])
+    .filter((b) => !Array.from(removedBucketsIds).some((id) => id.eq(b.id)))
+    .concat(Array.from(addedBucketsIds).map((id) => new StorageBucket({ id: id.toString() })))
+
+  await store.save<StorageBag>(storageBag)
 }
 
 export async function storage_StorageBucketDeleted({ event, store }: EventContext & StoreContext): Promise<void> {
-  // To be implemented
+  const [bucketId] = new Storage.StorageBucketDeletedEvent(event).params
+  // TODO: Delete or just change status?
+  // TODO: Cascade remove on db level?
+  // We shouldn't have to worry about deleting DataObjects, since this is already enforced by the runtime
+  const storageBucket = await getById(store, StorageBucket, bucketId.toString(), ['storedBags'])
+  await Promise.all((storageBucket.storedBags || []).map((b) => store.remove<StorageBag>(b)))
+  await store.remove<StorageBucket>(storageBucket)
 }
 
 // DYNAMIC BAGS
 export async function storage_DynamicBagCreated({ event, store }: EventContext & StoreContext): Promise<void> {
-  // To be implemented
+  const [bagId] = new Storage.DynamicBagCreatedEvent(event).params
+  const storageBag = new StorageBag({
+    id: getDynamicBagId(bagId),
+    owner: getDynamicBagOwner(bagId),
+  })
+  await store.save<StorageBag>(storageBag)
 }
 
 export async function storage_DynamicBagDeleted({ event, store }: EventContext & StoreContext): Promise<void> {
-  // To be implemented
+  const [, bagId] = new Storage.DynamicBagDeletedEvent(event).params
+  // TODO: Delete or just change status?
+  // TODO: Cascade remove on db level?
+  const storageBag = await getDynamicBag(store, bagId, ['objects'])
+  await Promise.all((storageBag.objects || []).map((o) => store.remove<StorageDataObject>(o)))
+  await store.remove<StorageBag>(storageBag)
 }
 
 // DATA OBJECTS
 
 // Note: "Uploaded" here actually means "created" (the real upload happens later)
 export async function storage_DataObjectdUploaded({ event, store }: EventContext & StoreContext): Promise<void> {
-  // To be implemented
+  const [dataObjectIds, uploadParams] = new Storage.DataObjectdUploadedEvent(event).params
+  const { bagId, authenticationKey, objectCreationList } = uploadParams
+  const storageBag = await getBag(store, bagId)
+  const dataObjects = dataObjectIds.map((objectId, i) => {
+    const objectParams = new DataObjectCreationParameters(registry, objectCreationList[i].toJSON() as any)
+    return new StorageDataObject({
+      id: objectId.toString(),
+      authenticationKey: authenticationKey.toString(),
+      isAccepted: false,
+      ipfsHash: objectParams.ipfsContentId.toString(),
+      size: new BN(objectParams.getField('size').toString()),
+      storageBag,
+    })
+  })
+  await Promise.all(dataObjects.map((o) => store.save<StorageDataObject>(o)))
 }
 
 export async function storage_PendingDataObjectsAccepted({ event, store }: EventContext & StoreContext): Promise<void> {
-  // To be implemented
+  const [, , bagId, dataObjectIds] = new Storage.PendingDataObjectsAcceptedEvent(event).params
+  const dataObjects = await getDataObjectsInBag(store, bagId, dataObjectIds)
+  await Promise.all(
+    dataObjects.map(async (dataObject) => {
+      dataObject.isAccepted = true
+      // TODO: Do we still want other storage providers to accept it? How long should the key be valid?
+      // dataObject.authenticationKey = null as any
+      await store.save<StorageDataObject>(dataObject)
+    })
+  )
 }
 
 export async function storage_DataObjectsMoved({ event, store }: EventContext & StoreContext): Promise<void> {
-  // To be implemented
+  const [srcBagId, destBagId, dataObjectIds] = new Storage.DataObjectsMovedEvent(event).params
+  const dataObjects = await getDataObjectsInBag(store, srcBagId, dataObjectIds)
+  const destBag = await getBag(store, destBagId)
+  await Promise.all(
+    dataObjects.map(async (dataObject) => {
+      dataObject.storageBag = destBag
+      await store.save<StorageDataObject>(dataObject)
+    })
+  )
 }
 
 export async function storage_DataObjectsDeleted({ event, store }: EventContext & StoreContext): Promise<void> {
-  // To be implemented
+  const [, bagId, dataObjectIds] = new Storage.DataObjectsDeletedEvent(event).params
+  const dataObjects = await getDataObjectsInBag(store, bagId, dataObjectIds)
+  // TODO: Delete them or just change status?
+  // (may not be so optimal if we expect a large amount of data objects)
+  await Promise.all(dataObjects.map((o) => store.remove<StorageDataObject>(o)))
 }
 
 // BLACKLIST
 export async function storage_UpdateBlacklist({ event, store }: EventContext & StoreContext): Promise<void> {
-  // To be implemented
+  const [removedContentIds, addedContentIds] = new Storage.UpdateBlacklistEvent(event).params
+  const storageSystem = await store.get(StorageSystemParameters, {})
+  if (!storageSystem) {
+    throw new Error('StorageSystemParameters entity not found!')
+  }
+  storageSystem.blacklist = storageSystem.blacklist
+    .filter((cid) => !Array.from(removedContentIds).some((id) => id.eq(cid)))
+    .concat(Array.from(addedContentIds).map((id) => id.toString()))
+
+  await store.save<StorageSystemParameters>(storageSystem)
 }
 
 export async function storage_StorageBucketVoucherLimitsSet({

+ 10 - 10
yarn.lock

@@ -2477,8 +2477,8 @@
 
 "@joystream/hydra-cli@3.1.0-alpha.1":
   version "3.1.0-alpha.1"
-  resolved "https://registry.yarnpkg.com/@joystream/hydra-cli/-/hydra-cli-3.1.0-alpha.1.tgz#30755ee4145a11186ad32646643fff18cb92c474"
-  integrity "sha1-mImLDJxu3W1ZsCMsqLh0RaFOnTI= sha512-mjnEj1cGeee7ly3vQiY5wdJIPOtbxMCzqEspxTeVlDZavseReJrlA+L1ru1dYP2vyNNrj1cLM5hqYi+DFwLxBw=="
+  resolved "https://registry.yarnpkg.com/@joystream/hydra-cli/-/hydra-cli-3.1.0-alpha.1.tgz#98898b0c9c6edd6d59b0232ca8b87445a14e9d32"
+  integrity sha512-mjnEj1cGeee7ly3vQiY5wdJIPOtbxMCzqEspxTeVlDZavseReJrlA+L1ru1dYP2vyNNrj1cLM5hqYi+DFwLxBw==
   dependencies:
     "@inquirer/input" "^0.0.13-alpha.0"
     "@inquirer/password" "^0.0.12-alpha.0"
@@ -2512,15 +2512,15 @@
 
 "@joystream/hydra-common@3.1.0-alpha.1", "@joystream/hydra-common@^3.1.0-alpha.1":
   version "3.1.0-alpha.1"
-  resolved "https://registry.yarnpkg.com/@joystream/hydra-common/-/hydra-common-3.1.0-alpha.1.tgz#a41876a7f9006b41f5c7a782abd7c340435c9e69"
-  integrity "sha1-rWjGCkGpR0j/hrzzUngfx0QFUT4= sha512-1nlxo1LZtLQgopYreY6owiVWekYBXFyn7JHThYbZDYszS9uNgfZtWNZnW12p0R7i22XQq+oI1d76hiW4Xs4HWg=="
+  resolved "https://registry.yarnpkg.com/@joystream/hydra-common/-/hydra-common-3.1.0-alpha.1.tgz#ad68c60a41a94748ff86bcf352781fc74405513e"
+  integrity sha512-1nlxo1LZtLQgopYreY6owiVWekYBXFyn7JHThYbZDYszS9uNgfZtWNZnW12p0R7i22XQq+oI1d76hiW4Xs4HWg==
   dependencies:
     bn.js "^5.1.3"
 
 "@joystream/hydra-db-utils@3.1.0-alpha.1", "@joystream/hydra-db-utils@^3.1.0-alpha.1":
   version "3.1.0-alpha.1"
-  resolved "https://registry.yarnpkg.com/@joystream/hydra-db-utils/-/hydra-db-utils-3.1.0-alpha.1.tgz#0b024216c4348d00ed6e4b15d3b2479ecbcd6339"
-  integrity "sha1-OSwY8BKvSFLj+NBgRgPXYB7+3Bo= sha512-mnvhGg64AIL7DklJvziqUcEsNW4vTtUW0iUGCxpNDFHQxGOo1XsHCiVth4v7wG1QeM0CzZo8iS5T6+sr6lvAcw=="
+  resolved "https://registry.yarnpkg.com/@joystream/hydra-db-utils/-/hydra-db-utils-3.1.0-alpha.1.tgz#392c18f012af4852e3f8d0604603d7601efedc1a"
+  integrity sha512-mnvhGg64AIL7DklJvziqUcEsNW4vTtUW0iUGCxpNDFHQxGOo1XsHCiVth4v7wG1QeM0CzZo8iS5T6+sr6lvAcw==
   dependencies:
     "@joystream/hydra-common" "^3.1.0-alpha.1"
     "@types/ioredis" "^4.17.4"
@@ -2532,8 +2532,8 @@
 
 "@joystream/hydra-processor@3.1.0-alpha.1":
   version "3.1.0-alpha.1"
-  resolved "https://registry.yarnpkg.com/@joystream/hydra-processor/-/hydra-processor-3.1.0-alpha.1.tgz#1074fbbc5e55dea73a60ac738ce112ccb521310f"
-  integrity "sha1-YFhDJEh5Ob+EtvpxoWc6Ri4b3Y0= sha512-y3Dz+gY79Vy6HL8SMoZbs129dZG+4R8/mCGn1IOGyLcIjJw9OSngCG9Plig0JrE7hBkkow7kAiaJNNC67L1sYA=="
+  resolved "https://registry.yarnpkg.com/@joystream/hydra-processor/-/hydra-processor-3.1.0-alpha.1.tgz#60584324487939bf84b6fa71a1673a462e1bdd8d"
+  integrity sha512-y3Dz+gY79Vy6HL8SMoZbs129dZG+4R8/mCGn1IOGyLcIjJw9OSngCG9Plig0JrE7hBkkow7kAiaJNNC67L1sYA==
   dependencies:
     "@joystream/hydra-common" "^3.1.0-alpha.1"
     "@joystream/hydra-db-utils" "^3.1.0-alpha.1"
@@ -2561,8 +2561,8 @@
 
 "@joystream/hydra-typegen@3.1.0-alpha.1":
   version "3.1.0-alpha.1"
-  resolved "https://registry.yarnpkg.com/@joystream/hydra-typegen/-/hydra-typegen-3.1.0-alpha.1.tgz#eb6047ffce1883dc0cdac05e9162c12a1911ee86"
-  integrity "sha1-7pmt1dmnoOqXcjGnWgCIZUvhvms= sha512-aoiQD/BI6NPjUYGiVbP9YifZYndWTfDw8Txdm45dG9aK9a0zdoxLL4BmZUhBx+RZTVmFyN/R/QLfnTmuDBAmzw=="
+  resolved "https://registry.yarnpkg.com/@joystream/hydra-typegen/-/hydra-typegen-3.1.0-alpha.1.tgz#ee99add5d9a7a0ea977231a75a0088654be1be6b"
+  integrity sha512-aoiQD/BI6NPjUYGiVbP9YifZYndWTfDw8Txdm45dG9aK9a0zdoxLL4BmZUhBx+RZTVmFyN/R/QLfnTmuDBAmzw==
   dependencies:
     "@oclif/command" "^1.8.0"
     "@oclif/config" "^1"