
Update sumer mappings for Giza (Hydra v3, protobufjs)

Leszek Wiesner 3 years ago
parent commit 7ebaef3b8b

+ 2 - 1
metadata-protobuf/package.json

@@ -40,7 +40,8 @@
     "google-protobuf": "^3.14.0",
     "long": "^4.0.0",
     "@types/long": "^4.0.1",
-    "i18n-iso-countries": "^6.8.0"
+    "i18n-iso-countries": "^6.8.0",
+    "iso-639-1": "^2.1.9"
   },
   "devDependencies": {
     "@types/chai": "^4.2.11",

+ 7 - 1
metadata-protobuf/src/utils.ts

@@ -1,5 +1,6 @@
 import { AnyMessage, AnyMetadataClass, DecodedMetadataObject } from './types'
 import countries from 'i18n-iso-countries'
+import langs from 'iso-639-1'
 
 export function isSet<T>(v: T | null | undefined): v is T {
   return v !== null && v !== undefined
@@ -32,7 +33,12 @@ export function metaToObject<T>(metaClass: AnyMetadataClass<T>, value: AnyMessag
   return metaClass.toObject(value, { arrays: false, longs: String }) as DecodedMetadataObject<T>
 }
 
-// According to ISO 3166-1 alpha-2 standard
+// Checks if the provided code is valid according to ISO 3166-1 alpha-2 standard
 export function isValidCountryCode(code: string): boolean {
   return countries.getAlpha2Codes()[code] !== undefined
 }
+
+// Checks if the provided code is valid according to ISO 639-1 standard
+export function isValidLanguageCode(code: string): boolean {
+  return langs.validate(code)
+}
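
For reference, a minimal usage sketch of the two validators above (not part of this commit; the metadata values are made up):

import { isValidCountryCode, isValidLanguageCode } from './utils'

// Hypothetical decoded metadata values being sanitized before they are persisted
const countryCode = 'DE' // ISO 3166-1 alpha-2
const languageCode = 'en' // ISO 639-1

if (!isValidCountryCode(countryCode)) {
  console.warn(`Dropping invalid country code: ${countryCode}`)
}
if (!isValidLanguageCode(languageCode)) {
  console.warn(`Dropping invalid language code: ${languageCode}`)
}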

+ 132 - 201
query-node/manifest.yml

@@ -11,6 +11,7 @@ typegen:
   metadata:
     source: ws://localhost:9944
   events:
+    # storage
     - storage.StorageBucketCreated
     - storage.StorageBucketInvitationAccepted
     - storage.StorageBucketsUpdatedForBag
@@ -49,226 +50,156 @@ typegen:
     - storage.DistributionBucketMetadataSet
     - storage.DistributionBucketOperatorRemoved
     - storage.DistributionBucketFamilyMetadataSet
-  # TODO: Sumer mappings
-  #   # membership
-  #   - members.MemberRegistered
-  #   - members.MemberUpdatedAboutText
-  #   - members.MemberUpdatedAvatar
-  #   - members.MemberUpdatedHandle
-  #   - members.MemberSetRootAccount
-  #   - members.MemberSetControllerAccount
 
-  #   # content directory
-  #   - content.CuratorGroupCreated
-  #   - content.CuratorGroupStatusSet
-  #   - content.CuratorAdded
-  #   - content.CuratorRemoved
-  #   - content.ChannelCreated
-  #   - content.ChannelUpdated
-  #   - content.ChannelAssetsRemoved
-  #   - content.ChannelCensorshipStatusUpdated
-  #   - content.ChannelOwnershipTransferRequested
-  #   - content.ChannelOwnershipTransferRequestWithdrawn
-  #   - content.ChannelOwnershipTransferred
-  #   - content.ChannelCategoryCreated
-  #   - content.ChannelCategoryUpdated
-  #   - content.ChannelCategoryDeleted
-  #   - content.VideoCategoryCreated
-  #   - content.VideoCategoryUpdated
-  #   - content.VideoCategoryDeleted
-  #   - content.VideoCreated
-  #   - content.VideoUpdated
-  #   - content.VideoDeleted
-  #   - content.VideoCensorshipStatusUpdated
-  #   - content.FeaturedVideosSet
+    # membership
+    - members.MemberRegistered
+    - members.MemberUpdatedAboutText
+    - members.MemberUpdatedAvatar
+    - members.MemberUpdatedHandle
+    - members.MemberSetRootAccount
+    - members.MemberSetControllerAccount
 
-  #   # storage
-  #   - data_directory.ContentAdded
-  #   - data_directory.ContentRemoved
-  #   - data_directory.ContentAccepted
-  #   - data_directory.ContentRejected
-  #   - data_directory.ContentUploadingStatusUpdated
+    # content directory
+    - content.CuratorGroupCreated
+    - content.CuratorGroupStatusSet
+    - content.CuratorAdded
+    - content.CuratorRemoved
+    - content.ChannelCreated
+    - content.ChannelUpdated
+    - content.ChannelAssetsRemoved
+    - content.ChannelCensorshipStatusUpdated
+    - content.ChannelOwnershipTransferRequested
+    - content.ChannelOwnershipTransferRequestWithdrawn
+    - content.ChannelOwnershipTransferred
+    - content.ChannelCategoryCreated
+    - content.ChannelCategoryUpdated
+    - content.ChannelCategoryDeleted
+    - content.VideoCategoryCreated
+    - content.VideoCategoryUpdated
+    - content.VideoCategoryDeleted
+    - content.VideoCreated
+    - content.VideoUpdated
+    - content.VideoDeleted
+    - content.VideoCensorshipStatusUpdated
+    - content.FeaturedVideosSet
 
-  #   # working groups
-  #   - storage_working_group.WorkerStorageUpdated
-  #   - storage_working_group.OpeningFilled
-  #   - storage_working_group.TerminatedWorker
-  #   - storage_working_group.WorkerExited
-  #   - storage_working_group.TerminatedLeader
-
-  #   - gateway_working_group.WorkerStorageUpdated
-  #   - gateway_working_group.OpeningFilled
-  #   - gateway_working_group.TerminatedWorker
-  #   - gateway_working_group.WorkerExited
-  #   - gateway_working_group.TerminatedLeader
-  # calls:
-  #   # members
-  #   - members.buyMembership
-  #   - members.addScreenedMember
-  #   - members.changeMemberAboutText
-  #   - members.changeMemberAvatar
-  #   - members.changeMemberHandle
-  #   - members.setRootAccount
-  #   - members.setControllerAccount
-  #   - members.updateMembership
-
-  #   # content directory
-  #   - content.create_curator_group
-  #   - content.set_curator_group_status
-  #   - content.add_curator_to_group
-  #   - content.remove_curator_from_group
-  #   - content.create_channel
-  #   - content.update_channel
-  #   - content.remove_channel_assets
-  #   - content.update_channel_censorship_status
-  #   - content.create_channel_category
-  #   - content.update_channel_category
-  #   - content.delete_channel_category
-  #   - content.request_channel_transfer
-  #   - content.cancel_channel_transfer_request
-  #   - content.accept_channel_transfer
-  #   - content.create_video
-  #   - content.update_video
-  #   - content.delete_video
-  #   - content.create_playlist
-  #   - content.update_playlist
-  #   - content.delete_playlist
-  #   - content.set_featured_videos
-  #   - content.create_video_category
-  #   - content.update_video_category
-  #   - content.delete_video_category
-  #   - content.remove_person_from_video
-  #   - content.update_video_censorship_status
-
-  #   # storage
-  #   - data_directory.add_content
-  #   - data_directory.remove_content
-  #   - data_directory.accept_content
-  #   - data_directory.update_content_uploading_status
-
-  #   # working groups
-  #   - storage_working_group.update_role_storage
-  #   - storage_working_group.fill_opening
-  #   - storage_working_group.leave_role
-  #   - storage_working_group.terminate_role
-
-  #   - gateway_working_group.update_role_storage
-  #   - gateway_working_group.fill_opening
-  #   - gateway_working_group.leave_role
-  #   - gateway_working_group.terminate_role
+    # working groups (we're using "storage_working_group" as a reference module)
+    - storage_working_group.WorkerStorageUpdated
+    - storage_working_group.OpeningFilled
+    - storage_working_group.TerminatedWorker
+    - storage_working_group.WorkerExited
+    - storage_working_group.TerminatedLeader
+  calls:
+    # members
+    - members.buyMembership
+    - members.addScreenedMember
+    - members.changeMemberAboutText
+    - members.changeMemberAvatar
+    - members.changeMemberHandle
+    - members.setRootAccount
+    - members.setControllerAccount
+    - members.updateMembership
   outDir: ./mappings/generated/types
   customTypes:
     lib: '@joystream/types/augment/all/types'
     typedefsLoc: '../types/augment/all/defs.json'
 mappings:
   # js module that exports the handler functions
-  mappingsModule: mappings/lib/giza
+  mappingsModule: mappings/lib
  # additional libraries the processor loads
   # typically it is a module with event and extrinsic types generated by hydra-typegen
   imports:
     - mappings/lib/generated/types
   eventHandlers:
-    # TODO: Sumer mappings
-    # # membership
-    # - event: members.MemberRegistered
-    #   handler: members_MemberRegistered(DatabaseManager, SubstrateEvent)
-    # - event: members.MemberUpdatedAboutText
-    #   handler: members_MemberUpdatedAboutText(DatabaseManager, SubstrateEvent)
-    # - event: members.MemberUpdatedAvatar
-    #   handler: members_MemberUpdatedAvatar(DatabaseManager, SubstrateEvent)
-    # - event: members.MemberUpdatedHandle
-    #   handler: members_MemberUpdatedHandle(DatabaseManager, SubstrateEvent)
-    # - event: members.MemberSetRootAccount
-    #   handler: members_MemberSetRootAccount(DatabaseManager, SubstrateEvent)
-    # - event: members.MemberSetControllerAccount
-    #   handler: members_MemberSetControllerAccount(DatabaseManager, SubstrateEvent)
-
-    # # content directory
-    # - event: content.CuratorGroupCreated
-    #   handler: content_CuratorGroupCreated(DatabaseManager, SubstrateEvent)
-    # - event: content.CuratorGroupStatusSet
-    #   handler: content_CuratorGroupStatusSet(DatabaseManager, SubstrateEvent)
-    # - event: content.CuratorAdded
-    #   handler: content_CuratorAdded(DatabaseManager, SubstrateEvent)
-    # - event: content.CuratorRemoved
-    #   handler: content_CuratorRemoved(DatabaseManager, SubstrateEvent)
-    # - event: content.ChannelCreated
-    #   handler: content_ChannelCreated(DatabaseManager, SubstrateEvent)
-    # - event: content.ChannelUpdated
-    #   handler: content_ChannelUpdated(DatabaseManager, SubstrateEvent)
-    # - event: content.ChannelAssetsRemoved
-    #   handler: content_ChannelAssetsRemoved(DatabaseManager, SubstrateEvent)
-    # - event: content.ChannelCensorshipStatusUpdated
-    #   handler: content_ChannelCensorshipStatusUpdated(DatabaseManager, SubstrateEvent)
-    # # these events are defined in runtime but never calles (at the time of writing)
-    # #- event: content.ChannelOwnershipTransferRequested
-    # #  handler: content_ChannelOwnershipTransferRequested(DatabaseManager, SubstrateEvent)
-    # #- event: content.ChannelOwnershipTransferRequestWithdrawn
-    # #  handler: content_ChannelOwnershipTransferRequestWithdrawn(DatabaseManager, SubstrateEvent)
-    # #- event: content.ChannelOwnershipTransferred
-    # #  handler: content_ChannelOwnershipTransferred(DatabaseManager, SubstrateEvent)
-    # - event: content.ChannelCategoryCreated
-    #   handler: content_ChannelCategoryCreated(DatabaseManager, SubstrateEvent)
-    # - event: content.ChannelCategoryUpdated
-    #   handler: content_ChannelCategoryUpdated(DatabaseManager, SubstrateEvent)
-    # - event: content.ChannelCategoryDeleted
-    #   handler: content_ChannelCategoryDeleted(DatabaseManager, SubstrateEvent)
-    # - event: content.VideoCategoryCreated
-    #   handler: content_VideoCategoryCreated(DatabaseManager, SubstrateEvent)
-    # - event: content.VideoCategoryUpdated
-    #   handler: content_VideoCategoryUpdated(DatabaseManager, SubstrateEvent)
-    # - event: content.VideoCategoryDeleted
-    #   handler: content_VideoCategoryDeleted(DatabaseManager, SubstrateEvent)
-    # - event: content.VideoCreated
-    #   handler: content_VideoCreated(DatabaseManager, SubstrateEvent)
-    # - event: content.VideoUpdated
-    #   handler: content_VideoUpdated(DatabaseManager, SubstrateEvent)
-    # - event: content.VideoDeleted
-    #   handler: content_VideoDeleted(DatabaseManager, SubstrateEvent)
-    # - event: content.VideoCensorshipStatusUpdated
-    #   handler: content_VideoCensorshipStatusUpdated(DatabaseManager, SubstrateEvent)
-    # - event: content.FeaturedVideosSet
-    #   handler: content_FeaturedVideosSet(DatabaseManager, SubstrateEvent)
+    # membership
+    - event: members.MemberRegistered
+      handler: members_MemberRegistered
+    - event: members.MemberUpdatedAboutText
+      handler: members_MemberUpdatedAboutText
+    - event: members.MemberUpdatedAvatar
+      handler: members_MemberUpdatedAvatar
+    - event: members.MemberUpdatedHandle
+      handler: members_MemberUpdatedHandle
+    - event: members.MemberSetRootAccount
+      handler: members_MemberSetRootAccount
+    - event: members.MemberSetControllerAccount
+      handler: members_MemberSetControllerAccount
 
-    # # storage
-    # - event: dataDirectory.ContentAdded
-    #   handler: dataDirectory_ContentAdded(DatabaseManager, SubstrateEvent)
-    # - event: dataDirectory.ContentRemoved
-    #   handler: dataDirectory_ContentRemoved(DatabaseManager, SubstrateEvent)
-    # - event: dataDirectory.ContentAccepted
-    #   handler: dataDirectory_ContentAccepted(DatabaseManager, SubstrateEvent)
-    # # not handled at the moment
-    # #- event: dataDirectory.ContentUploadingStatusUpdated
-    # #  handler: data_directory_ContentUploadingStatusUpdated(DatabaseManager, SubstrateEvent)
+    # content directory
+    - event: content.CuratorGroupCreated
+      handler: content_CuratorGroupCreated
+    - event: content.CuratorGroupStatusSet
+      handler: content_CuratorGroupStatusSet
+    - event: content.CuratorAdded
+      handler: content_CuratorAdded
+    - event: content.CuratorRemoved
+      handler: content_CuratorRemoved
+    - event: content.ChannelCreated
+      handler: content_ChannelCreated
+    - event: content.ChannelUpdated
+      handler: content_ChannelUpdated
+    - event: content.ChannelAssetsRemoved
+      handler: content_ChannelAssetsRemoved
+    - event: content.ChannelCensorshipStatusUpdated
+      handler: content_ChannelCensorshipStatusUpdated
+    # these events are defined in runtime but never emitted (at the time of writing)
+    #- event: content.ChannelOwnershipTransferRequested
+    #  handler: content_ChannelOwnershipTransferRequested
+    #- event: content.ChannelOwnershipTransferRequestWithdrawn
+    #  handler: content_ChannelOwnershipTransferRequestWithdrawn
+    #- event: content.ChannelOwnershipTransferred
+    #  handler: content_ChannelOwnershipTransferred
+    - event: content.ChannelCategoryCreated
+      handler: content_ChannelCategoryCreated
+    - event: content.ChannelCategoryUpdated
+      handler: content_ChannelCategoryUpdated
+    - event: content.ChannelCategoryDeleted
+      handler: content_ChannelCategoryDeleted
+    - event: content.VideoCategoryCreated
+      handler: content_VideoCategoryCreated
+    - event: content.VideoCategoryUpdated
+      handler: content_VideoCategoryUpdated
+    - event: content.VideoCategoryDeleted
+      handler: content_VideoCategoryDeleted
+    - event: content.VideoCreated
+      handler: content_VideoCreated
+    - event: content.VideoUpdated
+      handler: content_VideoUpdated
+    - event: content.VideoDeleted
+      handler: content_VideoDeleted
+    - event: content.VideoCensorshipStatusUpdated
+      handler: content_VideoCensorshipStatusUpdated
+    - event: content.FeaturedVideosSet
+      handler: content_FeaturedVideosSet
 
-    # # working groups
-    # ## storage - workers
-    # - event: storageWorkingGroup.WorkerStorageUpdated
-    #   handler: storageWorkingGroup_WorkerStorageUpdated(DatabaseManager, SubstrateEvent)
-    # - event: storageWorkingGroup.OpeningFilled
-    #   handler: storageWorkingGroup_OpeningFilled(DatabaseManager, SubstrateEvent)
-    # - event: storageWorkingGroup.TerminatedWorker
-    #   handler: storageWorkingGroup_TerminatedWorker(DatabaseManager, SubstrateEvent)
-    # - event: storageWorkingGroup.WorkerExited
-    #   handler: storageWorkingGroup_WorkerExited(DatabaseManager, SubstrateEvent)
+    # working groups
+    ## storage - workers
+    - event: storageWorkingGroup.WorkerStorageUpdated
+      handler: workingGroup_WorkerStorageUpdated
+    - event: storageWorkingGroup.OpeningFilled
+      handler: workingGroup_OpeningFilled
+    - event: storageWorkingGroup.TerminatedWorker
+      handler: workingGroup_TerminatedWorker
+    - event: storageWorkingGroup.WorkerExited
+      handler: workingGroup_WorkerExited
 
-    # ## storage - leader
-    # - event: storageWorkingGroup.TerminatedLeader
-    #   handler: storageWorkingGroup_TerminatedLeader(DatabaseManager, SubstrateEvent)
+    ## storage - leader
+    - event: storageWorkingGroup.TerminatedLeader
+      handler: workingGroup_TerminatedLeader
 
-    # ## gateway - workers
-    # - event: gatewayWorkingGroup.WorkerStorageUpdated
-    #   handler: gatewayWorkingGroup_WorkerStorageUpdated(DatabaseManager, SubstrateEvent)
-    # - event: gatewayWorkingGroup.OpeningFilled
-    #   handler: gatewayWorkingGroup_OpeningFilled(DatabaseManager, SubstrateEvent)
-    # - event: gatewayWorkingGroup.TerminatedWorker
-    #   handler: gatewayWorkingGroup_TerminatedWorker(DatabaseManager, SubstrateEvent)
-    # - event: gatewayWorkingGroup.WorkerExited
-    #   handler: gatewayWorkingGroup_WorkerExited(DatabaseManager, SubstrateEvent)
+    ## gateway - workers
+    - event: gatewayWorkingGroup.WorkerStorageUpdated
+      handler: workingGroup_WorkerStorageUpdated
+    - event: gatewayWorkingGroup.OpeningFilled
+      handler: workingGroup_OpeningFilled
+    - event: gatewayWorkingGroup.TerminatedWorker
+      handler: workingGroup_TerminatedWorker
+    - event: gatewayWorkingGroup.WorkerExited
+      handler: workingGroup_WorkerExited
 
-    # ## gateway - leader
-    # - event: gatewayWorkingGroup.TerminatedLeader
-    #   handler: gatewayWorkingGroup_TerminatedLeader(DatabaseManager, SubstrateEvent)
+    ## gateway - leader
+    - event: gatewayWorkingGroup.TerminatedLeader
+      handler: workingGroup_TerminatedLeader
 
     # storage v2
     - event: storage.StorageBucketCreated
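
Note that the handler entries above no longer spell out `(DatabaseManager, SubstrateEvent)` argument lists: under Hydra v3 the processor only needs the exported function name from `mappingsModule`, and each handler receives a single context object. A minimal sketch of the expected handler shape, mirroring the mappings changed below (module path and event parameter order are illustrative):

import { EventContext, StoreContext } from '@joystream/hydra-common'
import { Membership } from 'query-node/dist/model'
import { Members } from './generated/types' // matches the `imports` entry above (path is illustrative)

export async function members_MemberRegistered({ store, event }: EventContext & StoreContext): Promise<void> {
  // Hydra v3 exposes decoded event data as a `.params` tuple instead of named `.data` fields
  const [memberId] = new Members.MemberRegisteredEvent(event).params
  const member = new Membership({ id: memberId.toString(), createdAt: new Date(event.blockTimestamp) })
  // ... fill in the remaining fields from the extrinsic/metadata, then persist:
  await store.save<Membership>(member)
}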

+ 96 - 98
query-node/mappings/sumer/common.ts → query-node/mappings/common.ts

@@ -1,16 +1,34 @@
-import { SubstrateEvent, SubstrateExtrinsic, ExtrinsicArg } from '@dzlzv/hydra-common'
-import { DatabaseManager } from '@dzlzv/hydra-db-utils'
-import { u64, Bytes } from '@polkadot/types/primitive'
-import { fixBlockTimestamp } from './eventFix'
+import { DatabaseManager, SubstrateEvent, SubstrateExtrinsic, ExtrinsicArg } from '@joystream/hydra-common'
+import { Bytes } from '@polkadot/types'
+import { Network } from 'query-node/dist/model'
+import { BaseModel } from '@joystream/warthog'
+import { metaToObject } from '@joystream/metadata-protobuf/utils'
+import { AnyMetadataClass, DecodedMetadataObject } from '@joystream/metadata-protobuf/types'
+
+export const CURRENT_NETWORK = Network.GIZA
+/*
+  Simple logger enabling error and informational reporting.
 
-// Asset
-import { DataObjectOwner, DataObject, LiaisonJudgement, Network, NextEntityId } from 'query-node'
-import { ContentParameters } from '@joystream/types/augment'
+  FIXME: The `Logger` class will not be needed once Hydra v3 is released.
+  Hydra will then provide a logger instance, and the code using `Logger` should be refactored.
+*/
+class Logger {
+  /*
+    Log significant event.
+  */
+  info(message: string, data?: unknown) {
+    console.log(message, data)
+  }
 
-import { ContentParameters as Custom_ContentParameters } from '@joystream/types/storage'
-import { registry } from '@joystream/types'
+  /*
+    Log significant error.
+  */
+  error(message: string, data?: unknown) {
+    console.error(message, data)
+  }
+}
 
-const currentNetwork = Network.BABYLON
+export const logger = new Logger()
 
 /*
   Reports that insurmountable inconsistent state has been encountered and throws an exception.
@@ -21,71 +39,32 @@ export function inconsistentState(extraInfo: string, data?: unknown): never {
   // log error
   logger.error(errorMessage, data)
 
-  throw new Error(errorMessage)
+  throw errorMessage
 }
 
 /*
-  Reports that metadata inserted by the user are not entirely valid, but the problem can be overcome.
+  Reports that insurmountable unexpected data has been encountered and throws an exception.
 */
-export function invalidMetadata(extraInfo: string, data?: unknown): void {
-  const errorMessage = 'Invalid metadata: ' + extraInfo
+export function unexpectedData(extraInfo: string, data?: unknown): never {
+  const errorMessage = 'Unexpected data: ' + extraInfo
 
   // log error
-  logger.info(errorMessage, data)
-}
-
-/*
-  Creates a predictable and unique ID for the given content.
-*/
-export async function getNextId(db: DatabaseManager): Promise<string> {
-  // load or create record
-  const existingRecord = (await db.get(NextEntityId, {})) || new NextEntityId({ id: '0', nextId: 1 })
-
-  // remember id
-  const entityId = existingRecord.nextId
-
-  // increment id
-  existingRecord.nextId = existingRecord.nextId + 1
-
-  // save record
-  await db.save<NextEntityId>(existingRecord)
+  logger.error(errorMessage, data)
 
-  return entityId.toString()
+  throw errorMessage
 }
 
 /*
-  Prepares data object from content parameters.
+  Reports that metadata inserted by the user are not entirely valid, but the problem can be overcome.
 */
-export async function prepareDataObject(
-  db: DatabaseManager,
-  contentParameters: ContentParameters,
-  event: SubstrateEvent,
-  owner: typeof DataObjectOwner
-): Promise<DataObject> {
-  // convert generic content parameters coming from processor to custom Joystream data type
-  const customContentParameters = new Custom_ContentParameters(registry, contentParameters.toJSON() as any)
-
-  const dataObject = new DataObject({
-    id: await getNextId(db),
-    owner,
-    createdInBlock: event.blockNumber,
-    typeId: contentParameters.type_id.toNumber(),
-    size: customContentParameters.size_in_bytes.toNumber(),
-    liaisonJudgement: LiaisonJudgement.PENDING, // judgement is pending at start; liaison id is set when content is accepted/rejected
-    ipfsContentId: convertBytesToString(contentParameters.ipfs_content_id),
-    joystreamContentId: customContentParameters.content_id.encode(),
-
-    createdAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-    updatedAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-
-    createdById: '1',
-    updatedById: '1',
-  })
+export function invalidMetadata(extraInfo: string, data?: unknown): void {
+  const errorMessage = 'Invalid metadata: ' + extraInfo
 
-  return dataObject
+  // log error
+  logger.info(errorMessage, data)
 }
 
-/// ///////////////// Sudo extrinsic calls ///////////////////////////////////////
+/// //////////////// Sudo extrinsic calls ///////////////////////////////////////
 
 // soft-peg interface for typegen-generated `*Call` types
 export interface IGenericExtrinsicObject<T> {
@@ -105,12 +84,13 @@ export interface ISudoCallArgs<T> extends ExtrinsicArg {
 */
 export function extractExtrinsicArgs<DataParams, EventObject extends IGenericExtrinsicObject<DataParams>>(
   rawEvent: SubstrateEvent,
-  callFactory: new (event: SubstrateEvent) => EventObject,
+  callFactoryConstructor: new (event: SubstrateEvent) => EventObject,
 
   // in ideal world this parameter would not be needed, but there is no way to associate parameters
   // used in sudo to extrinsic parameters without it
   argsIndeces: Record<keyof DataParams, number>
 ): EventObject['args'] {
+  const CallFactory = callFactoryConstructor
   // this is equal to DataParams but only this notation works properly
   // escape when extrinsic info is not available
   if (!rawEvent.extrinsic) {
@@ -119,8 +99,7 @@ export function extractExtrinsicArgs<DataParams, EventObject extends IGenericExt
 
  // regular extrinsic call?
   if (rawEvent.extrinsic.section !== 'sudo') {
-    // eslint-disable-next-line new-cap
-    return new callFactory(rawEvent).args
+    return new CallFactory(rawEvent).args
   }
 
   // sudo extrinsic call
@@ -150,8 +129,7 @@ export function extractExtrinsicArgs<DataParams, EventObject extends IGenericExt
   } as SubstrateEvent
 
   // create event object and extract processed args
-  // eslint-disable-next-line new-cap
-  const finalArgs = new callFactory(partialEvent).args
+  const finalArgs = new CallFactory(partialEvent).args
 
   return finalArgs
 }
@@ -183,45 +161,65 @@ export function extractSudoCallParameters<DataParams>(rawEvent: SubstrateEvent):
   return callArgs
 }
 
-/// ///////////////// Logger /////////////////////////////////////////////////////
-
-/*
-  Simple logger enabling error and informational reporting.
-
-  `Logger` class will not be needed in the future when Hydra v3 will be released.
-  Hydra will provide logger instance and relevant code using `Logger` should be refactored.
-*/
-class Logger {
-  /*
-    Log significant event.
-  */
-  info(message: string, data?: unknown) {
-    console.log(message, data)
+export function deserializeMetadata<T>(
+  metadataType: AnyMetadataClass<T>,
+  metadataBytes: Bytes
+): DecodedMetadataObject<T> | null {
+  try {
+    return metaToObject(metadataType, metadataType.decode(metadataBytes.toU8a(true)))
+  } catch (e) {
+    invalidMetadata(`Cannot deserialize ${metadataType.name}! Provided bytes: (${metadataBytes.toHex()})`)
+    return null
   }
+}
 
-  /*
-    Log significant error.
-  */
-  error(message: string, data?: unknown) {
-    console.error(message, data)
-  }
+export function bytesToString(b: Bytes): string {
+  return (
+    Buffer.from(b.toU8a(true))
+      .toString()
+      // eslint-disable-next-line no-control-regex
+      .replace(/\u0000/g, '')
+  )
 }
 
-export const logger = new Logger()
+export function perpareString(s: string): string {
+  // eslint-disable-next-line no-control-regex
+  return s.replace(/\u0000/g, '')
+}
 
-/*
-  Helper for converting Bytes type to string
-*/
-export function convertBytesToString(b: Bytes | null): string {
-  if (!b) {
-    return ''
-  }
+export function hasValuesForProperties<
+  T extends Record<string, unknown>,
+  P extends keyof T & string,
+  PA extends readonly P[]
+>(obj: T, props: PA): obj is T & { [K in PA[number]]: NonNullable<T[K]> } {
+  // every listed property must be set (not null/undefined) for the type guard to hold;
+  // a `forEach` callback cannot short-circuit the outer function, hence `every`
+  return props.every((p) => obj[p] !== null && obj[p] !== undefined)
+}
 
-  const text = Buffer.from(b.toU8a(true)).toString()
+type EntityClass<T extends BaseModel> = {
+  new (): T
+  name: string
+}
 
-  // prevent utf-8 null character
-  // eslint-disable-next-line no-control-regex
-  const result = text.replace(/\u0000/g, '')
+type RelationsArr<T extends BaseModel> = Exclude<
+  keyof T & string,
+  { [K in keyof T]: T[K] extends BaseModel | undefined ? '' : T[K] extends BaseModel[] | undefined ? '' : K }[keyof T]
+>[]
+
+export async function getById<T extends BaseModel>(
+  store: DatabaseManager,
+  entityClass: EntityClass<T>,
+  id: string,
+  relations?: RelationsArr<T>
+): Promise<T> {
+  const result = await store.get(entityClass, { where: { id }, relations })
+  if (!result) {
+    throw new Error(`Expected ${entityClass.name} not found by ID: ${id}`)
+  }
 
   return result
 }
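
A short usage sketch of the helpers consolidated into the shared `common.ts` (illustrative only; the metadata class and the `about` field are assumptions, not part of this commit):

import { DatabaseManager } from '@joystream/hydra-common'
import { Bytes } from '@polkadot/types'
import { MembershipMetadata } from '@joystream/metadata-protobuf'
import { isSet } from '@joystream/metadata-protobuf/utils'
import { Membership } from 'query-node/dist/model'
import { deserializeMetadata, getById } from './common'

async function updateMemberAbout(store: DatabaseManager, id: string, metadataBytes: Bytes): Promise<void> {
  const member = await getById(store, Membership, id) // throws if no Membership with this id exists
  const meta = deserializeMetadata(MembershipMetadata, metadataBytes) // returns null when the bytes can't be decoded
  if (meta && isSet(meta.about)) {
    member.about = meta.about
  }
  await store.save<Membership>(member)
}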

+ 0 - 63
query-node/mappings/giza/common.ts

@@ -1,63 +0,0 @@
-import { DatabaseManager } from '@joystream/hydra-common'
-import { BaseModel } from '@joystream/warthog'
-import { WorkingGroup } from '@joystream/types/augment/all'
-import { AnyMetadataClass, DecodedMetadataObject } from '@joystream/metadata-protobuf/types'
-import { metaToObject } from '@joystream/metadata-protobuf/utils'
-import { Bytes } from '@polkadot/types'
-
-type EntityClass<T extends BaseModel> = {
-  new (): T
-  name: string
-}
-
-type RelationsArr<T extends BaseModel> = Exclude<
-  keyof T & string,
-  { [K in keyof T]: T[K] extends BaseModel | undefined ? '' : T[K] extends BaseModel[] | undefined ? '' : K }[keyof T]
->[]
-
-export async function getById<T extends BaseModel>(
-  store: DatabaseManager,
-  entityClass: EntityClass<T>,
-  id: string,
-  relations?: RelationsArr<T>
-): Promise<T> {
-  const result = await store.get(entityClass, { where: { id }, relations })
-  if (!result) {
-    throw new Error(`Expected ${entityClass.name} not found by ID: ${id}`)
-  }
-
-  return result
-}
-
-export type WorkingGroupModuleName = 'storageWorkingGroup' | 'contentDirectoryWorkingGroup'
-
-export function getWorkingGroupModuleName(group: WorkingGroup): WorkingGroupModuleName {
-  if (group.isContent) {
-    return 'contentDirectoryWorkingGroup'
-  } else if (group.isStorage) {
-    return 'storageWorkingGroup'
-  }
-
-  throw new Error(`Unsupported working group encountered: ${group.type}`)
-}
-
-export function deserializeMetadata<T>(
-  metadataType: AnyMetadataClass<T>,
-  metadataBytes: Bytes
-): DecodedMetadataObject<T> | null {
-  try {
-    return metaToObject(metadataType, metadataType.decode(metadataBytes.toU8a(true)))
-  } catch (e) {
-    console.error(`Cannot deserialize ${metadataType.name}! Provided bytes: (${metadataBytes.toHex()})`)
-    return null
-  }
-}
-
-export function bytesToString(b: Bytes): string {
-  return (
-    Buffer.from(b.toU8a(true))
-      .toString()
-      // eslint-disable-next-line no-control-regex
-      .replace(/\u0000/g, '')
-  )
-}

+ 7 - 2
query-node/mappings/giza/genesis-data/index.ts

@@ -1,3 +1,8 @@
-import storageSystem from './storageSystem.json'
+import { MemberJson, StorageSystemJson } from './types'
+import storageSystemJson from './storageSystem.json'
+import membersJson from './members.json'
 
-export { storageSystem }
+const storageSystem: StorageSystemJson = storageSystemJson
+const members: MemberJson[] = membersJson
+
+export { storageSystem, members }

+ 0 - 0
query-node/mappings/sumer/bootstrap/data/members.json → query-node/mappings/giza/genesis-data/members.json


+ 13 - 0
query-node/mappings/giza/genesis-data/types.ts

@@ -0,0 +1,13 @@
+export type MemberJson = {
+  member_id: string
+  root_account: string
+  controller_account: string
+  handle: string
+  about?: string
+  avatar_uri?: string
+  registered_at_time: number
+}
+
+export type StorageSystemJson = {
+  blacklist: string[]
+}
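
An illustrative entry that `members.json` would be expected to hold under the `MemberJson` shape above (all values are made up):

import { MemberJson } from './types'

export const exampleMember: MemberJson = {
  member_id: '1',
  root_account: '5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY', // placeholder SS58 address
  controller_account: '5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY',
  handle: 'alice',
  about: 'Example genesis member',
  registered_at_time: 1600000000000, // millisecond timestamp; becomes createdAt/updatedAt in genesis.ts
}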

+ 21 - 2
query-node/mappings/giza/genesis.ts

@@ -1,6 +1,6 @@
 import { StoreContext } from '@joystream/hydra-common'
-import { StorageSystemParameters } from 'query-node/dist/model'
-import { storageSystem } from './genesis-data'
+import { Membership, MembershipEntryMethod, StorageSystemParameters } from 'query-node/dist/model'
+import { storageSystem, members } from './genesis-data'
 
 export async function loadGenesisData({ store }: StoreContext): Promise<void> {
   // Storage system
@@ -9,4 +9,23 @@ export async function loadGenesisData({ store }: StoreContext): Promise<void> {
       ...storageSystem,
     })
   )
+  // Members
+  for (const m of members) {
+    // create new membership
+    const member = new Membership({
+      // main data
+      id: m.member_id,
+      rootAccount: m.root_account,
+      controllerAccount: m.controller_account,
+      handle: m.handle,
+      about: m.about,
+      avatarUri: m.avatar_uri,
+      createdInBlock: 0,
+      entry: MembershipEntryMethod.GENESIS,
+      // fill in auto-generated fields
+      createdAt: new Date(m.registered_at_time),
+      updatedAt: new Date(m.registered_at_time),
+    })
+    await store.save<Membership>(member)
+  }
 }

+ 2 - 2
query-node/mappings/giza/storage/index.ts

@@ -27,7 +27,7 @@ import {
   StorageBagStorageAssignment,
 } from 'query-node/dist/model'
 import BN from 'bn.js'
-import { getById, getWorkingGroupModuleName, bytesToString } from '../common'
+import { getById, bytesToString } from '../../common'
 import { BTreeSet } from '@polkadot/types'
 import { DataObjectCreationParameters } from '@joystream/types/storage'
 import { registry } from '@joystream/types'
@@ -67,7 +67,7 @@ function getStaticBagOwner(bagId: StaticBagId): typeof StorageBagOwner {
     return new StorageBagOwnerCouncil()
   } else if (bagId.isWorkingGroup) {
     const owner = new StorageBagOwnerWorkingGroup()
-    owner.workingGroupId = getWorkingGroupModuleName(bagId.asWorkingGroup)
+    owner.workingGroupId = bagId.asWorkingGroup.toString().toLowerCase()
     return owner
   } else {
     throw new Error(`Unexpected static bag type: ${bagId.type}`)

+ 1 - 1
query-node/mappings/giza/storage/metadata.ts

@@ -6,7 +6,7 @@ import {
   GeoCoordinates,
   NodeLocationMetadata,
 } from 'query-node/dist/model'
-import { deserializeMetadata } from '../common'
+import { deserializeMetadata } from '../../common'
 import { Bytes } from '@polkadot/types'
 import {
   DistributionBucketOperatorMetadata as DistributionBucketOperatorMetadataProto,

+ 2 - 0
query-node/mappings/index.ts

@@ -0,0 +1,2 @@
+export * from './giza'
+export * from './sumer'

+ 0 - 1
query-node/mappings/sumer/bootstrap/data/workers.json

@@ -1 +0,0 @@
-{}

+ 0 - 69
query-node/mappings/sumer/bootstrap/index.ts

@@ -1,69 +0,0 @@
-import { createDBConnection } from '@dzlzv/hydra-processor/lib/db'
-import { DatabaseManager, makeDatabaseManager } from '@dzlzv/hydra-db-utils'
-import { Connection, getManager, FindConditions } from 'typeorm'
-
-import { bootMembers, IBootstrapMember } from './members'
-import { bootWorkers, IBootstrapWorker, IBootstrapWorkers } from './workers'
-import { Worker, WorkerType } from 'query-node'
-
-import fs from 'fs'
-import path from 'path'
-
-// run bootstrap
-init()
-
-// bootstrap flow
-async function init() {
-  // prepare database and import data
-  const [databaseManager, connection] = await createDatabaseManager()
-
-  // escape if db is already initialized
-  if (await isDbInitialized(databaseManager)) {
-    await connection.close()
-    return
-  }
-
-  // load import data
-  const data = loadData()
-
-  // bootstrap entities
-  await bootMembers(databaseManager, data.members)
-  await bootWorkers(databaseManager, data.workers)
-
-  await connection.close()
-}
-
-async function isDbInitialized(db: DatabaseManager): Promise<boolean> {
-  // simple way to check if db is bootstrapped already - check if there is at least 1 storage provider
-  const membership = await db.get(Worker, {
-    where: {
-      type: WorkerType.STORAGE,
-    } as FindConditions<Worker>,
-  })
-
-  return !!membership
-}
-
-async function createDatabaseManager(): Promise<[DatabaseManager, Connection]> {
-  // paths in `entities` should be the same as `entities` set in `manifest.yml`
-  const entities = ['generated/graphql-server/dist/**/*.model.js']
-
-  // connect to db and create manager
-  const connection = await createDBConnection(entities)
-  const entityManager = getManager(connection.name)
-  const databaseManager = makeDatabaseManager(entityManager)
-
-  return [databaseManager, connection]
-}
-
-interface IBootstrapData {
-  members: IBootstrapMember[]
-  workers: IBootstrapWorkers
-}
-
-function loadData(): IBootstrapData {
-  return {
-    members: JSON.parse(fs.readFileSync(process.env.BOOTSTRAP_DATA_FOLDER + '/members.json').toString()),
-    workers: JSON.parse(fs.readFileSync(process.env.BOOTSTRAP_DATA_FOLDER + '/workers.json').toString()),
-  }
-}

+ 0 - 42
query-node/mappings/sumer/bootstrap/members.ts

@@ -1,42 +0,0 @@
-// import { Connection } from 'typeorm'
-import { DatabaseManager } from '@dzlzv/hydra-db-utils'
-import { logger } from '../src/common'
-import { MembershipEntryMethod, Membership } from 'query-node'
-
-export interface IBootstrapMember {
-  member_id: number
-  root_account: string
-  controller_account: string
-  handle: string
-  avatar_uri: string
-  about: string
-  registered_at_time: number
-}
-
-// export async function bootMembers(members: IBootstrapMember[], db: Connection): Promise<void> {
-export async function bootMembers(db: DatabaseManager, members: IBootstrapMember[]): Promise<void> {
-  for (const rawMember of members) {
-    // create new membership
-    const member = new Membership({
-      // main data
-      id: rawMember.member_id.toString(),
-      rootAccount: rawMember.root_account,
-      controllerAccount: rawMember.controller_account,
-      handle: rawMember.handle,
-      about: rawMember.about,
-      avatarUri: rawMember.avatar_uri,
-      createdInBlock: 0,
-      entry: MembershipEntryMethod.GENESIS,
-
-      // fill in auto-generated fields
-      createdAt: new Date(rawMember.registered_at_time),
-      updatedAt: new Date(rawMember.registered_at_time),
-    })
-
-    // save membership
-    await db.save<Membership>(member)
-
-    // emit log event
-    logger.info('Member has been bootstrapped', { id: rawMember.member_id })
-  }
-}

+ 0 - 48
query-node/mappings/sumer/bootstrap/workers.ts

@@ -1,48 +0,0 @@
-import { DatabaseManager } from '@dzlzv/hydra-db-utils'
-import { Worker, WorkerType } from 'query-node'
-import { logger, getNextId } from '../src/common'
-
-export interface IBootstrapWorkers {
-  storage: IBootstrapWorker[]
-  gateway: IBootstrapWorker[]
-}
-
-export interface IBootstrapWorker {
-  id: string
-  created_at: string
-}
-
-export async function bootWorkers(db: DatabaseManager, workers: IBootstrapWorkers): Promise<void> {
-  await bootWorkersInGroup(db, workers.storage, WorkerType.STORAGE)
-  await bootWorkersInGroup(db, workers.gateway, WorkerType.GATEWAY)
-}
-
-export async function bootWorkersInGroup(
-  db: DatabaseManager,
-  workers: IBootstrapWorker[],
-  workerType: WorkerType
-): Promise<void> {
-  if (!workers) {
-    return
-  }
-
-  for (const rawWorker of workers) {
-    // create new membership
-    const worker = new Worker({
-      // main data
-      id: await getNextId(db),
-      workerId: rawWorker.id,
-      type: workerType,
-      isActive: true,
-
-      createdAt: new Date(rawWorker.created_at),
-      updatedAt: new Date(rawWorker.created_at),
-    })
-
-    // save worker
-    await db.save<Worker>(worker)
-
-    // emit log event
-    logger.info('Worker has been bootstrapped', { id: rawWorker.id, workerType })
-  }
-}

+ 78 - 127
query-node/mappings/sumer/content/channel.ts

@@ -1,35 +1,20 @@
-import { fixBlockTimestamp } from '../eventFix'
-import { SubstrateEvent } from '@dzlzv/hydra-common'
-import { DatabaseManager } from '@dzlzv/hydra-db-utils'
-import ISO6391 from 'iso-639-1'
-import { FindConditions, In } from 'typeorm'
-
+/*
+eslint-disable @typescript-eslint/naming-convention
+*/
+import { EventContext, StoreContext } from '@joystream/hydra-common'
 import { AccountId } from '@polkadot/types/interfaces'
 import { Option } from '@polkadot/types/codec'
-import { Content } from '../../../generated/types'
-import {
-  readProtobuf,
-  readProtobufWithAssets,
-  convertContentActorToChannelOwner,
-  convertContentActorToDataObjectOwner,
-} from './utils'
-
-import { Channel, ChannelCategory, DataObject, AssetAvailability } from 'query-node'
-import { inconsistentState, logger } from '../common'
-
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_ChannelCreated(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
+import { Content } from '../../generated/types'
+import { convertContentActorToChannelOwner, processChannelMetadata } from './utils'
+import { AssetNone, Channel, ChannelCategory } from 'query-node/dist/model'
+import { deserializeMetadata, inconsistentState, logger } from '../../common'
+import { ChannelCategoryMetadata, ChannelMetadata } from '@joystream/metadata-protobuf'
+import { integrateMeta } from '@joystream/metadata-protobuf/utils'
+
+export async function content_ChannelCreated(ctx: EventContext & StoreContext): Promise<void> {
+  const { store, event } = ctx
   // read event data
-  const { channelId, channelCreationParameters, contentActor } = new Content.ChannelCreatedEvent(event).data
-
-  // read metadata
-  const protobufContent = await readProtobufWithAssets(new Channel(), {
-    metadata: channelCreationParameters.meta,
-    db,
-    event,
-    assets: channelCreationParameters.assets,
-    contentOwner: convertContentActorToDataObjectOwner(contentActor, channelId.toNumber()),
-  })
+  const [contentActor, channelId, , channelCreationParameters] = new Content.ChannelCreatedEvent(event).params
 
   // create entity
   const channel = new Channel({
@@ -38,38 +23,34 @@ export async function content_ChannelCreated(db: DatabaseManager, event: Substra
     isCensored: false,
     videos: [],
     createdInBlock: event.blockNumber,
-
-    // default values for properties that might or might not be filled by metadata
-    coverPhotoUrls: [],
-    coverPhotoAvailability: AssetAvailability.INVALID,
-    avatarPhotoUrls: [],
-    avatarPhotoAvailability: AssetAvailability.INVALID,
-
+    // assets
+    coverPhoto: new AssetNone(),
+    avatarPhoto: new AssetNone(),
     // fill in auto-generated fields
-    createdAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-    updatedAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-
+    createdAt: new Date(event.blockTimestamp),
+    updatedAt: new Date(event.blockTimestamp),
     // prepare channel owner (handles fields `ownerMember` and `ownerCuratorGroup`)
-    ...(await convertContentActorToChannelOwner(db, contentActor)),
-
-    // integrate metadata
-    ...protobufContent,
+    ...(await convertContentActorToChannelOwner(store, contentActor)),
   })
 
+  // deserialize & process metadata
+  const metadata = deserializeMetadata(ChannelMetadata, channelCreationParameters.meta) || {}
+  await processChannelMetadata(ctx, channel, metadata, channelCreationParameters.assets)
+
   // save entity
-  await db.save<Channel>(channel)
+  await store.save<Channel>(channel)
 
   // emit log event
   logger.info('Channel has been created', { id: channel.id })
 }
 
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_ChannelUpdated(db: DatabaseManager, event: SubstrateEvent) {
+export async function content_ChannelUpdated(ctx: EventContext & StoreContext): Promise<void> {
+  const { store, event } = ctx
   // read event data
-  const { channelId, channelUpdateParameters, contentActor } = new Content.ChannelUpdatedEvent(event).data
+  const [, channelId, , channelUpdateParameters] = new Content.ChannelUpdatedEvent(event).params
 
   // load channel
-  const channel = await db.get(Channel, { where: { id: channelId.toString() } as FindConditions<Channel> })
+  const channel = await store.get(Channel, { where: { id: channelId.toString() } })
 
   // ensure channel exists
   if (!channel) {
@@ -77,22 +58,12 @@ export async function content_ChannelUpdated(db: DatabaseManager, event: Substra
   }
 
   // prepare changed metadata
-  const newMetadata = channelUpdateParameters.new_meta.unwrapOr(null)
+  const newMetadataBytes = channelUpdateParameters.new_meta.unwrapOr(null)
 
   //  update metadata if it was changed
-  if (newMetadata) {
-    const protobufContent = await readProtobufWithAssets(new Channel(), {
-      metadata: newMetadata,
-      db,
-      event,
-      assets: channelUpdateParameters.assets.unwrapOr([]),
-      contentOwner: convertContentActorToDataObjectOwner(contentActor, channelId.toNumber()),
-    })
-
-    // update all fields read from protobuf
-    for (const [key, value] of Object.entries(protobufContent)) {
-      channel[key] = value
-    }
+  if (newMetadataBytes) {
+    const newMetadata = deserializeMetadata(ChannelMetadata, newMetadataBytes) || {}
+    await processChannelMetadata(ctx, channel, newMetadata, channelUpdateParameters.assets.unwrapOr([]))
   }
 
   // prepare changed reward account
@@ -105,42 +76,39 @@ export async function content_ChannelUpdated(db: DatabaseManager, event: Substra
   }
 
   // set last update time
-  channel.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
+  channel.updatedAt = new Date(event.blockTimestamp)
 
   // save channel
-  await db.save<Channel>(channel)
+  await store.save<Channel>(channel)
 
   // emit log event
   logger.info('Channel has been updated', { id: channel.id })
 }
 
-export async function content_ChannelAssetsRemoved(db: DatabaseManager, event: SubstrateEvent) {
-  // read event data
-  const { contentId: contentIds } = new Content.ChannelAssetsRemovedEvent(event).data
-
-  // load channel
-  const assets = await db.getMany(DataObject, {
-    where: {
-      id: In(contentIds.toArray().map((item) => item.toString())),
-    } as FindConditions<DataObject>,
-  })
-
-  // delete assets
-  for (const asset of assets) {
-    await db.remove<DataObject>(asset)
-  }
-
-  // emit log event
-  logger.info('Channel assets have been removed', { ids: contentIds })
+export async function content_ChannelAssetsRemoved({ store, event }: EventContext & StoreContext): Promise<void> {
+  // TODO: Storage v2 integration
+  // // read event data
+  // const [, , contentIds] = new Content.ChannelAssetsRemovedEvent(event).params
+  // const assets = await store.getMany(StorageDataObject, {
+  //   where: {
+  //     id: In(contentIds.toArray().map((item) => item.toString())),
+  //   },
+  // })
+  // // delete assets
+  // await Promise.all(assets.map((a) => store.remove<StorageDataObject>(a)))
+  // // emit log event
+  // logger.info('Channel assets have been removed', { ids: contentIds })
 }
 
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_ChannelCensorshipStatusUpdated(db: DatabaseManager, event: SubstrateEvent) {
+export async function content_ChannelCensorshipStatusUpdated({
+  store,
+  event,
+}: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { channelId, isCensored } = new Content.ChannelCensorshipStatusUpdatedEvent(event).data
+  const [, channelId, isCensored] = new Content.ChannelCensorshipStatusUpdatedEvent(event).params
 
   // load event
-  const channel = await db.get(Channel, { where: { id: channelId.toString() } as FindConditions<Channel> })
+  const channel = await store.get(Channel, { where: { id: channelId.toString() } })
 
   // ensure channel exists
   if (!channel) {
@@ -151,28 +119,23 @@ export async function content_ChannelCensorshipStatusUpdated(db: DatabaseManager
   channel.isCensored = isCensored.isTrue
 
   // set last update time
-  channel.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
+  channel.updatedAt = new Date(event.blockTimestamp)
 
   // save channel
-  await db.save<Channel>(channel)
+  await store.save<Channel>(channel)
 
   // emit log event
   logger.info('Channel censorship status has been updated', { id: channelId, isCensored: isCensored.isTrue })
 }
 
-/// ///////////////// ChannelCategory ////////////////////////////////////////////
+/// //////////////// ChannelCategory ////////////////////////////////////////////
 
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_ChannelCategoryCreated(db: DatabaseManager, event: SubstrateEvent) {
+export async function content_ChannelCategoryCreated({ store, event }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { channelCategoryCreationParameters, channelCategoryId } = new Content.ChannelCategoryCreatedEvent(event).data
+  const [channelCategoryId, , channelCategoryCreationParameters] = new Content.ChannelCategoryCreatedEvent(event).params
 
   // read metadata
-  const protobufContent = await readProtobuf(new ChannelCategory(), {
-    metadata: channelCategoryCreationParameters.meta,
-    db,
-    event,
-  })
+  const metadata = deserializeMetadata(ChannelCategoryMetadata, channelCategoryCreationParameters.meta) || {}
 
   // create new channel category
   const channelCategory = new ChannelCategory({
@@ -182,30 +145,27 @@ export async function content_ChannelCategoryCreated(db: DatabaseManager, event:
     createdInBlock: event.blockNumber,
 
     // fill in auto-generated fields
-    createdAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-    updatedAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-
-    // integrate metadata
-    ...protobufContent,
+    createdAt: new Date(event.blockTimestamp),
+    updatedAt: new Date(event.blockTimestamp),
   })
+  integrateMeta(channelCategory, metadata, ['name'])
 
   // save channel
-  await db.save<ChannelCategory>(channelCategory)
+  await store.save<ChannelCategory>(channelCategory)
 
   // emit log event
   logger.info('Channel category has been created', { id: channelCategory.id })
 }
 
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_ChannelCategoryUpdated(db: DatabaseManager, event: SubstrateEvent) {
+export async function content_ChannelCategoryUpdated({ store, event }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { channelCategoryId, channelCategoryUpdateParameters } = new Content.ChannelCategoryUpdatedEvent(event).data
+  const [, channelCategoryId, channelCategoryUpdateParameters] = new Content.ChannelCategoryUpdatedEvent(event).params
 
   // load channel category
-  const channelCategory = await db.get(ChannelCategory, {
+  const channelCategory = await store.get(ChannelCategory, {
     where: {
       id: channelCategoryId.toString(),
-    } as FindConditions<ChannelCategory>,
+    },
   })
 
   // ensure channel exists
@@ -214,37 +174,28 @@ export async function content_ChannelCategoryUpdated(db: DatabaseManager, event:
   }
 
   // read metadata
-  const protobufContent = await readProtobuf(new ChannelCategory(), {
-    metadata: channelCategoryUpdateParameters.new_meta,
-    db,
-    event,
-  })
-
-  // update all fields read from protobuf
-  for (const [key, value] of Object.entries(protobufContent)) {
-    channelCategory[key] = value
-  }
+  const newMeta = deserializeMetadata(ChannelCategoryMetadata, channelCategoryUpdateParameters.new_meta) || {}
+  integrateMeta(channelCategory, newMeta, ['name'])
 
   // set last update time
-  channelCategory.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
+  channelCategory.updatedAt = new Date(event.blockTimestamp)
 
   // save channel category
-  await db.save<ChannelCategory>(channelCategory)
+  await store.save<ChannelCategory>(channelCategory)
 
   // emit log event
   logger.info('Channel category has been updated', { id: channelCategory.id })
 }
 
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_ChannelCategoryDeleted(db: DatabaseManager, event: SubstrateEvent) {
+export async function content_ChannelCategoryDeleted({ store, event }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { channelCategoryId } = new Content.ChannelCategoryDeletedEvent(event).data
+  const [, channelCategoryId] = new Content.ChannelCategoryDeletedEvent(event).params
 
   // load channel category
-  const channelCategory = await db.get(ChannelCategory, {
+  const channelCategory = await store.get(ChannelCategory, {
     where: {
       id: channelCategoryId.toString(),
-    } as FindConditions<ChannelCategory>,
+    },
   })
 
   // ensure channel category exists
@@ -253,13 +204,13 @@ export async function content_ChannelCategoryDeleted(db: DatabaseManager, event:
   }
 
   // delete channel category
-  await db.remove<ChannelCategory>(channelCategory)
+  await store.remove<ChannelCategory>(channelCategory)
 
   // emit log event
   logger.info('Channel category has been deleted', { id: channelCategory.id })
 }
 
-/// ///////////////// Helpers ////////////////////////////////////////////////////
+/// //////////////// Helpers ////////////////////////////////////////////////////
 
 function handleChannelRewardAccountChange(
   channel: Channel, // will be modified inside of the function!
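
`integrateMeta` (imported above from `@joystream/metadata-protobuf/utils`) copies a whitelisted set of decoded fields onto an entity, as in `integrateMeta(channelCategory, metadata, ['name'])`. A rough sketch of the assumed behaviour, not the library's actual implementation:

// Assumption: only the listed properties are copied, and only when they are set in the decoded metadata
function integrateMetaSketch(entity: Record<string, unknown>, meta: Record<string, unknown>, props: string[]): void {
  for (const prop of props) {
    const value = meta[prop]
    if (value !== null && value !== undefined) {
      entity[prop] = value
    }
  }
}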

+ 26 - 27
query-node/mappings/sumer/content/curatorGroup.ts

@@ -1,16 +1,15 @@
-import { fixBlockTimestamp } from '../eventFix'
-import { SubstrateEvent } from '@dzlzv/hydra-common'
-import { DatabaseManager } from '@dzlzv/hydra-db-utils'
+/*
+eslint-disable @typescript-eslint/naming-convention
+*/
+import { EventContext, StoreContext } from '@joystream/hydra-common'
 import { FindConditions } from 'typeorm'
+import { CuratorGroup } from 'query-node/dist/model'
+import { Content } from '../../generated/types'
+import { inconsistentState, logger } from '../../common'
 
-import { CuratorGroup } from 'query-node'
-import { Content } from '../../../generated/types'
-
-import { inconsistentState, logger } from '../common'
-
-export async function content_CuratorGroupCreated(db: DatabaseManager, event: SubstrateEvent) {
+export async function content_CuratorGroupCreated({ store, event }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { curatorGroupId } = new Content.CuratorGroupCreatedEvent(event).data
+  const [curatorGroupId] = new Content.CuratorGroupCreatedEvent(event).params
 
   // create new curator group
   const curatorGroup = new CuratorGroup({
@@ -20,23 +19,23 @@ export async function content_CuratorGroupCreated(db: DatabaseManager, event: Su
     isActive: false, // runtime creates inactive curator groups by default
 
     // fill in auto-generated fields
-    createdAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-    updatedAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
+    createdAt: new Date(event.blockTimestamp),
+    updatedAt: new Date(event.blockTimestamp),
   })
 
   // save curator group
-  await db.save<CuratorGroup>(curatorGroup)
+  await store.save<CuratorGroup>(curatorGroup)
 
   // emit log event
   logger.info('Curator group has been created', { id: curatorGroupId })
 }
 
-export async function content_CuratorGroupStatusSet(db: DatabaseManager, event: SubstrateEvent) {
+export async function content_CuratorGroupStatusSet({ store, event }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { curatorGroupId, bool: isActive } = new Content.CuratorGroupStatusSetEvent(event).data
+  const [curatorGroupId, isActive] = new Content.CuratorGroupStatusSetEvent(event).params
 
   // load curator group
-  const curatorGroup = await db.get(CuratorGroup, {
+  const curatorGroup = await store.get(CuratorGroup, {
     where: { id: curatorGroupId.toString() } as FindConditions<CuratorGroup>,
   })
 
@@ -49,21 +48,21 @@ export async function content_CuratorGroupStatusSet(db: DatabaseManager, event:
   curatorGroup.isActive = isActive.isTrue
 
   // set last update time
-  curatorGroup.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
+  curatorGroup.updatedAt = new Date(event.blockTimestamp)
 
   // save curator group
-  await db.save<CuratorGroup>(curatorGroup)
+  await store.save<CuratorGroup>(curatorGroup)
 
   // emit log event
   logger.info('Curator group status has been set', { id: curatorGroupId, isActive })
 }
 
-export async function content_CuratorAdded(db: DatabaseManager, event: SubstrateEvent) {
+export async function content_CuratorAdded({ store, event }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { curatorGroupId, curatorId } = new Content.CuratorAddedEvent(event).data
+  const [curatorGroupId, curatorId] = new Content.CuratorAddedEvent(event).params
 
   // load curator group
-  const curatorGroup = await db.get(CuratorGroup, {
+  const curatorGroup = await store.get(CuratorGroup, {
     where: { id: curatorGroupId.toString() } as FindConditions<CuratorGroup>,
   })
 
@@ -76,21 +75,21 @@ export async function content_CuratorAdded(db: DatabaseManager, event: Substrate
   curatorGroup.curatorIds.push(curatorId.toNumber())
 
   // set last update time
-  curatorGroup.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
+  curatorGroup.updatedAt = new Date(event.blockTimestamp)
 
   // save curator group
-  await db.save<CuratorGroup>(curatorGroup)
+  await store.save<CuratorGroup>(curatorGroup)
 
   // emit log event
   logger.info('Curator has been added to curator group', { id: curatorGroupId, curatorId })
 }
 
-export async function content_CuratorRemoved(db: DatabaseManager, event: SubstrateEvent) {
+export async function content_CuratorRemoved({ store, event }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { curatorGroupId, curatorId } = new Content.CuratorAddedEvent(event).data
+  const [curatorGroupId, curatorId] = new Content.CuratorAddedEvent(event).params
 
   // load curator group
-  const curatorGroup = await db.get(CuratorGroup, {
+  const curatorGroup = await store.get(CuratorGroup, {
     where: { id: curatorGroupId.toString() } as FindConditions<CuratorGroup>,
   })
 
@@ -110,7 +109,7 @@ export async function content_CuratorRemoved(db: DatabaseManager, event: Substra
   curatorGroup.curatorIds.splice(curatorIndex, 1)
 
   // save curator group
-  await db.save<CuratorGroup>(curatorGroup)
+  await store.save<CuratorGroup>(curatorGroup)
 
   // emit log event
   logger.info('Curator has been removed from curator group', { id: curatorGroupId, curatorId })

+ 299 - 606
query-node/mappings/sumer/content/utils.ts

@@ -1,352 +1,212 @@
-// TODO: finish db cascade on save/remove; right now there is manually added `cascade: ["insert", "update"]` directive
-//       to all relations in `query-node/generated/graphql-server/src/modules/**/*.model.ts`. That should ensure all records
-//       are saved on one `db.save(...)` call. Missing features
-//       - find a proper way to cascade on remove or implement custom removals for every entity
-//       - convert manual changes done to `*model.ts` file into some patch or bash commands that can be executed
-//         every time query node codegen is run (that will overwrite said manual changes)
-//       - verify in integration tests that the records are trully created/updated/removed as expected
-
-import { SubstrateEvent } from '@dzlzv/hydra-common'
-import { DatabaseManager } from '@dzlzv/hydra-db-utils'
-import { Bytes } from '@polkadot/types'
-import ISO6391 from 'iso-639-1'
-import { u64 } from '@polkadot/types/primitive'
+import { DatabaseManager, EventContext, StoreContext } from '@joystream/hydra-common'
 import { FindConditions } from 'typeorm'
-import * as jspb from 'google-protobuf'
-import { fixBlockTimestamp } from '../eventFix'
-
-// protobuf definitions
 import {
-  ChannelMetadata,
-  ChannelCategoryMetadata,
-  PublishedBeforeJoystream as PublishedBeforeJoystreamMetadata,
-  License as LicenseMetadata,
-  MediaType as MediaTypeMetadata,
-  VideoMetadata,
-  VideoCategoryMetadata,
+  IVideoMetadata,
+  IPublishedBeforeJoystream,
+  ILicense,
+  IMediaType,
+  IChannelMetadata,
 } from '@joystream/metadata-protobuf'
-
-import { Content } from '../../../generated/types'
-
-import { invalidMetadata, inconsistentState, logger, prepareDataObject, getNextId } from '../common'
-
+import { integrateMeta, isSet, isValidLanguageCode } from '@joystream/metadata-protobuf/utils'
+import { invalidMetadata, inconsistentState, logger } from '../../common'
 import {
   // primary entities
   CuratorGroup,
   Channel,
-  ChannelCategory,
   Video,
   VideoCategory,
-
   // secondary entities
   Language,
   License,
-  VideoMediaEncoding,
   VideoMediaMetadata,
-
   // asset
-  DataObjectOwner,
-  DataObjectOwnerMember,
-  DataObjectOwnerChannel,
-  DataObject,
-  LiaisonJudgement,
-  AssetAvailability,
+  Asset,
   Membership,
-} from 'query-node'
-
+  VideoMediaEncoding,
+  ChannelCategory,
+  AssetNone,
+} from 'query-node/dist/model'
 // Joystream types
-import { ChannelId, ContentParameters, NewAsset, ContentActor } from '@joystream/types/augment'
+import { NewAsset, ContentActor } from '@joystream/types/augment'
+import { DecodedMetadataObject } from '@joystream/metadata-protobuf/types'
+import BN from 'bn.js'
+
+export async function processChannelMetadata(
+  ctx: EventContext & StoreContext,
+  channel: Channel,
+  meta: DecodedMetadataObject<IChannelMetadata>,
+  assets: NewAsset[]
+): Promise<Channel> {
+  // TODO: Assets processing (Storage v2)
+  // const assetsOwner = new DataObjectOwnerChannel()
+  // assetsOwner.channelId = channel.id
 
-import { ContentParameters as Custom_ContentParameters } from '@joystream/types/storage'
-import { registry } from '@joystream/types'
+  // const processedAssets = await Promise.all(assets.map((asset) => processNewAsset(ctx, asset, assetsOwner)))
 
-/*
-  Asset either stored in storage or describing list of URLs.
-*/
-type AssetStorageOrUrls = DataObject | string[]
+  integrateMeta(channel, meta, ['title', 'description', 'isPublic'])
 
-/*
-  Type guard differentiating asset stored in storage from asset describing a list of URLs.
-*/
-function isAssetInStorage(dataObject: AssetStorageOrUrls): dataObject is DataObject {
-  if (Array.isArray(dataObject)) {
-    return false
+  // prepare channel category if needed
+  if (isSet(meta.category)) {
+    channel.category = await processChannelCategory(ctx, channel.category, parseInt(meta.category))
   }
 
-  return true
-}
+  channel.coverPhoto = new AssetNone()
+  channel.avatarPhoto = new AssetNone()
+  // // prepare cover photo asset if needed
+  // if (isSet(meta.coverPhoto)) {
+  //   const asset = findAssetByIndex(processedAssets, meta.coverPhoto, 'channel cover photo')
+  //   if (asset) {
+  //     channel.coverPhoto = asset
+  //   }
+  // }
 
-export interface IReadProtobufArguments {
-  metadata: Bytes
-  db: DatabaseManager
-  event: SubstrateEvent
-}
+  // // prepare avatar photo asset if needed
+  // if (isSet(meta.avatarPhoto)) {
+  //   const asset = findAssetByIndex(processedAssets, meta.avatarPhoto, 'channel avatar photo')
+  //   if (asset) {
+  //     channel.avatarPhoto = asset
+  //   }
+  // }
 
-export interface IReadProtobufArgumentsWithAssets extends IReadProtobufArguments {
-  assets: NewAsset[] // assets provided in event
-  contentOwner: typeof DataObjectOwner
-}
-
-/*
-  This class represents one of 3 possible states when changing property read from metadata.
-  NoChange - don't change anything (used when invalid metadata are encountered)
-  Unset - unset the value (used when the unset is requested in runtime)
-  Change - set the new value
-*/
-export class PropertyChange<T> {
-  static newUnset<T>(): PropertyChange<T> {
-    return new PropertyChange<T>('unset')
+  // prepare language if needed
+  if (isSet(meta.language)) {
+    channel.language = await processLanguage(ctx, channel.language, meta.language)
   }
 
-  static newNoChange<T>(): PropertyChange<T> {
-    return new PropertyChange<T>('nochange')
-  }
+  return channel
+}
 
-  static newChange<T>(value: T): PropertyChange<T> {
-    return new PropertyChange<T>('change', value)
-  }
+export async function processVideoMetadata(
+  ctx: EventContext & StoreContext,
+  channel: Channel,
+  video: Video,
+  meta: DecodedMetadataObject<IVideoMetadata>,
+  assets: NewAsset[]
+): Promise<Video> {
+  // TODO: Assets processing (Storage v2)
+  // const assetsOwner = new DataObjectOwnerChannel()
+  // assetsOwner.channelId = channel.id
 
-  /*
-    Determines property change from the given object property.
-  */
-  static fromObjectProperty<T, Key extends string, ChangedObject extends { [key in Key]?: T }>(
-    object: ChangedObject,
-    key: Key
-  ): PropertyChange<T> {
-    if (!(key in object)) {
-      return PropertyChange.newNoChange<T>()
-    }
+  // const processedAssets = await Promise.all(assets.map((asset) => processNewAsset(ctx, asset, assetsOwner)))
 
-    if (object[key] === undefined) {
-      return PropertyChange.newUnset<T>()
-    }
+  integrateMeta(video, meta, ['title', 'description', 'duration', 'hasMarketing', 'isExplicit', 'isPublic'])
 
-    return PropertyChange.newChange<T>(object[key] as T)
+  // prepare video category if needed
+  if (meta.category) {
+    video.category = await processVideoCategory(ctx, video.category, parseInt(meta.category))
   }
 
-  private type: string
-  private value?: T
-
-  private constructor(type: 'change' | 'nochange' | 'unset', value?: T) {
-    this.type = type
-    this.value = value
+  // prepare media meta information if needed
+  if (isSet(meta.mediaType) || isSet(meta.mediaPixelWidth) || isSet(meta.mediaPixelHeight)) {
+    // prepare video file size if possible
+    const videoSize = 0 // TODO: extractVideoSize(assets, meta.video)
+    video.mediaMetadata = await processVideoMediaMetadata(ctx, video.mediaMetadata, meta, videoSize)
   }
 
-  public isUnset(): boolean {
-    return this.type === 'unset'
+  // prepare license if needed
+  if (isSet(meta.license)) {
+    await updateVideoLicense(ctx, video, meta.license)
   }
 
-  public isNoChange(): boolean {
-    return this.type === 'nochange'
-  }
+  video.thumbnailPhoto = new AssetNone()
+  video.media = new AssetNone()
+  // // prepare thumbnail photo asset if needed
+  // if (isSet(meta.thumbnailPhoto)) {
+  //   const asset = findAssetByIndex(processedAssets, meta.thumbnailPhoto, 'thumbnail photo')
+  //   if (asset) {
+  //     video.thumbnailPhoto = asset
+  //   }
+  // }
 
-  public isValue(): boolean {
-    return this.type === 'change'
-  }
+  // // prepare video asset if needed
+  // if (isSet(meta.video)) {
+  //   const asset = findAssetByIndex(processedAssets, meta.video, 'video')
+  //   if (asset) {
+  //     video.media = asset
+  //   }
+  // }
 
-  public getValue(): T | undefined {
-    return this.type === 'change' ? this.value : undefined
+  // prepare language if needed
+  if (isSet(meta.language)) {
+    video.language = await processLanguage(ctx, video.language, meta.language)
   }
 
-  /*
-    Integrates the value into the given dictionary.
-  */
-  public integrateInto(object: Object, key: string): void {
-    if (this.isNoChange()) {
-      return
-    }
-
-    if (this.isUnset()) {
-      delete object[key]
-      return
-    }
-
-    object[key] = this.value
+  if (isSet(meta.publishedBeforeJoystream)) {
+    video.publishedBeforeJoystream = processPublishedBeforeJoystream(
+      ctx,
+      video.publishedBeforeJoystream,
+      meta.publishedBeforeJoystream
+    )
   }
-}
 
-export interface RawVideoMetadata {
-  encoding: {
-    codecName: PropertyChange<string>
-    container: PropertyChange<string>
-    mimeMediaType: PropertyChange<string>
-  }
-  pixelWidth: PropertyChange<number>
-  pixelHeight: PropertyChange<number>
-  size: PropertyChange<number>
+  return video
 }
 
-/*
-  Reads information from the event and protobuf metadata and constructs changeset that's fit to be used when saving to db.
-*/
-export async function readProtobuf<T extends ChannelCategory | VideoCategory>(
-  type: T,
-  parameters: IReadProtobufArguments
-): Promise<Partial<T>> {
-  // true option here is crucial, it indicates that we want just the underlying bytes (by default it will also include bytes encoding the length)
-  const metaU8a = parameters.metadata.toU8a(true)
-
-  // process channel category
-  if (type instanceof ChannelCategory) {
-    const meta = ChannelCategoryMetadata.deserializeBinary(metaU8a)
-    const result = convertMetadataToObject<ChannelCategoryMetadata.AsObject>(meta) as Partial<T>
-
-    return result
-  }
-
-  // process video category
-  if (type instanceof VideoCategory) {
-    const meta = VideoCategoryMetadata.deserializeBinary(metaU8a)
-    const result = convertMetadataToObject<VideoCategoryMetadata.AsObject>(meta) as Partial<T>
+function findAssetByIndex(assets: typeof Asset[], index: number, name?: string): typeof Asset | null {
+  if (assets[index]) {
+    return assets[index]
+  } else {
+    invalidMetadata(`Invalid${name ? ' ' + name : ''} asset index`, {
+      numberOfAssets: assets.length,
+      requestedAssetIndex: index,
+    })
 
-    return result
+    return null
   }
-
-  // this should never happen
-  logger.error('Not implemented metadata type', { type })
-  throw new Error(`Not implemented metadata type`)
 }
 
-/*
-  Reads information from the event and protobuf metadata and constructs changeset that's fit to be used when saving to db.
-  In addition it handles any assets associated with the metadata.
-*/
-
-export async function readProtobufWithAssets<T extends Channel | Video>(
-  type: T,
-  parameters: IReadProtobufArgumentsWithAssets
-): Promise<Partial<T>> {
-  // true option here is crucial, it indicates that we want just the underlying bytes (by default it will also include bytes encoding the length)
-  const metaU8a = parameters.metadata.toU8a(true)
-
-  // process channel
-  if (type instanceof Channel) {
-    const meta = ChannelMetadata.deserializeBinary(metaU8a)
-    const metaAsObject = convertMetadataToObject<ChannelMetadata.AsObject>(meta)
-    const result = (metaAsObject as any) as Partial<Channel>
-
-    // prepare cover photo asset if needed
-    if ('coverPhoto' in metaAsObject) {
-      const asset = await extractAsset({
-        assetIndex: metaAsObject.coverPhoto,
-        assets: parameters.assets,
-        db: parameters.db,
-        event: parameters.event,
-        contentOwner: parameters.contentOwner,
-      })
-      integrateAsset('coverPhoto', result, asset) // changes `result` inline!
-      delete metaAsObject.coverPhoto
-    }
-
-    // prepare avatar photo asset if needed
-    if ('avatarPhoto' in metaAsObject) {
-      const asset = await extractAsset({
-        assetIndex: metaAsObject.avatarPhoto,
-        assets: parameters.assets,
-        db: parameters.db,
-        event: parameters.event,
-        contentOwner: parameters.contentOwner,
-      })
-      integrateAsset('avatarPhoto', result, asset) // changes `result` inline!
-      delete metaAsObject.avatarPhoto
-    }
-
-    // prepare language if needed
-    if ('language' in metaAsObject) {
-      const language = await prepareLanguage(metaAsObject.language, parameters.db, parameters.event)
-      delete metaAsObject.language // make sure temporary value will not interfere
-      language.integrateInto(result, 'language')
-    }
-
-    return result as Partial<T>
-  }
-
-  // process video
-  if (type instanceof Video) {
-    const meta = VideoMetadata.deserializeBinary(metaU8a)
-    const metaAsObject = convertMetadataToObject<VideoMetadata.AsObject>(meta)
-    const result = (metaAsObject as any) as Partial<Video>
-
-    // prepare video category if needed
-    if ('category' in metaAsObject) {
-      const category = await prepareVideoCategory(metaAsObject.category, parameters.db)
-      delete metaAsObject.category // make sure temporary value will not interfere
-      category.integrateInto(result, 'category')
-    }
-
-    // prepare media meta information if needed
-    if ('mediaType' in metaAsObject || 'mediaPixelWidth' in metaAsObject || 'mediaPixelHeight' in metaAsObject) {
-      // prepare video file size if poosible
-      const videoSize = extractVideoSize(parameters.assets, metaAsObject.video)
-
-      // NOTE: type hack - `RawVideoMetadata` is inserted instead of VideoMediaMetadata - it should be edited in `video.ts`
-      //       see `integrateVideoMetadata()` in `video.ts` for more info
-      result.mediaMetadata = (prepareVideoMetadata(
-        metaAsObject,
-        videoSize,
-        parameters.event.blockNumber
-      ) as unknown) as VideoMediaMetadata
-
-      // remove extra values
-      delete metaAsObject.mediaType
-      delete metaAsObject.mediaPixelWidth
-      delete metaAsObject.mediaPixelHeight
-    }
-
-    // prepare license if needed
-    if ('license' in metaAsObject) {
-      result.license = await prepareLicense(parameters.db, metaAsObject.license, parameters.event)
-    }
-
-    // prepare thumbnail photo asset if needed
-    if ('thumbnailPhoto' in metaAsObject) {
-      const asset = await extractAsset({
-        assetIndex: metaAsObject.thumbnailPhoto,
-        assets: parameters.assets,
-        db: parameters.db,
-        event: parameters.event,
-        contentOwner: parameters.contentOwner,
-      })
-      integrateAsset('thumbnailPhoto', result, asset) // changes `result` inline!
-      delete metaAsObject.thumbnailPhoto
-    }
-
-    // prepare video asset if needed
-    if ('video' in metaAsObject) {
-      const asset = await extractAsset({
-        assetIndex: metaAsObject.video,
-        assets: parameters.assets,
-        db: parameters.db,
-        event: parameters.event,
-        contentOwner: parameters.contentOwner,
-      })
-      integrateAsset('media', result, asset) // changes `result` inline!
-      delete metaAsObject.video
-    }
+async function processVideoMediaEncoding(
+  { store, event }: StoreContext & EventContext,
+  existingVideoMediaEncoding: VideoMediaEncoding | undefined,
+  metadata: DecodedMetadataObject<IMediaType>
+): Promise<VideoMediaEncoding> {
+  const encoding =
+    existingVideoMediaEncoding ||
+    new VideoMediaEncoding({
+      createdAt: new Date(event.blockTimestamp),
+      createdById: '1',
+      updatedById: '1',
+    })
+  // integrate media encoding-related data
+  integrateMeta(encoding, metadata, ['codecName', 'container', 'mimeMediaType'])
+  encoding.updatedAt = new Date(event.blockTimestamp)
+  await store.save<VideoMediaEncoding>(encoding)
 
-    // prepare language if needed
-    if ('language' in metaAsObject) {
-      const language = await prepareLanguage(metaAsObject.language, parameters.db, parameters.event)
-      delete metaAsObject.language // make sure temporary value will not interfere
-      language.integrateInto(result, 'language')
-    }
+  return encoding
+}
 
-    if (metaAsObject.publishedBeforeJoystream) {
-      const publishedBeforeJoystream = handlePublishedBeforeJoystream(result, metaAsObject.publishedBeforeJoystream)
-      delete metaAsObject.publishedBeforeJoystream // make sure temporary value will not interfere
-      publishedBeforeJoystream.integrateInto(result, 'publishedBeforeJoystream')
-    }
+async function processVideoMediaMetadata(
+  ctx: StoreContext & EventContext,
+  existingVideoMedia: VideoMediaMetadata | undefined,
+  metadata: DecodedMetadataObject<IVideoMetadata>,
+  videoSize: number | undefined
+): Promise<VideoMediaMetadata> {
+  const { store, event } = ctx
+  const videoMedia =
+    existingVideoMedia ||
+    new VideoMediaMetadata({
+      createdInBlock: event.blockNumber,
+      createdAt: new Date(event.blockTimestamp),
+      createdById: '1',
+      updatedById: '1',
+    })
 
-    return result as Partial<T>
+  // integrate media-related data
+  const mediaMetadata = {
+    size: isSet(videoSize) ? new BN(videoSize.toString()) : undefined,
+    pixelWidth: metadata.mediaPixelWidth,
+    pixelHeight: metadata.mediaPixelHeight,
   }
+  integrateMeta(videoMedia, mediaMetadata, ['pixelWidth', 'pixelHeight', 'size'])
+  videoMedia.updatedAt = new Date(event.blockTimestamp)
+  videoMedia.encoding = await processVideoMediaEncoding(ctx, videoMedia.encoding, metadata.mediaType || {})
+  await store.save<VideoMediaMetadata>(videoMedia)
 
-  // this should never happen
-  logger.error('Not implemented metadata type', { type })
-  throw new Error(`Not implemented metadata type`)
+  return videoMedia
 }
 
 export async function convertContentActorToChannelOwner(
-  db: DatabaseManager,
+  store: DatabaseManager,
   contentActor: ContentActor
 ): Promise<{
   ownerMember?: Membership
@@ -354,7 +214,7 @@ export async function convertContentActorToChannelOwner(
 }> {
   if (contentActor.isMember) {
     const memberId = contentActor.asMember.toNumber()
-    const member = await db.get(Membership, { where: { id: memberId.toString() } as FindConditions<Membership> })
+    const member = await store.get(Membership, { where: { id: memberId.toString() } as FindConditions<Membership> })
 
     // ensure member exists
     if (!member) {
@@ -369,7 +229,7 @@ export async function convertContentActorToChannelOwner(
 
   if (contentActor.isCurator) {
     const curatorGroupId = contentActor.asCurator[0].toNumber()
-    const curatorGroup = await db.get(CuratorGroup, {
+    const curatorGroup = await store.get(CuratorGroup, {
       where: { id: curatorGroupId.toString() } as FindConditions<CuratorGroup>,
     })
 
@@ -390,384 +250,217 @@ export async function convertContentActorToChannelOwner(
   throw new Error('Not-implemented ContentActor type used')
 }
 
-export function convertContentActorToDataObjectOwner(
-  contentActor: ContentActor,
-  channelId: number
-): typeof DataObjectOwner {
-  const owner = new DataObjectOwnerChannel()
-  owner.channel = channelId
-
-  return owner
-
-  /* contentActor is irrelevant now -> all video/channel content belongs to the channel
-  if (contentActor.isMember) {
-    const owner = new DataObjectOwnerMember()
-    owner.member = contentActor.asMember.toBn()
-
-    return owner
-  }
-
-  if (contentActor.isLead || contentActor.isCurator) {
-    const owner = new DataObjectOwnerChannel()
-    owner.channel = channelId
-
-    return owner
+function processPublishedBeforeJoystream(
+  ctx: EventContext & StoreContext,
+  currentValue: Date | undefined,
+  metadata: DecodedMetadataObject<IPublishedBeforeJoystream>
+): Date | undefined {
+  if (!isSet(metadata)) {
+    return currentValue
   }
 
-  logger.error('Not implemented ContentActor type', {contentActor: contentActor.toString()})
-  throw new Error('Not-implemented ContentActor type used')
-  */
-}
-
-function handlePublishedBeforeJoystream(
-  video: Partial<Video>,
-  metadata: PublishedBeforeJoystreamMetadata.AsObject
-): PropertyChange<Date> {
-  // is publish being unset
-  if ('isPublished' in metadata && !metadata.isPublished) {
-    return PropertyChange.newUnset()
+  // Property is being unset
+  if (!metadata.isPublished) {
+    return undefined
   }
 
   // try to parse timestamp from publish date
-  const timestamp = metadata.date ? Date.parse(metadata.date) : NaN
+  const timestamp = isSet(metadata.date) ? Date.parse(metadata.date) : NaN
 
   // ensure date is valid
   if (isNaN(timestamp)) {
     invalidMetadata(`Invalid date used for publishedBeforeJoystream`, {
       timestamp,
     })
-    return PropertyChange.newNoChange()
+    return currentValue
   }
 
   // set new date
-  return PropertyChange.newChange(new Date(timestamp))
-}
-
-interface IConvertAssetParameters {
-  rawAsset: NewAsset
-  db: DatabaseManager
-  event: SubstrateEvent
-  contentOwner: typeof DataObjectOwner
-}
-
-/*
-  Converts event asset into data object or list of URLs fit to be saved to db.
-*/
-async function convertAsset(parameters: IConvertAssetParameters): Promise<AssetStorageOrUrls> {
-  // is asset describing list of URLs?
-  if (parameters.rawAsset.isUrls) {
-    const urls = parameters.rawAsset.asUrls.toArray().map((item) => item.toString())
-
-    return urls
-  }
-
-  // !parameters.rawAsset.isUrls && parameters.rawAsset.isUpload // asset is in storage
-
-  // prepare data object
-  const contentParameters: ContentParameters = parameters.rawAsset.asUpload
-  const dataObject = await prepareDataObject(
-    parameters.db,
-    contentParameters,
-    parameters.event,
-    parameters.contentOwner
-  )
-
-  return dataObject
-}
-
-interface IExtractAssetParameters {
-  assetIndex: number | undefined
-  assets: NewAsset[]
-  db: DatabaseManager
-  event: SubstrateEvent
-  contentOwner: typeof DataObjectOwner
-}
-
-/*
-  Selects asset from provided set of assets and prepares asset data fit to be saved to db.
-*/
-async function extractAsset(parameters: IExtractAssetParameters): Promise<PropertyChange<AssetStorageOrUrls>> {
-  // is asset being unset?
-  if (parameters.assetIndex === undefined) {
-    return PropertyChange.newUnset()
-  }
-
-  // ensure asset index is valid
-  if (parameters.assetIndex >= parameters.assets.length) {
-    invalidMetadata(`Non-existing asset extraction requested`, {
-      assetsProvided: parameters.assets.length,
-      assetIndex: parameters.assetIndex,
-    })
-    return PropertyChange.newNoChange()
-  }
-
-  // convert asset to data object record
-  const asset = await convertAsset({
-    rawAsset: parameters.assets[parameters.assetIndex],
-    db: parameters.db,
-    event: parameters.event,
-    contentOwner: parameters.contentOwner,
-  })
-
-  return PropertyChange.newChange(asset)
-}
-
-/*
-  As a temporary messure to overcome yet-to-be-implemented features in Hydra, we are using redudant information
-  to describe asset state. This function introduces all redudant data needed to be saved to db.
-
-  Changes `result` argument!
-*/
-function integrateAsset<T>(propertyName: string, result: Object, asset: PropertyChange<AssetStorageOrUrls>): void {
-  // helpers - property names
-  const nameUrl = propertyName + 'Urls'
-  const nameDataObject = propertyName + 'DataObject'
-  const nameAvailability = propertyName + 'Availability'
-
-  if (asset.isNoChange()) {
-    return
-  }
-
-  if (asset.isUnset()) {
-    result[nameUrl] = []
-    result[nameAvailability] = AssetAvailability.INVALID
-    result[nameDataObject] = undefined // plan deletion (will have effect when saved to db)
-
-    return
-  }
-
-  const newValue = asset.getValue() as AssetStorageOrUrls
-
-  // is asset available on external URL(s)
-  if (!isAssetInStorage(newValue)) {
-    // (un)set asset's properties
-    result[nameUrl] = newValue
-    result[nameAvailability] = AssetAvailability.ACCEPTED
-    result[nameDataObject] = undefined // plan deletion (will have effect when saved to db)
-
-    return
-  }
-
-  // asset saved in storage
-
-  // prepare conversion table between liaison judgment and asset availability
-  const conversionTable = {
-    [LiaisonJudgement.ACCEPTED]: AssetAvailability.ACCEPTED,
-    [LiaisonJudgement.PENDING]: AssetAvailability.PENDING,
-  }
-
-  // (un)set asset's properties
-  result[nameUrl] = [] // plan deletion (will have effect when saved to db)
-  result[nameAvailability] = conversionTable[newValue.liaisonJudgement]
-  result[nameDataObject] = newValue
+  return new Date(timestamp)
 }
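`processPublishedBeforeJoystream` keeps the current value when the metadata is missing or the date cannot be parsed, clears the value when `isPublished` is false, and otherwise stores the parsed date. Hypothetical calls illustrating the three outcomes (`ctx` and `current` are assumed placeholders, not values from this diff):

// Hypothetical calls; `ctx` is an EventContext & StoreContext, `current` an existing Date | undefined
processPublishedBeforeJoystream(ctx, current, { isPublished: false }) // -> undefined (value is unset)
processPublishedBeforeJoystream(ctx, current, { isPublished: true, date: 'not-a-date' }) // -> current (invalid metadata, no change)
processPublishedBeforeJoystream(ctx, current, { isPublished: true, date: '2019-01-01' }) // -> new Date(Date.parse('2019-01-01'))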
 
-function extractVideoSize(assets: NewAsset[], assetIndex: number | undefined): number | undefined {
-  // escape if no asset is required
-  if (assetIndex === undefined) {
-    return undefined
-  }
-
-  // ensure asset index is valid
-  if (assetIndex > assets.length) {
-    invalidMetadata(`Non-existing asset video size extraction requested`, { assetsProvided: assets.length, assetIndex })
-    return undefined
-  }
-
-  const rawAsset = assets[assetIndex]
-
-  // escape if asset is describing URLs (can't get size)
-  if (rawAsset.isUrls) {
-    return undefined
+// TODO: Assets processing (Storage v2)
+// async function processNewAsset(
+//   ctx: EventContext & StoreContext,
+//   asset: NewAsset,
+//   owner: typeof DataObjectOwner
+// ): Promise<typeof Asset> {
+//   if (asset.isUrls) {
+//     const urls = asset.asUrls.toArray().map((url) => url.toString())
+//     const resultAsset = new AssetExternal()
+//     resultAsset.urls = JSON.stringify(urls)
+//     return resultAsset
+//   } else if (asset.isUpload) {
+//     const contentParameters: ContentParameters = asset.asUpload
+//     const dataObject = await createDataObject(ctx, contentParameters, owner)
+
+//     const resultAsset = new AssetJoystreamStorage()
+//     resultAsset.dataObjectId = dataObject.id
+//     return resultAsset
+//   } else {
+//     unexpectedData('Unrecognized asset type', asset.type)
+//   }
+// }
+
+// function extractVideoSize(assets: NewAsset[], assetIndex: number | null | undefined): number | undefined {
+//   // escape if no asset is required
+//   if (!isSet(assetIndex)) {
+//     return undefined
+//   }
+
+//   // ensure asset index is valid
+//   if (assetIndex > assets.length) {
+//     invalidMetadata(`Non-existing asset video size extraction requested`, { assetsProvided: assets.length, assetIndex })
+//     return undefined
+//   }
+
+//   const rawAsset = assets[assetIndex]
+
+//   // escape if asset is describing URLs (can't get size)
+//   if (rawAsset.isUrls) {
+//     return undefined
+//   }
+
+//   // !rawAsset.isUrls && rawAsset.isUpload // asset is in storage
+
+//   // convert generic content parameters coming from processor to custom Joystream data type
+//   const customContentParameters = new Custom_ContentParameters(registry, rawAsset.asUpload.toJSON() as any)
+//   // extract video size
+//   const videoSize = customContentParameters.size_in_bytes.toNumber()
+
+//   return videoSize
+// }
+
+async function processLanguage(
+  ctx: EventContext & StoreContext,
+  currentLanguage: Language | undefined,
+  languageIso: string | undefined
+): Promise<Language | undefined> {
+  const { event, store } = ctx
+
+  if (!isSet(languageIso)) {
+    return currentLanguage
   }
 
-  // !rawAsset.isUrls && rawAsset.isUpload // asset is in storage
-
-  // convert generic content parameters coming from processor to custom Joystream data type
-  const customContentParameters = new Custom_ContentParameters(registry, rawAsset.asUpload.toJSON() as any)
-  // extract video size
-  const videoSize = customContentParameters.size_in_bytes.toNumber()
-
-  return videoSize
-}
-
-async function prepareLanguage(
-  languageIso: string | undefined,
-  db: DatabaseManager,
-  event: SubstrateEvent
-): Promise<PropertyChange<Language>> {
-  // is language being unset?
-  if (languageIso === undefined) {
-    return PropertyChange.newUnset()
-  }
-
-  // validate language string
-  const isValidIso = ISO6391.validate(languageIso)
-
   // ensure language string is valid
-  if (!isValidIso) {
+  if (!isValidLanguageCode(languageIso)) {
     invalidMetadata(`Invalid language ISO-639-1 provided`, languageIso)
-    return PropertyChange.newNoChange()
+    return currentLanguage
   }
 
   // load language
-  const language = await db.get(Language, { where: { iso: languageIso } as FindConditions<Language> })
+  const existingLanguage = await store.get(Language, { where: { iso: languageIso } })
 
   // return existing language if any
-  if (language) {
-    return PropertyChange.newChange(language)
+  if (existingLanguage) {
+    return existingLanguage
   }
 
   // create new language
   const newLanguage = new Language({
-    // set id as iso to overcome current graphql filtering limitations (so we can use query `videos(where: {languageId_eq: 'en'})`)
-    // id: await getNextId(db),
-    id: languageIso,
     iso: languageIso,
     createdInBlock: event.blockNumber,
-
-    createdAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-    updatedAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-
+    createdAt: new Date(event.blockTimestamp),
+    updatedAt: new Date(event.blockTimestamp),
     // TODO: remove these lines after Hydra auto-fills the values when cascading save (remove them in all places)
     createdById: '1',
     updatedById: '1',
   })
 
-  await db.save<Language>(newLanguage)
+  await store.save<Language>(newLanguage)
 
-  return PropertyChange.newChange(newLanguage)
+  return newLanguage
 }
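`processLanguage` validates the code with `isValidLanguageCode`, which (per the utils import above and the removed inline call) wraps the `iso-639-1` package's `validate`. A quick illustration of what that validation accepts:

import ISO6391 from 'iso-639-1'

ISO6391.validate('en')  // true  -> an existing Language is reused or a new one is created
ISO6391.validate('eng') // false -> three-letter (ISO 639-2) codes are rejected as invalid metadata
ISO6391.validate('xx')  // false -> unknown codes are rejected as invalid metadata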
 
-async function prepareLicense(
-  db: DatabaseManager,
-  licenseProtobuf: LicenseMetadata.AsObject | undefined,
-  event: SubstrateEvent
-): Promise<License | undefined> {
-  // NOTE: Deletion of any previous license should take place in appropriate event handling function
-  //       and not here even it might appear so.
-
-  // is license being unset?
-  if (licenseProtobuf === undefined) {
-    return undefined
-  }
+async function updateVideoLicense(
+  ctx: StoreContext & EventContext,
+  video: Video,
+  licenseMetadata: ILicense | null | undefined
+): Promise<void> {
+  const { store, event } = ctx
 
-  // license is meant to be deleted
-  if (isLicenseEmpty(licenseProtobuf)) {
-    return new License({})
+  if (!isSet(licenseMetadata)) {
+    return
   }
 
-  // crete new license
-  const license = new License({
-    ...licenseProtobuf,
-    id: await getNextId(db),
+  const previousLicense = video.license
+  let license: License | null = null
 
-    createdAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-    updatedAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
+  if (!isLicenseEmpty(licenseMetadata)) {
+    // license is meant to be created/updated
+    license =
+      previousLicense ||
+      new License({
+        createdAt: new Date(event.blockTimestamp),
+        createdById: '1',
+        updatedById: '1',
+      })
+    license.updatedAt = new Date(event.blockTimestamp)
+    integrateMeta(license, licenseMetadata, ['attribution', 'code', 'customText'])
+    await store.save<License>(license)
+  }
 
-    createdById: '1',
-    updatedById: '1',
-  })
+  // Update license (and potentially remove foreign key reference)
+  // FIXME: Note that we MUST provide "null" here in order to unset a relation,
+  // See: https://github.com/Joystream/hydra/issues/435
+  video.license = license as License | undefined
+  video.updatedAt = new Date(ctx.event.blockTimestamp)
+  await store.save<Video>(video)
 
-  return license
+  // Safely remove previous license if needed
+  if (previousLicense && !license) {
+    await store.remove<License>(previousLicense)
+  }
 }
 
 /*
   Checks if the protobuf contains a license with some fields filled or is an empty object (`{}` or `{someKey: undefined, ...}`).
   Empty object means deletion is requested.
 */
-function isLicenseEmpty(licenseObject: LicenseMetadata.AsObject): boolean {
-  const somePropertySet = Object.entries(licenseObject).reduce((acc, [key, value]) => {
-    return acc || value !== undefined
-  }, false)
+function isLicenseEmpty(licenseObject: ILicense): boolean {
+  const somePropertySet = Object.values(licenseObject).some((v) => isSet(v))
 
   return !somePropertySet
 }
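`isLicenseEmpty` only looks at whether any value in the license object is set, which is what `updateVideoLicense` above uses to decide between updating and removing the license. Hypothetical inputs (field values are made up):

isLicenseEmpty({}) // true  -> an existing license gets removed
isLicenseEmpty({ code: undefined, attribution: undefined }) // true  -> still treated as a removal request
isLicenseEmpty({ code: 1001 }) // false -> license is created or updated via integrateMeta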
 
-function prepareVideoMetadata(
-  videoProtobuf: VideoMetadata.AsObject,
-  videoSize: number | undefined,
-  blockNumber: number
-): RawVideoMetadata {
-  const rawMeta = {
-    encoding: {
-      codecName: PropertyChange.fromObjectProperty<string, 'codecName', MediaTypeMetadata.AsObject>(
-        videoProtobuf.mediaType || {},
-        'codecName'
-      ),
-      container: PropertyChange.fromObjectProperty<string, 'container', MediaTypeMetadata.AsObject>(
-        videoProtobuf.mediaType || {},
-        'container'
-      ),
-      mimeMediaType: PropertyChange.fromObjectProperty<string, 'mimeMediaType', MediaTypeMetadata.AsObject>(
-        videoProtobuf.mediaType || {},
-        'mimeMediaType'
-      ),
-    },
-    pixelWidth: PropertyChange.fromObjectProperty<number, 'mediaPixelWidth', VideoMetadata.AsObject>(
-      videoProtobuf,
-      'mediaPixelWidth'
-    ),
-    pixelHeight: PropertyChange.fromObjectProperty<number, 'mediaPixelHeight', VideoMetadata.AsObject>(
-      videoProtobuf,
-      'mediaPixelHeight'
-    ),
-    size: videoSize === undefined ? PropertyChange.newNoChange() : PropertyChange.newChange(videoSize),
-  } as RawVideoMetadata
-
-  return rawMeta
-}
-
-async function prepareVideoCategory(
-  categoryId: number | undefined,
-  db: DatabaseManager
-): Promise<PropertyChange<VideoCategory>> {
-  // is category being unset?
-  if (categoryId === undefined) {
-    return PropertyChange.newUnset()
-  }
+async function processVideoCategory(
+  ctx: EventContext & StoreContext,
+  currentCategory: VideoCategory | undefined,
+  categoryId: number
+): Promise<VideoCategory | undefined> {
+  const { store } = ctx
 
   // load video category
-  const category = await db.get(VideoCategory, {
-    where: { id: categoryId.toString() } as FindConditions<VideoCategory>,
+  const category = await store.get(VideoCategory, {
+    where: { id: categoryId.toString() },
   })
 
   // ensure video category exists
   if (!category) {
     invalidMetadata('Non-existing video category association with video requested', categoryId)
-    return PropertyChange.newNoChange()
+    return currentCategory
   }
 
-  return PropertyChange.newChange(category)
+  return category
 }
 
-function convertMetadataToObject<T extends Object>(metadata: jspb.Message): T {
-  const metaAsObject = metadata.toObject()
-  const result = {} as T
-
-  for (const key in metaAsObject) {
-    const funcNameBase = key.charAt(0).toUpperCase() + key.slice(1)
-    const hasFuncName = 'has' + funcNameBase
-    const isSet =
-      funcNameBase === 'PersonsList' // there is no `VideoMetadata.hasPersonsList` method from unkown reason -> create exception
-        ? true
-        : metadata[hasFuncName]()
-
-    if (!isSet) {
-      continue
-    }
-
-    const getFuncName = 'get' + funcNameBase
-    const value = metadata[getFuncName]()
+async function processChannelCategory(
+  ctx: EventContext & StoreContext,
+  currentCategory: ChannelCategory | undefined,
+  categoryId: number
+): Promise<ChannelCategory | undefined> {
+  const { store } = ctx
 
-    // TODO: check that recursion trully works
-    if (value instanceof jspb.Message) {
-      result[key] = convertMetadataToObject(value)
-      continue
-    }
+  // load channel category
+  const category = await store.get(ChannelCategory, {
+    where: { id: categoryId.toString() },
+  })
 
-    result[key] = metaAsObject[key]
+  // ensure channel category exists
+  if (!category) {
+    invalidMetadata('Non-existing channel category association with channel requested', categoryId)
+    return currentCategory
   }
 
-  return result
+  return category
 }
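Field updates in this file are delegated to `integrateMeta` from `@joystream/metadata-protobuf/utils`. The real implementation lives in that package; judging only from how it is called above, a rough equivalent would copy the listed keys onto the entity when they are set in the decoded metadata. The sketch below is an approximation under that assumption, not the package's code:

// Rough approximation of the `integrateMeta` helper, inferred from its call sites above.
// The actual implementation is exported by @joystream/metadata-protobuf/utils and may differ.
function integrateMetaSketch<T, M, K extends keyof T & keyof M>(entity: T, meta: M, keys: K[]): void {
  for (const key of keys) {
    const value = meta[key]
    if (value !== null && value !== undefined) {
      // only fields actually present in the decoded metadata overwrite the entity
      entity[key] = value as unknown as T[K]
    }
  }
}

// e.g. integrateMetaSketch(channel, meta, ['title', 'description', 'isPublic'])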

+ 94 - 326
query-node/mappings/sumer/content/video.ts

@@ -1,41 +1,21 @@
-import BN from 'bn.js'
-import { fixBlockTimestamp } from '../eventFix'
-import { SubstrateEvent } from '@dzlzv/hydra-common'
-import { DatabaseManager } from '@dzlzv/hydra-db-utils'
-import { FindConditions, In } from 'typeorm'
-
-import { Content } from '../../../generated/types'
-
-import { inconsistentState, logger, getNextId } from '../common'
-
-import { convertContentActorToDataObjectOwner, readProtobuf, readProtobufWithAssets, RawVideoMetadata } from './utils'
-
-import {
-  AssetAvailability,
-  Channel,
-  License,
-  Video,
-  VideoCategory,
-  VideoMediaEncoding,
-  VideoMediaMetadata,
-} from 'query-node'
-
-// Joystream types
-import { ChannelId } from '@joystream/types/augment'
-
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_VideoCategoryCreated(db: DatabaseManager, event: SubstrateEvent) {
+/*
+eslint-disable @typescript-eslint/naming-convention
+*/
+import { EventContext, StoreContext } from '@joystream/hydra-common'
+import { In } from 'typeorm'
+import { Content } from '../../generated/types'
+import { deserializeMetadata, inconsistentState, logger } from '../../common'
+import { processVideoMetadata } from './utils'
+import { Channel, Video, VideoCategory, AssetNone } from 'query-node/dist/model'
+import { VideoMetadata, VideoCategoryMetadata } from '@joystream/metadata-protobuf'
+import { integrateMeta } from '@joystream/metadata-protobuf/utils'
+
+export async function content_VideoCategoryCreated({ store, event }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { videoCategoryId, videoCategoryCreationParameters, contentActor } = new Content.VideoCategoryCreatedEvent(
-    event
-  ).data
+  const [, videoCategoryId, videoCategoryCreationParameters] = new Content.VideoCategoryCreatedEvent(event).params
 
   // read metadata
-  const protobufContent = await readProtobuf(new VideoCategory(), {
-    metadata: videoCategoryCreationParameters.meta,
-    db,
-    event,
-  })
+  const metadata = (await deserializeMetadata(VideoCategoryMetadata, videoCategoryCreationParameters.meta)) || {}
 
   // create new video category
   const videoCategory = new VideoCategory({
@@ -43,32 +23,26 @@ export async function content_VideoCategoryCreated(db: DatabaseManager, event: S
     id: videoCategoryId.toString(),
     videos: [],
     createdInBlock: event.blockNumber,
-
     // fill in auto-generated fields
-    createdAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-    updatedAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-
-    // integrate metadata
-    ...protobufContent,
+    createdAt: new Date(event.blockTimestamp),
+    updatedAt: new Date(event.blockTimestamp),
   })
+  integrateMeta(videoCategory, metadata, ['name'])
 
   // save video category
-  await db.save<VideoCategory>(videoCategory)
+  await store.save<VideoCategory>(videoCategory)
 
   // emit log event
   logger.info('Video category has been created', { id: videoCategoryId })
 }
 
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_VideoCategoryUpdated(db: DatabaseManager, event: SubstrateEvent) {
+export async function content_VideoCategoryUpdated({ store, event }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { videoCategoryId, videoCategoryUpdateParameters, contentActor } = new Content.VideoCategoryUpdatedEvent(
-    event
-  ).data
+  const [, videoCategoryId, videoCategoryUpdateParameters] = new Content.VideoCategoryUpdatedEvent(event).params
 
   // load video category
-  const videoCategory = await db.get(VideoCategory, {
-    where: { id: videoCategoryId.toString() } as FindConditions<VideoCategory>,
+  const videoCategory = await store.get(VideoCategory, {
+    where: { id: videoCategoryId.toString() },
   })
 
   // ensure video category exists
@@ -77,35 +51,26 @@ export async function content_VideoCategoryUpdated(db: DatabaseManager, event: S
   }
 
   // read metadata
-  const protobufContent = await readProtobuf(new VideoCategory(), {
-    metadata: videoCategoryUpdateParameters.new_meta,
-    db,
-    event,
-  })
-
-  // update all fields read from protobuf
-  for (const [key, value] of Object.entries(protobufContent)) {
-    videoCategory[key] = value
-  }
+  const newMeta = deserializeMetadata(VideoCategoryMetadata, videoCategoryUpdateParameters.new_meta) || {}
+  integrateMeta(videoCategory, newMeta, ['name'])
 
   // set last update time
-  videoCategory.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
+  videoCategory.updatedAt = new Date(event.blockTimestamp)
 
   // save video category
-  await db.save<VideoCategory>(videoCategory)
+  await store.save<VideoCategory>(videoCategory)
 
   // emit log event
   logger.info('Video category has been updated', { id: videoCategoryId })
 }
 
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_VideoCategoryDeleted(db: DatabaseManager, event: SubstrateEvent) {
+export async function content_VideoCategoryDeleted({ store, event }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { videoCategoryId } = new Content.VideoCategoryDeletedEvent(event).data
+  const [, videoCategoryId] = new Content.VideoCategoryDeletedEvent(event).params
 
   // load video category
-  const videoCategory = await db.get(VideoCategory, {
-    where: { id: videoCategoryId.toString() } as FindConditions<VideoCategory>,
+  const videoCategory = await store.get(VideoCategory, {
+    where: { id: videoCategoryId.toString() },
   })
 
   // ensure video category exists
@@ -114,83 +79,57 @@ export async function content_VideoCategoryDeleted(db: DatabaseManager, event: S
   }
 
   // remove video category
-  await db.remove<VideoCategory>(videoCategory)
+  await store.remove<VideoCategory>(videoCategory)
 
   // emit log event
   logger.info('Video category has been deleted', { id: videoCategoryId })
 }
 
-/// ///////////////// Video //////////////////////////////////////////////////////
+/// //////////////// Video //////////////////////////////////////////////////////
 
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_VideoCreated(db: DatabaseManager, event: SubstrateEvent) {
+export async function content_VideoCreated(ctx: EventContext & StoreContext): Promise<void> {
+  const { store, event } = ctx
   // read event data
-  const { channelId, videoId, videoCreationParameters, contentActor } = new Content.VideoCreatedEvent(event).data
-
-  // read metadata
-  const protobufContent = await readProtobufWithAssets(new Video(), {
-    metadata: videoCreationParameters.meta,
-    db,
-    event,
-    assets: videoCreationParameters.assets,
-    contentOwner: convertContentActorToDataObjectOwner(contentActor, channelId.toNumber()),
-  })
+  const [, channelId, videoId, videoCreationParameters] = new Content.VideoCreatedEvent(event).params
 
   // load channel
-  const channel = await db.get(Channel, { where: { id: channelId.toString() } as FindConditions<Channel> })
+  const channel = await store.get(Channel, { where: { id: channelId.toString() } })
 
   // ensure channel exists
   if (!channel) {
     return inconsistentState('Trying to add video to non-existing channel', channelId)
   }
 
-  // prepare video media metadata (if any)
-  const fixedProtobuf = await integrateVideoMediaMetadata(db, null, protobufContent, event)
-
-  const licenseIsEmpty = fixedProtobuf.license && !Object.keys(fixedProtobuf.license).length
-  if (licenseIsEmpty) {
-    // license deletion was requested - ignore it and consider it empty
-    delete fixedProtobuf.license
-  }
-
-  // create new video
   const video = new Video({
-    // main data
     id: videoId.toString(),
-    isCensored: false,
     channel,
-    createdInBlock: event.blockNumber,
+    isCensored: false,
     isFeatured: false,
-
-    // default values for properties that might or might not be filled by metadata
-    thumbnailPhotoUrls: [],
-    thumbnailPhotoAvailability: AssetAvailability.INVALID,
-    mediaUrls: [],
-    mediaAvailability: AssetAvailability.INVALID,
-
-    // fill in auto-generated fields
-    createdAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-    updatedAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-
-    // integrate metadata
-    ...fixedProtobuf,
+    createdInBlock: event.blockNumber,
+    thumbnailPhoto: new AssetNone(),
+    media: new AssetNone(),
+    createdAt: new Date(event.blockTimestamp),
+    updatedAt: new Date(event.blockTimestamp),
   })
+  // deserialize & process metadata
+  const metadata = deserializeMetadata(VideoMetadata, videoCreationParameters.meta) || {}
+  await processVideoMetadata(ctx, channel, video, metadata, videoCreationParameters.assets)
 
   // save video
-  await db.save<Video>(video)
+  await store.save<Video>(video)
 
   // emit log event
   logger.info('Video has been created', { id: videoId })
 }
 
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_VideoUpdated(db: DatabaseManager, event: SubstrateEvent) {
+export async function content_VideoUpdated(ctx: EventContext & StoreContext): Promise<void> {
+  const { event, store } = ctx
   // read event data
-  const { videoId, videoUpdateParameters, contentActor } = new Content.VideoUpdatedEvent(event).data
+  const [, videoId, videoUpdateParameters] = new Content.VideoUpdatedEvent(event).params
 
   // load video
-  const video = await db.get(Video, {
-    where: { id: videoId.toString() } as FindConditions<Video>,
+  const video = await store.get(Video, {
+    where: { id: videoId.toString() },
     relations: ['channel', 'license'],
   })
 
@@ -200,63 +139,30 @@ export async function content_VideoUpdated(db: DatabaseManager, event: Substrate
   }
 
   // prepare changed metadata
-  const newMetadata = videoUpdateParameters.new_meta.unwrapOr(null)
-
-  // license must be deleted AFTER video is saved - plan a license deletion by assigning it to this variable
-  let licenseToDelete: License | null = null
+  const newMetadataBytes = videoUpdateParameters.new_meta.unwrapOr(null)
 
   // update metadata if it was changed
-  if (newMetadata) {
-    const protobufContent = await readProtobufWithAssets(new Video(), {
-      metadata: newMetadata,
-      db,
-      event,
-      assets: videoUpdateParameters.assets.unwrapOr([]),
-      contentOwner: convertContentActorToDataObjectOwner(contentActor, new BN(video.channel.id).toNumber()),
-    })
-
-    // prepare video media metadata (if any)
-    const fixedProtobuf = await integrateVideoMediaMetadata(db, video, protobufContent, event)
-
-    // remember original license
-    const originalLicense = video.license
-
-    // update all fields read from protobuf
-    for (const [key, value] of Object.entries(fixedProtobuf)) {
-      video[key] = value
-    }
-
-    // license has changed - plan old license delete
-    if (originalLicense && video.license !== originalLicense) {
-      ;[video.license, licenseToDelete] = handleLicenseUpdate(originalLicense, video.license)
-    } else if (!Object.keys(video.license || {}).length) {
-      // license deletion was requested event no license exists?
-      delete video.license // ensure license is empty
-    }
+  if (newMetadataBytes) {
+    const newMetadata = deserializeMetadata(VideoMetadata, newMetadataBytes) || {}
+    await processVideoMetadata(ctx, video.channel, video, newMetadata, videoUpdateParameters.assets.unwrapOr([]))
   }
 
   // set last update time
-  video.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
+  video.updatedAt = new Date(event.blockTimestamp)
 
   // save video
-  await db.save<Video>(video)
-
-  // delete old license if it's planned
-  if (licenseToDelete) {
-    await db.remove<License>(licenseToDelete)
-  }
+  await store.save<Video>(video)
 
   // emit log event
   logger.info('Video has been updated', { id: videoId })
 }
 
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_VideoDeleted(db: DatabaseManager, event: SubstrateEvent) {
+export async function content_VideoDeleted({ store, event }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { videoId } = new Content.VideoDeletedEvent(event).data
+  const [, videoId] = new Content.VideoDeletedEvent(event).params
 
   // load video
-  const video = await db.get(Video, { where: { id: videoId.toString() } as FindConditions<Video> })
+  const video = await store.get(Video, { where: { id: videoId.toString() } })
 
   // ensure video exists
   if (!video) {
@@ -264,19 +170,21 @@ export async function content_VideoDeleted(db: DatabaseManager, event: Substrate
   }
 
   // remove video
-  await db.remove<Video>(video)
+  await store.remove<Video>(video)
 
   // emit log event
   logger.info('Video has been deleted', { id: videoId })
 }
 
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_VideoCensorshipStatusUpdated(db: DatabaseManager, event: SubstrateEvent) {
+export async function content_VideoCensorshipStatusUpdated({
+  store,
+  event,
+}: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { videoId, isCensored } = new Content.VideoCensorshipStatusUpdatedEvent(event).data
+  const [, videoId, isCensored] = new Content.VideoCensorshipStatusUpdatedEvent(event).params
 
   // load video
-  const video = await db.get(Video, { where: { id: videoId.toString() } as FindConditions<Video> })
+  const video = await store.get(Video, { where: { id: videoId.toString() } })
 
   // ensure video exists
   if (!video) {
@@ -287,71 +195,56 @@ export async function content_VideoCensorshipStatusUpdated(db: DatabaseManager,
   video.isCensored = isCensored.isTrue
 
   // set last update time
-  video.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
+  video.updatedAt = new Date(event.blockTimestamp)
 
   // save video
-  await db.save<Video>(video)
+  await store.save<Video>(video)
 
   // emit log event
   logger.info('Video censorship status has been updated', { id: videoId, isCensored: isCensored.isTrue })
 }
 
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_FeaturedVideosSet(db: DatabaseManager, event: SubstrateEvent) {
+export async function content_FeaturedVideosSet({ store, event }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { videoId: videoIds } = new Content.FeaturedVideosSetEvent(event).data
+  const [, videoIds] = new Content.FeaturedVideosSetEvent(event).params
 
   // load old featured videos
-  const existingFeaturedVideos = await db.getMany(Video, { where: { isFeatured: true } as FindConditions<Video> })
+  const existingFeaturedVideos = await store.getMany(Video, { where: { isFeatured: true } })
 
   // comparison utility
   const isSame = (videoIdA: string) => (videoIdB: string) => videoIdA === videoIdB
 
   // calculate diff sets
-  const toRemove = existingFeaturedVideos.filter(
-    (existingFV) => !videoIds.map((item) => item.toString()).some(isSame(existingFV.id))
+  const videosToRemove = existingFeaturedVideos.filter(
+    (existingFV) => !videoIds.map((videoId) => videoId.toString()).some(isSame(existingFV.id))
   )
-  const toAdd = videoIds.filter(
-    (video) => !existingFeaturedVideos.map((item) => item.id).some(isSame(video.toString()))
+  const videoIdsToAdd = videoIds.filter(
+    (videoId) => !existingFeaturedVideos.map((existingFV) => existingFV.id).some(isSame(videoId.toString()))
   )
 
-  // escape if no featured video needs to be added or removed
-  if (!toRemove.length && !toAdd.length) {
-    // emit log event
-    logger.info('Featured videos unchanged')
-
-    return
-  }
-
   // mark previously featured videos as not-featured
   await Promise.all(
-    toRemove.map(async (video) => {
+    videosToRemove.map(async (video) => {
       video.isFeatured = false
-
       // set last update time
-      video.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
+      video.updatedAt = new Date(event.blockTimestamp)
 
-      await db.save<Video>(video)
+      await store.save<Video>(video)
     })
   )
 
-  // escape if no featured video needs to be added
-  if (!toAdd.length) {
-    // emit log event
-    logger.info('Some featured videos have been unset.', { videoIds: toRemove.map((item) => item.id.toString()) })
-
-    return
-  }
-
-  // read videos previously not-featured videos that are meant to be featured
-  const videosToAdd = await db.getMany(Video, {
+  // read previously not-featured videos that are meant to be featured
+  const videosToAdd = await store.getMany(Video, {
     where: {
-      id: In(toAdd.map((item) => item.toString())),
-    } as FindConditions<Video>,
+      id: In(videoIdsToAdd.map((item) => item.toString())),
+    },
   })
 
-  if (videosToAdd.length !== toAdd.length) {
-    return inconsistentState('At least one non-existing video featuring requested', toAdd)
+  if (videosToAdd.length !== videoIdsToAdd.length) {
+    return inconsistentState(
+      'At least one non-existing video featuring requested',
+      videosToAdd.map((v) => v.id)
+    )
   }
 
   // mark previously not-featured videos as featured
@@ -360,139 +253,14 @@ export async function content_FeaturedVideosSet(db: DatabaseManager, event: Subs
       video.isFeatured = true
 
       // set last update time
-      video.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
+      video.updatedAt = new Date(event.blockTimestamp)
 
-      await db.save<Video>(video)
+      await store.save<Video>(video)
     })
   )
 
   // emit log event
-  logger.info('New featured videos have been set', { videoIds })
-}
-
-/// ///////////////// Helpers ////////////////////////////////////////////////////
-
-/*
-  Integrates video metadata-related data into existing data (if any) or creates a new record.
-
-  NOTE: type hack - `RawVideoMetadata` is accepted for `metadata` instead of `Partial<Video>`
-        see `prepareVideoMetadata()` in `utils.ts` for more info
-*/
-async function integrateVideoMediaMetadata(
-  db: DatabaseManager,
-  existingRecord: Video | null,
-  metadata: Partial<Video>,
-  event: SubstrateEvent
-): Promise<Partial<Video>> {
-  if (!metadata.mediaMetadata) {
-    return metadata
-  }
-
-  const now = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
-
-  // fix TS type
-  const rawMediaMetadata = (metadata.mediaMetadata as unknown) as RawVideoMetadata
-
-  // ensure encoding object
-  const encoding =
-    (existingRecord && existingRecord.mediaMetadata && existingRecord.mediaMetadata.encoding) ||
-    new VideoMediaEncoding({
-      createdAt: now,
-      updatedAt: now,
-
-      createdById: '1',
-      updatedById: '1',
-    })
-
-  // integrate media encoding-related data
-  rawMediaMetadata.encoding.codecName.integrateInto(encoding, 'codecName')
-  rawMediaMetadata.encoding.container.integrateInto(encoding, 'container')
-  rawMediaMetadata.encoding.mimeMediaType.integrateInto(encoding, 'mimeMediaType')
-
-  // ensure media metadata object
-  const mediaMetadata =
-    (existingRecord && existingRecord.mediaMetadata) ||
-    new VideoMediaMetadata({
-      createdInBlock: event.blockNumber,
-
-      createdAt: now,
-      updatedAt: now,
-
-      createdById: '1',
-      updatedById: '1',
-    })
-
-  // integrate media-related data
-  rawMediaMetadata.pixelWidth.integrateInto(mediaMetadata, 'pixelWidth')
-  rawMediaMetadata.pixelHeight.integrateInto(mediaMetadata, 'pixelHeight')
-  rawMediaMetadata.size.integrateInto(mediaMetadata, 'size')
-
-  // connect encoding to media metadata object
-  mediaMetadata.encoding = encoding
-
-  // ensure predictable ids
-  if (!mediaMetadata.encoding.id) {
-    mediaMetadata.encoding.id = await getNextId(db)
-  }
-  if (!mediaMetadata.id) {
-    mediaMetadata.id = await getNextId(db)
-  }
-
-  /// ///////////////// update updatedAt if needed ///////////////////////////////
-
-  const encodingNoChange =
-    true &&
-    rawMediaMetadata.encoding.codecName.isNoChange() &&
-    rawMediaMetadata.encoding.container.isNoChange() &&
-    rawMediaMetadata.encoding.mimeMediaType.isNoChange()
-  const mediaMetadataNoChange =
-    encodingNoChange &&
-    rawMediaMetadata.encoding.codecName.isNoChange() &&
-    rawMediaMetadata.encoding.container.isNoChange() &&
-    rawMediaMetadata.encoding.mimeMediaType.isNoChange()
-
-  if (!encodingNoChange) {
-    // encoding changed?
-    mediaMetadata.encoding.updatedAt = now
-  }
-  if (!mediaMetadataNoChange) {
-    // metadata changed?
-    mediaMetadata.updatedAt = now
-  }
-
-  /// ////////////////////////////////////////////////////////////////////////////
-
-  return {
-    ...metadata,
-    mediaMetadata,
-  }
-}
-
-// returns tuple `[newLicenseForVideo, oldLicenseToBeDeleted]`
-function handleLicenseUpdate(originalLicense, newLicense): [License | undefined, License | null] {
-  const isNewEmpty = !Object.keys(newLicense).length
-
-  if (!originalLicense && isNewEmpty) {
-    return [undefined, null]
-  }
-
-  if (!originalLicense) {
-    // && !isNewEmpty
-    return [newLicense, null]
-  }
-
-  if (!isNewEmpty) {
-    // && originalLicense
-    return [
-      new License({
-        ...originalLicense,
-        ...newLicense,
-      }),
-      null,
-    ]
-  }
-
-  // originalLicense && isNewEmpty
-
-  return [originalLicense, null]
+  const newFeaturedVideoIds = videoIds.map((id) => id.toString())
+  const removedFeaturedVideosIds = videosToRemove.map((v) => v.id)
+  logger.info('New featured videos have been set', { newFeaturedVideoIds, removedFeaturedVideosIds })
 }

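The reworked featured-videos handler above logs both the newly featured video ids and the ids of videos that lost the flag. A minimal sketch of how that split can be computed with the Hydra v3 store, assuming a boolean `isFeatured` field on `Video` and a `getMany` lookup on `DatabaseManager` (both the field name and the query shape are assumptions for illustration):

import { DatabaseManager } from '@joystream/hydra-common'
import { Video } from 'query-node/dist/model'

// Sketch: split the currently featured videos into those staying featured and those
// that should lose the flag, given the new list of featured video ids.
async function splitFeaturedVideos(
  store: DatabaseManager,
  newFeaturedVideoIds: string[]
): Promise<{ videosToKeep: Video[]; videosToRemove: Video[] }> {
  // assumed field: Video.isFeatured marks videos currently flagged as featured
  const currentlyFeatured = await store.getMany(Video, { where: { isFeatured: true } })

  const keep = new Set(newFeaturedVideoIds)
  return {
    videosToKeep: currentlyFeatured.filter((v) => keep.has(v.id)),
    videosToRemove: currentlyFeatured.filter((v) => !keep.has(v.id)),
  }
}
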
+ 0 - 6
query-node/mappings/sumer/eventFix.ts

@@ -1,6 +0,0 @@
-import BN from 'bn.js'
-
-// Workaround for https://github.com/Joystream/hydra/issues/326 . This file can be removed after it's fixed
-export function fixBlockTimestamp(blockTimestamp: unknown): BN {
-  return new BN(blockTimestamp as string)
-}

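The removal above is possible because, under Hydra v3, `event.blockTimestamp` can be fed straight into the `Date` constructor, which is exactly what the updated mappings do. A minimal sketch of the new pattern (the helper name is illustrative only):

import { SubstrateEvent } from '@joystream/hydra-common'

// With Hydra v3 the block timestamp no longer needs a BN round-trip; entity
// timestamps are derived directly from the event (sketch, helper name is illustrative).
function eventDate(event: SubstrateEvent): Date {
  return new Date(event.blockTimestamp)
}
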
+ 0 - 1
query-node/mappings/sumer/index.ts

@@ -1,4 +1,3 @@
 export * from './content'
 export * from './membership'
-export * from './storage'
 export * from './workingGroup'

+ 37 - 45
query-node/mappings/sumer/membership.ts

@@ -1,25 +1,16 @@
-import { fixBlockTimestamp } from './eventFix'
 import { Bytes } from '@polkadot/types'
 import { MemberId } from '@joystream/types/members'
-import { SubstrateEvent } from '@dzlzv/hydra-common'
-import { DatabaseManager } from '@dzlzv/hydra-db-utils'
-import { FindConditions } from 'typeorm'
-
-import {
-  convertBytesToString,
-  inconsistentState,
-  logger,
-  extractExtrinsicArgs,
-  extractSudoCallParameters,
-} from './common'
-import { Members } from '../../generated/types'
-import { MembershipEntryMethod, Membership } from 'query-node'
+import { SubstrateEvent, EventContext, StoreContext } from '@joystream/hydra-common'
+
+import { bytesToString, inconsistentState, logger, extractExtrinsicArgs, extractSudoCallParameters } from '../common'
+import { Members } from '../generated/types'
+import { MembershipEntryMethod, Membership } from 'query-node/dist/model'
 import { EntryMethod } from '@joystream/types/augment'
 
 // eslint-disable-next-line @typescript-eslint/naming-convention
-export async function members_MemberRegistered(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
+export async function members_MemberRegistered({ event, store }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { accountId, memberId, entryMethod } = new Members.MemberRegisteredEvent(event).data
+  const [memberId, accountId, entryMethod] = new Members.MemberRegisteredEvent(event).params
   const { avatarUri, about, handle } = extractExtrinsicArgs(event, Members.BuyMembershipCall, {
     handle: 1,
     avatarUri: 2,
@@ -32,26 +23,27 @@ export async function members_MemberRegistered(db: DatabaseManager, event: Subst
     id: memberId.toString(),
     rootAccount: accountId.toString(),
     controllerAccount: accountId.toString(),
-    handle: convertBytesToString(handle.unwrapOr(null)),
-    about: convertBytesToString(about.unwrapOr(null)),
-    avatarUri: convertBytesToString(avatarUri.unwrapOr(null)),
+    // Handle is required by the runtime during registration; unwrapping a missing one would throw an error
+    handle: bytesToString(handle.unwrap()),
+    about: about.isSome ? bytesToString(about.unwrap()) : undefined,
+    avatarUri: avatarUri.isSome ? bytesToString(avatarUri.unwrap()) : undefined,
     createdInBlock: event.blockNumber,
     entry: convertEntryMethod(entryMethod),
 
     // fill in auto-generated fields
-    createdAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-    updatedAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
+    createdAt: new Date(event.blockTimestamp),
+    updatedAt: new Date(event.blockTimestamp),
   })
 
   // save membership
-  await db.save<Membership>(member)
+  await store.save<Membership>(member)
 
   // emit log event
   logger.info('Member has been registered', { ids: memberId })
 }
 
 // eslint-disable-next-line @typescript-eslint/naming-convention
-export async function members_MemberUpdatedAboutText(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
+export async function members_MemberUpdatedAboutText({ event, store }: EventContext & StoreContext): Promise<void> {
   // read event data
   const { text, memberId } = isUpdateMembershipExtrinsic(event)
     ? unpackUpdateMembershipOptions(
@@ -60,7 +52,7 @@ export async function members_MemberUpdatedAboutText(db: DatabaseManager, event:
     : extractExtrinsicArgs(event, Members.ChangeMemberAboutTextCall, { memberId: 0, text: 1 })
 
   // load member
-  const member = await db.get(Membership, { where: { id: memberId.toString() } as FindConditions<Membership> })
+  const member = await store.get(Membership, { where: { id: memberId.toString() } })
 
   // ensure member exists
   if (!member) {
@@ -68,20 +60,20 @@ export async function members_MemberUpdatedAboutText(db: DatabaseManager, event:
   }
 
   // update member
-  member.about = convertBytesToString(text)
+  member.about = bytesToString(text)
 
   // set last update time
-  member.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
+  member.updatedAt = new Date(event.blockTimestamp)
 
   // save member
-  await db.save<Membership>(member)
+  await store.save<Membership>(member)
 
   // emit log event
   logger.info("Member's about text has been updated", { ids: memberId })
 }
 
 // eslint-disable-next-line @typescript-eslint/naming-convention
-export async function members_MemberUpdatedAvatar(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
+export async function members_MemberUpdatedAvatar({ event, store }: EventContext & StoreContext): Promise<void> {
   // read event data
   const { uri, memberId } = isUpdateMembershipExtrinsic(event)
     ? unpackUpdateMembershipOptions(
@@ -90,7 +82,7 @@ export async function members_MemberUpdatedAvatar(db: DatabaseManager, event: Su
     : extractExtrinsicArgs(event, Members.ChangeMemberAvatarCall, { memberId: 0, uri: 1 })
 
   // load member
-  const member = await db.get(Membership, { where: { id: memberId.toString() } as FindConditions<Membership> })
+  const member = await store.get(Membership, { where: { id: memberId.toString() } })
 
   // ensure member exists
   if (!member) {
@@ -98,20 +90,20 @@ export async function members_MemberUpdatedAvatar(db: DatabaseManager, event: Su
   }
 
   // update member
-  member.avatarUri = convertBytesToString(uri)
+  member.avatarUri = bytesToString(uri)
 
   // set last update time
-  member.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
+  member.updatedAt = new Date(event.blockTimestamp)
 
   // save member
-  await db.save<Membership>(member)
+  await store.save<Membership>(member)
 
   // emit log event
   logger.info("Member's avatar has been updated", { ids: memberId })
 }
 
 // eslint-disable-next-line @typescript-eslint/naming-convention
-export async function members_MemberUpdatedHandle(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
+export async function members_MemberUpdatedHandle({ event, store }: EventContext & StoreContext): Promise<void> {
   // read event data
   const { handle, memberId } = isUpdateMembershipExtrinsic(event)
     ? unpackUpdateMembershipOptions(
@@ -120,7 +112,7 @@ export async function members_MemberUpdatedHandle(db: DatabaseManager, event: Su
     : extractExtrinsicArgs(event, Members.ChangeMemberHandleCall, { memberId: 0, handle: 1 })
 
   // load member
-  const member = await db.get(Membership, { where: { id: memberId.toString() } as FindConditions<Membership> })
+  const member = await store.get(Membership, { where: { id: memberId.toString() } })
 
   // ensure member exists
   if (!member) {
@@ -128,20 +120,20 @@ export async function members_MemberUpdatedHandle(db: DatabaseManager, event: Su
   }
 
   // update member
-  member.handle = convertBytesToString(handle)
+  member.handle = bytesToString(handle)
 
   // set last update time
-  member.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
+  member.updatedAt = new Date(event.blockTimestamp)
 
   // save member
-  await db.save<Membership>(member)
+  await store.save<Membership>(member)
 
   // emit log event
   logger.info("Member's avatar has been updated", { ids: memberId })
 }
 
 // eslint-disable-next-line @typescript-eslint/naming-convention
-export async function members_MemberSetRootAccount(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
+export async function members_MemberSetRootAccount({ event, store }: EventContext & StoreContext): Promise<void> {
   // read event data
   const { newRootAccount, memberId } = extractExtrinsicArgs(event, Members.SetRootAccountCall, {
     memberId: 0,
@@ -149,7 +141,7 @@ export async function members_MemberSetRootAccount(db: DatabaseManager, event: S
   })
 
   // load member
-  const member = await db.get(Membership, { where: { id: memberId.toString() } as FindConditions<Membership> })
+  const member = await store.get(Membership, { where: { id: memberId.toString() } })
 
   // ensure member exists
   if (!member) {
@@ -160,17 +152,17 @@ export async function members_MemberSetRootAccount(db: DatabaseManager, event: S
   member.rootAccount = newRootAccount.toString()
 
   // set last update time
-  member.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
+  member.updatedAt = new Date(event.blockTimestamp)
 
   // save member
-  await db.save<Membership>(member)
+  await store.save<Membership>(member)
 
   // emit log event
   logger.info("Member's root has been updated", { ids: memberId })
 }
 
 // eslint-disable-next-line @typescript-eslint/naming-convention
-export async function members_MemberSetControllerAccount(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
+export async function members_MemberSetControllerAccount({ event, store }: EventContext & StoreContext): Promise<void> {
   // read event data
   const { newControllerAccount, memberId } = extractExtrinsicArgs(event, Members.SetControllerAccountCall, {
     memberId: 0,
@@ -178,7 +170,7 @@ export async function members_MemberSetControllerAccount(db: DatabaseManager, ev
   })
 
   // load member
-  const member = await db.get(Membership, { where: { id: memberId.toString() } as FindConditions<Membership> })
+  const member = await store.get(Membership, { where: { id: memberId.toString() } })
 
   // ensure member exists
   if (!member) {
@@ -189,10 +181,10 @@ export async function members_MemberSetControllerAccount(db: DatabaseManager, ev
   member.controllerAccount = newControllerAccount.toString()
 
   // set last update time
-  member.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
+  member.updatedAt = new Date(event.blockTimestamp)
 
   // save member
-  await db.save<Membership>(member)
+  await store.save<Membership>(member)
 
   // emit log event
   logger.info("Member's controller has been updated", { ids: memberId })

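One subtle change in the registration handler above: `handle` is unwrapped unconditionally because the runtime guarantees it, while the optional `about` and `avatarUri` fields now map to `undefined` when their `Option` is empty instead of going through `unwrapOr(null)`. A minimal sketch of that conversion, assuming polkadot-js `Option<Bytes>` values; the `bytesToString` decoder is passed in so the sketch stays self-contained:

import { Option, Bytes } from '@polkadot/types'

// Sketch of the optional-field handling used during member registration:
// an empty Option becomes `undefined`, a present one is decoded to a string.
function optionalBytesToString(
  value: Option<Bytes>,
  bytesToString: (b: Bytes) => string
): string | undefined {
  return value.isSome ? bytesToString(value.unwrap()) : undefined
}
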
+ 0 - 280
query-node/mappings/sumer/storage.ts

@@ -1,280 +0,0 @@
-import { fixBlockTimestamp } from './eventFix'
-import { SubstrateEvent } from '@dzlzv/hydra-common'
-import { DatabaseManager } from '@dzlzv/hydra-db-utils'
-import { FindConditions, In } from 'typeorm'
-
-import { inconsistentState, logger, prepareDataObject } from './common'
-
-import { DataDirectory } from '../../generated/types'
-import { ContentId, ContentParameters, StorageObjectOwner } from '@joystream/types/augment'
-
-import { ContentId as Custom_ContentId, ContentParameters as Custom_ContentParameters } from '@joystream/types/storage'
-import { registry } from '@joystream/types'
-
-import {
-  Channel,
-  Video,
-  AssetAvailability,
-  DataObject,
-  DataObjectOwner,
-  DataObjectOwnerMember,
-  DataObjectOwnerChannel,
-  DataObjectOwnerDao,
-  DataObjectOwnerCouncil,
-  DataObjectOwnerWorkingGroup,
-  LiaisonJudgement,
-  Worker,
-  WorkerType,
-} from 'query-node'
-
-export async function dataDirectory_ContentAdded(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const { contentParameters, storageObjectOwner } = new DataDirectory.ContentAddedEvent(event).data
-
-  // save all content objects
-  for (const parameters of contentParameters) {
-    const owner = convertStorageObjectOwner(storageObjectOwner)
-    const dataObject = await prepareDataObject(db, parameters, event, owner)
-
-    // fill in auto-generated fields
-    dataObject.createdAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
-    dataObject.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
-
-    await db.save<DataObject>(dataObject)
-  }
-
-  // emit log event
-  logger.info('Storage content has been added', {
-    ids: contentParameters.map((item) => encodeContentId(item.content_id)),
-  })
-}
-
-export async function dataDirectory_ContentRemoved(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const { contentId: contentIds } = new DataDirectory.ContentRemovedEvent(event).data
-
-  // load assets
-  const dataObjects = await db.getMany(DataObject, {
-    where: {
-      joystreamContentId: In(contentIds.map((item) => encodeContentId(item))),
-    } as FindConditions<DataObject>,
-  })
-
-  // store dataObject ids before they are deleted (for logging purposes)
-  const dataObjectIds = dataObjects.map((item) => item.id)
-
-  // remove assets from database
-  for (const item of dataObjects) {
-    // ensure dataObject is nowhere used to prevent db constraint error
-    await disconnectDataObjectRelations(db, item)
-
-    // remove data object
-    await db.remove<DataObject>(item)
-  }
-
-  // emit log event
-  logger.info('Storage content has been removed', { id: contentIds, dataObjectIds })
-}
-
-export async function dataDirectory_ContentAccepted(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const { contentId, storageProviderId } = new DataDirectory.ContentAcceptedEvent(event).data
-  const encodedContentId = encodeContentId(contentId)
-
-  // load asset
-  const dataObject = await db.get(DataObject, {
-    where: { joystreamContentId: encodedContentId } as FindConditions<DataObject>,
-  })
-
-  // ensure object exists
-  if (!dataObject) {
-    return inconsistentState('Non-existing content acceptance requested', encodedContentId)
-  }
-
-  // load storage provider
-  const worker = await db.get(Worker, {
-    where: {
-      workerId: storageProviderId.toString(),
-      type: WorkerType.STORAGE,
-    } as FindConditions<Worker>,
-  })
-
-  // ensure object exists
-  if (!worker) {
-    return inconsistentState('Missing Storage Provider Id', storageProviderId)
-  }
-
-  // update object
-  dataObject.liaison = worker
-  dataObject.liaisonJudgement = LiaisonJudgement.ACCEPTED
-
-  // set last update time
-  dataObject.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
-
-  // save object
-  await db.save<DataObject>(dataObject)
-
-  // emit log event
-  logger.info('Storage content has been accepted', { id: encodedContentId })
-
-  // update asset availability for all connected channels and videos
-  // this will not be needed after the redundant AssetAvailability is removed (after some Hydra upgrades)
-  await updateConnectedAssets(db, dataObject)
-}
-
-/// ///////////////// Updating connected entities ////////////////////////////////
-
-async function updateConnectedAssets(db: DatabaseManager, dataObject: DataObject) {
-  await updateSingleConnectedAsset(db, new Channel(), 'avatarPhoto', dataObject)
-  await updateSingleConnectedAsset(db, new Channel(), 'coverPhoto', dataObject)
-
-  await updateSingleConnectedAsset(db, new Video(), 'thumbnailPhoto', dataObject)
-  await updateSingleConnectedAsset(db, new Video(), 'media', dataObject)
-}
-
-// async function updateSingleConnectedAsset(db: DatabaseManager, type: typeof Channel | typeof Video, propertyName: string, dataObject: DataObject) {
-async function updateSingleConnectedAsset<T extends Channel | Video>(
-  db: DatabaseManager,
-  type: T,
-  propertyName: string,
-  dataObject: DataObject
-) {
-  // prepare lookup condition
-  const condition = {
-    where: {
-      [propertyName + 'DataObject']: dataObject,
-    },
-  } // as FindConditions<T>
-
-  // NOTE: we don't need to retrieve multiple channels/videos via `db.getMany()` because dataObject
-  //       is allowed to be associated only with one channel/video in runtime
-
-  // in theory the following condition(s) can be generalized `... db.get(type, ...` but in practice it doesn't work :-\
-  const item = type instanceof Channel ? await db.get(Channel, condition) : await db.get(Video, condition)
-
-  // escape when no dataObject association found
-  if (!item) {
-    return
-  }
-
-  item[propertyName + 'Availability'] = AssetAvailability.ACCEPTED
-
-  if (type instanceof Channel) {
-    await db.save<Channel>(item)
-
-    // emit log event
-    logger.info('Channel using Content has been accepted', {
-      channelId: item.id.toString(),
-      joystreamContentId: dataObject.joystreamContentId,
-    })
-  } else {
-    await db.save<Video>(item)
-
-    // emit log event
-    logger.info('Video using Content has been accepted', {
-      videoId: item.id.toString(),
-      joystreamContentId: dataObject.joystreamContentId,
-    })
-  }
-}
-
-// removes connection between dataObject and other entities
-async function disconnectDataObjectRelations(db: DatabaseManager, dataObject: DataObject) {
-  await disconnectSingleDataObjectRelation(db, new Channel(), 'avatarPhoto', dataObject)
-  await disconnectSingleDataObjectRelation(db, new Channel(), 'coverPhoto', dataObject)
-
-  await disconnectSingleDataObjectRelation(db, new Video(), 'thumbnailPhoto', dataObject)
-  await disconnectSingleDataObjectRelation(db, new Video(), 'media', dataObject)
-}
-
-async function disconnectSingleDataObjectRelation<T extends Channel | Video>(
-  db: DatabaseManager,
-  type: T,
-  propertyName: string,
-  dataObject: DataObject
-) {
-  // prepare lookup condition
-  const condition = {
-    where: {
-      [propertyName + 'DataObject']: dataObject,
-    },
-  } // as FindConditions<T>
-
-  // NOTE: we don't need to retrieve multiple channels/videos via `db.getMany()` because dataObject
-  //       is allowed to be associated only with one channel/video in runtime
-
-  // in theory the following condition(s) can be generalized `... db.get(type, ...` but in practice it doesn't work :-\
-  const item = type instanceof Channel ? await db.get(Channel, condition) : await db.get(Video, condition)
-
-  // escape when no dataObject association found
-  if (!item) {
-    return
-  }
-
-  item[propertyName + 'Availability'] = AssetAvailability.INVALID
-  item[propertyName + 'DataObject'] = null
-
-  if (type instanceof Channel) {
-    await db.save<Channel>(item)
-
-    // emit log event
-    logger.info('Content has been disconnected from Channel', {
-      channelId: item.id.toString(),
-      joystreamContentId: dataObject.joystreamContentId,
-    })
-  } else {
-    // type instanceof Video
-    await db.save<Video>(item)
-
-    // emit log event
-    logger.info('Content has been disconnected from Video', {
-      videoId: item.id.toString(),
-      joystreamContentId: dataObject.joystreamContentId,
-    })
-  }
-}
-
-/// ///////////////// Helpers ////////////////////////////////////////////////////
-
-function convertStorageObjectOwner(objectOwner: StorageObjectOwner): typeof DataObjectOwner {
-  if (objectOwner.isMember) {
-    const owner = new DataObjectOwnerMember()
-    owner.member = objectOwner.asMember.toNumber()
-
-    return owner
-  }
-
-  if (objectOwner.isChannel) {
-    const owner = new DataObjectOwnerChannel()
-    owner.channel = objectOwner.asChannel.toNumber()
-
-    return owner
-  }
-
-  if (objectOwner.isDao) {
-    const owner = new DataObjectOwnerDao()
-    owner.dao = objectOwner.asDao.toNumber()
-
-    return owner
-  }
-
-  if (objectOwner.isCouncil) {
-    return new DataObjectOwnerCouncil()
-  }
-
-  if (objectOwner.isWorkingGroup) {
-    const owner = new DataObjectOwnerWorkingGroup()
-    owner.workingGroup = objectOwner.asWorkingGroup.toNumber()
-
-    return owner
-  }
-
-  logger.error('Not implemented StorageObjectOwner type', { objectOwner: objectOwner.toString() })
-  throw new Error('Not implemented StorageObjectOwner type')
-}
-
-function encodeContentId(contentId: ContentId) {
-  const customContentId = new Custom_ContentId(registry, contentId)
-
-  return customContentId.encode()
-}

+ 62 - 146
query-node/mappings/sumer/workingGroup.ts

@@ -1,135 +1,39 @@
-import { SubstrateEvent } from '@dzlzv/hydra-common'
-import { DatabaseManager } from '@dzlzv/hydra-db-utils'
-import { FindConditions } from 'typeorm'
-import { Bytes } from '@polkadot/types'
-import { fixBlockTimestamp } from './eventFix'
-
-import { convertBytesToString, inconsistentState, logger, getNextId } from './common'
-
-import { Channel, Worker, WorkerType } from 'query-node'
-import { GatewayWorkingGroup, StorageWorkingGroup } from '../../generated/types'
-import { ApplicationId, ApplicationIdToWorkerIdMap, WorkerId } from '@joystream/types/augment'
-
-/// ///////////////// Storage working group //////////////////////////////////////
-
-export async function storageWorkingGroup_OpeningFilled(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const { applicationIdToWorkerIdMap } = new StorageWorkingGroup.OpeningFilledEvent(event).data
-
-  // call generic processing
-  await workingGroup_OpeningFilled(db, WorkerType.STORAGE, applicationIdToWorkerIdMap, event)
-}
-
-export async function storageWorkingGroup_WorkerStorageUpdated(
-  db: DatabaseManager,
-  event: SubstrateEvent
-): Promise<void> {
-  // read event data
-  const { workerId, bytes: newMetadata } = new StorageWorkingGroup.WorkerStorageUpdatedEvent(event).data
-
-  // call generic processing
-  await workingGroup_WorkerStorageUpdated(db, WorkerType.STORAGE, workerId, newMetadata)
-}
-
-export async function storageWorkingGroup_TerminatedWorker(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const { workerId } = new StorageWorkingGroup.TerminatedWorkerEvent(event).data
-
-  // call generic processing
-  await workingGroup_TerminatedWorker(db, event, WorkerType.STORAGE, workerId)
-}
-
-export async function storageWorkingGroup_WorkerExited(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const { workerId } = new StorageWorkingGroup.WorkerExitedEvent(event).data
-
-  // call generic processing
-  await workingGroup_WorkerExited(db, event, WorkerType.STORAGE, workerId)
-}
-
-export async function storageWorkingGroup_TerminatedLeader(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const { workerId } = new StorageWorkingGroup.TerminatedLeaderEvent(event).data
-
-  // call generic processing
-  await workingGroup_TerminatedLeader(db, event, WorkerType.STORAGE, workerId)
-}
-
-/// ///////////////// Gateway working group //////////////////////////////////////
-
-export async function gatewayWorkingGroup_OpeningFilled(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const { applicationIdToWorkerIdMap } = new GatewayWorkingGroup.OpeningFilledEvent(event).data
-
-  // call generic processing
-  await workingGroup_OpeningFilled(db, WorkerType.GATEWAY, applicationIdToWorkerIdMap, event)
-}
-
-export async function gatewayWorkingGroup_WorkerStorageUpdated(
-  db: DatabaseManager,
-  event: SubstrateEvent
-): Promise<void> {
-  // read event data
-  const { workerId, bytes: newMetadata } = new GatewayWorkingGroup.WorkerStorageUpdatedEvent(event).data
-
-  // call generic processing
-  await workingGroup_WorkerStorageUpdated(db, WorkerType.GATEWAY, workerId, newMetadata)
-}
-
-export async function gatewayWorkingGroup_TerminatedWorker(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const { workerId } = new GatewayWorkingGroup.TerminatedWorkerEvent(event).data
-
-  // call generic processing
-  await workingGroup_TerminatedWorker(db, event, WorkerType.GATEWAY, workerId)
-}
-
-export async function gatewayWorkingGroup_WorkerExited(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const { workerId } = new GatewayWorkingGroup.WorkerExitedEvent(event).data
-
-  // call generic processing
-  await workingGroup_WorkerExited(db, event, WorkerType.GATEWAY, workerId)
-}
-
-export async function gatewayWorkingGroup_TerminatedLeader(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const { workerId } = new GatewayWorkingGroup.TerminatedLeaderEvent(event).data
-
-  // call generic processing
-  await workingGroup_TerminatedLeader(db, event, WorkerType.GATEWAY, workerId)
-}
-
-/// ///////////////// Generic working group processing ///////////////////////////
+import { EventContext, StoreContext, DatabaseManager, SubstrateEvent } from '@joystream/hydra-common'
+import { bytesToString, inconsistentState, logger } from '../common'
+import { Worker, WorkerType } from 'query-node/dist/model'
+import { StorageWorkingGroup } from '../generated/types'
+import { WorkerId } from '@joystream/types/augment'
+
+export async function workingGroup_OpeningFilled({ event, store }: EventContext & StoreContext): Promise<void> {
+  const workerType = getWorkerType(event)
+  if (!workerType) {
+    return
+  }
 
-export async function workingGroup_OpeningFilled(
-  db: DatabaseManager,
-  workerType: WorkerType,
-  applicationIdToWorkerIdMap: ApplicationIdToWorkerIdMap,
-  event: SubstrateEvent
-): Promise<void> {
+  const [, applicationIdToWorkerIdMap] = new StorageWorkingGroup.OpeningFilledEvent(event).params
   const workerIds = [...applicationIdToWorkerIdMap.values()]
 
   for (const workerId of workerIds) {
-    await createWorker(db, workerId, workerType, event)
+    await createWorker(store, workerId, workerType, event)
   }
 
   // emit log event
   logger.info('Workers have been created', { ids: workerIds.map((item) => item.toString()), workerType })
 }
 
-export async function workingGroup_WorkerStorageUpdated(
-  db: DatabaseManager,
-  workerType: WorkerType,
-  workerId: WorkerId,
-  newMetadata: Bytes
-): Promise<void> {
+export async function workingGroup_WorkerStorageUpdated({ event, store }: EventContext & StoreContext): Promise<void> {
+  const workerType = getWorkerType(event)
+  if (!workerType) {
+    return
+  }
+  const [workerId, newMetadata] = new StorageWorkingGroup.WorkerStorageUpdatedEvent(event).params
+
   // load worker
-  const worker = await db.get(Worker, {
+  const worker = await store.get(Worker, {
     where: {
       workerId: workerId.toString(),
       type: workerType,
-    } as FindConditions<Worker>,
+    },
   })
 
   // ensure worker exists
@@ -137,48 +41,51 @@ export async function workingGroup_WorkerStorageUpdated(
     return inconsistentState('Non-existing worker update requested', workerId)
   }
 
-  worker.metadata = convertBytesToString(newMetadata)
+  worker.metadata = bytesToString(newMetadata)
 
-  await db.save<Worker>(worker)
+  await store.save<Worker>(worker)
 
   // emit log event
   logger.info('Worker has been updated', { workerId, workerType })
 }
 
-export async function workingGroup_TerminatedWorker(
-  db: DatabaseManager,
-  event: SubstrateEvent,
-  workerType: WorkerType,
-  workerId: WorkerId
-): Promise<void> {
+export async function workingGroup_TerminatedWorker({ event, store }: EventContext & StoreContext): Promise<void> {
+  const workerType = getWorkerType(event)
+  if (!workerType) {
+    return
+  }
+  const [workerId] = new StorageWorkingGroup.TerminatedWorkerEvent(event).params
+
   // do removal logic
-  await deactivateWorker(db, event, workerType, workerId)
+  await deactivateWorker(store, event, workerType, workerId)
 
   // emit log event
   logger.info('Worker has been removed (worker terminated)', { workerId, workerType })
 }
 
-export async function workingGroup_WorkerExited(
-  db: DatabaseManager,
-  event: SubstrateEvent,
-  workerType: WorkerType,
-  workerId: WorkerId
-): Promise<void> {
+export async function workingGroup_WorkerExited({ event, store }: EventContext & StoreContext): Promise<void> {
+  const workerType = getWorkerType(event)
+  if (!workerType) {
+    return
+  }
+  const [workerId] = new StorageWorkingGroup.WorkerExitedEvent(event).params
+
   // do removal logic
-  await deactivateWorker(db, event, workerType, workerId)
+  await deactivateWorker(store, event, workerType, workerId)
 
   // emit log event
   logger.info('Worker has been removed (worker exited)', { workerId, workerType })
 }
 
-export async function workingGroup_TerminatedLeader(
-  db: DatabaseManager,
-  event: SubstrateEvent,
-  workerType: WorkerType,
-  workerId: WorkerId
-): Promise<void> {
+export async function workingGroup_TerminatedLeader({ event, store }: EventContext & StoreContext): Promise<void> {
+  const workerType = getWorkerType(event)
+  if (!workerType) {
+    return
+  }
+  const [workerId] = new StorageWorkingGroup.WorkerExitedEvent(event).params
+
   // do removal logic
-  await deactivateWorker(db, event, workerType, workerId)
+  await deactivateWorker(store, event, workerType, workerId)
 
   // emit log event
   logger.info('Working group leader has been removed (leader terminated)', { workerId, workerType })
@@ -186,6 +93,15 @@ export async function workingGroup_TerminatedLeader(
 
 /// ///////////////// Helpers ////////////////////////////////////////////////////
 
+function getWorkerType(event: SubstrateEvent): WorkerType | null {
+  if (event.section === 'storageWorkingGroup') {
+    return WorkerType.STORAGE
+  } else if (event.section === 'gatewayWorkingGroup') {
+    return WorkerType.GATEWAY
+  }
+  return null
+}
+
 async function createWorker(
   db: DatabaseManager,
   workerId: WorkerId,
@@ -194,13 +110,13 @@ async function createWorker(
 ): Promise<void> {
   // create entity
   const newWorker = new Worker({
-    id: await getNextId(db),
+    id: `${workerType}-${workerId.toString()}`,
     workerId: workerId.toString(),
     type: workerType,
     isActive: true,
 
-    createdAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-    updatedAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
+    createdAt: new Date(event.blockTimestamp),
+    updatedAt: new Date(event.blockTimestamp),
   })
 
   // save worker
@@ -218,7 +134,7 @@ async function deactivateWorker(
     where: {
       workerId: workerId.toString(),
       type: workerType,
-    } as FindConditions<Worker>,
+    },
   })
 
   // ensure worker exists
@@ -230,7 +146,7 @@ async function deactivateWorker(
   worker.isActive = false
 
   // set last update time
-  worker.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
+  worker.updatedAt = new Date(event.blockTimestamp)
 
   // save worker
   await db.save<Worker>(worker)

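The working-group mappings now register one handler per event for both groups and recover the concrete group from `event.section` via `getWorkerType`, and worker entity ids are deterministic (`<workerType>-<workerId>`) instead of coming from the removed `NextEntityId` counter. A minimal sketch of building and reading back such an id, assuming the `WorkerType` string enum from the generated model (the parsing helper is illustrative, not part of the mappings):

import { WorkerType } from 'query-node/dist/model'

// Deterministic worker entity id, mirroring the `${workerType}-${workerId}` scheme in createWorker.
function buildWorkerEntityId(workerType: WorkerType, workerId: string): string {
  return `${workerType}-${workerId}`
}

// Illustrative inverse: recover the group and runtime id from an entity id
// (assumes the enum values themselves contain no '-').
function parseWorkerEntityId(id: string): { workerType: WorkerType; workerId: string } {
  const [workerType, workerId] = id.split('-')
  return { workerType: workerType as WorkerType, workerId }
}

For example, assuming `WorkerType.STORAGE` serializes to `'STORAGE'`, a storage worker with runtime id 3 gets the entity id `STORAGE-3`.
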
+ 1 - 1
query-node/mappings/tsconfig.json

@@ -18,5 +18,5 @@
       "@polkadot/types/augment": ["../../types/augment/augment-types.ts"]
     }
   },
-  "include": ["./giza/**/*"]
+  "include": ["./**/*"]
 }

+ 28 - 182
query-node/schemas/content.graphql

@@ -1,115 +1,33 @@
-"Category of media channel"
-type ChannelCategory @entity {
-  id: ID!
-
-  "The name of the category"
-  name: String @fulltext(query: "channelCategoriesByName")
-
-  channels: [Channel!]! @derivedFrom(field: "category")
-
-  createdInBlock: Int!
+type AssetExternal @variant {
+  # FIXME: [String!] currently not supported in variants
+  "JSON array of the urls"
+  urls: String!
 }
 
-"Asset availability representation"
-enum AssetAvailability {
-  "Asset is available in storage"
-  ACCEPTED,
-
-  "Asset is being uploaded to storage"
-  PENDING,
-
-  "Invalid storage (meta)data used"
-  INVALID,
+type AssetJoystreamStorage @variant {
+  "Related data object"
+  dataObject: StorageDataObject!
 }
 
-"The decision of the storage provider when it acts as liaison"
-enum LiaisonJudgement {
-  "Content awaits for a judgment"
-  PENDING,
-
-  "Content accepted"
-  ACCEPTED,
+# FIXME: https://github.com/Joystream/hydra/issues/434
+type AssetNone @variant {
+  _phantom: Int
 }
 
-"Manages content ids, type and storage provider decision about it"
-type DataObject @entity {
-  "Content owner"
-  owner: DataObjectOwner!
-
-  "Content added at"
-  createdInBlock: Int!
-
-  "Content type id"
-  typeId: Int!
-
-  "Content size in bytes"
-  size: Int!
-
-  "Storage provider id of the liaison"
-  liaison: Worker # liaison is unset until storage provider accepts or rejects the content
+union Asset = AssetExternal | AssetJoystreamStorage | AssetNone
 
-  "Storage provider as liaison judgment"
-  liaisonJudgement: LiaisonJudgement!
-
-  "IPFS content id"
-  ipfsContentId: String!
-
-  "Joystream runtime content"
-  joystreamContentId: String!
-}
-
-"Owner type for storage object"
-union DataObjectOwner = DataObjectOwnerMember
-  | DataObjectOwnerChannel
-  | DataObjectOwnerDao
-  | DataObjectOwnerCouncil
-  | DataObjectOwnerWorkingGroup
-
-"Asset owned by a member"
-type DataObjectOwnerMember @variant {
-  # use `Int` instead of `Membership` before variant relations are featured in Hydra
-  # TODO: setup proper relations
-  #"Member identifier"
-  #memberId: Membership!
-  "Member identifier"
-  member: Int!
-
-  "Variant needs to have at least one property. This value is not used."
-  dummy: Int
-}
-
-"Asset owned by a channel"
-type DataObjectOwnerChannel @variant {
-  # use `Int` instead of `Channel` before variant relations are featured in Hydra
-  #"Channel identifier"
-  #channel: Channel!
-  "Channel identifier"
-  channel: Int!
-
-  "Variant needs to have at least one property. This value is not used."
-  dummy: Int
-}
+"Category of media channel"
+type ChannelCategory @entity {
+  id: ID!
 
-"Asset owned by a DAO"
-type DataObjectOwnerDao @variant {
-  "DAO identifier"
-  dao: Int!
-}
+  "The name of the category"
+  name: String @fulltext(query: "channelCategoriesByName")
 
-"Asset owned by the Council"
-type DataObjectOwnerCouncil @variant {
-  "Variant needs to have at least one property. This value is not used."
-  dummy: Int
-}
+  channels: [Channel!]! @derivedFrom(field: "category")
 
-"Asset owned by a WorkingGroup"
-type DataObjectOwnerWorkingGroup @variant {
-  "Working group identifier"
-  workingGroup: Int!
+  createdInBlock: Int!
 }
 
-#### High Level Derivative Entities ####
-
 type Language @entity {
   "Runtime entity identifier (EntityId)"
   id: ID!
@@ -141,31 +59,11 @@ type Channel @entity {
   "The description of a Channel"
   description: String
 
-  ### Cover photo asset ###
-
-  # Channel's cover (background) photo. Recommended ratio: 16:9.
-
-  "Asset's data object"
-  coverPhotoDataObject: DataObject
-
-  "URLs where the asset content can be accessed (if any)"
-  coverPhotoUrls: [String!]
-
-  "Availability meta information"
-  coverPhotoAvailability: AssetAvailability!
-
-  ### Avatar photo asset ###
-
-  # Channel's avatar photo.
-
-  "Asset's data object"
-  avatarPhotoDataObject: DataObject
-
-  "URLs where the asset content can be accessed (if any)"
-  avatarPhotoUrls: [String!]
+  "Channel's cover (background) photo asset. Recommended ratio: 16:9."
+  coverPhoto: Asset
 
-  "Availability meta information"
-  avatarPhotoAvailability: AssetAvailability!
+  "Channel's avatar photo asset."
+  avatarPhoto: Asset
 
   ##########################
 
@@ -213,7 +111,7 @@ type Video @entity {
   id: ID!
 
   "Reference to member's channel"
-  channel: Channel
+  channel: Channel!
 
   "Reference to a video category"
   category: VideoCategory
@@ -227,18 +125,8 @@ type Video @entity {
   "Video duration in seconds"
   duration: Int
 
-  ### Thumbnail asset ###
-
-  # Video thumbnail (recommended ratio: 16:9)
-
-  "Asset's data object"
-  thumbnailPhotoDataObject: DataObject
-
-  "URLs where the asset content can be accessed (if any)"
-  thumbnailPhotoUrls: [String!]
-
-  "Availability meta information"
-  thumbnailPhotoAvailability: AssetAvailability!
+  "Video thumbnail asset (recommended ratio: 16:9)"
+  thumbnailPhoto: Asset
 
   ##########################
 
@@ -263,18 +151,8 @@ type Video @entity {
   "License under the video is published"
   license: License
 
-  ### Media asset ###
-
-  # Reference to video asset
-
-  "Asset's data object"
-  mediaDataObject: DataObject
-
-  "URLs where the asset content can be accessed (if any)"
-  mediaUrls: [String!]
-
-  "Availability meta information"
-  mediaAvailability: AssetAvailability!
+  "Video media asset"
+  media: Asset
 
   ##########################
 
@@ -301,7 +179,7 @@ type VideoMediaMetadata @entity {
   pixelHeight: Int
 
   "Video media size in bytes"
-  size: Int
+  size: BigInt
 
   video: Video @derivedFrom(field: "mediaMetadata")
 
@@ -332,35 +210,3 @@ type License @entity {
   "Custom license content"
   custom_text: String
 }
-
-enum WorkerType {
-  GATEWAY
-  STORAGE
-}
-
-type Worker @entity {
-  "Unique identifier"
-  id: ID!
-
-  "Sign of worker still being active"
-  isActive: Boolean!
-
-  "Runtime identifier"
-  workerId: String!
-
-  "Associated working group"
-  type: WorkerType!
-
-  "Custom metadata set by provider"
-  metadata: String
-
-  dataObjects: [DataObject!]! @derivedFrom(field: "liaison")
-}
-
-type NextEntityId @entity {
-  "Unique identifier"
-  id: ID!
-
-  "Next deterministic id for entities without custom id"
-  nextId: Int!
-}

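The schema rework above folds all asset references into a single `Asset` union; because list fields are not yet supported inside variants (see the FIXME), external URLs are stored as one JSON-encoded string. A minimal sketch of how a mapping could build the two main variants, assuming the generated model exposes `AssetExternal`, `AssetJoystreamStorage` and `StorageDataObject` classes matching the schema (the constructor shapes are assumptions):

import { AssetExternal, AssetJoystreamStorage, StorageDataObject } from 'query-node/dist/model'

// Sketch: external asset — the URL list is JSON-serialized because variant
// fields cannot hold [String!] yet (see the FIXME in the schema above).
function buildExternalAsset(urls: string[]): AssetExternal {
  const asset = new AssetExternal()
  asset.urls = JSON.stringify(urls)
  return asset
}

// Sketch: storage-backed asset pointing at an existing data object.
function buildJoystreamStorageAsset(dataObject: StorageDataObject): AssetJoystreamStorage {
  const asset = new AssetJoystreamStorage()
  asset.dataObject = dataObject
  return asset
}
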
+ 17 - 0
query-node/schemas/workingGroups.graphql

@@ -0,0 +1,17 @@
+enum WorkerType {
+  GATEWAY
+  STORAGE
+}
+
+type Worker @entity {
+  "Unique identifier"
+  id: ID!
+  "Sign of worker still being active"
+  isActive: Boolean!
+  "Runtime identifier"
+  workerId: String!
+  "Associated working group"
+  type: WorkerType!
+  "Custom metadata set by provider"
+  metadata: String
+}

+ 5 - 12
yarn.lock

@@ -14466,11 +14466,6 @@ google-protobuf@^3.14.0:
   resolved "https://registry.yarnpkg.com/google-protobuf/-/google-protobuf-3.17.3.tgz#f87595073545a77946c8f0b67c302c5f7646d700"
   integrity sha512-OVPzcSWIAJ+d5yiHyeaLrdufQtrvaBrF4JQg+z8ynTkbO3uFcujqXszTumqg1cGsAsjkWnI+M5B1xZ19yR4Wyg==
 
-google-protobuf@^3.6.1:
-  version "3.15.8"
-  resolved "https://registry.yarnpkg.com/google-protobuf/-/google-protobuf-3.15.8.tgz#5f3948905e4951c867d6bc143f385a80e2a39efe"
-  integrity sha512-2jtfdqTaSxk0cuBJBtTTWsot4WtR9RVr2rXg7x7OoqiuOKopPrwXpM1G4dXIkLcUNRh3RKzz76C8IOkksZSeOw==
-
 got@^6.3.0, got@^6.7.1:
   version "6.7.1"
   resolved "https://registry.yarnpkg.com/got/-/got-6.7.1.tgz#240cd05785a9a18e561dc1b44b41c763ef1e8db0"
@@ -16897,6 +16892,11 @@ isexe@^2.0.0:
   resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
   integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=
 
+iso-639-1@^2.1.9:
+  version "2.1.9"
+  resolved "https://registry.yarnpkg.com/iso-639-1/-/iso-639-1-2.1.9.tgz#e41b11d4f1808e5316d0252c3fa16eeb9b37bb58"
+  integrity sha512-owRu9up+Cpx/hwSzm83j6G8PtC7U99UCtPVItsafefNfEgMl+pi8KBwhXwJkJfp6IouyYWFxj8n24SvCWpKZEQ==
+
 iso-constants@^0.1.2:
   version "0.1.2"
   resolved "https://registry.yarnpkg.com/iso-constants/-/iso-constants-0.1.2.tgz#3d2456ed5aeaa55d18564f285ba02a47a0d885b4"
@@ -27707,13 +27707,6 @@ ts-pnp@^1.1.2:
   resolved "https://registry.yarnpkg.com/ts-pnp/-/ts-pnp-1.2.0.tgz#a500ad084b0798f1c3071af391e65912c86bca92"
   integrity "sha1-pQCtCEsHmPHDBxrzkeZZEshrypI= sha512-csd+vJOb/gkzvcCHgTGSChYpy5f1/XKNsmvBGO4JXS+z1v2HobugDz4s1IeFXM3wZB44uczs+eazB5Q/ccdhQw=="
 
-ts-protoc-gen@^0.14.0:
-  version "0.14.0"
-  resolved "https://registry.yarnpkg.com/ts-protoc-gen/-/ts-protoc-gen-0.14.0.tgz#a6f4c3fc37d1d449915551c18404fb7e9aa8fef6"
-  integrity sha512-2z6w2HioMCMVNcgNHBcEvudmQfzrn+3BjAlz+xgYZ9L0o8n8UG8WUiTJcbXHFiEg2SU8IltwH2pm1otLoMSKwg==
-  dependencies:
-    google-protobuf "^3.6.1"
-
 tsconfig-paths-webpack-plugin@^3.2.0:
   version "3.5.1"
   resolved "https://registry.yarnpkg.com/tsconfig-paths-webpack-plugin/-/tsconfig-paths-webpack-plugin-3.5.1.tgz#e4dbf492a20dca9caab60086ddacb703afc2b726"