
Content directory mappings - refactoring, updates related to Hydra v3 and protobufjs

Leszek Wiesner 3 years ago
commit 7b21edef90

+ 2 - 2
query-node/manifest.yml

@@ -489,7 +489,7 @@ mappings:
     #- extrinsic: Sudo.batchCall
     #  handler: handleSudoCall(DatabaseManager,SubstrateEvent)
   preBlockHooks:
-    - hanlder: loadGenesisData
+    - handler: loadGenesisData
       filter:
-        height: [0,0] # will be executed only at genesis
+        height: "[0,0]" # will be executed only at genesis
   postBlockHooks:

+ 49 - 19
query-node/mappings/common.ts

@@ -1,7 +1,14 @@
-import { DatabaseManager, SubstrateEvent, SubstrateExtrinsic, ExtrinsicArg } from '@dzlzv/hydra-common'
+import {
+  DatabaseManager,
+  SubstrateEvent,
+  SubstrateExtrinsic,
+  ExtrinsicArg,
+  EventContext,
+  StoreContext,
+} from '@dzlzv/hydra-common'
 import { Bytes } from '@polkadot/types'
 import { WorkingGroup, WorkerId, ContentParameters } from '@joystream/types/augment/all'
-import { Worker, DataObjectOwner, DataObject, LiaisonJudgement, Event, Network } from 'query-node/dist/model'
+import { Worker, Event, Network, DataObject, LiaisonJudgement, DataObjectOwner } from 'query-node/dist/model'
 import { BaseModel } from 'warthog'
 import { ContentParameters as Custom_ContentParameters } from '@joystream/types/storage'
 import { registry } from '@joystream/types'
@@ -65,28 +72,33 @@ export function invalidMetadata(extraInfo: string, data?: unknown): void {
   logger.info(errorMessage, data)
 }
 
-/*
-  Prepares data object from content parameters.
-*/
-export async function prepareDataObject(
+export async function createDataObject(
+  { event, store }: EventContext & StoreContext,
   contentParameters: ContentParameters,
-  blockNumber: number,
   owner: typeof DataObjectOwner
 ): Promise<DataObject> {
-  // convert generic content parameters coming from processor to custom Joystream data type
-  const customContentParameters = new Custom_ContentParameters(registry, contentParameters.toJSON() as any)
-
+  const {
+    size_in_bytes: sizeInBytes,
+    type_id: typeId,
+    content_id: contentId,
+    ipfs_content_id: ipfsContentId,
+  } = new Custom_ContentParameters(registry, contentParameters.toJSON() as any)
+  const dataObjectId = contentId.encode()
   const dataObject = new DataObject({
+    id: dataObjectId,
     owner,
-    createdInBlock: blockNumber,
-    typeId: contentParameters.type_id.toNumber(),
-    size: customContentParameters.size_in_bytes.toNumber(),
+    createdAt: new Date(event.blockTimestamp),
+    updatedAt: new Date(event.blockTimestamp),
+    createdInBlock: event.blockNumber,
+    typeId: typeId.toNumber(),
+    size: sizeInBytes.toNumber(),
     liaisonJudgement: LiaisonJudgement.PENDING, // judgement is pending at start; liaison id is set when content is accepted/rejected
-    ipfsContentId: contentParameters.ipfs_content_id.toUtf8(),
-    joystreamContentId: customContentParameters.content_id.encode(),
+    ipfsContentId: ipfsContentId.toUtf8(),
+    joystreamContentId: dataObjectId,
     createdById: '1',
     updatedById: '1',
   })
+  await store.save<DataObject>(dataObject)
 
   return dataObject
 }
@@ -111,12 +123,13 @@ export interface ISudoCallArgs<T> extends ExtrinsicArg {
 */
 export function extractExtrinsicArgs<DataParams, EventObject extends IGenericExtrinsicObject<DataParams>>(
   rawEvent: SubstrateEvent,
-  callFactory: new (event: SubstrateEvent) => EventObject,
+  callFactoryConstructor: new (event: SubstrateEvent) => EventObject,
 
   // in ideal world this parameter would not be needed, but there is no way to associate parameters
   // used in sudo to extrinsic parameters without it
   argsIndeces: Record<keyof DataParams, number>
 ): EventObject['args'] {
+  const CallFactory = callFactoryConstructor
   // this is equal to DataParams but only this notation works properly
   // escape when extrinsic info is not available
   if (!rawEvent.extrinsic) {
@@ -125,7 +138,7 @@ export function extractExtrinsicArgs<DataParams, EventObject extends IGenericExt
 
   // regular extrinsic call?
   if (rawEvent.extrinsic.section !== 'sudo') {
-    return new callFactory(rawEvent).args
+    return new CallFactory(rawEvent).args
   }
 
   // sudo extrinsic call
@@ -155,7 +168,7 @@ export function extractExtrinsicArgs<DataParams, EventObject extends IGenericExt
   } as SubstrateEvent
 
   // create event object and extract processed args
-  const finalArgs = new callFactory(partialEvent).args
+  const finalArgs = new CallFactory(partialEvent).args
 
   return finalArgs
 }
@@ -217,7 +230,7 @@ export function deserializeMetadata<T>(metadataType: AnyMetadataClass<T>, metada
     // We use `toObject()` to get rid of .prototype defaults for optional fields
     return metadataType.toObject(metadataType.decode(metadataBytes.toU8a(true))) as T
   } catch (e) {
-    console.error(`Cannot deserialize ${metadataType.name}! Provided bytes: (${metadataBytes.toHex()})`)
+    invalidMetadata(`Cannot deserialize ${metadataType.name}! Provided bytes: (${metadataBytes.toHex()})`)
     return null
   }
 }
@@ -236,6 +249,23 @@ export function perpareString(s: string): string {
   return s.replace(/\u0000/g, '')
 }
 
+export function isSet<T>(v: T | null | undefined): v is T {
+  return v !== null && v !== undefined
+}
+
+export function integrateMeta<
+  T extends BaseModel,
+  Props extends readonly (keyof T & keyof M & string)[],
+  M extends { [K in Props[number]]?: T[K] | null }
+>(object: T, meta: M, props: Props): void {
+  props.forEach((prop) => {
+    const metaPropVal = meta[prop] as T[Props[number]] | null | undefined
+    if (isSet(metaPropVal)) {
+      object[prop] = metaPropVal
+    }
+  })
+}
+
 export function hasValuesForProperties<
   T extends Record<string, unknown>,
   P extends keyof T & string,

+ 35 - 77
query-node/mappings/content/channel.ts

@@ -2,31 +2,19 @@
 eslint-disable @typescript-eslint/naming-convention
 */
 import { EventContext, StoreContext } from '@dzlzv/hydra-common'
-import { FindConditions, In } from 'typeorm'
+import { In } from 'typeorm'
 import { AccountId } from '@polkadot/types/interfaces'
 import { Option } from '@polkadot/types/codec'
 import { Content } from '../generated/types'
-import {
-  readProtobuf,
-  readProtobufWithAssets,
-  convertContentActorToChannelOwner,
-  convertContentActorToDataObjectOwner,
-} from './utils'
-import { Channel, ChannelCategory, DataObject, AssetAvailability } from 'query-node/dist/model'
-import { inconsistentState, logger } from '../common'
-
-export async function content_ChannelCreated({ store, event }: EventContext & StoreContext): Promise<void> {
-  // read event data
-  const { channelId, channelCreationParameters, contentActor } = new Content.ChannelCreatedEvent(event).data
+import { convertContentActorToChannelOwner, processChannelMetadata } from './utils'
+import { Channel, ChannelCategory, DataObject } from 'query-node/dist/model'
+import { deserializeMetadata, inconsistentState, integrateMeta, logger } from '../common'
+import { ChannelCategoryMetadata, ChannelMetadata } from '@joystream/metadata-protobuf/compiled'
 
-  // read metadata
-  const protobufContent = await readProtobufWithAssets(new Channel(), {
-    metadata: channelCreationParameters.meta,
-    store,
-    blockNumber: event.blockNumber,
-    assets: channelCreationParameters.assets,
-    contentOwner: convertContentActorToDataObjectOwner(contentActor, channelId.toNumber()),
-  })
+export async function content_ChannelCreated(ctx: EventContext & StoreContext): Promise<void> {
+  const { store, event } = ctx
+  // read event data
+  const [contentActor, channelId, , channelCreationParameters] = new Content.ChannelCreatedEvent(event).params
 
   // create entity
   const channel = new Channel({
@@ -36,23 +24,18 @@ export async function content_ChannelCreated({ store, event }: EventContext & St
     videos: [],
     createdInBlock: event.blockNumber,
 
-    // default values for properties that might or might not be filled by metadata
-    coverPhotoUrls: [],
-    coverPhotoAvailability: AssetAvailability.INVALID,
-    avatarPhotoUrls: [],
-    avatarPhotoAvailability: AssetAvailability.INVALID,
-
     // fill in auto-generated fields
     createdAt: new Date(event.blockTimestamp),
     updatedAt: new Date(event.blockTimestamp),
 
     // prepare channel owner (handles fields `ownerMember` and `ownerCuratorGroup`)
     ...(await convertContentActorToChannelOwner(store, contentActor)),
-
-    // integrate metadata
-    ...protobufContent,
   })
 
+  // deserialize & process metadata
+  const metadata = deserializeMetadata(ChannelMetadata, channelCreationParameters.meta) || {}
+  await processChannelMetadata(ctx, channel, metadata, channelCreationParameters.assets)
+
   // save entity
   await store.save<Channel>(channel)
 
@@ -60,12 +43,13 @@ export async function content_ChannelCreated({ store, event }: EventContext & St
   logger.info('Channel has been created', { id: channel.id })
 }
 
-export async function content_ChannelUpdated({ store, event }: EventContext & StoreContext): Promise<void> {
+export async function content_ChannelUpdated(ctx: EventContext & StoreContext): Promise<void> {
+  const { store, event } = ctx
   // read event data
-  const { channelId, channelUpdateParameters, contentActor } = new Content.ChannelUpdatedEvent(event).data
+  const [, channelId, , channelUpdateParameters] = new Content.ChannelUpdatedEvent(event).params
 
   // load channel
-  const channel = await store.get(Channel, { where: { id: channelId.toString() } as FindConditions<Channel> })
+  const channel = await store.get(Channel, { where: { id: channelId.toString() } })
 
   // ensure channel exists
   if (!channel) {
@@ -73,22 +57,12 @@ export async function content_ChannelUpdated({ store, event }: EventContext & St
   }
 
   // prepare changed metadata
-  const newMetadata = channelUpdateParameters.new_meta.unwrapOr(null)
+  const newMetadataBytes = channelUpdateParameters.new_meta.unwrapOr(null)
 
   //  update metadata if it was changed
-  if (newMetadata) {
-    const protobufContent = await readProtobufWithAssets(new Channel(), {
-      metadata: newMetadata,
-      store,
-      blockNumber: event.blockNumber,
-      assets: channelUpdateParameters.assets.unwrapOr([]),
-      contentOwner: convertContentActorToDataObjectOwner(contentActor, channelId.toNumber()),
-    })
-
-    // update all fields read from protobuf
-    for (const [key, value] of Object.entries(protobufContent)) {
-      channel[key] = value
-    }
+  if (newMetadataBytes) {
+    const newMetadata = deserializeMetadata(ChannelMetadata, newMetadataBytes) || {}
+    await processChannelMetadata(ctx, channel, newMetadata, channelUpdateParameters.assets.unwrapOr([]))
   }
 
   // prepare changed reward account
@@ -112,19 +86,17 @@ export async function content_ChannelUpdated({ store, event }: EventContext & St
 
 export async function content_ChannelAssetsRemoved({ store, event }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { contentId: contentIds } = new Content.ChannelAssetsRemovedEvent(event).data
+  const [, , contentIds] = new Content.ChannelAssetsRemovedEvent(event).params
 
   // load channel
   const assets = await store.getMany(DataObject, {
     where: {
       id: In(contentIds.toArray().map((item) => item.toString())),
-    } as FindConditions<DataObject>,
+    },
   })
 
   // delete assets
-  for (const asset of assets) {
-    await store.remove<DataObject>(asset)
-  }
+  await Promise.all(assets.map((a) => store.remove<DataObject>(a)))
 
   // emit log event
   logger.info('Channel assets have been removed', { ids: contentIds })
@@ -135,10 +107,10 @@ export async function content_ChannelCensorshipStatusUpdated({
   event,
 }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { channelId, isCensored } = new Content.ChannelCensorshipStatusUpdatedEvent(event).data
+  const [, channelId, isCensored] = new Content.ChannelCensorshipStatusUpdatedEvent(event).params
 
   // load event
-  const channel = await store.get(Channel, { where: { id: channelId.toString() } as FindConditions<Channel> })
+  const channel = await store.get(Channel, { where: { id: channelId.toString() } })
 
   // ensure channel exists
   if (!channel) {
@@ -162,14 +134,10 @@ export async function content_ChannelCensorshipStatusUpdated({
 
 export async function content_ChannelCategoryCreated({ store, event }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { channelCategoryCreationParameters, channelCategoryId } = new Content.ChannelCategoryCreatedEvent(event).data
+  const [channelCategoryId, , channelCategoryCreationParameters] = new Content.ChannelCategoryCreatedEvent(event).params
 
   // read metadata
-  const protobufContent = await readProtobuf(new ChannelCategory(), {
-    metadata: channelCategoryCreationParameters.meta,
-    store,
-    blockNumber: event.blockNumber,
-  })
+  const metadata = deserializeMetadata(ChannelCategoryMetadata, channelCategoryCreationParameters.meta) || {}
 
   // create new channel category
   const channelCategory = new ChannelCategory({
@@ -181,10 +149,8 @@ export async function content_ChannelCategoryCreated({ store, event }: EventCont
     // fill in auto-generated fields
     createdAt: new Date(event.blockTimestamp),
     updatedAt: new Date(event.blockTimestamp),
-
-    // integrate metadata
-    ...protobufContent,
   })
+  integrateMeta(channelCategory, metadata, ['name'])
 
   // save channel
   await store.save<ChannelCategory>(channelCategory)
@@ -195,13 +161,13 @@ export async function content_ChannelCategoryCreated({ store, event }: EventCont
 
 export async function content_ChannelCategoryUpdated({ store, event }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { channelCategoryId, channelCategoryUpdateParameters } = new Content.ChannelCategoryUpdatedEvent(event).data
+  const [, channelCategoryId, channelCategoryUpdateParameters] = new Content.ChannelCategoryUpdatedEvent(event).params
 
   // load channel category
   const channelCategory = await store.get(ChannelCategory, {
     where: {
       id: channelCategoryId.toString(),
-    } as FindConditions<ChannelCategory>,
+    },
   })
 
   // ensure channel exists
@@ -210,16 +176,8 @@ export async function content_ChannelCategoryUpdated({ store, event }: EventCont
   }
 
   // read metadata
-  const protobufContent = await readProtobuf(new ChannelCategory(), {
-    metadata: channelCategoryUpdateParameters.new_meta,
-    store,
-    blockNumber: event.blockNumber,
-  })
-
-  // update all fields read from protobuf
-  for (const [key, value] of Object.entries(protobufContent)) {
-    channelCategory[key] = value
-  }
+  const newMeta = deserializeMetadata(ChannelCategoryMetadata, channelCategoryUpdateParameters.new_meta) || {}
+  integrateMeta(channelCategory, newMeta, ['name'])
 
   // set last update time
   channelCategory.updatedAt = new Date(event.blockTimestamp)
@@ -233,13 +191,13 @@ export async function content_ChannelCategoryUpdated({ store, event }: EventCont
 
 export async function content_ChannelCategoryDeleted({ store, event }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { channelCategoryId } = new Content.ChannelCategoryDeletedEvent(event).data
+  const [, channelCategoryId] = new Content.ChannelCategoryDeletedEvent(event).params
 
   // load channel category
   const channelCategory = await store.get(ChannelCategory, {
     where: {
       id: channelCategoryId.toString(),
-    } as FindConditions<ChannelCategory>,
+    },
   })
 
   // ensure channel category exists

+ 4 - 4
query-node/mappings/content/curatorGroup.ts

@@ -9,7 +9,7 @@ import { inconsistentState, logger } from '../common'
 
 export async function content_CuratorGroupCreated({ store, event }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { curatorGroupId } = new Content.CuratorGroupCreatedEvent(event).data
+  const [curatorGroupId] = new Content.CuratorGroupCreatedEvent(event).params
 
   // create new curator group
   const curatorGroup = new CuratorGroup({
@@ -32,7 +32,7 @@ export async function content_CuratorGroupCreated({ store, event }: EventContext
 
 export async function content_CuratorGroupStatusSet({ store, event }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { curatorGroupId, bool: isActive } = new Content.CuratorGroupStatusSetEvent(event).data
+  const [curatorGroupId, isActive] = new Content.CuratorGroupStatusSetEvent(event).params
 
   // load curator group
   const curatorGroup = await store.get(CuratorGroup, {
@@ -59,7 +59,7 @@ export async function content_CuratorGroupStatusSet({ store, event }: EventConte
 
 export async function content_CuratorAdded({ store, event }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { curatorGroupId, curatorId } = new Content.CuratorAddedEvent(event).data
+  const [curatorGroupId, curatorId] = new Content.CuratorAddedEvent(event).params
 
   // load curator group
   const curatorGroup = await store.get(CuratorGroup, {
@@ -86,7 +86,7 @@ export async function content_CuratorAdded({ store, event }: EventContext & Stor
 
 export async function content_CuratorRemoved({ store, event }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { curatorGroupId, curatorId } = new Content.CuratorAddedEvent(event).data
+  const [curatorGroupId, curatorId] = new Content.CuratorAddedEvent(event).params
 
   // load curator group
   const curatorGroup = await store.get(CuratorGroup, {

+ 250 - 511
query-node/mappings/content/utils.ts

@@ -6,26 +6,29 @@
 //         every time query node codegen is run (that will overwrite said manual changes)
 //       - verify in integration tests that the records are truly created/updated/removed as expected
 
-import { DatabaseManager } from '@dzlzv/hydra-common'
-import { Bytes } from '@polkadot/types'
+import { DatabaseManager, EventContext, StoreContext } from '@dzlzv/hydra-common'
 import ISO6391 from 'iso-639-1'
 import { FindConditions } from 'typeorm'
-import * as jspb from 'google-protobuf'
 import {
-  ChannelMetadata,
-  ChannelCategoryMetadata,
-  PublishedBeforeJoystream as PublishedBeforeJoystreamMetadata,
-  License as LicenseMetadata,
-  MediaType as MediaTypeMetadata,
-  VideoMetadata,
-  VideoCategoryMetadata,
-} from '@joystream/content-metadata-protobuf'
-import { invalidMetadata, inconsistentState, logger, prepareDataObject } from '../common'
+  IVideoMetadata,
+  IPublishedBeforeJoystream,
+  ILicense,
+  IMediaType,
+  IChannelMetadata,
+} from '@joystream/metadata-protobuf'
+import {
+  invalidMetadata,
+  inconsistentState,
+  logger,
+  isSet,
+  integrateMeta,
+  unexpectedData,
+  createDataObject,
+} from '../common'
 import {
   // primary entities
   CuratorGroup,
   Channel,
-  ChannelCategory,
   Video,
   VideoCategory,
   // secondary entities
@@ -33,298 +36,186 @@ import {
   License,
   VideoMediaMetadata,
   // asset
+  Asset,
   DataObjectOwner,
   DataObjectOwnerChannel,
-  DataObject,
-  LiaisonJudgement,
-  AssetAvailability,
   Membership,
+  VideoMediaEncoding,
+  AssetExternal,
+  AssetJoystreamStorage,
+  ChannelCategory,
 } from 'query-node/dist/model'
 // Joystream types
 import { ContentParameters, NewAsset, ContentActor } from '@joystream/types/augment'
 import { ContentParameters as Custom_ContentParameters } from '@joystream/types/storage'
 import { registry } from '@joystream/types'
 
-/*
-  Asset either stored in storage or describing list of URLs.
-*/
-type AssetStorageOrUrls = DataObject | string[]
-
-/*
-  Type guard differentiating asset stored in storage from asset describing a list of URLs.
-*/
-function isAssetInStorage(dataObject: AssetStorageOrUrls): dataObject is DataObject {
-  if (Array.isArray(dataObject)) {
-    return false
-  }
-
-  return true
-}
-
-export interface IReadProtobufArguments {
-  metadata: Bytes
-  store: DatabaseManager
-  blockNumber: number
-}
-
-export interface IReadProtobufArgumentsWithAssets extends IReadProtobufArguments {
-  assets: NewAsset[] // assets provided in event
-  contentOwner: typeof DataObjectOwner
-}
+export async function processChannelMetadata(
+  ctx: EventContext & StoreContext,
+  channel: Channel,
+  meta: IChannelMetadata,
+  assets: NewAsset[]
+): Promise<Channel> {
+  const assetsOwner = new DataObjectOwnerChannel()
+  assetsOwner.channelId = channel.id
 
-/*
-  This class represents one of 3 possible states when changing property read from metadata.
-  NoChange - don't change anything (used when invalid metadata are encountered)
-  Unset - unset the value (used when the unset is requested in runtime)
-  Change - set the new value
-*/
-export class PropertyChange<T> {
-  static newUnset<T>(): PropertyChange<T> {
-    return new PropertyChange<T>('unset')
-  }
+  const processedAssets = await Promise.all(assets.map((asset) => processNewAsset(ctx, asset, assetsOwner)))
 
-  static newNoChange<T>(): PropertyChange<T> {
-    return new PropertyChange<T>('nochange')
-  }
+  integrateMeta(channel, meta, ['title', 'description', 'isPublic'])
 
-  static newChange<T>(value: T): PropertyChange<T> {
-    return new PropertyChange<T>('change', value)
+  // prepare channel category if needed
+  if (isSet(meta.category)) {
+    channel.category = await processChannelCategory(ctx, channel.category, meta.category.toNumber())
   }
 
-  /*
-    Determines property change from the given object property.
-  */
-  static fromObjectProperty<T, Key extends string, ChangedObject extends { [key in Key]?: T }>(
-    object: ChangedObject,
-    key: Key
-  ): PropertyChange<T> {
-    if (!(key in object)) {
-      return PropertyChange.newNoChange<T>()
+  // prepare cover photo asset if needed
+  if (isSet(meta.coverPhoto)) {
+    const asset = findAssetByIndex(processedAssets, meta.coverPhoto, 'channel cover photo')
+    if (asset) {
+      channel.coverPhoto = asset
     }
+  }
 
-    if (object[key] === undefined) {
-      return PropertyChange.newUnset<T>()
+  // prepare avatar photo asset if needed
+  if (isSet(meta.avatarPhoto)) {
+    const asset = findAssetByIndex(processedAssets, meta.avatarPhoto, 'channel avatar photo')
+    if (asset) {
+      channel.avatarPhoto = asset
     }
+  }
 
-    return PropertyChange.newChange<T>(object[key] as T)
+  // prepare language if needed
+  if (isSet(meta.language)) {
+    channel.language = await processLanguage(ctx, channel.language, meta.language)
   }
 
-  private type: string
-  private value?: T
+  return channel
+}
 
-  private constructor(type: 'change' | 'nochange' | 'unset', value?: T) {
-    this.type = type
-    this.value = value
-  }
+export async function processVideoMetadata(
+  ctx: EventContext & StoreContext,
+  channel: Channel,
+  video: Video,
+  meta: IVideoMetadata,
+  assets: NewAsset[]
+): Promise<Video> {
+  const assetsOwner = new DataObjectOwnerChannel()
+  assetsOwner.channelId = channel.id
 
-  public isUnset(): boolean {
-    return this.type === 'unset'
-  }
+  const processedAssets = await Promise.all(assets.map((asset) => processNewAsset(ctx, asset, assetsOwner)))
 
-  public isNoChange(): boolean {
-    return this.type === 'nochange'
+  integrateMeta(video, meta, ['title', 'description', 'duration', 'hasMarketing', 'isExplicit', 'isPublic'])
+
+  // prepare video category if needed
+  if (meta.category) {
+    video.category = await processVideoCategory(ctx, video.category, meta.category.toNumber())
   }
 
-  public isValue(): boolean {
-    return this.type === 'change'
+  // prepare media meta information if needed
+  if (isSet(meta.mediaType) || isSet(meta.mediaPixelWidth) || isSet(meta.mediaPixelHeight)) {
+    // prepare video file size if possible
+    const videoSize = extractVideoSize(assets, meta.video)
+    video.mediaMetadata = await processVideoMediaMetadata(ctx, video.mediaMetadata, meta, videoSize)
   }
 
-  public getValue(): T | undefined {
-    return this.type === 'change' ? this.value : undefined
+  // prepare license if needed
+  if (isSet(meta.license)) {
+    video.license = await processLicense(ctx, video.license, meta.license)
   }
 
-  /*
-    Integrates the value into the given dictionary.
-  */
-  public integrateInto(object: Record<string, unknown>, key: string): void {
-    if (this.isNoChange()) {
-      return
+  // prepare thumbnail photo asset if needed
+  if (isSet(meta.thumbnailPhoto)) {
+    const asset = findAssetByIndex(processedAssets, meta.thumbnailPhoto, 'thumbnail photo')
+    if (asset) {
+      video.thumbnailPhoto = asset
     }
+  }
 
-    if (this.isUnset()) {
-      delete object[key]
-      return
+  // prepare video asset if needed
+  if (isSet(meta.video)) {
+    const asset = findAssetByIndex(processedAssets, meta.video, 'video')
+    if (asset) {
+      video.media = asset
     }
-
-    object[key] = this.value
   }
-}
 
-export interface RawVideoMetadata {
-  encoding: {
-    codecName: PropertyChange<string>
-    container: PropertyChange<string>
-    mimeMediaType: PropertyChange<string>
+  // prepare language if needed
+  if (isSet(meta.language)) {
+    video.language = await processLanguage(ctx, video.language, meta.language)
   }
-  pixelWidth: PropertyChange<number>
-  pixelHeight: PropertyChange<number>
-  size: PropertyChange<number>
-}
 
-/*
-  Reads information from the event and protobuf metadata and constructs changeset that's fit to be used when saving to db.
-*/
-export async function readProtobuf<T extends ChannelCategory | VideoCategory>(
-  type: T,
-  parameters: IReadProtobufArguments
-): Promise<Partial<T>> {
-  // true option here is crucial, it indicates that we want just the underlying bytes (by default it will also include bytes encoding the length)
-  const metaU8a = parameters.metadata.toU8a(true)
-
-  // process channel category
-  if (type instanceof ChannelCategory) {
-    const meta = ChannelCategoryMetadata.deserializeBinary(metaU8a)
-    const result = convertMetadataToObject<ChannelCategoryMetadata.AsObject>(meta) as Partial<T>
-
-    return result
+  if (isSet(meta.publishedBeforeJoystream)) {
+    video.publishedBeforeJoystream = processPublishedBeforeJoystream(
+      ctx,
+      video.publishedBeforeJoystream,
+      meta.publishedBeforeJoystream
+    )
   }
 
-  // process video category
-  if (type instanceof VideoCategory) {
-    const meta = VideoCategoryMetadata.deserializeBinary(metaU8a)
-    const result = convertMetadataToObject<VideoCategoryMetadata.AsObject>(meta) as Partial<T>
-
-    return result
-  }
-
-  // this should never happen
-  logger.error('Not implemented metadata type', { type })
-  throw new Error(`Not implemented metadata type`)
+  return video
 }
 
-/*
-  Reads information from the event and protobuf metadata and constructs changeset that's fit to be used when saving to db.
-  In addition it handles any assets associated with the metadata.
-*/
-
-export async function readProtobufWithAssets<T extends Channel | Video>(
-  type: T,
-  parameters: IReadProtobufArgumentsWithAssets
-): Promise<Partial<T>> {
-  // true option here is crucial, it indicates that we want just the underlying bytes (by default it will also include bytes encoding the length)
-  const metaU8a = parameters.metadata.toU8a(true)
-
-  // process channel
-  if (type instanceof Channel) {
-    const meta = ChannelMetadata.deserializeBinary(metaU8a)
-    const metaAsObject = convertMetadataToObject<ChannelMetadata.AsObject>(meta)
-    const result = (metaAsObject as any) as Partial<Channel>
-
-    // prepare cover photo asset if needed
-    if ('coverPhoto' in metaAsObject) {
-      const asset = await extractAsset({
-        // assetIndex: metaAsObject.coverPhoto,
-        assetIndex: metaAsObject.coverPhoto,
-        assets: parameters.assets,
-        store: parameters.store,
-        blockNumber: parameters.blockNumber,
-        contentOwner: parameters.contentOwner,
-      })
-      integrateAsset('coverPhoto', result, asset) // changes `result` inline!
-      delete metaAsObject.coverPhoto
-    }
-
-    // prepare avatar photo asset if needed
-    if ('avatarPhoto' in metaAsObject) {
-      const asset = await extractAsset({
-        assetIndex: metaAsObject.avatarPhoto,
-        assets: parameters.assets,
-        store: parameters.store,
-        blockNumber: parameters.blockNumber,
-        contentOwner: parameters.contentOwner,
-      })
-      integrateAsset('avatarPhoto', result, asset) // changes `result` inline!
-      delete metaAsObject.avatarPhoto
-    }
-
-    // prepare language if needed
-    if ('language' in metaAsObject) {
-      const language = await prepareLanguage(metaAsObject.language, parameters.store, parameters.blockNumber)
-      delete metaAsObject.language // make sure temporary value will not interfere
-      language.integrateInto(result, 'language')
-    }
+function findAssetByIndex(assets: typeof Asset[], index: number, name?: string): typeof Asset | null {
+  if (assets[index]) {
+    return assets[index]
+  } else {
+    invalidMetadata(`Invalid${name ? ' ' + name : ''} asset index`, {
+      numberOfAssets: assets.length,
+      requestedAssetIndex: index,
+    })
 
-    return result as Partial<T>
+    return null
   }
+}
 
-  // process video
-  if (type instanceof Video) {
-    const meta = VideoMetadata.deserializeBinary(metaU8a)
-    const metaAsObject = convertMetadataToObject<VideoMetadata.AsObject>(meta)
-    const result = (metaAsObject as any) as Partial<Video>
-
-    // prepare video category if needed
-    if ('category' in metaAsObject) {
-      const category = await prepareVideoCategory(metaAsObject.category, parameters.store)
-      delete metaAsObject.category // make sure temporary value will not interfere
-      category.integrateInto(result, 'category')
-    }
-
-    // prepare media meta information if needed
-    if ('mediaType' in metaAsObject || 'mediaPixelWidth' in metaAsObject || 'mediaPixelHeight' in metaAsObject) {
-      // prepare video file size if poosible
-      const videoSize = extractVideoSize(parameters.assets, metaAsObject.video)
-
-      // NOTE: type hack - `RawVideoMetadata` is inserted instead of VideoMediaMetadata - it should be edited in `video.ts`
-      //       see `integrateVideoMetadata()` in `video.ts` for more info
-      result.mediaMetadata = (prepareVideoMetadata(metaAsObject, videoSize) as unknown) as VideoMediaMetadata
-
-      // remove extra values
-      delete metaAsObject.mediaType
-      delete metaAsObject.mediaPixelWidth
-      delete metaAsObject.mediaPixelHeight
-    }
-
-    // prepare license if needed
-    if ('license' in metaAsObject) {
-      result.license = await prepareLicense(metaAsObject.license)
-    }
-
-    // prepare thumbnail photo asset if needed
-    if ('thumbnailPhoto' in metaAsObject) {
-      const asset = await extractAsset({
-        assetIndex: metaAsObject.thumbnailPhoto,
-        assets: parameters.assets,
-        store: parameters.store,
-        blockNumber: parameters.blockNumber,
-        contentOwner: parameters.contentOwner,
-      })
-      integrateAsset('thumbnailPhoto', result, asset) // changes `result` inline!
-      delete metaAsObject.thumbnailPhoto
-    }
-
-    // prepare video asset if needed
-    if ('video' in metaAsObject) {
-      const asset = await extractAsset({
-        assetIndex: metaAsObject.video,
-        assets: parameters.assets,
-        store: parameters.store,
-        blockNumber: parameters.blockNumber,
-        contentOwner: parameters.contentOwner,
-      })
-      integrateAsset('media', result, asset) // changes `result` inline!
-      delete metaAsObject.video
-    }
+async function processVideoMediaEncoding(
+  { store, event }: StoreContext & EventContext,
+  existingVideoMediaEncoding: VideoMediaEncoding | undefined,
+  metadata: IMediaType
+): Promise<VideoMediaEncoding> {
+  const encoding =
+    existingVideoMediaEncoding ||
+    new VideoMediaEncoding({
+      createdAt: new Date(event.blockTimestamp),
+      createdById: '1',
+      updatedById: '1',
+    })
+  // integrate media encoding-related data
+  integrateMeta(encoding, metadata, ['codecName', 'container', 'mimeMediaType'])
+  encoding.updatedAt = new Date(event.blockTimestamp)
+  await store.save<VideoMediaEncoding>(encoding)
 
-    // prepare language if needed
-    if ('language' in metaAsObject) {
-      const language = await prepareLanguage(metaAsObject.language, parameters.store, parameters.blockNumber)
-      delete metaAsObject.language // make sure temporary value will not interfere
-      language.integrateInto(result, 'language')
-    }
+  return encoding
+}
 
-    if (metaAsObject.publishedBeforeJoystream) {
-      const publishedBeforeJoystream = handlePublishedBeforeJoystream(result, metaAsObject.publishedBeforeJoystream)
-      delete metaAsObject.publishedBeforeJoystream // make sure temporary value will not interfere
-      publishedBeforeJoystream.integrateInto(result, 'publishedBeforeJoystream')
-    }
+async function processVideoMediaMetadata(
+  ctx: StoreContext & EventContext,
+  existingVideoMedia: VideoMediaMetadata | undefined,
+  metadata: IVideoMetadata,
+  videoSize: number | undefined
+): Promise<VideoMediaMetadata> {
+  const { store, event } = ctx
+  const videoMedia =
+    existingVideoMedia ||
+    new VideoMediaMetadata({
+      createdInBlock: event.blockNumber,
+      createdAt: new Date(event.blockTimestamp),
+      createdById: '1',
+      updatedById: '1',
+    })
 
-    return result as Partial<T>
+  // integrate media-related data
+  const mediaMetadata = {
+    size: videoSize,
+    pixelWidth: metadata.mediaPixelWidth,
+    pixelHeight: metadata.mediaPixelHeight,
   }
+  integrateMeta(videoMedia, mediaMetadata, ['pixelWidth', 'pixelHeight', 'size'])
+  videoMedia.updatedAt = new Date(event.blockTimestamp)
+  videoMedia.encoding = await processVideoMediaEncoding(ctx, videoMedia.encoding, metadata.mediaType || {})
+  await store.save<VideoMediaMetadata>(videoMedia)
 
-  // this should never happen
-  logger.error('Not implemented metadata type', { type })
-  throw new Error(`Not implemented metadata type`)
+  return videoMedia
 }
 
 export async function convertContentActorToChannelOwner(
@@ -372,180 +263,60 @@ export async function convertContentActorToChannelOwner(
   throw new Error('Not-implemented ContentActor type used')
 }
 
-export function convertContentActorToDataObjectOwner(
-  contentActor: ContentActor,
-  channelId: number
-): typeof DataObjectOwner {
-  const owner = new DataObjectOwnerChannel()
-  owner.channelId = channelId.toString()
-
-  return owner
-
-  /* contentActor is irrelevant now -> all video/channel content belongs to the channel
-  if (contentActor.isMember) {
-    const owner = new DataObjectOwnerMember()
-    owner.member = contentActor.asMember.toBn()
-
-    return owner
-  }
-
-  if (contentActor.isLead || contentActor.isCurator) {
-    const owner = new DataObjectOwnerChannel()
-    owner.channel = channelId
-
-    return owner
+function processPublishedBeforeJoystream(
+  ctx: EventContext & StoreContext,
+  currentValue: Date | undefined,
+  metadata: IPublishedBeforeJoystream
+): Date | undefined {
+  if (!isSet(metadata)) {
+    return currentValue
   }
 
-  logger.error('Not implemented ContentActor type', {contentActor: contentActor.toString()})
-  throw 'Not-implemented ContentActor type used'
-  */
-}
-
-function handlePublishedBeforeJoystream(
-  video: Partial<Video>,
-  metadata: PublishedBeforeJoystreamMetadata.AsObject
-): PropertyChange<Date> {
-  // is publish being unset
-  if ('isPublished' in metadata && !metadata.isPublished) {
-    return PropertyChange.newUnset()
+  // Property is being unset
+  if (!metadata.isPublished) {
+    return undefined
   }
 
   // try to parse timestamp from publish date
-  const timestamp = metadata.date ? Date.parse(metadata.date) : NaN
+  const timestamp = isSet(metadata.date) ? Date.parse(metadata.date) : NaN
 
   // ensure date is valid
   if (isNaN(timestamp)) {
     invalidMetadata(`Invalid date used for publishedBeforeJoystream`, {
       timestamp,
     })
-    return PropertyChange.newNoChange()
+    return currentValue
   }
 
   // set new date
-  return PropertyChange.newChange(new Date(timestamp))
-}
-
-interface IConvertAssetParameters {
-  rawAsset: NewAsset
-  store: DatabaseManager
-  blockNumber: number
-  contentOwner: typeof DataObjectOwner
-}
-
-/*
-  Converts event asset into data object or list of URLs fit to be saved to db.
-*/
-async function convertAsset(parameters: IConvertAssetParameters): Promise<AssetStorageOrUrls> {
-  // is asset describing list of URLs?
-  if (parameters.rawAsset.isUrls) {
-    const urls = parameters.rawAsset.asUrls.toArray().map((item) => item.toString())
-
-    return urls
-  }
-
-  // !parameters.rawAsset.isUrls && parameters.rawAsset.isUpload // asset is in storage
-
-  // prepare data object
-  const contentParameters: ContentParameters = parameters.rawAsset.asUpload
-  const dataObject = await prepareDataObject(contentParameters, parameters.blockNumber, parameters.contentOwner)
-
-  return dataObject
-}
-
-interface IExtractAssetParameters {
-  assetIndex: number | undefined
-  assets: NewAsset[]
-  store: DatabaseManager
-  blockNumber: number
-  contentOwner: typeof DataObjectOwner
+  return new Date(timestamp)
 }
 
-/*
-  Selects asset from provided set of assets and prepares asset data fit to be saved to db.
-*/
-async function extractAsset(parameters: IExtractAssetParameters): Promise<PropertyChange<AssetStorageOrUrls>> {
-  // is asset being unset?
-  if (parameters.assetIndex === undefined) {
-    return PropertyChange.newUnset()
+async function processNewAsset(
+  ctx: EventContext & StoreContext,
+  asset: NewAsset,
+  owner: typeof DataObjectOwner
+): Promise<typeof Asset> {
+  if (asset.isUrls) {
+    const urls = asset.asUrls.toArray().map((url) => url.toString())
+    const resultAsset = new AssetExternal()
+    resultAsset.urls = JSON.stringify(urls)
+    return resultAsset
+  } else if (asset.isUpload) {
+    const contentParameters: ContentParameters = asset.asUpload
+    const dataObject = await createDataObject(ctx, contentParameters, owner)
+
+    const resultAsset = new AssetJoystreamStorage()
+    resultAsset.dataObjectId = dataObject.id
+    return resultAsset
+  } else {
+    unexpectedData('Unrecognized asset type', asset.type)
   }
-
-  // ensure asset index is valid
-  if (parameters.assetIndex >= parameters.assets.length) {
-    invalidMetadata(`Non-existing asset extraction requested`, {
-      assetsProvided: parameters.assets.length,
-      assetIndex: parameters.assetIndex,
-    })
-    return PropertyChange.newNoChange()
-  }
-
-  // convert asset to data object record
-  const asset = await convertAsset({
-    rawAsset: parameters.assets[parameters.assetIndex],
-    store: parameters.store,
-    blockNumber: parameters.blockNumber,
-    contentOwner: parameters.contentOwner,
-  })
-
-  return PropertyChange.newChange(asset)
 }
 
-/*
-  As a temporary messure to overcome yet-to-be-implemented features in Hydra, we are using redudant information
-  to describe asset state. This function introduces all redudant data needed to be saved to db.
-
-  Changes `result` argument!
-*/
-function integrateAsset<T>(
-  propertyName: string,
-  result: Record<string, unknown>,
-  asset: PropertyChange<AssetStorageOrUrls>
-): void {
-  // helpers - property names
-  const nameUrl = propertyName + 'Urls'
-  const nameDataObject = propertyName + 'DataObject'
-  const nameAvailability = propertyName + 'Availability'
-
-  if (asset.isNoChange()) {
-    return
-  }
-
-  if (asset.isUnset()) {
-    result[nameUrl] = []
-    result[nameAvailability] = AssetAvailability.INVALID
-    result[nameDataObject] = undefined // plan deletion (will have effect when saved to db)
-
-    return
-  }
-
-  const newValue = asset.getValue() as AssetStorageOrUrls
-
-  // is asset available on external URL(s)
-  if (!isAssetInStorage(newValue)) {
-    // (un)set asset's properties
-    result[nameUrl] = newValue
-    result[nameAvailability] = AssetAvailability.ACCEPTED
-    result[nameDataObject] = undefined // plan deletion (will have effect when saved to db)
-
-    return
-  }
-
-  // asset saved in storage
-
-  // prepare conversion table between liaison judgment and asset availability
-  const conversionTable = {
-    [LiaisonJudgement.ACCEPTED]: AssetAvailability.ACCEPTED,
-    [LiaisonJudgement.PENDING]: AssetAvailability.PENDING,
-  }
-
-  // (un)set asset's properties
-  result[nameUrl] = [] // plan deletion (will have effect when saved to db)
-  result[nameAvailability] = conversionTable[newValue.liaisonJudgement]
-  result[nameDataObject] = newValue
-}
-
-function extractVideoSize(assets: NewAsset[], assetIndex: number | undefined): number | undefined {
+function extractVideoSize(assets: NewAsset[], assetIndex: number | null | undefined): number | undefined {
   // escape if no asset is required
-  if (assetIndex === undefined) {
+  if (!isSet(assetIndex)) {
     return undefined
   }
 
@@ -572,14 +343,15 @@ function extractVideoSize(assets: NewAsset[], assetIndex: number | undefined): n
   return videoSize
 }
 
-async function prepareLanguage(
-  languageIso: string | undefined,
-  store: DatabaseManager,
-  blockNumber: number
-): Promise<PropertyChange<Language>> {
-  // is language being unset?
-  if (languageIso === undefined) {
-    return PropertyChange.newUnset()
+async function processLanguage(
+  ctx: EventContext & StoreContext,
+  currentLanguage: Language | undefined,
+  languageIso: string | undefined
+): Promise<Language | undefined> {
+  const { event, store } = ctx
+
+  if (!isSet(languageIso)) {
+    return currentLanguage
   }
 
   // validate language string
@@ -588,22 +360,23 @@ async function prepareLanguage(
   // ensure language string is valid
   if (!isValidIso) {
     invalidMetadata(`Invalid language ISO-639-1 provided`, languageIso)
-    return PropertyChange.newNoChange()
+    return currentLanguage
   }
 
   // load language
-  const language = await store.get(Language, { where: { iso: languageIso } as FindConditions<Language> })
+  const existingLanguage = await store.get(Language, { where: { iso: languageIso } })
 
   // return existing language if any
-  if (language) {
-    return PropertyChange.newChange(language)
+  if (existingLanguage) {
+    return existingLanguage
   }
 
   // create new language
   const newLanguage = new Language({
     iso: languageIso,
-    createdInBlock: blockNumber,
-
+    createdInBlock: event.blockNumber,
+    createdAt: new Date(event.blockTimestamp),
+    updatedAt: new Date(event.blockTimestamp),
     // TODO: remove these lines after Hydra auto-fills the values when cascading save (remove them on all places)
     createdById: '1',
     updatedById: '1',
@@ -611,30 +384,40 @@ async function prepareLanguage(
 
   await store.save<Language>(newLanguage)
 
-  return PropertyChange.newChange(newLanguage)
+  return newLanguage
 }
 
-async function prepareLicense(licenseProtobuf: LicenseMetadata.AsObject | undefined): Promise<License | undefined> {
-  // NOTE: Deletion of any previous license should take place in appropriate event handling function
-  //       and not here even it might appear so.
+async function processLicense(
+  ctx: StoreContext & EventContext,
+  existingLicense: License | undefined,
+  metadata: ILicense | null | undefined
+): Promise<License | undefined> {
+  const { store, event } = ctx
 
-  // is license being unset?
-  if (licenseProtobuf === undefined) {
-    return undefined
+  if (!isSet(metadata)) {
+    return existingLicense
   }
 
-  // license is meant to be deleted
-  if (isLicenseEmpty(licenseProtobuf)) {
-    return new License({})
+  if (isLicenseEmpty(metadata)) {
+    // license is meant to be deleted
+    if (existingLicense) {
+      await store.remove<License>(existingLicense)
+    }
+    return undefined
   }
 
-  // crete new license
-  const license = new License({
-    ...licenseProtobuf,
+  // license is meant to be created/updated
+  const license =
+    existingLicense ||
+    new License({
+      createdAt: new Date(event.blockTimestamp),
+      createdById: '1',
+      updatedById: '1',
+    })
+  license.updatedAt = new Date(event.blockTimestamp)
+  integrateMeta(license, metadata, ['attribution', 'code', 'customText'])
 
-    createdById: '1',
-    updatedById: '1',
-  })
+  await store.save<License>(license)
 
   return license
 }
@@ -643,94 +426,50 @@ async function prepareLicense(licenseProtobuf: LicenseMetadata.AsObject | undefi
   Checks if the protobuf contains a license with some fields filled or is an empty object (`{}` or `{someKey: undefined, ...}`).
   Empty object means deletion is requested.
 */
-function isLicenseEmpty(licenseObject: LicenseMetadata.AsObject): boolean {
-  const somePropertySet = Object.entries(licenseObject).reduce((acc, [key, value]) => {
-    return acc || value !== undefined
-  }, false)
+function isLicenseEmpty(licenseObject: ILicense): boolean {
+  const somePropertySet = Object.values(licenseObject).some((v) => isSet(v))
 
   return !somePropertySet
 }
 
-function prepareVideoMetadata(videoProtobuf: VideoMetadata.AsObject, videoSize: number | undefined): RawVideoMetadata {
-  const rawMeta = {
-    encoding: {
-      codecName: PropertyChange.fromObjectProperty<string, 'codecName', MediaTypeMetadata.AsObject>(
-        videoProtobuf.mediaType || {},
-        'codecName'
-      ),
-      container: PropertyChange.fromObjectProperty<string, 'container', MediaTypeMetadata.AsObject>(
-        videoProtobuf.mediaType || {},
-        'container'
-      ),
-      mimeMediaType: PropertyChange.fromObjectProperty<string, 'mimeMediaType', MediaTypeMetadata.AsObject>(
-        videoProtobuf.mediaType || {},
-        'mimeMediaType'
-      ),
-    },
-    pixelWidth: PropertyChange.fromObjectProperty<number, 'mediaPixelWidth', VideoMetadata.AsObject>(
-      videoProtobuf,
-      'mediaPixelWidth'
-    ),
-    pixelHeight: PropertyChange.fromObjectProperty<number, 'mediaPixelHeight', VideoMetadata.AsObject>(
-      videoProtobuf,
-      'mediaPixelHeight'
-    ),
-    size: videoSize === undefined ? PropertyChange.newNoChange() : PropertyChange.newChange(videoSize),
-  } as RawVideoMetadata
-
-  return rawMeta
-}
-
-async function prepareVideoCategory(
-  categoryId: number | undefined,
-  store: DatabaseManager
-): Promise<PropertyChange<VideoCategory>> {
-  // is category being unset?
-  if (categoryId === undefined) {
-    return PropertyChange.newUnset()
-  }
+async function processVideoCategory(
+  ctx: EventContext & StoreContext,
+  currentCategory: VideoCategory | undefined,
+  categoryId: number
+): Promise<VideoCategory | undefined> {
+  const { store } = ctx
 
   // load video category
   const category = await store.get(VideoCategory, {
-    where: { id: categoryId.toString() } as FindConditions<VideoCategory>,
+    where: { id: categoryId.toString() },
   })
 
   // ensure video category exists
   if (!category) {
     invalidMetadata('Non-existing video category association with video requested', categoryId)
-    return PropertyChange.newNoChange()
+    return currentCategory
   }
 
-  return PropertyChange.newChange(category)
+  return category
 }
 
-function convertMetadataToObject<T>(metadata: jspb.Message): T {
-  const metaAsObject = metadata.toObject()
-  const result = {} as T
-
-  for (const key in metaAsObject) {
-    const funcNameBase = key.charAt(0).toUpperCase() + key.slice(1)
-    const hasFuncName = 'has' + funcNameBase
-    const isSet =
-      funcNameBase === 'PersonsList' // there is no `VideoMetadata.hasPersonsList` method from unkown reason -> create exception
-        ? true
-        : metadata[hasFuncName]()
+async function processChannelCategory(
+  ctx: EventContext & StoreContext,
+  currentCategory: ChannelCategory | undefined,
+  categoryId: number
+): Promise<ChannelCategory | undefined> {
+  const { store } = ctx
 
-    if (!isSet) {
-      continue
-    }
-
-    const getFuncName = 'get' + funcNameBase
-    const value = metadata[getFuncName]()
-
-    // TODO: check that recursion trully works
-    if (value instanceof jspb.Message) {
-      result[key] = convertMetadataToObject(value)
-      continue
-    }
+  // load video category
+  const category = await store.get(ChannelCategory, {
+    where: { id: categoryId.toString() },
+  })
 
-    result[key] = metaAsObject[key]
+  // ensure video category exists
+  if (!category) {
+    invalidMetadata('Non-existing channel category association with channel requested', categoryId)
+    return currentCategory
   }
 
-  return result
+  return category
 }

+ 67 - 233
query-node/mappings/content/video.ts

@@ -1,32 +1,20 @@
 /*
 eslint-disable @typescript-eslint/naming-convention
 */
-import BN from 'bn.js'
 import { EventContext, StoreContext } from '@dzlzv/hydra-common'
-import { FindConditions, In } from 'typeorm'
+import { In } from 'typeorm'
 import { Content } from '../generated/types'
-import { inconsistentState, logger } from '../common'
-import { convertContentActorToDataObjectOwner, readProtobuf, readProtobufWithAssets, RawVideoMetadata } from './utils'
-import {
-  AssetAvailability,
-  Channel,
-  Video,
-  VideoCategory,
-  VideoMediaEncoding,
-  VideoMediaMetadata,
-  License,
-} from 'query-node/dist/model'
+import { deserializeMetadata, inconsistentState, integrateMeta, logger } from '../common'
+import { processVideoMetadata } from './utils'
+import { Channel, Video, VideoCategory } from 'query-node/dist/model'
+import { VideoMetadata, VideoCategoryMetadata } from '@joystream/metadata-protobuf'
 
 export async function content_VideoCategoryCreated({ store, event }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { videoCategoryId, videoCategoryCreationParameters } = new Content.VideoCategoryCreatedEvent(event).data
+  const [, videoCategoryId, videoCategoryCreationParameters] = new Content.VideoCategoryCreatedEvent(event).params
 
   // read metadata
-  const protobufContent = await readProtobuf(new VideoCategory(), {
-    metadata: videoCategoryCreationParameters.meta,
-    store,
-    blockNumber: event.blockNumber,
-  })
+  const metadata = (await deserializeMetadata(VideoCategoryMetadata, videoCategoryCreationParameters.meta)) || {}
 
   // create new video category
   const videoCategory = new VideoCategory({
@@ -34,14 +22,11 @@ export async function content_VideoCategoryCreated({ store, event }: EventContex
     id: videoCategoryId.toString(),
     videos: [],
     createdInBlock: event.blockNumber,
-
     // fill in auto-generated fields
     createdAt: new Date(event.blockTimestamp),
     updatedAt: new Date(event.blockTimestamp),
-
-    // integrate metadata
-    ...protobufContent,
   })
+  integrateMeta(videoCategory, metadata, ['name'])
 
   // save video category
   await store.save<VideoCategory>(videoCategory)
@@ -52,11 +37,11 @@ export async function content_VideoCategoryCreated({ store, event }: EventContex
 
 export async function content_VideoCategoryUpdated({ store, event }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { videoCategoryId, videoCategoryUpdateParameters } = new Content.VideoCategoryUpdatedEvent(event).data
+  const [, videoCategoryId, videoCategoryUpdateParameters] = new Content.VideoCategoryUpdatedEvent(event).params
 
   // load video category
   const videoCategory = await store.get(VideoCategory, {
-    where: { id: videoCategoryId.toString() } as FindConditions<VideoCategory>,
+    where: { id: videoCategoryId.toString() },
   })
 
   // ensure video category exists
@@ -65,16 +50,8 @@ export async function content_VideoCategoryUpdated({ store, event }: EventContex
   }
 
   // read metadata
-  const protobufContent = await readProtobuf(new VideoCategory(), {
-    metadata: videoCategoryUpdateParameters.new_meta,
-    store,
-    blockNumber: event.blockNumber,
-  })
-
-  // update all fields read from protobuf
-  for (const [key, value] of Object.entries(protobufContent)) {
-    videoCategory[key] = value
-  }
+  const newMeta = deserializeMetadata(VideoCategoryMetadata, videoCategoryUpdateParameters.new_meta) || {}
+  integrateMeta(videoCategory, newMeta, ['name'])
 
   // set last update time
   videoCategory.updatedAt = new Date(event.blockTimestamp)
@@ -88,11 +65,11 @@ export async function content_VideoCategoryUpdated({ store, event }: EventContex
 
 export async function content_VideoCategoryDeleted({ store, event }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { videoCategoryId } = new Content.VideoCategoryDeletedEvent(event).data
+  const [videoCategoryId] = new Content.VideoCategoryDeletedEvent(event).params
 
   // load video category
   const videoCategory = await store.get(VideoCategory, {
-    where: { id: videoCategoryId.toString() } as FindConditions<VideoCategory>,
+    where: { id: videoCategoryId.toString() },
   })
 
   // ensure video category exists
@@ -109,58 +86,30 @@ export async function content_VideoCategoryDeleted({ store, event }: EventContex
 
 /// //////////////// Video //////////////////////////////////////////////////////
 
-export async function content_VideoCreated({ store, event }: EventContext & StoreContext): Promise<void> {
+export async function content_VideoCreated(ctx: EventContext & StoreContext): Promise<void> {
+  const { store, event } = ctx
   // read event data
-  const { channelId, videoId, videoCreationParameters, contentActor } = new Content.VideoCreatedEvent(event).data
-
-  // read metadata
-  const protobufContent = await readProtobufWithAssets(new Video(), {
-    metadata: videoCreationParameters.meta,
-    store,
-    blockNumber: event.blockNumber,
-    assets: videoCreationParameters.assets,
-    contentOwner: convertContentActorToDataObjectOwner(contentActor, channelId.toNumber()),
-  })
+  const [, channelId, videoId, videoCreationParameters] = new Content.VideoCreatedEvent(event).params
 
   // load channel
-  const channel = await store.get(Channel, { where: { id: channelId.toString() } as FindConditions<Channel> })
+  const channel = await store.get(Channel, { where: { id: channelId.toString() } })
 
   // ensure channel exists
   if (!channel) {
     return inconsistentState('Trying to add video to non-existing channel', channelId)
   }
 
-  // prepare video media metadata (if any)
-  const fixedProtobuf = integrateVideoMediaMetadata(null, protobufContent, event.blockNumber)
-
-  const licenseIsEmpty = fixedProtobuf.license && !Object.keys(fixedProtobuf.license).length
-  if (licenseIsEmpty) {
-    // license deletion was requested - ignore it and consider it empty
-    delete fixedProtobuf.license
-  }
-
-  // create new video
   const video = new Video({
-    // main data
     id: videoId.toString(),
     isCensored: false,
-    channel,
-    createdInBlock: event.blockNumber,
     isFeatured: false,
-
-    // default values for properties that might or might not be filled by metadata
-    thumbnailPhotoUrls: [],
-    thumbnailPhotoAvailability: AssetAvailability.INVALID,
-    mediaUrls: [],
-    mediaAvailability: AssetAvailability.INVALID,
-
-    // fill in auto-generated fields
+    createdInBlock: event.blockNumber,
     createdAt: new Date(event.blockTimestamp),
     updatedAt: new Date(event.blockTimestamp),
-
-    // integrate metadata
-    ...fixedProtobuf,
   })
+  // deserialize & process metadata
+  const metadata = deserializeMetadata(VideoMetadata, videoCreationParameters.meta) || {}
+  await processVideoMetadata(ctx, channel, video, metadata, videoCreationParameters.assets)
 
   // save video
   await store.save<Video>(video)
@@ -169,13 +118,14 @@ export async function content_VideoCreated({ store, event }: EventContext & Stor
   logger.info('Video has been created', { id: videoId })
 }
 
-export async function content_VideoUpdated({ store, event }: EventContext & StoreContext): Promise<void> {
+export async function content_VideoUpdated(ctx: EventContext & StoreContext): Promise<void> {
+  const { event, store } = ctx
   // read event data
-  const { videoId, videoUpdateParameters, contentActor } = new Content.VideoUpdatedEvent(event).data
+  const [, videoId, videoUpdateParameters] = new Content.VideoUpdatedEvent(event).params
 
   // load video
   const video = await store.get(Video, {
-    where: { id: videoId.toString() } as FindConditions<Video>,
+    where: { id: videoId.toString() },
     relations: ['channel', 'license'],
   })
 
@@ -185,39 +135,12 @@ export async function content_VideoUpdated({ store, event }: EventContext & Stor
   }
 
   // prepare changed metadata
-  const newMetadata = videoUpdateParameters.new_meta.unwrapOr(null)
-
-  // license must be deleted AFTER video is saved - plan a license deletion by assigning it to this variable
-  let licenseToDelete: License | null = null
+  const newMetadataBytes = videoUpdateParameters.new_meta.unwrapOr(null)
 
   // update metadata if it was changed
-  if (newMetadata) {
-    const protobufContent = await readProtobufWithAssets(new Video(), {
-      metadata: newMetadata,
-      store,
-      blockNumber: event.blockNumber,
-      assets: videoUpdateParameters.assets.unwrapOr([]),
-      contentOwner: convertContentActorToDataObjectOwner(contentActor, new BN(video.channel.id).toNumber()),
-    })
-
-    // prepare video media metadata (if any)
-    const fixedProtobuf = integrateVideoMediaMetadata(video, protobufContent, event.blockNumber)
-
-    // remember original license
-    const originalLicense = video.license
-
-    // update all fields read from protobuf
-    for (const [key, value] of Object.entries(fixedProtobuf)) {
-      video[key] = value
-    }
-
-    // license has changed - plan old license delete
-    if (originalLicense && video.license !== originalLicense) {
-      ;[video.license, licenseToDelete] = handleLicenseUpdate(originalLicense, video.license)
-    } else if (!Object.keys(video.license || {}).length) {
-      // license deletion was requested event no license exists?
-      delete video.license // ensure license is empty
-    }
+  if (newMetadataBytes) {
+    const newMetadata = deserializeMetadata(VideoMetadata, newMetadataBytes) || {}
+    await processVideoMetadata(ctx, video.channel!, video, newMetadata, videoUpdateParameters.assets.unwrapOr([]))
   }
 
   // set last update time
@@ -226,21 +149,16 @@ export async function content_VideoUpdated({ store, event }: EventContext & Stor
   // save video
   await store.save<Video>(video)
 
-  // delete old license if it's planned
-  if (licenseToDelete) {
-    await store.remove<License>(licenseToDelete)
-  }
-
   // emit log event
   logger.info('Video has been updated', { id: videoId })
 }
 
 export async function content_VideoDeleted({ store, event }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { videoId } = new Content.VideoDeletedEvent(event).data
+  const [, videoId] = new Content.VideoDeletedEvent(event).params
 
   // load video
-  const video = await store.get(Video, { where: { id: videoId.toString() } as FindConditions<Video> })
+  const video = await store.get(Video, { where: { id: videoId.toString() } })
 
   // ensure video exists
   if (!video) {
@@ -259,10 +177,10 @@ export async function content_VideoCensorshipStatusUpdated({
   event,
 }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { videoId, isCensored } = new Content.VideoCensorshipStatusUpdatedEvent(event).data
+  const [, videoId, isCensored] = new Content.VideoCensorshipStatusUpdatedEvent(event).params
 
   // load video
-  const video = await store.get(Video, { where: { id: videoId.toString() } as FindConditions<Video> })
+  const video = await store.get(Video, { where: { id: videoId.toString() } })
 
   // ensure video exists
   if (!video) {
@@ -284,145 +202,61 @@ export async function content_VideoCensorshipStatusUpdated({
 
 export async function content_FeaturedVideosSet({ store, event }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { videoId: videoIds } = new Content.FeaturedVideosSetEvent(event).data
+  const [, videoIds] = new Content.FeaturedVideosSetEvent(event).params
 
   // load old featured videos
-  const existingFeaturedVideos = await store.getMany(Video, { where: { isFeatured: true } as FindConditions<Video> })
+  const existingFeaturedVideos = await store.getMany(Video, { where: { isFeatured: true } })
 
   // comparison utility
   const isSame = (videoIdA: string) => (videoIdB: string) => videoIdA === videoIdB
 
   // calculate diff sets
-  const toRemove = existingFeaturedVideos.filter(
-    (existingFV) => !videoIds.map((item) => item.toHex()).some(isSame(existingFV.id))
+  const videosToRemove = existingFeaturedVideos.filter(
+    (existingFV) => !videoIds.map((videoId) => videoId.toString()).some(isSame(existingFV.id))
+  )
+  const videoIdsToAdd = videoIds.filter(
+    (videoId) => !existingFeaturedVideos.map((existingFV) => existingFV.id).some(isSame(videoId.toString()))
   )
-  const toAdd = videoIds.filter((video) => !existingFeaturedVideos.map((item) => item.id).some(isSame(video.toHex())))
 
   // mark previously featured videos as not-featured
-  for (const video of toRemove) {
-    video.isFeatured = false
-
-    // set last update time
-    video.updatedAt = new Date(event.blockTimestamp)
+  await Promise.all(
+    videosToRemove.map(async (video) => {
+      video.isFeatured = false
+      // set last update time
+      video.updatedAt = new Date(event.blockTimestamp)
 
-    await store.save<Video>(video)
-  }
-
-  // escape if no featured video needs to be added
-  if (!toAdd) {
-    // emit log event
-    logger.info('Featured videos unchanged')
-
-    return
-  }
+      await store.save<Video>(video)
+    })
+  )
 
   // load previously not-featured videos that are meant to be featured
   const videosToAdd = await store.getMany(Video, {
     where: {
-      id: In(toAdd.map((item) => item.toString())),
-    } as FindConditions<Video>,
+      id: In(videoIdsToAdd.map((item) => item.toString())),
+    },
   })
 
-  if (videosToAdd.length !== toAdd.length) {
-    return inconsistentState('At least one non-existing video featuring requested', toAdd)
+  if (videosToAdd.length !== videoIdsToAdd.length) {
+    return inconsistentState(
+      'At least one non-existing video featuring requested',
+      videosToAdd.map((v) => v.id)
+    )
   }
 
   // mark previously not-featured videos as featured
-  for (const video of videosToAdd) {
-    video.isFeatured = true
+  await Promise.all(
+    videosToAdd.map(async (video) => {
+      video.isFeatured = true
 
-    // set last update time
-    video.updatedAt = new Date(event.blockTimestamp)
+      // set last update time
+      video.updatedAt = new Date(event.blockTimestamp)
 
-    await store.save<Video>(video)
-  }
-
-  // emit log event
-  logger.info('New featured videos have been set', { videoIds })
-}
-
-/// //////////////// Helpers ////////////////////////////////////////////////////
-
-/*
-  Integrates video metadata-related data into existing data (if any) or creates a new record.
-
-  NOTE: type hack - `RawVideoMetadata` is accepted for `metadata` instead of `Partial<Video>`
-        see `prepareVideoMetadata()` in `utils.ts` for more info
-*/
-function integrateVideoMediaMetadata(
-  existingRecord: Video | null,
-  metadata: Partial<Video>,
-  blockNumber: number
-): Partial<Video> {
-  if (!metadata.mediaMetadata) {
-    return metadata
-  }
-
-  // fix TS type
-  const rawMediaMetadata = (metadata.mediaMetadata as unknown) as RawVideoMetadata
-
-  // ensure encoding object
-  const encoding =
-    (existingRecord && existingRecord.mediaMetadata && existingRecord.mediaMetadata.encoding) ||
-    new VideoMediaEncoding({
-      createdById: '1',
-      updatedById: '1',
+      await store.save<Video>(video)
     })
+  )
 
-  // integrate media encoding-related data
-  rawMediaMetadata.encoding.codecName.integrateInto(encoding, 'codecName')
-  rawMediaMetadata.encoding.container.integrateInto(encoding, 'container')
-  rawMediaMetadata.encoding.mimeMediaType.integrateInto(encoding, 'mimeMediaType')
-
-  // ensure media metadata object
-  const mediaMetadata =
-    (existingRecord && existingRecord.mediaMetadata) ||
-    new VideoMediaMetadata({
-      createdInBlock: blockNumber,
-
-      createdById: '1',
-      updatedById: '1',
-    })
-
-  // integrate media-related data
-  rawMediaMetadata.pixelWidth.integrateInto(mediaMetadata, 'pixelWidth')
-  rawMediaMetadata.pixelHeight.integrateInto(mediaMetadata, 'pixelHeight')
-  rawMediaMetadata.size.integrateInto(mediaMetadata, 'size')
-
-  // connect encoding to media metadata object
-  mediaMetadata.encoding = encoding
-
-  return {
-    ...metadata,
-    mediaMetadata,
-  }
-}
-
-// returns tuple `[newLicenseForVideo, oldLicenseToBeDeleted]`
-function handleLicenseUpdate(originalLicense, newLicense): [License | undefined, License | null] {
-  const isNewEmpty = !Object.keys(newLicense).length
-
-  if (!originalLicense && isNewEmpty) {
-    return [undefined, null]
-  }
-
-  if (!originalLicense) {
-    // && !isNewEmpty
-    return [newLicense, null]
-  }
-
-  if (!isNewEmpty) {
-    // && originalLicense
-    return [
-      new License({
-        ...originalLicense,
-        ...newLicense,
-      }),
-      null,
-    ]
-  }
-
-  // originalLicense && isNewEmpty
-
-  return [originalLicense, null]
+  // emit log event
+  const newFeaturedVideoIds = videoIds.map((id) => id.toString())
+  const removedFeaturedVideosIds = videosToRemove.map((v) => v.id)
+  logger.info('New featured videos have been set', { newFeaturedVideoIds, removedFeaturedVideosIds })
 }
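
Note: deserializeMetadata and processVideoMetadata are helpers defined outside this diff. A minimal sketch of what a deserializeMetadata-style helper could look like, assuming the classes exported by @joystream/metadata-protobuf are standard protobufjs static-target messages (static decode/toObject); names beyond the protobufjs and @polkadot/types APIs are assumptions:

import { Bytes } from '@polkadot/types'
import { invalidMetadata } from './common'

// Minimal shape of a protobufjs static-target message class (assumption about @joystream/metadata-protobuf).
interface MetadataClass<T> {
  decode(bytes: Uint8Array): T
  toObject(message: T, options?: Record<string, unknown>): Partial<T>
}

// Sketch only: decode SCALE Bytes into a plain metadata object, or return null on malformed input.
export function deserializeMetadata<T>(metadataType: MetadataClass<T>, metadataBytes: Bytes): Partial<T> | null {
  try {
    // decode the bare bytes (without the SCALE length prefix) using the generated class
    const message = metadataType.decode(metadataBytes.toU8a(true))
    return metadataType.toObject(message, { longs: String })
  } catch (e) {
    invalidMetadata(`Failed to deserialize metadata: ${e}`)
    return null
  }
}

The "|| {}" fallback in the handlers above then covers the null returned for malformed metadata.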

+ 6 - 4
query-node/mappings/genesis-data/index.ts

@@ -1,4 +1,6 @@
-export { default as members } from './members'
-export { default as membershipSystem } from './membershipSystem'
-export { default as workers } from './workers'
-export { default as workingGroups } from './workingGroups'
+import members from './members.json'
+import membershipSystem from './membershipSystem.json'
+import workers from './workers.json'
+import workingGroups from './workingGroups.json'
+
+export { members, membershipSystem, workers, workingGroups }

+ 2 - 1
query-node/mappings/genesis.ts

@@ -24,7 +24,8 @@ export async function loadGenesisData({ store }: StoreContext): Promise<void> {
           createdAt: new Date(0),
           updatedAt: new Date(0),
           id: group.name,
-          ...group,
+          name: group.name,
+          budget: new BN(group.budget),
         })
       )
     )
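
The genesis fixtures are now imported as plain JSON (see genesis-data/index.ts above), so numeric fields arrive as JSON strings or numbers and are converted explicitly rather than spread into the entity. A rough illustration, assuming .json imports are enabled via resolveJsonModule in the mappings' tsconfig and using only the name/budget fields referenced above:

import BN from 'bn.js'
import { workingGroups } from './genesis-data'

// bn.js accepts string and number input, so the raw JSON value can be converted directly.
const groupBudgets = workingGroups.map((group) => ({
  name: group.name,
  budget: new BN(group.budget),
}))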

+ 1 - 0
query-node/mappings/index.ts

@@ -12,3 +12,4 @@ export * from './workingGroups'
 export * from './proposals'
 export * from './proposalsDiscussion'
 export * from './forum'
+export * from './genesis'

+ 0 - 1
query-node/mappings/package.json

@@ -16,7 +16,6 @@
     "warthog": "https://github.com/metmirr/warthog/releases/download/v2.30.0/warthog-v2.30.0.tgz",
     "@joystream/content-metadata-protobuf": "^1.1.0",
     "@joystream/metadata-protobuf": "^1.0.0",
-    "query-node": "^0.1.0",
     "iso-639-1": "^2.1.8"
   },
   "devDependencies": {

+ 44 - 114
query-node/mappings/storage.ts

@@ -2,13 +2,13 @@
 eslint-disable @typescript-eslint/naming-convention
 */
 import { EventContext, StoreContext, DatabaseManager } from '@dzlzv/hydra-common'
-import { FindConditions, In } from 'typeorm'
+import { FindConditions, In, Raw } from 'typeorm'
 import {
+  createDataObject,
   getWorker,
   getWorkingGroupModuleName,
   inconsistentState,
   logger,
-  prepareDataObject,
   unexpectedData,
 } from './common'
 import { DataDirectory } from './generated/types'
@@ -18,7 +18,6 @@ import { registry } from '@joystream/types'
 import {
   Channel,
   Video,
-  AssetAvailability,
   DataObject,
   DataObjectOwner,
   DataObjectOwnerMember,
@@ -27,22 +26,18 @@ import {
   DataObjectOwnerCouncil,
   DataObjectOwnerWorkingGroup,
   LiaisonJudgement,
+  AssetJoystreamStorage,
 } from 'query-node/dist/model'
 
-export async function dataDirectory_ContentAdded({ store, event }: EventContext & StoreContext): Promise<void> {
+export async function dataDirectory_ContentAdded(ctx: EventContext & StoreContext): Promise<void> {
+  const { event } = ctx
   // read event data
-  const { contentParameters, storageObjectOwner } = new DataDirectory.ContentAddedEvent(event).data
+  const [contentParameters, storageObjectOwner] = new DataDirectory.ContentAddedEvent(event).params
 
   // save all content objects
   for (const parameters of contentParameters) {
     const owner = convertStorageObjectOwner(storageObjectOwner)
-    const dataObject = await prepareDataObject(parameters, event.blockNumber, owner)
-
-    // fill in auto-generated fields
-    dataObject.createdAt = new Date(event.blockTimestamp)
-    dataObject.updatedAt = new Date(event.blockTimestamp)
-
-    await store.save<DataObject>(dataObject)
+    await createDataObject(ctx, parameters, owner)
   }
 
   // emit log event
@@ -53,7 +48,7 @@ export async function dataDirectory_ContentAdded({ store, event }: EventContext
 
 export async function dataDirectory_ContentRemoved({ store, event }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { contentId: contentIds } = new DataDirectory.ContentRemovedEvent(event).data
+  const [contentIds] = new DataDirectory.ContentRemovedEvent(event).params
 
   // load assets
   const dataObjects = await store.getMany(DataObject, {
@@ -68,7 +63,7 @@ export async function dataDirectory_ContentRemoved({ store, event }: EventContex
   // remove assets from database
   for (const item of dataObjects) {
     // ensure dataObject is nowhere used to prevent db constraint error
-    await disconnectDataObjectRelations(store, item)
+    await unsetDataObjectRelations(store, item)
 
     // remove data object
     await store.remove<DataObject>(item)
@@ -80,7 +75,7 @@ export async function dataDirectory_ContentRemoved({ store, event }: EventContex
 
 export async function dataDirectory_ContentAccepted({ store, event }: EventContext & StoreContext): Promise<void> {
   // read event data
-  const { contentId, storageProviderId } = new DataDirectory.ContentAcceptedEvent(event).data
+  const [contentId, storageProviderId] = new DataDirectory.ContentAcceptedEvent(event).params
   const encodedContentId = encodeContentId(contentId)
 
   // load asset
@@ -108,119 +103,54 @@ export async function dataDirectory_ContentAccepted({ store, event }: EventConte
 
   // emit log event
   logger.info('Storage content has been accepted', { id: encodedContentId })
-
-  // update asset availability for all connected channels and videos
-  // this will not be needed after redudant AssetAvailability will be removed (after some Hydra upgrades)
-  await updateConnectedAssets(store, dataObject)
-}
-
-/// //////////////// Updating connected entities ////////////////////////////////
-
-async function updateConnectedAssets(store: DatabaseManager, dataObject: DataObject) {
-  await updateSingleConnectedAsset(store, new Channel(), 'avatarPhoto', dataObject)
-  await updateSingleConnectedAsset(store, new Channel(), 'coverPhoto', dataObject)
-
-  await updateSingleConnectedAsset(store, new Video(), 'thumbnailPhoto', dataObject)
-  await updateSingleConnectedAsset(store, new Video(), 'media', dataObject)
 }
+// TODO: use ON DELETE SET NULL at the database/typeorm level instead?
+async function unsetDataObjectRelations(store: DatabaseManager, dataObject: DataObject) {
+  const channelAssets = ['avatarPhoto', 'coverPhoto'] as const
+  const videoAssets = ['thumbnailPhoto', 'media'] as const
 
-// async function updateSingleConnectedAsset(store: DatabaseManager, type: typeof Channel | typeof Video, propertyName: string, dataObject: DataObject) {
-async function updateSingleConnectedAsset<T extends Channel | Video>(
-  store: DatabaseManager,
-  type: T,
-  propertyName: string,
-  dataObject: DataObject
-) {
-  // prepare lookup condition
-  const condition = {
-    where: {
-      [propertyName + 'DataObject']: dataObject,
-    },
-  } // as FindConditions<T>
-
+  // TODO: FIXME: Queries to be verified!
   // NOTE: we don't need to retrieve multiple channels/videos via `store.getMany()` because dataObject
   //       is allowed to be associated only with one channel/video in runtime
+  const channel = await store.get(Channel, {
+    where: channelAssets.map((assetName) => ({
+      [assetName]: Raw((alias) => `${alias}::json->'dataObjectId' = :id`, {
+        id: dataObject.id,
+      }),
+    })),
+  })
+  const video = await store.get(Video, {
+    where: videoAssets.map((assetName) => ({
+      [assetName]: Raw((alias) => `${alias}::json->'dataObjectId' = :id`, {
+        id: dataObject.id,
+      }),
+    })),
+  })
 
-  // in therory the following condition(s) can be generalized `... store.get(type, ...` but in practice it doesn't work :-\
-  const item = type instanceof Channel ? await store.get(Channel, condition) : await store.get(Video, condition)
-
-  // escape when no dataObject association found
-  if (!item) {
-    return
-  }
-
-  item[propertyName + 'Availability'] = AssetAvailability.ACCEPTED
-
-  if (type instanceof Channel) {
-    await store.save<Channel>(item)
-
-    // emit log event
-    logger.info('Channel using Content has been accepted', {
-      channelId: item.id.toString(),
-      joystreamContentId: dataObject.joystreamContentId,
-    })
-  } else {
-    await store.save<Video>(item)
-
-    // emit log event
-    logger.info('Video using Content has been accepted', {
-      videoId: item.id.toString(),
-      joystreamContentId: dataObject.joystreamContentId,
+  if (channel) {
+    channelAssets.forEach((assetName) => {
+      if (channel[assetName] && (channel[assetName] as AssetJoystreamStorage).dataObjectId === dataObject.id) {
+        channel[assetName] = undefined
+      }
     })
-  }
-}
-
-// removes connection between dataObject and other entities
-async function disconnectDataObjectRelations(store: DatabaseManager, dataObject: DataObject) {
-  await disconnectSingleDataObjectRelation(store, new Channel(), 'avatarPhoto', dataObject)
-  await disconnectSingleDataObjectRelation(store, new Channel(), 'coverPhoto', dataObject)
-
-  await disconnectSingleDataObjectRelation(store, new Video(), 'thumbnailPhoto', dataObject)
-  await disconnectSingleDataObjectRelation(store, new Video(), 'media', dataObject)
-}
-
-async function disconnectSingleDataObjectRelation<T extends Channel | Video>(
-  store: DatabaseManager,
-  type: T,
-  propertyName: string,
-  dataObject: DataObject
-) {
-  // prepare lookup condition
-  const condition = {
-    where: {
-      [propertyName + 'DataObject']: dataObject,
-    },
-  } // as FindConditions<T>
-
-  // NOTE: we don't need to retrieve multiple channels/videos via `store.getMany()` because dataObject
-  //       is allowed to be associated only with one channel/video in runtime
-
-  // in therory the following condition(s) can be generalized `... store.get(type, ...` but in practice it doesn't work :-\
-  const item = type instanceof Channel ? await store.get(Channel, condition) : await store.get(Video, condition)
-
-  // escape when no dataObject association found
-  if (!item) {
-    return
-  }
-
-  item[propertyName + 'Availability'] = AssetAvailability.INVALID
-  item[propertyName + 'DataObject'] = null
-
-  if (type instanceof Channel) {
-    await store.save<Channel>(item)
+    await store.save<Channel>(channel)
 
     // emit log event
     logger.info('Content has been disconnected from Channel', {
-      channelId: item.id.toString(),
+      channelId: channel.id.toString(),
       joystreamContentId: dataObject.joystreamContentId,
     })
-  } else {
-    // type instanceof Video
-    await store.save<Video>(item)
+  } else if (video) {
+    videoAssets.forEach((assetName) => {
+      if (video[assetName] && (video[assetName] as AssetJoystreamStorage).dataObjectId === dataObject.id) {
+        video[assetName] = undefined
+      }
+    })
+    await store.save<Video>(video)
 
     // emit log event
     logger.info('Content has been disconnected from Video', {
-      videoId: item.id.toString(),
+      videoId: video.id.toString(),
       joystreamContentId: dataObject.joystreamContentId,
     })
   }
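
A note on the Raw conditions flagged with "Queries to be verified" above: in Postgres, -> extracts a json value while ->> extracts text, so a comparison against a string parameter usually needs the text operator (or an explicit cast). A hedged variant of the lookup condition, assuming the variant columns are stored as json/jsonb:

import { Raw } from 'typeorm'

// Find-condition matching rows whose json column references the given DataObject id (text comparison).
const ownsDataObject = (id: string) =>
  Raw((alias) => `${alias}::jsonb->>'dataObjectId' = :id`, { id })

// Hypothetical usage mirroring the handler above:
// const channel = await store.get(Channel, {
//   where: [{ avatarPhoto: ownsDataObject(dataObject.id) }, { coverPhoto: ownsDataObject(dataObject.id) }],
// })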

+ 18 - 57
query-node/schemas/content.graphql

@@ -1,15 +1,16 @@
-"Asset availability representation"
-enum AssetAvailability {
-  "Asset is available in storage"
-  ACCEPTED
-
-  "Asset is being uploaded to storage"
-  PENDING
+type AssetExternal @variant {
+  # FIXME: [String!] currently not supported in variants
+  "JSON array of the urls"
+  urls: String!
+}
 
-  "Invalid storage (meta)data used"
-  INVALID
+type AssetJoystreamStorage @variant {
+  "Related DataObject entity"
+  dataObject: DataObject!
 }
 
+union Asset = AssetExternal | AssetJoystreamStorage
+
 "Category of media channel"
 type ChannelCategory @entity {
   id: ID!
@@ -53,31 +54,11 @@ type Channel @entity {
   "The description of a Channel"
   description: String
 
-  ### Cover photo asset ###
-
-  # Channel's cover (background) photo. Recommended ratio: 16:9.
-
-  "Asset's data object"
-  coverPhotoDataObject: DataObject
-
-  "URLs where the asset content can be accessed (if any)"
-  coverPhotoUrls: [String!]
-
-  "Availability meta information"
-  coverPhotoAvailability: AssetAvailability!
-
-  ### Avatar photo asset ###
-
-  # Channel's avatar photo.
-
-  "Asset's data object"
-  avatarPhotoDataObject: DataObject
+  "Channel's cover (background) photo asset. Recommended ratio: 16:9."
+  coverPhoto: Asset
 
-  "URLs where the asset content can be accessed (if any)"
-  avatarPhotoUrls: [String!]
-
-  "Availability meta information"
-  avatarPhotoAvailability: AssetAvailability!
+  "Channel's avatar photo asset."
+  avatarPhoto: Asset
 
   ##########################
 
@@ -139,18 +120,8 @@ type Video @entity {
   "Video duration in seconds"
   duration: Int
 
-  ### Thumbnail asset ###
-
-  # Video thumbnail (recommended ratio: 16:9)
-
-  "Asset's data object"
-  thumbnailPhotoDataObject: DataObject
-
-  "URLs where the asset content can be accessed (if any)"
-  thumbnailPhotoUrls: [String!]
-
-  "Availability meta information"
-  thumbnailPhotoAvailability: AssetAvailability!
+  "Video thumbnail asset (recommended ratio: 16:9)"
+  thumbnailPhoto: Asset
 
   ##########################
 
@@ -175,18 +146,8 @@ type Video @entity {
   "License under the video is published"
   license: License
 
-  ### Media asset ###
-
-  # Reference to video asset
-
-  "Asset's data object"
-  mediaDataObject: DataObject
-
-  "URLs where the asset content can be accessed (if any)"
-  mediaUrls: [String!]
-
-  "Availability meta information"
-  mediaAvailability: AssetAvailability!
+  "Video media asset"
+  media: Asset
 
   ##########################
 

+ 1 - 64
yarn.lock

@@ -22847,7 +22847,7 @@ pg-types@^2.1.0, pg-types@^2.2.0:
     postgres-date "~1.0.4"
     postgres-interval "^1.1.0"
 
-pg@8.0.3, pg@^7.12.1, pg@^8.4.0:
+pg@8.0.3, pg@^8.3.2, pg@^8.4.0:
   version "8.6.0"
   resolved "https://registry.yarnpkg.com/pg/-/pg-8.6.0.tgz#e222296b0b079b280cce106ea991703335487db2"
   integrity sha512-qNS9u61lqljTDFvmk/N66EeGq3n6Ujzj0FFyNMGQr6XuEv4tgNTXvJQTfJdcvGit5p5/DWPu+wj920hAJFI+QQ==
@@ -29522,69 +29522,6 @@ warning@^4.0.2, warning@^4.0.3:
   dependencies:
     loose-envify "^1.0.0"
 
-"warthog@https://github.com/metmirr/warthog/releases/download/v2.23.0/warthog-v2.23.0.tgz":
-  version "2.23.0"
-  resolved "https://github.com/metmirr/warthog/releases/download/v2.23.0/warthog-v2.23.0.tgz#4582fc35554580e0af0f43a9b3725aad2eb808c6"
-  dependencies:
-    "@types/app-root-path" "^1.2.4"
-    "@types/bn.js" "^4.11.6"
-    "@types/caller" "^1.0.0"
-    "@types/cosmiconfig" "^6.0.0"
-    "@types/debug" "^4.1.5"
-    "@types/dotenv" "^8.2.0"
-    "@types/express" "^4.17.2"
-    "@types/graphql" "^14.5.0"
-    "@types/graphql-fields" "^1.3.2"
-    "@types/graphql-iso-date" "^3.3.3"
-    "@types/graphql-type-json" "^0.3.2"
-    "@types/isomorphic-fetch" "^0.0.35"
-    "@types/lodash" "^4.14.148"
-    "@types/mkdirp" "^0.5.2"
-    "@types/node" "^12.12.8"
-    "@types/node-emoji" "^1.8.1"
-    "@types/open" "^6.2.1"
-    "@types/pg" "^7.11.2"
-    "@types/prettier" "^1.18.3"
-    "@types/shortid" "^0.0.29"
-    "@types/ws" "^6.0.3"
-    apollo-link-error "^1.1.12"
-    apollo-link-http "^1.5.16"
-    apollo-server "^2.9.9"
-    apollo-server-express "^2.9.9"
-    app-root-path "^3.0.0"
-    caller "^1.0.1"
-    class-transformer "^0.2.3"
-    class-validator "^0.11.0"
-    cosmiconfig "^6.0.0"
-    cross-fetch "^3.0.4"
-    dataloader "^1.4.0"
-    debug "^4.1.1"
-    execa "^4.0.3"
-    express "^4.17.1"
-    gluegun "^4.1.0"
-    graphql "^14.5.8"
-    graphql-binding "^2.5.2"
-    graphql-fields "^2.0.3"
-    graphql-import-node "^0.0.4"
-    graphql-iso-date "^3.6.1"
-    graphql-scalars "^1.2.6"
-    graphql-tools "^4.0.6"
-    graphql-type-json "^0.3.0"
-    lodash "^4.17.15"
-    mkdirp "^0.5.1"
-    node-emoji "^1.10.0"
-    open "^7.0.0"
-    pg "^7.12.1"
-    pgtools "^0.3.0"
-    prettier "^1.19.1"
-    reflect-metadata "^0.1.13"
-    shortid "^2.2.15"
-    type-graphql "^0.17.5"
-    typedi "^0.8.0"
-    typeorm "^0.2.25"
-    typeorm-typedi-extensions "^0.2.3"
-    typescript "^3.9.7"
-
 "warthog@https://github.com/metmirr/warthog/releases/download/v2.30.0/warthog-v2.30.0.tgz":
   version "2.30.0"
   resolved "https://github.com/metmirr/warthog/releases/download/v2.30.0/warthog-v2.30.0.tgz#24a0b975f2ad5cba17a934752ac07052e856b49c"