
Query node merge

Leszek Wiesner 3 years ago
parent
commit
169a0db255
37 changed files with 1506 additions and 2475 deletions
  1. 29 26
      query-node/manifest.yml
  2. 0 1
      query-node/mappings/bootstrap/data/workers.json
  3. 0 71
      query-node/mappings/bootstrap/index.ts
  4. 0 44
      query-node/mappings/bootstrap/members.ts
  5. 0 39
      query-node/mappings/bootstrap/workers.ts
  6. 186 8
      query-node/mappings/common.ts
  7. 274 0
      query-node/mappings/content/channel.ts
  8. 116 0
      query-node/mappings/content/curatorGroup.ts
  9. 0 0
      query-node/mappings/content/index.ts
  10. 111 110
      query-node/mappings/content/utils.ts
  11. 428 0
      query-node/mappings/content/video.ts
  12. 4 0
      query-node/mappings/genesis-data/index.ts
  13. 0 0
      query-node/mappings/genesis-data/members.json
  14. 6 0
      query-node/mappings/genesis-data/membershipSystem.json
  15. 1 0
      query-node/mappings/genesis-data/workers.json
  16. 18 0
      query-node/mappings/genesis-data/workingGroups.json
  17. 34 0
      query-node/mappings/genesis.ts
  18. 2 0
      query-node/mappings/index.ts
  19. 0 31
      query-node/mappings/init.ts
  20. 0 45
      query-node/mappings/initializeDb.ts
  21. 0 190
      query-node/mappings/src/common.ts
  22. 0 321
      query-node/mappings/src/content/channel.ts
  23. 0 126
      query-node/mappings/src/content/curatorGroup.ts
  24. 0 499
      query-node/mappings/src/content/video.ts
  25. 0 6
      query-node/mappings/src/eventFix.ts
  26. 0 4
      query-node/mappings/src/index.ts
  27. 0 284
      query-node/mappings/src/membership.ts
  28. 0 276
      query-node/mappings/src/storage.ts
  29. 0 203
      query-node/mappings/src/workingGroup.ts
  30. 271 0
      query-node/mappings/storage.ts
  31. 1 0
      query-node/mappings/tsconfig.json
  32. 1 2
      query-node/package.json
  33. 8 164
      query-node/schemas/content.graphql
  34. 3 0
      query-node/schemas/membership.graphql
  35. 10 22
      query-node/schemas/storage.graphql
  36. 3 0
      query-node/schemas/workingGroups.graphql
  37. 0 3
      query-node/start.sh

+ 29 - 26
query-node/manifest.yml

@@ -429,64 +429,67 @@ mappings:
       handler: forum_CategoryMembershipOfModeratorUpdated
     # Content directory
     - event: content.CuratorGroupCreated
-      handler: content_CuratorGroupCreated(DatabaseManager, SubstrateEvent)
+      handler: content_CuratorGroupCreated
     - event: content.CuratorGroupStatusSet
-      handler: content_CuratorGroupStatusSet(DatabaseManager, SubstrateEvent)
+      handler: content_CuratorGroupStatusSet
     - event: content.CuratorAdded
-      handler: content_CuratorAdded(DatabaseManager, SubstrateEvent)
+      handler: content_CuratorAdded
     - event: content.CuratorRemoved
-      handler: content_CuratorRemoved(DatabaseManager, SubstrateEvent)
+      handler: content_CuratorRemoved
     - event: content.ChannelCreated
-      handler: content_ChannelCreated(DatabaseManager, SubstrateEvent)
+      handler: content_ChannelCreated
     - event: content.ChannelUpdated
-      handler: content_ChannelUpdated(DatabaseManager, SubstrateEvent)
+      handler: content_ChannelUpdated
     - event: content.ChannelAssetsRemoved
-      handler: content_ChannelAssetsRemoved(DatabaseManager, SubstrateEvent)
+      handler: content_ChannelAssetsRemoved
     - event: content.ChannelCensorshipStatusUpdated
-      handler: content_ChannelCensorshipStatusUpdated(DatabaseManager, SubstrateEvent)
+      handler: content_ChannelCensorshipStatusUpdated
     # these events are defined in the runtime but never called (at the time of writing)
     #- event: content.ChannelOwnershipTransferRequested
-    #  handler: content_ChannelOwnershipTransferRequested(DatabaseManager, SubstrateEvent)
+    #  handler: content_ChannelOwnershipTransferRequested
     #- event: content.ChannelOwnershipTransferRequestWithdrawn
-    #  handler: content_ChannelOwnershipTransferRequestWithdrawn(DatabaseManager, SubstrateEvent)
+    #  handler: content_ChannelOwnershipTransferRequestWithdrawn
     #- event: content.ChannelOwnershipTransferred
-    #  handler: content_ChannelOwnershipTransferred(DatabaseManager, SubstrateEvent)
+    #  handler: content_ChannelOwnershipTransferred
     - event: content.ChannelCategoryCreated
-      handler: content_ChannelCategoryCreated(DatabaseManager, SubstrateEvent)
+      handler: content_ChannelCategoryCreated
     - event: content.ChannelCategoryUpdated
-      handler: content_ChannelCategoryUpdated(DatabaseManager, SubstrateEvent)
+      handler: content_ChannelCategoryUpdated
     - event: content.ChannelCategoryDeleted
-      handler: content_ChannelCategoryDeleted(DatabaseManager, SubstrateEvent)
+      handler: content_ChannelCategoryDeleted
     - event: content.VideoCategoryCreated
-      handler: content_VideoCategoryCreated(DatabaseManager, SubstrateEvent)
+      handler: content_VideoCategoryCreated
     - event: content.VideoCategoryUpdated
-      handler: content_VideoCategoryUpdated(DatabaseManager, SubstrateEvent)
+      handler: content_VideoCategoryUpdated
     - event: content.VideoCategoryDeleted
-      handler: content_VideoCategoryDeleted(DatabaseManager, SubstrateEvent)
+      handler: content_VideoCategoryDeleted
     - event: content.VideoCreated
-      handler: content_VideoCreated(DatabaseManager, SubstrateEvent)
+      handler: content_VideoCreated
     - event: content.VideoUpdated
-      handler: content_VideoUpdated(DatabaseManager, SubstrateEvent)
+      handler: content_VideoUpdated
     - event: content.VideoDeleted
-      handler: content_VideoDeleted(DatabaseManager, SubstrateEvent)
+      handler: content_VideoDeleted
     - event: content.VideoCensorshipStatusUpdated
-      handler: content_VideoCensorshipStatusUpdated(DatabaseManager, SubstrateEvent)
+      handler: content_VideoCensorshipStatusUpdated
     - event: content.FeaturedVideosSet
-      handler: content_FeaturedVideosSet(DatabaseManager, SubstrateEvent)
+      handler: content_FeaturedVideosSet
     # Storage
     - event: dataDirectory.ContentAdded
-      handler: dataDirectory_ContentAdded(DatabaseManager, SubstrateEvent)
+      handler: dataDirectory_ContentAdded
     - event: dataDirectory.ContentRemoved
-      handler: dataDirectory_ContentRemoved(DatabaseManager, SubstrateEvent)
+      handler: dataDirectory_ContentRemoved
     - event: dataDirectory.ContentAccepted
-      handler: dataDirectory_ContentAccepted(DatabaseManager, SubstrateEvent)
+      handler: dataDirectory_ContentAccepted
     # not handled at the moment
     #- event: dataDirectory.ContentUploadingStatusUpdated
-    #  handler: data_directory_ContentUploadingStatusUpdated(DatabaseManager, SubstrateEvent)
+    #  handler: data_directory_ContentUploadingStatusUpdated
   extrinsicHandlers:
     # infer defaults here
     #- extrinsic: Balances.Transfer
     #- extrinsic: Sudo.batchCall
     #  handler: handleSudoCall(DatabaseManager,SubstrateEvent)
   preBlockHooks:
+    - handler: loadGenesisData
+      filter:
+        height: [0,0] # will be executed only at genesis
   postBlockHooks:
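
A note on the handler changes above: the explicit `(DatabaseManager, SubstrateEvent)` signatures are dropped because handlers are now looked up by name alone and receive a single context object. A minimal sketch of the shape these handlers take, mirroring the `content_*` mappings added later in this commit:

import { EventContext, StoreContext } from '@dzlzv/hydra-common'
import { Content } from '../generated/types'

export async function content_CuratorGroupCreated({ store, event }: EventContext & StoreContext): Promise<void> {
  // decode typed event data generated from the runtime metadata
  const { curatorGroupId } = new Content.CuratorGroupCreatedEvent(event).data
  // ... build the entity and persist it via store.save(...)
}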

+ 0 - 1
query-node/mappings/bootstrap/data/workers.json

@@ -1 +0,0 @@
-{}

+ 0 - 71
query-node/mappings/bootstrap/index.ts

@@ -1,71 +0,0 @@
-import { createDBConnection } from '@dzlzv/hydra-processor/lib/db'
-import { DatabaseManager, makeDatabaseManager } from '@dzlzv/hydra-db-utils'
-import { Connection, getManager } from 'typeorm'
-
-import { bootMembers, IBootstrapMember } from './members';
-import { bootWorkers, IBootstrapWorker, IBootstrapWorkers } from './workers';
-import { Worker, WorkerType } from 'query-node'
-import { FindConditions } from 'typeorm'
-import fs from 'fs'
-import path from 'path'
-
-// run bootstrap
-init()
-
-// bootstrap flow
-async function init() {
-    // prepare database and import data
-    const [databaseManager, connection] = await createDatabaseManager()
-
-    // escape if db is already initialized
-    if (await isDbInitialized(databaseManager)) {
-        await connection.close()
-        return
-    }
-
-    // load import data
-    const data = loadData()
-
-    // bootstrap entities
-    await bootMembers(databaseManager, data.members)
-    await bootWorkers(databaseManager, data.workers)
-
-    await connection.close()
-}
-
-async function isDbInitialized(db: DatabaseManager): Promise<boolean> {
-    // simple way to check if db is bootstrapped already - check if there is at least 1 storage provider
-    const membership = await db.get(Worker, {
-        where: {
-          type: WorkerType.STORAGE,
-        } as FindConditions<Worker>
-    })
-
-    return !!membership
-}
-
-async function createDatabaseManager(): Promise<[DatabaseManager, Connection]> {
-    // paths in `entities` should be the same as `entities` set in `manifest.yml`
-    const entities = [
-        'generated/graphql-server/dist/**/*.model.js'
-    ]
-
-    // connect to db and create manager
-    const connection = await createDBConnection(entities)
-    const entityManager = getManager(connection.name)
-    const databaseManager = makeDatabaseManager(entityManager)
-
-    return [databaseManager, connection]
-}
-
-interface IBootstrapData {
-    members: IBootstrapMember[]
-    workers: IBootstrapWorkers
-}
-
-function loadData(): IBootstrapData {
-    return {
-        members: JSON.parse(fs.readFileSync(process.env.BOOTSTRAP_DATA_FOLDER + '/members.json').toString()),
-        workers: JSON.parse(fs.readFileSync(process.env.BOOTSTRAP_DATA_FOLDER + '/workers.json').toString()),
-    }
-}

+ 0 - 44
query-node/mappings/bootstrap/members.ts

@@ -1,44 +0,0 @@
-//import { Connection } from 'typeorm'
-import { DatabaseManager } from '@dzlzv/hydra-db-utils'
-import {
-  logger,
-} from '../src/common'
-import { MembershipEntryMethod, Membership } from 'query-node'
-
-export interface IBootstrapMember {
-  member_id: number
-  root_account: string,
-  controller_account: string,
-  handle: string,
-  avatar_uri: string,
-  about: string,
-  registered_at_time: number
-}
-
-//export async function bootMembers(members: IBootstrapMember[], db: Connection): Promise<void> {
-export async function bootMembers(db: DatabaseManager, members: IBootstrapMember[]): Promise<void> {
-  for (const rawMember of members) {
-    // create new membership
-    const member = new Membership({
-      // main data
-      id: rawMember.member_id.toString(),
-      rootAccount: rawMember.root_account,
-      controllerAccount: rawMember.controller_account,
-      handle: rawMember.handle,
-      about: rawMember.about,
-      avatarUri: rawMember.avatar_uri,
-      createdInBlock: 0,
-      entry: MembershipEntryMethod.GENESIS,
-
-      // fill in auto-generated fields
-      createdAt: new Date(rawMember.registered_at_time),
-      updatedAt: new Date(rawMember.registered_at_time),
-    })
-
-    // save membership
-    await db.save<Membership>(member)
-
-    // emit log event
-    logger.info('Member has been bootstrapped', {id: rawMember.member_id})
-  }
-}

+ 0 - 39
query-node/mappings/bootstrap/workers.ts

@@ -1,39 +0,0 @@
-import { DatabaseManager } from '@dzlzv/hydra-db-utils'
-import { Worker, WorkerType } from 'query-node'
-import {logger} from '../src/common'
-
-export interface IBootstrapWorkers {
-  storage: IBootstrapWorker[]
-  gateway: IBootstrapWorker[]
-}
-
-export interface IBootstrapWorker {
-  id: string
-}
-
-export async function bootWorkers(db: DatabaseManager, workers: IBootstrapWorkers): Promise<void> {
-  await bootWorkersInGroup(db, workers.storage, WorkerType.STORAGE)
-  await bootWorkersInGroup(db, workers.gateway, WorkerType.GATEWAY)
-}
-
-export async function bootWorkersInGroup(db: DatabaseManager, workers: IBootstrapWorker[], workerType: WorkerType): Promise<void> {
-  if (!workers) {
-    return
-  }
-
-  for (const rawWorker of workers) {
-    // create new membership
-    const worker = new Worker({
-      // main data
-      workerId: rawWorker.id,
-      type: workerType,
-      isActive: true,
-    })
-
-    // save worker
-    await db.save<Worker>(worker)
-
-    // emit log event
-    logger.info('Worker has been bootstrapped', {id: rawWorker.id, workerType})
-  }
-}

+ 186 - 8
query-node/mappings/common.ts

@@ -1,13 +1,191 @@
-import { DatabaseManager, SubstrateEvent } from '@dzlzv/hydra-common'
-import { Network } from 'query-node/dist/src/modules/enums/enums'
-import { Event } from 'query-node/dist/src/modules/event/event.model'
+import { DatabaseManager, SubstrateEvent, SubstrateExtrinsic, ExtrinsicArg } from '@dzlzv/hydra-common'
 import { Bytes } from '@polkadot/types'
-import { WorkingGroup, WorkerId } from '@joystream/types/augment/all'
-
-import { Worker } from 'query-node/dist/model'
+import { WorkingGroup, WorkerId, ContentParameters } from '@joystream/types/augment/all'
+import { Worker, DataObjectOwner, DataObject, LiaisonJudgement, Event, Network } from 'query-node/dist/model'
 import { BaseModel } from 'warthog'
+import { ContentParameters as Custom_ContentParameters } from '@joystream/types/storage'
+import { registry } from '@joystream/types'
 
 export const CURRENT_NETWORK = Network.OLYMPIA
+/*
+  Simple logger enabling error and informational reporting.
+
+  FIXME: The `Logger` class will not be needed once Hydra v3 is released.
+  Hydra will then provide a logger instance and the relevant code using `Logger` should be refactored.
+*/
+class Logger {
+  /*
+    Log significant event.
+  */
+  info(message: string, data?: unknown) {
+    console.log(message, data)
+  }
+
+  /*
+    Log significant error.
+  */
+  error(message: string, data?: unknown) {
+    console.error(message, data)
+  }
+}
+
+export const logger = new Logger()
+
+/*
+  Reports that an insurmountable inconsistent state has been encountered and throws an exception.
+*/
+export function inconsistentState(extraInfo: string, data?: unknown): never {
+  const errorMessage = 'Inconsistent state: ' + extraInfo
+
+  // log error
+  logger.error(errorMessage, data)
+
+  throw errorMessage
+}
+
+/*
+  Reports that unexpected data that cannot be handled has been encountered and throws an exception.
+*/
+export function unexpectedData(extraInfo: string, data?: unknown): never {
+  const errorMessage = 'Unexpected data: ' + extraInfo
+
+  // log error
+  logger.error(errorMessage, data)
+
+  throw errorMessage
+}
+
+/*
+  Reports that metadata inserted by the user is not entirely valid, but the problem can be overcome.
+*/
+export function invalidMetadata(extraInfo: string, data?: unknown): void {
+  const errorMessage = 'Invalid metadata: ' + extraInfo
+
+  // log error
+  logger.info(errorMessage, data)
+}
+
+/*
+  Prepares data object from content parameters.
+*/
+export async function prepareDataObject(
+  contentParameters: ContentParameters,
+  blockNumber: number,
+  owner: typeof DataObjectOwner
+): Promise<DataObject> {
+  // convert generic content parameters coming from processor to custom Joystream data type
+  const customContentParameters = new Custom_ContentParameters(registry, contentParameters.toJSON() as any)
+
+  const dataObject = new DataObject({
+    owner,
+    createdInBlock: blockNumber,
+    typeId: contentParameters.type_id.toNumber(),
+    size: customContentParameters.size_in_bytes.toNumber(),
+    liaisonJudgement: LiaisonJudgement.PENDING, // judgement is pending at start; liaison id is set when content is accepted/rejected
+    ipfsContentId: contentParameters.ipfs_content_id.toUtf8(),
+    joystreamContentId: customContentParameters.content_id.encode(),
+    createdById: '1',
+    updatedById: '1',
+  })
+
+  return dataObject
+}
+
+/// //////////////// Sudo extrinsic calls ///////////////////////////////////////
+
+// soft-peg interface for typegen-generated `*Call` types
+export interface IGenericExtrinsicObject<T> {
+  readonly extrinsic: SubstrateExtrinsic
+  readonly expectedArgTypes: string[]
+  args: T
+}
+
+// arguments for calling extrinsic as sudo
+export interface ISudoCallArgs<T> extends ExtrinsicArg {
+  args: T
+  callIndex: string
+}
+
+/*
+  Extracts extrinsic arguments from the Substrate event. Supports both direct extrinsic calls and sudo calls.
+*/
+export function extractExtrinsicArgs<DataParams, EventObject extends IGenericExtrinsicObject<DataParams>>(
+  rawEvent: SubstrateEvent,
+  callFactory: new (event: SubstrateEvent) => EventObject,
+
+  // in an ideal world this parameter would not be needed, but there is no way to associate parameters
+  // used in sudo with extrinsic parameters without it
+  argsIndeces: Record<keyof DataParams, number>
+): EventObject['args'] {
+  // this is equal to DataParams but only this notation works properly
+  // escape when extrinsic info is not available
+  if (!rawEvent.extrinsic) {
+    throw new Error('Invalid event - no extrinsic set') // this should never happen
+  }
+
+  // regular extrinsic call?
+  if (rawEvent.extrinsic.section !== 'sudo') {
+    return new callFactory(rawEvent).args
+  }
+
+  // sudo extrinsic call
+
+  const callArgs = extractSudoCallParameters<DataParams>(rawEvent)
+
+  // convert naming convention (underscore_names to camelCase)
+  const clearArgs = Object.keys(callArgs.args).reduce((acc, key) => {
+    const formattedName = key.replace(/_([a-z])/g, (tmp) => tmp[1].toUpperCase())
+
+    acc[formattedName] = callArgs.args[key]
+
+    return acc
+  }, {} as DataParams)
+
+  // prepare partial event object
+  const partialEvent = {
+    extrinsic: ({
+      args: Object.keys(argsIndeces).reduce((acc, key) => {
+        acc[argsIndeces[key]] = {
+          value: clearArgs[key],
+        }
+
+        return acc
+      }, [] as unknown[]),
+    } as unknown) as SubstrateExtrinsic,
+  } as SubstrateEvent
+
+  // create event object and extract processed args
+  const finalArgs = new callFactory(partialEvent).args
+
+  return finalArgs
+}
+
+/*
+  Extracts extrinsic call parameters used inside of sudo call.
+*/
+export function extractSudoCallParameters<DataParams>(rawEvent: SubstrateEvent): ISudoCallArgs<DataParams> {
+  if (!rawEvent.extrinsic) {
+    throw new Error('Invalid event - no extrinsic set') // this should never happen
+  }
+
+  // see Substrate's Sudo pallet for more info about sudo extrinsics and the `call` argument index
+  const argIndex =
+    false ||
+    (rawEvent.extrinsic.method === 'sudoAs' && 1) || // who, *call*
+    (rawEvent.extrinsic.method === 'sudo' && 0) || // *call*
+    (rawEvent.extrinsic.method === 'sudoUncheckedWeight' && 0) // *call*, _weight
+
+  // ensure `call` argument was found
+  if (argIndex === false) {
+    // this could possibly happen sometime in the future if new sudo options are introduced in Substrate
+    throw new Error('Not implemented situation with sudo')
+  }
+
+  // typecast call arguments
+  const callArgs = (rawEvent.extrinsic.args[argIndex].value as unknown) as ISudoCallArgs<DataParams>
+
+  return callArgs
+}
 
 export function genericEventFields(substrateEvent: SubstrateEvent): Partial<BaseModel & Event> {
   const { blockNumber, indexInBlock, extrinsic, blockTimestamp } = substrateEvent
@@ -88,7 +266,7 @@ export function getWorkingGroupModuleName(group: WorkingGroup): WorkingGroupModu
     return 'storageWorkingGroup'
   }
 
-  throw new Error(`Unsupported working group: ${group.type}`)
+  unexpectedData('Unsupported working group encountered:', group.type)
 }
 
 export async function getWorker(
@@ -99,7 +277,7 @@ export async function getWorker(
   const workerDbId = `${groupName}-${runtimeId}`
   const worker = await store.get(Worker, { where: { id: workerDbId } })
   if (!worker) {
-    throw new Error(`Worker not found by id ${workerDbId}`)
+    inconsistentState(`Expected worker not found by id ${workerDbId}`)
   }
 
   return worker

+ 274 - 0
query-node/mappings/content/channel.ts

@@ -0,0 +1,274 @@
+/*
+eslint-disable @typescript-eslint/naming-convention
+*/
+import { EventContext, StoreContext } from '@dzlzv/hydra-common'
+import { FindConditions, In } from 'typeorm'
+import { AccountId } from '@polkadot/types/interfaces'
+import { Option } from '@polkadot/types/codec'
+import { Content } from '../generated/types'
+import {
+  readProtobuf,
+  readProtobufWithAssets,
+  convertContentActorToChannelOwner,
+  convertContentActorToDataObjectOwner,
+} from './utils'
+import { Channel, ChannelCategory, DataObject, AssetAvailability } from 'query-node/dist/model'
+import { inconsistentState, logger } from '../common'
+
+export async function content_ChannelCreated({ store, event }: EventContext & StoreContext): Promise<void> {
+  // read event data
+  const { channelId, channelCreationParameters, contentActor } = new Content.ChannelCreatedEvent(event).data
+
+  // read metadata
+  const protobufContent = await readProtobufWithAssets(new Channel(), {
+    metadata: channelCreationParameters.meta,
+    store,
+    blockNumber: event.blockNumber,
+    assets: channelCreationParameters.assets,
+    contentOwner: convertContentActorToDataObjectOwner(contentActor, channelId.toNumber()),
+  })
+
+  // create entity
+  const channel = new Channel({
+    // main data
+    id: channelId.toString(),
+    isCensored: false,
+    videos: [],
+    createdInBlock: event.blockNumber,
+
+    // default values for properties that might or might not be filled by metadata
+    coverPhotoUrls: [],
+    coverPhotoAvailability: AssetAvailability.INVALID,
+    avatarPhotoUrls: [],
+    avatarPhotoAvailability: AssetAvailability.INVALID,
+
+    // fill in auto-generated fields
+    createdAt: new Date(event.blockTimestamp),
+    updatedAt: new Date(event.blockTimestamp),
+
+    // prepare channel owner (handles fields `ownerMember` and `ownerCuratorGroup`)
+    ...(await convertContentActorToChannelOwner(store, contentActor)),
+
+    // integrate metadata
+    ...protobufContent,
+  })
+
+  // save entity
+  await store.save<Channel>(channel)
+
+  // emit log event
+  logger.info('Channel has been created', { id: channel.id })
+}
+
+export async function content_ChannelUpdated({ store, event }: EventContext & StoreContext): Promise<void> {
+  // read event data
+  const { channelId, channelUpdateParameters, contentActor } = new Content.ChannelUpdatedEvent(event).data
+
+  // load channel
+  const channel = await store.get(Channel, { where: { id: channelId.toString() } as FindConditions<Channel> })
+
+  // ensure channel exists
+  if (!channel) {
+    return inconsistentState('Non-existing channel update requested', channelId)
+  }
+
+  // prepare changed metadata
+  const newMetadata = channelUpdateParameters.new_meta.unwrapOr(null)
+
+  // update metadata if it was changed
+  if (newMetadata) {
+    const protobufContent = await readProtobufWithAssets(new Channel(), {
+      metadata: newMetadata,
+      store,
+      blockNumber: event.blockNumber,
+      assets: channelUpdateParameters.assets.unwrapOr([]),
+      contentOwner: convertContentActorToDataObjectOwner(contentActor, channelId.toNumber()),
+    })
+
+    // update all fields read from protobuf
+    for (const [key, value] of Object.entries(protobufContent)) {
+      channel[key] = value
+    }
+  }
+
+  // prepare changed reward account
+  const newRewardAccount = channelUpdateParameters.reward_account.unwrapOr(null)
+
+  // reward account change happened?
+  if (newRewardAccount) {
+    // this will change the `channel`!
+    handleChannelRewardAccountChange(channel, newRewardAccount)
+  }
+
+  // set last update time
+  channel.updatedAt = new Date(event.blockTimestamp)
+
+  // save channel
+  await store.save<Channel>(channel)
+
+  // emit log event
+  logger.info('Channel has been updated', { id: channel.id })
+}
+
+export async function content_ChannelAssetsRemoved({ store, event }: EventContext & StoreContext): Promise<void> {
+  // read event data
+  const { contentId: contentIds } = new Content.ChannelAssetsRemovedEvent(event).data
+
+  // load assets
+  const assets = await store.getMany(DataObject, {
+    where: {
+      id: In(contentIds.toArray().map((item) => item.toString())),
+    } as FindConditions<DataObject>,
+  })
+
+  // delete assets
+  for (const asset of assets) {
+    await store.remove<DataObject>(asset)
+  }
+
+  // emit log event
+  logger.info('Channel assets have been removed', { ids: contentIds })
+}
+
+export async function content_ChannelCensorshipStatusUpdated({
+  store,
+  event,
+}: EventContext & StoreContext): Promise<void> {
+  // read event data
+  const { channelId, isCensored } = new Content.ChannelCensorshipStatusUpdatedEvent(event).data
+
+  // load channel
+  const channel = await store.get(Channel, { where: { id: channelId.toString() } as FindConditions<Channel> })
+
+  // ensure channel exists
+  if (!channel) {
+    return inconsistentState('Non-existing channel censoring requested', channelId)
+  }
+
+  // update channel
+  channel.isCensored = isCensored.isTrue
+
+  // set last update time
+  channel.updatedAt = new Date(event.blockTimestamp)
+
+  // save channel
+  await store.save<Channel>(channel)
+
+  // emit log event
+  logger.info('Channel censorship status has been updated', { id: channelId, isCensored: isCensored.isTrue })
+}
+
+/// //////////////// ChannelCategory ////////////////////////////////////////////
+
+export async function content_ChannelCategoryCreated({ store, event }: EventContext & StoreContext): Promise<void> {
+  // read event data
+  const { channelCategoryCreationParameters, channelCategoryId } = new Content.ChannelCategoryCreatedEvent(event).data
+
+  // read metadata
+  const protobufContent = await readProtobuf(new ChannelCategory(), {
+    metadata: channelCategoryCreationParameters.meta,
+    store,
+    blockNumber: event.blockNumber,
+  })
+
+  // create new channel category
+  const channelCategory = new ChannelCategory({
+    // main data
+    id: channelCategoryId.toString(),
+    channels: [],
+    createdInBlock: event.blockNumber,
+
+    // fill in auto-generated fields
+    createdAt: new Date(event.blockTimestamp),
+    updatedAt: new Date(event.blockTimestamp),
+
+    // integrate metadata
+    ...protobufContent,
+  })
+
+  // save channel category
+  await store.save<ChannelCategory>(channelCategory)
+
+  // emit log event
+  logger.info('Channel category has been created', { id: channelCategory.id })
+}
+
+export async function content_ChannelCategoryUpdated({ store, event }: EventContext & StoreContext): Promise<void> {
+  // read event data
+  const { channelCategoryId, channelCategoryUpdateParameters } = new Content.ChannelCategoryUpdatedEvent(event).data
+
+  // load channel category
+  const channelCategory = await store.get(ChannelCategory, {
+    where: {
+      id: channelCategoryId.toString(),
+    } as FindConditions<ChannelCategory>,
+  })
+
+  // ensure channel category exists
+  if (!channelCategory) {
+    return inconsistentState('Non-existing channel category update requested', channelCategoryId)
+  }
+
+  // read metadata
+  const protobufContent = await readProtobuf(new ChannelCategory(), {
+    metadata: channelCategoryUpdateParameters.new_meta,
+    store,
+    blockNumber: event.blockNumber,
+  })
+
+  // update all fields read from protobuf
+  for (const [key, value] of Object.entries(protobufContent)) {
+    channelCategory[key] = value
+  }
+
+  // set last update time
+  channelCategory.updatedAt = new Date(event.blockTimestamp)
+
+  // save channel category
+  await store.save<ChannelCategory>(channelCategory)
+
+  // emit log event
+  logger.info('Channel category has been updated', { id: channelCategory.id })
+}
+
+export async function content_ChannelCategoryDeleted({ store, event }: EventContext & StoreContext): Promise<void> {
+  // read event data
+  const { channelCategoryId } = new Content.ChannelCategoryDeletedEvent(event).data
+
+  // load channel category
+  const channelCategory = await store.get(ChannelCategory, {
+    where: {
+      id: channelCategoryId.toString(),
+    } as FindConditions<ChannelCategory>,
+  })
+
+  // ensure channel category exists
+  if (!channelCategory) {
+    return inconsistentState('Non-existing channel category deletion requested', channelCategoryId)
+  }
+
+  // delete channel category
+  await store.remove<ChannelCategory>(channelCategory)
+
+  // emit log event
+  logger.info('Channel category has been deleted', { id: channelCategory.id })
+}
+
+/// //////////////// Helpers ////////////////////////////////////////////////////
+
+function handleChannelRewardAccountChange(
+  channel: Channel, // will be modified inside of the function!
+  reward_account: Option<AccountId>
+) {
+  const rewardAccount = reward_account.unwrapOr(null)
+
+  // new different reward account set?
+  if (rewardAccount) {
+    channel.rewardAccount = rewardAccount.toString()
+    return
+  }
+
+  // reward account removed
+
+  channel.rewardAccount = undefined // plan deletion (will have effect when saved to db)
+}

+ 116 - 0
query-node/mappings/content/curatorGroup.ts

@@ -0,0 +1,116 @@
+/*
+eslint-disable @typescript-eslint/naming-convention
+*/
+import { EventContext, StoreContext } from '@dzlzv/hydra-common'
+import { FindConditions } from 'typeorm'
+import { CuratorGroup } from 'query-node/dist/model'
+import { Content } from '../generated/types'
+import { inconsistentState, logger } from '../common'
+
+export async function content_CuratorGroupCreated({ store, event }: EventContext & StoreContext): Promise<void> {
+  // read event data
+  const { curatorGroupId } = new Content.CuratorGroupCreatedEvent(event).data
+
+  // create new curator group
+  const curatorGroup = new CuratorGroup({
+    // main data
+    id: curatorGroupId.toString(),
+    curatorIds: [],
+    isActive: false, // runtime creates inactive curator groups by default
+
+    // fill in auto-generated fields
+    createdAt: new Date(event.blockTimestamp),
+    updatedAt: new Date(event.blockTimestamp),
+  })
+
+  // save curator group
+  await store.save<CuratorGroup>(curatorGroup)
+
+  // emit log event
+  logger.info('Curator group has been created', { id: curatorGroupId })
+}
+
+export async function content_CuratorGroupStatusSet({ store, event }: EventContext & StoreContext): Promise<void> {
+  // read event data
+  const { curatorGroupId, bool: isActive } = new Content.CuratorGroupStatusSetEvent(event).data
+
+  // load curator group
+  const curatorGroup = await store.get(CuratorGroup, {
+    where: { id: curatorGroupId.toString() } as FindConditions<CuratorGroup>,
+  })
+
+  // ensure curator group exists
+  if (!curatorGroup) {
+    return inconsistentState('Non-existing curator group status set requested', curatorGroupId)
+  }
+
+  // update curator group
+  curatorGroup.isActive = isActive.isTrue
+
+  // set last update time
+  curatorGroup.updatedAt = new Date(event.blockTimestamp)
+
+  // save curator group
+  await store.save<CuratorGroup>(curatorGroup)
+
+  // emit log event
+  logger.info('Curator group status has been set', { id: curatorGroupId, isActive })
+}
+
+export async function content_CuratorAdded({ store, event }: EventContext & StoreContext): Promise<void> {
+  // read event data
+  const { curatorGroupId, curatorId } = new Content.CuratorAddedEvent(event).data
+
+  // load curator group
+  const curatorGroup = await store.get(CuratorGroup, {
+    where: { id: curatorGroupId.toString() } as FindConditions<CuratorGroup>,
+  })
+
+  // ensure curator group exists
+  if (!curatorGroup) {
+    return inconsistentState('Curator add to non-existing curator group requested', curatorGroupId)
+  }
+
+  // update curator group
+  curatorGroup.curatorIds.push(curatorId.toNumber())
+
+  // set last update time
+  curatorGroup.updatedAt = new Date(event.blockTimestamp)
+
+  // save curator group
+  await store.save<CuratorGroup>(curatorGroup)
+
+  // emit log event
+  logger.info('Curator has been added to curator group', { id: curatorGroupId, curatorId })
+}
+
+export async function content_CuratorRemoved({ store, event }: EventContext & StoreContext): Promise<void> {
+  // read event data
+  const { curatorGroupId, curatorId } = new Content.CuratorRemovedEvent(event).data
+
+  // load curator group
+  const curatorGroup = await store.get(CuratorGroup, {
+    where: { id: curatorGroupId.toString() } as FindConditions<CuratorGroup>,
+  })
+
+  // ensure curator group exists
+  if (!curatorGroup) {
+    return inconsistentState('Non-existing curator group removal requested', curatorGroupId)
+  }
+
+  const curatorIndex = curatorGroup.curatorIds.indexOf(curatorId.toNumber())
+
+  // ensure curator is a member of the group
+  if (curatorIndex < 0) {
+    return inconsistentState('Non-associated curator removal from curator group requested', curatorId)
+  }
+
+  // update curator group
+  curatorGroup.curatorIds.splice(curatorIndex, 1)
+
+  // save curator group
+  await store.save<CuratorGroup>(curatorGroup)
+
+  // emit log event
+  logger.info('Curator has been removed from curator group', { id: curatorGroupId, curatorId })
+}

+ 0 - 0
query-node/mappings/src/content/index.ts → query-node/mappings/content/index.ts


+ 111 - 110
query-node/mappings/src/content/utils.ts → query-node/mappings/content/utils.ts

@@ -1,20 +1,16 @@
 // TODO: finish db cascade on save/remove; right now there is manually added `cascade: ["insert", "update"]` directive
 //       to all relations in `query-node/generated/graphql-server/src/modules/**/*.model.ts`. That should ensure all records
-//       are saved on one `db.save(...)` call. Missing features
+//       are saved on one `store.save(...)` call. Missing features
 //       - find a proper way to cascade on remove or implement custom removals for every entity
 //       - convert manual changes done to `*model.ts` file into some patch or bash commands that can be executed
 //         every time query node codegen is run (that will overwrite said manual changes)
 //       - verify in integration tests that the records are truly created/updated/removed as expected
 
-import { SubstrateEvent } from '@dzlzv/hydra-common'
-import { DatabaseManager } from '@dzlzv/hydra-db-utils'
+import { DatabaseManager } from '@dzlzv/hydra-common'
 import { Bytes } from '@polkadot/types'
 import ISO6391 from 'iso-639-1'
-import { u64 } from '@polkadot/types/primitive'
 import { FindConditions } from 'typeorm'
-import * as jspb from "google-protobuf"
-
-// protobuf definitions
+import * as jspb from 'google-protobuf'
 import {
   ChannelMetadata,
   ChannelCategoryMetadata,
@@ -24,19 +20,7 @@ import {
   VideoMetadata,
   VideoCategoryMetadata,
 } from '@joystream/content-metadata-protobuf'
-
-import {
-  Content,
-} from '../../../generated/types'
-
-import {
-  invalidMetadata,
-  inconsistentState,
-  logger,
-  prepareDataObject,
-} from '../common'
-
-
+import { invalidMetadata, inconsistentState, logger, prepareDataObject } from '../common'
 import {
   // primary entities
   CuratorGroup,
@@ -44,32 +28,20 @@ import {
   ChannelCategory,
   Video,
   VideoCategory,
-
   // secondary entities
   Language,
   License,
-  VideoMediaEncoding,
   VideoMediaMetadata,
-
   // asset
   DataObjectOwner,
-  DataObjectOwnerMember,
   DataObjectOwnerChannel,
   DataObject,
   LiaisonJudgement,
   AssetAvailability,
-
   Membership,
-} from 'query-node'
-
+} from 'query-node/dist/model'
 // Joystream types
-import {
-  ChannelId,
-  ContentParameters,
-  NewAsset,
-  ContentActor,
-} from '@joystream/types/augment'
-
+import { ContentParameters, NewAsset, ContentActor } from '@joystream/types/augment'
 import { ContentParameters as Custom_ContentParameters } from '@joystream/types/storage'
 import { registry } from '@joystream/types'
 
@@ -91,7 +63,7 @@ function isAssetInStorage(dataObject: AssetStorageOrUrls): dataObject is DataObj
 
 export interface IReadProtobufArguments {
   metadata: Bytes
-  db: DatabaseManager
+  store: DatabaseManager
   blockNumber: number
 }
 
@@ -107,7 +79,6 @@ export interface IReadProtobufArgumentsWithAssets extends IReadProtobufArguments
   Change - set the new value
 */
 export class PropertyChange<T> {
-
   static newUnset<T>(): PropertyChange<T> {
     return new PropertyChange<T>('unset')
   }
@@ -123,11 +94,10 @@ export class PropertyChange<T> {
   /*
     Determines property change from the given object property.
   */
-  static fromObjectProperty<
-    T,
-    Key extends string,
-    ChangedObject extends {[key in Key]?: T}
-  >(object: ChangedObject, key: Key): PropertyChange<T> {
+  static fromObjectProperty<T, Key extends string, ChangedObject extends { [key in Key]?: T }>(
+    object: ChangedObject,
+    key: Key
+  ): PropertyChange<T> {
     if (!(key in object)) {
       return PropertyChange.newNoChange<T>()
     }
@@ -148,27 +118,25 @@ export class PropertyChange<T> {
   }
 
   public isUnset(): boolean {
-    return this.type == 'unset'
+    return this.type === 'unset'
   }
 
   public isNoChange(): boolean {
-    return this.type == 'nochange'
+    return this.type === 'nochange'
   }
 
   public isValue(): boolean {
-    return this.type == 'change'
+    return this.type === 'change'
   }
 
   public getValue(): T | undefined {
-    return this.type == 'change'
-      ? this.value
-      : undefined
+    return this.type === 'change' ? this.value : undefined
   }
 
   /*
     Integrates the value into the given dictionary.
   */
-  public integrateInto(object: Object, key: string): void {
+  public integrateInto(object: Record<string, unknown>, key: string): void {
     if (this.isNoChange()) {
       return
     }
@@ -198,7 +166,7 @@ export interface RawVideoMetadata {
 */
 export async function readProtobuf<T extends ChannelCategory | VideoCategory>(
   type: T,
-  parameters: IReadProtobufArguments,
+  parameters: IReadProtobufArguments
 ): Promise<Partial<T>> {
   // true option here is crucial, it indicates that we want just the underlying bytes (by default it will also include bytes encoding the length)
   const metaU8a = parameters.metadata.toU8a(true)
@@ -220,8 +188,8 @@ export async function readProtobuf<T extends ChannelCategory | VideoCategory>(
   }
 
   // this should never happen
-  logger.error('Not implemented metadata type', {type})
-  throw `Not implemented metadata type`
+  logger.error('Not implemented metadata type', { type })
+  throw new Error(`Not implemented metadata type`)
 }
 
 /*
@@ -231,7 +199,7 @@ export async function readProtobuf<T extends ChannelCategory | VideoCategory>(
 
 export async function readProtobufWithAssets<T extends Channel | Video>(
   type: T,
-  parameters: IReadProtobufArgumentsWithAssets,
+  parameters: IReadProtobufArgumentsWithAssets
 ): Promise<Partial<T>> {
   // true option here is crucial, it indicates that we want just the underlying bytes (by default it will also include bytes encoding the length)
   const metaU8a = parameters.metadata.toU8a(true)
@@ -240,15 +208,15 @@ export async function readProtobufWithAssets<T extends Channel | Video>(
   if (type instanceof Channel) {
     const meta = ChannelMetadata.deserializeBinary(metaU8a)
     const metaAsObject = convertMetadataToObject<ChannelMetadata.AsObject>(meta)
-    const result = metaAsObject as any as Partial<Channel>
+    const result = (metaAsObject as any) as Partial<Channel>
 
     // prepare cover photo asset if needed
     if ('coverPhoto' in metaAsObject) {
       const asset = await extractAsset({
-        //assetIndex: metaAsObject.coverPhoto,
+        // assetIndex: metaAsObject.coverPhoto,
         assetIndex: metaAsObject.coverPhoto,
         assets: parameters.assets,
-        db: parameters.db,
+        store: parameters.store,
         blockNumber: parameters.blockNumber,
         contentOwner: parameters.contentOwner,
       })
@@ -261,7 +229,7 @@ export async function readProtobufWithAssets<T extends Channel | Video>(
       const asset = await extractAsset({
         assetIndex: metaAsObject.avatarPhoto,
         assets: parameters.assets,
-        db: parameters.db,
+        store: parameters.store,
         blockNumber: parameters.blockNumber,
         contentOwner: parameters.contentOwner,
       })
@@ -271,7 +239,7 @@ export async function readProtobufWithAssets<T extends Channel | Video>(
 
     // prepare language if needed
     if ('language' in metaAsObject) {
-      const language = await prepareLanguage(metaAsObject.language, parameters.db, parameters.blockNumber)
+      const language = await prepareLanguage(metaAsObject.language, parameters.store, parameters.blockNumber)
       delete metaAsObject.language // make sure temporary value will not interfere
       language.integrateInto(result, 'language')
     }
@@ -283,11 +251,11 @@ export async function readProtobufWithAssets<T extends Channel | Video>(
   if (type instanceof Video) {
     const meta = VideoMetadata.deserializeBinary(metaU8a)
     const metaAsObject = convertMetadataToObject<VideoMetadata.AsObject>(meta)
-    const result = metaAsObject as any as Partial<Video>
+    const result = (metaAsObject as any) as Partial<Video>
 
     // prepare video category if needed
     if ('category' in metaAsObject) {
-      const category = await prepareVideoCategory(metaAsObject.category, parameters.db)
+      const category = await prepareVideoCategory(metaAsObject.category, parameters.store)
       delete metaAsObject.category // make sure temporary value will not interfere
       category.integrateInto(result, 'category')
     }
@@ -299,7 +267,7 @@ export async function readProtobufWithAssets<T extends Channel | Video>(
 
       // NOTE: type hack - `RawVideoMetadata` is inserted instead of VideoMediaMetadata - it should be edited in `video.ts`
       //       see `integrateVideoMetadata()` in `video.ts` for more info
-      result.mediaMetadata = prepareVideoMetadata(metaAsObject, videoSize, parameters.blockNumber) as unknown as VideoMediaMetadata
+      result.mediaMetadata = (prepareVideoMetadata(metaAsObject, videoSize) as unknown) as VideoMediaMetadata
 
       // remove extra values
       delete metaAsObject.mediaType
@@ -317,7 +285,7 @@ export async function readProtobufWithAssets<T extends Channel | Video>(
       const asset = await extractAsset({
         assetIndex: metaAsObject.thumbnailPhoto,
         assets: parameters.assets,
-        db: parameters.db,
+        store: parameters.store,
         blockNumber: parameters.blockNumber,
         contentOwner: parameters.contentOwner,
       })
@@ -330,7 +298,7 @@ export async function readProtobufWithAssets<T extends Channel | Video>(
       const asset = await extractAsset({
         assetIndex: metaAsObject.video,
         assets: parameters.assets,
-        db: parameters.db,
+        store: parameters.store,
         blockNumber: parameters.blockNumber,
         contentOwner: parameters.contentOwner,
       })
@@ -340,7 +308,7 @@ export async function readProtobufWithAssets<T extends Channel | Video>(
 
     // prepare language if needed
     if ('language' in metaAsObject) {
-      const language = await prepareLanguage(metaAsObject.language, parameters.db, parameters.blockNumber)
+      const language = await prepareLanguage(metaAsObject.language, parameters.store, parameters.blockNumber)
       delete metaAsObject.language // make sure temporary value will not interfere
       language.integrateInto(result, 'language')
     }
@@ -355,17 +323,20 @@ export async function readProtobufWithAssets<T extends Channel | Video>(
   }
 
   // this should never happen
-  logger.error('Not implemented metadata type', {type})
-  throw `Not implemented metadata type`
+  logger.error('Not implemented metadata type', { type })
+  throw new Error(`Not implemented metadata type`)
 }
 
-export async function convertContentActorToChannelOwner(db: DatabaseManager, contentActor: ContentActor): Promise<{
-  ownerMember?: Membership,
-  ownerCuratorGroup?: CuratorGroup,
+export async function convertContentActorToChannelOwner(
+  store: DatabaseManager,
+  contentActor: ContentActor
+): Promise<{
+  ownerMember?: Membership
+  ownerCuratorGroup?: CuratorGroup
 }> {
   if (contentActor.isMember) {
     const memberId = contentActor.asMember.toNumber()
-    const member = await db.get(Membership, { where: { id: memberId.toString() } as FindConditions<Membership> })
+    const member = await store.get(Membership, { where: { id: memberId.toString() } as FindConditions<Membership> })
 
     // ensure member exists
     if (!member) {
@@ -380,7 +351,9 @@ export async function convertContentActorToChannelOwner(db: DatabaseManager, con
 
   if (contentActor.isCurator) {
     const curatorGroupId = contentActor.asCurator[0].toNumber()
-    const curatorGroup = await db.get(CuratorGroup, { where: { id: curatorGroupId.toString() } as FindConditions<CuratorGroup> })
+    const curatorGroup = await store.get(CuratorGroup, {
+      where: { id: curatorGroupId.toString() } as FindConditions<CuratorGroup>,
+    })
 
     // ensure curator group exists
     if (!curatorGroup) {
@@ -395,13 +368,16 @@ export async function convertContentActorToChannelOwner(db: DatabaseManager, con
 
   // TODO: contentActor.isLead
 
-  logger.error('Not implemented ContentActor type', {contentActor: contentActor.toString()})
-  throw 'Not-implemented ContentActor type used'
+  logger.error('Not implemented ContentActor type', { contentActor: contentActor.toString() })
+  throw new Error('Not-implemented ContentActor type used')
 }
 
-export function convertContentActorToDataObjectOwner(contentActor: ContentActor, channelId: number): typeof DataObjectOwner {
+export function convertContentActorToDataObjectOwner(
+  contentActor: ContentActor,
+  channelId: number
+): typeof DataObjectOwner {
   const owner = new DataObjectOwnerChannel()
-  owner.channel = channelId
+  owner.channelId = channelId.toString()
 
   return owner
 
@@ -425,21 +401,22 @@ export function convertContentActorToDataObjectOwner(contentActor: ContentActor,
   */
 }
 
-function handlePublishedBeforeJoystream(video: Partial<Video>, metadata: PublishedBeforeJoystreamMetadata.AsObject): PropertyChange<Date> {
+function handlePublishedBeforeJoystream(
+  video: Partial<Video>,
+  metadata: PublishedBeforeJoystreamMetadata.AsObject
+): PropertyChange<Date> {
   // is publish being unset
   if ('isPublished' in metadata && !metadata.isPublished) {
     return PropertyChange.newUnset()
   }
 
   // try to parse timestamp from publish date
-  const timestamp = metadata.date
-    ? Date.parse(metadata.date)
-    : NaN
+  const timestamp = metadata.date ? Date.parse(metadata.date) : NaN
 
   // ensure date is valid
   if (isNaN(timestamp)) {
     invalidMetadata(`Invalid date used for publishedBeforeJoystream`, {
-      timestamp
+      timestamp,
     })
     return PropertyChange.newNoChange()
   }
@@ -450,7 +427,7 @@ function handlePublishedBeforeJoystream(video: Partial<Video>, metadata: Publish
 
 interface IConvertAssetParameters {
   rawAsset: NewAsset
-  db: DatabaseManager
+  store: DatabaseManager
   blockNumber: number
   contentOwner: typeof DataObjectOwner
 }
@@ -461,7 +438,7 @@ interface IConvertAssetParameters {
 async function convertAsset(parameters: IConvertAssetParameters): Promise<AssetStorageOrUrls> {
   // is asset describing list of URLs?
   if (parameters.rawAsset.isUrls) {
-    const urls = parameters.rawAsset.asUrls.toArray().map(item => item.toString())
+    const urls = parameters.rawAsset.asUrls.toArray().map((item) => item.toString())
 
     return urls
   }
@@ -478,7 +455,7 @@ async function convertAsset(parameters: IConvertAssetParameters): Promise<AssetS
 interface IExtractAssetParameters {
   assetIndex: number | undefined
   assets: NewAsset[]
-  db: DatabaseManager
+  store: DatabaseManager
   blockNumber: number
   contentOwner: typeof DataObjectOwner
 }
@@ -504,7 +481,7 @@ async function extractAsset(parameters: IExtractAssetParameters): Promise<Proper
   // convert asset to data object record
   const asset = await convertAsset({
     rawAsset: parameters.assets[parameters.assetIndex],
-    db: parameters.db,
+    store: parameters.store,
     blockNumber: parameters.blockNumber,
     contentOwner: parameters.contentOwner,
   })
@@ -518,7 +495,11 @@ async function extractAsset(parameters: IExtractAssetParameters): Promise<Proper
 
   Changes `result` argument!
 */
-function integrateAsset<T>(propertyName: string, result: Object, asset: PropertyChange<AssetStorageOrUrls>): void {
+function integrateAsset<T>(
+  propertyName: string,
+  result: Record<string, unknown>,
+  asset: PropertyChange<AssetStorageOrUrls>
+): void {
   // helpers - property names
   const nameUrl = propertyName + 'Urls'
   const nameDataObject = propertyName + 'DataObject'
@@ -570,7 +551,7 @@ function extractVideoSize(assets: NewAsset[], assetIndex: number | undefined): n
 
   // ensure asset index is valid
   if (assetIndex > assets.length) {
-    invalidMetadata(`Non-existing asset video size extraction requested`, {assetsProvided: assets.length, assetIndex})
+    invalidMetadata(`Non-existing asset video size extraction requested`, { assetsProvided: assets.length, assetIndex })
     return undefined
   }
 
@@ -591,7 +572,11 @@ function extractVideoSize(assets: NewAsset[], assetIndex: number | undefined): n
   return videoSize
 }
 
-async function prepareLanguage(languageIso: string | undefined, db: DatabaseManager, blockNumber: number): Promise<PropertyChange<Language>> {
+async function prepareLanguage(
+  languageIso: string | undefined,
+  store: DatabaseManager,
+  blockNumber: number
+): Promise<PropertyChange<Language>> {
   // is language being unset?
   if (languageIso === undefined) {
     return PropertyChange.newUnset()
@@ -607,14 +592,13 @@ async function prepareLanguage(languageIso: string | undefined, db: DatabaseMana
   }
 
   // load language
-  const language = await db.get(Language, { where: { iso: languageIso } as FindConditions<Language> })
+  const language = await store.get(Language, { where: { iso: languageIso } as FindConditions<Language> })
 
   // return existing language if any
   if (language) {
     return PropertyChange.newChange(language)
   }
 
-
   // create new language
   const newLanguage = new Language({
     iso: languageIso,
@@ -625,7 +609,7 @@ async function prepareLanguage(languageIso: string | undefined, db: DatabaseMana
     updatedById: '1',
   })
 
-  await db.save<Language>(newLanguage)
+  await store.save<Language>(newLanguage)
 
   return PropertyChange.newChange(newLanguage)
 }
@@ -660,39 +644,56 @@ async function prepareLicense(licenseProtobuf: LicenseMetadata.AsObject | undefi
   Empty object means deletion is requested.
 */
 function isLicenseEmpty(licenseObject: LicenseMetadata.AsObject): boolean {
-    let somePropertySet = Object.entries(licenseObject).reduce((acc, [key, value]) => {
-        return acc || value !== undefined
-    }, false)
+  const somePropertySet = Object.entries(licenseObject).reduce((acc, [key, value]) => {
+    return acc || value !== undefined
+  }, false)
 
-    return !somePropertySet
+  return !somePropertySet
 }
 
-
-function prepareVideoMetadata(videoProtobuf: VideoMetadata.AsObject, videoSize: number | undefined, blockNumber: number): RawVideoMetadata {
+function prepareVideoMetadata(videoProtobuf: VideoMetadata.AsObject, videoSize: number | undefined): RawVideoMetadata {
   const rawMeta = {
     encoding: {
-      codecName: PropertyChange.fromObjectProperty<string, 'codecName', MediaTypeMetadata.AsObject>(videoProtobuf.mediaType || {}, 'codecName'),
-      container: PropertyChange.fromObjectProperty<string, 'container', MediaTypeMetadata.AsObject>(videoProtobuf.mediaType || {}, 'container'),
-      mimeMediaType: PropertyChange.fromObjectProperty<string, 'mimeMediaType', MediaTypeMetadata.AsObject>(videoProtobuf.mediaType || {}, 'mimeMediaType'),
+      codecName: PropertyChange.fromObjectProperty<string, 'codecName', MediaTypeMetadata.AsObject>(
+        videoProtobuf.mediaType || {},
+        'codecName'
+      ),
+      container: PropertyChange.fromObjectProperty<string, 'container', MediaTypeMetadata.AsObject>(
+        videoProtobuf.mediaType || {},
+        'container'
+      ),
+      mimeMediaType: PropertyChange.fromObjectProperty<string, 'mimeMediaType', MediaTypeMetadata.AsObject>(
+        videoProtobuf.mediaType || {},
+        'mimeMediaType'
+      ),
     },
-    pixelWidth: PropertyChange.fromObjectProperty<number, 'mediaPixelWidth', VideoMetadata.AsObject>(videoProtobuf, 'mediaPixelWidth'),
-    pixelHeight: PropertyChange.fromObjectProperty<number, 'mediaPixelHeight', VideoMetadata.AsObject>(videoProtobuf, 'mediaPixelHeight'),
-    size: videoSize === undefined
-      ? PropertyChange.newNoChange()
-      : PropertyChange.newChange(videoSize)
+    pixelWidth: PropertyChange.fromObjectProperty<number, 'mediaPixelWidth', VideoMetadata.AsObject>(
+      videoProtobuf,
+      'mediaPixelWidth'
+    ),
+    pixelHeight: PropertyChange.fromObjectProperty<number, 'mediaPixelHeight', VideoMetadata.AsObject>(
+      videoProtobuf,
+      'mediaPixelHeight'
+    ),
+    size: videoSize === undefined ? PropertyChange.newNoChange() : PropertyChange.newChange(videoSize),
   } as RawVideoMetadata
 
   return rawMeta
 }
 
-async function prepareVideoCategory(categoryId: number | undefined, db: DatabaseManager): Promise<PropertyChange<VideoCategory>> {
+async function prepareVideoCategory(
+  categoryId: number | undefined,
+  store: DatabaseManager
+): Promise<PropertyChange<VideoCategory>> {
   // is category being unset?
   if (categoryId === undefined) {
     return PropertyChange.newUnset()
   }
 
   // load video category
-  const category = await db.get(VideoCategory, { where: { id: categoryId.toString() } as FindConditions<VideoCategory> })
+  const category = await store.get(VideoCategory, {
+    where: { id: categoryId.toString() } as FindConditions<VideoCategory>,
+  })
 
   // ensure video category exists
   if (!category) {
@@ -703,22 +704,22 @@ async function prepareVideoCategory(categoryId: number | undefined, db: Database
   return PropertyChange.newChange(category)
 }
 
-function convertMetadataToObject<T extends Object>(metadata: jspb.Message): T {
+function convertMetadataToObject<T>(metadata: jspb.Message): T {
   const metaAsObject = metadata.toObject()
   const result = {} as T
 
   for (const key in metaAsObject) {
     const funcNameBase = key.charAt(0).toUpperCase() + key.slice(1)
     const hasFuncName = 'has' + funcNameBase
-    const isSet = funcNameBase == 'PersonsList' // there is no `VideoMetadata.hasPersonsList` method from unkown reason -> create exception
-      ? true
-      : metadata[hasFuncName]()
+    const isSet =
+      funcNameBase === 'PersonsList' // there is no `VideoMetadata.hasPersonsList` method for an unknown reason -> make an exception
+        ? true
+        : metadata[hasFuncName]()
 
     if (!isSet) {
       continue
     }
 
-
     const getFuncName = 'get' + funcNameBase
     const value = metadata[getFuncName]()
 

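The `PropertyChange` helper in this file distinguishes three outcomes when metadata is integrated into an entity: set a new value, explicitly unset the field, or leave it untouched. A minimal illustration of the intended flow (the object shape and field name here are illustrative):

// decide what to do with `title` based on whether the decoded protobuf object carries it
const change = PropertyChange.fromObjectProperty<string, 'title', { title?: string }>(metaAsObject, 'title')
change.integrateInto(result, 'title') // sets, clears, or leaves result.title as-is
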
+ 428 - 0
query-node/mappings/content/video.ts

@@ -0,0 +1,428 @@
+/*
+eslint-disable @typescript-eslint/naming-convention
+*/
+import BN from 'bn.js'
+import { EventContext, StoreContext } from '@dzlzv/hydra-common'
+import { FindConditions, In } from 'typeorm'
+import { Content } from '../generated/types'
+import { inconsistentState, logger } from '../common'
+import { convertContentActorToDataObjectOwner, readProtobuf, readProtobufWithAssets, RawVideoMetadata } from './utils'
+import {
+  AssetAvailability,
+  Channel,
+  Video,
+  VideoCategory,
+  VideoMediaEncoding,
+  VideoMediaMetadata,
+  License,
+} from 'query-node/dist/model'
+
+export async function content_VideoCategoryCreated({ store, event }: EventContext & StoreContext): Promise<void> {
+  // read event data
+  const { videoCategoryId, videoCategoryCreationParameters } = new Content.VideoCategoryCreatedEvent(event).data
+
+  // read metadata
+  const protobufContent = await readProtobuf(new VideoCategory(), {
+    metadata: videoCategoryCreationParameters.meta,
+    store,
+    blockNumber: event.blockNumber,
+  })
+
+  // create new video category
+  const videoCategory = new VideoCategory({
+    // main data
+    id: videoCategoryId.toString(),
+    videos: [],
+    createdInBlock: event.blockNumber,
+
+    // fill in auto-generated fields
+    createdAt: new Date(event.blockTimestamp),
+    updatedAt: new Date(event.blockTimestamp),
+
+    // integrate metadata
+    ...protobufContent,
+  })
+
+  // save video category
+  await store.save<VideoCategory>(videoCategory)
+
+  // emit log event
+  logger.info('Video category has been created', { id: videoCategoryId })
+}
+
+export async function content_VideoCategoryUpdated({ store, event }: EventContext & StoreContext): Promise<void> {
+  // read event data
+  const { videoCategoryId, videoCategoryUpdateParameters } = new Content.VideoCategoryUpdatedEvent(event).data
+
+  // load video category
+  const videoCategory = await store.get(VideoCategory, {
+    where: { id: videoCategoryId.toString() } as FindConditions<VideoCategory>,
+  })
+
+  // ensure video category exists
+  if (!videoCategory) {
+    return inconsistentState('Non-existing video category update requested', videoCategoryId)
+  }
+
+  // read metadata
+  const protobufContent = await readProtobuf(new VideoCategory(), {
+    metadata: videoCategoryUpdateParameters.new_meta,
+    store,
+    blockNumber: event.blockNumber,
+  })
+
+  // update all fields read from protobuf
+  for (const [key, value] of Object.entries(protobufContent)) {
+    videoCategory[key] = value
+  }
+
+  // set last update time
+  videoCategory.updatedAt = new Date(event.blockTimestamp)
+
+  // save video category
+  await store.save<VideoCategory>(videoCategory)
+
+  // emit log event
+  logger.info('Video category has been updated', { id: videoCategoryId })
+}
+
+export async function content_VideoCategoryDeleted({ store, event }: EventContext & StoreContext): Promise<void> {
+  // read event data
+  const { videoCategoryId } = new Content.VideoCategoryDeletedEvent(event).data
+
+  // load video category
+  const videoCategory = await store.get(VideoCategory, {
+    where: { id: videoCategoryId.toString() } as FindConditions<VideoCategory>,
+  })
+
+  // ensure video category exists
+  if (!videoCategory) {
+    return inconsistentState('Non-existing video category deletion requested', videoCategoryId)
+  }
+
+  // remove video category
+  await store.remove<VideoCategory>(videoCategory)
+
+  // emit log event
+  logger.info('Video category has been deleted', { id: videoCategoryId })
+}
+
+/// //////////////// Video //////////////////////////////////////////////////////
+
+export async function content_VideoCreated({ store, event }: EventContext & StoreContext): Promise<void> {
+  // read event data
+  const { channelId, videoId, videoCreationParameters, contentActor } = new Content.VideoCreatedEvent(event).data
+
+  // read metadata
+  const protobufContent = await readProtobufWithAssets(new Video(), {
+    metadata: videoCreationParameters.meta,
+    store,
+    blockNumber: event.blockNumber,
+    assets: videoCreationParameters.assets,
+    contentOwner: convertContentActorToDataObjectOwner(contentActor, channelId.toNumber()),
+  })
+
+  // load channel
+  const channel = await store.get(Channel, { where: { id: channelId.toString() } as FindConditions<Channel> })
+
+  // ensure channel exists
+  if (!channel) {
+    return inconsistentState('Trying to add video to non-existing channel', channelId)
+  }
+
+  // prepare video media metadata (if any)
+  const fixedProtobuf = integrateVideoMediaMetadata(null, protobufContent, event.blockNumber)
+
+  const licenseIsEmpty = fixedProtobuf.license && !Object.keys(fixedProtobuf.license).length
+  if (licenseIsEmpty) {
+    // license deletion was requested - ignore it and consider it empty
+    delete fixedProtobuf.license
+  }
+
+  // create new video
+  const video = new Video({
+    // main data
+    id: videoId.toString(),
+    isCensored: false,
+    channel,
+    createdInBlock: event.blockNumber,
+    isFeatured: false,
+
+    // default values for properties that might or might not be filled by metadata
+    thumbnailPhotoUrls: [],
+    thumbnailPhotoAvailability: AssetAvailability.INVALID,
+    mediaUrls: [],
+    mediaAvailability: AssetAvailability.INVALID,
+
+    // fill in auto-generated fields
+    createdAt: new Date(event.blockTimestamp),
+    updatedAt: new Date(event.blockTimestamp),
+
+    // integrate metadata
+    ...fixedProtobuf,
+  })
+
+  // save video
+  await store.save<Video>(video)
+
+  // emit log event
+  logger.info('Video has been created', { id: videoId })
+}
+
+export async function content_VideoUpdated({ store, event }: EventContext & StoreContext): Promise<void> {
+  // read event data
+  const { videoId, videoUpdateParameters, contentActor } = new Content.VideoUpdatedEvent(event).data
+
+  // load video
+  const video = await store.get(Video, {
+    where: { id: videoId.toString() } as FindConditions<Video>,
+    relations: ['channel', 'license'],
+  })
+
+  // ensure video exists
+  if (!video) {
+    return inconsistentState('Non-existing video update requested', videoId)
+  }
+
+  // prepare changed metadata
+  const newMetadata = videoUpdateParameters.new_meta.unwrapOr(null)
+
+  // license must be deleted AFTER video is saved - plan a license deletion by assigning it to this variable
+  let licenseToDelete: License | null = null
+
+  // update metadata if it was changed
+  if (newMetadata) {
+    const protobufContent = await readProtobufWithAssets(new Video(), {
+      metadata: newMetadata,
+      store,
+      blockNumber: event.blockNumber,
+      assets: videoUpdateParameters.assets.unwrapOr([]),
+      contentOwner: convertContentActorToDataObjectOwner(contentActor, new BN(video.channel.id).toNumber()),
+    })
+
+    // prepare video media metadata (if any)
+    const fixedProtobuf = integrateVideoMediaMetadata(video, protobufContent, event.blockNumber)
+
+    // remember original license
+    const originalLicense = video.license
+
+    // update all fields read from protobuf
+    for (const [key, value] of Object.entries(fixedProtobuf)) {
+      video[key] = value
+    }
+
+    // license has changed - schedule the old license for deletion
+    if (originalLicense && video.license !== originalLicense) {
+      ;[video.license, licenseToDelete] = handleLicenseUpdate(originalLicense, video.license)
+    } else if (!Object.keys(video.license || {}).length) {
+      // license deletion was requested even though no license exists?
+      delete video.license // ensure license is empty
+    }
+  }
+
+  // set last update time
+  video.updatedAt = new Date(event.blockTimestamp)
+
+  // save video
+  await store.save<Video>(video)
+
+  // delete old license if it's planned
+  if (licenseToDelete) {
+    await store.remove<License>(licenseToDelete)
+  }
+
+  // emit log event
+  logger.info('Video has been updated', { id: videoId })
+}
+
+export async function content_VideoDeleted({ store, event }: EventContext & StoreContext): Promise<void> {
+  // read event data
+  const { videoId } = new Content.VideoDeletedEvent(event).data
+
+  // load video
+  const video = await store.get(Video, { where: { id: videoId.toString() } as FindConditions<Video> })
+
+  // ensure video exists
+  if (!video) {
+    return inconsistentState('Non-existing video deletion requested', videoId)
+  }
+
+  // remove video
+  await store.remove<Video>(video)
+
+  // emit log event
+  logger.info('Video has been deleted', { id: videoId })
+}
+
+export async function content_VideoCensorshipStatusUpdated({
+  store,
+  event,
+}: EventContext & StoreContext): Promise<void> {
+  // read event data
+  const { videoId, isCensored } = new Content.VideoCensorshipStatusUpdatedEvent(event).data
+
+  // load video
+  const video = await store.get(Video, { where: { id: videoId.toString() } as FindConditions<Video> })
+
+  // ensure video exists
+  if (!video) {
+    return inconsistentState('Non-existing video censoring requested', videoId)
+  }
+
+  // update video
+  video.isCensored = isCensored.isTrue
+
+  // set last update time
+  video.updatedAt = new Date(event.blockTimestamp)
+
+  // save video
+  await store.save<Video>(video)
+
+  // emit log event
+  logger.info('Video censorship status has been updated', { id: videoId, isCensored: isCensored.isTrue })
+}
+
+export async function content_FeaturedVideosSet({ store, event }: EventContext & StoreContext): Promise<void> {
+  // read event data
+  const { videoId: videoIds } = new Content.FeaturedVideosSetEvent(event).data
+
+  // load old featured videos
+  const existingFeaturedVideos = await store.getMany(Video, { where: { isFeatured: true } as FindConditions<Video> })
+
+  // comparison utility
+  const isSame = (videoIdA: string) => (videoIdB: string) => videoIdA === videoIdB
+
+  // calculate diff sets
+  const toRemove = existingFeaturedVideos.filter(
+    (existingFV) => !videoIds.map((item) => item.toHex()).some(isSame(existingFV.id))
+  )
+  const toAdd = videoIds.filter((video) => !existingFeaturedVideos.map((item) => item.id).some(isSame(video.toHex())))
+
+  // mark previously featured videos as not-featured
+  for (const video of toRemove) {
+    video.isFeatured = false
+
+    // set last update time
+    video.updatedAt = new Date(event.blockTimestamp)
+
+    await store.save<Video>(video)
+  }
+
+  // escape if no featured video needs to be added
+  if (!toAdd.length) {
+    // emit log event
+    logger.info('Featured videos unchanged')
+
+    return
+  }
+
+  // load previously not-featured videos that are meant to be featured
+  const videosToAdd = await store.getMany(Video, {
+    where: {
+      id: In(toAdd.map((item) => item.toString())),
+    } as FindConditions<Video>,
+  })
+
+  if (videosToAdd.length !== toAdd.length) {
+    return inconsistentState('At least one non-existing video featuring requested', toAdd)
+  }
+
+  // mark previously not-featured videos as featured
+  for (const video of videosToAdd) {
+    video.isFeatured = true
+
+    // set last update time
+    video.updatedAt = new Date(event.blockTimestamp)
+
+    await store.save<Video>(video)
+  }
+
+  // emit log event
+  logger.info('New featured videos have been set', { videoIds })
+}
+
+/// //////////////// Helpers ////////////////////////////////////////////////////
+
+/*
+  Integrates video metadata-related data into existing data (if any) or creates a new record.
+
+  NOTE: type hack - `RawVideoMetadata` is accepted for `metadata` instead of `Partial<Video>`
+        see `prepareVideoMetadata()` in `utils.ts` for more info
+*/
+function integrateVideoMediaMetadata(
+  existingRecord: Video | null,
+  metadata: Partial<Video>,
+  blockNumber: number
+): Partial<Video> {
+  if (!metadata.mediaMetadata) {
+    return metadata
+  }
+
+  // fix TS type
+  const rawMediaMetadata = (metadata.mediaMetadata as unknown) as RawVideoMetadata
+
+  // ensure encoding object
+  const encoding =
+    (existingRecord && existingRecord.mediaMetadata && existingRecord.mediaMetadata.encoding) ||
+    new VideoMediaEncoding({
+      createdById: '1',
+      updatedById: '1',
+    })
+
+  // integrate media encoding-related data
+  rawMediaMetadata.encoding.codecName.integrateInto(encoding, 'codecName')
+  rawMediaMetadata.encoding.container.integrateInto(encoding, 'container')
+  rawMediaMetadata.encoding.mimeMediaType.integrateInto(encoding, 'mimeMediaType')
+
+  // ensure media metadata object
+  const mediaMetadata =
+    (existingRecord && existingRecord.mediaMetadata) ||
+    new VideoMediaMetadata({
+      createdInBlock: blockNumber,
+
+      createdById: '1',
+      updatedById: '1',
+    })
+
+  // integrate media-related data
+  rawMediaMetadata.pixelWidth.integrateInto(mediaMetadata, 'pixelWidth')
+  rawMediaMetadata.pixelHeight.integrateInto(mediaMetadata, 'pixelHeight')
+  rawMediaMetadata.size.integrateInto(mediaMetadata, 'size')
+
+  // connect encoding to media metadata object
+  mediaMetadata.encoding = encoding
+
+  return {
+    ...metadata,
+    mediaMetadata,
+  }
+}
+
+// returns tuple `[newLicenseForVideo, oldLicenseToBeDeleted]`
+function handleLicenseUpdate(originalLicense, newLicense): [License | undefined, License | null] {
+  const isNewEmpty = !Object.keys(newLicense).length
+
+  if (!originalLicense && isNewEmpty) {
+    return [undefined, null]
+  }
+
+  if (!originalLicense) {
+    // && !isNewEmpty
+    return [newLicense, null]
+  }
+
+  if (!isNewEmpty) {
+    // && originalLicense
+    return [
+      new License({
+        ...originalLicense,
+        ...newLicense,
+      }),
+      null,
+    ]
+  }
+
+  // originalLicense && isNewEmpty
+
+  return [originalLicense, null]
+}
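
A brief sketch of the `[newLicenseForVideo, oldLicenseToBeDeleted]` contract of `handleLicenseUpdate` above (the field names are hypothetical, and note that every branch currently returns `null` as the second member):

// hypothetical license values, for illustration only
const original = new License({ attribution: 'someone' })
handleLicenseUpdate(undefined, {})                   // [undefined, null] - nothing to set or delete
handleLicenseUpdate(undefined, { attribution: 'x' }) // [newLicense, null] - brand new license assigned
handleLicenseUpdate(original, { attribution: 'x' })  // [merged License, null] - original fields overridden by the update
handleLicenseUpdate(original, {})                    // [original, null] - empty update leaves the license untouched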

+ 4 - 0
query-node/mappings/genesis-data/index.ts

@@ -0,0 +1,4 @@
+export { default as members } from './members'
+export { default as membershipSystem } from './membershipSystem'
+export { default as workers } from './workers'
+export { default as workingGroups } from './workingGroups'

+ 0 - 0
query-node/mappings/bootstrap/data/members.json → query-node/mappings/genesis-data/members.json


+ 6 - 0
query-node/mappings/genesis-data/membershipSystem.json

@@ -0,0 +1,6 @@
+{
+  "defaultInviteCount": 5,
+  "membershipPrice": 100,
+  "referralCut": 0,
+  "invitedInitialBalance": 100
+}

+ 1 - 0
query-node/mappings/genesis-data/workers.json

@@ -0,0 +1 @@
+[]

+ 18 - 0
query-node/mappings/genesis-data/workingGroups.json

@@ -0,0 +1,18 @@
+[
+  {
+    "name": "storageWorkingGroup",
+    "budget": 0
+  },
+  {
+    "name": "membershipWorkingGroup",
+    "budget": 0
+  },
+  {
+    "name": "contentDirectoryWorkingGroup",
+    "budget": 0
+  },
+  {
+    "name": "forumWorkingGroup",
+    "budget": 0
+  }
+]

+ 34 - 0
query-node/mappings/genesis.ts

@@ -0,0 +1,34 @@
+import { StoreContext } from '@dzlzv/hydra-common'
+import BN from 'bn.js'
+import { MembershipSystemSnapshot, WorkingGroup } from 'query-node/dist/model'
+import { membershipSystem, workingGroups } from './genesis-data'
+
+export async function loadGenesisData({ store }: StoreContext): Promise<void> {
+  // Membership system
+  await store.save<MembershipSystemSnapshot>(
+    new MembershipSystemSnapshot({
+      createdAt: new Date(0),
+      updatedAt: new Date(0),
+      snapshotBlock: 0,
+      ...membershipSystem,
+      membershipPrice: new BN(membershipSystem.membershipPrice),
+      invitedInitialBalance: new BN(membershipSystem.invitedInitialBalance),
+    })
+  )
+
+  // Working groups
+  await Promise.all(
+    workingGroups.map(async (group) =>
+      store.save<WorkingGroup>(
+        new WorkingGroup({
+          createdAt: new Date(0),
+          updatedAt: new Date(0),
+          id: group.name,
+          ...group,
+        })
+      )
+    )
+  )
+
+  // TODO: members, workers
+}
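
A minimal sketch of exercising `loadGenesisData` outside the processor, e.g. in an async test body; the store stub below is illustrative only (a real run goes through Hydra's `DatabaseManager`):

import { StoreContext } from '@dzlzv/hydra-common'
import { loadGenesisData } from './genesis'

// illustrative store stub that records entities instead of writing to the database
const saved: unknown[] = []
const storeStub = { save: async (entity: unknown) => { saved.push(entity) } }

await loadGenesisData(({ store: storeStub } as unknown) as StoreContext)
// `saved` now holds one MembershipSystemSnapshot (price and invited balance wrapped in BN)
// plus one WorkingGroup per entry in genesis-data/workingGroups.json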

+ 2 - 0
query-node/mappings/index.ts

@@ -5,7 +5,9 @@ BN.prototype.toJSON = function () {
   return this.toString()
 }
 
+export * from './content'
 export * from './membership'
+export * from './storage'
 export * from './workingGroups'
 export * from './proposals'
 export * from './proposalsDiscussion'

+ 0 - 31
query-node/mappings/init.ts

@@ -1,31 +0,0 @@
-import { ApiPromise, WsProvider } from '@polkadot/api'
-import { types } from '@joystream/types'
-import { createDBConnection } from '@dzlzv/hydra-processor'
-import { makeDatabaseManager } from '@dzlzv/hydra-processor/lib/executor/TransactionalExecutor'
-import path from 'path'
-
-// A script to initialize processor database with some initial values that cannot be fetched from events / extrinics
-async function init() {
-  const provider = new WsProvider(process.env.WS_PROVIDER_ENDPOINT_URI)
-  const api = await ApiPromise.create({ provider, types })
-  // Will be resolved relatively to mappings/lib
-  const entitiesPath = path.resolve(__dirname, '../../generated/graphql-server/dist/src/modules/**/*.model.js')
-  // We need to create db connection (and configure env) before importing any warthog models
-  const dbConnection = await createDBConnection([entitiesPath])
-  const db = makeDatabaseManager(dbConnection.createEntityManager())
-  // Only now we can import the initialization scripts (which include warthog models imports)
-  // eslint-disable-next-line @typescript-eslint/no-var-requires
-  const initializeDb = require('./initializeDb').default
-
-  await initializeDb(api, db)
-}
-
-init()
-  .then(() => {
-    console.log('Processor database initialized')
-    process.exit()
-  })
-  .catch((e) => {
-    console.error(e)
-    process.exit(-1)
-  })

+ 0 - 45
query-node/mappings/initializeDb.ts

@@ -1,45 +0,0 @@
-import { ApiPromise } from '@polkadot/api'
-import { BalanceOf } from '@polkadot/types/interfaces'
-import { DatabaseManager } from '@dzlzv/hydra-common'
-import { MembershipSystemSnapshot, WorkingGroup } from 'query-node/dist/model'
-
-async function initMembershipSystem(api: ApiPromise, db: DatabaseManager) {
-  const initialInvitationCount = await api.query.members.initialInvitationCount.at(api.genesisHash)
-  const initialInvitationBalance = await api.query.members.initialInvitationBalance.at(api.genesisHash)
-  const referralCut = await api.query.members.referralCut.at(api.genesisHash)
-  const membershipPrice = await api.query.members.membershipPrice.at(api.genesisHash)
-  const membershipSystem = new MembershipSystemSnapshot({
-    createdAt: new Date(0),
-    updatedAt: new Date(0),
-    snapshotBlock: 0,
-    defaultInviteCount: initialInvitationCount.toNumber(),
-    membershipPrice,
-    referralCut: referralCut.toNumber(),
-    invitedInitialBalance: initialInvitationBalance,
-  })
-  await db.save<MembershipSystemSnapshot>(membershipSystem)
-}
-
-async function initWorkingGroups(api: ApiPromise, db: DatabaseManager) {
-  const groupNames = Object.keys(api.query).filter((k) => k.endsWith('WorkingGroup'))
-  const groups = await Promise.all(
-    groupNames.map(async (groupName) => {
-      const budget = await api.query[groupName].budget.at<BalanceOf>(api.genesisHash)
-      return new WorkingGroup({
-        createdAt: new Date(0),
-        updatedAt: new Date(0),
-        id: groupName,
-        name: groupName,
-        workers: [],
-        openings: [],
-        budget,
-      })
-    })
-  )
-  await Promise.all(groups.map((g) => db.save<WorkingGroup>(g)))
-}
-
-export default async function initializeDb(api: ApiPromise, db: DatabaseManager): Promise<void> {
-  await initMembershipSystem(api, db)
-  await initWorkingGroups(api, db)
-}

+ 0 - 190
query-node/mappings/src/common.ts

@@ -1,190 +0,0 @@
-import { SubstrateEvent } from '@dzlzv/hydra-common'
-import { DatabaseManager } from '@dzlzv/hydra-db-utils'
-import { u64 } from '@polkadot/types/primitive';
-import { SubstrateExtrinsic, ExtrinsicArg } from '@dzlzv/hydra-common'
-
-// Asset
-import {
-  DataObjectOwner,
-  DataObject,
-  LiaisonJudgement,
-  Network,
-} from 'query-node'
-import {
-  ContentParameters,
-} from '@joystream/types/augment'
-
-import { ContentParameters as Custom_ContentParameters } from '@joystream/types/storage'
-import { registry } from '@joystream/types'
-
-const currentNetwork = Network.BABYLON
-
-/*
-  Reports that insurmountable inconsistent state has been encountered and throws an exception.
-*/
-export function inconsistentState(extraInfo: string, data?: unknown): never {
-  const errorMessage = 'Inconsistent state: ' + extraInfo
-
-  // log error
-  logger.error(errorMessage, data)
-
-  throw errorMessage
-}
-
-/*
-  Reports that metadata inserted by the user are not entirely valid, but the problem can be overcome.
-*/
-export function invalidMetadata(extraInfo: string, data?: unknown): void {
-  const errorMessage = 'Invalid metadata: ' + extraInfo
-
-  // log error
-  logger.info(errorMessage, data)
-}
-
-/*
-  Prepares data object from content parameters.
-*/
-export async function prepareDataObject(
-  contentParameters: ContentParameters,
-  blockNumber: number,
-  owner: typeof DataObjectOwner,
-): Promise<DataObject> {
-  // convert generic content parameters coming from processor to custom Joystream data type
-  const customContentParameters = new Custom_ContentParameters(registry, contentParameters.toJSON() as any)
-
-  const dataObject = new DataObject({
-    owner,
-    createdInBlock: blockNumber,
-    typeId: contentParameters.type_id.toNumber(),
-    size: customContentParameters.size_in_bytes.toNumber(),
-    liaisonJudgement: LiaisonJudgement.PENDING, // judgement is pending at start; liaison id is set when content is accepted/rejected
-    ipfsContentId: contentParameters.ipfs_content_id.toUtf8(),
-    joystreamContentId: customContentParameters.content_id.encode(),
-
-    createdById: '1',
-    updatedById: '1',
-  })
-
-  return dataObject
-}
-
-/////////////////// Sudo extrinsic calls ///////////////////////////////////////
-
-// soft-peg interface for typegen-generated `*Call` types
-export interface IGenericExtrinsicObject<T> {
-  readonly extrinsic: SubstrateExtrinsic
-  readonly expectedArgTypes: string[]
-  args: T
-}
-
-// arguments for calling extrinsic as sudo
-export interface ISudoCallArgs<T> extends ExtrinsicArg {
-  args: T
-  callIndex: string
-}
-
-/*
-  Extracts extrinsic arguments from the Substrate event. Supports both direct extrinsic calls and sudo calls.
-*/
-export function extractExtrinsicArgs<DataParams, EventObject extends IGenericExtrinsicObject<DataParams>>(
-  rawEvent: SubstrateEvent,
-  callFactory: new (event: SubstrateEvent) => EventObject,
-
-  // in ideal world this parameter would not be needed, but there is no way to associate parameters
-  // used in sudo to extrinsic parameters without it
-  argsIndeces: Record<keyof DataParams, number>,
-): EventObject['args'] { // this is equal to DataParams but only this notation works properly
-  // escape when extrinsic info is not available
-  if (!rawEvent.extrinsic) {
-    throw 'Invalid event - no extrinsic set' // this should never happen
-  }
-
-  // regural extrinsic call?
-  if (rawEvent.extrinsic.section != 'sudo') {
-    return (new callFactory(rawEvent)).args
-  }
-
-  // sudo extrinsic call
-
-  const callArgs = extractSudoCallParameters<DataParams>(rawEvent)
-
-  // convert naming convention (underscore_names to camelCase)
-  const clearArgs = Object.keys(callArgs.args).reduce((acc, key) => {
-    const formattedName = key.replace(/_([a-z])/g, tmp => tmp[1].toUpperCase())
-
-    acc[formattedName] = callArgs.args[key]
-
-    return acc
-  }, {} as DataParams)
-
-  // prepare partial event object
-  const partialEvent = {
-    extrinsic: {
-      args: Object.keys(argsIndeces).reduce((acc, key) => {
-        acc[(argsIndeces)[key]] = {
-          value: clearArgs[key]
-        }
-
-        return acc
-      }, [] as unknown[]),
-    } as unknown as SubstrateExtrinsic
-  } as SubstrateEvent
-
-  // create event object and extract processed args
-  const finalArgs = (new callFactory(partialEvent)).args
-
-  return finalArgs
-}
-
-/*
-  Extracts extrinsic call parameters used inside of sudo call.
-*/
-export function extractSudoCallParameters<DataParams>(rawEvent: SubstrateEvent): ISudoCallArgs<DataParams> {
-  if (!rawEvent.extrinsic) {
-    throw 'Invalid event - no extrinsic set' // this should never happen
-  }
-
-  // see Substrate's sudo frame for more info about sudo extrinsics and `call` argument index
-  const argIndex = false
-    || (rawEvent.extrinsic.method == 'sudoAs' && 1) // who, *call*
-    || (rawEvent.extrinsic.method == 'sudo' && 0) // *call*
-    || (rawEvent.extrinsic.method == 'sudoUncheckedWeight' && 0) // *call*, _weight
-
-  // ensure `call` argument was found
-  if (argIndex === false) {
-    // this could possibly happen in sometime in future if new sudo options are introduced in Substrate
-    throw 'Not implemented situation with sudo'
-  }
-
-  // typecast call arguments
-  const callArgs = rawEvent.extrinsic.args[argIndex].value as unknown as ISudoCallArgs<DataParams>
-
-  return callArgs
-}
-
-/////////////////// Logger /////////////////////////////////////////////////////
-
-/*
-  Simple logger enabling error and informational reporting.
-
-  `Logger` class will not be needed in the future when Hydra v3 will be released.
-  Hydra will provide logger instance and relevant code using `Logger` should be refactored.
-*/
-class Logger {
-
-  /*
-    Log significant event.
-  */
-  info(message: string, data?: unknown) {
-    console.log(message, data)
-  }
-
-  /*
-    Log significant error.
-  */
-  error(message: string, data?: unknown) {
-    console.error(message, data)
-  }
-}
-
-export const logger = new Logger()

+ 0 - 321
query-node/mappings/src/content/channel.ts

@@ -1,321 +0,0 @@
-import { fixBlockTimestamp } from '../eventFix'
-import { SubstrateEvent } from '@dzlzv/hydra-common'
-import { DatabaseManager } from '@dzlzv/hydra-db-utils'
-import ISO6391 from 'iso-639-1';
-import { FindConditions, In } from 'typeorm'
-
-import { AccountId } from "@polkadot/types/interfaces";
-import { Option } from '@polkadot/types/codec';
-import { Content } from '../../../generated/types'
-import {
-  readProtobuf,
-  readProtobufWithAssets,
-  convertContentActorToChannelOwner,
-  convertContentActorToDataObjectOwner,
-} from './utils'
-
-import {
-  Channel,
-  ChannelCategory,
-  DataObject,
-} from 'query-node'
-import {
-  inconsistentState,
-  logger,
-} from '../common'
-
-import {
-  AssetAvailability,
-} from 'query-node'
-
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_ChannelCreated(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const {channelId, channelCreationParameters, contentActor} = new Content.ChannelCreatedEvent(event).data
-
-  // read metadata
-  const protobufContent = await readProtobufWithAssets(
-    new Channel(),
-    {
-      metadata: channelCreationParameters.meta,
-      db,
-      blockNumber: event.blockNumber,
-      assets: channelCreationParameters.assets,
-      contentOwner: convertContentActorToDataObjectOwner(contentActor, channelId.toNumber()),
-    }
-  )
-
-  // create entity
-  const channel = new Channel({
-    // main data
-    id: channelId.toString(),
-    isCensored: false,
-    videos: [],
-    createdInBlock: event.blockNumber,
-
-    // default values for properties that might or might not be filled by metadata
-    coverPhotoUrls: [],
-    coverPhotoAvailability: AssetAvailability.INVALID,
-    avatarPhotoUrls: [],
-    avatarPhotoAvailability: AssetAvailability.INVALID,
-
-    // fill in auto-generated fields
-    createdAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-    updatedAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-
-    // prepare channel owner (handles fields `ownerMember` and `ownerCuratorGroup`)
-    ...await convertContentActorToChannelOwner(db, contentActor),
-
-    // integrate metadata
-    ...protobufContent
-  })
-
-  // save entity
-  await db.save<Channel>(channel)
-
-  // emit log event
-  logger.info('Channel has been created', {id: channel.id})
-}
-
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_ChannelUpdated(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
-  // read event data
-  const {
-    channelId,
-    channelUpdateParameters,
-    contentActor,
-  } = new Content.ChannelUpdatedEvent(event).data
-
-  // load channel
-  const channel = await db.get(Channel, { where: { id: channelId.toString() } as FindConditions<Channel> })
-
-  // ensure channel exists
-  if (!channel) {
-    return inconsistentState('Non-existing channel update requested', channelId)
-  }
-
-  // prepare changed metadata
-  const newMetadata = channelUpdateParameters.new_meta.unwrapOr(null)
-
-  //  update metadata if it was changed
-  if (newMetadata) {
-    const protobufContent = await readProtobufWithAssets(
-      new Channel(),
-      {
-        metadata: newMetadata,
-        db,
-        blockNumber: event.blockNumber,
-        assets: channelUpdateParameters.assets.unwrapOr([]),
-        contentOwner: convertContentActorToDataObjectOwner(contentActor, channelId.toNumber()),
-      }
-    )
-
-    // update all fields read from protobuf
-    for (let [key, value] of Object.entries(protobufContent)) {
-      channel[key] = value
-    }
-  }
-
-  // prepare changed reward account
-  const newRewardAccount = channelUpdateParameters.reward_account.unwrapOr(null)
-
-  // reward account change happened?
-  if (newRewardAccount) {
-    // this will change the `channel`!
-    handleChannelRewardAccountChange(channel, newRewardAccount)
-  }
-
-  // set last update time
-  channel.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
-
-  // save channel
-  await db.save<Channel>(channel)
-
-  // emit log event
-  logger.info('Channel has been updated', {id: channel.id})
-}
-
-export async function content_ChannelAssetsRemoved(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
-  // read event data
-  const {contentId: contentIds} = new Content.ChannelAssetsRemovedEvent(event).data
-
-  // load channel
-  const assets = await db.getMany(DataObject, { where: {
-    id: In(contentIds.toArray().map(item => item.toString()))
-  } as FindConditions<DataObject>})
-
-  // delete assets
-  for (const asset of assets) {
-    await db.remove<DataObject>(asset)
-  }
-
-  // emit log event
-  logger.info('Channel assets have been removed', {ids: contentIds})
-}
-
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_ChannelCensorshipStatusUpdated(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
-  // read event data
-  const {channelId, isCensored} = new Content.ChannelCensorshipStatusUpdatedEvent(event).data
-
-  // load event
-  const channel = await db.get(Channel, { where: { id: channelId.toString() } as FindConditions<Channel> })
-
-  // ensure channel exists
-  if (!channel) {
-    return inconsistentState('Non-existing channel censoring requested', channelId)
-  }
-
-  // update channel
-  channel.isCensored = isCensored.isTrue;
-
-  // set last update time
-  channel.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
-
-  // save channel
-  await db.save<Channel>(channel)
-
-  // emit log event
-  logger.info('Channel censorship status has been updated', {id: channelId, isCensored: isCensored.isTrue})
-}
-
-/////////////////// ChannelCategory ////////////////////////////////////////////
-
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_ChannelCategoryCreated(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
-  // read event data
-  const {channelCategoryCreationParameters, channelCategoryId} = new Content.ChannelCategoryCreatedEvent(event).data
-
-  // read metadata
-  const protobufContent = await readProtobuf(
-    new ChannelCategory(),
-    {
-      metadata: channelCategoryCreationParameters.meta,
-      db,
-      blockNumber: event.blockNumber,
-    }
-  )
-
-  // create new channel category
-  const channelCategory = new ChannelCategory({
-    // main data
-    id: channelCategoryId.toString(),
-    channels: [],
-    createdInBlock: event.blockNumber,
-
-    // fill in auto-generated fields
-    createdAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-    updatedAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-
-    // integrate metadata
-    ...protobufContent
-  })
-
-  // save channel
-  await db.save<ChannelCategory>(channelCategory)
-
-  // emit log event
-  logger.info('Channel category has been created', {id: channelCategory.id})
-}
-
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_ChannelCategoryUpdated(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
-  // read event data
-  const {
-    channelCategoryId,
-    channelCategoryUpdateParameters,
-  } = new Content.ChannelCategoryUpdatedEvent(event).data
-
-  // load channel category
-  const channelCategory = await db.get(ChannelCategory, { where: {
-    id: channelCategoryId.toString()
-  } as FindConditions<ChannelCategory> })
-
-  // ensure channel exists
-  if (!channelCategory) {
-    return inconsistentState('Non-existing channel category update requested', channelCategoryId)
-  }
-
-  // read metadata
-  const protobufContent = await readProtobuf(
-    new ChannelCategory(),
-    {
-      metadata: channelCategoryUpdateParameters.new_meta,
-      db,
-      blockNumber: event.blockNumber,
-    }
-  )
-
-  // update all fields read from protobuf
-  for (let [key, value] of Object.entries(protobufContent)) {
-    channelCategory[key] = value
-  }
-
-  // set last update time
-  channelCategory.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
-
-  // save channel category
-  await db.save<ChannelCategory>(channelCategory)
-
-  // emit log event
-  logger.info('Channel category has been updated', {id: channelCategory.id})
-}
-
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_ChannelCategoryDeleted(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
-  // read event data
-  const {channelCategoryId} = new Content.ChannelCategoryDeletedEvent(event).data
-
-  // load channel category
-  const channelCategory = await db.get(ChannelCategory, { where: {
-    id: channelCategoryId.toString()
-  } as FindConditions<ChannelCategory> })
-
-  // ensure channel category exists
-  if (!channelCategory) {
-    return inconsistentState('Non-existing channel category deletion requested', channelCategoryId)
-  }
-
-  // delete channel category
-  await db.remove<ChannelCategory>(channelCategory)
-
-  // emit log event
-  logger.info('Channel category has been deleted', {id: channelCategory.id})
-}
-
-/////////////////// Helpers ////////////////////////////////////////////////////
-
-function handleChannelRewardAccountChange(
-  channel: Channel, // will be modified inside of the function!
-  reward_account: Option<AccountId>
-) {
-  const rewardAccount = reward_account.unwrapOr(null)
-
-  // new different reward account set?
-  if (rewardAccount) {
-    channel.rewardAccount = rewardAccount.toString()
-    return
-  }
-
-  // reward account removed
-
-  channel.rewardAccount = undefined // plan deletion (will have effect when saved to db)
-}

+ 0 - 126
query-node/mappings/src/content/curatorGroup.ts

@@ -1,126 +0,0 @@
-import { fixBlockTimestamp } from '../eventFix'
-import { SubstrateEvent } from '@dzlzv/hydra-common'
-import { DatabaseManager } from '@dzlzv/hydra-db-utils'
-import { FindConditions } from 'typeorm'
-
-import { CuratorGroup } from 'query-node'
-import { Content } from '../../../generated/types'
-
-import {
-  inconsistentState,
-  logger,
-} from '../common'
-
-export async function content_CuratorGroupCreated(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
-  // read event data
-  const {curatorGroupId} = new Content.CuratorGroupCreatedEvent(event).data
-
-  // create new curator group
-  const curatorGroup = new CuratorGroup({
-    // main data
-    id: curatorGroupId.toString(),
-    curatorIds: [],
-    isActive: false, // runtime creates inactive curator groups by default
-
-    // fill in auto-generated fields
-    createdAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-    updatedAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-  })
-
-  // save curator group
-  await db.save<CuratorGroup>(curatorGroup)
-
-  // emit log event
-  logger.info('Curator group has been created', {id: curatorGroupId})
-}
-
-export async function content_CuratorGroupStatusSet(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
-  // read event data
-  const {curatorGroupId, bool: isActive} = new Content.CuratorGroupStatusSetEvent(event).data
-
-  // load curator group
-  const curatorGroup = await db.get(CuratorGroup, { where: { id: curatorGroupId.toString() } as FindConditions<CuratorGroup>})
-
-  // ensure curator group exists
-  if (!curatorGroup) {
-    return inconsistentState('Non-existing curator group status set requested', curatorGroupId)
-  }
-
-  // update curator group
-  curatorGroup.isActive = isActive.isTrue
-
-  // set last update time
-  curatorGroup.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
-
-  // save curator group
-  await db.save<CuratorGroup>(curatorGroup)
-
-  // emit log event
-  logger.info('Curator group status has been set', {id: curatorGroupId, isActive})
-}
-
-export async function content_CuratorAdded(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
-  // read event data
-  const {curatorGroupId, curatorId} = new Content.CuratorAddedEvent(event).data
-
-  // load curator group
-  const curatorGroup = await db.get(CuratorGroup, { where: { id: curatorGroupId.toString() } as FindConditions<CuratorGroup>})
-
-  // ensure curator group exists
-  if (!curatorGroup) {
-    return inconsistentState('Curator add to non-existing curator group requested', curatorGroupId)
-  }
-
-  // update curator group
-  curatorGroup.curatorIds.push(curatorId.toNumber())
-
-  // set last update time
-  curatorGroup.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
-
-  // save curator group
-  await db.save<CuratorGroup>(curatorGroup)
-
-  // emit log event
-  logger.info('Curator has been added to curator group', {id: curatorGroupId, curatorId})
-}
-
-export async function content_CuratorRemoved(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
-  // read event data
-  const {curatorGroupId, curatorId} = new Content.CuratorAddedEvent(event).data
-
-  // load curator group
-  const curatorGroup = await db.get(CuratorGroup, { where: { id: curatorGroupId.toString() } as FindConditions<CuratorGroup>})
-
-  // ensure curator group exists
-  if (!curatorGroup) {
-    return inconsistentState('Non-existing curator group removal requested', curatorGroupId)
-  }
-
-  const curatorIndex = curatorGroup.curatorIds.indexOf(curatorId.toNumber())
-
-  // ensure curator group exists
-  if (curatorIndex < 0) {
-    return inconsistentState('Non-associated curator removal from curator group requested', curatorId)
-  }
-
-  // update curator group
-  curatorGroup.curatorIds.splice(curatorIndex, 1)
-
-  // save curator group
-  await db.save<CuratorGroup>(curatorGroup)
-
-  // emit log event
-  logger.info('Curator has been removed from curator group', {id: curatorGroupId, curatorId})
-}

+ 0 - 499
query-node/mappings/src/content/video.ts

@@ -1,499 +0,0 @@
-import BN from 'bn.js'
-import { fixBlockTimestamp } from '../eventFix'
-import { SubstrateEvent } from '@dzlzv/hydra-common'
-import { DatabaseManager } from '@dzlzv/hydra-db-utils'
-import { FindConditions, In } from 'typeorm'
-
-import {
-  Content,
-} from '../../../generated/types'
-
-import {
-  inconsistentState,
-  logger,
-} from '../common'
-
-import {
-  convertContentActorToDataObjectOwner,
-  readProtobuf,
-  readProtobufWithAssets,
-  RawVideoMetadata,
-} from './utils'
-
-// primary entities
-import {
-  AssetAvailability,
-  Channel,
-  Video,
-  VideoCategory,
-  VideoMediaEncoding,
-  VideoMediaMetadata,
-} from 'query-node'
-
-// secondary entities
-import { License } from 'query-node'
-
-// Joystream types
-import {
-  ChannelId,
-} from '@joystream/types/augment'
-
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_VideoCategoryCreated(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
-  // read event data
-  const {
-    videoCategoryId,
-    videoCategoryCreationParameters,
-    contentActor,
-  } = new Content.VideoCategoryCreatedEvent(event).data
-
-  // read metadata
-  const protobufContent = await readProtobuf(
-    new VideoCategory(),
-    {
-      metadata: videoCategoryCreationParameters.meta,
-      db,
-      blockNumber: event.blockNumber,
-    }
-  )
-
-  // create new video category
-  const videoCategory = new VideoCategory({
-    // main data
-    id: videoCategoryId.toString(),
-    videos: [],
-    createdInBlock: event.blockNumber,
-
-    // fill in auto-generated fields
-    createdAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-    updatedAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-
-    // integrate metadata
-    ...protobufContent
-  })
-
-  // save video category
-  await db.save<VideoCategory>(videoCategory)
-
-  // emit log event
-  logger.info('Video category has been created', {id: videoCategoryId})
-}
-
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_VideoCategoryUpdated(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
-  // read event data
-  const {
-    videoCategoryId,
-    videoCategoryUpdateParameters,
-    contentActor,
-  } = new Content.VideoCategoryUpdatedEvent(event).data
-
-  // load video category
-  const videoCategory = await db.get(VideoCategory, { where: { id: videoCategoryId.toString() } as FindConditions<VideoCategory> })
-
-  // ensure video category exists
-  if (!videoCategory) {
-    return inconsistentState('Non-existing video category update requested', videoCategoryId)
-  }
-
-  // read metadata
-  const protobufContent = await readProtobuf(
-    new VideoCategory(),
-    {
-      metadata: videoCategoryUpdateParameters.new_meta,
-      db,
-      blockNumber: event.blockNumber,
-    }
-  )
-
-  // update all fields read from protobuf
-  for (let [key, value] of Object.entries(protobufContent)) {
-    videoCategory[key] = value
-  }
-
-  // set last update time
-  videoCategory.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
-
-  // save video category
-  await db.save<VideoCategory>(videoCategory)
-
-  // emit log event
-  logger.info('Video category has been updated', {id: videoCategoryId})
-}
-
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_VideoCategoryDeleted(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
-  // read event data
-  const {videoCategoryId} = new Content.VideoCategoryDeletedEvent(event).data
-
-  // load video category
-  const videoCategory = await db.get(VideoCategory, { where: { id: videoCategoryId.toString() } as FindConditions<VideoCategory> })
-
-  // ensure video category exists
-  if (!videoCategory) {
-    return inconsistentState('Non-existing video category deletion requested', videoCategoryId)
-  }
-
-  // remove video category
-  await db.remove<VideoCategory>(videoCategory)
-
-  // emit log event
-  logger.info('Video category has been deleted', {id: videoCategoryId})
-}
-
-/////////////////// Video //////////////////////////////////////////////////////
-
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_VideoCreated(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
-  // read event data
-  const {
-    channelId,
-    videoId,
-    videoCreationParameters,
-    contentActor,
-  } = new Content.VideoCreatedEvent(event).data
-
-  // read metadata
-  const protobufContent = await readProtobufWithAssets(
-    new Video(),
-    {
-      metadata: videoCreationParameters.meta,
-      db,
-      blockNumber: event.blockNumber,
-      assets: videoCreationParameters.assets,
-      contentOwner: convertContentActorToDataObjectOwner(contentActor, channelId.toNumber()),
-    }
-  )
-
-  // load channel
-  const channel = await db.get(Channel, { where: { id: channelId.toString() } as FindConditions<Channel> })
-
-  // ensure channel exists
-  if (!channel) {
-    return inconsistentState('Trying to add video to non-existing channel', channelId)
-  }
-
-  // prepare video media metadata (if any)
-  const fixedProtobuf = integrateVideoMediaMetadata(null, protobufContent, event.blockNumber)
-
-  const licenseIsEmpty = fixedProtobuf.license && !Object.keys(fixedProtobuf.license).length
-  if (licenseIsEmpty) { // license deletion was requested - ignore it and consider it empty
-    delete fixedProtobuf.license
-  }
-
-  // create new video
-  const video = new Video({
-    // main data
-    id: videoId.toString(),
-    isCensored: false,
-    channel,
-    createdInBlock: event.blockNumber,
-    isFeatured: false,
-
-    // default values for properties that might or might not be filled by metadata
-    thumbnailPhotoUrls: [],
-    thumbnailPhotoAvailability: AssetAvailability.INVALID,
-    mediaUrls: [],
-    mediaAvailability: AssetAvailability.INVALID,
-
-
-    // fill in auto-generated fields
-    createdAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-    updatedAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-
-    // integrate metadata
-    ...fixedProtobuf
-  })
-
-  // save video
-  await db.save<Video>(video)
-
-  // emit log event
-  logger.info('Video has been created', {id: videoId})
-}
-
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_VideoUpdated(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
-  // read event data
-  const {
-    videoId,
-    videoUpdateParameters,
-    contentActor,
-  } = new Content.VideoUpdatedEvent(event).data
-
-  // load video
-  const video = await db.get(Video, { where: { id: videoId.toString() } as FindConditions<Video>, relations: ['channel', 'license'] })
-
-  // ensure video exists
-  if (!video) {
-    return inconsistentState('Non-existing video update requested', videoId)
-  }
-
-  // prepare changed metadata
-  const newMetadata = videoUpdateParameters.new_meta.unwrapOr(null)
-
-  // license must be deleted AFTER video is saved - plan a license deletion by assigning it to this variable
-  let licenseToDelete: License | null = null
-
-  // update metadata if it was changed
-  if (newMetadata) {
-    const protobufContent = await readProtobufWithAssets(
-      new Video(),
-      {
-        metadata: newMetadata,
-        db,
-        blockNumber: event.blockNumber,
-        assets: videoUpdateParameters.assets.unwrapOr([]),
-        contentOwner: convertContentActorToDataObjectOwner(contentActor, (new BN(video.channel.id)).toNumber()),
-      }
-    )
-
-    // prepare video media metadata (if any)
-    const fixedProtobuf = integrateVideoMediaMetadata(video, protobufContent, event.blockNumber)
-
-    // remember original license
-    const originalLicense = video.license
-
-    // update all fields read from protobuf
-    for (let [key, value] of Object.entries(fixedProtobuf)) {
-      video[key] = value
-    }
-
-    // license has changed - plan old license delete
-    if (originalLicense && video.license != originalLicense) {
-      ([video.license, licenseToDelete] = handleLicenseUpdate(originalLicense, video.license))
-    } else if (!Object.keys(video.license || {}).length) { // license deletion was requested event no license exists?
-      delete video.license // ensure license is empty
-    }
-  }
-
-  // set last update time
-  video.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
-
-  // save video
-  await db.save<Video>(video)
-
-  // delete old license if it's planned
-  if (licenseToDelete) {
-    await db.remove<License>(licenseToDelete)
-  }
-
-  // emit log event
-  logger.info('Video has been updated', {id: videoId})
-}
-
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_VideoDeleted(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
-  // read event data
-  const {videoId} = new Content.VideoDeletedEvent(event).data
-
-  // load video
-  const video = await db.get(Video, { where: { id: videoId.toString() } as FindConditions<Video> })
-
-  // ensure video exists
-  if (!video) {
-    return inconsistentState('Non-existing video deletion requested', videoId)
-  }
-
-  // remove video
-  await db.remove<Video>(video)
-
-  // emit log event
-  logger.info('Video has been deleted', {id: videoId})
-}
-
-
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_VideoCensorshipStatusUpdated(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
-  // read event data
-  const {videoId, isCensored} = new Content.VideoCensorshipStatusUpdatedEvent(event).data
-
-  // load video
-  const video = await db.get(Video, { where: { id: videoId.toString() } as FindConditions<Video> })
-
-  // ensure video exists
-  if (!video) {
-    return inconsistentState('Non-existing video censoring requested', videoId)
-  }
-
-  // update video
-  video.isCensored = isCensored.isTrue;
-
-  // set last update time
-  video.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
-
-  // save video
-  await db.save<Video>(video)
-
-  // emit log event
-  logger.info('Video censorship status has been updated', {id: videoId, isCensored: isCensored.isTrue})
-}
-
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_FeaturedVideosSet(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
-  // read event data
-  const {videoId: videoIds} = new Content.FeaturedVideosSetEvent(event).data
-
-  // load old featured videos
-  const existingFeaturedVideos = await db.getMany(Video, { where: { isFeatured: true } as FindConditions<Video> })
-
-  // comparsion utility
-  const isSame = (videoIdA: string) => (videoIdB: string) => videoIdA == videoIdB
-
-  // calculate diff sets
-  const toRemove = existingFeaturedVideos.filter(existingFV =>
-    !videoIds
-      .map(item => item.toHex())
-      .some(isSame(existingFV.id))
-  )
-  const toAdd = videoIds.filter(video =>
-    !existingFeaturedVideos
-      .map(item => item.id)
-      .some(isSame(video.toHex()))
-  )
-
-  // mark previously featured videos as not-featured
-  for (let video of toRemove) {
-    video.isFeatured = false;
-
-    // set last update time
-    video.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
-
-    await db.save<Video>(video)
-  }
-
-  // escape if no featured video needs to be added
-  if (!toAdd) {
-    // emit log event
-    logger.info('Featured videos unchanged')
-
-    return
-  }
-
-  // read videos previously not-featured videos that are meant to be featured
-  const videosToAdd = await db.getMany(Video, { where: {
-    id: In(toAdd.map(item => item.toString()))
-  } as FindConditions<Video> })
-
-  if (videosToAdd.length != toAdd.length) {
-    return inconsistentState('At least one non-existing video featuring requested', toAdd)
-  }
-
-  // mark previously not-featured videos as featured
-  for (let video of videosToAdd) {
-    video.isFeatured = true;
-
-    // set last update time
-    video.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
-
-    await db.save<Video>(video)
-  }
-
-  // emit log event
-  logger.info('New featured videos have been set', {videoIds})
-}
-
-/////////////////// Helpers ////////////////////////////////////////////////////
-
-/*
-  Integrates video metadata-related data into existing data (if any) or creates a new record.
-
-  NOTE: type hack - `RawVideoMetadata` is accepted for `metadata` instead of `Partial<Video>`
-        see `prepareVideoMetadata()` in `utils.ts` for more info
-*/
-function integrateVideoMediaMetadata(
-  existingRecord: Video | null,
-  metadata: Partial<Video>,
-  blockNumber: number,
-): Partial<Video> {
-  if (!metadata.mediaMetadata) {
-    return metadata
-  }
-
-  // fix TS type
-  const rawMediaMetadata = metadata.mediaMetadata as unknown as RawVideoMetadata
-
-  // ensure encoding object
-  const encoding = (existingRecord && existingRecord.mediaMetadata && existingRecord.mediaMetadata.encoding)
-    || new VideoMediaEncoding({
-        createdById: '1',
-        updatedById: '1',
-      })
-
-  // integrate media encoding-related data
-  rawMediaMetadata.encoding.codecName.integrateInto(encoding, 'codecName')
-  rawMediaMetadata.encoding.container.integrateInto(encoding, 'container')
-  rawMediaMetadata.encoding.mimeMediaType.integrateInto(encoding, 'mimeMediaType')
-
-  // ensure media metadata object
-  const mediaMetadata = (existingRecord && existingRecord.mediaMetadata) || new VideoMediaMetadata({
-    createdInBlock: blockNumber,
-
-    createdById: '1',
-    updatedById: '1',
-  })
-
-  // integrate media-related data
-  rawMediaMetadata.pixelWidth.integrateInto(mediaMetadata, 'pixelWidth')
-  rawMediaMetadata.pixelHeight.integrateInto(mediaMetadata, 'pixelHeight')
-  rawMediaMetadata.size.integrateInto(mediaMetadata, 'size')
-
-  // connect encoding to media metadata object
-  mediaMetadata.encoding = encoding
-
-  return {
-    ...metadata,
-    mediaMetadata
-  }
-}
-
-// returns tuple `[newLicenseForVideo, oldLicenseToBeDeleted]`
-function handleLicenseUpdate(originalLicense, newLicense): [License | undefined, License | null] {
-  const isNewEmpty = !Object.keys(newLicense).length
-
-  if (!originalLicense && isNewEmpty) {
-    return [undefined, null]
-  }
-
-  if (!originalLicense) { // && !isNewEmpty
-    return [newLicense, null]
-  }
-
-  if (!isNewEmpty) { // && originalLicense
-    return [
-      new License({
-        ...originalLicense,
-        ...newLicense,
-      }),
-      null
-    ]
-  }
-
-  // originalLicense && isNewEmpty
-
-  return [originalLicense, null]
-}

+ 0 - 6
query-node/mappings/src/eventFix.ts

@@ -1,6 +0,0 @@
-import BN from 'bn.js'
-
-// Workaround for https://github.com/Joystream/hydra/issues/326 . This file can be removed after it's fixed
-export function fixBlockTimestamp(blockTimestamp: unknown): BN {
-    return new BN(blockTimestamp as string)
-}

+ 0 - 4
query-node/mappings/src/index.ts

@@ -1,4 +0,0 @@
-export * from './content'
-export * from './membership'
-export * from './storage'
-export * from './workingGroup'

+ 0 - 284
query-node/mappings/src/membership.ts

@@ -1,284 +0,0 @@
-import { fixBlockTimestamp } from './eventFix'
-import BN from 'bn.js'
-import { Bytes } from '@polkadot/types'
-import { MemberId } from '@joystream/types/members'
-import { SubstrateEvent } from '@dzlzv/hydra-common'
-import { DatabaseManager } from '@dzlzv/hydra-db-utils'
-import { FindConditions } from 'typeorm'
-
-import {
-  inconsistentState,
-  logger,
-  extractExtrinsicArgs,
-  extractSudoCallParameters,
-} from './common'
-import { Members } from '../../generated/types'
-import { MembershipEntryMethod, Membership } from 'query-node'
-import { EntryMethod } from '@joystream/types/augment'
-
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function members_MemberRegistered(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const { accountId, memberId, entryMethod } = new Members.MemberRegisteredEvent(event).data
-  const { avatarUri, about, handle } = extractExtrinsicArgs(
-    event,
-    Members.BuyMembershipCall,
-    {
-      handle: 1,
-      avatarUri: 2,
-      about: 3,
-    },
-  )
-
-  // create new membership
-  const member = new Membership({
-    // main data
-    id: memberId.toString(),
-    rootAccount: accountId.toString(),
-    controllerAccount: accountId.toString(),
-    handle: convertBytesToString(handle.unwrapOr(null)),
-    about: convertBytesToString(about.unwrapOr(null)),
-    avatarUri: convertBytesToString(avatarUri.unwrapOr(null)),
-    createdInBlock: event.blockNumber,
-    entry: convertEntryMethod(entryMethod),
-
-    // fill in auto-generated fields
-    createdAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-    updatedAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
-  })
-
-  // save membership
-  await db.save<Membership>(member)
-
-  // emit log event
-  logger.info('Member has been registered', {ids: memberId})
-}
-
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function members_MemberUpdatedAboutText(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const { text, memberId } = isUpdateMembershipExtrinsic(event)
-    ? unpackUpdateMembershipOptions(
-        extractExtrinsicArgs(event, Members.UpdateMembershipCall, {memberId: 0, about: 3})
-      )
-    : extractExtrinsicArgs(event, Members.ChangeMemberAboutTextCall, {memberId: 0, text: 1})
-
-  // load member
-  const member = await db.get(Membership, { where: { id: memberId.toString() } as FindConditions<Membership> })
-
-  // ensure member exists
-  if (!member) {
-    return inconsistentState(`Non-existing member about text update requested`, memberId)
-  }
-
-  // update member
-  member.about = convertBytesToString(text)
-
-  // set last update time
-  member.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
-
-  // save member
-  await db.save<Membership>(member)
-
-  // emit log event
-  logger.info("Member's about text has been updated", {ids: memberId})
-}
-
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function members_MemberUpdatedAvatar(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const { uri, memberId } = isUpdateMembershipExtrinsic(event)
-    ? unpackUpdateMembershipOptions(
-        extractExtrinsicArgs(event, Members.UpdateMembershipCall, {memberId: 0, avatarUri: 2})
-      )
-    : extractExtrinsicArgs(event, Members.ChangeMemberAvatarCall, {memberId: 0, uri: 1})
-
-  // load member
-  const member = await db.get(Membership, { where: { id: memberId.toString() } as FindConditions<Membership> })
-
-  // ensure member exists
-  if (!member) {
-    return inconsistentState(`Non-existing member avatar update requested`, memberId)
-  }
-
-  // update member
-  member.avatarUri = convertBytesToString(uri)
-
-  // set last update time
-  member.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
-
-  // save member
-  await db.save<Membership>(member)
-
-  // emit log event
-  logger.info("Member's avatar has been updated", {ids: memberId})
-}
-
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function members_MemberUpdatedHandle(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const { handle, memberId } = isUpdateMembershipExtrinsic(event)
-    ? unpackUpdateMembershipOptions(
-        extractExtrinsicArgs(event, Members.UpdateMembershipCall, {memberId: 0, handle: 1})
-      )
-    : extractExtrinsicArgs(event, Members.ChangeMemberHandleCall, {memberId: 0, handle: 1})
-
-  // load member
-  const member = await db.get(Membership, { where: { id: memberId.toString() } as FindConditions<Membership> })
-
-  // ensure member exists
-  if (!member) {
-    return inconsistentState(`Non-existing member handle update requested`, memberId)
-  }
-
-  // update member
-  member.handle = convertBytesToString(handle)
-
-  // set last update time
-  member.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
-
-  // save member
-  await db.save<Membership>(member)
-
-  // emit log event
-  logger.info("Member's avatar has been updated", {ids: memberId})
-}
-
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function members_MemberSetRootAccount(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const { newRootAccount, memberId } = extractExtrinsicArgs(event, Members.SetRootAccountCall, {memberId: 0, newRootAccount: 1})
-
-  // load member
-  const member = await db.get(Membership, { where: { id: memberId.toString() } as FindConditions<Membership> })
-
-  // ensure member exists
-  if (!member) {
-    return inconsistentState(`Non-existing member root account update requested`, memberId)
-  }
-
-  // update member
-  member.rootAccount = newRootAccount.toString()
-
-  // set last update time
-  member.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
-
-  // save member
-  await db.save<Membership>(member)
-
-  // emit log event
-  logger.info("Member's root has been updated", {ids: memberId})
-}
-
-// eslint-disable-next-line @typescript-eslint/naming-convention
-export async function members_MemberSetControllerAccount(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const { newControllerAccount, memberId } = extractExtrinsicArgs(
-    event,
-    Members.SetControllerAccountCall,
-    {memberId: 0, newControllerAccount: 1},
-  )
-
-  // load member
-  const member = await db.get(Membership, { where: { id: memberId.toString() } as FindConditions<Membership> })
-
-  // ensure member exists
-  if (!member) {
-    return inconsistentState(`Non-existing member controller account update requested`, memberId)
-  }
-
-  // update member
-  member.controllerAccount = newControllerAccount.toString()
-
-  // set last update time
-  member.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
-
-  // save member
-  await db.save<Membership>(member)
-
-  // emit log event
-  logger.info("Member's controller has been updated", {ids: memberId})
-}
-
-/////////////////// Helpers ////////////////////////////////////////////////////
-
-/*
-  Helper for converting Bytes type to string
-*/
-function convertBytesToString(b: Bytes | null): string {
-  if (!b) {
-    return ''
-  }
-
-  const result = Buffer.from(b.toU8a(true)).toString()
-
-  // prevent utf-8 null character
-  if (result.match(/^\0$/)) {
-    return ''
-  }
-
-  return result
-}
-
-function convertEntryMethod(entryMethod: EntryMethod): MembershipEntryMethod {
-  // paid membership?
-  if (entryMethod.isPaid) {
-    return MembershipEntryMethod.PAID
-  }
-
-  // screening membership?
-  if (entryMethod.isScreening) {
-    return MembershipEntryMethod.SCREENING
-  }
-
-  // genesis membership?
-  if (entryMethod.isGenesis) {
-    return MembershipEntryMethod.GENESIS
-  }
-
-  // should never happen
-  logger.error('Not implemented entry method', {entryMethod: entryMethod.toString()})
-  throw 'Not implemented entry method'
-}
-
-/*
-  Returns true if event is emitted inside of `update_membership` extrinsic.
-*/
-function isUpdateMembershipExtrinsic(event: SubstrateEvent): boolean {
-  if (!event.extrinsic) { // this should never happen
-    return false
-  }
-
-  if (event.extrinsic.method == 'updateMembership') {
-    return true
-  }
-
-  // no sudo was used to update membership -> this is not updateMembership
-  if (event.extrinsic.section != 'sudo') {
-    return false
-  }
-
-  const sudoCallParameters = extractSudoCallParameters<unknown[]>(event)
-
-  // very trivial check if update_membership extrinsic was used
-  return sudoCallParameters.args.length == 4 // memberId, handle, avatarUri, about
-}
-
-interface IUnpackedUpdateMembershipOptions {
-  memberId: MemberId
-  handle: Bytes
-  uri: Bytes
-  text: Bytes
-}
-
-/*
-  Returns unwrapped data + unite naming of uri/avatarUri and about/text
-*/
-function unpackUpdateMembershipOptions(args: Members.UpdateMembershipCall['args']): IUnpackedUpdateMembershipOptions {
-  return {
-    memberId: args.memberId,
-    handle: args.handle.unwrapOrDefault(),
-    uri: args.avatarUri.unwrapOrDefault(),
-    text: args.about.unwrapOrDefault(),
-  }
-}

+ 0 - 276
query-node/mappings/src/storage.ts

@@ -1,276 +0,0 @@
-import { fixBlockTimestamp } from './eventFix'
-import { SubstrateEvent } from '@dzlzv/hydra-common'
-import { DatabaseManager } from '@dzlzv/hydra-db-utils'
-import { FindConditions, In } from 'typeorm'
-
-import {
-  inconsistentState,
-  logger,
-  prepareDataObject,
-} from './common'
-
-import {
-  DataDirectory,
-} from '../../generated/types'
-import {
-  ContentId,
-  ContentParameters,
-  StorageObjectOwner,
-} from '@joystream/types/augment'
-
-import { ContentId as Custom_ContentId, ContentParameters as Custom_ContentParameters } from '@joystream/types/storage'
-import { registry } from '@joystream/types'
-
-import {
-  Channel,
-  Video,
-  AssetAvailability,
-
-  DataObject,
-  DataObjectOwner,
-  DataObjectOwnerMember,
-  DataObjectOwnerChannel,
-  DataObjectOwnerDao,
-  DataObjectOwnerCouncil,
-  DataObjectOwnerWorkingGroup,
-  LiaisonJudgement,
-  Worker,
-  WorkerType,
-} from 'query-node'
-
-export async function dataDirectory_ContentAdded(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const {contentParameters, storageObjectOwner} = new DataDirectory.ContentAddedEvent(event).data
-
-  // save all content objects
-  for (let parameters of contentParameters) {
-    const owner = convertStorageObjectOwner(storageObjectOwner)
-    const dataObject = await prepareDataObject(parameters, event.blockNumber, owner)
-
-    // fill in auto-generated fields
-    dataObject.createdAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
-    dataObject.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
-
-    await db.save<DataObject>(dataObject)
-  }
-
-  // emit log event
-  logger.info("Storage content has beed added", {ids: contentParameters.map(item => encodeContentId(item.content_id))})
-}
-
-export async function dataDirectory_ContentRemoved(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const {contentId: contentIds} = new DataDirectory.ContentRemovedEvent(event).data
-
-  // load assets
-  const dataObjects = await db.getMany(DataObject, { where: {
-    joystreamContentId: In(contentIds.map(item => encodeContentId(item)))
-  } as FindConditions<DataObject> })
-
-  // store dataObject ids before they are deleted (for logging purposes)
-  const dataObjectIds = dataObjects.map(item => item.id)
-
-  // remove assets from database
-  for (let item of dataObjects) {
-    // ensure dataObject is nowhere used to prevent db constraint error
-    await disconnectDataObjectRelations(db, item)
-
-    // remove data object
-    await db.remove<DataObject>(item)
-  }
-
-  // emit log event
-  logger.info("Storage content have been removed", {id: contentIds, dataObjectIds})
-}
-
-export async function dataDirectory_ContentAccepted(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const {contentId, storageProviderId} = new DataDirectory.ContentAcceptedEvent(event).data
-  const encodedContentId = encodeContentId(contentId)
-
-  // load asset
-  const dataObject = await db.get(DataObject, { where: { joystreamContentId: encodedContentId } as FindConditions<DataObject>})
-
-  // ensure object exists
-  if (!dataObject) {
-    return inconsistentState('Non-existing content acceptation requested', encodedContentId)
-  }
-
-  // load storage provider
-  const worker = await db.get(Worker, {
-    where: {
-      workerId: storageProviderId.toString(),
-      type: WorkerType.STORAGE,
-    } as FindConditions<Worker>
-  })
-
-  // ensure object exists
-  if (!worker) {
-    return inconsistentState('Missing Storage Provider Id', storageProviderId)
-  }
-
-  // update object
-  dataObject.liaison = worker
-  dataObject.liaisonJudgement = LiaisonJudgement.ACCEPTED
-
-  // set last update time
-  dataObject.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
-
-  // save object
-  await db.save<DataObject>(dataObject)
-
-  // emit log event
-  logger.info("Storage content has been accepted", {id: encodedContentId})
-
-  // update asset availability for all connected channels and videos
-  // this will not be needed after the redundant AssetAvailability is removed (after some Hydra upgrades)
-  await updateConnectedAssets(db, dataObject)
-}
-
-/////////////////// Updating connected entities ////////////////////////////////
-
-async function updateConnectedAssets(db: DatabaseManager, dataObject: DataObject) {
-  await updateSingleConnectedAsset(db, new Channel(), 'avatarPhoto', dataObject)
-  await updateSingleConnectedAsset(db, new Channel(), 'coverPhoto', dataObject)
-
-  await updateSingleConnectedAsset(db, new Video(), 'thumbnailPhoto', dataObject)
-  await updateSingleConnectedAsset(db, new Video(), 'media', dataObject)
-}
-
-//async function updateSingleConnectedAsset(db: DatabaseManager, type: typeof Channel | typeof Video, propertyName: string, dataObject: DataObject) {
-async function updateSingleConnectedAsset<T extends Channel | Video>(db: DatabaseManager, type: T, propertyName: string, dataObject: DataObject) {
-  // prepare lookup condition
-  const condition = {
-    where: {
-      [propertyName + 'DataObject']: dataObject
-    }
-  } // as FindConditions<T>
-
-  // NOTE: we don't need to retrieve multiple channels/videos via `db.getMany()` because dataObject
-  //       is allowed to be associated only with one channel/video in runtime
-
-  // in theory the following condition(s) can be generalized `... db.get(type, ...` but in practice it doesn't work :-\
-  const item = type instanceof Channel
-    ? await db.get(Channel, condition)
-    : await db.get(Video, condition)
-
-  // escape when no dataObject association found
-  if (!item) {
-    return
-  }
-
-  item[propertyName + 'Availability'] = AssetAvailability.ACCEPTED
-
-  if (type instanceof Channel) {
-    await db.save<Channel>(item)
-
-    // emit log event
-    logger.info("Channel using Content has been accepted", {
-      channelId: item.id.toString(),
-      joystreamContentId: dataObject.joystreamContentId
-    })
-  } else {
-    await db.save<Video>(item)
-
-    // emit log event
-    logger.info("Video using Content has been accepted", {
-      videoId: item.id.toString(),
-      joystreamContentId: dataObject.joystreamContentId
-    })
-  }
-}
-
-// removes connection between dataObject and other entities
-async function disconnectDataObjectRelations(db: DatabaseManager, dataObject: DataObject) {
-  await disconnectSingleDataObjectRelation(db, new Channel(), 'avatarPhoto', dataObject)
-  await disconnectSingleDataObjectRelation(db, new Channel(), 'coverPhoto', dataObject)
-
-  await disconnectSingleDataObjectRelation(db, new Video(), 'thumbnailPhoto', dataObject)
-  await disconnectSingleDataObjectRelation(db, new Video(), 'media', dataObject)
-}
-
-async function disconnectSingleDataObjectRelation<T extends Channel | Video>(db: DatabaseManager, type: T, propertyName: string, dataObject: DataObject) {
-  // prepare lookup condition
-  const condition = {
-    where: {
-      [propertyName + 'DataObject']: dataObject
-    }
-  } // as FindConditions<T>
-
-  // NOTE: we don't need to retrieve multiple channels/videos via `db.getMany()` because dataObject
-  //       is allowed to be associated only with one channel/video in runtime
-
-  // in theory the following condition(s) can be generalized `... db.get(type, ...` but in practice it doesn't work :-\
-  const item = type instanceof Channel
-    ? await db.get(Channel, condition)
-    : await db.get(Video, condition)
-
-  // escape when no dataObject association found
-  if (!item) {
-    return
-  }
-
-  item[propertyName + 'Availability'] = AssetAvailability.INVALID
-  item[propertyName + 'DataObject'] = null
-
-  if (type instanceof Channel) {
-    await db.save<Channel>(item)
-
-    // emit log event
-    logger.info("Content has been disconnected from Channel", {
-      channelId: item.id.toString(),
-      joystreamContentId: dataObject.joystreamContentId})
-  } else { // type instanceof Video
-    await db.save<Video>(item)
-
-    // emit log event
-    logger.info("Content has been disconnected from Video", {
-      videoId: item.id.toString(),
-      joystreamContentId: dataObject.joystreamContentId})
-  }
-}
-
-/////////////////// Helpers ////////////////////////////////////////////////////
-
-function convertStorageObjectOwner(objectOwner: StorageObjectOwner): typeof DataObjectOwner {
-  if (objectOwner.isMember) {
-    const owner = new DataObjectOwnerMember()
-    owner.member = objectOwner.asMember.toNumber()
-
-    return owner
-  }
-
-  if (objectOwner.isChannel) {
-    const owner = new DataObjectOwnerChannel()
-    owner.channel = objectOwner.asChannel.toNumber()
-
-    return owner
-  }
-
-  if (objectOwner.isDao) {
-    const owner = new DataObjectOwnerDao()
-    owner.dao = objectOwner.asDao.toNumber()
-
-    return owner
-  }
-
-  if (objectOwner.isCouncil) {
-    return new DataObjectOwnerCouncil()
-  }
-
-  if (objectOwner.isWorkingGroup) {
-    const owner = new DataObjectOwnerWorkingGroup()
-    owner.workingGroup = objectOwner.asWorkingGroup.toNumber()
-
-    return owner
-  }
-
-  logger.error('Not implemented StorageObjectOwner type', {objectOwner: objectOwner.toString()})
-  throw 'Not implemented StorageObjectOwner type'
-}
-
-function encodeContentId(contentId: ContentId) {
-  const customContentId = new Custom_ContentId(registry, contentId);
-
-  return customContentId.encode()
-}

+ 0 - 203
query-node/mappings/src/workingGroup.ts

@@ -1,203 +0,0 @@
-import { SubstrateEvent } from '@dzlzv/hydra-common'
-import { DatabaseManager } from '@dzlzv/hydra-db-utils'
-import { FindConditions } from 'typeorm'
-import { Bytes } from '@polkadot/types'
-
-import {
-  inconsistentState,
-  logger,
-} from './common'
-
-import {
-  Channel,
-  Worker,
-  WorkerType,
-} from 'query-node'
-import {
-  GatewayWorkingGroup,
-  StorageWorkingGroup,
-} from '../../generated/types'
-import {
-  ApplicationId,
-  ApplicationIdToWorkerIdMap,
-  WorkerId,
-} from "@joystream/types/augment";
-
-/////////////////// Storage working group //////////////////////////////////////
-
-export async function storageWorkingGroup_OpeningFilled(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const {applicationIdToWorkerIdMap} = new StorageWorkingGroup.OpeningFilledEvent(event).data
-
-  // call generic processing
-  await workingGroup_OpeningFilled(db, WorkerType.STORAGE, applicationIdToWorkerIdMap)
-}
-
-export async function storageWorkingGroup_WorkerStorageUpdated(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const {workerId, bytes: newMetadata} = new StorageWorkingGroup.WorkerStorageUpdatedEvent(event).data
-
-  // call generic processing
-  await workingGroup_WorkerStorageUpdated(db, WorkerType.STORAGE, workerId, newMetadata)
-}
-
-export async function storageWorkingGroup_TerminatedWorker(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const {workerId} = new StorageWorkingGroup.TerminatedWorkerEvent(event).data
-
-  // call generic processing
-  await workingGroup_TerminatedWorker(db, WorkerType.STORAGE, workerId)
-}
-
-export async function storageWorkingGroup_WorkerExited(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const {workerId} = new StorageWorkingGroup.WorkerExitedEvent(event).data
-
-  // call generic processing
-  await workingGroup_WorkerExited(db, WorkerType.STORAGE, workerId)
-}
-
-export async function storageWorkingGroup_TerminatedLeader(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const {workerId} = new StorageWorkingGroup.TerminatedLeaderEvent(event).data
-
-  // call generic processing
-  await workingGroup_TerminatedLeader(db, WorkerType.STORAGE, workerId)
-}
-
-/////////////////// Gateway working group //////////////////////////////////////
-
-export async function gatewayWorkingGroup_OpeningFilled(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const {applicationIdToWorkerIdMap} = new GatewayWorkingGroup.OpeningFilledEvent(event).data
-
-  // call generic processing
-  await workingGroup_OpeningFilled(db, WorkerType.GATEWAY, applicationIdToWorkerIdMap)
-}
-
-export async function gatewayWorkingGroup_WorkerStorageUpdated(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const {workerId, bytes: newMetadata} = new GatewayWorkingGroup.WorkerStorageUpdatedEvent(event).data
-
-  // call generic processing
-  await workingGroup_WorkerStorageUpdated(db, WorkerType.GATEWAY, workerId, newMetadata)
-}
-
-export async function gatewayWorkingGroup_TerminatedWorker(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const {workerId} = new GatewayWorkingGroup.TerminatedWorkerEvent(event).data
-
-  // call generic processing
-  await workingGroup_TerminatedWorker(db, WorkerType.GATEWAY, workerId)
-}
-
-export async function gatewayWorkingGroup_WorkerExited(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const {workerId} = new GatewayWorkingGroup.WorkerExitedEvent(event).data
-
-  // call generic processing
-  await workingGroup_WorkerExited(db, WorkerType.GATEWAY, workerId)
-}
-
-export async function gatewayWorkingGroup_TerminatedLeader(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
-  // read event data
-  const {workerId} = new GatewayWorkingGroup.TerminatedLeaderEvent(event).data
-
-  // call generic processing
-  await workingGroup_TerminatedLeader(db, WorkerType.GATEWAY, workerId)
-}
-
-/////////////////// Generic working group processing ///////////////////////////
-
-export async function workingGroup_OpeningFilled(
-  db: DatabaseManager,
-  workerType: WorkerType,
-  applicationIdToWorkerIdMap: ApplicationIdToWorkerIdMap
-): Promise<void> {
-  const workerIds = [...applicationIdToWorkerIdMap.values()]
-
-  for (const workerId of workerIds) {
-    await createWorker(db, workerId, workerType)
-  }
-
-  // emit log event
-  logger.info("Workers have been created", {ids: workerIds.map(item => item.toString()), workerType})
-}
-
-export async function workingGroup_WorkerStorageUpdated(db: DatabaseManager, workerType: WorkerType, workerId: WorkerId, newMetadata: Bytes): Promise<void> {
-  // load worker
-  const worker = await db.get(Worker, {
-    where: {
-      workerId: workerId.toString(),
-      type: workerType,
-    } as FindConditions<Worker>
-  })
-
-  // ensure worker exists
-  if (!worker) {
-    return inconsistentState('Non-existing worker update requested', workerId)
-  }
-
-  worker.metadata = newMetadata.toUtf8()
-
-  await db.save<Worker>(worker)
-
-  // emit log event
-  logger.info("Worker has been updated", {workerId, workerType})
-}
-
-export async function workingGroup_TerminatedWorker(db: DatabaseManager, workerType: WorkerType, workerId: WorkerId): Promise<void> {
-  // do removal logic
-  await deactivateWorker(db, workerType, workerId)
-
-  // emit log event
-  logger.info("Worker has been removed (worker terminated)", {workerId, workerType})
-}
-
-export async function workingGroup_WorkerExited(db: DatabaseManager, workerType: WorkerType, workerId: WorkerId): Promise<void> {
-  // do removal logic
-  await deactivateWorker(db, workerType, workerId)
-
-  // emit log event
-  logger.info("Worker has been removed (worker exited)", {workerId, workerType})
-}
-
-export async function workingGroup_TerminatedLeader(db: DatabaseManager, workerType: WorkerType, workerId: WorkerId): Promise<void> {
-  // do removal logic
-  await deactivateWorker(db, workerType, workerId)
-
-  // emit log event
-  logger.info("Working group leader has been removed (worker exited)", {workerId, workerType})
-}
-
-/////////////////// Helpers ////////////////////////////////////////////////////
-
-async function createWorker(db: DatabaseManager, workerId: WorkerId, workerType: WorkerType): Promise<void> {
-  // create new worker
-  const newWorker = new Worker({
-    workerId: workerId.toString(),
-    type: workerType,
-    isActive: true,
-  })
-
-  await db.save<Worker>(newWorker)
-}
-
-async function deactivateWorker(db: DatabaseManager, workerType: WorkerType, workerId: WorkerId) {
-  // load worker
-  const worker = await db.get(Worker, {
-    where: {
-      workerId: workerId.toString(),
-      type: workerType,
-    } as FindConditions<Worker>
-  })
-
-  // ensure worker exists
-  if (!worker) {
-    return inconsistentState('Non-existing worker deletion requested', workerId)
-  }
-
-  worker.isActive = false
-
-  await db.save<Worker>(worker)
-}

+ 271 - 0
query-node/mappings/storage.ts

@@ -0,0 +1,271 @@
+/*
+eslint-disable @typescript-eslint/naming-convention
+*/
+import { EventContext, StoreContext, DatabaseManager } from '@dzlzv/hydra-common'
+import { FindConditions, In } from 'typeorm'
+import {
+  getWorker,
+  getWorkingGroupModuleName,
+  inconsistentState,
+  logger,
+  prepareDataObject,
+  unexpectedData,
+} from './common'
+import { DataDirectory } from './generated/types'
+import { ContentId, StorageObjectOwner } from '@joystream/types/augment'
+import { ContentId as Custom_ContentId } from '@joystream/types/storage'
+import { registry } from '@joystream/types'
+import {
+  Channel,
+  Video,
+  AssetAvailability,
+  DataObject,
+  DataObjectOwner,
+  DataObjectOwnerMember,
+  DataObjectOwnerChannel,
+  DataObjectOwnerDao,
+  DataObjectOwnerCouncil,
+  DataObjectOwnerWorkingGroup,
+  LiaisonJudgement,
+} from 'query-node/dist/model'
+
+export async function dataDirectory_ContentAdded({ store, event }: EventContext & StoreContext): Promise<void> {
+  // read event data
+  const { contentParameters, storageObjectOwner } = new DataDirectory.ContentAddedEvent(event).data
+
+  // save all content objects
+  for (const parameters of contentParameters) {
+    const owner = convertStorageObjectOwner(storageObjectOwner)
+    const dataObject = await prepareDataObject(parameters, event.blockNumber, owner)
+
+    // fill in auto-generated fields
+    dataObject.createdAt = new Date(event.blockTimestamp)
+    dataObject.updatedAt = new Date(event.blockTimestamp)
+
+    await store.save<DataObject>(dataObject)
+  }
+
+  // emit log event
+  logger.info('Storage content has been added', {
+    ids: contentParameters.map((item) => encodeContentId(item.content_id)),
+  })
+}
+
+export async function dataDirectory_ContentRemoved({ store, event }: EventContext & StoreContext): Promise<void> {
+  // read event data
+  const { contentId: contentIds } = new DataDirectory.ContentRemovedEvent(event).data
+
+  // load assets
+  const dataObjects = await store.getMany(DataObject, {
+    where: {
+      joystreamContentId: In(contentIds.map((item) => encodeContentId(item))),
+    } as FindConditions<DataObject>,
+  })
+
+  // store dataObject ids before they are deleted (for logging purposes)
+  const dataObjectIds = dataObjects.map((item) => item.id)
+
+  // remove assets from database
+  for (const item of dataObjects) {
+    // ensure dataObject is nowhere used to prevent db constraint error
+    await disconnectDataObjectRelations(store, item)
+
+    // remove data object
+    await store.remove<DataObject>(item)
+  }
+
+  // emit log event
+  logger.info('Storage content has been removed', { id: contentIds, dataObjectIds })
+}
+
+export async function dataDirectory_ContentAccepted({ store, event }: EventContext & StoreContext): Promise<void> {
+  // read event data
+  const { contentId, storageProviderId } = new DataDirectory.ContentAcceptedEvent(event).data
+  const encodedContentId = encodeContentId(contentId)
+
+  // load asset
+  const dataObject = await store.get(DataObject, {
+    where: { joystreamContentId: encodedContentId } as FindConditions<DataObject>,
+  })
+
+  // ensure object exists
+  if (!dataObject) {
+    return inconsistentState('Non-existing content acceptance requested', encodedContentId)
+  }
+
+  // load storage provider
+  const worker = await getWorker(store, 'storageWorkingGroup', storageProviderId)
+
+  // update object
+  dataObject.liaison = worker
+  dataObject.liaisonJudgement = LiaisonJudgement.ACCEPTED
+
+  // set last update time
+  dataObject.updatedAt = new Date(event.blockTimestamp)
+
+  // save object
+  await store.save<DataObject>(dataObject)
+
+  // emit log event
+  logger.info('Storage content has been accepted', { id: encodedContentId })
+
+  // update asset availability for all connected channels and videos
+  // this will not be needed after the redundant AssetAvailability is removed (after some Hydra upgrades)
+  await updateConnectedAssets(store, dataObject)
+}
+
+/// //////////////// Updating connected entities ////////////////////////////////
+
+async function updateConnectedAssets(store: DatabaseManager, dataObject: DataObject) {
+  await updateSingleConnectedAsset(store, new Channel(), 'avatarPhoto', dataObject)
+  await updateSingleConnectedAsset(store, new Channel(), 'coverPhoto', dataObject)
+
+  await updateSingleConnectedAsset(store, new Video(), 'thumbnailPhoto', dataObject)
+  await updateSingleConnectedAsset(store, new Video(), 'media', dataObject)
+}
+
+// async function updateSingleConnectedAsset(store: DatabaseManager, type: typeof Channel | typeof Video, propertyName: string, dataObject: DataObject) {
+async function updateSingleConnectedAsset<T extends Channel | Video>(
+  store: DatabaseManager,
+  type: T,
+  propertyName: string,
+  dataObject: DataObject
+) {
+  // prepare lookup condition
+  const condition = {
+    where: {
+      [propertyName + 'DataObject']: dataObject,
+    },
+  } // as FindConditions<T>
+
+  // NOTE: we don't need to retrieve multiple channels/videos via `store.getMany()` because dataObject
+  //       is allowed to be associated only with one channel/video in runtime
+
+  // in theory the following condition(s) can be generalized `... store.get(type, ...` but in practice it doesn't work :-\
+  const item = type instanceof Channel ? await store.get(Channel, condition) : await store.get(Video, condition)
+
+  // escape when no dataObject association found
+  if (!item) {
+    return
+  }
+
+  item[propertyName + 'Availability'] = AssetAvailability.ACCEPTED
+
+  if (type instanceof Channel) {
+    await store.save<Channel>(item)
+
+    // emit log event
+    logger.info('Channel using Content has been accepted', {
+      channelId: item.id.toString(),
+      joystreamContentId: dataObject.joystreamContentId,
+    })
+  } else {
+    await store.save<Video>(item)
+
+    // emit log event
+    logger.info('Video using Content has been accepted', {
+      videoId: item.id.toString(),
+      joystreamContentId: dataObject.joystreamContentId,
+    })
+  }
+}
+
+// removes connection between dataObject and other entities
+async function disconnectDataObjectRelations(store: DatabaseManager, dataObject: DataObject) {
+  await disconnectSingleDataObjectRelation(store, new Channel(), 'avatarPhoto', dataObject)
+  await disconnectSingleDataObjectRelation(store, new Channel(), 'coverPhoto', dataObject)
+
+  await disconnectSingleDataObjectRelation(store, new Video(), 'thumbnailPhoto', dataObject)
+  await disconnectSingleDataObjectRelation(store, new Video(), 'media', dataObject)
+}
+
+async function disconnectSingleDataObjectRelation<T extends Channel | Video>(
+  store: DatabaseManager,
+  type: T,
+  propertyName: string,
+  dataObject: DataObject
+) {
+  // prepare lookup condition
+  const condition = {
+    where: {
+      [propertyName + 'DataObject']: dataObject,
+    },
+  } // as FindConditions<T>
+
+  // NOTE: we don't need to retrieve multiple channels/videos via `store.getMany()` because dataObject
+  //       is allowed to be associated only with one channel/video in runtime
+
+  // in theory the following condition(s) can be generalized `... store.get(type, ...` but in practice it doesn't work :-\
+  const item = type instanceof Channel ? await store.get(Channel, condition) : await store.get(Video, condition)
+
+  // escape when no dataObject association found
+  if (!item) {
+    return
+  }
+
+  item[propertyName + 'Availability'] = AssetAvailability.INVALID
+  item[propertyName + 'DataObject'] = null
+
+  if (type instanceof Channel) {
+    await store.save<Channel>(item)
+
+    // emit log event
+    logger.info('Content has been disconnected from Channel', {
+      channelId: item.id.toString(),
+      joystreamContentId: dataObject.joystreamContentId,
+    })
+  } else {
+    // type instanceof Video
+    await store.save<Video>(item)
+
+    // emit log event
+    logger.info('Content has been disconnected from Video', {
+      videoId: item.id.toString(),
+      joystreamContentId: dataObject.joystreamContentId,
+    })
+  }
+}
+
+/// //////////////// Helpers ////////////////////////////////////////////////////
+
+function convertStorageObjectOwner(objectOwner: StorageObjectOwner): typeof DataObjectOwner {
+  if (objectOwner.isMember) {
+    const owner = new DataObjectOwnerMember()
+    owner.memberId = objectOwner.asMember.toString()
+
+    return owner
+  }
+
+  if (objectOwner.isChannel) {
+    const owner = new DataObjectOwnerChannel()
+    owner.channelId = objectOwner.asChannel.toString()
+
+    return owner
+  }
+
+  if (objectOwner.isDao) {
+    const owner = new DataObjectOwnerDao()
+    owner.dao = objectOwner.asDao.toNumber()
+
+    return owner
+  }
+
+  if (objectOwner.isCouncil) {
+    return new DataObjectOwnerCouncil()
+  }
+
+  if (objectOwner.isWorkingGroup) {
+    const owner = new DataObjectOwnerWorkingGroup()
+    owner.workingGroupId = getWorkingGroupModuleName(objectOwner.asWorkingGroup)
+
+    return owner
+  }
+
+  unexpectedData('Not implemented StorageObjectOwner type', objectOwner.toString())
+}
+
+function encodeContentId(contentId: ContentId) {
+  const customContentId = new Custom_ContentId(registry, contentId)
+
+  return customContentId.encode()
+}

+ 1 - 0
query-node/mappings/tsconfig.json

@@ -13,6 +13,7 @@
     "experimentalDecorators": true,
     "emitDecoratorMetadata": true,
     "skipLibCheck": true,
+    "resolveJsonModule": true,
     "paths": {
       "@polkadot/types/augment": ["../../types/augment/augment-types.ts"],
       "@polkadot/api/augment": ["../../types/augment/augment-api.ts"]

+ 1 - 2
query-node/package.json

@@ -33,8 +33,7 @@
     "docker:db:up": "(cd ../ && docker-compose up -d db)",
     "docker:db:migrate": "docker run --env-file .env --env DB_HOST=db --env TYPEORM_HOST=db --network container:${PWD##*/}_db_1 hydra-kit:latest yarn db:migrate",
     "docker:up": "docker-compose up -d",
-    "format": "prettier ./ --write",
-    "db:init": "node --unhandled-rejections=strict ./mappings/lib/init.js"
+    "format": "prettier ./ --write"
   },
   "author": "",
   "license": "ISC",

+ 8 - 164
query-node/schema.graphql → query-node/schemas/content.graphql

@@ -1,45 +1,13 @@
-enum Network {
-  BABYLON
-  ALEXANDRIA
-  ROME
-}
-
-enum MembershipEntryMethod {
-  PAID
-  SCREENING
-  GENESIS
-}
-
-"Stored information about a registered user"
-type Membership @entity {
-  "MemberId: runtime identifier for a user"
-  id: ID!
-
-  "The unique handle chosen by member"
-  handle: String! @unique @fulltext(query: "membersByHandle")
-
-  "A Url to member's Avatar image"
-  avatarUri: String
-
-  "Short text chosen by member to share information about themselves"
-  about: String
-
-  "Member's controller account id"
-  controllerAccount: String!
-
-  "Member's root account id"
-  rootAccount: String!
-
-  "Blocknumber when member was registered"
-  createdInBlock: Int!
-
-  "How the member was registered"
-  entry: MembershipEntryMethod!
+"Asset availability representation"
+enum AssetAvailability {
+  "Asset is available in storage"
+  ACCEPTED
 
-  "The type of subscription the member has purchased if any."
-  subscription: Int
+  "Asset is being uploaded to storage"
+  PENDING
 
-  channels: [Channel!]! @derivedFrom(field: "ownerMember")
+  "Invalid storage (meta)data used"
+  INVALID
 }
 
 "Category of media channel"
@@ -54,106 +22,6 @@ type ChannelCategory @entity {
   createdInBlock: Int!
 }
 
-"Asset availability representation"
-enum AssetAvailability {
-  "Asset is available in storage"
-  ACCEPTED,
-
-  "Asset is being uploaded to storage"
-  PENDING,
-
-  "Invalid storage (meta)data used"
-  INVALID,
-}
-
-"The decision of the storage provider when it acts as liaison"
-enum LiaisonJudgement {
-  "Content awaits for a judgment"
-  PENDING,
-
-  "Content accepted"
-  ACCEPTED,
-}
-
-"Manages content ids, type and storage provider decision about it"
-type DataObject @entity {
-  "Content owner"
-  owner: DataObjectOwner!
-
-  "Content added at"
-  createdInBlock: Int!
-
-  "Content type id"
-  typeId: Int!
-
-  "Content size in bytes"
-  size: Int!
-
-  "Storage provider id of the liaison"
-  liaison: Worker # liaison is unset until storage provider accepts or rejects the content
-
-  "Storage provider as liaison judgment"
-  liaisonJudgement: LiaisonJudgement!
-
-  "IPFS content id"
-  ipfsContentId: String!
-
-  "Joystream runtime content"
-  joystreamContentId: String!
-}
-
-"Owner type for storage object"
-union DataObjectOwner = DataObjectOwnerMember
-  | DataObjectOwnerChannel
-  | DataObjectOwnerDao
-  | DataObjectOwnerCouncil
-  | DataObjectOwnerWorkingGroup
-
-"Asset owned by a member"
-type DataObjectOwnerMember @variant {
-  # use `Int` instead of `Membership` before variant relations are featured in Hydra
-  # TODO: setup proper relations
-  #"Member identifier"
-  #memberId: Membership!
-  "Member identifier"
-  member: Int!
-
-  "Variant needs to have at least one property. This value is not used."
-  dummy: Int
-}
-
-"Asset owned by a channel"
-type DataObjectOwnerChannel @variant {
-  # use `Int` instead of `Channel` before variant relations are featured in Hydra
-  #"Channel identifier"
-  #channel: Channel!
-  "Channel identifier"
-  channel: Int!
-
-  "Variant needs to have at least one property. This value is not used."
-  dummy: Int
-}
-
-"Asset owned by a DAO"
-type DataObjectOwnerDao @variant {
-  "DAO identifier"
-  dao: Int!
-}
-
-"Asset owned by the Council"
-type DataObjectOwnerCouncil @variant {
-  "Variant needs to have at least one property. This value is not used."
-  dummy: Int
-}
-
-"Asset owned by a WorkingGroup"
-type DataObjectOwnerWorkingGroup @variant {
-  "Working group identifier"
-  workingGroup: Int!
-}
-
-#### High Level Derivative Entities ####
-
 type Language @entity {
   "Runtime entity identifier (EntityId)"
   id: ID!
@@ -376,27 +244,3 @@ type License @entity {
   "Custom license content"
   custom_text: String
 }
-
-enum WorkerType {
-  GATEWAY
-  STORAGE
-}
-
-type Worker @entity {
-  "Unique identifier"
-  id: ID!
-
-  "Sign of worker still being active"
-  isActive: Boolean!
-
-  "Runtime identifier"
-  workerId: String!
-
-  "Associated working group"
-  type: WorkerType!
-
-  "Custom metadata set by provider"
-  metadata: String
-
-  dataObjects: [DataObject!]! @derivedFrom(field: "liaison")
-}

+ 3 - 0
query-node/schemas/membership.graphql

@@ -77,6 +77,9 @@ type Membership @entity {
   # Required for ProposalDiscussionWhitelist->members Many-to-Many relationship
   "List of proposal thread whitelists the member is part of"
   whitelistedIn: [ProposalDiscussionWhitelist!] @derivedFrom(field: "members")
+
+  "Content channels the member owns"
+  channels: [Channel!] @derivedFrom(field: "ownerMember")
 }
 
 type MembershipSystemSnapshot @entity {
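
The new `channels` field exposes the member-to-channel relation from the membership side. A hedged sketch of querying it over the query node's GraphQL API; the endpoint URL, the `memberships` query root and the `node-fetch` dependency are assumptions for illustration, not part of this change:

  import fetch from 'node-fetch'

  // Selects a few members together with the channels they own (via the derived relation)
  const MEMBER_CHANNELS_QUERY = `
    query {
      memberships(limit: 5) {
        handle
        channels { id }
      }
    }
  `

  async function main(): Promise<void> {
    const response = await fetch('http://localhost:8081/graphql', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ query: MEMBER_CHANNELS_QUERY }),
    })
    console.log(JSON.stringify(await response.json(), null, 2))
  }

  main().catch(console.error)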

+ 10 - 22
query-node/schemas/storage.graphql

@@ -5,9 +5,6 @@ enum LiaisonJudgement {
 
   "Content accepted"
   ACCEPTED
-
-  "Content rejected"
-  REJECTED
 }
 
 "Manages content ids, type and storage provider decision about it"
@@ -16,16 +13,16 @@ type DataObject @entity {
   owner: DataObjectOwner!
 
   "Content added at"
-  addedAt: Int!
+  createdInBlock: Int!
 
   "Content type id"
   typeId: Int!
 
   "Content size in bytes"
-  size: BigInt!
+  size: Int!
 
   "Storage provider id of the liaison"
-  liaisonId: BigInt!
+  liaison: Worker # liaison is unset until storage provider accepts or rejects the content
 
   "Storage provider as liaison judgment"
   liaisonJudgement: LiaisonJudgement!
@@ -47,11 +44,8 @@ union DataObjectOwner =
 
 "Asset owned by a member"
 type DataObjectOwnerMember @variant {
-  # use `BigInt` instead of `Membership` before variant relations are featured in Hydra
-  #"Member identifier"
-  #memberId: Membership!
-  "Member identifier"
-  member: BigInt!
+  "Related member"
+  member: Membership!
 
   "Variant needs to have at least one property. This value is not used."
   dummy: Int
@@ -59,11 +53,8 @@ type DataObjectOwnerMember @variant {
 
 "Asset owned by a channel"
 type DataObjectOwnerChannel @variant {
-  # use `BigInt` instead of `Channel` before variant relations are featured in Hydra
-  #"Channel identifier"
-  #channel: Channel!
-  "Channel identifier"
-  channel: BigInt!
+  "Related channel"
+  channel: Channel!
 
   "Variant needs to have at least one property. This value is not used."
   dummy: Int
@@ -72,7 +63,7 @@ type DataObjectOwnerChannel @variant {
 "Asset owned by a DAO"
 type DataObjectOwnerDao @variant {
   "DAO identifier"
-  dao: BigInt!
+  dao: Int!
 }
 
 "Asset owned by the Council"
@@ -83,9 +74,6 @@ type DataObjectOwnerCouncil @variant {
 
 "Asset owned by a WorkingGroup"
 type DataObjectOwnerWorkingGroup @variant {
-  #"Working group identifier"
-  #workingGroup: BigInt!
-
-  "Variant needs to have at least one property. This value is not used."
-  dummy: Int
+  "Working group"
+  workingGroup: WorkingGroup!
 }

+ 3 - 0
query-node/schemas/workingGroups.graphql

@@ -78,6 +78,9 @@ type Worker @entity {
 
   "Forum categories managed by the worker (required for many-to-many relationship with ForumCategory)"
   managedForumCategories: [ForumCategory!] @derivedFrom(field: "moderators")
+
+  "Stored data objects (in case of storage worker)"
+  dataObjects: [DataObject!] @derivedFrom(field: "liaison")
 }
 
 type WorkingGroupMetadata @entity {
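
Together with the `liaison` relation on `DataObject` in the storage schema, this derived field gives the reverse lookup from a worker to the objects it stores. A sketch of a query that uses it; the `workers` query root and the selected field names follow Hydra's generated naming conventions and are assumptions rather than something introduced here:

  // Illustrative query text only; adjust names to the generated GraphQL API if they differ.
  export const STORAGE_WORKER_OBJECTS_QUERY = `
    query {
      workers(limit: 10) {
        id
        dataObjects {
          joystreamContentId
          liaisonJudgement
        }
      }
    }
  `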

+ 0 - 3
query-node/start.sh

@@ -24,9 +24,6 @@ yarn workspace query-node config:dev
 yarn workspace query-node-root db:prepare
 yarn workspace query-node-root db:migrate
 
-# Initialize databse (ie. membership module configuration)
-yarn workspace query-node-root db:init
-
 docker-compose up -d graphql-server-mnt
 
 # Starting up processor will bring up all services it depends on