فهرست منبع

Merge branch 'giza_staging' into distributor-node-staging-2

Leszek Wiesner 3 سال پیش
والد
کامیت
874900594e
100 فایل‌های تغییر یافته به همراه 8422 افزوده شده و 1677 حذف شده
  1. 8 3
      .env
  2. 32 32
      .github/workflows/joystream-cli.yml
  3. 27 4
      build-docker-images.sh
  4. 3 3
      build-npm-packages.sh
  5. 32 0
      cli/codegen.yml
  6. 1 1
      cli/examples/content/CreateVideo.json
  7. 16 4
      cli/package.json
  8. 62 0
      cli/scripts/content-test.sh
  9. 120 161
      cli/src/Api.ts
  10. 2 1
      cli/src/ExitCodes.ts
  11. 125 0
      cli/src/QueryNodeApi.ts
  12. 79 68
      cli/src/Types.ts
  13. 29 2
      cli/src/base/AccountsCommandBase.ts
  14. 160 65
      cli/src/base/ApiCommandBase.ts
  15. 5 5
      cli/src/base/ContentDirectoryCommandBase.ts
  16. 3 0
      cli/src/base/DefaultCommandBase.ts
  17. 2 0
      cli/src/base/StateAwareCommandBase.ts
  18. 175 66
      cli/src/base/UploadCommandBase.ts
  19. 11 0
      cli/src/commands/api/getQueryNodeEndpoint.ts
  20. 9 10
      cli/src/commands/api/inspect.ts
  21. 36 0
      cli/src/commands/api/setQueryNodeEndpoint.ts
  22. 3 6
      cli/src/commands/api/setUri.ts
  23. 3 12
      cli/src/commands/content/channel.ts
  24. 31 22
      cli/src/commands/content/createChannel.ts
  25. 5 5
      cli/src/commands/content/createChannelCategory.ts
  26. 51 39
      cli/src/commands/content/createVideo.ts
  27. 5 5
      cli/src/commands/content/createVideoCategory.ts
  28. 1 1
      cli/src/commands/content/curatorGroup.ts
  29. 1 1
      cli/src/commands/content/curatorGroups.ts
  30. 103 0
      cli/src/commands/content/deleteChannel.ts
  31. 81 0
      cli/src/commands/content/deleteVideo.ts
  32. 41 0
      cli/src/commands/content/removeChannelAssets.ts
  33. 1 1
      cli/src/commands/content/removeCuratorFromGroup.ts
  34. 8 6
      cli/src/commands/content/reuploadAssets.ts
  35. 65 22
      cli/src/commands/content/updateChannel.ts
  36. 5 5
      cli/src/commands/content/updateChannelCategory.ts
  37. 3 1
      cli/src/commands/content/updateChannelCensorshipStatus.ts
  38. 69 20
      cli/src/commands/content/updateVideo.ts
  39. 5 5
      cli/src/commands/content/updateVideoCategory.ts
  40. 3 1
      cli/src/commands/content/updateVideoCensorshipStatus.ts
  41. 1 1
      cli/src/commands/content/video.ts
  42. 3 5
      cli/src/commands/content/videos.ts
  43. 1 1
      cli/src/commands/council/info.ts
  44. 22 16
      cli/src/commands/working-groups/createOpening.ts
  45. 2 1
      cli/src/commands/working-groups/evictWorker.ts
  46. 2 2
      cli/src/commands/working-groups/fillOpening.ts
  47. 5 1
      cli/src/commands/working-groups/leaveRole.ts
  48. 3 2
      cli/src/commands/working-groups/updateWorkerReward.ts
  49. 120 0
      cli/src/graphql/generated/queries.ts
  50. 4515 0
      cli/src/graphql/generated/schema.ts
  51. 65 0
      cli/src/graphql/queries/storage.graphql
  52. 14 90
      cli/src/helpers/serialization.ts
  53. 0 22
      cli/src/json-schemas/Assets.schema.json
  54. 1 1
      cli/src/schemas/ContentDirectory.ts
  55. 34 0
      cli/src/schemas/json/Assets.schema.json
  56. 0 0
      cli/src/schemas/json/WorkingGroupOpening.schema.json
  57. 30 0
      cli/src/schemas/typings/Assets.schema.d.ts
  58. 0 0
      cli/src/schemas/typings/WorkingGroupOpening.schema.d.ts
  59. 2 1
      cli/tsconfig.json
  60. 26 7
      colossus.Dockerfile
  61. 1 1
      distributor-node.Dockerfile
  62. 18 18
      distributor-node/src/services/networking/distributor-node/generated/api.ts
  63. 492 494
      distributor-node/src/services/networking/query-node/generated/schema.ts
  64. 41 41
      distributor-node/src/services/networking/storage-node/generated/api.ts
  65. 13 0
      distributor-node/src/services/validation/generateTypes.ts
  66. 56 0
      distributor-node/src/services/validation/schemas/configSchema.ts
  67. 23 0
      distributor-node/src/services/validation/schemas/familyMetadataSchema.ts
  68. 24 0
      distributor-node/src/services/validation/schemas/index.ts
  69. 28 0
      distributor-node/src/services/validation/schemas/operatorMetadataSchema.ts
  70. 10 0
      distributor-node/src/services/validation/schemas/utils.ts
  71. 14 0
      docker-compose.linux-gnu-build.yml
  72. 42 0
      docker-compose.multi-storage.yml
  73. 33 42
      docker-compose.yml
  74. 37 0
      multi-storage.sh
  75. 2 2
      package.json
  76. 2 5
      query-node/build.sh
  77. 20 0
      query-node/mappings/scripts/postCodegen.ts
  78. 4 0
      runtime-modules/common/src/working_group.rs
  79. 28 14
      start.sh
  80. 1 0
      storage-node-v2/.eslintignore
  81. 1 0
      storage-node-v2/.eslintrc.js
  82. 412 194
      storage-node-v2/README.md
  83. 38 7
      storage-node-v2/package.json
  84. 200 0
      storage-node-v2/scripts/generate-test-data.ts
  85. 21 0
      storage-node-v2/scripts/init-for-cli.sh
  86. 12 0
      storage-node-v2/scripts/operatorMetadata.json
  87. 1 2
      storage-node-v2/scripts/run-all-commands.sh
  88. 106 11
      storage-node-v2/src/api-spec/openapi.yaml
  89. 40 16
      storage-node-v2/src/command-base/ApiCommandBase.ts
  90. 2 0
      storage-node-v2/src/command-base/ExitCodes.ts
  91. 4 0
      storage-node-v2/src/commands/dev/init.ts
  92. 65 0
      storage-node-v2/src/commands/dev/sync.ts
  93. 5 23
      storage-node-v2/src/commands/dev/verify-bag-id.ts
  94. 6 18
      storage-node-v2/src/commands/leader/update-bag.ts
  95. 1 1
      storage-node-v2/src/commands/leader/update-dynamic-bag-policy.ts
  96. 21 9
      storage-node-v2/src/commands/operator/set-metadata.ts
  97. 157 3
      storage-node-v2/src/commands/server.ts
  98. 41 27
      storage-node-v2/src/services/helpers/bagTypes.ts
  99. 2 2
      storage-node-v2/src/services/helpers/tokenNonceKeeper.ts
  100. 131 20
      storage-node-v2/src/services/logger.ts

+ 8 - 3
.env

@@ -14,9 +14,6 @@ DB_PORT=5432
 DEBUG=index-builder:*
 TYPEORM_LOGGING=error
 
-DEBUG=index-builder:*
-TYPEORM_LOGGING=error
-
 ###########################
 #    Indexer options      #
 ###########################
@@ -40,3 +37,11 @@ WS_PROVIDER_ENDPOINT_URI=ws://joystream-node:9944/
 
 # If running joystream-node on host machine you can use following address to reach it instead
 # WS_PROVIDER_ENDPOINT_URI=ws://host.docker.internal:9944/
+
+######################
+#    Storage Node    #
+######################
+COLOSSUS_PORT=3333
+QUERY_NODE_HOST=${GRAPHQL_SERVER_HOST}:${GRAPHQL_SERVER_PORT}
+WORKER_ID=0
+ACCOUNT_URI=//Alice

+ 32 - 32
.github/workflows/joystream-cli.yml

@@ -9,22 +9,22 @@ jobs:
       matrix:
         node-version: [14.x]
     steps:
-    - uses: actions/checkout@v1
-    - name: Use Node.js ${{ matrix.node-version }}
-      uses: actions/setup-node@v1
-      with:
-        node-version: ${{ matrix.node-version }}
-    - name: checks
-      run: |
-        yarn install --frozen-lockfile
-        yarn workspace @joystream/types build
-        yarn workspace @joystream/metadata-protobuf build
-        yarn workspace @joystream/cli checks --quiet
-    - name: yarn pack test
-      run: |
-        yarn workspace @joystream/cli pack --filename cli-pack-test.tgz
-        tar zxvf ./cli/cli-pack-test.tgz -C cli
-        cd ./cli/package && yarn link
+      - uses: actions/checkout@v1
+      - name: Use Node.js ${{ matrix.node-version }}
+        uses: actions/setup-node@v1
+        with:
+          node-version: ${{ matrix.node-version }}
+      - name: checks
+        run: |
+          yarn install --frozen-lockfile
+          yarn workspace @joystream/types build
+          yarn workspace @joystream/metadata-protobuf build
+          yarn workspace @joystream/cli checks --quiet
+      - name: yarn pack test
+        run: |
+          yarn workspace @joystream/cli pack --filename cli-pack-test.tgz
+          tar zxvf ./cli/cli-pack-test.tgz -C cli
+          cd ./cli/package && yarn link
 
   cli_build_osx:
     name: MacOS Checks
@@ -33,19 +33,19 @@ jobs:
       matrix:
         node-version: [14.x]
     steps:
-    - uses: actions/checkout@v1
-    - name: Use Node.js ${{ matrix.node-version }}
-      uses: actions/setup-node@v1
-      with:
-        node-version: ${{ matrix.node-version }}
-    - name: checks
-      run: |
-        yarn install --frozen-lockfile --network-timeout 120000
-        yarn workspace @joystream/types build
-        yarn workspace @joystream/metadata-protobuf build
-        yarn workspace @joystream/cli checks --quiet
-    - name: yarn pack test
-      run: |
-        yarn workspace @joystream/cli pack --filename cli-pack-test.tgz
-        tar zxvf ./cli/cli-pack-test.tgz -C cli
-        cd ./cli/package && yarn link
+      - uses: actions/checkout@v1
+      - name: Use Node.js ${{ matrix.node-version }}
+        uses: actions/setup-node@v1
+        with:
+          node-version: ${{ matrix.node-version }}
+      - name: checks
+        run: |
+          yarn install --frozen-lockfile --network-timeout 120000
+          yarn workspace @joystream/types build
+          yarn workspace @joystream/metadata-protobuf build
+          yarn workspace @joystream/cli checks --quiet
+      - name: yarn pack test
+        run: |
+          yarn workspace @joystream/cli pack --filename cli-pack-test.tgz
+          tar zxvf ./cli/cli-pack-test.tgz -C cli
+          cd ./cli/package && yarn link

+ 27 - 4
build-docker-images.sh

@@ -28,10 +28,33 @@ else
   fi
 fi
 
-# Build joystream/apps docker image
-echo "Building 'joystream/apps' docker image..."
+# Build colossus docker image
+echo "Building colossus docker image..."
 docker-compose build colossus
 
+# Build distributor docker image
+echo "Building distributor docker image..."
+docker-compose build distributor-node
+
+if [[ "$OSTYPE" == "linux-gnu" ]]; then
+    IP_ADDRESS=$(ip addr show | grep "\binet\b.*\bdocker0\b" | awk '{print $2}' | cut -d '/' -f 1)
+    # Run a local development chain
+    docker-compose -f docker-compose.linux-gnu-build.yml up -d joystream-node
+
+    # Build processor/graphql-server docker image
+    echo "Building joystream/apps docker image..."
+    WS_PROVIDER_ENDPOINT_URI=ws://${IP_ADDRESS}:9944/ docker-compose build graphql-server
+elif [[ "$OSTYPE" == "darwin"* ]]; then
+    # Run a local development chain
+    docker-compose up -d joystream-node
+
+    # Build processor/graphql-server docker image
+    echo "Building joystream/apps docker image..."
+    WS_PROVIDER_ENDPOINT_URI=ws://host.docker.internal:9944/ docker-compose build graphql-server
+fi
+
+docker-compose down
+
 # Build the pioneer docker image
-echo "Building pioneer docker image"
-docker-compose build pioneer
+# echo "Building pioneer docker image"
+# docker-compose build pioneer

+ 3 - 3
build-npm-packages.sh

@@ -6,7 +6,7 @@ yarn
 yarn workspace @joystream/types build
 yarn workspace @joystream/metadata-protobuf build
 yarn workspace query-node-root build
-yarn workspace @joystream/cli build
-yarn workspace storage-node build
+# yarn workspace @joystream/cli build
 yarn workspace storage-node-v2 build
-yarn workspace pioneer build
+yarn workspace @joystream/distributor-cli build
+# yarn workspace pioneer build

+ 32 - 0
cli/codegen.yml

@@ -0,0 +1,32 @@
+overwrite: true
+
+schema: '../query-node/generated/graphql-server/generated/schema.graphql'
+
+documents:
+  - './src/graphql/queries/*.graphql'
+
+config:
+  scalars:
+    Date: Date
+  preResolveTypes: true # avoid using Pick
+  skipTypename: true # skip __typename field in typings unless it's part of the query
+
+generates:
+  src/graphql/generated/schema.ts:
+    hooks:
+      afterOneFileWrite:
+        - prettier --write
+        - eslint --fix
+    plugins:
+      - typescript
+  src/graphql/generated/queries.ts:
+    preset: import-types
+    presetConfig:
+      typesPath: ./schema
+    hooks:
+      afterOneFileWrite:
+        - prettier --write
+        - eslint --fix
+    plugins:
+      - typescript-operations
+      - typescript-document-nodes

+ 1 - 1
cli/examples/content/CreateVideo.json

@@ -7,7 +7,7 @@
   "hasMarketing": false,
   "isPublic": true,
   "isExplicit": false,
-  "personsList": [],
+  "persons": [],
   "category": 1,
   "license": {
     "code": 1001,

+ 16 - 4
cli/package.json

@@ -11,7 +11,7 @@
     "@apidevtools/json-schema-ref-parser": "^9.0.6",
     "@ffprobe-installer/ffprobe": "^1.1.0",
     "@joystream/metadata-protobuf": "^1.0.0",
-    "@joystream/types": "^0.16.1",
+    "@joystream/types": "^0.17.0",
     "@oclif/command": "^1.5.19",
     "@oclif/config": "^1.14.0",
     "@oclif/plugin-autocomplete": "^0.2.0",
@@ -43,7 +43,12 @@
     "moment": "^2.24.0",
     "proper-lockfile": "^4.1.1",
     "slug": "^2.1.1",
-    "tslib": "^1.11.1"
+    "tslib": "^1.11.1",
+    "blake3": "^2.1.4",
+    "multihashes": "^4.0.3",
+    "@apollo/client": "^3.2.5",
+    "cross-fetch": "^3.0.6",
+    "form-data": "^4.0.0"
   },
   "devDependencies": {
     "@oclif/dev-cli": "^1.22.2",
@@ -61,7 +66,12 @@
     "mocha": "^5.2.0",
     "nyc": "^14.1.1",
     "ts-node": "^10.2.1",
-    "typescript": "^4.4.3"
+    "typescript": "^4.4.3",
+    "@graphql-codegen/cli": "^1.21.4",
+    "@graphql-codegen/typescript": "^1.22.0",
+    "@graphql-codegen/import-types-preset": "^1.18.1",
+    "@graphql-codegen/typescript-operations": "^1.17.16",
+    "@graphql-codegen/typescript-document-nodes": "^1.17.11"
   },
   "engines": {
     "node": ">=14.0.0",
@@ -126,7 +136,9 @@
     "lint": "eslint ./src --ext .ts",
     "checks": "tsc --noEmit --pretty && prettier ./ --check && yarn lint",
     "format": "prettier ./ --write",
-    "generate:schema-typings": "rm -rf ./src/json-schemas/typings && json2ts -i ./src/json-schemas/ -o ./src/json-schemas/typings/"
+    "generate:schema-typings": "rm -rf ./src/schemas/typings && json2ts -i ./src/schemas/json/ -o ./src/schemas/typings/ && prettier ./src/schemas/typings/ --write",
+    "generate:graphql-typings": "graphql-codegen",
+    "generate:all": "yarn generate:schema-typings && yarn generate:graphql-typings"
   },
   "types": "lib/index.d.ts",
   "volta": {

+ 62 - 0
cli/scripts/content-test.sh

@@ -0,0 +1,62 @@
+#!/usr/bin/env bash
+set -e
+
+SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")"
+cd $SCRIPT_PATH
+
+echo "{}" > ~/tmp/empty.json
+
+export AUTO_CONFIRM=true
+
+# Init content lead
+yarn workspace api-scripts initialize-content-lead
+# Test create/update/remove category
+yarn joystream-cli content:createVideoCategory -i ./examples/content/CreateCategory.json
+yarn joystream-cli content:createVideoCategory -i ./examples/content/CreateCategory.json
+yarn joystream-cli content:createVideoCategory -i ./examples/content/CreateCategory.json
+yarn joystream-cli content:createChannelCategory -i ./examples/content/CreateCategory.json
+yarn joystream-cli content:createChannelCategory -i ./examples/content/CreateCategory.json
+yarn joystream-cli content:createChannelCategory -i ./examples/content/CreateCategory.json
+yarn joystream-cli content:updateVideoCategory -i ./examples/content/UpdateCategory.json 2
+yarn joystream-cli content:updateChannelCategory -i ./examples/content/UpdateCategory.json 2
+yarn joystream-cli content:deleteChannelCategory 3
+yarn joystream-cli content:deleteVideoCategory 3
+# Group 1 - a valid group
+yarn joystream-cli content:createCuratorGroup
+yarn joystream-cli content:setCuratorGroupStatus 1 1
+yarn joystream-cli content:addCuratorToGroup 1 0
+# Group 2 - test removeCuratorFromGroup
+yarn joystream-cli content:createCuratorGroup
+yarn joystream-cli content:addCuratorToGroup 2 0
+yarn joystream-cli content:removeCuratorFromGroup 2 0
+# Create/update channel
+yarn joystream-cli content:createChannel -i ./examples/content/CreateChannel.json --context Member || true
+yarn joystream-cli content:createChannel -i ./examples/content/CreateChannel.json --context Curator || true
+yarn joystream-cli content:createChannel -i ~/tmp/empty.json --context Member || true
+yarn joystream-cli content:updateChannel -i ./examples/content/UpdateChannel.json 1 || true
+# Create/update video
+yarn joystream-cli content:createVideo -i ./examples/content/CreateVideo.json -c 1 || true
+yarn joystream-cli content:createVideo -i ./examples/content/CreateVideo.json -c 2 || true
+yarn joystream-cli content:createVideo -i ~/tmp/empty.json -c 2 || true
+yarn joystream-cli content:updateVideo -i ./examples/content/UpdateVideo.json 1 || true
+# Set featured videos
+yarn joystream-cli content:setFeaturedVideos 1,2
+yarn joystream-cli content:setFeaturedVideos 2,3
+# Update channel censorship status
+yarn joystream-cli content:updateChannelCensorshipStatus 1 1 --rationale "Test"
+yarn joystream-cli content:updateVideoCensorshipStatus 1 1 --rationale "Test"
+# Display-only commands
+yarn joystream-cli content:videos
+yarn joystream-cli content:video 1
+yarn joystream-cli content:channels
+yarn joystream-cli content:channel 1
+yarn joystream-cli content:curatorGroups
+yarn joystream-cli content:curatorGroup 1
+# Remove videos/channels/assets
+yarn joystream-cli content:removeChannelAssets -c 1 -o 0
+yarn joystream-cli content:deleteVideo -v 1 -f
+yarn joystream-cli content:deleteVideo -v 2 -f
+yarn joystream-cli content:deleteVideo -v 3 -f
+yarn joystream-cli content:deleteChannel -c 1 -f
+yarn joystream-cli content:deleteChannel -c 2 -f
+yarn joystream-cli content:deleteChannel -c 3 -f

+ 120 - 161
cli/src/Api.ts

@@ -1,17 +1,14 @@
 import BN from 'bn.js'
 import { types } from '@joystream/types/'
 import { ApiPromise, WsProvider } from '@polkadot/api'
-import { QueryableStorageMultiArg, SubmittableExtrinsic, QueryableStorageEntry } from '@polkadot/api/types'
+import { AugmentedQuery, SubmittableExtrinsic } from '@polkadot/api/types'
 import { formatBalance } from '@polkadot/util'
-import { Balance, Moment, BlockNumber } from '@polkadot/types/interfaces'
+import { Balance, BlockNumber } from '@polkadot/types/interfaces'
 import { KeyringPair } from '@polkadot/keyring/types'
-import { Codec, CodecArg } from '@polkadot/types/types'
-import { Option, Vec, UInt, Bytes } from '@polkadot/types'
+import { Codec } from '@polkadot/types/types'
+import { Option, UInt } from '@polkadot/types'
 import {
   AccountSummary,
-  CouncilInfoObj,
-  CouncilInfoTuple,
-  createCouncilInfoObj,
   WorkingGroups,
   Reward,
   GroupMember,
@@ -22,61 +19,50 @@ import {
   openingPolicyUnstakingPeriodsKeys,
   UnstakingPeriods,
   StakingPolicyUnstakingPeriodKey,
+  UnaugmentedApiPromise,
+  CouncilInfo,
 } from './Types'
 import { DeriveBalancesAll } from '@polkadot/api-derive/types'
 import { CLIError } from '@oclif/errors'
-import ExitCodes from './ExitCodes'
-import {
-  Worker,
-  WorkerId,
-  RoleStakeProfile,
-  Opening as WGOpening,
-  Application as WGApplication,
-  StorageProviderId,
-} from '@joystream/types/working-group'
-import {
-  Opening,
-  Application,
-  OpeningStage,
-  ApplicationStageKeys,
-  ApplicationId,
-  OpeningId,
-  StakingPolicy,
-} from '@joystream/types/hiring'
+import { Worker, WorkerId, RoleStakeProfile, Application as WGApplication } from '@joystream/types/working-group'
+import { Opening, Application, OpeningStage, ApplicationId, OpeningId, StakingPolicy } from '@joystream/types/hiring'
 import { MemberId, Membership } from '@joystream/types/members'
-import { RewardRelationship, RewardRelationshipId } from '@joystream/types/recurring-rewards'
-import { Stake, StakeId } from '@joystream/types/stake'
+import { RewardRelationshipId } from '@joystream/types/recurring-rewards'
+import { StakeId } from '@joystream/types/stake'
 
-import { InputValidationLengthConstraint, ChannelId, Url } from '@joystream/types/common'
+import { InputValidationLengthConstraint, ChannelId } from '@joystream/types/common'
 import {
   CuratorGroup,
   CuratorGroupId,
   Channel,
   Video,
   VideoId,
-  ChannelCategory,
-  VideoCategory,
   ChannelCategoryId,
   VideoCategoryId,
 } from '@joystream/types/content'
-import { ContentId, DataObject } from '@joystream/types/storage'
-import _ from 'lodash'
+import { Observable } from 'rxjs'
+import { BagId, DataObject, DataObjectId } from '@joystream/types/storage'
 
 export const DEFAULT_API_URI = 'ws://localhost:9944/'
 
 // Mapping of working group to api module
-export const apiModuleByGroup: { [key in WorkingGroups]: string } = {
+export const apiModuleByGroup = {
   [WorkingGroups.StorageProviders]: 'storageWorkingGroup',
-  [WorkingGroups.Curators]: 'contentDirectoryWorkingGroup',
-  [WorkingGroups.Operations]: 'operationsWorkingGroup',
+  [WorkingGroups.Curators]: 'contentWorkingGroup',
+  [WorkingGroups.OperationsAlpha]: 'operationsWorkingGroupAlpha',
+  [WorkingGroups.OperationsBeta]: 'operationsWorkingGroupBeta',
+  [WorkingGroups.OperationsGamma]: 'operationsWorkingGroupGamma',
   [WorkingGroups.Gateway]: 'gatewayWorkingGroup',
-}
+  [WorkingGroups.Distribution]: 'distributionWorkingGroup',
+} as const
 
 // Api wrapper for handling most common api calls and allowing easy API implementation switch in the future
 export default class Api {
   private _api: ApiPromise
+  public isDevelopment = false
 
-  private constructor(originalApi: ApiPromise) {
+  private constructor(originalApi: ApiPromise, isDevelopment: boolean) {
+    this.isDevelopment = isDevelopment
     this._api = originalApi
   }
 
@@ -84,16 +70,18 @@ export default class Api {
     return this._api
   }
 
-  private static async initApi(
-    apiUri: string = DEFAULT_API_URI,
-    metadataCache: Record<string, any>
-  ): Promise<ApiPromise> {
+  // Get api for use-cases where no type augmentations are desirable
+  public getUnaugmentedApi(): UnaugmentedApiPromise {
+    return (this._api as unknown) as UnaugmentedApiPromise
+  }
+
+  private static async initApi(apiUri: string = DEFAULT_API_URI, metadataCache: Record<string, any>) {
     const wsProvider: WsProvider = new WsProvider(apiUri)
     const api = new ApiPromise({ provider: wsProvider, types, metadata: metadataCache })
     await api.isReadyOrError
 
     // Initializing some api params based on pioneer/packages/react-api/Api.tsx
-    const [properties] = await Promise.all([api.rpc.system.properties()])
+    const [properties, chainType] = await Promise.all([api.rpc.system.properties(), api.rpc.system.chainType()])
 
     const tokenSymbol = properties.tokenSymbol.unwrap()[0].toString()
     const tokenDecimals = properties.tokenDecimals.unwrap()[0].toNumber()
@@ -104,29 +92,12 @@ export default class Api {
       unit: tokenSymbol,
     })
 
-    return api
-  }
-
-  static async create(apiUri: string = DEFAULT_API_URI, metadataCache: Record<string, any>): Promise<Api> {
-    const originalApi: ApiPromise = await Api.initApi(apiUri, metadataCache)
-    return new Api(originalApi)
+    return { api, properties, chainType }
   }
 
-  private queryMultiOnce(queries: Parameters<typeof ApiPromise.prototype.queryMulti>[0]): Promise<Codec[]> {
-    return new Promise((resolve, reject) => {
-      let unsub: () => void
-      this._api
-        .queryMulti(queries, (res) => {
-          // unsub should already be set at this point
-          if (!unsub) {
-            reject(new CLIError('API queryMulti issue - unsub method not set!', { exit: ExitCodes.ApiError }))
-          }
-          unsub()
-          resolve(res)
-        })
-        .then((unsubscribe) => (unsub = unsubscribe))
-        .catch((e) => reject(e))
-    })
+  static async create(apiUri = DEFAULT_API_URI, metadataCache: Record<string, any>): Promise<Api> {
+    const { api, chainType } = await Api.initApi(apiUri, metadataCache)
+    return new Api(api, chainType.isDevelopment || chainType.isLocal)
   }
 
   async bestNumber(): Promise<number> {
@@ -150,25 +121,51 @@ export default class Api {
     return { balances }
   }
 
-  async getCouncilInfo(): Promise<CouncilInfoObj> {
-    const queries: { [P in keyof CouncilInfoObj]: QueryableStorageMultiArg<'promise'> } = {
-      activeCouncil: this._api.query.council.activeCouncil,
-      termEndsAt: this._api.query.council.termEndsAt,
-      autoStart: this._api.query.councilElection.autoStart,
-      newTermDuration: this._api.query.councilElection.newTermDuration,
-      candidacyLimit: this._api.query.councilElection.candidacyLimit,
-      councilSize: this._api.query.councilElection.councilSize,
-      minCouncilStake: this._api.query.councilElection.minCouncilStake,
-      minVotingStake: this._api.query.councilElection.minVotingStake,
-      announcingPeriod: this._api.query.councilElection.announcingPeriod,
-      votingPeriod: this._api.query.councilElection.votingPeriod,
-      revealingPeriod: this._api.query.councilElection.revealingPeriod,
-      round: this._api.query.councilElection.round,
-      stage: this._api.query.councilElection.stage,
+  async getCouncilInfo(): Promise<CouncilInfo> {
+    const [
+      activeCouncil,
+      termEndsAt,
+      autoStart,
+      newTermDuration,
+      candidacyLimit,
+      councilSize,
+      minCouncilStake,
+      minVotingStake,
+      announcingPeriod,
+      votingPeriod,
+    ] = await Promise.all([
+      this._api.query.council.activeCouncil(),
+      this._api.query.council.termEndsAt(),
+      this._api.query.councilElection.autoStart(),
+      this._api.query.councilElection.newTermDuration(),
+      this._api.query.councilElection.candidacyLimit(),
+      this._api.query.councilElection.councilSize(),
+      this._api.query.councilElection.minCouncilStake(),
+      this._api.query.councilElection.minVotingStake(),
+      this._api.query.councilElection.announcingPeriod(),
+      this._api.query.councilElection.votingPeriod(),
+    ])
+    // Promise.all only allows 10 types, so we need to split the queries
+    const [revealingPeriod, round, stage] = await Promise.all([
+      this._api.query.councilElection.revealingPeriod(),
+      this._api.query.councilElection.round(),
+      this._api.query.councilElection.stage(),
+    ])
+    return {
+      activeCouncil,
+      termEndsAt,
+      autoStart,
+      newTermDuration,
+      candidacyLimit,
+      councilSize,
+      minCouncilStake,
+      minVotingStake,
+      announcingPeriod,
+      votingPeriod,
+      revealingPeriod,
+      round,
+      stage,
     }
-    const results: CouncilInfoTuple = (await this.queryMultiOnce(Object.values(queries))) as CouncilInfoTuple
-
-    return createCouncilInfoObj(...results)
   }
 
   async estimateFee(account: KeyringPair, tx: SubmittableExtrinsic<'promise'>): Promise<Balance> {
@@ -184,12 +181,10 @@ export default class Api {
   // TODO: This is a lot of repeated logic from "/pioneer/joy-utils/transport"
   // It will be refactored to "joystream-js" soon
   async entriesByIds<IDType extends UInt, ValueType extends Codec>(
-    apiMethod: QueryableStorageEntry<'promise'>,
-    firstKey?: CodecArg // First key in case of double maps
+    apiMethod: AugmentedQuery<'promise', (key: IDType) => Observable<ValueType>, [IDType]>
   ): Promise<[IDType, ValueType][]> {
-    const entries: [IDType, ValueType][] = (await apiMethod.entries<ValueType>(firstKey)).map(([storageKey, value]) => [
-      // If double-map (first key is provided), we map entries by second key
-      storageKey.args[firstKey !== undefined ? 1 : 0] as IDType,
+    const entries: [IDType, ValueType][] = (await apiMethod.entries()).map(([storageKey, value]) => [
+      storageKey.args[0] as IDType,
       value,
     ])
 
@@ -203,7 +198,7 @@ export default class Api {
   }
 
   protected async blockTimestamp(height: number): Promise<Date> {
-    const blockTime = (await this._api.query.timestamp.now.at(await this.blockHash(height))) as Moment
+    const blockTime = await this._api.query.timestamp.now.at(await this.blockHash(height))
 
     return new Date(blockTime.toNumber())
   }
@@ -214,14 +209,14 @@ export default class Api {
   }
 
   protected async membershipById(memberId: MemberId): Promise<Membership | null> {
-    const profile = (await this._api.query.members.membershipById(memberId)) as Membership
+    const profile = await this._api.query.members.membershipById(memberId)
 
     // Can't just use profile.isEmpty because profile.suspended is Bool (which isEmpty method always returns false)
     return profile.handle.isEmpty ? null : profile
   }
 
   async groupLead(group: WorkingGroups): Promise<GroupMember | null> {
-    const optLeadId = (await this.workingGroupApiQuery(group).currentLead()) as Option<WorkerId>
+    const optLeadId = await this.workingGroupApiQuery(group).currentLead()
 
     if (!optLeadId.isSome) {
       return null
@@ -234,7 +229,7 @@ export default class Api {
   }
 
   protected async stakeValue(stakeId: StakeId): Promise<Balance> {
-    const stake = await this._api.query.stake.stakes<Stake>(stakeId)
+    const stake = await this._api.query.stake.stakes(stakeId)
     return stake.value
   }
 
@@ -243,9 +238,7 @@ export default class Api {
   }
 
   protected async workerReward(relationshipId: RewardRelationshipId): Promise<Reward> {
-    const rewardRelationship = await this._api.query.recurringRewards.rewardRelationships<RewardRelationship>(
-      relationshipId
-    )
+    const rewardRelationship = await this._api.query.recurringRewards.rewardRelationships(relationshipId)
 
     return {
       totalRecieved: rewardRelationship.total_reward_received,
@@ -286,14 +279,14 @@ export default class Api {
   }
 
   async workerByWorkerId(group: WorkingGroups, workerId: number): Promise<Worker> {
-    const nextId = await this.workingGroupApiQuery(group).nextWorkerId<WorkerId>()
+    const nextId = await this.workingGroupApiQuery(group).nextWorkerId()
 
     // This is chain specfic, but if next id is still 0, it means no workers have been added yet
     if (workerId < 0 || workerId >= nextId.toNumber()) {
       throw new CLIError('Invalid worker id!')
     }
 
-    const worker = await this.workingGroupApiQuery(group).workerById<Worker>(workerId)
+    const worker = await this.workingGroupApiQuery(group).workerById(workerId)
 
     if (worker.isEmpty) {
       throw new CLIError('This worker is not active anymore')
@@ -302,7 +295,7 @@ export default class Api {
     return worker
   }
 
-  async groupMember(group: WorkingGroups, workerId: number) {
+  async groupMember(group: WorkingGroups, workerId: number): Promise<GroupMember> {
     const worker = await this.workerByWorkerId(group, workerId)
     return await this.parseGroupMember(this._api.createType('WorkerId', workerId), worker)
   }
@@ -318,12 +311,12 @@ export default class Api {
   }
 
   groupWorkers(group: WorkingGroups): Promise<[WorkerId, Worker][]> {
-    return this.entriesByIds<WorkerId, Worker>(this.workingGroupApiQuery(group).workerById)
+    return this.entriesByIds(this.workingGroupApiQuery(group).workerById)
   }
 
   async openingsByGroup(group: WorkingGroups): Promise<GroupOpening[]> {
     let openings: GroupOpening[] = []
-    const nextId = await this.workingGroupApiQuery(group).nextOpeningId<OpeningId>()
+    const nextId = await this.workingGroupApiQuery(group).nextOpeningId()
 
     // This is chain specfic, but if next id is still 0, it means no openings have been added yet
     if (!nextId.eq(0)) {
@@ -335,23 +328,23 @@ export default class Api {
   }
 
   protected async hiringOpeningById(id: number | OpeningId): Promise<Opening> {
-    const result = await this._api.query.hiring.openingById<Opening>(id)
+    const result = await this._api.query.hiring.openingById(id)
     return result
   }
 
   protected async hiringApplicationById(id: number | ApplicationId): Promise<Application> {
-    const result = await this._api.query.hiring.applicationById<Application>(id)
+    const result = await this._api.query.hiring.applicationById(id)
     return result
   }
 
   async wgApplicationById(group: WorkingGroups, wgApplicationId: number): Promise<WGApplication> {
-    const nextAppId = await this.workingGroupApiQuery(group).nextApplicationId<ApplicationId>()
+    const nextAppId = await this.workingGroupApiQuery(group).nextApplicationId()
 
     if (wgApplicationId < 0 || wgApplicationId >= nextAppId.toNumber()) {
       throw new CLIError('Invalid working group application ID!')
     }
 
-    const result = await this.workingGroupApiQuery(group).applicationById<WGApplication>(wgApplicationId)
+    const result = await this.workingGroupApiQuery(group).applicationById(wgApplicationId)
     return result
   }
 
@@ -372,7 +365,7 @@ export default class Api {
         role: roleStakingId.isSome ? (await this.stakeValue(roleStakingId.unwrap())).toNumber() : 0,
       },
       humanReadableText: application.human_readable_text.toString(),
-      stage: application.stage.type as ApplicationStageKeys,
+      stage: application.stage.type,
     }
   }
 
@@ -382,9 +375,7 @@ export default class Api {
   }
 
   protected async groupOpeningApplications(group: WorkingGroups, wgOpeningId: number): Promise<GroupApplication[]> {
-    const wgApplicationEntries = await this.entriesByIds<ApplicationId, WGApplication>(
-      this.workingGroupApiQuery(group).applicationById
-    )
+    const wgApplicationEntries = await this.entriesByIds(this.workingGroupApiQuery(group).applicationById)
 
     return Promise.all(
       wgApplicationEntries
@@ -394,13 +385,13 @@ export default class Api {
   }
 
   async groupOpening(group: WorkingGroups, wgOpeningId: number): Promise<GroupOpening> {
-    const nextId = ((await this.workingGroupApiQuery(group).nextOpeningId()) as OpeningId).toNumber()
+    const nextId = (await this.workingGroupApiQuery(group).nextOpeningId()).toNumber()
 
     if (wgOpeningId < 0 || wgOpeningId >= nextId) {
       throw new CLIError('Invalid working group opening ID!')
     }
 
-    const groupOpening = await this.workingGroupApiQuery(group).openingById<WGOpening>(wgOpeningId)
+    const groupOpening = await this.workingGroupApiQuery(group).openingById(wgOpeningId)
 
     const openingId = groupOpening.hiring_opening_id.toNumber()
     const opening = await this.hiringOpeningById(openingId)
@@ -458,7 +449,7 @@ export default class Api {
     if (stage.isOfType('WaitingToBegin')) {
       const stageData = stage.asType('WaitingToBegin')
       const currentBlockNumber = (await this._api.derive.chain.bestNumber()).toNumber()
-      const expectedBlockTime = (this._api.consts.babe.expectedBlockTime as Moment).toNumber()
+      const expectedBlockTime = this._api.consts.babe.expectedBlockTime.toNumber()
       status = OpeningStatus.WaitingToBegin
       stageBlock = stageData.begins_at_block.toNumber()
       stageDate = new Date(Date.now() + (stageBlock - currentBlockNumber) * expectedBlockTime)
@@ -494,34 +485,34 @@ export default class Api {
   }
 
   async getMemberIdsByControllerAccount(address: string): Promise<MemberId[]> {
-    const ids = await this._api.query.members.memberIdsByControllerAccountId<Vec<MemberId>>(address)
+    const ids = await this._api.query.members.memberIdsByControllerAccountId(address)
     return ids.toArray()
   }
 
   async workerExitRationaleConstraint(group: WorkingGroups): Promise<InputValidationLengthConstraint> {
-    return await this.workingGroupApiQuery(group).workerExitRationaleText<InputValidationLengthConstraint>()
+    return await this.workingGroupApiQuery(group).workerExitRationaleText()
   }
 
   // Content directory
   async availableChannels(): Promise<[ChannelId, Channel][]> {
-    return await this.entriesByIds<ChannelId, Channel>(this._api.query.content.channelById)
+    return await this.entriesByIds(this._api.query.content.channelById)
   }
 
   async availableVideos(): Promise<[VideoId, Video][]> {
-    return await this.entriesByIds<VideoId, Video>(this._api.query.content.videoById)
+    return await this.entriesByIds(this._api.query.content.videoById)
   }
 
   availableCuratorGroups(): Promise<[CuratorGroupId, CuratorGroup][]> {
-    return this.entriesByIds<CuratorGroupId, CuratorGroup>(this._api.query.content.curatorGroupById)
+    return this.entriesByIds(this._api.query.content.curatorGroupById)
   }
 
   async curatorGroupById(id: number): Promise<CuratorGroup | null> {
     const exists = !!(await this._api.query.content.curatorGroupById.size(id)).toNumber()
-    return exists ? await this._api.query.content.curatorGroupById<CuratorGroup>(id) : null
+    return exists ? await this._api.query.content.curatorGroupById(id) : null
   }
 
   async nextCuratorGroupId(): Promise<number> {
-    return (await this._api.query.content.nextCuratorGroupId<CuratorGroupId>()).toNumber()
+    return (await this._api.query.content.nextCuratorGroupId()).toNumber()
   }
 
   async channelById(channelId: ChannelId | number | string): Promise<Channel> {
@@ -530,26 +521,13 @@ export default class Api {
     if (!exists) {
       throw new CLIError(`Channel by id ${channelId.toString()} not found!`)
     }
-    const channel = await this._api.query.content.channelById<Channel>(channelId)
+    const channel = await this._api.query.content.channelById(channelId)
 
     return channel
   }
 
-  async videosByChannelId(channelId: ChannelId | number | string): Promise<[VideoId, Video][]> {
-    const channel = await this.channelById(channelId)
-    if (channel) {
-      return Promise.all(
-        channel.videos.map(
-          async (videoId) => [videoId, await this._api.query.content.videoById<Video>(videoId)] as [VideoId, Video]
-        )
-      )
-    } else {
-      return []
-    }
-  }
-
   async videoById(videoId: VideoId | number | string): Promise<Video> {
-    const video = await this._api.query.content.videoById<Video>(videoId)
+    const video = await this._api.query.content.videoById(videoId)
     if (video.isEmpty) {
       throw new CLIError(`Video by id ${videoId.toString()} not found!`)
     }
@@ -557,45 +535,26 @@ export default class Api {
     return video
   }
 
+  async dataObjectsByIds(bagId: BagId, ids: DataObjectId[]): Promise<DataObject[]> {
+    return this._api.query.storage.dataObjectsById.multi(ids.map((id) => [bagId, id]))
+  }
+
   async channelCategoryIds(): Promise<ChannelCategoryId[]> {
     // There is currently no way to differentiate between non-existing and existing category
     // other than fetching all existing category ids (even the .size() trick does not work, as the object is empty)
-    return (
-      await this.entriesByIds<ChannelCategoryId, ChannelCategory>(this._api.query.content.channelCategoryById)
-    ).map(([id]) => id)
+    return (await this.entriesByIds(this._api.query.content.channelCategoryById)).map(([id]) => id)
   }
 
   async videoCategoryIds(): Promise<VideoCategoryId[]> {
     // There is currently no way to differentiate between non-existing and existing category
     // other than fetching all existing category ids (even the .size() trick does not work, as the object is empty)
-    return (await this.entriesByIds<VideoCategoryId, VideoCategory>(this._api.query.content.videoCategoryById)).map(
-      ([id]) => id
-    )
+    return (await this.entriesByIds(this._api.query.content.videoCategoryById)).map(([id]) => id)
   }
 
-  async dataObjectsByContentIds(contentIds: ContentId[]): Promise<DataObject[]> {
-    const dataObjects = await this._api.query.dataDirectory.dataByContentId.multi<DataObject>(contentIds)
-    const notFoundIndex = dataObjects.findIndex((o) => o.isEmpty)
-    if (notFoundIndex !== -1) {
-      throw new CLIError(`DataObject not found by id ${contentIds[notFoundIndex].toString()}`)
-    }
-    return dataObjects
-  }
-
-  async getRandomBootstrapEndpoint(): Promise<string | null> {
-    const endpoints = await this._api.query.discovery.bootstrapEndpoints<Vec<Url>>()
-    const randomEndpoint = _.sample(endpoints.toArray())
-    return randomEndpoint ? randomEndpoint.toString() : null
-  }
-
-  async storageProviderEndpoint(storageProviderId: StorageProviderId | number): Promise<string> {
-    const value = await this._api.query.storageWorkingGroup.workerStorage<Bytes>(storageProviderId)
-    return this._api.createType('Text', value).toString()
-  }
-
-  async allStorageProviderEndpoints(): Promise<string[]> {
-    const workerIds = (await this.groupWorkers(WorkingGroups.StorageProviders)).map(([id]) => id)
-    const workerStorages = await this._api.query.storageWorkingGroup.workerStorage.multi<Bytes>(workerIds)
-    return workerStorages.map((storage) => this._api.createType('Text', storage).toString())
+  async dataObjectsInBag(bagId: BagId): Promise<[DataObjectId, DataObject][]> {
+    return (await this._api.query.storage.dataObjectsById.entries(bagId)).map(([{ args: [, dataObjectId] }, value]) => [
+      dataObjectId,
+      value,
+    ])
   }
 }

+ 2 - 1
cli/src/ExitCodes.ts

@@ -11,7 +11,8 @@ enum ExitCodes {
   UnexpectedException = 500,
   FsOperationFailed = 501,
   ApiError = 502,
-  ExternalInfrastructureError = 503,
+  StorageNodeError = 503,
   ActionCurrentlyUnavailable = 504,
+  QueryNodeError = 505,
 }
 export = ExitCodes

+ 125 - 0
cli/src/QueryNodeApi.ts

@@ -0,0 +1,125 @@
+import { StorageNodeInfo } from './Types'
+import {
+  ApolloClient,
+  InMemoryCache,
+  HttpLink,
+  NormalizedCacheObject,
+  DocumentNode,
+  from,
+  ApolloLink,
+} from '@apollo/client/core'
+import { ErrorLink, onError } from '@apollo/client/link/error'
+import { Maybe } from './graphql/generated/schema'
+import {
+  GetStorageNodesInfoByBagId,
+  GetStorageNodesInfoByBagIdQuery,
+  GetStorageNodesInfoByBagIdQueryVariables,
+  DataObjectInfoFragment,
+  GetDataObjectsByBagId,
+  GetDataObjectsByBagIdQuery,
+  GetDataObjectsByBagIdQueryVariables,
+  GetDataObjectsByVideoId,
+  GetDataObjectsByVideoIdQuery,
+  GetDataObjectsByVideoIdQueryVariables,
+  GetDataObjectsChannelId,
+  GetDataObjectsChannelIdQuery,
+  GetDataObjectsChannelIdQueryVariables,
+} from './graphql/generated/queries'
+import { URL } from 'url'
+import fetch from 'cross-fetch'
+
+export default class QueryNodeApi {
+  private _qnClient: ApolloClient<NormalizedCacheObject>
+
+  public constructor(uri?: string, errorHandler?: ErrorLink.ErrorHandler) {
+    const links: ApolloLink[] = []
+    if (errorHandler) {
+      links.push(onError(errorHandler))
+    }
+    links.push(new HttpLink({ uri, fetch }))
+    this._qnClient = new ApolloClient({
+      link: from(links),
+      cache: new InMemoryCache(),
+      defaultOptions: { query: { fetchPolicy: 'no-cache', errorPolicy: 'all' } },
+    })
+  }
+
+  // Get entity by unique input
+  protected async uniqueEntityQuery<
+    QueryT extends { [k: string]: Maybe<Record<string, unknown>> | undefined },
+    VariablesT extends Record<string, unknown>
+  >(
+    query: DocumentNode,
+    variables: VariablesT,
+    resultKey: keyof QueryT
+  ): Promise<Required<QueryT>[keyof QueryT] | null> {
+    return (await this._qnClient.query<QueryT, VariablesT>({ query, variables })).data[resultKey] || null
+  }
+
+  // Get entities by "non-unique" input and return first result
+  protected async firstEntityQuery<
+    QueryT extends { [k: string]: unknown[] },
+    VariablesT extends Record<string, unknown>
+  >(query: DocumentNode, variables: VariablesT, resultKey: keyof QueryT): Promise<QueryT[keyof QueryT][number] | null> {
+    return (await this._qnClient.query<QueryT, VariablesT>({ query, variables })).data[resultKey][0] || null
+  }
+
+  // Get multiple entities
+  protected async multipleEntitiesQuery<
+    QueryT extends { [k: string]: unknown[] },
+    VariablesT extends Record<string, unknown>
+  >(query: DocumentNode, variables: VariablesT, resultKey: keyof QueryT): Promise<QueryT[keyof QueryT]> {
+    return (await this._qnClient.query<QueryT, VariablesT>({ query, variables })).data[resultKey]
+  }
+
+  async dataObjectsByBagId(bagId: string): Promise<DataObjectInfoFragment[]> {
+    return this.multipleEntitiesQuery<GetDataObjectsByBagIdQuery, GetDataObjectsByBagIdQueryVariables>(
+      GetDataObjectsByBagId,
+      { bagId },
+      'storageDataObjects'
+    )
+  }
+
+  async dataObjectsByVideoId(videoId: string): Promise<DataObjectInfoFragment[]> {
+    return this.multipleEntitiesQuery<GetDataObjectsByVideoIdQuery, GetDataObjectsByVideoIdQueryVariables>(
+      GetDataObjectsByVideoId,
+      { videoId },
+      'storageDataObjects'
+    )
+  }
+
+  async dataObjectsByChannelId(channelId: string): Promise<DataObjectInfoFragment[]> {
+    return this.multipleEntitiesQuery<GetDataObjectsChannelIdQuery, GetDataObjectsChannelIdQueryVariables>(
+      GetDataObjectsChannelId,
+      { channelId },
+      'storageDataObjects'
+    )
+  }
+
+  async storageNodesInfoByBagId(bagId: string): Promise<StorageNodeInfo[]> {
+    const result = await this.multipleEntitiesQuery<
+      GetStorageNodesInfoByBagIdQuery,
+      GetStorageNodesInfoByBagIdQueryVariables
+    >(GetStorageNodesInfoByBagId, { bagId }, 'storageBuckets')
+
+    const validNodesInfo: StorageNodeInfo[] = []
+    for (const { operatorMetadata, id } of result) {
+      if (operatorMetadata?.nodeEndpoint) {
+        try {
+          const rootEndpoint = operatorMetadata.nodeEndpoint
+          const apiEndpoint = new URL(
+            'api/v1',
+            rootEndpoint.endsWith('/') ? rootEndpoint : rootEndpoint + '/'
+          ).toString()
+          validNodesInfo.push({
+            apiEndpoint,
+            bucketId: parseInt(id),
+          })
+        } catch (e) {
+          continue
+        }
+      }
+    }
+    return validNodesInfo
+  }
+}

+ 79 - 68
cli/src/Types.ts

@@ -1,23 +1,24 @@
 import BN from 'bn.js'
 import { ElectionStage, Seat } from '@joystream/types/council'
-import { Option } from '@polkadot/types'
+import { Option, bool } from '@polkadot/types'
 import { Codec } from '@polkadot/types/types'
 import { BlockNumber, Balance, AccountId } from '@polkadot/types/interfaces'
 import { DeriveBalancesAll } from '@polkadot/api-derive/types'
 import { KeyringPair } from '@polkadot/keyring/types'
 import { WorkerId, OpeningType } from '@joystream/types/working-group'
 import { Membership, MemberId } from '@joystream/types/members'
-import { Opening, StakingPolicy, ApplicationStageKeys } from '@joystream/types/hiring'
+import { Opening, StakingPolicy, ApplicationStage } from '@joystream/types/hiring'
 import { Validator } from 'inquirer'
-import {
-  VideoMetadata,
-  ChannelMetadata,
-  ChannelCategoryMetadata,
-  VideoCategoryMetadata,
-} from '@joystream/content-metadata-protobuf'
-import { ContentId, ContentParameters } from '@joystream/types/storage'
-
+import { ApiPromise } from '@polkadot/api'
+import { SubmittableModuleExtrinsics, QueryableModuleStorage, QueryableModuleConsts } from '@polkadot/api/types'
 import { JSONSchema7, JSONSchema7Definition } from 'json-schema'
+import {
+  IChannelCategoryMetadata,
+  IChannelMetadata,
+  IVideoCategoryMetadata,
+  IVideoMetadata,
+} from '@joystream/metadata-protobuf'
+import { DataObjectCreationParameters } from '@joystream/types/storage'
 
 // KeyringPair type extended with mandatory "meta.name"
 // It's used for accounts/keys management within CLI.
@@ -33,43 +34,21 @@ export type AccountSummary = {
   balances: DeriveBalancesAll
 }
 
-// This function allows us to easily transform the tuple into the object
-// and simplifies the creation of consitent Object and Tuple types (seen below).
-export function createCouncilInfoObj(
-  activeCouncil: Seat[],
-  termEndsAt: BlockNumber,
-  autoStart: boolean,
-  newTermDuration: BN,
-  candidacyLimit: BN,
-  councilSize: BN,
-  minCouncilStake: Balance,
-  minVotingStake: Balance,
-  announcingPeriod: BlockNumber,
-  votingPeriod: BlockNumber,
-  revealingPeriod: BlockNumber,
-  round: BN,
+export type CouncilInfo = {
+  activeCouncil: Seat[]
+  termEndsAt: BlockNumber
+  autoStart: bool
+  newTermDuration: BN
+  candidacyLimit: BN
+  councilSize: BN
+  minCouncilStake: Balance
+  minVotingStake: Balance
+  announcingPeriod: BlockNumber
+  votingPeriod: BlockNumber
+  revealingPeriod: BlockNumber
+  round: BN
   stage: Option<ElectionStage>
-) {
-  return {
-    activeCouncil,
-    termEndsAt,
-    autoStart,
-    newTermDuration,
-    candidacyLimit,
-    councilSize,
-    minCouncilStake,
-    minVotingStake,
-    announcingPeriod,
-    votingPeriod,
-    revealingPeriod,
-    round,
-    stage,
-  }
 }
-// Object/Tuple containing council/councilElection information (council:info).
-// The tuple is useful, because that's how api.queryMulti returns the results.
-export type CouncilInfoTuple = Parameters<typeof createCouncilInfoObj>
-export type CouncilInfoObj = ReturnType<typeof createCouncilInfoObj>
 
 // Object with "name" and "value" properties, used for rendering simple CLI tables like:
 // Total balance:   100 JOY
@@ -80,15 +59,22 @@ export type NameValueObj = { name: string; value: string }
 export enum WorkingGroups {
   StorageProviders = 'storageProviders',
   Curators = 'curators',
-  Operations = 'operations',
+  OperationsAlpha = 'operationsAlpha',
+  OperationsBeta = 'operationsBeta',
+  OperationsGamma = 'operationsGamma',
   Gateway = 'gateway',
+  Distribution = 'distributors',
 }
 
 // In contrast to Pioneer, currently only StorageProviders group is available in CLI
 export const AvailableGroups: readonly WorkingGroups[] = [
   WorkingGroups.StorageProviders,
   WorkingGroups.Curators,
-  WorkingGroups.Operations,
+  WorkingGroups.OperationsAlpha,
+  WorkingGroups.OperationsBeta,
+  WorkingGroups.OperationsGamma,
+  WorkingGroups.Gateway,
+  WorkingGroups.Distribution,
 ] as const
 
 export type Reward = {
@@ -119,7 +105,7 @@ export type GroupApplication = {
     role: number
   }
   humanReadableText: string
-  stage: ApplicationStageKeys
+  stage: keyof ApplicationStage['typeDefinitions']
 }
 
 export enum OpeningStatus {
@@ -206,18 +192,21 @@ export type ApiMethodNamedArg = {
 }
 export type ApiMethodNamedArgs = ApiMethodNamedArg[]
 
-// Content-related
-export enum AssetType {
-  AnyAsset = 1,
+// Api without TypeScript augmentations for "query", "tx" and "consts" (useful when more type flexibility is needed)
+export type UnaugmentedApiPromise = Omit<ApiPromise, 'query' | 'tx' | 'consts'> & {
+  query: { [key: string]: QueryableModuleStorage<'promise'> }
+  tx: { [key: string]: SubmittableModuleExtrinsics<'promise'> }
+  consts: { [key: string]: QueryableModuleConsts }
 }
 
-export type InputAsset = {
+export type AssetToUpload = {
+  dataObjectId: BN
   path: string
-  contentId: ContentId
 }
 
-export type InputAssetDetails = InputAsset & {
-  parameters: ContentParameters
+export type ResolvedAsset = {
+  path: string
+  parameters: DataObjectCreationParameters
 }
 
 export type VideoFFProbeMetadata = {
@@ -234,47 +223,69 @@ export type VideoFileMetadata = VideoFFProbeMetadata & {
   mimeType: string
 }
 
-export type VideoInputParameters = Omit<VideoMetadata.AsObject, 'video' | 'thumbnailPhoto'> & {
+export type VideoInputParameters = Omit<IVideoMetadata, 'video' | 'thumbnailPhoto'> & {
   videoPath?: string
   thumbnailPhotoPath?: string
 }
 
-export type ChannelInputParameters = Omit<ChannelMetadata.AsObject, 'coverPhoto' | 'avatarPhoto'> & {
+export type ChannelInputParameters = Omit<IChannelMetadata, 'coverPhoto' | 'avatarPhoto'> & {
   coverPhotoPath?: string
   avatarPhotoPath?: string
   rewardAccount?: string
 }
 
-export type ChannelCategoryInputParameters = ChannelCategoryMetadata.AsObject
+export type ChannelCategoryInputParameters = IChannelCategoryMetadata
 
-export type VideoCategoryInputParameters = VideoCategoryMetadata.AsObject
+export type VideoCategoryInputParameters = IVideoCategoryMetadata
+
+type AnyNonObject = string | number | boolean | any[] | Long
 
 // JSONSchema utility types
 export type JSONTypeName<T> = T extends string
   ? 'string' | ['string', 'null']
   : T extends number
   ? 'number' | ['number', 'null']
-  : T extends any[]
-  ? 'array' | ['array', 'null']
-  : T extends Record<string, unknown>
-  ? 'object' | ['object', 'null']
   : T extends boolean
   ? 'boolean' | ['boolean', 'null']
-  : never
+  : T extends any[]
+  ? 'array' | ['array', 'null']
+  : T extends Long
+  ? 'number' | ['number', 'null']
+  : 'object' | ['object', 'null']
 
 export type PropertySchema<P> = Omit<
   JSONSchema7Definition & {
     type: JSONTypeName<P>
-    properties: P extends Record<string, unknown> ? JsonSchemaProperties<P> : never
+    properties: P extends AnyNonObject ? never : JsonSchemaProperties<P>
   },
-  P extends Record<string, unknown> ? '' : 'properties'
+  P extends AnyNonObject ? 'properties' : ''
 >
 
-export type JsonSchemaProperties<T extends Record<string, unknown>> = {
+export type JsonSchemaProperties<T> = {
   [K in keyof Required<T>]: PropertySchema<Required<T>[K]>
 }
 
-export type JsonSchema<T extends Record<string, unknown>> = JSONSchema7 & {
+export type JsonSchema<T> = JSONSchema7 & {
   type: 'object'
   properties: JsonSchemaProperties<T>
 }
+
+// Storage node related types
+
+export type StorageNodeInfo = {
+  bucketId: number
+  apiEndpoint: string
+}
+
+export type TokenRequest = {
+  data: TokenRequestData
+  signature: string
+}
+
+export type TokenRequestData = {
+  memberId: number
+  accountId: string
+  dataObjectId: number
+  storageBucketId: number
+  bagId: string
+}

+ 29 - 2
cli/src/base/AccountsCommandBase.ts

@@ -10,6 +10,7 @@ import { formatBalance } from '@polkadot/util'
 import { NamedKeyringPair } from '../Types'
 import { DeriveBalancesAll } from '@polkadot/api-derive/types'
 import { toFixedLength } from '../helpers/display'
+import { MemberId } from '@joystream/types/members'
 
 const ACCOUNTS_DIRNAME = 'accounts'
 const SPECIAL_ACCOUNT_POSTFIX = '__DEV'
@@ -22,6 +23,8 @@ const SPECIAL_ACCOUNT_POSTFIX = '__DEV'
  * Where: APP_DATA_PATH is provided by StateAwareCommandBase and ACCOUNTS_DIRNAME is a const (see above).
  */
 export default abstract class AccountsCommandBase extends ApiCommandBase {
+  private selectedMemberId: number | undefined
+
   getAccountsDirPath(): string {
     return path.join(this.getAppDataPath(), ACCOUNTS_DIRNAME)
   }
@@ -249,14 +252,38 @@ export default abstract class AccountsCommandBase extends ApiCommandBase {
     }
   }
 
-  async getRequiredMemberId(): Promise<number> {
+  async getRequiredMemberId(useSelected = false): Promise<number> {
+    if (this.selectedMemberId && useSelected) {
+      return this.selectedMemberId
+    }
+
     const account = await this.getRequiredSelectedAccount()
     const memberIds = await this.getApi().getMemberIdsByControllerAccount(account.address)
+
+    let memberId: number
     if (!memberIds.length) {
       this.error('Membership required to access this command!', { exit: ExitCodes.AccessDenied })
+    } else if (memberIds.length === 1) {
+      memberId = memberIds[0].toNumber()
+    } else {
+      memberId = await this.promptForMember(memberIds, 'Choose member context')
     }
 
-    return memberIds[0].toNumber() // FIXME: Temporary solution (just using the first one)
+    this.selectedMemberId = memberId
+    return memberId
+  }
+
+  async promptForMember(availableMembers: MemberId[], message = 'Choose a member'): Promise<number> {
+    const memberId: number = await this.simplePrompt({
+      type: 'list',
+      message,
+      choices: availableMembers.map((memberId) => ({
+        name: `ID: ${memberId.toString()}`,
+        value: memberId.toNumber(),
+      })),
+    })
+
+    return memberId
   }
 
   async init() {

+ 160 - 65
cli/src/base/ApiCommandBase.ts

@@ -2,21 +2,20 @@ import ExitCodes from '../ExitCodes'
 import { CLIError } from '@oclif/errors'
 import StateAwareCommandBase from './StateAwareCommandBase'
 import Api from '../Api'
-import { getTypeDef, Option, Tuple, TypeRegistry } from '@polkadot/types'
-import { Registry, Codec, CodecArg, TypeDef, TypeDefInfo } from '@polkadot/types/types'
-
+import { getTypeDef, Option, Tuple } from '@polkadot/types'
+import { Registry, Codec, TypeDef, TypeDefInfo, IEvent, DetectCodec } from '@polkadot/types/types'
 import { Vec, Struct, Enum } from '@polkadot/types/codec'
 import { ApiPromise, SubmittableResult, WsProvider } from '@polkadot/api'
 import { KeyringPair } from '@polkadot/keyring/types'
 import chalk from 'chalk'
 import { InterfaceTypes } from '@polkadot/types/types/registry'
-import { ApiMethodArg, ApiMethodNamedArgs, ApiParamsOptions, ApiParamOptions } from '../Types'
+import { ApiMethodArg, ApiMethodNamedArgs, ApiParamsOptions, ApiParamOptions, UnaugmentedApiPromise } from '../Types'
 import { createParamOptions } from '../helpers/promptOptions'
-import { SubmittableExtrinsic } from '@polkadot/api/types'
+import { AugmentedSubmittables, SubmittableExtrinsic, AugmentedEvents, AugmentedEvent } from '@polkadot/api/types'
 import { DistinctQuestion } from 'inquirer'
 import { BOOL_PROMPT_OPTIONS } from '../helpers/prompting'
 import { DispatchError } from '@polkadot/types/interfaces/system'
-import { Event } from '@polkadot/types/interfaces'
+import QueryNodeApi from '../QueryNodeApi'
 
 export class ExtrinsicFailedError extends Error {}
 
@@ -24,35 +23,74 @@ export class ExtrinsicFailedError extends Error {}
  * Abstract base class for commands that require access to the API.
  */
 export default abstract class ApiCommandBase extends StateAwareCommandBase {
-  private api: Api | null = null
+  private api: Api | undefined
+  private queryNodeApi: QueryNodeApi | null | undefined
+
+  // Command configuration
+  protected requiresApiConnection = true
+  protected requiresQueryNode = false
 
   getApi(): Api {
-    if (!this.api) throw new CLIError('Tried to get API before initialization.', { exit: ExitCodes.ApiError })
+    if (!this.api) {
+      throw new CLIError('Tried to access API before initialization.', { exit: ExitCodes.ApiError })
+    }
     return this.api
   }
 
-  // Get original api for lower-level api calls
+  getQNApi(): QueryNodeApi {
+    if (this.queryNodeApi === undefined) {
+      throw new CLIError('Tried to access QueryNodeApi before initialization.', { exit: ExitCodes.QueryNodeError })
+    }
+    if (this.queryNodeApi === null) {
+      throw new CLIError('Query node endpoint uri is required in order to run this command!', {
+        exit: ExitCodes.QueryNodeError,
+      })
+    }
+    return this.queryNodeApi
+  }
+
+  // Shortcuts
   getOriginalApi(): ApiPromise {
     return this.getApi().getOriginalApi()
   }
 
+  getUnaugmentedApi(): UnaugmentedApiPromise {
+    return this.getApi().getUnaugmentedApi()
+  }
+
   getTypesRegistry(): Registry {
     return this.getOriginalApi().registry
   }
 
-  createType<K extends keyof InterfaceTypes>(typeName: K, value?: unknown): InterfaceTypes[K] {
-    return this.getOriginalApi().createType(typeName, value)
+  createType<T extends Codec = Codec, TN extends string = string>(typeName: TN, value?: unknown): DetectCodec<T, TN> {
+    return this.getOriginalApi().createType<T, TN>(typeName, value)
+  }
+
+  isQueryNodeUriSet(): boolean {
+    const { queryNodeUri } = this.getPreservedState()
+    return !!queryNodeUri
   }
 
-  async init(skipConnection = false): Promise<void> {
+  async init(): Promise<void> {
     await super.init()
-    if (!skipConnection) {
+    if (this.requiresApiConnection) {
       let apiUri: string = this.getPreservedState().apiUri
+
       if (!apiUri) {
-        this.warn("You haven't provided a node/endpoint for the CLI to connect to yet!")
+        this.warn("You haven't provided a Joystream node websocket api uri for the CLI to connect to yet!")
         apiUri = await this.promptForApiUri()
       }
 
+      let queryNodeUri: string | null | undefined = this.getPreservedState().queryNodeUri
+
+      if (this.requiresQueryNode && !queryNodeUri) {
+        this.warn('Query node endpoint uri is required in order to run this command!')
+        queryNodeUri = await this.promptForQueryNodeUri(true)
+      } else if (queryNodeUri === undefined) {
+        this.warn("You haven't provided a Joystream query node uri for the CLI to connect to yet!")
+        queryNodeUri = await this.promptForQueryNodeUri()
+      }
+
       const { metadataCache } = this.getPreservedState()
       this.api = await Api.create(apiUri, metadataCache)
 
@@ -63,13 +101,19 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
         metadataCache[metadataKey] = await this.getOriginalApi().runtimeMetadata.toJSON()
         await this.setPreservedState({ metadataCache })
       }
+
+      this.queryNodeApi = queryNodeUri
+        ? new QueryNodeApi(queryNodeUri, (err) => {
+            this.warn(`Query node error: ${err.networkError?.message || err.graphQLErrors?.join('\n')}`)
+          })
+        : null
     }
   }
 
   async promptForApiUri(): Promise<string> {
     let selectedNodeUri = await this.simplePrompt({
       type: 'list',
-      message: 'Choose a node/endpoint:',
+      message: 'Choose a node websocket api uri:',
       choices: [
         {
           name: 'Local node (ws://localhost:9944)',
@@ -103,7 +147,51 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
     return selectedNodeUri
   }
 
-  isApiUriValid(uri: string) {
+  async promptForQueryNodeUri(isRequired = false): Promise<string | null> {
+    const choices = [
+      {
+        name: 'Local query node (http://localhost:8081/graphql)',
+        value: 'http://localhost:8081/graphql',
+      },
+      {
+        name: 'Jsgenesis-hosted query node (https://hydra.joystream.org/graphql)',
+        value: 'https://hydra.joystream.org/graphql',
+      },
+      {
+        name: 'Custom endpoint',
+        value: '',
+      },
+    ]
+    if (!isRequired) {
+      choices.push({
+        name: "No endpoint (if you don't use query node some features will not be available)",
+        value: 'none',
+      })
+    }
+    let selectedUri: string = await this.simplePrompt({
+      type: 'list',
+      message: 'Choose a query node endpoint:',
+      choices,
+    })
+
+    if (!selectedUri) {
+      do {
+        selectedUri = await this.simplePrompt({
+          type: 'input',
+          message: 'Provide a query node endpoint',
+        })
+        if (!this.isQueryNodeUriValid(selectedUri)) {
+          this.warn('Provided uri seems incorrect! Please try again...')
+        }
+      } while (!this.isQueryNodeUriValid(selectedUri))
+    }
+
+    await this.setPreservedState({ queryNodeUri: selectedUri })
+
+    return selectedUri === 'none' ? null : selectedUri
+  }
+
+  isApiUriValid(uri: string): boolean {
     try {
       // eslint-disable-next-line no-new
       new WsProvider(uri)
@@ -113,6 +201,17 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
     return true
   }
 
+  isQueryNodeUriValid(uri: string): boolean {
+    let url: URL
+    try {
+      url = new URL(uri)
+    } catch (_) {
+      return false
+    }
+
+    return url.protocol === 'http:' || url.protocol === 'https:'
+  }
+
   // This is needed to correctly handle some structs, enums etc.
   // Where the main typeDef doesn't provide enough information
   protected getRawTypeDef(type: keyof InterfaceTypes) {
@@ -180,10 +279,10 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
         createParamOptions(subtype.name, defaultValue?.unwrapOr(undefined))
       )
       this.closeIndentGroup()
-      return this.createType(`Option<${subtype.type}>` as any, value)
+      return this.createType<Option<Codec>>(`Option<${subtype.type}>`, value)
     }
 
-    return this.createType(`Option<${subtype.type}>` as any, null)
+    return this.createType<Option<Codec>>(`Option<${subtype.type}>`, null)
   }
 
   // Prompt for Tuple
@@ -204,7 +303,11 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
     }
     this.closeIndentGroup()
 
-    return new Tuple(this.getTypesRegistry(), subtypes.map((subtype) => subtype.type) as any, result)
+    return new Tuple(
+      this.getTypesRegistry(),
+      subtypes.map((subtype) => subtype.type),
+      result
+    )
   }
 
   // Prompt for Struct
@@ -231,7 +334,7 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
     }
     this.closeIndentGroup()
 
-    return this.createType(structType as any, structValues)
+    return this.createType(structType, structValues)
   }
 
   // Prompt for Vec
@@ -259,7 +362,7 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
     } while (addAnother)
     this.closeIndentGroup()
 
-    return this.createType(`Vec<${subtype.type}>` as any, entries)
+    return this.createType<Vec<Codec>>(`Vec<${subtype.type}>`, entries)
   }
 
   // Prompt for Enum
@@ -284,12 +387,12 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
 
     if (enumSubtype.type !== 'Null') {
       const subtypeOptions = createParamOptions(enumSubtype.name, defaultValue?.value)
-      return this.createType(enumType as any, {
+      return this.createType<Enum>(enumType, {
         [enumSubtype.name!]: await this.promptForParam(enumSubtype.type, subtypeOptions),
       })
     }
 
-    return this.createType(enumType as any, enumSubtype.name)
+    return this.createType<Enum>(enumType, enumSubtype.name)
   }
 
   // Prompt for param based on "paramType" string (ie. Option<MemeberId>)
@@ -334,7 +437,7 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
     method: string,
     paramsOptions?: ApiParamsOptions
   ): Promise<ApiMethodArg[]> {
-    const extrinsicMethod = this.getOriginalApi().tx[module][method]
+    const extrinsicMethod = (await this.getUnaugmentedApi().tx)[module][method]
     const values: ApiMethodArg[] = []
 
     this.openIndentGroup()
@@ -371,12 +474,8 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
                 let errorMsg = dispatchError.toString()
                 if (dispatchError.isModule) {
                   try {
-                    // Need to assert that registry is of TypeRegistry type, since Registry intefrace
-                    // seems outdated and doesn't include DispatchErrorModule as possible argument for "findMetaError"
-                    const { name, documentation } = (this.getOriginalApi().registry as TypeRegistry).findMetaError(
-                      dispatchError.asModule
-                    )
-                    errorMsg = `${name} (${documentation})`
+                    const { name, docs } = this.getOriginalApi().registry.findMetaError(dispatchError.asModule)
+                    errorMsg = `${name} (${docs.join(', ')})`
                   } catch (e) {
                     // This probably means we don't have this error in the metadata
                     // In this case - continue (we'll just display dispatchError.toString())
@@ -398,20 +497,13 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
     })
   }
 
-  async sendAndFollowTx(
-    account: KeyringPair,
-    tx: SubmittableExtrinsic<'promise'>,
-    warnOnly = false // If specified - only warning will be displayed in case of failure (instead of error beeing thrown)
-  ): Promise<SubmittableResult | false> {
+  async sendAndFollowTx(account: KeyringPair, tx: SubmittableExtrinsic<'promise'>): Promise<SubmittableResult> {
     try {
       const res = await this.sendExtrinsic(account, tx)
       this.log(chalk.green(`Extrinsic successful!`))
       return res
     } catch (e) {
-      if (e instanceof ExtrinsicFailedError && warnOnly) {
-        this.warn(`Extrinsic failed! ${e.message}`)
-        return false
-      } else if (e instanceof ExtrinsicFailedError) {
+      if (e instanceof ExtrinsicFailedError) {
         throw new CLIError(`Extrinsic failed! ${e.message}`, { exit: ExitCodes.ApiError })
       } else {
         throw e
@@ -419,36 +511,39 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
     }
   }
 
-  async sendAndFollowNamedTx(
+  async sendAndFollowNamedTx<
+    Module extends keyof AugmentedSubmittables<'promise'>,
+    Method extends keyof AugmentedSubmittables<'promise'>[Module] & string,
+    Submittable extends AugmentedSubmittables<'promise'>[Module][Method]
+  >(
     account: KeyringPair,
-    module: string,
-    method: string,
-    params: CodecArg[],
-    warnOnly = false
-  ): Promise<SubmittableResult | false> {
-    this.log(chalk.magentaBright(`\nSending ${module}.${method} extrinsic...`))
-    const tx = await this.getOriginalApi().tx[module][method](...params)
-    return await this.sendAndFollowTx(account, tx, warnOnly)
+    module: Module,
+    method: Method,
+    params: Submittable extends (...args: any[]) => any ? Parameters<Submittable> : []
+  ): Promise<SubmittableResult> {
+    this.log(
+      chalk.magentaBright(
+        `\nSending ${module}.${method} extrinsic from ${account.meta.name ? account.meta.name : account.address}...`
+      )
+    )
+    const tx = await this.getUnaugmentedApi().tx[module][method](...params)
+    return await this.sendAndFollowTx(account, tx) //, warnOnly)
   }
 
-  // TODO:
-  // Switch to:
-  // public findEvent<S extends keyof AugmentedEvents<'promise'> & string, M extends keyof AugmentedEvents<'promise'>[S] & string>
-  //          (result: SubmittableResult, section: S, method: M): Event | undefined {
-  // Once augment-api is supported
-  public findEvent(result: SubmittableResult, section: string, method: string): Event | undefined {
-    return result.findRecord(section, method)?.event
+  public findEvent<
+    S extends keyof AugmentedEvents<'promise'> & string,
+    M extends keyof AugmentedEvents<'promise'>[S] & string,
+    EventType = AugmentedEvents<'promise'>[S][M] extends AugmentedEvent<'promise', infer T> ? IEvent<T> : never
+  >(result: SubmittableResult, section: S, method: M): EventType | undefined {
+    return result.findRecord(section, method)?.event as EventType | undefined
   }
 
-  async buildAndSendExtrinsic(
-    account: KeyringPair,
-    module: string,
-    method: string,
-    paramsOptions?: ApiParamsOptions,
-    warnOnly = false // If specified - only warning will be displayed (instead of error beeing thrown)
-  ): Promise<ApiMethodArg[]> {
+  async buildAndSendExtrinsic<
+    Module extends keyof AugmentedSubmittables<'promise'>,
+    Method extends keyof AugmentedSubmittables<'promise'>[Module] & string
+  >(account: KeyringPair, module: Module, method: Method, paramsOptions?: ApiParamsOptions): Promise<ApiMethodArg[]> {
     const params = await this.promptForExtrinsicParams(module, method, paramsOptions)
-    await this.sendAndFollowNamedTx(account, module, method, params, warnOnly)
+    await this.sendAndFollowNamedTx(account, module, method, params as any)
 
     return params
   }
@@ -456,7 +551,7 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
   extrinsicArgsFromDraft(module: string, method: string, draftFilePath: string): ApiMethodNamedArgs {
     let draftJSONObj
     const parsedArgs: ApiMethodNamedArgs = []
-    const extrinsicMethod = this.getOriginalApi().tx[module][method]
+    const extrinsicMethod = this.getUnaugmentedApi().tx[module][method]
     try {
       // eslint-disable-next-line @typescript-eslint/no-var-requires
       draftJSONObj = require(draftFilePath)
@@ -470,7 +565,7 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
       const argName = arg.name.toString()
       const argType = arg.type.toString()
       try {
-        parsedArgs.push({ name: argName, value: this.createType(argType as any, draftJSONObj[parseInt(index)]) })
+        parsedArgs.push({ name: argName, value: this.createType(argType, draftJSONObj[parseInt(index)]) })
       } catch (e) {
         throw new CLIError(`Couldn't parse ${argName} value from draft at ${draftFilePath}!`, {
           exit: ExitCodes.InvalidFile,

+ 5 - 5
cli/src/base/ContentDirectoryCommandBase.ts

@@ -4,7 +4,7 @@ import { CuratorGroup, CuratorGroupId, ContentActor, Channel } from '@joystream/
 import { Worker } from '@joystream/types/working-group'
 import { CLIError } from '@oclif/errors'
 import { RolesCommandBase } from './WorkingGroupsCommandBase'
-import { createType } from '@joystream/types'
+import { createTypeFromConstructor } from '@joystream/types'
 import { flags } from '@oclif/command'
 
 // TODO: Rework the contexts
@@ -110,7 +110,7 @@ export default abstract class ContentDirectoryCommandBase extends RolesCommandBa
       if (!group.active.valueOf()) {
         this.error(`Curator group ${requiredGroupId.toString()} is no longer active`, { exit: ExitCodes.AccessDenied })
       }
-      if (!group.curators.toArray().some((curatorId) => curatorId.eq(curator.workerId))) {
+      if (!Array.from(group.curators).some((curatorId) => curatorId.eq(curator.workerId))) {
         this.error(`You don't belong to required curator group (ID: ${requiredGroupId.toString()})`, {
           exit: ExitCodes.AccessDenied,
         })
@@ -121,7 +121,7 @@ export default abstract class ContentDirectoryCommandBase extends RolesCommandBa
       const availableGroupIds = groups
         .filter(
           ([, group]) =>
-            group.active.valueOf() && group.curators.toArray().some((curatorId) => curatorId.eq(curator.workerId))
+            group.active.valueOf() && Array.from(group.curators).some((curatorId) => curatorId.eq(curator.workerId))
         )
         .map(([id]) => id)
 
@@ -134,7 +134,7 @@ export default abstract class ContentDirectoryCommandBase extends RolesCommandBa
       }
     }
 
-    return createType('ContentActor', { Curator: [groupId, curator.workerId.toNumber()] })
+    return createTypeFromConstructor(ContentActor, { Curator: [groupId, curator.workerId.toNumber()] })
   }
 
   private async curatorGroupChoices(ids?: CuratorGroupId[]) {
@@ -145,7 +145,7 @@ export default abstract class ContentDirectoryCommandBase extends RolesCommandBa
         name:
           `Group ${id.toString()} (` +
           `${group.active.valueOf() ? 'Active' : 'Inactive'}, ` +
-          `${group.curators.toArray().length} member(s)), `,
+          `${Array.from(group.curators).length} member(s)), `,
         value: id.toNumber(),
       }))
   }

+ 3 - 0
cli/src/base/DefaultCommandBase.ts

@@ -102,6 +102,9 @@ export default abstract class DefaultCommandBase extends Command {
     // called after run and catch regardless of whether or not the command errored
     // We'll force exit here, in case there is no error, to prevent console.log from hanging the process
     if (!err) this.exit(ExitCodes.OK)
+    if (err && process.env.DEBUG === 'true') {
+      console.log(err)
+    }
     super.finally(err)
   }
 

+ 2 - 0
cli/src/base/StateAwareCommandBase.ts

@@ -12,6 +12,7 @@ import { WorkingGroups } from '../Types'
 type StateObject = {
   selectedAccountFilename: string
   apiUri: string
+  queryNodeUri: string | null | undefined
   defaultWorkingGroup: WorkingGroups
   metadataCache: Record<string, any>
 }
@@ -20,6 +21,7 @@ type StateObject = {
 const DEFAULT_STATE: StateObject = {
   selectedAccountFilename: '',
   apiUri: '',
+  queryNodeUri: undefined,
   defaultWorkingGroup: WorkingGroups.StorageProviders,
   metadataCache: {},
 }

+ 175 - 66
cli/src/base/UploadCommandBase.ts

@@ -1,18 +1,33 @@
 import ContentDirectoryCommandBase from './ContentDirectoryCommandBase'
-import { VideoFFProbeMetadata, VideoFileMetadata, AssetType, InputAsset, InputAssetDetails } from '../Types'
-import { ContentId, ContentParameters } from '@joystream/types/storage'
+import {
+  AssetToUpload,
+  ResolvedAsset,
+  StorageNodeInfo,
+  TokenRequest,
+  TokenRequestData,
+  VideoFFProbeMetadata,
+  VideoFileMetadata,
+} from '../Types'
 import { MultiBar, Options, SingleBar } from 'cli-progress'
-import { Assets } from '../json-schemas/typings/Assets.schema'
 import ExitCodes from '../ExitCodes'
-import ipfsHash from 'ipfs-only-hash'
 import fs from 'fs'
 import _ from 'lodash'
-import axios, { AxiosRequestConfig } from 'axios'
+import axios from 'axios'
 import ffprobeInstaller from '@ffprobe-installer/ffprobe'
 import ffmpeg from 'fluent-ffmpeg'
 import path from 'path'
-import chalk from 'chalk'
 import mimeTypes from 'mime-types'
+import { Assets } from '../schemas/typings/Assets.schema'
+import chalk from 'chalk'
+import { DataObjectCreationParameters } from '@joystream/types/storage'
+import { createHash } from 'blake3'
+import * as multihash from 'multihashes'
+import { u8aToHex, formatBalance } from '@polkadot/util'
+import { KeyringPair } from '@polkadot/keyring/types'
+import FormData from 'form-data'
+import BN from 'bn.js'
+import { createTypeFromConstructor } from '@joystream/types'
+import { StorageAssets } from '@joystream/types/content'
 
 ffmpeg.setFfprobePath(ffprobeInstaller.path)
 
@@ -21,19 +36,18 @@ ffmpeg.setFfprobePath(ffprobeInstaller.path)
  */
 export default abstract class UploadCommandBase extends ContentDirectoryCommandBase {
   private fileSizeCache: Map<string, number> = new Map<string, number>()
+  private maxFileSize: undefined | BN = undefined
   private progressBarOptions: Options = {
     format: `{barTitle} | {bar} | {value}/{total} KB processed`,
   }
 
+  protected requiresQueryNode = true
+
   getFileSize(path: string): number {
     const cachedSize = this.fileSizeCache.get(path)
     return cachedSize !== undefined ? cachedSize : fs.statSync(path).size
   }
 
-  normalizeEndpoint(endpoint: string) {
-    return endpoint.endsWith('/') ? endpoint : endpoint + '/'
-  }
-
   createReadStreamWithProgressBar(
     filePath: string,
     barTitle: string,
@@ -103,7 +117,7 @@ export default abstract class UploadCommandBase extends ContentDirectoryCommandB
     try {
       ffProbeMetadata = await this.getVideoFFProbeMetadata(filePath)
     } catch (e) {
-      const message = e.message || e
+      const message = e instanceof Error ? e.message : e
       this.warn(`Failed to get video metadata via ffprobe (${message})`)
     }
 
@@ -118,114 +132,177 @@ export default abstract class UploadCommandBase extends ContentDirectoryCommandB
     }
   }
 
-  async calculateFileIpfsHash(filePath: string): Promise<string> {
+  async calculateFileHash(filePath: string): Promise<string> {
     const { fileStream } = this.createReadStreamWithProgressBar(filePath, 'Calculating file hash')
-    const hash: string = await ipfsHash.of(fileStream)
-
-    return hash
+    let blake3Hash: Uint8Array
+    return new Promise<string>((resolve, reject) => {
+      fileStream
+        .pipe(createHash())
+        .on('data', (data) => (blake3Hash = data))
+        .on('end', () => resolve(multihash.toB58String(multihash.encode(blake3Hash, 'blake3'))))
+        .on('error', (err) => reject(err))
+    })
   }
 
-  validateFile(filePath: string): void {
+  async validateFile(filePath: string): Promise<void> {
     // Basic file validation
     if (!fs.existsSync(filePath)) {
       this.error(`${filePath} - file does not exist under provided path!`, { exit: ExitCodes.FileNotFound })
     }
+    if (!this.maxFileSize) {
+      this.maxFileSize = await this.getOriginalApi().consts.storage.maxDataObjectSize
+    }
+    if (this.maxFileSize.ltn(this.getFileSize(filePath))) {
+      this.error(`${filePath} - file is too big. Max file size is ${this.maxFileSize.toString()} bytes`)
+    }
   }
 
-  assetUrl(endpointRoot: string, contentId: ContentId): string {
-    // This will also make sure the resulting url is a valid url
-    return new URL(`asset/v0/${contentId.encode()}`, this.normalizeEndpoint(endpointRoot)).toString()
-  }
-
-  async getRandomProviderEndpoint(): Promise<string | null> {
-    const endpoints = _.shuffle(await this.getApi().allStorageProviderEndpoints())
-    for (const endpoint of endpoints) {
-      try {
-        const url = new URL('swagger.json', this.normalizeEndpoint(endpoint)).toString()
-        await axios.head(url)
-        return endpoint
-      } catch (e) {
-        continue
+  async getRandomActiveStorageNodeInfo(bagId: string, retryTime = 6, retryCount = 5): Promise<StorageNodeInfo | null> {
+    for (let i = 0; i <= retryCount; ++i) {
+      const nodesInfo = _.shuffle(await this.getQNApi().storageNodesInfoByBagId(bagId))
+      for (const info of nodesInfo) {
+        try {
+          await axios.get(info.apiEndpoint + '/version', {
+            headers: {
+              connection: 'close',
+            },
+          })
+          return info
+        } catch (err) {
+          continue
+        }
+      }
+      if (i !== retryCount) {
+        this.log(`No storage provider can serve the request yet, retrying in ${retryTime}s (${i + 1}/${retryCount})...`)
+        await new Promise((resolve) => setTimeout(resolve, retryTime * 1000))
       }
     }
 
     return null
   }
 
-  async generateContentParameters(filePath: string, type: AssetType): Promise<ContentParameters> {
-    return this.createType('ContentParameters', {
-      content_id: ContentId.generate(this.getTypesRegistry()),
-      type_id: type,
+  async generateDataObjectParameters(filePath: string): Promise<DataObjectCreationParameters> {
+    return createTypeFromConstructor(DataObjectCreationParameters, {
       size: this.getFileSize(filePath),
-      ipfs_content_id: await this.calculateFileIpfsHash(filePath),
+      ipfsContentId: await this.calculateFileHash(filePath),
     })
   }
 
-  async prepareInputAssets(paths: string[], basePath?: string): Promise<InputAssetDetails[]> {
+  async resolveAndValidateAssets(paths: string[], basePath: string): Promise<ResolvedAsset[]> {
     // Resolve assets
     if (basePath) {
       paths = paths.map((p) => basePath && path.resolve(path.dirname(basePath), p))
     }
     // Validate assets
-    paths.forEach((p) => this.validateFile(p))
+    await Promise.all(paths.map((p) => this.validateFile(p)))
 
     // Return data
     return await Promise.all(
       paths.map(async (path) => {
-        const parameters = await this.generateContentParameters(path, AssetType.AnyAsset)
+        const parameters = await this.generateDataObjectParameters(path)
         return {
           path,
-          contentId: parameters.content_id,
           parameters,
         }
       })
     )
   }
 
-  async uploadAsset(contentId: ContentId, filePath: string, endpoint?: string, multiBar?: MultiBar): Promise<void> {
-    const providerEndpoint = endpoint || (await this.getRandomProviderEndpoint())
-    if (!providerEndpoint) {
-      this.error('No active provider found!', { exit: ExitCodes.ActionCurrentlyUnavailable })
+  async getStorageNodeUploadToken(
+    storageNodeInfo: StorageNodeInfo,
+    account: KeyringPair,
+    memberId: number,
+    objectId: BN,
+    bagId: string
+  ): Promise<string> {
+    const data: TokenRequestData = {
+      storageBucketId: storageNodeInfo.bucketId,
+      accountId: account.address,
+      bagId,
+      memberId,
+      dataObjectId: objectId.toNumber(),
     }
-    const uploadUrl = this.assetUrl(providerEndpoint, contentId)
-    const fileSize = this.getFileSize(filePath)
+    const message = JSON.stringify(data)
+    const signature = u8aToHex(account.sign(message))
+    const postData: TokenRequest = { data, signature }
+    const {
+      data: { token },
+    } = await axios.post(`${storageNodeInfo.apiEndpoint}/authToken`, postData)
+    if (!token) {
+      this.error('Recieved empty token from the storage node!', { exit: ExitCodes.StorageNodeError })
+    }
+
+    return token
+  }
+
+  async uploadAsset(
+    account: KeyringPair,
+    memberId: number,
+    objectId: BN,
+    bagId: string,
+    filePath: string,
+    storageNode?: StorageNodeInfo,
+    multiBar?: MultiBar
+  ): Promise<void> {
+    const storageNodeInfo = storageNode || (await this.getRandomActiveStorageNodeInfo(bagId))
+    if (!storageNodeInfo) {
+      this.error('No active storage node found!', { exit: ExitCodes.ActionCurrentlyUnavailable })
+    }
+    this.log(`Chosen storage node endpoint: ${storageNodeInfo.apiEndpoint}`)
+    const token = await this.getStorageNodeUploadToken(storageNodeInfo, account, memberId, objectId, bagId)
     const { fileStream, progressBar } = this.createReadStreamWithProgressBar(
       filePath,
-      `Uploading ${contentId.encode()}`,
+      `Uploading ${filePath}`,
       multiBar
     )
     fileStream.on('end', () => {
       // Temporarly disable because with Promise.all it breaks the UI
       // cli.action.start('Waiting for the file to be processed...')
     })
-
+    const formData = new FormData()
+    formData.append('dataObjectId', objectId.toString())
+    formData.append('storageBucketId', storageNodeInfo.bucketId)
+    formData.append('bagId', bagId)
+    formData.append('file', fileStream, {
+      filename: path.basename(filePath),
+      filepath: filePath,
+      knownLength: this.getFileSize(filePath),
+    })
+    this.log(`Uploading object ${objectId.toString()} (${filePath})`)
     try {
-      const config: AxiosRequestConfig = {
+      await axios.post(`${storageNodeInfo.apiEndpoint}/files`, formData, {
         headers: {
-          'Content-Type': '', // https://github.com/Joystream/storage-node-joystream/issues/16
-          'Content-Length': fileSize.toString(),
+          'x-api-key': token,
+          'content-type': 'multipart/form-data',
+          ...formData.getHeaders(),
         },
-        maxBodyLength: fileSize,
-      }
-      await axios.put(uploadUrl, fileStream, config)
+      })
     } catch (e) {
       progressBar.stop()
-      const msg = (e.response && e.response.data && e.response.data.message) || e.message || e
-      this.error(`Unexpected error when trying to upload a file: ${msg}`, {
-        exit: ExitCodes.ExternalInfrastructureError,
-      })
+      if (axios.isAxiosError(e)) {
+        const msg = e.response && e.response.data ? JSON.stringify(e.response.data) : e.message
+        this.error(`Unexpected error when trying to upload a file: ${msg}`, {
+          exit: ExitCodes.StorageNodeError,
+        })
+      } else {
+        throw e
+      }
     }
   }
 
   async uploadAssets(
-    assets: InputAsset[],
+    account: KeyringPair,
+    memberId: number,
+    bagId: string,
+    assets: AssetToUpload[],
     inputFilePath: string,
     outputFilePostfix = '__rejectedContent'
   ): Promise<void> {
-    const endpoint = await this.getRandomProviderEndpoint()
-    if (!endpoint) {
+    const storageNodeInfo = await this.getRandomActiveStorageNodeInfo(bagId)
+    if (!storageNodeInfo) {
       this.warn('No storage provider is currently available!')
       this.handleRejectedUploads(
+        bagId,
         assets,
         assets.map(() => false),
         inputFilePath,
@@ -234,34 +311,66 @@ export default abstract class UploadCommandBase extends ContentDirectoryCommandB
       this.exit(ExitCodes.ActionCurrentlyUnavailable)
     }
     const multiBar = new MultiBar(this.progressBarOptions)
+    const errors: [string, string][] = []
     // Workaround replacement for Promise.allSettled (which is only available in ES2020)
     const results = await Promise.all(
       assets.map(async (a) => {
         try {
-          await this.uploadAsset(a.contentId, a.path, endpoint, multiBar)
+          await this.uploadAsset(account, memberId, a.dataObjectId, bagId, a.path, storageNodeInfo, multiBar)
           return true
         } catch (e) {
+          errors.push([a.dataObjectId.toString(), e instanceof Error ? e.message : 'Unknown error'])
           return false
         }
       })
     )
-    this.handleRejectedUploads(assets, results, inputFilePath, outputFilePostfix)
+    errors.forEach(([objectId, message]) => this.warn(`Upload of object ${objectId} failed: ${message}`))
+    this.handleRejectedUploads(bagId, assets, results, inputFilePath, outputFilePostfix)
     multiBar.stop()
   }
 
+  /**
+   * Maps each entry of `originalPaths` to its sequential index among the paths present in
+   * `filteredPaths` (indexes assigned in `originalPaths` order), or `undefined` for entries
+   * that were filtered out (or are themselves `undefined`).
+   */
+  public assetsIndexes(originalPaths: (string | undefined)[], filteredPaths: string[]): (number | undefined)[] {
+    // Set gives O(1) membership checks (vs. Array.includes inside map) and avoids
+    // the unsound `path as string` cast: `undefined` can never be in a string[] anyway.
+    const filteredSet = new Set(filteredPaths)
+    let lastIndex = -1
+    return originalPaths.map((path) => (path !== undefined && filteredSet.has(path) ? ++lastIndex : undefined))
+  }
+
+  /**
+   * Builds the `StorageAssets` extrinsic argument from the resolved assets, prompting the
+   * user to confirm the total storage fee before proceeding.
+   *
+   * @param resolvedAssets - assets with their data object creation parameters already computed
+   * @returns the `StorageAssets` value, or `undefined` when there are no assets to store
+   */
+  async prepareAssetsForExtrinsic(resolvedAssets: ResolvedAsset[]): Promise<StorageAssets | undefined> {
+    const feePerMB = await this.getOriginalApi().query.storage.dataObjectPerMegabyteFee()
+    if (resolvedAssets.length) {
+      const totalBytes = resolvedAssets
+        .reduce((a, b) => {
+          return a.add(b.parameters.getField('size'))
+        }, new BN(0))
+        .toNumber()
+      // Fee is charged per started megabyte (hence Math.ceil)
+      const totalFee = feePerMB.muln(Math.ceil(totalBytes / 1024 / 1024))
+      await this.requireConfirmation(
+        `Total fee of ${chalk.cyan(formatBalance(totalFee))} ` +
+          `will have to be paid in order to store the provided assets. Are you sure you want to continue?`
+      )
+      return createTypeFromConstructor(StorageAssets, {
+        expected_data_size_fee: feePerMB,
+        object_creation_list: resolvedAssets.map((a) => a.parameters),
+      })
+    }
+
+    return undefined
+  }
+
   private handleRejectedUploads(
-    assets: InputAsset[],
+    bagId: string,
+    assets: AssetToUpload[],
     results: boolean[],
     inputFilePath: string,
     outputFilePostfix: string
   ): void {
     // Try to save rejected contentIds and paths for reupload purposes
-    const rejectedAssetsOutput: Assets = []
+    const rejectedAssetsOutput: Assets = { bagId, assets: [] }
     results.forEach(
       (r, i) =>
-        r === false && rejectedAssetsOutput.push({ contentId: assets[i].contentId.encode(), path: assets[i].path })
+        r === false &&
+        rejectedAssetsOutput.assets.push({ objectId: assets[i].dataObjectId.toString(), path: assets[i].path })
     )
-    if (rejectedAssetsOutput.length) {
+    if (rejectedAssetsOutput.assets.length) {
       this.warn(
         `Some assets were not uploaded successfully. Try reuploading them with ${chalk.magentaBright(
           'content:reuploadAssets'

+ 11 - 0
cli/src/commands/api/getQueryNodeEndpoint.ts

@@ -0,0 +1,11 @@
+import StateAwareCommandBase from '../../base/StateAwareCommandBase'
+import chalk from 'chalk'
+
+/** Prints the currently configured query node endpoint from the preserved CLI state. */
+export default class ApiGetQueryNodeEndpoint extends StateAwareCommandBase {
+  static description = 'Get current query node endpoint'
+
+  async run(): Promise<void> {
+    // Value may be a string (configured URI), null (user chose no query node)
+    // or undefined (never configured) - JSON.stringify makes all three distinguishable in output
+    const currentEndpoint: string | null | undefined = this.getPreservedState().queryNodeUri
+    this.log(chalk.green(JSON.stringify(currentEndpoint)))
+  }
+}

+ 9 - 10
cli/src/commands/api/inspect.ts

@@ -1,12 +1,11 @@
 import { flags } from '@oclif/command'
 import { CLIError } from '@oclif/errors'
 import { displayNameValueTable } from '../../helpers/display'
-import { ApiPromise } from '@polkadot/api'
 import { Codec } from '@polkadot/types/types'
-import { ConstantCodec } from '@polkadot/metadata/decorate/types'
+import { ConstantCodec } from '@polkadot/types/metadata/decorate/types'
 import ExitCodes from '../../ExitCodes'
 import chalk from 'chalk'
-import { NameValueObj, ApiMethodArg } from '../../Types'
+import { NameValueObj, ApiMethodArg, UnaugmentedApiPromise } from '../../Types'
 import ApiCommandBase from '../../base/ApiCommandBase'
 
 // Command flags type
@@ -78,21 +77,21 @@ export default class ApiInspect extends ApiCommandBase {
 
   getMethodMeta(apiType: ApiType, apiModule: string, apiMethod: string) {
     if (apiType === 'query') {
-      return this.getOriginalApi().query[apiModule][apiMethod].creator.meta
+      return this.getUnaugmentedApi().query[apiModule][apiMethod].creator.meta
     } else {
       // Currently the only other optoin is api.consts
-      const method: ConstantCodec = this.getOriginalApi().consts[apiModule][apiMethod] as ConstantCodec
+      const method = this.getUnaugmentedApi().consts[apiModule][apiMethod] as ConstantCodec
       return method.meta
     }
   }
 
   getMethodDescription(apiType: ApiType, apiModule: string, apiMethod: string): string {
-    const description: string = this.getMethodMeta(apiType, apiModule, apiMethod).documentation.join(' ')
+    const description: string = this.getMethodMeta(apiType, apiModule, apiMethod).docs.join(' ')
     return description || 'No description available.'
   }
 
   getQueryMethodParamsTypes(apiModule: string, apiMethod: string): string[] {
-    const method = this.getOriginalApi().query[apiModule][apiMethod]
+    const method = this.getUnaugmentedApi().query[apiModule][apiMethod]
     const { type } = method.creator.meta
     if (type.isDoubleMap) {
       return [type.asDoubleMap.key1.toString(), type.asDoubleMap.key2.toString()]
@@ -105,7 +104,7 @@ export default class ApiInspect extends ApiCommandBase {
 
   getMethodReturnType(apiType: ApiType, apiModule: string, apiMethod: string): string {
     if (apiType === 'query') {
-      const method = this.getOriginalApi().query[apiModule][apiMethod]
+      const method = this.getUnaugmentedApi().query[apiModule][apiMethod]
       const {
         meta: { type, modifier },
       } = method.creator
@@ -126,7 +125,7 @@ export default class ApiInspect extends ApiCommandBase {
   // Validate the flags - throws an error if flags.type, flags.module or flags.method is invalid / does not exist in the api.
   // Returns type, module and method which validity we can be sure about (notice they may still be "undefined" if weren't provided).
   validateFlags(
-    api: ApiPromise,
+    api: UnaugmentedApiPromise,
     flags: ApiInspectFlags
   ): { apiType: ApiType | undefined; apiModule: string | undefined; apiMethod: string | undefined } {
     let apiType: ApiType | undefined
@@ -164,7 +163,7 @@ export default class ApiInspect extends ApiCommandBase {
   }
 
   async run() {
-    const api: ApiPromise = this.getOriginalApi()
+    const api: UnaugmentedApiPromise = this.getUnaugmentedApi()
     const flags: ApiInspectFlags = this.parse(ApiInspect).flags as ApiInspectFlags
     const availableTypes: readonly string[] = TYPES_AVAILABLE
     const { apiType, apiModule, apiMethod } = this.validateFlags(api, flags)

+ 36 - 0
cli/src/commands/api/setQueryNodeEndpoint.ts

@@ -0,0 +1,36 @@
+import chalk from 'chalk'
+import ApiCommandBase from '../../base/ApiCommandBase'
+import ExitCodes from '../../ExitCodes'
+
+type ApiSetQueryNodeEndpointArgs = { endpoint: string }
+
+/**
+ * Sets the query node endpoint in the preserved CLI state, either from the positional
+ * argument (after validation) or interactively via prompt when no argument is given.
+ */
+export default class ApiSetQueryNodeEndpoint extends ApiCommandBase {
+  protected requiresApiConnection = false
+
+  static description = 'Set query node endpoint'
+  static args = [
+    {
+      name: 'endpoint',
+      required: false,
+      description: 'Query node endpoint for the CLI to use',
+    },
+  ]
+
+  async run(): Promise<void> {
+    const { endpoint }: ApiSetQueryNodeEndpointArgs = this.parse(ApiSetQueryNodeEndpoint)
+      .args as ApiSetQueryNodeEndpointArgs
+    let newEndpoint: string | null = null
+    if (endpoint) {
+      if (!this.isQueryNodeUriValid(endpoint)) {
+        this.error('Provided endpoint seems to be incorrect!', { exit: ExitCodes.InvalidInput })
+      }
+      newEndpoint = endpoint
+    } else {
+      // Interactive mode: prompt the user for the endpoint (may resolve to null = "none")
+      newEndpoint = await this.promptForQueryNodeUri()
+    }
+    // Persist the resolved value, NOT the raw `endpoint` arg - the arg is undefined in
+    // interactive mode and persisting it would discard the user's prompted choice
+    await this.setPreservedState({ queryNodeUri: newEndpoint })
+    this.log(
+      chalk.greenBright('Query node endpoint successfully changed! New endpoint: ') + chalk.magentaBright(newEndpoint)
+    )
+  }
+}

+ 3 - 6
cli/src/commands/api/setUri.ts

@@ -5,6 +5,8 @@ import ExitCodes from '../../ExitCodes'
 type ApiSetUriArgs = { uri: string }
 
 export default class ApiSetUri extends ApiCommandBase {
+  protected requiresApiConnection = false
+
   static description = 'Set api WS provider uri'
   static args = [
     {
@@ -14,12 +16,7 @@ export default class ApiSetUri extends ApiCommandBase {
     },
   ]
 
-  async init() {
-    // Pass "skipConnection" arg to prevent command from exiting if current api uri is invalid
-    await super.init(true)
-  }
-
-  async run() {
+  async run(): Promise<void> {
     const args: ApiSetUriArgs = this.parse(ApiSetUri).args as ApiSetUriArgs
     let newUri = ''
     if (args.uri) {

+ 3 - 12
cli/src/commands/content/channel.ts

@@ -19,23 +19,14 @@ export default class ChannelCommand extends ContentDirectoryCommandBase {
         'ID': channelId.toString(),
         'Owner': JSON.stringify(channel.owner.toJSON()),
         'IsCensored': channel.is_censored.toString(),
-        'RewardAccount': channel.reward_account ? channel.reward_account.toString() : 'NONE',
+        'RewardAccount': channel.reward_account.unwrapOr('NONE').toString(),
+        'DeletionPrizeAccount': channel.deletion_prize_source_account_id.toString(),
       })
 
       displayHeader(`Media`)
 
       displayCollapsedRow({
-        'NumberOfVideos': channel.videos.length,
-        'NumberOfPlaylists': channel.playlists.length,
-        'NumberOfSeries': channel.series.length,
-      })
-
-      displayHeader(`MediaData`)
-
-      displayCollapsedRow({
-        'Videos': JSON.stringify(channel.videos.toJSON()),
-        'Playlists': JSON.stringify(channel.playlists.toJSON()),
-        'Series': JSON.stringify(channel.series.toJSON()),
+        'NumberOfVideos': channel.num_videos.toNumber(),
       })
     } else {
       this.error(`Channel not found by channel id: "${channelId}"!`)

+ 31 - 22
cli/src/commands/content/createChannel.ts

@@ -1,13 +1,14 @@
 import { getInputJson } from '../../helpers/InputOutput'
 import { ChannelInputParameters } from '../../Types'
-import { metadataToBytes, channelMetadataFromInput } from '../../helpers/serialization'
+import { asValidatedMetadata, metadataToBytes } from '../../helpers/serialization'
 import { flags } from '@oclif/command'
-import { CreateInterface } from '@joystream/types'
+import { createTypeFromConstructor } from '@joystream/types'
 import { ChannelCreationParameters } from '@joystream/types/content'
-import { ChannelInputSchema } from '../../json-schemas/ContentDirectory'
+import { ChannelInputSchema } from '../../schemas/ContentDirectory'
 import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
 import UploadCommandBase from '../../base/UploadCommandBase'
 import chalk from 'chalk'
+import { ChannelMetadata } from '@joystream/metadata-protobuf'
 
 export default class CreateChannelCommand extends UploadCommandBase {
   static description = 'Create channel inside content directory.'
@@ -29,30 +30,28 @@ export default class CreateChannelCommand extends UploadCommandBase {
     }
     const account = await this.getRequiredSelectedAccount()
     const actor = await this.getActor(context)
+    const memberId = await this.getRequiredMemberId(true)
     await this.requestAccountDecoding(account)
 
     const channelInput = await getInputJson<ChannelInputParameters>(input, ChannelInputSchema)
+    const meta = asValidatedMetadata(ChannelMetadata, channelInput)
 
-    const meta = channelMetadataFromInput(channelInput)
     const { coverPhotoPath, avatarPhotoPath } = channelInput
     const assetsPaths = [coverPhotoPath, avatarPhotoPath].filter((v) => v !== undefined) as string[]
-    const inputAssets = await this.prepareInputAssets(assetsPaths, input)
-    const assets = inputAssets.map(({ parameters }) => ({ Upload: parameters }))
+    const resolvedAssets = await this.resolveAndValidateAssets(assetsPaths, input)
     // Set assets indexes in the metadata
-    if (coverPhotoPath) {
-      meta.setCoverPhoto(0)
-    }
-    if (avatarPhotoPath) {
-      meta.setAvatarPhoto(coverPhotoPath ? 1 : 0)
-    }
+    const [coverPhotoIndex, avatarPhotoIndex] = this.assetsIndexes([coverPhotoPath, avatarPhotoPath], assetsPaths)
+    meta.coverPhoto = coverPhotoIndex
+    meta.avatarPhoto = avatarPhotoIndex
 
-    const channelCreationParameters: CreateInterface<ChannelCreationParameters> = {
+    // Prepare and send the extrinsic
+    const assets = await this.prepareAssetsForExtrinsic(resolvedAssets)
+    const channelCreationParameters = createTypeFromConstructor(ChannelCreationParameters, {
       assets,
-      meta: metadataToBytes(meta),
-      reward_account: channelInput.rewardAccount,
-    }
+      meta: metadataToBytes(ChannelMetadata, meta),
+    })
 
-    this.jsonPrettyPrint(JSON.stringify({ assets, metadata: meta.toObject() }))
+    this.jsonPrettyPrint(JSON.stringify({ assets: assets?.toJSON(), metadata: meta }))
 
     await this.requireConfirmation('Do you confirm the provided input?', true)
 
@@ -60,11 +59,21 @@ export default class CreateChannelCommand extends UploadCommandBase {
       actor,
       channelCreationParameters,
     ])
-    if (result) {
-      const event = this.findEvent(result, 'content', 'ChannelCreated')
-      this.log(chalk.green(`Channel with id ${chalk.cyanBright(event?.data[1].toString())} successfully created!`))
-    }
 
-    await this.uploadAssets(inputAssets, input)
+    const channelCreatedEvent = this.findEvent(result, 'content', 'ChannelCreated')
+    const channelId = channelCreatedEvent!.data[1]
+    this.log(chalk.green(`Channel with id ${chalk.cyanBright(channelId.toString())} successfully created!`))
+
+    const dataObjectsUploadedEvent = this.findEvent(result, 'storage', 'DataObjectsUploaded')
+    if (dataObjectsUploadedEvent) {
+      const [objectIds] = dataObjectsUploadedEvent.data
+      await this.uploadAssets(
+        account,
+        memberId,
+        `dynamic:channel:${channelId.toString()}`,
+        objectIds.map((id, index) => ({ dataObjectId: id, path: resolvedAssets[index].path })),
+        input
+      )
+    }
   }
 }

+ 5 - 5
cli/src/commands/content/createChannelCategory.ts

@@ -1,12 +1,13 @@
 import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
 import { getInputJson } from '../../helpers/InputOutput'
 import { ChannelCategoryInputParameters } from '../../Types'
-import { channelCategoryMetadataFromInput, metadataToBytes } from '../../helpers/serialization'
+import { asValidatedMetadata, metadataToBytes } from '../../helpers/serialization'
 import { flags } from '@oclif/command'
 import { CreateInterface } from '@joystream/types'
 import { ChannelCategoryCreationParameters } from '@joystream/types/content'
-import { ChannelCategoryInputSchema } from '../../json-schemas/ContentDirectory'
+import { ChannelCategoryInputSchema } from '../../schemas/ContentDirectory'
 import chalk from 'chalk'
+import { ChannelCategoryMetadata } from '@joystream/metadata-protobuf'
 
 export default class CreateChannelCategoryCommand extends ContentDirectoryCommandBase {
   static description = 'Create channel category inside content directory.'
@@ -28,11 +29,10 @@ export default class CreateChannelCategoryCommand extends ContentDirectoryComman
     const actor = context ? await this.getActor(context) : await this.getCategoryManagementActor()
 
     const channelCategoryInput = await getInputJson<ChannelCategoryInputParameters>(input, ChannelCategoryInputSchema)
-
-    const meta = channelCategoryMetadataFromInput(channelCategoryInput)
+    const meta = asValidatedMetadata(ChannelCategoryMetadata, channelCategoryInput)
 
     const channelCategoryCreationParameters: CreateInterface<ChannelCategoryCreationParameters> = {
-      meta: metadataToBytes(meta),
+      meta: metadataToBytes(ChannelCategoryMetadata, meta),
     }
 
     this.jsonPrettyPrint(JSON.stringify(channelCategoryInput))

+ 51 - 39
cli/src/commands/content/createVideo.ts

@@ -1,12 +1,13 @@
 import UploadCommandBase from '../../base/UploadCommandBase'
 import { getInputJson } from '../../helpers/InputOutput'
-import { videoMetadataFromInput, metadataToBytes } from '../../helpers/serialization'
+import { asValidatedMetadata, metadataToBytes } from '../../helpers/serialization'
 import { VideoInputParameters, VideoFileMetadata } from '../../Types'
-import { CreateInterface } from '@joystream/types'
+import { createTypeFromConstructor } from '@joystream/types'
 import { flags } from '@oclif/command'
 import { VideoCreationParameters } from '@joystream/types/content'
-import { MediaType, VideoMetadata } from '@joystream/content-metadata-protobuf'
-import { VideoInputSchema } from '../../json-schemas/ContentDirectory'
+import { IVideoMetadata, VideoMetadata } from '@joystream/metadata-protobuf'
+import { VideoInputSchema } from '../../schemas/ContentDirectory'
+import { integrateMeta } from '@joystream/metadata-protobuf/utils'
 import chalk from 'chalk'
 
 export default class CreateVideoCommand extends UploadCommandBase {
@@ -24,17 +25,19 @@ export default class CreateVideoCommand extends UploadCommandBase {
     }),
   }
 
-  setVideoMetadataDefaults(metadata: VideoMetadata, videoFileMetadata: VideoFileMetadata) {
-    const metaObj = metadata.toObject()
-    metadata.setDuration((metaObj.duration || videoFileMetadata.duration) as number)
-    metadata.setMediaPixelWidth((metaObj.mediaPixelWidth || videoFileMetadata.width) as number)
-    metadata.setMediaPixelHeight((metaObj.mediaPixelHeight || videoFileMetadata.height) as number)
-
-    const fileMediaType = new MediaType()
-    fileMediaType.setCodecName(videoFileMetadata.codecName as string)
-    fileMediaType.setContainer(videoFileMetadata.container)
-    fileMediaType.setMimeMediaType(videoFileMetadata.mimeType)
-    metadata.setMediaType(metadata.getMediaType() || fileMediaType)
+  setVideoMetadataDefaults(metadata: IVideoMetadata, videoFileMetadata: VideoFileMetadata): void {
+    const videoMetaToIntegrate = {
+      duration: videoFileMetadata.duration,
+      mediaPixelWidth: videoFileMetadata.width,
+      mediaPixelHeight: videoFileMetadata.height,
+    }
+    const mediaTypeMetaToIntegrate = {
+      codecName: videoFileMetadata.codecName,
+      container: videoFileMetadata.container,
+      mimeMediaType: videoFileMetadata.mimeType,
+    }
+    integrateMeta(metadata, videoMetaToIntegrate, ['duration', 'mediaPixelWidth', 'mediaPixelHeight'])
+    integrateMeta(metadata.mediaType || {}, mediaTypeMetaToIntegrate, ['codecName', 'container', 'mimeMediaType'])
   }
 
   async run() {
@@ -44,38 +47,37 @@ export default class CreateVideoCommand extends UploadCommandBase {
     const account = await this.getRequiredSelectedAccount()
     const channel = await this.getApi().channelById(channelId)
     const actor = await this.getChannelOwnerActor(channel)
+    const memberId = await this.getRequiredMemberId(true)
     await this.requestAccountDecoding(account)
 
     // Get input from file
     const videoCreationParametersInput = await getInputJson<VideoInputParameters>(input, VideoInputSchema)
-
-    const meta = videoMetadataFromInput(videoCreationParametersInput)
+    const meta = asValidatedMetadata(VideoMetadata, videoCreationParametersInput)
 
     // Assets
     const { videoPath, thumbnailPhotoPath } = videoCreationParametersInput
     const assetsPaths = [videoPath, thumbnailPhotoPath].filter((a) => a !== undefined) as string[]
-    const inputAssets = await this.prepareInputAssets(assetsPaths, input)
-    const assets = inputAssets.map(({ parameters }) => ({ Upload: parameters }))
+    const resolvedAssets = await this.resolveAndValidateAssets(assetsPaths, input)
     // Set assets indexes in the metadata
-    if (videoPath) {
-      meta.setVideo(0)
-    }
-    if (thumbnailPhotoPath) {
-      meta.setThumbnailPhoto(videoPath ? 1 : 0)
-    }
+    const [videoIndex, thumbnailPhotoIndex] = this.assetsIndexes([videoPath, thumbnailPhotoPath], assetsPaths)
+    meta.video = videoIndex
+    meta.thumbnailPhoto = thumbnailPhotoIndex
 
     // Try to get video file metadata
-    const videoFileMetadata = await this.getVideoFileMetadata(inputAssets[0].path)
-    this.log('Video media file parameters established:', videoFileMetadata)
-    this.setVideoMetadataDefaults(meta, videoFileMetadata)
+    if (videoIndex !== undefined) {
+      const videoFileMetadata = await this.getVideoFileMetadata(resolvedAssets[videoIndex].path)
+      this.log('Video media file parameters established:', videoFileMetadata)
+      this.setVideoMetadataDefaults(meta, videoFileMetadata)
+    }
 
-    // Create final extrinsic params and send the extrinsic
-    const videoCreationParameters: CreateInterface<VideoCreationParameters> = {
+    // Prepare and send the extrinsic
+    const assets = await this.prepareAssetsForExtrinsic(resolvedAssets)
+    const videoCreationParameters = createTypeFromConstructor(VideoCreationParameters, {
       assets,
-      meta: metadataToBytes(meta),
-    }
+      meta: metadataToBytes(VideoMetadata, meta),
+    })
 
-    this.jsonPrettyPrint(JSON.stringify({ assets, metadata: meta.toObject() }))
+    this.jsonPrettyPrint(JSON.stringify({ assets: assets?.toJSON(), metadata: meta }))
 
     await this.requireConfirmation('Do you confirm the provided input?', true)
 
@@ -84,12 +86,22 @@ export default class CreateVideoCommand extends UploadCommandBase {
       channelId,
       videoCreationParameters,
     ])
-    if (result) {
-      const event = this.findEvent(result, 'content', 'VideoCreated')
-      this.log(chalk.green(`Video with id ${chalk.cyanBright(event?.data[2].toString())} successfully created!`))
-    }
 
-    // Upload assets
-    await this.uploadAssets(inputAssets, input)
+    const videoCreatedEvent = this.findEvent(result, 'content', 'VideoCreated')
+    this.log(
+      chalk.green(`Video with id ${chalk.cyanBright(videoCreatedEvent?.data[2].toString())} successfully created!`)
+    )
+
+    const dataObjectsUploadedEvent = this.findEvent(result, 'storage', 'DataObjectsUploaded')
+    if (dataObjectsUploadedEvent) {
+      const [objectIds] = dataObjectsUploadedEvent.data
+      await this.uploadAssets(
+        account,
+        memberId,
+        `dynamic:channel:${channelId.toString()}`,
+        objectIds.map((id, index) => ({ dataObjectId: id, path: resolvedAssets[index].path })),
+        input
+      )
+    }
   }
 }

+ 5 - 5
cli/src/commands/content/createVideoCategory.ts

@@ -1,12 +1,13 @@
 import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
 import { getInputJson } from '../../helpers/InputOutput'
 import { VideoCategoryInputParameters } from '../../Types'
-import { metadataToBytes, videoCategoryMetadataFromInput } from '../../helpers/serialization'
+import { asValidatedMetadata, metadataToBytes } from '../../helpers/serialization'
 import { flags } from '@oclif/command'
 import { CreateInterface } from '@joystream/types'
 import { VideoCategoryCreationParameters } from '@joystream/types/content'
-import { VideoCategoryInputSchema } from '../../json-schemas/ContentDirectory'
+import { VideoCategoryInputSchema } from '../../schemas/ContentDirectory'
 import chalk from 'chalk'
+import { VideoCategoryMetadata } from '@joystream/metadata-protobuf'
 
 export default class CreateVideoCategoryCommand extends ContentDirectoryCommandBase {
   static description = 'Create video category inside content directory.'
@@ -28,11 +29,10 @@ export default class CreateVideoCategoryCommand extends ContentDirectoryCommandB
     const actor = context ? await this.getActor(context) : await this.getCategoryManagementActor()
 
     const videoCategoryInput = await getInputJson<VideoCategoryInputParameters>(input, VideoCategoryInputSchema)
-
-    const meta = videoCategoryMetadataFromInput(videoCategoryInput)
+    const meta = asValidatedMetadata(VideoCategoryMetadata, videoCategoryInput)
 
     const videoCategoryCreationParameters: CreateInterface<VideoCategoryCreationParameters> = {
-      meta: metadataToBytes(meta),
+      meta: metadataToBytes(VideoCategoryMetadata, meta),
     }
 
     this.jsonPrettyPrint(JSON.stringify(videoCategoryInput))

+ 1 - 1
cli/src/commands/content/curatorGroup.ts

@@ -17,7 +17,7 @@ export default class CuratorGroupCommand extends ContentDirectoryCommandBase {
     const { id } = this.parse(CuratorGroupCommand).args
     const group = await this.getCuratorGroup(id)
     const members = (await this.getApi().groupMembers(WorkingGroups.Curators)).filter((curator) =>
-      group.curators.toArray().some((groupCurator) => groupCurator.eq(curator.workerId))
+      Array.from(group.curators).some((groupCurator) => groupCurator.eq(curator.workerId))
     )
 
     displayCollapsedRow({

+ 1 - 1
cli/src/commands/content/curatorGroups.ts

@@ -13,7 +13,7 @@ export default class CuratorGroupsCommand extends ContentDirectoryCommandBase {
         groups.map(([id, group]) => ({
           'ID': id.toString(),
           'Status': group.active.valueOf() ? 'Active' : 'Inactive',
-          'Members': group.curators.toArray().length,
+          'Members': Array.from(group.curators).length,
         })),
         5
       )

+ 103 - 0
cli/src/commands/content/deleteChannel.ts

@@ -0,0 +1,103 @@
+import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
+import { flags } from '@oclif/command'
+import chalk from 'chalk'
+import { createTypeFromConstructor } from '@joystream/types'
+import { BagId } from '@joystream/types/storage'
+import ExitCodes from '../../ExitCodes'
+import { formatBalance } from '@polkadot/util'
+import BN from 'bn.js'
+
+export default class DeleteChannelCommand extends ContentDirectoryCommandBase {
+  static description = 'Delete the channel and optionally all associated data objects.'
+
+  static flags = {
+    channelId: flags.integer({
+      char: 'c',
+      required: true,
+      description: 'ID of the Channel',
+    }),
+    force: flags.boolean({
+      char: 'f',
+      default: false,
+      description: 'Force-remove all associated channel data objects',
+    }),
+  }
+
+  async getDataObjectsInfoFromQueryNode(channelId: number): Promise<[string, BN][]> {
+    const dataObjects = await this.getQNApi().dataObjectsByBagId(`dynamic:channel:${channelId}`)
+
+    if (dataObjects.length) {
+      this.log('Following data objects are still associated with the channel:')
+      dataObjects.forEach((o) => {
+        let parentStr = ''
+        if ('video' in o.type && o.type.video) {
+          parentStr = ` (video: ${o.type.video.id})`
+        }
+        this.log(`- ${o.id} - ${o.type.__typename}${parentStr}`)
+      })
+    }
+
+    return dataObjects.map((o) => [o.id, new BN(o.deletionPrize)])
+  }
+
+  async getDataObjectsInfoFromChain(channelId: number): Promise<[string, BN][]> {
+    const dataObjects = await this.getApi().dataObjectsInBag(
+      createTypeFromConstructor(BagId, { Dynamic: { Channel: channelId } })
+    )
+
+    if (dataObjects.length) {
+      const dataObjectIds = dataObjects.map(([id]) => id.toString())
+      this.log(`Following data objects are still associated with the channel: ${dataObjectIds.join(', ')}`)
+    }
+
+    return dataObjects.map(([id, o]) => [id.toString(), o.deletion_prize])
+  }
+
+  async run(): Promise<void> {
+    const {
+      flags: { channelId, force },
+    } = this.parse(DeleteChannelCommand)
+    // Context
+    const account = await this.getRequiredSelectedAccount()
+    const channel = await this.getApi().channelById(channelId)
+    const actor = await this.getChannelOwnerActor(channel)
+    await this.requestAccountDecoding(account)
+
+    if (channel.num_videos.toNumber()) {
+      this.error(
+        `This channel still has ${channel.num_videos.toNumber()} associated video(s)!\n` +
+          `Delete the videos first using ${chalk.magentaBright('content:deleteVideo')} command`
+      )
+    }
+
+    const dataObjectsInfo = this.isQueryNodeUriSet()
+      ? await this.getDataObjectsInfoFromQueryNode(channelId)
+      : await this.getDataObjectsInfoFromChain(channelId)
+
+    if (dataObjectsInfo.length) {
+      if (!force) {
+        this.error(`Cannot remove associated data objects unless ${chalk.magentaBright('--force')} flag is used`, {
+          exit: ExitCodes.InvalidInput,
+        })
+      }
+      const deletionPrize = dataObjectsInfo.reduce((sum, [, prize]) => sum.add(prize), new BN(0))
+      this.log(
+        `Data objects deletion prize of ${chalk.cyanBright(
+          formatBalance(deletionPrize)
+        )} will be transferred to ${chalk.magentaBright(channel.deletion_prize_source_account_id.toString())}`
+      )
+    }
+
+    await this.requireConfirmation(
+      `Are you sure you want to remove channel ${chalk.magentaBright(channelId.toString())}${
+        force ? ' and all associated data objects' : ''
+      }?`
+    )
+
+    await this.sendAndFollowNamedTx(account, 'content', 'deleteChannel', [
+      actor,
+      channelId,
+      force ? dataObjectsInfo.length : 0,
+    ])
+  }
+}

+ 81 - 0
cli/src/commands/content/deleteVideo.ts

@@ -0,0 +1,81 @@
+import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
+import { flags } from '@oclif/command'
+import BN from 'bn.js'
+import chalk from 'chalk'
+import { formatBalance } from '@polkadot/util'
+import { createType } from '@joystream/types'
+import ExitCodes from '../../ExitCodes'
+
+export default class DeleteVideoCommand extends ContentDirectoryCommandBase {
+  static description = 'Delete the video and optionally all associated data objects.'
+
+  protected requiresQueryNode = true
+
+  static flags = {
+    videoId: flags.integer({
+      char: 'v',
+      required: true,
+      description: 'ID of the Video',
+    }),
+    force: flags.boolean({
+      char: 'f',
+      default: false,
+      description: 'Force-remove all associated video data objects',
+    }),
+  }
+
+  async getDataObjectsInfo(videoId: number): Promise<[string, BN][]> {
+    const dataObjects = await this.getQNApi().dataObjectsByVideoId(videoId.toString())
+
+    if (dataObjects.length) {
+      this.log('Following data objects are still associated with the video:')
+      dataObjects.forEach((o) => {
+        this.log(`${o.id} - ${o.type.__typename}`)
+      })
+    }
+
+    return dataObjects.map((o) => [o.id, new BN(o.deletionPrize)])
+  }
+
+  async run(): Promise<void> {
+    const {
+      flags: { videoId, force },
+    } = this.parse(DeleteVideoCommand)
+    // Context
+    const account = await this.getRequiredSelectedAccount()
+    const video = await this.getApi().videoById(videoId)
+    const channel = await this.getApi().channelById(video.in_channel.toNumber())
+    const actor = await this.getChannelOwnerActor(channel)
+    await this.requestAccountDecoding(account)
+
+    const dataObjectsInfo = await this.getDataObjectsInfo(videoId)
+    if (dataObjectsInfo.length) {
+      if (!force) {
+        this.error(`Cannot remove associated data objects unless ${chalk.magentaBright('--force')} flag is used`, {
+          exit: ExitCodes.InvalidInput,
+        })
+      }
+      const deletionPrize = dataObjectsInfo.reduce((sum, [, prize]) => sum.add(prize), new BN(0))
+      this.log(
+        `Data objects deletion prize of ${chalk.cyanBright(
+          formatBalance(deletionPrize)
+        )} will be transferred to ${chalk.magentaBright(channel.deletion_prize_source_account_id.toString())}`
+      )
+    }
+
+    await this.requireConfirmation(
+      `Are you sure you want to remove video ${chalk.magentaBright(videoId)}${
+        force ? ' and all associated data objects' : ''
+      }?`
+    )
+
+    await this.sendAndFollowNamedTx(account, 'content', 'deleteVideo', [
+      actor,
+      videoId,
+      createType(
+        'BTreeSet<DataObjectId>',
+        dataObjectsInfo.map(([id]) => id)
+      ),
+    ])
+  }
+}

+ 41 - 0
cli/src/commands/content/removeChannelAssets.ts

@@ -0,0 +1,41 @@
+import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
+import { flags } from '@oclif/command'
+import { createType } from '@joystream/types'
+
+export default class RemoveChannelAssetsCommand extends ContentDirectoryCommandBase {
+  static description = 'Remove data objects associated with the channel or any of its videos.'
+
+  static flags = {
+    channelId: flags.integer({
+      char: 'c',
+      required: true,
+      description: 'ID of the Channel',
+    }),
+    objectId: flags.integer({
+      char: 'o',
+      required: true,
+      multiple: true,
+      description: 'ID of an object to remove',
+    }),
+  }
+
+  async run(): Promise<void> {
+    const {
+      flags: { channelId, objectId: objectIds },
+    } = this.parse(RemoveChannelAssetsCommand)
+    // Context
+    const account = await this.getRequiredSelectedAccount()
+    const channel = await this.getApi().channelById(channelId)
+    const actor = await this.getChannelOwnerActor(channel)
+    await this.requestAccountDecoding(account)
+
+    this.jsonPrettyPrint(JSON.stringify({ channelId, assetsToRemove: objectIds }))
+    await this.requireConfirmation('Do you confirm the provided input?', true)
+
+    await this.sendAndFollowNamedTx(account, 'content', 'updateChannel', [
+      actor,
+      channelId,
+      { assets_to_remove: createType('BTreeSet<DataObjectId>', objectIds) },
+    ])
+  }
+}

+ 1 - 1
cli/src/commands/content/removeCuratorFromGroup.ts

@@ -27,7 +27,7 @@ export default class RemoveCuratorFromGroupCommand extends ContentDirectoryComma
     }
 
     const group = await this.getCuratorGroup(groupId)
-    const groupCuratorIds = group.curators.toArray().map((id) => id.toNumber())
+    const groupCuratorIds = Array.from(group.curators).map((id) => id.toNumber())
 
     if (curatorId === undefined) {
       curatorId = await this.promptForCurator('Choose a Curator to remove', groupCuratorIds)

+ 8 - 6
cli/src/commands/content/reuploadAssets.ts

@@ -1,9 +1,9 @@
 import UploadCommandBase from '../../base/UploadCommandBase'
 import { getInputJson } from '../../helpers/InputOutput'
-import AssetsSchema from '../../json-schemas/Assets.schema.json'
-import { Assets as AssetsInput } from '../../json-schemas/typings/Assets.schema'
+import AssetsSchema from '../../schemas/json/Assets.schema.json'
+import { Assets as AssetsInput } from '../../schemas/typings/Assets.schema'
 import { flags } from '@oclif/command'
-import { ContentId } from '@joystream/types/storage'
+import BN from 'bn.js'
 
 export default class ReuploadVideoAssetsCommand extends UploadCommandBase {
   static description = 'Allows reuploading assets that were not successfully uploaded during channel/video creation'
@@ -21,16 +21,18 @@ export default class ReuploadVideoAssetsCommand extends UploadCommandBase {
 
     // Get context
     const account = await this.getRequiredSelectedAccount()
+    const memberId = await this.getRequiredMemberId()
     await this.requestAccountDecoding(account)
 
     // Get input from file
     const inputData = await getInputJson<AssetsInput>(input, AssetsSchema)
-    const inputAssets = inputData.map(({ contentId, path }) => ({
-      contentId: ContentId.decode(this.getTypesRegistry(), contentId),
+    const { bagId } = inputData
+    const inputAssets = inputData.assets.map(({ objectId, path }) => ({
+      dataObjectId: new BN(objectId),
       path,
     }))
 
     // Upload assets
-    await this.uploadAssets(inputAssets, input, '')
+    await this.uploadAssets(account, memberId, bagId, inputAssets, input, '')
   }
 }

+ 65 - 22
cli/src/commands/content/updateChannel.ts

@@ -1,12 +1,16 @@
 import { getInputJson } from '../../helpers/InputOutput'
-import { channelMetadataFromInput, metadataToBytes } from '../../helpers/serialization'
+import { asValidatedMetadata, metadataToBytes } from '../../helpers/serialization'
 import { ChannelInputParameters } from '../../Types'
 import { flags } from '@oclif/command'
 import UploadCommandBase from '../../base/UploadCommandBase'
-import { CreateInterface } from '@joystream/types'
+import { CreateInterface, createType } from '@joystream/types'
 import { ChannelUpdateParameters } from '@joystream/types/content'
-import { ChannelInputSchema } from '../../json-schemas/ContentDirectory'
-
+import { ChannelInputSchema } from '../../schemas/ContentDirectory'
+import { ChannelMetadata } from '@joystream/metadata-protobuf'
+import { DataObjectInfoFragment } from '../../graphql/generated/queries'
+import BN from 'bn.js'
+import { formatBalance } from '@polkadot/util'
+import chalk from 'chalk'
 export default class UpdateChannelCommand extends UploadCommandBase {
   static description = 'Update existing content directory channel.'
   static flags = {
@@ -38,6 +42,33 @@ export default class UpdateChannelCommand extends UploadCommandBase {
     }
   }
 
+  async getAssetsToRemove(
+    channelId: number,
+    coverPhotoIndex: number | undefined,
+    avatarPhotoIndex: number | undefined
+  ): Promise<string[]> {
+    let assetsToRemove: DataObjectInfoFragment[] = []
+    if (coverPhotoIndex !== undefined || avatarPhotoIndex !== undefined) {
+      const currentAssets = await this.getQNApi().dataObjectsByChannelId(channelId.toString())
+      const currentCovers = currentAssets.filter((a) => a.type.__typename === 'DataObjectTypeChannelCoverPhoto')
+      const currentAvatars = currentAssets.filter((a) => a.type.__typename === 'DataObjectTypeChannelAvatar')
+      if (currentCovers.length && coverPhotoIndex !== undefined) {
+        assetsToRemove = assetsToRemove.concat(currentCovers)
+      }
+      if (currentAvatars.length && avatarPhotoIndex !== undefined) {
+        assetsToRemove = assetsToRemove.concat(currentAvatars)
+      }
+      if (assetsToRemove.length) {
+        this.log(`\nData objects to be removed due to replacement:`)
+        assetsToRemove.forEach((a) => this.log(`- ${a.id} (${a.type.__typename})`))
+        const totalPrize = assetsToRemove.reduce((sum, { deletionPrize }) => sum.add(new BN(deletionPrize)), new BN(0))
+        this.log(`Total deletion prize: ${chalk.cyanBright(formatBalance(totalPrize))}\n`)
+      }
+    }
+
+    return assetsToRemove.map((a) => a.id)
+  }
+
   async run() {
     const {
       flags: { input },
@@ -45,43 +76,55 @@ export default class UpdateChannelCommand extends UploadCommandBase {
     } = this.parse(UpdateChannelCommand)
 
     // Context
-    const currentAccount = await this.getRequiredSelectedAccount()
+    const account = await this.getRequiredSelectedAccount()
     const channel = await this.getApi().channelById(channelId)
     const actor = await this.getChannelOwnerActor(channel)
-    await this.requestAccountDecoding(currentAccount)
+    const memberId = await this.getRequiredMemberId(true)
+    await this.requestAccountDecoding(account)
 
     const channelInput = await getInputJson<ChannelInputParameters>(input, ChannelInputSchema)
-
-    const meta = channelMetadataFromInput(channelInput)
+    const meta = asValidatedMetadata(ChannelMetadata, channelInput)
 
     const { coverPhotoPath, avatarPhotoPath, rewardAccount } = channelInput
     const inputPaths = [coverPhotoPath, avatarPhotoPath].filter((p) => p !== undefined) as string[]
-    const inputAssets = await this.prepareInputAssets(inputPaths, input)
-    const assets = inputAssets.map(({ parameters }) => ({ Upload: parameters }))
+    const resolvedAssets = await this.resolveAndValidateAssets(inputPaths, input)
     // Set assets indexes in the metadata
-    if (coverPhotoPath) {
-      meta.setCoverPhoto(0)
-    }
-    if (avatarPhotoPath) {
-      meta.setAvatarPhoto(coverPhotoPath ? 1 : 0)
-    }
+    const [coverPhotoIndex, avatarPhotoIndex] = this.assetsIndexes([coverPhotoPath, avatarPhotoPath], inputPaths)
+    // "undefined" values will be omitted when the metadata is encoded. It's not possible to "unset" an asset this way.
+    meta.coverPhoto = coverPhotoIndex
+    meta.avatarPhoto = avatarPhotoIndex
 
+    // Prepare and send the extrinsic
+    const assetsToUpload = await this.prepareAssetsForExtrinsic(resolvedAssets)
+    const assetsToRemove = await this.getAssetsToRemove(channelId, coverPhotoIndex, avatarPhotoIndex)
     const channelUpdateParameters: CreateInterface<ChannelUpdateParameters> = {
-      assets,
-      new_meta: metadataToBytes(meta),
+      assets_to_upload: assetsToUpload,
+      assets_to_remove: createType('BTreeSet<DataObjectId>', assetsToRemove),
+      new_meta: metadataToBytes(ChannelMetadata, meta),
       reward_account: this.parseRewardAccountInput(rewardAccount),
     }
 
-    this.jsonPrettyPrint(JSON.stringify({ assets, metadata: meta.toObject(), rewardAccount }))
+    this.jsonPrettyPrint(
+      JSON.stringify({ assetsToUpload: assetsToUpload?.toJSON(), assetsToRemove, metadata: meta, rewardAccount })
+    )
 
     await this.requireConfirmation('Do you confirm the provided input?', true)
 
-    await this.sendAndFollowNamedTx(currentAccount, 'content', 'updateChannel', [
+    const result = await this.sendAndFollowNamedTx(account, 'content', 'updateChannel', [
       actor,
       channelId,
       channelUpdateParameters,
     ])
-
-    await this.uploadAssets(inputAssets, input)
+    const dataObjectsUploadedEvent = this.findEvent(result, 'storage', 'DataObjectsUploaded')
+    if (dataObjectsUploadedEvent) {
+      const [objectIds] = dataObjectsUploadedEvent.data
+      await this.uploadAssets(
+        account,
+        memberId,
+        `dynamic:channel:${channelId.toString()}`,
+        objectIds.map((id, index) => ({ dataObjectId: id, path: resolvedAssets[index].path })),
+        input
+      )
+    }
   }
 }

+ 5 - 5
cli/src/commands/content/updateChannelCategory.ts

@@ -1,11 +1,12 @@
 import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
 import { getInputJson } from '../../helpers/InputOutput'
 import { ChannelCategoryInputParameters } from '../../Types'
-import { channelCategoryMetadataFromInput, metadataToBytes } from '../../helpers/serialization'
+import { asValidatedMetadata, metadataToBytes } from '../../helpers/serialization'
 import { CreateInterface } from '@joystream/types'
 import { ChannelCategoryUpdateParameters } from '@joystream/types/content'
 import { flags } from '@oclif/command'
-import { ChannelCategoryInputSchema } from '../../json-schemas/ContentDirectory'
+import { ChannelCategoryInputSchema } from '../../schemas/ContentDirectory'
+import { ChannelCategoryMetadata } from '@joystream/metadata-protobuf'
 export default class UpdateChannelCategoryCommand extends ContentDirectoryCommandBase {
   static description = 'Update channel category inside content directory.'
   static flags = {
@@ -36,11 +37,10 @@ export default class UpdateChannelCategoryCommand extends ContentDirectoryComman
     const actor = context ? await this.getActor(context) : await this.getCategoryManagementActor()
 
     const channelCategoryInput = await getInputJson<ChannelCategoryInputParameters>(input, ChannelCategoryInputSchema)
-
-    const meta = channelCategoryMetadataFromInput(channelCategoryInput)
+    const meta = asValidatedMetadata(ChannelCategoryMetadata, channelCategoryInput)
 
     const channelCategoryUpdateParameters: CreateInterface<ChannelCategoryUpdateParameters> = {
-      new_meta: metadataToBytes(meta),
+      new_meta: metadataToBytes(ChannelCategoryMetadata, meta),
     }
 
     this.jsonPrettyPrint(JSON.stringify(channelCategoryInput))

+ 3 - 1
cli/src/commands/content/updateChannelCensorshipStatus.ts

@@ -58,7 +58,9 @@ export default class UpdateChannelCensorshipStatusCommand extends ContentDirecto
     }
 
     if (rationale === undefined) {
-      rationale = await this.simplePrompt({ message: 'Please provide the rationale for updating the status' })
+      rationale = (await this.simplePrompt({
+        message: 'Please provide the rationale for updating the status',
+      })) as string
     }
 
     await this.sendAndFollowNamedTx(currentAccount, 'content', 'updateChannelCensorshipStatus', [

+ 69 - 20
cli/src/commands/content/updateVideo.ts

@@ -1,11 +1,16 @@
 import { getInputJson } from '../../helpers/InputOutput'
 import { VideoInputParameters } from '../../Types'
-import { metadataToBytes, videoMetadataFromInput } from '../../helpers/serialization'
+import { asValidatedMetadata, metadataToBytes } from '../../helpers/serialization'
 import UploadCommandBase from '../../base/UploadCommandBase'
 import { flags } from '@oclif/command'
-import { CreateInterface } from '@joystream/types'
+import { CreateInterface, createType } from '@joystream/types'
 import { VideoUpdateParameters } from '@joystream/types/content'
-import { VideoInputSchema } from '../../json-schemas/ContentDirectory'
+import { VideoInputSchema } from '../../schemas/ContentDirectory'
+import { VideoMetadata } from '@joystream/metadata-protobuf'
+import { DataObjectInfoFragment } from '../../graphql/generated/queries'
+import BN from 'bn.js'
+import { formatBalance } from '@polkadot/util'
+import chalk from 'chalk'
 
 export default class UpdateVideoCommand extends UploadCommandBase {
   static description = 'Update video under specific id.'
@@ -25,6 +30,33 @@ export default class UpdateVideoCommand extends UploadCommandBase {
     },
   ]
 
+  async getAssetsToRemove(
+    videoId: number,
+    videoIndex: number | undefined,
+    thumbnailIndex: number | undefined
+  ): Promise<string[]> {
+    let assetsToRemove: DataObjectInfoFragment[] = []
+    if (videoIndex !== undefined || thumbnailIndex !== undefined) {
+      const currentAssets = await this.getQNApi().dataObjectsByVideoId(videoId.toString())
+      const currentThumbs = currentAssets.filter((a) => a.type.__typename === 'DataObjectTypeVideoThumbnail')
+      const currentMedias = currentAssets.filter((a) => a.type.__typename === 'DataObjectTypeVideoMedia')
+      if (currentThumbs.length && thumbnailIndex !== undefined) {
+        assetsToRemove = assetsToRemove.concat(currentThumbs)
+      }
+      if (currentMedias.length && videoIndex !== undefined) {
+        assetsToRemove = assetsToRemove.concat(currentMedias)
+      }
+      if (assetsToRemove.length) {
+        this.log(`\nData objects to be removed due to replacement:`)
+        assetsToRemove.forEach((a) => this.log(`- ${a.id} (${a.type.__typename})`))
+        const totalPrize = assetsToRemove.reduce((sum, { deletionPrize }) => sum.add(new BN(deletionPrize)), new BN(0))
+        this.log(`Total deletion prize: ${chalk.cyanBright(formatBalance(totalPrize))}\n`)
+      }
+    }
+
+    return assetsToRemove.map((a) => a.id)
+  }
+
   async run() {
     const {
       flags: { input },
@@ -32,38 +64,55 @@ export default class UpdateVideoCommand extends UploadCommandBase {
     } = this.parse(UpdateVideoCommand)
 
     // Context
-    const currentAccount = await this.getRequiredSelectedAccount()
+    const account = await this.getRequiredSelectedAccount()
     const video = await this.getApi().videoById(videoId)
     const channel = await this.getApi().channelById(video.in_channel.toNumber())
     const actor = await this.getChannelOwnerActor(channel)
-    await this.requestAccountDecoding(currentAccount)
+    const memberId = await this.getRequiredMemberId(true)
+    await this.requestAccountDecoding(account)
 
     const videoInput = await getInputJson<VideoInputParameters>(input, VideoInputSchema)
+    const meta = asValidatedMetadata(VideoMetadata, videoInput)
 
-    const meta = videoMetadataFromInput(videoInput)
     const { videoPath, thumbnailPhotoPath } = videoInput
     const inputPaths = [videoPath, thumbnailPhotoPath].filter((p) => p !== undefined) as string[]
-    const inputAssets = await this.prepareInputAssets(inputPaths, input)
-    const assets = inputAssets.map(({ parameters }) => ({ Upload: parameters }))
+    const resolvedAssets = await this.resolveAndValidateAssets(inputPaths, input)
     // Set assets indexes in the metadata
-    if (videoPath) {
-      meta.setVideo(0)
-    }
-    if (thumbnailPhotoPath) {
-      meta.setThumbnailPhoto(videoPath ? 1 : 0)
-    }
+    const [videoIndex, thumbnailPhotoIndex] = this.assetsIndexes([videoPath, thumbnailPhotoPath], inputPaths)
+    // "undefined" values will be omitted when the metadata is encoded. It's not possible to "unset" an asset this way.
+    meta.video = videoIndex
+    meta.thumbnailPhoto = thumbnailPhotoIndex
 
+    // Prepare and send the extrinsic
+    const assetsToUpload = await this.prepareAssetsForExtrinsic(resolvedAssets)
+    const assetsToRemove = await this.getAssetsToRemove(videoId, videoIndex, thumbnailPhotoIndex)
     const videoUpdateParameters: CreateInterface<VideoUpdateParameters> = {
-      assets,
-      new_meta: metadataToBytes(meta),
+      assets_to_upload: assetsToUpload,
+      new_meta: metadataToBytes(VideoMetadata, meta),
+      assets_to_remove: createType('BTreeSet<DataObjectId>', assetsToRemove),
     }
 
-    this.jsonPrettyPrint(JSON.stringify({ assets, newMetadata: meta.toObject() }))
+    this.jsonPrettyPrint(
+      JSON.stringify({ assetsToUpload: assetsToUpload?.toJSON(), newMetadata: meta, assetsToRemove })
+    )
 
     await this.requireConfirmation('Do you confirm the provided input?', true)
 
-    await this.sendAndFollowNamedTx(currentAccount, 'content', 'updateVideo', [actor, videoId, videoUpdateParameters])
-
-    await this.uploadAssets(inputAssets, input)
+    const result = await this.sendAndFollowNamedTx(account, 'content', 'updateVideo', [
+      actor,
+      videoId,
+      videoUpdateParameters,
+    ])
+    const dataObjectsUploadedEvent = this.findEvent(result, 'storage', 'DataObjectsUploaded')
+    if (dataObjectsUploadedEvent) {
+      const [objectIds] = dataObjectsUploadedEvent.data
+      await this.uploadAssets(
+        account,
+        memberId,
+        `dynamic:channel:${video.in_channel.toString()}`,
+        objectIds.map((id, index) => ({ dataObjectId: id, path: resolvedAssets[index].path })),
+        input
+      )
+    }
   }
 }

+ 5 - 5
cli/src/commands/content/updateVideoCategory.ts

@@ -1,11 +1,12 @@
 import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
 import { getInputJson } from '../../helpers/InputOutput'
 import { VideoCategoryInputParameters } from '../../Types'
-import { metadataToBytes, videoCategoryMetadataFromInput } from '../../helpers/serialization'
+import { asValidatedMetadata, metadataToBytes } from '../../helpers/serialization'
 import { flags } from '@oclif/command'
 import { CreateInterface } from '@joystream/types'
 import { VideoCategoryUpdateParameters } from '@joystream/types/content'
-import { VideoCategoryInputSchema } from '../../json-schemas/ContentDirectory'
+import { VideoCategoryInputSchema } from '../../schemas/ContentDirectory'
+import { VideoCategoryMetadata } from '@joystream/metadata-protobuf'
 
 export default class UpdateVideoCategoryCommand extends ContentDirectoryCommandBase {
   static description = 'Update video category inside content directory.'
@@ -37,11 +38,10 @@ export default class UpdateVideoCategoryCommand extends ContentDirectoryCommandB
     const actor = context ? await this.getActor(context) : await this.getCategoryManagementActor()
 
     const videoCategoryInput = await getInputJson<VideoCategoryInputParameters>(input, VideoCategoryInputSchema)
-
-    const meta = videoCategoryMetadataFromInput(videoCategoryInput)
+    const meta = asValidatedMetadata(VideoCategoryMetadata, videoCategoryInput)
 
     const videoCategoryUpdateParameters: CreateInterface<VideoCategoryUpdateParameters> = {
-      new_meta: metadataToBytes(meta),
+      new_meta: metadataToBytes(VideoCategoryMetadata, meta),
     }
 
     this.jsonPrettyPrint(JSON.stringify(videoCategoryInput))

+ 3 - 1
cli/src/commands/content/updateVideoCensorshipStatus.ts

@@ -59,7 +59,9 @@ export default class UpdateVideoCensorshipStatusCommand extends ContentDirectory
     }
 
     if (rationale === undefined) {
-      rationale = await this.simplePrompt({ message: 'Please provide the rationale for updating the status' })
+      rationale = (await this.simplePrompt({
+        message: 'Please provide the rationale for updating the status',
+      })) as string
     }
 
     await this.sendAndFollowNamedTx(currentAccount, 'content', 'updateVideoCensorshipStatus', [

+ 1 - 1
cli/src/commands/content/video.ts

@@ -18,7 +18,7 @@ export default class VideoCommand extends ContentDirectoryCommandBase {
       displayCollapsedRow({
         'ID': videoId.toString(),
         'InChannel': aVideo.in_channel.toString(),
-        'InSeries': aVideo.in_series.toString(),
+        'InSeries': aVideo.in_series.unwrapOr('NONE').toString(),
         'IsCensored': aVideo.is_censored.toString(),
       })
     } else {

+ 3 - 5
cli/src/commands/content/videos.ts

@@ -16,11 +16,9 @@ export default class VideosCommand extends ContentDirectoryCommandBase {
   async run() {
     const { channelId } = this.parse(VideosCommand).args
 
-    let videos: [VideoId, Video][]
+    let videos: [VideoId, Video][] = await this.getApi().availableVideos()
     if (channelId) {
-      videos = await this.getApi().videosByChannelId(channelId)
-    } else {
-      videos = await this.getApi().availableVideos()
+      videos = videos.filter(([, v]) => v.in_channel.eqn(parseInt(channelId)))
     }
 
     if (videos.length > 0) {
@@ -28,7 +26,7 @@ export default class VideosCommand extends ContentDirectoryCommandBase {
         videos.map(([id, v]) => ({
           'ID': id.toString(),
           'InChannel': v.in_channel.toString(),
-          'InSeries': v.in_series.toString(),
+          'InSeries': v.in_series.unwrapOr('NONE').toString(),
           'IsCensored': v.is_censored.toString(),
         })),
         3

+ 1 - 1
cli/src/commands/council/info.ts

@@ -1,7 +1,7 @@
 import { ElectionStage } from '@joystream/types/council'
 import { formatNumber, formatBalance } from '@polkadot/util'
 import { BlockNumber } from '@polkadot/types/interfaces'
-import { CouncilInfoObj, NameValueObj } from '../../Types'
+import { CouncilInfo as CouncilInfoObj, NameValueObj } from '../../Types'
 import { displayHeader, displayNameValueTable } from '../../helpers/display'
 import ApiCommandBase from '../../base/ApiCommandBase'
 

+ 22 - 16
cli/src/commands/working-groups/createOpening.ts

@@ -6,14 +6,17 @@ import HRTSchema from '@joystream/types/hiring/schemas/role.schema.json'
 import { GenericJoyStreamRoleSchema as HRTJson } from '@joystream/types/hiring/schemas/role.schema.typings'
 import { JsonSchemaPrompter } from '../../helpers/JsonSchemaPrompt'
 import { JSONSchema } from '@apidevtools/json-schema-ref-parser'
-import WGOpeningSchema from '../../json-schemas/WorkingGroupOpening.schema.json'
-import { WorkingGroupOpening as WGOpeningJson } from '../../json-schemas/typings/WorkingGroupOpening.schema'
+import WGOpeningSchema from '../../schemas/json/WorkingGroupOpening.schema.json'
+import { WorkingGroupOpening as WGOpeningJson } from '../../schemas/typings/WorkingGroupOpening.schema'
 import _ from 'lodash'
 import { IOFlags, getInputJson, ensureOutputFileIsWriteable, saveOutputJsonToFile } from '../../helpers/InputOutput'
 import Ajv from 'ajv'
 import ExitCodes from '../../ExitCodes'
 import { flags } from '@oclif/command'
-import { createType } from '@joystream/types'
+import { CLIError } from '@oclif/errors'
+import { createTypeFromConstructor } from '@joystream/types'
+import { OpeningPolicyCommitment, OpeningType } from '@joystream/types/working-group'
+import { ActivateOpeningAt } from '@joystream/types/hiring'
 
 export default class WorkingGroupsCreateOpening extends WorkingGroupsCommandBase {
   static description = 'Create working group opening (requires lead access)'
@@ -76,10 +79,13 @@ export default class WorkingGroupsCreateOpening extends WorkingGroupsCommandBase
     }
   }
 
-  createTxParams(wgOpeningJson: WGOpeningJson, hrtJson: HRTJson) {
+  createTxParams(
+    wgOpeningJson: WGOpeningJson,
+    hrtJson: HRTJson
+  ): [ActivateOpeningAt, OpeningPolicyCommitment, string, OpeningType] {
     return [
-      wgOpeningJson.activateAt,
-      createType('OpeningPolicyCommitment', {
+      createTypeFromConstructor(ActivateOpeningAt, wgOpeningJson.activateAt),
+      createTypeFromConstructor(OpeningPolicyCommitment, {
         max_review_period_length: wgOpeningJson.maxReviewPeriodLength,
         application_rationing_policy: wgOpeningJson.maxActiveApplicants
           ? { max_active_applicants: wgOpeningJson.maxActiveApplicants }
@@ -100,7 +106,7 @@ export default class WorkingGroupsCreateOpening extends WorkingGroupsCommandBase
         exit_role_stake_unstaking_period: wgOpeningJson.leaveRoleUnstakingPeriod,
       }),
       JSON.stringify(hrtJson),
-      createType('OpeningType', 'Worker'),
+      createTypeFromConstructor(OpeningType, 'Worker'),
     ]
   }
 
@@ -210,17 +216,17 @@ export default class WorkingGroupsCreateOpening extends WorkingGroupsCommandBase
 
       // Send the tx
       this.log(chalk.magentaBright('Sending the extrinsic...'))
-      const txSuccess = await this.sendAndFollowTx(
-        account,
-        this.getOriginalApi().tx[apiModuleByGroup[this.group]].addOpening(...txParams),
-        true // warnOnly
-      )
-
-      // Display a success message on success or ask to try again on error
-      if (txSuccess) {
+      try {
+        await this.sendAndFollowTx(
+          account,
+          this.getOriginalApi().tx[apiModuleByGroup[this.group]].addOpening(...txParams)
+        )
         this.log(chalk.green('Opening successfully created!'))
         tryAgain = false
-      } else {
+      } catch (e) {
+        if (e instanceof CLIError) {
+          this.warn(e.message)
+        }
         tryAgain = await this.simplePrompt({ type: 'confirm', message: 'Try again with remembered input?' })
       }
     } while (tryAgain)

+ 2 - 1
cli/src/commands/working-groups/evictWorker.ts

@@ -3,6 +3,7 @@ import { apiModuleByGroup } from '../../Api'
 import { formatBalance } from '@polkadot/util'
 import chalk from 'chalk'
 import { createParamOptions } from '../../helpers/promptOptions'
+import { Bytes } from '@polkadot/types'
 
 export default class WorkingGroupsEvictWorker extends WorkingGroupsCommandBase {
   static description = 'Evicts given worker. Requires lead access.'
@@ -30,7 +31,7 @@ export default class WorkingGroupsEvictWorker extends WorkingGroupsCommandBase {
     const groupMember = await this.getWorkerForLeadAction(workerId)
 
     // TODO: Terminate worker text limits? (minMaxStr)
-    const rationale = await this.promptForParam('Bytes', createParamOptions('rationale'))
+    const rationale = (await this.promptForParam('Bytes', createParamOptions('rationale'))) as Bytes
     const shouldSlash = groupMember.stake
       ? await this.simplePrompt({
           message: `Should the worker stake (${formatBalance(groupMember.stake)}) be slashed?`,

+ 2 - 2
cli/src/commands/working-groups/fillOpening.ts

@@ -3,7 +3,7 @@ import { OpeningStatus } from '../../Types'
 import { apiModuleByGroup } from '../../Api'
 import chalk from 'chalk'
 import { createParamOptions } from '../../helpers/promptOptions'
-
+import { createType } from '@joystream/types'
 export default class WorkingGroupsFillOpening extends WorkingGroupsCommandBase {
   static description = "Allows filling working group opening that's currently in review. Requires lead access."
   static args = [
@@ -35,7 +35,7 @@ export default class WorkingGroupsFillOpening extends WorkingGroupsCommandBase {
 
     await this.sendAndFollowNamedTx(account, apiModuleByGroup[this.group], 'fillOpening', [
       openingId,
-      applicationIds,
+      createType('BTreeSet<ApplicationId>', applicationIds),
       rewardPolicyOpt,
     ])
 

+ 5 - 1
cli/src/commands/working-groups/leaveRole.ts

@@ -3,6 +3,7 @@ import { apiModuleByGroup } from '../../Api'
 import { minMaxStr } from '../../validators/common'
 import chalk from 'chalk'
 import { createParamOptions } from '../../helpers/promptOptions'
+import { Bytes } from '@polkadot/types'
 
 export default class WorkingGroupsLeaveRole extends WorkingGroupsCommandBase {
   static description = 'Leave the worker or lead role associated with currently selected account.'
@@ -17,7 +18,10 @@ export default class WorkingGroupsLeaveRole extends WorkingGroupsCommandBase {
 
     const constraint = await this.getApi().workerExitRationaleConstraint(this.group)
     const rationaleValidator = minMaxStr(constraint.min.toNumber(), constraint.max.toNumber())
-    const rationale = await this.promptForParam('Bytes', createParamOptions('rationale', undefined, rationaleValidator))
+    const rationale = (await this.promptForParam(
+      'Bytes',
+      createParamOptions('rationale', undefined, rationaleValidator)
+    )) as Bytes
 
     await this.requestAccountDecoding(account)
 

+ 3 - 2
cli/src/commands/working-groups/updateWorkerReward.ts

@@ -6,6 +6,7 @@ import { Reward } from '../../Types'
 import { positiveInt } from '../../validators/common'
 import { createParamOptions } from '../../helpers/promptOptions'
 import ExitCodes from '../../ExitCodes'
+import { BalanceOfMint } from '@joystream/types/mint'
 
 export default class WorkingGroupsUpdateWorkerReward extends WorkingGroupsCommandBase {
   static description = "Change given worker's reward (amount only). Requires lead access."
@@ -48,10 +49,10 @@ export default class WorkingGroupsUpdateWorkerReward extends WorkingGroupsComman
 
     console.log(chalk.magentaBright(`Current worker reward: ${this.formatReward(reward)}`))
 
-    const newRewardValue = await this.promptForParam(
+    const newRewardValue = (await this.promptForParam(
       'BalanceOfMint',
       createParamOptions('new_amount', undefined, positiveInt())
-    )
+    )) as BalanceOfMint
 
     await this.requestAccountDecoding(account)
 

+ 120 - 0
cli/src/graphql/generated/queries.ts

@@ -0,0 +1,120 @@
+import * as Types from './schema'
+
+import gql from 'graphql-tag'
+export type StorageNodeInfoFragment = {
+  id: string
+  operatorMetadata?: Types.Maybe<{ nodeEndpoint?: Types.Maybe<string> }>
+}
+
+export type GetStorageNodesInfoByBagIdQueryVariables = Types.Exact<{
+  bagId?: Types.Maybe<Types.Scalars['String']>
+}>
+
+export type GetStorageNodesInfoByBagIdQuery = { storageBuckets: Array<StorageNodeInfoFragment> }
+
+export type DataObjectInfoFragment = {
+  id: string
+  size: any
+  deletionPrize: any
+  type:
+    | { __typename: 'DataObjectTypeChannelAvatar'; channel?: Types.Maybe<{ id: string }> }
+    | { __typename: 'DataObjectTypeChannelCoverPhoto'; channel?: Types.Maybe<{ id: string }> }
+    | { __typename: 'DataObjectTypeVideoMedia'; video?: Types.Maybe<{ id: string }> }
+    | { __typename: 'DataObjectTypeVideoThumbnail'; video?: Types.Maybe<{ id: string }> }
+    | { __typename: 'DataObjectTypeUnknown' }
+}
+
+export type GetDataObjectsByBagIdQueryVariables = Types.Exact<{
+  bagId?: Types.Maybe<Types.Scalars['ID']>
+}>
+
+export type GetDataObjectsByBagIdQuery = { storageDataObjects: Array<DataObjectInfoFragment> }
+
+export type GetDataObjectsChannelIdQueryVariables = Types.Exact<{
+  channelId?: Types.Maybe<Types.Scalars['ID']>
+}>
+
+export type GetDataObjectsChannelIdQuery = { storageDataObjects: Array<DataObjectInfoFragment> }
+
+export type GetDataObjectsByVideoIdQueryVariables = Types.Exact<{
+  videoId?: Types.Maybe<Types.Scalars['ID']>
+}>
+
+export type GetDataObjectsByVideoIdQuery = { storageDataObjects: Array<DataObjectInfoFragment> }
+
+export const StorageNodeInfo = gql`
+  fragment StorageNodeInfo on StorageBucket {
+    id
+    operatorMetadata {
+      nodeEndpoint
+    }
+  }
+`
+export const DataObjectInfo = gql`
+  fragment DataObjectInfo on StorageDataObject {
+    id
+    size
+    deletionPrize
+    type {
+      __typename
+      ... on DataObjectTypeVideoMedia {
+        video {
+          id
+        }
+      }
+      ... on DataObjectTypeVideoThumbnail {
+        video {
+          id
+        }
+      }
+      ... on DataObjectTypeChannelAvatar {
+        channel {
+          id
+        }
+      }
+      ... on DataObjectTypeChannelCoverPhoto {
+        channel {
+          id
+        }
+      }
+    }
+  }
+`
+export const GetStorageNodesInfoByBagId = gql`
+  query getStorageNodesInfoByBagId($bagId: String) {
+    storageBuckets(
+      where: {
+        operatorStatus_json: { isTypeOf_eq: "StorageBucketOperatorStatusActive" }
+        bagAssignments_some: { storageBagId_eq: $bagId }
+        operatorMetadata: { nodeEndpoint_contains: "http" }
+      }
+    ) {
+      ...StorageNodeInfo
+    }
+  }
+  ${StorageNodeInfo}
+`
+export const GetDataObjectsByBagId = gql`
+  query getDataObjectsByBagId($bagId: ID) {
+    storageDataObjects(where: { storageBag: { id_eq: $bagId } }) {
+      ...DataObjectInfo
+    }
+  }
+  ${DataObjectInfo}
+`
+export const GetDataObjectsChannelId = gql`
+  query getDataObjectsChannelId($channelId: ID) {
+    storageDataObjects(where: { type_json: { channelId_eq: $channelId } }) {
+      ...DataObjectInfo
+    }
+  }
+  ${DataObjectInfo}
+`
+export const GetDataObjectsByVideoId = gql`
+  query getDataObjectsByVideoId($videoId: ID) {
+    storageDataObjects(where: { type_json: { videoId_eq: $videoId } }) {
+      ...DataObjectInfo
+    }
+  }
+  ${DataObjectInfo}
+`

+ 4515 - 0
cli/src/graphql/generated/schema.ts

@@ -0,0 +1,4515 @@
+export type Maybe<T> = T | null
+export type Exact<T extends { [key: string]: unknown }> = { [K in keyof T]: T[K] }
+export type MakeOptional<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]?: Maybe<T[SubKey]> }
+export type MakeMaybe<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]: Maybe<T[SubKey]> }
+/** All built-in and custom scalars, mapped to their actual values */
+export type Scalars = {
+  ID: string
+  String: string
+  Boolean: boolean
+  Int: number
+  Float: number
+  /** The javascript `Date` as string. Type represents date and time as the ISO Date string. */
+  DateTime: any
+  /** GraphQL representation of BigInt */
+  BigInt: any
+  /** The `JSONObject` scalar type represents JSON objects as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf). */
+  JSONObject: any
+}
+
+export type BaseGraphQlObject = {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+}
+
+export type BaseModel = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+}
+
+export type BaseModelUuid = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+}
+
+export type BaseWhereInput = {
+  id_eq?: Maybe<Scalars['String']>
+  id_in?: Maybe<Array<Scalars['String']>>
+  createdAt_eq?: Maybe<Scalars['String']>
+  createdAt_lt?: Maybe<Scalars['String']>
+  createdAt_lte?: Maybe<Scalars['String']>
+  createdAt_gt?: Maybe<Scalars['String']>
+  createdAt_gte?: Maybe<Scalars['String']>
+  createdById_eq?: Maybe<Scalars['String']>
+  updatedAt_eq?: Maybe<Scalars['String']>
+  updatedAt_lt?: Maybe<Scalars['String']>
+  updatedAt_lte?: Maybe<Scalars['String']>
+  updatedAt_gt?: Maybe<Scalars['String']>
+  updatedAt_gte?: Maybe<Scalars['String']>
+  updatedById_eq?: Maybe<Scalars['String']>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['String']>
+  deletedAt_lt?: Maybe<Scalars['String']>
+  deletedAt_lte?: Maybe<Scalars['String']>
+  deletedAt_gt?: Maybe<Scalars['String']>
+  deletedAt_gte?: Maybe<Scalars['String']>
+  deletedById_eq?: Maybe<Scalars['String']>
+}
+
+export type Channel = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  ownerMember?: Maybe<Membership>
+  ownerMemberId?: Maybe<Scalars['String']>
+  ownerCuratorGroup?: Maybe<CuratorGroup>
+  ownerCuratorGroupId?: Maybe<Scalars['String']>
+  category?: Maybe<ChannelCategory>
+  categoryId?: Maybe<Scalars['String']>
+  /** Reward account where revenue is sent if set. */
+  rewardAccount?: Maybe<Scalars['String']>
+  /** Destination account for the prize associated with channel deletion */
+  deletionPrizeDestAccount: Scalars['String']
+  /** The title of the Channel */
+  title?: Maybe<Scalars['String']>
+  /** The description of a Channel */
+  description?: Maybe<Scalars['String']>
+  coverPhoto?: Maybe<StorageDataObject>
+  coverPhotoId?: Maybe<Scalars['String']>
+  avatarPhoto?: Maybe<StorageDataObject>
+  avatarPhotoId?: Maybe<Scalars['String']>
+  /** Flag signaling whether a channel is public. */
+  isPublic?: Maybe<Scalars['Boolean']>
+  /** Flag signaling whether a channel is censored. */
+  isCensored: Scalars['Boolean']
+  language?: Maybe<Language>
+  languageId?: Maybe<Scalars['String']>
+  videos: Array<Video>
+  createdInBlock: Scalars['Int']
+}
+
+export type ChannelCategoriesByNameFtsOutput = {
+  item: ChannelCategoriesByNameSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type ChannelCategoriesByNameSearchResult = ChannelCategory
+
+/** Category of media channel */
+export type ChannelCategory = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** The name of the category */
+  name?: Maybe<Scalars['String']>
+  channels: Array<Channel>
+  createdInBlock: Scalars['Int']
+}
+
+export type ChannelCategoryConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<ChannelCategoryEdge>
+  pageInfo: PageInfo
+}
+
+export type ChannelCategoryCreateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock: Scalars['Float']
+}
+
+export type ChannelCategoryEdge = {
+  node: ChannelCategory
+  cursor: Scalars['String']
+}
+
+export enum ChannelCategoryOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NameAsc = 'name_ASC',
+  NameDesc = 'name_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type ChannelCategoryUpdateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type ChannelCategoryWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  name_eq?: Maybe<Scalars['String']>
+  name_contains?: Maybe<Scalars['String']>
+  name_startsWith?: Maybe<Scalars['String']>
+  name_endsWith?: Maybe<Scalars['String']>
+  name_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  channels_none?: Maybe<ChannelWhereInput>
+  channels_some?: Maybe<ChannelWhereInput>
+  channels_every?: Maybe<ChannelWhereInput>
+  AND?: Maybe<Array<ChannelCategoryWhereInput>>
+  OR?: Maybe<Array<ChannelCategoryWhereInput>>
+}
+
+export type ChannelCategoryWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type ChannelConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<ChannelEdge>
+  pageInfo: PageInfo
+}
+
+export type ChannelCreateInput = {
+  ownerMember?: Maybe<Scalars['ID']>
+  ownerCuratorGroup?: Maybe<Scalars['ID']>
+  category?: Maybe<Scalars['ID']>
+  rewardAccount?: Maybe<Scalars['String']>
+  deletionPrizeDestAccount: Scalars['String']
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  coverPhoto?: Maybe<Scalars['ID']>
+  avatarPhoto?: Maybe<Scalars['ID']>
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored: Scalars['Boolean']
+  language?: Maybe<Scalars['ID']>
+  createdInBlock: Scalars['Float']
+}
+
+export type ChannelEdge = {
+  node: Channel
+  cursor: Scalars['String']
+}
+
+export enum ChannelOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  OwnerMemberAsc = 'ownerMember_ASC',
+  OwnerMemberDesc = 'ownerMember_DESC',
+  OwnerCuratorGroupAsc = 'ownerCuratorGroup_ASC',
+  OwnerCuratorGroupDesc = 'ownerCuratorGroup_DESC',
+  CategoryAsc = 'category_ASC',
+  CategoryDesc = 'category_DESC',
+  RewardAccountAsc = 'rewardAccount_ASC',
+  RewardAccountDesc = 'rewardAccount_DESC',
+  DeletionPrizeDestAccountAsc = 'deletionPrizeDestAccount_ASC',
+  DeletionPrizeDestAccountDesc = 'deletionPrizeDestAccount_DESC',
+  TitleAsc = 'title_ASC',
+  TitleDesc = 'title_DESC',
+  DescriptionAsc = 'description_ASC',
+  DescriptionDesc = 'description_DESC',
+  CoverPhotoAsc = 'coverPhoto_ASC',
+  CoverPhotoDesc = 'coverPhoto_DESC',
+  AvatarPhotoAsc = 'avatarPhoto_ASC',
+  AvatarPhotoDesc = 'avatarPhoto_DESC',
+  IsPublicAsc = 'isPublic_ASC',
+  IsPublicDesc = 'isPublic_DESC',
+  IsCensoredAsc = 'isCensored_ASC',
+  IsCensoredDesc = 'isCensored_DESC',
+  LanguageAsc = 'language_ASC',
+  LanguageDesc = 'language_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type ChannelUpdateInput = {
+  ownerMember?: Maybe<Scalars['ID']>
+  ownerCuratorGroup?: Maybe<Scalars['ID']>
+  category?: Maybe<Scalars['ID']>
+  rewardAccount?: Maybe<Scalars['String']>
+  deletionPrizeDestAccount?: Maybe<Scalars['String']>
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  coverPhoto?: Maybe<Scalars['ID']>
+  avatarPhoto?: Maybe<Scalars['ID']>
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored?: Maybe<Scalars['Boolean']>
+  language?: Maybe<Scalars['ID']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type ChannelWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  ownerMember_eq?: Maybe<Scalars['ID']>
+  ownerMember_in?: Maybe<Array<Scalars['ID']>>
+  ownerCuratorGroup_eq?: Maybe<Scalars['ID']>
+  ownerCuratorGroup_in?: Maybe<Array<Scalars['ID']>>
+  category_eq?: Maybe<Scalars['ID']>
+  category_in?: Maybe<Array<Scalars['ID']>>
+  rewardAccount_eq?: Maybe<Scalars['String']>
+  rewardAccount_contains?: Maybe<Scalars['String']>
+  rewardAccount_startsWith?: Maybe<Scalars['String']>
+  rewardAccount_endsWith?: Maybe<Scalars['String']>
+  rewardAccount_in?: Maybe<Array<Scalars['String']>>
+  deletionPrizeDestAccount_eq?: Maybe<Scalars['String']>
+  deletionPrizeDestAccount_contains?: Maybe<Scalars['String']>
+  deletionPrizeDestAccount_startsWith?: Maybe<Scalars['String']>
+  deletionPrizeDestAccount_endsWith?: Maybe<Scalars['String']>
+  deletionPrizeDestAccount_in?: Maybe<Array<Scalars['String']>>
+  title_eq?: Maybe<Scalars['String']>
+  title_contains?: Maybe<Scalars['String']>
+  title_startsWith?: Maybe<Scalars['String']>
+  title_endsWith?: Maybe<Scalars['String']>
+  title_in?: Maybe<Array<Scalars['String']>>
+  description_eq?: Maybe<Scalars['String']>
+  description_contains?: Maybe<Scalars['String']>
+  description_startsWith?: Maybe<Scalars['String']>
+  description_endsWith?: Maybe<Scalars['String']>
+  description_in?: Maybe<Array<Scalars['String']>>
+  coverPhoto_eq?: Maybe<Scalars['ID']>
+  coverPhoto_in?: Maybe<Array<Scalars['ID']>>
+  avatarPhoto_eq?: Maybe<Scalars['ID']>
+  avatarPhoto_in?: Maybe<Array<Scalars['ID']>>
+  isPublic_eq?: Maybe<Scalars['Boolean']>
+  isPublic_in?: Maybe<Array<Scalars['Boolean']>>
+  isCensored_eq?: Maybe<Scalars['Boolean']>
+  isCensored_in?: Maybe<Array<Scalars['Boolean']>>
+  language_eq?: Maybe<Scalars['ID']>
+  language_in?: Maybe<Array<Scalars['ID']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  ownerMember?: Maybe<MembershipWhereInput>
+  ownerCuratorGroup?: Maybe<CuratorGroupWhereInput>
+  category?: Maybe<ChannelCategoryWhereInput>
+  coverPhoto?: Maybe<StorageDataObjectWhereInput>
+  avatarPhoto?: Maybe<StorageDataObjectWhereInput>
+  language?: Maybe<LanguageWhereInput>
+  videos_none?: Maybe<VideoWhereInput>
+  videos_some?: Maybe<VideoWhereInput>
+  videos_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<ChannelWhereInput>>
+  OR?: Maybe<Array<ChannelWhereInput>>
+}
+
+export type ChannelWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum Continent {
+  Af = 'AF',
+  Na = 'NA',
+  Oc = 'OC',
+  An = 'AN',
+  As = 'AS',
+  Eu = 'EU',
+  Sa = 'SA',
+}
+
+export type CuratorGroup = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Curators belonging to this group */
+  curatorIds: Array<Scalars['Int']>
+  /** Is group active or not */
+  isActive: Scalars['Boolean']
+  channels: Array<Channel>
+}
+
+export type CuratorGroupConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<CuratorGroupEdge>
+  pageInfo: PageInfo
+}
+
+export type CuratorGroupCreateInput = {
+  curatorIds: Array<Scalars['Int']>
+  isActive: Scalars['Boolean']
+}
+
+export type CuratorGroupEdge = {
+  node: CuratorGroup
+  cursor: Scalars['String']
+}
+
+export enum CuratorGroupOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsActiveAsc = 'isActive_ASC',
+  IsActiveDesc = 'isActive_DESC',
+}
+
+export type CuratorGroupUpdateInput = {
+  curatorIds?: Maybe<Array<Scalars['Int']>>
+  isActive?: Maybe<Scalars['Boolean']>
+}
+
+export type CuratorGroupWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  isActive_eq?: Maybe<Scalars['Boolean']>
+  isActive_in?: Maybe<Array<Scalars['Boolean']>>
+  channels_none?: Maybe<ChannelWhereInput>
+  channels_some?: Maybe<ChannelWhereInput>
+  channels_every?: Maybe<ChannelWhereInput>
+  AND?: Maybe<Array<CuratorGroupWhereInput>>
+  OR?: Maybe<Array<CuratorGroupWhereInput>>
+}
+
+export type CuratorGroupWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DataObjectType =
+  | DataObjectTypeChannelAvatar
+  | DataObjectTypeChannelCoverPhoto
+  | DataObjectTypeVideoMedia
+  | DataObjectTypeVideoThumbnail
+  | DataObjectTypeUnknown
+
+export type DataObjectTypeChannelAvatar = {
+  /** Related channel entity */
+  channel?: Maybe<Channel>
+}
+
+export type DataObjectTypeChannelCoverPhoto = {
+  /** Related channel entity */
+  channel?: Maybe<Channel>
+}
+
+export type DataObjectTypeUnknown = {
+  phantom?: Maybe<Scalars['Int']>
+}
+
+export type DataObjectTypeUnknownCreateInput = {
+  phantom?: Maybe<Scalars['Float']>
+}
+
+export type DataObjectTypeUnknownUpdateInput = {
+  phantom?: Maybe<Scalars['Float']>
+}
+
+export type DataObjectTypeUnknownWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  phantom_eq?: Maybe<Scalars['Int']>
+  phantom_gt?: Maybe<Scalars['Int']>
+  phantom_gte?: Maybe<Scalars['Int']>
+  phantom_lt?: Maybe<Scalars['Int']>
+  phantom_lte?: Maybe<Scalars['Int']>
+  phantom_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<DataObjectTypeUnknownWhereInput>>
+  OR?: Maybe<Array<DataObjectTypeUnknownWhereInput>>
+}
+
+export type DataObjectTypeUnknownWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DataObjectTypeVideoMedia = {
+  /** Related video entity */
+  video?: Maybe<Video>
+}
+
+export type DataObjectTypeVideoThumbnail = {
+  /** Related video entity */
+  video?: Maybe<Video>
+}
+
+export type DeleteResponse = {
+  id: Scalars['ID']
+}
+
+export type DistributionBucket = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  family: DistributionBucketFamily
+  familyId: Scalars['String']
+  operators: Array<DistributionBucketOperator>
+  /** Whether the bucket is accepting any new bags */
+  acceptingNewBags: Scalars['Boolean']
+  /** Whether the bucket is currently distributing content */
+  distributing: Scalars['Boolean']
+  bagAssignments: Array<StorageBagDistributionAssignment>
+}
+
+export type DistributionBucketConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketCreateInput = {
+  family: Scalars['ID']
+  acceptingNewBags: Scalars['Boolean']
+  distributing: Scalars['Boolean']
+}
+
+export type DistributionBucketEdge = {
+  node: DistributionBucket
+  cursor: Scalars['String']
+}
+
+export type DistributionBucketFamily = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  metadata?: Maybe<DistributionBucketFamilyMetadata>
+  metadataId?: Maybe<Scalars['String']>
+  buckets: Array<DistributionBucket>
+}
+
+export type DistributionBucketFamilyConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketFamilyEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketFamilyCreateInput = {
+  metadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketFamilyEdge = {
+  node: DistributionBucketFamily
+  cursor: Scalars['String']
+}
+
+export type DistributionBucketFamilyGeographicArea = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Geographical area (continent / country / subdivision) */
+  area: GeographicalArea
+  distributionBucketFamilyMetadata: DistributionBucketFamilyMetadata
+  distributionBucketFamilyMetadataId: Scalars['String']
+}
+
+export type DistributionBucketFamilyGeographicAreaConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketFamilyGeographicAreaEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketFamilyGeographicAreaCreateInput = {
+  area: Scalars['JSONObject']
+  distributionBucketFamilyMetadata: Scalars['ID']
+}
+
+export type DistributionBucketFamilyGeographicAreaEdge = {
+  node: DistributionBucketFamilyGeographicArea
+  cursor: Scalars['String']
+}
+
+export enum DistributionBucketFamilyGeographicAreaOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  DistributionBucketFamilyMetadataAsc = 'distributionBucketFamilyMetadata_ASC',
+  DistributionBucketFamilyMetadataDesc = 'distributionBucketFamilyMetadata_DESC',
+}
+
+export type DistributionBucketFamilyGeographicAreaUpdateInput = {
+  area?: Maybe<Scalars['JSONObject']>
+  distributionBucketFamilyMetadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketFamilyGeographicAreaWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  area_json?: Maybe<Scalars['JSONObject']>
+  distributionBucketFamilyMetadata_eq?: Maybe<Scalars['ID']>
+  distributionBucketFamilyMetadata_in?: Maybe<Array<Scalars['ID']>>
+  distributionBucketFamilyMetadata?: Maybe<DistributionBucketFamilyMetadataWhereInput>
+  AND?: Maybe<Array<DistributionBucketFamilyGeographicAreaWhereInput>>
+  OR?: Maybe<Array<DistributionBucketFamilyGeographicAreaWhereInput>>
+}
+
+export type DistributionBucketFamilyGeographicAreaWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DistributionBucketFamilyMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Name of the geographical region covered by the family (ie.: us-east-1) */
+  region?: Maybe<Scalars['String']>
+  /** Optional, more specific description of the region covered by the family */
+  description?: Maybe<Scalars['String']>
+  areas: Array<DistributionBucketFamilyGeographicArea>
+  /** List of targets (hosts/ips) best suited latency measurements for the family */
+  latencyTestTargets?: Maybe<Array<Scalars['String']>>
+  distributionbucketfamilymetadata?: Maybe<Array<DistributionBucketFamily>>
+}
+
+export type DistributionBucketFamilyMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketFamilyMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketFamilyMetadataCreateInput = {
+  region?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  latencyTestTargets?: Maybe<Array<Scalars['String']>>
+}
+
+export type DistributionBucketFamilyMetadataEdge = {
+  node: DistributionBucketFamilyMetadata
+  cursor: Scalars['String']
+}
+
+export enum DistributionBucketFamilyMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  RegionAsc = 'region_ASC',
+  RegionDesc = 'region_DESC',
+  DescriptionAsc = 'description_ASC',
+  DescriptionDesc = 'description_DESC',
+}
+
+export type DistributionBucketFamilyMetadataUpdateInput = {
+  region?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  latencyTestTargets?: Maybe<Array<Scalars['String']>>
+}
+
+export type DistributionBucketFamilyMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  region_eq?: Maybe<Scalars['String']>
+  region_contains?: Maybe<Scalars['String']>
+  region_startsWith?: Maybe<Scalars['String']>
+  region_endsWith?: Maybe<Scalars['String']>
+  region_in?: Maybe<Array<Scalars['String']>>
+  description_eq?: Maybe<Scalars['String']>
+  description_contains?: Maybe<Scalars['String']>
+  description_startsWith?: Maybe<Scalars['String']>
+  description_endsWith?: Maybe<Scalars['String']>
+  description_in?: Maybe<Array<Scalars['String']>>
+  areas_none?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>
+  areas_some?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>
+  areas_every?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>
+  distributionbucketfamilymetadata_none?: Maybe<DistributionBucketFamilyWhereInput>
+  distributionbucketfamilymetadata_some?: Maybe<DistributionBucketFamilyWhereInput>
+  distributionbucketfamilymetadata_every?: Maybe<DistributionBucketFamilyWhereInput>
+  AND?: Maybe<Array<DistributionBucketFamilyMetadataWhereInput>>
+  OR?: Maybe<Array<DistributionBucketFamilyMetadataWhereInput>>
+}
+
+export type DistributionBucketFamilyMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum DistributionBucketFamilyOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  MetadataAsc = 'metadata_ASC',
+  MetadataDesc = 'metadata_DESC',
+}
+
+export type DistributionBucketFamilyUpdateInput = {
+  metadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketFamilyWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  metadata_eq?: Maybe<Scalars['ID']>
+  metadata_in?: Maybe<Array<Scalars['ID']>>
+  metadata?: Maybe<DistributionBucketFamilyMetadataWhereInput>
+  buckets_none?: Maybe<DistributionBucketWhereInput>
+  buckets_some?: Maybe<DistributionBucketWhereInput>
+  buckets_every?: Maybe<DistributionBucketWhereInput>
+  AND?: Maybe<Array<DistributionBucketFamilyWhereInput>>
+  OR?: Maybe<Array<DistributionBucketFamilyWhereInput>>
+}
+
+export type DistributionBucketFamilyWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DistributionBucketOperator = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  distributionBucket: DistributionBucket
+  distributionBucketId: Scalars['String']
+  /** ID of the distribution group worker */
+  workerId: Scalars['Int']
+  /** Current operator status */
+  status: DistributionBucketOperatorStatus
+  metadata?: Maybe<DistributionBucketOperatorMetadata>
+  metadataId?: Maybe<Scalars['String']>
+}
+
+export type DistributionBucketOperatorConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketOperatorEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketOperatorCreateInput = {
+  distributionBucket: Scalars['ID']
+  workerId: Scalars['Float']
+  status: DistributionBucketOperatorStatus
+  metadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketOperatorEdge = {
+  node: DistributionBucketOperator
+  cursor: Scalars['String']
+}
+
+export type DistributionBucketOperatorMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Root distributor node api endpoint */
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<NodeLocationMetadata>
+  nodeLocationId?: Maybe<Scalars['String']>
+  /** Additional information about the node/operator */
+  extra?: Maybe<Scalars['String']>
+  distributionbucketoperatormetadata?: Maybe<Array<DistributionBucketOperator>>
+}
+
+export type DistributionBucketOperatorMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketOperatorMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketOperatorMetadataCreateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<Scalars['ID']>
+  extra?: Maybe<Scalars['String']>
+}
+
+export type DistributionBucketOperatorMetadataEdge = {
+  node: DistributionBucketOperatorMetadata
+  cursor: Scalars['String']
+}
+
+export enum DistributionBucketOperatorMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NodeEndpointAsc = 'nodeEndpoint_ASC',
+  NodeEndpointDesc = 'nodeEndpoint_DESC',
+  NodeLocationAsc = 'nodeLocation_ASC',
+  NodeLocationDesc = 'nodeLocation_DESC',
+  ExtraAsc = 'extra_ASC',
+  ExtraDesc = 'extra_DESC',
+}
+
+export type DistributionBucketOperatorMetadataUpdateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<Scalars['ID']>
+  extra?: Maybe<Scalars['String']>
+}
+
+export type DistributionBucketOperatorMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  nodeEndpoint_eq?: Maybe<Scalars['String']>
+  nodeEndpoint_contains?: Maybe<Scalars['String']>
+  nodeEndpoint_startsWith?: Maybe<Scalars['String']>
+  nodeEndpoint_endsWith?: Maybe<Scalars['String']>
+  nodeEndpoint_in?: Maybe<Array<Scalars['String']>>
+  nodeLocation_eq?: Maybe<Scalars['ID']>
+  nodeLocation_in?: Maybe<Array<Scalars['ID']>>
+  extra_eq?: Maybe<Scalars['String']>
+  extra_contains?: Maybe<Scalars['String']>
+  extra_startsWith?: Maybe<Scalars['String']>
+  extra_endsWith?: Maybe<Scalars['String']>
+  extra_in?: Maybe<Array<Scalars['String']>>
+  nodeLocation?: Maybe<NodeLocationMetadataWhereInput>
+  distributionbucketoperatormetadata_none?: Maybe<DistributionBucketOperatorWhereInput>
+  distributionbucketoperatormetadata_some?: Maybe<DistributionBucketOperatorWhereInput>
+  distributionbucketoperatormetadata_every?: Maybe<DistributionBucketOperatorWhereInput>
+  AND?: Maybe<Array<DistributionBucketOperatorMetadataWhereInput>>
+  OR?: Maybe<Array<DistributionBucketOperatorMetadataWhereInput>>
+}
+
+export type DistributionBucketOperatorMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum DistributionBucketOperatorOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  DistributionBucketAsc = 'distributionBucket_ASC',
+  DistributionBucketDesc = 'distributionBucket_DESC',
+  WorkerIdAsc = 'workerId_ASC',
+  WorkerIdDesc = 'workerId_DESC',
+  StatusAsc = 'status_ASC',
+  StatusDesc = 'status_DESC',
+  MetadataAsc = 'metadata_ASC',
+  MetadataDesc = 'metadata_DESC',
+}
+
+export enum DistributionBucketOperatorStatus {
+  Invited = 'INVITED',
+  Active = 'ACTIVE',
+}
+
+export type DistributionBucketOperatorUpdateInput = {
+  distributionBucket?: Maybe<Scalars['ID']>
+  workerId?: Maybe<Scalars['Float']>
+  status?: Maybe<DistributionBucketOperatorStatus>
+  metadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketOperatorWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  distributionBucket_eq?: Maybe<Scalars['ID']>
+  distributionBucket_in?: Maybe<Array<Scalars['ID']>>
+  workerId_eq?: Maybe<Scalars['Int']>
+  workerId_gt?: Maybe<Scalars['Int']>
+  workerId_gte?: Maybe<Scalars['Int']>
+  workerId_lt?: Maybe<Scalars['Int']>
+  workerId_lte?: Maybe<Scalars['Int']>
+  workerId_in?: Maybe<Array<Scalars['Int']>>
+  status_eq?: Maybe<DistributionBucketOperatorStatus>
+  status_in?: Maybe<Array<DistributionBucketOperatorStatus>>
+  metadata_eq?: Maybe<Scalars['ID']>
+  metadata_in?: Maybe<Array<Scalars['ID']>>
+  distributionBucket?: Maybe<DistributionBucketWhereInput>
+  metadata?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  AND?: Maybe<Array<DistributionBucketOperatorWhereInput>>
+  OR?: Maybe<Array<DistributionBucketOperatorWhereInput>>
+}
+
+export type DistributionBucketOperatorWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum DistributionBucketOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  FamilyAsc = 'family_ASC',
+  FamilyDesc = 'family_DESC',
+  AcceptingNewBagsAsc = 'acceptingNewBags_ASC',
+  AcceptingNewBagsDesc = 'acceptingNewBags_DESC',
+  DistributingAsc = 'distributing_ASC',
+  DistributingDesc = 'distributing_DESC',
+}
+
+export type DistributionBucketUpdateInput = {
+  family?: Maybe<Scalars['ID']>
+  acceptingNewBags?: Maybe<Scalars['Boolean']>
+  distributing?: Maybe<Scalars['Boolean']>
+}
+
+export type DistributionBucketWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  family_eq?: Maybe<Scalars['ID']>
+  family_in?: Maybe<Array<Scalars['ID']>>
+  acceptingNewBags_eq?: Maybe<Scalars['Boolean']>
+  acceptingNewBags_in?: Maybe<Array<Scalars['Boolean']>>
+  distributing_eq?: Maybe<Scalars['Boolean']>
+  distributing_in?: Maybe<Array<Scalars['Boolean']>>
+  family?: Maybe<DistributionBucketFamilyWhereInput>
+  operators_none?: Maybe<DistributionBucketOperatorWhereInput>
+  operators_some?: Maybe<DistributionBucketOperatorWhereInput>
+  operators_every?: Maybe<DistributionBucketOperatorWhereInput>
+  bagAssignments_none?: Maybe<StorageBagDistributionAssignmentWhereInput>
+  bagAssignments_some?: Maybe<StorageBagDistributionAssignmentWhereInput>
+  bagAssignments_every?: Maybe<StorageBagDistributionAssignmentWhereInput>
+  AND?: Maybe<Array<DistributionBucketWhereInput>>
+  OR?: Maybe<Array<DistributionBucketWhereInput>>
+}
+
+export type DistributionBucketWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type GeoCoordinates = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  latitude: Scalars['Float']
+  longitude: Scalars['Float']
+  nodelocationmetadatacoordinates?: Maybe<Array<NodeLocationMetadata>>
+}
+
+export type GeoCoordinatesConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<GeoCoordinatesEdge>
+  pageInfo: PageInfo
+}
+
+export type GeoCoordinatesCreateInput = {
+  latitude: Scalars['Float']
+  longitude: Scalars['Float']
+}
+
+export type GeoCoordinatesEdge = {
+  node: GeoCoordinates
+  cursor: Scalars['String']
+}
+
+export enum GeoCoordinatesOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  LatitudeAsc = 'latitude_ASC',
+  LatitudeDesc = 'latitude_DESC',
+  LongitudeAsc = 'longitude_ASC',
+  LongitudeDesc = 'longitude_DESC',
+}
+
+export type GeoCoordinatesUpdateInput = {
+  latitude?: Maybe<Scalars['Float']>
+  longitude?: Maybe<Scalars['Float']>
+}
+
+export type GeoCoordinatesWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  latitude_eq?: Maybe<Scalars['Float']>
+  latitude_gt?: Maybe<Scalars['Float']>
+  latitude_gte?: Maybe<Scalars['Float']>
+  latitude_lt?: Maybe<Scalars['Float']>
+  latitude_lte?: Maybe<Scalars['Float']>
+  latitude_in?: Maybe<Array<Scalars['Float']>>
+  longitude_eq?: Maybe<Scalars['Float']>
+  longitude_gt?: Maybe<Scalars['Float']>
+  longitude_gte?: Maybe<Scalars['Float']>
+  longitude_lt?: Maybe<Scalars['Float']>
+  longitude_lte?: Maybe<Scalars['Float']>
+  longitude_in?: Maybe<Array<Scalars['Float']>>
+  nodelocationmetadatacoordinates_none?: Maybe<NodeLocationMetadataWhereInput>
+  nodelocationmetadatacoordinates_some?: Maybe<NodeLocationMetadataWhereInput>
+  nodelocationmetadatacoordinates_every?: Maybe<NodeLocationMetadataWhereInput>
+  AND?: Maybe<Array<GeoCoordinatesWhereInput>>
+  OR?: Maybe<Array<GeoCoordinatesWhereInput>>
+}
+
+export type GeoCoordinatesWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type GeographicalArea = GeographicalAreaContinent | GeographicalAreaCountry | GeographicalAreaSubdivistion
+
+export type GeographicalAreaContinent = {
+  code?: Maybe<Continent>
+}
+
+export type GeographicalAreaContinentCreateInput = {
+  code?: Maybe<Continent>
+}
+
+export type GeographicalAreaContinentUpdateInput = {
+  code?: Maybe<Continent>
+}
+
+export type GeographicalAreaContinentWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  code_eq?: Maybe<Continent>
+  code_in?: Maybe<Array<Continent>>
+  AND?: Maybe<Array<GeographicalAreaContinentWhereInput>>
+  OR?: Maybe<Array<GeographicalAreaContinentWhereInput>>
+}
+
+export type GeographicalAreaContinentWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type GeographicalAreaCountry = {
+  /** ISO 3166-1 alpha-2 country code */
+  code?: Maybe<Scalars['String']>
+}
+
+export type GeographicalAreaCountryCreateInput = {
+  code?: Maybe<Scalars['String']>
+}
+
+export type GeographicalAreaCountryUpdateInput = {
+  code?: Maybe<Scalars['String']>
+}
+
+export type GeographicalAreaCountryWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  code_eq?: Maybe<Scalars['String']>
+  code_contains?: Maybe<Scalars['String']>
+  code_startsWith?: Maybe<Scalars['String']>
+  code_endsWith?: Maybe<Scalars['String']>
+  code_in?: Maybe<Array<Scalars['String']>>
+  AND?: Maybe<Array<GeographicalAreaCountryWhereInput>>
+  OR?: Maybe<Array<GeographicalAreaCountryWhereInput>>
+}
+
+export type GeographicalAreaCountryWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type GeographicalAreaSubdivistion = {
+  /** ISO 3166-2 subdivision code */
+  code?: Maybe<Scalars['String']>
+}
+
+export type GeographicalAreaSubdivistionCreateInput = {
+  code?: Maybe<Scalars['String']>
+}
+
+export type GeographicalAreaSubdivistionUpdateInput = {
+  code?: Maybe<Scalars['String']>
+}
+
+export type GeographicalAreaSubdivistionWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  code_eq?: Maybe<Scalars['String']>
+  code_contains?: Maybe<Scalars['String']>
+  code_startsWith?: Maybe<Scalars['String']>
+  code_endsWith?: Maybe<Scalars['String']>
+  code_in?: Maybe<Array<Scalars['String']>>
+  AND?: Maybe<Array<GeographicalAreaSubdivistionWhereInput>>
+  OR?: Maybe<Array<GeographicalAreaSubdivistionWhereInput>>
+}
+
+export type GeographicalAreaSubdivistionWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type Language = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Language identifier ISO 639-1 */
+  iso: Scalars['String']
+  createdInBlock: Scalars['Int']
+  channellanguage?: Maybe<Array<Channel>>
+  videolanguage?: Maybe<Array<Video>>
+}
+
+export type LanguageConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<LanguageEdge>
+  pageInfo: PageInfo
+}
+
+export type LanguageCreateInput = {
+  iso: Scalars['String']
+  createdInBlock: Scalars['Float']
+}
+
+export type LanguageEdge = {
+  node: Language
+  cursor: Scalars['String']
+}
+
+export enum LanguageOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsoAsc = 'iso_ASC',
+  IsoDesc = 'iso_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type LanguageUpdateInput = {
+  iso?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type LanguageWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  iso_eq?: Maybe<Scalars['String']>
+  iso_contains?: Maybe<Scalars['String']>
+  iso_startsWith?: Maybe<Scalars['String']>
+  iso_endsWith?: Maybe<Scalars['String']>
+  iso_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  channellanguage_none?: Maybe<ChannelWhereInput>
+  channellanguage_some?: Maybe<ChannelWhereInput>
+  channellanguage_every?: Maybe<ChannelWhereInput>
+  videolanguage_none?: Maybe<VideoWhereInput>
+  videolanguage_some?: Maybe<VideoWhereInput>
+  videolanguage_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<LanguageWhereInput>>
+  OR?: Maybe<Array<LanguageWhereInput>>
+}
+
+export type LanguageWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type License = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** License code defined by Joystream */
+  code?: Maybe<Scalars['Int']>
+  /** Attribution (if required by the license) */
+  attribution?: Maybe<Scalars['String']>
+  /** Custom license content */
+  customText?: Maybe<Scalars['String']>
+  videolicense?: Maybe<Array<Video>>
+}
+
+export type LicenseConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<LicenseEdge>
+  pageInfo: PageInfo
+}
+
+export type LicenseCreateInput = {
+  code?: Maybe<Scalars['Float']>
+  attribution?: Maybe<Scalars['String']>
+  customText?: Maybe<Scalars['String']>
+}
+
+export type LicenseEdge = {
+  node: License
+  cursor: Scalars['String']
+}
+
+export enum LicenseOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CodeAsc = 'code_ASC',
+  CodeDesc = 'code_DESC',
+  AttributionAsc = 'attribution_ASC',
+  AttributionDesc = 'attribution_DESC',
+  CustomTextAsc = 'customText_ASC',
+  CustomTextDesc = 'customText_DESC',
+}
+
+export type LicenseUpdateInput = {
+  code?: Maybe<Scalars['Float']>
+  attribution?: Maybe<Scalars['String']>
+  customText?: Maybe<Scalars['String']>
+}
+
+export type LicenseWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  code_eq?: Maybe<Scalars['Int']>
+  code_gt?: Maybe<Scalars['Int']>
+  code_gte?: Maybe<Scalars['Int']>
+  code_lt?: Maybe<Scalars['Int']>
+  code_lte?: Maybe<Scalars['Int']>
+  code_in?: Maybe<Array<Scalars['Int']>>
+  attribution_eq?: Maybe<Scalars['String']>
+  attribution_contains?: Maybe<Scalars['String']>
+  attribution_startsWith?: Maybe<Scalars['String']>
+  attribution_endsWith?: Maybe<Scalars['String']>
+  attribution_in?: Maybe<Array<Scalars['String']>>
+  customText_eq?: Maybe<Scalars['String']>
+  customText_contains?: Maybe<Scalars['String']>
+  customText_startsWith?: Maybe<Scalars['String']>
+  customText_endsWith?: Maybe<Scalars['String']>
+  customText_in?: Maybe<Array<Scalars['String']>>
+  videolicense_none?: Maybe<VideoWhereInput>
+  videolicense_some?: Maybe<VideoWhereInput>
+  videolicense_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<LicenseWhereInput>>
+  OR?: Maybe<Array<LicenseWhereInput>>
+}
+
+export type LicenseWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type MembersByHandleFtsOutput = {
+  item: MembersByHandleSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type MembersByHandleSearchResult = Membership
+
+/** Stored information about a registered user */
+export type Membership = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** The unique handle chosen by member */
+  handle: Scalars['String']
+  /** A Url to member's Avatar image */
+  avatarUri?: Maybe<Scalars['String']>
+  /** Short text chosen by member to share information about themselves */
+  about?: Maybe<Scalars['String']>
+  /** Member's controller account id */
+  controllerAccount: Scalars['String']
+  /** Member's root account id */
+  rootAccount: Scalars['String']
+  /** Blocknumber when member was registered */
+  createdInBlock: Scalars['Int']
+  /** How the member was registered */
+  entry: MembershipEntryMethod
+  /** The type of subscription the member has purchased if any. */
+  subscription?: Maybe<Scalars['Int']>
+  channels: Array<Channel>
+}
+
+export type MembershipConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<MembershipEdge>
+  pageInfo: PageInfo
+}
+
+export type MembershipCreateInput = {
+  handle: Scalars['String']
+  avatarUri?: Maybe<Scalars['String']>
+  about?: Maybe<Scalars['String']>
+  controllerAccount: Scalars['String']
+  rootAccount: Scalars['String']
+  createdInBlock: Scalars['Float']
+  entry: MembershipEntryMethod
+  subscription?: Maybe<Scalars['Float']>
+}
+
+export type MembershipEdge = {
+  node: Membership
+  cursor: Scalars['String']
+}
+
+export enum MembershipEntryMethod {
+  Paid = 'PAID',
+  Screening = 'SCREENING',
+  Genesis = 'GENESIS',
+}
+
+export enum MembershipOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  HandleAsc = 'handle_ASC',
+  HandleDesc = 'handle_DESC',
+  AvatarUriAsc = 'avatarUri_ASC',
+  AvatarUriDesc = 'avatarUri_DESC',
+  AboutAsc = 'about_ASC',
+  AboutDesc = 'about_DESC',
+  ControllerAccountAsc = 'controllerAccount_ASC',
+  ControllerAccountDesc = 'controllerAccount_DESC',
+  RootAccountAsc = 'rootAccount_ASC',
+  RootAccountDesc = 'rootAccount_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+  EntryAsc = 'entry_ASC',
+  EntryDesc = 'entry_DESC',
+  SubscriptionAsc = 'subscription_ASC',
+  SubscriptionDesc = 'subscription_DESC',
+}
+
+export type MembershipUpdateInput = {
+  handle?: Maybe<Scalars['String']>
+  avatarUri?: Maybe<Scalars['String']>
+  about?: Maybe<Scalars['String']>
+  controllerAccount?: Maybe<Scalars['String']>
+  rootAccount?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+  entry?: Maybe<MembershipEntryMethod>
+  subscription?: Maybe<Scalars['Float']>
+}
+
+export type MembershipWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  handle_eq?: Maybe<Scalars['String']>
+  handle_contains?: Maybe<Scalars['String']>
+  handle_startsWith?: Maybe<Scalars['String']>
+  handle_endsWith?: Maybe<Scalars['String']>
+  handle_in?: Maybe<Array<Scalars['String']>>
+  avatarUri_eq?: Maybe<Scalars['String']>
+  avatarUri_contains?: Maybe<Scalars['String']>
+  avatarUri_startsWith?: Maybe<Scalars['String']>
+  avatarUri_endsWith?: Maybe<Scalars['String']>
+  avatarUri_in?: Maybe<Array<Scalars['String']>>
+  about_eq?: Maybe<Scalars['String']>
+  about_contains?: Maybe<Scalars['String']>
+  about_startsWith?: Maybe<Scalars['String']>
+  about_endsWith?: Maybe<Scalars['String']>
+  about_in?: Maybe<Array<Scalars['String']>>
+  controllerAccount_eq?: Maybe<Scalars['String']>
+  controllerAccount_contains?: Maybe<Scalars['String']>
+  controllerAccount_startsWith?: Maybe<Scalars['String']>
+  controllerAccount_endsWith?: Maybe<Scalars['String']>
+  controllerAccount_in?: Maybe<Array<Scalars['String']>>
+  rootAccount_eq?: Maybe<Scalars['String']>
+  rootAccount_contains?: Maybe<Scalars['String']>
+  rootAccount_startsWith?: Maybe<Scalars['String']>
+  rootAccount_endsWith?: Maybe<Scalars['String']>
+  rootAccount_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  entry_eq?: Maybe<MembershipEntryMethod>
+  entry_in?: Maybe<Array<MembershipEntryMethod>>
+  subscription_eq?: Maybe<Scalars['Int']>
+  subscription_gt?: Maybe<Scalars['Int']>
+  subscription_gte?: Maybe<Scalars['Int']>
+  subscription_lt?: Maybe<Scalars['Int']>
+  subscription_lte?: Maybe<Scalars['Int']>
+  subscription_in?: Maybe<Array<Scalars['Int']>>
+  channels_none?: Maybe<ChannelWhereInput>
+  channels_some?: Maybe<ChannelWhereInput>
+  channels_every?: Maybe<ChannelWhereInput>
+  AND?: Maybe<Array<MembershipWhereInput>>
+  OR?: Maybe<Array<MembershipWhereInput>>
+}
+
+export type MembershipWhereUniqueInput = {
+  id?: Maybe<Scalars['ID']>
+  handle?: Maybe<Scalars['String']>
+}
+
+export type NodeLocationMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** ISO 3166-1 alpha-2 country code (2 letters) */
+  countryCode?: Maybe<Scalars['String']>
+  /** City name */
+  city?: Maybe<Scalars['String']>
+  coordinates?: Maybe<GeoCoordinates>
+  coordinatesId?: Maybe<Scalars['String']>
+  distributionbucketoperatormetadatanodeLocation?: Maybe<Array<DistributionBucketOperatorMetadata>>
+  storagebucketoperatormetadatanodeLocation?: Maybe<Array<StorageBucketOperatorMetadata>>
+}
+
+export type NodeLocationMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<NodeLocationMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type NodeLocationMetadataCreateInput = {
+  countryCode?: Maybe<Scalars['String']>
+  city?: Maybe<Scalars['String']>
+  coordinates?: Maybe<Scalars['ID']>
+}
+
+export type NodeLocationMetadataEdge = {
+  node: NodeLocationMetadata
+  cursor: Scalars['String']
+}
+
+export enum NodeLocationMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CountryCodeAsc = 'countryCode_ASC',
+  CountryCodeDesc = 'countryCode_DESC',
+  CityAsc = 'city_ASC',
+  CityDesc = 'city_DESC',
+  CoordinatesAsc = 'coordinates_ASC',
+  CoordinatesDesc = 'coordinates_DESC',
+}
+
+export type NodeLocationMetadataUpdateInput = {
+  countryCode?: Maybe<Scalars['String']>
+  city?: Maybe<Scalars['String']>
+  coordinates?: Maybe<Scalars['ID']>
+}
+
+export type NodeLocationMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  countryCode_eq?: Maybe<Scalars['String']>
+  countryCode_contains?: Maybe<Scalars['String']>
+  countryCode_startsWith?: Maybe<Scalars['String']>
+  countryCode_endsWith?: Maybe<Scalars['String']>
+  countryCode_in?: Maybe<Array<Scalars['String']>>
+  city_eq?: Maybe<Scalars['String']>
+  city_contains?: Maybe<Scalars['String']>
+  city_startsWith?: Maybe<Scalars['String']>
+  city_endsWith?: Maybe<Scalars['String']>
+  city_in?: Maybe<Array<Scalars['String']>>
+  coordinates_eq?: Maybe<Scalars['ID']>
+  coordinates_in?: Maybe<Array<Scalars['ID']>>
+  coordinates?: Maybe<GeoCoordinatesWhereInput>
+  distributionbucketoperatormetadatanodeLocation_none?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  distributionbucketoperatormetadatanodeLocation_some?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  distributionbucketoperatormetadatanodeLocation_every?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  storagebucketoperatormetadatanodeLocation_none?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  storagebucketoperatormetadatanodeLocation_some?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  storagebucketoperatormetadatanodeLocation_every?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  AND?: Maybe<Array<NodeLocationMetadataWhereInput>>
+  OR?: Maybe<Array<NodeLocationMetadataWhereInput>>
+}
+
+export type NodeLocationMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type PageInfo = {
+  hasNextPage: Scalars['Boolean']
+  hasPreviousPage: Scalars['Boolean']
+  startCursor?: Maybe<Scalars['String']>
+  endCursor?: Maybe<Scalars['String']>
+}
+
+export type ProcessorState = {
+  lastCompleteBlock: Scalars['Float']
+  lastProcessedEvent: Scalars['String']
+  indexerHead: Scalars['Float']
+  chainHead: Scalars['Float']
+}
+
+export type Query = {
+  channelCategories: Array<ChannelCategory>
+  channelCategoryByUniqueInput?: Maybe<ChannelCategory>
+  channelCategoriesConnection: ChannelCategoryConnection
+  channels: Array<Channel>
+  channelByUniqueInput?: Maybe<Channel>
+  channelsConnection: ChannelConnection
+  curatorGroups: Array<CuratorGroup>
+  curatorGroupByUniqueInput?: Maybe<CuratorGroup>
+  curatorGroupsConnection: CuratorGroupConnection
+  distributionBucketFamilyGeographicAreas: Array<DistributionBucketFamilyGeographicArea>
+  distributionBucketFamilyGeographicAreaByUniqueInput?: Maybe<DistributionBucketFamilyGeographicArea>
+  distributionBucketFamilyGeographicAreasConnection: DistributionBucketFamilyGeographicAreaConnection
+  distributionBucketFamilyMetadata: Array<DistributionBucketFamilyMetadata>
+  distributionBucketFamilyMetadataByUniqueInput?: Maybe<DistributionBucketFamilyMetadata>
+  distributionBucketFamilyMetadataConnection: DistributionBucketFamilyMetadataConnection
+  distributionBucketFamilies: Array<DistributionBucketFamily>
+  distributionBucketFamilyByUniqueInput?: Maybe<DistributionBucketFamily>
+  distributionBucketFamiliesConnection: DistributionBucketFamilyConnection
+  distributionBucketOperatorMetadata: Array<DistributionBucketOperatorMetadata>
+  distributionBucketOperatorMetadataByUniqueInput?: Maybe<DistributionBucketOperatorMetadata>
+  distributionBucketOperatorMetadataConnection: DistributionBucketOperatorMetadataConnection
+  distributionBucketOperators: Array<DistributionBucketOperator>
+  distributionBucketOperatorByUniqueInput?: Maybe<DistributionBucketOperator>
+  distributionBucketOperatorsConnection: DistributionBucketOperatorConnection
+  distributionBuckets: Array<DistributionBucket>
+  distributionBucketByUniqueInput?: Maybe<DistributionBucket>
+  distributionBucketsConnection: DistributionBucketConnection
+  geoCoordinates: Array<GeoCoordinates>
+  geoCoordinatesByUniqueInput?: Maybe<GeoCoordinates>
+  geoCoordinatesConnection: GeoCoordinatesConnection
+  languages: Array<Language>
+  languageByUniqueInput?: Maybe<Language>
+  languagesConnection: LanguageConnection
+  licenses: Array<License>
+  licenseByUniqueInput?: Maybe<License>
+  licensesConnection: LicenseConnection
+  memberships: Array<Membership>
+  membershipByUniqueInput?: Maybe<Membership>
+  membershipsConnection: MembershipConnection
+  nodeLocationMetadata: Array<NodeLocationMetadata>
+  nodeLocationMetadataByUniqueInput?: Maybe<NodeLocationMetadata>
+  nodeLocationMetadataConnection: NodeLocationMetadataConnection
+  channelCategoriesByName: Array<ChannelCategoriesByNameFtsOutput>
+  membersByHandle: Array<MembersByHandleFtsOutput>
+  search: Array<SearchFtsOutput>
+  videoCategoriesByName: Array<VideoCategoriesByNameFtsOutput>
+  storageBagDistributionAssignments: Array<StorageBagDistributionAssignment>
+  storageBagDistributionAssignmentByUniqueInput?: Maybe<StorageBagDistributionAssignment>
+  storageBagDistributionAssignmentsConnection: StorageBagDistributionAssignmentConnection
+  storageBagStorageAssignments: Array<StorageBagStorageAssignment>
+  storageBagStorageAssignmentByUniqueInput?: Maybe<StorageBagStorageAssignment>
+  storageBagStorageAssignmentsConnection: StorageBagStorageAssignmentConnection
+  storageBags: Array<StorageBag>
+  storageBagByUniqueInput?: Maybe<StorageBag>
+  storageBagsConnection: StorageBagConnection
+  storageBucketOperatorMetadata: Array<StorageBucketOperatorMetadata>
+  storageBucketOperatorMetadataByUniqueInput?: Maybe<StorageBucketOperatorMetadata>
+  storageBucketOperatorMetadataConnection: StorageBucketOperatorMetadataConnection
+  storageBuckets: Array<StorageBucket>
+  storageBucketByUniqueInput?: Maybe<StorageBucket>
+  storageBucketsConnection: StorageBucketConnection
+  storageDataObjects: Array<StorageDataObject>
+  storageDataObjectByUniqueInput?: Maybe<StorageDataObject>
+  storageDataObjectsConnection: StorageDataObjectConnection
+  storageSystemParameters: Array<StorageSystemParameters>
+  storageSystemParametersByUniqueInput?: Maybe<StorageSystemParameters>
+  storageSystemParametersConnection: StorageSystemParametersConnection
+  videoCategories: Array<VideoCategory>
+  videoCategoryByUniqueInput?: Maybe<VideoCategory>
+  videoCategoriesConnection: VideoCategoryConnection
+  videoMediaEncodings: Array<VideoMediaEncoding>
+  videoMediaEncodingByUniqueInput?: Maybe<VideoMediaEncoding>
+  videoMediaEncodingsConnection: VideoMediaEncodingConnection
+  videoMediaMetadata: Array<VideoMediaMetadata>
+  videoMediaMetadataByUniqueInput?: Maybe<VideoMediaMetadata>
+  videoMediaMetadataConnection: VideoMediaMetadataConnection
+  videos: Array<Video>
+  videoByUniqueInput?: Maybe<Video>
+  videosConnection: VideoConnection
+  workers: Array<Worker>
+  workerByUniqueInput?: Maybe<Worker>
+  workersConnection: WorkerConnection
+}
+
+export type QueryChannelCategoriesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<ChannelCategoryWhereInput>
+  orderBy?: Maybe<Array<ChannelCategoryOrderByInput>>
+}
+
+export type QueryChannelCategoryByUniqueInputArgs = {
+  where: ChannelCategoryWhereUniqueInput
+}
+
+export type QueryChannelCategoriesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<ChannelCategoryWhereInput>
+  orderBy?: Maybe<Array<ChannelCategoryOrderByInput>>
+}
+
+export type QueryChannelsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<ChannelWhereInput>
+  orderBy?: Maybe<Array<ChannelOrderByInput>>
+}
+
+export type QueryChannelByUniqueInputArgs = {
+  where: ChannelWhereUniqueInput
+}
+
+export type QueryChannelsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<ChannelWhereInput>
+  orderBy?: Maybe<Array<ChannelOrderByInput>>
+}
+
+export type QueryCuratorGroupsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<CuratorGroupWhereInput>
+  orderBy?: Maybe<Array<CuratorGroupOrderByInput>>
+}
+
+export type QueryCuratorGroupByUniqueInputArgs = {
+  where: CuratorGroupWhereUniqueInput
+}
+
+export type QueryCuratorGroupsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<CuratorGroupWhereInput>
+  orderBy?: Maybe<Array<CuratorGroupOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyGeographicAreasArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyGeographicAreaOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyGeographicAreaByUniqueInputArgs = {
+  where: DistributionBucketFamilyGeographicAreaWhereUniqueInput
+}
+
+export type QueryDistributionBucketFamilyGeographicAreasConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyGeographicAreaOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketFamilyMetadataWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyMetadataOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyMetadataByUniqueInputArgs = {
+  where: DistributionBucketFamilyMetadataWhereUniqueInput
+}
+
+export type QueryDistributionBucketFamilyMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketFamilyMetadataWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyMetadataOrderByInput>>
+}
+
+export type QueryDistributionBucketFamiliesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketFamilyWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyByUniqueInputArgs = {
+  where: DistributionBucketFamilyWhereUniqueInput
+}
+
+export type QueryDistributionBucketFamiliesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketFamilyWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyOrderByInput>>
+}
+
+export type QueryDistributionBucketOperatorMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOperatorMetadataOrderByInput>>
+}
+
+export type QueryDistributionBucketOperatorMetadataByUniqueInputArgs = {
+  where: DistributionBucketOperatorMetadataWhereUniqueInput
+}
+
+export type QueryDistributionBucketOperatorMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOperatorMetadataOrderByInput>>
+}
+
+export type QueryDistributionBucketOperatorsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketOperatorWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOperatorOrderByInput>>
+}
+
+export type QueryDistributionBucketOperatorByUniqueInputArgs = {
+  where: DistributionBucketOperatorWhereUniqueInput
+}
+
+export type QueryDistributionBucketOperatorsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketOperatorWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOperatorOrderByInput>>
+}
+
+export type QueryDistributionBucketsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOrderByInput>>
+}
+
+export type QueryDistributionBucketByUniqueInputArgs = {
+  where: DistributionBucketWhereUniqueInput
+}
+
+export type QueryDistributionBucketsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOrderByInput>>
+}
+
+export type QueryGeoCoordinatesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<GeoCoordinatesWhereInput>
+  orderBy?: Maybe<Array<GeoCoordinatesOrderByInput>>
+}
+
+export type QueryGeoCoordinatesByUniqueInputArgs = {
+  where: GeoCoordinatesWhereUniqueInput
+}
+
+export type QueryGeoCoordinatesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<GeoCoordinatesWhereInput>
+  orderBy?: Maybe<Array<GeoCoordinatesOrderByInput>>
+}
+
+export type QueryLanguagesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<LanguageWhereInput>
+  orderBy?: Maybe<Array<LanguageOrderByInput>>
+}
+
+export type QueryLanguageByUniqueInputArgs = {
+  where: LanguageWhereUniqueInput
+}
+
+export type QueryLanguagesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<LanguageWhereInput>
+  orderBy?: Maybe<Array<LanguageOrderByInput>>
+}
+
+export type QueryLicensesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<LicenseWhereInput>
+  orderBy?: Maybe<Array<LicenseOrderByInput>>
+}
+
+export type QueryLicenseByUniqueInputArgs = {
+  where: LicenseWhereUniqueInput
+}
+
+export type QueryLicensesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<LicenseWhereInput>
+  orderBy?: Maybe<Array<LicenseOrderByInput>>
+}
+
+export type QueryMembershipsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<MembershipWhereInput>
+  orderBy?: Maybe<Array<MembershipOrderByInput>>
+}
+
+export type QueryMembershipByUniqueInputArgs = {
+  where: MembershipWhereUniqueInput
+}
+
+export type QueryMembershipsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<MembershipWhereInput>
+  orderBy?: Maybe<Array<MembershipOrderByInput>>
+}
+
+export type QueryNodeLocationMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<NodeLocationMetadataWhereInput>
+  orderBy?: Maybe<Array<NodeLocationMetadataOrderByInput>>
+}
+
+export type QueryNodeLocationMetadataByUniqueInputArgs = {
+  where: NodeLocationMetadataWhereUniqueInput
+}
+
+export type QueryNodeLocationMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<NodeLocationMetadataWhereInput>
+  orderBy?: Maybe<Array<NodeLocationMetadataOrderByInput>>
+}
+
+export type QueryChannelCategoriesByNameArgs = {
+  whereChannelCategory?: Maybe<ChannelCategoryWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
+
+export type QueryMembersByHandleArgs = {
+  whereMembership?: Maybe<MembershipWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
+
+export type QuerySearchArgs = {
+  whereVideo?: Maybe<VideoWhereInput>
+  whereChannel?: Maybe<ChannelWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
+
+export type QueryVideoCategoriesByNameArgs = {
+  whereVideoCategory?: Maybe<VideoCategoryWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
+
+export type QueryStorageBagDistributionAssignmentsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageBagDistributionAssignmentWhereInput>
+  orderBy?: Maybe<Array<StorageBagDistributionAssignmentOrderByInput>>
+}
+
+export type QueryStorageBagDistributionAssignmentByUniqueInputArgs = {
+  where: StorageBagDistributionAssignmentWhereUniqueInput
+}
+
+export type QueryStorageBagDistributionAssignmentsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageBagDistributionAssignmentWhereInput>
+  orderBy?: Maybe<Array<StorageBagDistributionAssignmentOrderByInput>>
+}
+
+export type QueryStorageBagStorageAssignmentsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageBagStorageAssignmentWhereInput>
+  orderBy?: Maybe<Array<StorageBagStorageAssignmentOrderByInput>>
+}
+
+export type QueryStorageBagStorageAssignmentByUniqueInputArgs = {
+  where: StorageBagStorageAssignmentWhereUniqueInput
+}
+
+export type QueryStorageBagStorageAssignmentsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageBagStorageAssignmentWhereInput>
+  orderBy?: Maybe<Array<StorageBagStorageAssignmentOrderByInput>>
+}
+
+export type QueryStorageBagsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageBagWhereInput>
+  orderBy?: Maybe<Array<StorageBagOrderByInput>>
+}
+
+export type QueryStorageBagByUniqueInputArgs = {
+  where: StorageBagWhereUniqueInput
+}
+
+export type QueryStorageBagsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageBagWhereInput>
+  orderBy?: Maybe<Array<StorageBagOrderByInput>>
+}
+
+export type QueryStorageBucketOperatorMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  orderBy?: Maybe<Array<StorageBucketOperatorMetadataOrderByInput>>
+}
+
+export type QueryStorageBucketOperatorMetadataByUniqueInputArgs = {
+  where: StorageBucketOperatorMetadataWhereUniqueInput
+}
+
+export type QueryStorageBucketOperatorMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  orderBy?: Maybe<Array<StorageBucketOperatorMetadataOrderByInput>>
+}
+
+export type QueryStorageBucketsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageBucketWhereInput>
+  orderBy?: Maybe<Array<StorageBucketOrderByInput>>
+}
+
+export type QueryStorageBucketByUniqueInputArgs = {
+  where: StorageBucketWhereUniqueInput
+}
+
+export type QueryStorageBucketsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageBucketWhereInput>
+  orderBy?: Maybe<Array<StorageBucketOrderByInput>>
+}
+
+export type QueryStorageDataObjectsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageDataObjectWhereInput>
+  orderBy?: Maybe<Array<StorageDataObjectOrderByInput>>
+}
+
+export type QueryStorageDataObjectByUniqueInputArgs = {
+  where: StorageDataObjectWhereUniqueInput
+}
+
+export type QueryStorageDataObjectsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageDataObjectWhereInput>
+  orderBy?: Maybe<Array<StorageDataObjectOrderByInput>>
+}
+
+export type QueryStorageSystemParametersArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageSystemParametersWhereInput>
+  orderBy?: Maybe<Array<StorageSystemParametersOrderByInput>>
+}
+
+export type QueryStorageSystemParametersByUniqueInputArgs = {
+  where: StorageSystemParametersWhereUniqueInput
+}
+
+export type QueryStorageSystemParametersConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageSystemParametersWhereInput>
+  orderBy?: Maybe<Array<StorageSystemParametersOrderByInput>>
+}
+
+export type QueryVideoCategoriesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoCategoryWhereInput>
+  orderBy?: Maybe<Array<VideoCategoryOrderByInput>>
+}
+
+export type QueryVideoCategoryByUniqueInputArgs = {
+  where: VideoCategoryWhereUniqueInput
+}
+
+export type QueryVideoCategoriesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoCategoryWhereInput>
+  orderBy?: Maybe<Array<VideoCategoryOrderByInput>>
+}
+
+export type QueryVideoMediaEncodingsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoMediaEncodingWhereInput>
+  orderBy?: Maybe<Array<VideoMediaEncodingOrderByInput>>
+}
+
+export type QueryVideoMediaEncodingByUniqueInputArgs = {
+  where: VideoMediaEncodingWhereUniqueInput
+}
+
+export type QueryVideoMediaEncodingsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoMediaEncodingWhereInput>
+  orderBy?: Maybe<Array<VideoMediaEncodingOrderByInput>>
+}
+
+export type QueryVideoMediaMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoMediaMetadataWhereInput>
+  orderBy?: Maybe<Array<VideoMediaMetadataOrderByInput>>
+}
+
+export type QueryVideoMediaMetadataByUniqueInputArgs = {
+  where: VideoMediaMetadataWhereUniqueInput
+}
+
+export type QueryVideoMediaMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoMediaMetadataWhereInput>
+  orderBy?: Maybe<Array<VideoMediaMetadataOrderByInput>>
+}
+
+export type QueryVideosArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoWhereInput>
+  orderBy?: Maybe<Array<VideoOrderByInput>>
+}
+
+export type QueryVideoByUniqueInputArgs = {
+  where: VideoWhereUniqueInput
+}
+
+export type QueryVideosConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoWhereInput>
+  orderBy?: Maybe<Array<VideoOrderByInput>>
+}
+
+export type QueryWorkersArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<WorkerWhereInput>
+  orderBy?: Maybe<Array<WorkerOrderByInput>>
+}
+
+export type QueryWorkerByUniqueInputArgs = {
+  where: WorkerWhereUniqueInput
+}
+
+export type QueryWorkersConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<WorkerWhereInput>
+  orderBy?: Maybe<Array<WorkerOrderByInput>>
+}
+
+export type SearchFtsOutput = {
+  item: SearchSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type SearchSearchResult = Channel | Video
+
+export type StandardDeleteResponse = {
+  id: Scalars['ID']
+}
+
+export type StorageBag = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  objects: Array<StorageDataObject>
+  storageAssignments: Array<StorageBagStorageAssignment>
+  distirbutionAssignments: Array<StorageBagDistributionAssignment>
+  /** Owner of the storage bag */
+  owner: StorageBagOwner
+}
+
+export type StorageBagConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageBagEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageBagCreateInput = {
+  owner: Scalars['JSONObject']
+}
+
+export type StorageBagDistributionAssignment = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  storageBag: StorageBag
+  storageBagId?: Maybe<Scalars['String']>
+  distributionBucket: DistributionBucket
+  distributionBucketId?: Maybe<Scalars['String']>
+}
+
+export type StorageBagDistributionAssignmentConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageBagDistributionAssignmentEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageBagDistributionAssignmentCreateInput = {
+  storageBag: Scalars['ID']
+  distributionBucket: Scalars['ID']
+  storageBagId?: Maybe<Scalars['String']>
+  distributionBucketId?: Maybe<Scalars['String']>
+}
+
+export type StorageBagDistributionAssignmentEdge = {
+  node: StorageBagDistributionAssignment
+  cursor: Scalars['String']
+}
+
+export enum StorageBagDistributionAssignmentOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  StorageBagAsc = 'storageBag_ASC',
+  StorageBagDesc = 'storageBag_DESC',
+  DistributionBucketAsc = 'distributionBucket_ASC',
+  DistributionBucketDesc = 'distributionBucket_DESC',
+  StorageBagIdAsc = 'storageBagId_ASC',
+  StorageBagIdDesc = 'storageBagId_DESC',
+  DistributionBucketIdAsc = 'distributionBucketId_ASC',
+  DistributionBucketIdDesc = 'distributionBucketId_DESC',
+}
+
+export type StorageBagDistributionAssignmentUpdateInput = {
+  storageBag?: Maybe<Scalars['ID']>
+  distributionBucket?: Maybe<Scalars['ID']>
+  storageBagId?: Maybe<Scalars['String']>
+  distributionBucketId?: Maybe<Scalars['String']>
+}
+
+export type StorageBagDistributionAssignmentWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  storageBag_eq?: Maybe<Scalars['ID']>
+  storageBag_in?: Maybe<Array<Scalars['ID']>>
+  distributionBucket_eq?: Maybe<Scalars['ID']>
+  distributionBucket_in?: Maybe<Array<Scalars['ID']>>
+  storageBagId_eq?: Maybe<Scalars['String']>
+  storageBagId_contains?: Maybe<Scalars['String']>
+  storageBagId_startsWith?: Maybe<Scalars['String']>
+  storageBagId_endsWith?: Maybe<Scalars['String']>
+  storageBagId_in?: Maybe<Array<Scalars['String']>>
+  distributionBucketId_eq?: Maybe<Scalars['String']>
+  distributionBucketId_contains?: Maybe<Scalars['String']>
+  distributionBucketId_startsWith?: Maybe<Scalars['String']>
+  distributionBucketId_endsWith?: Maybe<Scalars['String']>
+  distributionBucketId_in?: Maybe<Array<Scalars['String']>>
+  storageBag?: Maybe<StorageBagWhereInput>
+  distributionBucket?: Maybe<DistributionBucketWhereInput>
+  AND?: Maybe<Array<StorageBagDistributionAssignmentWhereInput>>
+  OR?: Maybe<Array<StorageBagDistributionAssignmentWhereInput>>
+}
+
+export type StorageBagDistributionAssignmentWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBagEdge = {
+  node: StorageBag
+  cursor: Scalars['String']
+}
+
+export enum StorageBagOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+}
+
+export type StorageBagOwner =
+  | StorageBagOwnerCouncil
+  | StorageBagOwnerWorkingGroup
+  | StorageBagOwnerMember
+  | StorageBagOwnerChannel
+  | StorageBagOwnerDao
+
+export type StorageBagOwnerChannel = {
+  channelId?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerChannelCreateInput = {
+  channelId?: Maybe<Scalars['Float']>
+}
+
+export type StorageBagOwnerChannelUpdateInput = {
+  channelId?: Maybe<Scalars['Float']>
+}
+
+export type StorageBagOwnerChannelWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  channelId_eq?: Maybe<Scalars['Int']>
+  channelId_gt?: Maybe<Scalars['Int']>
+  channelId_gte?: Maybe<Scalars['Int']>
+  channelId_lt?: Maybe<Scalars['Int']>
+  channelId_lte?: Maybe<Scalars['Int']>
+  channelId_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<StorageBagOwnerChannelWhereInput>>
+  OR?: Maybe<Array<StorageBagOwnerChannelWhereInput>>
+}
+
+export type StorageBagOwnerChannelWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBagOwnerCouncil = {
+  phantom?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerCouncilCreateInput = {
+  phantom?: Maybe<Scalars['Float']>
+}
+
+export type StorageBagOwnerCouncilUpdateInput = {
+  phantom?: Maybe<Scalars['Float']>
+}
+
+export type StorageBagOwnerCouncilWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  phantom_eq?: Maybe<Scalars['Int']>
+  phantom_gt?: Maybe<Scalars['Int']>
+  phantom_gte?: Maybe<Scalars['Int']>
+  phantom_lt?: Maybe<Scalars['Int']>
+  phantom_lte?: Maybe<Scalars['Int']>
+  phantom_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<StorageBagOwnerCouncilWhereInput>>
+  OR?: Maybe<Array<StorageBagOwnerCouncilWhereInput>>
+}
+
+export type StorageBagOwnerCouncilWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBagOwnerDao = {
+  daoId?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerDaoCreateInput = {
+  daoId?: Maybe<Scalars['Float']>
+}
+
+export type StorageBagOwnerDaoUpdateInput = {
+  daoId?: Maybe<Scalars['Float']>
+}
+
+export type StorageBagOwnerDaoWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  daoId_eq?: Maybe<Scalars['Int']>
+  daoId_gt?: Maybe<Scalars['Int']>
+  daoId_gte?: Maybe<Scalars['Int']>
+  daoId_lt?: Maybe<Scalars['Int']>
+  daoId_lte?: Maybe<Scalars['Int']>
+  daoId_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<StorageBagOwnerDaoWhereInput>>
+  OR?: Maybe<Array<StorageBagOwnerDaoWhereInput>>
+}
+
+export type StorageBagOwnerDaoWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBagOwnerMember = {
+  memberId?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerMemberCreateInput = {
+  memberId?: Maybe<Scalars['Float']>
+}
+
+export type StorageBagOwnerMemberUpdateInput = {
+  memberId?: Maybe<Scalars['Float']>
+}
+
+export type StorageBagOwnerMemberWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  memberId_eq?: Maybe<Scalars['Int']>
+  memberId_gt?: Maybe<Scalars['Int']>
+  memberId_gte?: Maybe<Scalars['Int']>
+  memberId_lt?: Maybe<Scalars['Int']>
+  memberId_lte?: Maybe<Scalars['Int']>
+  memberId_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<StorageBagOwnerMemberWhereInput>>
+  OR?: Maybe<Array<StorageBagOwnerMemberWhereInput>>
+}
+
+export type StorageBagOwnerMemberWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBagOwnerWorkingGroup = {
+  workingGroupId?: Maybe<Scalars['String']>
+}
+
+export type StorageBagOwnerWorkingGroupCreateInput = {
+  workingGroupId?: Maybe<Scalars['String']>
+}
+
+export type StorageBagOwnerWorkingGroupUpdateInput = {
+  workingGroupId?: Maybe<Scalars['String']>
+}
+
+export type StorageBagOwnerWorkingGroupWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  workingGroupId_eq?: Maybe<Scalars['String']>
+  workingGroupId_contains?: Maybe<Scalars['String']>
+  workingGroupId_startsWith?: Maybe<Scalars['String']>
+  workingGroupId_endsWith?: Maybe<Scalars['String']>
+  workingGroupId_in?: Maybe<Array<Scalars['String']>>
+  AND?: Maybe<Array<StorageBagOwnerWorkingGroupWhereInput>>
+  OR?: Maybe<Array<StorageBagOwnerWorkingGroupWhereInput>>
+}
+
+export type StorageBagOwnerWorkingGroupWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBagStorageAssignment = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  storageBag: StorageBag
+  storageBagId?: Maybe<Scalars['String']>
+  storageBucket: StorageBucket
+  storageBucketId?: Maybe<Scalars['String']>
+}
+
+export type StorageBagStorageAssignmentConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageBagStorageAssignmentEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageBagStorageAssignmentCreateInput = {
+  storageBag: Scalars['ID']
+  storageBucket: Scalars['ID']
+  storageBagId?: Maybe<Scalars['String']>
+  storageBucketId?: Maybe<Scalars['String']>
+}
+
+export type StorageBagStorageAssignmentEdge = {
+  node: StorageBagStorageAssignment
+  cursor: Scalars['String']
+}
+
+export enum StorageBagStorageAssignmentOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  StorageBagAsc = 'storageBag_ASC',
+  StorageBagDesc = 'storageBag_DESC',
+  StorageBucketAsc = 'storageBucket_ASC',
+  StorageBucketDesc = 'storageBucket_DESC',
+  StorageBagIdAsc = 'storageBagId_ASC',
+  StorageBagIdDesc = 'storageBagId_DESC',
+  StorageBucketIdAsc = 'storageBucketId_ASC',
+  StorageBucketIdDesc = 'storageBucketId_DESC',
+}
+
+export type StorageBagStorageAssignmentUpdateInput = {
+  storageBag?: Maybe<Scalars['ID']>
+  storageBucket?: Maybe<Scalars['ID']>
+  storageBagId?: Maybe<Scalars['String']>
+  storageBucketId?: Maybe<Scalars['String']>
+}
+
+export type StorageBagStorageAssignmentWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  storageBag_eq?: Maybe<Scalars['ID']>
+  storageBag_in?: Maybe<Array<Scalars['ID']>>
+  storageBucket_eq?: Maybe<Scalars['ID']>
+  storageBucket_in?: Maybe<Array<Scalars['ID']>>
+  storageBagId_eq?: Maybe<Scalars['String']>
+  storageBagId_contains?: Maybe<Scalars['String']>
+  storageBagId_startsWith?: Maybe<Scalars['String']>
+  storageBagId_endsWith?: Maybe<Scalars['String']>
+  storageBagId_in?: Maybe<Array<Scalars['String']>>
+  storageBucketId_eq?: Maybe<Scalars['String']>
+  storageBucketId_contains?: Maybe<Scalars['String']>
+  storageBucketId_startsWith?: Maybe<Scalars['String']>
+  storageBucketId_endsWith?: Maybe<Scalars['String']>
+  storageBucketId_in?: Maybe<Array<Scalars['String']>>
+  storageBag?: Maybe<StorageBagWhereInput>
+  storageBucket?: Maybe<StorageBucketWhereInput>
+  AND?: Maybe<Array<StorageBagStorageAssignmentWhereInput>>
+  OR?: Maybe<Array<StorageBagStorageAssignmentWhereInput>>
+}
+
+export type StorageBagStorageAssignmentWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBagUpdateInput = {
+  owner?: Maybe<Scalars['JSONObject']>
+}
+
+export type StorageBagWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  owner_json?: Maybe<Scalars['JSONObject']>
+  objects_none?: Maybe<StorageDataObjectWhereInput>
+  objects_some?: Maybe<StorageDataObjectWhereInput>
+  objects_every?: Maybe<StorageDataObjectWhereInput>
+  storageAssignments_none?: Maybe<StorageBagStorageAssignmentWhereInput>
+  storageAssignments_some?: Maybe<StorageBagStorageAssignmentWhereInput>
+  storageAssignments_every?: Maybe<StorageBagStorageAssignmentWhereInput>
+  distirbutionAssignments_none?: Maybe<StorageBagDistributionAssignmentWhereInput>
+  distirbutionAssignments_some?: Maybe<StorageBagDistributionAssignmentWhereInput>
+  distirbutionAssignments_every?: Maybe<StorageBagDistributionAssignmentWhereInput>
+  AND?: Maybe<Array<StorageBagWhereInput>>
+  OR?: Maybe<Array<StorageBagWhereInput>>
+}
+
+export type StorageBagWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBucket = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Current bucket operator status */
+  operatorStatus: StorageBucketOperatorStatus
+  operatorMetadata?: Maybe<StorageBucketOperatorMetadata>
+  operatorMetadataId?: Maybe<Scalars['String']>
+  /** Whether the bucket is accepting any new storage bags */
+  acceptingNewBags: Scalars['Boolean']
+  bagAssignments: Array<StorageBagStorageAssignment>
+  /** Bucket's data object size limit in bytes */
+  dataObjectsSizeLimit: Scalars['BigInt']
+  /** Bucket's data object count limit */
+  dataObjectCountLimit: Scalars['BigInt']
+  /** Number of assigned data objects */
+  dataObjectsCount: Scalars['BigInt']
+  /** Total size of assigned data objects */
+  dataObjectsSize: Scalars['BigInt']
+}
+
+export type StorageBucketConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageBucketEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageBucketCreateInput = {
+  operatorStatus: Scalars['JSONObject']
+  operatorMetadata?: Maybe<Scalars['ID']>
+  acceptingNewBags: Scalars['Boolean']
+  dataObjectsSizeLimit: Scalars['BigInt']
+  dataObjectCountLimit: Scalars['BigInt']
+  dataObjectsCount: Scalars['BigInt']
+  dataObjectsSize: Scalars['BigInt']
+}
+
+export type StorageBucketEdge = {
+  node: StorageBucket
+  cursor: Scalars['String']
+}
+
+export type StorageBucketOperatorMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Root node endpoint */
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<NodeLocationMetadata>
+  nodeLocationId?: Maybe<Scalars['String']>
+  /** Additional information about the node/operator */
+  extra?: Maybe<Scalars['String']>
+  storagebucketoperatorMetadata?: Maybe<Array<StorageBucket>>
+}
+
+export type StorageBucketOperatorMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageBucketOperatorMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageBucketOperatorMetadataCreateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<Scalars['ID']>
+  extra?: Maybe<Scalars['String']>
+}
+
+export type StorageBucketOperatorMetadataEdge = {
+  node: StorageBucketOperatorMetadata
+  cursor: Scalars['String']
+}
+
+export enum StorageBucketOperatorMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NodeEndpointAsc = 'nodeEndpoint_ASC',
+  NodeEndpointDesc = 'nodeEndpoint_DESC',
+  NodeLocationAsc = 'nodeLocation_ASC',
+  NodeLocationDesc = 'nodeLocation_DESC',
+  ExtraAsc = 'extra_ASC',
+  ExtraDesc = 'extra_DESC',
+}
+
+export type StorageBucketOperatorMetadataUpdateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<Scalars['ID']>
+  extra?: Maybe<Scalars['String']>
+}
+
+export type StorageBucketOperatorMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  nodeEndpoint_eq?: Maybe<Scalars['String']>
+  nodeEndpoint_contains?: Maybe<Scalars['String']>
+  nodeEndpoint_startsWith?: Maybe<Scalars['String']>
+  nodeEndpoint_endsWith?: Maybe<Scalars['String']>
+  nodeEndpoint_in?: Maybe<Array<Scalars['String']>>
+  nodeLocation_eq?: Maybe<Scalars['ID']>
+  nodeLocation_in?: Maybe<Array<Scalars['ID']>>
+  extra_eq?: Maybe<Scalars['String']>
+  extra_contains?: Maybe<Scalars['String']>
+  extra_startsWith?: Maybe<Scalars['String']>
+  extra_endsWith?: Maybe<Scalars['String']>
+  extra_in?: Maybe<Array<Scalars['String']>>
+  nodeLocation?: Maybe<NodeLocationMetadataWhereInput>
+  storagebucketoperatorMetadata_none?: Maybe<StorageBucketWhereInput>
+  storagebucketoperatorMetadata_some?: Maybe<StorageBucketWhereInput>
+  storagebucketoperatorMetadata_every?: Maybe<StorageBucketWhereInput>
+  AND?: Maybe<Array<StorageBucketOperatorMetadataWhereInput>>
+  OR?: Maybe<Array<StorageBucketOperatorMetadataWhereInput>>
+}
+
+export type StorageBucketOperatorMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBucketOperatorStatus =
+  | StorageBucketOperatorStatusMissing
+  | StorageBucketOperatorStatusInvited
+  | StorageBucketOperatorStatusActive
+
+export type StorageBucketOperatorStatusActive = {
+  workerId: Scalars['Int']
+}
+
+export type StorageBucketOperatorStatusActiveCreateInput = {
+  workerId: Scalars['Float']
+}
+
+export type StorageBucketOperatorStatusActiveUpdateInput = {
+  workerId?: Maybe<Scalars['Float']>
+}
+
+export type StorageBucketOperatorStatusActiveWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  workerId_eq?: Maybe<Scalars['Int']>
+  workerId_gt?: Maybe<Scalars['Int']>
+  workerId_gte?: Maybe<Scalars['Int']>
+  workerId_lt?: Maybe<Scalars['Int']>
+  workerId_lte?: Maybe<Scalars['Int']>
+  workerId_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<StorageBucketOperatorStatusActiveWhereInput>>
+  OR?: Maybe<Array<StorageBucketOperatorStatusActiveWhereInput>>
+}
+
+export type StorageBucketOperatorStatusActiveWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBucketOperatorStatusInvited = {
+  workerId: Scalars['Int']
+}
+
+export type StorageBucketOperatorStatusInvitedCreateInput = {
+  workerId: Scalars['Float']
+}
+
+export type StorageBucketOperatorStatusInvitedUpdateInput = {
+  workerId?: Maybe<Scalars['Float']>
+}
+
+export type StorageBucketOperatorStatusInvitedWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  workerId_eq?: Maybe<Scalars['Int']>
+  workerId_gt?: Maybe<Scalars['Int']>
+  workerId_gte?: Maybe<Scalars['Int']>
+  workerId_lt?: Maybe<Scalars['Int']>
+  workerId_lte?: Maybe<Scalars['Int']>
+  workerId_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<StorageBucketOperatorStatusInvitedWhereInput>>
+  OR?: Maybe<Array<StorageBucketOperatorStatusInvitedWhereInput>>
+}
+
+export type StorageBucketOperatorStatusInvitedWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBucketOperatorStatusMissing = {
+  phantom?: Maybe<Scalars['Int']>
+}
+
+export type StorageBucketOperatorStatusMissingCreateInput = {
+  phantom?: Maybe<Scalars['Float']>
+}
+
+export type StorageBucketOperatorStatusMissingUpdateInput = {
+  phantom?: Maybe<Scalars['Float']>
+}
+
+export type StorageBucketOperatorStatusMissingWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  phantom_eq?: Maybe<Scalars['Int']>
+  phantom_gt?: Maybe<Scalars['Int']>
+  phantom_gte?: Maybe<Scalars['Int']>
+  phantom_lt?: Maybe<Scalars['Int']>
+  phantom_lte?: Maybe<Scalars['Int']>
+  phantom_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<StorageBucketOperatorStatusMissingWhereInput>>
+  OR?: Maybe<Array<StorageBucketOperatorStatusMissingWhereInput>>
+}
+
+export type StorageBucketOperatorStatusMissingWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum StorageBucketOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  OperatorMetadataAsc = 'operatorMetadata_ASC',
+  OperatorMetadataDesc = 'operatorMetadata_DESC',
+  AcceptingNewBagsAsc = 'acceptingNewBags_ASC',
+  AcceptingNewBagsDesc = 'acceptingNewBags_DESC',
+  DataObjectsSizeLimitAsc = 'dataObjectsSizeLimit_ASC',
+  DataObjectsSizeLimitDesc = 'dataObjectsSizeLimit_DESC',
+  DataObjectCountLimitAsc = 'dataObjectCountLimit_ASC',
+  DataObjectCountLimitDesc = 'dataObjectCountLimit_DESC',
+  DataObjectsCountAsc = 'dataObjectsCount_ASC',
+  DataObjectsCountDesc = 'dataObjectsCount_DESC',
+  DataObjectsSizeAsc = 'dataObjectsSize_ASC',
+  DataObjectsSizeDesc = 'dataObjectsSize_DESC',
+}
+
+export type StorageBucketUpdateInput = {
+  operatorStatus?: Maybe<Scalars['JSONObject']>
+  operatorMetadata?: Maybe<Scalars['ID']>
+  acceptingNewBags?: Maybe<Scalars['Boolean']>
+  dataObjectsSizeLimit?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit?: Maybe<Scalars['BigInt']>
+  dataObjectsCount?: Maybe<Scalars['BigInt']>
+  dataObjectsSize?: Maybe<Scalars['BigInt']>
+}
+
+export type StorageBucketWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  operatorStatus_json?: Maybe<Scalars['JSONObject']>
+  operatorMetadata_eq?: Maybe<Scalars['ID']>
+  operatorMetadata_in?: Maybe<Array<Scalars['ID']>>
+  acceptingNewBags_eq?: Maybe<Scalars['Boolean']>
+  acceptingNewBags_in?: Maybe<Array<Scalars['Boolean']>>
+  dataObjectsSizeLimit_eq?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_gt?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_gte?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_lt?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_lte?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_in?: Maybe<Array<Scalars['BigInt']>>
+  dataObjectCountLimit_eq?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_gt?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_gte?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_lt?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_lte?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_in?: Maybe<Array<Scalars['BigInt']>>
+  dataObjectsCount_eq?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_gt?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_gte?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_lt?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_lte?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_in?: Maybe<Array<Scalars['BigInt']>>
+  dataObjectsSize_eq?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_gt?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_gte?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_lt?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_lte?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_in?: Maybe<Array<Scalars['BigInt']>>
+  operatorMetadata?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  bagAssignments_none?: Maybe<StorageBagStorageAssignmentWhereInput>
+  bagAssignments_some?: Maybe<StorageBagStorageAssignmentWhereInput>
+  bagAssignments_every?: Maybe<StorageBagStorageAssignmentWhereInput>
+  AND?: Maybe<Array<StorageBucketWhereInput>>
+  OR?: Maybe<Array<StorageBucketWhereInput>>
+}
+
+export type StorageBucketWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageDataObject = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Whether the data object was uploaded and accepted by the storage provider */
+  isAccepted: Scalars['Boolean']
+  /** Data object size in bytes */
+  size: Scalars['BigInt']
+  storageBag: StorageBag
+  storageBagId: Scalars['String']
+  /** IPFS content hash */
+  ipfsHash: Scalars['String']
+  /** The type of the asset that the data object represents (if known) */
+  type: DataObjectType
+  /** Prize for removing the data object */
+  deletionPrize: Scalars['BigInt']
+  /** If the object is no longer used as an asset - the time at which it was unset (if known) */
+  unsetAt?: Maybe<Scalars['DateTime']>
+  channelcoverPhoto?: Maybe<Array<Channel>>
+  channelavatarPhoto?: Maybe<Array<Channel>>
+  videothumbnailPhoto?: Maybe<Array<Video>>
+  videomedia?: Maybe<Array<Video>>
+}
+
+export type StorageDataObjectConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageDataObjectEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageDataObjectCreateInput = {
+  isAccepted: Scalars['Boolean']
+  size: Scalars['BigInt']
+  storageBag: Scalars['ID']
+  ipfsHash: Scalars['String']
+  type: Scalars['JSONObject']
+  deletionPrize: Scalars['BigInt']
+  unsetAt?: Maybe<Scalars['DateTime']>
+}
+
+export type StorageDataObjectEdge = {
+  node: StorageDataObject
+  cursor: Scalars['String']
+}
+
+export enum StorageDataObjectOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsAcceptedAsc = 'isAccepted_ASC',
+  IsAcceptedDesc = 'isAccepted_DESC',
+  SizeAsc = 'size_ASC',
+  SizeDesc = 'size_DESC',
+  StorageBagAsc = 'storageBag_ASC',
+  StorageBagDesc = 'storageBag_DESC',
+  IpfsHashAsc = 'ipfsHash_ASC',
+  IpfsHashDesc = 'ipfsHash_DESC',
+  DeletionPrizeAsc = 'deletionPrize_ASC',
+  DeletionPrizeDesc = 'deletionPrize_DESC',
+  UnsetAtAsc = 'unsetAt_ASC',
+  UnsetAtDesc = 'unsetAt_DESC',
+}
+
+export type StorageDataObjectUpdateInput = {
+  isAccepted?: Maybe<Scalars['Boolean']>
+  size?: Maybe<Scalars['BigInt']>
+  storageBag?: Maybe<Scalars['ID']>
+  ipfsHash?: Maybe<Scalars['String']>
+  type?: Maybe<Scalars['JSONObject']>
+  deletionPrize?: Maybe<Scalars['BigInt']>
+  unsetAt?: Maybe<Scalars['DateTime']>
+}
+
+export type StorageDataObjectWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  isAccepted_eq?: Maybe<Scalars['Boolean']>
+  isAccepted_in?: Maybe<Array<Scalars['Boolean']>>
+  size_eq?: Maybe<Scalars['BigInt']>
+  size_gt?: Maybe<Scalars['BigInt']>
+  size_gte?: Maybe<Scalars['BigInt']>
+  size_lt?: Maybe<Scalars['BigInt']>
+  size_lte?: Maybe<Scalars['BigInt']>
+  size_in?: Maybe<Array<Scalars['BigInt']>>
+  storageBag_eq?: Maybe<Scalars['ID']>
+  storageBag_in?: Maybe<Array<Scalars['ID']>>
+  ipfsHash_eq?: Maybe<Scalars['String']>
+  ipfsHash_contains?: Maybe<Scalars['String']>
+  ipfsHash_startsWith?: Maybe<Scalars['String']>
+  ipfsHash_endsWith?: Maybe<Scalars['String']>
+  ipfsHash_in?: Maybe<Array<Scalars['String']>>
+  type_json?: Maybe<Scalars['JSONObject']>
+  deletionPrize_eq?: Maybe<Scalars['BigInt']>
+  deletionPrize_gt?: Maybe<Scalars['BigInt']>
+  deletionPrize_gte?: Maybe<Scalars['BigInt']>
+  deletionPrize_lt?: Maybe<Scalars['BigInt']>
+  deletionPrize_lte?: Maybe<Scalars['BigInt']>
+  deletionPrize_in?: Maybe<Array<Scalars['BigInt']>>
+  unsetAt_eq?: Maybe<Scalars['DateTime']>
+  unsetAt_lt?: Maybe<Scalars['DateTime']>
+  unsetAt_lte?: Maybe<Scalars['DateTime']>
+  unsetAt_gt?: Maybe<Scalars['DateTime']>
+  unsetAt_gte?: Maybe<Scalars['DateTime']>
+  storageBag?: Maybe<StorageBagWhereInput>
+  channelcoverPhoto_none?: Maybe<ChannelWhereInput>
+  channelcoverPhoto_some?: Maybe<ChannelWhereInput>
+  channelcoverPhoto_every?: Maybe<ChannelWhereInput>
+  channelavatarPhoto_none?: Maybe<ChannelWhereInput>
+  channelavatarPhoto_some?: Maybe<ChannelWhereInput>
+  channelavatarPhoto_every?: Maybe<ChannelWhereInput>
+  videothumbnailPhoto_none?: Maybe<VideoWhereInput>
+  videothumbnailPhoto_some?: Maybe<VideoWhereInput>
+  videothumbnailPhoto_every?: Maybe<VideoWhereInput>
+  videomedia_none?: Maybe<VideoWhereInput>
+  videomedia_some?: Maybe<VideoWhereInput>
+  videomedia_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<StorageDataObjectWhereInput>>
+  OR?: Maybe<Array<StorageDataObjectWhereInput>>
+}
+
+export type StorageDataObjectWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+/** Global storage system parameters */
+export type StorageSystemParameters = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Blacklisted content hashes */
+  blacklist: Array<Scalars['String']>
+  /** How many buckets can be assigned to store a bag */
+  storageBucketsPerBagLimit: Scalars['Int']
+  /** How many buckets can be assigned to distribute a bag */
+  distributionBucketsPerBagLimit: Scalars['Int']
+  /** Whether the uploading is globally blocked */
+  uploadingBlocked: Scalars['Boolean']
+  /** Additional fee for storing 1 MB of data */
+  dataObjectFeePerMb: Scalars['BigInt']
+  /** Global max. number of objects a storage bucket can store (can also be further limitted the provider) */
+  storageBucketMaxObjectsCountLimit: Scalars['BigInt']
+  /** Global max. size of objects a storage bucket can store (can also be further limitted the provider) */
+  storageBucketMaxObjectsSizeLimit: Scalars['BigInt']
+  /** ID of the next data object when created */
+  nextDataObjectId: Scalars['BigInt']
+}
+
+export type StorageSystemParametersConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageSystemParametersEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageSystemParametersCreateInput = {
+  blacklist: Array<Scalars['String']>
+  storageBucketsPerBagLimit: Scalars['Float']
+  distributionBucketsPerBagLimit: Scalars['Float']
+  uploadingBlocked: Scalars['Boolean']
+  dataObjectFeePerMb: Scalars['BigInt']
+  storageBucketMaxObjectsCountLimit: Scalars['BigInt']
+  storageBucketMaxObjectsSizeLimit: Scalars['BigInt']
+  nextDataObjectId: Scalars['BigInt']
+}
+
+export type StorageSystemParametersEdge = {
+  node: StorageSystemParameters
+  cursor: Scalars['String']
+}
+
+export enum StorageSystemParametersOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  StorageBucketsPerBagLimitAsc = 'storageBucketsPerBagLimit_ASC',
+  StorageBucketsPerBagLimitDesc = 'storageBucketsPerBagLimit_DESC',
+  DistributionBucketsPerBagLimitAsc = 'distributionBucketsPerBagLimit_ASC',
+  DistributionBucketsPerBagLimitDesc = 'distributionBucketsPerBagLimit_DESC',
+  UploadingBlockedAsc = 'uploadingBlocked_ASC',
+  UploadingBlockedDesc = 'uploadingBlocked_DESC',
+  DataObjectFeePerMbAsc = 'dataObjectFeePerMb_ASC',
+  DataObjectFeePerMbDesc = 'dataObjectFeePerMb_DESC',
+  StorageBucketMaxObjectsCountLimitAsc = 'storageBucketMaxObjectsCountLimit_ASC',
+  StorageBucketMaxObjectsCountLimitDesc = 'storageBucketMaxObjectsCountLimit_DESC',
+  StorageBucketMaxObjectsSizeLimitAsc = 'storageBucketMaxObjectsSizeLimit_ASC',
+  StorageBucketMaxObjectsSizeLimitDesc = 'storageBucketMaxObjectsSizeLimit_DESC',
+  NextDataObjectIdAsc = 'nextDataObjectId_ASC',
+  NextDataObjectIdDesc = 'nextDataObjectId_DESC',
+}
+
+export type StorageSystemParametersUpdateInput = {
+  blacklist?: Maybe<Array<Scalars['String']>>
+  storageBucketsPerBagLimit?: Maybe<Scalars['Float']>
+  distributionBucketsPerBagLimit?: Maybe<Scalars['Float']>
+  uploadingBlocked?: Maybe<Scalars['Boolean']>
+  dataObjectFeePerMb?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit?: Maybe<Scalars['BigInt']>
+  nextDataObjectId?: Maybe<Scalars['BigInt']>
+}
+
+export type StorageSystemParametersWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  storageBucketsPerBagLimit_eq?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_gt?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_gte?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_lt?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_lte?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_in?: Maybe<Array<Scalars['Int']>>
+  distributionBucketsPerBagLimit_eq?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_gt?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_gte?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_lt?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_lte?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_in?: Maybe<Array<Scalars['Int']>>
+  uploadingBlocked_eq?: Maybe<Scalars['Boolean']>
+  uploadingBlocked_in?: Maybe<Array<Scalars['Boolean']>>
+  dataObjectFeePerMb_eq?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_gt?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_gte?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_lt?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_lte?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_in?: Maybe<Array<Scalars['BigInt']>>
+  storageBucketMaxObjectsCountLimit_eq?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_gt?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_gte?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_lt?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_lte?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_in?: Maybe<Array<Scalars['BigInt']>>
+  storageBucketMaxObjectsSizeLimit_eq?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_gt?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_gte?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_lt?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_lte?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_in?: Maybe<Array<Scalars['BigInt']>>
+  nextDataObjectId_eq?: Maybe<Scalars['BigInt']>
+  nextDataObjectId_gt?: Maybe<Scalars['BigInt']>
+  nextDataObjectId_gte?: Maybe<Scalars['BigInt']>
+  nextDataObjectId_lt?: Maybe<Scalars['BigInt']>
+  nextDataObjectId_lte?: Maybe<Scalars['BigInt']>
+  nextDataObjectId_in?: Maybe<Array<Scalars['BigInt']>>
+  AND?: Maybe<Array<StorageSystemParametersWhereInput>>
+  OR?: Maybe<Array<StorageSystemParametersWhereInput>>
+}
+
+export type StorageSystemParametersWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type Subscription = {
+  stateSubscription: ProcessorState
+}
+
+export type Video = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  channel: Channel
+  channelId: Scalars['String']
+  category?: Maybe<VideoCategory>
+  categoryId?: Maybe<Scalars['String']>
+  /** The title of the video */
+  title?: Maybe<Scalars['String']>
+  /** The description of the Video */
+  description?: Maybe<Scalars['String']>
+  /** Video duration in seconds */
+  duration?: Maybe<Scalars['Int']>
+  thumbnailPhoto?: Maybe<StorageDataObject>
+  thumbnailPhotoId?: Maybe<Scalars['String']>
+  language?: Maybe<Language>
+  languageId?: Maybe<Scalars['String']>
+  /** Whether or not Video contains marketing */
+  hasMarketing?: Maybe<Scalars['Boolean']>
+  /** If the Video was published on other platform before beeing published on Joystream - the original publication date */
+  publishedBeforeJoystream?: Maybe<Scalars['DateTime']>
+  /** Whether the Video is supposed to be publically displayed */
+  isPublic?: Maybe<Scalars['Boolean']>
+  /** Flag signaling whether a video is censored. */
+  isCensored: Scalars['Boolean']
+  /** Whether the Video contains explicit material. */
+  isExplicit?: Maybe<Scalars['Boolean']>
+  license?: Maybe<License>
+  licenseId?: Maybe<Scalars['String']>
+  media?: Maybe<StorageDataObject>
+  mediaId?: Maybe<Scalars['String']>
+  mediaMetadata?: Maybe<VideoMediaMetadata>
+  mediaMetadataId?: Maybe<Scalars['String']>
+  createdInBlock: Scalars['Int']
+  /** Is video featured or not */
+  isFeatured: Scalars['Boolean']
+}
+
+export type VideoCategoriesByNameFtsOutput = {
+  item: VideoCategoriesByNameSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type VideoCategoriesByNameSearchResult = VideoCategory
+
+export type VideoCategory = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** The name of the category */
+  name?: Maybe<Scalars['String']>
+  videos: Array<Video>
+  createdInBlock: Scalars['Int']
+}
+
+export type VideoCategoryConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoCategoryEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoCategoryCreateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock: Scalars['Float']
+}
+
+export type VideoCategoryEdge = {
+  node: VideoCategory
+  cursor: Scalars['String']
+}
+
+export enum VideoCategoryOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NameAsc = 'name_ASC',
+  NameDesc = 'name_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type VideoCategoryUpdateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type VideoCategoryWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  name_eq?: Maybe<Scalars['String']>
+  name_contains?: Maybe<Scalars['String']>
+  name_startsWith?: Maybe<Scalars['String']>
+  name_endsWith?: Maybe<Scalars['String']>
+  name_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  videos_none?: Maybe<VideoWhereInput>
+  videos_some?: Maybe<VideoWhereInput>
+  videos_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<VideoCategoryWhereInput>>
+  OR?: Maybe<Array<VideoCategoryWhereInput>>
+}
+
+export type VideoCategoryWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type VideoConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoCreateInput = {
+  channel: Scalars['ID']
+  category?: Maybe<Scalars['ID']>
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  duration?: Maybe<Scalars['Float']>
+  thumbnailPhoto?: Maybe<Scalars['ID']>
+  language?: Maybe<Scalars['ID']>
+  hasMarketing?: Maybe<Scalars['Boolean']>
+  publishedBeforeJoystream?: Maybe<Scalars['DateTime']>
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored: Scalars['Boolean']
+  isExplicit?: Maybe<Scalars['Boolean']>
+  license?: Maybe<Scalars['ID']>
+  media?: Maybe<Scalars['ID']>
+  mediaMetadata?: Maybe<Scalars['ID']>
+  createdInBlock: Scalars['Float']
+  isFeatured: Scalars['Boolean']
+}
+
+export type VideoEdge = {
+  node: Video
+  cursor: Scalars['String']
+}
+
+export type VideoMediaEncoding = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Encoding of the video media object */
+  codecName?: Maybe<Scalars['String']>
+  /** Media container format */
+  container?: Maybe<Scalars['String']>
+  /** Content MIME type */
+  mimeMediaType?: Maybe<Scalars['String']>
+  videomediametadataencoding?: Maybe<Array<VideoMediaMetadata>>
+}
+
+export type VideoMediaEncodingConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoMediaEncodingEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoMediaEncodingCreateInput = {
+  codecName?: Maybe<Scalars['String']>
+  container?: Maybe<Scalars['String']>
+  mimeMediaType?: Maybe<Scalars['String']>
+}
+
+export type VideoMediaEncodingEdge = {
+  node: VideoMediaEncoding
+  cursor: Scalars['String']
+}
+
+export enum VideoMediaEncodingOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CodecNameAsc = 'codecName_ASC',
+  CodecNameDesc = 'codecName_DESC',
+  ContainerAsc = 'container_ASC',
+  ContainerDesc = 'container_DESC',
+  MimeMediaTypeAsc = 'mimeMediaType_ASC',
+  MimeMediaTypeDesc = 'mimeMediaType_DESC',
+}
+
+export type VideoMediaEncodingUpdateInput = {
+  codecName?: Maybe<Scalars['String']>
+  container?: Maybe<Scalars['String']>
+  mimeMediaType?: Maybe<Scalars['String']>
+}
+
+export type VideoMediaEncodingWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  codecName_eq?: Maybe<Scalars['String']>
+  codecName_contains?: Maybe<Scalars['String']>
+  codecName_startsWith?: Maybe<Scalars['String']>
+  codecName_endsWith?: Maybe<Scalars['String']>
+  codecName_in?: Maybe<Array<Scalars['String']>>
+  container_eq?: Maybe<Scalars['String']>
+  container_contains?: Maybe<Scalars['String']>
+  container_startsWith?: Maybe<Scalars['String']>
+  container_endsWith?: Maybe<Scalars['String']>
+  container_in?: Maybe<Array<Scalars['String']>>
+  mimeMediaType_eq?: Maybe<Scalars['String']>
+  mimeMediaType_contains?: Maybe<Scalars['String']>
+  mimeMediaType_startsWith?: Maybe<Scalars['String']>
+  mimeMediaType_endsWith?: Maybe<Scalars['String']>
+  mimeMediaType_in?: Maybe<Array<Scalars['String']>>
+  videomediametadataencoding_none?: Maybe<VideoMediaMetadataWhereInput>
+  videomediametadataencoding_some?: Maybe<VideoMediaMetadataWhereInput>
+  videomediametadataencoding_every?: Maybe<VideoMediaMetadataWhereInput>
+  AND?: Maybe<Array<VideoMediaEncodingWhereInput>>
+  OR?: Maybe<Array<VideoMediaEncodingWhereInput>>
+}
+
+export type VideoMediaEncodingWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type VideoMediaMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  encoding?: Maybe<VideoMediaEncoding>
+  encodingId?: Maybe<Scalars['String']>
+  /** Video media width in pixels */
+  pixelWidth?: Maybe<Scalars['Int']>
+  /** Video media height in pixels */
+  pixelHeight?: Maybe<Scalars['Int']>
+  /** Video media size in bytes */
+  size?: Maybe<Scalars['BigInt']>
+  video?: Maybe<Video>
+  createdInBlock: Scalars['Int']
+}
+
+export type VideoMediaMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoMediaMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoMediaMetadataCreateInput = {
+  encoding?: Maybe<Scalars['ID']>
+  pixelWidth?: Maybe<Scalars['Float']>
+  pixelHeight?: Maybe<Scalars['Float']>
+  size?: Maybe<Scalars['BigInt']>
+  createdInBlock: Scalars['Float']
+}
+
+export type VideoMediaMetadataEdge = {
+  node: VideoMediaMetadata
+  cursor: Scalars['String']
+}
+
+export enum VideoMediaMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  EncodingAsc = 'encoding_ASC',
+  EncodingDesc = 'encoding_DESC',
+  PixelWidthAsc = 'pixelWidth_ASC',
+  PixelWidthDesc = 'pixelWidth_DESC',
+  PixelHeightAsc = 'pixelHeight_ASC',
+  PixelHeightDesc = 'pixelHeight_DESC',
+  SizeAsc = 'size_ASC',
+  SizeDesc = 'size_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type VideoMediaMetadataUpdateInput = {
+  encoding?: Maybe<Scalars['ID']>
+  pixelWidth?: Maybe<Scalars['Float']>
+  pixelHeight?: Maybe<Scalars['Float']>
+  size?: Maybe<Scalars['BigInt']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type VideoMediaMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  encoding_eq?: Maybe<Scalars['ID']>
+  encoding_in?: Maybe<Array<Scalars['ID']>>
+  pixelWidth_eq?: Maybe<Scalars['Int']>
+  pixelWidth_gt?: Maybe<Scalars['Int']>
+  pixelWidth_gte?: Maybe<Scalars['Int']>
+  pixelWidth_lt?: Maybe<Scalars['Int']>
+  pixelWidth_lte?: Maybe<Scalars['Int']>
+  pixelWidth_in?: Maybe<Array<Scalars['Int']>>
+  pixelHeight_eq?: Maybe<Scalars['Int']>
+  pixelHeight_gt?: Maybe<Scalars['Int']>
+  pixelHeight_gte?: Maybe<Scalars['Int']>
+  pixelHeight_lt?: Maybe<Scalars['Int']>
+  pixelHeight_lte?: Maybe<Scalars['Int']>
+  pixelHeight_in?: Maybe<Array<Scalars['Int']>>
+  size_eq?: Maybe<Scalars['BigInt']>
+  size_gt?: Maybe<Scalars['BigInt']>
+  size_gte?: Maybe<Scalars['BigInt']>
+  size_lt?: Maybe<Scalars['BigInt']>
+  size_lte?: Maybe<Scalars['BigInt']>
+  size_in?: Maybe<Array<Scalars['BigInt']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  encoding?: Maybe<VideoMediaEncodingWhereInput>
+  video?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<VideoMediaMetadataWhereInput>>
+  OR?: Maybe<Array<VideoMediaMetadataWhereInput>>
+}
+
+export type VideoMediaMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum VideoOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  ChannelAsc = 'channel_ASC',
+  ChannelDesc = 'channel_DESC',
+  CategoryAsc = 'category_ASC',
+  CategoryDesc = 'category_DESC',
+  TitleAsc = 'title_ASC',
+  TitleDesc = 'title_DESC',
+  DescriptionAsc = 'description_ASC',
+  DescriptionDesc = 'description_DESC',
+  DurationAsc = 'duration_ASC',
+  DurationDesc = 'duration_DESC',
+  ThumbnailPhotoAsc = 'thumbnailPhoto_ASC',
+  ThumbnailPhotoDesc = 'thumbnailPhoto_DESC',
+  LanguageAsc = 'language_ASC',
+  LanguageDesc = 'language_DESC',
+  HasMarketingAsc = 'hasMarketing_ASC',
+  HasMarketingDesc = 'hasMarketing_DESC',
+  PublishedBeforeJoystreamAsc = 'publishedBeforeJoystream_ASC',
+  PublishedBeforeJoystreamDesc = 'publishedBeforeJoystream_DESC',
+  IsPublicAsc = 'isPublic_ASC',
+  IsPublicDesc = 'isPublic_DESC',
+  IsCensoredAsc = 'isCensored_ASC',
+  IsCensoredDesc = 'isCensored_DESC',
+  IsExplicitAsc = 'isExplicit_ASC',
+  IsExplicitDesc = 'isExplicit_DESC',
+  LicenseAsc = 'license_ASC',
+  LicenseDesc = 'license_DESC',
+  MediaAsc = 'media_ASC',
+  MediaDesc = 'media_DESC',
+  MediaMetadataAsc = 'mediaMetadata_ASC',
+  MediaMetadataDesc = 'mediaMetadata_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+  IsFeaturedAsc = 'isFeatured_ASC',
+  IsFeaturedDesc = 'isFeatured_DESC',
+}
+
+export type VideoUpdateInput = {
+  channel?: Maybe<Scalars['ID']>
+  category?: Maybe<Scalars['ID']>
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  duration?: Maybe<Scalars['Float']>
+  thumbnailPhoto?: Maybe<Scalars['ID']>
+  language?: Maybe<Scalars['ID']>
+  hasMarketing?: Maybe<Scalars['Boolean']>
+  publishedBeforeJoystream?: Maybe<Scalars['DateTime']>
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored?: Maybe<Scalars['Boolean']>
+  isExplicit?: Maybe<Scalars['Boolean']>
+  license?: Maybe<Scalars['ID']>
+  media?: Maybe<Scalars['ID']>
+  mediaMetadata?: Maybe<Scalars['ID']>
+  createdInBlock?: Maybe<Scalars['Float']>
+  isFeatured?: Maybe<Scalars['Boolean']>
+}
+
+export type VideoWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  channel_eq?: Maybe<Scalars['ID']>
+  channel_in?: Maybe<Array<Scalars['ID']>>
+  category_eq?: Maybe<Scalars['ID']>
+  category_in?: Maybe<Array<Scalars['ID']>>
+  title_eq?: Maybe<Scalars['String']>
+  title_contains?: Maybe<Scalars['String']>
+  title_startsWith?: Maybe<Scalars['String']>
+  title_endsWith?: Maybe<Scalars['String']>
+  title_in?: Maybe<Array<Scalars['String']>>
+  description_eq?: Maybe<Scalars['String']>
+  description_contains?: Maybe<Scalars['String']>
+  description_startsWith?: Maybe<Scalars['String']>
+  description_endsWith?: Maybe<Scalars['String']>
+  description_in?: Maybe<Array<Scalars['String']>>
+  duration_eq?: Maybe<Scalars['Int']>
+  duration_gt?: Maybe<Scalars['Int']>
+  duration_gte?: Maybe<Scalars['Int']>
+  duration_lt?: Maybe<Scalars['Int']>
+  duration_lte?: Maybe<Scalars['Int']>
+  duration_in?: Maybe<Array<Scalars['Int']>>
+  thumbnailPhoto_eq?: Maybe<Scalars['ID']>
+  thumbnailPhoto_in?: Maybe<Array<Scalars['ID']>>
+  language_eq?: Maybe<Scalars['ID']>
+  language_in?: Maybe<Array<Scalars['ID']>>
+  hasMarketing_eq?: Maybe<Scalars['Boolean']>
+  hasMarketing_in?: Maybe<Array<Scalars['Boolean']>>
+  publishedBeforeJoystream_eq?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_lt?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_lte?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_gt?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_gte?: Maybe<Scalars['DateTime']>
+  isPublic_eq?: Maybe<Scalars['Boolean']>
+  isPublic_in?: Maybe<Array<Scalars['Boolean']>>
+  isCensored_eq?: Maybe<Scalars['Boolean']>
+  isCensored_in?: Maybe<Array<Scalars['Boolean']>>
+  isExplicit_eq?: Maybe<Scalars['Boolean']>
+  isExplicit_in?: Maybe<Array<Scalars['Boolean']>>
+  license_eq?: Maybe<Scalars['ID']>
+  license_in?: Maybe<Array<Scalars['ID']>>
+  media_eq?: Maybe<Scalars['ID']>
+  media_in?: Maybe<Array<Scalars['ID']>>
+  mediaMetadata_eq?: Maybe<Scalars['ID']>
+  mediaMetadata_in?: Maybe<Array<Scalars['ID']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  isFeatured_eq?: Maybe<Scalars['Boolean']>
+  isFeatured_in?: Maybe<Array<Scalars['Boolean']>>
+  channel?: Maybe<ChannelWhereInput>
+  category?: Maybe<VideoCategoryWhereInput>
+  thumbnailPhoto?: Maybe<StorageDataObjectWhereInput>
+  language?: Maybe<LanguageWhereInput>
+  license?: Maybe<LicenseWhereInput>
+  media?: Maybe<StorageDataObjectWhereInput>
+  mediaMetadata?: Maybe<VideoMediaMetadataWhereInput>
+  AND?: Maybe<Array<VideoWhereInput>>
+  OR?: Maybe<Array<VideoWhereInput>>
+}
+
+export type VideoWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type Worker = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Sign of worker still being active */
+  isActive: Scalars['Boolean']
+  /** Runtime identifier */
+  workerId: Scalars['String']
+  /** Associated working group */
+  type: WorkerType
+  /** Custom metadata set by provider */
+  metadata?: Maybe<Scalars['String']>
+}
+
+export type WorkerConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<WorkerEdge>
+  pageInfo: PageInfo
+}
+
+export type WorkerCreateInput = {
+  isActive: Scalars['Boolean']
+  workerId: Scalars['String']
+  type: WorkerType
+  metadata?: Maybe<Scalars['String']>
+}
+
+export type WorkerEdge = {
+  node: Worker
+  cursor: Scalars['String']
+}
+
+export enum WorkerOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsActiveAsc = 'isActive_ASC',
+  IsActiveDesc = 'isActive_DESC',
+  WorkerIdAsc = 'workerId_ASC',
+  WorkerIdDesc = 'workerId_DESC',
+  TypeAsc = 'type_ASC',
+  TypeDesc = 'type_DESC',
+  MetadataAsc = 'metadata_ASC',
+  MetadataDesc = 'metadata_DESC',
+}
+
+export enum WorkerType {
+  Gateway = 'GATEWAY',
+  Storage = 'STORAGE',
+}
+
+export type WorkerUpdateInput = {
+  isActive?: Maybe<Scalars['Boolean']>
+  workerId?: Maybe<Scalars['String']>
+  type?: Maybe<WorkerType>
+  metadata?: Maybe<Scalars['String']>
+}
+
+export type WorkerWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  isActive_eq?: Maybe<Scalars['Boolean']>
+  isActive_in?: Maybe<Array<Scalars['Boolean']>>
+  workerId_eq?: Maybe<Scalars['String']>
+  workerId_contains?: Maybe<Scalars['String']>
+  workerId_startsWith?: Maybe<Scalars['String']>
+  workerId_endsWith?: Maybe<Scalars['String']>
+  workerId_in?: Maybe<Array<Scalars['String']>>
+  type_eq?: Maybe<WorkerType>
+  type_in?: Maybe<Array<WorkerType>>
+  metadata_eq?: Maybe<Scalars['String']>
+  metadata_contains?: Maybe<Scalars['String']>
+  metadata_startsWith?: Maybe<Scalars['String']>
+  metadata_endsWith?: Maybe<Scalars['String']>
+  metadata_in?: Maybe<Array<Scalars['String']>>
+  AND?: Maybe<Array<WorkerWhereInput>>
+  OR?: Maybe<Array<WorkerWhereInput>>
+}
+
+export type WorkerWhereUniqueInput = {
+  id: Scalars['ID']
+}

+ 65 - 0
cli/src/graphql/queries/storage.graphql

@@ -0,0 +1,65 @@
+fragment StorageNodeInfo on StorageBucket {
+  id
+  operatorMetadata {
+    nodeEndpoint
+  }
+}
+
+query getStorageNodesInfoByBagId($bagId: String) {
+  storageBuckets(
+    where: {
+      operatorStatus_json: { isTypeOf_eq: "StorageBucketOperatorStatusActive" }
+      bagAssignments_some: { storageBagId_eq: $bagId }
+      operatorMetadata: { nodeEndpoint_contains: "http" }
+    }
+  ) {
+    ...StorageNodeInfo
+  }
+}
+
+fragment DataObjectInfo on StorageDataObject {
+  id
+  size
+  deletionPrize
+  type {
+    __typename
+    ... on DataObjectTypeVideoMedia {
+      video {
+        id
+      }
+    }
+    ... on DataObjectTypeVideoThumbnail {
+      video {
+        id
+      }
+    }
+    ... on DataObjectTypeChannelAvatar {
+      channel {
+        id
+      }
+    }
+    ... on DataObjectTypeChannelCoverPhoto {
+      channel {
+        id
+      }
+    }
+  }
+}
+
+query getDataObjectsByBagId($bagId: ID) {
+  storageDataObjects(where: { storageBag: { id_eq: $bagId } }) {
+    ...DataObjectInfo
+  }
+}
+
+query getDataObjectsChannelId($channelId: ID) {
+  storageDataObjects(where: { type_json: { channelId_eq: $channelId } }) {
+    ...DataObjectInfo
+  }
+}
+
+query getDataObjectsByVideoId($videoId: ID) {
+  storageDataObjects(where: { type_json: { videoId_eq: $videoId } }) {
+    ...DataObjectInfo
+  }
+}

+ 14 - 90
cli/src/helpers/serialization.ts

@@ -1,98 +1,22 @@
-import {
-  VideoMetadata,
-  PublishedBeforeJoystream,
-  License,
-  MediaType,
-  ChannelMetadata,
-  ChannelCategoryMetadata,
-  VideoCategoryMetadata,
-} from '@joystream/content-metadata-protobuf'
-import {
-  ChannelCategoryInputParameters,
-  ChannelInputParameters,
-  VideoCategoryInputParameters,
-  VideoInputParameters,
-} from '../Types'
+import { AnyMetadataClass, DecodedMetadataObject } from '@joystream/metadata-protobuf/types'
 import { Bytes } from '@polkadot/types/primitive'
-import { createType } from '@joystream/types'
+import { createTypeFromConstructor } from '@joystream/types'
+import { CLIError } from '@oclif/errors'
+import ExitCodes from '../ExitCodes'
+import { metaToObject } from '@joystream/metadata-protobuf/utils'
 
-type AnyMetadata = {
-  serializeBinary(): Uint8Array
+export function metadataToBytes<T>(metaClass: AnyMetadataClass<T>, obj: T): Bytes {
+  return createTypeFromConstructor(Bytes, '0x' + Buffer.from(metaClass.encode(obj).finish()).toString('hex'))
 }
 
-export function metadataToBytes(metadata: AnyMetadata): Bytes {
-  const bytes = createType('Bytes', '0x' + Buffer.from(metadata.serializeBinary()).toString('hex'))
-  console.log('Metadata as Bytes:', bytes.toString())
-  return bytes
+export function metadataFromBytes<T>(metaClass: AnyMetadataClass<T>, bytes: Bytes): DecodedMetadataObject<T> {
+  return metaToObject(metaClass, metaClass.decode(bytes.toU8a(true)))
 }
 
-// TODO: If "fromObject()" was generated for the protobuffs we could avoid having to create separate converters for each metadata
-
-export function videoMetadataFromInput(videoParametersInput: VideoInputParameters): VideoMetadata {
-  const videoMetadata = new VideoMetadata()
-  videoMetadata.setTitle(videoParametersInput.title as string)
-  videoMetadata.setDescription(videoParametersInput.description as string)
-  videoMetadata.setDuration(videoParametersInput.duration as number)
-  videoMetadata.setMediaPixelHeight(videoParametersInput.mediaPixelHeight as number)
-  videoMetadata.setMediaPixelWidth(videoParametersInput.mediaPixelWidth as number)
-  videoMetadata.setLanguage(videoParametersInput.language as string)
-  videoMetadata.setHasMarketing(videoParametersInput.hasMarketing as boolean)
-  videoMetadata.setIsPublic(videoParametersInput.isPublic as boolean)
-  videoMetadata.setIsExplicit(videoParametersInput.isExplicit as boolean)
-  videoMetadata.setPersonsList(videoParametersInput.personsList as number[])
-  videoMetadata.setCategory(videoParametersInput.category as number)
-
-  if (videoParametersInput.mediaType) {
-    const mediaType = new MediaType()
-    mediaType.setCodecName(videoParametersInput.mediaType.codecName as string)
-    mediaType.setContainer(videoParametersInput.mediaType.container as string)
-    mediaType.setMimeMediaType(videoParametersInput.mediaType.mimeMediaType as string)
-    videoMetadata.setMediaType(mediaType)
-  }
-
-  if (videoParametersInput.publishedBeforeJoystream) {
-    const publishedBeforeJoystream = new PublishedBeforeJoystream()
-    publishedBeforeJoystream.setIsPublished(videoParametersInput.publishedBeforeJoystream.isPublished as boolean)
-    publishedBeforeJoystream.setDate(videoParametersInput.publishedBeforeJoystream.date as string)
-    videoMetadata.setPublishedBeforeJoystream(publishedBeforeJoystream)
+export function asValidatedMetadata<T>(metaClass: AnyMetadataClass<T>, anyObject: any): T {
+  const error = metaClass.verify(anyObject)
+  if (error) {
+    throw new CLIError(`Invalid metadata: ${error}`, { exit: ExitCodes.InvalidInput })
   }
-
-  if (videoParametersInput.license) {
-    const license = new License()
-    license.setCode(videoParametersInput.license.code as number)
-    license.setAttribution(videoParametersInput.license.attribution as string)
-    license.setCustomText(videoParametersInput.license.customText as string)
-    videoMetadata.setLicense(license)
-  }
-
-  return videoMetadata
-}
-
-export function channelMetadataFromInput(channelParametersInput: ChannelInputParameters): ChannelMetadata {
-  const channelMetadata = new ChannelMetadata()
-  channelMetadata.setTitle(channelParametersInput.title as string)
-  channelMetadata.setDescription(channelParametersInput.description as string)
-  channelMetadata.setIsPublic(channelParametersInput.isPublic as boolean)
-  channelMetadata.setLanguage(channelParametersInput.language as string)
-  channelMetadata.setCategory(channelParametersInput.category as number)
-
-  return channelMetadata
-}
-
-export function channelCategoryMetadataFromInput(
-  channelCategoryParametersInput: ChannelCategoryInputParameters
-): ChannelCategoryMetadata {
-  const channelCategoryMetadata = new ChannelCategoryMetadata()
-  channelCategoryMetadata.setName(channelCategoryParametersInput.name as string)
-
-  return channelCategoryMetadata
-}
-
-export function videoCategoryMetadataFromInput(
-  videoCategoryParametersInput: VideoCategoryInputParameters
-): VideoCategoryMetadata {
-  const videoCategoryMetadata = new VideoCategoryMetadata()
-  videoCategoryMetadata.setName(videoCategoryParametersInput.name as string)
-
-  return videoCategoryMetadata
+  return { ...anyObject } as T
 }

+ 0 - 22
cli/src/json-schemas/Assets.schema.json

@@ -1,22 +0,0 @@
-{
-  "$schema": "http://json-schema.org/draft-07/schema",
-  "$id": "https://joystream.org/Assets.schema.json",
-  "title": "Assets",
-  "description": "List of assets to upload/reupload",
-  "type": "array",
-  "items": {
-    "type": "object",
-    "required": ["contentId", "path"],
-    "additionalProperties": false,
-    "properties": {
-      "contentId": {
-        "type": "string",
-        "description": "Already existing ContentID"
-      },
-      "path": {
-        "type": "string",
-        "description": "Path to the content file (relative to input json file)"
-      }
-    }
-  }
-}

+ 1 - 1
cli/src/json-schemas/ContentDirectory.ts → cli/src/schemas/ContentDirectory.ts

@@ -74,7 +74,7 @@ export const VideoInputSchema: JsonSchema<VideoInputParameters> = {
         },
       },
     },
-    personsList: { type: 'array' },
+    persons: { type: 'array' },
     publishedBeforeJoystream: {
       type: 'object',
       properties: {

+ 34 - 0
cli/src/schemas/json/Assets.schema.json

@@ -0,0 +1,34 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema",
+  "$id": "https://joystream.org/Assets.schema.json",
+  "title": "Assets",
+  "description": "List of assets to upload/reupload",
+  "type": "object",
+  "required": ["bagId", "assets"],
+  "properties": {
+    "bagId": {
+      "type": "string",
+      "description": "Target bag id"
+    },
+    "assets": {
+      "type": "array",
+      "description": "List of assets to upload",
+      "items": {
+        "type": "object",
+        "required": ["objectId", "path"],
+        "additionalProperties": false,
+        "properties": {
+          "objectId": {
+            "type": "string",
+            "description": "Already existing data object ID",
+            "pattern": "[0-9]+"
+          },
+          "path": {
+            "type": "string",
+            "description": "Path to the content file (relative to input json file)"
+          }
+        }
+      }
+    }
+  }
+}

+ 0 - 0
cli/src/json-schemas/WorkingGroupOpening.schema.json → cli/src/schemas/json/WorkingGroupOpening.schema.json


+ 30 - 0
cli/src/schemas/typings/Assets.schema.d.ts

@@ -0,0 +1,30 @@
+/* tslint:disable */
+/**
+ * This file was automatically generated by json-schema-to-typescript.
+ * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file,
+ * and run json-schema-to-typescript to regenerate this file.
+ */
+
+/**
+ * List of assets to upload/reupload
+ */
+export interface Assets {
+  /**
+   * Target bag id
+   */
+  bagId: string
+  /**
+   * List of assets to upload
+   */
+  assets: {
+    /**
+     * Already existing data object ID
+     */
+    objectId: string
+    /**
+     * Path to the content file (relative to input json file)
+     */
+    path: string
+  }[]
+  [k: string]: unknown
+}

+ 0 - 0
cli/src/json-schemas/typings/WorkingGroupOpening.schema.d.ts → cli/src/schemas/typings/WorkingGroupOpening.schema.d.ts


+ 2 - 1
cli/tsconfig.json

@@ -12,7 +12,8 @@
     "noUnusedLocals": true,
     "baseUrl": ".",
     "paths": {
-      "@polkadot/types/augment": ["../types/augment-codec/augment-types.ts"]
+      "@polkadot/types/augment": ["../types/augment-codec/augment-types.ts"],
+      "@polkadot/api/augment": ["../types/augment-codec/augment-api.ts"]
     },
     "resolveJsonModule": true,
     "skipLibCheck": true

+ 26 - 7
colossus.Dockerfile

@@ -2,15 +2,34 @@ FROM --platform=linux/x86-64 node:14 as builder
 
 WORKDIR /joystream
 COPY . /joystream
-RUN  rm -fr /joystream/pioneer
 
-EXPOSE 3001
-
-RUN yarn --frozen-lockfile
+RUN yarn
 
 RUN yarn workspace @joystream/types build
-RUN yarn workspace storage-node build
+RUN yarn workspace @joystream/metadata-protobuf build
+RUN yarn workspace storage-node-v2 build
 
-RUN yarn
+# Use these volumes to persist uploading data and to pass the keyfile.
+VOLUME ["/data", "/keystore"]
+
+# Required variables
+ENV WS_PROVIDER_ENDPOINT_URI=ws://not-set
+ENV COLOSSUS_PORT=3333
+ENV QUERY_NODE_HOST=not-set
+ENV WORKER_ID=not-set
+# - set external key file using the `/keystore` volume
+ENV ACCOUNT_KEYFILE=
+ENV ACCOUNT_PWD=
+# Optional variables
+ENV SYNC_INTERVAL=1
+ENV ELASTIC_SEARCH_HOST=
+# warn, error, debug, info
+ENV ELASTIC_LOG_LEVEL=debug
+# - overrides account key file
+ENV ACCOUNT_URI=
+
+# Colossus node port
+EXPOSE ${COLOSSUS_PORT}
 
-ENTRYPOINT yarn colossus --dev --ws-provider $WS_PROVIDER_ENDPOINT_URI
+WORKDIR /joystream/storage-node-v2
+ENTRYPOINT yarn storage-node server --queryNodeHost ${QUERY_NODE_HOST} --port ${COLOSSUS_PORT} --uploads /data --worker ${WORKER_ID} --apiUrl ${WS_PROVIDER_ENDPOINT_URI} --sync --syncInterval=${SYNC_INTERVAL} --keyFile=${ACCOUNT_KEYFILE} --elasticSearchHost=${ELASTIC_SEARCH_HOST}

+ 1 - 1
distributor-node.Dockerfile

@@ -10,7 +10,7 @@ COPY ./package.json package.json
 EXPOSE 3334
 
 # Build & cleanup
-# (must be inside a signle "RUN", see: https://stackoverflow.com/questions/40212836/docker-image-larger-than-its-filesystem)
+# (must be inside a single "RUN", see: https://stackoverflow.com/questions/40212836/docker-image-larger-than-its-filesystem)
 RUN \
   yarn --frozen-lockfile &&\
   yarn workspace @joystream/types build &&\

+ 18 - 18
distributor-node/src/services/networking/distributor-node/generated/api.ts

@@ -28,88 +28,88 @@ import { BASE_PATH, COLLECTION_FORMATS, RequestArgs, BaseAPI, RequiredError } fr
 export type BucketsResponse = BucketsResponseOneOf | BucketsResponseOneOf1;
 
 /**
- * 
+ *
  * @export
  * @interface BucketsResponseOneOf
  */
 export interface BucketsResponseOneOf {
     /**
-     * 
+     *
      * @type {Array<number>}
      * @memberof BucketsResponseOneOf
      */
     bucketIds: Array<number>;
 }
 /**
- * 
+ *
  * @export
  * @interface BucketsResponseOneOf1
  */
 export interface BucketsResponseOneOf1 {
     /**
-     * 
+     *
      * @type {number}
      * @memberof BucketsResponseOneOf1
      */
     allByWorkerId: number;
 }
 /**
- * 
+ *
  * @export
  * @interface ErrorResponse
  */
 export interface ErrorResponse {
     /**
-     * 
+     *
      * @type {string}
      * @memberof ErrorResponse
      */
     type?: string;
     /**
-     * 
+     *
      * @type {string}
      * @memberof ErrorResponse
      */
     message: string;
 }
 /**
- * 
+ *
  * @export
  * @interface StatusResponse
  */
 export interface StatusResponse {
     /**
-     * 
+     *
      * @type {string}
      * @memberof StatusResponse
      */
     id: string;
     /**
-     * 
+     *
      * @type {number}
      * @memberof StatusResponse
      */
     objectsInCache: number;
     /**
-     * 
+     *
      * @type {number}
      * @memberof StatusResponse
      */
     storageLimit: number;
     /**
-     * 
+     *
      * @type {number}
      * @memberof StatusResponse
      */
     storageUsed: number;
     /**
-     * 
+     *
      * @type {number}
      * @memberof StatusResponse
      */
     uptime: number;
     /**
-     * 
+     *
      * @type {number}
      * @memberof StatusResponse
      */
@@ -145,7 +145,7 @@ export const PublicApiAxiosParamCreator = function (configuration?: Configuratio
             const localVarQueryParameter = {} as any;
 
 
-    
+
             setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
             let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
             localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
@@ -178,7 +178,7 @@ export const PublicApiAxiosParamCreator = function (configuration?: Configuratio
             const localVarQueryParameter = {} as any;
 
 
-    
+
             setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
             let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
             localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
@@ -207,7 +207,7 @@ export const PublicApiAxiosParamCreator = function (configuration?: Configuratio
             const localVarQueryParameter = {} as any;
 
 
-    
+
             setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
             let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
             localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
@@ -236,7 +236,7 @@ export const PublicApiAxiosParamCreator = function (configuration?: Configuratio
             const localVarQueryParameter = {} as any;
 
 
-    
+
             setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
             let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
             localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};

تفاوت فایلی نمایش داده نمی شود زیرا این فایل بسیار بزرگ است
+ 492 - 494
distributor-node/src/services/networking/query-node/generated/schema.ts


+ 41 - 41
distributor-node/src/services/networking/storage-node/generated/api.ts

@@ -22,151 +22,151 @@ import { DUMMY_BASE_URL, assertParamExists, setApiKeyToObject, setBasicAuthToObj
 import { BASE_PATH, COLLECTION_FORMATS, RequestArgs, BaseAPI, RequiredError } from './base';
 
 /**
- * 
+ *
  * @export
  * @interface DataStatsResponse
  */
 export interface DataStatsResponse {
     /**
-     * 
+     *
      * @type {number}
      * @memberof DataStatsResponse
      */
     totalSize: number;
     /**
-     * 
+     *
      * @type {number}
      * @memberof DataStatsResponse
      */
     objectNumber: number;
     /**
-     * 
+     *
      * @type {number}
      * @memberof DataStatsResponse
      */
     tempDirSize?: number;
     /**
-     * 
+     *
      * @type {number}
      * @memberof DataStatsResponse
      */
     tempDownloads?: number;
 }
 /**
- * 
+ *
  * @export
  * @interface ErrorResponse
  */
 export interface ErrorResponse {
     /**
-     * 
+     *
      * @type {string}
      * @memberof ErrorResponse
      */
     type?: string;
     /**
-     * 
+     *
      * @type {string}
      * @memberof ErrorResponse
      */
     message: string;
 }
 /**
- * 
+ *
  * @export
  * @interface InlineResponse201
  */
 export interface InlineResponse201 {
     /**
-     * 
+     *
      * @type {string}
      * @memberof InlineResponse201
      */
     id?: string;
 }
 /**
- * 
+ *
  * @export
  * @interface InlineResponse2011
  */
 export interface InlineResponse2011 {
     /**
-     * 
+     *
      * @type {string}
      * @memberof InlineResponse2011
      */
     token?: string;
 }
 /**
- * 
+ *
  * @export
  * @interface TokenRequest
  */
 export interface TokenRequest {
     /**
-     * 
+     *
      * @type {TokenRequestData}
      * @memberof TokenRequest
      */
     data: TokenRequestData;
     /**
-     * 
+     *
      * @type {string}
      * @memberof TokenRequest
      */
     signature: string;
 }
 /**
- * 
+ *
  * @export
  * @interface TokenRequestData
  */
 export interface TokenRequestData {
     /**
-     * 
+     *
      * @type {number}
      * @memberof TokenRequestData
      */
     memberId: number;
     /**
-     * 
+     *
      * @type {string}
      * @memberof TokenRequestData
      */
     accountId: string;
     /**
-     * 
+     *
      * @type {number}
      * @memberof TokenRequestData
      */
     dataObjectId: number;
     /**
-     * 
+     *
      * @type {number}
      * @memberof TokenRequestData
      */
     storageBucketId: number;
     /**
-     * 
+     *
      * @type {string}
      * @memberof TokenRequestData
      */
     bagId: string;
 }
 /**
- * 
+ *
  * @export
  * @interface VersionResponse
  */
 export interface VersionResponse {
     /**
-     * 
+     *
      * @type {string}
      * @memberof VersionResponse
      */
     version: string;
     /**
-     * 
+     *
      * @type {string}
      * @memberof VersionResponse
      */
@@ -199,7 +199,7 @@ export const FilesApiAxiosParamCreator = function (configuration?: Configuration
             const localVarQueryParameter = {} as any;
 
 
-    
+
             localVarHeaderParameter['Content-Type'] = 'application/json';
 
             setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
@@ -235,7 +235,7 @@ export const FilesApiAxiosParamCreator = function (configuration?: Configuration
             const localVarQueryParameter = {} as any;
 
 
-    
+
             setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
             let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
             localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
@@ -268,7 +268,7 @@ export const FilesApiAxiosParamCreator = function (configuration?: Configuration
             const localVarQueryParameter = {} as any;
 
 
-    
+
             setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
             let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
             localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
@@ -311,25 +311,25 @@ export const FilesApiAxiosParamCreator = function (configuration?: Configuration
             await setApiKeyToObject(localVarHeaderParameter, "x-api-key", configuration)
 
 
-            if (file !== undefined) { 
+            if (file !== undefined) {
                 localVarFormParams.append('file', file as any);
             }
-    
-            if (dataObjectId !== undefined) { 
+
+            if (dataObjectId !== undefined) {
                 localVarFormParams.append('dataObjectId', dataObjectId as any);
             }
-    
-            if (storageBucketId !== undefined) { 
+
+            if (storageBucketId !== undefined) {
                 localVarFormParams.append('storageBucketId', storageBucketId as any);
             }
-    
-            if (bagId !== undefined) { 
+
+            if (bagId !== undefined) {
                 localVarFormParams.append('bagId', bagId as any);
             }
-    
-    
+
+
             localVarHeaderParameter['Content-Type'] = 'multipart/form-data';
-    
+
             setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
             let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
             localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
@@ -526,7 +526,7 @@ export const StateApiAxiosParamCreator = function (configuration?: Configuration
             const localVarQueryParameter = {} as any;
 
 
-    
+
             setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
             let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
             localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
@@ -559,7 +559,7 @@ export const StateApiAxiosParamCreator = function (configuration?: Configuration
             const localVarQueryParameter = {} as any;
 
 
-    
+
             setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
             let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
             localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
@@ -588,7 +588,7 @@ export const StateApiAxiosParamCreator = function (configuration?: Configuration
             const localVarQueryParameter = {} as any;
 
 
-    
+
             setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
             let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
             localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
@@ -617,7 +617,7 @@ export const StateApiAxiosParamCreator = function (configuration?: Configuration
             const localVarQueryParameter = {} as any;
 
 
-    
+
             setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
             let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
             localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};

+ 13 - 0
distributor-node/src/services/validation/generateTypes.ts

@@ -0,0 +1,13 @@
+import fs from 'fs'
+import path from 'path'
+import { compile } from 'json-schema-to-typescript'
+import { schemas } from './schemas'
+
+// eslint-disable-next-line @typescript-eslint/no-var-requires
+const prettierConfig = require('@joystream/prettier-config')
+
+Object.entries(schemas).forEach(([schemaKey, schema]) => {
+  compile(schema, `${schemaKey}Json`, { style: prettierConfig }).then((output) =>
+    fs.writeFileSync(path.resolve(__dirname, `../../types/generated/${schemaKey}Json.d.ts`), output)
+  )
+})

+ 56 - 0
distributor-node/src/services/validation/schemas/configSchema.ts

@@ -0,0 +1,56 @@
+import { JSONSchema4 } from 'json-schema'
+import { strictObject } from './utils'
+import winston from 'winston'
+
+export const bytesizeUnits = ['B', 'K', 'M', 'G', 'T']
+export const bytesizeRegex = new RegExp(`^[0-9]+(${bytesizeUnits.join('|')})$`)
+
+export const configSchema: JSONSchema4 = {
+  type: 'object',
+  required: ['id', 'endpoints', 'directories', 'buckets', 'keys', 'port', 'workerId', 'limits'],
+  additionalProperties: false,
+  properties: {
+    id: { type: 'string' },
+    endpoints: {
+      type: 'object',
+      additionalProperties: false,
+      required: ['queryNode', 'substrateNode'],
+      properties: {
+        queryNode: { type: 'string' },
+        substrateNode: { type: 'string' },
+        elasticSearch: { type: 'string' },
+      },
+    },
+    directories: strictObject({
+      data: { type: 'string' },
+      cache: { type: 'string' },
+      logs: { type: 'string' },
+    }),
+    log: {
+      type: 'object',
+      additionalProperties: false,
+      properties: {
+        file: { type: 'string', enum: [...Object.keys(winston.config.npm.levels), 'off'] },
+        console: { type: 'string', enum: [...Object.keys(winston.config.npm.levels), 'off'] },
+        elastic: { type: 'string', enum: [...Object.keys(winston.config.npm.levels), 'off'] },
+      },
+    },
+    limits: strictObject({
+      storage: { type: 'string', pattern: bytesizeRegex.source },
+      maxConcurrentStorageNodeDownloads: { type: 'integer', minimum: 1 },
+      maxConcurrentOutboundConnections: { type: 'integer', minimum: 1 },
+      outboundRequestsTimeout: { type: 'integer', minimum: 1 },
+    }),
+    port: { type: 'integer', minimum: 0 },
+    keys: { type: 'array', items: { type: 'string' }, minItems: 1 },
+    buckets: {
+      oneOf: [
+        { type: 'array', items: { type: 'integer', minimum: 0 }, minItems: 1 },
+        { type: 'string', enum: ['all'] },
+      ],
+    },
+    workerId: { type: 'integer', minimum: 0 },
+  },
+}
+
+export default configSchema

+ 23 - 0
distributor-node/src/services/validation/schemas/familyMetadataSchema.ts

@@ -0,0 +1,23 @@
+import { JSONSchema4 } from 'json-schema'
+
+export const familyMetadataSchema: JSONSchema4 = {
+  type: 'object',
+  additionalProperties: false,
+  properties: {
+    region: { type: 'string' },
+    description: { type: 'string' },
+    boundary: {
+      type: 'array',
+      items: {
+        type: 'object',
+        additionalProperties: false,
+        properties: {
+          latitude: { type: 'number', minimum: -90, maximum: 90 },
+          longitude: { type: 'number', minimum: -180, maximum: 180 },
+        },
+      },
+    },
+  },
+}
+
+export default familyMetadataSchema

+ 24 - 0
distributor-node/src/services/validation/schemas/index.ts

@@ -0,0 +1,24 @@
+import { ConfigJson } from '../../../types/generated/ConfigJson'
+import { OperatorMetadataJson } from '../../../types/generated/OperatorMetadataJson'
+import { FamilyMetadataJson } from '../../../types/generated/FamilyMetadataJson'
+import { configSchema } from './configSchema'
+import { familyMetadataSchema } from './familyMetadataSchema'
+import { operatorMetadataSchema } from './operatorMetadataSchema'
+
+export const schemas = {
+  Config: configSchema,
+  OperatorMetadata: operatorMetadataSchema,
+  FamilyMetadata: familyMetadataSchema,
+} as const
+
+export type SchemaKey = keyof typeof schemas & string
+
+export type TypeBySchemaKey<T extends SchemaKey> = T extends 'Config'
+  ? ConfigJson
+  : T extends 'OperatorMetadata'
+  ? OperatorMetadataJson
+  : T extends 'FamilyMetadata'
+  ? FamilyMetadataJson
+  : never
+
+export default schemas

+ 28 - 0
distributor-node/src/services/validation/schemas/operatorMetadataSchema.ts

@@ -0,0 +1,28 @@
+import { JSONSchema4 } from 'json-schema'
+
+export const operatorMetadataSchema: JSONSchema4 = {
+  type: 'object',
+  additionalProperties: false,
+  properties: {
+    endpoint: { type: 'string' },
+    location: {
+      type: 'object',
+      additionalProperties: false,
+      properties: {
+        countryCode: { type: 'string' },
+        city: { type: 'string' },
+        coordinates: {
+          type: 'object',
+          additionalProperties: false,
+          properties: {
+            latitude: { type: 'number', minimum: -90, maximum: 90 },
+            longitude: { type: 'number', minimum: -180, maximum: 180 },
+          },
+        },
+      },
+    },
+    extra: { type: 'string' },
+  },
+}
+
+export default operatorMetadataSchema

+ 10 - 0
distributor-node/src/services/validation/schemas/utils.ts

@@ -0,0 +1,10 @@
+import { JSONSchema4 } from 'json-schema'
+
+export function strictObject(properties: Exclude<JSONSchema4['properties'], undefined>): JSONSchema4 {
+  return {
+    type: 'object',
+    additionalProperties: false,
+    required: Object.keys(properties),
+    properties,
+  }
+}

+ 14 - 0
docker-compose.linux-gnu-build.yml

@@ -0,0 +1,14 @@
+version: '3.4'
+services:
+  joystream-node:
+    image: joystream/node:latest
+    build:
+      # context is relative to the compose file
+      context: .
+      # dockerfile is relative to the context
+      dockerfile: joystream-node.Dockerfile
+    container_name: joystream-node
+    volumes:
+      - /data
+    command: --dev --alice --validator --unsafe-ws-external --unsafe-rpc-external --rpc-methods Unsafe --rpc-cors=all --log runtime --base-path /data
+    network_mode: host

+ 42 - 0
docker-compose.multi-storage.yml

@@ -0,0 +1,42 @@
+version: '3.4'
+services:
+  colossus-2:
+    image: joystream/colossus:latest
+    restart: on-failure
+    build:
+      context: .
+      dockerfile: colossus.Dockerfile
+    depends_on:
+      - graphql-server
+    volumes:
+      - /data
+      - /keystore
+      - ${ACCOUNT_KEYFILE}:/joystream/storage-node-v2/keyfile
+    ports:
+      - '127.0.0.1:${COLOSSUS_PORT_2}:${COLOSSUS_PORT}'
+    env_file:
+      # relative to working directory where docker-compose was run from
+      - .env
+    environment:
+      WORKER_ID: ${WORKER_ID}
+      ACCOUNT_KEYFILE: /joystream/storage-node-v2/keyfile
+
+  distributor-node-2:
+    image: joystream/distributor-node
+    restart: on-failure
+    build:
+      context: .
+      dockerfile: distributor-node.Dockerfile
+    depends_on:
+      - graphql-server
+    volumes:
+      - /data
+      - /cache
+    ports:
+      - 127.0.0.1:${DISTRIBUTOR_PORT_2}:3334
+    environment:
+      JOYSTREAM_DISTRIBUTOR__ENDPOINTS__QUERY_NODE: http://${QUERY_NODE_HOST}/graphql
+      JOYSTREAM_DISTRIBUTOR__KEYS: ${KEYS}
+      JOYSTREAM_DISTRIBUTOR__BUCKETS: ${BUCKETS}
+      JOYSTREAM_DISTRIBUTOR__WORKER_ID: ${WORKER_ID}
+    command: ['start']

+ 33 - 42
docker-compose.yml

@@ -1,7 +1,7 @@
 # Compiles new joystream/node and joystream/apps images if local images not found
 # and runs a complete joystream development network
 # To prevent build of docker images run docker-compose with "--no-build" arg
-version: "3.4"
+version: '3.4'
 services:
   joystream-node:
     image: joystream/node:latest
@@ -15,41 +15,32 @@ services:
       - /data
     command: --dev --alice --validator --unsafe-ws-external --unsafe-rpc-external --rpc-methods Unsafe --rpc-cors=all --log runtime --base-path /data
     ports:
-      - "127.0.0.1:9944:9944"
-      - "127.0.0.1:9933:9933"
-
-  ipfs:
-    image: ipfs/go-ipfs:latest
-    ports:
-      - '127.0.0.1:5001:5001'
-      - '127.0.0.1:8080:8080'
-    volumes:
-      - /data/ipfs
-    entrypoint: ''
-    command: |
-      /bin/sh -c "
-        set -e
-        /usr/local/bin/start_ipfs config profile apply lowpower
-        /usr/local/bin/start_ipfs config --json Gateway.PublicGateways '{\"localhost\": null }'
-        /sbin/tini -- /usr/local/bin/start_ipfs daemon --migrate=true
-      "
+      - '127.0.0.1:9944:9944'
+      - '127.0.0.1:9933:9933'
 
   colossus:
-    image: joystream/apps
+    image: joystream/colossus:latest
     restart: on-failure
-    depends_on:
-      - "ipfs"
     build:
       context: .
-      dockerfile: apps.Dockerfile
+      dockerfile: colossus.Dockerfile
+    depends_on:
+      - graphql-server
+    volumes:
+      - /data
+      - /keystore
+    ports:
+      - '127.0.0.1:3333:${COLOSSUS_PORT}'
     env_file:
       # relative to working directory where docker-compose was run from
       - .env
-    ports:
-      - '127.0.0.1:3001:3001'
-    command: colossus --dev --ws-provider ${WS_PROVIDER_ENDPOINT_URI} --ipfs-host ipfs
     environment:
-      - DEBUG=*
+      - COLOSSUS_PORT=3333
+      - QUERY_NODE_HOST=graphql-server-mnt:${GRAPHQL_SERVER_PORT}
+      - WORKER_ID=0
+      - ACCOUNT_URI=//Alice
+      # enable ElasticSearch server
+      # - ELASTIC_SEARCH_HOST=host.docker.internal:9200
 
   distributor-node:
     image: joystream/distributor-node
@@ -57,15 +48,17 @@ services:
     build:
       context: .
       dockerfile: distributor-node.Dockerfile
+    depends_on:
+      - graphql-server
     volumes:
       - /data
       - /cache
     ports:
       - 127.0.0.1:3334:3334
    # Node configuration can be overridden via env, for example:
-    # environment:
-    #   JOYSTREAM_DISTRIBUTOR__ID: node-id
-    #   JOYSTREAM_DISTRIBUTOR__ENDPOINTS__QUERY_NODE: qn-endpoint
+    environment:
+      # JOYSTREAM_DISTRIBUTOR__ID: node-id
+      JOYSTREAM_DISTRIBUTOR__ENDPOINTS__QUERY_NODE: http://${GRAPHQL_SERVER_HOST}:${GRAPHQL_SERVER_PORT}/graphql
     #   JOYSTREAM_DISTRIBUTOR__ENDPOINTS__JOYSTREAM_NODE_WS: sn-endpoint
     #   JOYSTREAM_DISTRIBUTOR__ENDPOINTS__ELASTIC_SEARCH: es-endpoint
     #   JOYSTREAM_DISTRIBUTOR__DIRECTORIES__ASSETS: assets-dir
@@ -85,7 +78,7 @@ services:
     image: postgres:12
     restart: always
     ports:
-      - "127.0.0.1:${DB_PORT}:5432"
+      - '127.0.0.1:${DB_PORT}:5432'
     volumes:
       - /var/lib/postgresql/data
     env_file:
@@ -102,17 +95,16 @@ services:
     build:
       context: .
       dockerfile: apps.Dockerfile
-      network: joystream_default
       args:
         - WS_PROVIDER_ENDPOINT_URI=${WS_PROVIDER_ENDPOINT_URI}
     env_file:
       # relative to working directory where docker-compose was run from
       - .env
     ports:
-      - "127.0.0.1:8081:${GRAPHQL_SERVER_PORT}"
+      - '127.0.0.1:8081:${GRAPHQL_SERVER_PORT}'
     depends_on:
       - db
-    command: ["workspace", "query-node-root", "query-node:start:prod"]
+    command: ['workspace', 'query-node-root', 'query-node:start:prod']
 
   graphql-server-mnt:
     image: node:14
@@ -121,7 +113,7 @@ services:
       # relative to working directory where docker-compose was run from
       - .env
     ports:
-      - "127.0.0.1:8081:${GRAPHQL_SERVER_PORT}"
+      - '127.0.0.1:8081:${GRAPHQL_SERVER_PORT}'
     depends_on:
       - db
     volumes:
@@ -129,7 +121,7 @@ services:
         source: .
         target: /joystream
     working_dir: /joystream
-    command: ["yarn", "workspace", "query-node-root", "query-node:start:prod"]
+    command: ['yarn', 'workspace', 'query-node-root', 'query-node:start:prod']
 
   processor:
     image: joystream/apps
@@ -137,7 +129,6 @@ services:
     build:
       context: .
       dockerfile: apps.Dockerfile
-      network: joystream_default
       args:
         - WS_PROVIDER_ENDPOINT_URI=${WS_PROVIDER_ENDPOINT_URI}
     env_file:
@@ -152,7 +143,7 @@ services:
       - ./types/augment/all/defs.json:/joystream/query-node/mappings/lib/generated/types/typedefs.json
     depends_on:
       - hydra-indexer-gateway
-    command: ["workspace", "query-node-root", "processor:start"]
+    command: ['workspace', 'query-node-root', 'processor:start']
 
   processor-mnt:
     image: node:14
@@ -171,7 +162,7 @@ services:
         source: .
         target: /joystream
     working_dir: /joystream
-    command: ["yarn", "workspace", "query-node-root", "processor:start"]
+    command: ['yarn', 'workspace', 'query-node-root', 'processor:start']
 
   indexer:
     image: joystream/hydra-indexer:3.0.0
@@ -209,7 +200,7 @@ services:
       - PORT=${WARTHOG_APP_PORT}
       - DEBUG=*
     ports:
-      - "127.0.0.1:4000:${WARTHOG_APP_PORT}"
+      - '127.0.0.1:4000:${WARTHOG_APP_PORT}'
     depends_on:
       - redis
       - db
@@ -219,7 +210,7 @@ services:
     image: redis:6.0-alpine
     restart: always
     ports:
-      - "127.0.0.1:6379:6379"
+      - '127.0.0.1:6379:6379'
 
   pioneer:
     image: joystream/pioneer
@@ -227,4 +218,4 @@ services:
       context: .
       dockerfile: pioneer.Dockerfile
     ports:
-      - "127.0.0.1:3000:80"
+      - '127.0.0.1:3000:80'

+ 37 - 0
multi-storage.sh

@@ -0,0 +1,37 @@
+#!/usr/bin/env bash
+set -e
+
+# Script to run a second storage node and distributor node on local
+# Make sure to run yarn start prior to running this script
+
+set -a
+. .env
+set +a
+
+export COLOSSUS_PORT_2=3335
+export DISTRIBUTOR_PORT_2=3336
+export KEYS=[//Alice]
+export BUCKETS='all'
+export WORKER_ID=2
+export ACCOUNT_KEYFILE="./types/augment/all/defs.json"
+
+function down()
+{
+    # Stop containers and clear volumes
+    docker-compose -f docker-compose.yml -f docker-compose.multi-storage.yml rm -vsf distributor-node-2
+    docker-compose -f docker-compose.yml -f docker-compose.multi-storage.yml rm -vsf colossus-2
+}
+
+down
+
+trap down EXIT
+
+docker-compose -f docker-compose.yml -f docker-compose.multi-storage.yml run -d --name colossus-2 colossus-2
+
+docker-compose -f docker-compose.yml -f docker-compose.multi-storage.yml run -d --name distributor-node-2 distributor-node-2
+
+echo "use Ctrl+C to shutdown the development network."
+
+while true; do 
+  read
+done

+ 2 - 2
package.json

@@ -47,7 +47,7 @@
     "typescript": "^4.4.3",
     "bn.js": "4.12.0",
     "rxjs": "^7.4.0",
-    "typeorm": "^0.2.31",
+    "typeorm": "0.2.34",
     "pg": "^8.4.0"
   },
   "devDependencies": {
@@ -66,7 +66,7 @@
     "yarn": "^1.22.0"
   },
   "volta": {
-    "node": "14.16.1",
+    "node": "14.18.0",
     "yarn": "1.22.4"
   }
 }

+ 2 - 5
query-node/build.sh

@@ -16,11 +16,8 @@ yarn clean
 yarn codegen:noinstall
 yarn typegen # if this fails try to run this command outside of yarn workspaces
 
-# TS4 useUnknownInCatchVariables is not compatible with auto-generated code
-sed -i '/\"compilerOptions\"\:\ {/a \ \ \ \ "useUnknownInCatchVariables": false,' ./generated/graphql-server/tsconfig.json
-# Type assertions are no longer needed for createTypeUnsafe in @polkadot/api 5.9.1 (and they break the build)
-# Here we're relpacing createTypeUnsafe<Assertions>(...params) to createTypeUnsafe(...params) in all generated types:
-find ./mappings/generated -type f -print0 | xargs -0 sed -ri 's/createTypeUnsafe<[^(]+[(]/createTypeUnsafe(/g'
+# Post-codegen - fixes in autogenerated files
+yarn ts-node --project ./mappings/tsconfig.json ./mappings/scripts/postCodegen.ts
 
 # We run yarn again to ensure graphql-server dependencies are installed
 # and are inline with root workspace resolutions

+ 20 - 0
query-node/mappings/scripts/postCodegen.ts

@@ -0,0 +1,20 @@
+// A script to be executed post hydra codegen, that may include modifications to autogenerated files
+import fs from 'fs'
+import path from 'path'
+
+// TS4 useUnknownInCatchVariables is not compatible with auto-generated code inside generated/graphql-server
+const serverTsConfigPath = path.resolve(__dirname, '../../generated/graphql-server/tsconfig.json')
+const serverTsConfig = JSON.parse(fs.readFileSync(serverTsConfigPath).toString())
+serverTsConfig.compilerOptions.useUnknownInCatchVariables = false
+fs.writeFileSync(serverTsConfigPath, JSON.stringify(serverTsConfig, undefined, 2))
+
+// Type assertions are no longer needed for createTypeUnsafe in @polkadot/api 5.9.1 (and they break the build)
+// Here we're replacing createTypeUnsafe<Assertions>(...params) with createTypeUnsafe(...params) in all generated types:
+const generatedTypesPaths = path.resolve(__dirname, '../generated/types')
+fs.readdirSync(generatedTypesPaths).map((fileName) => {
+  if (path.extname(fileName) === '.ts') {
+    const filePath = path.join(generatedTypesPaths, fileName)
+    const fileContent = fs.readFileSync(filePath).toString()
+    fs.writeFileSync(filePath, fileContent.replace(/createTypeUnsafe<[^(]+[(]/g, 'createTypeUnsafe('))
+  }
+})

+ 4 - 0
runtime-modules/common/src/working_group.rs

@@ -11,6 +11,10 @@ use strum_macros::EnumIter;
 #[cfg_attr(feature = "std", derive(Serialize, Deserialize, EnumIter))]
 #[derive(Encode, Decode, Clone, PartialEq, Eq, Copy, Debug, PartialOrd, Ord)]
 pub enum WorkingGroup {
+    /* Reserved
+        // working_group::Instance0.
+        Reserved,
+    */
     /* Reserved
         /// Forum working group: working_group::Instance1.
         Forum,

+ 28 - 14
start.sh

@@ -23,28 +23,42 @@ trap down EXIT
 # Run a local development chain
 docker-compose up -d joystream-node
 
-## Storage Infrastructure
-# Configure a dev storage node and start storage node
-DEBUG=joystream:storage-cli:dev yarn storage-cli dev-init
-docker-compose up -d colossus
-# Create a new content directory lead
-yarn workspace api-scripts initialize-content-lead
-
-# Set sudo as the membership screening authority
-yarn workspace api-scripts set-sudo-as-screening-auth
-
 ## Query Node Infrastructure
 # Initialize a new database for the query node infrastructure
 docker-compose up -d db
+
+# Override DB_HOST for db setup
+export DB_HOST=localhost
+
+# Make sure we use dev config for db migrations (prevents "Cannot create database..." and some other errors)
+yarn workspace query-node config:dev
+
+# Migrate the databases
 yarn workspace query-node-root db:prepare
 yarn workspace query-node-root db:migrate
 
-# Startup all query-node infrastructure services
-export WS_PROVIDER_ENDPOINT_URI=ws://joystream-node:9944
-docker-compose up -d graphql-server
+# Set DB_HOST back to docker-service one
+export DB_HOST=db
+
+# Start processor and graphql server
 docker-compose up -d processor
+docker-compose up -d graphql-server
+
+## Storage Infrastructure
+docker-compose run -d --name colossus --entrypoint sh colossus -c "yarn storage-node dev:init --apiUrl ${WS_PROVIDER_ENDPOINT_URI} && \
+          yarn storage-node server --queryNodeHost ${QUERY_NODE_HOST} --port ${COLOSSUS_PORT} \
+          --uploads /data --worker ${WORKER_ID} --apiUrl ${WS_PROVIDER_ENDPOINT_URI} --sync --syncInterval=1 \
+          --keyFile=${ACCOUNT_KEYFILE} --elasticSearchHost=${ELASTIC_SEARCH_HOST}"
+
+docker-compose up -d distributor-node
+
+# # Create a new content directory lead
+# yarn workspace api-scripts initialize-content-lead
+
+# # Set sudo as the membership screening authority
+# yarn workspace api-scripts set-sudo-as-screening-auth
 
-docker-compose up -d pioneer
+# docker-compose up -d pioneer
 
 echo "use Ctrl+C to shutdown the development network."
 

+ 1 - 0
storage-node-v2/.eslintignore

@@ -1,2 +1,3 @@
 /lib
 .eslintrc.js
+**/generated/*

+ 1 - 0
storage-node-v2/.eslintrc.js

@@ -9,6 +9,7 @@ module.exports = {
   rules: {
     'no-console': 'warn', // use dedicated logger
     'no-unused-vars': 'off', // Required by the typescript rule below
+    'prettier/prettier': 'off', // prettier-eslint conflicts inherited from @joystream/eslint-config
     '@typescript-eslint/no-unused-vars': ['error'],
     '@typescript-eslint/no-floating-promises': 'error',
   },

+ 412 - 194
storage-node-v2/README.md

@@ -1,35 +1,162 @@
-storage-node-v2
+Colossus v2
 ===============
 
-Jostream storage subsystem.
+Joystream storage subsystem.
 
 [![oclif](https://img.shields.io/badge/cli-oclif-brightgreen.svg)](https://oclif.io)
-[![Version](https://img.shields.io/npm/v/storage-node-v2.svg)](https://npmjs.org/package/storage-node-v2)
-[![Downloads/week](https://img.shields.io/npm/dw/storage-node-v2.svg)](https://npmjs.org/package/storage-node-v2)
-[![License](https://img.shields.io/npm/l/storage-node-v2.svg)](https://github.com/shamil-gadelshin/storage-node-v2/blob/master/package.json)
+![License](https://img.shields.io/github/license/Joystream/joystream)
 
 <!-- toc -->
+* [Description](#description)
+  * [API](#api)
+  * [Auth schema](#auth-schema-description)
+  * [CLI](#cli)
+  * [Metadata](#metadata)
+  * [Data](#data)
+    * [Uploading](#uploading)
+    * [Synchronization](#synchronization)
+    * [Distribution](#distribution)
+  * [Comments](#comments)
+* [Installation](#installation)
 * [Usage](#usage)
-* [Commands](#commands)
+  * [Prerequisites](#prerequisites)
+  * [CLI Command](#cli-command)
+  * [Docker](#docker)
+* [CLI Commands](#cli-commands)
 <!-- tocstop -->
+
+# Description
+
+The main responsibility of Colossus is handling media data for users. The data could be images, audio, or video files.
+Colossus receives uploads and saves files in the local folder, registers uploads in the blockchain, and later serves files 
+to Argus nodes (distribution nodes). Colossus instances spread the data using peer-to-peer synchronization.
+On data uploading, clients should provide an authentication token to prevent abuse.
+Data management is blockchain-based, it relies on the concepts of buckets, bags, data objects.
+The full description of the blockchain smart contracts could be found [here](https://github.com/Joystream/joystream/issues/2224).
+
+### API
+
+Colossus provides REST API for its clients and other Colossus instances. It's based on the OpenAPI Specification v3. Here is the complete [spec](./src/api-spec/openapi.yaml) (work in progress).
+
+API endpoints:
+- files
+    - get - get the data file by its ID
+    - head - get the data file headers by its ID
+    - post - upload a file (requires auth token)
+    - auth_token - returns auth token for uploading
+- state
+    - version - Colossus version and system environment
+    - all data objects IDs
+    - data objects IDs for bag
+    - data statistics - total data folder size and data object number
+
+
+### Auth schema description
+
+To reduce the possibility of abuse of the uploading endpoint we implemented a simple authentication schema. On each uploading attempt, the client should receive the auth token first and provide it as a special header. The token has an expiration time and cannot be reused. To receive such a token the client should be part of the StorageWorkingGroup and have a `WorkerId`.
+
+
+### CLI
+
+There is a command-line interface to manage Storage Working Group operations like creating a bucket or changing storage settings. Full description could be found [below](#cli-commands).
+
+There are several groups of commands:
+- *leader* - manages the Storage Working group in the blockchain. Requires leader privileges.
+- *operator* - Storage Provider group - it manages data object uploads and storage provider metadata(endpoint is the most important). Requires active Storage Working group membership.
+- *dev* - development support commands. Requires development blockchain setup with Alice account.
+- *ungrouped* - server and help commands. `server` starts Colossus and `help` shows the full command list.
+
+### Metadata
+The storage provider should provide *metadata* for Colossus instance to be discoverable by other Colossus or
+Argus (distributor node) instances. At the very least an endpoint should be registered in the blockchain.
+For some complex scenarios, Colossus should provide its geolocation.
+
+Metadata could be registered using [operator:set-metadata](#storage-node-operatorset-metadata) command.
+A simple endpoint could be set using the `--endpoint` flag of the command. Complex metadata requires JSON file ([example](./scripts/operatorMetadata.json)).
+JSON file format based on the *protobuf* format described [here](../metadata-protobuf/proto/Storage.proto).
+
+### Data 
+#### Uploading
+
+Colossus accepts files using its API. The data must be uploaded using POST http method with `multipart/form-data`.
+Simplified process:
+
+1. Get auth token using API endpoint
+   - sign data object info with private blockchain account key
+
+2. Upload file
+   - auth header decoding and verification
+   - accepting the data upload in the temp folder
+   - data hash & size verification
+   - moving the data to the data folder
+   - registering the data object as `accepted` in the blockchain
+    
+#### Synchronization
+
+Several instances of Colossus should contain the data replica in order to provide some degree of reliability.
+When some Colossus instance receives the data upload it marks the related data object as `accepted`.
+Other instances that have the same obligations to store the data (they serve storage buckets assigned to the same bag)
+will eventually load this data object from the initial receiver (or some other node that already downloaded a new
+data object from the initial receiver) using REST API.
+
+#### Distribution
+
+The actual data distribution (serving to end-users) is done via Argus - the distributor node. It gets data from Colossus using the same `get` endpoint on a single data object basis.
+
+### Comments
+- Colossus relies on the [Query Node (Hydra)](https://www.joystream.org/hydra/) to get the blockchain data in a structured form.
+- Using Colossus as a functioning Storage Provider requires providing [account URI or key file and password](https://wiki.polkadot.network/docs/learn-accounts) as well as active `WorkerId` from the Storage Working group.
+
+# Installation
+```shell
+# Ubuntu Linux
+
+# Install packages required for installation
+apt update
+apt install git curl
+
+# Clone the code repository
+git clone https://github.com/Joystream/joystream
+cd joystream
+
+# Install volta
+curl https://get.volta.sh | bash
+bash
+
+# Install project dependencies and build it
+yarn
+yarn workspace @joystream/types build
+yarn workspace @joystream/metadata-protobuf build
+yarn workspace storage-node-v2 build
+
+# Verify installation
+cd storage-node-v2
+yarn storage-node version
+```
 # Usage
-<!-- usage -->
+
 ```sh-session
-$ npm install -g storage-node-v2
-$ storage-node COMMAND
-running command...
-$ storage-node (-v|--version|version)
-storage-node-v2/0.1.0 darwin-x64 node-v14.17.0
-$ storage-node --help [COMMAND]
-USAGE
-  $ storage-node COMMAND
-...
+$ yarn storage-node server --apiUrl ws://localhost:9944  -w 0 --accountUri //Alice -q localhost:8081 -o 3333 -d ~/uploads --sync
 ```
-<!-- usagestop -->
-# Commands
+
+### Prerequisites
+- accountURI or keyfile and password
+- workerId from the Storage working group that matches with the account above
+- Joystream Network validator URL
+- QueryNode URL
+- (optional) ElasticSearch URL
+- created directory for data uploading
+
+### CLI command
+ The full command description can be found [below](#storage-node-server).
+### Docker
+There is also an option to run Colossus as [Docker container](../colossus.Dockerfile).
+
+# CLI Commands
 <!-- commands -->
 * [`storage-node dev:init`](#storage-node-devinit)
 * [`storage-node dev:multihash`](#storage-node-devmultihash)
+* [`storage-node dev:sync`](#storage-node-devsync)
 * [`storage-node dev:upload`](#storage-node-devupload)
 * [`storage-node dev:verify-bag-id`](#storage-node-devverify-bag-id)
 * [`storage-node help [COMMAND]`](#storage-node-help-command)
@@ -60,14 +187,17 @@ USAGE
   $ storage-node dev:init
 
 OPTIONS
-  -h, --help               show CLI help
-  -k, --keyfile=keyfile    Key file for the account. Mandatory in non-dev environment.
-  -m, --dev                Use development mode
-  -p, --password=password  Key file password (optional).
-  -u, --apiUrl=apiUrl      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
+  -h, --help                   show CLI help
+  -k, --keyFile=keyFile        Key file for the account. Mandatory in non-dev environment.
+  -m, --dev                    Use development mode
+  -p, --password=password      Key file password (optional). Could be overriden by ACCOUNT_PWD environment variable.
+  -u, --apiUrl=apiUrl          [default: ws://localhost:9944] Runtime API URL. Mandatory in non-dev environment.
+
+  -y, --accountUri=accountUri  Account URI (optional). Has a priority over the keyFile and password flags. Could be
+                               overriden by ACCOUNT_URI environment variable.
 ```
 
-_See code: [src/commands/dev/init.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/dev/init.ts)_
+_See code: [src/commands/dev/init.ts](https://github.com/Joystream/joystream/blob/v2.0.0/src/commands/dev/init.ts)_
 
 ## `storage-node dev:multihash`
 
@@ -82,7 +212,31 @@ OPTIONS
   -h, --help       show CLI help
 ```
 
-_See code: [src/commands/dev/multihash.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/dev/multihash.ts)_
+_See code: [src/commands/dev/multihash.ts](https://github.com/Joystream/joystream/blob/v2.0.0/src/commands/dev/multihash.ts)_
+
+## `storage-node dev:sync`
+
+Synchronizes the data - it fixes the differences between local data folder and worker ID obligations from the runtime.
+
+```
+USAGE
+  $ storage-node dev:sync
+
+OPTIONS
+  -d, --uploads=uploads                                (required) Data uploading directory (absolute path).
+  -h, --help                                           show CLI help
+
+  -o, --dataSourceOperatorHost=dataSourceOperatorHost  Storage node host and port (e.g.: some.com:8081) to get data
+                                                       from.
+
+  -p, --syncWorkersNumber=syncWorkersNumber            Sync workers number (max async operations in progress).
+
+  -q, --queryNodeHost=queryNodeHost                    Query node host and port (e.g.: some.com:8081)
+
+  -w, --workerId=workerId                              (required) Storage node operator worker ID.
+```
+
+_See code: [src/commands/dev/sync.ts](https://github.com/Joystream/joystream/blob/v2.0.0/src/commands/dev/sync.ts)_
 
 ## `storage-node dev:upload`
 
@@ -93,16 +247,19 @@ USAGE
   $ storage-node dev:upload
 
 OPTIONS
-  -c, --cid=cid            (required) Data object IPFS content ID.
-  -h, --help               show CLI help
-  -k, --keyfile=keyfile    Key file for the account. Mandatory in non-dev environment.
-  -m, --dev                Use development mode
-  -p, --password=password  Key file password (optional).
-  -s, --size=size          (required) Data object size.
-  -u, --apiUrl=apiUrl      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
+  -c, --cid=cid                (required) Data object IPFS content ID.
+  -h, --help                   show CLI help
+  -k, --keyFile=keyFile        Key file for the account. Mandatory in non-dev environment.
+  -m, --dev                    Use development mode
+  -p, --password=password      Key file password (optional). Could be overriden by ACCOUNT_PWD environment variable.
+  -s, --size=size              (required) Data object size.
+  -u, --apiUrl=apiUrl          [default: ws://localhost:9944] Runtime API URL. Mandatory in non-dev environment.
+
+  -y, --accountUri=accountUri  Account URI (optional). Has a priority over the keyFile and password flags. Could be
+                               overriden by ACCOUNT_URI environment variable.
 ```
 
-_See code: [src/commands/dev/upload.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/dev/upload.ts)_
+_See code: [src/commands/dev/upload.ts](https://github.com/Joystream/joystream/blob/v2.0.0/src/commands/dev/upload.ts)_
 
 ## `storage-node dev:verify-bag-id`
 
@@ -117,33 +274,20 @@ OPTIONS
       show CLI help
 
   -i, --bagId=bagId
-      (required) 
-             Bag ID. Format: {bag_type}:{sub_type}:{id}.
-             - Bag types: 'static', 'dynamic'
-             - Sub types: 'static:council', 'static:wg', 'dynamic:member', 'dynamic:channel'
-             - Id: 
-               - absent for 'static:council'
-               - working group name for 'static:wg'
-               - integer for 'dynamic:member' and 'dynamic:channel'
-             Examples:
-             - static:council
-             - static:wg:storage
-             - dynamic:member:4
-
-  -k, --keyfile=keyfile
-      Key file for the account. Mandatory in non-dev environment.
-
-  -m, --dev
-      Use development mode
-
-  -p, --password=password
-      Key file password (optional).
-
-  -u, --apiUrl=apiUrl
-      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
+      (required) Bag ID. Format: {bag_type}:{sub_type}:{id}.
+           - Bag types: 'static', 'dynamic'
+           - Sub types: 'static:council', 'static:wg', 'dynamic:member', 'dynamic:channel'
+           - Id:
+             - absent for 'static:council'
+             - working group name for 'static:wg'
+             - integer for 'dynamic:member' and 'dynamic:channel'
+           Examples:
+           - static:council
+           - static:wg:storage
+           - dynamic:member:4
 ```
 
-_See code: [src/commands/dev/verify-bag-id.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/dev/verify-bag-id.ts)_
+_See code: [src/commands/dev/verify-bag-id.ts](https://github.com/Joystream/joystream/blob/v2.0.0/src/commands/dev/verify-bag-id.ts)_
 
 ## `storage-node help [COMMAND]`
 
@@ -160,7 +304,7 @@ OPTIONS
   --all  see all commands in CLI
 ```
 
-_See code: [@oclif/plugin-help](https://github.com/oclif/plugin-help/blob/v3.0.1/src/commands/help.ts)_
+_See code: [@oclif/plugin-help](https://github.com/oclif/plugin-help/blob/v3.2.2/src/commands/help.ts)_
 
 ## `storage-node leader:cancel-invite`
 
@@ -171,15 +315,18 @@ USAGE
   $ storage-node leader:cancel-invite
 
 OPTIONS
-  -h, --help               show CLI help
-  -i, --bucketId=bucketId  (required) Storage bucket ID
-  -k, --keyfile=keyfile    Key file for the account. Mandatory in non-dev environment.
-  -m, --dev                Use development mode
-  -p, --password=password  Key file password (optional).
-  -u, --apiUrl=apiUrl      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
+  -h, --help                   show CLI help
+  -i, --bucketId=bucketId      (required) Storage bucket ID
+  -k, --keyFile=keyFile        Key file for the account. Mandatory in non-dev environment.
+  -m, --dev                    Use development mode
+  -p, --password=password      Key file password (optional). Could be overriden by ACCOUNT_PWD environment variable.
+  -u, --apiUrl=apiUrl          [default: ws://localhost:9944] Runtime API URL. Mandatory in non-dev environment.
+
+  -y, --accountUri=accountUri  Account URI (optional). Has a priority over the keyFile and password flags. Could be
+                               overriden by ACCOUNT_URI environment variable.
 ```
 
-_See code: [src/commands/leader/cancel-invite.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/cancel-invite.ts)_
+_See code: [src/commands/leader/cancel-invite.ts](https://github.com/Joystream/joystream/blob/v2.0.0/src/commands/leader/cancel-invite.ts)_
 
 ## `storage-node leader:create-bucket`
 
@@ -190,18 +337,21 @@ USAGE
   $ storage-node leader:create-bucket
 
 OPTIONS
-  -a, --allow              Accepts new bags
-  -h, --help               show CLI help
-  -i, --invited=invited    Invited storage operator ID (storage WG worker ID)
-  -k, --keyfile=keyfile    Key file for the account. Mandatory in non-dev environment.
-  -m, --dev                Use development mode
-  -n, --number=number      Storage bucket max total objects number
-  -p, --password=password  Key file password (optional).
-  -s, --size=size          Storage bucket max total objects size
-  -u, --apiUrl=apiUrl      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
+  -a, --allow                  Accepts new bags
+  -h, --help                   show CLI help
+  -i, --invited=invited        Invited storage operator ID (storage WG worker ID)
+  -k, --keyFile=keyFile        Key file for the account. Mandatory in non-dev environment.
+  -m, --dev                    Use development mode
+  -n, --number=number          Storage bucket max total objects number
+  -p, --password=password      Key file password (optional). Could be overriden by ACCOUNT_PWD environment variable.
+  -s, --size=size              Storage bucket max total objects size
+  -u, --apiUrl=apiUrl          [default: ws://localhost:9944] Runtime API URL. Mandatory in non-dev environment.
+
+  -y, --accountUri=accountUri  Account URI (optional). Has a priority over the keyFile and password flags. Could be
+                               overriden by ACCOUNT_URI environment variable.
 ```
 
-_See code: [src/commands/leader/create-bucket.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/create-bucket.ts)_
+_See code: [src/commands/leader/create-bucket.ts](https://github.com/Joystream/joystream/blob/v2.0.0/src/commands/leader/create-bucket.ts)_
 
 ## `storage-node leader:delete-bucket`
 
@@ -212,15 +362,18 @@ USAGE
   $ storage-node leader:delete-bucket
 
 OPTIONS
-  -h, --help               show CLI help
-  -i, --bucketId=bucketId  (required) Storage bucket ID
-  -k, --keyfile=keyfile    Key file for the account. Mandatory in non-dev environment.
-  -m, --dev                Use development mode
-  -p, --password=password  Key file password (optional).
-  -u, --apiUrl=apiUrl      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
+  -h, --help                   show CLI help
+  -i, --bucketId=bucketId      (required) Storage bucket ID
+  -k, --keyFile=keyFile        Key file for the account. Mandatory in non-dev environment.
+  -m, --dev                    Use development mode
+  -p, --password=password      Key file password (optional). Could be overriden by ACCOUNT_PWD environment variable.
+  -u, --apiUrl=apiUrl          [default: ws://localhost:9944] Runtime API URL. Mandatory in non-dev environment.
+
+  -y, --accountUri=accountUri  Account URI (optional). Has a priority over the keyFile and password flags. Could be
+                               overriden by ACCOUNT_URI environment variable.
 ```
 
-_See code: [src/commands/leader/delete-bucket.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/delete-bucket.ts)_
+_See code: [src/commands/leader/delete-bucket.ts](https://github.com/Joystream/joystream/blob/v2.0.0/src/commands/leader/delete-bucket.ts)_
 
 ## `storage-node leader:invite-operator`
 
@@ -233,14 +386,17 @@ USAGE
 OPTIONS
   -h, --help                   show CLI help
   -i, --bucketId=bucketId      (required) Storage bucket ID
-  -k, --keyfile=keyfile        Key file for the account. Mandatory in non-dev environment.
+  -k, --keyFile=keyFile        Key file for the account. Mandatory in non-dev environment.
   -m, --dev                    Use development mode
-  -p, --password=password      Key file password (optional).
-  -u, --apiUrl=apiUrl          Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
+  -p, --password=password      Key file password (optional). Could be overriden by ACCOUNT_PWD environment variable.
+  -u, --apiUrl=apiUrl          [default: ws://localhost:9944] Runtime API URL. Mandatory in non-dev environment.
   -w, --operatorId=operatorId  (required) Storage bucket operator ID (storage group worker ID)
+
+  -y, --accountUri=accountUri  Account URI (optional). Has a priority over the keyFile and password flags. Could be
+                               overriden by ACCOUNT_URI environment variable.
 ```
 
-_See code: [src/commands/leader/invite-operator.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/invite-operator.ts)_
+_See code: [src/commands/leader/invite-operator.ts](https://github.com/Joystream/joystream/blob/v2.0.0/src/commands/leader/invite-operator.ts)_
 
 ## `storage-node leader:remove-operator`
 
@@ -251,15 +407,18 @@ USAGE
   $ storage-node leader:remove-operator
 
 OPTIONS
-  -h, --help               show CLI help
-  -i, --bucketId=bucketId  (required) Storage bucket ID
-  -k, --keyfile=keyfile    Key file for the account. Mandatory in non-dev environment.
-  -m, --dev                Use development mode
-  -p, --password=password  Key file password (optional).
-  -u, --apiUrl=apiUrl      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
+  -h, --help                   show CLI help
+  -i, --bucketId=bucketId      (required) Storage bucket ID
+  -k, --keyFile=keyFile        Key file for the account. Mandatory in non-dev environment.
+  -m, --dev                    Use development mode
+  -p, --password=password      Key file password (optional). Could be overriden by ACCOUNT_PWD environment variable.
+  -u, --apiUrl=apiUrl          [default: ws://localhost:9944] Runtime API URL. Mandatory in non-dev environment.
+
+  -y, --accountUri=accountUri  Account URI (optional). Has a priority over the keyFile and password flags. Could be
+                               overriden by ACCOUNT_URI environment variable.
 ```
 
-_See code: [src/commands/leader/remove-operator.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/remove-operator.ts)_
+_See code: [src/commands/leader/remove-operator.ts](https://github.com/Joystream/joystream/blob/v2.0.0/src/commands/leader/remove-operator.ts)_
 
 ## `storage-node leader:set-bucket-limits`
 
@@ -270,17 +429,20 @@ USAGE
   $ storage-node leader:set-bucket-limits
 
 OPTIONS
-  -h, --help               show CLI help
-  -i, --bucketId=bucketId  (required) Storage bucket ID
-  -k, --keyfile=keyfile    Key file for the account. Mandatory in non-dev environment.
-  -m, --dev                Use development mode
-  -o, --objects=objects    (required) New 'voucher object number limit' value
-  -p, --password=password  Key file password (optional).
-  -s, --size=size          (required) New 'voucher object size limit' value
-  -u, --apiUrl=apiUrl      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
+  -h, --help                   show CLI help
+  -i, --bucketId=bucketId      (required) Storage bucket ID
+  -k, --keyFile=keyFile        Key file for the account. Mandatory in non-dev environment.
+  -m, --dev                    Use development mode
+  -o, --objects=objects        (required) New 'voucher object number limit' value
+  -p, --password=password      Key file password (optional). Could be overridden by ACCOUNT_PWD environment variable.
+  -s, --size=size              (required) New 'voucher object size limit' value
+  -u, --apiUrl=apiUrl          [default: ws://localhost:9944] Runtime API URL. Mandatory in non-dev environment.
+
+  -y, --accountUri=accountUri  Account URI (optional). Has a priority over the keyFile and password flags. Could be
+                               overridden by ACCOUNT_URI environment variable.
 ```
 
-_See code: [src/commands/leader/set-bucket-limits.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/set-bucket-limits.ts)_
+_See code: [src/commands/leader/set-bucket-limits.ts](https://github.com/Joystream/joystream/blob/v2.0.0/src/commands/leader/set-bucket-limits.ts)_
 
 ## `storage-node leader:set-global-uploading-status`
 
@@ -291,15 +453,18 @@ USAGE
   $ storage-node leader:set-global-uploading-status
 
 OPTIONS
-  -h, --help               show CLI help
-  -k, --keyfile=keyfile    Key file for the account. Mandatory in non-dev environment.
-  -m, --dev                Use development mode
-  -p, --password=password  Key file password (optional).
-  -s, --set=(on|off)       (required) Sets global uploading block (on/off).
-  -u, --apiUrl=apiUrl      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
+  -h, --help                   show CLI help
+  -k, --keyFile=keyFile        Key file for the account. Mandatory in non-dev environment.
+  -m, --dev                    Use development mode
+  -p, --password=password      Key file password (optional). Could be overridden by ACCOUNT_PWD environment variable.
+  -s, --set=(on|off)           (required) Sets global uploading block (on/off).
+  -u, --apiUrl=apiUrl          [default: ws://localhost:9944] Runtime API URL. Mandatory in non-dev environment.
+
+  -y, --accountUri=accountUri  Account URI (optional). Has a priority over the keyFile and password flags. Could be
+                               overridden by ACCOUNT_URI environment variable.
 ```
 
-_See code: [src/commands/leader/set-global-uploading-status.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/set-global-uploading-status.ts)_
+_See code: [src/commands/leader/set-global-uploading-status.ts](https://github.com/Joystream/joystream/blob/v2.0.0/src/commands/leader/set-global-uploading-status.ts)_
 
 ## `storage-node leader:update-bag`
 
@@ -317,36 +482,39 @@ OPTIONS
       show CLI help
 
   -i, --bagId=bagId
-      (required) 
-             Bag ID. Format: {bag_type}:{sub_type}:{id}.
-             - Bag types: 'static', 'dynamic'
-             - Sub types: 'static:council', 'static:wg', 'dynamic:member', 'dynamic:channel'
-             - Id: 
-               - absent for 'static:council'
-               - working group name for 'static:wg'
-               - integer for 'dynamic:member' and 'dynamic:channel'
-             Examples:
-             - static:council
-             - static:wg:storage
-             - dynamic:member:4
-
-  -k, --keyfile=keyfile
+      (required) Bag ID. Format: {bag_type}:{sub_type}:{id}.
+           - Bag types: 'static', 'dynamic'
+           - Sub types: 'static:council', 'static:wg', 'dynamic:member', 'dynamic:channel'
+           - Id:
+             - absent for 'static:council'
+             - working group name for 'static:wg'
+             - integer for 'dynamic:member' and 'dynamic:channel'
+           Examples:
+           - static:council
+           - static:wg:storage
+           - dynamic:member:4
+
+  -k, --keyFile=keyFile
       Key file for the account. Mandatory in non-dev environment.
 
   -m, --dev
       Use development mode
 
   -p, --password=password
-      Key file password (optional).
+      Key file password (optional). Could be overridden by ACCOUNT_PWD environment variable.
 
   -r, --remove=remove
       [default: ] ID of a bucket to remove from bag
 
   -u, --apiUrl=apiUrl
-      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
+      [default: ws://localhost:9944] Runtime API URL. Mandatory in non-dev environment.
+
+  -y, --accountUri=accountUri
+      Account URI (optional). Has a priority over the keyFile and password flags. Could be overridden by ACCOUNT_URI 
+      environment variable.
 ```
 
-_See code: [src/commands/leader/update-bag.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/update-bag.ts)_
+_See code: [src/commands/leader/update-bag.ts](https://github.com/Joystream/joystream/blob/v2.0.0/src/commands/leader/update-bag.ts)_
 
 ## `storage-node leader:update-bag-limit`
 
@@ -357,15 +525,18 @@ USAGE
   $ storage-node leader:update-bag-limit
 
 OPTIONS
-  -h, --help               show CLI help
-  -k, --keyfile=keyfile    Key file for the account. Mandatory in non-dev environment.
-  -l, --limit=limit        (required) New StorageBucketsPerBagLimit value
-  -m, --dev                Use development mode
-  -p, --password=password  Key file password (optional).
-  -u, --apiUrl=apiUrl      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
+  -h, --help                   show CLI help
+  -k, --keyFile=keyFile        Key file for the account. Mandatory in non-dev environment.
+  -l, --limit=limit            (required) New StorageBucketsPerBagLimit value
+  -m, --dev                    Use development mode
+  -p, --password=password      Key file password (optional). Could be overridden by ACCOUNT_PWD environment variable.
+  -u, --apiUrl=apiUrl          [default: ws://localhost:9944] Runtime API URL. Mandatory in non-dev environment.
+
+  -y, --accountUri=accountUri  Account URI (optional). Has a priority over the keyFile and password flags. Could be
+                               overridden by ACCOUNT_URI environment variable.
 ```
 
-_See code: [src/commands/leader/update-bag-limit.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/update-bag-limit.ts)_
+_See code: [src/commands/leader/update-bag-limit.ts](https://github.com/Joystream/joystream/blob/v2.0.0/src/commands/leader/update-bag-limit.ts)_
 
 ## `storage-node leader:update-blacklist`
 
@@ -376,16 +547,19 @@ USAGE
   $ storage-node leader:update-blacklist
 
 OPTIONS
-  -a, --add=add            [default: ] Content ID to add
-  -h, --help               show CLI help
-  -k, --keyfile=keyfile    Key file for the account. Mandatory in non-dev environment.
-  -m, --dev                Use development mode
-  -p, --password=password  Key file password (optional).
-  -r, --remove=remove      [default: ] Content ID to remove
-  -u, --apiUrl=apiUrl      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
+  -a, --add=add                [default: ] Content ID to add
+  -h, --help                   show CLI help
+  -k, --keyFile=keyFile        Key file for the account. Mandatory in non-dev environment.
+  -m, --dev                    Use development mode
+  -p, --password=password      Key file password (optional). Could be overridden by ACCOUNT_PWD environment variable.
+  -r, --remove=remove          [default: ] Content ID to remove
+  -u, --apiUrl=apiUrl          [default: ws://localhost:9944] Runtime API URL. Mandatory in non-dev environment.
+
+  -y, --accountUri=accountUri  Account URI (optional). Has a priority over the keyFile and password flags. Could be
+                               overridden by ACCOUNT_URI environment variable.
 ```
 
-_See code: [src/commands/leader/update-blacklist.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/update-blacklist.ts)_
+_See code: [src/commands/leader/update-blacklist.ts](https://github.com/Joystream/joystream/blob/v2.0.0/src/commands/leader/update-blacklist.ts)_
 
 ## `storage-node leader:update-bucket-status`
 
@@ -396,18 +570,21 @@ USAGE
   $ storage-node leader:update-bucket-status
 
 OPTIONS
-  -d, --disable            Disables accepting new bags.
-  -e, --enable             Enables accepting new bags (default).
-  -h, --help               show CLI help
-  -i, --bucketId=bucketId  (required) Storage bucket ID
-  -k, --keyfile=keyfile    Key file for the account. Mandatory in non-dev environment.
-  -m, --dev                Use development mode
-  -p, --password=password  Key file password (optional).
-  -s, --set=(on|off)       (required) Sets 'accepting new bags' parameter for the bucket (on/off).
-  -u, --apiUrl=apiUrl      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
+  -d, --disable                Disables accepting new bags.
+  -e, --enable                 Enables accepting new bags (default).
+  -h, --help                   show CLI help
+  -i, --bucketId=bucketId      (required) Storage bucket ID
+  -k, --keyFile=keyFile        Key file for the account. Mandatory in non-dev environment.
+  -m, --dev                    Use development mode
+  -p, --password=password      Key file password (optional). Could be overridden by ACCOUNT_PWD environment variable.
+  -s, --set=(on|off)           (required) Sets 'accepting new bags' parameter for the bucket (on/off).
+  -u, --apiUrl=apiUrl          [default: ws://localhost:9944] Runtime API URL. Mandatory in non-dev environment.
+
+  -y, --accountUri=accountUri  Account URI (optional). Has a priority over the keyFile and password flags. Could be
+                               overridden by ACCOUNT_URI environment variable.
 ```
 
-_See code: [src/commands/leader/update-bucket-status.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/update-bucket-status.ts)_
+_See code: [src/commands/leader/update-bucket-status.ts](https://github.com/Joystream/joystream/blob/v2.0.0/src/commands/leader/update-bucket-status.ts)_
 
 ## `storage-node leader:update-data-fee`
 
@@ -418,15 +595,18 @@ USAGE
   $ storage-node leader:update-data-fee
 
 OPTIONS
-  -f, --fee=fee            (required) New data size fee
-  -h, --help               show CLI help
-  -k, --keyfile=keyfile    Key file for the account. Mandatory in non-dev environment.
-  -m, --dev                Use development mode
-  -p, --password=password  Key file password (optional).
-  -u, --apiUrl=apiUrl      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
+  -f, --fee=fee                (required) New data size fee
+  -h, --help                   show CLI help
+  -k, --keyFile=keyFile        Key file for the account. Mandatory in non-dev environment.
+  -m, --dev                    Use development mode
+  -p, --password=password      Key file password (optional). Could be overridden by ACCOUNT_PWD environment variable.
+  -u, --apiUrl=apiUrl          [default: ws://localhost:9944] Runtime API URL. Mandatory in non-dev environment.
+
+  -y, --accountUri=accountUri  Account URI (optional). Has a priority over the keyFile and password flags. Could be
+                               overridden by ACCOUNT_URI environment variable.
 ```
 
-_See code: [src/commands/leader/update-data-fee.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/update-data-fee.ts)_
+_See code: [src/commands/leader/update-data-fee.ts](https://github.com/Joystream/joystream/blob/v2.0.0/src/commands/leader/update-data-fee.ts)_
 
 ## `storage-node leader:update-dynamic-bag-policy`
 
@@ -438,15 +618,18 @@ USAGE
 
 OPTIONS
   -h, --help                      show CLI help
-  -k, --keyfile=keyfile           Key file for the account. Mandatory in non-dev environment.
+  -k, --keyFile=keyFile           Key file for the account. Mandatory in non-dev environment.
   -m, --dev                       Use development mode
   -n, --number=number             (required) New storage buckets number
-  -p, --password=password         Key file password (optional).
+  -p, --password=password         Key file password (optional). Could be overridden by ACCOUNT_PWD environment variable.
   -t, --bagType=(Channel|Member)  (required) Dynamic bag type (Channel, Member).
-  -u, --apiUrl=apiUrl             Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
+  -u, --apiUrl=apiUrl             [default: ws://localhost:9944] Runtime API URL. Mandatory in non-dev environment.
+
+  -y, --accountUri=accountUri     Account URI (optional). Has a priority over the keyFile and password flags. Could be
+                                  overridden by ACCOUNT_URI environment variable.
 ```
 
-_See code: [src/commands/leader/update-dynamic-bag-policy.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/update-dynamic-bag-policy.ts)_
+_See code: [src/commands/leader/update-dynamic-bag-policy.ts](https://github.com/Joystream/joystream/blob/v2.0.0/src/commands/leader/update-dynamic-bag-policy.ts)_
 
 ## `storage-node leader:update-voucher-limits`
 
@@ -457,16 +640,19 @@ USAGE
   $ storage-node leader:update-voucher-limits
 
 OPTIONS
-  -h, --help               show CLI help
-  -k, --keyfile=keyfile    Key file for the account. Mandatory in non-dev environment.
-  -m, --dev                Use development mode
-  -o, --objects=objects    (required) New 'max voucher object number limit' value
-  -p, --password=password  Key file password (optional).
-  -s, --size=size          (required) New 'max voucher object size limit' value
-  -u, --apiUrl=apiUrl      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
+  -h, --help                   show CLI help
+  -k, --keyFile=keyFile        Key file for the account. Mandatory in non-dev environment.
+  -m, --dev                    Use development mode
+  -o, --objects=objects        (required) New 'max voucher object number limit' value
+  -p, --password=password      Key file password (optional). Could be overridden by ACCOUNT_PWD environment variable.
+  -s, --size=size              (required) New 'max voucher object size limit' value
+  -u, --apiUrl=apiUrl          [default: ws://localhost:9944] Runtime API URL. Mandatory in non-dev environment.
+
+  -y, --accountUri=accountUri  Account URI (optional). Has a priority over the keyFile and password flags. Could be
+                               overridden by ACCOUNT_URI environment variable.
 ```
 
-_See code: [src/commands/leader/update-voucher-limits.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/update-voucher-limits.ts)_
+_See code: [src/commands/leader/update-voucher-limits.ts](https://github.com/Joystream/joystream/blob/v2.0.0/src/commands/leader/update-voucher-limits.ts)_
 
 ## `storage-node operator:accept-invitation`
 
@@ -477,37 +663,44 @@ USAGE
   $ storage-node operator:accept-invitation
 
 OPTIONS
-  -h, --help               show CLI help
-  -i, --bucketId=bucketId  (required) Storage bucket ID
-  -k, --keyfile=keyfile    Key file for the account. Mandatory in non-dev environment.
-  -m, --dev                Use development mode
-  -p, --password=password  Key file password (optional).
-  -u, --apiUrl=apiUrl      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
-  -w, --workerId=workerId  (required) Storage operator worker ID
+  -h, --help                   show CLI help
+  -i, --bucketId=bucketId      (required) Storage bucket ID
+  -k, --keyFile=keyFile        Key file for the account. Mandatory in non-dev environment.
+  -m, --dev                    Use development mode
+  -p, --password=password      Key file password (optional). Could be overridden by ACCOUNT_PWD environment variable.
+  -u, --apiUrl=apiUrl          [default: ws://localhost:9944] Runtime API URL. Mandatory in non-dev environment.
+  -w, --workerId=workerId      (required) Storage operator worker ID
+
+  -y, --accountUri=accountUri  Account URI (optional). Has a priority over the keyFile and password flags. Could be
+                               overridden by ACCOUNT_URI environment variable.
 ```
 
-_See code: [src/commands/operator/accept-invitation.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/operator/accept-invitation.ts)_
+_See code: [src/commands/operator/accept-invitation.ts](https://github.com/Joystream/joystream/blob/v2.0.0/src/commands/operator/accept-invitation.ts)_
 
 ## `storage-node operator:set-metadata`
 
-Accept pending storage bucket invitation.
+Set metadata for the storage bucket.
 
 ```
 USAGE
   $ storage-node operator:set-metadata
 
 OPTIONS
+  -e, --endpoint=endpoint      Root distribution node endpoint
   -h, --help                   show CLI help
   -i, --bucketId=bucketId      (required) Storage bucket ID
-  -k, --keyfile=keyfile        Key file for the account. Mandatory in non-dev environment.
+  -j, --jsonFile=jsonFile      Path to JSON metadata file
+  -k, --keyFile=keyFile        Key file for the account. Mandatory in non-dev environment.
   -m, --dev                    Use development mode
-  -m, --metadata=metadata      Storage bucket operator metadata
-  -p, --password=password      Key file password (optional).
-  -u, --apiUrl=apiUrl          Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
+  -p, --password=password      Key file password (optional). Could be overridden by ACCOUNT_PWD environment variable.
+  -u, --apiUrl=apiUrl          [default: ws://localhost:9944] Runtime API URL. Mandatory in non-dev environment.
   -w, --operatorId=operatorId  (required) Storage bucket operator ID (storage group worker ID)
+
+  -y, --accountUri=accountUri  Account URI (optional). Has a priority over the keyFile and password flags. Could be
+                               overridden by ACCOUNT_URI environment variable.
 ```
 
-_See code: [src/commands/operator/set-metadata.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/operator/set-metadata.ts)_
+_See code: [src/commands/operator/set-metadata.ts](https://github.com/Joystream/joystream/blob/v2.0.0/src/commands/operator/set-metadata.ts)_
 
 ## `storage-node server`
 
@@ -518,16 +711,41 @@ USAGE
   $ storage-node server
 
 OPTIONS
-  -d, --uploads=uploads    (required) Data uploading directory (absolute path).
-  -h, --help               show CLI help
-  -k, --keyfile=keyfile    Key file for the account. Mandatory in non-dev environment.
-  -m, --dev                Use development mode
-  -o, --port=port          (required) Server port.
-  -p, --password=password  Key file password (optional).
-  -u, --apiUrl=apiUrl      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
-  -w, --worker=worker      (required) Storage provider worker ID
+  -a, --disableUploadAuth                    Disable uploading authentication (should be used in testing-context only).
+  -d, --uploads=uploads                      (required) Data uploading directory (absolute path).
+
+  -e, --elasticSearchHost=elasticSearchHost  Elasticsearch host and port (e.g.: some.com:8081).
+                                             Log level could be set using the ELASTIC_LOG_LEVEL environment variable.
+                                             Supported values: warn, error, debug, info. Default: debug
+
+  -h, --help                                 show CLI help
+
+  -i, --syncInterval=syncInterval            [default: 1] Interval between synchronizations (in minutes)
+
+  -k, --keyFile=keyFile                      Key file for the account. Mandatory in non-dev environment.
+
+  -m, --dev                                  Use development mode
+
+  -o, --port=port                            (required) Server port.
+
+  -p, --password=password                    Key file password (optional). Could be overridden by ACCOUNT_PWD environment
+                                             variable.
+
+  -q, --queryNodeHost=queryNodeHost          (required) Query node host and port (e.g.: some.com:8081)
+
+  -r, --syncWorkersNumber=syncWorkersNumber  [default: 20] Sync workers number (max async operations in progress).
+
+  -s, --sync                                 Enable data synchronization.
+
+  -u, --apiUrl=apiUrl                        [default: ws://localhost:9944] Runtime API URL. Mandatory in non-dev
+                                             environment.
+
+  -w, --worker=worker                        (required) Storage provider worker ID
+
+  -y, --accountUri=accountUri                Account URI (optional). Has a priority over the keyFile and password flags.
+                                             Could be overridden by ACCOUNT_URI environment variable.
 ```
 
-_See code: [src/commands/server.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/server.ts)_
+_See code: [src/commands/server.ts](https://github.com/Joystream/joystream/blob/v2.0.0/src/commands/server.ts)_
 
 <!-- commandsstop -->

+ 38 - 7
storage-node-v2/package.json

@@ -1,14 +1,16 @@
 {
   "name": "storage-node-v2",
   "description": "Joystream storage subsystem.",
-  "version": "0.1.0",
+  "version": "2.0.0",
   "author": "Joystream contributors",
   "bin": {
     "storage-node": "./bin/run"
   },
   "bugs": "https://github.com/Joystream/joystream/issues",
   "dependencies": {
+    "@apollo/client": "^3.3.21",
     "@joystream/types": "^0.17.0",
+    "@joystream/metadata-protobuf": "^1.0.0",
     "@oclif/command": "^1",
     "@oclif/config": "^1",
     "@oclif/plugin-help": "^3",
@@ -16,34 +18,56 @@
     "@types/base64url": "^2.0.0",
     "@types/express": "4.17.13",
     "@types/file-type": "^10.9.1",
+    "@types/lodash": "^4.14.171",
     "@types/multer": "^1.4.5",
     "@types/node-cache": "^4.2.5",
+    "@types/promise-timeout": "^1.3.0",
     "@types/read-chunk": "^3.1.0",
+    "@types/rimraf": "^3.0.2",
     "@types/send": "^0.17.0",
+    "@types/superagent": "^4.1.12",
+    "@types/url-join": "^4.0.1",
+    "@types/uuid": "^8.3.1",
     "@types/winston": "^2.4.4",
+    "ajv": "^7",
     "await-lock": "^2.1.0",
     "base64url": "^3.0.1",
     "blake3": "^2.1.4",
+    "cross-fetch": "^3.1.4",
     "express": "4.17.1",
-    "express-openapi-validator": "^4.12.4",
+    "express-openapi-validator": "4.12.4",
     "express-winston": "^4.1.0",
+    "fast-folder-size": "^1.4.0",
     "file-type": "^16.5.0",
     "lodash": "^4.17.21",
     "multihashes": "^4.0.2",
     "node-cache": "^5.1.2",
     "openapi-editor": "^0.3.0",
+    "promise-timeout": "^1.3.0",
     "read-chunk": "^3.2.0",
+    "rimraf": "^3.0.2",
     "send": "^0.17.1",
+    "sleep-promise": "^9.1.0",
+    "superagent": "^6.1.0",
     "tslib": "^1",
-    "winston": "^3.3.3"
+    "url-join": "^4.0.1",
+    "uuid": "^8.3.2",
+    "winston": "^3.3.3",
+    "winston-elasticsearch": "^0.15.8"
   },
   "devDependencies": {
+    "@graphql-codegen/cli": "^1.21.4",
+    "@graphql-codegen/import-types-preset": "^1.18.1",
+    "@graphql-codegen/typescript": "^1.22.0",
+    "@graphql-codegen/typescript-document-nodes": "^1.17.11",
+    "@graphql-codegen/typescript-operations": "^1.17.16",
     "@joystream/eslint-config": "^1.0.0",
     "@oclif/dev-cli": "^1",
     "@oclif/test": "^1",
     "@types/chai": "^4",
     "@types/mocha": "^5",
     "@types/node": "^10",
+    "@types/pg": "^8.6.1",
     "@types/swagger-ui-express": "^4.1.2",
     "@typescript-eslint/eslint-plugin": "3.8.0",
     "@typescript-eslint/parser": "3.8.0",
@@ -54,6 +78,7 @@
     "globby": "^10",
     "mocha": "^5",
     "nyc": "^14",
+    "pg": "^8.7.1",
     "prettier": "^2.3.0",
     "sinon": "^11.1.1",
     "swagger-ui-express": "^4.1.6",
@@ -88,11 +113,14 @@
       "@oclif/plugin-help"
     ],
     "topics": {
-      "wg": {
-        "description": "Storage working group commands."
+      "dev": {
+        "description": "Development mode commands."
       },
-      "wg:leader": {
+      "leader": {
         "description": "Storage working group leader commands."
+      },
+      "operator": {
+        "description": "Storage provider(operator) commands."
       }
     }
   },
@@ -109,7 +137,10 @@
     "build": "tsc --build tsconfig.json",
     "format": "prettier ./src --write",
     "lint": "eslint ./src --ext .ts",
-    "api:edit": "openapi-editor --file ./src/api-spec/openapi.yaml --port 10021"
+    "api:edit": "openapi-editor --file ./src/api-spec/openapi.yaml --port 10021",
+    "generate:types:graphql": "yarn graphql-codegen -c ./src/services/queryNode/codegen.yml",
+    "generate:types:json-schema": "yarn ts-node ./src/services/metadata/generateTypes.ts",
+    "ensure": "yarn format && yarn lint --fix && yarn build"
   },
   "types": "lib/index.d.ts"
 }

+ 200 - 0
storage-node-v2/scripts/generate-test-data.ts

@@ -0,0 +1,200 @@
+#!/usr/bin/env ts-node
+
+import fs from 'fs'
+import path from 'path'
+const fsPromises = fs.promises
+import { Client, ClientConfig, QueryResult } from 'pg'
+import { exit } from 'process'
+
+async function doJob(): Promise<void> {
+  const uploadDirectory = '/Users/shamix/uploads5'
+  const fileSize = 1000
+
+  const objectNumber = 10000
+  const bagNumber = 10
+  const bucketNumber = 10
+
+  const urls = [
+    `http://localhost:3333/`,
+    `http://localhost:3334/`,
+    `http://localhost:3335/`,
+  ]
+
+  const updateDb = false
+  const generateFiles = true
+
+  if (updateDb) {
+    const config : ClientConfig = {
+      user: 'postgres',
+      password: 'postgres',
+      database: 'query_node_processor',
+      host: 'localhost'
+    }
+    const client = new Client(config)
+    await client.connect()
+
+    // Cleanup
+    await client.query('TRUNCATE storage_data_object')
+    await client.query('TRUNCATE storage_bucket CASCADE')
+    await client.query('TRUNCATE storage_bag CASCADE')
+    await client.query('TRUNCATE storage_bag_storage_bucket')
+  
+    // Generate objects
+    await createBags(client, bagNumber)
+    await createBuckets(client, bucketNumber)
+    await createBagBucketLinks(client)
+    await createBucketWorkerLinks(client)
+    await createBucketOperatorUrls(client, urls)
+    const dbTasks = createDataObjects(client, objectNumber)
+    await Promise.all(dbTasks)
+
+    await client.end()
+  }
+  
+  if (generateFiles) {
+    await createFiles(uploadDirectory, fileSize, objectNumber)
+  }
+}
+
+function createDataObjects(client: Client, objectNumber: number): Promise<QueryResult<any>>[] {
+  const tasks: any[] = []
+
+  const bagId = '1'
+  for(let i: number = 1; i <= objectNumber; i++){
+    const name = i.toString()
+
+    console.log(`Writing ${i} data object...`)
+
+    const dbTask = client.query(
+      `INSERT INTO storage_data_object(storage_bag_id, ipfs_hash, id, created_by_id, version, is_accepted, size) 
+       values(${bagId}, ${name}, ${name}, 'some', '1', true, 100)`
+    )
+
+    tasks.push(dbTask)
+  }
+
+  return tasks
+}
+
+async function createFiles(uploadDirectory: string, fileSize: number, objectNumber: number): Promise<void> {
+  const data = new Uint8Array(fileSize)
+  let tasks: any[] = []
+  for(let i: number = 1; i <= objectNumber; i++){
+    const name = i.toString()
+
+    console.log(`Writing ${i} file...`)
+
+    const fileTask = fsPromises.writeFile(
+      path.join(uploadDirectory, name), 
+      data
+    )
+
+    tasks.push(fileTask)
+
+    if (i % 100 === 0){
+      await Promise.all(tasks)
+      tasks.length = 0
+    }
+  }
+
+  if (tasks.length > 0) {
+    await Promise.all(tasks)
+  }
+}
+
+async function createBags(client: Client, bagNumber: number): Promise<void> {
+  for(let i: number = 1; i <= bagNumber; i++){
+    const name = i.toString()
+
+    console.log(`Writing ${i} bag...`)
+
+    await client.query(
+      `INSERT INTO storage_bag(id, created_by_id, version, owner) 
+       values(${name}, 'some', '1',  '{}')`
+    )
+  }
+}
+
+async function createBuckets(client: Client, bucketNumber: number): Promise<void> {
+  const missingWorkerId = `{"isTypeOf": "StorageBucketOperatorStatusMissing"}`
+  for(let i: number = 1; i <= bucketNumber; i++){
+    const name = i.toString()
+
+    console.log(`Writing ${i} bucket...`)
+
+    await client.query(
+      `INSERT INTO storage_bucket(id, created_by_id, version, operator_status, accepting_new_bags, data_objects_size_limit,data_object_count_limit) 
+       values(${name}, 'some', '1',  '${missingWorkerId}', true, 100000000, 100000000)`
+    )
+  }
+}
+
+
+async function createBagBucketLinks(client: Client): Promise<void> {
+    console.log(`Writing bag to bucket links...`)
+
+    // Bucket1 to Bag1
+    await client.query(
+      `INSERT INTO storage_bag_storage_bucket(storage_bag_id, storage_bucket_id) 
+       values('1', '1')`
+    )
+    // Bucket2 to Bag1
+    await client.query(
+      `INSERT INTO storage_bag_storage_bucket(storage_bag_id, storage_bucket_id) 
+       values('1', '2')`
+    )    
+    // Bucket3 to Bag1
+    await client.query(
+      `INSERT INTO storage_bag_storage_bucket(storage_bag_id, storage_bucket_id) 
+       values('1', '3')`
+    )
+}
+
+async function createBucketWorkerLinks(client: Client): Promise<void> {
+    console.log(`Writing bucket worker links...`)
+
+    const assignedWorker0 = `{"isTypeOf": "StorageBucketOperatorStatusActive", "workerId": 0}`
+    const assignedWorker1 = `{"isTypeOf": "StorageBucketOperatorStatusActive", "workerId": 1}`
+    const assignedWorker2 = `{"isTypeOf": "StorageBucketOperatorStatusActive", "workerId": 2}`
+
+    // Bucket1 to Worker0
+    await client.query(
+      `UPDATE storage_bucket
+       SET operator_status = '${assignedWorker0}'
+       WHERE id = '1'`
+    )
+    // Bucket2 to Worker1
+    await client.query(
+      `UPDATE storage_bucket
+       SET operator_status = '${assignedWorker1}'
+       WHERE id = '2'`
+    )   
+     // Bucket3 to Worker2
+    await client.query(
+      `UPDATE storage_bucket
+       SET operator_status = '${assignedWorker2}'
+       WHERE id = '3'`
+    )
+}
+
+async function createBucketOperatorUrls(client: Client, urls: string[]): Promise<void> {
+    console.log(`Writing bucket operator URLs...`)
+
+    for (let i = 0; i < urls.length; i++) {
+      const bucketId = i + 1
+      const metadata = urls[i]
+
+      await client.query(
+        `UPDATE storage_bucket
+         SET operator_metadata = '${metadata}'
+         WHERE id = '${bucketId}'`
+      )
+    }
+}
+
+doJob().then(() => {
+  console.log('Done')
+}).catch((err) => {
+  console.log(err)
+  exit(1)
+})

+ 21 - 0
storage-node-v2/scripts/init-for-cli.sh

@@ -0,0 +1,21 @@
+#!/usr/bin/env bash
+
+# Must be run on the clean development chain.
+
+SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")"
+cd $SCRIPT_PATH
+
+CLI=../bin/run
+
+${CLI} dev:init
+${CLI} leader:update-bag-limit -l 7 --dev
+${CLI} leader:update-voucher-limits -o 1000 -s 10000000000 --dev
+BUCKET_ID=`${CLI} leader:create-bucket -i=0 -a -n=1000 -s=10000000000  --dev`
+${CLI} operator:accept-invitation -w=0 -i=${BUCKET_ID} --dev
+${CLI} leader:update-bag -a=${BUCKET_ID} -i static:council --dev
+${CLI} leader:update-dynamic-bag-policy -n 1 -t Channel --dev
+${CLI} operator:set-metadata -w=0 -i=${BUCKET_ID} -e="http://localhost:3333" --dev
+${CLI} leader:update-data-fee -f 100 --dev
+
+mkdir -p ~/tmp/uploads
+${CLI} server -w 0 -o 3333 -d ~/tmp/uploads --dev -q localhost:8081

+ 12 - 0
storage-node-v2/scripts/operatorMetadata.json

@@ -0,0 +1,12 @@
+{
+  "endpoint": "http://localhost:3333",
+  "location": {
+    "countryCode": "US",
+    "city": "Chicago",
+    "coordinates": {
+      "latitude": 50,
+      "longitude": 50
+    }
+  },
+  "extra": "Extra"
+}

+ 1 - 2
storage-node-v2/scripts/run-all-commands.sh

@@ -25,8 +25,7 @@ ${CLI} operator:accept-invitation -w=0 -i=${BUCKET_ID} --dev
 ${CLI} leader:set-bucket-limits -i=${BUCKET_ID} -o=100 -s=10000000 --dev
 ${CLI} leader:update-bucket-status -i=${BUCKET_ID} --set on --dev
 ${CLI} leader:update-bag -a=${BUCKET_ID} -i static:council --dev
-# Sets metadata endpoint to http://localhost:3333
-${CLI} operator:set-metadata -w=0 -i=${BUCKET_ID} -m="0x0A1C687474703A2F2F6C6F63616C686F73743A333333332F6170692F7631" --dev
+${CLI} operator:set-metadata -w=0 -i=${BUCKET_ID} -e="http://localhost:3333" --dev
 
 # Create and delete a bucket
 BUCKET_ID=`${CLI} leader:create-bucket -a -n=100 -s=10000000  --dev` # bucketId = 1

+ 106 - 11
storage-node-v2/src/api-spec/openapi.yaml

@@ -15,21 +15,23 @@ servers:
   - url: http://localhost:3333/api/v1/
 
 tags:
-  - name: public
-    description: Public storage node API
+  - name: files
+    description: Storage node Files API
+  - name: state
+    description: Storage node State API
 
 paths:
-  /files/{cid}:
+  /files/{id}:
     get:
       operationId: publicApi.getFile
       description: Returns a media file.
       tags:
-        - public
+        - files
       parameters:
-        - name: cid
+        - name: id
           required: true
           in: path
-          description: Content ID
+          description: Data object ID
           schema:
             type: string
       responses:
@@ -76,12 +78,12 @@ paths:
       operationId: publicApi.getFileHeaders
       description: Returns a media file headers.
       tags:
-        - public
+        - files
       parameters:
-        - name: cid
+        - name: id
           required: true
           in: path
-          description: Content ID
+          description: Data object ID
           schema:
             type: string
       responses:
@@ -100,7 +102,7 @@ paths:
       description: Upload data
       operationId: publicApi.uploadFile
       tags:
-        - public
+        - files
       requestBody:
         content:
           multipart/form-data:
@@ -151,7 +153,7 @@ paths:
       description: Get auth token from a server.
       operationId: publicApi.authTokenForUploading
       tags:
-        - public
+        - files
       requestBody:
         description: Token request parameters,
         content:
@@ -181,6 +183,68 @@ paths:
               schema:
                 $ref: '#/components/schemas/ErrorResponse'
 
+  /state/data-objects:
+    get:
+      operationId: stateApi.getAllLocalDataObjects
+      description: Returns all local data objects.
+      tags:
+        - state
+      responses:
+        200:
+          description: Ok
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/DataObjectResponse'
+
+  /state/bags/{bagId}/data-objects:
+    get:
+      operationId: stateApi.getLocalDataObjectsByBagId
+      description: Returns local data objects for the bag.
+      tags:
+        - state
+      parameters:
+        - name: bagId
+          required: true
+          in: path
+          description: Bag ID
+          schema:
+            type: string
+      responses:
+        200:
+          description: Ok
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/DataObjectResponse'
+
+  /version:
+    get:
+      operationId: stateApi.getVersion
+      description: Returns server version.
+      tags:
+        - state
+      responses:
+        200:
+          description: Ok
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/VersionResponse'
+  /state/data:
+    get:
+      operationId: stateApi.getLocalDataStats
+      description: Returns local uploading directory stats.
+      tags:
+        - state
+      responses:
+        200:
+          description: Ok
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/DataStatsResponse'
+
 components:
   securitySchemes:
     UploadAuth:
@@ -227,3 +291,34 @@ components:
           type: string
         message:
           type: string
+    DataStatsResponse:
+      type: object
+      required:
+        - totalSize
+        - objectNumber
+      properties:
+        totalSize:
+          type: integer
+          format: int64
+        objectNumber:
+          type: integer
+          format: int64
+        tempDirSize:
+          type: integer
+          format: int64
+        tempDownloads:
+          type: integer
+          format: int64
+    VersionResponse:
+      type: object
+      required:
+        - version
+      properties:
+        version:
+          type: string
+        userAgent:
+          type: string
+    DataObjectResponse:
+      type: array
+      items:
+        type: string

+ 40 - 16
storage-node-v2/src/command-base/ApiCommandBase.ts

@@ -1,6 +1,7 @@
 import { Command, flags } from '@oclif/command'
 import { createApi } from '../services/runtime/api'
 import { getAccountFromJsonFile, getAlicePair, getAccountFromUri } from '../services/runtime/accounts'
+import { parseBagId } from '../services/helpers/bagTypes'
 import { KeyringPair } from '@polkadot/keyring/types'
 import { ApiPromise } from '@polkadot/api'
 import logger from '../services/logger'
@@ -23,7 +24,7 @@ export default abstract class ApiCommandBase extends Command {
       description: 'Runtime API URL. Mandatory in non-dev environment.',
       default: 'ws://localhost:9944',
     }),
-    keyfile: flags.string({
+    keyFile: flags.string({
       char: 'k',
       description: 'Key file for the account. Mandatory in non-dev environment.',
     }),
@@ -31,10 +32,29 @@ export default abstract class ApiCommandBase extends Command {
       char: 'p',
       description: 'Key file password (optional). Could be overriden by ACCOUNT_PWD environment variable.',
     }),
-    accountURI: flags.string({
+    accountUri: flags.string({
       char: 'y',
       description:
-        'Account URI (optional). Has a priority over the keyfile and password flags. Could be overriden by ACCOUNT_URI environment variable.',
+        'Account URI (optional). Has a priority over the keyFile and password flags. Could be overriden by ACCOUNT_URI environment variable.',
+    }),
+  }
+
+  static extraFlags = {
+    bagId: flags.build({
+      parse: (value: string) => {
+        return parseBagId(value)
+      },
+      description: `Bag ID. Format: {bag_type}:{sub_type}:{id}.
+    - Bag types: 'static', 'dynamic'
+    - Sub types: 'static:council', 'static:wg', 'dynamic:member', 'dynamic:channel'
+    - Id:
+      - absent for 'static:council'
+      - working group name for 'static:wg'
+      - integer for 'dynamic:member' and 'dynamic:channel'
+    Examples:
+    - static:council
+    - static:wg:storage
+    - dynamic:member:4`,
     }),
   }
 
@@ -74,9 +94,13 @@ export default abstract class ApiCommandBase extends Command {
 
     // Some dev commands doesn't contain flags variables.
     const apiUrl = flags.apiUrl ?? 'ws://localhost:9944'
-    this.api = await createApi(apiUrl)
 
     logger.info(`Initialized runtime connection: ${apiUrl}`)
+    try {
+      this.api = await createApi(apiUrl)
+    } catch (err) {
+      logger.error(`Creating runtime API error: ${err.target?._url}`)
+    }
 
     await this.getApi()
   }
@@ -104,21 +128,21 @@ export default abstract class ApiCommandBase extends Command {
    * JSON-file or loads 'Alice' Keypair when in the development mode.
    *
    * @param dev - indicates the development mode (optional).
-   * @param keyfile - key file path (optional).
+   * @param keyFile - key file path (optional).
    * @param password - password for the key file (optional).
-   * @param accountURI - accountURI (optional). Overrides keyfile and password flags.
+   * @param accountURI - accountURI (optional). Overrides keyFile and password flags.
    * @returns KeyringPair instance.
    */
-  getAccount(flags: { dev: boolean; keyfile?: string; password?: string; accountURI?: string }): KeyringPair {
+  getAccount(flags: { dev: boolean; keyFile?: string; password?: string; accountUri?: string }): KeyringPair {
     // Select account URI variable from flags key and environment variable.
-    let accountURI = flags.accountURI ?? ''
+    let accountUri = flags.accountUri ?? ''
     if (!_.isEmpty(process.env.ACCOUNT_URI)) {
-      if (!_.isEmpty(flags.accountURI)) {
+      if (!_.isEmpty(flags.accountUri)) {
         logger.warn(
           `Both enviroment variable and command line argument were provided for the account URI. Environment variable has a priority.`
         )
       }
-      accountURI = process.env.ACCOUNT_URI ?? ''
+      accountUri = process.env.ACCOUNT_URI ?? ''
     }
 
     // Select password variable from flags key and environment variable.
@@ -132,18 +156,18 @@ export default abstract class ApiCommandBase extends Command {
       password = process.env.ACCOUNT_PWD ?? ''
     }
 
-    const keyfile = flags.keyfile ?? ''
+    const keyFile = flags.keyFile ?? ''
     // Create the Alice account for development mode.
     if (flags.dev) {
       return getAlicePair()
     }
     // Create an account using account URI
-    else if (!_.isEmpty(accountURI)) {
-      return getAccountFromUri(accountURI)
+    else if (!_.isEmpty(accountUri)) {
+      return getAccountFromUri(accountUri)
     }
-    // Create an account using the keyfile and password.
-    else if (!_.isEmpty(keyfile)) {
-      const account = getAccountFromJsonFile(keyfile)
+    // Create an account using the keyFile and password.
+    else if (!_.isEmpty(keyFile)) {
+      const account = getAccountFromJsonFile(keyFile)
       account.unlock(password)
 
       return account

+ 2 - 0
storage-node-v2/src/command-base/ExitCodes.ts

@@ -8,6 +8,8 @@ enum ExitCodes {
   InvalidParameters = 100,
   DevelopmentModeOnly,
   FileError,
+  InvalidWorkerId,
+  InvalidIntegerArray,
   ApiError = 200,
   UnsuccessfulRuntimeCall,
 }

+ 4 - 0
storage-node-v2/src/commands/dev/init.ts

@@ -12,6 +12,10 @@ import ApiCommandBase from '../../command-base/ApiCommandBase'
 export default class DevInit extends ApiCommandBase {
   static description = 'Initialize development environment. Sets Alice as storage working group leader.'
 
+  static flags = {
+    ...ApiCommandBase.flags,
+  }
+
   async run(): Promise<void> {
     await this.ensureDevelopmentChain()
 

+ 65 - 0
storage-node-v2/src/commands/dev/sync.ts

@@ -0,0 +1,65 @@
+import { Command, flags } from '@oclif/command'
+import { performSync } from '../../services/sync/synchronizer'
+import logger from '../../services/logger'
+
+/**
+ * CLI command:
+ * Synchronizes data: fixes the difference between node obligations and local
+ * storage.
+ *
+ * @remarks
+ * Should be run only during the development.
+ * Shell command: "dev:sync"
+ */
+export default class DevSync extends Command {
+  static description =
+    'Synchronizes the data - it fixes the differences between local data folder and worker ID obligations from the runtime.'
+
+  static flags = {
+    help: flags.help({ char: 'h' }),
+    workerId: flags.integer({
+      char: 'w',
+      required: true,
+      description: 'Storage node operator worker ID.',
+    }),
+    syncWorkersNumber: flags.integer({
+      char: 'p',
+      required: false,
+      description: 'Sync workers number (max async operations in progress).',
+    }),
+    queryNodeHost: flags.string({
+      char: 'q',
+      required: false,
+      description: 'Query node host and port (e.g.: some.com:8081)',
+    }),
+    dataSourceOperatorHost: flags.string({
+      char: 'o',
+      required: false,
+      description: 'Storage node host and port (e.g.: some.com:8081) to get data from.',
+    }),
+    uploads: flags.string({
+      char: 'd',
+      required: true,
+      description: 'Data uploading directory (absolute path).',
+    }),
+  }
+
+  async run(): Promise<void> {
+    const { flags } = this.parse(DevSync)
+
+    logger.info('Syncing...')
+
+    // Fall back to local-development defaults when optional hosts are omitted.
+    const queryNodeHost = flags.queryNodeHost ?? 'localhost:8081'
+    const queryNodeUrl = `http://${queryNodeHost}/graphql`
+    const syncWorkersNumber = flags.syncWorkersNumber ?? 20
+    const dataSourceOperatorHost = flags.dataSourceOperatorHost ?? 'localhost:3333'
+    const operatorUrl = `http://${dataSourceOperatorHost}/`
+
+    try {
+      await performSync(flags.workerId, syncWorkersNumber, queryNodeUrl, flags.uploads, operatorUrl)
+    } catch (err) {
+      // Log both the message and the full JSON structure — sync errors may
+      // carry nested details that the default toString() does not show.
+      logger.error(err)
+      logger.error(JSON.stringify(err, null, 2))
+    }
+  }
+}

+ 5 - 23
storage-node-v2/src/commands/dev/verify-bag-id.ts

@@ -1,6 +1,5 @@
-import { flags } from '@oclif/command'
+import { Command, flags } from '@oclif/command'
 import ApiCommandBase from '../../command-base/ApiCommandBase'
-import { parseBagId } from '../../services/helpers/bagTypes'
 import logger from '../../services/logger'
 
 /**
@@ -11,37 +10,20 @@ import logger from '../../services/logger'
  * Should be run only during the development.
  * Shell command: "dev:verify-bag-id"
  */
-export default class DevVerifyBagId extends ApiCommandBase {
+export default class DevVerifyBagId extends Command {
   static description = 'The command verifies bag id supported by the storage node. Requires chain connection.'
 
   static flags = {
-    bagId: flags.string({
+    help: flags.help({ char: 'h' }),
+    bagId: ApiCommandBase.extraFlags.bagId({
       char: 'i',
       required: true,
-      description: `
-      Bag ID. Format: {bag_type}:{sub_type}:{id}.
-      - Bag types: 'static', 'dynamic'
-      - Sub types: 'static:council', 'static:wg', 'dynamic:member', 'dynamic:channel'
-      - Id: 
-        - absent for 'static:council'
-        - working group name for 'static:wg'
-        - integer for 'dynamic:member' and 'dynamic:channel'
-      Examples:
-      - static:council
-      - static:wg:storage
-      - dynamic:member:4
-      `,
     }),
-    ...ApiCommandBase.flags,
   }
 
   async run(): Promise<void> {
     const { flags } = this.parse(DevVerifyBagId)
 
-    const api = await this.getApi()
-    const parsedBagId = parseBagId(api, flags.bagId)
-
-    logger.info(`Correct bag id: ${flags.bagId}`)
-    logger.info(`Parsed: ${parsedBagId}`)
+    logger.info(`Parsed: ${flags.bagId}`)
   }
 }

+ 6 - 18
storage-node-v2/src/commands/leader/update-bag.ts

@@ -1,10 +1,10 @@
 import { flags } from '@oclif/command'
 import { updateStorageBucketsForBag } from '../../services/runtime/extrinsics'
 import ApiCommandBase from '../../command-base/ApiCommandBase'
-import { parseBagId } from '../../services/helpers/bagTypes'
 import logger from '../../services/logger'
 import ExitCodes from '../../command-base/ExitCodes'
 import _ from 'lodash'
+import { CLIError } from '@oclif/errors'
 
 // Custom 'integer array' oclif flag.
 const integerArrFlags = {
@@ -12,7 +12,9 @@ const integerArrFlags = {
     parse: (value: string) => {
       const arr: number[] = value.split(',').map((v) => {
         if (!/^-?\d+$/.test(v)) {
-          throw new Error(`Expected comma-separated integers, but received: ${value}`)
+          throw new CLIError(`Expected comma-separated integers, but received: ${value}`, {
+            exit: ExitCodes.InvalidIntegerArray,
+          })
         }
         return parseInt(v)
       })
@@ -43,22 +45,9 @@ export default class LeaderUpdateBag extends ApiCommandBase {
       description: 'ID of a bucket to remove from bag',
       default: [],
     }),
-    bagId: flags.string({
+    bagId: ApiCommandBase.extraFlags.bagId({
       char: 'i',
       required: true,
-      description: `
-      Bag ID. Format: {bag_type}:{sub_type}:{id}.
-      - Bag types: 'static', 'dynamic'
-      - Sub types: 'static:council', 'static:wg', 'dynamic:member', 'dynamic:channel'
-      - Id: 
-        - absent for 'static:council'
-        - working group name for 'static:wg'
-        - integer for 'dynamic:member' and 'dynamic:channel'
-      Examples:
-      - static:council
-      - static:wg:storage
-      - dynamic:member:4
-      `,
     }),
     ...ApiCommandBase.flags,
   }
@@ -78,9 +67,8 @@ export default class LeaderUpdateBag extends ApiCommandBase {
 
     const account = this.getAccount(flags)
     const api = await this.getApi()
-    const bagId = parseBagId(api, flags.bagId)
 
-    const success = await updateStorageBucketsForBag(api, bagId, account, flags.add, flags.remove)
+    const success = await updateStorageBucketsForBag(api, flags.bagId, account, flags.add, flags.remove)
 
     this.exitAfterRuntimeCall(success)
   }

+ 1 - 1
storage-node-v2/src/commands/leader/update-dynamic-bag-policy.ts

@@ -44,7 +44,7 @@ export default class LeaderUpdateDynamicBagPolicy extends ApiCommandBase {
     const newNumber = flags.number
 
     const api = await this.getApi()
-    const dynamicBagType = parseDynamicBagType(api, flags.bagType)
+    const dynamicBagType = parseDynamicBagType(flags.bagType)
     const success = await updateNumberOfStorageBucketsInDynamicBagCreationPolicy(
       api,
       account,

+ 21 - 9
storage-node-v2/src/commands/operator/set-metadata.ts

@@ -2,7 +2,9 @@ import { flags } from '@oclif/command'
 import { setStorageOperatorMetadata } from '../../services/runtime/extrinsics'
 import ApiCommandBase from '../../command-base/ApiCommandBase'
 import logger from '../../services/logger'
-
+import { ValidationService } from '../../services/metadata/validationService'
+import { StorageBucketOperatorMetadata, IStorageBucketOperatorMetadata } from '@joystream/metadata-protobuf'
+import fs from 'fs'
 /**
  * CLI command:
  * Sets metadata for the storage bucket.
@@ -13,7 +15,7 @@ import logger from '../../services/logger'
  * Shell command: "operator:set-metadata"
  */
 export default class OperatorSetMetadata extends ApiCommandBase {
-  static description = 'Accept pending storage bucket invitation.'
+  static description = 'Set metadata for the storage bucket.'
 
   static flags = {
     bucketId: flags.integer({
@@ -26,19 +28,29 @@ export default class OperatorSetMetadata extends ApiCommandBase {
       required: true,
       description: 'Storage bucket operator ID (storage group worker ID)',
     }),
-    metadata: flags.string({
-      char: 'm',
-      description: 'Storage bucket operator metadata',
+    endpoint: flags.string({
+      char: 'e',
+      description: 'Root distribution node endpoint',
+      exclusive: ['input'],
+    }),
+    jsonFile: flags.string({
+      char: 'j',
+      description: 'Path to JSON metadata file',
+      exclusive: ['endpoint'],
     }),
     ...ApiCommandBase.flags,
   }
 
   async run(): Promise<void> {
     const { flags } = this.parse(OperatorSetMetadata)
+    const { operatorId, bucketId, jsonFile, endpoint } = flags
+
+    const validation = new ValidationService()
+    const metadata: IStorageBucketOperatorMetadata = jsonFile
+      ? validation.validate('OperatorMetadata', JSON.parse(fs.readFileSync(jsonFile).toString()))
+      : { endpoint }
 
-    const operator = flags.operatorId
-    const bucket = flags.bucketId
-    const metadata = flags.metadata ?? ''
+    const encodedMetadata = '0x' + Buffer.from(StorageBucketOperatorMetadata.encode(metadata).finish()).toString('hex')
 
     logger.info('Setting the storage operator metadata...')
     if (flags.dev) {
@@ -48,7 +60,7 @@ export default class OperatorSetMetadata extends ApiCommandBase {
     const account = this.getAccount(flags)
 
     const api = await this.getApi()
-    const success = await setStorageOperatorMetadata(api, account, operator, bucket, metadata)
+    const success = await setStorageOperatorMetadata(api, account, operatorId, bucketId, encodedMetadata)
 
     this.exitAfterRuntimeCall(success)
   }

+ 157 - 3
storage-node-v2/src/commands/server.ts

@@ -1,7 +1,18 @@
 import { flags } from '@oclif/command'
 import { createApp } from '../services/webApi/app'
 import ApiCommandBase from '../command-base/ApiCommandBase'
-import logger from '../services/logger'
+import logger, { initElasticLogger } from '../services/logger'
+import { ApiPromise } from '@polkadot/api'
+import { performSync, TempDirName } from '../services/sync/synchronizer'
+import sleep from 'sleep-promise'
+import rimraf from 'rimraf'
+import _ from 'lodash'
+import path from 'path'
+import { promisify } from 'util'
+import { KeyringPair } from '@polkadot/keyring/types'
+import ExitCodes from './../command-base/ExitCodes'
+import { CLIError } from '@oclif/errors'
+import { Worker } from '@joystream/types/working-group'
 
 /**
  * CLI command:
@@ -29,30 +40,97 @@ export default class Server extends ApiCommandBase {
       required: true,
       description: 'Server port.',
     }),
+    sync: flags.boolean({
+      char: 's',
+      description: 'Enable data synchronization.',
+      default: false,
+    }),
+    syncInterval: flags.integer({
+      char: 'i',
+      description: 'Interval between synchronizations (in minutes)',
+      default: 1,
+    }),
+    queryNodeHost: flags.string({
+      char: 'q',
+      required: true,
+      description: 'Query node host and port (e.g.: some.com:8081)',
+    }),
+    syncWorkersNumber: flags.integer({
+      char: 'r',
+      required: false,
+      description: 'Sync workers number (max async operations in progress).',
+      default: 20,
+    }),
+    elasticSearchHost: flags.string({
+      char: 'e',
+      required: false,
+      description: `Elasticsearch host and port (e.g.: some.com:8081).
+Log level could be set using the ELASTIC_LOG_LEVEL enviroment variable.
+Supported values: warn, error, debug, info. Default:debug`,
+    }),
+    disableUploadAuth: flags.boolean({
+      char: 'a',
+      description: 'Disable uploading authentication (should be used in testing-context only).',
+      default: false,
+    }),
     ...ApiCommandBase.flags,
   }
 
   async run(): Promise<void> {
     const { flags } = this.parse(Server)
 
+    await removeTempDirectory(flags.uploads, TempDirName)
+
+    let elasticUrl
+    if (!_.isEmpty(flags.elasticSearchHost)) {
+      elasticUrl = `http://${flags.elasticSearchHost}`
+      initElasticLogger(elasticUrl)
+    }
+
+    const queryNodeUrl = `http://${flags.queryNodeHost}/graphql`
+    logger.info(`Query node endpoint set: ${queryNodeUrl}`)
+
     if (flags.dev) {
       await this.ensureDevelopmentChain()
     }
 
+    if (flags.disableUploadAuth) {
+      logger.warn(`Uploading auth-schema disabled.`)
+    }
+
+    if (flags.sync) {
+      logger.info(`Synchronization enabled.`)
+
+      runSyncWithInterval(flags.worker, queryNodeUrl, flags.uploads, flags.syncWorkersNumber, flags.syncInterval)
+    }
+
     const account = this.getAccount(flags)
     const api = await this.getApi()
 
+    await verifyWorkerId(api, flags.worker, account)
+
     try {
       const port = flags.port
       const workerId = flags.worker ?? 0
       const maxFileSize = await api.consts.storage.maxDataObjectSize.toNumber()
       logger.debug(`Max file size runtime parameter: ${maxFileSize}`)
 
-      const app = await createApp(api, account, workerId, flags.uploads, maxFileSize)
+      const app = await createApp({
+        api,
+        account,
+        workerId,
+        maxFileSize,
+        uploadsDir: flags.uploads,
+        tempDirName: TempDirName,
+        process: this.config,
+        queryNodeUrl,
+        enableUploadingAuth: !flags.disableUploadAuth,
+        elasticSearchEndpoint: elasticUrl,
+      })
       logger.info(`Listening on http://localhost:${port}`)
       app.listen(port)
     } catch (err) {
-      logger.error(`Error: ${err}`)
+      logger.error(`Server error: ${err}`)
     }
   }
 
@@ -60,3 +138,79 @@ export default class Server extends ApiCommandBase {
   /* eslint-disable @typescript-eslint/no-empty-function */
   async finally(): Promise<void> {}
 }
+
+/**
+ * Runs the data synchronization process repeatedly, pausing for the
+ * configured interval between runs.
+ *
+ * @param workerId - worker ID
+ * @param queryNodeUrl - Query Node for data fetching
+ * @param uploadsDirectory - data uploading directory
+ * @param syncWorkersNumber - defines a number of the async processes for sync
+ * @param syncIntervalMinutes - defines an interval between sync runs
+ *
+ * @returns void promise.
+ */
+function runSyncWithInterval(
+  workerId: number,
+  queryNodeUrl: string,
+  uploadsDirectory: string,
+  syncWorkersNumber: number,
+  syncIntervalMinutes: number
+) {
+  setTimeout(async () => {
+    // Convert minutes to the milliseconds expected by sleep().
+    const sleepIntervalMs = syncIntervalMinutes * 60 * 1000
+
+    // NOTE: the first sync run is also delayed by one full interval.
+    logger.info(`Sync paused for ${syncIntervalMinutes} minute(s).`)
+    await sleep(sleepIntervalMs)
+    logger.info(`Resume syncing....`)
+
+    try {
+      await performSync(workerId, syncWorkersNumber, queryNodeUrl, uploadsDirectory)
+    } catch (err) {
+      // Keep the loop alive: a failed run must not stop future sync cycles.
+      logger.error(`Critical sync error: ${err}`)
+    }
+
+    // Schedule the next iteration; recursing via setTimeout keeps the stack
+    // flat and avoids an unbounded promise chain.
+    runSyncWithInterval(workerId, queryNodeUrl, uploadsDirectory, syncWorkersNumber, syncIntervalMinutes)
+  }, 0)
+}
+
+/**
+ * Deletes the temporary directory (and every file inside it) located within
+ * the uploading directory. Failures are logged but never propagated.
+ *
+ * @param uploadsDir - data uploading directory
+ * @param tempDirName - temporary directory name within the uploading directory
+ * @returns void promise.
+ */
+async function removeTempDirectory(uploadsDir: string, tempDirName: string): Promise<void> {
+  const tempDirPath = path.join(uploadsDir, tempDirName)
+  try {
+    logger.info(`Removing temp directory ...`)
+    // Promisify rimraf at the call site and delete recursively.
+    await promisify(rimraf)(tempDirPath)
+  } catch (err) {
+    logger.error(`Removing temp directory error: ${err}`)
+  }
+}
+
+/**
+ * Verifies the worker ID from the command line argument and provided Joystream account.
+ * It throws an error when not matched.
+ *
+ * @param api - runtime API promise
+ * @param workerId - worker ID from the command line arguments
+ * @param account - Joystream account KeyringPair
+ * @throws CLIError (exit code InvalidWorkerId) when the worker's role account
+ *         does not match the provided account address.
+ * @returns void promise.
+ */
+async function verifyWorkerId(api: ApiPromise, workerId: number, account: KeyringPair): Promise<void> {
+  // Cast Codec type to Worker type. The storage query returns a generic Codec,
+  // so a two-step cast (via unknown) is used to reach the Worker shape.
+  const workerObj = (await api.query.storageWorkingGroup.workerById(workerId)) as unknown
+  const worker = workerObj as Worker
+
+  if (worker.role_account_id.toString() !== account.address) {
+    throw new CLIError(`Provided worker ID doesn't match the Joystream account.`, {
+      exit: ExitCodes.InvalidWorkerId,
+    })
+  }
+}

+ 41 - 27
storage-node-v2/src/services/helpers/bagTypes.ts

@@ -1,9 +1,23 @@
 import { BagId, DynamicBagType, DynamicBagTypeKey, Static, Dynamic } from '@joystream/types/storage'
 import { WorkingGroup } from '@joystream/types/common'
-import { ApiPromise } from '@polkadot/api'
+import { registry } from '@joystream/types'
+import { createType } from '@polkadot/types'
+import { DetectCodec, Codec } from '@polkadot/types/types'
 import ExitCodes from '../../command-base/ExitCodes'
 import { CLIError } from '@oclif/errors'
 
+/**
+ * Special error type for bagId parsing. Extends the CLIError with setting
+ * the `InvalidParameters` exit code.
+ */
+export class BagIdValidationError extends CLIError {
+  // `err` is the human-readable validation message shown to the CLI user.
+  constructor(err: string) {
+    super(err, {
+      exit: ExitCodes.InvalidParameters,
+    })
+  }
+}
+
 /**
  * Parses the type string and returns the DynamicBagType instance.
  *
@@ -14,8 +28,8 @@ import { CLIError } from '@oclif/errors'
  * @param bagType - dynamic bag type string
  * @returns The DynamicBagType instance.
  */
-export function parseDynamicBagType(api: ApiPromise, bagType: DynamicBagTypeKey): DynamicBagType {
-  return api.createType('DynamicBagType', bagType)
+export function parseDynamicBagType(bagType: DynamicBagTypeKey): DynamicBagType {
+  return createJoystreamType('DynamicBagType', bagType)
 }
 
 /**
@@ -29,8 +43,8 @@ export function parseDynamicBagType(api: ApiPromise, bagType: DynamicBagTypeKey)
  * @param bagId - bag ID in string format
  * @returns The BagId instance.
  */
-export function parseBagId(api: ApiPromise, bagId: string): BagId {
-  const parser = new BagIdParser(api, bagId)
+export function parseBagId(bagId: string): BagId {
+  const parser = new BagIdParser(bagId)
 
   return parser.parse()
 }
@@ -40,19 +54,15 @@ export function parseBagId(api: ApiPromise, bagId: string): BagId {
  */
 class BagIdParser {
   bagId: string
-  api: ApiPromise
   bagIdParts: string[]
 
-  constructor(api: ApiPromise, bagId: string) {
+  constructor(bagId: string) {
     this.bagId = bagId
-    this.api = api
 
     this.bagIdParts = bagId.trim().toLowerCase().split(':')
 
     if (this.bagIdParts.length > 3 || this.bagIdParts.length < 2) {
-      throw new CLIError(`Invalid bagId: ${bagId}`, {
-        exit: ExitCodes.InvalidParameters,
-      })
+      throw new BagIdValidationError(`Invalid bagId: ${bagId}`)
     }
   }
 
@@ -69,9 +79,7 @@ class BagIdParser {
       return this.parseDynamicBagId()
     }
 
-    throw new CLIError(`Invalid bagId: ${this.bagId}`, {
-      exit: ExitCodes.InvalidParameters,
-    })
+    throw new BagIdValidationError(`Invalid bagId: ${this.bagId}`)
   }
 
   /**
@@ -81,8 +89,8 @@ class BagIdParser {
     // Try to construct static council bag ID.
     if (this.bagIdParts[1] === 'council') {
       if (this.bagIdParts.length === 2) {
-        const staticBagId: Static = this.api.createType('Static', 'Council')
-        const constructedBagId: BagId = this.api.createType('BagId', {
+        const staticBagId: Static = createJoystreamType('Static', 'Council')
+        const constructedBagId: BagId = createJoystreamType('BagId', {
           'Static': staticBagId,
         })
 
@@ -98,11 +106,11 @@ class BagIdParser {
 
         for (const group of groups) {
           if (group.toLowerCase() === actualGroup) {
-            const workingGroup: WorkingGroup = this.api.createType('WorkingGroup', group)
-            const staticBagId: Static = this.api.createType('Static', {
+            const workingGroup: WorkingGroup = createJoystreamType('WorkingGroup', group)
+            const staticBagId: Static = createJoystreamType('Static', {
               'WorkingGroup': workingGroup,
             })
-            const constructedBagId: BagId = this.api.createType('BagId', {
+            const constructedBagId: BagId = createJoystreamType('BagId', {
               'Static': staticBagId,
             })
 
@@ -112,9 +120,7 @@ class BagIdParser {
       }
     }
 
-    throw new CLIError(`Invalid static bagId: ${this.bagId}`, {
-      exit: ExitCodes.InvalidParameters,
-    })
+    throw new BagIdValidationError(`Invalid static bagId: ${this.bagId}`)
   }
 
   /**
@@ -136,8 +142,8 @@ class BagIdParser {
             const dynamic = {} as Record<DynamicBagTypeKey, number>
             dynamic[dynamicBagType as DynamicBagTypeKey] = parsedId
 
-            const dynamicBagId: Dynamic = this.api.createType('Dynamic', dynamic)
-            const constructedBagId: BagId = this.api.createType('BagId', {
+            const dynamicBagId: Dynamic = createJoystreamType('Dynamic', dynamic)
+            const constructedBagId: BagId = createJoystreamType('BagId', {
               'Dynamic': dynamicBagId,
             })
 
@@ -147,8 +153,16 @@ class BagIdParser {
       }
     }
 
-    throw new CLIError(`Invalid dynamic bagId: ${this.bagId}`, {
-      exit: ExitCodes.InvalidParameters,
-    })
+    throw new BagIdValidationError(`Invalid dynamic bagId: ${this.bagId}`)
   }
 }
+
+/**
+ * Creates Joystream type using type registry.
+ *
+ * @param type - registered type name (e.g. 'BagId', 'WorkingGroup')
+ * @param value - raw value used to construct the type instance
+ * @returns the constructed Codec instance, typed via DetectCodec.
+ */
+function createJoystreamType<T extends Codec = Codec, K extends string = string>(
+  type: K,
+  value: unknown
+): DetectCodec<T, K> {
+  return createType(registry, type, value)
+}

+ 2 - 2
storage-node-v2/src/services/helpers/tokenNonceKeeper.ts

@@ -1,7 +1,7 @@
 import NodeCache from 'node-cache'
 
 // Expiration period in seconds for the local nonce cache.
-const TokenExpirationPeriod: number = 30 * 1000 // seconds
+const TokenExpirationPeriod = 30 // seconds
 
 // Max nonce number in local cache
 const MaxNonces = 100000
@@ -17,7 +17,7 @@ const nonceCache = new NodeCache({
  * Constructs and returns an expiration time for a token.
  */
 export function getTokenExpirationTime(): number {
-  return Date.now() + TokenExpirationPeriod
+  return Date.now() + 1000 * TokenExpirationPeriod
 }
 
 /**

+ 131 - 20
storage-node-v2/src/services/logger.ts

@@ -1,28 +1,38 @@
-import winston from 'winston'
+import winston, { transport } from 'winston'
 import expressWinston from 'express-winston'
 import { Handler, ErrorRequestHandler } from 'express'
+import { ElasticsearchTransport } from 'winston-elasticsearch'
+
+/**
+ * ElasticSearch server date format.
+ */
+const elasticDateFormat = 'YYYY-MM-DDTHH:mm:ss'
+
+/**
+ * Possible log levels.
+ */
+const levels = {
+  error: 0,
+  warn: 1,
+  info: 2,
+  http: 3,
+  debug: 4,
+}
 
 /**
  * Creates basic Winston logger. Console output redirected to the stderr.
  *
- * @returns Winston logger
+ * @returns Winston logger options
  *
  */
-function createDefaultLogger(): winston.Logger {
-  const levels = {
-    error: 0,
-    warn: 1,
-    info: 2,
-    http: 3,
-    debug: 4,
-  }
-
+function createDefaultLoggerOptions(): winston.LoggerOptions {
   const level = () => {
     const env = process.env.NODE_ENV || 'development'
     const isDevelopment = env === 'development'
     return isDevelopment ? 'debug' : 'warn'
   }
 
+  // Colors
   const colors = {
     error: 'red',
     warn: 'yellow',
@@ -30,39 +40,71 @@ function createDefaultLogger(): winston.Logger {
     http: 'magenta',
     debug: 'white',
   }
-
   winston.addColors(colors)
 
+  // Formats
   const format = winston.format.combine(
     winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss:ms' }),
-    winston.format.colorize({ all: true }),
+    winston.format.colorize(),
     winston.format.printf((info) => `${info.timestamp} ${info.level}: ${info.message}`)
   )
 
   // Redirect all logs to the stderr
   const transports = [new winston.transports.Console({ stderrLevels: Object.keys(levels) })]
 
-  return winston.createLogger({
+  return {
     level: level(),
     levels,
     format,
     transports,
-  })
+  }
 }
 
-const Logger = createDefaultLogger()
+/**
+ * Creates basic Winston logger.
+ *
+ * @returns Winston logger
+ *
+ */
+function createDefaultLogger(): winston.Logger {
+  const defaultOptions = createDefaultLoggerOptions()
+
+  return winston.createLogger(defaultOptions)
+}
+
+// Default global logger variable
+let InnerLogger = createDefaultLogger()
+
+// Enables changing the underlying logger which is default import in other modules.
+const proxy = new Proxy(InnerLogger, {
+  get(target: winston.Logger, propKey: symbol) {
+    const method = Reflect.get(target, propKey)
+    return (...args: unknown[]) => {
+      return method.apply(InnerLogger, args)
+    }
+  },
+})
+
+export default proxy
 
-export default Logger
 /**
  * Creates Express-Winston logger handler.
  *
+ * @param elasticSearchEndpoint - elastic search engine endpoint (optional).
  * @returns  Express-Winston logger handler
  *
  */
-export function httpLogger(): Handler {
+export function httpLogger(elasticSearchEndpoint?: string): Handler {
+  const transports: winston.transport[] = [new winston.transports.Console()]
+
+  if (elasticSearchEndpoint) {
+    const esTransport = createElasticTransport(elasticSearchEndpoint)
+    transports.push(esTransport)
+  }
+
   const opts: expressWinston.LoggerOptions = {
-    transports: [new winston.transports.Console()],
-    format: winston.format.combine(winston.format.json()),
+    transports,
+    format: winston.format.combine(winston.format.timestamp({ format: elasticDateFormat }), winston.format.json()),
     meta: true,
     msg: 'HTTP {{req.method}} {{req.url}}',
     expressFormat: true,
@@ -109,3 +151,72 @@ export function createStdConsoleLogger(): winston.Logger {
     transports,
   })
 }
+/**
+ * Creates Winston logger with Elastic search.
+ *
+ * @returns Winston logger
+ *
+ */
+function createElasticLogger(elasticSearchEndpoint: string): winston.Logger {
+  const loggerOptions = createDefaultLoggerOptions()
+
+  // Formats
+  loggerOptions.format = winston.format.combine(
+    winston.format.timestamp({ format: elasticDateFormat }),
+    winston.format.printf((info) => `${info.timestamp} ${info.level}: ${info.message}`)
+  )
+
+  // Transports
+  let transports: transport[] = []
+  if (loggerOptions.transports !== undefined) {
+    transports = Array.isArray(loggerOptions.transports) ? loggerOptions.transports : [loggerOptions.transports]
+  }
+
+  const esTransport = createElasticTransport(elasticSearchEndpoint)
+  transports.push(esTransport)
+
+  // Logger
+  const logger = winston.createLogger(loggerOptions)
+
+  // Handle logger error.
+  logger.on('error', (err) => {
+    // Allow console for logging errors of the logger.
+    /* eslint-disable no-console */
+    console.error('Error in logger caught:', err)
+  })
+
+  return logger
+}
+
+/**
+ * Updates the default system logger with elastic search capabilities.
+ *
+ * @param elasticSearchEndpoint - elastic search engine endpoint.
+ */
+export function initElasticLogger(elasticSearchEndpoint: string): void {
+  InnerLogger = createElasticLogger(elasticSearchEndpoint)
+}
+
+/**
+ * Creates winston logger transport for the elastic search engine.
+ *
+ * @param elasticSearchEndpoint - elastic search engine endpoint.
+ * @returns elastic search winston transport
+ */
+function createElasticTransport(elasticSearchEndpoint: string): winston.transport {
+  const possibleLevels = ['warn', 'error', 'debug', 'info']
+
+  let elasticLogLevel = process.env.ELASTIC_LOG_LEVEL ?? ''
+  elasticLogLevel = elasticLogLevel.toLowerCase().trim()
+
+  if (!possibleLevels.includes(elasticLogLevel)) {
+    elasticLogLevel = 'debug' // default
+  }
+
+  const esTransportOpts = {
+    level: elasticLogLevel,
+    clientOpts: { node: elasticSearchEndpoint, maxRetries: 5 },
+    index: 'storage-node',
+  }
+  return new ElasticsearchTransport(esTransportOpts)
+}

برخی فایل‌ها در این مقایسهٔ diff نمایش داده نمی‌شوند زیرا تعداد فایل‌ها بسیار زیاد است