Ver Fonte

Merge branch 'giza_staging' into colossus_staging

# Conflicts:
#	docker-compose.yml
#	storage-node-v2/scripts/run-all-commands.sh
#	storage-node/package.json
#	storage-node/packages/runtime-api/package.json
#	yarn.lock
Shamil Gadelshin há 3 anos atrás
pai
commit
b954792627
100 ficheiros alterados com 9409 adições e 5104 exclusões
  1. 8 0
      .env
  2. 0 28
      .github/workflows/content-metadata.yml
  3. 32 32
      .github/workflows/joystream-cli.yml
  4. 22 0
      .github/workflows/metadata-protobuf.yml
  5. 1 1
      .github/workflows/run-network-tests.yml
  6. 27 4
      build-docker-images.sh
  7. 4 4
      build-npm-packages.sh
  8. 5 5
      cli/package.json
  9. 0 4
      content-metadata-protobuf/.eslintignore
  10. 0 16
      content-metadata-protobuf/.eslintrc.js
  11. 0 2
      content-metadata-protobuf/.gitignore
  12. 0 4
      content-metadata-protobuf/.prettierignore
  13. 0 53
      content-metadata-protobuf/README.md
  14. 0 15
      content-metadata-protobuf/compile.sh
  15. 0 85
      content-metadata-protobuf/compiled/proto/Channel_pb.d.ts
  16. 0 646
      content-metadata-protobuf/compiled/proto/Channel_pb.js
  17. 0 57
      content-metadata-protobuf/compiled/proto/Person_pb.d.ts
  18. 0 428
      content-metadata-protobuf/compiled/proto/Person_pb.js
  19. 0 33
      content-metadata-protobuf/compiled/proto/Playlist_pb.d.ts
  20. 0 246
      content-metadata-protobuf/compiled/proto/Playlist_pb.js
  21. 0 85
      content-metadata-protobuf/compiled/proto/Series_pb.d.ts
  22. 0 666
      content-metadata-protobuf/compiled/proto/Series_pb.js
  23. 0 235
      content-metadata-protobuf/compiled/proto/Video_pb.d.ts
  24. 0 1847
      content-metadata-protobuf/compiled/proto/Video_pb.js
  25. 0 374
      content-metadata-protobuf/doc/index.md
  26. 0 13
      content-metadata-protobuf/generate-md-doc.sh
  27. 0 47
      content-metadata-protobuf/package.json
  28. 0 10
      content-metadata-protobuf/src/index.ts
  29. 0 33
      content-metadata-protobuf/test/channel.ts
  30. 0 115
      content-metadata-protobuf/test/video.ts
  31. 0 15
      content-metadata-protobuf/tsconfig.json
  32. 2 1
      devops/vscode/settings.json
  33. 26 0
      distributor-node.Dockerfile
  34. 1 0
      distributor-node/.eslintignore
  35. 9 0
      distributor-node/.gitignore
  36. 3 0
      distributor-node/.prettierignore
  37. 419 0
      distributor-node/README.md
  38. 3 0
      distributor-node/bin/run
  39. 3 0
      distributor-node/bin/run.cmd
  40. 22 0
      distributor-node/config.yml
  41. 21 0
      distributor-node/config/docker/config.docker.yml
  42. 5 0
      distributor-node/config/docker/filebeat.Dockerfile
  43. 19 0
      distributor-node/config/docker/filebeat.docker.yml
  44. 76 0
      distributor-node/docker-compose.yml
  45. 7 0
      distributor-node/openapitools.json
  46. 120 0
      distributor-node/package.json
  47. 16 0
      distributor-node/scripts/data/family-metadata.json
  48. 12 0
      distributor-node/scripts/data/operator-metadata.json
  49. 17 0
      distributor-node/scripts/init-bucket.sh
  50. 36 0
      distributor-node/scripts/test-commands.sh
  51. 1 0
      distributor-node/src/@types/@elastic/esc-winston-format/index.d.ts
  52. 1 0
      distributor-node/src/@types/js-image-generator/index.d.ts
  53. 216 0
      distributor-node/src/api-spec/openapi.yml
  54. 128 0
      distributor-node/src/app/index.ts
  55. 6 0
      distributor-node/src/command-base/ExitCodes.ts
  56. 65 0
      distributor-node/src/command-base/accounts.ts
  57. 40 0
      distributor-node/src/command-base/api.ts
  58. 96 0
      distributor-node/src/command-base/default.ts
  59. 135 0
      distributor-node/src/commands/dev/batchUpload.ts
  60. 93 0
      distributor-node/src/commands/dev/init.ts
  61. 38 0
      distributor-node/src/commands/leader/cancel-invitation.ts
  62. 25 0
      distributor-node/src/commands/leader/create-bucket-family.ts
  63. 38 0
      distributor-node/src/commands/leader/create-bucket.ts
  64. 28 0
      distributor-node/src/commands/leader/delete-bucket-family.ts
  65. 33 0
      distributor-node/src/commands/leader/delete-bucket.ts
  66. 39 0
      distributor-node/src/commands/leader/invite-bucket-operator.ts
  67. 38 0
      distributor-node/src/commands/leader/remove-bucket-operator.ts
  68. 45 0
      distributor-node/src/commands/leader/set-bucket-family-metadata.ts
  69. 28 0
      distributor-node/src/commands/leader/set-buckets-per-bag-limit.ts
  70. 54 0
      distributor-node/src/commands/leader/update-bag.ts
  71. 38 0
      distributor-node/src/commands/leader/update-bucket-mode.ts
  72. 39 0
      distributor-node/src/commands/leader/update-bucket-status.ts
  73. 53 0
      distributor-node/src/commands/leader/update-dynamic-bag-policy.ts
  74. 38 0
      distributor-node/src/commands/operator/accept-invitation.ts
  75. 61 0
      distributor-node/src/commands/operator/set-metadata.ts
  76. 19 0
      distributor-node/src/commands/start.ts
  77. 1 0
      distributor-node/src/index.ts
  78. 305 0
      distributor-node/src/services/cache/StateCacheService.ts
  79. 228 0
      distributor-node/src/services/content/ContentService.ts
  80. 87 0
      distributor-node/src/services/content/FileContinousReadStream.ts
  81. 134 0
      distributor-node/src/services/logging/LoggingService.ts
  82. 1 0
      distributor-node/src/services/logging/index.ts
  83. 352 0
      distributor-node/src/services/networking/NetworkingService.ts
  84. 27 0
      distributor-node/src/services/networking/distributor-node/generated/.openapi-generator-ignore
  85. 5 0
      distributor-node/src/services/networking/distributor-node/generated/.openapi-generator/FILES
  86. 1 0
      distributor-node/src/services/networking/distributor-node/generated/.openapi-generator/VERSION
  87. 380 0
      distributor-node/src/services/networking/distributor-node/generated/api.ts
  88. 71 0
      distributor-node/src/services/networking/distributor-node/generated/base.ts
  89. 138 0
      distributor-node/src/services/networking/distributor-node/generated/common.ts
  90. 101 0
      distributor-node/src/services/networking/distributor-node/generated/configuration.ts
  91. 18 0
      distributor-node/src/services/networking/distributor-node/generated/index.ts
  92. 1 0
      distributor-node/src/services/networking/index.ts
  93. 91 0
      distributor-node/src/services/networking/query-node/api.ts
  94. 33 0
      distributor-node/src/services/networking/query-node/codegen.yml
  95. 115 0
      distributor-node/src/services/networking/query-node/generated/queries.ts
  96. 4710 0
      distributor-node/src/services/networking/query-node/generated/schema.ts
  97. 78 0
      distributor-node/src/services/networking/query-node/queries/queries.graphql
  98. 145 0
      distributor-node/src/services/networking/runtime/api.ts
  99. 49 0
      distributor-node/src/services/networking/storage-node/api.ts
  100. 27 0
      distributor-node/src/services/networking/storage-node/generated/.openapi-generator-ignore

+ 8 - 0
.env

@@ -37,3 +37,11 @@ WS_PROVIDER_ENDPOINT_URI=ws://joystream-node:9944/
 
 # If running joystream-node on host machine you can use following address to reach it instead
 # WS_PROVIDER_ENDPOINT_URI=ws://host.docker.internal:9944/
+
+######################
+#    Storage Node    #
+######################
+COLOSSUS_PORT=3333
+QUERY_NODE_HOST=${GRAPHQL_SERVER_HOST}:${GRAPHQL_SERVER_PORT}
+WORKER_ID=0
+ACCOUNT_URI=//Alice

+ 0 - 28
.github/workflows/content-metadata.yml

@@ -1,28 +0,0 @@
-name: content-metadata
-on: [pull_request, push]
-
-jobs:
-  schemas_checks:
-    name: Checks
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        node-version: [14.x]
-    steps:
-    - uses: actions/checkout@v1
-    - name: Use Node.js ${{ matrix.node-version }}
-      uses: actions/setup-node@v1
-      with:
-        node-version: ${{ matrix.node-version }}
-    - name: test protobuf
-      run: |
-        # # Install protoc compiler
-        # sudo apt-get install -y protobuf-compiler
-        # protoc --version
-        # # Install documentation plugin
-        # sudo apt-get install -y golang-go
-        # go get -u github.com/pseudomuto/protoc-gen-doc/cmd/protoc-gen-doc
-        yarn install --frozen-lockfile
-        yarn workspace @joystream/content-metadata-protobuf build:ts
-        yarn workspace @joystream/content-metadata-protobuf checks --quiet
-        yarn workspace @joystream/content-metadata-protobuf test

+ 32 - 32
.github/workflows/joystream-cli.yml

@@ -9,22 +9,22 @@ jobs:
       matrix:
         node-version: [14.x]
     steps:
-    - uses: actions/checkout@v1
-    - name: Use Node.js ${{ matrix.node-version }}
-      uses: actions/setup-node@v1
-      with:
-        node-version: ${{ matrix.node-version }}
-    - name: checks
-      run: |
-        yarn install --frozen-lockfile
-        yarn workspace @joystream/types build
-        yarn workspace @joystream/content-metadata-protobuf build:ts
-        yarn workspace @joystream/cli checks --quiet
-    - name: yarn pack test
-      run: |
-        yarn workspace @joystream/cli pack --filename cli-pack-test.tgz
-        tar zxvf ./cli/cli-pack-test.tgz -C cli
-        cd ./cli/package && yarn link
+      - uses: actions/checkout@v1
+      - name: Use Node.js ${{ matrix.node-version }}
+        uses: actions/setup-node@v1
+        with:
+          node-version: ${{ matrix.node-version }}
+      - name: checks
+        run: |
+          yarn install --frozen-lockfile
+          yarn workspace @joystream/types build
+          yarn workspace @joystream/metadata-protobuf build
+          yarn workspace @joystream/cli checks --quiet
+      - name: yarn pack test
+        run: |
+          yarn workspace @joystream/cli pack --filename cli-pack-test.tgz
+          tar zxvf ./cli/cli-pack-test.tgz -C cli
+          cd ./cli/package && yarn link
 
   cli_build_osx:
     name: MacOS Checks
@@ -33,19 +33,19 @@ jobs:
       matrix:
         node-version: [14.x]
     steps:
-    - uses: actions/checkout@v1
-    - name: Use Node.js ${{ matrix.node-version }}
-      uses: actions/setup-node@v1
-      with:
-        node-version: ${{ matrix.node-version }}
-    - name: checks
-      run: |
-        yarn install --frozen-lockfile --network-timeout 120000
-        yarn workspace @joystream/types build
-        yarn workspace @joystream/content-metadata-protobuf build:ts
-        yarn workspace @joystream/cli checks --quiet
-    - name: yarn pack test
-      run: |
-        yarn workspace @joystream/cli pack --filename cli-pack-test.tgz
-        tar zxvf ./cli/cli-pack-test.tgz -C cli
-        cd ./cli/package && yarn link
+      - uses: actions/checkout@v1
+      - name: Use Node.js ${{ matrix.node-version }}
+        uses: actions/setup-node@v1
+        with:
+          node-version: ${{ matrix.node-version }}
+      - name: checks
+        run: |
+          yarn install --frozen-lockfile --network-timeout 120000
+          yarn workspace @joystream/types build
+          yarn workspace @joystream/metadata-protobuf build
+          yarn workspace @joystream/cli checks --quiet
+      - name: yarn pack test
+        run: |
+          yarn workspace @joystream/cli pack --filename cli-pack-test.tgz
+          tar zxvf ./cli/cli-pack-test.tgz -C cli
+          cd ./cli/package && yarn link

+ 22 - 0
.github/workflows/metadata-protobuf.yml

@@ -0,0 +1,22 @@
+name: metadata-protobuf
+on: [pull_request, push]
+
+jobs:
+  schemas_checks:
+    name: Checks
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        node-version: [14.x]
+    steps:
+    - uses: actions/checkout@v1
+    - name: Use Node.js ${{ matrix.node-version }}
+      uses: actions/setup-node@v1
+      with:
+        node-version: ${{ matrix.node-version }}
+    - name: test protobuf
+      run: |
+        yarn install --frozen-lockfile
+        yarn workspace @joystream/metadata-protobuf build
+        yarn workspace @joystream/metadata-protobuf checks --quiet
+        yarn workspace @joystream/metadata-protobuf test

+ 1 - 1
.github/workflows/run-network-tests.yml

@@ -152,7 +152,7 @@ jobs:
         run: |
           yarn install --frozen-lockfile
           yarn workspace @joystream/types build
-          yarn workspace @joystream/content-metadata-protobuf build:ts
+          yarn workspace @joystream/metadata-protobuf build
       - name: Ensure query-node builds
         run: yarn workspace query-node-root build
       - name: Ensure tests are runnable

+ 27 - 4
build-docker-images.sh

@@ -28,10 +28,33 @@ else
   fi
 fi
 
-# Build joystream/apps docker image
-echo "Building 'joystream/apps' docker image..."
+# Build colossus docker image
+echo "Building colossus docker image..."
 docker-compose build colossus
 
+# Build distributor docker image
+echo "Building distributor docker image..."
+docker-compose build distributor-node
+
+if [[ "$OSTYPE" == "linux-gnu" ]]; then
+    IP_ADDRESS=$(ip addr show | grep "\binet\b.*\bdocker0\b" | awk '{print $2}' | cut -d '/' -f 1)
+    # Run a local development chain
+    docker-compose -f docker-compose.linux-gnu-build.yml up -d joystream-node
+
+    # Build processor/graphql-server docker image
+    echo "Building joystream/apps docker image..."
+    WS_PROVIDER_ENDPOINT_URI=ws://${IP_ADDRESS}:9944/ docker-compose build graphql-server
+elif [[ "$OSTYPE" == "darwin"* ]]; then
+    # Run a local development chain
+    docker-compose up -d joystream-node
+
+    # Build processor/graphql-server docker image
+    echo "Building joystream/apps docker image..."
+    WS_PROVIDER_ENDPOINT_URI=ws://host.docker.internal:9944/ docker-compose build graphql-server
+fi
+
+docker-compose down
+
 # Build the pioneer docker image
-echo "Building pioneer docker image"
-docker-compose build pioneer
+# echo "Building pioneer docker image"
+# docker-compose build pioneer

+ 4 - 4
build-npm-packages.sh

@@ -4,9 +4,9 @@ set -e
 
 yarn
 yarn workspace @joystream/types build
-yarn workspace @joystream/content-metadata-protobuf build:ts
+yarn workspace @joystream/metadata-protobuf build
 yarn workspace query-node-root build
-yarn workspace @joystream/cli build
-yarn workspace storage-node build
+# yarn workspace @joystream/cli build
 yarn workspace storage-node-v2 build
-yarn workspace pioneer build
+yarn workspace @joystream/distributor-cli build
+# yarn workspace pioneer build

+ 5 - 5
cli/package.json

@@ -10,7 +10,7 @@
   "dependencies": {
     "@apidevtools/json-schema-ref-parser": "^9.0.6",
     "@ffprobe-installer/ffprobe": "^1.1.0",
-    "@joystream/content-metadata-protobuf": "^1.1.0",
+    "@joystream/metadata-protobuf": "^1.0.0",
     "@joystream/types": "^0.16.1",
     "@oclif/command": "^1.5.19",
     "@oclif/config": "^1.14.0",
@@ -18,7 +18,7 @@
     "@oclif/plugin-help": "^3.2.2",
     "@oclif/plugin-not-found": "^1.2.4",
     "@oclif/plugin-warn-if-update-available": "^1.7.0",
-    "@polkadot/api": "4.2.1",
+    "@polkadot/api": "5.9.1",
     "@types/cli-progress": "^3.9.1",
     "@types/fluent-ffmpeg": "^2.1.16",
     "@types/inquirer": "^6.5.0",
@@ -48,7 +48,7 @@
   "devDependencies": {
     "@oclif/dev-cli": "^1.22.2",
     "@oclif/test": "^1.2.5",
-    "@polkadot/ts": "^0.3.62",
+    "@polkadot/ts": "^0.4.8",
     "@types/chai": "^4.2.11",
     "@types/mocha": "^5.2.7",
     "@types/node": "^10.17.18",
@@ -60,8 +60,8 @@
     "json-schema-to-typescript": "^9.1.1",
     "mocha": "^5.2.0",
     "nyc": "^14.1.1",
-    "ts-node": "^8.8.2",
-    "typescript": "^3.8.3"
+    "ts-node": "^10.2.1",
+    "typescript": "^4.4.3"
   },
   "engines": {
     "node": ">=14.0.0",

+ 0 - 4
content-metadata-protobuf/.eslintignore

@@ -1,4 +0,0 @@
-lib/
-proto/
-compiled/
-.eslintrc.js

+ 0 - 16
content-metadata-protobuf/.eslintrc.js

@@ -1,16 +0,0 @@
-module.exports = {
-  env: {
-    mocha: true,
-  },
-  parserOptions: {
-    project: './tsconfig.json'
-  },
-  extends: [
-    '@joystream/eslint-config'
-  ],
-  rules: {
-    'no-unused-vars': 'off', // Required by the typescript rule below
-    '@typescript-eslint/no-unused-vars': ['error'],
-    '@typescript-eslint/no-floating-promises': 'error',
-  },
-}

+ 0 - 2
content-metadata-protobuf/.gitignore

@@ -1,2 +0,0 @@
-node_modules/
-lib/

+ 0 - 4
content-metadata-protobuf/.prettierignore

@@ -1,4 +0,0 @@
-lib/
-doc/
-proto/
-compiled/

+ 0 - 53
content-metadata-protobuf/README.md

@@ -1,53 +0,0 @@
-## Joystream Content Directory Metadata Library
-
-This package contains protobuf message definitions compiled to Javascript/Typescript used for creating and updating various metadata blobs in the joystream content directory.
-
-### Message Specs
-
-Documented in [doc](./doc) folder
-
-### Choice of protobuf protocol v2
-
-For our usecase we wish to re-use same message to create and update  subset of fields.
-For this reason we need the explicit information about wether a field has been set or not and this is only possible with proto v2.
-
-Background: required/optional feilds are deprecated in [proto v3](https://www.ben-morris.com/handling-protocol-buffers-backwards-compatibility-between-versions-2-and-3-using-c/)
-
-
-### Helper methods
-The custom Joystream types such as License have helper methods to construct pre-defined well known values.
-
-### Example code:
-
-Best place to look at are the [tests specs](./test)
-
-### Opaque types
-We use simple [ISO_639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) code representation for Language.
-useful npm package https://www.npmjs.com/package/iso-639-1
-
-### Building the package
-
-Building will compile the protofiles and build the library from source.
-
-- pre-requisists for compiling protofiles:
-    - [protoc](https://github.com/protocolbuffers/protobuf/releases)
-
-- pre-requisists for generating documentation:
-    - [golang](https://golang.org/)
-    - [protoc-gen-doc](https://github.com/pseudomuto/protoc-gen-doc) to generate docs
-
-```
-yarn && yarn build
-```
-
-### Generating docs
-
-```
-yarn generate-docs
-```
-
-### Tests
-
-```
-yarn test
-```

+ 0 - 15
content-metadata-protobuf/compile.sh

@@ -1,15 +0,0 @@
-#!/usr/bin/env bash
-
-# Path to this plugin
-PROTOC_GEN_TS_PATH="./node_modules/.bin/protoc-gen-ts"
-
-# Directory to write generated code to (.js and .d.ts files)
-OUT_DIR="./compiled"
-mkdir -p ${OUT_DIR}
-
-# Compile proto files
-protoc \
-    --plugin="protoc-gen-ts=${PROTOC_GEN_TS_PATH}" \
-    --js_out="import_style=commonjs,binary:${OUT_DIR}" \
-    --ts_out="${OUT_DIR}" \
-    proto/*.proto

+ 0 - 85
content-metadata-protobuf/compiled/proto/Channel_pb.d.ts

@@ -1,85 +0,0 @@
-// package: 
-// file: proto/Channel.proto
-
-import * as jspb from "google-protobuf";
-
-export class ChannelMetadata extends jspb.Message {
-  hasTitle(): boolean;
-  clearTitle(): void;
-  getTitle(): string | undefined;
-  setTitle(value: string): void;
-
-  hasDescription(): boolean;
-  clearDescription(): void;
-  getDescription(): string | undefined;
-  setDescription(value: string): void;
-
-  hasIsPublic(): boolean;
-  clearIsPublic(): void;
-  getIsPublic(): boolean | undefined;
-  setIsPublic(value: boolean): void;
-
-  hasLanguage(): boolean;
-  clearLanguage(): void;
-  getLanguage(): string | undefined;
-  setLanguage(value: string): void;
-
-  hasCoverPhoto(): boolean;
-  clearCoverPhoto(): void;
-  getCoverPhoto(): number | undefined;
-  setCoverPhoto(value: number): void;
-
-  hasAvatarPhoto(): boolean;
-  clearAvatarPhoto(): void;
-  getAvatarPhoto(): number | undefined;
-  setAvatarPhoto(value: number): void;
-
-  hasCategory(): boolean;
-  clearCategory(): void;
-  getCategory(): number | undefined;
-  setCategory(value: number): void;
-
-  serializeBinary(): Uint8Array;
-  toObject(includeInstance?: boolean): ChannelMetadata.AsObject;
-  static toObject(includeInstance: boolean, msg: ChannelMetadata): ChannelMetadata.AsObject;
-  static extensions: {[key: number]: jspb.ExtensionFieldInfo<jspb.Message>};
-  static extensionsBinary: {[key: number]: jspb.ExtensionFieldBinaryInfo<jspb.Message>};
-  static serializeBinaryToWriter(message: ChannelMetadata, writer: jspb.BinaryWriter): void;
-  static deserializeBinary(bytes: Uint8Array): ChannelMetadata;
-  static deserializeBinaryFromReader(message: ChannelMetadata, reader: jspb.BinaryReader): ChannelMetadata;
-}
-
-export namespace ChannelMetadata {
-  export type AsObject = {
-    title?: string,
-    description?: string,
-    isPublic?: boolean,
-    language?: string,
-    coverPhoto?: number,
-    avatarPhoto?: number,
-    category?: number,
-  }
-}
-
-export class ChannelCategoryMetadata extends jspb.Message {
-  hasName(): boolean;
-  clearName(): void;
-  getName(): string | undefined;
-  setName(value: string): void;
-
-  serializeBinary(): Uint8Array;
-  toObject(includeInstance?: boolean): ChannelCategoryMetadata.AsObject;
-  static toObject(includeInstance: boolean, msg: ChannelCategoryMetadata): ChannelCategoryMetadata.AsObject;
-  static extensions: {[key: number]: jspb.ExtensionFieldInfo<jspb.Message>};
-  static extensionsBinary: {[key: number]: jspb.ExtensionFieldBinaryInfo<jspb.Message>};
-  static serializeBinaryToWriter(message: ChannelCategoryMetadata, writer: jspb.BinaryWriter): void;
-  static deserializeBinary(bytes: Uint8Array): ChannelCategoryMetadata;
-  static deserializeBinaryFromReader(message: ChannelCategoryMetadata, reader: jspb.BinaryReader): ChannelCategoryMetadata;
-}
-
-export namespace ChannelCategoryMetadata {
-  export type AsObject = {
-    name?: string,
-  }
-}
-

+ 0 - 646
content-metadata-protobuf/compiled/proto/Channel_pb.js

@@ -1,646 +0,0 @@
-// source: proto/Channel.proto
-/**
- * @fileoverview
- * @enhanceable
- * @suppress {missingRequire} reports error on implicit type usages.
- * @suppress {messageConventions} JS Compiler reports an error if a variable or
- *     field starts with 'MSG_' and isn't a translatable message.
- * @public
- */
-// GENERATED CODE -- DO NOT EDIT!
-/* eslint-disable */
-// @ts-nocheck
-
-var jspb = require('google-protobuf');
-var goog = jspb;
-var global = Function('return this')();
-
-goog.exportSymbol('proto.ChannelCategoryMetadata', null, global);
-goog.exportSymbol('proto.ChannelMetadata', null, global);
-/**
- * Generated by JsPbCodeGenerator.
- * @param {Array=} opt_data Optional initial data array, typically from a
- * server response, or constructed directly in Javascript. The array is used
- * in place and becomes part of the constructed object. It is not cloned.
- * If no data is provided, the constructed object will be empty, but still
- * valid.
- * @extends {jspb.Message}
- * @constructor
- */
-proto.ChannelMetadata = function(opt_data) {
-  jspb.Message.initialize(this, opt_data, 0, -1, null, null);
-};
-goog.inherits(proto.ChannelMetadata, jspb.Message);
-if (goog.DEBUG && !COMPILED) {
-  /**
-   * @public
-   * @override
-   */
-  proto.ChannelMetadata.displayName = 'proto.ChannelMetadata';
-}
-/**
- * Generated by JsPbCodeGenerator.
- * @param {Array=} opt_data Optional initial data array, typically from a
- * server response, or constructed directly in Javascript. The array is used
- * in place and becomes part of the constructed object. It is not cloned.
- * If no data is provided, the constructed object will be empty, but still
- * valid.
- * @extends {jspb.Message}
- * @constructor
- */
-proto.ChannelCategoryMetadata = function(opt_data) {
-  jspb.Message.initialize(this, opt_data, 0, -1, null, null);
-};
-goog.inherits(proto.ChannelCategoryMetadata, jspb.Message);
-if (goog.DEBUG && !COMPILED) {
-  /**
-   * @public
-   * @override
-   */
-  proto.ChannelCategoryMetadata.displayName = 'proto.ChannelCategoryMetadata';
-}
-
-
-
-if (jspb.Message.GENERATE_TO_OBJECT) {
-/**
- * Creates an object representation of this proto.
- * Field names that are reserved in JavaScript and will be renamed to pb_name.
- * Optional fields that are not set will be set to undefined.
- * To access a reserved field use, foo.pb_<name>, eg, foo.pb_default.
- * For the list of reserved names please see:
- *     net/proto2/compiler/js/internal/generator.cc#kKeyword.
- * @param {boolean=} opt_includeInstance Deprecated. whether to include the
- *     JSPB instance for transitional soy proto support:
- *     http://goto/soy-param-migration
- * @return {!Object}
- */
-proto.ChannelMetadata.prototype.toObject = function(opt_includeInstance) {
-  return proto.ChannelMetadata.toObject(opt_includeInstance, this);
-};
-
-
-/**
- * Static version of the {@see toObject} method.
- * @param {boolean|undefined} includeInstance Deprecated. Whether to include
- *     the JSPB instance for transitional soy proto support:
- *     http://goto/soy-param-migration
- * @param {!proto.ChannelMetadata} msg The msg instance to transform.
- * @return {!Object}
- * @suppress {unusedLocalVariables} f is only used for nested messages
- */
-proto.ChannelMetadata.toObject = function(includeInstance, msg) {
-  var f, obj = {
-    title: (f = jspb.Message.getField(msg, 1)) == null ? undefined : f,
-    description: (f = jspb.Message.getField(msg, 2)) == null ? undefined : f,
-    isPublic: (f = jspb.Message.getBooleanField(msg, 3)) == null ? undefined : f,
-    language: (f = jspb.Message.getField(msg, 4)) == null ? undefined : f,
-    coverPhoto: (f = jspb.Message.getField(msg, 5)) == null ? undefined : f,
-    avatarPhoto: (f = jspb.Message.getField(msg, 6)) == null ? undefined : f,
-    category: (f = jspb.Message.getField(msg, 7)) == null ? undefined : f
-  };
-
-  if (includeInstance) {
-    obj.$jspbMessageInstance = msg;
-  }
-  return obj;
-};
-}
-
-
-/**
- * Deserializes binary data (in protobuf wire format).
- * @param {jspb.ByteSource} bytes The bytes to deserialize.
- * @return {!proto.ChannelMetadata}
- */
-proto.ChannelMetadata.deserializeBinary = function(bytes) {
-  var reader = new jspb.BinaryReader(bytes);
-  var msg = new proto.ChannelMetadata;
-  return proto.ChannelMetadata.deserializeBinaryFromReader(msg, reader);
-};
-
-
-/**
- * Deserializes binary data (in protobuf wire format) from the
- * given reader into the given message object.
- * @param {!proto.ChannelMetadata} msg The message object to deserialize into.
- * @param {!jspb.BinaryReader} reader The BinaryReader to use.
- * @return {!proto.ChannelMetadata}
- */
-proto.ChannelMetadata.deserializeBinaryFromReader = function(msg, reader) {
-  while (reader.nextField()) {
-    if (reader.isEndGroup()) {
-      break;
-    }
-    var field = reader.getFieldNumber();
-    switch (field) {
-    case 1:
-      var value = /** @type {string} */ (reader.readString());
-      msg.setTitle(value);
-      break;
-    case 2:
-      var value = /** @type {string} */ (reader.readString());
-      msg.setDescription(value);
-      break;
-    case 3:
-      var value = /** @type {boolean} */ (reader.readBool());
-      msg.setIsPublic(value);
-      break;
-    case 4:
-      var value = /** @type {string} */ (reader.readString());
-      msg.setLanguage(value);
-      break;
-    case 5:
-      var value = /** @type {number} */ (reader.readUint32());
-      msg.setCoverPhoto(value);
-      break;
-    case 6:
-      var value = /** @type {number} */ (reader.readUint32());
-      msg.setAvatarPhoto(value);
-      break;
-    case 7:
-      var value = /** @type {number} */ (reader.readUint64());
-      msg.setCategory(value);
-      break;
-    default:
-      reader.skipField();
-      break;
-    }
-  }
-  return msg;
-};
-
-
-/**
- * Serializes the message to binary data (in protobuf wire format).
- * @return {!Uint8Array}
- */
-proto.ChannelMetadata.prototype.serializeBinary = function() {
-  var writer = new jspb.BinaryWriter();
-  proto.ChannelMetadata.serializeBinaryToWriter(this, writer);
-  return writer.getResultBuffer();
-};
-
-
-/**
- * Serializes the given message to binary data (in protobuf wire
- * format), writing to the given BinaryWriter.
- * @param {!proto.ChannelMetadata} message
- * @param {!jspb.BinaryWriter} writer
- * @suppress {unusedLocalVariables} f is only used for nested messages
- */
-proto.ChannelMetadata.serializeBinaryToWriter = function(message, writer) {
-  var f = undefined;
-  f = /** @type {string} */ (jspb.Message.getField(message, 1));
-  if (f != null) {
-    writer.writeString(
-      1,
-      f
-    );
-  }
-  f = /** @type {string} */ (jspb.Message.getField(message, 2));
-  if (f != null) {
-    writer.writeString(
-      2,
-      f
-    );
-  }
-  f = /** @type {boolean} */ (jspb.Message.getField(message, 3));
-  if (f != null) {
-    writer.writeBool(
-      3,
-      f
-    );
-  }
-  f = /** @type {string} */ (jspb.Message.getField(message, 4));
-  if (f != null) {
-    writer.writeString(
-      4,
-      f
-    );
-  }
-  f = /** @type {number} */ (jspb.Message.getField(message, 5));
-  if (f != null) {
-    writer.writeUint32(
-      5,
-      f
-    );
-  }
-  f = /** @type {number} */ (jspb.Message.getField(message, 6));
-  if (f != null) {
-    writer.writeUint32(
-      6,
-      f
-    );
-  }
-  f = /** @type {number} */ (jspb.Message.getField(message, 7));
-  if (f != null) {
-    writer.writeUint64(
-      7,
-      f
-    );
-  }
-};
-
-
-/**
- * optional string title = 1;
- * @return {string}
- */
-proto.ChannelMetadata.prototype.getTitle = function() {
-  return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 1, ""));
-};
-
-
-/**
- * @param {string} value
- * @return {!proto.ChannelMetadata} returns this
- */
-proto.ChannelMetadata.prototype.setTitle = function(value) {
-  return jspb.Message.setField(this, 1, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.ChannelMetadata} returns this
- */
-proto.ChannelMetadata.prototype.clearTitle = function() {
-  return jspb.Message.setField(this, 1, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.ChannelMetadata.prototype.hasTitle = function() {
-  return jspb.Message.getField(this, 1) != null;
-};
-
-
-/**
- * optional string description = 2;
- * @return {string}
- */
-proto.ChannelMetadata.prototype.getDescription = function() {
-  return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 2, ""));
-};
-
-
-/**
- * @param {string} value
- * @return {!proto.ChannelMetadata} returns this
- */
-proto.ChannelMetadata.prototype.setDescription = function(value) {
-  return jspb.Message.setField(this, 2, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.ChannelMetadata} returns this
- */
-proto.ChannelMetadata.prototype.clearDescription = function() {
-  return jspb.Message.setField(this, 2, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.ChannelMetadata.prototype.hasDescription = function() {
-  return jspb.Message.getField(this, 2) != null;
-};
-
-
-/**
- * optional bool is_public = 3;
- * @return {boolean}
- */
-proto.ChannelMetadata.prototype.getIsPublic = function() {
-  return /** @type {boolean} */ (jspb.Message.getBooleanFieldWithDefault(this, 3, false));
-};
-
-
-/**
- * @param {boolean} value
- * @return {!proto.ChannelMetadata} returns this
- */
-proto.ChannelMetadata.prototype.setIsPublic = function(value) {
-  return jspb.Message.setField(this, 3, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.ChannelMetadata} returns this
- */
-proto.ChannelMetadata.prototype.clearIsPublic = function() {
-  return jspb.Message.setField(this, 3, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.ChannelMetadata.prototype.hasIsPublic = function() {
-  return jspb.Message.getField(this, 3) != null;
-};
-
-
-/**
- * optional string language = 4;
- * @return {string}
- */
-proto.ChannelMetadata.prototype.getLanguage = function() {
-  return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 4, ""));
-};
-
-
-/**
- * @param {string} value
- * @return {!proto.ChannelMetadata} returns this
- */
-proto.ChannelMetadata.prototype.setLanguage = function(value) {
-  return jspb.Message.setField(this, 4, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.ChannelMetadata} returns this
- */
-proto.ChannelMetadata.prototype.clearLanguage = function() {
-  return jspb.Message.setField(this, 4, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.ChannelMetadata.prototype.hasLanguage = function() {
-  return jspb.Message.getField(this, 4) != null;
-};
-
-
-/**
- * optional uint32 cover_photo = 5;
- * @return {number}
- */
-proto.ChannelMetadata.prototype.getCoverPhoto = function() {
-  return /** @type {number} */ (jspb.Message.getFieldWithDefault(this, 5, 0));
-};
-
-
-/**
- * @param {number} value
- * @return {!proto.ChannelMetadata} returns this
- */
-proto.ChannelMetadata.prototype.setCoverPhoto = function(value) {
-  return jspb.Message.setField(this, 5, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.ChannelMetadata} returns this
- */
-proto.ChannelMetadata.prototype.clearCoverPhoto = function() {
-  return jspb.Message.setField(this, 5, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.ChannelMetadata.prototype.hasCoverPhoto = function() {
-  return jspb.Message.getField(this, 5) != null;
-};
-
-
-/**
- * optional uint32 avatar_photo = 6;
- * @return {number}
- */
-proto.ChannelMetadata.prototype.getAvatarPhoto = function() {
-  return /** @type {number} */ (jspb.Message.getFieldWithDefault(this, 6, 0));
-};
-
-
-/**
- * @param {number} value
- * @return {!proto.ChannelMetadata} returns this
- */
-proto.ChannelMetadata.prototype.setAvatarPhoto = function(value) {
-  return jspb.Message.setField(this, 6, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.ChannelMetadata} returns this
- */
-proto.ChannelMetadata.prototype.clearAvatarPhoto = function() {
-  return jspb.Message.setField(this, 6, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.ChannelMetadata.prototype.hasAvatarPhoto = function() {
-  return jspb.Message.getField(this, 6) != null;
-};
-
-
-/**
- * optional uint64 category = 7;
- * @return {number}
- */
-proto.ChannelMetadata.prototype.getCategory = function() {
-  return /** @type {number} */ (jspb.Message.getFieldWithDefault(this, 7, 0));
-};
-
-
-/**
- * @param {number} value
- * @return {!proto.ChannelMetadata} returns this
- */
-proto.ChannelMetadata.prototype.setCategory = function(value) {
-  return jspb.Message.setField(this, 7, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.ChannelMetadata} returns this
- */
-proto.ChannelMetadata.prototype.clearCategory = function() {
-  return jspb.Message.setField(this, 7, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.ChannelMetadata.prototype.hasCategory = function() {
-  return jspb.Message.getField(this, 7) != null;
-};
-
-
-
-
-
-if (jspb.Message.GENERATE_TO_OBJECT) {
-/**
- * Creates an object representation of this proto.
- * Field names that are reserved in JavaScript and will be renamed to pb_name.
- * Optional fields that are not set will be set to undefined.
- * To access a reserved field use, foo.pb_<name>, eg, foo.pb_default.
- * For the list of reserved names please see:
- *     net/proto2/compiler/js/internal/generator.cc#kKeyword.
- * @param {boolean=} opt_includeInstance Deprecated. whether to include the
- *     JSPB instance for transitional soy proto support:
- *     http://goto/soy-param-migration
- * @return {!Object}
- */
-proto.ChannelCategoryMetadata.prototype.toObject = function(opt_includeInstance) {
-  return proto.ChannelCategoryMetadata.toObject(opt_includeInstance, this);
-};
-
-
-/**
- * Static version of the {@see toObject} method.
- * @param {boolean|undefined} includeInstance Deprecated. Whether to include
- *     the JSPB instance for transitional soy proto support:
- *     http://goto/soy-param-migration
- * @param {!proto.ChannelCategoryMetadata} msg The msg instance to transform.
- * @return {!Object}
- * @suppress {unusedLocalVariables} f is only used for nested messages
- */
-proto.ChannelCategoryMetadata.toObject = function(includeInstance, msg) {
-  var f, obj = {
-    name: (f = jspb.Message.getField(msg, 1)) == null ? undefined : f
-  };
-
-  if (includeInstance) {
-    obj.$jspbMessageInstance = msg;
-  }
-  return obj;
-};
-}
-
-
-/**
- * Deserializes binary data (in protobuf wire format).
- * @param {jspb.ByteSource} bytes The bytes to deserialize.
- * @return {!proto.ChannelCategoryMetadata}
- */
-proto.ChannelCategoryMetadata.deserializeBinary = function(bytes) {
-  var reader = new jspb.BinaryReader(bytes);
-  var msg = new proto.ChannelCategoryMetadata;
-  return proto.ChannelCategoryMetadata.deserializeBinaryFromReader(msg, reader);
-};
-
-
-/**
- * Deserializes binary data (in protobuf wire format) from the
- * given reader into the given message object.
- * @param {!proto.ChannelCategoryMetadata} msg The message object to deserialize into.
- * @param {!jspb.BinaryReader} reader The BinaryReader to use.
- * @return {!proto.ChannelCategoryMetadata}
- */
-proto.ChannelCategoryMetadata.deserializeBinaryFromReader = function(msg, reader) {
-  while (reader.nextField()) {
-    if (reader.isEndGroup()) {
-      break;
-    }
-    var field = reader.getFieldNumber();
-    switch (field) {
-    case 1:
-      var value = /** @type {string} */ (reader.readString());
-      msg.setName(value);
-      break;
-    default:
-      reader.skipField();
-      break;
-    }
-  }
-  return msg;
-};
-
-
-/**
- * Serializes the message to binary data (in protobuf wire format).
- * @return {!Uint8Array}
- */
-proto.ChannelCategoryMetadata.prototype.serializeBinary = function() {
-  var writer = new jspb.BinaryWriter();
-  proto.ChannelCategoryMetadata.serializeBinaryToWriter(this, writer);
-  return writer.getResultBuffer();
-};
-
-
-/**
- * Serializes the given message to binary data (in protobuf wire
- * format), writing to the given BinaryWriter.
- * @param {!proto.ChannelCategoryMetadata} message
- * @param {!jspb.BinaryWriter} writer
- * @suppress {unusedLocalVariables} f is only used for nested messages
- */
-proto.ChannelCategoryMetadata.serializeBinaryToWriter = function(message, writer) {
-  var f = undefined;
-  f = /** @type {string} */ (jspb.Message.getField(message, 1));
-  if (f != null) {
-    writer.writeString(
-      1,
-      f
-    );
-  }
-};
-
-
-/**
- * optional string name = 1;
- * @return {string}
- */
-proto.ChannelCategoryMetadata.prototype.getName = function() {
-  return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 1, ""));
-};
-
-
-/**
- * @param {string} value
- * @return {!proto.ChannelCategoryMetadata} returns this
- */
-proto.ChannelCategoryMetadata.prototype.setName = function(value) {
-  return jspb.Message.setField(this, 1, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.ChannelCategoryMetadata} returns this
- */
-proto.ChannelCategoryMetadata.prototype.clearName = function() {
-  return jspb.Message.setField(this, 1, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.ChannelCategoryMetadata.prototype.hasName = function() {
-  return jspb.Message.getField(this, 1) != null;
-};
-
-
-goog.object.extend(exports, proto);

+ 0 - 57
content-metadata-protobuf/compiled/proto/Person_pb.d.ts

@@ -1,57 +0,0 @@
-// package: 
-// file: proto/Person.proto
-
-import * as jspb from "google-protobuf";
-
-export class PersonMetadata extends jspb.Message {
-  hasFirstName(): boolean;
-  clearFirstName(): void;
-  getFirstName(): string | undefined;
-  setFirstName(value: string): void;
-
-  hasMiddleName(): boolean;
-  clearMiddleName(): void;
-  getMiddleName(): string | undefined;
-  setMiddleName(value: string): void;
-
-  hasLastName(): boolean;
-  clearLastName(): void;
-  getLastName(): string | undefined;
-  setLastName(value: string): void;
-
-  hasAbout(): boolean;
-  clearAbout(): void;
-  getAbout(): string | undefined;
-  setAbout(value: string): void;
-
-  hasCoverPhoto(): boolean;
-  clearCoverPhoto(): void;
-  getCoverPhoto(): number | undefined;
-  setCoverPhoto(value: number): void;
-
-  hasAvatarPhoto(): boolean;
-  clearAvatarPhoto(): void;
-  getAvatarPhoto(): number | undefined;
-  setAvatarPhoto(value: number): void;
-
-  serializeBinary(): Uint8Array;
-  toObject(includeInstance?: boolean): PersonMetadata.AsObject;
-  static toObject(includeInstance: boolean, msg: PersonMetadata): PersonMetadata.AsObject;
-  static extensions: {[key: number]: jspb.ExtensionFieldInfo<jspb.Message>};
-  static extensionsBinary: {[key: number]: jspb.ExtensionFieldBinaryInfo<jspb.Message>};
-  static serializeBinaryToWriter(message: PersonMetadata, writer: jspb.BinaryWriter): void;
-  static deserializeBinary(bytes: Uint8Array): PersonMetadata;
-  static deserializeBinaryFromReader(message: PersonMetadata, reader: jspb.BinaryReader): PersonMetadata;
-}
-
-export namespace PersonMetadata {
-  export type AsObject = {
-    firstName?: string,
-    middleName?: string,
-    lastName?: string,
-    about?: string,
-    coverPhoto?: number,
-    avatarPhoto?: number,
-  }
-}
-

+ 0 - 428
content-metadata-protobuf/compiled/proto/Person_pb.js

@@ -1,428 +0,0 @@
-// source: proto/Person.proto
-/**
- * @fileoverview
- * @enhanceable
- * @suppress {missingRequire} reports error on implicit type usages.
- * @suppress {messageConventions} JS Compiler reports an error if a variable or
- *     field starts with 'MSG_' and isn't a translatable message.
- * @public
- */
-// GENERATED CODE -- DO NOT EDIT!
-/* eslint-disable */
-// @ts-nocheck
-
-var jspb = require('google-protobuf');
-var goog = jspb;
-var global = Function('return this')();
-
-goog.exportSymbol('proto.PersonMetadata', null, global);
-/**
- * Generated by JsPbCodeGenerator.
- * @param {Array=} opt_data Optional initial data array, typically from a
- * server response, or constructed directly in Javascript. The array is used
- * in place and becomes part of the constructed object. It is not cloned.
- * If no data is provided, the constructed object will be empty, but still
- * valid.
- * @extends {jspb.Message}
- * @constructor
- */
-proto.PersonMetadata = function(opt_data) {
-  jspb.Message.initialize(this, opt_data, 0, -1, null, null);
-};
-goog.inherits(proto.PersonMetadata, jspb.Message);
-if (goog.DEBUG && !COMPILED) {
-  /**
-   * @public
-   * @override
-   */
-  proto.PersonMetadata.displayName = 'proto.PersonMetadata';
-}
-
-
-
-if (jspb.Message.GENERATE_TO_OBJECT) {
-/**
- * Creates an object representation of this proto.
- * Field names that are reserved in JavaScript and will be renamed to pb_name.
- * Optional fields that are not set will be set to undefined.
- * To access a reserved field use, foo.pb_<name>, eg, foo.pb_default.
- * For the list of reserved names please see:
- *     net/proto2/compiler/js/internal/generator.cc#kKeyword.
- * @param {boolean=} opt_includeInstance Deprecated. whether to include the
- *     JSPB instance for transitional soy proto support:
- *     http://goto/soy-param-migration
- * @return {!Object}
- */
-proto.PersonMetadata.prototype.toObject = function(opt_includeInstance) {
-  return proto.PersonMetadata.toObject(opt_includeInstance, this);
-};
-
-
-/**
- * Static version of the {@see toObject} method.
- * @param {boolean|undefined} includeInstance Deprecated. Whether to include
- *     the JSPB instance for transitional soy proto support:
- *     http://goto/soy-param-migration
- * @param {!proto.PersonMetadata} msg The msg instance to transform.
- * @return {!Object}
- * @suppress {unusedLocalVariables} f is only used for nested messages
- */
-proto.PersonMetadata.toObject = function(includeInstance, msg) {
-  var f, obj = {
-    firstName: (f = jspb.Message.getField(msg, 1)) == null ? undefined : f,
-    middleName: (f = jspb.Message.getField(msg, 2)) == null ? undefined : f,
-    lastName: (f = jspb.Message.getField(msg, 3)) == null ? undefined : f,
-    about: (f = jspb.Message.getField(msg, 4)) == null ? undefined : f,
-    coverPhoto: (f = jspb.Message.getField(msg, 5)) == null ? undefined : f,
-    avatarPhoto: (f = jspb.Message.getField(msg, 6)) == null ? undefined : f
-  };
-
-  if (includeInstance) {
-    obj.$jspbMessageInstance = msg;
-  }
-  return obj;
-};
-}
-
-
-/**
- * Deserializes binary data (in protobuf wire format).
- * @param {jspb.ByteSource} bytes The bytes to deserialize.
- * @return {!proto.PersonMetadata}
- */
-proto.PersonMetadata.deserializeBinary = function(bytes) {
-  var reader = new jspb.BinaryReader(bytes);
-  var msg = new proto.PersonMetadata;
-  return proto.PersonMetadata.deserializeBinaryFromReader(msg, reader);
-};
-
-
-/**
- * Deserializes binary data (in protobuf wire format) from the
- * given reader into the given message object.
- * @param {!proto.PersonMetadata} msg The message object to deserialize into.
- * @param {!jspb.BinaryReader} reader The BinaryReader to use.
- * @return {!proto.PersonMetadata}
- */
-proto.PersonMetadata.deserializeBinaryFromReader = function(msg, reader) {
-  while (reader.nextField()) {
-    if (reader.isEndGroup()) {
-      break;
-    }
-    var field = reader.getFieldNumber();
-    switch (field) {
-    case 1:
-      var value = /** @type {string} */ (reader.readString());
-      msg.setFirstName(value);
-      break;
-    case 2:
-      var value = /** @type {string} */ (reader.readString());
-      msg.setMiddleName(value);
-      break;
-    case 3:
-      var value = /** @type {string} */ (reader.readString());
-      msg.setLastName(value);
-      break;
-    case 4:
-      var value = /** @type {string} */ (reader.readString());
-      msg.setAbout(value);
-      break;
-    case 5:
-      var value = /** @type {number} */ (reader.readUint32());
-      msg.setCoverPhoto(value);
-      break;
-    case 6:
-      var value = /** @type {number} */ (reader.readUint32());
-      msg.setAvatarPhoto(value);
-      break;
-    default:
-      reader.skipField();
-      break;
-    }
-  }
-  return msg;
-};
-
-
-/**
- * Serializes the message to binary data (in protobuf wire format).
- * @return {!Uint8Array}
- */
-proto.PersonMetadata.prototype.serializeBinary = function() {
-  var writer = new jspb.BinaryWriter();
-  proto.PersonMetadata.serializeBinaryToWriter(this, writer);
-  return writer.getResultBuffer();
-};
-
-
-/**
- * Serializes the given message to binary data (in protobuf wire
- * format), writing to the given BinaryWriter.
- * @param {!proto.PersonMetadata} message
- * @param {!jspb.BinaryWriter} writer
- * @suppress {unusedLocalVariables} f is only used for nested messages
- */
-proto.PersonMetadata.serializeBinaryToWriter = function(message, writer) {
-  var f = undefined;
-  f = /** @type {string} */ (jspb.Message.getField(message, 1));
-  if (f != null) {
-    writer.writeString(
-      1,
-      f
-    );
-  }
-  f = /** @type {string} */ (jspb.Message.getField(message, 2));
-  if (f != null) {
-    writer.writeString(
-      2,
-      f
-    );
-  }
-  f = /** @type {string} */ (jspb.Message.getField(message, 3));
-  if (f != null) {
-    writer.writeString(
-      3,
-      f
-    );
-  }
-  f = /** @type {string} */ (jspb.Message.getField(message, 4));
-  if (f != null) {
-    writer.writeString(
-      4,
-      f
-    );
-  }
-  f = /** @type {number} */ (jspb.Message.getField(message, 5));
-  if (f != null) {
-    writer.writeUint32(
-      5,
-      f
-    );
-  }
-  f = /** @type {number} */ (jspb.Message.getField(message, 6));
-  if (f != null) {
-    writer.writeUint32(
-      6,
-      f
-    );
-  }
-};
-
-
-/**
- * optional string first_name = 1;
- * @return {string}
- */
-proto.PersonMetadata.prototype.getFirstName = function() {
-  return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 1, ""));
-};
-
-
-/**
- * @param {string} value
- * @return {!proto.PersonMetadata} returns this
- */
-proto.PersonMetadata.prototype.setFirstName = function(value) {
-  return jspb.Message.setField(this, 1, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.PersonMetadata} returns this
- */
-proto.PersonMetadata.prototype.clearFirstName = function() {
-  return jspb.Message.setField(this, 1, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.PersonMetadata.prototype.hasFirstName = function() {
-  return jspb.Message.getField(this, 1) != null;
-};
-
-
-/**
- * optional string middle_name = 2;
- * @return {string}
- */
-proto.PersonMetadata.prototype.getMiddleName = function() {
-  return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 2, ""));
-};
-
-
-/**
- * @param {string} value
- * @return {!proto.PersonMetadata} returns this
- */
-proto.PersonMetadata.prototype.setMiddleName = function(value) {
-  return jspb.Message.setField(this, 2, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.PersonMetadata} returns this
- */
-proto.PersonMetadata.prototype.clearMiddleName = function() {
-  return jspb.Message.setField(this, 2, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.PersonMetadata.prototype.hasMiddleName = function() {
-  return jspb.Message.getField(this, 2) != null;
-};
-
-
-/**
- * optional string last_name = 3;
- * @return {string}
- */
-proto.PersonMetadata.prototype.getLastName = function() {
-  return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 3, ""));
-};
-
-
-/**
- * @param {string} value
- * @return {!proto.PersonMetadata} returns this
- */
-proto.PersonMetadata.prototype.setLastName = function(value) {
-  return jspb.Message.setField(this, 3, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.PersonMetadata} returns this
- */
-proto.PersonMetadata.prototype.clearLastName = function() {
-  return jspb.Message.setField(this, 3, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.PersonMetadata.prototype.hasLastName = function() {
-  return jspb.Message.getField(this, 3) != null;
-};
-
-
-/**
- * optional string about = 4;
- * @return {string}
- */
-proto.PersonMetadata.prototype.getAbout = function() {
-  return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 4, ""));
-};
-
-
-/**
- * @param {string} value
- * @return {!proto.PersonMetadata} returns this
- */
-proto.PersonMetadata.prototype.setAbout = function(value) {
-  return jspb.Message.setField(this, 4, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.PersonMetadata} returns this
- */
-proto.PersonMetadata.prototype.clearAbout = function() {
-  return jspb.Message.setField(this, 4, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.PersonMetadata.prototype.hasAbout = function() {
-  return jspb.Message.getField(this, 4) != null;
-};
-
-
-/**
- * optional uint32 cover_photo = 5;
- * @return {number}
- */
-proto.PersonMetadata.prototype.getCoverPhoto = function() {
-  return /** @type {number} */ (jspb.Message.getFieldWithDefault(this, 5, 0));
-};
-
-
-/**
- * @param {number} value
- * @return {!proto.PersonMetadata} returns this
- */
-proto.PersonMetadata.prototype.setCoverPhoto = function(value) {
-  return jspb.Message.setField(this, 5, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.PersonMetadata} returns this
- */
-proto.PersonMetadata.prototype.clearCoverPhoto = function() {
-  return jspb.Message.setField(this, 5, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.PersonMetadata.prototype.hasCoverPhoto = function() {
-  return jspb.Message.getField(this, 5) != null;
-};
-
-
-/**
- * optional uint32 avatar_photo = 6;
- * @return {number}
- */
-proto.PersonMetadata.prototype.getAvatarPhoto = function() {
-  return /** @type {number} */ (jspb.Message.getFieldWithDefault(this, 6, 0));
-};
-
-
-/**
- * @param {number} value
- * @return {!proto.PersonMetadata} returns this
- */
-proto.PersonMetadata.prototype.setAvatarPhoto = function(value) {
-  return jspb.Message.setField(this, 6, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.PersonMetadata} returns this
- */
-proto.PersonMetadata.prototype.clearAvatarPhoto = function() {
-  return jspb.Message.setField(this, 6, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.PersonMetadata.prototype.hasAvatarPhoto = function() {
-  return jspb.Message.getField(this, 6) != null;
-};
-
-
-goog.object.extend(exports, proto);

+ 0 - 33
content-metadata-protobuf/compiled/proto/Playlist_pb.d.ts

@@ -1,33 +0,0 @@
-// package: 
-// file: proto/Playlist.proto
-
-import * as jspb from "google-protobuf";
-
-export class PlaylistMetadata extends jspb.Message {
-  hasTitle(): boolean;
-  clearTitle(): void;
-  getTitle(): string | undefined;
-  setTitle(value: string): void;
-
-  clearVideosList(): void;
-  getVideosList(): Array<number>;
-  setVideosList(value: Array<number>): void;
-  addVideos(value: number, index?: number): number;
-
-  serializeBinary(): Uint8Array;
-  toObject(includeInstance?: boolean): PlaylistMetadata.AsObject;
-  static toObject(includeInstance: boolean, msg: PlaylistMetadata): PlaylistMetadata.AsObject;
-  static extensions: {[key: number]: jspb.ExtensionFieldInfo<jspb.Message>};
-  static extensionsBinary: {[key: number]: jspb.ExtensionFieldBinaryInfo<jspb.Message>};
-  static serializeBinaryToWriter(message: PlaylistMetadata, writer: jspb.BinaryWriter): void;
-  static deserializeBinary(bytes: Uint8Array): PlaylistMetadata;
-  static deserializeBinaryFromReader(message: PlaylistMetadata, reader: jspb.BinaryReader): PlaylistMetadata;
-}
-
-export namespace PlaylistMetadata {
-  export type AsObject = {
-    title?: string,
-    videosList: Array<number>,
-  }
-}
-

+ 0 - 246
content-metadata-protobuf/compiled/proto/Playlist_pb.js

@@ -1,246 +0,0 @@
-// source: proto/Playlist.proto
-/**
- * @fileoverview
- * @enhanceable
- * @suppress {missingRequire} reports error on implicit type usages.
- * @suppress {messageConventions} JS Compiler reports an error if a variable or
- *     field starts with 'MSG_' and isn't a translatable message.
- * @public
- */
-// GENERATED CODE -- DO NOT EDIT!
-/* eslint-disable */
-// @ts-nocheck
-
-var jspb = require('google-protobuf');
-var goog = jspb;
-var global = Function('return this')();
-
-goog.exportSymbol('proto.PlaylistMetadata', null, global);
-/**
- * Generated by JsPbCodeGenerator.
- * @param {Array=} opt_data Optional initial data array, typically from a
- * server response, or constructed directly in Javascript. The array is used
- * in place and becomes part of the constructed object. It is not cloned.
- * If no data is provided, the constructed object will be empty, but still
- * valid.
- * @extends {jspb.Message}
- * @constructor
- */
-proto.PlaylistMetadata = function(opt_data) {
-  jspb.Message.initialize(this, opt_data, 0, -1, proto.PlaylistMetadata.repeatedFields_, null);
-};
-goog.inherits(proto.PlaylistMetadata, jspb.Message);
-if (goog.DEBUG && !COMPILED) {
-  /**
-   * @public
-   * @override
-   */
-  proto.PlaylistMetadata.displayName = 'proto.PlaylistMetadata';
-}
-
-/**
- * List of repeated fields within this message type.
- * @private {!Array<number>}
- * @const
- */
-proto.PlaylistMetadata.repeatedFields_ = [2];
-
-
-
-if (jspb.Message.GENERATE_TO_OBJECT) {
-/**
- * Creates an object representation of this proto.
- * Field names that are reserved in JavaScript and will be renamed to pb_name.
- * Optional fields that are not set will be set to undefined.
- * To access a reserved field use, foo.pb_<name>, eg, foo.pb_default.
- * For the list of reserved names please see:
- *     net/proto2/compiler/js/internal/generator.cc#kKeyword.
- * @param {boolean=} opt_includeInstance Deprecated. whether to include the
- *     JSPB instance for transitional soy proto support:
- *     http://goto/soy-param-migration
- * @return {!Object}
- */
-proto.PlaylistMetadata.prototype.toObject = function(opt_includeInstance) {
-  return proto.PlaylistMetadata.toObject(opt_includeInstance, this);
-};
-
-
-/**
- * Static version of the {@see toObject} method.
- * @param {boolean|undefined} includeInstance Deprecated. Whether to include
- *     the JSPB instance for transitional soy proto support:
- *     http://goto/soy-param-migration
- * @param {!proto.PlaylistMetadata} msg The msg instance to transform.
- * @return {!Object}
- * @suppress {unusedLocalVariables} f is only used for nested messages
- */
-proto.PlaylistMetadata.toObject = function(includeInstance, msg) {
-  var f, obj = {
-    title: (f = jspb.Message.getField(msg, 1)) == null ? undefined : f,
-    videosList: (f = jspb.Message.getRepeatedField(msg, 2)) == null ? undefined : f
-  };
-
-  if (includeInstance) {
-    obj.$jspbMessageInstance = msg;
-  }
-  return obj;
-};
-}
-
-
-/**
- * Deserializes binary data (in protobuf wire format).
- * @param {jspb.ByteSource} bytes The bytes to deserialize.
- * @return {!proto.PlaylistMetadata}
- */
-proto.PlaylistMetadata.deserializeBinary = function(bytes) {
-  var reader = new jspb.BinaryReader(bytes);
-  var msg = new proto.PlaylistMetadata;
-  return proto.PlaylistMetadata.deserializeBinaryFromReader(msg, reader);
-};
-
-
-/**
- * Deserializes binary data (in protobuf wire format) from the
- * given reader into the given message object.
- * @param {!proto.PlaylistMetadata} msg The message object to deserialize into.
- * @param {!jspb.BinaryReader} reader The BinaryReader to use.
- * @return {!proto.PlaylistMetadata}
- */
-proto.PlaylistMetadata.deserializeBinaryFromReader = function(msg, reader) {
-  while (reader.nextField()) {
-    if (reader.isEndGroup()) {
-      break;
-    }
-    var field = reader.getFieldNumber();
-    switch (field) {
-    case 1:
-      var value = /** @type {string} */ (reader.readString());
-      msg.setTitle(value);
-      break;
-    case 2:
-      var values = /** @type {!Array<number>} */ (reader.isDelimited() ? reader.readPackedUint64() : [reader.readUint64()]);
-      for (var i = 0; i < values.length; i++) {
-        msg.addVideos(values[i]);
-      }
-      break;
-    default:
-      reader.skipField();
-      break;
-    }
-  }
-  return msg;
-};
-
-
-/**
- * Serializes the message to binary data (in protobuf wire format).
- * @return {!Uint8Array}
- */
-proto.PlaylistMetadata.prototype.serializeBinary = function() {
-  var writer = new jspb.BinaryWriter();
-  proto.PlaylistMetadata.serializeBinaryToWriter(this, writer);
-  return writer.getResultBuffer();
-};
-
-
-/**
- * Serializes the given message to binary data (in protobuf wire
- * format), writing to the given BinaryWriter.
- * @param {!proto.PlaylistMetadata} message
- * @param {!jspb.BinaryWriter} writer
- * @suppress {unusedLocalVariables} f is only used for nested messages
- */
-proto.PlaylistMetadata.serializeBinaryToWriter = function(message, writer) {
-  var f = undefined;
-  f = /** @type {string} */ (jspb.Message.getField(message, 1));
-  if (f != null) {
-    writer.writeString(
-      1,
-      f
-    );
-  }
-  f = message.getVideosList();
-  if (f.length > 0) {
-    writer.writeRepeatedUint64(
-      2,
-      f
-    );
-  }
-};
-
-
-/**
- * optional string title = 1;
- * @return {string}
- */
-proto.PlaylistMetadata.prototype.getTitle = function() {
-  return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 1, ""));
-};
-
-
-/**
- * @param {string} value
- * @return {!proto.PlaylistMetadata} returns this
- */
-proto.PlaylistMetadata.prototype.setTitle = function(value) {
-  return jspb.Message.setField(this, 1, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.PlaylistMetadata} returns this
- */
-proto.PlaylistMetadata.prototype.clearTitle = function() {
-  return jspb.Message.setField(this, 1, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.PlaylistMetadata.prototype.hasTitle = function() {
-  return jspb.Message.getField(this, 1) != null;
-};
-
-
-/**
- * repeated uint64 videos = 2;
- * @return {!Array<number>}
- */
-proto.PlaylistMetadata.prototype.getVideosList = function() {
-  return /** @type {!Array<number>} */ (jspb.Message.getRepeatedField(this, 2));
-};
-
-
-/**
- * @param {!Array<number>} value
- * @return {!proto.PlaylistMetadata} returns this
- */
-proto.PlaylistMetadata.prototype.setVideosList = function(value) {
-  return jspb.Message.setField(this, 2, value || []);
-};
-
-
-/**
- * @param {number} value
- * @param {number=} opt_index
- * @return {!proto.PlaylistMetadata} returns this
- */
-proto.PlaylistMetadata.prototype.addVideos = function(value, opt_index) {
-  return jspb.Message.addToRepeatedField(this, 2, value, opt_index);
-};
-
-
-/**
- * Clears the list making it empty but non-null.
- * @return {!proto.PlaylistMetadata} returns this
- */
-proto.PlaylistMetadata.prototype.clearVideosList = function() {
-  return this.setVideosList([]);
-};
-
-
-goog.object.extend(exports, proto);

+ 0 - 85
content-metadata-protobuf/compiled/proto/Series_pb.d.ts

@@ -1,85 +0,0 @@
-// package: 
-// file: proto/Series.proto
-
-import * as jspb from "google-protobuf";
-
-export class SeriesMetadata extends jspb.Message {
-  hasTitle(): boolean;
-  clearTitle(): void;
-  getTitle(): string | undefined;
-  setTitle(value: string): void;
-
-  hasDescription(): boolean;
-  clearDescription(): void;
-  getDescription(): string | undefined;
-  setDescription(value: string): void;
-
-  hasCoverPhoto(): boolean;
-  clearCoverPhoto(): void;
-  getCoverPhoto(): number | undefined;
-  setCoverPhoto(value: number): void;
-
-  clearPersonsList(): void;
-  getPersonsList(): Array<number>;
-  setPersonsList(value: Array<number>): void;
-  addPersons(value: number, index?: number): number;
-
-  serializeBinary(): Uint8Array;
-  toObject(includeInstance?: boolean): SeriesMetadata.AsObject;
-  static toObject(includeInstance: boolean, msg: SeriesMetadata): SeriesMetadata.AsObject;
-  static extensions: {[key: number]: jspb.ExtensionFieldInfo<jspb.Message>};
-  static extensionsBinary: {[key: number]: jspb.ExtensionFieldBinaryInfo<jspb.Message>};
-  static serializeBinaryToWriter(message: SeriesMetadata, writer: jspb.BinaryWriter): void;
-  static deserializeBinary(bytes: Uint8Array): SeriesMetadata;
-  static deserializeBinaryFromReader(message: SeriesMetadata, reader: jspb.BinaryReader): SeriesMetadata;
-}
-
-export namespace SeriesMetadata {
-  export type AsObject = {
-    title?: string,
-    description?: string,
-    coverPhoto?: number,
-    personsList: Array<number>,
-  }
-}
-
-export class SeasonMetadata extends jspb.Message {
-  hasTitle(): boolean;
-  clearTitle(): void;
-  getTitle(): string | undefined;
-  setTitle(value: string): void;
-
-  hasDescription(): boolean;
-  clearDescription(): void;
-  getDescription(): string | undefined;
-  setDescription(value: string): void;
-
-  hasCoverPhoto(): boolean;
-  clearCoverPhoto(): void;
-  getCoverPhoto(): number | undefined;
-  setCoverPhoto(value: number): void;
-
-  clearPersonsList(): void;
-  getPersonsList(): Array<number>;
-  setPersonsList(value: Array<number>): void;
-  addPersons(value: number, index?: number): number;
-
-  serializeBinary(): Uint8Array;
-  toObject(includeInstance?: boolean): SeasonMetadata.AsObject;
-  static toObject(includeInstance: boolean, msg: SeasonMetadata): SeasonMetadata.AsObject;
-  static extensions: {[key: number]: jspb.ExtensionFieldInfo<jspb.Message>};
-  static extensionsBinary: {[key: number]: jspb.ExtensionFieldBinaryInfo<jspb.Message>};
-  static serializeBinaryToWriter(message: SeasonMetadata, writer: jspb.BinaryWriter): void;
-  static deserializeBinary(bytes: Uint8Array): SeasonMetadata;
-  static deserializeBinaryFromReader(message: SeasonMetadata, reader: jspb.BinaryReader): SeasonMetadata;
-}
-
-export namespace SeasonMetadata {
-  export type AsObject = {
-    title?: string,
-    description?: string,
-    coverPhoto?: number,
-    personsList: Array<number>,
-  }
-}
-

+ 0 - 666
content-metadata-protobuf/compiled/proto/Series_pb.js

@@ -1,666 +0,0 @@
-// source: proto/Series.proto
-/**
- * @fileoverview
- * @enhanceable
- * @suppress {missingRequire} reports error on implicit type usages.
- * @suppress {messageConventions} JS Compiler reports an error if a variable or
- *     field starts with 'MSG_' and isn't a translatable message.
- * @public
- */
-// GENERATED CODE -- DO NOT EDIT!
-/* eslint-disable */
-// @ts-nocheck
-
-var jspb = require('google-protobuf');
-var goog = jspb;
-var global = Function('return this')();
-
-goog.exportSymbol('proto.SeasonMetadata', null, global);
-goog.exportSymbol('proto.SeriesMetadata', null, global);
-/**
- * Generated by JsPbCodeGenerator.
- * @param {Array=} opt_data Optional initial data array, typically from a
- * server response, or constructed directly in Javascript. The array is used
- * in place and becomes part of the constructed object. It is not cloned.
- * If no data is provided, the constructed object will be empty, but still
- * valid.
- * @extends {jspb.Message}
- * @constructor
- */
-proto.SeriesMetadata = function(opt_data) {
-  jspb.Message.initialize(this, opt_data, 0, -1, proto.SeriesMetadata.repeatedFields_, null);
-};
-goog.inherits(proto.SeriesMetadata, jspb.Message);
-if (goog.DEBUG && !COMPILED) {
-  /**
-   * @public
-   * @override
-   */
-  proto.SeriesMetadata.displayName = 'proto.SeriesMetadata';
-}
-/**
- * Generated by JsPbCodeGenerator.
- * @param {Array=} opt_data Optional initial data array, typically from a
- * server response, or constructed directly in Javascript. The array is used
- * in place and becomes part of the constructed object. It is not cloned.
- * If no data is provided, the constructed object will be empty, but still
- * valid.
- * @extends {jspb.Message}
- * @constructor
- */
-proto.SeasonMetadata = function(opt_data) {
-  jspb.Message.initialize(this, opt_data, 0, -1, proto.SeasonMetadata.repeatedFields_, null);
-};
-goog.inherits(proto.SeasonMetadata, jspb.Message);
-if (goog.DEBUG && !COMPILED) {
-  /**
-   * @public
-   * @override
-   */
-  proto.SeasonMetadata.displayName = 'proto.SeasonMetadata';
-}
-
-/**
- * List of repeated fields within this message type.
- * @private {!Array<number>}
- * @const
- */
-proto.SeriesMetadata.repeatedFields_ = [4];
-
-
-
-if (jspb.Message.GENERATE_TO_OBJECT) {
-/**
- * Creates an object representation of this proto.
- * Field names that are reserved in JavaScript and will be renamed to pb_name.
- * Optional fields that are not set will be set to undefined.
- * To access a reserved field use, foo.pb_<name>, eg, foo.pb_default.
- * For the list of reserved names please see:
- *     net/proto2/compiler/js/internal/generator.cc#kKeyword.
- * @param {boolean=} opt_includeInstance Deprecated. whether to include the
- *     JSPB instance for transitional soy proto support:
- *     http://goto/soy-param-migration
- * @return {!Object}
- */
-proto.SeriesMetadata.prototype.toObject = function(opt_includeInstance) {
-  return proto.SeriesMetadata.toObject(opt_includeInstance, this);
-};
-
-
-/**
- * Static version of the {@see toObject} method.
- * @param {boolean|undefined} includeInstance Deprecated. Whether to include
- *     the JSPB instance for transitional soy proto support:
- *     http://goto/soy-param-migration
- * @param {!proto.SeriesMetadata} msg The msg instance to transform.
- * @return {!Object}
- * @suppress {unusedLocalVariables} f is only used for nested messages
- */
-proto.SeriesMetadata.toObject = function(includeInstance, msg) {
-  var f, obj = {
-    title: (f = jspb.Message.getField(msg, 1)) == null ? undefined : f,
-    description: (f = jspb.Message.getField(msg, 2)) == null ? undefined : f,
-    coverPhoto: (f = jspb.Message.getField(msg, 3)) == null ? undefined : f,
-    personsList: (f = jspb.Message.getRepeatedField(msg, 4)) == null ? undefined : f
-  };
-
-  if (includeInstance) {
-    obj.$jspbMessageInstance = msg;
-  }
-  return obj;
-};
-}
-
-
-/**
- * Deserializes binary data (in protobuf wire format).
- * @param {jspb.ByteSource} bytes The bytes to deserialize.
- * @return {!proto.SeriesMetadata}
- */
-proto.SeriesMetadata.deserializeBinary = function(bytes) {
-  var reader = new jspb.BinaryReader(bytes);
-  var msg = new proto.SeriesMetadata;
-  return proto.SeriesMetadata.deserializeBinaryFromReader(msg, reader);
-};
-
-
-/**
- * Deserializes binary data (in protobuf wire format) from the
- * given reader into the given message object.
- * @param {!proto.SeriesMetadata} msg The message object to deserialize into.
- * @param {!jspb.BinaryReader} reader The BinaryReader to use.
- * @return {!proto.SeriesMetadata}
- */
-proto.SeriesMetadata.deserializeBinaryFromReader = function(msg, reader) {
-  while (reader.nextField()) {
-    if (reader.isEndGroup()) {
-      break;
-    }
-    var field = reader.getFieldNumber();
-    switch (field) {
-    case 1:
-      var value = /** @type {string} */ (reader.readString());
-      msg.setTitle(value);
-      break;
-    case 2:
-      var value = /** @type {string} */ (reader.readString());
-      msg.setDescription(value);
-      break;
-    case 3:
-      var value = /** @type {number} */ (reader.readUint32());
-      msg.setCoverPhoto(value);
-      break;
-    case 4:
-      var values = /** @type {!Array<number>} */ (reader.isDelimited() ? reader.readPackedUint64() : [reader.readUint64()]);
-      for (var i = 0; i < values.length; i++) {
-        msg.addPersons(values[i]);
-      }
-      break;
-    default:
-      reader.skipField();
-      break;
-    }
-  }
-  return msg;
-};
-
-
-/**
- * Serializes the message to binary data (in protobuf wire format).
- * @return {!Uint8Array}
- */
-proto.SeriesMetadata.prototype.serializeBinary = function() {
-  var writer = new jspb.BinaryWriter();
-  proto.SeriesMetadata.serializeBinaryToWriter(this, writer);
-  return writer.getResultBuffer();
-};
-
-
-/**
- * Serializes the given message to binary data (in protobuf wire
- * format), writing to the given BinaryWriter.
- * @param {!proto.SeriesMetadata} message
- * @param {!jspb.BinaryWriter} writer
- * @suppress {unusedLocalVariables} f is only used for nested messages
- */
-proto.SeriesMetadata.serializeBinaryToWriter = function(message, writer) {
-  var f = undefined;
-  f = /** @type {string} */ (jspb.Message.getField(message, 1));
-  if (f != null) {
-    writer.writeString(
-      1,
-      f
-    );
-  }
-  f = /** @type {string} */ (jspb.Message.getField(message, 2));
-  if (f != null) {
-    writer.writeString(
-      2,
-      f
-    );
-  }
-  f = /** @type {number} */ (jspb.Message.getField(message, 3));
-  if (f != null) {
-    writer.writeUint32(
-      3,
-      f
-    );
-  }
-  f = message.getPersonsList();
-  if (f.length > 0) {
-    writer.writePackedUint64(
-      4,
-      f
-    );
-  }
-};
-
-
-/**
- * optional string title = 1;
- * @return {string}
- */
-proto.SeriesMetadata.prototype.getTitle = function() {
-  return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 1, ""));
-};
-
-
-/**
- * @param {string} value
- * @return {!proto.SeriesMetadata} returns this
- */
-proto.SeriesMetadata.prototype.setTitle = function(value) {
-  return jspb.Message.setField(this, 1, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.SeriesMetadata} returns this
- */
-proto.SeriesMetadata.prototype.clearTitle = function() {
-  return jspb.Message.setField(this, 1, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.SeriesMetadata.prototype.hasTitle = function() {
-  return jspb.Message.getField(this, 1) != null;
-};
-
-
-/**
- * optional string description = 2;
- * @return {string}
- */
-proto.SeriesMetadata.prototype.getDescription = function() {
-  return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 2, ""));
-};
-
-
-/**
- * @param {string} value
- * @return {!proto.SeriesMetadata} returns this
- */
-proto.SeriesMetadata.prototype.setDescription = function(value) {
-  return jspb.Message.setField(this, 2, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.SeriesMetadata} returns this
- */
-proto.SeriesMetadata.prototype.clearDescription = function() {
-  return jspb.Message.setField(this, 2, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.SeriesMetadata.prototype.hasDescription = function() {
-  return jspb.Message.getField(this, 2) != null;
-};
-
-
-/**
- * optional uint32 cover_photo = 3;
- * @return {number}
- */
-proto.SeriesMetadata.prototype.getCoverPhoto = function() {
-  return /** @type {number} */ (jspb.Message.getFieldWithDefault(this, 3, 0));
-};
-
-
-/**
- * @param {number} value
- * @return {!proto.SeriesMetadata} returns this
- */
-proto.SeriesMetadata.prototype.setCoverPhoto = function(value) {
-  return jspb.Message.setField(this, 3, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.SeriesMetadata} returns this
- */
-proto.SeriesMetadata.prototype.clearCoverPhoto = function() {
-  return jspb.Message.setField(this, 3, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.SeriesMetadata.prototype.hasCoverPhoto = function() {
-  return jspb.Message.getField(this, 3) != null;
-};
-
-
-/**
- * repeated uint64 persons = 4;
- * @return {!Array<number>}
- */
-proto.SeriesMetadata.prototype.getPersonsList = function() {
-  return /** @type {!Array<number>} */ (jspb.Message.getRepeatedField(this, 4));
-};
-
-
-/**
- * @param {!Array<number>} value
- * @return {!proto.SeriesMetadata} returns this
- */
-proto.SeriesMetadata.prototype.setPersonsList = function(value) {
-  return jspb.Message.setField(this, 4, value || []);
-};
-
-
-/**
- * @param {number} value
- * @param {number=} opt_index
- * @return {!proto.SeriesMetadata} returns this
- */
-proto.SeriesMetadata.prototype.addPersons = function(value, opt_index) {
-  return jspb.Message.addToRepeatedField(this, 4, value, opt_index);
-};
-
-
-/**
- * Clears the list making it empty but non-null.
- * @return {!proto.SeriesMetadata} returns this
- */
-proto.SeriesMetadata.prototype.clearPersonsList = function() {
-  return this.setPersonsList([]);
-};
-
-
-
-/**
- * List of repeated fields within this message type.
- * @private {!Array<number>}
- * @const
- */
-proto.SeasonMetadata.repeatedFields_ = [4];
-
-
-
-if (jspb.Message.GENERATE_TO_OBJECT) {
-/**
- * Creates an object representation of this proto.
- * Field names that are reserved in JavaScript and will be renamed to pb_name.
- * Optional fields that are not set will be set to undefined.
- * To access a reserved field use, foo.pb_<name>, eg, foo.pb_default.
- * For the list of reserved names please see:
- *     net/proto2/compiler/js/internal/generator.cc#kKeyword.
- * @param {boolean=} opt_includeInstance Deprecated. whether to include the
- *     JSPB instance for transitional soy proto support:
- *     http://goto/soy-param-migration
- * @return {!Object}
- */
-proto.SeasonMetadata.prototype.toObject = function(opt_includeInstance) {
-  return proto.SeasonMetadata.toObject(opt_includeInstance, this);
-};
-
-
-/**
- * Static version of the {@see toObject} method.
- * @param {boolean|undefined} includeInstance Deprecated. Whether to include
- *     the JSPB instance for transitional soy proto support:
- *     http://goto/soy-param-migration
- * @param {!proto.SeasonMetadata} msg The msg instance to transform.
- * @return {!Object}
- * @suppress {unusedLocalVariables} f is only used for nested messages
- */
-proto.SeasonMetadata.toObject = function(includeInstance, msg) {
-  var f, obj = {
-    title: (f = jspb.Message.getField(msg, 1)) == null ? undefined : f,
-    description: (f = jspb.Message.getField(msg, 2)) == null ? undefined : f,
-    coverPhoto: (f = jspb.Message.getField(msg, 3)) == null ? undefined : f,
-    personsList: (f = jspb.Message.getRepeatedField(msg, 4)) == null ? undefined : f
-  };
-
-  if (includeInstance) {
-    obj.$jspbMessageInstance = msg;
-  }
-  return obj;
-};
-}
-
-
-/**
- * Deserializes binary data (in protobuf wire format).
- * @param {jspb.ByteSource} bytes The bytes to deserialize.
- * @return {!proto.SeasonMetadata}
- */
-proto.SeasonMetadata.deserializeBinary = function(bytes) {
-  var reader = new jspb.BinaryReader(bytes);
-  var msg = new proto.SeasonMetadata;
-  return proto.SeasonMetadata.deserializeBinaryFromReader(msg, reader);
-};
-
-
-/**
- * Deserializes binary data (in protobuf wire format) from the
- * given reader into the given message object.
- * @param {!proto.SeasonMetadata} msg The message object to deserialize into.
- * @param {!jspb.BinaryReader} reader The BinaryReader to use.
- * @return {!proto.SeasonMetadata}
- */
-proto.SeasonMetadata.deserializeBinaryFromReader = function(msg, reader) {
-  while (reader.nextField()) {
-    if (reader.isEndGroup()) {
-      break;
-    }
-    var field = reader.getFieldNumber();
-    switch (field) {
-    case 1:
-      var value = /** @type {string} */ (reader.readString());
-      msg.setTitle(value);
-      break;
-    case 2:
-      var value = /** @type {string} */ (reader.readString());
-      msg.setDescription(value);
-      break;
-    case 3:
-      var value = /** @type {number} */ (reader.readUint32());
-      msg.setCoverPhoto(value);
-      break;
-    case 4:
-      var values = /** @type {!Array<number>} */ (reader.isDelimited() ? reader.readPackedUint64() : [reader.readUint64()]);
-      for (var i = 0; i < values.length; i++) {
-        msg.addPersons(values[i]);
-      }
-      break;
-    default:
-      reader.skipField();
-      break;
-    }
-  }
-  return msg;
-};
-
-
-/**
- * Serializes the message to binary data (in protobuf wire format).
- * @return {!Uint8Array}
- */
-proto.SeasonMetadata.prototype.serializeBinary = function() {
-  var writer = new jspb.BinaryWriter();
-  proto.SeasonMetadata.serializeBinaryToWriter(this, writer);
-  return writer.getResultBuffer();
-};
-
-
-/**
- * Serializes the given message to binary data (in protobuf wire
- * format), writing to the given BinaryWriter.
- * @param {!proto.SeasonMetadata} message
- * @param {!jspb.BinaryWriter} writer
- * @suppress {unusedLocalVariables} f is only used for nested messages
- */
-proto.SeasonMetadata.serializeBinaryToWriter = function(message, writer) {
-  var f = undefined;
-  f = /** @type {string} */ (jspb.Message.getField(message, 1));
-  if (f != null) {
-    writer.writeString(
-      1,
-      f
-    );
-  }
-  f = /** @type {string} */ (jspb.Message.getField(message, 2));
-  if (f != null) {
-    writer.writeString(
-      2,
-      f
-    );
-  }
-  f = /** @type {number} */ (jspb.Message.getField(message, 3));
-  if (f != null) {
-    writer.writeUint32(
-      3,
-      f
-    );
-  }
-  f = message.getPersonsList();
-  if (f.length > 0) {
-    writer.writePackedUint64(
-      4,
-      f
-    );
-  }
-};
-
-
-/**
- * optional string title = 1;
- * @return {string}
- */
-proto.SeasonMetadata.prototype.getTitle = function() {
-  return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 1, ""));
-};
-
-
-/**
- * @param {string} value
- * @return {!proto.SeasonMetadata} returns this
- */
-proto.SeasonMetadata.prototype.setTitle = function(value) {
-  return jspb.Message.setField(this, 1, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.SeasonMetadata} returns this
- */
-proto.SeasonMetadata.prototype.clearTitle = function() {
-  return jspb.Message.setField(this, 1, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.SeasonMetadata.prototype.hasTitle = function() {
-  return jspb.Message.getField(this, 1) != null;
-};
-
-
-/**
- * optional string description = 2;
- * @return {string}
- */
-proto.SeasonMetadata.prototype.getDescription = function() {
-  return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 2, ""));
-};
-
-
-/**
- * @param {string} value
- * @return {!proto.SeasonMetadata} returns this
- */
-proto.SeasonMetadata.prototype.setDescription = function(value) {
-  return jspb.Message.setField(this, 2, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.SeasonMetadata} returns this
- */
-proto.SeasonMetadata.prototype.clearDescription = function() {
-  return jspb.Message.setField(this, 2, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.SeasonMetadata.prototype.hasDescription = function() {
-  return jspb.Message.getField(this, 2) != null;
-};
-
-
-/**
- * optional uint32 cover_photo = 3;
- * @return {number}
- */
-proto.SeasonMetadata.prototype.getCoverPhoto = function() {
-  return /** @type {number} */ (jspb.Message.getFieldWithDefault(this, 3, 0));
-};
-
-
-/**
- * @param {number} value
- * @return {!proto.SeasonMetadata} returns this
- */
-proto.SeasonMetadata.prototype.setCoverPhoto = function(value) {
-  return jspb.Message.setField(this, 3, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.SeasonMetadata} returns this
- */
-proto.SeasonMetadata.prototype.clearCoverPhoto = function() {
-  return jspb.Message.setField(this, 3, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.SeasonMetadata.prototype.hasCoverPhoto = function() {
-  return jspb.Message.getField(this, 3) != null;
-};
-
-
-/**
- * repeated uint64 persons = 4;
- * @return {!Array<number>}
- */
-proto.SeasonMetadata.prototype.getPersonsList = function() {
-  return /** @type {!Array<number>} */ (jspb.Message.getRepeatedField(this, 4));
-};
-
-
-/**
- * @param {!Array<number>} value
- * @return {!proto.SeasonMetadata} returns this
- */
-proto.SeasonMetadata.prototype.setPersonsList = function(value) {
-  return jspb.Message.setField(this, 4, value || []);
-};
-
-
-/**
- * @param {number} value
- * @param {number=} opt_index
- * @return {!proto.SeasonMetadata} returns this
- */
-proto.SeasonMetadata.prototype.addPersons = function(value, opt_index) {
-  return jspb.Message.addToRepeatedField(this, 4, value, opt_index);
-};
-
-
-/**
- * Clears the list making it empty but non-null.
- * @return {!proto.SeasonMetadata} returns this
- */
-proto.SeasonMetadata.prototype.clearPersonsList = function() {
-  return this.setPersonsList([]);
-};
-
-
-goog.object.extend(exports, proto);

+ 0 - 235
content-metadata-protobuf/compiled/proto/Video_pb.d.ts

@@ -1,235 +0,0 @@
-// package: 
-// file: proto/Video.proto
-
-import * as jspb from "google-protobuf";
-
-export class PublishedBeforeJoystream extends jspb.Message {
-  hasIsPublished(): boolean;
-  clearIsPublished(): void;
-  getIsPublished(): boolean | undefined;
-  setIsPublished(value: boolean): void;
-
-  hasDate(): boolean;
-  clearDate(): void;
-  getDate(): string | undefined;
-  setDate(value: string): void;
-
-  serializeBinary(): Uint8Array;
-  toObject(includeInstance?: boolean): PublishedBeforeJoystream.AsObject;
-  static toObject(includeInstance: boolean, msg: PublishedBeforeJoystream): PublishedBeforeJoystream.AsObject;
-  static extensions: {[key: number]: jspb.ExtensionFieldInfo<jspb.Message>};
-  static extensionsBinary: {[key: number]: jspb.ExtensionFieldBinaryInfo<jspb.Message>};
-  static serializeBinaryToWriter(message: PublishedBeforeJoystream, writer: jspb.BinaryWriter): void;
-  static deserializeBinary(bytes: Uint8Array): PublishedBeforeJoystream;
-  static deserializeBinaryFromReader(message: PublishedBeforeJoystream, reader: jspb.BinaryReader): PublishedBeforeJoystream;
-}
-
-export namespace PublishedBeforeJoystream {
-  export type AsObject = {
-    isPublished?: boolean,
-    date?: string,
-  }
-}
-
-export class License extends jspb.Message {
-  hasCode(): boolean;
-  clearCode(): void;
-  getCode(): number | undefined;
-  setCode(value: number): void;
-
-  hasAttribution(): boolean;
-  clearAttribution(): void;
-  getAttribution(): string | undefined;
-  setAttribution(value: string): void;
-
-  hasCustomText(): boolean;
-  clearCustomText(): void;
-  getCustomText(): string | undefined;
-  setCustomText(value: string): void;
-
-  serializeBinary(): Uint8Array;
-  toObject(includeInstance?: boolean): License.AsObject;
-  static toObject(includeInstance: boolean, msg: License): License.AsObject;
-  static extensions: {[key: number]: jspb.ExtensionFieldInfo<jspb.Message>};
-  static extensionsBinary: {[key: number]: jspb.ExtensionFieldBinaryInfo<jspb.Message>};
-  static serializeBinaryToWriter(message: License, writer: jspb.BinaryWriter): void;
-  static deserializeBinary(bytes: Uint8Array): License;
-  static deserializeBinaryFromReader(message: License, reader: jspb.BinaryReader): License;
-}
-
-export namespace License {
-  export type AsObject = {
-    code?: number,
-    attribution?: string,
-    customText?: string,
-  }
-}
-
-export class MediaType extends jspb.Message {
-  hasCodecName(): boolean;
-  clearCodecName(): void;
-  getCodecName(): string | undefined;
-  setCodecName(value: string): void;
-
-  hasContainer(): boolean;
-  clearContainer(): void;
-  getContainer(): string | undefined;
-  setContainer(value: string): void;
-
-  hasMimeMediaType(): boolean;
-  clearMimeMediaType(): void;
-  getMimeMediaType(): string | undefined;
-  setMimeMediaType(value: string): void;
-
-  serializeBinary(): Uint8Array;
-  toObject(includeInstance?: boolean): MediaType.AsObject;
-  static toObject(includeInstance: boolean, msg: MediaType): MediaType.AsObject;
-  static extensions: {[key: number]: jspb.ExtensionFieldInfo<jspb.Message>};
-  static extensionsBinary: {[key: number]: jspb.ExtensionFieldBinaryInfo<jspb.Message>};
-  static serializeBinaryToWriter(message: MediaType, writer: jspb.BinaryWriter): void;
-  static deserializeBinary(bytes: Uint8Array): MediaType;
-  static deserializeBinaryFromReader(message: MediaType, reader: jspb.BinaryReader): MediaType;
-}
-
-export namespace MediaType {
-  export type AsObject = {
-    codecName?: string,
-    container?: string,
-    mimeMediaType?: string,
-  }
-}
-
-export class VideoMetadata extends jspb.Message {
-  hasTitle(): boolean;
-  clearTitle(): void;
-  getTitle(): string | undefined;
-  setTitle(value: string): void;
-
-  hasDescription(): boolean;
-  clearDescription(): void;
-  getDescription(): string | undefined;
-  setDescription(value: string): void;
-
-  hasVideo(): boolean;
-  clearVideo(): void;
-  getVideo(): number | undefined;
-  setVideo(value: number): void;
-
-  hasThumbnailPhoto(): boolean;
-  clearThumbnailPhoto(): void;
-  getThumbnailPhoto(): number | undefined;
-  setThumbnailPhoto(value: number): void;
-
-  hasDuration(): boolean;
-  clearDuration(): void;
-  getDuration(): number | undefined;
-  setDuration(value: number): void;
-
-  hasMediaPixelHeight(): boolean;
-  clearMediaPixelHeight(): void;
-  getMediaPixelHeight(): number | undefined;
-  setMediaPixelHeight(value: number): void;
-
-  hasMediaPixelWidth(): boolean;
-  clearMediaPixelWidth(): void;
-  getMediaPixelWidth(): number | undefined;
-  setMediaPixelWidth(value: number): void;
-
-  hasMediaType(): boolean;
-  clearMediaType(): void;
-  getMediaType(): MediaType | undefined;
-  setMediaType(value?: MediaType): void;
-
-  hasLanguage(): boolean;
-  clearLanguage(): void;
-  getLanguage(): string | undefined;
-  setLanguage(value: string): void;
-
-  hasLicense(): boolean;
-  clearLicense(): void;
-  getLicense(): License | undefined;
-  setLicense(value?: License): void;
-
-  hasPublishedBeforeJoystream(): boolean;
-  clearPublishedBeforeJoystream(): void;
-  getPublishedBeforeJoystream(): PublishedBeforeJoystream | undefined;
-  setPublishedBeforeJoystream(value?: PublishedBeforeJoystream): void;
-
-  hasHasMarketing(): boolean;
-  clearHasMarketing(): void;
-  getHasMarketing(): boolean | undefined;
-  setHasMarketing(value: boolean): void;
-
-  hasIsPublic(): boolean;
-  clearIsPublic(): void;
-  getIsPublic(): boolean | undefined;
-  setIsPublic(value: boolean): void;
-
-  hasIsExplicit(): boolean;
-  clearIsExplicit(): void;
-  getIsExplicit(): boolean | undefined;
-  setIsExplicit(value: boolean): void;
-
-  clearPersonsList(): void;
-  getPersonsList(): Array<number>;
-  setPersonsList(value: Array<number>): void;
-  addPersons(value: number, index?: number): number;
-
-  hasCategory(): boolean;
-  clearCategory(): void;
-  getCategory(): number | undefined;
-  setCategory(value: number): void;
-
-  serializeBinary(): Uint8Array;
-  toObject(includeInstance?: boolean): VideoMetadata.AsObject;
-  static toObject(includeInstance: boolean, msg: VideoMetadata): VideoMetadata.AsObject;
-  static extensions: {[key: number]: jspb.ExtensionFieldInfo<jspb.Message>};
-  static extensionsBinary: {[key: number]: jspb.ExtensionFieldBinaryInfo<jspb.Message>};
-  static serializeBinaryToWriter(message: VideoMetadata, writer: jspb.BinaryWriter): void;
-  static deserializeBinary(bytes: Uint8Array): VideoMetadata;
-  static deserializeBinaryFromReader(message: VideoMetadata, reader: jspb.BinaryReader): VideoMetadata;
-}
-
-export namespace VideoMetadata {
-  export type AsObject = {
-    title?: string,
-    description?: string,
-    video?: number,
-    thumbnailPhoto?: number,
-    duration?: number,
-    mediaPixelHeight?: number,
-    mediaPixelWidth?: number,
-    mediaType?: MediaType.AsObject,
-    language?: string,
-    license?: License.AsObject,
-    publishedBeforeJoystream?: PublishedBeforeJoystream.AsObject,
-    hasMarketing?: boolean,
-    isPublic?: boolean,
-    isExplicit?: boolean,
-    personsList: Array<number>,
-    category?: number,
-  }
-}
-
-export class VideoCategoryMetadata extends jspb.Message {
-  hasName(): boolean;
-  clearName(): void;
-  getName(): string | undefined;
-  setName(value: string): void;
-
-  serializeBinary(): Uint8Array;
-  toObject(includeInstance?: boolean): VideoCategoryMetadata.AsObject;
-  static toObject(includeInstance: boolean, msg: VideoCategoryMetadata): VideoCategoryMetadata.AsObject;
-  static extensions: {[key: number]: jspb.ExtensionFieldInfo<jspb.Message>};
-  static extensionsBinary: {[key: number]: jspb.ExtensionFieldBinaryInfo<jspb.Message>};
-  static serializeBinaryToWriter(message: VideoCategoryMetadata, writer: jspb.BinaryWriter): void;
-  static deserializeBinary(bytes: Uint8Array): VideoCategoryMetadata;
-  static deserializeBinaryFromReader(message: VideoCategoryMetadata, reader: jspb.BinaryReader): VideoCategoryMetadata;
-}
-
-export namespace VideoCategoryMetadata {
-  export type AsObject = {
-    name?: string,
-  }
-}
-

+ 0 - 1847
content-metadata-protobuf/compiled/proto/Video_pb.js

@@ -1,1847 +0,0 @@
-// source: proto/Video.proto
-/**
- * @fileoverview
- * @enhanceable
- * @suppress {missingRequire} reports error on implicit type usages.
- * @suppress {messageConventions} JS Compiler reports an error if a variable or
- *     field starts with 'MSG_' and isn't a translatable message.
- * @public
- */
-// GENERATED CODE -- DO NOT EDIT!
-/* eslint-disable */
-// @ts-nocheck
-
-var jspb = require('google-protobuf');
-var goog = jspb;
-var global = Function('return this')();
-
-goog.exportSymbol('proto.License', null, global);
-goog.exportSymbol('proto.MediaType', null, global);
-goog.exportSymbol('proto.PublishedBeforeJoystream', null, global);
-goog.exportSymbol('proto.VideoCategoryMetadata', null, global);
-goog.exportSymbol('proto.VideoMetadata', null, global);
-/**
- * Generated by JsPbCodeGenerator.
- * @param {Array=} opt_data Optional initial data array, typically from a
- * server response, or constructed directly in Javascript. The array is used
- * in place and becomes part of the constructed object. It is not cloned.
- * If no data is provided, the constructed object will be empty, but still
- * valid.
- * @extends {jspb.Message}
- * @constructor
- */
-proto.PublishedBeforeJoystream = function(opt_data) {
-  jspb.Message.initialize(this, opt_data, 0, -1, null, null);
-};
-goog.inherits(proto.PublishedBeforeJoystream, jspb.Message);
-if (goog.DEBUG && !COMPILED) {
-  /**
-   * @public
-   * @override
-   */
-  proto.PublishedBeforeJoystream.displayName = 'proto.PublishedBeforeJoystream';
-}
-/**
- * Generated by JsPbCodeGenerator.
- * @param {Array=} opt_data Optional initial data array, typically from a
- * server response, or constructed directly in Javascript. The array is used
- * in place and becomes part of the constructed object. It is not cloned.
- * If no data is provided, the constructed object will be empty, but still
- * valid.
- * @extends {jspb.Message}
- * @constructor
- */
-proto.License = function(opt_data) {
-  jspb.Message.initialize(this, opt_data, 0, -1, null, null);
-};
-goog.inherits(proto.License, jspb.Message);
-if (goog.DEBUG && !COMPILED) {
-  /**
-   * @public
-   * @override
-   */
-  proto.License.displayName = 'proto.License';
-}
-/**
- * Generated by JsPbCodeGenerator.
- * @param {Array=} opt_data Optional initial data array, typically from a
- * server response, or constructed directly in Javascript. The array is used
- * in place and becomes part of the constructed object. It is not cloned.
- * If no data is provided, the constructed object will be empty, but still
- * valid.
- * @extends {jspb.Message}
- * @constructor
- */
-proto.MediaType = function(opt_data) {
-  jspb.Message.initialize(this, opt_data, 0, -1, null, null);
-};
-goog.inherits(proto.MediaType, jspb.Message);
-if (goog.DEBUG && !COMPILED) {
-  /**
-   * @public
-   * @override
-   */
-  proto.MediaType.displayName = 'proto.MediaType';
-}
-/**
- * Generated by JsPbCodeGenerator.
- * @param {Array=} opt_data Optional initial data array, typically from a
- * server response, or constructed directly in Javascript. The array is used
- * in place and becomes part of the constructed object. It is not cloned.
- * If no data is provided, the constructed object will be empty, but still
- * valid.
- * @extends {jspb.Message}
- * @constructor
- */
-proto.VideoMetadata = function(opt_data) {
-  jspb.Message.initialize(this, opt_data, 0, -1, proto.VideoMetadata.repeatedFields_, null);
-};
-goog.inherits(proto.VideoMetadata, jspb.Message);
-if (goog.DEBUG && !COMPILED) {
-  /**
-   * @public
-   * @override
-   */
-  proto.VideoMetadata.displayName = 'proto.VideoMetadata';
-}
-/**
- * Generated by JsPbCodeGenerator.
- * @param {Array=} opt_data Optional initial data array, typically from a
- * server response, or constructed directly in Javascript. The array is used
- * in place and becomes part of the constructed object. It is not cloned.
- * If no data is provided, the constructed object will be empty, but still
- * valid.
- * @extends {jspb.Message}
- * @constructor
- */
-proto.VideoCategoryMetadata = function(opt_data) {
-  jspb.Message.initialize(this, opt_data, 0, -1, null, null);
-};
-goog.inherits(proto.VideoCategoryMetadata, jspb.Message);
-if (goog.DEBUG && !COMPILED) {
-  /**
-   * @public
-   * @override
-   */
-  proto.VideoCategoryMetadata.displayName = 'proto.VideoCategoryMetadata';
-}
-
-
-
-if (jspb.Message.GENERATE_TO_OBJECT) {
-/**
- * Creates an object representation of this proto.
- * Field names that are reserved in JavaScript and will be renamed to pb_name.
- * Optional fields that are not set will be set to undefined.
- * To access a reserved field use, foo.pb_<name>, eg, foo.pb_default.
- * For the list of reserved names please see:
- *     net/proto2/compiler/js/internal/generator.cc#kKeyword.
- * @param {boolean=} opt_includeInstance Deprecated. whether to include the
- *     JSPB instance for transitional soy proto support:
- *     http://goto/soy-param-migration
- * @return {!Object}
- */
-proto.PublishedBeforeJoystream.prototype.toObject = function(opt_includeInstance) {
-  return proto.PublishedBeforeJoystream.toObject(opt_includeInstance, this);
-};
-
-
-/**
- * Static version of the {@see toObject} method.
- * @param {boolean|undefined} includeInstance Deprecated. Whether to include
- *     the JSPB instance for transitional soy proto support:
- *     http://goto/soy-param-migration
- * @param {!proto.PublishedBeforeJoystream} msg The msg instance to transform.
- * @return {!Object}
- * @suppress {unusedLocalVariables} f is only used for nested messages
- */
-proto.PublishedBeforeJoystream.toObject = function(includeInstance, msg) {
-  var f, obj = {
-    isPublished: (f = jspb.Message.getBooleanField(msg, 1)) == null ? undefined : f,
-    date: (f = jspb.Message.getField(msg, 2)) == null ? undefined : f
-  };
-
-  if (includeInstance) {
-    obj.$jspbMessageInstance = msg;
-  }
-  return obj;
-};
-}
-
-
-/**
- * Deserializes binary data (in protobuf wire format).
- * @param {jspb.ByteSource} bytes The bytes to deserialize.
- * @return {!proto.PublishedBeforeJoystream}
- */
-proto.PublishedBeforeJoystream.deserializeBinary = function(bytes) {
-  var reader = new jspb.BinaryReader(bytes);
-  var msg = new proto.PublishedBeforeJoystream;
-  return proto.PublishedBeforeJoystream.deserializeBinaryFromReader(msg, reader);
-};
-
-
-/**
- * Deserializes binary data (in protobuf wire format) from the
- * given reader into the given message object.
- * @param {!proto.PublishedBeforeJoystream} msg The message object to deserialize into.
- * @param {!jspb.BinaryReader} reader The BinaryReader to use.
- * @return {!proto.PublishedBeforeJoystream}
- */
-proto.PublishedBeforeJoystream.deserializeBinaryFromReader = function(msg, reader) {
-  while (reader.nextField()) {
-    if (reader.isEndGroup()) {
-      break;
-    }
-    var field = reader.getFieldNumber();
-    switch (field) {
-    case 1:
-      var value = /** @type {boolean} */ (reader.readBool());
-      msg.setIsPublished(value);
-      break;
-    case 2:
-      var value = /** @type {string} */ (reader.readString());
-      msg.setDate(value);
-      break;
-    default:
-      reader.skipField();
-      break;
-    }
-  }
-  return msg;
-};
-
-
-/**
- * Serializes the message to binary data (in protobuf wire format).
- * @return {!Uint8Array}
- */
-proto.PublishedBeforeJoystream.prototype.serializeBinary = function() {
-  var writer = new jspb.BinaryWriter();
-  proto.PublishedBeforeJoystream.serializeBinaryToWriter(this, writer);
-  return writer.getResultBuffer();
-};
-
-
-/**
- * Serializes the given message to binary data (in protobuf wire
- * format), writing to the given BinaryWriter.
- * @param {!proto.PublishedBeforeJoystream} message
- * @param {!jspb.BinaryWriter} writer
- * @suppress {unusedLocalVariables} f is only used for nested messages
- */
-proto.PublishedBeforeJoystream.serializeBinaryToWriter = function(message, writer) {
-  var f = undefined;
-  f = /** @type {boolean} */ (jspb.Message.getField(message, 1));
-  if (f != null) {
-    writer.writeBool(
-      1,
-      f
-    );
-  }
-  f = /** @type {string} */ (jspb.Message.getField(message, 2));
-  if (f != null) {
-    writer.writeString(
-      2,
-      f
-    );
-  }
-};
-
-
-/**
- * optional bool is_published = 1;
- * @return {boolean}
- */
-proto.PublishedBeforeJoystream.prototype.getIsPublished = function() {
-  return /** @type {boolean} */ (jspb.Message.getBooleanFieldWithDefault(this, 1, false));
-};
-
-
-/**
- * @param {boolean} value
- * @return {!proto.PublishedBeforeJoystream} returns this
- */
-proto.PublishedBeforeJoystream.prototype.setIsPublished = function(value) {
-  return jspb.Message.setField(this, 1, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.PublishedBeforeJoystream} returns this
- */
-proto.PublishedBeforeJoystream.prototype.clearIsPublished = function() {
-  return jspb.Message.setField(this, 1, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.PublishedBeforeJoystream.prototype.hasIsPublished = function() {
-  return jspb.Message.getField(this, 1) != null;
-};
-
-
-/**
- * optional string date = 2;
- * @return {string}
- */
-proto.PublishedBeforeJoystream.prototype.getDate = function() {
-  return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 2, ""));
-};
-
-
-/**
- * @param {string} value
- * @return {!proto.PublishedBeforeJoystream} returns this
- */
-proto.PublishedBeforeJoystream.prototype.setDate = function(value) {
-  return jspb.Message.setField(this, 2, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.PublishedBeforeJoystream} returns this
- */
-proto.PublishedBeforeJoystream.prototype.clearDate = function() {
-  return jspb.Message.setField(this, 2, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.PublishedBeforeJoystream.prototype.hasDate = function() {
-  return jspb.Message.getField(this, 2) != null;
-};
-
-
-
-
-
-if (jspb.Message.GENERATE_TO_OBJECT) {
-/**
- * Creates an object representation of this proto.
- * Field names that are reserved in JavaScript and will be renamed to pb_name.
- * Optional fields that are not set will be set to undefined.
- * To access a reserved field use, foo.pb_<name>, eg, foo.pb_default.
- * For the list of reserved names please see:
- *     net/proto2/compiler/js/internal/generator.cc#kKeyword.
- * @param {boolean=} opt_includeInstance Deprecated. whether to include the
- *     JSPB instance for transitional soy proto support:
- *     http://goto/soy-param-migration
- * @return {!Object}
- */
-proto.License.prototype.toObject = function(opt_includeInstance) {
-  return proto.License.toObject(opt_includeInstance, this);
-};
-
-
-/**
- * Static version of the {@see toObject} method.
- * @param {boolean|undefined} includeInstance Deprecated. Whether to include
- *     the JSPB instance for transitional soy proto support:
- *     http://goto/soy-param-migration
- * @param {!proto.License} msg The msg instance to transform.
- * @return {!Object}
- * @suppress {unusedLocalVariables} f is only used for nested messages
- */
-proto.License.toObject = function(includeInstance, msg) {
-  var f, obj = {
-    code: (f = jspb.Message.getField(msg, 1)) == null ? undefined : f,
-    attribution: (f = jspb.Message.getField(msg, 2)) == null ? undefined : f,
-    customText: (f = jspb.Message.getField(msg, 3)) == null ? undefined : f
-  };
-
-  if (includeInstance) {
-    obj.$jspbMessageInstance = msg;
-  }
-  return obj;
-};
-}
-
-
-/**
- * Deserializes binary data (in protobuf wire format).
- * @param {jspb.ByteSource} bytes The bytes to deserialize.
- * @return {!proto.License}
- */
-proto.License.deserializeBinary = function(bytes) {
-  var reader = new jspb.BinaryReader(bytes);
-  var msg = new proto.License;
-  return proto.License.deserializeBinaryFromReader(msg, reader);
-};
-
-
-/**
- * Deserializes binary data (in protobuf wire format) from the
- * given reader into the given message object.
- * @param {!proto.License} msg The message object to deserialize into.
- * @param {!jspb.BinaryReader} reader The BinaryReader to use.
- * @return {!proto.License}
- */
-proto.License.deserializeBinaryFromReader = function(msg, reader) {
-  while (reader.nextField()) {
-    if (reader.isEndGroup()) {
-      break;
-    }
-    var field = reader.getFieldNumber();
-    switch (field) {
-    case 1:
-      var value = /** @type {number} */ (reader.readUint32());
-      msg.setCode(value);
-      break;
-    case 2:
-      var value = /** @type {string} */ (reader.readString());
-      msg.setAttribution(value);
-      break;
-    case 3:
-      var value = /** @type {string} */ (reader.readString());
-      msg.setCustomText(value);
-      break;
-    default:
-      reader.skipField();
-      break;
-    }
-  }
-  return msg;
-};
-
-
-/**
- * Serializes the message to binary data (in protobuf wire format).
- * @return {!Uint8Array}
- */
-proto.License.prototype.serializeBinary = function() {
-  var writer = new jspb.BinaryWriter();
-  proto.License.serializeBinaryToWriter(this, writer);
-  return writer.getResultBuffer();
-};
-
-
-/**
- * Serializes the given message to binary data (in protobuf wire
- * format), writing to the given BinaryWriter.
- * @param {!proto.License} message
- * @param {!jspb.BinaryWriter} writer
- * @suppress {unusedLocalVariables} f is only used for nested messages
- */
-proto.License.serializeBinaryToWriter = function(message, writer) {
-  var f = undefined;
-  f = /** @type {number} */ (jspb.Message.getField(message, 1));
-  if (f != null) {
-    writer.writeUint32(
-      1,
-      f
-    );
-  }
-  f = /** @type {string} */ (jspb.Message.getField(message, 2));
-  if (f != null) {
-    writer.writeString(
-      2,
-      f
-    );
-  }
-  f = /** @type {string} */ (jspb.Message.getField(message, 3));
-  if (f != null) {
-    writer.writeString(
-      3,
-      f
-    );
-  }
-};
-
-
-/**
- * optional uint32 code = 1;
- * @return {number}
- */
-proto.License.prototype.getCode = function() {
-  return /** @type {number} */ (jspb.Message.getFieldWithDefault(this, 1, 0));
-};
-
-
-/**
- * @param {number} value
- * @return {!proto.License} returns this
- */
-proto.License.prototype.setCode = function(value) {
-  return jspb.Message.setField(this, 1, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.License} returns this
- */
-proto.License.prototype.clearCode = function() {
-  return jspb.Message.setField(this, 1, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.License.prototype.hasCode = function() {
-  return jspb.Message.getField(this, 1) != null;
-};
-
-
-/**
- * optional string attribution = 2;
- * @return {string}
- */
-proto.License.prototype.getAttribution = function() {
-  return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 2, ""));
-};
-
-
-/**
- * @param {string} value
- * @return {!proto.License} returns this
- */
-proto.License.prototype.setAttribution = function(value) {
-  return jspb.Message.setField(this, 2, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.License} returns this
- */
-proto.License.prototype.clearAttribution = function() {
-  return jspb.Message.setField(this, 2, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.License.prototype.hasAttribution = function() {
-  return jspb.Message.getField(this, 2) != null;
-};
-
-
-/**
- * optional string custom_text = 3;
- * @return {string}
- */
-proto.License.prototype.getCustomText = function() {
-  return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 3, ""));
-};
-
-
-/**
- * @param {string} value
- * @return {!proto.License} returns this
- */
-proto.License.prototype.setCustomText = function(value) {
-  return jspb.Message.setField(this, 3, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.License} returns this
- */
-proto.License.prototype.clearCustomText = function() {
-  return jspb.Message.setField(this, 3, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.License.prototype.hasCustomText = function() {
-  return jspb.Message.getField(this, 3) != null;
-};
-
-
-
-
-
-if (jspb.Message.GENERATE_TO_OBJECT) {
-/**
- * Creates an object representation of this proto.
- * Field names that are reserved in JavaScript and will be renamed to pb_name.
- * Optional fields that are not set will be set to undefined.
- * To access a reserved field use, foo.pb_<name>, eg, foo.pb_default.
- * For the list of reserved names please see:
- *     net/proto2/compiler/js/internal/generator.cc#kKeyword.
- * @param {boolean=} opt_includeInstance Deprecated. whether to include the
- *     JSPB instance for transitional soy proto support:
- *     http://goto/soy-param-migration
- * @return {!Object}
- */
-proto.MediaType.prototype.toObject = function(opt_includeInstance) {
-  return proto.MediaType.toObject(opt_includeInstance, this);
-};
-
-
-/**
- * Static version of the {@see toObject} method.
- * @param {boolean|undefined} includeInstance Deprecated. Whether to include
- *     the JSPB instance for transitional soy proto support:
- *     http://goto/soy-param-migration
- * @param {!proto.MediaType} msg The msg instance to transform.
- * @return {!Object}
- * @suppress {unusedLocalVariables} f is only used for nested messages
- */
-proto.MediaType.toObject = function(includeInstance, msg) {
-  var f, obj = {
-    codecName: (f = jspb.Message.getField(msg, 1)) == null ? undefined : f,
-    container: (f = jspb.Message.getField(msg, 2)) == null ? undefined : f,
-    mimeMediaType: (f = jspb.Message.getField(msg, 3)) == null ? undefined : f
-  };
-
-  if (includeInstance) {
-    obj.$jspbMessageInstance = msg;
-  }
-  return obj;
-};
-}
-
-
-/**
- * Deserializes binary data (in protobuf wire format).
- * @param {jspb.ByteSource} bytes The bytes to deserialize.
- * @return {!proto.MediaType}
- */
-proto.MediaType.deserializeBinary = function(bytes) {
-  var reader = new jspb.BinaryReader(bytes);
-  var msg = new proto.MediaType;
-  return proto.MediaType.deserializeBinaryFromReader(msg, reader);
-};
-
-
-/**
- * Deserializes binary data (in protobuf wire format) from the
- * given reader into the given message object.
- * @param {!proto.MediaType} msg The message object to deserialize into.
- * @param {!jspb.BinaryReader} reader The BinaryReader to use.
- * @return {!proto.MediaType}
- */
-proto.MediaType.deserializeBinaryFromReader = function(msg, reader) {
-  while (reader.nextField()) {
-    if (reader.isEndGroup()) {
-      break;
-    }
-    var field = reader.getFieldNumber();
-    switch (field) {
-    case 1:
-      var value = /** @type {string} */ (reader.readString());
-      msg.setCodecName(value);
-      break;
-    case 2:
-      var value = /** @type {string} */ (reader.readString());
-      msg.setContainer(value);
-      break;
-    case 3:
-      var value = /** @type {string} */ (reader.readString());
-      msg.setMimeMediaType(value);
-      break;
-    default:
-      reader.skipField();
-      break;
-    }
-  }
-  return msg;
-};
-
-
-/**
- * Serializes the message to binary data (in protobuf wire format).
- * @return {!Uint8Array}
- */
-proto.MediaType.prototype.serializeBinary = function() {
-  var writer = new jspb.BinaryWriter();
-  proto.MediaType.serializeBinaryToWriter(this, writer);
-  return writer.getResultBuffer();
-};
-
-
-/**
- * Serializes the given message to binary data (in protobuf wire
- * format), writing to the given BinaryWriter.
- * @param {!proto.MediaType} message
- * @param {!jspb.BinaryWriter} writer
- * @suppress {unusedLocalVariables} f is only used for nested messages
- */
-proto.MediaType.serializeBinaryToWriter = function(message, writer) {
-  var f = undefined;
-  f = /** @type {string} */ (jspb.Message.getField(message, 1));
-  if (f != null) {
-    writer.writeString(
-      1,
-      f
-    );
-  }
-  f = /** @type {string} */ (jspb.Message.getField(message, 2));
-  if (f != null) {
-    writer.writeString(
-      2,
-      f
-    );
-  }
-  f = /** @type {string} */ (jspb.Message.getField(message, 3));
-  if (f != null) {
-    writer.writeString(
-      3,
-      f
-    );
-  }
-};
-
-
-/**
- * optional string codec_name = 1;
- * @return {string}
- */
-proto.MediaType.prototype.getCodecName = function() {
-  return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 1, ""));
-};
-
-
-/**
- * @param {string} value
- * @return {!proto.MediaType} returns this
- */
-proto.MediaType.prototype.setCodecName = function(value) {
-  return jspb.Message.setField(this, 1, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.MediaType} returns this
- */
-proto.MediaType.prototype.clearCodecName = function() {
-  return jspb.Message.setField(this, 1, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.MediaType.prototype.hasCodecName = function() {
-  return jspb.Message.getField(this, 1) != null;
-};
-
-
-/**
- * optional string container = 2;
- * @return {string}
- */
-proto.MediaType.prototype.getContainer = function() {
-  return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 2, ""));
-};
-
-
-/**
- * @param {string} value
- * @return {!proto.MediaType} returns this
- */
-proto.MediaType.prototype.setContainer = function(value) {
-  return jspb.Message.setField(this, 2, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.MediaType} returns this
- */
-proto.MediaType.prototype.clearContainer = function() {
-  return jspb.Message.setField(this, 2, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.MediaType.prototype.hasContainer = function() {
-  return jspb.Message.getField(this, 2) != null;
-};
-
-
-/**
- * optional string mime_media_type = 3;
- * @return {string}
- */
-proto.MediaType.prototype.getMimeMediaType = function() {
-  return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 3, ""));
-};
-
-
-/**
- * @param {string} value
- * @return {!proto.MediaType} returns this
- */
-proto.MediaType.prototype.setMimeMediaType = function(value) {
-  return jspb.Message.setField(this, 3, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.MediaType} returns this
- */
-proto.MediaType.prototype.clearMimeMediaType = function() {
-  return jspb.Message.setField(this, 3, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.MediaType.prototype.hasMimeMediaType = function() {
-  return jspb.Message.getField(this, 3) != null;
-};
-
-
-
-/**
- * List of repeated fields within this message type.
- * @private {!Array<number>}
- * @const
- */
-proto.VideoMetadata.repeatedFields_ = [15];
-
-
-
-if (jspb.Message.GENERATE_TO_OBJECT) {
-/**
- * Creates an object representation of this proto.
- * Field names that are reserved in JavaScript and will be renamed to pb_name.
- * Optional fields that are not set will be set to undefined.
- * To access a reserved field use, foo.pb_<name>, eg, foo.pb_default.
- * For the list of reserved names please see:
- *     net/proto2/compiler/js/internal/generator.cc#kKeyword.
- * @param {boolean=} opt_includeInstance Deprecated. whether to include the
- *     JSPB instance for transitional soy proto support:
- *     http://goto/soy-param-migration
- * @return {!Object}
- */
-proto.VideoMetadata.prototype.toObject = function(opt_includeInstance) {
-  return proto.VideoMetadata.toObject(opt_includeInstance, this);
-};
-
-
-/**
- * Static version of the {@see toObject} method.
- * @param {boolean|undefined} includeInstance Deprecated. Whether to include
- *     the JSPB instance for transitional soy proto support:
- *     http://goto/soy-param-migration
- * @param {!proto.VideoMetadata} msg The msg instance to transform.
- * @return {!Object}
- * @suppress {unusedLocalVariables} f is only used for nested messages
- */
-proto.VideoMetadata.toObject = function(includeInstance, msg) {
-  var f, obj = {
-    title: (f = jspb.Message.getField(msg, 1)) == null ? undefined : f,
-    description: (f = jspb.Message.getField(msg, 2)) == null ? undefined : f,
-    video: (f = jspb.Message.getField(msg, 3)) == null ? undefined : f,
-    thumbnailPhoto: (f = jspb.Message.getField(msg, 4)) == null ? undefined : f,
-    duration: (f = jspb.Message.getField(msg, 5)) == null ? undefined : f,
-    mediaPixelHeight: (f = jspb.Message.getField(msg, 6)) == null ? undefined : f,
-    mediaPixelWidth: (f = jspb.Message.getField(msg, 7)) == null ? undefined : f,
-    mediaType: (f = msg.getMediaType()) && proto.MediaType.toObject(includeInstance, f),
-    language: (f = jspb.Message.getField(msg, 9)) == null ? undefined : f,
-    license: (f = msg.getLicense()) && proto.License.toObject(includeInstance, f),
-    publishedBeforeJoystream: (f = msg.getPublishedBeforeJoystream()) && proto.PublishedBeforeJoystream.toObject(includeInstance, f),
-    hasMarketing: (f = jspb.Message.getBooleanField(msg, 12)) == null ? undefined : f,
-    isPublic: (f = jspb.Message.getBooleanField(msg, 13)) == null ? undefined : f,
-    isExplicit: (f = jspb.Message.getBooleanField(msg, 14)) == null ? undefined : f,
-    personsList: (f = jspb.Message.getRepeatedField(msg, 15)) == null ? undefined : f,
-    category: (f = jspb.Message.getField(msg, 16)) == null ? undefined : f
-  };
-
-  if (includeInstance) {
-    obj.$jspbMessageInstance = msg;
-  }
-  return obj;
-};
-}
-
-
-/**
- * Deserializes binary data (in protobuf wire format).
- * @param {jspb.ByteSource} bytes The bytes to deserialize.
- * @return {!proto.VideoMetadata}
- */
-proto.VideoMetadata.deserializeBinary = function(bytes) {
-  var reader = new jspb.BinaryReader(bytes);
-  var msg = new proto.VideoMetadata;
-  return proto.VideoMetadata.deserializeBinaryFromReader(msg, reader);
-};
-
-
-/**
- * Deserializes binary data (in protobuf wire format) from the
- * given reader into the given message object.
- * @param {!proto.VideoMetadata} msg The message object to deserialize into.
- * @param {!jspb.BinaryReader} reader The BinaryReader to use.
- * @return {!proto.VideoMetadata}
- */
-proto.VideoMetadata.deserializeBinaryFromReader = function(msg, reader) {
-  while (reader.nextField()) {
-    if (reader.isEndGroup()) {
-      break;
-    }
-    var field = reader.getFieldNumber();
-    switch (field) {
-    case 1:
-      var value = /** @type {string} */ (reader.readString());
-      msg.setTitle(value);
-      break;
-    case 2:
-      var value = /** @type {string} */ (reader.readString());
-      msg.setDescription(value);
-      break;
-    case 3:
-      var value = /** @type {number} */ (reader.readUint32());
-      msg.setVideo(value);
-      break;
-    case 4:
-      var value = /** @type {number} */ (reader.readUint32());
-      msg.setThumbnailPhoto(value);
-      break;
-    case 5:
-      var value = /** @type {number} */ (reader.readUint32());
-      msg.setDuration(value);
-      break;
-    case 6:
-      var value = /** @type {number} */ (reader.readUint32());
-      msg.setMediaPixelHeight(value);
-      break;
-    case 7:
-      var value = /** @type {number} */ (reader.readUint32());
-      msg.setMediaPixelWidth(value);
-      break;
-    case 8:
-      var value = new proto.MediaType;
-      reader.readMessage(value,proto.MediaType.deserializeBinaryFromReader);
-      msg.setMediaType(value);
-      break;
-    case 9:
-      var value = /** @type {string} */ (reader.readString());
-      msg.setLanguage(value);
-      break;
-    case 10:
-      var value = new proto.License;
-      reader.readMessage(value,proto.License.deserializeBinaryFromReader);
-      msg.setLicense(value);
-      break;
-    case 11:
-      var value = new proto.PublishedBeforeJoystream;
-      reader.readMessage(value,proto.PublishedBeforeJoystream.deserializeBinaryFromReader);
-      msg.setPublishedBeforeJoystream(value);
-      break;
-    case 12:
-      var value = /** @type {boolean} */ (reader.readBool());
-      msg.setHasMarketing(value);
-      break;
-    case 13:
-      var value = /** @type {boolean} */ (reader.readBool());
-      msg.setIsPublic(value);
-      break;
-    case 14:
-      var value = /** @type {boolean} */ (reader.readBool());
-      msg.setIsExplicit(value);
-      break;
-    case 15:
-      var values = /** @type {!Array<number>} */ (reader.isDelimited() ? reader.readPackedUint64() : [reader.readUint64()]);
-      for (var i = 0; i < values.length; i++) {
-        msg.addPersons(values[i]);
-      }
-      break;
-    case 16:
-      var value = /** @type {number} */ (reader.readUint64());
-      msg.setCategory(value);
-      break;
-    default:
-      reader.skipField();
-      break;
-    }
-  }
-  return msg;
-};
-
-
-/**
- * Serializes the message to binary data (in protobuf wire format).
- * @return {!Uint8Array}
- */
-proto.VideoMetadata.prototype.serializeBinary = function() {
-  var writer = new jspb.BinaryWriter();
-  proto.VideoMetadata.serializeBinaryToWriter(this, writer);
-  return writer.getResultBuffer();
-};
-
-
-/**
- * Serializes the given message to binary data (in protobuf wire
- * format), writing to the given BinaryWriter.
- * @param {!proto.VideoMetadata} message
- * @param {!jspb.BinaryWriter} writer
- * @suppress {unusedLocalVariables} f is only used for nested messages
- */
-proto.VideoMetadata.serializeBinaryToWriter = function(message, writer) {
-  var f = undefined;
-  f = /** @type {string} */ (jspb.Message.getField(message, 1));
-  if (f != null) {
-    writer.writeString(
-      1,
-      f
-    );
-  }
-  f = /** @type {string} */ (jspb.Message.getField(message, 2));
-  if (f != null) {
-    writer.writeString(
-      2,
-      f
-    );
-  }
-  f = /** @type {number} */ (jspb.Message.getField(message, 3));
-  if (f != null) {
-    writer.writeUint32(
-      3,
-      f
-    );
-  }
-  f = /** @type {number} */ (jspb.Message.getField(message, 4));
-  if (f != null) {
-    writer.writeUint32(
-      4,
-      f
-    );
-  }
-  f = /** @type {number} */ (jspb.Message.getField(message, 5));
-  if (f != null) {
-    writer.writeUint32(
-      5,
-      f
-    );
-  }
-  f = /** @type {number} */ (jspb.Message.getField(message, 6));
-  if (f != null) {
-    writer.writeUint32(
-      6,
-      f
-    );
-  }
-  f = /** @type {number} */ (jspb.Message.getField(message, 7));
-  if (f != null) {
-    writer.writeUint32(
-      7,
-      f
-    );
-  }
-  f = message.getMediaType();
-  if (f != null) {
-    writer.writeMessage(
-      8,
-      f,
-      proto.MediaType.serializeBinaryToWriter
-    );
-  }
-  f = /** @type {string} */ (jspb.Message.getField(message, 9));
-  if (f != null) {
-    writer.writeString(
-      9,
-      f
-    );
-  }
-  f = message.getLicense();
-  if (f != null) {
-    writer.writeMessage(
-      10,
-      f,
-      proto.License.serializeBinaryToWriter
-    );
-  }
-  f = message.getPublishedBeforeJoystream();
-  if (f != null) {
-    writer.writeMessage(
-      11,
-      f,
-      proto.PublishedBeforeJoystream.serializeBinaryToWriter
-    );
-  }
-  f = /** @type {boolean} */ (jspb.Message.getField(message, 12));
-  if (f != null) {
-    writer.writeBool(
-      12,
-      f
-    );
-  }
-  f = /** @type {boolean} */ (jspb.Message.getField(message, 13));
-  if (f != null) {
-    writer.writeBool(
-      13,
-      f
-    );
-  }
-  f = /** @type {boolean} */ (jspb.Message.getField(message, 14));
-  if (f != null) {
-    writer.writeBool(
-      14,
-      f
-    );
-  }
-  f = message.getPersonsList();
-  if (f.length > 0) {
-    writer.writePackedUint64(
-      15,
-      f
-    );
-  }
-  f = /** @type {number} */ (jspb.Message.getField(message, 16));
-  if (f != null) {
-    writer.writeUint64(
-      16,
-      f
-    );
-  }
-};
-
-
-/**
- * optional string title = 1;
- * @return {string}
- */
-proto.VideoMetadata.prototype.getTitle = function() {
-  return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 1, ""));
-};
-
-
-/**
- * @param {string} value
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.setTitle = function(value) {
-  return jspb.Message.setField(this, 1, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.clearTitle = function() {
-  return jspb.Message.setField(this, 1, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.VideoMetadata.prototype.hasTitle = function() {
-  return jspb.Message.getField(this, 1) != null;
-};
-
-
-/**
- * optional string description = 2;
- * @return {string}
- */
-proto.VideoMetadata.prototype.getDescription = function() {
-  return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 2, ""));
-};
-
-
-/**
- * @param {string} value
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.setDescription = function(value) {
-  return jspb.Message.setField(this, 2, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.clearDescription = function() {
-  return jspb.Message.setField(this, 2, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.VideoMetadata.prototype.hasDescription = function() {
-  return jspb.Message.getField(this, 2) != null;
-};
-
-
-/**
- * optional uint32 video = 3;
- * @return {number}
- */
-proto.VideoMetadata.prototype.getVideo = function() {
-  return /** @type {number} */ (jspb.Message.getFieldWithDefault(this, 3, 0));
-};
-
-
-/**
- * @param {number} value
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.setVideo = function(value) {
-  return jspb.Message.setField(this, 3, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.clearVideo = function() {
-  return jspb.Message.setField(this, 3, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.VideoMetadata.prototype.hasVideo = function() {
-  return jspb.Message.getField(this, 3) != null;
-};
-
-
-/**
- * optional uint32 thumbnail_photo = 4;
- * @return {number}
- */
-proto.VideoMetadata.prototype.getThumbnailPhoto = function() {
-  return /** @type {number} */ (jspb.Message.getFieldWithDefault(this, 4, 0));
-};
-
-
-/**
- * @param {number} value
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.setThumbnailPhoto = function(value) {
-  return jspb.Message.setField(this, 4, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.clearThumbnailPhoto = function() {
-  return jspb.Message.setField(this, 4, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.VideoMetadata.prototype.hasThumbnailPhoto = function() {
-  return jspb.Message.getField(this, 4) != null;
-};
-
-
-/**
- * optional uint32 duration = 5;
- * @return {number}
- */
-proto.VideoMetadata.prototype.getDuration = function() {
-  return /** @type {number} */ (jspb.Message.getFieldWithDefault(this, 5, 0));
-};
-
-
-/**
- * @param {number} value
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.setDuration = function(value) {
-  return jspb.Message.setField(this, 5, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.clearDuration = function() {
-  return jspb.Message.setField(this, 5, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.VideoMetadata.prototype.hasDuration = function() {
-  return jspb.Message.getField(this, 5) != null;
-};
-
-
-/**
- * optional uint32 media_pixel_height = 6;
- * @return {number}
- */
-proto.VideoMetadata.prototype.getMediaPixelHeight = function() {
-  return /** @type {number} */ (jspb.Message.getFieldWithDefault(this, 6, 0));
-};
-
-
-/**
- * @param {number} value
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.setMediaPixelHeight = function(value) {
-  return jspb.Message.setField(this, 6, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.clearMediaPixelHeight = function() {
-  return jspb.Message.setField(this, 6, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.VideoMetadata.prototype.hasMediaPixelHeight = function() {
-  return jspb.Message.getField(this, 6) != null;
-};
-
-
-/**
- * optional uint32 media_pixel_width = 7;
- * @return {number}
- */
-proto.VideoMetadata.prototype.getMediaPixelWidth = function() {
-  return /** @type {number} */ (jspb.Message.getFieldWithDefault(this, 7, 0));
-};
-
-
-/**
- * @param {number} value
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.setMediaPixelWidth = function(value) {
-  return jspb.Message.setField(this, 7, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.clearMediaPixelWidth = function() {
-  return jspb.Message.setField(this, 7, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.VideoMetadata.prototype.hasMediaPixelWidth = function() {
-  return jspb.Message.getField(this, 7) != null;
-};
-
-
-/**
- * optional MediaType media_type = 8;
- * @return {?proto.MediaType}
- */
-proto.VideoMetadata.prototype.getMediaType = function() {
-  return /** @type{?proto.MediaType} */ (
-    jspb.Message.getWrapperField(this, proto.MediaType, 8));
-};
-
-
-/**
- * @param {?proto.MediaType|undefined} value
- * @return {!proto.VideoMetadata} returns this
-*/
-proto.VideoMetadata.prototype.setMediaType = function(value) {
-  return jspb.Message.setWrapperField(this, 8, value);
-};
-
-
-/**
- * Clears the message field making it undefined.
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.clearMediaType = function() {
-  return this.setMediaType(undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.VideoMetadata.prototype.hasMediaType = function() {
-  return jspb.Message.getField(this, 8) != null;
-};
-
-
-/**
- * optional string language = 9;
- * @return {string}
- */
-proto.VideoMetadata.prototype.getLanguage = function() {
-  return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 9, ""));
-};
-
-
-/**
- * @param {string} value
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.setLanguage = function(value) {
-  return jspb.Message.setField(this, 9, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.clearLanguage = function() {
-  return jspb.Message.setField(this, 9, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.VideoMetadata.prototype.hasLanguage = function() {
-  return jspb.Message.getField(this, 9) != null;
-};
-
-
-/**
- * optional License license = 10;
- * @return {?proto.License}
- */
-proto.VideoMetadata.prototype.getLicense = function() {
-  return /** @type{?proto.License} */ (
-    jspb.Message.getWrapperField(this, proto.License, 10));
-};
-
-
-/**
- * @param {?proto.License|undefined} value
- * @return {!proto.VideoMetadata} returns this
-*/
-proto.VideoMetadata.prototype.setLicense = function(value) {
-  return jspb.Message.setWrapperField(this, 10, value);
-};
-
-
-/**
- * Clears the message field making it undefined.
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.clearLicense = function() {
-  return this.setLicense(undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.VideoMetadata.prototype.hasLicense = function() {
-  return jspb.Message.getField(this, 10) != null;
-};
-
-
-/**
- * optional PublishedBeforeJoystream published_before_joystream = 11;
- * @return {?proto.PublishedBeforeJoystream}
- */
-proto.VideoMetadata.prototype.getPublishedBeforeJoystream = function() {
-  return /** @type{?proto.PublishedBeforeJoystream} */ (
-    jspb.Message.getWrapperField(this, proto.PublishedBeforeJoystream, 11));
-};
-
-
-/**
- * @param {?proto.PublishedBeforeJoystream|undefined} value
- * @return {!proto.VideoMetadata} returns this
-*/
-proto.VideoMetadata.prototype.setPublishedBeforeJoystream = function(value) {
-  return jspb.Message.setWrapperField(this, 11, value);
-};
-
-
-/**
- * Clears the message field making it undefined.
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.clearPublishedBeforeJoystream = function() {
-  return this.setPublishedBeforeJoystream(undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.VideoMetadata.prototype.hasPublishedBeforeJoystream = function() {
-  return jspb.Message.getField(this, 11) != null;
-};
-
-
-/**
- * optional bool has_marketing = 12;
- * @return {boolean}
- */
-proto.VideoMetadata.prototype.getHasMarketing = function() {
-  return /** @type {boolean} */ (jspb.Message.getBooleanFieldWithDefault(this, 12, false));
-};
-
-
-/**
- * @param {boolean} value
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.setHasMarketing = function(value) {
-  return jspb.Message.setField(this, 12, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.clearHasMarketing = function() {
-  return jspb.Message.setField(this, 12, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.VideoMetadata.prototype.hasHasMarketing = function() {
-  return jspb.Message.getField(this, 12) != null;
-};
-
-
-/**
- * optional bool is_public = 13;
- * @return {boolean}
- */
-proto.VideoMetadata.prototype.getIsPublic = function() {
-  return /** @type {boolean} */ (jspb.Message.getBooleanFieldWithDefault(this, 13, false));
-};
-
-
-/**
- * @param {boolean} value
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.setIsPublic = function(value) {
-  return jspb.Message.setField(this, 13, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.clearIsPublic = function() {
-  return jspb.Message.setField(this, 13, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.VideoMetadata.prototype.hasIsPublic = function() {
-  return jspb.Message.getField(this, 13) != null;
-};
-
-
-/**
- * optional bool is_explicit = 14;
- * @return {boolean}
- */
-proto.VideoMetadata.prototype.getIsExplicit = function() {
-  return /** @type {boolean} */ (jspb.Message.getBooleanFieldWithDefault(this, 14, false));
-};
-
-
-/**
- * @param {boolean} value
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.setIsExplicit = function(value) {
-  return jspb.Message.setField(this, 14, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.clearIsExplicit = function() {
-  return jspb.Message.setField(this, 14, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.VideoMetadata.prototype.hasIsExplicit = function() {
-  return jspb.Message.getField(this, 14) != null;
-};
-
-
-/**
- * repeated uint64 persons = 15;
- * @return {!Array<number>}
- */
-proto.VideoMetadata.prototype.getPersonsList = function() {
-  return /** @type {!Array<number>} */ (jspb.Message.getRepeatedField(this, 15));
-};
-
-
-/**
- * @param {!Array<number>} value
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.setPersonsList = function(value) {
-  return jspb.Message.setField(this, 15, value || []);
-};
-
-
-/**
- * @param {number} value
- * @param {number=} opt_index
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.addPersons = function(value, opt_index) {
-  return jspb.Message.addToRepeatedField(this, 15, value, opt_index);
-};
-
-
-/**
- * Clears the list making it empty but non-null.
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.clearPersonsList = function() {
-  return this.setPersonsList([]);
-};
-
-
-/**
- * optional uint64 category = 16;
- * @return {number}
- */
-proto.VideoMetadata.prototype.getCategory = function() {
-  return /** @type {number} */ (jspb.Message.getFieldWithDefault(this, 16, 0));
-};
-
-
-/**
- * @param {number} value
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.setCategory = function(value) {
-  return jspb.Message.setField(this, 16, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.VideoMetadata} returns this
- */
-proto.VideoMetadata.prototype.clearCategory = function() {
-  return jspb.Message.setField(this, 16, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.VideoMetadata.prototype.hasCategory = function() {
-  return jspb.Message.getField(this, 16) != null;
-};
-
-
-
-
-
-if (jspb.Message.GENERATE_TO_OBJECT) {
-/**
- * Creates an object representation of this proto.
- * Field names that are reserved in JavaScript and will be renamed to pb_name.
- * Optional fields that are not set will be set to undefined.
- * To access a reserved field use, foo.pb_<name>, eg, foo.pb_default.
- * For the list of reserved names please see:
- *     net/proto2/compiler/js/internal/generator.cc#kKeyword.
- * @param {boolean=} opt_includeInstance Deprecated. whether to include the
- *     JSPB instance for transitional soy proto support:
- *     http://goto/soy-param-migration
- * @return {!Object}
- */
-proto.VideoCategoryMetadata.prototype.toObject = function(opt_includeInstance) {
-  return proto.VideoCategoryMetadata.toObject(opt_includeInstance, this);
-};
-
-
-/**
- * Static version of the {@see toObject} method.
- * @param {boolean|undefined} includeInstance Deprecated. Whether to include
- *     the JSPB instance for transitional soy proto support:
- *     http://goto/soy-param-migration
- * @param {!proto.VideoCategoryMetadata} msg The msg instance to transform.
- * @return {!Object}
- * @suppress {unusedLocalVariables} f is only used for nested messages
- */
-proto.VideoCategoryMetadata.toObject = function(includeInstance, msg) {
-  var f, obj = {
-    name: (f = jspb.Message.getField(msg, 1)) == null ? undefined : f
-  };
-
-  if (includeInstance) {
-    obj.$jspbMessageInstance = msg;
-  }
-  return obj;
-};
-}
-
-
-/**
- * Deserializes binary data (in protobuf wire format).
- * @param {jspb.ByteSource} bytes The bytes to deserialize.
- * @return {!proto.VideoCategoryMetadata}
- */
-proto.VideoCategoryMetadata.deserializeBinary = function(bytes) {
-  var reader = new jspb.BinaryReader(bytes);
-  var msg = new proto.VideoCategoryMetadata;
-  return proto.VideoCategoryMetadata.deserializeBinaryFromReader(msg, reader);
-};
-
-
-/**
- * Deserializes binary data (in protobuf wire format) from the
- * given reader into the given message object.
- * @param {!proto.VideoCategoryMetadata} msg The message object to deserialize into.
- * @param {!jspb.BinaryReader} reader The BinaryReader to use.
- * @return {!proto.VideoCategoryMetadata}
- */
-proto.VideoCategoryMetadata.deserializeBinaryFromReader = function(msg, reader) {
-  while (reader.nextField()) {
-    if (reader.isEndGroup()) {
-      break;
-    }
-    var field = reader.getFieldNumber();
-    switch (field) {
-    case 1:
-      var value = /** @type {string} */ (reader.readString());
-      msg.setName(value);
-      break;
-    default:
-      reader.skipField();
-      break;
-    }
-  }
-  return msg;
-};
-
-
-/**
- * Serializes the message to binary data (in protobuf wire format).
- * @return {!Uint8Array}
- */
-proto.VideoCategoryMetadata.prototype.serializeBinary = function() {
-  var writer = new jspb.BinaryWriter();
-  proto.VideoCategoryMetadata.serializeBinaryToWriter(this, writer);
-  return writer.getResultBuffer();
-};
-
-
-/**
- * Serializes the given message to binary data (in protobuf wire
- * format), writing to the given BinaryWriter.
- * @param {!proto.VideoCategoryMetadata} message
- * @param {!jspb.BinaryWriter} writer
- * @suppress {unusedLocalVariables} f is only used for nested messages
- */
-proto.VideoCategoryMetadata.serializeBinaryToWriter = function(message, writer) {
-  var f = undefined;
-  f = /** @type {string} */ (jspb.Message.getField(message, 1));
-  if (f != null) {
-    writer.writeString(
-      1,
-      f
-    );
-  }
-};
-
-
-/**
- * optional string name = 1;
- * @return {string}
- */
-proto.VideoCategoryMetadata.prototype.getName = function() {
-  return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 1, ""));
-};
-
-
-/**
- * @param {string} value
- * @return {!proto.VideoCategoryMetadata} returns this
- */
-proto.VideoCategoryMetadata.prototype.setName = function(value) {
-  return jspb.Message.setField(this, 1, value);
-};
-
-
-/**
- * Clears the field making it undefined.
- * @return {!proto.VideoCategoryMetadata} returns this
- */
-proto.VideoCategoryMetadata.prototype.clearName = function() {
-  return jspb.Message.setField(this, 1, undefined);
-};
-
-
-/**
- * Returns whether this field is set.
- * @return {boolean}
- */
-proto.VideoCategoryMetadata.prototype.hasName = function() {
-  return jspb.Message.getField(this, 1) != null;
-};
-
-
-goog.object.extend(exports, proto);

+ 0 - 374
content-metadata-protobuf/doc/index.md

@@ -1,374 +0,0 @@
-# Protocol Documentation
-<a name="top"></a>
-
-## Table of Contents
-
-- [proto/Channel.proto](#proto/Channel.proto)
-    - [ChannelCategoryMetadata](#.ChannelCategoryMetadata)
-    - [ChannelMetadata](#.ChannelMetadata)
-  
-- [proto/Person.proto](#proto/Person.proto)
-    - [PersonMetadata](#.PersonMetadata)
-  
-- [proto/Playlist.proto](#proto/Playlist.proto)
-    - [PlaylistMetadata](#.PlaylistMetadata)
-  
-- [proto/Series.proto](#proto/Series.proto)
-    - [SeasonMetadata](#.SeasonMetadata)
-    - [SeriesMetadata](#.SeriesMetadata)
-  
-- [proto/Video.proto](#proto/Video.proto)
-    - [License](#.License)
-    - [MediaType](#.MediaType)
-    - [PublishedBeforeJoystream](#.PublishedBeforeJoystream)
-    - [VideoCategoryMetadata](#.VideoCategoryMetadata)
-    - [VideoMetadata](#.VideoMetadata)
-  
-- [Scalar Value Types](#scalar-value-types)
-
-
-
-<a name="proto/Channel.proto"></a>
-<p align="right"><a href="#top">Top</a></p>
-
-## proto/Channel.proto
-
-
-
-<a name=".ChannelCategoryMetadata"></a>
-
-### ChannelCategoryMetadata
-
-
-
-| Field | Type | Label | Description |
-| ----- | ---- | ----- | ----------- |
-| name | [string](#string) | optional | Category Name |
-
-
-
-
-
-
-<a name=".ChannelMetadata"></a>
-
-### ChannelMetadata
-
-
-
-| Field | Type | Label | Description |
-| ----- | ---- | ----- | ----------- |
-| title | [string](#string) | optional | Channel Title |
-| description | [string](#string) | optional | Channel Description |
-| is_public | [bool](#bool) | optional | Wether to display channel to the public |
-| language | [string](#string) | optional | ISO_639-1 Language [Code](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) |
-| cover_photo | [uint32](#uint32) | optional | index into external [assets array](#.Assets) |
-| avatar_photo | [uint32](#uint32) | optional | index into external [assets array](#.Assets) |
-| category | [uint64](#uint64) | optional | Channel Category Id |
-
-
-
-
-
- 
-
- 
-
- 
-
- 
-
-
-
-<a name="proto/Person.proto"></a>
-<p align="right"><a href="#top">Top</a></p>
-
-## proto/Person.proto
-
-
-
-<a name=".PersonMetadata"></a>
-
-### PersonMetadata
-
-
-
-| Field | Type | Label | Description |
-| ----- | ---- | ----- | ----------- |
-| first_name | [string](#string) | optional |  |
-| middle_name | [string](#string) | optional |  |
-| last_name | [string](#string) | optional |  |
-| about | [string](#string) | optional |  |
-| cover_photo | [uint32](#uint32) | optional | index into external [assets array](#.Assets) |
-| avatar_photo | [uint32](#uint32) | optional | index into external [assets array](#.Assets) |
-
-
-
-
-
- 
-
- 
-
- 
-
- 
-
-
-
-<a name="proto/Playlist.proto"></a>
-<p align="right"><a href="#top">Top</a></p>
-
-## proto/Playlist.proto
-
-
-
-<a name=".PlaylistMetadata"></a>
-
-### PlaylistMetadata
-
-
-
-| Field | Type | Label | Description |
-| ----- | ---- | ----- | ----------- |
-| title | [string](#string) | optional |  |
-| videos | [uint64](#uint64) | repeated | Videos in the playlist |
-
-
-
-
-
- 
-
- 
-
- 
-
- 
-
-
-
-<a name="proto/Series.proto"></a>
-<p align="right"><a href="#top">Top</a></p>
-
-## proto/Series.proto
-
-
-
-<a name=".SeasonMetadata"></a>
-
-### SeasonMetadata
-
-
-
-| Field | Type | Label | Description |
-| ----- | ---- | ----- | ----------- |
-| title | [string](#string) | optional |  |
-| description | [string](#string) | optional |  |
-| cover_photo | [uint32](#uint32) | optional | index into external [assets array](#.Assets) |
-| persons | [uint64](#uint64) | repeated | Person(s) referenced by PersonId involved in this Season |
-
-
-
-
-
-
-<a name=".SeriesMetadata"></a>
-
-### SeriesMetadata
-
-
-
-| Field | Type | Label | Description |
-| ----- | ---- | ----- | ----------- |
-| title | [string](#string) | optional |  |
-| description | [string](#string) | optional |  |
-| cover_photo | [uint32](#uint32) | optional | index into external [assets array](#.Assets) |
-| persons | [uint64](#uint64) | repeated | Person(s) referenced by PersonId involved in this Series |
-
-
-
-
-
- 
-
- 
-
- 
-
- 
-
-
-
-<a name="proto/Video.proto"></a>
-<p align="right"><a href="#top">Top</a></p>
-
-## proto/Video.proto
-
-
-
-<a name=".License"></a>
-
-### License
-License types defined by Joystream
-
-
-| Field | Type | Label | Description |
-| ----- | ---- | ----- | ----------- |
-| code | [uint32](#uint32) | optional | License code defined by Joystream. [reference](../src/KnownLicenses.json) |
-| attribution | [string](#string) | optional | Text for licenses that require an attribution |
-| custom_text | [string](#string) | optional | Text for custom license type |
-
-
-
-
-
-
-<a name=".MediaType"></a>
-
-### MediaType
-Codec, Container, MIME media-type information
-
-
-| Field | Type | Label | Description |
-| ----- | ---- | ----- | ----------- |
-| codec_name | [string](#string) | optional | Codec corresponding to `name` field from [FFmpeg](https://github.com/FFmpeg/FFmpeg/blob/master/libavcodec/codec_desc.c) |
-| container | [string](#string) | optional | Video container format, eg. &#39;MP4&#39;, &#39;WebM&#39;, &#39;Ogg&#39; [ref](https://developer.mozilla.org/en-US/docs/Web/Media/Formats/Video_codecs) |
-| mime_media_type | [string](#string) | optional | MIME Media Type, eg. &#39;video/mp4&#39; [ref](https://www.iana.org/assignments/media-types/media-types.xhtml#video) |
-
-
-
-
-
-
-<a name=".PublishedBeforeJoystream"></a>
-
-### PublishedBeforeJoystream
-Publication status before joystream
-
-
-| Field | Type | Label | Description |
-| ----- | ---- | ----- | ----------- |
-| is_published | [bool](#bool) | optional | Was video published before joystream platform |
-| date | [string](#string) | optional | Date of publication: &#39;YYYY-MM-DD&#39; [ISO-8601](https://www.iso.org/iso-8601-date-and-time-format.html) |
-
-
-
-
-
-
-<a name=".VideoCategoryMetadata"></a>
-
-### VideoCategoryMetadata
-
-
-
-| Field | Type | Label | Description |
-| ----- | ---- | ----- | ----------- |
-| name | [string](#string) | optional | Category name |
-
-
-
-
-
-
-<a name=".VideoMetadata"></a>
-
-### VideoMetadata
-
-
-
-| Field | Type | Label | Description |
-| ----- | ---- | ----- | ----------- |
-| title | [string](#string) | optional | Video Title |
-| description | [string](#string) | optional | Video Description |
-| video | [uint32](#uint32) | optional | index into external [assets array](#.Assets) |
-| thumbnail_photo | [uint32](#uint32) | optional | index into external [assets array](#.Assets) |
-| duration | [uint32](#uint32) | optional | Lengths of video in seconds |
-| media_pixel_height | [uint32](#uint32) | optional | Resolution of the video (Height) |
-| media_pixel_width | [uint32](#uint32) | optional | Resolution of the video (Width) |
-| media_type | [MediaType](#MediaType) | optional | Encoding and Container format used |
-| language | [string](#string) | optional | ISO_639-1 Language [Code](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) |
-| license | [License](#License) | optional | License type for the media |
-| published_before_joystream | [PublishedBeforeJoystream](#PublishedBeforeJoystream) | optional | Date of publication |
-| has_marketing | [bool](#bool) | optional | Does video have marketing or advertising in the stream |
-| is_public | [bool](#bool) | optional | Should video be publicy visible yet |
-| is_explicit | [bool](#bool) | optional | Does Video have explicit language or scenes |
-| persons | [uint64](#uint64) | repeated | Person(s) referenced by PersonId involved in this video |
-| category | [uint64](#uint64) | optional | Video Category Id |
-
-
-
-
-
- 
-
- 
-
- 
-
- 
-
-
-
-## Scalar Value Types
-
-| .proto Type | Notes | C++ | Java | Python | Go | C# | PHP | Ruby |
-| ----------- | ----- | --- | ---- | ------ | -- | -- | --- | ---- |
-| <a name="double" /> double |  | double | double | float | float64 | double | float | Float |
-| <a name="float" /> float |  | float | float | float | float32 | float | float | Float |
-| <a name="int32" /> int32 | Uses variable-length encoding. Inefficient for encoding negative numbers – if your field is likely to have negative values, use sint32 instead. | int32 | int | int | int32 | int | integer | Bignum or Fixnum (as required) |
-| <a name="int64" /> int64 | Uses variable-length encoding. Inefficient for encoding negative numbers – if your field is likely to have negative values, use sint64 instead. | int64 | long | int/long | int64 | long | integer/string | Bignum |
-| <a name="uint32" /> uint32 | Uses variable-length encoding. | uint32 | int | int/long | uint32 | uint | integer | Bignum or Fixnum (as required) |
-| <a name="uint64" /> uint64 | Uses variable-length encoding. | uint64 | long | int/long | uint64 | ulong | integer/string | Bignum or Fixnum (as required) |
-| <a name="sint32" /> sint32 | Uses variable-length encoding. Signed int value. These more efficiently encode negative numbers than regular int32s. | int32 | int | int | int32 | int | integer | Bignum or Fixnum (as required) |
-| <a name="sint64" /> sint64 | Uses variable-length encoding. Signed int value. These more efficiently encode negative numbers than regular int64s. | int64 | long | int/long | int64 | long | integer/string | Bignum |
-| <a name="fixed32" /> fixed32 | Always four bytes. More efficient than uint32 if values are often greater than 2^28. | uint32 | int | int | uint32 | uint | integer | Bignum or Fixnum (as required) |
-| <a name="fixed64" /> fixed64 | Always eight bytes. More efficient than uint64 if values are often greater than 2^56. | uint64 | long | int/long | uint64 | ulong | integer/string | Bignum |
-| <a name="sfixed32" /> sfixed32 | Always four bytes. | int32 | int | int | int32 | int | integer | Bignum or Fixnum (as required) |
-| <a name="sfixed64" /> sfixed64 | Always eight bytes. | int64 | long | int/long | int64 | long | integer/string | Bignum |
-| <a name="bool" /> bool |  | bool | boolean | boolean | bool | bool | boolean | TrueClass/FalseClass |
-| <a name="string" /> string | A string must always contain UTF-8 encoded or 7-bit ASCII text. | string | String | str/unicode | string | string | string | String (UTF-8) |
-| <a name="bytes" /> bytes | May contain any arbitrary sequence of bytes. | string | ByteString | str | []byte | ByteString | string | String (ASCII-8BIT) |
-
-<!-- 
-    This extra documentation will be appended to the generated docs.
--->
-
-## Referencing Assets
-<a name=".Assets"></a>
-
-Applications that process messages that contain a `uint32` field that references an asset such as a cover photo or video, should interpret this value as a zero based index into an array/vector that is received external (out of band) to the protobuf message.
-
-Example in context of query-node processing the runtime event `VideoCreated`
-
-```rust
-// Runtime event associated with creating a Video
-VideoCreated(video_id: VideoId, video: Video, assets: Vec<NewAsset>, params: VideoCreationParameters)
-
-struct VideoCreationParameters {
-  in_category: VideoCategoryId,
-  // binary serialized VideoMetadata protobuf message
-  meta: Vec<u8>,
-}
-
-// suppose assets is a vector of two elements. This is the "out of band" array being referenced by the VideoMetadata message
-assets = [
-    NewAsset::Uri("https://mydomain.net/thumbnail.png"),
-    NewAsset::Upload({
-       content_id,
-       ipfs_hash,
-       size,
-       ...
-    }),
-];
-
-meta = VideoMetadata {
-    ...
-    // refers to second element: assets[1] which is being uploaded to the storage system
-    video: 1,
-    // refers to the first element assets[0] which is being referneced by a url string.
-    thumbnail_photo: 0,
-    ...
-};
-```

+ 0 - 13
content-metadata-protobuf/generate-md-doc.sh

@@ -1,13 +0,0 @@
-#!/usr/bin/env bash
-
-# Directory to write generated documentation to
-OUT_DIR_DOC="./doc"
-mkdir -p ${OUT_DIR_DOC}
-
-# Gernerate Markdown docs
-protoc \
-    --doc_out="${OUT_DIR_DOC}" --doc_opt=markdown,index.md \
-    proto/*.proto
-
-# Append some custom docs to generated protocol docs
-cat doc-appendix.md >> ${OUT_DIR_DOC}/index.md

+ 0 - 47
content-metadata-protobuf/package.json

@@ -1,47 +0,0 @@
-{
-  "name": "@joystream/content-metadata-protobuf",
-  "version": "1.1.0",
-  "description": "Joystream Content Metadata Protobuf Library ",
-  "main": "lib/index.js",
-  "types": "lib/index.d.ts",
-  "repository": "https://github.com/joystream/joystream",
-  "author": "Joystream Contributors",
-  "license": "MIT",
-  "private": false,
-  "scripts": {
-    "build": "./compile.sh && yarn build:ts",
-    "build:ts": "tsc",
-    "compile": "./compile.sh",
-    "generate-doc": "./generate-md-doc.sh",
-    "test": "env TS_NODE_COMPILER_OPTIONS='{\"module\": \"commonjs\" }' mocha -r ts-node/register 'test/**/*.ts'",
-    "lint": "eslint ./src --ext .ts",
-    "checks": "tsc --noEmit --pretty && prettier ./ --check && yarn lint",
-    "format": "prettier ./ --write",
-    "prepublish": "yarn build"
-  },
-  "files": [
-    "lib/**/*",
-    "doc/**",
-    "proto/**",
-    "compiled/**/*",
-    "README.md"
-  ],
-  "dependencies": {
-    "google-protobuf": "^3.14.0"
-  },
-  "devDependencies": {
-    "@types/chai": "^4.2.11",
-    "@types/mocha": "^8.2.0",
-    "chai": "^4.2.0",
-    "eslint": "^7.6.0",
-    "mocha": "^8.2.1",
-    "prettier": "2.0.2",
-    "ts-node": "^8.8.1",
-    "ts-protoc-gen": "^0.14.0",
-    "typescript": "^4.1.3"
-  },
-  "publishConfig": {
-    "access": "public",
-    "registry": "https://registry.npmjs.org"
-  }
-}

+ 0 - 10
content-metadata-protobuf/src/index.ts

@@ -1,10 +0,0 @@
-// Some helpers for constructing known licenses
-import licenses from './licenses'
-export { licenses }
-
-// protobuf message constructors
-export * from '../compiled/proto/Video_pb'
-export * from '../compiled/proto/Channel_pb'
-export * from '../compiled/proto/Person_pb'
-export * from '../compiled/proto/Playlist_pb'
-export * from '../compiled/proto/Series_pb'

+ 0 - 33
content-metadata-protobuf/test/channel.ts

@@ -1,33 +0,0 @@
-import { ChannelMetadata } from '../src'
-import { assert } from 'chai'
-
-describe('Channel Metadata', () => {
-  it('Message', () => {
-    const channel = new ChannelMetadata()
-
-    const title = 'title'
-    const description = 'description'
-    const isPublic = false
-    const language = 'fr'
-
-    channel.setTitle(title)
-    channel.setDescription(description)
-    channel.setIsPublic(isPublic)
-    channel.setLanguage(language)
-    channel.setAvatarPhoto(0)
-    channel.setCoverPhoto(1)
-    channel.setCategory(100)
-
-    assert.deepEqual(channel.toObject(), {
-      title,
-      description,
-      isPublic,
-      language,
-      avatarPhoto: 0,
-      coverPhoto: 1,
-      category: 100,
-    })
-
-    assert.deepEqual(ChannelMetadata.deserializeBinary(channel.serializeBinary()), channel)
-  })
-})

+ 0 - 115
content-metadata-protobuf/test/video.ts

@@ -1,115 +0,0 @@
-import { VideoMetadata, PublishedBeforeJoystream, MediaType, License } from '../src'
-import { assert, expect } from 'chai'
-
-describe('Video Metadata', () => {
-  it('Message', () => {
-    const meta = new VideoMetadata()
-
-    const title = 'Video Title'
-    const description = 'Video Description'
-    const duration = 100
-
-    meta.setTitle(title)
-    meta.setDescription(description)
-    meta.setDuration(duration)
-    meta.setMediaPixelHeight(1)
-    meta.setMediaPixelWidth(2)
-    meta.setMediaType(new MediaType())
-    meta.setLanguage('en')
-    meta.setLicense(new License())
-    meta.setPublishedBeforeJoystream(new PublishedBeforeJoystream())
-    meta.setHasMarketing(true)
-    meta.setIsPublic(true)
-    meta.setIsExplicit(false)
-    meta.setVideo(0)
-    meta.setThumbnailPhoto(1)
-    meta.setCategory(101)
-
-    assert.deepEqual(meta.toObject(), {
-      title,
-      description,
-      duration,
-      mediaPixelHeight: 1,
-      mediaPixelWidth: 2,
-      mediaType: {
-        codecName: undefined,
-        container: undefined,
-        mimeMediaType: undefined,
-      },
-      language: 'en',
-      license: {
-        code: undefined,
-        attribution: undefined,
-        customText: undefined,
-      },
-      publishedBeforeJoystream: { isPublished: undefined, date: undefined },
-      hasMarketing: true,
-      isPublic: true,
-      isExplicit: false,
-      thumbnailPhoto: 1,
-      video: 0,
-      personsList: [],
-      category: 101,
-    })
-
-    // sanity check - encoding / decoding works
-    assert.deepEqual(VideoMetadata.deserializeBinary(meta.serializeBinary()), meta)
-  })
-
-  it('Message: PublishedBeforeJoystream', () => {
-    const meta = new VideoMetadata()
-
-    expect(meta.hasPublishedBeforeJoystream()).equals(false, 'PublishedBeforeJoystream field should NOT be set')
-
-    const published = new PublishedBeforeJoystream()
-    const isPublished = true
-    const date = '1950-12-24'
-    published.setIsPublished(isPublished)
-    published.setDate(date)
-
-    meta.setPublishedBeforeJoystream(published)
-
-    // Field should now be set
-    expect(meta.hasPublishedBeforeJoystream()).equals(true, 'PublishedBeforeJoystream field should be set')
-
-    assert.deepEqual(published.toObject(), {
-      isPublished,
-      date,
-    })
-  })
-
-  it('Message: License', () => {
-    const license = new License()
-
-    const code = 1000
-    const attribution = 'Attribution Text'
-    const customText = 'Custom License Details'
-    license.setCode(code)
-    license.setAttribution(attribution)
-    license.setCustomText(customText)
-
-    assert.deepEqual(license.toObject(), {
-      code,
-      attribution,
-      customText,
-    })
-  })
-
-  it('Message: MediaType', () => {
-    const mediaType = new MediaType()
-
-    const codecName = 'mpeg4'
-    const container = 'avi'
-    const mimeMediaType = 'videp/mp4'
-
-    mediaType.setCodecName(codecName)
-    mediaType.setContainer(container)
-    mediaType.setMimeMediaType(mimeMediaType)
-
-    assert.deepEqual(mediaType.toObject(), {
-      codecName,
-      container,
-      mimeMediaType,
-    })
-  })
-})

+ 0 - 15
content-metadata-protobuf/tsconfig.json

@@ -1,15 +0,0 @@
-{
-  "compilerOptions": {
-    "target": "esnext",
-    "module": "commonjs",
-    "outDir": "lib",
-    "strict": true,
-    "declaration": true,
-    "esModuleInterop": true,
-    "forceConsistentCasingInFileNames": true,
-    "skipLibCheck": true,
-    "resolveJsonModule": true,
-  },
-  "include": ["src"],
-  "exclude": ["node_modules", "test"]
-}

+ 2 - 1
devops/vscode/settings.json

@@ -5,6 +5,7 @@
     "./tests/network-tests",
     "./types",
     "./storage-node",
-    "./atlas"
+    "./atlas",
+    "./distributor-node"
   ]
 }

+ 26 - 0
distributor-node.Dockerfile

@@ -0,0 +1,26 @@
+FROM --platform=linux/x86-64 node:14 as builder
+
+WORKDIR /joystream
+COPY ./types types
+COPY ./metadata-protobuf metadata-protobuf
+COPY ./distributor-node distributor-node
+COPY ./yarn.lock yarn.lock
+COPY ./package.json package.json
+
+EXPOSE 3334
+
+# Build & cleanup
+# (must be inside a single "RUN", see: https://stackoverflow.com/questions/40212836/docker-image-larger-than-its-filesystem)
+RUN \
+  yarn --frozen-lockfile &&\
+  yarn workspace @joystream/types build &&\
+  yarn workspace @joystream/metadata-protobuf build &&\
+  yarn workspace @joystream/distributor-cli build &&\
+  find . -name "node_modules" -type d -prune &&\
+  yarn --frozen-lockfile --production &&\
+  yarn cache clean
+
+ENV CONFIG_PATH ./distributor-node/config/docker/config.docker.yml
+
+ENTRYPOINT ["yarn", "joystream-distributor"]
+CMD ["start"]

+ 1 - 0
distributor-node/.eslintignore

@@ -0,0 +1 @@
+src/types/generated

+ 9 - 0
distributor-node/.gitignore

@@ -0,0 +1,9 @@
+*-debug.log
+*-error.log
+/.nyc_output
+/dist
+/lib
+/package-lock.json
+/tmp
+node_modules
+/local

+ 3 - 0
distributor-node/.prettierignore

@@ -0,0 +1,3 @@
+/**/generated
+/**/mock.graphql
+lib

+ 419 - 0
distributor-node/README.md

@@ -0,0 +1,419 @@
+@joystream/distributor-cli
+==========================
+
+Joystream distributor node CLI
+
+[![oclif](https://img.shields.io/badge/cli-oclif-brightgreen.svg)](https://oclif.io)
+[![Version](https://img.shields.io/npm/v/@joystream/distributor-cli.svg)](https://npmjs.org/package/@joystream/distributor-cli)
+[![Downloads/week](https://img.shields.io/npm/dw/@joystream/distributor-cli.svg)](https://npmjs.org/package/@joystream/distributor-cli)
+[![License](https://img.shields.io/npm/l/@joystream/distributor-cli.svg)](https://github.com/Joystream/joystream/blob/master/package.json)
+
+<!-- toc -->
+* [Usage](#usage)
+* [Commands](#commands)
+<!-- tocstop -->
+# Usage
+<!-- usage -->
+```sh-session
+$ npm install -g @joystream/distributor-cli
+$ joystream-distributor COMMAND
+running command...
+$ joystream-distributor (-v|--version|version)
+@joystream/distributor-cli/0.1.0 linux-x64 node-v14.17.3
+$ joystream-distributor --help [COMMAND]
+USAGE
+  $ joystream-distributor COMMAND
+...
+```
+<!-- usagestop -->
+# Commands
+<!-- commands -->
+* [`joystream-distributor dev:init`](#joystream-distributor-devinit)
+* [`joystream-distributor help [COMMAND]`](#joystream-distributor-help-command)
+* [`joystream-distributor leader:cancel-invitation`](#joystream-distributor-leadercancel-invitation)
+* [`joystream-distributor leader:create-bucket`](#joystream-distributor-leadercreate-bucket)
+* [`joystream-distributor leader:create-bucket-family`](#joystream-distributor-leadercreate-bucket-family)
+* [`joystream-distributor leader:delete-bucket`](#joystream-distributor-leaderdelete-bucket)
+* [`joystream-distributor leader:delete-bucket-family`](#joystream-distributor-leaderdelete-bucket-family)
+* [`joystream-distributor leader:invite-bucket-operator`](#joystream-distributor-leaderinvite-bucket-operator)
+* [`joystream-distributor leader:set-buckets-per-bag-limit`](#joystream-distributor-leaderset-buckets-per-bag-limit)
+* [`joystream-distributor leader:update-bag`](#joystream-distributor-leaderupdate-bag)
+* [`joystream-distributor leader:update-bucket-mode`](#joystream-distributor-leaderupdate-bucket-mode)
+* [`joystream-distributor leader:update-bucket-status`](#joystream-distributor-leaderupdate-bucket-status)
+* [`joystream-distributor leader:update-dynamic-bag-policy`](#joystream-distributor-leaderupdate-dynamic-bag-policy)
+* [`joystream-distributor operator:accept-invitation`](#joystream-distributor-operatoraccept-invitation)
+* [`joystream-distributor operator:set-metadata`](#joystream-distributor-operatorset-metadata)
+* [`joystream-distributor start`](#joystream-distributor-start)
+
+## `joystream-distributor dev:init`
+
+Initialize development environment. Sets Alice as distributor working group leader.
+
+```
+USAGE
+  $ joystream-distributor dev:init
+
+OPTIONS
+  -c, --configPath=configPath  [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                               directory)
+
+  -y, --yes                    Answer "yes" to any prompt, skipping any manual confirmations
+```
+
+_See code: [src/commands/dev/init.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/dev/init.ts)_
+
+## `joystream-distributor help [COMMAND]`
+
+display help for joystream-distributor
+
+```
+USAGE
+  $ joystream-distributor help [COMMAND]
+
+ARGUMENTS
+  COMMAND  command to show help for
+
+OPTIONS
+  --all  see all commands in CLI
+```
+
+_See code: [@oclif/plugin-help](https://github.com/oclif/plugin-help/blob/v2.2.3/src/commands/help.ts)_
+
+## `joystream-distributor leader:cancel-invitation`
+
+Cancel pending distribution bucket operator invitation.
+
+```
+USAGE
+  $ joystream-distributor leader:cancel-invitation
+
+OPTIONS
+  -B, --bucketId=bucketId      (required) Distribution bucket id
+
+  -c, --configPath=configPath  [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                               directory)
+
+  -f, --familyId=familyId      (required) Distribution bucket family id
+
+  -w, --workerId=workerId      (required) ID of the invited operator (distribution group worker)
+
+  -y, --yes                    Answer "yes" to any prompt, skipping any manual confirmations
+
+DESCRIPTION
+  Requires distribution working group leader permissions.
+```
+
+_See code: [src/commands/leader/cancel-invitation.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/cancel-invitation.ts)_
+
+## `joystream-distributor leader:create-bucket`
+
+Create new distribution bucket. Requires distribution working group leader permissions.
+
+```
+USAGE
+  $ joystream-distributor leader:create-bucket
+
+OPTIONS
+  -a, --acceptingBags=(yes|no)  [default: no] Whether the created bucket should accept new bags
+
+  -c, --configPath=configPath   [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                                directory)
+
+  -f, --familyId=familyId       (required) Distribution bucket family id
+
+  -y, --yes                     Answer "yes" to any prompt, skipping any manual confirmations
+```
+
+_See code: [src/commands/leader/create-bucket.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/create-bucket.ts)_
+
+## `joystream-distributor leader:create-bucket-family`
+
+Create new distribution bucket family. Requires distribution working group leader permissions.
+
+```
+USAGE
+  $ joystream-distributor leader:create-bucket-family
+
+OPTIONS
+  -c, --configPath=configPath  [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                               directory)
+
+  -y, --yes                    Answer "yes" to any prompt, skipping any manual confirmations
+```
+
+_See code: [src/commands/leader/create-bucket-family.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/create-bucket-family.ts)_
+
+## `joystream-distributor leader:delete-bucket`
+
+Delete distribution bucket. The bucket must have no operators. Requires distribution working group leader permissions.
+
+```
+USAGE
+  $ joystream-distributor leader:delete-bucket
+
+OPTIONS
+  -B, --bucketId=bucketId      (required) Distribution bucket id
+
+  -c, --configPath=configPath  [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                               directory)
+
+  -f, --familyId=familyId      (required) Distribution bucket family id
+
+  -y, --yes                    Answer "yes" to any prompt, skipping any manual confirmations
+```
+
+_See code: [src/commands/leader/delete-bucket.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/delete-bucket.ts)_
+
+## `joystream-distributor leader:delete-bucket-family`
+
+Delete distribution bucket family. Requires distribution working group leader permissions.
+
+```
+USAGE
+  $ joystream-distributor leader:delete-bucket-family
+
+OPTIONS
+  -c, --configPath=configPath  [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                               directory)
+
+  -f, --familyId=familyId      (required) Distribution bucket family id
+
+  -y, --yes                    Answer "yes" to any prompt, skipping any manual confirmations
+```
+
+_See code: [src/commands/leader/delete-bucket-family.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/delete-bucket-family.ts)_
+
+## `joystream-distributor leader:invite-bucket-operator`
+
+Invite distribution bucket operator (distribution group worker).
+
+```
+USAGE
+  $ joystream-distributor leader:invite-bucket-operator
+
+OPTIONS
+  -B, --bucketId=bucketId      (required) Distribution bucket id
+
+  -c, --configPath=configPath  [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                               directory)
+
+  -f, --familyId=familyId      (required) Distribution bucket family id
+
+  -w, --workerId=workerId      (required) ID of the distribution group worker to invite as bucket operator
+
+  -y, --yes                    Answer "yes" to any prompt, skipping any manual confirmations
+
+DESCRIPTION
+  The specified bucket must not have any operator currently.
+     Requires distribution working group leader permissions.
+```
+
+_See code: [src/commands/leader/invite-bucket-operator.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/invite-bucket-operator.ts)_
+
+## `joystream-distributor leader:set-buckets-per-bag-limit`
+
+Set max. distribution buckets per bag limit. Requires distribution working group leader permissions.
+
+```
+USAGE
+  $ joystream-distributor leader:set-buckets-per-bag-limit
+
+OPTIONS
+  -c, --configPath=configPath  [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                               directory)
+
+  -l, --limit=limit            (required) New limit value
+
+  -y, --yes                    Answer "yes" to any prompt, skipping any manual confirmations
+```
+
+_See code: [src/commands/leader/set-buckets-per-bag-limit.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/set-buckets-per-bag-limit.ts)_
+
+## `joystream-distributor leader:update-bag`
+
+Add/remove distribution buckets from a bag.
+
+```
+USAGE
+  $ joystream-distributor leader:update-bag
+
+OPTIONS
+  -a, --add=add
+      [default: ] ID of a bucket to add to bag
+
+  -b, --bagId=bagId
+      (required) Bag ID. Format: {bag_type}:{sub_type}:{id}.
+           - Bag types: 'static', 'dynamic'
+           - Sub types: 'static:council', 'static:wg', 'dynamic:member', 'dynamic:channel'
+           - Id:
+             - absent for 'static:council'
+             - working group name for 'static:wg'
+             - integer for 'dynamic:member' and 'dynamic:channel'
+           Examples:
+           - static:council
+           - static:wg:storage
+           - dynamic:member:4
+
+  -c, --configPath=configPath
+      [default: ./config.yml] Path to config JSON/YAML file (relative to current working directory)
+
+  -f, --familyId=familyId
+      (required) ID of the distribution bucket family
+
+  -r, --remove=remove
+      [default: ] ID of a bucket to remove from bag
+
+  -y, --yes
+      Answer "yes" to any prompt, skipping any manual confirmations
+
+EXAMPLE
+  $ joystream-distributor leader:update-bag -b 1 -f 1 -a 1 -a 2 -a 3 -r 4 -r 5
+```
+
+_See code: [src/commands/leader/update-bag.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/update-bag.ts)_
+
+## `joystream-distributor leader:update-bucket-mode`
+
+Update distribution bucket mode ("distributing" flag). Requires distribution working group leader permissions.
+
+```
+USAGE
+  $ joystream-distributor leader:update-bucket-mode
+
+OPTIONS
+  -B, --bucketId=bucketId      (required) Distribution bucket id
+
+  -c, --configPath=configPath  [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                               directory)
+
+  -d, --mode=(on|off)          (required) Whether the bucket should be "on" (distributing) or "off" (not distributing)
+
+  -f, --familyId=familyId      (required) Distribution bucket family id
+
+  -y, --yes                    Answer "yes" to any prompt, skipping any manual confirmations
+```
+
+_See code: [src/commands/leader/update-bucket-mode.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/update-bucket-mode.ts)_
+
+## `joystream-distributor leader:update-bucket-status`
+
+Update distribution bucket status ("acceptingNewBags" flag). Requires distribution working group leader permissions.
+
+```
+USAGE
+  $ joystream-distributor leader:update-bucket-status
+
+OPTIONS
+  -B, --bucketId=bucketId       (required) Distribution bucket id
+  -a, --acceptingBags=(yes|no)  (required) Whether the bucket should accept new bags
+
+  -c, --configPath=configPath   [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                                directory)
+
+  -f, --familyId=familyId       (required) Distribution bucket family id
+
+  -y, --yes                     Answer "yes" to any prompt, skipping any manual confirmations
+```
+
+_See code: [src/commands/leader/update-bucket-status.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/update-bucket-status.ts)_
+
+## `joystream-distributor leader:update-dynamic-bag-policy`
+
+Update dynamic bag creation policy (number of buckets by family that should store given dynamic bag type).
+
+```
+USAGE
+  $ joystream-distributor leader:update-dynamic-bag-policy
+
+OPTIONS
+  -c, --configPath=configPath  [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                               directory)
+
+  -p, --policy=policy          Key-value pair of {familyId}:{numberOfBuckets}
+
+  -t, --type=(Member|Channel)  (required) Dynamic bag type
+
+  -y, --yes                    Answer "yes" to any prompt, skipping any manual confirmations
+
+DESCRIPTION
+  Requires distribution working group leader permissions.
+
+EXAMPLE
+  $ joystream-distributor leader:update-dynamic-bag-policy -t Member -p 1:5 -p 2:10 -p 3:5
+```
+
+_See code: [src/commands/leader/update-dynamic-bag-policy.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/update-dynamic-bag-policy.ts)_
+
+## `joystream-distributor operator:accept-invitation`
+
+Accept pending distribution bucket operator invitation.
+
+```
+USAGE
+  $ joystream-distributor operator:accept-invitation
+
+OPTIONS
+  -B, --bucketId=bucketId      (required) Distribution bucket id
+
+  -c, --configPath=configPath  [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                               directory)
+
+  -f, --familyId=familyId      (required) Distribution bucket family id
+
+  -w, --workerId=workerId      (required) ID of the invited operator (distribution group worker)
+
+  -y, --yes                    Answer "yes" to any prompt, skipping any manual confirmations
+
+DESCRIPTION
+  Requires the invited distribution group worker role key.
+```
+
+_See code: [src/commands/operator/accept-invitation.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/operator/accept-invitation.ts)_
+
+## `joystream-distributor operator:set-metadata`
+
+Set/update distribution bucket operator metadata.
+
+```
+USAGE
+  $ joystream-distributor operator:set-metadata
+
+OPTIONS
+  -B, --bucketId=bucketId      (required) Distribution bucket id
+
+  -c, --configPath=configPath  [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                               directory)
+
+  -e, --endpoint=endpoint      Root distribution node endpoint
+
+  -f, --familyId=familyId      (required) Distribution bucket family id
+
+  -i, --input=input            Path to JSON metadata file
+
+  -w, --workerId=workerId      (required) ID of the invited operator (distribution group worker)
+
+  -y, --yes                    Answer "yes" to any prompt, skipping any manual confirmations
+
+DESCRIPTION
+  Requires active distribution bucket operator worker role key.
+```
+
+_See code: [src/commands/operator/set-metadata.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/operator/set-metadata.ts)_
+
+## `joystream-distributor start`
+
+Start the node
+
+```
+USAGE
+  $ joystream-distributor start
+
+OPTIONS
+  -c, --configPath=configPath  [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                               directory)
+
+  -y, --yes                    Answer "yes" to any prompt, skipping any manual confirmations
+```
+
+_See code: [src/commands/start.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/start.ts)_
+<!-- commandsstop -->

+ 3 - 0
distributor-node/bin/run

@@ -0,0 +1,3 @@
+#!/usr/bin/env node
+
+require('@oclif/command').run().then(require('@oclif/command/flush')).catch(require('@oclif/errors/handle'))

+ 3 - 0
distributor-node/bin/run.cmd

@@ -0,0 +1,3 @@
+@echo off
+
+node "%~dp0\run" %*

+ 22 - 0
distributor-node/config.yml

@@ -0,0 +1,22 @@
+id: test-node
+endpoints:
+  queryNode: http://localhost:8081/graphql
+  substrateNode: ws://localhost:9944
+  # elasticSearch: http://localhost:9200
+directories:
+  data: ./local/data
+  cache: ./local/cache
+  logs: ./local/logs
+log:
+  file: debug
+  console: info
+  # elastic: info
+limits:
+  storage: 100G
+  maxConcurrentStorageNodeDownloads: 100
+  maxConcurrentOutboundConnections: 300
+  outboundRequestsTimeout: 5000
+port: 3334
+keys: [//Alice]
+buckets: 'all'
+workerId: 0

+ 21 - 0
distributor-node/config/docker/config.docker.yml

@@ -0,0 +1,21 @@
+id: distributor-node-docker
+endpoints:
+  queryNode: http://graphql-server-mnt:4002/graphql
+  substrateNode: ws://joystream-node:9944
+  # elasticSearch: http://elasticsearch:9200
+directories:
+  data: /data
+  cache: /cache
+  logs: /logs
+log:
+  console: info
+  # elastic: info
+limits:
+  storage: 100G
+  maxConcurrentStorageNodeDownloads: 100
+  maxConcurrentOutboundConnections: 300
+  outboundRequestsTimeout: 5000
+port: 3334
+keys: [//Alice]
+buckets: 'all'
+workerId: 0

+ 5 - 0
distributor-node/config/docker/filebeat.Dockerfile

@@ -0,0 +1,5 @@
+FROM docker.elastic.co/beats/filebeat:7.13.3
+COPY ./filebeat.docker.yml /usr/share/filebeat/filebeat.yml
+USER root
+RUN chown root:filebeat /usr/share/filebeat/filebeat.yml
+USER filebeat

+ 19 - 0
distributor-node/config/docker/filebeat.docker.yml

@@ -0,0 +1,19 @@
+setup.kibana:
+  host: 'kibana:5601'
+output.elasticsearch:
+  hosts: ['elasticsearch:9200']
+# Using log files:
+filebeat.inputs:
+  - type: log
+    enabled: true
+    paths: /logs/*.json
+    json.keys_under_root: true
+    json.overwrite_keys: true
+    json.add_error_key: true
+    json.expand_keys: true
+# Docker autodiscover alternative:
+# filebeat.autodiscover:
+#   providers:
+#     - type: docker
+#       hints.enabled: true
+#       hints.default_config.enabled: false

+ 76 - 0
distributor-node/docker-compose.yml

@@ -0,0 +1,76 @@
+version: '3.4'
+
+services:
+  distributor-node:
+    image: node:14
+    labels:
+      co.elastic.logs/enabled: true
+      co.elastic.logs/json.keys_under_root: true
+      co.elastic.logs/json.overwrite_keys: true
+      co.elastic.logs/json.add_error_key: true
+      co.elastic.logs/json.expand_keys: true
+    volumes:
+      - type: bind
+        source: ..
+        target: /joystream
+      - data:/data
+      - cache:/cache
+      - logs:/logs
+    networks:
+      - joystream
+    ports:
+      - 127.0.0.1:3334:3334
+    working_dir: /joystream/distributor-node
+    init: true
+    entrypoint: ["./bin/run"]
+    command: ["start", "./config/docker/distributor-dev.docker.yml"]
+  # Ref: https://www.elastic.co/guide/en/elasticsearch/reference/7.13/docker.html
+  elasticsearch:
+    image: docker.elastic.co/elasticsearch/elasticsearch:7.13.2
+    container_name: elasticsearch
+    environment:
+      - discovery.type=single-node
+      - bootstrap.memory_lock=true
+      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
+    ulimits:
+      memlock:
+        soft: -1
+        hard: -1
+    volumes:
+      - es-data:/usr/share/elasticsearch/data
+    ports:
+      - 127.0.0.1:9200:9200
+  # Ref: https://www.elastic.co/guide/en/kibana/7.13/docker.html
+  kibana:
+    image: docker.elastic.co/kibana/kibana:7.13.2
+    container_name: kibana
+    ports:
+      - 127.0.0.1:5601:5601
+    environment:
+      ELASTICSEARCH_HOSTS: http://elasticsearch:9200
+  # Ref: https://www.elastic.co/guide/en/beats/filebeat/current/running-on-docker.html
+  filebeat:
+    user: root
+    image: joystream/distributor-filebeat
+    build:
+      context: ./config/docker
+      dockerfile: ./filebeat.Dockerfile
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock:ro
+      - logs:/logs
+
+volumes:
+  es-data:
+    driver: local
+  logs:
+    driver: local
+  cache:
+    driver: local
+  data:
+    driver: local
+
+# Join default joystream network (from root docker-compose)
+networks:
+  joystream:
+    external: true
+    name: joystream_default

+ 7 - 0
distributor-node/openapitools.json

@@ -0,0 +1,7 @@
+{
+  "$schema": "node_modules/@openapitools/openapi-generator-cli/config.schema.json",
+  "spaces": 2,
+  "generator-cli": {
+    "version": "5.2.0"
+  }
+}

+ 120 - 0
distributor-node/package.json

@@ -0,0 +1,120 @@
+{
+  "name": "@joystream/distributor-cli",
+  "description": "Joystream distributor node CLI",
+  "version": "0.1.0",
+  "author": "Joystream contributors",
+  "bin": {
+    "joystream-distributor": "./bin/run"
+  },
+  "bugs": "https://github.com/Joystream/joystream/issues",
+  "dependencies": {
+    "@joystream/types": "^0.17.0",
+    "@joystream/metadata-protobuf": "^1.0.0",
+    "@elastic/ecs-winston-format": "^1.1.0",
+    "@oclif/command": "^1",
+    "@oclif/config": "^1",
+    "@oclif/plugin-help": "^2",
+    "@apollo/client": "^3.2.5",
+    "graphql": "^14.7.0",
+    "winston": "^3.3.3",
+    "fast-safe-stringify": "^2.1.1",
+    "ajv": "^7",
+    "axios": "^0.21.1",
+    "cross-fetch": "^3.1.4",
+    "express": "^4.17.1",
+    "express-winston": "^4.1.0",
+    "express-openapi-validator": "^4.12.4",
+    "file-type": "^16.5.1",
+    "lodash": "^4.17.21",
+    "lru-cache": "^6.0.0",
+    "node-cleanup": "^2.1.2",
+    "proper-lockfile": "^4.1.2",
+    "read-chunk": "^3.2.0",
+    "send": "^0.17.1",
+    "tslib": "^1",
+    "yaml": "^1.10.2",
+    "queue": "^6.0.2",
+    "express-http-proxy": "^1.6.2",
+    "winston-elasticsearch": "^0.15.8",
+    "node-cache": "^5.1.2",
+    "cors": "^2.8.5",
+    "inquirer": "^8.1.2",
+    "multihashes": "^4.0.3",
+    "blake3": "^2.1.4",
+    "js-image-generator": "^1.0.3"
+  },
+  "devDependencies": {
+    "@graphql-codegen/cli": "^1.21.4",
+    "@graphql-codegen/import-types-preset": "^1.18.1",
+    "@graphql-codegen/typescript": "^1.22.0",
+    "@graphql-codegen/typescript-document-nodes": "^1.17.11",
+    "@graphql-codegen/typescript-operations": "^1.17.16",
+    "@oclif/dev-cli": "^1",
+    "@oclif/test": "^1",
+    "@openapitools/openapi-generator-cli": "^2.3.6",
+    "@types/chai": "^4",
+    "@types/mocha": "^5",
+    "@types/node": "^14",
+    "@types/node-cleanup": "^2.1.1",
+    "@types/express-http-proxy": "^1.6.2",
+    "@types/node-cache": "^4.2.5",
+    "@types/send": "^0.17.0",
+    "@types/inquirer": "^8.1.1",
+    "@types/cors": "^2.8.12",
+    "chai": "^4",
+    "globby": "^10",
+    "json-schema-to-typescript": "^10.1.4",
+    "mocha": "^5",
+    "nyc": "^14",
+    "openapi-typescript": "^4.0.2",
+    "ts-node": "^8",
+    "typescript": "^3.3"
+  },
+  "engines": {
+    "node": ">=14.16.1"
+  },
+  "files": [
+    "/bin",
+    "/lib",
+    "/npm-shrinkwrap.json",
+    "/oclif.manifest.json"
+  ],
+  "homepage": "https://github.com/Joystream/joystream",
+  "keywords": [
+    "oclif"
+  ],
+  "license": "GPL-3.0-only",
+  "main": "lib/index.js",
+  "oclif": {
+    "commands": "./lib/commands",
+    "bin": "joystream-distributor",
+    "plugins": [
+      "@oclif/plugin-help"
+    ]
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/Joystream/joystream",
+    "directory": "distributor-node"
+  },
+  "scripts": {
+    "postpack": "rm -f oclif.manifest.json",
+    "prepack": "rm -rf lib && tsc -b && oclif-dev manifest && oclif-dev readme",
+    "test": "nyc --extension .ts mocha --forbid-only \"test/**/*.test.ts\"",
+    "version": "oclif-dev readme && git add README.md",
+    "generate:types:json-schema": "yarn ts-node ./src/services/validation/generateTypes.ts",
+    "generate:types:graphql": "yarn graphql-codegen -c ./src/services/networking/query-node/codegen.yml",
+    "generate:types:openapi": "yarn openapi-typescript ./src/api-spec/openapi.yml -o ./src/types/generated/OpenApi.ts -c ../prettierrc.js",
+    "generate:types:all": "yarn generate:types:json-schema && yarn generate:types:graphql && yarn generate:types:openapi",
+    "generate:api:storage-node": "yarn openapi-generator-cli generate -i ../storage-node-v2/src/api-spec/openapi.yaml -g typescript-axios -o ./src/services/networking/storage-node/generated",
+    "generate:api:distributor-node": "yarn openapi-generator-cli generate -i ./src/api-spec/openapi.yml -g typescript-axios -o ./src/services/networking/distributor-node/generated",
+    "generate:api:all": "yarn generate:api:storage-node && yarn generate:api:distributor-node",
+    "generate:all": "yarn generate:types:all && yarn generate:api:all",
+    "build": "rm -rf lib && tsc --build tsconfig.json && cp -r ./src/api-spec ./lib/api-spec",
+    "lint": "eslint ./src --ext .ts",
+    "format": "prettier ./ --write",
+    "checks": "tsc --noEmit --pretty && prettier ./ --check && yarn lint",
+    "cli": "./bin/run"
+  },
+  "types": "lib/index.d.ts"
+}

+ 16 - 0
distributor-node/scripts/data/family-metadata.json

@@ -0,0 +1,16 @@
+{
+  "region": "eu-west",
+  "description": "Western Europe",
+  "boundary": [
+    { "longitude": 0.935664253776034, "latitude": 61.70157919955392 },
+    { "longitude": 7.077063962609969, "latitude": 37.40179586925884 },
+    { "longitude": 27.46754964469303, "latitude": 32.88770433956931 },
+    { "longitude": 40.68423960078124, "latitude": 48.39367044189657 },
+    { "longitude": 32.14019766910849, "latitude": 54.63502471598309 },
+    { "longitude": 28.56450578831937, "latitude": 59.09093283322235 },
+    { "longitude": 30.75892533489921, "latitude": 70.1670216697313 },
+    { "longitude": 19.2385951319647, "latitude": 73.4978175093038 },
+    { "longitude": -9.158590783812665, "latitude": 67.80006125371919 },
+    { "longitude": 0.935664253776034, "latitude": 61.70157919955392 }
+  ]
+}

+ 12 - 0
distributor-node/scripts/data/operator-metadata.json

@@ -0,0 +1,12 @@
+{
+  "endpoint": "http://localhost:3334",
+  "location": {
+    "countryCode": "DE",
+    "city": "Berlin",
+    "coordinates": {
+      "latitude": 52.520008,
+      "longitude": 13.404954
+    }
+  },
+  "extra": "Some additional information"
+}

+ 17 - 0
distributor-node/scripts/init-bucket.sh

@@ -0,0 +1,17 @@
+#!/usr/bin/env bash
+
+SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")"
+cd $SCRIPT_PATH
+
+export AUTO_CONFIRM=true
+export CONFIG_PATH="../config.yml"
+CLI=../bin/run
+
+${CLI} dev:init
+${CLI} leader:set-buckets-per-bag-limit -l 10
+FAMILY_ID=`${CLI} leader:create-bucket-family ${CONFIG}`
+BUCKET_ID=`${CLI} leader:create-bucket -f ${FAMILY_ID} -a yes`
+${CLI} leader:update-bag -b static:council -f ${FAMILY_ID} -a ${BUCKET_ID}
+${CLI} leader:update-bucket-mode -f ${FAMILY_ID} -B ${BUCKET_ID} --mode on
+${CLI} leader:invite-bucket-operator -f ${FAMILY_ID} -B ${BUCKET_ID} -w 0
+${CLI} operator:accept-invitation -f ${FAMILY_ID} -B ${BUCKET_ID} -w 0

+ 36 - 0
distributor-node/scripts/test-commands.sh

@@ -0,0 +1,36 @@
+#!/usr/bin/env bash
+
+SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")"
+cd $SCRIPT_PATH
+
+export AUTO_CONFIRM=true
+export CONFIG_PATH="../config.yml"
+CLI=../bin/run
+
+${CLI} dev:init
+${CLI} leader:set-buckets-per-bag-limit -l 10
+FAMILY_ID=`${CLI} leader:create-bucket-family`
+BUCKET_ID=`${CLI} leader:create-bucket -f ${FAMILY_ID} -a yes`
+${CLI} leader:update-bag -b static:council -f ${FAMILY_ID} -a ${BUCKET_ID}
+${CLI} leader:update-bag -b static:wg:storage -f ${FAMILY_ID} -a ${BUCKET_ID}
+${CLI} leader:update-bag -b static:wg:content -f ${FAMILY_ID} -a ${BUCKET_ID}
+${CLI} leader:update-bag -b static:wg:operations -f ${FAMILY_ID} -a ${BUCKET_ID}
+${CLI} leader:update-bag -b static:wg:gateway -f ${FAMILY_ID} -a ${BUCKET_ID}
+${CLI} leader:update-bag -b static:wg:distribution -f ${FAMILY_ID} -a ${BUCKET_ID}
+${CLI} leader:update-bucket-status -f ${FAMILY_ID} -B ${BUCKET_ID}  --acceptingBags yes
+${CLI} leader:update-bucket-mode -f ${FAMILY_ID} -B ${BUCKET_ID} --mode on
+${CLI} leader:update-dynamic-bag-policy -t Member -p ${FAMILY_ID}:5
+${CLI} leader:update-dynamic-bag-policy -t Member
+${CLI} leader:invite-bucket-operator -f ${FAMILY_ID} -B ${BUCKET_ID} -w 0
+${CLI} leader:cancel-invitation -f ${FAMILY_ID} -B ${BUCKET_ID} -w 0
+${CLI} leader:invite-bucket-operator -f ${FAMILY_ID} -B ${BUCKET_ID} -w 0
+${CLI} operator:accept-invitation -f ${FAMILY_ID} -B ${BUCKET_ID} -w 0
+${CLI} operator:set-metadata -f ${FAMILY_ID} -B ${BUCKET_ID} -w 0 -i ./data/operator-metadata.json
+${CLI} leader:remove-bucket-operator -f ${FAMILY_ID} -B ${BUCKET_ID} -w 0
+${CLI} leader:set-bucket-family-metadata -f ${FAMILY_ID} -i ./data/family-metadata.json
+
+# Deletion commands tested separately, since bucket operator removal is not yet supported
+FAMILY_TO_DELETE_ID=`${CLI} leader:create-bucket-family`
+BUCKET_TO_DELETE_ID=`${CLI} leader:create-bucket -f ${FAMILY_TO_DELETE_ID} -a yes`
+${CLI} leader:delete-bucket -f ${FAMILY_TO_DELETE_ID} -B ${BUCKET_TO_DELETE_ID}
+${CLI} leader:delete-bucket-family -f ${FAMILY_TO_DELETE_ID}

+ 1 - 0
distributor-node/src/@types/@elastic/esc-winston-format/index.d.ts

@@ -0,0 +1 @@
+declare module '@elastic/ecs-winston-format'

+ 1 - 0
distributor-node/src/@types/js-image-generator/index.d.ts

@@ -0,0 +1 @@
+declare module 'js-image-generator'

+ 216 - 0
distributor-node/src/api-spec/openapi.yml

@@ -0,0 +1,216 @@
+openapi: 3.0.3
+info:
+  title: Distributor node API
+  description: Distributor node API
+  contact:
+    email: info@joystream.org
+  license:
+    name: GPL-3.0-only
+    url: https://spdx.org/licenses/GPL-3.0-only.html
+  version: 0.1.0
+externalDocs:
+  description: Distributor node API
+  url: https://github.com/Joystream/joystream/issues/2224
+servers:
+  - url: http://localhost:3334/api/v1/
+
+tags:
+  - name: public
+    description: Public distributor node API
+
+paths:
+  /status:
+    get:
+      operationId: public.status
+      description: Returns json object describing current node status.
+      tags:
+        - public
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/StatusResponse'
+        500:
+          description: Unexpected server error
+  /buckets:
+    get:
+      operationId: public.buckets
+      description: Returns list of distributed buckets
+      tags:
+        - public
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/BucketsResponse'
+        500:
+          description: Unexpected server error
+  /asset/{objectId}:
+    head:
+      operationId: public.assetHead
+      description: Returns asset response headers (cache status, content type and/or length, accepted ranges etc.)
+      tags:
+        - public
+      parameters:
+        - $ref: '#/components/parameters/ObjectId'
+      responses:
+        200:
+          description: Object is supported and should be sent on a GET request.
+          headers:
+            X-Cache:
+              $ref: '#/components/headers/X-Cache'
+        421:
+          description: Misdirected request. Data object not supported by the node.
+        404:
+          description: Data object does not exist.
+        500:
+          description: Unexpected server error
+    get:
+      operationId: public.asset
+      description: Returns a media file.
+      tags:
+        - public
+      parameters:
+        - $ref: '#/components/parameters/ObjectId'
+      responses:
+        200:
+          description: Full available object data sent
+          headers:
+            X-Cache:
+              $ref: '#/components/headers/X-Cache'
+            X-Data-Source:
+              $ref: '#/components/headers/X-Data-Source'
+          content:
+            image/*:
+              schema:
+                type: string
+                format: binary
+            audio/*:
+              schema:
+                type: string
+                format: binary
+            video/*:
+              schema:
+                type: string
+                format: binary
+        206:
+          description: Requested partial object data sent
+          headers:
+            X-Cache:
+              $ref: '#/components/headers/X-Cache'
+            X-Data-Source:
+              $ref: '#/components/headers/X-Data-Source'
+          content:
+            image/*:
+              schema:
+                type: string
+                format: binary
+            audio/*:
+              schema:
+                type: string
+                format: binary
+            video/*:
+              schema:
+                type: string
+                format: binary
+        421:
+          description: Misdirected request. Data object not supported.
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/ErrorResponse'
+        404:
+          description: Data object does not exist.
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/ErrorResponse'
+        500:
+          description: Unexpected server error
+
+components:
+  parameters:
+    ObjectId:
+      name: objectId
+      required: true
+      in: path
+      description: Data Object ID
+      schema:
+        type: string
+        pattern: \d+
+  headers:
+    X-Cache:
+      description: Describes cache status of an object.
+        Hit - object is already fully fetched in distributor node's cache.
+        Pending - object is still being fetched from the storage node.
+        Miss - object is neither in cache nor currently being fetched. Fetching from storage node may be triggered.
+      schema:
+        type: string
+        enum: ['hit', 'pending', 'miss']
+    X-Data-Source:
+      description: Describes the source of data stream.
+        External - the request was proxied to a storage node.
+        Local - the data is streamed from local file.
+      schema:
+        type: string
+        enum: ['external', 'local']
+  schemas:
+    ErrorResponse:
+      type: object
+      required:
+        - message
+      properties:
+        type:
+          type: string
+        message:
+          type: string
+    StatusResponse:
+      type: object
+      required:
+        - id
+        - objectsInCache
+        - storageLimit
+        - storageUsed
+        - uptime
+        - downloadsInProgress
+      properties:
+        id:
+          type: string
+        objectsInCache:
+          type: integer
+          minimum: 0
+        storageLimit:
+          type: integer
+          minimum: 0
+        storageUsed:
+          type: integer
+          minimum: 0
+        uptime:
+          type: integer
+          minimum: 0
+        downloadsInProgress:
+          type: integer
+          minimum: 0
+    BucketsResponse:
+      oneOf:
+        - type: object
+          required:
+            - 'bucketIds'
+          properties:
+            bucketIds:
+              type: array
+              minItems: 1
+              items:
+                type: integer
+                minimum: 0
+        - type: object
+          required:
+            - 'allByWorkerId'
+          properties:
+            allByWorkerId:
+              type: integer
+              minimum: 0

+ 128 - 0
distributor-node/src/app/index.ts

@@ -0,0 +1,128 @@
+import { ReadonlyConfig } from '../types'
+import { NetworkingService } from '../services/networking'
+import { LoggingService } from '../services/logging'
+import { StateCacheService } from '../services/cache/StateCacheService'
+import { ContentService } from '../services/content/ContentService'
+import { ServerService } from '../services/server/ServerService'
+import { Logger } from 'winston'
+import fs from 'fs'
+import nodeCleanup from 'node-cleanup'
+
+export class App {
+  private config: ReadonlyConfig
+  private content: ContentService
+  private stateCache: StateCacheService
+  private networking: NetworkingService
+  private server: ServerService
+  private logging: LoggingService
+  private logger: Logger
+
+  constructor(config: ReadonlyConfig) {
+    this.config = config
+    this.logging = LoggingService.withAppConfig(config)
+    this.stateCache = new StateCacheService(config, this.logging)
+    this.content = new ContentService(config, this.logging, this.stateCache)
+    this.networking = new NetworkingService(config, this.stateCache, this.logging)
+    this.server = new ServerService(config, this.stateCache, this.content, this.logging, this.networking)
+    this.logger = this.logging.createLogger('App')
+  }
+
+  private checkConfigDirectories(): void {
+    Object.entries(this.config.directories).forEach(([name, path]) => {
+      const dirInfo = `${name} directory (${path})`
+      if (!fs.existsSync(path)) {
+        try {
+          fs.mkdirSync(path, { recursive: true })
+        } catch (e) {
+          throw new Error(`${dirInfo} doesn't exist and cannot be created!`)
+        }
+      }
+      try {
+        fs.accessSync(path, fs.constants.R_OK)
+      } catch (e) {
+        throw new Error(`${dirInfo} is not readable`)
+      }
+      try {
+        fs.accessSync(path, fs.constants.W_OK)
+      } catch (e) {
+        throw new Error(`${dirInfo} is not writable`)
+      }
+    })
+  }
+
+  public async start(): Promise<void> {
+    this.logger.info('Starting the app')
+    this.checkConfigDirectories()
+    this.stateCache.load()
+    const dataObjects = await this.networking.fetchSupportedDataObjects()
+    await this.content.startupInit(dataObjects)
+    this.server.start()
+    nodeCleanup(this.exitHandler.bind(this))
+  }
+
+  private async exitGracefully(): Promise<void> {
+    // Async exit handler - ideally should not take more than 10 sec
+    // We can try to wait until some pending downloads are finished here etc.
+    this.logger.info('Graceful exit initialized')
+
+    // Stop accepting any new requests and save cache
+    this.server.stop()
+    this.stateCache.clearInterval()
+    this.stateCache.saveSync()
+
+    // Try to process remaining downloads
+    const MAX_RETRY_ATTEMPTS = 3
+    let retryCounter = 0
+    while (retryCounter < MAX_RETRY_ATTEMPTS && this.stateCache.getPendingDownloadsCount()) {
+      const pendingDownloadsCount = this.stateCache.getPendingDownloadsCount()
+      this.logger.info(`${pendingDownloadsCount} pending downloads in progress... Retrying exit in 5 sec...`, {
+        retryCounter,
+        pendingDownloadsCount,
+      })
+      await new Promise((resolve) => setTimeout(resolve, 5000))
+      this.stateCache.saveSync()
+      ++retryCounter
+    }
+
+    if (this.stateCache.getPendingDownloadsCount()) {
+      this.logger.warn('Limit reached: Could not finish all pending downloads.', {
+        pendingDownloadsCount: this.stateCache.getPendingDownloadsCount(),
+      })
+    }
+
+    this.logger.info('Graceful exit finished')
+    await this.logging.end()
+  }
+
+  private exitCritically(): void {
+    this.logger.info('Critical exit initialized')
+    // Handling exits due to an error - only some critical, synchronous work can be done here
+    this.server.stop()
+    this.stateCache.clearInterval()
+    this.stateCache.saveSync()
+    this.logger.info('Critical exit finished')
+  }
+
+  private exitHandler(exitCode: number | null, signal: string | null): boolean | undefined {
+    this.logger.info('Exiting')
+    this.stateCache.clearInterval()
+    if (signal) {
+      // Async exit can be executed
+      this.exitGracefully()
+        .then(() => {
+          process.kill(process.pid, signal)
+        })
+        .catch((err) => {
+          this.logger.error('Graceful exit error', { err })
+          this.logging.end().finally(() => {
+            process.kill(process.pid, signal)
+          })
+        })
+      nodeCleanup.uninstall()
+      return false
+    } else {
+      // Only synchronous work can be done here
+      this.exitCritically()
+    }
+  }
+}

+ 6 - 0
distributor-node/src/command-base/ExitCodes.ts

@@ -0,0 +1,6 @@
+enum ExitCodes {
+  OK = 0,
+  Error = 1,
+  ApiError = 200,
+}
+export = ExitCodes

+ 65 - 0
distributor-node/src/command-base/accounts.ts

@@ -0,0 +1,65 @@
+import ApiCommandBase from './api'
+import { AccountId } from '@polkadot/types/interfaces'
+import { Keyring } from '@polkadot/api'
+import { KeyringInstance, KeyringOptions, KeyringPair } from '@polkadot/keyring/types'
+import { CLIError } from '@oclif/errors'
+
+export const DEFAULT_ACCOUNT_TYPE = 'sr25519'
+export const KEYRING_OPTIONS: KeyringOptions = {
+  type: DEFAULT_ACCOUNT_TYPE,
+}
+
+/**
+ * Abstract base class for account-related commands.
+ */
+export default abstract class AccountsCommandBase extends ApiCommandBase {
+  private keyring!: KeyringInstance
+
+  isKeyAvailable(key: AccountId | string): boolean {
+    return this.keyring.getPairs().some((p) => p.address === key.toString())
+  }
+
+  getPairs(includeDevAccounts = true): KeyringPair[] {
+    return this.keyring.getPairs().filter((p) => includeDevAccounts || !p.meta.isTesting)
+  }
+
+  getPair(key: string): KeyringPair {
+    const pair = this.keyring.getPair(key)
+    if (!pair) {
+      throw new CLIError(`Required key for account ${key} is not available`)
+    }
+    return pair
+  }
+
+  async getDecodedPair(key: string): Promise<KeyringPair> {
+    // Just for Joystream CLI compatibility currently
+    return this.getPair(key)
+  }
+
+  initKeyring(): void {
+    this.keyring = new Keyring(KEYRING_OPTIONS)
+    this.appConfig.keys.forEach((suri) => this.keyring.addFromUri(suri))
+  }
+
+  async getDistributorLeadKey(): Promise<string> {
+    const currentLead = await this.api.query.distributionWorkingGroup.currentLead()
+    if (!currentLead.isSome) {
+      throw new CLIError('There is no active distributor working group lead currently')
+    }
+    const worker = await this.api.query.distributionWorkingGroup.workerById(currentLead.unwrap())
+    return worker.role_account_id.toString()
+  }
+
+  async getDistributorWorkerRoleKey(workerId: number): Promise<string> {
+    const worker = await this.api.query.distributionWorkingGroup.workerById(workerId)
+    if (!worker) {
+      throw new CLIError(`Worker not found by id: ${workerId}!`)
+    }
+    return worker.role_account_id.toString()
+  }
+
+  async init(): Promise<void> {
+    await super.init()
+    await this.initKeyring()
+  }
+}

+ 40 - 0
distributor-node/src/command-base/api.ts

@@ -0,0 +1,40 @@
+import DefaultCommandBase from './default'
+import { CLIError } from '@oclif/errors'
+import { SubmittableResult } from '@polkadot/api'
+import { KeyringPair } from '@polkadot/keyring/types'
+import chalk from 'chalk'
+import { SubmittableExtrinsic } from '@polkadot/api/types'
+import { formatBalance } from '@polkadot/util'
+import { ExtrinsicFailedError, RuntimeApi } from '../services/networking/runtime/api'
+import ExitCodes from './ExitCodes'
+
+/**
+ * Abstract base class for commands that require access to the API.
+ */
+export default abstract class ApiCommandBase extends DefaultCommandBase {
+  protected api!: RuntimeApi
+
+  async init(): Promise<void> {
+    await super.init()
+    this.api = await RuntimeApi.create(this.logging, this.appConfig.endpoints.substrateNode)
+  }
+
+  async sendAndFollowTx(account: KeyringPair, tx: SubmittableExtrinsic<'promise'>): Promise<SubmittableResult> {
+    // Calculate fee and ask for confirmation
+    const fee = await this.api.estimateFee(account, tx)
+
+    await this.requireConfirmation(
+      `Tx fee of ${chalk.cyan(formatBalance(fee))} will be deduced from you account, do you confirm the transfer?`
+    )
+
+    try {
+      const res = await this.api.sendExtrinsic(account, tx)
+      return res
+    } catch (e) {
+      if (e instanceof ExtrinsicFailedError) {
+        throw new CLIError(`Extrinsic failed! ${e.message}`, { exit: ExitCodes.ApiError })
+      }
+      throw e
+    }
+  }
+}

+ 96 - 0
distributor-node/src/command-base/default.ts

@@ -0,0 +1,96 @@
+import Command, { flags as oclifFlags } from '@oclif/command'
+import inquirer from 'inquirer'
+import ExitCodes from './ExitCodes'
+import { ReadonlyConfig } from '../types/config'
+import { ConfigParserService } from '../services/parsers/ConfigParserService'
+import { LoggingService } from '../services/logging'
+import { Logger } from 'winston'
+import { BagIdParserService } from '../services/parsers/BagIdParserService'
+
+export const flags = {
+  ...oclifFlags,
+  integerArr: oclifFlags.build({
+    parse: (value: string) => {
+      const arr: number[] = value.split(',').map((v) => {
+        if (!/^-?\d+$/.test(v)) {
+          throw new Error(`Expected comma-separated integers, but received: ${value}`)
+        }
+        return parseInt(v)
+      })
+      return arr
+    },
+  }),
+  bagId: oclifFlags.build({
+    parse: (value: string) => {
+      const parser = new BagIdParserService()
+      return parser.parseBagId(value)
+    },
+    description: `Bag ID. Format: {bag_type}:{sub_type}:{id}.
+    - Bag types: 'static', 'dynamic'
+    - Sub types: 'static:council', 'static:wg', 'dynamic:member', 'dynamic:channel'
+    - Id:
+      - absent for 'static:council'
+      - working group name for 'static:wg'
+      - integer for 'dynamic:member' and 'dynamic:channel'
+    Examples:
+    - static:council
+    - static:wg:storage
+    - dynamic:member:4`,
+  }),
+}
+export default abstract class DefaultCommandBase extends Command {
+  protected appConfig!: ReadonlyConfig
+  protected logging!: LoggingService
+  protected autoConfirm!: boolean
+  private logger!: Logger
+
+  static flags = {
+    yes: flags.boolean({
+      required: false,
+      default: false,
+      description: 'Answer "yes" to any prompt, skipping any manual confirmations',
+      char: 'y',
+    }),
+    configPath: flags.string({
+      required: false,
+      default: process.env.CONFIG_PATH || './config.yml',
+      description: 'Path to config JSON/YAML file (relative to current working directory)',
+      char: 'c',
+    }),
+  }
+
+  async init(): Promise<void> {
+    const { configPath, yes } = this.parse(this.constructor as typeof DefaultCommandBase).flags
+    const configParser = new ConfigParserService()
+    this.appConfig = configParser.loadConfing(configPath) as ReadonlyConfig
+    this.logging = LoggingService.withCLIConfig()
+    this.logger = this.logging.createLogger('CLI')
+    this.autoConfirm = !!(process.env.AUTO_CONFIRM === 'true' || parseInt(process.env.AUTO_CONFIRM || '') || yes)
+  }
+
+  public log(message: string, ...meta: unknown[]): void {
+    this.logger.info(message, ...meta)
+  }
+
+  public output(value: unknown): void {
+    console.log(value)
+  }
+
+  async requireConfirmation(
+    message = 'Are you sure you want to execute this action?',
+    defaultVal = false
+  ): Promise<void> {
+    if (this.autoConfirm) {
+      return
+    }
+    const { confirmed } = await inquirer.prompt([{ type: 'confirm', name: 'confirmed', message, default: defaultVal }])
+    if (!confirmed) {
+      this.exit(ExitCodes.OK)
+    }
+  }
+
+  async finally(err: any): Promise<void> {
+    if (!err) this.exit(ExitCodes.OK)
+    super.finally(err)
+  }
+}

+ 135 - 0
distributor-node/src/commands/dev/batchUpload.ts

@@ -0,0 +1,135 @@
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase, { flags } from '../../command-base/default'
+import { hash } from 'blake3'
+import { PublicApi, Configuration, TokenRequest } from '../../services/networking/storage-node/generated'
+import { u8aToHex } from '@polkadot/util'
+import * as multihash from 'multihashes'
+import FormData from 'form-data'
+import imgGen from 'js-image-generator'
+import { SubmittableExtrinsic } from '@polkadot/api/types'
+import { BagIdParserService } from '../../services/parsers/BagIdParserService'
+import axios from 'axios'
+
+async function generateRandomImage(): Promise<Buffer> {
+  return new Promise((resolve, reject) => {
+    imgGen.generateImage(10, 10, 80, function (err: any, image: any) {
+      if (err) {
+        reject(err)
+      } else {
+        resolve(image.data)
+      }
+    })
+  })
+}
+
+export default class DevBatchUpload extends AccountsCommandBase {
+  static flags = {
+    ...DefaultCommandBase.flags,
+    bagId: flags.string({
+      char: 'b',
+      required: true,
+    }),
+    bucketId: flags.integer({
+      char: 'B',
+      description: 'Distribution bucket id',
+      required: true,
+    }),
+    batchSize: flags.integer({
+      char: 'S',
+      required: true,
+    }),
+    batchesCount: flags.integer({
+      char: 'C',
+      required: true,
+    }),
+  }
+
+  async run(): Promise<void> {
+    const { api } = this
+    const { bagId, bucketId, batchSize, batchesCount } = this.parse(DevBatchUpload).flags
+    const sudoKey = (await api.query.sudo.key()).toHuman()
+    const dataFee = await api.query.storage.dataObjectPerMegabyteFee()
+    const storageApi = new PublicApi(
+      new Configuration({
+        basePath: 'http://127.0.0.1:3333/api/v1',
+        formDataCtor: FormData,
+      })
+    )
+
+    for (let i = 0; i < batchesCount; ++i) {
+      const nextObjectId = (await api.query.storage.nextDataObjectId()).toNumber()
+      // Generate batch
+      const batch: [SubmittableExtrinsic<'promise'>, Buffer][] = []
+      for (let j = 0; j < batchSize; ++j) {
+        const dataObject = await generateRandomImage()
+        const dataHash = multihash.toB58String(multihash.encode(hash(dataObject) as Buffer, 'blake3'))
+        batch.push([
+          api.tx.sudo.sudo(
+            api.tx.storage.sudoUploadDataObjects({
+              deletionPrizeSourceAccountId: sudoKey,
+              objectCreationList: [
+                {
+                  Size: dataObject.byteLength,
+                  IpfsContentId: dataHash,
+                },
+              ],
+              expectedDataSizeFee: dataFee,
+              bagId: new BagIdParserService().parseBagId(bagId),
+            })
+          ),
+          dataObject,
+        ])
+      }
+      // Send batch
+      await this.sendAndFollowTx(this.getPair(sudoKey), api.tx.utility.batch(batch.map(([tx]) => tx)))
+
+      // Send storage node uploads
+      await Promise.all(
+        batch.map(async ([, dataObject], k) => {
+          const dataObjectId = nextObjectId + k
+          const data: TokenRequest['data'] = {
+            accountId: sudoKey,
+            bagId,
+            dataObjectId,
+            memberId: 0,
+            storageBucketId: bucketId,
+          }
+          const message = JSON.stringify(data)
+          const signature = u8aToHex(this.getPair(sudoKey).sign(message))
+          const {
+            data: { token },
+          } = await storageApi.publicApiAuthTokenForUploading({
+            data,
+            signature,
+          })
+          if (!token) {
+            throw new Error('Recieved empty token!')
+          }
+
+          const formData = new FormData()
+          formData.append('dataObjectId', dataObjectId.toString())
+          formData.append('storageBucketId', bucketId.toString())
+          formData.append('bagId', bagId)
+          formData.append('file', dataObject, { filename: 'test.jpg', knownLength: dataObject.byteLength })
+          this.log(`Uploading object ${dataObjectId}`)
+          try {
+            await axios({
+              method: 'POST',
+              url: 'http://127.0.0.1:3333/api/v1/files',
+              data: formData,
+              headers: {
+                'x-api-key': token,
+                'content-type': 'multipart/form-data',
+                ...formData.getHeaders(),
+              },
+            })
+          } catch (e) {
+            if (axios.isAxiosError(e)) {
+              console.log(e.response?.data)
+            }
+          }
+        })
+      )
+    }
+  }
+}

+ 93 - 0
distributor-node/src/commands/dev/init.ts

@@ -0,0 +1,93 @@
+import { MemberId } from '@joystream/types/members'
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase from '../../command-base/default'
+
+const ALICE = '5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY'
+
+export default class DevInit extends AccountsCommandBase {
+  static description = 'Initialize development environment. Sets Alice as distributor working group leader.'
+
+  static flags = {
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const { api } = this
+
+    if (!api.isDevelopment) {
+      this.error('Not connected to dev chain!')
+    }
+
+    const SudoKeyPair = this.getPair(ALICE)
+    const LeadKeyPair = this.getPair(ALICE)
+
+    // Create membership if not already created
+    const members = await api.query.members.memberIdsByControllerAccountId(LeadKeyPair.address)
+
+    let memberId: MemberId | undefined = members.toArray()[0]
+
+    if (memberId === undefined) {
+      const res = await this.api.sendExtrinsic(LeadKeyPair, api.tx.members.buyMembership(0, 'alice', null, null))
+      memberId = this.api.getEvent(res, 'members', 'MemberRegistered').data[0]
+    }
+
+    // Create a new lead opening.
+    const currentLead = await api.query.distributionWorkingGroup.currentLead()
+    if (currentLead.isSome) {
+      this.log('Distributor lead already exists, skipping...')
+      return
+    }
+
+    this.log(`Making member id: ${memberId} the distribution lead.`)
+
+    // Create curator lead opening
+    const addOpeningRes = await this.api.sendExtrinsic(
+      SudoKeyPair,
+      this.api.sudo(
+        api.tx.distributionWorkingGroup.addOpening(
+          { CurrentBlock: null },
+          { max_review_period_length: 9999 },
+          'dev distributor lead opening',
+          'Leader'
+        )
+      )
+    )
+
+    const openingAddedEvent = this.api.getEvent(addOpeningRes, 'distributionWorkingGroup', 'OpeningAdded')
+    const openingId = openingAddedEvent.data[0]
+
+    // Apply to lead opening
+    const applyRes = await this.api.sendExtrinsic(
+      LeadKeyPair,
+      this.api.tx.distributionWorkingGroup.applyOnOpening(
+        memberId, // member id
+        openingId, // opening id
+        LeadKeyPair.address, // address
+        null, // opt role stake
+        null, // opt appl. stake
+        'dev distributor lead application' // human_readable_text
+      )
+    )
+
+    const appliedEvent = this.api.getEvent(applyRes, 'distributionWorkingGroup', 'AppliedOnOpening')
+    const applicationId = appliedEvent.data[1]
+
+    // Begin review period
+    await this.api.sendExtrinsic(
+      SudoKeyPair,
+      this.api.sudo(this.api.tx.distributionWorkingGroup.beginApplicantReview(openingId))
+    )
+
+    // Fill opening
+    await this.api.sendExtrinsic(
+      SudoKeyPair,
+      this.api.sudo(
+        this.api.tx.distributionWorkingGroup.fillOpening(
+          openingId,
+          api.createType('ApplicationIdSet', [applicationId]),
+          null
+        )
+      )
+    )
+  }
+}

+ 38 - 0
distributor-node/src/commands/leader/cancel-invitation.ts

@@ -0,0 +1,38 @@
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase, { flags } from '../../command-base/default'
+
+export default class LeaderCancelInvitation extends AccountsCommandBase {
+  static description = `Cancel pending distribution bucket operator invitation.
+  Requires distribution working group leader permissions.`
+
+  static flags = {
+    bucketId: flags.integer({
+      char: 'B',
+      description: 'Distribution bucket id',
+      required: true,
+    }),
+    familyId: flags.integer({
+      char: 'f',
+      description: 'Distribution bucket family id',
+      required: true,
+    }),
+    workerId: flags.integer({
+      char: 'w',
+      description: 'ID of the invited operator (distribution group worker)',
+      required: true,
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const { bucketId, familyId, workerId } = this.parse(LeaderCancelInvitation).flags
+    const leadKey = await this.getDistributorLeadKey()
+
+    this.log(`Canceling distribution bucket operator invitation (bucket: ${bucketId}, worker: ${workerId})...`)
+    await this.sendAndFollowTx(
+      await this.getDecodedPair(leadKey),
+      this.api.tx.storage.cancelDistributionBucketOperatorInvite(familyId, bucketId, workerId)
+    )
+    this.log('Invitation succesfully canceled!')
+  }
+}

+ 25 - 0
distributor-node/src/commands/leader/create-bucket-family.ts

@@ -0,0 +1,25 @@
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase from '../../command-base/default'
+
+export default class LeaderCreateBucketFamily extends AccountsCommandBase {
+  static description = `Create new distribution bucket family. Requires distribution working group leader permissions.`
+
+  static flags = {
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const leadKey = await this.getDistributorLeadKey()
+
+    this.log('Creating new distribution bucket family...')
+    const result = await this.sendAndFollowTx(
+      await this.getDecodedPair(leadKey),
+      this.api.tx.storage.createDistributionBucketFamily()
+    )
+    const event = this.api.getEvent(result, 'storage', 'DistributionBucketFamilyCreated')
+
+    this.log('Bucket family succesfully created!')
+    const bucketFamilyId = event.data[0]
+    this.output(bucketFamilyId.toString())
+  }
+}

+ 38 - 0
distributor-node/src/commands/leader/create-bucket.ts

@@ -0,0 +1,38 @@
+import { flags } from '@oclif/command'
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase from '../../command-base/default'
+
+/**
+ * CLI command: creates a new distribution bucket inside a given family and prints its id.
+ * Requires the distribution working group leader role key.
+ */
+export default class LeaderCreateBucket extends AccountsCommandBase {
+  static description = `Create new distribution bucket. Requires distribution working group leader permissions.`
+
+  static flags = {
+    familyId: flags.integer({
+      char: 'f',
+      description: 'Distribution bucket family id',
+      required: true,
+    }),
+    acceptingBags: flags.enum({
+      char: 'a',
+      description: 'Whether the created bucket should accept new bags',
+      options: ['yes', 'no'],
+      default: 'no',
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const { familyId, acceptingBags } = this.parse(LeaderCreateBucket).flags
+    const leadKey = await this.getDistributorLeadKey()
+
+    this.log('Creating new distribution bucket...')
+    const result = await this.sendAndFollowTx(
+      await this.getDecodedPair(leadKey),
+      this.api.tx.storage.createDistributionBucket(familyId, acceptingBags === 'yes')
+    )
+    // The id of the newly created bucket is the first field of the emitted event
+    const event = this.api.getEvent(result, 'storage', 'DistributionBucketCreated')
+
+    this.log('Bucket successfully created!')
+    const bucketId = event.data[0]
+    this.output(bucketId.toString())
+  }
+}

+ 28 - 0
distributor-node/src/commands/leader/delete-bucket-family.ts

@@ -0,0 +1,28 @@
+import { flags } from '@oclif/command'
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase from '../../command-base/default'
+
+/**
+ * CLI command: deletes a distribution bucket family on-chain.
+ * Requires the distribution working group leader role key.
+ */
+export default class LeaderDeleteBucketFamily extends AccountsCommandBase {
+  static description = `Delete distribution bucket family. Requires distribution working group leader permissions.`
+
+  static flags = {
+    familyId: flags.integer({
+      char: 'f',
+      description: 'Distribution bucket family id',
+      required: true,
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const { familyId } = this.parse(LeaderDeleteBucketFamily).flags
+    const leadKey = await this.getDistributorLeadKey()
+
+    this.log(`Deleting distribution bucket family (${familyId})...`)
+    await this.sendAndFollowTx(
+      await this.getDecodedPair(leadKey),
+      this.api.tx.storage.deleteDistributionBucketFamily(familyId)
+    )
+    this.log('Bucket family successfully deleted!')
+  }
+}

+ 33 - 0
distributor-node/src/commands/leader/delete-bucket.ts

@@ -0,0 +1,33 @@
+import { flags } from '@oclif/command'
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase from '../../command-base/default'
+
+/**
+ * CLI command: deletes a distribution bucket (which must have no operators).
+ * Requires the distribution working group leader role key.
+ */
+export default class LeaderDeleteBucket extends AccountsCommandBase {
+  static description = `Delete distribution bucket. The bucket must have no operators. Requires distribution working group leader permissions.`
+
+  static flags = {
+    bucketId: flags.integer({
+      char: 'B',
+      description: 'Distribution bucket id',
+      required: true,
+    }),
+    familyId: flags.integer({
+      char: 'f',
+      description: 'Distribution bucket family id',
+      required: true,
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const { bucketId, familyId } = this.parse(LeaderDeleteBucket).flags
+    const leadKey = await this.getDistributorLeadKey()
+
+    this.log(`Deleting distribution bucket (${bucketId})...`)
+    await this.sendAndFollowTx(
+      await this.getDecodedPair(leadKey),
+      this.api.tx.storage.deleteDistributionBucket(familyId, bucketId)
+    )
+    this.log('Bucket successfully deleted!')
+  }
+}

+ 39 - 0
distributor-node/src/commands/leader/invite-bucket-operator.ts

@@ -0,0 +1,39 @@
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase, { flags } from '../../command-base/default'
+
+/**
+ * CLI command: invites a distribution working group worker to operate a bucket.
+ * Requires the distribution working group leader role key.
+ */
+export default class LeaderInviteBucketOperator extends AccountsCommandBase {
+  static description = `Invite distribution bucket operator (distribution group worker).
+  The specified bucket must not have any operator currently.
+  Requires distribution working group leader permissions.`
+
+  static flags = {
+    bucketId: flags.integer({
+      char: 'B',
+      description: 'Distribution bucket id',
+      required: true,
+    }),
+    familyId: flags.integer({
+      char: 'f',
+      description: 'Distribution bucket family id',
+      required: true,
+    }),
+    workerId: flags.integer({
+      char: 'w',
+      description: 'ID of the distribution group worker to invite as bucket operator',
+      required: true,
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const { bucketId, familyId, workerId } = this.parse(LeaderInviteBucketOperator).flags
+    const leadKey = await this.getDistributorLeadKey()
+
+    this.log(`Inviting distribution bucket operator (bucket: ${bucketId}, worker: ${workerId})...`)
+    await this.sendAndFollowTx(
+      await this.getDecodedPair(leadKey),
+      this.api.tx.storage.inviteDistributionBucketOperator(familyId, bucketId, workerId)
+    )
+    this.log('Bucket operator successfully invited!')
+  }
+}

+ 38 - 0
distributor-node/src/commands/leader/remove-bucket-operator.ts

@@ -0,0 +1,38 @@
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase, { flags } from '../../command-base/default'
+
+/**
+ * CLI command: removes an operator (worker) from a distribution bucket.
+ * Requires the distribution working group leader role key.
+ */
+export default class LeaderRemoveBucketOperator extends AccountsCommandBase {
+  static description = `Remove distribution bucket operator (distribution group worker).
+  Requires distribution working group leader permissions.`
+
+  static flags = {
+    bucketId: flags.integer({
+      char: 'B',
+      description: 'Distribution bucket id',
+      required: true,
+    }),
+    familyId: flags.integer({
+      char: 'f',
+      description: 'Distribution bucket family id',
+      required: true,
+    }),
+    workerId: flags.integer({
+      char: 'w',
+      description: 'ID of the operator (distribution working group worker) to remove from the bucket',
+      required: true,
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const { bucketId, familyId, workerId } = this.parse(LeaderRemoveBucketOperator).flags
+    const leadKey = await this.getDistributorLeadKey()
+
+    this.log(`Removing distribution bucket operator (bucket: ${bucketId}, worker: ${workerId})...`)
+    await this.sendAndFollowTx(
+      await this.getDecodedPair(leadKey),
+      this.api.tx.storage.removeDistributionBucketOperator(familyId, bucketId, workerId)
+    )
+    this.log('Bucket operator successfully removed!')
+  }
+}

+ 45 - 0
distributor-node/src/commands/leader/set-bucket-family-metadata.ts

@@ -0,0 +1,45 @@
+import fs from 'fs'
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase, { flags } from '../../command-base/default'
+import { ValidationService } from '../../services/validation/ValidationService'
+import { DistributionBucketFamilyMetadata, IDistributionBucketFamilyMetadata } from '@joystream/metadata-protobuf'
+
+/**
+ * CLI command: sets/updates distribution bucket family metadata from a JSON file.
+ * The file is schema-validated, protobuf-encoded and submitted as a hex string.
+ * Requires the distribution working group leader role key.
+ */
+export default class LeaderSetBucketFamilyMetadata extends AccountsCommandBase {
+  static description = `Set/update distribution bucket family metadata.
+  Requires distribution working group leader permissions.`
+
+  static flags = {
+    familyId: flags.integer({
+      char: 'f',
+      description: 'Distribution bucket family id',
+      required: true,
+    }),
+    input: flags.string({
+      char: 'i',
+      description: 'Path to JSON metadata file',
+      required: true,
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const { familyId, input } = this.parse(LeaderSetBucketFamilyMetadata).flags
+    const leadKey = await this.getDistributorLeadKey()
+
+    // Validate the user-supplied JSON against the FamilyMetadata schema before encoding
+    const validation = new ValidationService()
+    const metadata: IDistributionBucketFamilyMetadata = validation.validate(
+      'FamilyMetadata',
+      JSON.parse(fs.readFileSync(input).toString())
+    )
+
+    this.log(`Setting bucket family metadata (family: ${familyId})`, metadata)
+    await this.sendAndFollowTx(
+      await this.getDecodedPair(leadKey),
+      this.api.tx.storage.setDistributionBucketFamilyMetadata(
+        familyId,
+        // Protobuf-encode the metadata and pass it as 0x-prefixed hex
+        '0x' + Buffer.from(DistributionBucketFamilyMetadata.encode(metadata).finish()).toString('hex')
+      )
+    )
+    this.log('Bucket family metadata successfully set/updated!')
+  }
+}

+ 28 - 0
distributor-node/src/commands/leader/set-buckets-per-bag-limit.ts

@@ -0,0 +1,28 @@
+import { flags } from '@oclif/command'
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase from '../../command-base/default'
+
+/**
+ * CLI command: updates the max. number of distribution buckets per bag.
+ * Requires the distribution working group leader role key.
+ */
+export default class LeaderSetBucketsPerBagLimit extends AccountsCommandBase {
+  static description = `Set max. distribution buckets per bag limit. Requires distribution working group leader permissions.`
+
+  static flags = {
+    limit: flags.integer({
+      char: 'l',
+      description: 'New limit value',
+      required: true,
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const { limit } = this.parse(LeaderSetBucketsPerBagLimit).flags
+    const leadKey = await this.getDistributorLeadKey()
+
+    this.log(`Setting new buckets per bag limit (${limit})...`)
+    await this.sendAndFollowTx(
+      await this.getDecodedPair(leadKey),
+      this.api.tx.storage.updateDistributionBucketsPerBagLimit(limit)
+    )
+    this.log('Limit successfully updated!')
+  }
+}

+ 54 - 0
distributor-node/src/commands/leader/update-bag.ts

@@ -0,0 +1,54 @@
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase, { flags } from '../../command-base/default'
+
+/**
+ * CLI command: adds and/or removes distribution buckets (of one family) from a bag.
+ * Requires the distribution working group leader role key.
+ */
+export default class LeaderUpdateBag extends AccountsCommandBase {
+  static description = 'Add/remove distribution buckets from a bag.'
+
+  static flags = {
+    bagId: flags.bagId({
+      char: 'b',
+      required: true,
+    }),
+    familyId: flags.integer({
+      char: 'f',
+      description: 'ID of the distribution bucket family',
+      required: true,
+    }),
+    add: flags.integerArr({
+      char: 'a',
+      description: 'ID of a bucket to add to bag',
+      default: [],
+      multiple: true,
+    }),
+    remove: flags.integerArr({
+      char: 'r',
+      description: 'ID of a bucket to remove from bag',
+      default: [],
+      multiple: true,
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  static examples = [`$ joystream-distributor leader:update-bag -b 1 -f 1 -a 1 -a 2 -a 3 -r 4 -r 5`]
+
+  async run(): Promise<void> {
+    const { bagId, familyId, add, remove } = this.parse(LeaderUpdateBag).flags
+    const leadKey = await this.getDistributorLeadKey()
+
+    this.log(
+      // Fixed: was `add.join(',' || 'NONE')` - the `||` was inside the join()
+      // argument, so 'NONE' was never displayed for an empty add-list
+      `Updating distribution buckets for bag ${bagId} (adding: ${add.join(',') || 'NONE'}, removing: ${
+        remove.join(',') || 'NONE'
+      })...`
+    )
+    await this.sendAndFollowTx(
+      await this.getDecodedPair(leadKey),
+      this.api.tx.storage.updateDistributionBucketsForBag(
+        bagId,
+        familyId,
+        this.api.createType('DistributionBucketIdSet', add),
+        this.api.createType('DistributionBucketIdSet', remove)
+      )
+    )
+    this.log('Bag successfully updated!')
+  }
+}

+ 38 - 0
distributor-node/src/commands/leader/update-bucket-mode.ts

@@ -0,0 +1,38 @@
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase, { flags } from '../../command-base/default'
+
+/**
+ * CLI command: toggles a distribution bucket's "distributing" flag on/off.
+ * Requires the distribution working group leader role key.
+ */
+export default class LeaderUpdateBucketMode extends AccountsCommandBase {
+  static description = `Update distribution bucket mode ("distributing" flag). Requires distribution working group leader permissions.`
+
+  static flags = {
+    bucketId: flags.integer({
+      char: 'B',
+      description: 'Distribution bucket id',
+      required: true,
+    }),
+    familyId: flags.integer({
+      char: 'f',
+      description: 'Distribution bucket family id',
+      required: true,
+    }),
+    mode: flags.enum<'on' | 'off'>({
+      char: 'd',
+      description: 'Whether the bucket should be "on" (distributing) or "off" (not distributing)',
+      required: true,
+      options: ['on', 'off'],
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const { bucketId, familyId, mode } = this.parse(LeaderUpdateBucketMode).flags
+    const leadKey = await this.getDistributorLeadKey()
+
+    this.log(`Updating distribution bucket mode (${bucketId}, distributing: ${mode})...`)
+    await this.sendAndFollowTx(
+      await this.getDecodedPair(leadKey),
+      this.api.tx.storage.updateDistributionBucketMode(familyId, bucketId, mode === 'on')
+    )
+    this.log('Bucket mode successfully updated!')
+  }
+}

+ 39 - 0
distributor-node/src/commands/leader/update-bucket-status.ts

@@ -0,0 +1,39 @@
+import { flags } from '@oclif/command'
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase from '../../command-base/default'
+
+/**
+ * CLI command: toggles a distribution bucket's "acceptingNewBags" flag.
+ * Requires the distribution working group leader role key.
+ */
+export default class LeaderUpdateBucketStatus extends AccountsCommandBase {
+  static description = `Update distribution bucket status ("acceptingNewBags" flag). Requires distribution working group leader permissions.`
+
+  static flags = {
+    bucketId: flags.integer({
+      char: 'B',
+      description: 'Distribution bucket id',
+      required: true,
+    }),
+    familyId: flags.integer({
+      char: 'f',
+      description: 'Distribution bucket family id',
+      required: true,
+    }),
+    acceptingBags: flags.enum<'yes' | 'no'>({
+      char: 'a',
+      description: 'Whether the bucket should accept new bags',
+      options: ['yes', 'no'],
+      required: true,
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const { bucketId, familyId, acceptingBags } = this.parse(LeaderUpdateBucketStatus).flags
+    const leadKey = await this.getDistributorLeadKey()
+
+    this.log(`Updating distribution bucket status (${bucketId}, acceptingNewBags: ${acceptingBags})...`)
+    await this.sendAndFollowTx(
+      await this.getDecodedPair(leadKey),
+      this.api.tx.storage.updateDistributionBucketStatus(familyId, bucketId, acceptingBags === 'yes')
+    )
+    this.log('Bucket status successfully updated!')
+  }
+}

+ 53 - 0
distributor-node/src/commands/leader/update-dynamic-bag-policy.ts

@@ -0,0 +1,53 @@
+import { flags } from '@oclif/command'
+import { DynamicBagTypeKey } from '@joystream/types/storage'
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase from '../../command-base/default'
+
+/**
+ * CLI command: updates the dynamic bag creation policy - for a given dynamic bag
+ * type, how many buckets of each family should store new bags of that type.
+ * Requires the distribution working group leader role key.
+ */
+export default class LeaderUpdateDynamicBagPolicy extends AccountsCommandBase {
+  static description = `Update dynamic bag creation policy (number of buckets by family that should store given dynamic bag type).
+    Requires distribution working group leader permissions.`
+
+  static flags = {
+    type: flags.enum<DynamicBagTypeKey>({
+      char: 't',
+      description: 'Dynamic bag type',
+      options: ['Member', 'Channel'],
+      required: true,
+    }),
+    // Custom flag parsing "{familyId}:{numberOfBuckets}" into a [number, number] tuple
+    policy: flags.build({
+      parse: (v) => {
+        const pair = v.split(':')
+        if (pair.length !== 2 || !/^\d+$/.test(pair[0]) || !/^\d+$/.test(pair[1])) {
+          throw new Error(`Expected {familyId}:{numberOfBuckets} pair, got: ${v}`)
+        }
+        return [parseInt(pair[0]), parseInt(pair[1])] as [number, number]
+      },
+    })({
+      char: 'p',
+      description: 'Key-value pair of {familyId}:{numberOfBuckets}',
+      multiple: true,
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  static examples = [`$ joystream-distributor leader:update-dynamic-bag-policy -t Member -p 1:5 -p 2:10 -p 3:5`]
+
+  async run(): Promise<void> {
+    const { type, policy } = this.parse(LeaderUpdateDynamicBagPolicy).flags
+    const leadKey = await this.getDistributorLeadKey()
+
+    this.log(`Updating dynamic bag policy (${type})...`)
+    await this.sendAndFollowTx(
+      await this.getDecodedPair(leadKey),
+      this.api.tx.storage.updateFamiliesInDynamicBagCreationPolicy(
+        type,
+        // FIXME: https://github.com/polkadot-js/api/pull/3789
+        // Pairs are sorted by familyId before constructing the map type
+        // (NOTE(review): presumably required for canonical BTreeMap encoding - confirm)
+        this.api.createType(
+          'DynamicBagCreationPolicyDistributorFamiliesMap',
+          new Map((policy || []).sort(([keyA], [keyB]) => keyA - keyB))
+        )
+      )
+    )
+    this.log('Dynamic bag creation policy successfully updated!')
+  }
+}

+ 38 - 0
distributor-node/src/commands/operator/accept-invitation.ts

@@ -0,0 +1,38 @@
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase, { flags } from '../../command-base/default'
+
+/**
+ * CLI command: accepts a pending bucket operator invitation on behalf of the
+ * invited worker. Requires the invited worker's role key.
+ */
+export default class OperatorAcceptInvitation extends AccountsCommandBase {
+  static description = `Accept pending distribution bucket operator invitation.
+  Requires the invited distribution group worker role key.`
+
+  static flags = {
+    bucketId: flags.integer({
+      char: 'B',
+      description: 'Distribution bucket id',
+      required: true,
+    }),
+    familyId: flags.integer({
+      char: 'f',
+      description: 'Distribution bucket family id',
+      required: true,
+    }),
+    workerId: flags.integer({
+      char: 'w',
+      description: 'ID of the invited operator (distribution group worker)',
+      required: true,
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const { bucketId, familyId, workerId } = this.parse(OperatorAcceptInvitation).flags
+    // Signed with the worker's role key, not the leader's
+    const workerKey = await this.getDistributorWorkerRoleKey(workerId)
+
+    this.log(`Accepting distribution bucket operator invitation (bucket: ${bucketId}, worker: ${workerId})...`)
+    await this.sendAndFollowTx(
+      await this.getDecodedPair(workerKey),
+      this.api.tx.storage.acceptDistributionBucketInvitation(workerId, familyId, bucketId)
+    )
+    this.log('Invitation successfully accepted!')
+  }
+}

+ 61 - 0
distributor-node/src/commands/operator/set-metadata.ts

@@ -0,0 +1,61 @@
+import fs from 'fs'
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase, { flags } from '../../command-base/default'
+import { ValidationService } from '../../services/validation/ValidationService'
+import { DistributionBucketOperatorMetadata, IDistributionBucketOperatorMetadata } from '@joystream/metadata-protobuf'
+
+/**
+ * CLI command: sets/updates a bucket operator's metadata, either from a JSON
+ * file (schema-validated) or from a single --endpoint value.
+ * Requires the operator worker's role key.
+ */
+export default class OperatorSetMetadata extends AccountsCommandBase {
+  static description = `Set/update distribution bucket operator metadata.
+  Requires active distribution bucket operator worker role key.`
+
+  static flags = {
+    bucketId: flags.integer({
+      char: 'B',
+      description: 'Distribution bucket id',
+      required: true,
+    }),
+    familyId: flags.integer({
+      char: 'f',
+      description: 'Distribution bucket family id',
+      required: true,
+    }),
+    workerId: flags.integer({
+      char: 'w',
+      description: 'ID of the operator (distribution group worker)',
+      required: true,
+    }),
+    // --endpoint and --input are mutually exclusive
+    endpoint: flags.string({
+      char: 'e',
+      description: 'Root distribution node endpoint',
+      exclusive: ['input'],
+    }),
+    input: flags.string({
+      char: 'i',
+      description: 'Path to JSON metadata file',
+      exclusive: ['endpoint'],
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const { bucketId, familyId, workerId, input, endpoint } = this.parse(OperatorSetMetadata).flags
+    const workerKey = await this.getDistributorWorkerRoleKey(workerId)
+
+    const validation = new ValidationService()
+    // NOTE(review): if neither --input nor --endpoint is provided, this submits
+    // metadata with an undefined endpoint - confirm that is intended
+    const metadata: IDistributionBucketOperatorMetadata = input
+      ? validation.validate('OperatorMetadata', JSON.parse(fs.readFileSync(input).toString()))
+      : { endpoint }
+
+    this.log(`Setting bucket operator metadata (bucket: ${bucketId}, worker: ${workerId})...`, metadata)
+    await this.sendAndFollowTx(
+      await this.getDecodedPair(workerKey),
+      this.api.tx.storage.setDistributionOperatorMetadata(
+        workerId,
+        familyId,
+        bucketId,
+        // Protobuf-encode the metadata and pass it as 0x-prefixed hex
+        '0x' + Buffer.from(DistributionBucketOperatorMetadata.encode(metadata).finish()).toString('hex')
+      )
+    )
+    this.log('Bucket operator metadata successfully set/updated!')
+  }
+}

+ 19 - 0
distributor-node/src/commands/start.ts

@@ -0,0 +1,19 @@
+import DefaultCommandBase from '../command-base/default'
+import { App } from '../app'
+
+/**
+ * CLI command: starts the distributor node service using the loaded app config.
+ */
+export default class StartNode extends DefaultCommandBase {
+  static description = 'Start the node'
+
+  static flags = {
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const app = new App(this.appConfig)
+    // NOTE(review): start() result is not awaited - presumably it runs the
+    // service for the remaining process lifetime; confirm errors are handled internally
+    app.start()
+  }
+
+  // Intentionally empty: overrides the base finally() so whatever teardown it
+  // performs (NOTE(review): not visible here) doesn't stop the running node
+  async finally(): Promise<void> {
+    /* Do nothing */
+  }
+}

+ 1 - 0
distributor-node/src/index.ts

@@ -0,0 +1 @@
+export { run } from '@oclif/command'

+ 305 - 0
distributor-node/src/services/cache/StateCacheService.ts

@@ -0,0 +1,305 @@
+import { Logger } from 'winston'
+import { ReadonlyConfig, StorageNodeDownloadResponse } from '../../types'
+import { LoggingService } from '../logging'
+import _ from 'lodash'
+import fs from 'fs'
+
+// LRU-SP cache parameters
+// Since size is in KB, these parameters should be enough for grouping objects of size up to 2^24 KB = 16 GB
+// TODO: Introduce MAX_CACHED_ITEM_SIZE and skip caching for large objects entirely? (ie. 10 GB objects)
+export const CACHE_GROUP_LOG_BASE = 2
+export const CACHE_GROUPS_COUNT = 24
+
+// Lifecycle stages of a download that has been requested but not yet finished
+type PendingDownloadStatus = 'Waiting' | 'LookingForSource' | 'Downloading'
+
+// State of a single in-flight object download
+export interface PendingDownloadData {
+  // Size of the object (presumably bytes - confirm with callers of newPendingDownload)
+  objectSize: number
+  // Current stage of the download
+  status: PendingDownloadStatus
+  // Resolves with the storage node's download response
+  promise: Promise<StorageNodeDownloadResponse>
+}
+
+// Rolling response-time stats for a single storage node endpoint
+export interface StorageNodeEndpointData {
+  // Up to 10 most recent response times; oldest entry is shifted out on overflow
+  last10ResponseTimes: number[]
+}
+
+// Per-object bookkeeping used by the LRU-SP cache
+export interface CacheItemData {
+  // Object size in kilobytes (rounded up)
+  sizeKB: number
+  // Access count since the object entered the cache
+  popularity: number
+  // Timestamp (ms since epoch) of the most recent access
+  lastAccessTime: number
+}
+
+/**
+ * Holds the distributor node's runtime state.
+ *
+ * State is split into two parts:
+ * - `memoryState`: rebuilt on startup (pending downloads, objectId->hash map,
+ *   endpoint response times, hash->cache-group index),
+ * - `storedState`: persisted to `<cacheDir>/cache.json` (LRU-SP cache groups
+ *   and hash->mime-type map), both periodically and via save()/saveSync().
+ *
+ * The cache follows the LRU-SP policy: items are partitioned into
+ * CACHE_GROUPS_COUNT groups by ceil(log_base(sizeKB / popularity)); within a
+ * group, Map insertion order serves as the LRU order (the first entry of a
+ * group is its least-recently-used item).
+ */
+export class StateCacheService {
+  private logger: Logger
+  private config: ReadonlyConfig
+  // Path of the persisted cache state file
+  private cacheFilePath: string
+  // Handle of the periodic save() timer (stopped via clearInterval())
+  private saveInterval: NodeJS.Timeout
+
+  private memoryState = {
+    pendingDownloadsByContentHash: new Map<string, PendingDownloadData>(),
+    contentHashByObjectId: new Map<string, string>(),
+    storageNodeEndpointDataByEndpoint: new Map<string, StorageNodeEndpointData>(),
+    groupNumberByContentHash: new Map<string, number>(),
+  }
+
+  private storedState = {
+    lruCacheGroups: Array.from({ length: CACHE_GROUPS_COUNT }).map(() => new Map<string, CacheItemData>()),
+    mimeTypeByContentHash: new Map<string, string>(),
+  }
+
+  public constructor(config: ReadonlyConfig, logging: LoggingService, saveIntervalMs = 60 * 1000) {
+    this.logger = logging.createLogger('StateCacheService')
+    this.cacheFilePath = `${config.directories.cache}/cache.json`
+    this.config = config
+    // Periodically flush storedState to disk
+    this.saveInterval = setInterval(() => this.save(), saveIntervalMs)
+  }
+
+  /** Remembers the detected mime type of a cached object. */
+  public setContentMimeType(contentHash: string, mimeType: string): void {
+    this.storedState.mimeTypeByContentHash.set(contentHash, mimeType)
+  }
+
+  public getContentMimeType(contentHash: string): string | undefined {
+    return this.storedState.mimeTypeByContentHash.get(contentHash)
+  }
+
+  /** Maps a runtime data object id to its content hash. */
+  public setObjectContentHash(objectId: string, hash: string): void {
+    this.memoryState.contentHashByObjectId.set(objectId, hash)
+  }
+
+  public getObjectContentHash(objectId: string): string | undefined {
+    return this.memoryState.contentHashByObjectId.get(objectId)
+  }
+
+  // LRU-SP group index: ceil(log_base(sizeKB / popularity)), clamped to [0, CACHE_GROUPS_COUNT)
+  private calcCacheGroup({ sizeKB, popularity }: CacheItemData) {
+    return Math.min(
+      Math.max(Math.ceil(Math.log(sizeKB / popularity) / Math.log(CACHE_GROUP_LOG_BASE)), 0),
+      CACHE_GROUPS_COUNT - 1
+    )
+  }
+
+  /** Returns the hashes of all cached objects (across all LRU groups). */
+  public getCachedContentHashes(): string[] {
+    let hashes: string[] = []
+    for (const [, group] of this.storedState.lruCacheGroups.entries()) {
+      hashes = hashes.concat(Array.from(group.keys()))
+    }
+    return hashes
+  }
+
+  /** Returns the number of cached items (a count, not a byte size). */
+  public getCachedContentLength(): number {
+    return this.storedState.lruCacheGroups.reduce((a, b) => a + b.size, 0)
+  }
+
+  /** Registers a newly cached object with initial popularity of 1. */
+  public newContent(contentHash: string, sizeInBytes: number): void {
+    const { groupNumberByContentHash } = this.memoryState
+    const { lruCacheGroups } = this.storedState
+    // Fixed: was a truthiness check on .get(), which treated content in
+    // group 0 (a falsy group number) as missing and re-registered it
+    if (groupNumberByContentHash.has(contentHash)) {
+      this.logger.warn('newContent was called for content that already exists, ignoring the call', { contentHash })
+      return
+    }
+    const cacheItemData: CacheItemData = {
+      popularity: 1,
+      lastAccessTime: Date.now(),
+      sizeKB: Math.ceil(sizeInBytes / 1024),
+    }
+    const groupNumber = this.calcCacheGroup(cacheItemData)
+    groupNumberByContentHash.set(contentHash, groupNumber)
+    lruCacheGroups[groupNumber].set(contentHash, cacheItemData)
+  }
+
+  /** Returns the item's cache data without touching its LRU position. */
+  public peekContent(contentHash: string): CacheItemData | undefined {
+    const groupNumber = this.memoryState.groupNumberByContentHash.get(contentHash)
+    if (groupNumber !== undefined) {
+      return this.storedState.lruCacheGroups[groupNumber].get(contentHash)
+    }
+  }
+
+  /** Records an access: bumps popularity and moves the item to the MRU end of its (possibly new) group. */
+  public useContent(contentHash: string): void {
+    const { groupNumberByContentHash } = this.memoryState
+    const { lruCacheGroups } = this.storedState
+    const groupNumber = groupNumberByContentHash.get(contentHash)
+    if (groupNumber === undefined) {
+      this.logger.warn('groupNumberByContentHash missing when trying to update LRU of content', { contentHash })
+      return
+    }
+    const group = lruCacheGroups[groupNumber]
+    const cacheItemData = group.get(contentHash)
+    if (!cacheItemData) {
+      this.logger.warn('Cache inconsistency: item missing in group retrieved from by groupNumberByContentHash map!', {
+        contentHash,
+        groupNumber,
+      })
+      groupNumberByContentHash.delete(contentHash)
+      return
+    }
+    cacheItemData.lastAccessTime = Date.now()
+    ++cacheItemData.popularity
+    // Move object to the top of the current group / new group
+    // (deleting and re-setting places it last in Map iteration order = most recently used)
+    const targetGroupNumber = this.calcCacheGroup(cacheItemData)
+    const targetGroup = lruCacheGroups[targetGroupNumber]
+    group.delete(contentHash)
+    targetGroup.set(contentHash, cacheItemData)
+    if (targetGroupNumber !== groupNumber) {
+      groupNumberByContentHash.set(contentHash, targetGroupNumber)
+    }
+  }
+
+  /**
+   * Picks the eviction candidate: among each group's least-recently-used item,
+   * the one with the highest cost = (minutes since last access * sizeKB) / popularity.
+   * Returns null when the cache is empty.
+   */
+  public getCacheEvictCandidateHash(): string | null {
+    let highestCost = 0
+    let bestCandidate: string | null = null
+    for (const group of this.storedState.lruCacheGroups) {
+      // First entry in Map insertion order = the group's least-recently-used item
+      const lruItemInGroup = Array.from(group.entries())[0]
+      if (lruItemInGroup) {
+        const [contentHash, objectData] = lruItemInGroup
+        const elapsedSinceLastAccessed = Math.ceil((Date.now() - objectData.lastAccessTime) / 60_000)
+        const itemCost = (elapsedSinceLastAccessed * objectData.sizeKB) / objectData.popularity
+        if (itemCost >= highestCost) {
+          highestCost = itemCost
+          bestCandidate = contentHash
+        }
+      }
+    }
+    return bestCandidate
+  }
+
+  /** Registers an in-flight download so concurrent requests can await the same promise. */
+  public newPendingDownload(
+    contentHash: string,
+    objectSize: number,
+    promise: Promise<StorageNodeDownloadResponse>
+  ): PendingDownloadData {
+    const pendingDownload: PendingDownloadData = {
+      status: 'Waiting',
+      objectSize,
+      promise,
+    }
+    this.memoryState.pendingDownloadsByContentHash.set(contentHash, pendingDownload)
+    return pendingDownload
+  }
+
+  public getPendingDownloadsCount(): number {
+    return this.memoryState.pendingDownloadsByContentHash.size
+  }
+
+  public getPendingDownload(contentHash: string): PendingDownloadData | undefined {
+    return this.memoryState.pendingDownloadsByContentHash.get(contentHash)
+  }
+
+  public dropPendingDownload(contentHash: string): void {
+    this.memoryState.pendingDownloadsByContentHash.delete(contentHash)
+  }
+
+  /** Removes every piece of state associated with a content hash. */
+  public dropByHash(contentHash: string): void {
+    // Pass metadata as an object for consistency with the other logger calls
+    this.logger.debug('Dropping all state by content hash', { contentHash })
+    this.storedState.mimeTypeByContentHash.delete(contentHash)
+    this.memoryState.pendingDownloadsByContentHash.delete(contentHash)
+    const cacheGroupNumber = this.memoryState.groupNumberByContentHash.get(contentHash)
+    this.logger.debug('Cache group by hash established', { contentHash, cacheGroupNumber })
+    // Fixed: was `if (cacheGroupNumber)`, which skipped cleanup for content
+    // stored in group 0 (a falsy group number), leaking cache entries
+    if (cacheGroupNumber !== undefined) {
+      this.memoryState.groupNumberByContentHash.delete(contentHash)
+      this.storedState.lruCacheGroups[cacheGroupNumber].delete(contentHash)
+    }
+  }
+
+  /** Records a response time sample for an endpoint, keeping at most the last 10. */
+  public setStorageNodeEndpointResponseTime(endpoint: string, time: number): void {
+    const data = this.memoryState.storageNodeEndpointDataByEndpoint.get(endpoint) || { last10ResponseTimes: [] }
+    if (data.last10ResponseTimes.length === 10) {
+      data.last10ResponseTimes.shift()
+    }
+    data.last10ResponseTimes.push(time)
+    if (!this.memoryState.storageNodeEndpointDataByEndpoint.has(endpoint)) {
+      this.memoryState.storageNodeEndpointDataByEndpoint.set(endpoint, data)
+    }
+  }
+
+  /** Mean of the recorded samples; `max` is used when no samples exist yet. */
+  public getStorageNodeEndpointMeanResponseTime(endpoint: string, max = 99999): number {
+    const data = this.memoryState.storageNodeEndpointDataByEndpoint.get(endpoint)
+    return _.mean(data?.last10ResponseTimes || [max])
+  }
+
+  public getStorageNodeEndpointsMeanResponseTimes(max = 99999): [string, number][] {
+    return Array.from(this.memoryState.storageNodeEndpointDataByEndpoint.keys()).map((endpoint) => [
+      endpoint,
+      this.getStorageNodeEndpointMeanResponseTime(endpoint, max),
+    ])
+  }
+
+  // Serializes storedState (Maps converted to entry arrays) to a JSON string
+  private serializeData() {
+    const { lruCacheGroups, mimeTypeByContentHash } = this.storedState
+    return JSON.stringify(
+      {
+        lruCacheGroups: lruCacheGroups.map((g) => Array.from(g.entries())),
+        mimeTypeByContentHash: Array.from(mimeTypeByContentHash.entries()),
+      },
+      null,
+      2 // TODO: Only for debugging
+    )
+  }
+
+  /** Asynchronously writes storedState to the cache file; resolves false on write error. */
+  public async save(): Promise<boolean> {
+    return new Promise((resolve) => {
+      const serialized = this.serializeData()
+      const fd = fs.openSync(this.cacheFilePath, 'w')
+      fs.write(fd, serialized, (err) => {
+        fs.closeSync(fd)
+        if (err) {
+          this.logger.error('Cache file save error', { err })
+          resolve(false)
+        } else {
+          this.logger.verbose('Cache file updated')
+          resolve(true)
+        }
+      })
+    })
+  }
+
+  /** Synchronous variant of save() (e.g. for process shutdown paths). */
+  public saveSync(): void {
+    const serialized = this.serializeData()
+    fs.writeFileSync(this.cacheFilePath, serialized)
+  }
+
+  // Rebuilds the in-memory hash->group index from the loaded groups,
+  // removing any hash that appears in more than one group
+  private loadGroupNumberByContentHashMap() {
+    const contentHashes = _.uniq(this.getCachedContentHashes())
+    const { lruCacheGroups: groups } = this.storedState
+    const { groupNumberByContentHash } = this.memoryState
+
+    contentHashes.forEach((contentHash) => {
+      groups.forEach((group, groupNumber) => {
+        if (group.has(contentHash)) {
+          if (!groupNumberByContentHash.has(contentHash)) {
+            groupNumberByContentHash.set(contentHash, groupNumber)
+          } else {
+            // Content duplicated in multiple groups - remove!
+            this.logger.warn(
+              `Content hash ${contentHash} was found in in multiple lru cache groups. Removing from group ${groupNumber}...`,
+              { firstGroup: groupNumberByContentHash.get(contentHash), currentGroup: groupNumber }
+            )
+            group.delete(contentHash)
+          }
+        }
+      })
+    })
+  }
+
+  /** Loads storedState from the cache file (if present) and rebuilds derived memory state. */
+  public load(): void {
+    if (fs.existsSync(this.cacheFilePath)) {
+      this.logger.info('Loading cache from file', { file: this.cacheFilePath })
+      try {
+        const fileContent = JSON.parse(fs.readFileSync(this.cacheFilePath).toString())
+        ;((fileContent.lruCacheGroups || []) as Array<Array<[string, CacheItemData]>>).forEach((group, groupIndex) => {
+          this.storedState.lruCacheGroups[groupIndex] = new Map<string, CacheItemData>(group)
+        })
+        this.storedState.mimeTypeByContentHash = new Map<string, string>(fileContent.mimeTypeByContentHash || [])
+        this.loadGroupNumberByContentHashMap()
+      } catch (err) {
+        this.logger.error('Error while trying to load data from cache file! Will start from scratch', {
+          file: this.cacheFilePath,
+          err,
+        })
+      }
+    } else {
+      this.logger.warn(`Cache file (${this.cacheFilePath}) is empty. Starting from scratch`)
+    }
+  }
+
+  /** Stops the periodic save timer (calls the global clearInterval). */
+  public clearInterval(): void {
+    clearInterval(this.saveInterval)
+  }
+}

+ 228 - 0
distributor-node/src/services/content/ContentService.ts

@@ -0,0 +1,228 @@
+import fs from 'fs'
+import { ReadonlyConfig, DataObjectData } from '../../types'
+import { StateCacheService } from '../cache/StateCacheService'
+import { LoggingService } from '../logging'
+import { Logger } from 'winston'
+import { FileContinousReadStream, FileContinousReadStreamOptions } from './FileContinousReadStream'
+import FileType from 'file-type'
+import _ from 'lodash'
+import { Readable, pipeline } from 'stream'
+
+export const DEFAULT_CONTENT_TYPE = 'application/octet-stream'
+
+export class ContentService {
+  private config: ReadonlyConfig
+  private dataDir: string
+  private logger: Logger
+  private stateCache: StateCacheService
+
+  private contentSizeSum = 0
+
+  public get usedSpace(): number {
+    return this.contentSizeSum
+  }
+
+  public get freeSpace(): number {
+    return this.config.limits.storage - this.contentSizeSum
+  }
+
+  public constructor(config: ReadonlyConfig, logging: LoggingService, stateCache: StateCacheService) {
+    this.config = config
+    this.logger = logging.createLogger('ContentService')
+    this.stateCache = stateCache
+    this.dataDir = config.directories.data
+  }
+
+  public async startupInit(supportedObjects: DataObjectData[]): Promise<void> {
+    const dataObjectsByHash = _.groupBy(supportedObjects, (o) => o.contentHash)
+    const dataDirFiles = fs.readdirSync(this.dataDir)
+    const filesCountOnStartup = dataDirFiles.length
+    const cachedContentHashes = this.stateCache.getCachedContentHashes()
+    const cacheItemsOnStartup = cachedContentHashes.length
+
+    this.logger.info('ContentService initializing...', {
+      supportedObjects: supportedObjects.length,
+      filesCountOnStartup,
+      cacheItemsOnStartup,
+    })
+    let filesDropped = 0
+    for (const contentHash of dataDirFiles) {
+      this.logger.debug('Checking content file', { contentHash })
+      // Add fileSize to contentSizeSum for each file. If the file ends up dropped - contentSizeSum will be reduced by this.drop().
+      const fileSize = this.fileSize(contentHash)
+      this.contentSizeSum += fileSize
+
+      // Drop files that are not part of current chain assignment
+      const objectsByHash = dataObjectsByHash[contentHash] || []
+      if (!objectsByHash.length) {
+        this.drop(contentHash, 'Not supported')
+        continue
+      }
+
+      // Compare file size to expected one
+      const { size: dataObjectSize } = objectsByHash[0]
+      if (fileSize !== dataObjectSize) {
+        // Existing file size does not match the expected one
+        const msg = `Unexpected file size. Expected: ${dataObjectSize}, actual: ${fileSize}`
+        this.logger.warn(msg, { fileSize, dataObjectSize })
+        this.drop(contentHash, msg)
+        ++filesDropped
+      } else {
+        // Existing file size is OK - detect mimeType if missing
+        if (!this.stateCache.getContentMimeType(contentHash)) {
+          this.stateCache.setContentMimeType(contentHash, await this.guessMimeType(contentHash))
+        }
+      }
+
+      // Recreate contentHashByObjectId map for all supported data objects
+      objectsByHash.forEach(({ contentHash, objectId }) => {
+        this.stateCache.setObjectContentHash(objectId, contentHash)
+      })
+    }
+
+    let cacheItemsDropped = 0
+    for (const contentHash of cachedContentHashes) {
+      if (!this.exists(contentHash)) {
+        // Content is part of cache data, but does not exist in filesystem - drop from cache
+        this.stateCache.dropByHash(contentHash)
+        ++cacheItemsDropped
+      }
+    }
+
+    this.logger.info('ContentService initialized', {
+      filesDropped,
+      cacheItemsDropped,
+      contentSizeSum: this.contentSizeSum,
+    })
+  }
+
+  public drop(contentHash: string, reason?: string): void {
+    if (this.exists(contentHash)) {
+      const size = this.fileSize(contentHash)
+      fs.unlinkSync(this.path(contentHash))
+      this.contentSizeSum -= size
+      this.logger.debug('Dropping content', { contentHash, reason, size, contentSizeSum: this.contentSizeSum })
+    } else {
+      this.logger.warn('Trying to drop content that no loger exists', { contentHash, reason })
+    }
+    this.stateCache.dropByHash(contentHash)
+  }
+
+  public fileSize(contentHash: string): number {
+    return fs.statSync(this.path(contentHash)).size
+  }
+
+  public path(contentHash: string): string {
+    return `${this.dataDir}/${contentHash}`
+  }
+
+  public exists(contentHash: string): boolean {
+    return fs.existsSync(this.path(contentHash))
+  }
+
+  public createReadStream(contentHash: string): fs.ReadStream {
+    return fs.createReadStream(this.path(contentHash))
+  }
+
+  public createWriteStream(contentHash: string): fs.WriteStream {
+    return fs.createWriteStream(this.path(contentHash), { autoClose: true, emitClose: true })
+  }
+
+  public createContinousReadStream(
+    contentHash: string,
+    options: FileContinousReadStreamOptions
+  ): FileContinousReadStream {
+    return new FileContinousReadStream(this.path(contentHash), options)
+  }
+
+  public async guessMimeType(contentHash: string): Promise<string> {
+    const guessResult = await FileType.fromFile(this.path(contentHash))
+    return guessResult?.mime || DEFAULT_CONTENT_TYPE
+  }
+
+  private async evictCacheUntilFreeSpaceReached(targetFreeSpace: number): Promise<void> {
+    this.logger.verbose('Cache eviction triggered.', { targetFreeSpace, currentFreeSpace: this.freeSpace })
+    let itemsDropped = 0
+    while (this.freeSpace < targetFreeSpace) {
+      const evictCandidateHash = this.stateCache.getCacheEvictCandidateHash()
+      if (evictCandidateHash) {
+        this.drop(evictCandidateHash, 'Cache eviction')
+        ++itemsDropped
+      } else {
+        this.logger.verbose('Nothing to drop from cache, waiting...', { freeSpace: this.freeSpace, targetFreeSpace })
+        await new Promise((resolve) => setTimeout(resolve, 1000))
+      }
+    }
+    this.logger.verbose('Cache eviction finalized.', { currentfreeSpace: this.freeSpace, itemsDropped })
+  }
+
+  public async handleNewContent(contentHash: string, expectedSize: number, dataStream: Readable): Promise<void> {
+    this.logger.verbose('Handling new content', {
+      contentHash,
+      expectedSize,
+    })
+
+    // Trigger cache eviction if required
+    if (this.freeSpace < expectedSize) {
+      await this.evictCacheUntilFreeSpaceReached(expectedSize)
+    }
+
+    // Reserve space for the new object
+    this.contentSizeSum += expectedSize
+    this.logger.verbose('Reserved space for new data object', {
+      contentHash,
+      expectedSize,
+      newContentSizeSum: this.contentSizeSum,
+    })
+
+    // Return a promise that resolves when the new file is created
+    return new Promise<void>((resolve, reject) => {
+      const fileStream = this.createWriteStream(contentHash)
+
+      let bytesRecieved = 0
+
+      pipeline(dataStream, fileStream, async (err) => {
+        const { bytesWritten } = fileStream
+        const logMetadata = {
+          contentHash,
+          expectedSize,
+          bytesRecieved,
+          bytesWritten,
+        }
+        if (err) {
+          this.logger.error(`Error while processing content data stream`, {
+            err,
+            ...logMetadata,
+          })
+          this.drop(contentHash)
+          reject(err)
+        } else {
+          if (bytesWritten === bytesRecieved && bytesWritten === expectedSize) {
+            const mimeType = await this.guessMimeType(contentHash)
+            this.logger.info('New content accepted', { ...logMetadata })
+            this.stateCache.dropPendingDownload(contentHash)
+            this.stateCache.newContent(contentHash, expectedSize)
+            this.stateCache.setContentMimeType(contentHash, mimeType)
+          } else {
+            this.logger.error('Content rejected: Bytes written/recieved/expected mismatch!', {
+              ...logMetadata,
+            })
+            this.drop(contentHash)
+          }
+        }
+      })
+
+      fileStream.on('open', () => {
+        // Note: The promise is resolved on "ready" event, since that's what's awaited in the current flow
+        resolve()
+      })
+
+      dataStream.on('data', (chunk) => {
+        bytesRecieved += chunk.length
+        if (bytesRecieved > expectedSize) {
+          dataStream.destroy(new Error('Unexpected content size: Too much data recieved from source!'))
+        }
+      })
+    })
+  }
+}

+ 87 - 0
distributor-node/src/services/content/FileContinousReadStream.ts

@@ -0,0 +1,87 @@
+import { Readable } from 'stream'
+import fs from 'fs'
+
// Configuration of a FileContinousReadStream.
export interface FileContinousReadStreamOptions {
  // Offset of the last byte to be read; reading finishes once the read position passes it
  end: number
  // Starting read offset in bytes (defaults to 0)
  start?: number
  // Read chunk size / stream highWaterMark in bytes (defaults to 1 MiB)
  chunkSize?: number
  // Delay in ms between retries when no new data is available yet (defaults to 50)
  missingDataRetryTime?: number
  // Max number of missing-data retries before the stream errors out
  // (defaults to 2400, i.e. 2400 x 50ms = 120s timeout)
  maxRetries?: number
}
+
+export class FileContinousReadStream extends Readable {
+  private fd: number
+  private position: number
+  private lastByte: number
+  private missingDataRetryTime: number
+  private maxRetries: number
+  private finished: boolean
+  private interval: NodeJS.Timeout | undefined
+
+  public constructor(path: string, options: FileContinousReadStreamOptions) {
+    super({
+      highWaterMark: options.chunkSize || 1 * 1024 * 1024, // default: 1 MB
+    })
+    this.fd = fs.openSync(path, 'r')
+    this.position = options.start || 0
+    this.lastByte = options.end
+    this.missingDataRetryTime = options.missingDataRetryTime || 50 // 50 ms
+    this.maxRetries = options.maxRetries || 2400 // 2400 retries x 50 ms = 120s timeout
+    this.finished = false
+  }
+
+  private finish() {
+    fs.closeSync(this.fd)
+    this.finished = true
+  }
+
+  private readChunkSync(bytesN: number): Buffer | null {
+    const chunk = Buffer.alloc(bytesN)
+    const readBytes = fs.readSync(this.fd, chunk, 0, bytesN, this.position)
+    const newPosition = this.position + readBytes
+    if (readBytes < bytesN && newPosition <= this.lastByte) {
+      return null
+    }
+    if (newPosition > this.lastByte) {
+      this.finish()
+      return chunk.slice(0, readBytes)
+    }
+    this.position = newPosition
+    return chunk
+  }
+
+  // Reason: https://nodejs.org/docs/latest/api/stream.html#stream_implementing_a_readable_stream
+  // eslint-disable-next-line @typescript-eslint/naming-convention
+  _read(bytesN: number): void {
+    if (this.finished) {
+      this.push(null)
+      return
+    }
+    const chunk = this.readChunkSync(bytesN)
+    if (chunk === null) {
+      let retries = 0
+      const interval = setInterval(() => {
+        const chunk = this.readChunkSync(bytesN)
+        if (chunk !== null) {
+          clearInterval(interval)
+          return this.push(chunk)
+        }
+        if (++retries >= this.maxRetries) {
+          clearInterval(interval)
+          this.destroy(new Error('Max missing data retries limit reached'))
+        }
+      }, this.missingDataRetryTime)
+      this.interval = interval
+    } else {
+      this.push(chunk)
+    }
+  }
+
+  // Reason: https://nodejs.org/docs/latest/api/stream.html#stream_implementing_a_readable_stream
+  // eslint-disable-next-line @typescript-eslint/naming-convention
+  _destroy(): void {
+    if (this.interval) {
+      clearInterval(this.interval)
+    }
+  }
+}

+ 134 - 0
distributor-node/src/services/logging/LoggingService.ts

@@ -0,0 +1,134 @@
+import winston, { Logger, LoggerOptions } from 'winston'
+import escFormat from '@elastic/ecs-winston-format'
+import { ElasticsearchTransport } from 'winston-elasticsearch'
+import { ReadonlyConfig } from '../../types'
+import { blake2AsHex } from '@polkadot/util-crypto'
+import { Format } from 'logform'
+import stringify from 'fast-safe-stringify'
+import NodeCache from 'node-cache'
+
// Colors used by the console (CLI) transport, one per log level
const cliColors = {
  error: 'red',
  warn: 'yellow',
  info: 'green',
  http: 'magenta',
  debug: 'grey',
}

winston.addColors(cliColors)

// Tracks recently "paused" log messages; entries expire automatically
// after the pause period passes (deleteOnExpire)
const pausedLogs = new NodeCache({
  deleteOnExpire: true,
})

// Pause log for a specified time period:
// when a log entry carries an '@pauseFor' field (seconds), identical messages
// (same transport id + level + message) are suppressed until the entry expires.
const pauseFormat: (opts: { id: string }) => Format = winston.format((info, opts: { id: string }) => {
  if (info['@pauseFor']) {
    // Key the suppression on transport id, level and message text
    const messageHash = blake2AsHex(`${opts.id}:${info.level}:${info.message}`)
    if (!pausedLogs.has(messageHash)) {
      pausedLogs.set(messageHash, null, info['@pauseFor'])
      info.message += ` (this log message will be skipped for the next ${info['@pauseFor']}s)`
      delete info['@pauseFor']
      return info
    }
    // Still within the pause window - drop the log entry
    return false
  }

  return info
})

// Human-readable console format: timestamp + label + colored level + message,
// with any extra metadata pretty-printed on the following lines
const cliFormat = winston.format.combine(
  winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss:ms' }),
  winston.format.metadata({ fillExcept: ['label', 'level', 'timestamp', 'message'] }),
  winston.format.colorize({ all: true }),
  winston.format.printf(
    (info) =>
      `${info.timestamp} ${info.label} ${info.level}: ${info.message}` +
      (Object.keys(info.metadata).length ? `\n${stringify(info.metadata, undefined, 4)}` : '')
  )
)
+
+export class LoggingService {
+  private rootLogger: Logger
+  private esTransport: ElasticsearchTransport | undefined
+
+  private constructor(options: LoggerOptions, esTransport?: ElasticsearchTransport) {
+    this.esTransport = esTransport
+    this.rootLogger = winston.createLogger(options)
+  }
+
+  public static withAppConfig(config: ReadonlyConfig): LoggingService {
+    const transports: winston.LoggerOptions['transports'] = []
+
+    let esTransport: ElasticsearchTransport | undefined
+    if (config.log?.elastic && config.log.elastic !== 'off') {
+      if (!config.endpoints.elasticSearch) {
+        throw new Error('config.endpoints.elasticSearch must be provided when elasticSeach logging is enabled!')
+      }
+      esTransport = new ElasticsearchTransport({
+        level: config.log.elastic,
+        format: winston.format.combine(pauseFormat({ id: 'es' }), escFormat()),
+        flushInterval: 5000,
+        source: config.id,
+        clientOpts: {
+          node: {
+            url: new URL(config.endpoints.elasticSearch),
+          },
+        },
+      })
+      transports.push(esTransport)
+    }
+
+    const fileTransport =
+      config.log?.file && config.log.file !== 'off'
+        ? new winston.transports.File({
+            filename: `${config.directories.logs}/logs.json`,
+            level: config.log.file,
+            format: winston.format.combine(pauseFormat({ id: 'file' }), escFormat()),
+          })
+        : undefined
+    if (fileTransport) {
+      transports.push(fileTransport)
+    }
+
+    const consoleTransport =
+      config.log?.console && config.log.console !== 'off'
+        ? new winston.transports.Console({
+            level: config.log.console,
+            format: winston.format.combine(pauseFormat({ id: 'cli' }), cliFormat),
+          })
+        : undefined
+    if (consoleTransport) {
+      transports.push(consoleTransport)
+    }
+
+    return new LoggingService(
+      {
+        transports,
+      },
+      esTransport
+    )
+  }
+
+  public static withCLIConfig(): LoggingService {
+    return new LoggingService({
+      transports: new winston.transports.Console({
+        // Log everything to stderr, only the command output value will be written to stdout
+        stderrLevels: Object.keys(winston.config.npm.levels),
+        format: winston.format.combine(pauseFormat({ id: 'cli' }), cliFormat),
+      }),
+    })
+  }
+
+  public createLogger(label: string, ...meta: unknown[]): Logger {
+    return this.rootLogger.child({ label, ...meta })
+  }
+
+  public async end(): Promise<void> {
+    if (this.esTransport) {
+      await this.esTransport.flush()
+    }
+    this.rootLogger.end()
+    await Promise.all(this.rootLogger.transports.map((t) => new Promise((resolve) => t.on('finish', resolve))))
+  }
+}

+ 1 - 0
distributor-node/src/services/logging/index.ts

@@ -0,0 +1 @@
+export { LoggingService } from './LoggingService'

+ 352 - 0
distributor-node/src/services/networking/NetworkingService.ts

@@ -0,0 +1,352 @@
+import { ReadonlyConfig } from '../../types/config'
+import { QueryNodeApi } from './query-node/api'
+import { Logger } from 'winston'
+import { LoggingService } from '../logging'
+import { StorageNodeApi } from './storage-node/api'
+import { PendingDownloadData, StateCacheService } from '../cache/StateCacheService'
+import { DataObjectDetailsFragment } from './query-node/generated/queries'
+import axios from 'axios'
+import {
+  StorageNodeEndpointData,
+  DataObjectAccessPoints,
+  DataObjectData,
+  DataObjectInfo,
+  StorageNodeDownloadResponse,
+  DownloadData,
+} from '../../types'
+import queue from 'queue'
+import { DistributionBucketOperatorStatus } from './query-node/generated/schema'
+import http from 'http'
+import https from 'https'
+import { parseAxiosError } from '../parsers/errors'
+
// Max number of storage nodes probed for object availability in parallel (per single download)
const MAX_CONCURRENT_AVAILABILITY_CHECKS_PER_DOWNLOAD = 10
// Max number of storage-node response-time checks running in parallel
const MAX_CONCURRENT_RESPONSE_TIME_CHECKS = 10
// How often (ms) the active storage node endpoints are re-checked
const STORAGE_NODE_ENDPOINTS_CHECK_INTERVAL_MS = 60000
+
/**
 * Coordinates the distributor node's network interactions:
 * - query-node lookups (data object details, bucket assignments),
 * - periodic storage-node endpoint response-time checks,
 * - queued downloads of data objects from storage providers
 *   (availability probing followed by a single download attempt at a time).
 */
export class NetworkingService {
  private config: ReadonlyConfig
  private queryNodeApi: QueryNodeApi
  // private runtimeApi: RuntimeApi
  private logging: LoggingService
  private stateCache: StateCacheService
  private logger: Logger

  // Timer driving the periodic checkActiveStorageNodeEndpoints() runs
  private storageNodeEndpointsCheckInterval: NodeJS.Timeout
  // Queue of storage-node response-time checks (bounded concurrency)
  private testLatencyQueue: queue
  // Queue of data object download jobs (bounded concurrency)
  private downloadQueue: queue

  constructor(config: ReadonlyConfig, stateCache: StateCacheService, logging: LoggingService) {
    // Global axios defaults: shared request timeout + keep-alive agents
    // with a cap on concurrent outbound connections
    axios.defaults.timeout = config.limits.outboundRequestsTimeout
    const httpConfig: http.AgentOptions | https.AgentOptions = {
      keepAlive: true,
      timeout: config.limits.outboundRequestsTimeout,
      maxSockets: config.limits.maxConcurrentOutboundConnections,
    }
    axios.defaults.httpAgent = new http.Agent(httpConfig)
    axios.defaults.httpsAgent = new https.Agent(httpConfig)
    this.config = config
    this.logging = logging
    this.stateCache = stateCache
    this.logger = logging.createLogger('NetworkingManager')
    this.queryNodeApi = new QueryNodeApi(config.endpoints.queryNode)
    // this.runtimeApi = new RuntimeApi(config.endpoints.substrateNode)
    // NOTE(review): fire-and-forget async call - a rejection here would surface
    // as an unhandled promise rejection; confirm this is intended
    this.checkActiveStorageNodeEndpoints()
    this.storageNodeEndpointsCheckInterval = setInterval(
      this.checkActiveStorageNodeEndpoints.bind(this),
      STORAGE_NODE_ENDPOINTS_CHECK_INTERVAL_MS
    )
    // Queues
    this.testLatencyQueue = queue({ concurrency: MAX_CONCURRENT_RESPONSE_TIME_CHECKS, autostart: true }).on(
      'end',
      () => {
        this.logger.verbose('Mean response times updated', {
          responseTimes: this.stateCache.getStorageNodeEndpointsMeanResponseTimes(),
        })
      }
    )
    this.downloadQueue = queue({ concurrency: config.limits.maxConcurrentStorageNodeDownloads, autostart: true })
  }

  // Stops the periodic endpoint check timer (used on shutdown).
  public clearIntervals(): void {
    clearInterval(this.storageNodeEndpointsCheckInterval)
  }

  // Throws unless the endpoint is a valid http(s) URL.
  private validateNodeEndpoint(endpoint: string): void {
    const endpointUrl = new URL(endpoint)
    if (endpointUrl.protocol !== 'http:' && endpointUrl.protocol !== 'https:') {
      throw new Error(`Invalid endpoint protocol: ${endpointUrl.protocol}`)
    }
  }

  // Filters out endpoints that fail validation, logging a (rate-limited) warning for each.
  private filterStorageNodeEndpoints(input: StorageNodeEndpointData[]): StorageNodeEndpointData[] {
    return input.filter((b) => {
      try {
        this.validateNodeEndpoint(b.endpoint)
        return true
      } catch (err) {
        this.logger.warn(`Invalid storage node endpoint: ${b.endpoint} for bucket ${b.bucketId}`, {
          bucketId: b.bucketId,
          endpoint: b.endpoint,
          err,
          '@pauseFor': 900,
        })
        return false
      }
    })
  }

  // Extracts (bucketId, endpoint) pairs of active storage buckets that expose
  // a node endpoint, then validates/filters them.
  private prepareStorageNodeEndpoints(details: DataObjectDetailsFragment) {
    const endpointsData = details.storageBag.storageAssignments
      .filter(
        (a) =>
          a.storageBucket.operatorStatus.__typename === 'StorageBucketOperatorStatusActive' &&
          a.storageBucket.operatorMetadata?.nodeEndpoint
      )
      .map((a) => ({
        bucketId: a.storageBucket.id,
        endpoint: a.storageBucket.operatorMetadata!.nodeEndpoint!,
      }))

    return this.filterStorageNodeEndpoints(endpointsData)
  }

  // Builds the access-points structure (currently: storage node endpoints only).
  private parseDataObjectAccessPoints(details: DataObjectDetailsFragment): DataObjectAccessPoints {
    return {
      storageNodes: this.prepareStorageNodeEndpoints(details),
    }
  }

  /**
   * Fetches a data object's details from the query node and reports:
   * - whether the object exists,
   * - whether it is distributed by this node (by workerId when buckets === 'all',
   *   otherwise by configured bucket ids),
   * - its download-relevant data (access points, content hash, size).
   * Also records the objectId -> contentHash mapping in the state cache.
   */
  public async dataObjectInfo(objectId: string): Promise<DataObjectInfo> {
    const details = await this.queryNodeApi.getDataObjectDetails(objectId)
    if (details) {
      this.stateCache.setObjectContentHash(objectId, details.ipfsHash)
    }
    return {
      exists: !!details,
      // Note: 'distirbutionAssignments' spelling comes from the generated query-node schema
      isSupported:
        (this.config.buckets === 'all' &&
          details?.storageBag.distirbutionAssignments.some((d) =>
            d.distributionBucket.operators.some(
              (o) => o.workerId === this.config.workerId && o.status === DistributionBucketOperatorStatus.Active
            )
          )) ||
        (Array.isArray(this.config.buckets) &&
          this.config.buckets.some((bucketId) =>
            details?.storageBag.distirbutionAssignments
              .map((a) => a.distributionBucket.id)
              .includes(bucketId.toString())
          )),
      data: details
        ? {
            objectId,
            accessPoints: this.parseDataObjectAccessPoints(details),
            contentHash: details.ipfsHash,
            size: parseInt(details.size),
          }
        : undefined,
    }
  }

  // Sorts endpoints ascending by their cached mean response time (fastest first).
  // Note: Array.prototype.sort mutates the input array.
  private sortEndpointsByMeanResponseTime(endpoints: string[]) {
    return endpoints.sort(
      (a, b) =>
        this.stateCache.getStorageNodeEndpointMeanResponseTime(a) -
        this.stateCache.getStorageNodeEndpointMeanResponseTime(b)
    )
  }

  /**
   * Executes a single download job:
   * 1. probes storage endpoints (fastest-first) for object availability,
   *    a few at a time (availabilityQueue),
   * 2. as soon as one confirms availability, pauses probing and attempts the
   *    actual download (objectDownloadQueue, concurrency 1),
   * 3. on download failure resumes probing; when both queues are exhausted
   *    the job fails and the pending download is dropped from the cache.
   *
   * `onSourceFound` fires once a download response is obtained; the returned
   * promise resolves when the response stream closes/ends/errors.
   */
  private downloadJob(
    pendingDownload: PendingDownloadData,
    downloadData: DownloadData,
    onSourceFound: (response: StorageNodeDownloadResponse) => void,
    onError: (error: Error) => void,
    onFinished?: () => void
  ): Promise<void> {
    const {
      objectData: { contentHash, accessPoints },
      startAt,
    } = downloadData

    pendingDownload.status = 'LookingForSource'

    return new Promise<void>((resolve, reject) => {
      // Handlers:
      const fail = (message: string) => {
        this.stateCache.dropPendingDownload(contentHash)
        onError(new Error(message))
        reject(new Error(message))
      }

      const sourceFound = (response: StorageNodeDownloadResponse) => {
        this.logger.info('Download source chosen', { contentHash, source: response.config.url })
        pendingDownload.status = 'Downloading'
        onSourceFound(response)
      }

      const finish = () => {
        onFinished && onFinished()
        resolve()
      }

      const storageEndpoints = this.sortEndpointsByMeanResponseTime(
        accessPoints?.storageNodes.map((n) => n.endpoint) || []
      )

      this.logger.info('Downloading new data object', {
        contentHash,
        possibleSources: storageEndpoints.map((e) => ({
          endpoint: e,
          meanResponseTime: this.stateCache.getStorageNodeEndpointMeanResponseTime(e),
        })),
      })
      if (!storageEndpoints.length) {
        return fail('No storage endpoints available to download the data object from')
      }

      const availabilityQueue = queue({
        concurrency: MAX_CONCURRENT_AVAILABILITY_CHECKS_PER_DOWNLOAD,
        autostart: true,
      })
      const objectDownloadQueue = queue({ concurrency: 1, autostart: true })

      storageEndpoints.forEach(async (endpoint) => {
        availabilityQueue.push(async () => {
          const api = new StorageNodeApi(endpoint, this.logging)
          const available = await api.isObjectAvailable(contentHash)
          if (!available) {
            throw new Error('Not avilable')
          }
          return endpoint
        })
      })

      availabilityQueue.on('success', (endpoint) => {
        // An endpoint confirmed availability - pause further probing and try to download
        availabilityQueue.stop()
        const job = async () => {
          const api = new StorageNodeApi(endpoint, this.logging)
          const response = await api.downloadObject(contentHash, startAt)
          return response
        }
        objectDownloadQueue.push(job)
      })

      availabilityQueue.on('error', () => {
        /*
        Do nothing.
        The handler is needed to avoid unhandled promise rejection
        */
      })

      availabilityQueue.on('end', () => {
        // All probes finished without queueing a download attempt
        if (!objectDownloadQueue.length) {
          fail('Failed to download the object from any availablable storage provider')
        }
      })

      objectDownloadQueue.on('error', (err) => {
        this.logger.error('Download attempt from storage node failed after availability was confirmed:', { err })
      })

      objectDownloadQueue.on('end', () => {
        // Download attempt(s) failed - resume probing if there are endpoints left
        if (availabilityQueue.length) {
          availabilityQueue.start()
        } else {
          fail('Failed to download the object from any availablable storage provider')
        }
      })

      objectDownloadQueue.on('success', (response: StorageNodeDownloadResponse) => {
        // A download response was obtained - tear down both queues and hand the
        // response stream over; the job resolves when the stream finishes
        availabilityQueue.removeAllListeners().end()
        objectDownloadQueue.removeAllListeners().end()
        response.data.on('close', finish).on('error', finish).on('end', finish)
        sourceFound(response)
      })
    })
  }

  /**
   * Queues a download of the given data object.
   * Returns a promise resolving with the storage node's download response,
   * or null when a download of this content is already pending.
   */
  public downloadDataObject(downloadData: DownloadData): Promise<StorageNodeDownloadResponse> | null {
    const {
      objectData: { contentHash, size },
    } = downloadData

    if (this.stateCache.getPendingDownload(contentHash)) {
      // Already downloading
      return null
    }

    let resolveDownload: (response: StorageNodeDownloadResponse) => void, rejectDownload: (err: Error) => void
    const downloadPromise = new Promise<StorageNodeDownloadResponse>((resolve, reject) => {
      resolveDownload = resolve
      rejectDownload = reject
    })

    // Queue the download
    const pendingDownload = this.stateCache.newPendingDownload(contentHash, size, downloadPromise)
    this.downloadQueue.push(() => this.downloadJob(pendingDownload, downloadData, resolveDownload, rejectDownload))

    return downloadPromise
  }

  /**
   * Fetches all data objects this node should support, based on its bucket
   * configuration ('all' assigned to workerId, or an explicit bucket id list).
   */
  async fetchSupportedDataObjects(): Promise<DataObjectData[]> {
    const data =
      this.config.buckets === 'all'
        ? await this.queryNodeApi.getDistributionBucketsWithObjectsByWorkerId(this.config.workerId)
        : await this.queryNodeApi.getDistributionBucketsWithObjectsByIds(this.config.buckets.map((id) => id.toString()))
    const objectsData: DataObjectData[] = []
    data.forEach((bucket) => {
      bucket.bagAssignments.forEach((a) => {
        a.storageBag.objects.forEach((object) => {
          const { ipfsHash, id, size } = object
          objectsData.push({ contentHash: ipfsHash, objectId: id, size: parseInt(size) })
        })
      })
    })

    return objectsData
  }

  /**
   * Fetches all active storage bucket operators from the query node and queues
   * a response-time check for each valid endpoint.
   */
  async checkActiveStorageNodeEndpoints(): Promise<void> {
    const activeStorageOperators = await this.queryNodeApi.getActiveStorageBucketOperatorsData()
    const endpoints = this.filterStorageNodeEndpoints(
      activeStorageOperators.map(({ id, operatorMetadata }) => ({
        bucketId: id,
        endpoint: operatorMetadata!.nodeEndpoint!,
      }))
    )
    this.logger.verbose('Checking nearby storage nodes...', { validEndpointsCount: endpoints.length })

    endpoints.forEach(({ endpoint }) =>
      this.testLatencyQueue.push(async () => {
        await this.checkResponseTime(endpoint)
      })
    )
  }

  /**
   * Measures a storage node's response time by requesting its root path.
   * A 404 response is currently the expected "healthy" outcome; anything else
   * is logged as unexpected (with a rate-limited warning).
   */
  async checkResponseTime(endpoint: string): Promise<void> {
    const start = Date.now()
    this.logger.debug(`Sending storage node response-time check request to: ${endpoint}`, { endpoint })
    try {
      // TODO: Use a status endpoint once available?
      await axios.get(endpoint, {
        headers: {
          connection: 'close',
        },
      })
      throw new Error('Unexpected status 200')
    } catch (err) {
      if (axios.isAxiosError(err) && err.response?.status === 404) {
        // This is the expected outcome currently
        const responseTime = Date.now() - start
        this.logger.debug(`${endpoint} check request response time: ${responseTime}`, { endpoint, responseTime })
        this.stateCache.setStorageNodeEndpointResponseTime(endpoint, responseTime)
      } else {
        this.logger.warn(`${endpoint} check request unexpected response`, {
          endpoint,
          err: axios.isAxiosError(err) ? parseAxiosError(err) : err,
          '@pauseFor': 900,
        })
      }
    }
  }
}

+ 27 - 0
distributor-node/src/services/networking/distributor-node/generated/.openapi-generator-ignore

@@ -0,0 +1,27 @@
+# OpenAPI Generator Ignore
+# Generated by openapi-generator https://github.com/openapitools/openapi-generator
+
+# Use this file to prevent files from being overwritten by the generator.
+# The patterns follow closely to .gitignore or .dockerignore.
+
+# As an example, the C# client generator defines ApiClient.cs.
+# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line:
+#ApiClient.cs
+
+# You can match any string of characters against a directory, file or extension with a single asterisk (*):
+#foo/*/qux
+# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux
+
+# You can recursively match patterns against a directory, file or extension with a double asterisk (**):
+#foo/**/qux
+# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux
+
+# You can also negate patterns with an exclamation (!).
+# For example, you can ignore all files in a docs folder with the file extension .md:
+#docs/*.md
+# Then explicitly reverse the ignore rule for a single file:
+#!docs/README.md
+
+git_push.sh
+.npmignore
+.gitignore

+ 5 - 0
distributor-node/src/services/networking/distributor-node/generated/.openapi-generator/FILES

@@ -0,0 +1,5 @@
+api.ts
+base.ts
+common.ts
+configuration.ts
+index.ts

+ 1 - 0
distributor-node/src/services/networking/distributor-node/generated/.openapi-generator/VERSION

@@ -0,0 +1 @@
+5.2.0

+ 380 - 0
distributor-node/src/services/networking/distributor-node/generated/api.ts

@@ -0,0 +1,380 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Distributor node API
+ * Distributor node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+import { Configuration } from './configuration';
+import globalAxios, { AxiosPromise, AxiosInstance } from 'axios';
+// Some imports not used depending on template conditions
+// @ts-ignore
+import { DUMMY_BASE_URL, assertParamExists, setApiKeyToObject, setBasicAuthToObject, setBearerAuthToObject, setOAuthToObject, setSearchParams, serializeDataIfNeeded, toPathString, createRequestFunction } from './common';
+// @ts-ignore
+import { BASE_PATH, COLLECTION_FORMATS, RequestArgs, BaseAPI, RequiredError } from './base';
+
+/**
+ * @type BucketsResponse
+ * @export
+ */
+export type BucketsResponse = BucketsResponseOneOf | BucketsResponseOneOf1;
+
+/**
+ * 
+ * @export
+ * @interface BucketsResponseOneOf
+ */
+export interface BucketsResponseOneOf {
+    /**
+     * 
+     * @type {Array<number>}
+     * @memberof BucketsResponseOneOf
+     */
+    bucketIds: Array<number>;
+}
+/**
+ * 
+ * @export
+ * @interface BucketsResponseOneOf1
+ */
+export interface BucketsResponseOneOf1 {
+    /**
+     * 
+     * @type {number}
+     * @memberof BucketsResponseOneOf1
+     */
+    allByWorkerId: number;
+}
+/**
+ * 
+ * @export
+ * @interface ErrorResponse
+ */
+export interface ErrorResponse {
+    /**
+     * 
+     * @type {string}
+     * @memberof ErrorResponse
+     */
+    type?: string;
+    /**
+     * 
+     * @type {string}
+     * @memberof ErrorResponse
+     */
+    message: string;
+}
+/**
+ * 
+ * @export
+ * @interface StatusResponse
+ */
+export interface StatusResponse {
+    /**
+     * 
+     * @type {string}
+     * @memberof StatusResponse
+     */
+    id: string;
+    /**
+     * 
+     * @type {number}
+     * @memberof StatusResponse
+     */
+    objectsInCache: number;
+    /**
+     * 
+     * @type {number}
+     * @memberof StatusResponse
+     */
+    storageLimit: number;
+    /**
+     * 
+     * @type {number}
+     * @memberof StatusResponse
+     */
+    storageUsed: number;
+    /**
+     * 
+     * @type {number}
+     * @memberof StatusResponse
+     */
+    uptime: number;
+    /**
+     * 
+     * @type {number}
+     * @memberof StatusResponse
+     */
+    downloadsInProgress: number;
+}
+
+/**
+ * PublicApi - axios parameter creator
+ * @export
+ */
+export const PublicApiAxiosParamCreator = function (configuration?: Configuration) {
+    return {
+        /**
+         * Returns a media file.
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicAsset: async (options: any = {}): Promise<RequestArgs> => {
+            const localVarPath = `/asset/{objectId}`;
+            // use dummy base URL string because the URL constructor only accepts absolute URLs.
+            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
+            let baseOptions;
+            if (configuration) {
+                baseOptions = configuration.baseOptions;
+            }
+
+            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
+            const localVarHeaderParameter = {} as any;
+            const localVarQueryParameter = {} as any;
+
+
+    
+            setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
+            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
+            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
+
+            return {
+                url: toPathString(localVarUrlObj),
+                options: localVarRequestOptions,
+            };
+        },
+        /**
+         * Returns asset response headers (cache status, content type and/or length, accepted ranges etc.)
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicAssetHead: async (options: any = {}): Promise<RequestArgs> => {
+            const localVarPath = `/asset/{objectId}`;
+            // use dummy base URL string because the URL constructor only accepts absolute URLs.
+            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
+            let baseOptions;
+            if (configuration) {
+                baseOptions = configuration.baseOptions;
+            }
+
+            const localVarRequestOptions = { method: 'HEAD', ...baseOptions, ...options};
+            const localVarHeaderParameter = {} as any;
+            const localVarQueryParameter = {} as any;
+
+
+    
+            setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
+            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
+            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
+
+            return {
+                url: toPathString(localVarUrlObj),
+                options: localVarRequestOptions,
+            };
+        },
+        /**
+         * Returns list of distributed buckets
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicBuckets: async (options: any = {}): Promise<RequestArgs> => {
+            const localVarPath = `/buckets`;
+            // use dummy base URL string because the URL constructor only accepts absolute URLs.
+            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
+            let baseOptions;
+            if (configuration) {
+                baseOptions = configuration.baseOptions;
+            }
+
+            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
+            const localVarHeaderParameter = {} as any;
+            const localVarQueryParameter = {} as any;
+
+
+    
+            setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
+            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
+            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
+
+            return {
+                url: toPathString(localVarUrlObj),
+                options: localVarRequestOptions,
+            };
+        },
+        /**
+         * Returns json object describing current node status.
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicStatus: async (options: any = {}): Promise<RequestArgs> => {
+            const localVarPath = `/status`;
+            // use dummy base URL string because the URL constructor only accepts absolute URLs.
+            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
+            let baseOptions;
+            if (configuration) {
+                baseOptions = configuration.baseOptions;
+            }
+
+            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
+            const localVarHeaderParameter = {} as any;
+            const localVarQueryParameter = {} as any;
+
+
+    
+            setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
+            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
+            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
+
+            return {
+                url: toPathString(localVarUrlObj),
+                options: localVarRequestOptions,
+            };
+        },
+    }
+};
+
+/**
+ * PublicApi - functional programming interface
+ * @export
+ */
+export const PublicApiFp = function(configuration?: Configuration) {
+    const localVarAxiosParamCreator = PublicApiAxiosParamCreator(configuration)
+    return {
+        /**
+         * Returns a media file.
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        async publicAsset(options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<any>> {
+            const localVarAxiosArgs = await localVarAxiosParamCreator.publicAsset(options);
+            return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
+        },
+        /**
+         * Returns asset response headers (cache status, content type and/or length, accepted ranges etc.)
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        async publicAssetHead(options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<void>> {
+            const localVarAxiosArgs = await localVarAxiosParamCreator.publicAssetHead(options);
+            return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
+        },
+        /**
+         * Returns list of distributed buckets
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        async publicBuckets(options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<BucketsResponse>> {
+            const localVarAxiosArgs = await localVarAxiosParamCreator.publicBuckets(options);
+            return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
+        },
+        /**
+         * Returns json object describing current node status.
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        async publicStatus(options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<StatusResponse>> {
+            const localVarAxiosArgs = await localVarAxiosParamCreator.publicStatus(options);
+            return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
+        },
+    }
+};
+
+/**
+ * PublicApi - factory interface
+ * @export
+ */
+export const PublicApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) {
+    const localVarFp = PublicApiFp(configuration)
+    return {
+        /**
+         * Returns a media file.
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicAsset(options?: any): AxiosPromise<any> {
+            return localVarFp.publicAsset(options).then((request) => request(axios, basePath));
+        },
+        /**
+         * Returns asset response headers (cache status, content type and/or length, accepted ranges etc.)
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicAssetHead(options?: any): AxiosPromise<void> {
+            return localVarFp.publicAssetHead(options).then((request) => request(axios, basePath));
+        },
+        /**
+         * Returns list of distributed buckets
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicBuckets(options?: any): AxiosPromise<BucketsResponse> {
+            return localVarFp.publicBuckets(options).then((request) => request(axios, basePath));
+        },
+        /**
+         * Returns json object describing current node status.
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicStatus(options?: any): AxiosPromise<StatusResponse> {
+            return localVarFp.publicStatus(options).then((request) => request(axios, basePath));
+        },
+    };
+};
+
+/**
+ * PublicApi - object-oriented interface
+ * @export
+ * @class PublicApi
+ * @extends {BaseAPI}
+ */
+export class PublicApi extends BaseAPI {
+    /**
+     * Returns a media file.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     * @memberof PublicApi
+     */
+    public publicAsset(options?: any) {
+        return PublicApiFp(this.configuration).publicAsset(options).then((request) => request(this.axios, this.basePath));
+    }
+
+    /**
+     * Returns asset response headers (cache status, content type and/or length, accepted ranges etc.)
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     * @memberof PublicApi
+     */
+    public publicAssetHead(options?: any) {
+        return PublicApiFp(this.configuration).publicAssetHead(options).then((request) => request(this.axios, this.basePath));
+    }
+
+    /**
+     * Returns list of distributed buckets
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     * @memberof PublicApi
+     */
+    public publicBuckets(options?: any) {
+        return PublicApiFp(this.configuration).publicBuckets(options).then((request) => request(this.axios, this.basePath));
+    }
+
+    /**
+     * Returns json object describing current node status.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     * @memberof PublicApi
+     */
+    public publicStatus(options?: any) {
+        return PublicApiFp(this.configuration).publicStatus(options).then((request) => request(this.axios, this.basePath));
+    }
+}
+
+

+ 71 - 0
distributor-node/src/services/networking/distributor-node/generated/base.ts

@@ -0,0 +1,71 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Distributor node API
+ * Distributor node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+import { Configuration } from "./configuration";
+// Some imports not used depending on template conditions
+// @ts-ignore
+import globalAxios, { AxiosPromise, AxiosInstance } from 'axios';
+
+export const BASE_PATH = "http://localhost:3334/api/v1".replace(/\/+$/, "");
+
+/**
+ *
+ * @export
+ */
+export const COLLECTION_FORMATS = {
+    csv: ",",
+    ssv: " ",
+    tsv: "\t",
+    pipes: "|",
+};
+
+/**
+ *
+ * @export
+ * @interface RequestArgs
+ */
+export interface RequestArgs {
+    url: string;
+    options: any;
+}
+
+/**
+ *
+ * @export
+ * @class BaseAPI
+ */
+export class BaseAPI {
+    protected configuration: Configuration | undefined;
+
+    constructor(configuration?: Configuration, protected basePath: string = BASE_PATH, protected axios: AxiosInstance = globalAxios) {
+        if (configuration) {
+            this.configuration = configuration;
+            this.basePath = configuration.basePath || this.basePath;
+        }
+    }
+};
+
+/**
+ *
+ * @export
+ * @class RequiredError
+ * @extends {Error}
+ */
+export class RequiredError extends Error {
+    name: "RequiredError" = "RequiredError";
+    constructor(public field: string, msg?: string) {
+        super(msg);
+    }
+}

+ 138 - 0
distributor-node/src/services/networking/distributor-node/generated/common.ts

@@ -0,0 +1,138 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Distributor node API
+ * Distributor node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+import { Configuration } from "./configuration";
+import { RequiredError, RequestArgs } from "./base";
+import { AxiosInstance } from 'axios';
+
+/**
+ *
+ * @export
+ */
+export const DUMMY_BASE_URL = 'https://example.com'
+
+/**
+ *
+ * @throws {RequiredError}
+ * @export
+ */
+export const assertParamExists = function (functionName: string, paramName: string, paramValue: unknown) {
+    if (paramValue === null || paramValue === undefined) {
+        throw new RequiredError(paramName, `Required parameter ${paramName} was null or undefined when calling ${functionName}.`);
+    }
+}
+
+/**
+ *
+ * @export
+ */
+export const setApiKeyToObject = async function (object: any, keyParamName: string, configuration?: Configuration) {
+    if (configuration && configuration.apiKey) {
+        const localVarApiKeyValue = typeof configuration.apiKey === 'function'
+            ? await configuration.apiKey(keyParamName)
+            : await configuration.apiKey;
+        object[keyParamName] = localVarApiKeyValue;
+    }
+}
+
+/**
+ *
+ * @export
+ */
+export const setBasicAuthToObject = function (object: any, configuration?: Configuration) {
+    if (configuration && (configuration.username || configuration.password)) {
+        object["auth"] = { username: configuration.username, password: configuration.password };
+    }
+}
+
+/**
+ *
+ * @export
+ */
+export const setBearerAuthToObject = async function (object: any, configuration?: Configuration) {
+    if (configuration && configuration.accessToken) {
+        const accessToken = typeof configuration.accessToken === 'function'
+            ? await configuration.accessToken()
+            : await configuration.accessToken;
+        object["Authorization"] = "Bearer " + accessToken;
+    }
+}
+
+/**
+ *
+ * @export
+ */
+export const setOAuthToObject = async function (object: any, name: string, scopes: string[], configuration?: Configuration) {
+    if (configuration && configuration.accessToken) {
+        const localVarAccessTokenValue = typeof configuration.accessToken === 'function'
+            ? await configuration.accessToken(name, scopes)
+            : await configuration.accessToken;
+        object["Authorization"] = "Bearer " + localVarAccessTokenValue;
+    }
+}
+
+/**
+ *
+ * @export
+ */
+export const setSearchParams = function (url: URL, ...objects: any[]) {
+    const searchParams = new URLSearchParams(url.search);
+    for (const object of objects) {
+        for (const key in object) {
+            if (Array.isArray(object[key])) {
+                searchParams.delete(key);
+                for (const item of object[key]) {
+                    searchParams.append(key, item);
+                }
+            } else {
+                searchParams.set(key, object[key]);
+            }
+        }
+    }
+    url.search = searchParams.toString();
+}
+
+/**
+ *
+ * @export
+ */
+export const serializeDataIfNeeded = function (value: any, requestOptions: any, configuration?: Configuration) {
+    const nonString = typeof value !== 'string';
+    const needsSerialization = nonString && configuration && configuration.isJsonMime
+        ? configuration.isJsonMime(requestOptions.headers['Content-Type'])
+        : nonString;
+    return needsSerialization
+        ? JSON.stringify(value !== undefined ? value : {})
+        : (value || "");
+}
+
+/**
+ *
+ * @export
+ */
+export const toPathString = function (url: URL) {
+    return url.pathname + url.search + url.hash
+}
+
+/**
+ *
+ * @export
+ */
+export const createRequestFunction = function (axiosArgs: RequestArgs, globalAxios: AxiosInstance, BASE_PATH: string, configuration?: Configuration) {
+    return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => {
+        const axiosRequestArgs = {...axiosArgs.options, url: (configuration?.basePath || basePath) + axiosArgs.url};
+        return axios.request(axiosRequestArgs);
+    };
+}

+ 101 - 0
distributor-node/src/services/networking/distributor-node/generated/configuration.ts

@@ -0,0 +1,101 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Distributor node API
+ * Distributor node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+export interface ConfigurationParameters {
+    apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>);
+    username?: string;
+    password?: string;
+    accessToken?: string | Promise<string> | ((name?: string, scopes?: string[]) => string) | ((name?: string, scopes?: string[]) => Promise<string>);
+    basePath?: string;
+    baseOptions?: any;
+    formDataCtor?: new () => any;
+}
+
+export class Configuration {
+    /**
+     * parameter for apiKey security
+     * @param name security name
+     * @memberof Configuration
+     */
+    apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>);
+    /**
+     * parameter for basic security
+     *
+     * @type {string}
+     * @memberof Configuration
+     */
+    username?: string;
+    /**
+     * parameter for basic security
+     *
+     * @type {string}
+     * @memberof Configuration
+     */
+    password?: string;
+    /**
+     * parameter for oauth2 security
+     * @param name security name
+     * @param scopes oauth2 scope
+     * @memberof Configuration
+     */
+    accessToken?: string | Promise<string> | ((name?: string, scopes?: string[]) => string) | ((name?: string, scopes?: string[]) => Promise<string>);
+    /**
+     * override base path
+     *
+     * @type {string}
+     * @memberof Configuration
+     */
+    basePath?: string;
+    /**
+     * base options for axios calls
+     *
+     * @type {any}
+     * @memberof Configuration
+     */
+    baseOptions?: any;
+    /**
+     * The FormData constructor that will be used to create multipart form data
+     * requests. You can inject this here so that execution environments that
+     * do not support the FormData class can still run the generated client.
+     *
+     * @type {new () => FormData}
+     */
+    formDataCtor?: new () => any;
+
+    constructor(param: ConfigurationParameters = {}) {
+        this.apiKey = param.apiKey;
+        this.username = param.username;
+        this.password = param.password;
+        this.accessToken = param.accessToken;
+        this.basePath = param.basePath;
+        this.baseOptions = param.baseOptions;
+        this.formDataCtor = param.formDataCtor;
+    }
+
+    /**
+     * Check if the given MIME is a JSON MIME.
+     * JSON MIME examples:
+     *   application/json
+     *   application/json; charset=UTF8
+     *   APPLICATION/JSON
+     *   application/vnd.company+json
+     * @param mime - MIME (Multipurpose Internet Mail Extensions)
+     * @return True if the given MIME is JSON, false otherwise.
+     */
+    public isJsonMime(mime: string): boolean {
+        const jsonMime: RegExp = new RegExp('^(application\/json|[^;/ \t]+\/[^;/ \t]+[+]json)[ \t]*(;.*)?$', 'i');
+        return mime !== null && (jsonMime.test(mime) || mime.toLowerCase() === 'application/json-patch+json');
+    }
+}

+ 18 - 0
distributor-node/src/services/networking/distributor-node/generated/index.ts

@@ -0,0 +1,18 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Distributor node API
+ * Distributor node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+export * from "./api";
+export * from "./configuration";
+

+ 1 - 0
distributor-node/src/services/networking/index.ts

@@ -0,0 +1 @@
+export { NetworkingService } from './NetworkingService'

+ 91 - 0
distributor-node/src/services/networking/query-node/api.ts

@@ -0,0 +1,91 @@
+import { ApolloClient, NormalizedCacheObject, HttpLink, InMemoryCache, DocumentNode } from '@apollo/client/core'
+import fetch from 'cross-fetch'
+import {
+  DataObjectDetailsFragment,
+  GetDataObjectDetails,
+  GetDataObjectDetailsQuery,
+  GetDataObjectDetailsQueryVariables,
+  DistirubtionBucketWithObjectsFragment,
+  GetDistributionBucketsWithObjectsByIdsQuery,
+  GetDistributionBucketsWithObjectsByIdsQueryVariables,
+  GetDistributionBucketsWithObjectsByIds,
+  GetDistributionBucketsWithObjectsByWorkerIdQuery,
+  GetDistributionBucketsWithObjectsByWorkerIdQueryVariables,
+  GetDistributionBucketsWithObjectsByWorkerId,
+  StorageBucketOperatorFieldsFragment,
+  GetActiveStorageBucketOperatorsDataQuery,
+  GetActiveStorageBucketOperatorsDataQueryVariables,
+  GetActiveStorageBucketOperatorsData,
+} from './generated/queries'
+import { Maybe } from './generated/schema'
+
+export class QueryNodeApi {
+  private apolloClient: ApolloClient<NormalizedCacheObject>
+
+  public constructor(endpoint: string) {
+    this.apolloClient = new ApolloClient({
+      link: new HttpLink({ uri: endpoint, fetch }),
+      cache: new InMemoryCache(),
+      defaultOptions: { query: { fetchPolicy: 'no-cache', errorPolicy: 'all' } },
+    })
+  }
+
+  // Get entity by unique input
+  protected async uniqueEntityQuery<
+    QueryT extends { [k: string]: Maybe<Record<string, unknown>> | undefined },
+    VariablesT extends Record<string, unknown>
+  >(
+    query: DocumentNode,
+    variables: VariablesT,
+    resultKey: keyof QueryT
+  ): Promise<Required<QueryT>[keyof QueryT] | null> {
+    return (await this.apolloClient.query<QueryT, VariablesT>({ query, variables })).data[resultKey] || null
+  }
+
+  // Get entities by "non-unique" input and return first result
+  protected async firstEntityQuery<
+    QueryT extends { [k: string]: unknown[] },
+    VariablesT extends Record<string, unknown>
+  >(query: DocumentNode, variables: VariablesT, resultKey: keyof QueryT): Promise<QueryT[keyof QueryT][number] | null> {
+    return (await this.apolloClient.query<QueryT, VariablesT>({ query, variables })).data[resultKey][0] || null
+  }
+
+  // Query-node: get multiple entities
+  protected async multipleEntitiesQuery<
+    QueryT extends { [k: string]: unknown[] },
+    VariablesT extends Record<string, unknown>
+  >(query: DocumentNode, variables: VariablesT, resultKey: keyof QueryT): Promise<QueryT[keyof QueryT]> {
+    return (await this.apolloClient.query<QueryT, VariablesT>({ query, variables })).data[resultKey]
+  }
+
+  public getDataObjectDetails(objectId: string): Promise<DataObjectDetailsFragment | null> {
+    return this.uniqueEntityQuery<GetDataObjectDetailsQuery, GetDataObjectDetailsQueryVariables>(
+      GetDataObjectDetails,
+      { id: objectId },
+      'storageDataObjectByUniqueInput'
+    )
+  }
+
+  public getDistributionBucketsWithObjectsByIds(ids: string[]): Promise<DistirubtionBucketWithObjectsFragment[]> {
+    return this.multipleEntitiesQuery<
+      GetDistributionBucketsWithObjectsByIdsQuery,
+      GetDistributionBucketsWithObjectsByIdsQueryVariables
+    >(GetDistributionBucketsWithObjectsByIds, { ids }, 'distributionBuckets')
+  }
+
+  public getDistributionBucketsWithObjectsByWorkerId(
+    workerId: number
+  ): Promise<DistirubtionBucketWithObjectsFragment[]> {
+    return this.multipleEntitiesQuery<
+      GetDistributionBucketsWithObjectsByWorkerIdQuery,
+      GetDistributionBucketsWithObjectsByWorkerIdQueryVariables
+    >(GetDistributionBucketsWithObjectsByWorkerId, { workerId }, 'distributionBuckets')
+  }
+
+  public getActiveStorageBucketOperatorsData(): Promise<StorageBucketOperatorFieldsFragment[]> {
+    return this.multipleEntitiesQuery<
+      GetActiveStorageBucketOperatorsDataQuery,
+      GetActiveStorageBucketOperatorsDataQueryVariables
+    >(GetActiveStorageBucketOperatorsData, {}, 'storageBuckets')
+  }
+}

+ 33 - 0
distributor-node/src/services/networking/query-node/codegen.yml

@@ -0,0 +1,33 @@
+# Paths are relative to the root distributor-node directory
+overwrite: true
+
+schema: '../query-node/generated/graphql-server/generated/schema.graphql'
+
+documents:
+  - 'src/services/networking/query-node/queries/*.graphql'
+
+config:
+  scalars:
+    Date: Date
+  preResolveTypes: true # avoid using Pick
+  skipTypename: true # skip __typename field in typings unless it's part of the query
+
+generates:
+  src/services/networking/query-node/generated/schema.ts:
+    hooks:
+      afterOneFileWrite:
+        - prettier --write
+        - eslint --fix
+    plugins:
+      - typescript
+  src/services/networking/query-node/generated/queries.ts:
+    preset: import-types
+    presetConfig:
+      typesPath: ./schema
+    hooks:
+      afterOneFileWrite:
+        - prettier --write
+        - eslint --fix
+    plugins:
+      - typescript-operations
+      - typescript-document-nodes

+ 115 - 0
distributor-node/src/services/networking/query-node/generated/queries.ts

@@ -0,0 +1,115 @@
+import * as Types from './schema';
+
+import gql from 'graphql-tag';
+export type DataObjectDetailsFragment = { id: string, size: any, ipfsHash: string, isAccepted: boolean, storageBag: { storageAssignments: Array<{ storageBucket: { id: string, operatorMetadata?: Types.Maybe<{ nodeEndpoint?: Types.Maybe<string> }>, operatorStatus: { __typename: 'StorageBucketOperatorStatusMissing' } | { __typename: 'StorageBucketOperatorStatusInvited' } | { __typename: 'StorageBucketOperatorStatusActive' } } }>, distirbutionAssignments: Array<{ distributionBucket: { id: string, operators: Array<{ workerId: number, status: Types.DistributionBucketOperatorStatus }> } }> } };
+
+export type GetDataObjectDetailsQueryVariables = Types.Exact<{
+  id: Types.Scalars['ID'];
+}>;
+
+
+export type GetDataObjectDetailsQuery = { storageDataObjectByUniqueInput?: Types.Maybe<DataObjectDetailsFragment> };
+
+export type DistirubtionBucketWithObjectsFragment = { id: string, bagAssignments: Array<{ storageBag: { objects: Array<{ id: string, size: any, ipfsHash: string }> } }> };
+
+export type GetDistributionBucketsWithObjectsByIdsQueryVariables = Types.Exact<{
+  ids?: Types.Maybe<Array<Types.Scalars['ID']> | Types.Scalars['ID']>;
+}>;
+
+
+export type GetDistributionBucketsWithObjectsByIdsQuery = { distributionBuckets: Array<DistirubtionBucketWithObjectsFragment> };
+
+export type GetDistributionBucketsWithObjectsByWorkerIdQueryVariables = Types.Exact<{
+  workerId: Types.Scalars['Int'];
+}>;
+
+
+export type GetDistributionBucketsWithObjectsByWorkerIdQuery = { distributionBuckets: Array<DistirubtionBucketWithObjectsFragment> };
+
+export type StorageBucketOperatorFieldsFragment = { id: string, operatorMetadata?: Types.Maybe<{ nodeEndpoint?: Types.Maybe<string> }> };
+
+export type GetActiveStorageBucketOperatorsDataQueryVariables = Types.Exact<{ [key: string]: never; }>;
+
+
+export type GetActiveStorageBucketOperatorsDataQuery = { storageBuckets: Array<StorageBucketOperatorFieldsFragment> };
+
+export const DataObjectDetails = gql`
+    fragment DataObjectDetails on StorageDataObject {
+  id
+  size
+  ipfsHash
+  isAccepted
+  storageBag {
+    storageAssignments {
+      storageBucket {
+        id
+        operatorMetadata {
+          nodeEndpoint
+        }
+        operatorStatus {
+          __typename
+        }
+      }
+    }
+    distirbutionAssignments {
+      distributionBucket {
+        id
+        operators {
+          workerId
+          status
+        }
+      }
+    }
+  }
+}
+    `;
+export const DistirubtionBucketWithObjects = gql`
+    fragment DistirubtionBucketWithObjects on DistributionBucket {
+  id
+  bagAssignments {
+    storageBag {
+      objects {
+        id
+        size
+        ipfsHash
+      }
+    }
+  }
+}
+    `;
+export const StorageBucketOperatorFields = gql`
+    fragment StorageBucketOperatorFields on StorageBucket {
+  id
+  operatorMetadata {
+    nodeEndpoint
+  }
+}
+    `;
+export const GetDataObjectDetails = gql`
+    query getDataObjectDetails($id: ID!) {
+  storageDataObjectByUniqueInput(where: {id: $id}) {
+    ...DataObjectDetails
+  }
+}
+    ${DataObjectDetails}`;
+export const GetDistributionBucketsWithObjectsByIds = gql`
+    query getDistributionBucketsWithObjectsByIds($ids: [ID!]) {
+  distributionBuckets(where: {id_in: $ids}) {
+    ...DistirubtionBucketWithObjects
+  }
+}
+    ${DistirubtionBucketWithObjects}`;
+export const GetDistributionBucketsWithObjectsByWorkerId = gql`
+    query getDistributionBucketsWithObjectsByWorkerId($workerId: Int!) {
+  distributionBuckets(where: {operators_some: {workerId_eq: $workerId, status_eq: ACTIVE}}) {
+    ...DistirubtionBucketWithObjects
+  }
+}
+    ${DistirubtionBucketWithObjects}`;
+export const GetActiveStorageBucketOperatorsData = gql`
+    query getActiveStorageBucketOperatorsData {
+  storageBuckets(where: {operatorStatus_json: {isTypeOf_eq: "StorageBucketOperatorStatusActive"}, operatorMetadata: {nodeEndpoint_contains: "http"}}, limit: 9999) {
+    ...StorageBucketOperatorFields
+  }
+}
+    ${StorageBucketOperatorFields}`;

+ 4710 - 0
distributor-node/src/services/networking/query-node/generated/schema.ts

@@ -0,0 +1,4710 @@
+export type Maybe<T> = T | null;
+export type Exact<T extends { [key: string]: unknown }> = { [K in keyof T]: T[K] };
+export type MakeOptional<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]?: Maybe<T[SubKey]> };
+export type MakeMaybe<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]: Maybe<T[SubKey]> };
+/** All built-in and custom scalars, mapped to their actual values */
+export type Scalars = {
+  ID: string;
+  String: string;
+  Boolean: boolean;
+  Int: number;
+  Float: number;
+  /** The javascript `Date` as string. Type represents date and time as the ISO Date string. */
+  DateTime: any;
+  /** The `JSONObject` scalar type represents JSON objects as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf). */
+  JSONObject: any;
+  /** GraphQL representation of BigInt */
+  BigInt: any;
+};
+
+export enum AssetAvailability {
+  Accepted = 'ACCEPTED',
+  Pending = 'PENDING',
+  Invalid = 'INVALID'
+}
+
+export type BaseGraphQlObject = {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+};
+
+export type BaseModel = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+};
+
+export type BaseModelUuid = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+};
+
+export type BaseWhereInput = {
+  id_eq?: Maybe<Scalars['String']>;
+  id_in?: Maybe<Array<Scalars['String']>>;
+  createdAt_eq?: Maybe<Scalars['String']>;
+  createdAt_lt?: Maybe<Scalars['String']>;
+  createdAt_lte?: Maybe<Scalars['String']>;
+  createdAt_gt?: Maybe<Scalars['String']>;
+  createdAt_gte?: Maybe<Scalars['String']>;
+  createdById_eq?: Maybe<Scalars['String']>;
+  updatedAt_eq?: Maybe<Scalars['String']>;
+  updatedAt_lt?: Maybe<Scalars['String']>;
+  updatedAt_lte?: Maybe<Scalars['String']>;
+  updatedAt_gt?: Maybe<Scalars['String']>;
+  updatedAt_gte?: Maybe<Scalars['String']>;
+  updatedById_eq?: Maybe<Scalars['String']>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['String']>;
+  deletedAt_lt?: Maybe<Scalars['String']>;
+  deletedAt_lte?: Maybe<Scalars['String']>;
+  deletedAt_gt?: Maybe<Scalars['String']>;
+  deletedAt_gte?: Maybe<Scalars['String']>;
+  deletedById_eq?: Maybe<Scalars['String']>;
+};
+
+
+export type Channel = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  ownerMember?: Maybe<Membership>;
+  ownerMemberId?: Maybe<Scalars['String']>;
+  ownerCuratorGroup?: Maybe<CuratorGroup>;
+  ownerCuratorGroupId?: Maybe<Scalars['String']>;
+  category?: Maybe<ChannelCategory>;
+  categoryId?: Maybe<Scalars['String']>;
+  /** Reward account where revenue is sent if set. */
+  rewardAccount?: Maybe<Scalars['String']>;
+  /** The title of the Channel */
+  title?: Maybe<Scalars['String']>;
+  /** The description of a Channel */
+  description?: Maybe<Scalars['String']>;
+  coverPhotoDataObject?: Maybe<DataObject>;
+  coverPhotoDataObjectId?: Maybe<Scalars['String']>;
+  /** URLs where the asset content can be accessed (if any) */
+  coverPhotoUrls: Array<Scalars['String']>;
+  /** Availability meta information */
+  coverPhotoAvailability: AssetAvailability;
+  avatarPhotoDataObject?: Maybe<DataObject>;
+  avatarPhotoDataObjectId?: Maybe<Scalars['String']>;
+  /** URLs where the asset content can be accessed (if any) */
+  avatarPhotoUrls: Array<Scalars['String']>;
+  /** Availability meta information */
+  avatarPhotoAvailability: AssetAvailability;
+  /** Flag signaling whether a channel is public. */
+  isPublic?: Maybe<Scalars['Boolean']>;
+  /** Flag signaling whether a channel is censored. */
+  isCensored: Scalars['Boolean'];
+  language?: Maybe<Language>;
+  languageId?: Maybe<Scalars['String']>;
+  videos: Array<Video>;
+  createdInBlock: Scalars['Int'];
+};
+
+export type ChannelCategoriesByNameFtsOutput = {
+  item: ChannelCategoriesByNameSearchResult;
+  rank: Scalars['Float'];
+  isTypeOf: Scalars['String'];
+  highlight: Scalars['String'];
+};
+
+export type ChannelCategoriesByNameSearchResult = ChannelCategory;
+
+/** Category of media channel */
+export type ChannelCategory = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** The name of the category */
+  name?: Maybe<Scalars['String']>;
+  channels: Array<Channel>;
+  createdInBlock: Scalars['Int'];
+};
+
+export type ChannelCategoryConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<ChannelCategoryEdge>;
+  pageInfo: PageInfo;
+};
+
+export type ChannelCategoryCreateInput = {
+  name?: Maybe<Scalars['String']>;
+  createdInBlock: Scalars['Float'];
+};
+
+export type ChannelCategoryEdge = {
+  node: ChannelCategory;
+  cursor: Scalars['String'];
+};
+
+export enum ChannelCategoryOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NameAsc = 'name_ASC',
+  NameDesc = 'name_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC'
+}
+
+export type ChannelCategoryUpdateInput = {
+  name?: Maybe<Scalars['String']>;
+  createdInBlock?: Maybe<Scalars['Float']>;
+};
+
+export type ChannelCategoryWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  name_eq?: Maybe<Scalars['String']>;
+  name_contains?: Maybe<Scalars['String']>;
+  name_startsWith?: Maybe<Scalars['String']>;
+  name_endsWith?: Maybe<Scalars['String']>;
+  name_in?: Maybe<Array<Scalars['String']>>;
+  createdInBlock_eq?: Maybe<Scalars['Int']>;
+  createdInBlock_gt?: Maybe<Scalars['Int']>;
+  createdInBlock_gte?: Maybe<Scalars['Int']>;
+  createdInBlock_lt?: Maybe<Scalars['Int']>;
+  createdInBlock_lte?: Maybe<Scalars['Int']>;
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>;
+  channels_none?: Maybe<ChannelWhereInput>;
+  channels_some?: Maybe<ChannelWhereInput>;
+  channels_every?: Maybe<ChannelWhereInput>;
+  AND?: Maybe<Array<ChannelCategoryWhereInput>>;
+  OR?: Maybe<Array<ChannelCategoryWhereInput>>;
+};
+
+export type ChannelCategoryWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type ChannelConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<ChannelEdge>;
+  pageInfo: PageInfo;
+};
+
+export type ChannelCreateInput = {
+  ownerMember?: Maybe<Scalars['ID']>;
+  ownerCuratorGroup?: Maybe<Scalars['ID']>;
+  category?: Maybe<Scalars['ID']>;
+  rewardAccount?: Maybe<Scalars['String']>;
+  title?: Maybe<Scalars['String']>;
+  description?: Maybe<Scalars['String']>;
+  coverPhotoDataObject?: Maybe<Scalars['ID']>;
+  coverPhotoUrls: Array<Scalars['String']>;
+  coverPhotoAvailability: AssetAvailability;
+  avatarPhotoDataObject?: Maybe<Scalars['ID']>;
+  avatarPhotoUrls: Array<Scalars['String']>;
+  avatarPhotoAvailability: AssetAvailability;
+  isPublic?: Maybe<Scalars['Boolean']>;
+  isCensored: Scalars['Boolean'];
+  language?: Maybe<Scalars['ID']>;
+  createdInBlock: Scalars['Float'];
+};
+
+export type ChannelEdge = {
+  node: Channel;
+  cursor: Scalars['String'];
+};
+
+export enum ChannelOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  OwnerMemberAsc = 'ownerMember_ASC',
+  OwnerMemberDesc = 'ownerMember_DESC',
+  OwnerCuratorGroupAsc = 'ownerCuratorGroup_ASC',
+  OwnerCuratorGroupDesc = 'ownerCuratorGroup_DESC',
+  CategoryAsc = 'category_ASC',
+  CategoryDesc = 'category_DESC',
+  RewardAccountAsc = 'rewardAccount_ASC',
+  RewardAccountDesc = 'rewardAccount_DESC',
+  TitleAsc = 'title_ASC',
+  TitleDesc = 'title_DESC',
+  DescriptionAsc = 'description_ASC',
+  DescriptionDesc = 'description_DESC',
+  CoverPhotoDataObjectAsc = 'coverPhotoDataObject_ASC',
+  CoverPhotoDataObjectDesc = 'coverPhotoDataObject_DESC',
+  CoverPhotoAvailabilityAsc = 'coverPhotoAvailability_ASC',
+  CoverPhotoAvailabilityDesc = 'coverPhotoAvailability_DESC',
+  AvatarPhotoDataObjectAsc = 'avatarPhotoDataObject_ASC',
+  AvatarPhotoDataObjectDesc = 'avatarPhotoDataObject_DESC',
+  AvatarPhotoAvailabilityAsc = 'avatarPhotoAvailability_ASC',
+  AvatarPhotoAvailabilityDesc = 'avatarPhotoAvailability_DESC',
+  IsPublicAsc = 'isPublic_ASC',
+  IsPublicDesc = 'isPublic_DESC',
+  IsCensoredAsc = 'isCensored_ASC',
+  IsCensoredDesc = 'isCensored_DESC',
+  LanguageAsc = 'language_ASC',
+  LanguageDesc = 'language_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC'
+}
+
+export type ChannelUpdateInput = {
+  ownerMember?: Maybe<Scalars['ID']>;
+  ownerCuratorGroup?: Maybe<Scalars['ID']>;
+  category?: Maybe<Scalars['ID']>;
+  rewardAccount?: Maybe<Scalars['String']>;
+  title?: Maybe<Scalars['String']>;
+  description?: Maybe<Scalars['String']>;
+  coverPhotoDataObject?: Maybe<Scalars['ID']>;
+  coverPhotoUrls?: Maybe<Array<Scalars['String']>>;
+  coverPhotoAvailability?: Maybe<AssetAvailability>;
+  avatarPhotoDataObject?: Maybe<Scalars['ID']>;
+  avatarPhotoUrls?: Maybe<Array<Scalars['String']>>;
+  avatarPhotoAvailability?: Maybe<AssetAvailability>;
+  isPublic?: Maybe<Scalars['Boolean']>;
+  isCensored?: Maybe<Scalars['Boolean']>;
+  language?: Maybe<Scalars['ID']>;
+  createdInBlock?: Maybe<Scalars['Float']>;
+};
+
+export type ChannelWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  ownerMember_eq?: Maybe<Scalars['ID']>;
+  ownerMember_in?: Maybe<Array<Scalars['ID']>>;
+  ownerCuratorGroup_eq?: Maybe<Scalars['ID']>;
+  ownerCuratorGroup_in?: Maybe<Array<Scalars['ID']>>;
+  category_eq?: Maybe<Scalars['ID']>;
+  category_in?: Maybe<Array<Scalars['ID']>>;
+  rewardAccount_eq?: Maybe<Scalars['String']>;
+  rewardAccount_contains?: Maybe<Scalars['String']>;
+  rewardAccount_startsWith?: Maybe<Scalars['String']>;
+  rewardAccount_endsWith?: Maybe<Scalars['String']>;
+  rewardAccount_in?: Maybe<Array<Scalars['String']>>;
+  title_eq?: Maybe<Scalars['String']>;
+  title_contains?: Maybe<Scalars['String']>;
+  title_startsWith?: Maybe<Scalars['String']>;
+  title_endsWith?: Maybe<Scalars['String']>;
+  title_in?: Maybe<Array<Scalars['String']>>;
+  description_eq?: Maybe<Scalars['String']>;
+  description_contains?: Maybe<Scalars['String']>;
+  description_startsWith?: Maybe<Scalars['String']>;
+  description_endsWith?: Maybe<Scalars['String']>;
+  description_in?: Maybe<Array<Scalars['String']>>;
+  coverPhotoDataObject_eq?: Maybe<Scalars['ID']>;
+  coverPhotoDataObject_in?: Maybe<Array<Scalars['ID']>>;
+  coverPhotoAvailability_eq?: Maybe<AssetAvailability>;
+  coverPhotoAvailability_in?: Maybe<Array<AssetAvailability>>;
+  avatarPhotoDataObject_eq?: Maybe<Scalars['ID']>;
+  avatarPhotoDataObject_in?: Maybe<Array<Scalars['ID']>>;
+  avatarPhotoAvailability_eq?: Maybe<AssetAvailability>;
+  avatarPhotoAvailability_in?: Maybe<Array<AssetAvailability>>;
+  isPublic_eq?: Maybe<Scalars['Boolean']>;
+  isPublic_in?: Maybe<Array<Scalars['Boolean']>>;
+  isCensored_eq?: Maybe<Scalars['Boolean']>;
+  isCensored_in?: Maybe<Array<Scalars['Boolean']>>;
+  language_eq?: Maybe<Scalars['ID']>;
+  language_in?: Maybe<Array<Scalars['ID']>>;
+  createdInBlock_eq?: Maybe<Scalars['Int']>;
+  createdInBlock_gt?: Maybe<Scalars['Int']>;
+  createdInBlock_gte?: Maybe<Scalars['Int']>;
+  createdInBlock_lt?: Maybe<Scalars['Int']>;
+  createdInBlock_lte?: Maybe<Scalars['Int']>;
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>;
+  ownerMember?: Maybe<MembershipWhereInput>;
+  ownerCuratorGroup?: Maybe<CuratorGroupWhereInput>;
+  category?: Maybe<ChannelCategoryWhereInput>;
+  coverPhotoDataObject?: Maybe<DataObjectWhereInput>;
+  avatarPhotoDataObject?: Maybe<DataObjectWhereInput>;
+  language?: Maybe<LanguageWhereInput>;
+  videos_none?: Maybe<VideoWhereInput>;
+  videos_some?: Maybe<VideoWhereInput>;
+  videos_every?: Maybe<VideoWhereInput>;
+  AND?: Maybe<Array<ChannelWhereInput>>;
+  OR?: Maybe<Array<ChannelWhereInput>>;
+};
+
+export type ChannelWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type CuratorGroup = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** Curators belonging to this group */
+  curatorIds: Array<Scalars['Int']>;
+  /** Is group active or not */
+  isActive: Scalars['Boolean'];
+  channels: Array<Channel>;
+};
+
+export type CuratorGroupConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<CuratorGroupEdge>;
+  pageInfo: PageInfo;
+};
+
+export type CuratorGroupCreateInput = {
+  curatorIds: Array<Scalars['Int']>;
+  isActive: Scalars['Boolean'];
+};
+
+export type CuratorGroupEdge = {
+  node: CuratorGroup;
+  cursor: Scalars['String'];
+};
+
+export enum CuratorGroupOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsActiveAsc = 'isActive_ASC',
+  IsActiveDesc = 'isActive_DESC'
+}
+
+export type CuratorGroupUpdateInput = {
+  curatorIds?: Maybe<Array<Scalars['Int']>>;
+  isActive?: Maybe<Scalars['Boolean']>;
+};
+
+export type CuratorGroupWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  isActive_eq?: Maybe<Scalars['Boolean']>;
+  isActive_in?: Maybe<Array<Scalars['Boolean']>>;
+  channels_none?: Maybe<ChannelWhereInput>;
+  channels_some?: Maybe<ChannelWhereInput>;
+  channels_every?: Maybe<ChannelWhereInput>;
+  AND?: Maybe<Array<CuratorGroupWhereInput>>;
+  OR?: Maybe<Array<CuratorGroupWhereInput>>;
+};
+
+export type CuratorGroupWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+/** Manages content ids, type and storage provider decision about it */
+export type DataObject = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** Content owner */
+  owner: DataObjectOwner;
+  /** Content added at */
+  createdInBlock: Scalars['Int'];
+  /** Content type id */
+  typeId: Scalars['Int'];
+  /** Content size in bytes */
+  size: Scalars['Int'];
+  liaison?: Maybe<Worker>;
+  liaisonId?: Maybe<Scalars['String']>;
+  /** Storage provider as liaison judgment */
+  liaisonJudgement: LiaisonJudgement;
+  /** IPFS content id */
+  ipfsContentId: Scalars['String'];
+  /** Joystream runtime content */
+  joystreamContentId: Scalars['String'];
+  channelcoverPhotoDataObject?: Maybe<Array<Channel>>;
+  channelavatarPhotoDataObject?: Maybe<Array<Channel>>;
+  videothumbnailPhotoDataObject?: Maybe<Array<Video>>;
+  videomediaDataObject?: Maybe<Array<Video>>;
+};
+
+export type DataObjectConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<DataObjectEdge>;
+  pageInfo: PageInfo;
+};
+
+export type DataObjectCreateInput = {
+  owner: Scalars['JSONObject'];
+  createdInBlock: Scalars['Float'];
+  typeId: Scalars['Float'];
+  size: Scalars['Float'];
+  liaison?: Maybe<Scalars['ID']>;
+  liaisonJudgement: LiaisonJudgement;
+  ipfsContentId: Scalars['String'];
+  joystreamContentId: Scalars['String'];
+};
+
+export type DataObjectEdge = {
+  node: DataObject;
+  cursor: Scalars['String'];
+};
+
+export enum DataObjectOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+  TypeIdAsc = 'typeId_ASC',
+  TypeIdDesc = 'typeId_DESC',
+  SizeAsc = 'size_ASC',
+  SizeDesc = 'size_DESC',
+  LiaisonAsc = 'liaison_ASC',
+  LiaisonDesc = 'liaison_DESC',
+  LiaisonJudgementAsc = 'liaisonJudgement_ASC',
+  LiaisonJudgementDesc = 'liaisonJudgement_DESC',
+  IpfsContentIdAsc = 'ipfsContentId_ASC',
+  IpfsContentIdDesc = 'ipfsContentId_DESC',
+  JoystreamContentIdAsc = 'joystreamContentId_ASC',
+  JoystreamContentIdDesc = 'joystreamContentId_DESC'
+}
+
+export type DataObjectOwner = DataObjectOwnerMember | DataObjectOwnerChannel | DataObjectOwnerDao | DataObjectOwnerCouncil | DataObjectOwnerWorkingGroup;
+
+export type DataObjectOwnerChannel = {
+  /** Channel identifier */
+  channel: Scalars['Int'];
+  /** Variant needs to have at least one property. This value is not used. */
+  dummy?: Maybe<Scalars['Int']>;
+};
+
+export type DataObjectOwnerChannelCreateInput = {
+  channel: Scalars['Float'];
+  dummy?: Maybe<Scalars['Float']>;
+};
+
+export type DataObjectOwnerChannelUpdateInput = {
+  channel?: Maybe<Scalars['Float']>;
+  dummy?: Maybe<Scalars['Float']>;
+};
+
+export type DataObjectOwnerChannelWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  channel_eq?: Maybe<Scalars['Int']>;
+  channel_gt?: Maybe<Scalars['Int']>;
+  channel_gte?: Maybe<Scalars['Int']>;
+  channel_lt?: Maybe<Scalars['Int']>;
+  channel_lte?: Maybe<Scalars['Int']>;
+  channel_in?: Maybe<Array<Scalars['Int']>>;
+  dummy_eq?: Maybe<Scalars['Int']>;
+  dummy_gt?: Maybe<Scalars['Int']>;
+  dummy_gte?: Maybe<Scalars['Int']>;
+  dummy_lt?: Maybe<Scalars['Int']>;
+  dummy_lte?: Maybe<Scalars['Int']>;
+  dummy_in?: Maybe<Array<Scalars['Int']>>;
+  AND?: Maybe<Array<DataObjectOwnerChannelWhereInput>>;
+  OR?: Maybe<Array<DataObjectOwnerChannelWhereInput>>;
+};
+
+export type DataObjectOwnerChannelWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type DataObjectOwnerCouncil = {
+  /** Variant needs to have at least one property. This value is not used. */
+  dummy?: Maybe<Scalars['Int']>;
+};
+
+export type DataObjectOwnerCouncilCreateInput = {
+  dummy?: Maybe<Scalars['Float']>;
+};
+
+export type DataObjectOwnerCouncilUpdateInput = {
+  dummy?: Maybe<Scalars['Float']>;
+};
+
+export type DataObjectOwnerCouncilWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  dummy_eq?: Maybe<Scalars['Int']>;
+  dummy_gt?: Maybe<Scalars['Int']>;
+  dummy_gte?: Maybe<Scalars['Int']>;
+  dummy_lt?: Maybe<Scalars['Int']>;
+  dummy_lte?: Maybe<Scalars['Int']>;
+  dummy_in?: Maybe<Array<Scalars['Int']>>;
+  AND?: Maybe<Array<DataObjectOwnerCouncilWhereInput>>;
+  OR?: Maybe<Array<DataObjectOwnerCouncilWhereInput>>;
+};
+
+export type DataObjectOwnerCouncilWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type DataObjectOwnerDao = {
+  /** DAO identifier */
+  dao: Scalars['Int'];
+};
+
+export type DataObjectOwnerDaoCreateInput = {
+  dao: Scalars['Float'];
+};
+
+export type DataObjectOwnerDaoUpdateInput = {
+  dao?: Maybe<Scalars['Float']>;
+};
+
+export type DataObjectOwnerDaoWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  dao_eq?: Maybe<Scalars['Int']>;
+  dao_gt?: Maybe<Scalars['Int']>;
+  dao_gte?: Maybe<Scalars['Int']>;
+  dao_lt?: Maybe<Scalars['Int']>;
+  dao_lte?: Maybe<Scalars['Int']>;
+  dao_in?: Maybe<Array<Scalars['Int']>>;
+  AND?: Maybe<Array<DataObjectOwnerDaoWhereInput>>;
+  OR?: Maybe<Array<DataObjectOwnerDaoWhereInput>>;
+};
+
+export type DataObjectOwnerDaoWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type DataObjectOwnerMember = {
+  /** Member identifier */
+  member: Scalars['Int'];
+  /** Variant needs to have at least one property. This value is not used. */
+  dummy?: Maybe<Scalars['Int']>;
+};
+
+export type DataObjectOwnerMemberCreateInput = {
+  member: Scalars['Float'];
+  dummy?: Maybe<Scalars['Float']>;
+};
+
+export type DataObjectOwnerMemberUpdateInput = {
+  member?: Maybe<Scalars['Float']>;
+  dummy?: Maybe<Scalars['Float']>;
+};
+
+export type DataObjectOwnerMemberWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  member_eq?: Maybe<Scalars['Int']>;
+  member_gt?: Maybe<Scalars['Int']>;
+  member_gte?: Maybe<Scalars['Int']>;
+  member_lt?: Maybe<Scalars['Int']>;
+  member_lte?: Maybe<Scalars['Int']>;
+  member_in?: Maybe<Array<Scalars['Int']>>;
+  dummy_eq?: Maybe<Scalars['Int']>;
+  dummy_gt?: Maybe<Scalars['Int']>;
+  dummy_gte?: Maybe<Scalars['Int']>;
+  dummy_lt?: Maybe<Scalars['Int']>;
+  dummy_lte?: Maybe<Scalars['Int']>;
+  dummy_in?: Maybe<Array<Scalars['Int']>>;
+  AND?: Maybe<Array<DataObjectOwnerMemberWhereInput>>;
+  OR?: Maybe<Array<DataObjectOwnerMemberWhereInput>>;
+};
+
+export type DataObjectOwnerMemberWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type DataObjectOwnerWorkingGroup = {
+  /** Working group identifier */
+  workingGroup: Scalars['Int'];
+};
+
+export type DataObjectOwnerWorkingGroupCreateInput = {
+  workingGroup: Scalars['Float'];
+};
+
+export type DataObjectOwnerWorkingGroupUpdateInput = {
+  workingGroup?: Maybe<Scalars['Float']>;
+};
+
+export type DataObjectOwnerWorkingGroupWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  workingGroup_eq?: Maybe<Scalars['Int']>;
+  workingGroup_gt?: Maybe<Scalars['Int']>;
+  workingGroup_gte?: Maybe<Scalars['Int']>;
+  workingGroup_lt?: Maybe<Scalars['Int']>;
+  workingGroup_lte?: Maybe<Scalars['Int']>;
+  workingGroup_in?: Maybe<Array<Scalars['Int']>>;
+  AND?: Maybe<Array<DataObjectOwnerWorkingGroupWhereInput>>;
+  OR?: Maybe<Array<DataObjectOwnerWorkingGroupWhereInput>>;
+};
+
+export type DataObjectOwnerWorkingGroupWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type DataObjectUpdateInput = {
+  owner?: Maybe<Scalars['JSONObject']>;
+  createdInBlock?: Maybe<Scalars['Float']>;
+  typeId?: Maybe<Scalars['Float']>;
+  size?: Maybe<Scalars['Float']>;
+  liaison?: Maybe<Scalars['ID']>;
+  liaisonJudgement?: Maybe<LiaisonJudgement>;
+  ipfsContentId?: Maybe<Scalars['String']>;
+  joystreamContentId?: Maybe<Scalars['String']>;
+};
+
+export type DataObjectWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  owner_json?: Maybe<Scalars['JSONObject']>;
+  createdInBlock_eq?: Maybe<Scalars['Int']>;
+  createdInBlock_gt?: Maybe<Scalars['Int']>;
+  createdInBlock_gte?: Maybe<Scalars['Int']>;
+  createdInBlock_lt?: Maybe<Scalars['Int']>;
+  createdInBlock_lte?: Maybe<Scalars['Int']>;
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>;
+  typeId_eq?: Maybe<Scalars['Int']>;
+  typeId_gt?: Maybe<Scalars['Int']>;
+  typeId_gte?: Maybe<Scalars['Int']>;
+  typeId_lt?: Maybe<Scalars['Int']>;
+  typeId_lte?: Maybe<Scalars['Int']>;
+  typeId_in?: Maybe<Array<Scalars['Int']>>;
+  size_eq?: Maybe<Scalars['Int']>;
+  size_gt?: Maybe<Scalars['Int']>;
+  size_gte?: Maybe<Scalars['Int']>;
+  size_lt?: Maybe<Scalars['Int']>;
+  size_lte?: Maybe<Scalars['Int']>;
+  size_in?: Maybe<Array<Scalars['Int']>>;
+  liaison_eq?: Maybe<Scalars['ID']>;
+  liaison_in?: Maybe<Array<Scalars['ID']>>;
+  liaisonJudgement_eq?: Maybe<LiaisonJudgement>;
+  liaisonJudgement_in?: Maybe<Array<LiaisonJudgement>>;
+  ipfsContentId_eq?: Maybe<Scalars['String']>;
+  ipfsContentId_contains?: Maybe<Scalars['String']>;
+  ipfsContentId_startsWith?: Maybe<Scalars['String']>;
+  ipfsContentId_endsWith?: Maybe<Scalars['String']>;
+  ipfsContentId_in?: Maybe<Array<Scalars['String']>>;
+  joystreamContentId_eq?: Maybe<Scalars['String']>;
+  joystreamContentId_contains?: Maybe<Scalars['String']>;
+  joystreamContentId_startsWith?: Maybe<Scalars['String']>;
+  joystreamContentId_endsWith?: Maybe<Scalars['String']>;
+  joystreamContentId_in?: Maybe<Array<Scalars['String']>>;
+  liaison?: Maybe<WorkerWhereInput>;
+  channelcoverPhotoDataObject_none?: Maybe<ChannelWhereInput>;
+  channelcoverPhotoDataObject_some?: Maybe<ChannelWhereInput>;
+  channelcoverPhotoDataObject_every?: Maybe<ChannelWhereInput>;
+  channelavatarPhotoDataObject_none?: Maybe<ChannelWhereInput>;
+  channelavatarPhotoDataObject_some?: Maybe<ChannelWhereInput>;
+  channelavatarPhotoDataObject_every?: Maybe<ChannelWhereInput>;
+  videothumbnailPhotoDataObject_none?: Maybe<VideoWhereInput>;
+  videothumbnailPhotoDataObject_some?: Maybe<VideoWhereInput>;
+  videothumbnailPhotoDataObject_every?: Maybe<VideoWhereInput>;
+  videomediaDataObject_none?: Maybe<VideoWhereInput>;
+  videomediaDataObject_some?: Maybe<VideoWhereInput>;
+  videomediaDataObject_every?: Maybe<VideoWhereInput>;
+  AND?: Maybe<Array<DataObjectWhereInput>>;
+  OR?: Maybe<Array<DataObjectWhereInput>>;
+};
+
+export type DataObjectWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+
+export type DeleteResponse = {
+  id: Scalars['ID'];
+};
+
+export type DistributionBucket = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  family: DistributionBucketFamily;
+  familyId: Scalars['String'];
+  operators: Array<DistributionBucketOperator>;
+  /** Whether the bucket is accepting any new bags */
+  acceptingNewBags: Scalars['Boolean'];
+  /** Whether the bucket is currently distributing content */
+  distributing: Scalars['Boolean'];
+  bagAssignments: Array<StorageBagDistributionAssignment>;
+};
+
+export type DistributionBucketConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<DistributionBucketEdge>;
+  pageInfo: PageInfo;
+};
+
+export type DistributionBucketCreateInput = {
+  family: Scalars['ID'];
+  acceptingNewBags: Scalars['Boolean'];
+  distributing: Scalars['Boolean'];
+};
+
+export type DistributionBucketEdge = {
+  node: DistributionBucket;
+  cursor: Scalars['String'];
+};
+
+export type DistributionBucketFamily = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  metadata?: Maybe<DistributionBucketFamilyMetadata>;
+  metadataId?: Maybe<Scalars['String']>;
+  buckets: Array<DistributionBucket>;
+};
+
+export type DistributionBucketFamilyConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<DistributionBucketFamilyEdge>;
+  pageInfo: PageInfo;
+};
+
+export type DistributionBucketFamilyCreateInput = {
+  metadata?: Maybe<Scalars['ID']>;
+};
+
+export type DistributionBucketFamilyEdge = {
+  node: DistributionBucketFamily;
+  cursor: Scalars['String'];
+};
+
+export type DistributionBucketFamilyMetadata = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** Name of the geographical region covered by the family (ie.: us-east-1) */
+  region?: Maybe<Scalars['String']>;
+  /** Optional, more specific description of the region covered by the family */
+  description?: Maybe<Scalars['String']>;
+  boundary: Array<GeoCoordinates>;
+  distributionbucketfamilymetadata?: Maybe<Array<DistributionBucketFamily>>;
+};
+
+export type DistributionBucketFamilyMetadataConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<DistributionBucketFamilyMetadataEdge>;
+  pageInfo: PageInfo;
+};
+
+export type DistributionBucketFamilyMetadataCreateInput = {
+  region?: Maybe<Scalars['String']>;
+  description?: Maybe<Scalars['String']>;
+};
+
+export type DistributionBucketFamilyMetadataEdge = {
+  node: DistributionBucketFamilyMetadata;
+  cursor: Scalars['String'];
+};
+
+export enum DistributionBucketFamilyMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  RegionAsc = 'region_ASC',
+  RegionDesc = 'region_DESC',
+  DescriptionAsc = 'description_ASC',
+  DescriptionDesc = 'description_DESC'
+}
+
+export type DistributionBucketFamilyMetadataUpdateInput = {
+  region?: Maybe<Scalars['String']>;
+  description?: Maybe<Scalars['String']>;
+};
+
+export type DistributionBucketFamilyMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  region_eq?: Maybe<Scalars['String']>;
+  region_contains?: Maybe<Scalars['String']>;
+  region_startsWith?: Maybe<Scalars['String']>;
+  region_endsWith?: Maybe<Scalars['String']>;
+  region_in?: Maybe<Array<Scalars['String']>>;
+  description_eq?: Maybe<Scalars['String']>;
+  description_contains?: Maybe<Scalars['String']>;
+  description_startsWith?: Maybe<Scalars['String']>;
+  description_endsWith?: Maybe<Scalars['String']>;
+  description_in?: Maybe<Array<Scalars['String']>>;
+  boundary_none?: Maybe<GeoCoordinatesWhereInput>;
+  boundary_some?: Maybe<GeoCoordinatesWhereInput>;
+  boundary_every?: Maybe<GeoCoordinatesWhereInput>;
+  distributionbucketfamilymetadata_none?: Maybe<DistributionBucketFamilyWhereInput>;
+  distributionbucketfamilymetadata_some?: Maybe<DistributionBucketFamilyWhereInput>;
+  distributionbucketfamilymetadata_every?: Maybe<DistributionBucketFamilyWhereInput>;
+  AND?: Maybe<Array<DistributionBucketFamilyMetadataWhereInput>>;
+  OR?: Maybe<Array<DistributionBucketFamilyMetadataWhereInput>>;
+};
+
+export type DistributionBucketFamilyMetadataWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export enum DistributionBucketFamilyOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  MetadataAsc = 'metadata_ASC',
+  MetadataDesc = 'metadata_DESC'
+}
+
+export type DistributionBucketFamilyUpdateInput = {
+  metadata?: Maybe<Scalars['ID']>;
+};
+
+export type DistributionBucketFamilyWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  metadata_eq?: Maybe<Scalars['ID']>;
+  metadata_in?: Maybe<Array<Scalars['ID']>>;
+  metadata?: Maybe<DistributionBucketFamilyMetadataWhereInput>;
+  buckets_none?: Maybe<DistributionBucketWhereInput>;
+  buckets_some?: Maybe<DistributionBucketWhereInput>;
+  buckets_every?: Maybe<DistributionBucketWhereInput>;
+  AND?: Maybe<Array<DistributionBucketFamilyWhereInput>>;
+  OR?: Maybe<Array<DistributionBucketFamilyWhereInput>>;
+};
+
+export type DistributionBucketFamilyWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type DistributionBucketOperator = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  distributionBucket: DistributionBucket;
+  distributionBucketId: Scalars['String'];
+  /** ID of the distribution group worker */
+  workerId: Scalars['Int'];
+  /** Current operator status */
+  status: DistributionBucketOperatorStatus;
+  metadata?: Maybe<DistributionBucketOperatorMetadata>;
+  metadataId?: Maybe<Scalars['String']>;
+};
+
+export type DistributionBucketOperatorConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<DistributionBucketOperatorEdge>;
+  pageInfo: PageInfo;
+};
+
+export type DistributionBucketOperatorCreateInput = {
+  distributionBucket: Scalars['ID'];
+  workerId: Scalars['Float'];
+  status: DistributionBucketOperatorStatus;
+  metadata?: Maybe<Scalars['ID']>;
+};
+
+export type DistributionBucketOperatorEdge = {
+  node: DistributionBucketOperator;
+  cursor: Scalars['String'];
+};
+
+export type DistributionBucketOperatorMetadata = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** Root distributor node api endpoint */
+  nodeEndpoint?: Maybe<Scalars['String']>;
+  nodeLocation?: Maybe<NodeLocationMetadata>;
+  nodeLocationId?: Maybe<Scalars['String']>;
+  /** Additional information about the node/operator */
+  extra?: Maybe<Scalars['String']>;
+  distributionbucketoperatormetadata?: Maybe<Array<DistributionBucketOperator>>;
+};
+
+export type DistributionBucketOperatorMetadataConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<DistributionBucketOperatorMetadataEdge>;
+  pageInfo: PageInfo;
+};
+
+export type DistributionBucketOperatorMetadataCreateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>;
+  nodeLocation?: Maybe<Scalars['ID']>;
+  extra?: Maybe<Scalars['String']>;
+};
+
+export type DistributionBucketOperatorMetadataEdge = {
+  node: DistributionBucketOperatorMetadata;
+  cursor: Scalars['String'];
+};
+
+export enum DistributionBucketOperatorMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NodeEndpointAsc = 'nodeEndpoint_ASC',
+  NodeEndpointDesc = 'nodeEndpoint_DESC',
+  NodeLocationAsc = 'nodeLocation_ASC',
+  NodeLocationDesc = 'nodeLocation_DESC',
+  ExtraAsc = 'extra_ASC',
+  ExtraDesc = 'extra_DESC'
+}
+
+export type DistributionBucketOperatorMetadataUpdateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>;
+  nodeLocation?: Maybe<Scalars['ID']>;
+  extra?: Maybe<Scalars['String']>;
+};
+
+export type DistributionBucketOperatorMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  nodeEndpoint_eq?: Maybe<Scalars['String']>;
+  nodeEndpoint_contains?: Maybe<Scalars['String']>;
+  nodeEndpoint_startsWith?: Maybe<Scalars['String']>;
+  nodeEndpoint_endsWith?: Maybe<Scalars['String']>;
+  nodeEndpoint_in?: Maybe<Array<Scalars['String']>>;
+  nodeLocation_eq?: Maybe<Scalars['ID']>;
+  nodeLocation_in?: Maybe<Array<Scalars['ID']>>;
+  extra_eq?: Maybe<Scalars['String']>;
+  extra_contains?: Maybe<Scalars['String']>;
+  extra_startsWith?: Maybe<Scalars['String']>;
+  extra_endsWith?: Maybe<Scalars['String']>;
+  extra_in?: Maybe<Array<Scalars['String']>>;
+  nodeLocation?: Maybe<NodeLocationMetadataWhereInput>;
+  distributionbucketoperatormetadata_none?: Maybe<DistributionBucketOperatorWhereInput>;
+  distributionbucketoperatormetadata_some?: Maybe<DistributionBucketOperatorWhereInput>;
+  distributionbucketoperatormetadata_every?: Maybe<DistributionBucketOperatorWhereInput>;
+  AND?: Maybe<Array<DistributionBucketOperatorMetadataWhereInput>>;
+  OR?: Maybe<Array<DistributionBucketOperatorMetadataWhereInput>>;
+};
+
+export type DistributionBucketOperatorMetadataWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export enum DistributionBucketOperatorOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  DistributionBucketAsc = 'distributionBucket_ASC',
+  DistributionBucketDesc = 'distributionBucket_DESC',
+  WorkerIdAsc = 'workerId_ASC',
+  WorkerIdDesc = 'workerId_DESC',
+  StatusAsc = 'status_ASC',
+  StatusDesc = 'status_DESC',
+  MetadataAsc = 'metadata_ASC',
+  MetadataDesc = 'metadata_DESC'
+}
+
+export enum DistributionBucketOperatorStatus {
+  Invited = 'INVITED',
+  Active = 'ACTIVE'
+}
+
+export type DistributionBucketOperatorUpdateInput = {
+  distributionBucket?: Maybe<Scalars['ID']>;
+  workerId?: Maybe<Scalars['Float']>;
+  status?: Maybe<DistributionBucketOperatorStatus>;
+  metadata?: Maybe<Scalars['ID']>;
+};
+
+export type DistributionBucketOperatorWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  distributionBucket_eq?: Maybe<Scalars['ID']>;
+  distributionBucket_in?: Maybe<Array<Scalars['ID']>>;
+  workerId_eq?: Maybe<Scalars['Int']>;
+  workerId_gt?: Maybe<Scalars['Int']>;
+  workerId_gte?: Maybe<Scalars['Int']>;
+  workerId_lt?: Maybe<Scalars['Int']>;
+  workerId_lte?: Maybe<Scalars['Int']>;
+  workerId_in?: Maybe<Array<Scalars['Int']>>;
+  status_eq?: Maybe<DistributionBucketOperatorStatus>;
+  status_in?: Maybe<Array<DistributionBucketOperatorStatus>>;
+  metadata_eq?: Maybe<Scalars['ID']>;
+  metadata_in?: Maybe<Array<Scalars['ID']>>;
+  distributionBucket?: Maybe<DistributionBucketWhereInput>;
+  metadata?: Maybe<DistributionBucketOperatorMetadataWhereInput>;
+  AND?: Maybe<Array<DistributionBucketOperatorWhereInput>>;
+  OR?: Maybe<Array<DistributionBucketOperatorWhereInput>>;
+};
+
+export type DistributionBucketOperatorWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export enum DistributionBucketOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  FamilyAsc = 'family_ASC',
+  FamilyDesc = 'family_DESC',
+  AcceptingNewBagsAsc = 'acceptingNewBags_ASC',
+  AcceptingNewBagsDesc = 'acceptingNewBags_DESC',
+  DistributingAsc = 'distributing_ASC',
+  DistributingDesc = 'distributing_DESC'
+}
+
+export type DistributionBucketUpdateInput = {
+  family?: Maybe<Scalars['ID']>;
+  acceptingNewBags?: Maybe<Scalars['Boolean']>;
+  distributing?: Maybe<Scalars['Boolean']>;
+};
+
+export type DistributionBucketWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  family_eq?: Maybe<Scalars['ID']>;
+  family_in?: Maybe<Array<Scalars['ID']>>;
+  acceptingNewBags_eq?: Maybe<Scalars['Boolean']>;
+  acceptingNewBags_in?: Maybe<Array<Scalars['Boolean']>>;
+  distributing_eq?: Maybe<Scalars['Boolean']>;
+  distributing_in?: Maybe<Array<Scalars['Boolean']>>;
+  family?: Maybe<DistributionBucketFamilyWhereInput>;
+  operators_none?: Maybe<DistributionBucketOperatorWhereInput>;
+  operators_some?: Maybe<DistributionBucketOperatorWhereInput>;
+  operators_every?: Maybe<DistributionBucketOperatorWhereInput>;
+  bagAssignments_none?: Maybe<StorageBagDistributionAssignmentWhereInput>;
+  bagAssignments_some?: Maybe<StorageBagDistributionAssignmentWhereInput>;
+  bagAssignments_every?: Maybe<StorageBagDistributionAssignmentWhereInput>;
+  AND?: Maybe<Array<DistributionBucketWhereInput>>;
+  OR?: Maybe<Array<DistributionBucketWhereInput>>;
+};
+
+export type DistributionBucketWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type GeoCoordinates = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  latitude: Scalars['Float'];
+  longitude: Scalars['Float'];
+  boundarySourceBucketFamilyMeta?: Maybe<DistributionBucketFamilyMetadata>;
+  boundarySourceBucketFamilyMetaId?: Maybe<Scalars['String']>;
+  nodelocationmetadatacoordinates?: Maybe<Array<NodeLocationMetadata>>;
+};
+
+export type GeoCoordinatesConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<GeoCoordinatesEdge>;
+  pageInfo: PageInfo;
+};
+
+export type GeoCoordinatesCreateInput = {
+  latitude: Scalars['Float'];
+  longitude: Scalars['Float'];
+  boundarySourceBucketFamilyMeta?: Maybe<Scalars['ID']>;
+};
+
+export type GeoCoordinatesEdge = {
+  node: GeoCoordinates;
+  cursor: Scalars['String'];
+};
+
+export enum GeoCoordinatesOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  LatitudeAsc = 'latitude_ASC',
+  LatitudeDesc = 'latitude_DESC',
+  LongitudeAsc = 'longitude_ASC',
+  LongitudeDesc = 'longitude_DESC',
+  BoundarySourceBucketFamilyMetaAsc = 'boundarySourceBucketFamilyMeta_ASC',
+  BoundarySourceBucketFamilyMetaDesc = 'boundarySourceBucketFamilyMeta_DESC'
+}
+
+export type GeoCoordinatesUpdateInput = {
+  latitude?: Maybe<Scalars['Float']>;
+  longitude?: Maybe<Scalars['Float']>;
+  boundarySourceBucketFamilyMeta?: Maybe<Scalars['ID']>;
+};
+
+export type GeoCoordinatesWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  latitude_eq?: Maybe<Scalars['Float']>;
+  latitude_gt?: Maybe<Scalars['Float']>;
+  latitude_gte?: Maybe<Scalars['Float']>;
+  latitude_lt?: Maybe<Scalars['Float']>;
+  latitude_lte?: Maybe<Scalars['Float']>;
+  latitude_in?: Maybe<Array<Scalars['Float']>>;
+  longitude_eq?: Maybe<Scalars['Float']>;
+  longitude_gt?: Maybe<Scalars['Float']>;
+  longitude_gte?: Maybe<Scalars['Float']>;
+  longitude_lt?: Maybe<Scalars['Float']>;
+  longitude_lte?: Maybe<Scalars['Float']>;
+  longitude_in?: Maybe<Array<Scalars['Float']>>;
+  boundarySourceBucketFamilyMeta_eq?: Maybe<Scalars['ID']>;
+  boundarySourceBucketFamilyMeta_in?: Maybe<Array<Scalars['ID']>>;
+  boundarySourceBucketFamilyMeta?: Maybe<DistributionBucketFamilyMetadataWhereInput>;
+  nodelocationmetadatacoordinates_none?: Maybe<NodeLocationMetadataWhereInput>;
+  nodelocationmetadatacoordinates_some?: Maybe<NodeLocationMetadataWhereInput>;
+  nodelocationmetadatacoordinates_every?: Maybe<NodeLocationMetadataWhereInput>;
+  AND?: Maybe<Array<GeoCoordinatesWhereInput>>;
+  OR?: Maybe<Array<GeoCoordinatesWhereInput>>;
+};
+
+export type GeoCoordinatesWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+
+export type Language = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** Language identifier ISO 639-1 */
+  iso: Scalars['String'];
+  createdInBlock: Scalars['Int'];
+  channellanguage?: Maybe<Array<Channel>>;
+  videolanguage?: Maybe<Array<Video>>;
+};
+
+export type LanguageConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<LanguageEdge>;
+  pageInfo: PageInfo;
+};
+
+export type LanguageCreateInput = {
+  iso: Scalars['String'];
+  createdInBlock: Scalars['Float'];
+};
+
+export type LanguageEdge = {
+  node: Language;
+  cursor: Scalars['String'];
+};
+
+export enum LanguageOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsoAsc = 'iso_ASC',
+  IsoDesc = 'iso_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC'
+}
+
+export type LanguageUpdateInput = {
+  iso?: Maybe<Scalars['String']>;
+  createdInBlock?: Maybe<Scalars['Float']>;
+};
+
+export type LanguageWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  iso_eq?: Maybe<Scalars['String']>;
+  iso_contains?: Maybe<Scalars['String']>;
+  iso_startsWith?: Maybe<Scalars['String']>;
+  iso_endsWith?: Maybe<Scalars['String']>;
+  iso_in?: Maybe<Array<Scalars['String']>>;
+  createdInBlock_eq?: Maybe<Scalars['Int']>;
+  createdInBlock_gt?: Maybe<Scalars['Int']>;
+  createdInBlock_gte?: Maybe<Scalars['Int']>;
+  createdInBlock_lt?: Maybe<Scalars['Int']>;
+  createdInBlock_lte?: Maybe<Scalars['Int']>;
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>;
+  channellanguage_none?: Maybe<ChannelWhereInput>;
+  channellanguage_some?: Maybe<ChannelWhereInput>;
+  channellanguage_every?: Maybe<ChannelWhereInput>;
+  videolanguage_none?: Maybe<VideoWhereInput>;
+  videolanguage_some?: Maybe<VideoWhereInput>;
+  videolanguage_every?: Maybe<VideoWhereInput>;
+  AND?: Maybe<Array<LanguageWhereInput>>;
+  OR?: Maybe<Array<LanguageWhereInput>>;
+};
+
+export type LanguageWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export enum LiaisonJudgement {
+  Pending = 'PENDING',
+  Accepted = 'ACCEPTED'
+}
+
+export type License = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** License code defined by Joystream */
+  code?: Maybe<Scalars['Int']>;
+  /** Attribution (if required by the license) */
+  attribution?: Maybe<Scalars['String']>;
+  /** Custom license content */
+  customText?: Maybe<Scalars['String']>;
+  videolicense?: Maybe<Array<Video>>;
+};
+
+export type LicenseConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<LicenseEdge>;
+  pageInfo: PageInfo;
+};
+
+export type LicenseCreateInput = {
+  code?: Maybe<Scalars['Float']>;
+  attribution?: Maybe<Scalars['String']>;
+  customText?: Maybe<Scalars['String']>;
+};
+
+export type LicenseEdge = {
+  node: License;
+  cursor: Scalars['String'];
+};
+
+export enum LicenseOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CodeAsc = 'code_ASC',
+  CodeDesc = 'code_DESC',
+  AttributionAsc = 'attribution_ASC',
+  AttributionDesc = 'attribution_DESC',
+  CustomTextAsc = 'customText_ASC',
+  CustomTextDesc = 'customText_DESC'
+}
+
+export type LicenseUpdateInput = {
+  code?: Maybe<Scalars['Float']>;
+  attribution?: Maybe<Scalars['String']>;
+  customText?: Maybe<Scalars['String']>;
+};
+
+export type LicenseWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  code_eq?: Maybe<Scalars['Int']>;
+  code_gt?: Maybe<Scalars['Int']>;
+  code_gte?: Maybe<Scalars['Int']>;
+  code_lt?: Maybe<Scalars['Int']>;
+  code_lte?: Maybe<Scalars['Int']>;
+  code_in?: Maybe<Array<Scalars['Int']>>;
+  attribution_eq?: Maybe<Scalars['String']>;
+  attribution_contains?: Maybe<Scalars['String']>;
+  attribution_startsWith?: Maybe<Scalars['String']>;
+  attribution_endsWith?: Maybe<Scalars['String']>;
+  attribution_in?: Maybe<Array<Scalars['String']>>;
+  customText_eq?: Maybe<Scalars['String']>;
+  customText_contains?: Maybe<Scalars['String']>;
+  customText_startsWith?: Maybe<Scalars['String']>;
+  customText_endsWith?: Maybe<Scalars['String']>;
+  customText_in?: Maybe<Array<Scalars['String']>>;
+  videolicense_none?: Maybe<VideoWhereInput>;
+  videolicense_some?: Maybe<VideoWhereInput>;
+  videolicense_every?: Maybe<VideoWhereInput>;
+  AND?: Maybe<Array<LicenseWhereInput>>;
+  OR?: Maybe<Array<LicenseWhereInput>>;
+};
+
+export type LicenseWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type MembersByHandleFtsOutput = {
+  item: MembersByHandleSearchResult;
+  rank: Scalars['Float'];
+  isTypeOf: Scalars['String'];
+  highlight: Scalars['String'];
+};
+
+export type MembersByHandleSearchResult = Membership;
+
+/** Stored information about a registered user */
+export type Membership = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** The unique handle chosen by member */
+  handle: Scalars['String'];
+  /** A Url to member's Avatar image */
+  avatarUri?: Maybe<Scalars['String']>;
+  /** Short text chosen by member to share information about themselves */
+  about?: Maybe<Scalars['String']>;
+  /** Member's controller account id */
+  controllerAccount: Scalars['String'];
+  /** Member's root account id */
+  rootAccount: Scalars['String'];
+  /** Blocknumber when member was registered */
+  createdInBlock: Scalars['Int'];
+  /** How the member was registered */
+  entry: MembershipEntryMethod;
+  /** The type of subscription the member has purchased if any. */
+  subscription?: Maybe<Scalars['Int']>;
+  channels: Array<Channel>;
+};
+
+export type MembershipConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<MembershipEdge>;
+  pageInfo: PageInfo;
+};
+
+export type MembershipCreateInput = {
+  handle: Scalars['String'];
+  avatarUri?: Maybe<Scalars['String']>;
+  about?: Maybe<Scalars['String']>;
+  controllerAccount: Scalars['String'];
+  rootAccount: Scalars['String'];
+  createdInBlock: Scalars['Float'];
+  entry: MembershipEntryMethod;
+  subscription?: Maybe<Scalars['Float']>;
+};
+
+export type MembershipEdge = {
+  node: Membership;
+  cursor: Scalars['String'];
+};
+
+export enum MembershipEntryMethod {
+  Paid = 'PAID',
+  Screening = 'SCREENING',
+  Genesis = 'GENESIS'
+}
+
+export enum MembershipOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  HandleAsc = 'handle_ASC',
+  HandleDesc = 'handle_DESC',
+  AvatarUriAsc = 'avatarUri_ASC',
+  AvatarUriDesc = 'avatarUri_DESC',
+  AboutAsc = 'about_ASC',
+  AboutDesc = 'about_DESC',
+  ControllerAccountAsc = 'controllerAccount_ASC',
+  ControllerAccountDesc = 'controllerAccount_DESC',
+  RootAccountAsc = 'rootAccount_ASC',
+  RootAccountDesc = 'rootAccount_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+  EntryAsc = 'entry_ASC',
+  EntryDesc = 'entry_DESC',
+  SubscriptionAsc = 'subscription_ASC',
+  SubscriptionDesc = 'subscription_DESC'
+}
+
+export type MembershipUpdateInput = {
+  handle?: Maybe<Scalars['String']>;
+  avatarUri?: Maybe<Scalars['String']>;
+  about?: Maybe<Scalars['String']>;
+  controllerAccount?: Maybe<Scalars['String']>;
+  rootAccount?: Maybe<Scalars['String']>;
+  createdInBlock?: Maybe<Scalars['Float']>;
+  entry?: Maybe<MembershipEntryMethod>;
+  subscription?: Maybe<Scalars['Float']>;
+};
+
+export type MembershipWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  handle_eq?: Maybe<Scalars['String']>;
+  handle_contains?: Maybe<Scalars['String']>;
+  handle_startsWith?: Maybe<Scalars['String']>;
+  handle_endsWith?: Maybe<Scalars['String']>;
+  handle_in?: Maybe<Array<Scalars['String']>>;
+  avatarUri_eq?: Maybe<Scalars['String']>;
+  avatarUri_contains?: Maybe<Scalars['String']>;
+  avatarUri_startsWith?: Maybe<Scalars['String']>;
+  avatarUri_endsWith?: Maybe<Scalars['String']>;
+  avatarUri_in?: Maybe<Array<Scalars['String']>>;
+  about_eq?: Maybe<Scalars['String']>;
+  about_contains?: Maybe<Scalars['String']>;
+  about_startsWith?: Maybe<Scalars['String']>;
+  about_endsWith?: Maybe<Scalars['String']>;
+  about_in?: Maybe<Array<Scalars['String']>>;
+  controllerAccount_eq?: Maybe<Scalars['String']>;
+  controllerAccount_contains?: Maybe<Scalars['String']>;
+  controllerAccount_startsWith?: Maybe<Scalars['String']>;
+  controllerAccount_endsWith?: Maybe<Scalars['String']>;
+  controllerAccount_in?: Maybe<Array<Scalars['String']>>;
+  rootAccount_eq?: Maybe<Scalars['String']>;
+  rootAccount_contains?: Maybe<Scalars['String']>;
+  rootAccount_startsWith?: Maybe<Scalars['String']>;
+  rootAccount_endsWith?: Maybe<Scalars['String']>;
+  rootAccount_in?: Maybe<Array<Scalars['String']>>;
+  createdInBlock_eq?: Maybe<Scalars['Int']>;
+  createdInBlock_gt?: Maybe<Scalars['Int']>;
+  createdInBlock_gte?: Maybe<Scalars['Int']>;
+  createdInBlock_lt?: Maybe<Scalars['Int']>;
+  createdInBlock_lte?: Maybe<Scalars['Int']>;
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>;
+  entry_eq?: Maybe<MembershipEntryMethod>;
+  entry_in?: Maybe<Array<MembershipEntryMethod>>;
+  subscription_eq?: Maybe<Scalars['Int']>;
+  subscription_gt?: Maybe<Scalars['Int']>;
+  subscription_gte?: Maybe<Scalars['Int']>;
+  subscription_lt?: Maybe<Scalars['Int']>;
+  subscription_lte?: Maybe<Scalars['Int']>;
+  subscription_in?: Maybe<Array<Scalars['Int']>>;
+  channels_none?: Maybe<ChannelWhereInput>;
+  channels_some?: Maybe<ChannelWhereInput>;
+  channels_every?: Maybe<ChannelWhereInput>;
+  AND?: Maybe<Array<MembershipWhereInput>>;
+  OR?: Maybe<Array<MembershipWhereInput>>;
+};
+
+export type MembershipWhereUniqueInput = {
+  id?: Maybe<Scalars['ID']>;
+  handle?: Maybe<Scalars['String']>;
+};
+
+export type NextEntityId = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** Next deterministic id for entities without custom id */
+  nextId: Scalars['Int'];
+};
+
+export type NextEntityIdConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<NextEntityIdEdge>;
+  pageInfo: PageInfo;
+};
+
+export type NextEntityIdCreateInput = {
+  nextId: Scalars['Float'];
+};
+
+export type NextEntityIdEdge = {
+  node: NextEntityId;
+  cursor: Scalars['String'];
+};
+
+export enum NextEntityIdOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NextIdAsc = 'nextId_ASC',
+  NextIdDesc = 'nextId_DESC'
+}
+
+export type NextEntityIdUpdateInput = {
+  nextId?: Maybe<Scalars['Float']>;
+};
+
+export type NextEntityIdWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  nextId_eq?: Maybe<Scalars['Int']>;
+  nextId_gt?: Maybe<Scalars['Int']>;
+  nextId_gte?: Maybe<Scalars['Int']>;
+  nextId_lt?: Maybe<Scalars['Int']>;
+  nextId_lte?: Maybe<Scalars['Int']>;
+  nextId_in?: Maybe<Array<Scalars['Int']>>;
+  AND?: Maybe<Array<NextEntityIdWhereInput>>;
+  OR?: Maybe<Array<NextEntityIdWhereInput>>;
+};
+
+export type NextEntityIdWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type NodeLocationMetadata = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** ISO 3166-1 alpha-2 country code (2 letters) */
+  countryCode?: Maybe<Scalars['String']>;
+  /** City name */
+  city?: Maybe<Scalars['String']>;
+  coordinates?: Maybe<GeoCoordinates>;
+  coordinatesId?: Maybe<Scalars['String']>;
+  distributionbucketoperatormetadatanodeLocation?: Maybe<Array<DistributionBucketOperatorMetadata>>;
+  storagebucketoperatormetadatanodeLocation?: Maybe<Array<StorageBucketOperatorMetadata>>;
+};
+
+export type NodeLocationMetadataConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<NodeLocationMetadataEdge>;
+  pageInfo: PageInfo;
+};
+
+export type NodeLocationMetadataCreateInput = {
+  countryCode?: Maybe<Scalars['String']>;
+  city?: Maybe<Scalars['String']>;
+  coordinates?: Maybe<Scalars['ID']>;
+};
+
+export type NodeLocationMetadataEdge = {
+  node: NodeLocationMetadata;
+  cursor: Scalars['String'];
+};
+
+export enum NodeLocationMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CountryCodeAsc = 'countryCode_ASC',
+  CountryCodeDesc = 'countryCode_DESC',
+  CityAsc = 'city_ASC',
+  CityDesc = 'city_DESC',
+  CoordinatesAsc = 'coordinates_ASC',
+  CoordinatesDesc = 'coordinates_DESC'
+}
+
+export type NodeLocationMetadataUpdateInput = {
+  countryCode?: Maybe<Scalars['String']>;
+  city?: Maybe<Scalars['String']>;
+  coordinates?: Maybe<Scalars['ID']>;
+};
+
+export type NodeLocationMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  countryCode_eq?: Maybe<Scalars['String']>;
+  countryCode_contains?: Maybe<Scalars['String']>;
+  countryCode_startsWith?: Maybe<Scalars['String']>;
+  countryCode_endsWith?: Maybe<Scalars['String']>;
+  countryCode_in?: Maybe<Array<Scalars['String']>>;
+  city_eq?: Maybe<Scalars['String']>;
+  city_contains?: Maybe<Scalars['String']>;
+  city_startsWith?: Maybe<Scalars['String']>;
+  city_endsWith?: Maybe<Scalars['String']>;
+  city_in?: Maybe<Array<Scalars['String']>>;
+  coordinates_eq?: Maybe<Scalars['ID']>;
+  coordinates_in?: Maybe<Array<Scalars['ID']>>;
+  coordinates?: Maybe<GeoCoordinatesWhereInput>;
+  distributionbucketoperatormetadatanodeLocation_none?: Maybe<DistributionBucketOperatorMetadataWhereInput>;
+  distributionbucketoperatormetadatanodeLocation_some?: Maybe<DistributionBucketOperatorMetadataWhereInput>;
+  distributionbucketoperatormetadatanodeLocation_every?: Maybe<DistributionBucketOperatorMetadataWhereInput>;
+  storagebucketoperatormetadatanodeLocation_none?: Maybe<StorageBucketOperatorMetadataWhereInput>;
+  storagebucketoperatormetadatanodeLocation_some?: Maybe<StorageBucketOperatorMetadataWhereInput>;
+  storagebucketoperatormetadatanodeLocation_every?: Maybe<StorageBucketOperatorMetadataWhereInput>;
+  AND?: Maybe<Array<NodeLocationMetadataWhereInput>>;
+  OR?: Maybe<Array<NodeLocationMetadataWhereInput>>;
+};
+
+export type NodeLocationMetadataWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type PageInfo = {
+  hasNextPage: Scalars['Boolean'];
+  hasPreviousPage: Scalars['Boolean'];
+  startCursor?: Maybe<Scalars['String']>;
+  endCursor?: Maybe<Scalars['String']>;
+};
+
+export type ProcessorState = {
+  lastCompleteBlock: Scalars['Float'];
+  lastProcessedEvent: Scalars['String'];
+  indexerHead: Scalars['Float'];
+  chainHead: Scalars['Float'];
+};
+
+export type Query = {
+  channelCategories: Array<ChannelCategory>;
+  channelCategoryByUniqueInput?: Maybe<ChannelCategory>;
+  channelCategoriesConnection: ChannelCategoryConnection;
+  channels: Array<Channel>;
+  channelByUniqueInput?: Maybe<Channel>;
+  channelsConnection: ChannelConnection;
+  curatorGroups: Array<CuratorGroup>;
+  curatorGroupByUniqueInput?: Maybe<CuratorGroup>;
+  curatorGroupsConnection: CuratorGroupConnection;
+  dataObjects: Array<DataObject>;
+  dataObjectByUniqueInput?: Maybe<DataObject>;
+  dataObjectsConnection: DataObjectConnection;
+  distributionBucketFamilyMetadata: Array<DistributionBucketFamilyMetadata>;
+  distributionBucketFamilyMetadataByUniqueInput?: Maybe<DistributionBucketFamilyMetadata>;
+  distributionBucketFamilyMetadataConnection: DistributionBucketFamilyMetadataConnection;
+  distributionBucketFamilies: Array<DistributionBucketFamily>;
+  distributionBucketFamilyByUniqueInput?: Maybe<DistributionBucketFamily>;
+  distributionBucketFamiliesConnection: DistributionBucketFamilyConnection;
+  distributionBucketOperatorMetadata: Array<DistributionBucketOperatorMetadata>;
+  distributionBucketOperatorMetadataByUniqueInput?: Maybe<DistributionBucketOperatorMetadata>;
+  distributionBucketOperatorMetadataConnection: DistributionBucketOperatorMetadataConnection;
+  distributionBucketOperators: Array<DistributionBucketOperator>;
+  distributionBucketOperatorByUniqueInput?: Maybe<DistributionBucketOperator>;
+  distributionBucketOperatorsConnection: DistributionBucketOperatorConnection;
+  distributionBuckets: Array<DistributionBucket>;
+  distributionBucketByUniqueInput?: Maybe<DistributionBucket>;
+  distributionBucketsConnection: DistributionBucketConnection;
+  geoCoordinates: Array<GeoCoordinates>;
+  geoCoordinatesByUniqueInput?: Maybe<GeoCoordinates>;
+  geoCoordinatesConnection: GeoCoordinatesConnection;
+  languages: Array<Language>;
+  languageByUniqueInput?: Maybe<Language>;
+  languagesConnection: LanguageConnection;
+  licenses: Array<License>;
+  licenseByUniqueInput?: Maybe<License>;
+  licensesConnection: LicenseConnection;
+  memberships: Array<Membership>;
+  membershipByUniqueInput?: Maybe<Membership>;
+  membershipsConnection: MembershipConnection;
+  nextEntityIds: Array<NextEntityId>;
+  nextEntityIdByUniqueInput?: Maybe<NextEntityId>;
+  nextEntityIdsConnection: NextEntityIdConnection;
+  nodeLocationMetadata: Array<NodeLocationMetadata>;
+  nodeLocationMetadataByUniqueInput?: Maybe<NodeLocationMetadata>;
+  nodeLocationMetadataConnection: NodeLocationMetadataConnection;
+  channelCategoriesByName: Array<ChannelCategoriesByNameFtsOutput>;
+  membersByHandle: Array<MembersByHandleFtsOutput>;
+  search: Array<SearchFtsOutput>;
+  videoCategoriesByName: Array<VideoCategoriesByNameFtsOutput>;
+  storageBagDistributionAssignments: Array<StorageBagDistributionAssignment>;
+  storageBagDistributionAssignmentByUniqueInput?: Maybe<StorageBagDistributionAssignment>;
+  storageBagDistributionAssignmentsConnection: StorageBagDistributionAssignmentConnection;
+  storageBagStorageAssignments: Array<StorageBagStorageAssignment>;
+  storageBagStorageAssignmentByUniqueInput?: Maybe<StorageBagStorageAssignment>;
+  storageBagStorageAssignmentsConnection: StorageBagStorageAssignmentConnection;
+  storageBags: Array<StorageBag>;
+  storageBagByUniqueInput?: Maybe<StorageBag>;
+  storageBagsConnection: StorageBagConnection;
+  storageBucketOperatorMetadata: Array<StorageBucketOperatorMetadata>;
+  storageBucketOperatorMetadataByUniqueInput?: Maybe<StorageBucketOperatorMetadata>;
+  storageBucketOperatorMetadataConnection: StorageBucketOperatorMetadataConnection;
+  storageBuckets: Array<StorageBucket>;
+  storageBucketByUniqueInput?: Maybe<StorageBucket>;
+  storageBucketsConnection: StorageBucketConnection;
+  storageDataObjects: Array<StorageDataObject>;
+  storageDataObjectByUniqueInput?: Maybe<StorageDataObject>;
+  storageDataObjectsConnection: StorageDataObjectConnection;
+  storageSystemParameters: Array<StorageSystemParameters>;
+  storageSystemParametersByUniqueInput?: Maybe<StorageSystemParameters>;
+  storageSystemParametersConnection: StorageSystemParametersConnection;
+  videoCategories: Array<VideoCategory>;
+  videoCategoryByUniqueInput?: Maybe<VideoCategory>;
+  videoCategoriesConnection: VideoCategoryConnection;
+  videoMediaEncodings: Array<VideoMediaEncoding>;
+  videoMediaEncodingByUniqueInput?: Maybe<VideoMediaEncoding>;
+  videoMediaEncodingsConnection: VideoMediaEncodingConnection;
+  videoMediaMetadata: Array<VideoMediaMetadata>;
+  videoMediaMetadataByUniqueInput?: Maybe<VideoMediaMetadata>;
+  videoMediaMetadataConnection: VideoMediaMetadataConnection;
+  videos: Array<Video>;
+  videoByUniqueInput?: Maybe<Video>;
+  videosConnection: VideoConnection;
+  workers: Array<Worker>;
+  workerByUniqueInput?: Maybe<Worker>;
+  workersConnection: WorkerConnection;
+};
+
+
+export type QueryChannelCategoriesArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<ChannelCategoryWhereInput>;
+  orderBy?: Maybe<Array<ChannelCategoryOrderByInput>>;
+};
+
+
+export type QueryChannelCategoryByUniqueInputArgs = {
+  where: ChannelCategoryWhereUniqueInput;
+};
+
+
+export type QueryChannelCategoriesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<ChannelCategoryWhereInput>;
+  orderBy?: Maybe<Array<ChannelCategoryOrderByInput>>;
+};
+
+
+export type QueryChannelsArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<ChannelWhereInput>;
+  orderBy?: Maybe<Array<ChannelOrderByInput>>;
+};
+
+
+export type QueryChannelByUniqueInputArgs = {
+  where: ChannelWhereUniqueInput;
+};
+
+
+export type QueryChannelsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<ChannelWhereInput>;
+  orderBy?: Maybe<Array<ChannelOrderByInput>>;
+};
+
+
+export type QueryCuratorGroupsArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<CuratorGroupWhereInput>;
+  orderBy?: Maybe<Array<CuratorGroupOrderByInput>>;
+};
+
+
+export type QueryCuratorGroupByUniqueInputArgs = {
+  where: CuratorGroupWhereUniqueInput;
+};
+
+
+export type QueryCuratorGroupsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<CuratorGroupWhereInput>;
+  orderBy?: Maybe<Array<CuratorGroupOrderByInput>>;
+};
+
+
+export type QueryDataObjectsArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<DataObjectWhereInput>;
+  orderBy?: Maybe<Array<DataObjectOrderByInput>>;
+};
+
+
+export type QueryDataObjectByUniqueInputArgs = {
+  where: DataObjectWhereUniqueInput;
+};
+
+
+export type QueryDataObjectsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<DataObjectWhereInput>;
+  orderBy?: Maybe<Array<DataObjectOrderByInput>>;
+};
+
+
+export type QueryDistributionBucketFamilyMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<DistributionBucketFamilyMetadataWhereInput>;
+  orderBy?: Maybe<Array<DistributionBucketFamilyMetadataOrderByInput>>;
+};
+
+
+export type QueryDistributionBucketFamilyMetadataByUniqueInputArgs = {
+  where: DistributionBucketFamilyMetadataWhereUniqueInput;
+};
+
+
+export type QueryDistributionBucketFamilyMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<DistributionBucketFamilyMetadataWhereInput>;
+  orderBy?: Maybe<Array<DistributionBucketFamilyMetadataOrderByInput>>;
+};
+
+
+export type QueryDistributionBucketFamiliesArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<DistributionBucketFamilyWhereInput>;
+  orderBy?: Maybe<Array<DistributionBucketFamilyOrderByInput>>;
+};
+
+
+export type QueryDistributionBucketFamilyByUniqueInputArgs = {
+  where: DistributionBucketFamilyWhereUniqueInput;
+};
+
+
+export type QueryDistributionBucketFamiliesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<DistributionBucketFamilyWhereInput>;
+  orderBy?: Maybe<Array<DistributionBucketFamilyOrderByInput>>;
+};
+
+
+export type QueryDistributionBucketOperatorMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<DistributionBucketOperatorMetadataWhereInput>;
+  orderBy?: Maybe<Array<DistributionBucketOperatorMetadataOrderByInput>>;
+};
+
+
+export type QueryDistributionBucketOperatorMetadataByUniqueInputArgs = {
+  where: DistributionBucketOperatorMetadataWhereUniqueInput;
+};
+
+
+export type QueryDistributionBucketOperatorMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<DistributionBucketOperatorMetadataWhereInput>;
+  orderBy?: Maybe<Array<DistributionBucketOperatorMetadataOrderByInput>>;
+};
+
+
+export type QueryDistributionBucketOperatorsArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<DistributionBucketOperatorWhereInput>;
+  orderBy?: Maybe<Array<DistributionBucketOperatorOrderByInput>>;
+};
+
+
+export type QueryDistributionBucketOperatorByUniqueInputArgs = {
+  where: DistributionBucketOperatorWhereUniqueInput;
+};
+
+
+export type QueryDistributionBucketOperatorsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<DistributionBucketOperatorWhereInput>;
+  orderBy?: Maybe<Array<DistributionBucketOperatorOrderByInput>>;
+};
+
+
+export type QueryDistributionBucketsArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<DistributionBucketWhereInput>;
+  orderBy?: Maybe<Array<DistributionBucketOrderByInput>>;
+};
+
+
+export type QueryDistributionBucketByUniqueInputArgs = {
+  where: DistributionBucketWhereUniqueInput;
+};
+
+
+export type QueryDistributionBucketsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<DistributionBucketWhereInput>;
+  orderBy?: Maybe<Array<DistributionBucketOrderByInput>>;
+};
+
+
+export type QueryGeoCoordinatesArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<GeoCoordinatesWhereInput>;
+  orderBy?: Maybe<Array<GeoCoordinatesOrderByInput>>;
+};
+
+
+export type QueryGeoCoordinatesByUniqueInputArgs = {
+  where: GeoCoordinatesWhereUniqueInput;
+};
+
+
+export type QueryGeoCoordinatesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<GeoCoordinatesWhereInput>;
+  orderBy?: Maybe<Array<GeoCoordinatesOrderByInput>>;
+};
+
+
+export type QueryLanguagesArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<LanguageWhereInput>;
+  orderBy?: Maybe<Array<LanguageOrderByInput>>;
+};
+
+
+export type QueryLanguageByUniqueInputArgs = {
+  where: LanguageWhereUniqueInput;
+};
+
+
+export type QueryLanguagesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<LanguageWhereInput>;
+  orderBy?: Maybe<Array<LanguageOrderByInput>>;
+};
+
+
+export type QueryLicensesArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<LicenseWhereInput>;
+  orderBy?: Maybe<Array<LicenseOrderByInput>>;
+};
+
+
+export type QueryLicenseByUniqueInputArgs = {
+  where: LicenseWhereUniqueInput;
+};
+
+
+export type QueryLicensesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<LicenseWhereInput>;
+  orderBy?: Maybe<Array<LicenseOrderByInput>>;
+};
+
+
+export type QueryMembershipsArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<MembershipWhereInput>;
+  orderBy?: Maybe<Array<MembershipOrderByInput>>;
+};
+
+
+export type QueryMembershipByUniqueInputArgs = {
+  where: MembershipWhereUniqueInput;
+};
+
+
+export type QueryMembershipsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<MembershipWhereInput>;
+  orderBy?: Maybe<Array<MembershipOrderByInput>>;
+};
+
+
+export type QueryNextEntityIdsArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<NextEntityIdWhereInput>;
+  orderBy?: Maybe<Array<NextEntityIdOrderByInput>>;
+};
+
+
+export type QueryNextEntityIdByUniqueInputArgs = {
+  where: NextEntityIdWhereUniqueInput;
+};
+
+
+export type QueryNextEntityIdsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<NextEntityIdWhereInput>;
+  orderBy?: Maybe<Array<NextEntityIdOrderByInput>>;
+};
+
+
+export type QueryNodeLocationMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<NodeLocationMetadataWhereInput>;
+  orderBy?: Maybe<Array<NodeLocationMetadataOrderByInput>>;
+};
+
+
+export type QueryNodeLocationMetadataByUniqueInputArgs = {
+  where: NodeLocationMetadataWhereUniqueInput;
+};
+
+
+export type QueryNodeLocationMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<NodeLocationMetadataWhereInput>;
+  orderBy?: Maybe<Array<NodeLocationMetadataOrderByInput>>;
+};
+
+
+export type QueryChannelCategoriesByNameArgs = {
+  whereChannelCategory?: Maybe<ChannelCategoryWhereInput>;
+  skip?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  text: Scalars['String'];
+};
+
+
+export type QueryMembersByHandleArgs = {
+  whereMembership?: Maybe<MembershipWhereInput>;
+  skip?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  text: Scalars['String'];
+};
+
+
+export type QuerySearchArgs = {
+  whereVideo?: Maybe<VideoWhereInput>;
+  whereChannel?: Maybe<ChannelWhereInput>;
+  skip?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  text: Scalars['String'];
+};
+
+
+export type QueryVideoCategoriesByNameArgs = {
+  whereVideoCategory?: Maybe<VideoCategoryWhereInput>;
+  skip?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  text: Scalars['String'];
+};
+
+
+export type QueryStorageBagDistributionAssignmentsArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<StorageBagDistributionAssignmentWhereInput>;
+  orderBy?: Maybe<Array<StorageBagDistributionAssignmentOrderByInput>>;
+};
+
+
+export type QueryStorageBagDistributionAssignmentByUniqueInputArgs = {
+  where: StorageBagDistributionAssignmentWhereUniqueInput;
+};
+
+
+export type QueryStorageBagDistributionAssignmentsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<StorageBagDistributionAssignmentWhereInput>;
+  orderBy?: Maybe<Array<StorageBagDistributionAssignmentOrderByInput>>;
+};
+
+
+export type QueryStorageBagStorageAssignmentsArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<StorageBagStorageAssignmentWhereInput>;
+  orderBy?: Maybe<Array<StorageBagStorageAssignmentOrderByInput>>;
+};
+
+
+export type QueryStorageBagStorageAssignmentByUniqueInputArgs = {
+  where: StorageBagStorageAssignmentWhereUniqueInput;
+};
+
+
+export type QueryStorageBagStorageAssignmentsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<StorageBagStorageAssignmentWhereInput>;
+  orderBy?: Maybe<Array<StorageBagStorageAssignmentOrderByInput>>;
+};
+
+
+export type QueryStorageBagsArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<StorageBagWhereInput>;
+  orderBy?: Maybe<Array<StorageBagOrderByInput>>;
+};
+
+
+export type QueryStorageBagByUniqueInputArgs = {
+  where: StorageBagWhereUniqueInput;
+};
+
+
+export type QueryStorageBagsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<StorageBagWhereInput>;
+  orderBy?: Maybe<Array<StorageBagOrderByInput>>;
+};
+
+
+export type QueryStorageBucketOperatorMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<StorageBucketOperatorMetadataWhereInput>;
+  orderBy?: Maybe<Array<StorageBucketOperatorMetadataOrderByInput>>;
+};
+
+
+export type QueryStorageBucketOperatorMetadataByUniqueInputArgs = {
+  where: StorageBucketOperatorMetadataWhereUniqueInput;
+};
+
+
+export type QueryStorageBucketOperatorMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<StorageBucketOperatorMetadataWhereInput>;
+  orderBy?: Maybe<Array<StorageBucketOperatorMetadataOrderByInput>>;
+};
+
+
+export type QueryStorageBucketsArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<StorageBucketWhereInput>;
+  orderBy?: Maybe<Array<StorageBucketOrderByInput>>;
+};
+
+
+export type QueryStorageBucketByUniqueInputArgs = {
+  where: StorageBucketWhereUniqueInput;
+};
+
+
+export type QueryStorageBucketsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<StorageBucketWhereInput>;
+  orderBy?: Maybe<Array<StorageBucketOrderByInput>>;
+};
+
+
+export type QueryStorageDataObjectsArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<StorageDataObjectWhereInput>;
+  orderBy?: Maybe<Array<StorageDataObjectOrderByInput>>;
+};
+
+
+export type QueryStorageDataObjectByUniqueInputArgs = {
+  where: StorageDataObjectWhereUniqueInput;
+};
+
+
+export type QueryStorageDataObjectsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<StorageDataObjectWhereInput>;
+  orderBy?: Maybe<Array<StorageDataObjectOrderByInput>>;
+};
+
+
+export type QueryStorageSystemParametersArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<StorageSystemParametersWhereInput>;
+  orderBy?: Maybe<Array<StorageSystemParametersOrderByInput>>;
+};
+
+
+export type QueryStorageSystemParametersByUniqueInputArgs = {
+  where: StorageSystemParametersWhereUniqueInput;
+};
+
+
+export type QueryStorageSystemParametersConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<StorageSystemParametersWhereInput>;
+  orderBy?: Maybe<Array<StorageSystemParametersOrderByInput>>;
+};
+
+
+export type QueryVideoCategoriesArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<VideoCategoryWhereInput>;
+  orderBy?: Maybe<Array<VideoCategoryOrderByInput>>;
+};
+
+
+export type QueryVideoCategoryByUniqueInputArgs = {
+  where: VideoCategoryWhereUniqueInput;
+};
+
+
+export type QueryVideoCategoriesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<VideoCategoryWhereInput>;
+  orderBy?: Maybe<Array<VideoCategoryOrderByInput>>;
+};
+
+
+export type QueryVideoMediaEncodingsArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<VideoMediaEncodingWhereInput>;
+  orderBy?: Maybe<Array<VideoMediaEncodingOrderByInput>>;
+};
+
+
+export type QueryVideoMediaEncodingByUniqueInputArgs = {
+  where: VideoMediaEncodingWhereUniqueInput;
+};
+
+
+export type QueryVideoMediaEncodingsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<VideoMediaEncodingWhereInput>;
+  orderBy?: Maybe<Array<VideoMediaEncodingOrderByInput>>;
+};
+
+
+export type QueryVideoMediaMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<VideoMediaMetadataWhereInput>;
+  orderBy?: Maybe<Array<VideoMediaMetadataOrderByInput>>;
+};
+
+
+export type QueryVideoMediaMetadataByUniqueInputArgs = {
+  where: VideoMediaMetadataWhereUniqueInput;
+};
+
+
+export type QueryVideoMediaMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<VideoMediaMetadataWhereInput>;
+  orderBy?: Maybe<Array<VideoMediaMetadataOrderByInput>>;
+};
+
+
+export type QueryVideosArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<VideoWhereInput>;
+  orderBy?: Maybe<Array<VideoOrderByInput>>;
+};
+
+
+export type QueryVideoByUniqueInputArgs = {
+  where: VideoWhereUniqueInput;
+};
+
+
+export type QueryVideosConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<VideoWhereInput>;
+  orderBy?: Maybe<Array<VideoOrderByInput>>;
+};
+
+
+export type QueryWorkersArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<WorkerWhereInput>;
+  orderBy?: Maybe<Array<WorkerOrderByInput>>;
+};
+
+
+export type QueryWorkerByUniqueInputArgs = {
+  where: WorkerWhereUniqueInput;
+};
+
+
+export type QueryWorkersConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<WorkerWhereInput>;
+  orderBy?: Maybe<Array<WorkerOrderByInput>>;
+};
+
+export type SearchFtsOutput = {
+  item: SearchSearchResult;
+  rank: Scalars['Float'];
+  isTypeOf: Scalars['String'];
+  highlight: Scalars['String'];
+};
+
+export type SearchSearchResult = Channel | Video;
+
+export type StandardDeleteResponse = {
+  id: Scalars['ID'];
+};
+
+export type StorageBag = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  objects: Array<StorageDataObject>;
+  storageAssignments: Array<StorageBagStorageAssignment>;
+  distirbutionAssignments: Array<StorageBagDistributionAssignment>;
+  /** Owner of the storage bag */
+  owner: StorageBagOwner;
+};
+
+export type StorageBagConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<StorageBagEdge>;
+  pageInfo: PageInfo;
+};
+
+export type StorageBagCreateInput = {
+  owner: Scalars['JSONObject'];
+};
+
+export type StorageBagDistributionAssignment = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  storageBag: StorageBag;
+  storageBagId: Scalars['String'];
+  distributionBucket: DistributionBucket;
+  distributionBucketId: Scalars['String'];
+};
+
+export type StorageBagDistributionAssignmentConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<StorageBagDistributionAssignmentEdge>;
+  pageInfo: PageInfo;
+};
+
+export type StorageBagDistributionAssignmentCreateInput = {
+  storageBag: Scalars['ID'];
+  distributionBucket: Scalars['ID'];
+};
+
+export type StorageBagDistributionAssignmentEdge = {
+  node: StorageBagDistributionAssignment;
+  cursor: Scalars['String'];
+};
+
+export enum StorageBagDistributionAssignmentOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  StorageBagAsc = 'storageBag_ASC',
+  StorageBagDesc = 'storageBag_DESC',
+  DistributionBucketAsc = 'distributionBucket_ASC',
+  DistributionBucketDesc = 'distributionBucket_DESC'
+}
+
+export type StorageBagDistributionAssignmentUpdateInput = {
+  storageBag?: Maybe<Scalars['ID']>;
+  distributionBucket?: Maybe<Scalars['ID']>;
+};
+
+export type StorageBagDistributionAssignmentWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  storageBag_eq?: Maybe<Scalars['ID']>;
+  storageBag_in?: Maybe<Array<Scalars['ID']>>;
+  distributionBucket_eq?: Maybe<Scalars['ID']>;
+  distributionBucket_in?: Maybe<Array<Scalars['ID']>>;
+  storageBag?: Maybe<StorageBagWhereInput>;
+  distributionBucket?: Maybe<DistributionBucketWhereInput>;
+  AND?: Maybe<Array<StorageBagDistributionAssignmentWhereInput>>;
+  OR?: Maybe<Array<StorageBagDistributionAssignmentWhereInput>>;
+};
+
+export type StorageBagDistributionAssignmentWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type StorageBagEdge = {
+  node: StorageBag;
+  cursor: Scalars['String'];
+};
+
+export enum StorageBagOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC'
+}
+
+export type StorageBagOwner = StorageBagOwnerCouncil | StorageBagOwnerWorkingGroup | StorageBagOwnerMember | StorageBagOwnerChannel | StorageBagOwnerDao;
+
+export type StorageBagOwnerChannel = {
+  channelId?: Maybe<Scalars['Int']>;
+};
+
+export type StorageBagOwnerChannelCreateInput = {
+  channelId?: Maybe<Scalars['Float']>;
+};
+
+export type StorageBagOwnerChannelUpdateInput = {
+  channelId?: Maybe<Scalars['Float']>;
+};
+
+export type StorageBagOwnerChannelWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  channelId_eq?: Maybe<Scalars['Int']>;
+  channelId_gt?: Maybe<Scalars['Int']>;
+  channelId_gte?: Maybe<Scalars['Int']>;
+  channelId_lt?: Maybe<Scalars['Int']>;
+  channelId_lte?: Maybe<Scalars['Int']>;
+  channelId_in?: Maybe<Array<Scalars['Int']>>;
+  AND?: Maybe<Array<StorageBagOwnerChannelWhereInput>>;
+  OR?: Maybe<Array<StorageBagOwnerChannelWhereInput>>;
+};
+
+export type StorageBagOwnerChannelWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type StorageBagOwnerCouncil = {
+  phantom?: Maybe<Scalars['Int']>;
+};
+
+export type StorageBagOwnerCouncilCreateInput = {
+  phantom?: Maybe<Scalars['Float']>;
+};
+
+export type StorageBagOwnerCouncilUpdateInput = {
+  phantom?: Maybe<Scalars['Float']>;
+};
+
+export type StorageBagOwnerCouncilWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  phantom_eq?: Maybe<Scalars['Int']>;
+  phantom_gt?: Maybe<Scalars['Int']>;
+  phantom_gte?: Maybe<Scalars['Int']>;
+  phantom_lt?: Maybe<Scalars['Int']>;
+  phantom_lte?: Maybe<Scalars['Int']>;
+  phantom_in?: Maybe<Array<Scalars['Int']>>;
+  AND?: Maybe<Array<StorageBagOwnerCouncilWhereInput>>;
+  OR?: Maybe<Array<StorageBagOwnerCouncilWhereInput>>;
+};
+
+export type StorageBagOwnerCouncilWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type StorageBagOwnerDao = {
+  daoId?: Maybe<Scalars['Int']>;
+};
+
+export type StorageBagOwnerDaoCreateInput = {
+  daoId?: Maybe<Scalars['Float']>;
+};
+
+export type StorageBagOwnerDaoUpdateInput = {
+  daoId?: Maybe<Scalars['Float']>;
+};
+
+export type StorageBagOwnerDaoWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  daoId_eq?: Maybe<Scalars['Int']>;
+  daoId_gt?: Maybe<Scalars['Int']>;
+  daoId_gte?: Maybe<Scalars['Int']>;
+  daoId_lt?: Maybe<Scalars['Int']>;
+  daoId_lte?: Maybe<Scalars['Int']>;
+  daoId_in?: Maybe<Array<Scalars['Int']>>;
+  AND?: Maybe<Array<StorageBagOwnerDaoWhereInput>>;
+  OR?: Maybe<Array<StorageBagOwnerDaoWhereInput>>;
+};
+
+export type StorageBagOwnerDaoWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type StorageBagOwnerMember = {
+  memberId?: Maybe<Scalars['Int']>;
+};
+
+export type StorageBagOwnerMemberCreateInput = {
+  memberId?: Maybe<Scalars['Float']>;
+};
+
+export type StorageBagOwnerMemberUpdateInput = {
+  memberId?: Maybe<Scalars['Float']>;
+};
+
+export type StorageBagOwnerMemberWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  memberId_eq?: Maybe<Scalars['Int']>;
+  memberId_gt?: Maybe<Scalars['Int']>;
+  memberId_gte?: Maybe<Scalars['Int']>;
+  memberId_lt?: Maybe<Scalars['Int']>;
+  memberId_lte?: Maybe<Scalars['Int']>;
+  memberId_in?: Maybe<Array<Scalars['Int']>>;
+  AND?: Maybe<Array<StorageBagOwnerMemberWhereInput>>;
+  OR?: Maybe<Array<StorageBagOwnerMemberWhereInput>>;
+};
+
+export type StorageBagOwnerMemberWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type StorageBagOwnerWorkingGroup = {
+  workingGroupId?: Maybe<Scalars['String']>;
+};
+
+export type StorageBagOwnerWorkingGroupCreateInput = {
+  workingGroupId?: Maybe<Scalars['String']>;
+};
+
+export type StorageBagOwnerWorkingGroupUpdateInput = {
+  workingGroupId?: Maybe<Scalars['String']>;
+};
+
+export type StorageBagOwnerWorkingGroupWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  workingGroupId_eq?: Maybe<Scalars['String']>;
+  workingGroupId_contains?: Maybe<Scalars['String']>;
+  workingGroupId_startsWith?: Maybe<Scalars['String']>;
+  workingGroupId_endsWith?: Maybe<Scalars['String']>;
+  workingGroupId_in?: Maybe<Array<Scalars['String']>>;
+  AND?: Maybe<Array<StorageBagOwnerWorkingGroupWhereInput>>;
+  OR?: Maybe<Array<StorageBagOwnerWorkingGroupWhereInput>>;
+};
+
+export type StorageBagOwnerWorkingGroupWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type StorageBagStorageAssignment = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  storageBag: StorageBag;
+  storageBagId: Scalars['String'];
+  storageBucket: StorageBucket;
+  storageBucketId: Scalars['String'];
+};
+
+export type StorageBagStorageAssignmentConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<StorageBagStorageAssignmentEdge>;
+  pageInfo: PageInfo;
+};
+
+export type StorageBagStorageAssignmentCreateInput = {
+  storageBag: Scalars['ID'];
+  storageBucket: Scalars['ID'];
+};
+
+export type StorageBagStorageAssignmentEdge = {
+  node: StorageBagStorageAssignment;
+  cursor: Scalars['String'];
+};
+
+export enum StorageBagStorageAssignmentOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  StorageBagAsc = 'storageBag_ASC',
+  StorageBagDesc = 'storageBag_DESC',
+  StorageBucketAsc = 'storageBucket_ASC',
+  StorageBucketDesc = 'storageBucket_DESC'
+}
+
+export type StorageBagStorageAssignmentUpdateInput = {
+  storageBag?: Maybe<Scalars['ID']>;
+  storageBucket?: Maybe<Scalars['ID']>;
+};
+
+export type StorageBagStorageAssignmentWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  storageBag_eq?: Maybe<Scalars['ID']>;
+  storageBag_in?: Maybe<Array<Scalars['ID']>>;
+  storageBucket_eq?: Maybe<Scalars['ID']>;
+  storageBucket_in?: Maybe<Array<Scalars['ID']>>;
+  storageBag?: Maybe<StorageBagWhereInput>;
+  storageBucket?: Maybe<StorageBucketWhereInput>;
+  AND?: Maybe<Array<StorageBagStorageAssignmentWhereInput>>;
+  OR?: Maybe<Array<StorageBagStorageAssignmentWhereInput>>;
+};
+
+export type StorageBagStorageAssignmentWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type StorageBagUpdateInput = {
+  owner?: Maybe<Scalars['JSONObject']>;
+};
+
+export type StorageBagWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  owner_json?: Maybe<Scalars['JSONObject']>;
+  objects_none?: Maybe<StorageDataObjectWhereInput>;
+  objects_some?: Maybe<StorageDataObjectWhereInput>;
+  objects_every?: Maybe<StorageDataObjectWhereInput>;
+  storageAssignments_none?: Maybe<StorageBagStorageAssignmentWhereInput>;
+  storageAssignments_some?: Maybe<StorageBagStorageAssignmentWhereInput>;
+  storageAssignments_every?: Maybe<StorageBagStorageAssignmentWhereInput>;
+  distirbutionAssignments_none?: Maybe<StorageBagDistributionAssignmentWhereInput>;
+  distirbutionAssignments_some?: Maybe<StorageBagDistributionAssignmentWhereInput>;
+  distirbutionAssignments_every?: Maybe<StorageBagDistributionAssignmentWhereInput>;
+  AND?: Maybe<Array<StorageBagWhereInput>>;
+  OR?: Maybe<Array<StorageBagWhereInput>>;
+};
+
+export type StorageBagWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type StorageBucket = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** Current bucket operator status */
+  operatorStatus: StorageBucketOperatorStatus;
+  operatorMetadata?: Maybe<StorageBucketOperatorMetadata>;
+  operatorMetadataId?: Maybe<Scalars['String']>;
+  /** Whether the bucket is accepting any new storage bags */
+  acceptingNewBags: Scalars['Boolean'];
+  bagAssignments: Array<StorageBagStorageAssignment>;
+  /** Bucket's data object size limit in bytes */
+  dataObjectsSizeLimit: Scalars['BigInt'];
+  /** Bucket's data object count limit */
+  dataObjectCountLimit: Scalars['BigInt'];
+};
+
+export type StorageBucketConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<StorageBucketEdge>;
+  pageInfo: PageInfo;
+};
+
+export type StorageBucketCreateInput = {
+  operatorStatus: Scalars['JSONObject'];
+  operatorMetadata?: Maybe<Scalars['ID']>;
+  acceptingNewBags: Scalars['Boolean'];
+  dataObjectsSizeLimit: Scalars['BigInt'];
+  dataObjectCountLimit: Scalars['BigInt'];
+};
+
+export type StorageBucketEdge = {
+  node: StorageBucket;
+  cursor: Scalars['String'];
+};
+
+export type StorageBucketOperatorMetadata = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** Root node endpoint */
+  nodeEndpoint?: Maybe<Scalars['String']>;
+  nodeLocation?: Maybe<NodeLocationMetadata>;
+  nodeLocationId?: Maybe<Scalars['String']>;
+  /** Additional information about the node/operator */
+  extra?: Maybe<Scalars['String']>;
+  storagebucketoperatorMetadata?: Maybe<Array<StorageBucket>>;
+};
+
+export type StorageBucketOperatorMetadataConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<StorageBucketOperatorMetadataEdge>;
+  pageInfo: PageInfo;
+};
+
+export type StorageBucketOperatorMetadataCreateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>;
+  nodeLocation?: Maybe<Scalars['ID']>;
+  extra?: Maybe<Scalars['String']>;
+};
+
+export type StorageBucketOperatorMetadataEdge = {
+  node: StorageBucketOperatorMetadata;
+  cursor: Scalars['String'];
+};
+
+export enum StorageBucketOperatorMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NodeEndpointAsc = 'nodeEndpoint_ASC',
+  NodeEndpointDesc = 'nodeEndpoint_DESC',
+  NodeLocationAsc = 'nodeLocation_ASC',
+  NodeLocationDesc = 'nodeLocation_DESC',
+  ExtraAsc = 'extra_ASC',
+  ExtraDesc = 'extra_DESC'
+}
+
+export type StorageBucketOperatorMetadataUpdateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>;
+  nodeLocation?: Maybe<Scalars['ID']>;
+  extra?: Maybe<Scalars['String']>;
+};
+
+export type StorageBucketOperatorMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  nodeEndpoint_eq?: Maybe<Scalars['String']>;
+  nodeEndpoint_contains?: Maybe<Scalars['String']>;
+  nodeEndpoint_startsWith?: Maybe<Scalars['String']>;
+  nodeEndpoint_endsWith?: Maybe<Scalars['String']>;
+  nodeEndpoint_in?: Maybe<Array<Scalars['String']>>;
+  nodeLocation_eq?: Maybe<Scalars['ID']>;
+  nodeLocation_in?: Maybe<Array<Scalars['ID']>>;
+  extra_eq?: Maybe<Scalars['String']>;
+  extra_contains?: Maybe<Scalars['String']>;
+  extra_startsWith?: Maybe<Scalars['String']>;
+  extra_endsWith?: Maybe<Scalars['String']>;
+  extra_in?: Maybe<Array<Scalars['String']>>;
+  nodeLocation?: Maybe<NodeLocationMetadataWhereInput>;
+  storagebucketoperatorMetadata_none?: Maybe<StorageBucketWhereInput>;
+  storagebucketoperatorMetadata_some?: Maybe<StorageBucketWhereInput>;
+  storagebucketoperatorMetadata_every?: Maybe<StorageBucketWhereInput>;
+  AND?: Maybe<Array<StorageBucketOperatorMetadataWhereInput>>;
+  OR?: Maybe<Array<StorageBucketOperatorMetadataWhereInput>>;
+};
+
+export type StorageBucketOperatorMetadataWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type StorageBucketOperatorStatus = StorageBucketOperatorStatusMissing | StorageBucketOperatorStatusInvited | StorageBucketOperatorStatusActive;
+
+export type StorageBucketOperatorStatusActive = {
+  workerId: Scalars['Int'];
+};
+
+export type StorageBucketOperatorStatusActiveCreateInput = {
+  workerId: Scalars['Float'];
+};
+
+export type StorageBucketOperatorStatusActiveUpdateInput = {
+  workerId?: Maybe<Scalars['Float']>;
+};
+
+export type StorageBucketOperatorStatusActiveWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  workerId_eq?: Maybe<Scalars['Int']>;
+  workerId_gt?: Maybe<Scalars['Int']>;
+  workerId_gte?: Maybe<Scalars['Int']>;
+  workerId_lt?: Maybe<Scalars['Int']>;
+  workerId_lte?: Maybe<Scalars['Int']>;
+  workerId_in?: Maybe<Array<Scalars['Int']>>;
+  AND?: Maybe<Array<StorageBucketOperatorStatusActiveWhereInput>>;
+  OR?: Maybe<Array<StorageBucketOperatorStatusActiveWhereInput>>;
+};
+
+export type StorageBucketOperatorStatusActiveWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type StorageBucketOperatorStatusInvited = {
+  workerId: Scalars['Int'];
+};
+
+export type StorageBucketOperatorStatusInvitedCreateInput = {
+  workerId: Scalars['Float'];
+};
+
+export type StorageBucketOperatorStatusInvitedUpdateInput = {
+  workerId?: Maybe<Scalars['Float']>;
+};
+
+export type StorageBucketOperatorStatusInvitedWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  workerId_eq?: Maybe<Scalars['Int']>;
+  workerId_gt?: Maybe<Scalars['Int']>;
+  workerId_gte?: Maybe<Scalars['Int']>;
+  workerId_lt?: Maybe<Scalars['Int']>;
+  workerId_lte?: Maybe<Scalars['Int']>;
+  workerId_in?: Maybe<Array<Scalars['Int']>>;
+  AND?: Maybe<Array<StorageBucketOperatorStatusInvitedWhereInput>>;
+  OR?: Maybe<Array<StorageBucketOperatorStatusInvitedWhereInput>>;
+};
+
+export type StorageBucketOperatorStatusInvitedWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type StorageBucketOperatorStatusMissing = {
+  phantom?: Maybe<Scalars['Int']>;
+};
+
+export type StorageBucketOperatorStatusMissingCreateInput = {
+  phantom?: Maybe<Scalars['Float']>;
+};
+
+export type StorageBucketOperatorStatusMissingUpdateInput = {
+  phantom?: Maybe<Scalars['Float']>;
+};
+
+export type StorageBucketOperatorStatusMissingWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  phantom_eq?: Maybe<Scalars['Int']>;
+  phantom_gt?: Maybe<Scalars['Int']>;
+  phantom_gte?: Maybe<Scalars['Int']>;
+  phantom_lt?: Maybe<Scalars['Int']>;
+  phantom_lte?: Maybe<Scalars['Int']>;
+  phantom_in?: Maybe<Array<Scalars['Int']>>;
+  AND?: Maybe<Array<StorageBucketOperatorStatusMissingWhereInput>>;
+  OR?: Maybe<Array<StorageBucketOperatorStatusMissingWhereInput>>;
+};
+
+export type StorageBucketOperatorStatusMissingWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export enum StorageBucketOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  OperatorMetadataAsc = 'operatorMetadata_ASC',
+  OperatorMetadataDesc = 'operatorMetadata_DESC',
+  AcceptingNewBagsAsc = 'acceptingNewBags_ASC',
+  AcceptingNewBagsDesc = 'acceptingNewBags_DESC',
+  DataObjectsSizeLimitAsc = 'dataObjectsSizeLimit_ASC',
+  DataObjectsSizeLimitDesc = 'dataObjectsSizeLimit_DESC',
+  DataObjectCountLimitAsc = 'dataObjectCountLimit_ASC',
+  DataObjectCountLimitDesc = 'dataObjectCountLimit_DESC'
+}
+
+export type StorageBucketUpdateInput = {
+  operatorStatus?: Maybe<Scalars['JSONObject']>;
+  operatorMetadata?: Maybe<Scalars['ID']>;
+  acceptingNewBags?: Maybe<Scalars['Boolean']>;
+  dataObjectsSizeLimit?: Maybe<Scalars['BigInt']>;
+  dataObjectCountLimit?: Maybe<Scalars['BigInt']>;
+};
+
+export type StorageBucketWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  operatorStatus_json?: Maybe<Scalars['JSONObject']>;
+  operatorMetadata_eq?: Maybe<Scalars['ID']>;
+  operatorMetadata_in?: Maybe<Array<Scalars['ID']>>;
+  acceptingNewBags_eq?: Maybe<Scalars['Boolean']>;
+  acceptingNewBags_in?: Maybe<Array<Scalars['Boolean']>>;
+  dataObjectsSizeLimit_eq?: Maybe<Scalars['BigInt']>;
+  dataObjectsSizeLimit_gt?: Maybe<Scalars['BigInt']>;
+  dataObjectsSizeLimit_gte?: Maybe<Scalars['BigInt']>;
+  dataObjectsSizeLimit_lt?: Maybe<Scalars['BigInt']>;
+  dataObjectsSizeLimit_lte?: Maybe<Scalars['BigInt']>;
+  dataObjectsSizeLimit_in?: Maybe<Array<Scalars['BigInt']>>;
+  dataObjectCountLimit_eq?: Maybe<Scalars['BigInt']>;
+  dataObjectCountLimit_gt?: Maybe<Scalars['BigInt']>;
+  dataObjectCountLimit_gte?: Maybe<Scalars['BigInt']>;
+  dataObjectCountLimit_lt?: Maybe<Scalars['BigInt']>;
+  dataObjectCountLimit_lte?: Maybe<Scalars['BigInt']>;
+  dataObjectCountLimit_in?: Maybe<Array<Scalars['BigInt']>>;
+  operatorMetadata?: Maybe<StorageBucketOperatorMetadataWhereInput>;
+  bagAssignments_none?: Maybe<StorageBagStorageAssignmentWhereInput>;
+  bagAssignments_some?: Maybe<StorageBagStorageAssignmentWhereInput>;
+  bagAssignments_every?: Maybe<StorageBagStorageAssignmentWhereInput>;
+  AND?: Maybe<Array<StorageBucketWhereInput>>;
+  OR?: Maybe<Array<StorageBucketWhereInput>>;
+};
+
+export type StorageBucketWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type StorageDataObject = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** Whether the data object was uploaded and accepted by the storage provider */
+  isAccepted: Scalars['Boolean'];
+  /** Data object size in bytes */
+  size: Scalars['BigInt'];
+  storageBag: StorageBag;
+  storageBagId: Scalars['String'];
+  /** IPFS content hash */
+  ipfsHash: Scalars['String'];
+  /** Public key used to authenticate the uploader by the storage provider */
+  authenticationKey?: Maybe<Scalars['String']>;
+};
+
+export type StorageDataObjectConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<StorageDataObjectEdge>;
+  pageInfo: PageInfo;
+};
+
+export type StorageDataObjectCreateInput = {
+  isAccepted: Scalars['Boolean'];
+  size: Scalars['BigInt'];
+  storageBag: Scalars['ID'];
+  ipfsHash: Scalars['String'];
+  authenticationKey?: Maybe<Scalars['String']>;
+};
+
+export type StorageDataObjectEdge = {
+  node: StorageDataObject;
+  cursor: Scalars['String'];
+};
+
+export enum StorageDataObjectOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsAcceptedAsc = 'isAccepted_ASC',
+  IsAcceptedDesc = 'isAccepted_DESC',
+  SizeAsc = 'size_ASC',
+  SizeDesc = 'size_DESC',
+  StorageBagAsc = 'storageBag_ASC',
+  StorageBagDesc = 'storageBag_DESC',
+  IpfsHashAsc = 'ipfsHash_ASC',
+  IpfsHashDesc = 'ipfsHash_DESC',
+  AuthenticationKeyAsc = 'authenticationKey_ASC',
+  AuthenticationKeyDesc = 'authenticationKey_DESC'
+}
+
+export type StorageDataObjectUpdateInput = {
+  isAccepted?: Maybe<Scalars['Boolean']>;
+  size?: Maybe<Scalars['BigInt']>;
+  storageBag?: Maybe<Scalars['ID']>;
+  ipfsHash?: Maybe<Scalars['String']>;
+  authenticationKey?: Maybe<Scalars['String']>;
+};
+
+export type StorageDataObjectWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  isAccepted_eq?: Maybe<Scalars['Boolean']>;
+  isAccepted_in?: Maybe<Array<Scalars['Boolean']>>;
+  size_eq?: Maybe<Scalars['BigInt']>;
+  size_gt?: Maybe<Scalars['BigInt']>;
+  size_gte?: Maybe<Scalars['BigInt']>;
+  size_lt?: Maybe<Scalars['BigInt']>;
+  size_lte?: Maybe<Scalars['BigInt']>;
+  size_in?: Maybe<Array<Scalars['BigInt']>>;
+  storageBag_eq?: Maybe<Scalars['ID']>;
+  storageBag_in?: Maybe<Array<Scalars['ID']>>;
+  ipfsHash_eq?: Maybe<Scalars['String']>;
+  ipfsHash_contains?: Maybe<Scalars['String']>;
+  ipfsHash_startsWith?: Maybe<Scalars['String']>;
+  ipfsHash_endsWith?: Maybe<Scalars['String']>;
+  ipfsHash_in?: Maybe<Array<Scalars['String']>>;
+  authenticationKey_eq?: Maybe<Scalars['String']>;
+  authenticationKey_contains?: Maybe<Scalars['String']>;
+  authenticationKey_startsWith?: Maybe<Scalars['String']>;
+  authenticationKey_endsWith?: Maybe<Scalars['String']>;
+  authenticationKey_in?: Maybe<Array<Scalars['String']>>;
+  storageBag?: Maybe<StorageBagWhereInput>;
+  AND?: Maybe<Array<StorageDataObjectWhereInput>>;
+  OR?: Maybe<Array<StorageDataObjectWhereInput>>;
+};
+
+export type StorageDataObjectWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+/** Global storage system parameters */
+export type StorageSystemParameters = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** Blacklisted content hashes */
+  blacklist: Array<Scalars['String']>;
+};
+
+export type StorageSystemParametersConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<StorageSystemParametersEdge>;
+  pageInfo: PageInfo;
+};
+
+export type StorageSystemParametersCreateInput = {
+  blacklist: Array<Scalars['String']>;
+};
+
+export type StorageSystemParametersEdge = {
+  node: StorageSystemParameters;
+  cursor: Scalars['String'];
+};
+
+export enum StorageSystemParametersOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC'
+}
+
+export type StorageSystemParametersUpdateInput = {
+  blacklist?: Maybe<Array<Scalars['String']>>;
+};
+
+export type StorageSystemParametersWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  AND?: Maybe<Array<StorageSystemParametersWhereInput>>;
+  OR?: Maybe<Array<StorageSystemParametersWhereInput>>;
+};
+
+export type StorageSystemParametersWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type Subscription = {
+  stateSubscription: ProcessorState;
+};
+
+export type Video = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  channel?: Maybe<Channel>;
+  channelId?: Maybe<Scalars['String']>;
+  category?: Maybe<VideoCategory>;
+  categoryId?: Maybe<Scalars['String']>;
+  /** The title of the video */
+  title?: Maybe<Scalars['String']>;
+  /** The description of the Video */
+  description?: Maybe<Scalars['String']>;
+  /** Video duration in seconds */
+  duration?: Maybe<Scalars['Int']>;
+  thumbnailPhotoDataObject?: Maybe<DataObject>;
+  thumbnailPhotoDataObjectId?: Maybe<Scalars['String']>;
+  /** URLs where the asset content can be accessed (if any) */
+  thumbnailPhotoUrls: Array<Scalars['String']>;
+  /** Availability meta information */
+  thumbnailPhotoAvailability: AssetAvailability;
+  language?: Maybe<Language>;
+  languageId?: Maybe<Scalars['String']>;
+  /** Whether or not Video contains marketing */
+  hasMarketing?: Maybe<Scalars['Boolean']>;
+  /** If the Video was published on other platform before beeing published on Joystream - the original publication date */
+  publishedBeforeJoystream?: Maybe<Scalars['DateTime']>;
+  /** Whether the Video is supposed to be publically displayed */
+  isPublic?: Maybe<Scalars['Boolean']>;
+  /** Flag signaling whether a video is censored. */
+  isCensored: Scalars['Boolean'];
+  /** Whether the Video contains explicit material. */
+  isExplicit?: Maybe<Scalars['Boolean']>;
+  license?: Maybe<License>;
+  licenseId?: Maybe<Scalars['String']>;
+  mediaDataObject?: Maybe<DataObject>;
+  mediaDataObjectId?: Maybe<Scalars['String']>;
+  /** URLs where the asset content can be accessed (if any) */
+  mediaUrls: Array<Scalars['String']>;
+  /** Availability meta information */
+  mediaAvailability: AssetAvailability;
+  mediaMetadata?: Maybe<VideoMediaMetadata>;
+  mediaMetadataId?: Maybe<Scalars['String']>;
+  createdInBlock: Scalars['Int'];
+  /** Is video featured or not */
+  isFeatured: Scalars['Boolean'];
+};
+
+export type VideoCategoriesByNameFtsOutput = {
+  item: VideoCategoriesByNameSearchResult;
+  rank: Scalars['Float'];
+  isTypeOf: Scalars['String'];
+  highlight: Scalars['String'];
+};
+
+export type VideoCategoriesByNameSearchResult = VideoCategory;
+
+export type VideoCategory = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** The name of the category */
+  name?: Maybe<Scalars['String']>;
+  videos: Array<Video>;
+  createdInBlock: Scalars['Int'];
+};
+
+export type VideoCategoryConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<VideoCategoryEdge>;
+  pageInfo: PageInfo;
+};
+
+export type VideoCategoryCreateInput = {
+  name?: Maybe<Scalars['String']>;
+  createdInBlock: Scalars['Float'];
+};
+
+export type VideoCategoryEdge = {
+  node: VideoCategory;
+  cursor: Scalars['String'];
+};
+
+export enum VideoCategoryOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NameAsc = 'name_ASC',
+  NameDesc = 'name_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC'
+}
+
+export type VideoCategoryUpdateInput = {
+  name?: Maybe<Scalars['String']>;
+  createdInBlock?: Maybe<Scalars['Float']>;
+};
+
+export type VideoCategoryWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  name_eq?: Maybe<Scalars['String']>;
+  name_contains?: Maybe<Scalars['String']>;
+  name_startsWith?: Maybe<Scalars['String']>;
+  name_endsWith?: Maybe<Scalars['String']>;
+  name_in?: Maybe<Array<Scalars['String']>>;
+  createdInBlock_eq?: Maybe<Scalars['Int']>;
+  createdInBlock_gt?: Maybe<Scalars['Int']>;
+  createdInBlock_gte?: Maybe<Scalars['Int']>;
+  createdInBlock_lt?: Maybe<Scalars['Int']>;
+  createdInBlock_lte?: Maybe<Scalars['Int']>;
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>;
+  videos_none?: Maybe<VideoWhereInput>;
+  videos_some?: Maybe<VideoWhereInput>;
+  videos_every?: Maybe<VideoWhereInput>;
+  AND?: Maybe<Array<VideoCategoryWhereInput>>;
+  OR?: Maybe<Array<VideoCategoryWhereInput>>;
+};
+
+export type VideoCategoryWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type VideoConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<VideoEdge>;
+  pageInfo: PageInfo;
+};
+
+export type VideoCreateInput = {
+  channel?: Maybe<Scalars['ID']>;
+  category?: Maybe<Scalars['ID']>;
+  title?: Maybe<Scalars['String']>;
+  description?: Maybe<Scalars['String']>;
+  duration?: Maybe<Scalars['Float']>;
+  thumbnailPhotoDataObject?: Maybe<Scalars['ID']>;
+  thumbnailPhotoUrls: Array<Scalars['String']>;
+  thumbnailPhotoAvailability: AssetAvailability;
+  language?: Maybe<Scalars['ID']>;
+  hasMarketing?: Maybe<Scalars['Boolean']>;
+  publishedBeforeJoystream?: Maybe<Scalars['DateTime']>;
+  isPublic?: Maybe<Scalars['Boolean']>;
+  isCensored: Scalars['Boolean'];
+  isExplicit?: Maybe<Scalars['Boolean']>;
+  license?: Maybe<Scalars['ID']>;
+  mediaDataObject?: Maybe<Scalars['ID']>;
+  mediaUrls: Array<Scalars['String']>;
+  mediaAvailability: AssetAvailability;
+  mediaMetadata?: Maybe<Scalars['ID']>;
+  createdInBlock: Scalars['Float'];
+  isFeatured: Scalars['Boolean'];
+};
+
+export type VideoEdge = {
+  node: Video;
+  cursor: Scalars['String'];
+};
+
+export type VideoMediaEncoding = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** Encoding of the video media object */
+  codecName?: Maybe<Scalars['String']>;
+  /** Media container format */
+  container?: Maybe<Scalars['String']>;
+  /** Content MIME type */
+  mimeMediaType?: Maybe<Scalars['String']>;
+  videomediametadataencoding?: Maybe<Array<VideoMediaMetadata>>;
+};
+
+export type VideoMediaEncodingConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<VideoMediaEncodingEdge>;
+  pageInfo: PageInfo;
+};
+
+export type VideoMediaEncodingCreateInput = {
+  codecName?: Maybe<Scalars['String']>;
+  container?: Maybe<Scalars['String']>;
+  mimeMediaType?: Maybe<Scalars['String']>;
+};
+
+export type VideoMediaEncodingEdge = {
+  node: VideoMediaEncoding;
+  cursor: Scalars['String'];
+};
+
+export enum VideoMediaEncodingOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CodecNameAsc = 'codecName_ASC',
+  CodecNameDesc = 'codecName_DESC',
+  ContainerAsc = 'container_ASC',
+  ContainerDesc = 'container_DESC',
+  MimeMediaTypeAsc = 'mimeMediaType_ASC',
+  MimeMediaTypeDesc = 'mimeMediaType_DESC'
+}
+
+export type VideoMediaEncodingUpdateInput = {
+  codecName?: Maybe<Scalars['String']>;
+  container?: Maybe<Scalars['String']>;
+  mimeMediaType?: Maybe<Scalars['String']>;
+};
+
+export type VideoMediaEncodingWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  codecName_eq?: Maybe<Scalars['String']>;
+  codecName_contains?: Maybe<Scalars['String']>;
+  codecName_startsWith?: Maybe<Scalars['String']>;
+  codecName_endsWith?: Maybe<Scalars['String']>;
+  codecName_in?: Maybe<Array<Scalars['String']>>;
+  container_eq?: Maybe<Scalars['String']>;
+  container_contains?: Maybe<Scalars['String']>;
+  container_startsWith?: Maybe<Scalars['String']>;
+  container_endsWith?: Maybe<Scalars['String']>;
+  container_in?: Maybe<Array<Scalars['String']>>;
+  mimeMediaType_eq?: Maybe<Scalars['String']>;
+  mimeMediaType_contains?: Maybe<Scalars['String']>;
+  mimeMediaType_startsWith?: Maybe<Scalars['String']>;
+  mimeMediaType_endsWith?: Maybe<Scalars['String']>;
+  mimeMediaType_in?: Maybe<Array<Scalars['String']>>;
+  videomediametadataencoding_none?: Maybe<VideoMediaMetadataWhereInput>;
+  videomediametadataencoding_some?: Maybe<VideoMediaMetadataWhereInput>;
+  videomediametadataencoding_every?: Maybe<VideoMediaMetadataWhereInput>;
+  AND?: Maybe<Array<VideoMediaEncodingWhereInput>>;
+  OR?: Maybe<Array<VideoMediaEncodingWhereInput>>;
+};
+
+export type VideoMediaEncodingWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type VideoMediaMetadata = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  encoding?: Maybe<VideoMediaEncoding>;
+  encodingId?: Maybe<Scalars['String']>;
+  /** Video media width in pixels */
+  pixelWidth?: Maybe<Scalars['Int']>;
+  /** Video media height in pixels */
+  pixelHeight?: Maybe<Scalars['Int']>;
+  /** Video media size in bytes */
+  size?: Maybe<Scalars['Int']>;
+  video?: Maybe<Video>;
+  createdInBlock: Scalars['Int'];
+};
+
+export type VideoMediaMetadataConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<VideoMediaMetadataEdge>;
+  pageInfo: PageInfo;
+};
+
+export type VideoMediaMetadataCreateInput = {
+  encoding?: Maybe<Scalars['ID']>;
+  pixelWidth?: Maybe<Scalars['Float']>;
+  pixelHeight?: Maybe<Scalars['Float']>;
+  size?: Maybe<Scalars['Float']>;
+  createdInBlock: Scalars['Float'];
+};
+
+export type VideoMediaMetadataEdge = {
+  node: VideoMediaMetadata;
+  cursor: Scalars['String'];
+};
+
+export enum VideoMediaMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  EncodingAsc = 'encoding_ASC',
+  EncodingDesc = 'encoding_DESC',
+  PixelWidthAsc = 'pixelWidth_ASC',
+  PixelWidthDesc = 'pixelWidth_DESC',
+  PixelHeightAsc = 'pixelHeight_ASC',
+  PixelHeightDesc = 'pixelHeight_DESC',
+  SizeAsc = 'size_ASC',
+  SizeDesc = 'size_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC'
+}
+
+export type VideoMediaMetadataUpdateInput = {
+  encoding?: Maybe<Scalars['ID']>;
+  pixelWidth?: Maybe<Scalars['Float']>;
+  pixelHeight?: Maybe<Scalars['Float']>;
+  size?: Maybe<Scalars['Float']>;
+  createdInBlock?: Maybe<Scalars['Float']>;
+};
+
+export type VideoMediaMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  encoding_eq?: Maybe<Scalars['ID']>;
+  encoding_in?: Maybe<Array<Scalars['ID']>>;
+  pixelWidth_eq?: Maybe<Scalars['Int']>;
+  pixelWidth_gt?: Maybe<Scalars['Int']>;
+  pixelWidth_gte?: Maybe<Scalars['Int']>;
+  pixelWidth_lt?: Maybe<Scalars['Int']>;
+  pixelWidth_lte?: Maybe<Scalars['Int']>;
+  pixelWidth_in?: Maybe<Array<Scalars['Int']>>;
+  pixelHeight_eq?: Maybe<Scalars['Int']>;
+  pixelHeight_gt?: Maybe<Scalars['Int']>;
+  pixelHeight_gte?: Maybe<Scalars['Int']>;
+  pixelHeight_lt?: Maybe<Scalars['Int']>;
+  pixelHeight_lte?: Maybe<Scalars['Int']>;
+  pixelHeight_in?: Maybe<Array<Scalars['Int']>>;
+  size_eq?: Maybe<Scalars['Int']>;
+  size_gt?: Maybe<Scalars['Int']>;
+  size_gte?: Maybe<Scalars['Int']>;
+  size_lt?: Maybe<Scalars['Int']>;
+  size_lte?: Maybe<Scalars['Int']>;
+  size_in?: Maybe<Array<Scalars['Int']>>;
+  createdInBlock_eq?: Maybe<Scalars['Int']>;
+  createdInBlock_gt?: Maybe<Scalars['Int']>;
+  createdInBlock_gte?: Maybe<Scalars['Int']>;
+  createdInBlock_lt?: Maybe<Scalars['Int']>;
+  createdInBlock_lte?: Maybe<Scalars['Int']>;
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>;
+  encoding?: Maybe<VideoMediaEncodingWhereInput>;
+  video?: Maybe<VideoWhereInput>;
+  AND?: Maybe<Array<VideoMediaMetadataWhereInput>>;
+  OR?: Maybe<Array<VideoMediaMetadataWhereInput>>;
+};
+
+export type VideoMediaMetadataWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export enum VideoOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  ChannelAsc = 'channel_ASC',
+  ChannelDesc = 'channel_DESC',
+  CategoryAsc = 'category_ASC',
+  CategoryDesc = 'category_DESC',
+  TitleAsc = 'title_ASC',
+  TitleDesc = 'title_DESC',
+  DescriptionAsc = 'description_ASC',
+  DescriptionDesc = 'description_DESC',
+  DurationAsc = 'duration_ASC',
+  DurationDesc = 'duration_DESC',
+  ThumbnailPhotoDataObjectAsc = 'thumbnailPhotoDataObject_ASC',
+  ThumbnailPhotoDataObjectDesc = 'thumbnailPhotoDataObject_DESC',
+  ThumbnailPhotoAvailabilityAsc = 'thumbnailPhotoAvailability_ASC',
+  ThumbnailPhotoAvailabilityDesc = 'thumbnailPhotoAvailability_DESC',
+  LanguageAsc = 'language_ASC',
+  LanguageDesc = 'language_DESC',
+  HasMarketingAsc = 'hasMarketing_ASC',
+  HasMarketingDesc = 'hasMarketing_DESC',
+  PublishedBeforeJoystreamAsc = 'publishedBeforeJoystream_ASC',
+  PublishedBeforeJoystreamDesc = 'publishedBeforeJoystream_DESC',
+  IsPublicAsc = 'isPublic_ASC',
+  IsPublicDesc = 'isPublic_DESC',
+  IsCensoredAsc = 'isCensored_ASC',
+  IsCensoredDesc = 'isCensored_DESC',
+  IsExplicitAsc = 'isExplicit_ASC',
+  IsExplicitDesc = 'isExplicit_DESC',
+  LicenseAsc = 'license_ASC',
+  LicenseDesc = 'license_DESC',
+  MediaDataObjectAsc = 'mediaDataObject_ASC',
+  MediaDataObjectDesc = 'mediaDataObject_DESC',
+  MediaAvailabilityAsc = 'mediaAvailability_ASC',
+  MediaAvailabilityDesc = 'mediaAvailability_DESC',
+  MediaMetadataAsc = 'mediaMetadata_ASC',
+  MediaMetadataDesc = 'mediaMetadata_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+  IsFeaturedAsc = 'isFeatured_ASC',
+  IsFeaturedDesc = 'isFeatured_DESC'
+}
+
+export type VideoUpdateInput = {
+  channel?: Maybe<Scalars['ID']>;
+  category?: Maybe<Scalars['ID']>;
+  title?: Maybe<Scalars['String']>;
+  description?: Maybe<Scalars['String']>;
+  duration?: Maybe<Scalars['Float']>;
+  thumbnailPhotoDataObject?: Maybe<Scalars['ID']>;
+  thumbnailPhotoUrls?: Maybe<Array<Scalars['String']>>;
+  thumbnailPhotoAvailability?: Maybe<AssetAvailability>;
+  language?: Maybe<Scalars['ID']>;
+  hasMarketing?: Maybe<Scalars['Boolean']>;
+  publishedBeforeJoystream?: Maybe<Scalars['DateTime']>;
+  isPublic?: Maybe<Scalars['Boolean']>;
+  isCensored?: Maybe<Scalars['Boolean']>;
+  isExplicit?: Maybe<Scalars['Boolean']>;
+  license?: Maybe<Scalars['ID']>;
+  mediaDataObject?: Maybe<Scalars['ID']>;
+  mediaUrls?: Maybe<Array<Scalars['String']>>;
+  mediaAvailability?: Maybe<AssetAvailability>;
+  mediaMetadata?: Maybe<Scalars['ID']>;
+  createdInBlock?: Maybe<Scalars['Float']>;
+  isFeatured?: Maybe<Scalars['Boolean']>;
+};
+
+export type VideoWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  channel_eq?: Maybe<Scalars['ID']>;
+  channel_in?: Maybe<Array<Scalars['ID']>>;
+  category_eq?: Maybe<Scalars['ID']>;
+  category_in?: Maybe<Array<Scalars['ID']>>;
+  title_eq?: Maybe<Scalars['String']>;
+  title_contains?: Maybe<Scalars['String']>;
+  title_startsWith?: Maybe<Scalars['String']>;
+  title_endsWith?: Maybe<Scalars['String']>;
+  title_in?: Maybe<Array<Scalars['String']>>;
+  description_eq?: Maybe<Scalars['String']>;
+  description_contains?: Maybe<Scalars['String']>;
+  description_startsWith?: Maybe<Scalars['String']>;
+  description_endsWith?: Maybe<Scalars['String']>;
+  description_in?: Maybe<Array<Scalars['String']>>;
+  duration_eq?: Maybe<Scalars['Int']>;
+  duration_gt?: Maybe<Scalars['Int']>;
+  duration_gte?: Maybe<Scalars['Int']>;
+  duration_lt?: Maybe<Scalars['Int']>;
+  duration_lte?: Maybe<Scalars['Int']>;
+  duration_in?: Maybe<Array<Scalars['Int']>>;
+  thumbnailPhotoDataObject_eq?: Maybe<Scalars['ID']>;
+  thumbnailPhotoDataObject_in?: Maybe<Array<Scalars['ID']>>;
+  thumbnailPhotoAvailability_eq?: Maybe<AssetAvailability>;
+  thumbnailPhotoAvailability_in?: Maybe<Array<AssetAvailability>>;
+  language_eq?: Maybe<Scalars['ID']>;
+  language_in?: Maybe<Array<Scalars['ID']>>;
+  hasMarketing_eq?: Maybe<Scalars['Boolean']>;
+  hasMarketing_in?: Maybe<Array<Scalars['Boolean']>>;
+  publishedBeforeJoystream_eq?: Maybe<Scalars['DateTime']>;
+  publishedBeforeJoystream_lt?: Maybe<Scalars['DateTime']>;
+  publishedBeforeJoystream_lte?: Maybe<Scalars['DateTime']>;
+  publishedBeforeJoystream_gt?: Maybe<Scalars['DateTime']>;
+  publishedBeforeJoystream_gte?: Maybe<Scalars['DateTime']>;
+  isPublic_eq?: Maybe<Scalars['Boolean']>;
+  isPublic_in?: Maybe<Array<Scalars['Boolean']>>;
+  isCensored_eq?: Maybe<Scalars['Boolean']>;
+  isCensored_in?: Maybe<Array<Scalars['Boolean']>>;
+  isExplicit_eq?: Maybe<Scalars['Boolean']>;
+  isExplicit_in?: Maybe<Array<Scalars['Boolean']>>;
+  license_eq?: Maybe<Scalars['ID']>;
+  license_in?: Maybe<Array<Scalars['ID']>>;
+  mediaDataObject_eq?: Maybe<Scalars['ID']>;
+  mediaDataObject_in?: Maybe<Array<Scalars['ID']>>;
+  mediaAvailability_eq?: Maybe<AssetAvailability>;
+  mediaAvailability_in?: Maybe<Array<AssetAvailability>>;
+  mediaMetadata_eq?: Maybe<Scalars['ID']>;
+  mediaMetadata_in?: Maybe<Array<Scalars['ID']>>;
+  createdInBlock_eq?: Maybe<Scalars['Int']>;
+  createdInBlock_gt?: Maybe<Scalars['Int']>;
+  createdInBlock_gte?: Maybe<Scalars['Int']>;
+  createdInBlock_lt?: Maybe<Scalars['Int']>;
+  createdInBlock_lte?: Maybe<Scalars['Int']>;
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>;
+  isFeatured_eq?: Maybe<Scalars['Boolean']>;
+  isFeatured_in?: Maybe<Array<Scalars['Boolean']>>;
+  channel?: Maybe<ChannelWhereInput>;
+  category?: Maybe<VideoCategoryWhereInput>;
+  thumbnailPhotoDataObject?: Maybe<DataObjectWhereInput>;
+  language?: Maybe<LanguageWhereInput>;
+  license?: Maybe<LicenseWhereInput>;
+  mediaDataObject?: Maybe<DataObjectWhereInput>;
+  mediaMetadata?: Maybe<VideoMediaMetadataWhereInput>;
+  AND?: Maybe<Array<VideoWhereInput>>;
+  OR?: Maybe<Array<VideoWhereInput>>;
+};
+
+export type VideoWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type Worker = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** Sign of worker still being active */
+  isActive: Scalars['Boolean'];
+  /** Runtime identifier */
+  workerId: Scalars['String'];
+  /** Associated working group */
+  type: WorkerType;
+  /** Custom metadata set by provider */
+  metadata?: Maybe<Scalars['String']>;
+  dataObjects: Array<DataObject>;
+};
+
+export type WorkerConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<WorkerEdge>;
+  pageInfo: PageInfo;
+};
+
+export type WorkerCreateInput = {
+  isActive: Scalars['Boolean'];
+  workerId: Scalars['String'];
+  type: WorkerType;
+  metadata?: Maybe<Scalars['String']>;
+};
+
+export type WorkerEdge = {
+  node: Worker;
+  cursor: Scalars['String'];
+};
+
+export enum WorkerOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsActiveAsc = 'isActive_ASC',
+  IsActiveDesc = 'isActive_DESC',
+  WorkerIdAsc = 'workerId_ASC',
+  WorkerIdDesc = 'workerId_DESC',
+  TypeAsc = 'type_ASC',
+  TypeDesc = 'type_DESC',
+  MetadataAsc = 'metadata_ASC',
+  MetadataDesc = 'metadata_DESC'
+}
+
+export enum WorkerType {
+  Gateway = 'GATEWAY',
+  Storage = 'STORAGE'
+}
+
+export type WorkerUpdateInput = {
+  isActive?: Maybe<Scalars['Boolean']>;
+  workerId?: Maybe<Scalars['String']>;
+  type?: Maybe<WorkerType>;
+  metadata?: Maybe<Scalars['String']>;
+};
+
+export type WorkerWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  isActive_eq?: Maybe<Scalars['Boolean']>;
+  isActive_in?: Maybe<Array<Scalars['Boolean']>>;
+  workerId_eq?: Maybe<Scalars['String']>;
+  workerId_contains?: Maybe<Scalars['String']>;
+  workerId_startsWith?: Maybe<Scalars['String']>;
+  workerId_endsWith?: Maybe<Scalars['String']>;
+  workerId_in?: Maybe<Array<Scalars['String']>>;
+  type_eq?: Maybe<WorkerType>;
+  type_in?: Maybe<Array<WorkerType>>;
+  metadata_eq?: Maybe<Scalars['String']>;
+  metadata_contains?: Maybe<Scalars['String']>;
+  metadata_startsWith?: Maybe<Scalars['String']>;
+  metadata_endsWith?: Maybe<Scalars['String']>;
+  metadata_in?: Maybe<Array<Scalars['String']>>;
+  dataObjects_none?: Maybe<DataObjectWhereInput>;
+  dataObjects_some?: Maybe<DataObjectWhereInput>;
+  dataObjects_every?: Maybe<DataObjectWhereInput>;
+  AND?: Maybe<Array<WorkerWhereInput>>;
+  OR?: Maybe<Array<WorkerWhereInput>>;
+};
+
+export type WorkerWhereUniqueInput = {
+  id: Scalars['ID'];
+};

+ 78 - 0
distributor-node/src/services/networking/query-node/queries/queries.graphql

@@ -0,0 +1,78 @@
+fragment DataObjectDetails on StorageDataObject {
+  id
+  size
+  ipfsHash
+  isAccepted
+  storageBag {
+    storageAssignments {
+      storageBucket {
+        id
+        operatorMetadata {
+          nodeEndpoint
+        }
+        operatorStatus {
+          __typename
+        }
+      }
+    }
+    distirbutionAssignments {
+      distributionBucket {
+        id
+        operators {
+          workerId
+          status
+        }
+      }
+    }
+  }
+}
+
+query getDataObjectDetails($id: ID!) {
+  storageDataObjectByUniqueInput(where: { id: $id }) {
+    ...DataObjectDetails
+  }
+}
+
+fragment DistirubtionBucketWithObjects on DistributionBucket {
+  id
+  bagAssignments {
+    storageBag {
+      objects {
+        id
+        size
+        ipfsHash
+      }
+    }
+  }
+}
+
+query getDistributionBucketsWithObjectsByIds($ids: [ID!]) {
+  distributionBuckets(where: { id_in: $ids }) {
+    ...DistirubtionBucketWithObjects
+  }
+}
+
+query getDistributionBucketsWithObjectsByWorkerId($workerId: Int!) {
+  distributionBuckets(where: { operators_some: { workerId_eq: $workerId, status_eq: ACTIVE } }) {
+    ...DistirubtionBucketWithObjects
+  }
+}
+
+fragment StorageBucketOperatorFields on StorageBucket {
+  id
+  operatorMetadata {
+    nodeEndpoint
+  }
+}
+
+query getActiveStorageBucketOperatorsData {
+  storageBuckets(
+    where: {
+      operatorStatus_json: { isTypeOf_eq: "StorageBucketOperatorStatusActive" }
+      operatorMetadata: { nodeEndpoint_contains: "http" }
+    }
+    limit: 9999
+  ) {
+    ...StorageBucketOperatorFields
+  }
+}

+ 145 - 0
distributor-node/src/services/networking/runtime/api.ts

@@ -0,0 +1,145 @@
+import { types } from '@joystream/types/'
+import { ApiPromise, WsProvider, SubmittableResult } from '@polkadot/api'
+import { SubmittableExtrinsic, AugmentedEvent } from '@polkadot/api/types'
+import { KeyringPair } from '@polkadot/keyring/types'
+import { Balance } from '@polkadot/types/interfaces'
+import { formatBalance } from '@polkadot/util'
+import { IEvent } from '@polkadot/types/types'
+import { DispatchError } from '@polkadot/types/interfaces/system'
+import { LoggingService } from '../../logging'
+import { Logger } from 'winston'
+
+export class ExtrinsicFailedError extends Error {}
+
+export class RuntimeApi {
+  private _api: ApiPromise
+  private logger: Logger
+
+  public isDevelopment = false
+
+  private constructor(logging: LoggingService, originalApi: ApiPromise, isDevelopment: boolean) {
+    this.isDevelopment = isDevelopment
+    this.logger = logging.createLogger('SubstrateApi')
+    this._api = originalApi
+  }
+
+  static async create(
+    logging: LoggingService,
+    apiUri: string,
+    metadataCache?: Record<string, any>
+  ): Promise<RuntimeApi> {
+    const { api, chainType } = await RuntimeApi.initApi(apiUri, metadataCache)
+    return new RuntimeApi(logging, api, chainType.isDevelopment || chainType.isLocal)
+  }
+
+  private static async initApi(apiUri: string, metadataCache?: Record<string, any>) {
+    const wsProvider: WsProvider = new WsProvider(apiUri)
+    const api = await ApiPromise.create({ provider: wsProvider, types, metadata: metadataCache })
+
+    // Initializing some api params based on pioneer/packages/react-api/Api.tsx
+    const [properties, chainType] = await Promise.all([api.rpc.system.properties(), api.rpc.system.chainType()])
+
+    const tokenSymbol = properties.tokenSymbol.unwrap()[0].toString()
+    const tokenDecimals = properties.tokenDecimals.unwrap()[0].toNumber()
+
+    // formatBalance config
+    formatBalance.setDefaults({
+      decimals: tokenDecimals,
+      unit: tokenSymbol,
+    })
+
+    return { api, properties, chainType }
+  }
+
+  public get query(): ApiPromise['query'] {
+    return this._api.query
+  }
+
+  public get tx(): ApiPromise['tx'] {
+    return this._api.tx
+  }
+
+  public get consts(): ApiPromise['consts'] {
+    return this._api.consts
+  }
+
+  public get derive(): ApiPromise['derive'] {
+    return this._api.derive
+  }
+
+  public get createType(): ApiPromise['createType'] {
+    return this._api.createType.bind(this._api)
+  }
+
+  public sudo(tx: SubmittableExtrinsic<'promise'>): SubmittableExtrinsic<'promise'> {
+    return this._api.tx.sudo.sudo(tx)
+  }
+
+  public async estimateFee(account: KeyringPair, tx: SubmittableExtrinsic<'promise'>): Promise<Balance> {
+    const paymentInfo = await tx.paymentInfo(account)
+    return paymentInfo.partialFee
+  }
+
+  public findEvent<
+    S extends keyof ApiPromise['events'] & string,
+    M extends keyof ApiPromise['events'][S] & string,
+    EventType = ApiPromise['events'][S][M] extends AugmentedEvent<'promise', infer T> ? IEvent<T> : never
+  >(result: SubmittableResult, section: S, method: M): EventType | undefined {
+    return result.findRecord(section, method)?.event as EventType | undefined
+  }
+
+  public getEvent<
+    S extends keyof ApiPromise['events'] & string,
+    M extends keyof ApiPromise['events'][S] & string,
+    EventType = ApiPromise['events'][S][M] extends AugmentedEvent<'promise', infer T> ? IEvent<T> : never
+  >(result: SubmittableResult, section: S, method: M): EventType {
+    const event = this.findEvent(result, section, method)
+    if (!event) {
+      throw new Error(`Cannot find expected ${section}.${method} event in result: ${result.toHuman()}`)
+    }
+    return (event as unknown) as EventType
+  }
+
+  sendExtrinsic(keyPair: KeyringPair, tx: SubmittableExtrinsic<'promise'>): Promise<SubmittableResult> {
+    this.logger.info(`Sending ${tx.method.section}.${tx.method.method} extrinsic from ${keyPair.address}`)
+    return new Promise((resolve, reject) => {
+      let unsubscribe: () => void
+      tx.signAndSend(keyPair, {}, (result) => {
+        // Implementation loosely based on /pioneer/packages/react-signer/src/Modal.tsx
+        if (!result || !result.status) {
+          return
+        }
+
+        if (result.status.isInBlock) {
+          unsubscribe()
+          result.events
+            .filter(({ event }) => event.section === 'system')
+            .forEach(({ event }) => {
+              if (event.method === 'ExtrinsicFailed') {
+                const dispatchError = event.data[0] as DispatchError
+                let errorMsg = dispatchError.toString()
+                if (dispatchError.isModule) {
+                  try {
+                    const { name, documentation } = this._api.registry.findMetaError(dispatchError.asModule)
+                    errorMsg = `${name} (${documentation})`
+                  } catch (e) {
+                    // This probably means we don't have this error in the metadata
+                    // In this case - continue (we'll just display dispatchError.toString())
+                  }
+                }
+                reject(new ExtrinsicFailedError(`Extrinsic execution error: ${errorMsg}`))
+              } else if (event.method === 'ExtrinsicSuccess') {
+                resolve(result)
+              }
+            })
+        } else if (result.isError) {
+          reject(new ExtrinsicFailedError('Extrinsic execution error!'))
+        }
+      })
+        .then((unsubFunc) => (unsubscribe = unsubFunc))
+        .catch((e) =>
+          reject(new ExtrinsicFailedError(`Cannot send the extrinsic: ${e.message ? e.message : JSON.stringify(e)}`))
+        )
+    })
+  }
+}

+ 49 - 0
distributor-node/src/services/networking/storage-node/api.ts

@@ -0,0 +1,49 @@
+import { Configuration } from './generated'
+import { PublicApi } from './generated/api'
+import axios, { AxiosRequestConfig } from 'axios'
+import { LoggingService } from '../../logging'
+import { Logger } from 'winston'
+import { StorageNodeDownloadResponse } from '../../../types'
+import { parseAxiosError } from '../../parsers/errors'
+
+export class StorageNodeApi {
+  private logger: Logger
+  private publicApi: PublicApi
+  private endpoint: string
+
+  public constructor(endpoint: string, logging: LoggingService) {
+    const config = new Configuration({
+      basePath: endpoint,
+    })
+    this.publicApi = new PublicApi(config)
+    this.endpoint = new URL(endpoint).toString()
+    this.logger = logging.createLogger('StorageNodeApi', { endpoint })
+  }
+
+  public async isObjectAvailable(contentHash: string): Promise<boolean> {
+    this.logger.debug('Checking object availibility', { contentHash })
+    try {
+      await this.publicApi.publicApiGetFileHeaders(contentHash)
+      this.logger.debug('Data object available', { contentHash })
+      return true
+    } catch (err) {
+      if (axios.isAxiosError(err)) {
+        this.logger.debug('Data object not available', { err: parseAxiosError(err) })
+        return false
+      }
+      this.logger.error('Unexpected error while requesting data object', { err })
+      throw err
+    }
+  }
+
+  public async downloadObject(contentHash: string, startAt?: number): Promise<StorageNodeDownloadResponse> {
+    this.logger.verbose('Sending download request', { contentHash, startAt })
+    const options: AxiosRequestConfig = {
+      responseType: 'stream',
+    }
+    if (startAt) {
+      options.headers.Range = `bytes=${startAt}-`
+    }
+    return this.publicApi.publicApiGetFile(contentHash, options)
+  }
+}

+ 27 - 0
distributor-node/src/services/networking/storage-node/generated/.openapi-generator-ignore

@@ -0,0 +1,27 @@
+# OpenAPI Generator Ignore
+# Generated by openapi-generator https://github.com/openapitools/openapi-generator
+
+# Use this file to prevent files from being overwritten by the generator.
+# The patterns follow closely to .gitignore or .dockerignore.
+
+# As an example, the C# client generator defines ApiClient.cs.
+# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line:
+#ApiClient.cs
+
+# You can match any string of characters against a directory, file or extension with a single asterisk (*):
+#foo/*/qux
+# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux
+
+# You can recursively match patterns against a directory, file or extension with a double asterisk (**):
+#foo/**/qux
+# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux
+
+# You can also negate patterns with an exclamation (!).
+# For example, you can ignore all files in a docs folder with the file extension .md:
+#docs/*.md
+# Then explicitly reverse the ignore rule for a single file:
+#!docs/README.md
+
+git_push.sh
+.npmignore
+.gitignore

Alguns ficheiros não foram mostrados porque muitos ficheiros mudaram neste diff