Merge branch 'giza_staging' into giza-staging-cli

Leszek Wiesner committed 3 years ago (commit d2e0f40fd3)
100 changed files with 11175 additions and 50 deletions
  1. +8 -3  .env
  2. +32 -32  .github/workflows/joystream-cli.yml
  3. +27 -4  build-docker-images.sh
  4. +3 -3  build-npm-packages.sh
  5. +26 -7  colossus.Dockerfile
  6. +2 -1  devops/vscode/settings.json
  7. +26 -0  distributor-node.Dockerfile
  8. +1 -0  distributor-node/.eslintignore
  9. +9 -0  distributor-node/.gitignore
  10. +3 -0  distributor-node/.prettierignore
  11. +419 -0  distributor-node/README.md
  12. +3 -0  distributor-node/bin/run
  13. +3 -0  distributor-node/bin/run.cmd
  14. +22 -0  distributor-node/config.yml
  15. +21 -0  distributor-node/config/docker/config.docker.yml
  16. +5 -0  distributor-node/config/docker/filebeat.Dockerfile
  17. +19 -0  distributor-node/config/docker/filebeat.docker.yml
  18. +76 -0  distributor-node/docker-compose.yml
  19. +7 -0  distributor-node/openapitools.json
  20. +120 -0  distributor-node/package.json
  21. +16 -0  distributor-node/scripts/data/family-metadata.json
  22. +12 -0  distributor-node/scripts/data/operator-metadata.json
  23. +17 -0  distributor-node/scripts/init-bucket.sh
  24. +36 -0  distributor-node/scripts/test-commands.sh
  25. +1 -0  distributor-node/src/@types/@elastic/esc-winston-format/index.d.ts
  26. +1 -0  distributor-node/src/@types/js-image-generator/index.d.ts
  27. +216 -0  distributor-node/src/api-spec/openapi.yml
  28. +128 -0  distributor-node/src/app/index.ts
  29. +6 -0  distributor-node/src/command-base/ExitCodes.ts
  30. +65 -0  distributor-node/src/command-base/accounts.ts
  31. +40 -0  distributor-node/src/command-base/api.ts
  32. +96 -0  distributor-node/src/command-base/default.ts
  33. +135 -0  distributor-node/src/commands/dev/batchUpload.ts
  34. +93 -0  distributor-node/src/commands/dev/init.ts
  35. +38 -0  distributor-node/src/commands/leader/cancel-invitation.ts
  36. +25 -0  distributor-node/src/commands/leader/create-bucket-family.ts
  37. +38 -0  distributor-node/src/commands/leader/create-bucket.ts
  38. +28 -0  distributor-node/src/commands/leader/delete-bucket-family.ts
  39. +33 -0  distributor-node/src/commands/leader/delete-bucket.ts
  40. +39 -0  distributor-node/src/commands/leader/invite-bucket-operator.ts
  41. +38 -0  distributor-node/src/commands/leader/remove-bucket-operator.ts
  42. +45 -0  distributor-node/src/commands/leader/set-bucket-family-metadata.ts
  43. +28 -0  distributor-node/src/commands/leader/set-buckets-per-bag-limit.ts
  44. +54 -0  distributor-node/src/commands/leader/update-bag.ts
  45. +38 -0  distributor-node/src/commands/leader/update-bucket-mode.ts
  46. +39 -0  distributor-node/src/commands/leader/update-bucket-status.ts
  47. +53 -0  distributor-node/src/commands/leader/update-dynamic-bag-policy.ts
  48. +38 -0  distributor-node/src/commands/operator/accept-invitation.ts
  49. +61 -0  distributor-node/src/commands/operator/set-metadata.ts
  50. +19 -0  distributor-node/src/commands/start.ts
  51. +1 -0  distributor-node/src/index.ts
  52. +305 -0  distributor-node/src/services/cache/StateCacheService.ts
  53. +228 -0  distributor-node/src/services/content/ContentService.ts
  54. +87 -0  distributor-node/src/services/content/FileContinousReadStream.ts
  55. +134 -0  distributor-node/src/services/logging/LoggingService.ts
  56. +1 -0  distributor-node/src/services/logging/index.ts
  57. +352 -0  distributor-node/src/services/networking/NetworkingService.ts
  58. +27 -0  distributor-node/src/services/networking/distributor-node/generated/.openapi-generator-ignore
  59. +5 -0  distributor-node/src/services/networking/distributor-node/generated/.openapi-generator/FILES
  60. +1 -0  distributor-node/src/services/networking/distributor-node/generated/.openapi-generator/VERSION
  61. +380 -0  distributor-node/src/services/networking/distributor-node/generated/api.ts
  62. +71 -0  distributor-node/src/services/networking/distributor-node/generated/base.ts
  63. +138 -0  distributor-node/src/services/networking/distributor-node/generated/common.ts
  64. +101 -0  distributor-node/src/services/networking/distributor-node/generated/configuration.ts
  65. +18 -0  distributor-node/src/services/networking/distributor-node/generated/index.ts
  66. +1 -0  distributor-node/src/services/networking/index.ts
  67. +91 -0  distributor-node/src/services/networking/query-node/api.ts
  68. +33 -0  distributor-node/src/services/networking/query-node/codegen.yml
  69. +115 -0  distributor-node/src/services/networking/query-node/generated/queries.ts
  70. +4710 -0  distributor-node/src/services/networking/query-node/generated/schema.ts
  71. +78 -0  distributor-node/src/services/networking/query-node/queries/queries.graphql
  72. +145 -0  distributor-node/src/services/networking/runtime/api.ts
  73. +49 -0  distributor-node/src/services/networking/storage-node/api.ts
  74. +27 -0  distributor-node/src/services/networking/storage-node/generated/.openapi-generator-ignore
  75. +5 -0  distributor-node/src/services/networking/storage-node/generated/.openapi-generator/FILES
  76. +1 -0  distributor-node/src/services/networking/storage-node/generated/.openapi-generator/VERSION
  77. +453 -0  distributor-node/src/services/networking/storage-node/generated/api.ts
  78. +71 -0  distributor-node/src/services/networking/storage-node/generated/base.ts
  79. +138 -0  distributor-node/src/services/networking/storage-node/generated/common.ts
  80. +101 -0  distributor-node/src/services/networking/storage-node/generated/configuration.ts
  81. +18 -0  distributor-node/src/services/networking/storage-node/generated/index.ts
  82. +63 -0  distributor-node/src/services/parsers/BagIdParserService.ts
  83. +114 -0  distributor-node/src/services/parsers/ConfigParserService.ts
  84. +14 -0  distributor-node/src/services/parsers/errors.ts
  85. +117 -0  distributor-node/src/services/server/ServerService.ts
  86. +258 -0  distributor-node/src/services/server/controllers/public.ts
  87. +30 -0  distributor-node/src/services/validation/ValidationService.ts
  88. +13 -0  distributor-node/src/services/validation/generateTypes.ts
  89. +56 -0  distributor-node/src/services/validation/schemas/configSchema.ts
  90. +23 -0  distributor-node/src/services/validation/schemas/familyMetadataSchema.ts
  91. +24 -0  distributor-node/src/services/validation/schemas/index.ts
  92. +28 -0  distributor-node/src/services/validation/schemas/operatorMetadataSchema.ts
  93. +10 -0  distributor-node/src/services/validation/schemas/utils.ts
  94. +5 -0  distributor-node/src/types/api.ts
  95. +1 -0  distributor-node/src/types/common.ts
  96. +9 -0  distributor-node/src/types/config.ts
  97. +35 -0  distributor-node/src/types/generated/ConfigJson.d.ts
  98. +15 -0  distributor-node/src/types/generated/FamilyMetadataJson.d.ts
  99. +148 -0  distributor-node/src/types/generated/OpenApi.ts
  100. +19 -0  distributor-node/src/types/generated/OperatorMetadataJson.d.ts

+ 8 - 3
.env

@@ -14,9 +14,6 @@ DB_PORT=5432
 DEBUG=index-builder:*
 TYPEORM_LOGGING=error
 
-DEBUG=index-builder:*
-TYPEORM_LOGGING=error
-
 ###########################
 #    Indexer options      #
 ###########################
@@ -40,3 +37,11 @@ WS_PROVIDER_ENDPOINT_URI=ws://joystream-node:9944/
 
 # If running joystream-node on host machine you can use following address to reach it instead
 # WS_PROVIDER_ENDPOINT_URI=ws://host.docker.internal:9944/
+
+######################
+#    Storage Node    #
+######################
+COLOSSUS_PORT=3333
+QUERY_NODE_HOST=${GRAPHQL_SERVER_HOST}:${GRAPHQL_SERVER_PORT}
+WORKER_ID=0
+ACCOUNT_URI=//Alice

+ 32 - 32
.github/workflows/joystream-cli.yml

@@ -9,22 +9,22 @@ jobs:
       matrix:
         node-version: [14.x]
     steps:
-    - uses: actions/checkout@v1
-    - name: Use Node.js ${{ matrix.node-version }}
-      uses: actions/setup-node@v1
-      with:
-        node-version: ${{ matrix.node-version }}
-    - name: checks
-      run: |
-        yarn install --frozen-lockfile
-        yarn workspace @joystream/types build
-        yarn workspace @joystream/metadata-protobuf build
-        yarn workspace @joystream/cli checks --quiet
-    - name: yarn pack test
-      run: |
-        yarn workspace @joystream/cli pack --filename cli-pack-test.tgz
-        tar zxvf ./cli/cli-pack-test.tgz -C cli
-        cd ./cli/package && yarn link
+      - uses: actions/checkout@v1
+      - name: Use Node.js ${{ matrix.node-version }}
+        uses: actions/setup-node@v1
+        with:
+          node-version: ${{ matrix.node-version }}
+      - name: checks
+        run: |
+          yarn install --frozen-lockfile
+          yarn workspace @joystream/types build
+          yarn workspace @joystream/metadata-protobuf build
+          yarn workspace @joystream/cli checks --quiet
+      - name: yarn pack test
+        run: |
+          yarn workspace @joystream/cli pack --filename cli-pack-test.tgz
+          tar zxvf ./cli/cli-pack-test.tgz -C cli
+          cd ./cli/package && yarn link
 
   cli_build_osx:
     name: MacOS Checks
@@ -33,19 +33,19 @@ jobs:
       matrix:
         node-version: [14.x]
     steps:
-    - uses: actions/checkout@v1
-    - name: Use Node.js ${{ matrix.node-version }}
-      uses: actions/setup-node@v1
-      with:
-        node-version: ${{ matrix.node-version }}
-    - name: checks
-      run: |
-        yarn install --frozen-lockfile --network-timeout 120000
-        yarn workspace @joystream/types build
-        yarn workspace @joystream/metadata-protobuf build
-        yarn workspace @joystream/cli checks --quiet
-    - name: yarn pack test
-      run: |
-        yarn workspace @joystream/cli pack --filename cli-pack-test.tgz
-        tar zxvf ./cli/cli-pack-test.tgz -C cli
-        cd ./cli/package && yarn link
+      - uses: actions/checkout@v1
+      - name: Use Node.js ${{ matrix.node-version }}
+        uses: actions/setup-node@v1
+        with:
+          node-version: ${{ matrix.node-version }}
+      - name: checks
+        run: |
+          yarn install --frozen-lockfile --network-timeout 120000
+          yarn workspace @joystream/types build
+          yarn workspace @joystream/metadata-protobuf build
+          yarn workspace @joystream/cli checks --quiet
+      - name: yarn pack test
+        run: |
+          yarn workspace @joystream/cli pack --filename cli-pack-test.tgz
+          tar zxvf ./cli/cli-pack-test.tgz -C cli
+          cd ./cli/package && yarn link

+ 27 - 4
build-docker-images.sh

@@ -28,10 +28,33 @@ else
   fi
 fi
 
-# Build joystream/apps docker image
-echo "Building 'joystream/apps' docker image..."
+# Build colossus docker image
+echo "Building colossus docker image..."
 docker-compose build colossus
 
+# Build distributor docker image
+echo "Building distributor docker image..."
+docker-compose build distributor-node
+
+if [[ "$OSTYPE" == "linux-gnu" ]]; then
+    IP_ADDRESS=$(ip addr show | grep "\binet\b.*\bdocker0\b" | awk '{print $2}' | cut -d '/' -f 1)
+    # Run a local development chain
+    docker-compose -f docker-compose.linux-gnu-build.yml up -d joystream-node
+
+    # Build processor/graphql-server docker image
+    echo "Building joystream/apps docker image..."
+    WS_PROVIDER_ENDPOINT_URI=ws://${IP_ADDRESS}:9944/ docker-compose build graphql-server
+elif [[ "$OSTYPE" == "darwin"* ]]; then
+    # Run a local development chain
+    docker-compose up -d joystream-node
+
+    # Build processor/graphql-server docker image
+    echo "Building joystream/apps docker image..."
+    WS_PROVIDER_ENDPOINT_URI=ws://host.docker.internal:9944/ docker-compose build graphql-server
+fi
+
+docker-compose down
+
 # Build the pioneer docker image
-echo "Building pioneer docker image"
-docker-compose build pioneer
+# echo "Building pioneer docker image"
+# docker-compose build pioneer

+ 3 - 3
build-npm-packages.sh

@@ -6,7 +6,7 @@ yarn
 yarn workspace @joystream/types build
 yarn workspace @joystream/metadata-protobuf build
 yarn workspace query-node-root build
-yarn workspace @joystream/cli build
-yarn workspace storage-node build
+# yarn workspace @joystream/cli build
 yarn workspace storage-node-v2 build
-yarn workspace pioneer build
+yarn workspace @joystream/distributor-cli build
+# yarn workspace pioneer build

+ 26 - 7
colossus.Dockerfile

@@ -2,15 +2,34 @@ FROM --platform=linux/x86-64 node:14 as builder
 
 WORKDIR /joystream
 COPY . /joystream
-RUN  rm -fr /joystream/pioneer
 
-EXPOSE 3001
-
-RUN yarn --frozen-lockfile
+RUN yarn
 
 RUN yarn workspace @joystream/types build
-RUN yarn workspace storage-node build
+RUN yarn workspace @joystream/metadata-protobuf build
+RUN yarn workspace storage-node-v2 build
 
-RUN yarn
+# Use these volumes to persist uploading data and to pass the keyfile.
+VOLUME ["/data", "/keystore"]
+
+# Required variables
+ENV WS_PROVIDER_ENDPOINT_URI=ws://not-set
+ENV COLOSSUS_PORT=3333
+ENV QUERY_NODE_HOST=not-set
+ENV WORKER_ID=not-set
+# - set external key file using the `/keystore` volume
+ENV ACCOUNT_KEYFILE=
+ENV ACCOUNT_PWD=
+# Optional variables
+ENV SYNC_INTERVAL=1
+ENV ELASTIC_SEARCH_HOST=
+# warn, error, debug, info
+ENV ELASTIC_LOG_LEVEL=debug
+# - overrides account key file
+ENV ACCOUNT_URI=
+
+# Colossus node port
+EXPOSE ${COLOSSUS_PORT}
 
-ENTRYPOINT yarn colossus --dev --ws-provider $WS_PROVIDER_ENDPOINT_URI
+WORKDIR /joystream/storage-node-v2
+ENTRYPOINT yarn storage-node server --queryNodeHost ${QUERY_NODE_HOST} --port ${COLOSSUS_PORT} --uploads /data --worker ${WORKER_ID} --apiUrl ${WS_PROVIDER_ENDPOINT_URI} --sync --syncInterval=${SYNC_INTERVAL} --keyFile=${ACCOUNT_KEYFILE} --elasticSearchHost=${ELASTIC_SEARCH_HOST}
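For reference, a minimal run sketch for the resulting image. The image tag `joystream/colossus` and the `joystream_default` network are assumptions (they come from the surrounding compose setup, not from this Dockerfile); the volumes and environment variables are the ones declared above:

```bash
# Hypothetical invocation: image tag, network name and volume names are assumptions.
docker run -d \
  --network joystream_default \
  -v colossus-data:/data \
  -v colossus-keystore:/keystore \
  -e WS_PROVIDER_ENDPOINT_URI=ws://joystream-node:9944/ \
  -e QUERY_NODE_HOST=graphql-server:4002 \
  -e WORKER_ID=0 \
  -e ACCOUNT_URI=//Alice \
  -p 3333:3333 \
  joystream/colossus
```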

+ 2 - 1
devops/vscode/settings.json

@@ -5,6 +5,7 @@
     "./tests/network-tests",
     "./types",
     "./storage-node",
-    "./atlas"
+    "./atlas",
+    "./distributor-node"
   ]
 }

+ 26 - 0
distributor-node.Dockerfile

@@ -0,0 +1,26 @@
+FROM --platform=linux/x86-64 node:14 as builder
+
+WORKDIR /joystream
+COPY ./types types
+COPY ./metadata-protobuf metadata-protobuf
+COPY ./distributor-node distributor-node
+COPY ./yarn.lock yarn.lock
+COPY ./package.json package.json
+
+EXPOSE 3334
+
+# Build & cleanup
+# (must be inside a single "RUN", see: https://stackoverflow.com/questions/40212836/docker-image-larger-than-its-filesystem)
+RUN \
+  yarn --frozen-lockfile &&\
+  yarn workspace @joystream/types build &&\
+  yarn workspace @joystream/metadata-protobuf build &&\
+  yarn workspace @joystream/distributor-cli build &&\
+  find . -name "node_modules" -type d -prune -exec rm -rf '{}' + &&\
+  yarn --frozen-lockfile --production &&\
+  yarn cache clean
+
+ENV CONFIG_PATH ./distributor-node/config/docker/config.docker.yml
+
+ENTRYPOINT ["yarn", "joystream-distributor"]
+CMD ["start"]

+ 1 - 0
distributor-node/.eslintignore

@@ -0,0 +1 @@
+src/types/generated

+ 9 - 0
distributor-node/.gitignore

@@ -0,0 +1,9 @@
+*-debug.log
+*-error.log
+/.nyc_output
+/dist
+/lib
+/package-lock.json
+/tmp
+node_modules
+/local

+ 3 - 0
distributor-node/.prettierignore

@@ -0,0 +1,3 @@
+/**/generated
+/**/mock.graphql
+lib

+ 419 - 0
distributor-node/README.md

@@ -0,0 +1,419 @@
+@joystream/distributor-cli
+==========================
+
+Joystream distributor node CLI
+
+[![oclif](https://img.shields.io/badge/cli-oclif-brightgreen.svg)](https://oclif.io)
+[![Version](https://img.shields.io/npm/v/@joystream/distributor-cli.svg)](https://npmjs.org/package/@joystream/distributor-cli)
+[![Downloads/week](https://img.shields.io/npm/dw/@joystream/distributor-cli.svg)](https://npmjs.org/package/@joystream/distributor-cli)
+[![License](https://img.shields.io/npm/l/@joystream/distributor-cli.svg)](https://github.com/Joystream/joystream/blob/master/package.json)
+
+<!-- toc -->
+* [Usage](#usage)
+* [Commands](#commands)
+<!-- tocstop -->
+# Usage
+<!-- usage -->
+```sh-session
+$ npm install -g @joystream/distributor-cli
+$ joystream-distributor COMMAND
+running command...
+$ joystream-distributor (-v|--version|version)
+@joystream/distributor-cli/0.1.0 linux-x64 node-v14.17.3
+$ joystream-distributor --help [COMMAND]
+USAGE
+  $ joystream-distributor COMMAND
+...
+```
+<!-- usagestop -->
+# Commands
+<!-- commands -->
+* [`joystream-distributor dev:init`](#joystream-distributor-devinit)
+* [`joystream-distributor help [COMMAND]`](#joystream-distributor-help-command)
+* [`joystream-distributor leader:cancel-invitation`](#joystream-distributor-leadercancel-invitation)
+* [`joystream-distributor leader:create-bucket`](#joystream-distributor-leadercreate-bucket)
+* [`joystream-distributor leader:create-bucket-family`](#joystream-distributor-leadercreate-bucket-family)
+* [`joystream-distributor leader:delete-bucket`](#joystream-distributor-leaderdelete-bucket)
+* [`joystream-distributor leader:delete-bucket-family`](#joystream-distributor-leaderdelete-bucket-family)
+* [`joystream-distributor leader:invite-bucket-operator`](#joystream-distributor-leaderinvite-bucket-operator)
+* [`joystream-distributor leader:set-buckets-per-bag-limit`](#joystream-distributor-leaderset-buckets-per-bag-limit)
+* [`joystream-distributor leader:update-bag`](#joystream-distributor-leaderupdate-bag)
+* [`joystream-distributor leader:update-bucket-mode`](#joystream-distributor-leaderupdate-bucket-mode)
+* [`joystream-distributor leader:update-bucket-status`](#joystream-distributor-leaderupdate-bucket-status)
+* [`joystream-distributor leader:update-dynamic-bag-policy`](#joystream-distributor-leaderupdate-dynamic-bag-policy)
+* [`joystream-distributor operator:accept-invitation`](#joystream-distributor-operatoraccept-invitation)
+* [`joystream-distributor operator:set-metadata`](#joystream-distributor-operatorset-metadata)
+* [`joystream-distributor start`](#joystream-distributor-start)
+
+## `joystream-distributor dev:init`
+
+Initialize development environment. Sets Alice as distributor working group leader.
+
+```
+USAGE
+  $ joystream-distributor dev:init
+
+OPTIONS
+  -c, --configPath=configPath  [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                               directory)
+
+  -y, --yes                    Answer "yes" to any prompt, skipping any manual confirmations
+```
+
+_See code: [src/commands/dev/init.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/dev/init.ts)_
+
+## `joystream-distributor help [COMMAND]`
+
+display help for joystream-distributor
+
+```
+USAGE
+  $ joystream-distributor help [COMMAND]
+
+ARGUMENTS
+  COMMAND  command to show help for
+
+OPTIONS
+  --all  see all commands in CLI
+```
+
+_See code: [@oclif/plugin-help](https://github.com/oclif/plugin-help/blob/v2.2.3/src/commands/help.ts)_
+
+## `joystream-distributor leader:cancel-invitation`
+
+Cancel pending distribution bucket operator invitation.
+
+```
+USAGE
+  $ joystream-distributor leader:cancel-invitation
+
+OPTIONS
+  -B, --bucketId=bucketId      (required) Distribution bucket id
+
+  -c, --configPath=configPath  [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                               directory)
+
+  -f, --familyId=familyId      (required) Distribution bucket family id
+
+  -w, --workerId=workerId      (required) ID of the invited operator (distribution group worker)
+
+  -y, --yes                    Answer "yes" to any prompt, skipping any manual confirmations
+
+DESCRIPTION
+  Requires distribution working group leader permissions.
+```
+
+_See code: [src/commands/leader/cancel-invitation.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/cancel-invitation.ts)_
+
+## `joystream-distributor leader:create-bucket`
+
+Create new distribution bucket. Requires distribution working group leader permissions.
+
+```
+USAGE
+  $ joystream-distributor leader:create-bucket
+
+OPTIONS
+  -a, --acceptingBags=(yes|no)  [default: no] Whether the created bucket should accept new bags
+
+  -c, --configPath=configPath   [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                                directory)
+
+  -f, --familyId=familyId       (required) Distribution bucket family id
+
+  -y, --yes                     Answer "yes" to any prompt, skipping any manual confirmations
+```
+
+_See code: [src/commands/leader/create-bucket.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/create-bucket.ts)_
+
+## `joystream-distributor leader:create-bucket-family`
+
+Create new distribution bucket family. Requires distribution working group leader permissions.
+
+```
+USAGE
+  $ joystream-distributor leader:create-bucket-family
+
+OPTIONS
+  -c, --configPath=configPath  [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                               directory)
+
+  -y, --yes                    Answer "yes" to any prompt, skipping any manual confirmations
+```
+
+_See code: [src/commands/leader/create-bucket-family.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/create-bucket-family.ts)_
+
+## `joystream-distributor leader:delete-bucket`
+
+Delete distribution bucket. The bucket must have no operators. Requires distribution working group leader permissions.
+
+```
+USAGE
+  $ joystream-distributor leader:delete-bucket
+
+OPTIONS
+  -B, --bucketId=bucketId      (required) Distribution bucket id
+
+  -c, --configPath=configPath  [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                               directory)
+
+  -f, --familyId=familyId      (required) Distribution bucket family id
+
+  -y, --yes                    Answer "yes" to any prompt, skipping any manual confirmations
+```
+
+_See code: [src/commands/leader/delete-bucket.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/delete-bucket.ts)_
+
+## `joystream-distributor leader:delete-bucket-family`
+
+Delete distribution bucket family. Requires distribution working group leader permissions.
+
+```
+USAGE
+  $ joystream-distributor leader:delete-bucket-family
+
+OPTIONS
+  -c, --configPath=configPath  [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                               directory)
+
+  -f, --familyId=familyId      (required) Distribution bucket family id
+
+  -y, --yes                    Answer "yes" to any prompt, skipping any manual confirmations
+```
+
+_See code: [src/commands/leader/delete-bucket-family.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/delete-bucket-family.ts)_
+
+## `joystream-distributor leader:invite-bucket-operator`
+
+Invite distribution bucket operator (distribution group worker).
+
+```
+USAGE
+  $ joystream-distributor leader:invite-bucket-operator
+
+OPTIONS
+  -B, --bucketId=bucketId      (required) Distribution bucket id
+
+  -c, --configPath=configPath  [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                               directory)
+
+  -f, --familyId=familyId      (required) Distribution bucket family id
+
+  -w, --workerId=workerId      (required) ID of the distribution group worker to invite as bucket operator
+
+  -y, --yes                    Answer "yes" to any prompt, skipping any manual confirmations
+
+DESCRIPTION
+  The specified bucket must not have any operator currently.
+     Requires distribution working group leader permissions.
+```
+
+_See code: [src/commands/leader/invite-bucket-operator.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/invite-bucket-operator.ts)_
+
+## `joystream-distributor leader:set-buckets-per-bag-limit`
+
+Set max. distribution buckets per bag limit. Requires distribution working group leader permissions.
+
+```
+USAGE
+  $ joystream-distributor leader:set-buckets-per-bag-limit
+
+OPTIONS
+  -c, --configPath=configPath  [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                               directory)
+
+  -l, --limit=limit            (required) New limit value
+
+  -y, --yes                    Answer "yes" to any prompt, skipping any manual confirmations
+```
+
+_See code: [src/commands/leader/set-buckets-per-bag-limit.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/set-buckets-per-bag-limit.ts)_
+
+## `joystream-distributor leader:update-bag`
+
+Add/remove distribution buckets from a bag.
+
+```
+USAGE
+  $ joystream-distributor leader:update-bag
+
+OPTIONS
+  -a, --add=add
+      [default: ] ID of a bucket to add to bag
+
+  -b, --bagId=bagId
+      (required) Bag ID. Format: {bag_type}:{sub_type}:{id}.
+           - Bag types: 'static', 'dynamic'
+           - Sub types: 'static:council', 'static:wg', 'dynamic:member', 'dynamic:channel'
+           - Id:
+             - absent for 'static:council'
+             - working group name for 'static:wg'
+             - integer for 'dynamic:member' and 'dynamic:channel'
+           Examples:
+           - static:council
+           - static:wg:storage
+           - dynamic:member:4
+
+  -c, --configPath=configPath
+      [default: ./config.yml] Path to config JSON/YAML file (relative to current working directory)
+
+  -f, --familyId=familyId
+      (required) ID of the distribution bucket family
+
+  -r, --remove=remove
+      [default: ] ID of a bucket to remove from bag
+
+  -y, --yes
+      Answer "yes" to any prompt, skipping any manual confirmations
+
+EXAMPLE
+  $ joystream-distributor leader:update-bag -b 1 -f 1 -a 1 -a 2 -a 3 -r 4 -r 5
+```
+
+_See code: [src/commands/leader/update-bag.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/update-bag.ts)_
+
+## `joystream-distributor leader:update-bucket-mode`
+
+Update distribution bucket mode ("distributing" flag). Requires distribution working group leader permissions.
+
+```
+USAGE
+  $ joystream-distributor leader:update-bucket-mode
+
+OPTIONS
+  -B, --bucketId=bucketId      (required) Distribution bucket id
+
+  -c, --configPath=configPath  [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                               directory)
+
+  -d, --mode=(on|off)          (required) Whether the bucket should be "on" (distributing) or "off" (not distributing)
+
+  -f, --familyId=familyId      (required) Distribution bucket family id
+
+  -y, --yes                    Answer "yes" to any prompt, skipping any manual confirmations
+```
+
+_See code: [src/commands/leader/update-bucket-mode.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/update-bucket-mode.ts)_
+
+## `joystream-distributor leader:update-bucket-status`
+
+Update distribution bucket status ("acceptingNewBags" flag). Requires distribution working group leader permissions.
+
+```
+USAGE
+  $ joystream-distributor leader:update-bucket-status
+
+OPTIONS
+  -B, --bucketId=bucketId       (required) Distribution bucket id
+  -a, --acceptingBags=(yes|no)  (required) Whether the bucket should accept new bags
+
+  -c, --configPath=configPath   [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                                directory)
+
+  -f, --familyId=familyId       (required) Distribution bucket family id
+
+  -y, --yes                     Answer "yes" to any prompt, skipping any manual confirmations
+```
+
+_See code: [src/commands/leader/update-bucket-status.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/update-bucket-status.ts)_
+
+## `joystream-distributor leader:update-dynamic-bag-policy`
+
+Update dynamic bag creation policy (number of buckets per family that should store a given dynamic bag type).
+
+```
+USAGE
+  $ joystream-distributor leader:update-dynamic-bag-policy
+
+OPTIONS
+  -c, --configPath=configPath  [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                               directory)
+
+  -p, --policy=policy          Key-value pair of {familyId}:{numberOfBuckets}
+
+  -t, --type=(Member|Channel)  (required) Dynamic bag type
+
+  -y, --yes                    Answer "yes" to any prompt, skipping any manual confirmations
+
+DESCRIPTION
+  Requires distribution working group leader permissions.
+
+EXAMPLE
+  $ joystream-distributor leader:update-dynamic-bag-policy -t Member -p 1:5 -p 2:10 -p 3:5
+```
+
+_See code: [src/commands/leader/update-dynamic-bag-policy.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/update-dynamic-bag-policy.ts)_
+
+## `joystream-distributor operator:accept-invitation`
+
+Accept pending distribution bucket operator invitation.
+
+```
+USAGE
+  $ joystream-distributor operator:accept-invitation
+
+OPTIONS
+  -B, --bucketId=bucketId      (required) Distribution bucket id
+
+  -c, --configPath=configPath  [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                               directory)
+
+  -f, --familyId=familyId      (required) Distribution bucket family id
+
+  -w, --workerId=workerId      (required) ID of the invited operator (distribution group worker)
+
+  -y, --yes                    Answer "yes" to any prompt, skipping any manual confirmations
+
+DESCRIPTION
+  Requires the invited distribution group worker role key.
+```
+
+_See code: [src/commands/operator/accept-invitation.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/operator/accept-invitation.ts)_
+
+## `joystream-distributor operator:set-metadata`
+
+Set/update distribution bucket operator metadata.
+
+```
+USAGE
+  $ joystream-distributor operator:set-metadata
+
+OPTIONS
+  -B, --bucketId=bucketId      (required) Distribution bucket id
+
+  -c, --configPath=configPath  [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                               directory)
+
+  -e, --endpoint=endpoint      Root distribution node endpoint
+
+  -f, --familyId=familyId      (required) Distribution bucket family id
+
+  -i, --input=input            Path to JSON metadata file
+
+  -w, --workerId=workerId      (required) ID of the invited operator (distribution group worker)
+
+  -y, --yes                    Answer "yes" to any prompt, skipping any manual confirmations
+
+DESCRIPTION
+  Requires active distribution bucket operator worker role key.
+```
+
+_See code: [src/commands/operator/set-metadata.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/operator/set-metadata.ts)_
+
+## `joystream-distributor start`
+
+Start the node
+
+```
+USAGE
+  $ joystream-distributor start
+
+OPTIONS
+  -c, --configPath=configPath  [default: ./config.yml] Path to config JSON/YAML file (relative to current working
+                               directory)
+
+  -y, --yes                    Answer "yes" to any prompt, skipping any manual confirmations
+```
+
+_See code: [src/commands/start.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/start.ts)_
+<!-- commandsstop -->

+ 3 - 0
distributor-node/bin/run

@@ -0,0 +1,3 @@
+#!/usr/bin/env node
+
+require('@oclif/command').run().then(require('@oclif/command/flush')).catch(require('@oclif/errors/handle'))

+ 3 - 0
distributor-node/bin/run.cmd

@@ -0,0 +1,3 @@
+@echo off
+
+node "%~dp0\run" %*

+ 22 - 0
distributor-node/config.yml

@@ -0,0 +1,22 @@
+id: test-node
+endpoints:
+  queryNode: http://localhost:8081/graphql
+  substrateNode: ws://localhost:9944
+  # elasticSearch: http://localhost:9200
+directories:
+  data: ./local/data
+  cache: ./local/cache
+  logs: ./local/logs
+log:
+  file: debug
+  console: info
+  # elastic: info
+limits:
+  storage: 100G
+  maxConcurrentStorageNodeDownloads: 100
+  maxConcurrentOutboundConnections: 300
+  outboundRequestsTimeout: 5000
+port: 3334
+keys: [//Alice]
+buckets: 'all'
+workerId: 0
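This file is the default config resolved from the current working directory; as the scripts and Dockerfile in this commit show, a config file can also be selected explicitly, either with the `-c`/`--configPath` flag or the `CONFIG_PATH` environment variable:

```bash
# Equivalent ways of pointing the CLI at a config file
joystream-distributor start -c ./config.yml
CONFIG_PATH=./config.yml joystream-distributor start
```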

+ 21 - 0
distributor-node/config/docker/config.docker.yml

@@ -0,0 +1,21 @@
+id: distributor-node-docker
+endpoints:
+  queryNode: http://graphql-server-mnt:4002/graphql
+  substrateNode: ws://joystream-node:9944
+  # elasticSearch: http://elasticsearch:9200
+directories:
+  data: /data
+  cache: /cache
+  logs: /logs
+log:
+  console: info
+  # elastic: info
+limits:
+  storage: 100G
+  maxConcurrentStorageNodeDownloads: 100
+  maxConcurrentOutboundConnections: 300
+  outboundRequestsTimeout: 5000
+port: 3334
+keys: [//Alice]
+buckets: 'all'
+workerId: 0

+ 5 - 0
distributor-node/config/docker/filebeat.Dockerfile

@@ -0,0 +1,5 @@
+FROM docker.elastic.co/beats/filebeat:7.13.3
+COPY ./filebeat.docker.yml /usr/share/filebeat/filebeat.yml
+USER root
+RUN chown root:filebeat /usr/share/filebeat/filebeat.yml
+USER filebeat

+ 19 - 0
distributor-node/config/docker/filebeat.docker.yml

@@ -0,0 +1,19 @@
+setup.kibana:
+  host: 'kibana:5601'
+output.elasticsearch:
+  hosts: ['elasticsearch:9200']
+# Using log files:
+filebeat.inputs:
+  - type: log
+    enabled: true
+    paths: /logs/*.json
+    json.keys_under_root: true
+    json.overwrite_keys: true
+    json.add_error_key: true
+    json.expand_keys: true
+# Docker autodiscover alternative:
+# filebeat.autodiscover:
+#   providers:
+#     - type: docker
+#       hints.enabled: true
+#       hints.default_config.enabled: false

+ 76 - 0
distributor-node/docker-compose.yml

@@ -0,0 +1,76 @@
+version: '3.4'
+
+services:
+  distributor-node:
+    image: node:14
+    labels:
+      co.elastic.logs/enabled: true
+      co.elastic.logs/json.keys_under_root: true
+      co.elastic.logs/json.overwrite_keys: true
+      co.elastic.logs/json.add_error_key: true
+      co.elastic.logs/json.expand_keys: true
+    volumes:
+      - type: bind
+        source: ..
+        target: /joystream
+      - data:/data
+      - cache:/cache
+      - logs:/logs
+    networks:
+      - joystream
+    ports:
+      - 127.0.0.1:3334:3334
+    working_dir: /joystream/distributor-node
+    init: true
+    entrypoint: ["./bin/run"]
+    command: ["start", "./config/docker/distributor-dev.docker.yml"]
+  # Ref: https://www.elastic.co/guide/en/elasticsearch/reference/7.13/docker.html
+  elasticsearch:
+    image: docker.elastic.co/elasticsearch/elasticsearch:7.13.2
+    container_name: elasticsearch
+    environment:
+      - discovery.type=single-node
+      - bootstrap.memory_lock=true
+      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
+    ulimits:
+      memlock:
+        soft: -1
+        hard: -1
+    volumes:
+      - es-data:/usr/share/elasticsearch/data
+    ports:
+      - 127.0.0.1:9200:9200
+  # Ref: https://www.elastic.co/guide/en/kibana/7.13/docker.html
+  kibana:
+    image: docker.elastic.co/kibana/kibana:7.13.2
+    container_name: kibana
+    ports:
+      - 127.0.0.1:5601:5601
+    environment:
+      ELASTICSEARCH_HOSTS: http://elasticsearch:9200
+  # Ref: https://www.elastic.co/guide/en/beats/filebeat/current/running-on-docker.html
+  filebeat:
+    user: root
+    image: joystream/distributor-filebeat
+    build:
+      context: ./config/docker
+      dockerfile: ./filebeat.Dockerfile
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock:ro
+      - logs:/logs
+
+volumes:
+  es-data:
+    driver: local
+  logs:
+    driver: local
+  cache:
+    driver: local
+  data:
+    driver: local
+
+# Join default joystream network (from root docker-compose)
+networks:
+  joystream:
+    external: true
+    name: joystream_default
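A possible startup sequence for this stack. The `joystream_default` network is declared external above, so it must already exist (normally created by the root docker-compose); the command order is illustrative:

```bash
cd distributor-node
# Bring up the logging stack first, then the node itself
docker-compose up -d elasticsearch kibana filebeat
docker-compose up -d distributor-node
```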

+ 7 - 0
distributor-node/openapitools.json

@@ -0,0 +1,7 @@
+{
+  "$schema": "node_modules/@openapitools/openapi-generator-cli/config.schema.json",
+  "spaces": 2,
+  "generator-cli": {
+    "version": "5.2.0"
+  }
+}

+ 120 - 0
distributor-node/package.json

@@ -0,0 +1,120 @@
+{
+  "name": "@joystream/distributor-cli",
+  "description": "Joystream distributor node CLI",
+  "version": "0.1.0",
+  "author": "Joystream contributors",
+  "bin": {
+    "joystream-distributor": "./bin/run"
+  },
+  "bugs": "https://github.com/Joystream/joystream/issues",
+  "dependencies": {
+    "@joystream/types": "^0.17.0",
+    "@joystream/metadata-protobuf": "^1.0.0",
+    "@elastic/ecs-winston-format": "^1.1.0",
+    "@oclif/command": "^1",
+    "@oclif/config": "^1",
+    "@oclif/plugin-help": "^2",
+    "@apollo/client": "^3.2.5",
+    "graphql": "^14.7.0",
+    "winston": "^3.3.3",
+    "fast-safe-stringify": "^2.1.1",
+    "ajv": "^7",
+    "axios": "^0.21.1",
+    "cross-fetch": "^3.1.4",
+    "express": "^4.17.1",
+    "express-winston": "^4.1.0",
+    "express-openapi-validator": "^4.12.4",
+    "file-type": "^16.5.1",
+    "lodash": "^4.17.21",
+    "lru-cache": "^6.0.0",
+    "node-cleanup": "^2.1.2",
+    "proper-lockfile": "^4.1.2",
+    "read-chunk": "^3.2.0",
+    "send": "^0.17.1",
+    "tslib": "^1",
+    "yaml": "^1.10.2",
+    "queue": "^6.0.2",
+    "express-http-proxy": "^1.6.2",
+    "winston-elasticsearch": "^0.15.8",
+    "node-cache": "^5.1.2",
+    "cors": "^2.8.5",
+    "inquirer": "^8.1.2",
+    "multihashes": "^4.0.3",
+    "blake3": "^2.1.4",
+    "js-image-generator": "^1.0.3"
+  },
+  "devDependencies": {
+    "@graphql-codegen/cli": "^1.21.4",
+    "@graphql-codegen/import-types-preset": "^1.18.1",
+    "@graphql-codegen/typescript": "^1.22.0",
+    "@graphql-codegen/typescript-document-nodes": "^1.17.11",
+    "@graphql-codegen/typescript-operations": "^1.17.16",
+    "@oclif/dev-cli": "^1",
+    "@oclif/test": "^1",
+    "@openapitools/openapi-generator-cli": "^2.3.6",
+    "@types/chai": "^4",
+    "@types/mocha": "^5",
+    "@types/node": "^14",
+    "@types/node-cleanup": "^2.1.1",
+    "@types/express-http-proxy": "^1.6.2",
+    "@types/node-cache": "^4.2.5",
+    "@types/send": "^0.17.0",
+    "@types/inquirer": "^8.1.1",
+    "@types/cors": "^2.8.12",
+    "chai": "^4",
+    "globby": "^10",
+    "json-schema-to-typescript": "^10.1.4",
+    "mocha": "^5",
+    "nyc": "^14",
+    "openapi-typescript": "^4.0.2",
+    "ts-node": "^8",
+    "typescript": "^3.3"
+  },
+  "engines": {
+    "node": ">=14.16.1"
+  },
+  "files": [
+    "/bin",
+    "/lib",
+    "/npm-shrinkwrap.json",
+    "/oclif.manifest.json"
+  ],
+  "homepage": "https://github.com/Joystream/joystream",
+  "keywords": [
+    "oclif"
+  ],
+  "license": "GPL-3.0-only",
+  "main": "lib/index.js",
+  "oclif": {
+    "commands": "./lib/commands",
+    "bin": "joystream-distributor",
+    "plugins": [
+      "@oclif/plugin-help"
+    ]
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/Joystream/joystream",
+    "directory": "distributor-node"
+  },
+  "scripts": {
+    "postpack": "rm -f oclif.manifest.json",
+    "prepack": "rm -rf lib && tsc -b && oclif-dev manifest && oclif-dev readme",
+    "test": "nyc --extension .ts mocha --forbid-only \"test/**/*.test.ts\"",
+    "version": "oclif-dev readme && git add README.md",
+    "generate:types:json-schema": "yarn ts-node ./src/services/validation/generateTypes.ts",
+    "generate:types:graphql": "yarn graphql-codegen -c ./src/services/networking/query-node/codegen.yml",
+    "generate:types:openapi": "yarn openapi-typescript ./src/api-spec/openapi.yml -o ./src/types/generated/OpenApi.ts -c ../prettierrc.js",
+    "generate:types:all": "yarn generate:types:json-schema && yarn generate:types:graphql && yarn generate:types:openapi",
+    "generate:api:storage-node": "yarn openapi-generator-cli generate -i ../storage-node-v2/src/api-spec/openapi.yaml -g typescript-axios -o ./src/services/networking/storage-node/generated",
+    "generate:api:distributor-node": "yarn openapi-generator-cli generate -i ./src/api-spec/openapi.yml -g typescript-axios -o ./src/services/networking/distributor-node/generated",
+    "generate:api:all": "yarn generate:api:storage-node && yarn generate:api:distributor-node",
+    "generate:all": "yarn generate:types:all && yarn generate:api:all",
+    "build": "rm -rf lib && tsc --build tsconfig.json && cp -r ./src/api-spec ./lib/api-spec",
+    "lint": "eslint ./src --ext .ts",
+    "format": "prettier ./ --write",
+    "checks": "tsc --noEmit --pretty && prettier ./ --check && yarn lint",
+    "cli": "./bin/run"
+  },
+  "types": "lib/index.d.ts"
+}
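The `generate:*` scripts defined above form the codegen pipeline: types from JSON schemas, GraphQL documents and the OpenAPI spec first, then the `typescript-axios` clients. A typical regeneration run, using only these scripts, would be:

```bash
cd distributor-node
yarn generate:all   # generate:types:all followed by generate:api:all
yarn build          # recompile lib/ and copy src/api-spec
```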

+ 16 - 0
distributor-node/scripts/data/family-metadata.json

@@ -0,0 +1,16 @@
+{
+  "region": "eu-west",
+  "description": "Western Europe",
+  "boundary": [
+    { "longitude": 0.935664253776034, "latitude": 61.70157919955392 },
+    { "longitude": 7.077063962609969, "latitude": 37.40179586925884 },
+    { "longitude": 27.46754964469303, "latitude": 32.88770433956931 },
+    { "longitude": 40.68423960078124, "latitude": 48.39367044189657 },
+    { "longitude": 32.14019766910849, "latitude": 54.63502471598309 },
+    { "longitude": 28.56450578831937, "latitude": 59.09093283322235 },
+    { "longitude": 30.75892533489921, "latitude": 70.1670216697313 },
+    { "longitude": 19.2385951319647, "latitude": 73.4978175093038 },
+    { "longitude": -9.158590783812665, "latitude": 67.80006125371919 },
+    { "longitude": 0.935664253776034, "latitude": 61.70157919955392 }
+  ]
+}

+ 12 - 0
distributor-node/scripts/data/operator-metadata.json

@@ -0,0 +1,12 @@
+{
+  "endpoint": "http://localhost:3334",
+  "location": {
+    "countryCode": "DE",
+    "city": "Berlin",
+    "coordinates": {
+      "latitude": 52.520008,
+      "longitude": 13.404954
+    }
+  },
+  "extra": "Some additional information"
+}

+ 17 - 0
distributor-node/scripts/init-bucket.sh

@@ -0,0 +1,17 @@
+#!/usr/bin/env bash
+
+SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")"
+cd $SCRIPT_PATH
+
+export AUTO_CONFIRM=true
+export CONFIG_PATH="../config.yml"
+CLI=../bin/run
+
+${CLI} dev:init
+${CLI} leader:set-buckets-per-bag-limit -l 10
+FAMILY_ID=`${CLI} leader:create-bucket-family`
+BUCKET_ID=`${CLI} leader:create-bucket -f ${FAMILY_ID} -a yes`
+${CLI} leader:update-bag -b static:council -f ${FAMILY_ID} -a ${BUCKET_ID}
+${CLI} leader:update-bucket-mode -f ${FAMILY_ID} -B ${BUCKET_ID} --mode on
+${CLI} leader:invite-bucket-operator -f ${FAMILY_ID} -B ${BUCKET_ID} -w 0
+${CLI} operator:accept-invitation -f ${FAMILY_ID} -B ${BUCKET_ID} -w 0

+ 36 - 0
distributor-node/scripts/test-commands.sh

@@ -0,0 +1,36 @@
+#!/usr/bin/env bash
+
+SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")"
+cd $SCRIPT_PATH
+
+export AUTO_CONFIRM=true
+export CONFIG_PATH="../config.yml"
+CLI=../bin/run
+
+${CLI} dev:init
+${CLI} leader:set-buckets-per-bag-limit -l 10
+FAMILY_ID=`${CLI} leader:create-bucket-family`
+BUCKET_ID=`${CLI} leader:create-bucket -f ${FAMILY_ID} -a yes`
+${CLI} leader:update-bag -b static:council -f ${FAMILY_ID} -a ${BUCKET_ID}
+${CLI} leader:update-bag -b static:wg:storage -f ${FAMILY_ID} -a ${BUCKET_ID}
+${CLI} leader:update-bag -b static:wg:content -f ${FAMILY_ID} -a ${BUCKET_ID}
+${CLI} leader:update-bag -b static:wg:operations -f ${FAMILY_ID} -a ${BUCKET_ID}
+${CLI} leader:update-bag -b static:wg:gateway -f ${FAMILY_ID} -a ${BUCKET_ID}
+${CLI} leader:update-bag -b static:wg:distribution -f ${FAMILY_ID} -a ${BUCKET_ID}
+${CLI} leader:update-bucket-status -f ${FAMILY_ID} -B ${BUCKET_ID}  --acceptingBags yes
+${CLI} leader:update-bucket-mode -f ${FAMILY_ID} -B ${BUCKET_ID} --mode on
+${CLI} leader:update-dynamic-bag-policy -t Member -p ${FAMILY_ID}:5
+${CLI} leader:update-dynamic-bag-policy -t Member
+${CLI} leader:invite-bucket-operator -f ${FAMILY_ID} -B ${BUCKET_ID} -w 0
+${CLI} leader:cancel-invitation -f ${FAMILY_ID} -B ${BUCKET_ID} -w 0
+${CLI} leader:invite-bucket-operator -f ${FAMILY_ID} -B ${BUCKET_ID} -w 0
+${CLI} operator:accept-invitation -f ${FAMILY_ID} -B ${BUCKET_ID} -w 0
+${CLI} operator:set-metadata -f ${FAMILY_ID} -B ${BUCKET_ID} -w 0 -i ./data/operator-metadata.json
+${CLI} leader:remove-bucket-operator -f ${FAMILY_ID} -B ${BUCKET_ID} -w 0
+${CLI} leader:set-bucket-family-metadata -f ${FAMILY_ID} -i ./data/family-metadata.json
+
+# Deletion commands tested separately, since bucket operator removal is not yet supported
+FAMILY_TO_DELETE_ID=`${CLI} leader:create-bucket-family`
+BUCKET_TO_DELETE_ID=`${CLI} leader:create-bucket -f ${FAMILY_TO_DELETE_ID} -a yes`
+${CLI} leader:delete-bucket -f ${FAMILY_TO_DELETE_ID} -B ${BUCKET_TO_DELETE_ID}
+${CLI} leader:delete-bucket-family -f ${FAMILY_TO_DELETE_ID}

+ 1 - 0
distributor-node/src/@types/@elastic/esc-winston-format/index.d.ts

@@ -0,0 +1 @@
+declare module '@elastic/ecs-winston-format'

+ 1 - 0
distributor-node/src/@types/js-image-generator/index.d.ts

@@ -0,0 +1 @@
+declare module 'js-image-generator'

+ 216 - 0
distributor-node/src/api-spec/openapi.yml

@@ -0,0 +1,216 @@
+openapi: 3.0.3
+info:
+  title: Distributor node API
+  description: Distributor node API
+  contact:
+    email: info@joystream.org
+  license:
+    name: GPL-3.0-only
+    url: https://spdx.org/licenses/GPL-3.0-only.html
+  version: 0.1.0
+externalDocs:
+  description: Distributor node API
+  url: https://github.com/Joystream/joystream/issues/2224
+servers:
+  - url: http://localhost:3334/api/v1/
+
+tags:
+  - name: public
+    description: Public distributor node API
+
+paths:
+  /status:
+    get:
+      operationId: public.status
+      description: Returns json object describing current node status.
+      tags:
+        - public
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/StatusResponse'
+        500:
+          description: Unexpected server error
+  /buckets:
+    get:
+      operationId: public.buckets
+      description: Returns list of distributed buckets
+      tags:
+        - public
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/BucketsResponse'
+        500:
+          description: Unexpected server error
+  /asset/{objectId}:
+    head:
+      operationId: public.assetHead
+      description: Returns asset response headers (cache status, content type and/or length, accepted ranges etc.)
+      tags:
+        - public
+      parameters:
+        - $ref: '#/components/parameters/ObjectId'
+      responses:
+        200:
+          description: Object is supported and should be sent on a GET request.
+          headers:
+            X-Cache:
+              $ref: '#/components/headers/X-Cache'
+        421:
+          description: Misdirected request. Data object not supported by the node.
+        404:
+          description: Data object does not exist.
+        500:
+          description: Unexpected server error
+    get:
+      operationId: public.asset
+      description: Returns a media file.
+      tags:
+        - public
+      parameters:
+        - $ref: '#/components/parameters/ObjectId'
+      responses:
+        200:
+          description: Full available object data sent
+          headers:
+            X-Cache:
+              $ref: '#/components/headers/X-Cache'
+            X-Data-Source:
+              $ref: '#/components/headers/X-Data-Source'
+          content:
+            image/*:
+              schema:
+                type: string
+                format: binary
+            audio/*:
+              schema:
+                type: string
+                format: binary
+            video/*:
+              schema:
+                type: string
+                format: binary
+        206:
+          description: Requested partial object data sent
+          headers:
+            X-Cache:
+              $ref: '#/components/headers/X-Cache'
+            X-Data-Source:
+              $ref: '#/components/headers/X-Data-Source'
+          content:
+            image/*:
+              schema:
+                type: string
+                format: binary
+            audio/*:
+              schema:
+                type: string
+                format: binary
+            video/*:
+              schema:
+                type: string
+                format: binary
+        421:
+          description: Misdirected request. Data object not supported.
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/ErrorResponse'
+        404:
+          description: Data object does not exist.
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/ErrorResponse'
+        500:
+          description: Unexpected server error
+
+components:
+  parameters:
+    ObjectId:
+      name: objectId
+      required: true
+      in: path
+      description: Data Object ID
+      schema:
+        type: string
+        pattern: \d+
+  headers:
+    X-Cache:
+      description: Describes cache status of an object.
+        Hit - object is already fully fetched in distributor node's cache.
+        Pending - object is still being fetched from the storage node.
+        Miss - object is neither in cache nor currently being fetched. Fetching from storage node may be triggered.
+      schema:
+        type: string
+        enum: ['hit', 'pending', 'miss']
+    X-Data-Source:
+      description: Describes the source of data stream.
+        External - the request was proxied to a storage node.
+        Local - the data is streamed from a local file.
+      schema:
+        type: string
+        enum: ['external', 'local']
+  schemas:
+    ErrorResponse:
+      type: object
+      required:
+        - message
+      properties:
+        type:
+          type: string
+        message:
+          type: string
+    StatusResponse:
+      type: object
+      required:
+        - id
+        - objectsInCache
+        - storageLimit
+        - storageUsed
+        - uptime
+        - downloadsInProgress
+      properties:
+        id:
+          type: string
+        objectsInCache:
+          type: integer
+          minimum: 0
+        storageLimit:
+          type: integer
+          minimum: 0
+        storageUsed:
+          type: integer
+          minimum: 0
+        uptime:
+          type: integer
+          minimum: 0
+        downloadsInProgress:
+          type: integer
+          minimum: 0
+    BucketsResponse:
+      oneOf:
+        - type: object
+          required:
+            - 'bucketIds'
+          properties:
+            bucketIds:
+              type: array
+              minItems: 1
+              items:
+                type: integer
+                minimum: 0
+        - type: object
+          required:
+            - 'allByWorkerId'
+          properties:
+            allByWorkerId:
+              type: integer
+              minimum: 0
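As a quick smoke test of the spec above, the public endpoints can be exercised with curl against a locally running node (port 3334 per the default config; the object id `1` is illustrative):

```bash
BASE=http://localhost:3334/api/v1

curl -s "$BASE/status"    # StatusResponse JSON
curl -s "$BASE/buckets"   # BucketsResponse JSON

# HEAD request exposes the X-Cache header (hit / pending / miss)
curl -sI "$BASE/asset/1"

# Partial content via a Range request (expect 206 on success)
curl -s -H "Range: bytes=0-1023" "$BASE/asset/1" -o first-kb.bin
```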

+ 128 - 0
distributor-node/src/app/index.ts

@@ -0,0 +1,128 @@
+import { ReadonlyConfig } from '../types'
+import { NetworkingService } from '../services/networking'
+import { LoggingService } from '../services/logging'
+import { StateCacheService } from '../services/cache/StateCacheService'
+import { ContentService } from '../services/content/ContentService'
+import { ServerService } from '../services/server/ServerService'
+import { Logger } from 'winston'
+import fs from 'fs'
+import nodeCleanup from 'node-cleanup'
+
+export class App {
+  private config: ReadonlyConfig
+  private content: ContentService
+  private stateCache: StateCacheService
+  private networking: NetworkingService
+  private server: ServerService
+  private logging: LoggingService
+  private logger: Logger
+
+  constructor(config: ReadonlyConfig) {
+    this.config = config
+    this.logging = LoggingService.withAppConfig(config)
+    this.stateCache = new StateCacheService(config, this.logging)
+    this.content = new ContentService(config, this.logging, this.stateCache)
+    this.networking = new NetworkingService(config, this.stateCache, this.logging)
+    this.server = new ServerService(config, this.stateCache, this.content, this.logging, this.networking)
+    this.logger = this.logging.createLogger('App')
+  }
+
+  private checkConfigDirectories(): void {
+    Object.entries(this.config.directories).forEach(([name, path]) => {
+      const dirInfo = `${name} directory (${path})`
+      if (!fs.existsSync(path)) {
+        try {
+          fs.mkdirSync(path, { recursive: true })
+        } catch (e) {
+          throw new Error(`${dirInfo} doesn't exist and cannot be created!`)
+        }
+      }
+      try {
+        fs.accessSync(path, fs.constants.R_OK)
+      } catch (e) {
+        throw new Error(`${dirInfo} is not readable`)
+      }
+      try {
+        fs.accessSync(path, fs.constants.W_OK)
+      } catch (e) {
+        throw new Error(`${dirInfo} is not writable`)
+      }
+    })
+  }
+
+  public async start(): Promise<void> {
+    this.logger.info('Starting the app')
+    this.checkConfigDirectories()
+    this.stateCache.load()
+    const dataObjects = await this.networking.fetchSupportedDataObjects()
+    await this.content.startupInit(dataObjects)
+    this.server.start()
+    nodeCleanup(this.exitHandler.bind(this))
+  }
+
+  private async exitGracefully(): Promise<void> {
+    // Async exit handler - ideally should not take more than 10 sec
+    // We can try to wait until some pending downloads are finished here etc.
+    this.logger.info('Graceful exit initialized')
+
+    // Stop accepting any new requests and save cache
+    this.server.stop()
+    this.stateCache.clearInterval()
+    this.stateCache.saveSync()
+
+    // Try to process remaining downloads
+    const MAX_RETRY_ATTEMPTS = 3
+    let retryCounter = 0
+    while (retryCounter < MAX_RETRY_ATTEMPTS && this.stateCache.getPendingDownloadsCount()) {
+      const pendingDownloadsCount = this.stateCache.getPendingDownloadsCount()
+      this.logger.info(`${pendingDownloadsCount} pending downloads in progress... Retrying exit in 5 sec...`, {
+        retryCounter,
+        pendingDownloadsCount,
+      })
+      await new Promise((resolve) => setTimeout(resolve, 5000))
+      this.stateCache.saveSync()
+      ++retryCounter
+    }
+
+    if (this.stateCache.getPendingDownloadsCount()) {
+      this.logger.warn('Limit reached: Could not finish all pending downloads.', {
+        pendingDownloadsCount: this.stateCache.getPendingDownloadsCount(),
+      })
+    }
+
+    this.logger.info('Graceful exit finished')
+    await this.logging.end()
+  }
+
+  private exitCritically(): void {
+    this.logger.info('Critical exit initialized')
+    // Handling exits due to an error - only some critical, synchronous work can be done here
+    this.server.stop()
+    this.stateCache.clearInterval()
+    this.stateCache.saveSync()
+    this.logger.info('Critical exit finished')
+  }
+
+  private exitHandler(exitCode: number | null, signal: string | null): boolean | undefined {
+    this.logger.info('Exiting')
+    this.stateCache.clearInterval()
+    if (signal) {
+      // Async exit can be executed
+      this.exitGracefully()
+        .then(() => {
+          process.kill(process.pid, signal)
+        })
+        .catch((err) => {
+          this.logger.error('Graceful exit error', { err })
+          this.logging.end().finally(() => {
+            process.kill(process.pid, signal)
+          })
+        })
+      nodeCleanup.uninstall()
+      return false
+    } else {
+      // Only synchronous work can be done here
+      this.exitCritically()
+    }
+  }
+}

+ 6 - 0
distributor-node/src/command-base/ExitCodes.ts

@@ -0,0 +1,6 @@
+enum ExitCodes {
+  OK = 0,
+  Error = 1,
+  ApiError = 200,
+}
+export = ExitCodes

+ 65 - 0
distributor-node/src/command-base/accounts.ts

@@ -0,0 +1,65 @@
+import ApiCommandBase from './api'
+import { AccountId } from '@polkadot/types/interfaces'
+import { Keyring } from '@polkadot/api'
+import { KeyringInstance, KeyringOptions, KeyringPair } from '@polkadot/keyring/types'
+import { CLIError } from '@oclif/errors'
+
+export const DEFAULT_ACCOUNT_TYPE = 'sr25519'
+export const KEYRING_OPTIONS: KeyringOptions = {
+  type: DEFAULT_ACCOUNT_TYPE,
+}
+
+/**
+ * Abstract base class for account-related commands.
+ */
+export default abstract class AccountsCommandBase extends ApiCommandBase {
+  private keyring!: KeyringInstance
+
+  isKeyAvailable(key: AccountId | string): boolean {
+    return this.keyring.getPairs().some((p) => p.address === key.toString())
+  }
+
+  getPairs(includeDevAccounts = true): KeyringPair[] {
+    return this.keyring.getPairs().filter((p) => includeDevAccounts || !p.meta.isTesting)
+  }
+
+  getPair(key: string): KeyringPair {
+    const pair = this.keyring.getPair(key)
+    if (!pair) {
+      throw new CLIError(`Required key for account ${key} is not available`)
+    }
+    return pair
+  }
+
+  async getDecodedPair(key: string): Promise<KeyringPair> {
+    // Just for Joystream CLI compatibility currently
+    return this.getPair(key)
+  }
+
+  initKeyring(): void {
+    this.keyring = new Keyring(KEYRING_OPTIONS)
+    this.appConfig.keys.forEach((suri) => this.keyring.addFromUri(suri))
+  }
+
+  async getDistributorLeadKey(): Promise<string> {
+    const currentLead = await this.api.query.distributionWorkingGroup.currentLead()
+    if (!currentLead.isSome) {
+      throw new CLIError('There is no active distributor working group lead currently')
+    }
+    const worker = await this.api.query.distributionWorkingGroup.workerById(currentLead.unwrap())
+    return worker.role_account_id.toString()
+  }
+
+  async getDistributorWorkerRoleKey(workerId: number): Promise<string> {
+    const worker = await this.api.query.distributionWorkingGroup.workerById(workerId)
+    if (!worker) {
+      throw new CLIError(`Worker not found by id: ${workerId}!`)
+    }
+    return worker.role_account_id.toString()
+  }
+
+  async init(): Promise<void> {
+    await super.init()
+    this.initKeyring()
+  }
+}

+ 40 - 0
distributor-node/src/command-base/api.ts

@@ -0,0 +1,40 @@
+import DefaultCommandBase from './default'
+import { CLIError } from '@oclif/errors'
+import { SubmittableResult } from '@polkadot/api'
+import { KeyringPair } from '@polkadot/keyring/types'
+import chalk from 'chalk'
+import { SubmittableExtrinsic } from '@polkadot/api/types'
+import { formatBalance } from '@polkadot/util'
+import { ExtrinsicFailedError, RuntimeApi } from '../services/networking/runtime/api'
+import ExitCodes from './ExitCodes'
+
+/**
+ * Abstract base class for commands that require access to the API.
+ */
+export default abstract class ApiCommandBase extends DefaultCommandBase {
+  protected api!: RuntimeApi
+
+  async init(): Promise<void> {
+    await super.init()
+    this.api = await RuntimeApi.create(this.logging, this.appConfig.endpoints.substrateNode)
+  }
+
+  async sendAndFollowTx(account: KeyringPair, tx: SubmittableExtrinsic<'promise'>): Promise<SubmittableResult> {
+    // Calculate fee and ask for confirmation
+    const fee = await this.api.estimateFee(account, tx)
+
+    await this.requireConfirmation(
+      `Tx fee of ${chalk.cyan(formatBalance(fee))} will be deducted from your account, do you confirm the transfer?`
+    )
+
+    try {
+      const res = await this.api.sendExtrinsic(account, tx)
+      return res
+    } catch (e) {
+      if (e instanceof ExtrinsicFailedError) {
+        throw new CLIError(`Extrinsic failed! ${e.message}`, { exit: ExitCodes.ApiError })
+      }
+      throw e
+    }
+  }
+}

+ 96 - 0
distributor-node/src/command-base/default.ts

@@ -0,0 +1,96 @@
+import Command, { flags as oclifFlags } from '@oclif/command'
+import inquirer from 'inquirer'
+import ExitCodes from './ExitCodes'
+import { ReadonlyConfig } from '../types/config'
+import { ConfigParserService } from '../services/parsers/ConfigParserService'
+import { LoggingService } from '../services/logging'
+import { Logger } from 'winston'
+import { BagIdParserService } from '../services/parsers/BagIdParserService'
+
+export const flags = {
+  ...oclifFlags,
+  integerArr: oclifFlags.build({
+    parse: (value: string) => {
+      const arr: number[] = value.split(',').map((v) => {
+        if (!/^-?\d+$/.test(v)) {
+          throw new Error(`Expected comma-separated integers, but received: ${value}`)
+        }
+        return parseInt(v)
+      })
+      return arr
+    },
+  }),
+  bagId: oclifFlags.build({
+    parse: (value: string) => {
+      const parser = new BagIdParserService()
+      return parser.parseBagId(value)
+    },
+    description: `Bag ID. Format: {bag_type}:{sub_type}:{id}.
+    - Bag types: 'static', 'dynamic'
+    - Sub types: 'static:council', 'static:wg', 'dynamic:member', 'dynamic:channel'
+    - Id:
+      - absent for 'static:council'
+      - working group name for 'static:wg'
+      - integer for 'dynamic:member' and 'dynamic:channel'
+    Examples:
+    - static:council
+    - static:wg:storage
+    - dynamic:member:4`,
+  }),
+}
+export default abstract class DefaultCommandBase extends Command {
+  protected appConfig!: ReadonlyConfig
+  protected logging!: LoggingService
+  protected autoConfirm!: boolean
+  private logger!: Logger
+
+  static flags = {
+    yes: flags.boolean({
+      required: false,
+      default: false,
+      description: 'Answer "yes" to any prompt, skipping any manual confirmations',
+      char: 'y',
+    }),
+    configPath: flags.string({
+      required: false,
+      default: process.env.CONFIG_PATH || './config.yml',
+      description: 'Path to config JSON/YAML file (relative to current working directory)',
+      char: 'c',
+    }),
+  }
+
+  async init(): Promise<void> {
+    const { configPath, yes } = this.parse(this.constructor as typeof DefaultCommandBase).flags
+    const configParser = new ConfigParserService()
+    this.appConfig = configParser.loadConfing(configPath) as ReadonlyConfig
+    this.logging = LoggingService.withCLIConfig()
+    this.logger = this.logging.createLogger('CLI')
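+    // Auto-confirmation can be enabled with the --yes flag or by setting AUTO_CONFIRM=true (or any non-zero integer)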
+    this.autoConfirm = !!(process.env.AUTO_CONFIRM === 'true' || parseInt(process.env.AUTO_CONFIRM || '') || yes)
+  }
+
+  public log(message: string, ...meta: unknown[]): void {
+    this.logger.info(message, ...meta)
+  }
+
+  public output(value: unknown): void {
+    console.log(value)
+  }
+
+  async requireConfirmation(
+    message = 'Are you sure you want to execute this action?',
+    defaultVal = false
+  ): Promise<void> {
+    if (this.autoConfirm) {
+      return
+    }
+    const { confirmed } = await inquirer.prompt([{ type: 'confirm', name: 'confirmed', message, default: defaultVal }])
+    if (!confirmed) {
+      this.exit(ExitCodes.OK)
+    }
+  }
+
+  async finally(err: any): Promise<void> {
+    if (!err) this.exit(ExitCodes.OK)
+    super.finally(err)
+  }
+}

+ 135 - 0
distributor-node/src/commands/dev/batchUpload.ts

@@ -0,0 +1,135 @@
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase, { flags } from '../../command-base/default'
+import { hash } from 'blake3'
+import { PublicApi, Configuration, TokenRequest } from '../../services/networking/storage-node/generated'
+import { u8aToHex } from '@polkadot/util'
+import * as multihash from 'multihashes'
+import FormData from 'form-data'
+import imgGen from 'js-image-generator'
+import { SubmittableExtrinsic } from '@polkadot/api/types'
+import { BagIdParserService } from '../../services/parsers/BagIdParserService'
+import axios from 'axios'
+
+async function generateRandomImage(): Promise<Buffer> {
+  return new Promise((resolve, reject) => {
+    imgGen.generateImage(10, 10, 80, function (err: any, image: any) {
+      if (err) {
+        reject(err)
+      } else {
+        resolve(image.data)
+      }
+    })
+  })
+}
+
+export default class DevBatchUpload extends AccountsCommandBase {
+  static flags = {
+    ...DefaultCommandBase.flags,
+    bagId: flags.string({
+      char: 'b',
+      required: true,
+    }),
+    bucketId: flags.integer({
+      char: 'B',
+      description: 'Distribution bucket id',
+      required: true,
+    }),
+    batchSize: flags.integer({
+      char: 'S',
+      required: true,
+    }),
+    batchesCount: flags.integer({
+      char: 'C',
+      required: true,
+    }),
+  }
+
+  async run(): Promise<void> {
+    const { api } = this
+    const { bagId, bucketId, batchSize, batchesCount } = this.parse(DevBatchUpload).flags
+    const sudoKey = (await api.query.sudo.key()).toHuman()
+    const dataFee = await api.query.storage.dataObjectPerMegabyteFee()
+    const storageApi = new PublicApi(
+      new Configuration({
+        basePath: 'http://127.0.0.1:3333/api/v1',
+        formDataCtor: FormData,
+      })
+    )
+
+    for (let i = 0; i < batchesCount; ++i) {
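+      // nextDataObjectId is read before sending the batch, so the ids of the newly created
+      // objects are expected to be nextObjectId, nextObjectId + 1, ..., nextObjectId + batchSize - 1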
+      const nextObjectId = (await api.query.storage.nextDataObjectId()).toNumber()
+      // Generate batch
+      const batch: [SubmittableExtrinsic<'promise'>, Buffer][] = []
+      for (let j = 0; j < batchSize; ++j) {
+        const dataObject = await generateRandomImage()
+        const dataHash = multihash.toB58String(multihash.encode(hash(dataObject) as Buffer, 'blake3'))
+        batch.push([
+          api.tx.sudo.sudo(
+            api.tx.storage.sudoUploadDataObjects({
+              deletionPrizeSourceAccountId: sudoKey,
+              objectCreationList: [
+                {
+                  Size: dataObject.byteLength,
+                  IpfsContentId: dataHash,
+                },
+              ],
+              expectedDataSizeFee: dataFee,
+              bagId: new BagIdParserService().parseBagId(bagId),
+            })
+          ),
+          dataObject,
+        ])
+      }
+      // Send batch
+      await this.sendAndFollowTx(this.getPair(sudoKey), api.tx.utility.batch(batch.map(([tx]) => tx)))
+
+      // Send storage node uploads
+      await Promise.all(
+        batch.map(async ([, dataObject], k) => {
+          const dataObjectId = nextObjectId + k
+          const data: TokenRequest['data'] = {
+            accountId: sudoKey,
+            bagId,
+            dataObjectId,
+            memberId: 0,
+            storageBucketId: bucketId,
+          }
+          const message = JSON.stringify(data)
+          const signature = u8aToHex(this.getPair(sudoKey).sign(message))
+          const {
+            data: { token },
+          } = await storageApi.publicApiAuthTokenForUploading({
+            data,
+            signature,
+          })
+          if (!token) {
+            throw new Error('Received empty token!')
+          }
+
+          const formData = new FormData()
+          formData.append('dataObjectId', dataObjectId.toString())
+          formData.append('storageBucketId', bucketId.toString())
+          formData.append('bagId', bagId)
+          formData.append('file', dataObject, { filename: 'test.jpg', knownLength: dataObject.byteLength })
+          this.log(`Uploading object ${dataObjectId}`)
+          try {
+            await axios({
+              method: 'POST',
+              url: 'http://127.0.0.1:3333/api/v1/files',
+              data: formData,
+              headers: {
+                'x-api-key': token,
+                'content-type': 'multipart/form-data',
+                ...formData.getHeaders(),
+              },
+            })
+          } catch (e) {
+            if (axios.isAxiosError(e)) {
+              console.log(e.response?.data)
+            }
+          }
+        })
+      )
+    }
+  }
+}

+ 93 - 0
distributor-node/src/commands/dev/init.ts

@@ -0,0 +1,93 @@
+import { MemberId } from '@joystream/types/members'
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase from '../../command-base/default'
+
+const ALICE = '5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY'
+
+export default class DevInit extends AccountsCommandBase {
+  static description = 'Initialize development environment. Sets Alice as distributor working group leader.'
+
+  static flags = {
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const { api } = this
+
+    if (!api.isDevelopment) {
+      this.error('Not connected to dev chain!')
+    }
+
+    const SudoKeyPair = this.getPair(ALICE)
+    const LeadKeyPair = this.getPair(ALICE)
+
+    // Create membership if not already created
+    const members = await api.query.members.memberIdsByControllerAccountId(LeadKeyPair.address)
+
+    let memberId: MemberId | undefined = members.toArray()[0]
+
+    if (memberId === undefined) {
+      const res = await this.api.sendExtrinsic(LeadKeyPair, api.tx.members.buyMembership(0, 'alice', null, null))
+      memberId = this.api.getEvent(res, 'members', 'MemberRegistered').data[0]
+    }
+
+    // Create a new lead opening.
+    const currentLead = await api.query.distributionWorkingGroup.currentLead()
+    if (currentLead.isSome) {
+      this.log('Distributor lead already exists, skipping...')
+      return
+    }
+
+    this.log(`Making member id: ${memberId} the distribution lead.`)
+
+    // Create curator lead opening
+    const addOpeningRes = await this.api.sendExtrinsic(
+      SudoKeyPair,
+      this.api.sudo(
+        api.tx.distributionWorkingGroup.addOpening(
+          { CurrentBlock: null },
+          { max_review_period_length: 9999 },
+          'dev distributor lead opening',
+          'Leader'
+        )
+      )
+    )
+
+    const openingAddedEvent = this.api.getEvent(addOpeningRes, 'distributionWorkingGroup', 'OpeningAdded')
+    const openingId = openingAddedEvent.data[0]
+
+    // Apply to lead opening
+    const applyRes = await this.api.sendExtrinsic(
+      LeadKeyPair,
+      this.api.tx.distributionWorkingGroup.applyOnOpening(
+        memberId, // member id
+        openingId, // opening id
+        LeadKeyPair.address, // address
+        null, // opt role stake
+        null, // opt appl. stake
+        'dev distributor lead application' // human_readable_text
+      )
+    )
+
+    const appliedEvent = this.api.getEvent(applyRes, 'distributionWorkingGroup', 'AppliedOnOpening')
+    const applicationId = appliedEvent.data[1]
+
+    // Begin review period
+    await this.api.sendExtrinsic(
+      SudoKeyPair,
+      this.api.sudo(this.api.tx.distributionWorkingGroup.beginApplicantReview(openingId))
+    )
+
+    // Fill opening
+    await this.api.sendExtrinsic(
+      SudoKeyPair,
+      this.api.sudo(
+        this.api.tx.distributionWorkingGroup.fillOpening(
+          openingId,
+          api.createType('ApplicationIdSet', [applicationId]),
+          null
+        )
+      )
+    )
+  }
+}

+ 38 - 0
distributor-node/src/commands/leader/cancel-invitation.ts

@@ -0,0 +1,38 @@
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase, { flags } from '../../command-base/default'
+
+export default class LeaderCancelInvitation extends AccountsCommandBase {
+  static description = `Cancel pending distribution bucket operator invitation.
+  Requires distribution working group leader permissions.`
+
+  static flags = {
+    bucketId: flags.integer({
+      char: 'B',
+      description: 'Distribution bucket id',
+      required: true,
+    }),
+    familyId: flags.integer({
+      char: 'f',
+      description: 'Distribution bucket family id',
+      required: true,
+    }),
+    workerId: flags.integer({
+      char: 'w',
+      description: 'ID of the invited operator (distribution group worker)',
+      required: true,
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const { bucketId, familyId, workerId } = this.parse(LeaderCancelInvitation).flags
+    const leadKey = await this.getDistributorLeadKey()
+
+    this.log(`Canceling distribution bucket operator invitation (bucket: ${bucketId}, worker: ${workerId})...`)
+    await this.sendAndFollowTx(
+      await this.getDecodedPair(leadKey),
+      this.api.tx.storage.cancelDistributionBucketOperatorInvite(familyId, bucketId, workerId)
+    )
+    this.log('Invitation successfully canceled!')
+  }
+}

+ 25 - 0
distributor-node/src/commands/leader/create-bucket-family.ts

@@ -0,0 +1,25 @@
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase from '../../command-base/default'
+
+export default class LeaderCreateBucketFamily extends AccountsCommandBase {
+  static description = `Create new distribution bucket family. Requires distribution working group leader permissions.`
+
+  static flags = {
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const leadKey = await this.getDistributorLeadKey()
+
+    this.log('Creating new distribution bucket family...')
+    const result = await this.sendAndFollowTx(
+      await this.getDecodedPair(leadKey),
+      this.api.tx.storage.createDistributionBucketFamily()
+    )
+    const event = this.api.getEvent(result, 'storage', 'DistributionBucketFamilyCreated')
+
+    this.log('Bucket family successfully created!')
+    const bucketFamilyId = event.data[0]
+    this.output(bucketFamilyId.toString())
+  }
+}

+ 38 - 0
distributor-node/src/commands/leader/create-bucket.ts

@@ -0,0 +1,38 @@
+import { flags } from '@oclif/command'
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase from '../../command-base/default'
+
+export default class LeaderCreateBucket extends AccountsCommandBase {
+  static description = `Create new distribution bucket. Requires distribution working group leader permissions.`
+
+  static flags = {
+    familyId: flags.integer({
+      char: 'f',
+      description: 'Distribution bucket family id',
+      required: true,
+    }),
+    acceptingBags: flags.enum({
+      char: 'a',
+      description: 'Whether the created bucket should accept new bags',
+      options: ['yes', 'no'],
+      default: 'no',
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const { familyId, acceptingBags } = this.parse(LeaderCreateBucket).flags
+    const leadKey = await this.getDistributorLeadKey()
+
+    this.log('Creating new distribution bucket...')
+    const result = await this.sendAndFollowTx(
+      await this.getDecodedPair(leadKey),
+      this.api.tx.storage.createDistributionBucket(familyId, acceptingBags === 'yes')
+    )
+    const event = this.api.getEvent(result, 'storage', 'DistributionBucketCreated')
+
+    this.log('Bucket successfully created!')
+    const bucketId = event.data[0]
+    this.output(bucketId.toString())
+  }
+}

+ 28 - 0
distributor-node/src/commands/leader/delete-bucket-family.ts

@@ -0,0 +1,28 @@
+import { flags } from '@oclif/command'
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase from '../../command-base/default'
+
+export default class LeaderDeleteBucketFamily extends AccountsCommandBase {
+  static description = `Delete distribution bucket family. Requires distribution working group leader permissions.`
+
+  static flags = {
+    familyId: flags.integer({
+      char: 'f',
+      description: 'Distribution bucket family id',
+      required: true,
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const { familyId } = this.parse(LeaderDeleteBucketFamily).flags
+    const leadKey = await this.getDistributorLeadKey()
+
+    this.log(`Deleting distribution bucket family (${familyId})...`)
+    await this.sendAndFollowTx(
+      await this.getDecodedPair(leadKey),
+      this.api.tx.storage.deleteDistributionBucketFamily(familyId)
+    )
+    this.log('Bucket family successfully deleted!')
+  }
+}

+ 33 - 0
distributor-node/src/commands/leader/delete-bucket.ts

@@ -0,0 +1,33 @@
+import { flags } from '@oclif/command'
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase from '../../command-base/default'
+
+export default class LeaderDeleteBucket extends AccountsCommandBase {
+  static description = `Delete distribution bucket. The bucket must have no operators. Requires distribution working group leader permissions.`
+
+  static flags = {
+    bucketId: flags.integer({
+      char: 'B',
+      description: 'Distribution bucket id',
+      required: true,
+    }),
+    familyId: flags.integer({
+      char: 'f',
+      description: 'Distribution bucket family id',
+      required: true,
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const { bucketId, familyId } = this.parse(LeaderDeleteBucket).flags
+    const leadKey = await this.getDistributorLeadKey()
+
+    this.log(`Deleting distribution bucket (${bucketId})...`)
+    await this.sendAndFollowTx(
+      await this.getDecodedPair(leadKey),
+      this.api.tx.storage.deleteDistributionBucket(familyId, bucketId)
+    )
+    this.log('Bucket successfully deleted!')
+  }
+}

+ 39 - 0
distributor-node/src/commands/leader/invite-bucket-operator.ts

@@ -0,0 +1,39 @@
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase, { flags } from '../../command-base/default'
+
+export default class LeaderInviteBucketOperator extends AccountsCommandBase {
+  static description = `Invite distribution bucket operator (distribution group worker).
+  The specified bucket must not have any operator currently.
+  Requires distribution working group leader permissions.`
+
+  static flags = {
+    bucketId: flags.integer({
+      char: 'B',
+      description: 'Distribution bucket id',
+      required: true,
+    }),
+    familyId: flags.integer({
+      char: 'f',
+      description: 'Distribution bucket family id',
+      required: true,
+    }),
+    workerId: flags.integer({
+      char: 'w',
+      description: 'ID of the distribution group worker to invite as bucket operator',
+      required: true,
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const { bucketId, familyId, workerId } = this.parse(LeaderInviteBucketOperator).flags
+    const leadKey = await this.getDistributorLeadKey()
+
+    this.log(`Inviting distribution bucket operator (bucket: ${bucketId}, worker: ${workerId})...`)
+    await this.sendAndFollowTx(
+      await this.getDecodedPair(leadKey),
+      this.api.tx.storage.inviteDistributionBucketOperator(familyId, bucketId, workerId)
+    )
+    this.log('Bucket operator successfully invited!')
+  }
+}

+ 38 - 0
distributor-node/src/commands/leader/remove-bucket-operator.ts

@@ -0,0 +1,38 @@
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase, { flags } from '../../command-base/default'
+
+export default class LeaderRemoveBucketOperator extends AccountsCommandBase {
+  static description = `Remove distribution bucket operator (distribution group worker).
+  Requires distribution working group leader permissions.`
+
+  static flags = {
+    bucketId: flags.integer({
+      char: 'B',
+      description: 'Distribution bucket id',
+      required: true,
+    }),
+    familyId: flags.integer({
+      char: 'f',
+      description: 'Distribution bucket family id',
+      required: true,
+    }),
+    workerId: flags.integer({
+      char: 'w',
+      description: 'ID of the operator (distribution working group worker) to remove from the bucket',
+      required: true,
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const { bucketId, familyId, workerId } = this.parse(LeaderRemoveBucketOperator).flags
+    const leadKey = await this.getDistributorLeadKey()
+
+    this.log(`Removing distribution bucket operator (bucket: ${bucketId}, worker: ${workerId})...`)
+    await this.sendAndFollowTx(
+      await this.getDecodedPair(leadKey),
+      this.api.tx.storage.removeDistributionBucketOperator(familyId, bucketId, workerId)
+    )
+    this.log('Bucket operator successfully removed!')
+  }
+}

+ 45 - 0
distributor-node/src/commands/leader/set-bucket-family-metadata.ts

@@ -0,0 +1,45 @@
+import fs from 'fs'
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase, { flags } from '../../command-base/default'
+import { ValidationService } from '../../services/validation/ValidationService'
+import { DistributionBucketFamilyMetadata, IDistributionBucketFamilyMetadata } from '@joystream/metadata-protobuf'
+
+export default class LeaderSetBucketFamilyMetadata extends AccountsCommandBase {
+  static description = `Set/update distribution bucket family metadata.
+  Requires distribution working group leader permissions.`
+
+  static flags = {
+    familyId: flags.integer({
+      char: 'f',
+      description: 'Distribution bucket family id',
+      required: true,
+    }),
+    input: flags.string({
+      char: 'i',
+      description: 'Path to JSON metadata file',
+      required: true,
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const { familyId, input } = this.parse(LeaderSetBucketFamilyMetadata).flags
+    const leadKey = await this.getDistributorLeadKey()
+
+    const validation = new ValidationService()
+    const metadata: IDistributionBucketFamilyMetadata = validation.validate(
+      'FamilyMetadata',
+      JSON.parse(fs.readFileSync(input).toString())
+    )
+
+    this.log(`Setting bucket family metadata (family: ${familyId})`, metadata)
+    await this.sendAndFollowTx(
+      await this.getDecodedPair(leadKey),
+      this.api.tx.storage.setDistributionBucketFamilyMetadata(
+        familyId,
+        '0x' + Buffer.from(DistributionBucketFamilyMetadata.encode(metadata).finish()).toString('hex')
+      )
+    )
+    this.log('Bucket family metadata successfully set/updated!')
+  }
+}

+ 28 - 0
distributor-node/src/commands/leader/set-buckets-per-bag-limit.ts

@@ -0,0 +1,28 @@
+import { flags } from '@oclif/command'
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase from '../../command-base/default'
+
+export default class LeaderSetBucketsPerBagLimit extends AccountsCommandBase {
+  static description = `Set max. distribution buckets per bag limit. Requires distribution working group leader permissions.`
+
+  static flags = {
+    limit: flags.integer({
+      char: 'l',
+      description: 'New limit value',
+      required: true,
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const { limit } = this.parse(LeaderSetBucketsPerBagLimit).flags
+    const leadKey = await this.getDistributorLeadKey()
+
+    this.log(`Setting new buckets per bag limit (${limit})...`)
+    await this.sendAndFollowTx(
+      await this.getDecodedPair(leadKey),
+      this.api.tx.storage.updateDistributionBucketsPerBagLimit(limit)
+    )
+    this.log('Limit successfully updated!')
+  }
+}

+ 54 - 0
distributor-node/src/commands/leader/update-bag.ts

@@ -0,0 +1,54 @@
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase, { flags } from '../../command-base/default'
+
+export default class LeaderUpdateBag extends AccountsCommandBase {
+  static description = 'Add/remove distribution buckets from a bag.'
+
+  static flags = {
+    bagId: flags.bagId({
+      char: 'b',
+      required: true,
+    }),
+    familyId: flags.integer({
+      char: 'f',
+      description: 'ID of the distribution bucket family',
+      required: true,
+    }),
+    add: flags.integerArr({
+      char: 'a',
+      description: 'ID of a bucket to add to bag',
+      default: [],
+      multiple: true,
+    }),
+    remove: flags.integerArr({
+      char: 'r',
+      description: 'ID of a bucket to remove from bag',
+      default: [],
+      multiple: true,
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  static examples = [`$ joystream-distributor leader:update-bag -b 1 -f 1 -a 1 -a 2 -a 3 -r 4 -r 5`]
+
+  async run(): Promise<void> {
+    const { bagId, familyId, add, remove } = this.parse(LeaderUpdateBag).flags
+    const leadKey = await this.getDistributorLeadKey()
+
+    this.log(
+      `Updating distribution buckets for bag ${bagId} (adding: ${add.join(',') || 'NONE'}, removing: ${
+        remove.join(',') || 'NONE'
+      })...`
+    )
+    await this.sendAndFollowTx(
+      await this.getDecodedPair(leadKey),
+      this.api.tx.storage.updateDistributionBucketsForBag(
+        bagId,
+        familyId,
+        this.api.createType('DistributionBucketIdSet', add),
+        this.api.createType('DistributionBucketIdSet', remove)
+      )
+    )
+    this.log('Bag successfully updated!')
+  }
+}

+ 38 - 0
distributor-node/src/commands/leader/update-bucket-mode.ts

@@ -0,0 +1,38 @@
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase, { flags } from '../../command-base/default'
+
+export default class LeaderUpdateBucketMode extends AccountsCommandBase {
+  static description = `Update distribution bucket mode ("distributing" flag). Requires distribution working group leader permissions.`
+
+  static flags = {
+    bucketId: flags.integer({
+      char: 'B',
+      description: 'Distribution bucket id',
+      required: true,
+    }),
+    familyId: flags.integer({
+      char: 'f',
+      description: 'Distribution bucket family id',
+      required: true,
+    }),
+    mode: flags.enum<'on' | 'off'>({
+      char: 'd',
+      description: 'Whether the bucket should be "on" (distributing) or "off" (not distributing)',
+      required: true,
+      options: ['on', 'off'],
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const { bucketId, familyId, mode } = this.parse(LeaderUpdateBucketMode).flags
+    const leadKey = await this.getDistributorLeadKey()
+
+    this.log(`Updating distribution bucket mode (${bucketId}, distributing: ${mode})...`)
+    await this.sendAndFollowTx(
+      await this.getDecodedPair(leadKey),
+      this.api.tx.storage.updateDistributionBucketMode(familyId, bucketId, mode === 'on')
+    )
+    this.log('Bucket mode successfully updated!')
+  }
+}

+ 39 - 0
distributor-node/src/commands/leader/update-bucket-status.ts

@@ -0,0 +1,39 @@
+import { flags } from '@oclif/command'
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase from '../../command-base/default'
+
+export default class LeaderUpdateBucketStatus extends AccountsCommandBase {
+  static description = `Update distribution bucket status ("acceptingNewBags" flag). Requires distribution working group leader permissions.`
+
+  static flags = {
+    bucketId: flags.integer({
+      char: 'B',
+      description: 'Distribution bucket id',
+      required: true,
+    }),
+    familyId: flags.integer({
+      char: 'f',
+      description: 'Distribution bucket family id',
+      required: true,
+    }),
+    acceptingBags: flags.enum<'yes' | 'no'>({
+      char: 'a',
+      description: 'Whether the bucket should accept new bags',
+      options: ['yes', 'no'],
+      required: true,
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const { bucketId, familyId, acceptingBags } = this.parse(LeaderUpdateBucketStatus).flags
+    const leadKey = await this.getDistributorLeadKey()
+
+    this.log(`Updating distribution bucket status (${bucketId}, acceptingNewBags: ${acceptingBags})...`)
+    await this.sendAndFollowTx(
+      await this.getDecodedPair(leadKey),
+      this.api.tx.storage.updateDistributionBucketStatus(familyId, bucketId, acceptingBags === 'yes')
+    )
+    this.log('Bucket status successfully updated!')
+  }
+}

+ 53 - 0
distributor-node/src/commands/leader/update-dynamic-bag-policy.ts

@@ -0,0 +1,53 @@
+import { flags } from '@oclif/command'
+import { DynamicBagTypeKey } from '@joystream/types/storage'
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase from '../../command-base/default'
+
+export default class LeaderUpdateDynamicBagPolicy extends AccountsCommandBase {
+  static description = `Update dynamic bag creation policy (number of buckets by family that should store given dynamic bag type).
+    Requires distribution working group leader permissions.`
+
+  static flags = {
+    type: flags.enum<DynamicBagTypeKey>({
+      char: 't',
+      description: 'Dynamic bag type',
+      options: ['Member', 'Channel'],
+      required: true,
+    }),
+    policy: flags.build({
+      parse: (v) => {
+        const pair = v.split(':')
+        if (pair.length !== 2 || !/^\d+$/.test(pair[0]) || !/^\d+$/.test(pair[1])) {
+          throw new Error(`Expected {familyId}:{numberOfBuckets} pair, got: ${v}`)
+        }
+        return [parseInt(pair[0]), parseInt(pair[1])] as [number, number]
+      },
+    })({
+      char: 'p',
+      description: 'Key-value pair of {familyId}:{numberOfBuckets}',
+      multiple: true,
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  static examples = [`$ joystream-distributor leader:update-dynamic-bag-policy -t Member -p 1:5 -p 2:10 -p 3:5`]
+
+  async run(): Promise<void> {
+    const { type, policy } = this.parse(LeaderUpdateDynamicBagPolicy).flags
+    const leadKey = await this.getDistributorLeadKey()
+
+    this.log(`Updating dynamic bag policy (${type})...`)
+    await this.sendAndFollowTx(
+      await this.getDecodedPair(leadKey),
+      this.api.tx.storage.updateFamiliesInDynamicBagCreationPolicy(
+        type,
+        // FIXME: https://github.com/polkadot-js/api/pull/3789
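+        // Entries are pre-sorted by family id - presumably because the BTreeMap type expects keys in ascending order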
+        this.api.createType(
+          'DynamicBagCreationPolicyDistributorFamiliesMap',
+          new Map((policy || []).sort(([keyA], [keyB]) => keyA - keyB))
+        )
+      )
+    )
+    this.log('Dynamic bag creation policy successfully updated!')
+  }
+}

+ 38 - 0
distributor-node/src/commands/operator/accept-invitation.ts

@@ -0,0 +1,38 @@
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase, { flags } from '../../command-base/default'
+
+export default class OperatorAcceptInvitation extends AccountsCommandBase {
+  static description = `Accept pending distribution bucket operator invitation.
+  Requires the invited distribution group worker role key.`
+
+  static flags = {
+    bucketId: flags.integer({
+      char: 'B',
+      description: 'Distribution bucket id',
+      required: true,
+    }),
+    familyId: flags.integer({
+      char: 'f',
+      description: 'Distribution bucket family id',
+      required: true,
+    }),
+    workerId: flags.integer({
+      char: 'w',
+      description: 'ID of the invited operator (distribution group worker)',
+      required: true,
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const { bucketId, familyId, workerId } = this.parse(OperatorAcceptInvitation).flags
+    const workerKey = await this.getDistributorWorkerRoleKey(workerId)
+
+    this.log(`Accepting distribution bucket operator invitation (bucket: ${bucketId}, worker: ${workerId})...`)
+    await this.sendAndFollowTx(
+      await this.getDecodedPair(workerKey),
+      this.api.tx.storage.acceptDistributionBucketInvitation(workerId, familyId, bucketId)
+    )
+    this.log('Invitation successfully accepted!')
+  }
+}

+ 61 - 0
distributor-node/src/commands/operator/set-metadata.ts

@@ -0,0 +1,61 @@
+import fs from 'fs'
+import AccountsCommandBase from '../../command-base/accounts'
+import DefaultCommandBase, { flags } from '../../command-base/default'
+import { ValidationService } from '../../services/validation/ValidationService'
+import { DistributionBucketOperatorMetadata, IDistributionBucketOperatorMetadata } from '@joystream/metadata-protobuf'
+
+export default class OperatorSetMetadata extends AccountsCommandBase {
+  static description = `Set/update distribution bucket operator metadata.
+  Requires active distribution bucket operator worker role key.`
+
+  static flags = {
+    bucketId: flags.integer({
+      char: 'B',
+      description: 'Distribution bucket id',
+      required: true,
+    }),
+    familyId: flags.integer({
+      char: 'f',
+      description: 'Distribution bucket family id',
+      required: true,
+    }),
+    workerId: flags.integer({
+      char: 'w',
+      description: 'ID of the operator (distribution group worker)',
+      required: true,
+    }),
+    endpoint: flags.string({
+      char: 'e',
+      description: 'Root distribution node endpoint',
+      exclusive: ['input'],
+    }),
+    input: flags.string({
+      char: 'i',
+      description: 'Path to JSON metadata file',
+      exclusive: ['endpoint'],
+    }),
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const { bucketId, familyId, workerId, input, endpoint } = this.parse(OperatorSetMetadata).flags
+    const workerKey = await this.getDistributorWorkerRoleKey(workerId)
+
+    const validation = new ValidationService()
+    const metadata: IDistributionBucketOperatorMetadata = input
+      ? validation.validate('OperatorMetadata', JSON.parse(fs.readFileSync(input).toString()))
+      : { endpoint }
+
+    this.log(`Setting bucket operator metadata (bucket: ${bucketId}, worker: ${workerId})...`, metadata)
+    await this.sendAndFollowTx(
+      await this.getDecodedPair(workerKey),
+      this.api.tx.storage.setDistributionOperatorMetadata(
+        workerId,
+        familyId,
+        bucketId,
+        '0x' + Buffer.from(DistributionBucketOperatorMetadata.encode(metadata).finish()).toString('hex')
+      )
+    )
+    this.log('Bucket operator metadata successfully set/updated!')
+  }
+}

+ 19 - 0
distributor-node/src/commands/start.ts

@@ -0,0 +1,19 @@
+import DefaultCommandBase from '../command-base/default'
+import { App } from '../app'
+
+export default class StartNode extends DefaultCommandBase {
+  static description = 'Start the node'
+
+  static flags = {
+    ...DefaultCommandBase.flags,
+  }
+
+  async run(): Promise<void> {
+    const app = new App(this.appConfig)
+    app.start()
+  }
+
+  async finally(): Promise<void> {
+    /* Do nothing */
+  }
+}

+ 1 - 0
distributor-node/src/index.ts

@@ -0,0 +1 @@
+export { run } from '@oclif/command'

+ 305 - 0
distributor-node/src/services/cache/StateCacheService.ts

@@ -0,0 +1,305 @@
+import { Logger } from 'winston'
+import { ReadonlyConfig, StorageNodeDownloadResponse } from '../../types'
+import { LoggingService } from '../logging'
+import _ from 'lodash'
+import fs from 'fs'
+
+// LRU-SP cache parameters
+// Since size is in KB, these parameters should be enough for grouping objects of size up to 2^24 KB = 16 GB
+// TODO: Introduce MAX_CACHED_ITEM_SIZE and skip caching for large objects entirely? (i.e. 10 GB objects)
+export const CACHE_GROUP_LOG_BASE = 2
+export const CACHE_GROUPS_COUNT = 24
+
+type PendingDownloadStatus = 'Waiting' | 'LookingForSource' | 'Downloading'
+
+export interface PendingDownloadData {
+  objectSize: number
+  status: PendingDownloadStatus
+  promise: Promise<StorageNodeDownloadResponse>
+}
+
+export interface StorageNodeEndpointData {
+  last10ResponseTimes: number[]
+}
+
+export interface CacheItemData {
+  sizeKB: number
+  popularity: number
+  lastAccessTime: number
+}
+
+export class StateCacheService {
+  private logger: Logger
+  private config: ReadonlyConfig
+  private cacheFilePath: string
+  private saveInterval: NodeJS.Timeout
+
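+  // Volatile state - never persisted. groupNumberByContentHash is rebuilt from the stored
+  // LRU groups on load(), contentHashByObjectId is repopulated on node startup.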
+  private memoryState = {
+    pendingDownloadsByContentHash: new Map<string, PendingDownloadData>(),
+    contentHashByObjectId: new Map<string, string>(),
+    storageNodeEndpointDataByEndpoint: new Map<string, StorageNodeEndpointData>(),
+    groupNumberByContentHash: new Map<string, number>(),
+  }
+
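+  // State persisted to the cache file by save() / saveSync() and restored by load()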
+  private storedState = {
+    lruCacheGroups: Array.from({ length: CACHE_GROUPS_COUNT }).map(() => new Map<string, CacheItemData>()),
+    mimeTypeByContentHash: new Map<string, string>(),
+  }
+
+  public constructor(config: ReadonlyConfig, logging: LoggingService, saveIntervalMs = 60 * 1000) {
+    this.logger = logging.createLogger('StateCacheService')
+    this.cacheFilePath = `${config.directories.cache}/cache.json`
+    this.config = config
+    this.saveInterval = setInterval(() => this.save(), saveIntervalMs)
+  }
+
+  public setContentMimeType(contentHash: string, mimeType: string): void {
+    this.storedState.mimeTypeByContentHash.set(contentHash, mimeType)
+  }
+
+  public getContentMimeType(contentHash: string): string | undefined {
+    return this.storedState.mimeTypeByContentHash.get(contentHash)
+  }
+
+  public setObjectContentHash(objectId: string, hash: string): void {
+    this.memoryState.contentHashByObjectId.set(objectId, hash)
+  }
+
+  public getObjectContentHash(objectId: string): string | undefined {
+    return this.memoryState.contentHashByObjectId.get(objectId)
+  }
+
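+  // LRU-SP grouping: an item belongs to group ceil(log2(sizeKB / popularity)), clamped to
+  // [0, CACHE_GROUPS_COUNT - 1]. Example: a 1024 KB object with popularity 1 lands in group 10;
+  // as popularity grows, subsequent accesses migrate it to lower-numbered groups.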
+  private calcCacheGroup({ sizeKB, popularity }: CacheItemData) {
+    return Math.min(
+      Math.max(Math.ceil(Math.log(sizeKB / popularity) / Math.log(CACHE_GROUP_LOG_BASE)), 0),
+      CACHE_GROUPS_COUNT - 1
+    )
+  }
+
+  public getCachedContentHashes(): string[] {
+    let hashes: string[] = []
+    for (const [, group] of this.storedState.lruCacheGroups.entries()) {
+      hashes = hashes.concat(Array.from(group.keys()))
+    }
+    return hashes
+  }
+
+  public getCachedContentLength(): number {
+    return this.storedState.lruCacheGroups.reduce((a, b) => a + b.size, 0)
+  }
+
+  public newContent(contentHash: string, sizeInBytes: number): void {
+    const { groupNumberByContentHash } = this.memoryState
+    const { lruCacheGroups } = this.storedState
+    if (groupNumberByContentHash.has(contentHash)) {
+      this.logger.warn('newContent was called for content that already exists, ignoring the call', { contentHash })
+      return
+    }
+    const cacheItemData: CacheItemData = {
+      popularity: 1,
+      lastAccessTime: Date.now(),
+      sizeKB: Math.ceil(sizeInBytes / 1024),
+    }
+    const groupNumber = this.calcCacheGroup(cacheItemData)
+    groupNumberByContentHash.set(contentHash, groupNumber)
+    lruCacheGroups[groupNumber].set(contentHash, cacheItemData)
+  }
+
+  public peekContent(contentHash: string): CacheItemData | undefined {
+    const groupNumber = this.memoryState.groupNumberByContentHash.get(contentHash)
+    if (groupNumber !== undefined) {
+      return this.storedState.lruCacheGroups[groupNumber].get(contentHash)
+    }
+  }
+
+  public useContent(contentHash: string): void {
+    const { groupNumberByContentHash } = this.memoryState
+    const { lruCacheGroups } = this.storedState
+    const groupNumber = groupNumberByContentHash.get(contentHash)
+    if (groupNumber === undefined) {
+      this.logger.warn('groupNumberByContentHash missing when trying to update LRU of content', { contentHash })
+      return
+    }
+    const group = lruCacheGroups[groupNumber]
+    const cacheItemData = group.get(contentHash)
+    if (!cacheItemData) {
+      this.logger.warn('Cache inconsistency: item missing in the group indicated by the groupNumberByContentHash map!', {
+        contentHash,
+        groupNumber,
+      })
+      groupNumberByContentHash.delete(contentHash)
+      return
+    }
+    cacheItemData.lastAccessTime = Date.now()
+    ++cacheItemData.popularity
+    // Move object to the top of the current group / new group
+    const targetGroupNumber = this.calcCacheGroup(cacheItemData)
+    const targetGroup = lruCacheGroups[targetGroupNumber]
+    group.delete(contentHash)
+    targetGroup.set(contentHash, cacheItemData)
+    if (targetGroupNumber !== groupNumber) {
+      groupNumberByContentHash.set(contentHash, targetGroupNumber)
+    }
+  }
+
+  public getCacheEvictCandidateHash(): string | null {
+    let highestCost = 0
+    let bestCandidate: string | null = null
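+    // A JS Map iterates in insertion order and useContent() re-inserts items on access,
+    // so the first entry of each group is that group's least-recently-used item.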
+    for (const group of this.storedState.lruCacheGroups) {
+      const lastItemInGroup = Array.from(group.entries())[0]
+      if (lastItemInGroup) {
+        const [contentHash, objectData] = lastItemInGroup
+        const elapsedSinceLastAccessed = Math.ceil((Date.now() - objectData.lastAccessTime) / 60_000)
+        const itemCost = (elapsedSinceLastAccessed * objectData.sizeKB) / objectData.popularity
+        if (itemCost >= highestCost) {
+          highestCost = itemCost
+          bestCandidate = contentHash
+        }
+      }
+    }
+    return bestCandidate
+  }
+
+  public newPendingDownload(
+    contentHash: string,
+    objectSize: number,
+    promise: Promise<StorageNodeDownloadResponse>
+  ): PendingDownloadData {
+    const pendingDownload: PendingDownloadData = {
+      status: 'Waiting',
+      objectSize,
+      promise,
+    }
+    this.memoryState.pendingDownloadsByContentHash.set(contentHash, pendingDownload)
+    return pendingDownload
+  }
+
+  public getPendingDownloadsCount(): number {
+    return this.memoryState.pendingDownloadsByContentHash.size
+  }
+
+  public getPendingDownload(contentHash: string): PendingDownloadData | undefined {
+    return this.memoryState.pendingDownloadsByContentHash.get(contentHash)
+  }
+
+  public dropPendingDownload(contentHash: string): void {
+    this.memoryState.pendingDownloadsByContentHash.delete(contentHash)
+  }
+
+  public dropByHash(contentHash: string): void {
+    this.logger.debug('Dropping all state by content hash', { contentHash })
+    this.storedState.mimeTypeByContentHash.delete(contentHash)
+    this.memoryState.pendingDownloadsByContentHash.delete(contentHash)
+    const cacheGroupNumber = this.memoryState.groupNumberByContentHash.get(contentHash)
+    this.logger.debug('Cache group by hash established', { contentHash, cacheGroupNumber })
+    if (cacheGroupNumber !== undefined) {
+      this.memoryState.groupNumberByContentHash.delete(contentHash)
+      this.storedState.lruCacheGroups[cacheGroupNumber].delete(contentHash)
+    }
+  }
+
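+  // Keeps a sliding window of the last 10 response times per endpoint, used to rank
+  // storage nodes by their mean response time.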
+  public setStorageNodeEndpointResponseTime(endpoint: string, time: number): void {
+    const data = this.memoryState.storageNodeEndpointDataByEndpoint.get(endpoint) || { last10ResponseTimes: [] }
+    if (data.last10ResponseTimes.length === 10) {
+      data.last10ResponseTimes.shift()
+    }
+    data.last10ResponseTimes.push(time)
+    if (!this.memoryState.storageNodeEndpointDataByEndpoint.has(endpoint)) {
+      this.memoryState.storageNodeEndpointDataByEndpoint.set(endpoint, data)
+    }
+  }
+
+  public getStorageNodeEndpointMeanResponseTime(endpoint: string, max = 99999): number {
+    const data = this.memoryState.storageNodeEndpointDataByEndpoint.get(endpoint)
+    return _.mean(data?.last10ResponseTimes || [max])
+  }
+
+  public getStorageNodeEndpointsMeanResponseTimes(max = 99999): [string, number][] {
+    return Array.from(this.memoryState.storageNodeEndpointDataByEndpoint.keys()).map((endpoint) => [
+      endpoint,
+      this.getStorageNodeEndpointMeanResponseTime(endpoint, max),
+    ])
+  }
+
+  private serializeData() {
+    const { lruCacheGroups, mimeTypeByContentHash } = this.storedState
+    return JSON.stringify(
+      {
+        lruCacheGroups: lruCacheGroups.map((g) => Array.from(g.entries())),
+        mimeTypeByContentHash: Array.from(mimeTypeByContentHash.entries()),
+      },
+      null,
+      2 // TODO: Only for debugging
+    )
+  }
+
+  public async save(): Promise<boolean> {
+    return new Promise((resolve) => {
+      const serialized = this.serializeData()
+      const fd = fs.openSync(this.cacheFilePath, 'w')
+      fs.write(fd, serialized, (err) => {
+        fs.closeSync(fd)
+        if (err) {
+          this.logger.error('Cache file save error', { err })
+          resolve(false)
+        } else {
+          this.logger.verbose('Cache file updated')
+          resolve(true)
+        }
+      })
+    })
+  }
+
+  public saveSync(): void {
+    const serialized = this.serializeData()
+    fs.writeFileSync(this.cacheFilePath, serialized)
+  }
+
+  private loadGroupNumberByContentHashMap() {
+    const contentHashes = _.uniq(this.getCachedContentHashes())
+    const { lruCacheGroups: groups } = this.storedState
+    const { groupNumberByContentHash } = this.memoryState
+
+    contentHashes.forEach((contentHash) => {
+      groups.forEach((group, groupNumber) => {
+        if (group.has(contentHash)) {
+          if (!groupNumberByContentHash.has(contentHash)) {
+            groupNumberByContentHash.set(contentHash, groupNumber)
+          } else {
+            // Content duplicated in multiple groups - remove!
+            this.logger.warn(
+              `Content hash ${contentHash} was found in multiple LRU cache groups. Removing from group ${groupNumber}...`,
+              { firstGroup: groupNumberByContentHash.get(contentHash), currentGroup: groupNumber }
+            )
+            group.delete(contentHash)
+          }
+        }
+      })
+    })
+  }
+
+  public load(): void {
+    if (fs.existsSync(this.cacheFilePath)) {
+      this.logger.info('Loading cache from file', { file: this.cacheFilePath })
+      try {
+        const fileContent = JSON.parse(fs.readFileSync(this.cacheFilePath).toString())
+        ;((fileContent.lruCacheGroups || []) as Array<Array<[string, CacheItemData]>>).forEach((group, groupIndex) => {
+          this.storedState.lruCacheGroups[groupIndex] = new Map<string, CacheItemData>(group)
+        })
+        this.storedState.mimeTypeByContentHash = new Map<string, string>(fileContent.mimeTypeByContentHash || [])
+        this.loadGroupNumberByContentHashMap()
+      } catch (err) {
+        this.logger.error('Error while trying to load data from cache file! Will start from scratch', {
+          file: this.cacheFilePath,
+          err,
+        })
+      }
+    } else {
+      this.logger.warn(`Cache file (${this.cacheFilePath}) not found. Starting from scratch`)
+    }
+  }
+
+  public clearInterval(): void {
+    clearInterval(this.saveInterval)
+  }
+}

+ 228 - 0
distributor-node/src/services/content/ContentService.ts

@@ -0,0 +1,228 @@
+import fs from 'fs'
+import { ReadonlyConfig, DataObjectData } from '../../types'
+import { StateCacheService } from '../cache/StateCacheService'
+import { LoggingService } from '../logging'
+import { Logger } from 'winston'
+import { FileContinousReadStream, FileContinousReadStreamOptions } from './FileContinousReadStream'
+import FileType from 'file-type'
+import _ from 'lodash'
+import { Readable, pipeline } from 'stream'
+
+export const DEFAULT_CONTENT_TYPE = 'application/octet-stream'
+
+export class ContentService {
+  private config: ReadonlyConfig
+  private dataDir: string
+  private logger: Logger
+  private stateCache: StateCacheService
+
+  private contentSizeSum = 0
+
+  public get usedSpace(): number {
+    return this.contentSizeSum
+  }
+
+  public get freeSpace(): number {
+    return this.config.limits.storage - this.contentSizeSum
+  }
+
+  public constructor(config: ReadonlyConfig, logging: LoggingService, stateCache: StateCacheService) {
+    this.config = config
+    this.logger = logging.createLogger('ContentService')
+    this.stateCache = stateCache
+    this.dataDir = config.directories.data
+  }
+
+  public async startupInit(supportedObjects: DataObjectData[]): Promise<void> {
+    const dataObjectsByHash = _.groupBy(supportedObjects, (o) => o.contentHash)
+    const dataDirFiles = fs.readdirSync(this.dataDir)
+    const filesCountOnStartup = dataDirFiles.length
+    const cachedContentHashes = this.stateCache.getCachedContentHashes()
+    const cacheItemsOnStartup = cachedContentHashes.length
+
+    this.logger.info('ContentService initializing...', {
+      supportedObjects: supportedObjects.length,
+      filesCountOnStartup,
+      cacheItemsOnStartup,
+    })
+    let filesDropped = 0
+    for (const contentHash of dataDirFiles) {
+      this.logger.debug('Checking content file', { contentHash })
+      // Add fileSize to contentSizeSum for each file. If the file ends up dropped - contentSizeSum will be reduced by this.drop().
+      const fileSize = this.fileSize(contentHash)
+      this.contentSizeSum += fileSize
+
+      // Drop files that are not part of current chain assignment
+      const objectsByHash = dataObjectsByHash[contentHash] || []
+      if (!objectsByHash.length) {
+        this.drop(contentHash, 'Not supported')
+        ++filesDropped
+        continue
+      }
+
+      // Compare file size to expected one
+      const { size: dataObjectSize } = objectsByHash[0]
+      if (fileSize !== dataObjectSize) {
+        // Existing file size does not match the expected one
+        const msg = `Unexpected file size. Expected: ${dataObjectSize}, actual: ${fileSize}`
+        this.logger.warn(msg, { fileSize, dataObjectSize })
+        this.drop(contentHash, msg)
+        ++filesDropped
+      } else {
+        // Existing file size is OK - detect mimeType if missing
+        if (!this.stateCache.getContentMimeType(contentHash)) {
+          this.stateCache.setContentMimeType(contentHash, await this.guessMimeType(contentHash))
+        }
+      }
+
+      // Recreate contentHashByObjectId map for all supported data objects
+      objectsByHash.forEach(({ contentHash, objectId }) => {
+        this.stateCache.setObjectContentHash(objectId, contentHash)
+      })
+    }
+
+    let cacheItemsDropped = 0
+    for (const contentHash of cachedContentHashes) {
+      if (!this.exists(contentHash)) {
+        // Content is part of cache data, but does not exist in filesystem - drop from cache
+        this.stateCache.dropByHash(contentHash)
+        ++cacheItemsDropped
+      }
+    }
+
+    this.logger.info('ContentService initialized', {
+      filesDropped,
+      cacheItemsDropped,
+      contentSizeSum: this.contentSizeSum,
+    })
+  }
+
+  public drop(contentHash: string, reason?: string): void {
+    if (this.exists(contentHash)) {
+      const size = this.fileSize(contentHash)
+      fs.unlinkSync(this.path(contentHash))
+      this.contentSizeSum -= size
+      this.logger.debug('Dropping content', { contentHash, reason, size, contentSizeSum: this.contentSizeSum })
+    } else {
+      this.logger.warn('Trying to drop content that no longer exists', { contentHash, reason })
+    }
+    this.stateCache.dropByHash(contentHash)
+  }
+
+  public fileSize(contentHash: string): number {
+    return fs.statSync(this.path(contentHash)).size
+  }
+
+  public path(contentHash: string): string {
+    return `${this.dataDir}/${contentHash}`
+  }
+
+  public exists(contentHash: string): boolean {
+    return fs.existsSync(this.path(contentHash))
+  }
+
+  public createReadStream(contentHash: string): fs.ReadStream {
+    return fs.createReadStream(this.path(contentHash))
+  }
+
+  public createWriteStream(contentHash: string): fs.WriteStream {
+    return fs.createWriteStream(this.path(contentHash), { autoClose: true, emitClose: true })
+  }
+
+  public createContinousReadStream(
+    contentHash: string,
+    options: FileContinousReadStreamOptions
+  ): FileContinousReadStream {
+    return new FileContinousReadStream(this.path(contentHash), options)
+  }
+
+  public async guessMimeType(contentHash: string): Promise<string> {
+    const guessResult = await FileType.fromFile(this.path(contentHash))
+    return guessResult?.mime || DEFAULT_CONTENT_TYPE
+  }
+
+  private async evictCacheUntilFreeSpaceReached(targetFreeSpace: number): Promise<void> {
+    this.logger.verbose('Cache eviction triggered.', { targetFreeSpace, currentFreeSpace: this.freeSpace })
+    let itemsDropped = 0
+    while (this.freeSpace < targetFreeSpace) {
+      const evictCandidateHash = this.stateCache.getCacheEvictCandidateHash()
+      if (evictCandidateHash) {
+        this.drop(evictCandidateHash, 'Cache eviction')
+        ++itemsDropped
+      } else {
+        this.logger.verbose('Nothing to drop from cache, waiting...', { freeSpace: this.freeSpace, targetFreeSpace })
+        await new Promise((resolve) => setTimeout(resolve, 1000))
+      }
+    }
+    this.logger.verbose('Cache eviction finalized.', { currentFreeSpace: this.freeSpace, itemsDropped })
+  }
+
+  public async handleNewContent(contentHash: string, expectedSize: number, dataStream: Readable): Promise<void> {
+    this.logger.verbose('Handling new content', {
+      contentHash,
+      expectedSize,
+    })
+
+    // Trigger cache eviction if required
+    if (this.freeSpace < expectedSize) {
+      await this.evictCacheUntilFreeSpaceReached(expectedSize)
+    }
+
+    // Reserve space for the new object
+    this.contentSizeSum += expectedSize
+    this.logger.verbose('Reserved space for new data object', {
+      contentHash,
+      expectedSize,
+      newContentSizeSum: this.contentSizeSum,
+    })
+
+    // Return a promise that resolves when the new file is created
+    return new Promise<void>((resolve, reject) => {
+      const fileStream = this.createWriteStream(contentHash)
+
+      let bytesReceived = 0
+
+      pipeline(dataStream, fileStream, async (err) => {
+        const { bytesWritten } = fileStream
+        const logMetadata = {
+          contentHash,
+          expectedSize,
+          bytesReceived,
+          bytesWritten,
+        }
+        if (err) {
+          this.logger.error(`Error while processing content data stream`, {
+            err,
+            ...logMetadata,
+          })
+          this.drop(contentHash)
+          reject(err)
+        } else {
+          if (bytesWritten === bytesReceived && bytesWritten === expectedSize) {
+            const mimeType = await this.guessMimeType(contentHash)
+            this.logger.info('New content accepted', { ...logMetadata })
+            this.stateCache.dropPendingDownload(contentHash)
+            this.stateCache.newContent(contentHash, expectedSize)
+            this.stateCache.setContentMimeType(contentHash, mimeType)
+          } else {
+            this.logger.error('Content rejected: Bytes written/received/expected mismatch!', {
+              ...logMetadata,
+            })
+            this.drop(contentHash)
+          }
+        }
+      })
+
+      fileStream.on('open', () => {
+        // Note: The promise is resolved on "ready" event, since that's what's awaited in the current flow
+        resolve()
+      })
+
+      dataStream.on('data', (chunk) => {
+        bytesReceived += chunk.length
+        if (bytesReceived > expectedSize) {
+          dataStream.destroy(new Error('Unexpected content size: Too much data received from source!'))
+        }
+      })
+    })
+  }
+}
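
A sketch of how the content and networking layers presumably fit together once an object is missing from the cache (`networking` and `contentService` are illustrative names; the real wiring lives in the app layer, outside this hunk):

    // Hypothetical glue code, assuming `info.data` was obtained from
    // NetworkingService.dataObjectInfo() (defined further down in this diff).
    const response = await networking.downloadDataObject({ objectData: info.data })
    if (response) {
      // Resolves as soon as the cache file is open; writing continues in the background.
      await contentService.handleNewContent(info.data.contentHash, info.data.size, response.data)
    }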

+ 87 - 0
distributor-node/src/services/content/FileContinousReadStream.ts

@@ -0,0 +1,87 @@
+import { Readable } from 'stream'
+import fs from 'fs'
+
+export interface FileContinousReadStreamOptions {
+  end: number
+  start?: number
+  chunkSize?: number
+  missingDataRetryTime?: number
+  maxRetries?: number
+}
+
+export class FileContinousReadStream extends Readable {
+  private fd: number
+  private position: number
+  private lastByte: number
+  private missingDataRetryTime: number
+  private maxRetries: number
+  private finished: boolean
+  private interval: NodeJS.Timeout | undefined
+
+  public constructor(path: string, options: FileContinousReadStreamOptions) {
+    super({
+      highWaterMark: options.chunkSize || 1 * 1024 * 1024, // default: 1 MB
+    })
+    this.fd = fs.openSync(path, 'r')
+    this.position = options.start || 0
+    this.lastByte = options.end
+    this.missingDataRetryTime = options.missingDataRetryTime || 50 // 50 ms
+    this.maxRetries = options.maxRetries || 2400 // 2400 retries x 50 ms = 120s timeout
+    this.finished = false
+  }
+
+  private finish() {
+    fs.closeSync(this.fd)
+    this.finished = true
+  }
+
+  private readChunkSync(bytesN: number): Buffer | null {
+    const chunk = Buffer.alloc(bytesN)
+    const readBytes = fs.readSync(this.fd, chunk, 0, bytesN, this.position)
+    const newPosition = this.position + readBytes
+    if (readBytes < bytesN && newPosition <= this.lastByte) {
+      // Not enough data available yet (the file may still be being written) - retry later
+      return null
+    }
+    if (newPosition > this.lastByte) {
+      // End of the requested range reached - emit the final (possibly partial) chunk
+      this.finish()
+      return chunk.slice(0, readBytes)
+    }
+    this.position = newPosition
+    return chunk
+  }
+
+  // Reason: https://nodejs.org/docs/latest/api/stream.html#stream_implementing_a_readable_stream
+  // eslint-disable-next-line @typescript-eslint/naming-convention
+  _read(bytesN: number): void {
+    if (this.finished) {
+      this.push(null)
+      return
+    }
+    const chunk = this.readChunkSync(bytesN)
+    if (chunk === null) {
+      let retries = 0
+      const interval = setInterval(() => {
+        const chunk = this.readChunkSync(bytesN)
+        if (chunk !== null) {
+          clearInterval(interval)
+          return this.push(chunk)
+        }
+        if (++retries >= this.maxRetries) {
+          clearInterval(interval)
+          this.destroy(new Error('Max missing data retries limit reached'))
+        }
+      }, this.missingDataRetryTime)
+      this.interval = interval
+    } else {
+      this.push(chunk)
+    }
+  }
+
+  // Reason: https://nodejs.org/docs/latest/api/stream.html#stream_implementing_a_readable_stream
+  // eslint-disable-next-line @typescript-eslint/naming-convention
+  _destroy(): void {
+    if (this.interval) {
+      clearInterval(this.interval)
+    }
+  }
+}
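
Unlike a plain fs.createReadStream, the stream above keeps polling the file for data that may still be arriving (e.g. while the same object is being downloaded into the cache). A minimal usage sketch, assuming the object's full size is known up front (`pathToCachedFile`, `size` and `destination` are illustrative):

    // Hypothetical usage: serve bytes [0, size - 1] of a file that may still be growing.
    const stream = new FileContinousReadStream(pathToCachedFile, {
      end: size - 1, // index of the last byte to serve
    })
    stream.on('error', (err) => console.error('Streaming failed:', err))
    stream.pipe(destination) // any writable stream, e.g. an HTTP response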

+ 134 - 0
distributor-node/src/services/logging/LoggingService.ts

@@ -0,0 +1,134 @@
+import winston, { Logger, LoggerOptions } from 'winston'
+import ecsFormat from '@elastic/ecs-winston-format'
+import { ElasticsearchTransport } from 'winston-elasticsearch'
+import { ReadonlyConfig } from '../../types'
+import { blake2AsHex } from '@polkadot/util-crypto'
+import { Format } from 'logform'
+import stringify from 'fast-safe-stringify'
+import NodeCache from 'node-cache'
+
+const cliColors = {
+  error: 'red',
+  warn: 'yellow',
+  info: 'green',
+  http: 'magenta',
+  debug: 'grey',
+}
+
+winston.addColors(cliColors)
+
+const pausedLogs = new NodeCache({
+  deleteOnExpire: true,
+})
+
+// Pause (skip) a repeated log message for a specified time period ('@pauseFor', in seconds)
+const pauseFormat: (opts: { id: string }) => Format = winston.format((info, opts: { id: string }) => {
+  if (info['@pauseFor']) {
+    const messageHash = blake2AsHex(`${opts.id}:${info.level}:${info.message}`)
+    if (!pausedLogs.has(messageHash)) {
+      pausedLogs.set(messageHash, null, info['@pauseFor'])
+      info.message += ` (this log message will be skipped for the next ${info['@pauseFor']}s)`
+      delete info['@pauseFor']
+      return info
+    }
+    return false
+  }
+
+  return info
+})
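+// Usage note: a log call can rate-limit itself by passing '@pauseFor' (a TTL in
+// seconds) in its metadata. An identical message at the same level is then
+// skipped until the TTL expires, e.g.:
+// logger.warn(`Invalid storage node endpoint: ${endpoint}`, { '@pauseFor': 900 })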
+
+const cliFormat = winston.format.combine(
+  winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss:ms' }),
+  winston.format.metadata({ fillExcept: ['label', 'level', 'timestamp', 'message'] }),
+  winston.format.colorize({ all: true }),
+  winston.format.printf(
+    (info) =>
+      `${info.timestamp} ${info.label} ${info.level}: ${info.message}` +
+      (Object.keys(info.metadata).length ? `\n${stringify(info.metadata, undefined, 4)}` : '')
+  )
+)
+
+export class LoggingService {
+  private rootLogger: Logger
+  private esTransport: ElasticsearchTransport | undefined
+
+  private constructor(options: LoggerOptions, esTransport?: ElasticsearchTransport) {
+    this.esTransport = esTransport
+    this.rootLogger = winston.createLogger(options)
+  }
+
+  public static withAppConfig(config: ReadonlyConfig): LoggingService {
+    const transports: winston.LoggerOptions['transports'] = []
+
+    let esTransport: ElasticsearchTransport | undefined
+    if (config.log?.elastic && config.log.elastic !== 'off') {
+      if (!config.endpoints.elasticSearch) {
+        throw new Error('config.endpoints.elasticSearch must be provided when elasticSearch logging is enabled!')
+      }
+      esTransport = new ElasticsearchTransport({
+        level: config.log.elastic,
+        format: winston.format.combine(pauseFormat({ id: 'es' }), ecsFormat()),
+        flushInterval: 5000,
+        source: config.id,
+        clientOpts: {
+          node: {
+            url: new URL(config.endpoints.elasticSearch),
+          },
+        },
+      })
+      transports.push(esTransport)
+    }
+
+    const fileTransport =
+      config.log?.file && config.log.file !== 'off'
+        ? new winston.transports.File({
+            filename: `${config.directories.logs}/logs.json`,
+            level: config.log.file,
+            format: winston.format.combine(pauseFormat({ id: 'file' }), ecsFormat()),
+          })
+        : undefined
+    if (fileTransport) {
+      transports.push(fileTransport)
+    }
+
+    const consoleTransport =
+      config.log?.console && config.log.console !== 'off'
+        ? new winston.transports.Console({
+            level: config.log.console,
+            format: winston.format.combine(pauseFormat({ id: 'cli' }), cliFormat),
+          })
+        : undefined
+    if (consoleTransport) {
+      transports.push(consoleTransport)
+    }
+
+    return new LoggingService(
+      {
+        transports,
+      },
+      esTransport
+    )
+  }
+
+  public static withCLIConfig(): LoggingService {
+    return new LoggingService({
+      transports: new winston.transports.Console({
+        // Log everything to stderr, only the command output value will be written to stdout
+        stderrLevels: Object.keys(winston.config.npm.levels),
+        format: winston.format.combine(pauseFormat({ id: 'cli' }), cliFormat),
+      }),
+    })
+  }
+
+  public createLogger(label: string, ...meta: unknown[]): Logger {
+    return this.rootLogger.child({ label, ...meta })
+  }
+
+  public async end(): Promise<void> {
+    if (this.esTransport) {
+      await this.esTransport.flush()
+    }
+    this.rootLogger.end()
+    await Promise.all(this.rootLogger.transports.map((t) => new Promise((resolve) => t.on('finish', resolve))))
+  }
+}
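
A minimal sketch of typical usage, assuming a parsed `config: ReadonlyConfig`:

    const logging = LoggingService.withAppConfig(config)
    const logger = logging.createLogger('MyService')
    logger.info('Service started')
    // On shutdown: flush the Elasticsearch transport (if any) and close all transports
    await logging.end()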

+ 1 - 0
distributor-node/src/services/logging/index.ts

@@ -0,0 +1 @@
+export { LoggingService } from './LoggingService'

+ 352 - 0
distributor-node/src/services/networking/NetworkingService.ts

@@ -0,0 +1,352 @@
+import { ReadonlyConfig } from '../../types/config'
+import { QueryNodeApi } from './query-node/api'
+import { Logger } from 'winston'
+import { LoggingService } from '../logging'
+import { StorageNodeApi } from './storage-node/api'
+import { PendingDownloadData, StateCacheService } from '../cache/StateCacheService'
+import { DataObjectDetailsFragment } from './query-node/generated/queries'
+import axios from 'axios'
+import {
+  StorageNodeEndpointData,
+  DataObjectAccessPoints,
+  DataObjectData,
+  DataObjectInfo,
+  StorageNodeDownloadResponse,
+  DownloadData,
+} from '../../types'
+import queue from 'queue'
+import { DistributionBucketOperatorStatus } from './query-node/generated/schema'
+import http from 'http'
+import https from 'https'
+import { parseAxiosError } from '../parsers/errors'
+
+const MAX_CONCURRENT_AVAILABILITY_CHECKS_PER_DOWNLOAD = 10
+const MAX_CONCURRENT_RESPONSE_TIME_CHECKS = 10
+const STORAGE_NODE_ENDPOINTS_CHECK_INTERVAL_MS = 60000
+
+export class NetworkingService {
+  private config: ReadonlyConfig
+  private queryNodeApi: QueryNodeApi
+  // private runtimeApi: RuntimeApi
+  private logging: LoggingService
+  private stateCache: StateCacheService
+  private logger: Logger
+
+  private storageNodeEndpointsCheckInterval: NodeJS.Timeout
+  private testLatencyQueue: queue
+  private downloadQueue: queue
+
+  constructor(config: ReadonlyConfig, stateCache: StateCacheService, logging: LoggingService) {
+    axios.defaults.timeout = config.limits.outboundRequestsTimeout
+    const httpConfig: http.AgentOptions | https.AgentOptions = {
+      keepAlive: true,
+      timeout: config.limits.outboundRequestsTimeout,
+      maxSockets: config.limits.maxConcurrentOutboundConnections,
+    }
+    axios.defaults.httpAgent = new http.Agent(httpConfig)
+    axios.defaults.httpsAgent = new https.Agent(httpConfig)
+    this.config = config
+    this.logging = logging
+    this.stateCache = stateCache
+    this.logger = logging.createLogger('NetworkingManager')
+    this.queryNodeApi = new QueryNodeApi(config.endpoints.queryNode)
+    // this.runtimeApi = new RuntimeApi(config.endpoints.substrateNode)
+    this.checkActiveStorageNodeEndpoints()
+    this.storageNodeEndpointsCheckInterval = setInterval(
+      this.checkActiveStorageNodeEndpoints.bind(this),
+      STORAGE_NODE_ENDPOINTS_CHECK_INTERVAL_MS
+    )
+    // Queues
+    this.testLatencyQueue = queue({ concurrency: MAX_CONCURRENT_RESPONSE_TIME_CHECKS, autostart: true }).on(
+      'end',
+      () => {
+        this.logger.verbose('Mean response times updated', {
+          responseTimes: this.stateCache.getStorageNodeEndpointsMeanResponseTimes(),
+        })
+      }
+    )
+    this.downloadQueue = queue({ concurrency: config.limits.maxConcurrentStorageNodeDownloads, autostart: true })
+  }
+
+  public clearIntervals(): void {
+    clearInterval(this.storageNodeEndpointsCheckInterval)
+  }
+
+  private validateNodeEndpoint(endpoint: string): void {
+    const endpointUrl = new URL(endpoint)
+    if (endpointUrl.protocol !== 'http:' && endpointUrl.protocol !== 'https:') {
+      throw new Error(`Invalid endpoint protocol: ${endpointUrl.protocol}`)
+    }
+  }
+
+  private filterStorageNodeEndpoints(input: StorageNodeEndpointData[]): StorageNodeEndpointData[] {
+    return input.filter((b) => {
+      try {
+        this.validateNodeEndpoint(b.endpoint)
+        return true
+      } catch (err) {
+        this.logger.warn(`Invalid storage node endpoint: ${b.endpoint} for bucket ${b.bucketId}`, {
+          bucketId: b.bucketId,
+          endpoint: b.endpoint,
+          err,
+          '@pauseFor': 900,
+        })
+        return false
+      }
+    })
+  }
+
+  private prepareStorageNodeEndpoints(details: DataObjectDetailsFragment) {
+    const endpointsData = details.storageBag.storageAssignments
+      .filter(
+        (a) =>
+          a.storageBucket.operatorStatus.__typename === 'StorageBucketOperatorStatusActive' &&
+          a.storageBucket.operatorMetadata?.nodeEndpoint
+      )
+      .map((a) => ({
+        bucketId: a.storageBucket.id,
+        endpoint: a.storageBucket.operatorMetadata!.nodeEndpoint!,
+      }))
+
+    return this.filterStorageNodeEndpoints(endpointsData)
+  }
+
+  private parseDataObjectAccessPoints(details: DataObjectDetailsFragment): DataObjectAccessPoints {
+    return {
+      storageNodes: this.prepareStorageNodeEndpoints(details),
+    }
+  }
+
+  public async dataObjectInfo(objectId: string): Promise<DataObjectInfo> {
+    const details = await this.queryNodeApi.getDataObjectDetails(objectId)
+    if (details) {
+      this.stateCache.setObjectContentHash(objectId, details.ipfsHash)
+    }
+    return {
+      exists: !!details,
+      isSupported:
+        (this.config.buckets === 'all' &&
+          details?.storageBag.distirbutionAssignments.some((d) =>
+            d.distributionBucket.operators.some(
+              (o) => o.workerId === this.config.workerId && o.status === DistributionBucketOperatorStatus.Active
+            )
+          )) ||
+        (Array.isArray(this.config.buckets) &&
+          this.config.buckets.some((bucketId) =>
+            details?.storageBag.distirbutionAssignments
+              .map((a) => a.distributionBucket.id)
+              .includes(bucketId.toString())
+          )),
+      data: details
+        ? {
+            objectId,
+            accessPoints: this.parseDataObjectAccessPoints(details),
+            contentHash: details.ipfsHash,
+            size: parseInt(details.size),
+          }
+        : undefined,
+    }
+  }
+
+  private sortEndpointsByMeanResponseTime(endpoints: string[]) {
+    return endpoints.sort(
+      (a, b) =>
+        this.stateCache.getStorageNodeEndpointMeanResponseTime(a) -
+        this.stateCache.getStorageNodeEndpointMeanResponseTime(b)
+    )
+  }
+
+  private downloadJob(
+    pendingDownload: PendingDownloadData,
+    downloadData: DownloadData,
+    onSourceFound: (response: StorageNodeDownloadResponse) => void,
+    onError: (error: Error) => void,
+    onFinished?: () => void
+  ): Promise<void> {
+    const {
+      objectData: { contentHash, accessPoints },
+      startAt,
+    } = downloadData
+
+    pendingDownload.status = 'LookingForSource'
+
+    return new Promise<void>((resolve, reject) => {
+      // Handlers:
+      const fail = (message: string) => {
+        this.stateCache.dropPendingDownload(contentHash)
+        onError(new Error(message))
+        reject(new Error(message))
+      }
+
+      const sourceFound = (response: StorageNodeDownloadResponse) => {
+        this.logger.info('Download source chosen', { contentHash, source: response.config.url })
+        pendingDownload.status = 'Downloading'
+        onSourceFound(response)
+      }
+
+      const finish = () => {
+        onFinished && onFinished()
+        resolve()
+      }
+
+      const storageEndpoints = this.sortEndpointsByMeanResponseTime(
+        accessPoints?.storageNodes.map((n) => n.endpoint) || []
+      )
+
+      this.logger.info('Downloading new data object', {
+        contentHash,
+        possibleSources: storageEndpoints.map((e) => ({
+          endpoint: e,
+          meanResponseTime: this.stateCache.getStorageNodeEndpointMeanResponseTime(e),
+        })),
+      })
+      if (!storageEndpoints.length) {
+        return fail('No storage endpoints available to download the data object from')
+      }
+
+      const availabilityQueue = queue({
+        concurrency: MAX_CONCURRENT_AVAILABILITY_CHECKS_PER_DOWNLOAD,
+        autostart: true,
+      })
+      const objectDownloadQueue = queue({ concurrency: 1, autostart: true })
+
+      storageEndpoints.forEach(async (endpoint) => {
+        availabilityQueue.push(async () => {
+          const api = new StorageNodeApi(endpoint, this.logging)
+          const available = await api.isObjectAvailable(contentHash)
+          if (!available) {
+            throw new Error('Not available')
+          }
+          return endpoint
+        })
+      })
+
+      availabilityQueue.on('success', (endpoint) => {
+        availabilityQueue.stop()
+        const job = async () => {
+          const api = new StorageNodeApi(endpoint, this.logging)
+          const response = await api.downloadObject(contentHash, startAt)
+          return response
+        }
+        objectDownloadQueue.push(job)
+      })
+
+      availabilityQueue.on('error', () => {
+        /*
+        Do nothing.
+        The handler is needed to avoid unhandled promise rejection
+        */
+      })
+
+      availabilityQueue.on('end', () => {
+        if (!objectDownloadQueue.length) {
+          fail('Failed to download the object from any available storage provider')
+        }
+      })
+
+      objectDownloadQueue.on('error', (err) => {
+        this.logger.error('Download attempt from storage node failed after availability was confirmed:', { err })
+      })
+
+      objectDownloadQueue.on('end', () => {
+        if (availabilityQueue.length) {
+          availabilityQueue.start()
+        } else {
+          fail('Failed to download the object from any available storage provider')
+        }
+      })
+
+      objectDownloadQueue.on('success', (response: StorageNodeDownloadResponse) => {
+        availabilityQueue.removeAllListeners().end()
+        objectDownloadQueue.removeAllListeners().end()
+        response.data.on('close', finish).on('error', finish).on('end', finish)
+        sourceFound(response)
+      })
+    })
+  }
+
+  public downloadDataObject(downloadData: DownloadData): Promise<StorageNodeDownloadResponse> | null {
+    const {
+      objectData: { contentHash, size },
+    } = downloadData
+
+    if (this.stateCache.getPendingDownload(contentHash)) {
+      // Already downloading
+      return null
+    }
+
+    let resolveDownload: (response: StorageNodeDownloadResponse) => void, rejectDownload: (err: Error) => void
+    const downloadPromise = new Promise<StorageNodeDownloadResponse>((resolve, reject) => {
+      resolveDownload = resolve
+      rejectDownload = reject
+    })
+
+    // Queue the download
+    const pendingDownload = this.stateCache.newPendingDownload(contentHash, size, downloadPromise)
+    this.downloadQueue.push(() => this.downloadJob(pendingDownload, downloadData, resolveDownload, rejectDownload))
+
+    return downloadPromise
+  }
+
+  async fetchSupportedDataObjects(): Promise<DataObjectData[]> {
+    const data =
+      this.config.buckets === 'all'
+        ? await this.queryNodeApi.getDistributionBucketsWithObjectsByWorkerId(this.config.workerId)
+        : await this.queryNodeApi.getDistributionBucketsWithObjectsByIds(this.config.buckets.map((id) => id.toString()))
+    const objectsData: DataObjectData[] = []
+    data.forEach((bucket) => {
+      bucket.bagAssignments.forEach((a) => {
+        a.storageBag.objects.forEach((object) => {
+          const { ipfsHash, id, size } = object
+          objectsData.push({ contentHash: ipfsHash, objectId: id, size: parseInt(size) })
+        })
+      })
+    })
+
+    return objectsData
+  }
+
+  async checkActiveStorageNodeEndpoints(): Promise<void> {
+    const activeStorageOperators = await this.queryNodeApi.getActiveStorageBucketOperatorsData()
+    const endpoints = this.filterStorageNodeEndpoints(
+      activeStorageOperators.map(({ id, operatorMetadata }) => ({
+        bucketId: id,
+        endpoint: operatorMetadata!.nodeEndpoint!,
+      }))
+    )
+    this.logger.verbose('Checking nearby storage nodes...', { validEndpointsCount: endpoints.length })
+
+    endpoints.forEach(({ endpoint }) =>
+      this.testLatencyQueue.push(async () => {
+        await this.checkResponseTime(endpoint)
+      })
+    )
+  }
+
+  async checkResponseTime(endpoint: string): Promise<void> {
+    const start = Date.now()
+    this.logger.debug(`Sending storage node response-time check request to: ${endpoint}`, { endpoint })
+    try {
+      // TODO: Use a status endpoint once available?
+      await axios.get(endpoint, {
+        headers: {
+          connection: 'close',
+        },
+      })
+      throw new Error('Unexpected status 200')
+    } catch (err) {
+      if (axios.isAxiosError(err) && err.response?.status === 404) {
+        // This is the expected outcome currently
+        const responseTime = Date.now() - start
+        this.logger.debug(`${endpoint} check request response time: ${responseTime}`, { endpoint, responseTime })
+        this.stateCache.setStorageNodeEndpointResponseTime(endpoint, responseTime)
+      } else {
+        this.logger.warn(`${endpoint} check request unexpected response`, {
+          endpoint,
+          err: axios.isAxiosError(err) ? parseAxiosError(err) : err,
+          '@pauseFor': 900,
+        })
+      }
+    }
+  }
+}
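
A sketch of how the service is presumably composed by the app layer (the StateCacheService constructor arguments are an assumption here, since that class is defined earlier in this PR):

    const logging = LoggingService.withAppConfig(config)
    const stateCache = new StateCacheService(config, logging) // signature assumed
    const networking = new NetworkingService(config, stateCache, logging)

    const objects = await networking.fetchSupportedDataObjects()
    console.log(`This node should distribute ${objects.length} data objects`)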

+ 27 - 0
distributor-node/src/services/networking/distributor-node/generated/.openapi-generator-ignore

@@ -0,0 +1,27 @@
+# OpenAPI Generator Ignore
+# Generated by openapi-generator https://github.com/openapitools/openapi-generator
+
+# Use this file to prevent files from being overwritten by the generator.
+# The patterns follow closely to .gitignore or .dockerignore.
+
+# As an example, the C# client generator defines ApiClient.cs.
+# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line:
+#ApiClient.cs
+
+# You can match any string of characters against a directory, file or extension with a single asterisk (*):
+#foo/*/qux
+# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux
+
+# You can recursively match patterns against a directory, file or extension with a double asterisk (**):
+#foo/**/qux
+# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux
+
+# You can also negate patterns with an exclamation (!).
+# For example, you can ignore all files in a docs folder with the file extension .md:
+#docs/*.md
+# Then explicitly reverse the ignore rule for a single file:
+#!docs/README.md
+
+git_push.sh
+.npmignore
+.gitignore

+ 5 - 0
distributor-node/src/services/networking/distributor-node/generated/.openapi-generator/FILES

@@ -0,0 +1,5 @@
+api.ts
+base.ts
+common.ts
+configuration.ts
+index.ts

+ 1 - 0
distributor-node/src/services/networking/distributor-node/generated/.openapi-generator/VERSION

@@ -0,0 +1 @@
+5.2.0

+ 380 - 0
distributor-node/src/services/networking/distributor-node/generated/api.ts

@@ -0,0 +1,380 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Distributor node API
+ * Distributor node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+import { Configuration } from './configuration';
+import globalAxios, { AxiosPromise, AxiosInstance } from 'axios';
+// Some imports not used depending on template conditions
+// @ts-ignore
+import { DUMMY_BASE_URL, assertParamExists, setApiKeyToObject, setBasicAuthToObject, setBearerAuthToObject, setOAuthToObject, setSearchParams, serializeDataIfNeeded, toPathString, createRequestFunction } from './common';
+// @ts-ignore
+import { BASE_PATH, COLLECTION_FORMATS, RequestArgs, BaseAPI, RequiredError } from './base';
+
+/**
+ * @type BucketsResponse
+ * @export
+ */
+export type BucketsResponse = BucketsResponseOneOf | BucketsResponseOneOf1;
+
+/**
+ * 
+ * @export
+ * @interface BucketsResponseOneOf
+ */
+export interface BucketsResponseOneOf {
+    /**
+     * 
+     * @type {Array<number>}
+     * @memberof BucketsResponseOneOf
+     */
+    bucketIds: Array<number>;
+}
+/**
+ * 
+ * @export
+ * @interface BucketsResponseOneOf1
+ */
+export interface BucketsResponseOneOf1 {
+    /**
+     * 
+     * @type {number}
+     * @memberof BucketsResponseOneOf1
+     */
+    allByWorkerId: number;
+}
+/**
+ * 
+ * @export
+ * @interface ErrorResponse
+ */
+export interface ErrorResponse {
+    /**
+     * 
+     * @type {string}
+     * @memberof ErrorResponse
+     */
+    type?: string;
+    /**
+     * 
+     * @type {string}
+     * @memberof ErrorResponse
+     */
+    message: string;
+}
+/**
+ * 
+ * @export
+ * @interface StatusResponse
+ */
+export interface StatusResponse {
+    /**
+     * 
+     * @type {string}
+     * @memberof StatusResponse
+     */
+    id: string;
+    /**
+     * 
+     * @type {number}
+     * @memberof StatusResponse
+     */
+    objectsInCache: number;
+    /**
+     * 
+     * @type {number}
+     * @memberof StatusResponse
+     */
+    storageLimit: number;
+    /**
+     * 
+     * @type {number}
+     * @memberof StatusResponse
+     */
+    storageUsed: number;
+    /**
+     * 
+     * @type {number}
+     * @memberof StatusResponse
+     */
+    uptime: number;
+    /**
+     * 
+     * @type {number}
+     * @memberof StatusResponse
+     */
+    downloadsInProgress: number;
+}
+
+/**
+ * PublicApi - axios parameter creator
+ * @export
+ */
+export const PublicApiAxiosParamCreator = function (configuration?: Configuration) {
+    return {
+        /**
+         * Returns a media file.
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicAsset: async (options: any = {}): Promise<RequestArgs> => {
+            const localVarPath = `/asset/{objectId}`;
+            // use dummy base URL string because the URL constructor only accepts absolute URLs.
+            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
+            let baseOptions;
+            if (configuration) {
+                baseOptions = configuration.baseOptions;
+            }
+
+            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
+            const localVarHeaderParameter = {} as any;
+            const localVarQueryParameter = {} as any;
+
+
+    
+            setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
+            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
+            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
+
+            return {
+                url: toPathString(localVarUrlObj),
+                options: localVarRequestOptions,
+            };
+        },
+        /**
+         * Returns asset response headers (cache status, content type and/or length, accepted ranges etc.)
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicAssetHead: async (options: any = {}): Promise<RequestArgs> => {
+            const localVarPath = `/asset/{objectId}`;
+            // use dummy base URL string because the URL constructor only accepts absolute URLs.
+            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
+            let baseOptions;
+            if (configuration) {
+                baseOptions = configuration.baseOptions;
+            }
+
+            const localVarRequestOptions = { method: 'HEAD', ...baseOptions, ...options};
+            const localVarHeaderParameter = {} as any;
+            const localVarQueryParameter = {} as any;
+
+
+    
+            setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
+            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
+            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
+
+            return {
+                url: toPathString(localVarUrlObj),
+                options: localVarRequestOptions,
+            };
+        },
+        /**
+         * Returns list of distributed buckets
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicBuckets: async (options: any = {}): Promise<RequestArgs> => {
+            const localVarPath = `/buckets`;
+            // use dummy base URL string because the URL constructor only accepts absolute URLs.
+            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
+            let baseOptions;
+            if (configuration) {
+                baseOptions = configuration.baseOptions;
+            }
+
+            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
+            const localVarHeaderParameter = {} as any;
+            const localVarQueryParameter = {} as any;
+
+
+    
+            setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
+            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
+            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
+
+            return {
+                url: toPathString(localVarUrlObj),
+                options: localVarRequestOptions,
+            };
+        },
+        /**
+         * Returns json object describing current node status.
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicStatus: async (options: any = {}): Promise<RequestArgs> => {
+            const localVarPath = `/status`;
+            // use dummy base URL string because the URL constructor only accepts absolute URLs.
+            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
+            let baseOptions;
+            if (configuration) {
+                baseOptions = configuration.baseOptions;
+            }
+
+            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
+            const localVarHeaderParameter = {} as any;
+            const localVarQueryParameter = {} as any;
+
+
+    
+            setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
+            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
+            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
+
+            return {
+                url: toPathString(localVarUrlObj),
+                options: localVarRequestOptions,
+            };
+        },
+    }
+};
+
+/**
+ * PublicApi - functional programming interface
+ * @export
+ */
+export const PublicApiFp = function(configuration?: Configuration) {
+    const localVarAxiosParamCreator = PublicApiAxiosParamCreator(configuration)
+    return {
+        /**
+         * Returns a media file.
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        async publicAsset(options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<any>> {
+            const localVarAxiosArgs = await localVarAxiosParamCreator.publicAsset(options);
+            return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
+        },
+        /**
+         * Returns asset response headers (cache status, content type and/or length, accepted ranges etc.)
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        async publicAssetHead(options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<void>> {
+            const localVarAxiosArgs = await localVarAxiosParamCreator.publicAssetHead(options);
+            return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
+        },
+        /**
+         * Returns list of distributed buckets
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        async publicBuckets(options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<BucketsResponse>> {
+            const localVarAxiosArgs = await localVarAxiosParamCreator.publicBuckets(options);
+            return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
+        },
+        /**
+         * Returns json object describing current node status.
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        async publicStatus(options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<StatusResponse>> {
+            const localVarAxiosArgs = await localVarAxiosParamCreator.publicStatus(options);
+            return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
+        },
+    }
+};
+
+/**
+ * PublicApi - factory interface
+ * @export
+ */
+export const PublicApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) {
+    const localVarFp = PublicApiFp(configuration)
+    return {
+        /**
+         * Returns a media file.
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicAsset(options?: any): AxiosPromise<any> {
+            return localVarFp.publicAsset(options).then((request) => request(axios, basePath));
+        },
+        /**
+         * Returns asset response headers (cache status, content type and/or length, accepted ranges etc.)
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicAssetHead(options?: any): AxiosPromise<void> {
+            return localVarFp.publicAssetHead(options).then((request) => request(axios, basePath));
+        },
+        /**
+         * Returns list of distributed buckets
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicBuckets(options?: any): AxiosPromise<BucketsResponse> {
+            return localVarFp.publicBuckets(options).then((request) => request(axios, basePath));
+        },
+        /**
+         * Returns json object describing current node status.
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicStatus(options?: any): AxiosPromise<StatusResponse> {
+            return localVarFp.publicStatus(options).then((request) => request(axios, basePath));
+        },
+    };
+};
+
+/**
+ * PublicApi - object-oriented interface
+ * @export
+ * @class PublicApi
+ * @extends {BaseAPI}
+ */
+export class PublicApi extends BaseAPI {
+    /**
+     * Returns a media file.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     * @memberof PublicApi
+     */
+    public publicAsset(options?: any) {
+        return PublicApiFp(this.configuration).publicAsset(options).then((request) => request(this.axios, this.basePath));
+    }
+
+    /**
+     * Returns asset response headers (cache status, content type and/or length, accepted ranges etc.)
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     * @memberof PublicApi
+     */
+    public publicAssetHead(options?: any) {
+        return PublicApiFp(this.configuration).publicAssetHead(options).then((request) => request(this.axios, this.basePath));
+    }
+
+    /**
+     * Returns list of distributed buckets
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     * @memberof PublicApi
+     */
+    public publicBuckets(options?: any) {
+        return PublicApiFp(this.configuration).publicBuckets(options).then((request) => request(this.axios, this.basePath));
+    }
+
+    /**
+     * Returns json object describing current node status.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     * @memberof PublicApi
+     */
+    public publicStatus(options?: any) {
+        return PublicApiFp(this.configuration).publicStatus(options).then((request) => request(this.axios, this.basePath));
+    }
+}
+
+
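
The generated client can be consumed directly; a short sketch (the base path below is just the generated BASE_PATH default):

    import { Configuration, PublicApi } from './generated'

    const api = new PublicApi(new Configuration({ basePath: 'http://localhost:3334/api/v1' }))
    const { data: status } = await api.publicStatus()
    console.log(`${status.objectsInCache} objects cached, ${status.storageUsed}/${status.storageLimit} bytes used`)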

+ 71 - 0
distributor-node/src/services/networking/distributor-node/generated/base.ts

@@ -0,0 +1,71 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Distributor node API
+ * Distributor node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+import { Configuration } from "./configuration";
+// Some imports not used depending on template conditions
+// @ts-ignore
+import globalAxios, { AxiosPromise, AxiosInstance } from 'axios';
+
+export const BASE_PATH = "http://localhost:3334/api/v1".replace(/\/+$/, "");
+
+/**
+ *
+ * @export
+ */
+export const COLLECTION_FORMATS = {
+    csv: ",",
+    ssv: " ",
+    tsv: "\t",
+    pipes: "|",
+};
+
+/**
+ *
+ * @export
+ * @interface RequestArgs
+ */
+export interface RequestArgs {
+    url: string;
+    options: any;
+}
+
+/**
+ *
+ * @export
+ * @class BaseAPI
+ */
+export class BaseAPI {
+    protected configuration: Configuration | undefined;
+
+    constructor(configuration?: Configuration, protected basePath: string = BASE_PATH, protected axios: AxiosInstance = globalAxios) {
+        if (configuration) {
+            this.configuration = configuration;
+            this.basePath = configuration.basePath || this.basePath;
+        }
+    }
+};
+
+/**
+ *
+ * @export
+ * @class RequiredError
+ * @extends {Error}
+ */
+export class RequiredError extends Error {
+    name: "RequiredError" = "RequiredError";
+    constructor(public field: string, msg?: string) {
+        super(msg);
+    }
+}

+ 138 - 0
distributor-node/src/services/networking/distributor-node/generated/common.ts

@@ -0,0 +1,138 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Distributor node API
+ * Distributor node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+import { Configuration } from "./configuration";
+import { RequiredError, RequestArgs } from "./base";
+import { AxiosInstance } from 'axios';
+
+/**
+ *
+ * @export
+ */
+export const DUMMY_BASE_URL = 'https://example.com'
+
+/**
+ *
+ * @throws {RequiredError}
+ * @export
+ */
+export const assertParamExists = function (functionName: string, paramName: string, paramValue: unknown) {
+    if (paramValue === null || paramValue === undefined) {
+        throw new RequiredError(paramName, `Required parameter ${paramName} was null or undefined when calling ${functionName}.`);
+    }
+}
+
+/**
+ *
+ * @export
+ */
+export const setApiKeyToObject = async function (object: any, keyParamName: string, configuration?: Configuration) {
+    if (configuration && configuration.apiKey) {
+        const localVarApiKeyValue = typeof configuration.apiKey === 'function'
+            ? await configuration.apiKey(keyParamName)
+            : await configuration.apiKey;
+        object[keyParamName] = localVarApiKeyValue;
+    }
+}
+
+/**
+ *
+ * @export
+ */
+export const setBasicAuthToObject = function (object: any, configuration?: Configuration) {
+    if (configuration && (configuration.username || configuration.password)) {
+        object["auth"] = { username: configuration.username, password: configuration.password };
+    }
+}
+
+/**
+ *
+ * @export
+ */
+export const setBearerAuthToObject = async function (object: any, configuration?: Configuration) {
+    if (configuration && configuration.accessToken) {
+        const accessToken = typeof configuration.accessToken === 'function'
+            ? await configuration.accessToken()
+            : await configuration.accessToken;
+        object["Authorization"] = "Bearer " + accessToken;
+    }
+}
+
+/**
+ *
+ * @export
+ */
+export const setOAuthToObject = async function (object: any, name: string, scopes: string[], configuration?: Configuration) {
+    if (configuration && configuration.accessToken) {
+        const localVarAccessTokenValue = typeof configuration.accessToken === 'function'
+            ? await configuration.accessToken(name, scopes)
+            : await configuration.accessToken;
+        object["Authorization"] = "Bearer " + localVarAccessTokenValue;
+    }
+}
+
+/**
+ *
+ * @export
+ */
+export const setSearchParams = function (url: URL, ...objects: any[]) {
+    const searchParams = new URLSearchParams(url.search);
+    for (const object of objects) {
+        for (const key in object) {
+            if (Array.isArray(object[key])) {
+                searchParams.delete(key);
+                for (const item of object[key]) {
+                    searchParams.append(key, item);
+                }
+            } else {
+                searchParams.set(key, object[key]);
+            }
+        }
+    }
+    url.search = searchParams.toString();
+}
+
+/**
+ *
+ * @export
+ */
+export const serializeDataIfNeeded = function (value: any, requestOptions: any, configuration?: Configuration) {
+    const nonString = typeof value !== 'string';
+    const needsSerialization = nonString && configuration && configuration.isJsonMime
+        ? configuration.isJsonMime(requestOptions.headers['Content-Type'])
+        : nonString;
+    return needsSerialization
+        ? JSON.stringify(value !== undefined ? value : {})
+        : (value || "");
+}
+
+/**
+ *
+ * @export
+ */
+export const toPathString = function (url: URL) {
+    return url.pathname + url.search + url.hash
+}
+
+/**
+ *
+ * @export
+ */
+export const createRequestFunction = function (axiosArgs: RequestArgs, globalAxios: AxiosInstance, BASE_PATH: string, configuration?: Configuration) {
+    return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => {
+        const axiosRequestArgs = {...axiosArgs.options, url: (configuration?.basePath || basePath) + axiosArgs.url};
+        return axios.request(axiosRequestArgs);
+    };
+}

+ 101 - 0
distributor-node/src/services/networking/distributor-node/generated/configuration.ts

@@ -0,0 +1,101 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Distributor node API
+ * Distributor node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+export interface ConfigurationParameters {
+    apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>);
+    username?: string;
+    password?: string;
+    accessToken?: string | Promise<string> | ((name?: string, scopes?: string[]) => string) | ((name?: string, scopes?: string[]) => Promise<string>);
+    basePath?: string;
+    baseOptions?: any;
+    formDataCtor?: new () => any;
+}
+
+export class Configuration {
+    /**
+     * parameter for apiKey security
+     * @param name security name
+     * @memberof Configuration
+     */
+    apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>);
+    /**
+     * parameter for basic security
+     *
+     * @type {string}
+     * @memberof Configuration
+     */
+    username?: string;
+    /**
+     * parameter for basic security
+     *
+     * @type {string}
+     * @memberof Configuration
+     */
+    password?: string;
+    /**
+     * parameter for oauth2 security
+     * @param name security name
+     * @param scopes oauth2 scope
+     * @memberof Configuration
+     */
+    accessToken?: string | Promise<string> | ((name?: string, scopes?: string[]) => string) | ((name?: string, scopes?: string[]) => Promise<string>);
+    /**
+     * override base path
+     *
+     * @type {string}
+     * @memberof Configuration
+     */
+    basePath?: string;
+    /**
+     * base options for axios calls
+     *
+     * @type {any}
+     * @memberof Configuration
+     */
+    baseOptions?: any;
+    /**
+     * The FormData constructor that will be used to create multipart form data
+     * requests. You can inject this here so that execution environments that
+     * do not support the FormData class can still run the generated client.
+     *
+     * @type {new () => FormData}
+     */
+    formDataCtor?: new () => any;
+
+    constructor(param: ConfigurationParameters = {}) {
+        this.apiKey = param.apiKey;
+        this.username = param.username;
+        this.password = param.password;
+        this.accessToken = param.accessToken;
+        this.basePath = param.basePath;
+        this.baseOptions = param.baseOptions;
+        this.formDataCtor = param.formDataCtor;
+    }
+
+    /**
+     * Check if the given MIME is a JSON MIME.
+     * JSON MIME examples:
+     *   application/json
+     *   application/json; charset=UTF8
+     *   APPLICATION/JSON
+     *   application/vnd.company+json
+     * @param mime - MIME (Multipurpose Internet Mail Extensions)
+     * @return True if the given MIME is JSON, false otherwise.
+     */
+    public isJsonMime(mime: string): boolean {
+        const jsonMime: RegExp = new RegExp('^(application\/json|[^;/ \t]+\/[^;/ \t]+[+]json)[ \t]*(;.*)?$', 'i');
+        return mime !== null && (jsonMime.test(mime) || mime.toLowerCase() === 'application/json-patch+json');
+    }
+}

+ 18 - 0
distributor-node/src/services/networking/distributor-node/generated/index.ts

@@ -0,0 +1,18 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Distributor node API
+ * Distributor node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+export * from "./api";
+export * from "./configuration";
+

+ 1 - 0
distributor-node/src/services/networking/index.ts

@@ -0,0 +1 @@
+export { NetworkingService } from './NetworkingService'

+ 91 - 0
distributor-node/src/services/networking/query-node/api.ts

@@ -0,0 +1,91 @@
+import { ApolloClient, NormalizedCacheObject, HttpLink, InMemoryCache, DocumentNode } from '@apollo/client/core'
+import fetch from 'cross-fetch'
+import {
+  DataObjectDetailsFragment,
+  GetDataObjectDetails,
+  GetDataObjectDetailsQuery,
+  GetDataObjectDetailsQueryVariables,
+  DistirubtionBucketWithObjectsFragment,
+  GetDistributionBucketsWithObjectsByIdsQuery,
+  GetDistributionBucketsWithObjectsByIdsQueryVariables,
+  GetDistributionBucketsWithObjectsByIds,
+  GetDistributionBucketsWithObjectsByWorkerIdQuery,
+  GetDistributionBucketsWithObjectsByWorkerIdQueryVariables,
+  GetDistributionBucketsWithObjectsByWorkerId,
+  StorageBucketOperatorFieldsFragment,
+  GetActiveStorageBucketOperatorsDataQuery,
+  GetActiveStorageBucketOperatorsDataQueryVariables,
+  GetActiveStorageBucketOperatorsData,
+} from './generated/queries'
+import { Maybe } from './generated/schema'
+
+export class QueryNodeApi {
+  private apolloClient: ApolloClient<NormalizedCacheObject>
+
+  public constructor(endpoint: string) {
+    this.apolloClient = new ApolloClient({
+      link: new HttpLink({ uri: endpoint, fetch }),
+      cache: new InMemoryCache(),
+      defaultOptions: { query: { fetchPolicy: 'no-cache', errorPolicy: 'all' } },
+    })
+  }
+
+  // Get entity by unique input
+  protected async uniqueEntityQuery<
+    QueryT extends { [k: string]: Maybe<Record<string, unknown>> | undefined },
+    VariablesT extends Record<string, unknown>
+  >(
+    query: DocumentNode,
+    variables: VariablesT,
+    resultKey: keyof QueryT
+  ): Promise<Required<QueryT>[keyof QueryT] | null> {
+    return (await this.apolloClient.query<QueryT, VariablesT>({ query, variables })).data[resultKey] || null
+  }
+
+  // Get entities by "non-unique" input and return first result
+  protected async firstEntityQuery<
+    QueryT extends { [k: string]: unknown[] },
+    VariablesT extends Record<string, unknown>
+  >(query: DocumentNode, variables: VariablesT, resultKey: keyof QueryT): Promise<QueryT[keyof QueryT][number] | null> {
+    return (await this.apolloClient.query<QueryT, VariablesT>({ query, variables })).data[resultKey][0] || null
+  }
+
+  // Query-node: get multiple entities
+  protected async multipleEntitiesQuery<
+    QueryT extends { [k: string]: unknown[] },
+    VariablesT extends Record<string, unknown>
+  >(query: DocumentNode, variables: VariablesT, resultKey: keyof QueryT): Promise<QueryT[keyof QueryT]> {
+    return (await this.apolloClient.query<QueryT, VariablesT>({ query, variables })).data[resultKey]
+  }
+
+  public getDataObjectDetails(objectId: string): Promise<DataObjectDetailsFragment | null> {
+    return this.uniqueEntityQuery<GetDataObjectDetailsQuery, GetDataObjectDetailsQueryVariables>(
+      GetDataObjectDetails,
+      { id: objectId },
+      'storageDataObjectByUniqueInput'
+    )
+  }
+
+  public getDistributionBucketsWithObjectsByIds(ids: string[]): Promise<DistirubtionBucketWithObjectsFragment[]> {
+    return this.multipleEntitiesQuery<
+      GetDistributionBucketsWithObjectsByIdsQuery,
+      GetDistributionBucketsWithObjectsByIdsQueryVariables
+    >(GetDistributionBucketsWithObjectsByIds, { ids }, 'distributionBuckets')
+  }
+
+  public getDistributionBucketsWithObjectsByWorkerId(
+    workerId: number
+  ): Promise<DistirubtionBucketWithObjectsFragment[]> {
+    return this.multipleEntitiesQuery<
+      GetDistributionBucketsWithObjectsByWorkerIdQuery,
+      GetDistributionBucketsWithObjectsByWorkerIdQueryVariables
+    >(GetDistributionBucketsWithObjectsByWorkerId, { workerId }, 'distributionBuckets')
+  }
+
+  public getActiveStorageBucketOperatorsData(): Promise<StorageBucketOperatorFieldsFragment[]> {
+    return this.multipleEntitiesQuery<
+      GetActiveStorageBucketOperatorsDataQuery,
+      GetActiveStorageBucketOperatorsDataQueryVariables
+    >(GetActiveStorageBucketOperatorsData, {}, 'storageBuckets')
+  }
+}
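
A usage sketch (the query-node endpoint is illustrative):

    const queryNodeApi = new QueryNodeApi('http://localhost:8081/graphql')
    const details = await queryNodeApi.getDataObjectDetails('1')
    if (details) {
      console.log(details.ipfsHash, details.size, details.isAccepted)
    }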

+ 33 - 0
distributor-node/src/services/networking/query-node/codegen.yml

@@ -0,0 +1,33 @@
+# Paths are relative to the root distributor-node directory
+overwrite: true
+
+schema: '../query-node/generated/graphql-server/generated/schema.graphql'
+
+documents:
+  - 'src/services/networking/query-node/queries/*.graphql'
+
+config:
+  scalars:
+    Date: Date
+  preResolveTypes: true # avoid using Pick
+  skipTypename: true # skip __typename field in typings unless it's part of the query
+
+generates:
+  src/services/networking/query-node/generated/schema.ts:
+    hooks:
+      afterOneFileWrite:
+        - prettier --write
+        - eslint --fix
+    plugins:
+      - typescript
+  src/services/networking/query-node/generated/queries.ts:
+    preset: import-types
+    presetConfig:
+      typesPath: ./schema
+    hooks:
+      afterOneFileWrite:
+        - prettier --write
+        - eslint --fix
+    plugins:
+      - typescript-operations
+      - typescript-document-nodes
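
Regenerating the typings below is presumably a matter of pointing graphql-codegen at this file, e.g. `graphql-codegen --config src/services/networking/query-node/codegen.yml` run from the distributor-node directory (the exact package script is not shown in this hunk).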

+ 115 - 0
distributor-node/src/services/networking/query-node/generated/queries.ts

@@ -0,0 +1,115 @@
+import * as Types from './schema';
+
+import gql from 'graphql-tag';
+export type DataObjectDetailsFragment = { id: string, size: any, ipfsHash: string, isAccepted: boolean, storageBag: { storageAssignments: Array<{ storageBucket: { id: string, operatorMetadata?: Types.Maybe<{ nodeEndpoint?: Types.Maybe<string> }>, operatorStatus: { __typename: 'StorageBucketOperatorStatusMissing' } | { __typename: 'StorageBucketOperatorStatusInvited' } | { __typename: 'StorageBucketOperatorStatusActive' } } }>, distirbutionAssignments: Array<{ distributionBucket: { id: string, operators: Array<{ workerId: number, status: Types.DistributionBucketOperatorStatus }> } }> } };
+
+export type GetDataObjectDetailsQueryVariables = Types.Exact<{
+  id: Types.Scalars['ID'];
+}>;
+
+
+export type GetDataObjectDetailsQuery = { storageDataObjectByUniqueInput?: Types.Maybe<DataObjectDetailsFragment> };
+
+export type DistirubtionBucketWithObjectsFragment = { id: string, bagAssignments: Array<{ storageBag: { objects: Array<{ id: string, size: any, ipfsHash: string }> } }> };
+
+export type GetDistributionBucketsWithObjectsByIdsQueryVariables = Types.Exact<{
+  ids?: Types.Maybe<Array<Types.Scalars['ID']> | Types.Scalars['ID']>;
+}>;
+
+
+export type GetDistributionBucketsWithObjectsByIdsQuery = { distributionBuckets: Array<DistirubtionBucketWithObjectsFragment> };
+
+export type GetDistributionBucketsWithObjectsByWorkerIdQueryVariables = Types.Exact<{
+  workerId: Types.Scalars['Int'];
+}>;
+
+
+export type GetDistributionBucketsWithObjectsByWorkerIdQuery = { distributionBuckets: Array<DistirubtionBucketWithObjectsFragment> };
+
+export type StorageBucketOperatorFieldsFragment = { id: string, operatorMetadata?: Types.Maybe<{ nodeEndpoint?: Types.Maybe<string> }> };
+
+export type GetActiveStorageBucketOperatorsDataQueryVariables = Types.Exact<{ [key: string]: never; }>;
+
+
+export type GetActiveStorageBucketOperatorsDataQuery = { storageBuckets: Array<StorageBucketOperatorFieldsFragment> };
+
+export const DataObjectDetails = gql`
+    fragment DataObjectDetails on StorageDataObject {
+  id
+  size
+  ipfsHash
+  isAccepted
+  storageBag {
+    storageAssignments {
+      storageBucket {
+        id
+        operatorMetadata {
+          nodeEndpoint
+        }
+        operatorStatus {
+          __typename
+        }
+      }
+    }
+    distirbutionAssignments {
+      distributionBucket {
+        id
+        operators {
+          workerId
+          status
+        }
+      }
+    }
+  }
+}
+    `;
+export const DistirubtionBucketWithObjects = gql`
+    fragment DistirubtionBucketWithObjects on DistributionBucket {
+  id
+  bagAssignments {
+    storageBag {
+      objects {
+        id
+        size
+        ipfsHash
+      }
+    }
+  }
+}
+    `;
+export const StorageBucketOperatorFields = gql`
+    fragment StorageBucketOperatorFields on StorageBucket {
+  id
+  operatorMetadata {
+    nodeEndpoint
+  }
+}
+    `;
+export const GetDataObjectDetails = gql`
+    query getDataObjectDetails($id: ID!) {
+  storageDataObjectByUniqueInput(where: {id: $id}) {
+    ...DataObjectDetails
+  }
+}
+    ${DataObjectDetails}`;
+export const GetDistributionBucketsWithObjectsByIds = gql`
+    query getDistributionBucketsWithObjectsByIds($ids: [ID!]) {
+  distributionBuckets(where: {id_in: $ids}) {
+    ...DistirubtionBucketWithObjects
+  }
+}
+    ${DistirubtionBucketWithObjects}`;
+export const GetDistributionBucketsWithObjectsByWorkerId = gql`
+    query getDistributionBucketsWithObjectsByWorkerId($workerId: Int!) {
+  distributionBuckets(where: {operators_some: {workerId_eq: $workerId, status_eq: ACTIVE}}) {
+    ...DistirubtionBucketWithObjects
+  }
+}
+    ${DistirubtionBucketWithObjects}`;
+export const GetActiveStorageBucketOperatorsData = gql`
+    query getActiveStorageBucketOperatorsData {
+  storageBuckets(where: {operatorStatus_json: {isTypeOf_eq: "StorageBucketOperatorStatusActive"}, operatorMetadata: {nodeEndpoint_contains: "http"}}, limit: 9999) {
+    ...StorageBucketOperatorFields
+  }
+}
+    ${StorageBucketOperatorFields}`;

+ 4710 - 0
distributor-node/src/services/networking/query-node/generated/schema.ts

@@ -0,0 +1,4710 @@
+export type Maybe<T> = T | null;
+export type Exact<T extends { [key: string]: unknown }> = { [K in keyof T]: T[K] };
+export type MakeOptional<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]?: Maybe<T[SubKey]> };
+export type MakeMaybe<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]: Maybe<T[SubKey]> };
+/** All built-in and custom scalars, mapped to their actual values */
+export type Scalars = {
+  ID: string;
+  String: string;
+  Boolean: boolean;
+  Int: number;
+  Float: number;
+  /** The javascript `Date` as string. Type represents date and time as the ISO Date string. */
+  DateTime: any;
+  /** The `JSONObject` scalar type represents JSON objects as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf). */
+  JSONObject: any;
+  /** GraphQL representation of BigInt */
+  BigInt: any;
+};
+
+export enum AssetAvailability {
+  Accepted = 'ACCEPTED',
+  Pending = 'PENDING',
+  Invalid = 'INVALID'
+}
+
+export type BaseGraphQlObject = {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+};
+
+export type BaseModel = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+};
+
+export type BaseModelUuid = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+};
+
+export type BaseWhereInput = {
+  id_eq?: Maybe<Scalars['String']>;
+  id_in?: Maybe<Array<Scalars['String']>>;
+  createdAt_eq?: Maybe<Scalars['String']>;
+  createdAt_lt?: Maybe<Scalars['String']>;
+  createdAt_lte?: Maybe<Scalars['String']>;
+  createdAt_gt?: Maybe<Scalars['String']>;
+  createdAt_gte?: Maybe<Scalars['String']>;
+  createdById_eq?: Maybe<Scalars['String']>;
+  updatedAt_eq?: Maybe<Scalars['String']>;
+  updatedAt_lt?: Maybe<Scalars['String']>;
+  updatedAt_lte?: Maybe<Scalars['String']>;
+  updatedAt_gt?: Maybe<Scalars['String']>;
+  updatedAt_gte?: Maybe<Scalars['String']>;
+  updatedById_eq?: Maybe<Scalars['String']>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['String']>;
+  deletedAt_lt?: Maybe<Scalars['String']>;
+  deletedAt_lte?: Maybe<Scalars['String']>;
+  deletedAt_gt?: Maybe<Scalars['String']>;
+  deletedAt_gte?: Maybe<Scalars['String']>;
+  deletedById_eq?: Maybe<Scalars['String']>;
+};
+
+
+export type Channel = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  ownerMember?: Maybe<Membership>;
+  ownerMemberId?: Maybe<Scalars['String']>;
+  ownerCuratorGroup?: Maybe<CuratorGroup>;
+  ownerCuratorGroupId?: Maybe<Scalars['String']>;
+  category?: Maybe<ChannelCategory>;
+  categoryId?: Maybe<Scalars['String']>;
+  /** Reward account where revenue is sent if set. */
+  rewardAccount?: Maybe<Scalars['String']>;
+  /** The title of the Channel */
+  title?: Maybe<Scalars['String']>;
+  /** The description of a Channel */
+  description?: Maybe<Scalars['String']>;
+  coverPhotoDataObject?: Maybe<DataObject>;
+  coverPhotoDataObjectId?: Maybe<Scalars['String']>;
+  /** URLs where the asset content can be accessed (if any) */
+  coverPhotoUrls: Array<Scalars['String']>;
+  /** Availability meta information */
+  coverPhotoAvailability: AssetAvailability;
+  avatarPhotoDataObject?: Maybe<DataObject>;
+  avatarPhotoDataObjectId?: Maybe<Scalars['String']>;
+  /** URLs where the asset content can be accessed (if any) */
+  avatarPhotoUrls: Array<Scalars['String']>;
+  /** Availability meta information */
+  avatarPhotoAvailability: AssetAvailability;
+  /** Flag signaling whether a channel is public. */
+  isPublic?: Maybe<Scalars['Boolean']>;
+  /** Flag signaling whether a channel is censored. */
+  isCensored: Scalars['Boolean'];
+  language?: Maybe<Language>;
+  languageId?: Maybe<Scalars['String']>;
+  videos: Array<Video>;
+  createdInBlock: Scalars['Int'];
+};
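
As a usage sketch, asset URLs are presumably only servable once the asset is `ACCEPTED` (an assumption based on the `AssetAvailability` enum above; the helper is hypothetical):

```ts
// Hypothetical helper, not part of this diff: pick a cover photo URL only
// when the availability flag suggests the asset is actually servable.
function resolveCoverPhotoUrl(channel: Channel): string | undefined {
  return channel.coverPhotoAvailability === AssetAvailability.Accepted
    ? channel.coverPhotoUrls[0]
    : undefined
}
```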
+
+export type ChannelCategoriesByNameFtsOutput = {
+  item: ChannelCategoriesByNameSearchResult;
+  rank: Scalars['Float'];
+  isTypeOf: Scalars['String'];
+  highlight: Scalars['String'];
+};
+
+export type ChannelCategoriesByNameSearchResult = ChannelCategory;
+
+/** Category of media channel */
+export type ChannelCategory = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** The name of the category */
+  name?: Maybe<Scalars['String']>;
+  channels: Array<Channel>;
+  createdInBlock: Scalars['Int'];
+};
+
+export type ChannelCategoryConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<ChannelCategoryEdge>;
+  pageInfo: PageInfo;
+};
+
+export type ChannelCategoryCreateInput = {
+  name?: Maybe<Scalars['String']>;
+  createdInBlock: Scalars['Float'];
+};
+
+export type ChannelCategoryEdge = {
+  node: ChannelCategory;
+  cursor: Scalars['String'];
+};
+
+export enum ChannelCategoryOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NameAsc = 'name_ASC',
+  NameDesc = 'name_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC'
+}
+
+export type ChannelCategoryUpdateInput = {
+  name?: Maybe<Scalars['String']>;
+  createdInBlock?: Maybe<Scalars['Float']>;
+};
+
+export type ChannelCategoryWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  name_eq?: Maybe<Scalars['String']>;
+  name_contains?: Maybe<Scalars['String']>;
+  name_startsWith?: Maybe<Scalars['String']>;
+  name_endsWith?: Maybe<Scalars['String']>;
+  name_in?: Maybe<Array<Scalars['String']>>;
+  createdInBlock_eq?: Maybe<Scalars['Int']>;
+  createdInBlock_gt?: Maybe<Scalars['Int']>;
+  createdInBlock_gte?: Maybe<Scalars['Int']>;
+  createdInBlock_lt?: Maybe<Scalars['Int']>;
+  createdInBlock_lte?: Maybe<Scalars['Int']>;
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>;
+  channels_none?: Maybe<ChannelWhereInput>;
+  channels_some?: Maybe<ChannelWhereInput>;
+  channels_every?: Maybe<ChannelWhereInput>;
+  AND?: Maybe<Array<ChannelCategoryWhereInput>>;
+  OR?: Maybe<Array<ChannelCategoryWhereInput>>;
+};
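
The generated where-inputs encode operators as `field_op` suffixes and support `AND`/`OR` combinators plus `_none`/`_some`/`_every` relation filters. A sketch, with the declarations above assumed in scope:

```ts
// Sketch: categories created recently OR whose name starts with 'Music',
// restricted to those that still have at least one uncensored channel.
const filter: ChannelCategoryWhereInput = {
  OR: [{ createdInBlock_gte: 1_000_000 }, { name_startsWith: 'Music' }],
  channels_some: { isCensored_eq: false },
}
```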
+
+export type ChannelCategoryWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type ChannelConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<ChannelEdge>;
+  pageInfo: PageInfo;
+};
+
+export type ChannelCreateInput = {
+  ownerMember?: Maybe<Scalars['ID']>;
+  ownerCuratorGroup?: Maybe<Scalars['ID']>;
+  category?: Maybe<Scalars['ID']>;
+  rewardAccount?: Maybe<Scalars['String']>;
+  title?: Maybe<Scalars['String']>;
+  description?: Maybe<Scalars['String']>;
+  coverPhotoDataObject?: Maybe<Scalars['ID']>;
+  coverPhotoUrls: Array<Scalars['String']>;
+  coverPhotoAvailability: AssetAvailability;
+  avatarPhotoDataObject?: Maybe<Scalars['ID']>;
+  avatarPhotoUrls: Array<Scalars['String']>;
+  avatarPhotoAvailability: AssetAvailability;
+  isPublic?: Maybe<Scalars['Boolean']>;
+  isCensored: Scalars['Boolean'];
+  language?: Maybe<Scalars['ID']>;
+  createdInBlock: Scalars['Float'];
+};
+
+export type ChannelEdge = {
+  node: Channel;
+  cursor: Scalars['String'];
+};
+
+export enum ChannelOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  OwnerMemberAsc = 'ownerMember_ASC',
+  OwnerMemberDesc = 'ownerMember_DESC',
+  OwnerCuratorGroupAsc = 'ownerCuratorGroup_ASC',
+  OwnerCuratorGroupDesc = 'ownerCuratorGroup_DESC',
+  CategoryAsc = 'category_ASC',
+  CategoryDesc = 'category_DESC',
+  RewardAccountAsc = 'rewardAccount_ASC',
+  RewardAccountDesc = 'rewardAccount_DESC',
+  TitleAsc = 'title_ASC',
+  TitleDesc = 'title_DESC',
+  DescriptionAsc = 'description_ASC',
+  DescriptionDesc = 'description_DESC',
+  CoverPhotoDataObjectAsc = 'coverPhotoDataObject_ASC',
+  CoverPhotoDataObjectDesc = 'coverPhotoDataObject_DESC',
+  CoverPhotoAvailabilityAsc = 'coverPhotoAvailability_ASC',
+  CoverPhotoAvailabilityDesc = 'coverPhotoAvailability_DESC',
+  AvatarPhotoDataObjectAsc = 'avatarPhotoDataObject_ASC',
+  AvatarPhotoDataObjectDesc = 'avatarPhotoDataObject_DESC',
+  AvatarPhotoAvailabilityAsc = 'avatarPhotoAvailability_ASC',
+  AvatarPhotoAvailabilityDesc = 'avatarPhotoAvailability_DESC',
+  IsPublicAsc = 'isPublic_ASC',
+  IsPublicDesc = 'isPublic_DESC',
+  IsCensoredAsc = 'isCensored_ASC',
+  IsCensoredDesc = 'isCensored_DESC',
+  LanguageAsc = 'language_ASC',
+  LanguageDesc = 'language_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC'
+}
+
+export type ChannelUpdateInput = {
+  ownerMember?: Maybe<Scalars['ID']>;
+  ownerCuratorGroup?: Maybe<Scalars['ID']>;
+  category?: Maybe<Scalars['ID']>;
+  rewardAccount?: Maybe<Scalars['String']>;
+  title?: Maybe<Scalars['String']>;
+  description?: Maybe<Scalars['String']>;
+  coverPhotoDataObject?: Maybe<Scalars['ID']>;
+  coverPhotoUrls?: Maybe<Array<Scalars['String']>>;
+  coverPhotoAvailability?: Maybe<AssetAvailability>;
+  avatarPhotoDataObject?: Maybe<Scalars['ID']>;
+  avatarPhotoUrls?: Maybe<Array<Scalars['String']>>;
+  avatarPhotoAvailability?: Maybe<AssetAvailability>;
+  isPublic?: Maybe<Scalars['Boolean']>;
+  isCensored?: Maybe<Scalars['Boolean']>;
+  language?: Maybe<Scalars['ID']>;
+  createdInBlock?: Maybe<Scalars['Float']>;
+};
+
+export type ChannelWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  ownerMember_eq?: Maybe<Scalars['ID']>;
+  ownerMember_in?: Maybe<Array<Scalars['ID']>>;
+  ownerCuratorGroup_eq?: Maybe<Scalars['ID']>;
+  ownerCuratorGroup_in?: Maybe<Array<Scalars['ID']>>;
+  category_eq?: Maybe<Scalars['ID']>;
+  category_in?: Maybe<Array<Scalars['ID']>>;
+  rewardAccount_eq?: Maybe<Scalars['String']>;
+  rewardAccount_contains?: Maybe<Scalars['String']>;
+  rewardAccount_startsWith?: Maybe<Scalars['String']>;
+  rewardAccount_endsWith?: Maybe<Scalars['String']>;
+  rewardAccount_in?: Maybe<Array<Scalars['String']>>;
+  title_eq?: Maybe<Scalars['String']>;
+  title_contains?: Maybe<Scalars['String']>;
+  title_startsWith?: Maybe<Scalars['String']>;
+  title_endsWith?: Maybe<Scalars['String']>;
+  title_in?: Maybe<Array<Scalars['String']>>;
+  description_eq?: Maybe<Scalars['String']>;
+  description_contains?: Maybe<Scalars['String']>;
+  description_startsWith?: Maybe<Scalars['String']>;
+  description_endsWith?: Maybe<Scalars['String']>;
+  description_in?: Maybe<Array<Scalars['String']>>;
+  coverPhotoDataObject_eq?: Maybe<Scalars['ID']>;
+  coverPhotoDataObject_in?: Maybe<Array<Scalars['ID']>>;
+  coverPhotoAvailability_eq?: Maybe<AssetAvailability>;
+  coverPhotoAvailability_in?: Maybe<Array<AssetAvailability>>;
+  avatarPhotoDataObject_eq?: Maybe<Scalars['ID']>;
+  avatarPhotoDataObject_in?: Maybe<Array<Scalars['ID']>>;
+  avatarPhotoAvailability_eq?: Maybe<AssetAvailability>;
+  avatarPhotoAvailability_in?: Maybe<Array<AssetAvailability>>;
+  isPublic_eq?: Maybe<Scalars['Boolean']>;
+  isPublic_in?: Maybe<Array<Scalars['Boolean']>>;
+  isCensored_eq?: Maybe<Scalars['Boolean']>;
+  isCensored_in?: Maybe<Array<Scalars['Boolean']>>;
+  language_eq?: Maybe<Scalars['ID']>;
+  language_in?: Maybe<Array<Scalars['ID']>>;
+  createdInBlock_eq?: Maybe<Scalars['Int']>;
+  createdInBlock_gt?: Maybe<Scalars['Int']>;
+  createdInBlock_gte?: Maybe<Scalars['Int']>;
+  createdInBlock_lt?: Maybe<Scalars['Int']>;
+  createdInBlock_lte?: Maybe<Scalars['Int']>;
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>;
+  ownerMember?: Maybe<MembershipWhereInput>;
+  ownerCuratorGroup?: Maybe<CuratorGroupWhereInput>;
+  category?: Maybe<ChannelCategoryWhereInput>;
+  coverPhotoDataObject?: Maybe<DataObjectWhereInput>;
+  avatarPhotoDataObject?: Maybe<DataObjectWhereInput>;
+  language?: Maybe<LanguageWhereInput>;
+  videos_none?: Maybe<VideoWhereInput>;
+  videos_some?: Maybe<VideoWhereInput>;
+  videos_every?: Maybe<VideoWhereInput>;
+  AND?: Maybe<Array<ChannelWhereInput>>;
+  OR?: Maybe<Array<ChannelWhereInput>>;
+};
+
+export type ChannelWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type CuratorGroup = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** Curators belonging to this group */
+  curatorIds: Array<Scalars['Int']>;
+  /** Whether the group is active */
+  isActive: Scalars['Boolean'];
+  channels: Array<Channel>;
+};
+
+export type CuratorGroupConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<CuratorGroupEdge>;
+  pageInfo: PageInfo;
+};
+
+export type CuratorGroupCreateInput = {
+  curatorIds: Array<Scalars['Int']>;
+  isActive: Scalars['Boolean'];
+};
+
+export type CuratorGroupEdge = {
+  node: CuratorGroup;
+  cursor: Scalars['String'];
+};
+
+export enum CuratorGroupOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsActiveAsc = 'isActive_ASC',
+  IsActiveDesc = 'isActive_DESC'
+}
+
+export type CuratorGroupUpdateInput = {
+  curatorIds?: Maybe<Array<Scalars['Int']>>;
+  isActive?: Maybe<Scalars['Boolean']>;
+};
+
+export type CuratorGroupWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  isActive_eq?: Maybe<Scalars['Boolean']>;
+  isActive_in?: Maybe<Array<Scalars['Boolean']>>;
+  channels_none?: Maybe<ChannelWhereInput>;
+  channels_some?: Maybe<ChannelWhereInput>;
+  channels_every?: Maybe<ChannelWhereInput>;
+  AND?: Maybe<Array<CuratorGroupWhereInput>>;
+  OR?: Maybe<Array<CuratorGroupWhereInput>>;
+};
+
+export type CuratorGroupWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+/** Manages content ids, type and storage provider decision about it */
+export type DataObject = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** Content owner */
+  owner: DataObjectOwner;
+  /** Block in which the content was added */
+  createdInBlock: Scalars['Int'];
+  /** Content type id */
+  typeId: Scalars['Int'];
+  /** Content size in bytes */
+  size: Scalars['Int'];
+  liaison?: Maybe<Worker>;
+  liaisonId?: Maybe<Scalars['String']>;
+  /** Judgement of the storage provider acting as liaison */
+  liaisonJudgement: LiaisonJudgement;
+  /** IPFS content id */
+  ipfsContentId: Scalars['String'];
+  /** Joystream runtime content ID */
+  joystreamContentId: Scalars['String'];
+  channelcoverPhotoDataObject?: Maybe<Array<Channel>>;
+  channelavatarPhotoDataObject?: Maybe<Array<Channel>>;
+  videothumbnailPhotoDataObject?: Maybe<Array<Video>>;
+  videomediaDataObject?: Maybe<Array<Video>>;
+};
+
+export type DataObjectConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<DataObjectEdge>;
+  pageInfo: PageInfo;
+};
+
+export type DataObjectCreateInput = {
+  owner: Scalars['JSONObject'];
+  createdInBlock: Scalars['Float'];
+  typeId: Scalars['Float'];
+  size: Scalars['Float'];
+  liaison?: Maybe<Scalars['ID']>;
+  liaisonJudgement: LiaisonJudgement;
+  ipfsContentId: Scalars['String'];
+  joystreamContentId: Scalars['String'];
+};
+
+export type DataObjectEdge = {
+  node: DataObject;
+  cursor: Scalars['String'];
+};
+
+export enum DataObjectOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+  TypeIdAsc = 'typeId_ASC',
+  TypeIdDesc = 'typeId_DESC',
+  SizeAsc = 'size_ASC',
+  SizeDesc = 'size_DESC',
+  LiaisonAsc = 'liaison_ASC',
+  LiaisonDesc = 'liaison_DESC',
+  LiaisonJudgementAsc = 'liaisonJudgement_ASC',
+  LiaisonJudgementDesc = 'liaisonJudgement_DESC',
+  IpfsContentIdAsc = 'ipfsContentId_ASC',
+  IpfsContentIdDesc = 'ipfsContentId_DESC',
+  JoystreamContentIdAsc = 'joystreamContentId_ASC',
+  JoystreamContentIdDesc = 'joystreamContentId_DESC'
+}
+
+export type DataObjectOwner = DataObjectOwnerMember | DataObjectOwnerChannel | DataObjectOwnerDao | DataObjectOwnerCouncil | DataObjectOwnerWorkingGroup;
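
The `DataObjectOwner` union carries no discriminant field in these generated types, so runtime narrowing has to rely on `__typename` being selected in the query. A hypothetical sketch under that assumption:

```ts
// Assumes the query also selected __typename (not guaranteed by these types).
type WithTypename<T> = T & { __typename: string }

function describeOwner(owner: WithTypename<DataObjectOwner>): string {
  switch (owner.__typename) {
    case 'DataObjectOwnerMember':
      return `member ${(owner as DataObjectOwnerMember).member}`
    case 'DataObjectOwnerChannel':
      return `channel ${(owner as DataObjectOwnerChannel).channel}`
    default:
      return owner.__typename // council / dao / working-group variants
  }
}
```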
+
+export type DataObjectOwnerChannel = {
+  /** Channel identifier */
+  channel: Scalars['Int'];
+  /** Variant needs to have at least one property. This value is not used. */
+  dummy?: Maybe<Scalars['Int']>;
+};
+
+export type DataObjectOwnerChannelCreateInput = {
+  channel: Scalars['Float'];
+  dummy?: Maybe<Scalars['Float']>;
+};
+
+export type DataObjectOwnerChannelUpdateInput = {
+  channel?: Maybe<Scalars['Float']>;
+  dummy?: Maybe<Scalars['Float']>;
+};
+
+export type DataObjectOwnerChannelWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  channel_eq?: Maybe<Scalars['Int']>;
+  channel_gt?: Maybe<Scalars['Int']>;
+  channel_gte?: Maybe<Scalars['Int']>;
+  channel_lt?: Maybe<Scalars['Int']>;
+  channel_lte?: Maybe<Scalars['Int']>;
+  channel_in?: Maybe<Array<Scalars['Int']>>;
+  dummy_eq?: Maybe<Scalars['Int']>;
+  dummy_gt?: Maybe<Scalars['Int']>;
+  dummy_gte?: Maybe<Scalars['Int']>;
+  dummy_lt?: Maybe<Scalars['Int']>;
+  dummy_lte?: Maybe<Scalars['Int']>;
+  dummy_in?: Maybe<Array<Scalars['Int']>>;
+  AND?: Maybe<Array<DataObjectOwnerChannelWhereInput>>;
+  OR?: Maybe<Array<DataObjectOwnerChannelWhereInput>>;
+};
+
+export type DataObjectOwnerChannelWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type DataObjectOwnerCouncil = {
+  /** Variant needs to have at least one property. This value is not used. */
+  dummy?: Maybe<Scalars['Int']>;
+};
+
+export type DataObjectOwnerCouncilCreateInput = {
+  dummy?: Maybe<Scalars['Float']>;
+};
+
+export type DataObjectOwnerCouncilUpdateInput = {
+  dummy?: Maybe<Scalars['Float']>;
+};
+
+export type DataObjectOwnerCouncilWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  dummy_eq?: Maybe<Scalars['Int']>;
+  dummy_gt?: Maybe<Scalars['Int']>;
+  dummy_gte?: Maybe<Scalars['Int']>;
+  dummy_lt?: Maybe<Scalars['Int']>;
+  dummy_lte?: Maybe<Scalars['Int']>;
+  dummy_in?: Maybe<Array<Scalars['Int']>>;
+  AND?: Maybe<Array<DataObjectOwnerCouncilWhereInput>>;
+  OR?: Maybe<Array<DataObjectOwnerCouncilWhereInput>>;
+};
+
+export type DataObjectOwnerCouncilWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type DataObjectOwnerDao = {
+  /** DAO identifier */
+  dao: Scalars['Int'];
+};
+
+export type DataObjectOwnerDaoCreateInput = {
+  dao: Scalars['Float'];
+};
+
+export type DataObjectOwnerDaoUpdateInput = {
+  dao?: Maybe<Scalars['Float']>;
+};
+
+export type DataObjectOwnerDaoWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  dao_eq?: Maybe<Scalars['Int']>;
+  dao_gt?: Maybe<Scalars['Int']>;
+  dao_gte?: Maybe<Scalars['Int']>;
+  dao_lt?: Maybe<Scalars['Int']>;
+  dao_lte?: Maybe<Scalars['Int']>;
+  dao_in?: Maybe<Array<Scalars['Int']>>;
+  AND?: Maybe<Array<DataObjectOwnerDaoWhereInput>>;
+  OR?: Maybe<Array<DataObjectOwnerDaoWhereInput>>;
+};
+
+export type DataObjectOwnerDaoWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type DataObjectOwnerMember = {
+  /** Member identifier */
+  member: Scalars['Int'];
+  /** Variant needs to have at least one property. This value is not used. */
+  dummy?: Maybe<Scalars['Int']>;
+};
+
+export type DataObjectOwnerMemberCreateInput = {
+  member: Scalars['Float'];
+  dummy?: Maybe<Scalars['Float']>;
+};
+
+export type DataObjectOwnerMemberUpdateInput = {
+  member?: Maybe<Scalars['Float']>;
+  dummy?: Maybe<Scalars['Float']>;
+};
+
+export type DataObjectOwnerMemberWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  member_eq?: Maybe<Scalars['Int']>;
+  member_gt?: Maybe<Scalars['Int']>;
+  member_gte?: Maybe<Scalars['Int']>;
+  member_lt?: Maybe<Scalars['Int']>;
+  member_lte?: Maybe<Scalars['Int']>;
+  member_in?: Maybe<Array<Scalars['Int']>>;
+  dummy_eq?: Maybe<Scalars['Int']>;
+  dummy_gt?: Maybe<Scalars['Int']>;
+  dummy_gte?: Maybe<Scalars['Int']>;
+  dummy_lt?: Maybe<Scalars['Int']>;
+  dummy_lte?: Maybe<Scalars['Int']>;
+  dummy_in?: Maybe<Array<Scalars['Int']>>;
+  AND?: Maybe<Array<DataObjectOwnerMemberWhereInput>>;
+  OR?: Maybe<Array<DataObjectOwnerMemberWhereInput>>;
+};
+
+export type DataObjectOwnerMemberWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type DataObjectOwnerWorkingGroup = {
+  /** Working group identifier */
+  workingGroup: Scalars['Int'];
+};
+
+export type DataObjectOwnerWorkingGroupCreateInput = {
+  workingGroup: Scalars['Float'];
+};
+
+export type DataObjectOwnerWorkingGroupUpdateInput = {
+  workingGroup?: Maybe<Scalars['Float']>;
+};
+
+export type DataObjectOwnerWorkingGroupWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  workingGroup_eq?: Maybe<Scalars['Int']>;
+  workingGroup_gt?: Maybe<Scalars['Int']>;
+  workingGroup_gte?: Maybe<Scalars['Int']>;
+  workingGroup_lt?: Maybe<Scalars['Int']>;
+  workingGroup_lte?: Maybe<Scalars['Int']>;
+  workingGroup_in?: Maybe<Array<Scalars['Int']>>;
+  AND?: Maybe<Array<DataObjectOwnerWorkingGroupWhereInput>>;
+  OR?: Maybe<Array<DataObjectOwnerWorkingGroupWhereInput>>;
+};
+
+export type DataObjectOwnerWorkingGroupWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type DataObjectUpdateInput = {
+  owner?: Maybe<Scalars['JSONObject']>;
+  createdInBlock?: Maybe<Scalars['Float']>;
+  typeId?: Maybe<Scalars['Float']>;
+  size?: Maybe<Scalars['Float']>;
+  liaison?: Maybe<Scalars['ID']>;
+  liaisonJudgement?: Maybe<LiaisonJudgement>;
+  ipfsContentId?: Maybe<Scalars['String']>;
+  joystreamContentId?: Maybe<Scalars['String']>;
+};
+
+export type DataObjectWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  owner_json?: Maybe<Scalars['JSONObject']>;
+  createdInBlock_eq?: Maybe<Scalars['Int']>;
+  createdInBlock_gt?: Maybe<Scalars['Int']>;
+  createdInBlock_gte?: Maybe<Scalars['Int']>;
+  createdInBlock_lt?: Maybe<Scalars['Int']>;
+  createdInBlock_lte?: Maybe<Scalars['Int']>;
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>;
+  typeId_eq?: Maybe<Scalars['Int']>;
+  typeId_gt?: Maybe<Scalars['Int']>;
+  typeId_gte?: Maybe<Scalars['Int']>;
+  typeId_lt?: Maybe<Scalars['Int']>;
+  typeId_lte?: Maybe<Scalars['Int']>;
+  typeId_in?: Maybe<Array<Scalars['Int']>>;
+  size_eq?: Maybe<Scalars['Int']>;
+  size_gt?: Maybe<Scalars['Int']>;
+  size_gte?: Maybe<Scalars['Int']>;
+  size_lt?: Maybe<Scalars['Int']>;
+  size_lte?: Maybe<Scalars['Int']>;
+  size_in?: Maybe<Array<Scalars['Int']>>;
+  liaison_eq?: Maybe<Scalars['ID']>;
+  liaison_in?: Maybe<Array<Scalars['ID']>>;
+  liaisonJudgement_eq?: Maybe<LiaisonJudgement>;
+  liaisonJudgement_in?: Maybe<Array<LiaisonJudgement>>;
+  ipfsContentId_eq?: Maybe<Scalars['String']>;
+  ipfsContentId_contains?: Maybe<Scalars['String']>;
+  ipfsContentId_startsWith?: Maybe<Scalars['String']>;
+  ipfsContentId_endsWith?: Maybe<Scalars['String']>;
+  ipfsContentId_in?: Maybe<Array<Scalars['String']>>;
+  joystreamContentId_eq?: Maybe<Scalars['String']>;
+  joystreamContentId_contains?: Maybe<Scalars['String']>;
+  joystreamContentId_startsWith?: Maybe<Scalars['String']>;
+  joystreamContentId_endsWith?: Maybe<Scalars['String']>;
+  joystreamContentId_in?: Maybe<Array<Scalars['String']>>;
+  liaison?: Maybe<WorkerWhereInput>;
+  channelcoverPhotoDataObject_none?: Maybe<ChannelWhereInput>;
+  channelcoverPhotoDataObject_some?: Maybe<ChannelWhereInput>;
+  channelcoverPhotoDataObject_every?: Maybe<ChannelWhereInput>;
+  channelavatarPhotoDataObject_none?: Maybe<ChannelWhereInput>;
+  channelavatarPhotoDataObject_some?: Maybe<ChannelWhereInput>;
+  channelavatarPhotoDataObject_every?: Maybe<ChannelWhereInput>;
+  videothumbnailPhotoDataObject_none?: Maybe<VideoWhereInput>;
+  videothumbnailPhotoDataObject_some?: Maybe<VideoWhereInput>;
+  videothumbnailPhotoDataObject_every?: Maybe<VideoWhereInput>;
+  videomediaDataObject_none?: Maybe<VideoWhereInput>;
+  videomediaDataObject_some?: Maybe<VideoWhereInput>;
+  videomediaDataObject_every?: Maybe<VideoWhereInput>;
+  AND?: Maybe<Array<DataObjectWhereInput>>;
+  OR?: Maybe<Array<DataObjectWhereInput>>;
+};
+
+export type DataObjectWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+
+export type DeleteResponse = {
+  id: Scalars['ID'];
+};
+
+export type DistributionBucket = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  family: DistributionBucketFamily;
+  familyId: Scalars['String'];
+  operators: Array<DistributionBucketOperator>;
+  /** Whether the bucket is accepting any new bags */
+  acceptingNewBags: Scalars['Boolean'];
+  /** Whether the bucket is currently distributing content */
+  distributing: Scalars['Boolean'];
+  bagAssignments: Array<StorageBagDistributionAssignment>;
+};
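
Given the `DistributionBucketWithObjects` fragment shape above, a bucket's total distributed payload can be derived client-side. A sketch, using a structural type that mirrors the fragment (`size` is an `Int` scalar, i.e. a `number` here):

```ts
// Sketch, assuming data shaped like the DistributionBucketWithObjects fragment.
type BucketWithObjects = {
  bagAssignments: { storageBag: { objects: { size: number }[] } }[]
}

function totalDistributedBytes(bucket: BucketWithObjects): number {
  return bucket.bagAssignments.reduce(
    (sum, assignment) =>
      sum + assignment.storageBag.objects.reduce((s, o) => s + o.size, 0),
    0
  )
}
```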
+
+export type DistributionBucketConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<DistributionBucketEdge>;
+  pageInfo: PageInfo;
+};
+
+export type DistributionBucketCreateInput = {
+  family: Scalars['ID'];
+  acceptingNewBags: Scalars['Boolean'];
+  distributing: Scalars['Boolean'];
+};
+
+export type DistributionBucketEdge = {
+  node: DistributionBucket;
+  cursor: Scalars['String'];
+};
+
+export type DistributionBucketFamily = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  metadata?: Maybe<DistributionBucketFamilyMetadata>;
+  metadataId?: Maybe<Scalars['String']>;
+  buckets: Array<DistributionBucket>;
+};
+
+export type DistributionBucketFamilyConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<DistributionBucketFamilyEdge>;
+  pageInfo: PageInfo;
+};
+
+export type DistributionBucketFamilyCreateInput = {
+  metadata?: Maybe<Scalars['ID']>;
+};
+
+export type DistributionBucketFamilyEdge = {
+  node: DistributionBucketFamily;
+  cursor: Scalars['String'];
+};
+
+export type DistributionBucketFamilyMetadata = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** Name of the geographical region covered by the family (e.g. us-east-1) */
+  region?: Maybe<Scalars['String']>;
+  /** Optional, more specific description of the region covered by the family */
+  description?: Maybe<Scalars['String']>;
+  boundary: Array<GeoCoordinates>;
+  distributionbucketfamilymetadata?: Maybe<Array<DistributionBucketFamily>>;
+};
+
+export type DistributionBucketFamilyMetadataConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<DistributionBucketFamilyMetadataEdge>;
+  pageInfo: PageInfo;
+};
+
+export type DistributionBucketFamilyMetadataCreateInput = {
+  region?: Maybe<Scalars['String']>;
+  description?: Maybe<Scalars['String']>;
+};
+
+export type DistributionBucketFamilyMetadataEdge = {
+  node: DistributionBucketFamilyMetadata;
+  cursor: Scalars['String'];
+};
+
+export enum DistributionBucketFamilyMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  RegionAsc = 'region_ASC',
+  RegionDesc = 'region_DESC',
+  DescriptionAsc = 'description_ASC',
+  DescriptionDesc = 'description_DESC'
+}
+
+export type DistributionBucketFamilyMetadataUpdateInput = {
+  region?: Maybe<Scalars['String']>;
+  description?: Maybe<Scalars['String']>;
+};
+
+export type DistributionBucketFamilyMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  region_eq?: Maybe<Scalars['String']>;
+  region_contains?: Maybe<Scalars['String']>;
+  region_startsWith?: Maybe<Scalars['String']>;
+  region_endsWith?: Maybe<Scalars['String']>;
+  region_in?: Maybe<Array<Scalars['String']>>;
+  description_eq?: Maybe<Scalars['String']>;
+  description_contains?: Maybe<Scalars['String']>;
+  description_startsWith?: Maybe<Scalars['String']>;
+  description_endsWith?: Maybe<Scalars['String']>;
+  description_in?: Maybe<Array<Scalars['String']>>;
+  boundary_none?: Maybe<GeoCoordinatesWhereInput>;
+  boundary_some?: Maybe<GeoCoordinatesWhereInput>;
+  boundary_every?: Maybe<GeoCoordinatesWhereInput>;
+  distributionbucketfamilymetadata_none?: Maybe<DistributionBucketFamilyWhereInput>;
+  distributionbucketfamilymetadata_some?: Maybe<DistributionBucketFamilyWhereInput>;
+  distributionbucketfamilymetadata_every?: Maybe<DistributionBucketFamilyWhereInput>;
+  AND?: Maybe<Array<DistributionBucketFamilyMetadataWhereInput>>;
+  OR?: Maybe<Array<DistributionBucketFamilyMetadataWhereInput>>;
+};
+
+export type DistributionBucketFamilyMetadataWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export enum DistributionBucketFamilyOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  MetadataAsc = 'metadata_ASC',
+  MetadataDesc = 'metadata_DESC'
+}
+
+export type DistributionBucketFamilyUpdateInput = {
+  metadata?: Maybe<Scalars['ID']>;
+};
+
+export type DistributionBucketFamilyWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  metadata_eq?: Maybe<Scalars['ID']>;
+  metadata_in?: Maybe<Array<Scalars['ID']>>;
+  metadata?: Maybe<DistributionBucketFamilyMetadataWhereInput>;
+  buckets_none?: Maybe<DistributionBucketWhereInput>;
+  buckets_some?: Maybe<DistributionBucketWhereInput>;
+  buckets_every?: Maybe<DistributionBucketWhereInput>;
+  AND?: Maybe<Array<DistributionBucketFamilyWhereInput>>;
+  OR?: Maybe<Array<DistributionBucketFamilyWhereInput>>;
+};
+
+export type DistributionBucketFamilyWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type DistributionBucketOperator = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  distributionBucket: DistributionBucket;
+  distributionBucketId: Scalars['String'];
+  /** ID of the distribution group worker */
+  workerId: Scalars['Int'];
+  /** Current operator status */
+  status: DistributionBucketOperatorStatus;
+  metadata?: Maybe<DistributionBucketOperatorMetadata>;
+  metadataId?: Maybe<Scalars['String']>;
+};
+
+export type DistributionBucketOperatorConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<DistributionBucketOperatorEdge>;
+  pageInfo: PageInfo;
+};
+
+export type DistributionBucketOperatorCreateInput = {
+  distributionBucket: Scalars['ID'];
+  workerId: Scalars['Float'];
+  status: DistributionBucketOperatorStatus;
+  metadata?: Maybe<Scalars['ID']>;
+};
+
+export type DistributionBucketOperatorEdge = {
+  node: DistributionBucketOperator;
+  cursor: Scalars['String'];
+};
+
+export type DistributionBucketOperatorMetadata = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** Root distributor node API endpoint */
+  nodeEndpoint?: Maybe<Scalars['String']>;
+  nodeLocation?: Maybe<NodeLocationMetadata>;
+  nodeLocationId?: Maybe<Scalars['String']>;
+  /** Additional information about the node/operator */
+  extra?: Maybe<Scalars['String']>;
+  distributionbucketoperatormetadata?: Maybe<Array<DistributionBucketOperator>>;
+};
+
+export type DistributionBucketOperatorMetadataConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<DistributionBucketOperatorMetadataEdge>;
+  pageInfo: PageInfo;
+};
+
+export type DistributionBucketOperatorMetadataCreateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>;
+  nodeLocation?: Maybe<Scalars['ID']>;
+  extra?: Maybe<Scalars['String']>;
+};
+
+export type DistributionBucketOperatorMetadataEdge = {
+  node: DistributionBucketOperatorMetadata;
+  cursor: Scalars['String'];
+};
+
+export enum DistributionBucketOperatorMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NodeEndpointAsc = 'nodeEndpoint_ASC',
+  NodeEndpointDesc = 'nodeEndpoint_DESC',
+  NodeLocationAsc = 'nodeLocation_ASC',
+  NodeLocationDesc = 'nodeLocation_DESC',
+  ExtraAsc = 'extra_ASC',
+  ExtraDesc = 'extra_DESC'
+}
+
+export type DistributionBucketOperatorMetadataUpdateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>;
+  nodeLocation?: Maybe<Scalars['ID']>;
+  extra?: Maybe<Scalars['String']>;
+};
+
+export type DistributionBucketOperatorMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  nodeEndpoint_eq?: Maybe<Scalars['String']>;
+  nodeEndpoint_contains?: Maybe<Scalars['String']>;
+  nodeEndpoint_startsWith?: Maybe<Scalars['String']>;
+  nodeEndpoint_endsWith?: Maybe<Scalars['String']>;
+  nodeEndpoint_in?: Maybe<Array<Scalars['String']>>;
+  nodeLocation_eq?: Maybe<Scalars['ID']>;
+  nodeLocation_in?: Maybe<Array<Scalars['ID']>>;
+  extra_eq?: Maybe<Scalars['String']>;
+  extra_contains?: Maybe<Scalars['String']>;
+  extra_startsWith?: Maybe<Scalars['String']>;
+  extra_endsWith?: Maybe<Scalars['String']>;
+  extra_in?: Maybe<Array<Scalars['String']>>;
+  nodeLocation?: Maybe<NodeLocationMetadataWhereInput>;
+  distributionbucketoperatormetadata_none?: Maybe<DistributionBucketOperatorWhereInput>;
+  distributionbucketoperatormetadata_some?: Maybe<DistributionBucketOperatorWhereInput>;
+  distributionbucketoperatormetadata_every?: Maybe<DistributionBucketOperatorWhereInput>;
+  AND?: Maybe<Array<DistributionBucketOperatorMetadataWhereInput>>;
+  OR?: Maybe<Array<DistributionBucketOperatorMetadataWhereInput>>;
+};
+
+export type DistributionBucketOperatorMetadataWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export enum DistributionBucketOperatorOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  DistributionBucketAsc = 'distributionBucket_ASC',
+  DistributionBucketDesc = 'distributionBucket_DESC',
+  WorkerIdAsc = 'workerId_ASC',
+  WorkerIdDesc = 'workerId_DESC',
+  StatusAsc = 'status_ASC',
+  StatusDesc = 'status_DESC',
+  MetadataAsc = 'metadata_ASC',
+  MetadataDesc = 'metadata_DESC'
+}
+
+export enum DistributionBucketOperatorStatus {
+  Invited = 'INVITED',
+  Active = 'ACTIVE'
+}
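
When building variables programmatically, the generated enum can replace the raw `ACTIVE` literal hard-coded in `getDistributionBucketsWithObjectsByWorkerId` above. A sketch against the `DistributionBucketOperatorWhereInput` declared just below:

```ts
// Sketch: filter for a given worker's ACTIVE bucket operatorships.
const activeOperatorsOf = (workerId: number): DistributionBucketOperatorWhereInput => ({
  workerId_eq: workerId,
  status_eq: DistributionBucketOperatorStatus.Active,
})
```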
+
+export type DistributionBucketOperatorUpdateInput = {
+  distributionBucket?: Maybe<Scalars['ID']>;
+  workerId?: Maybe<Scalars['Float']>;
+  status?: Maybe<DistributionBucketOperatorStatus>;
+  metadata?: Maybe<Scalars['ID']>;
+};
+
+export type DistributionBucketOperatorWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  distributionBucket_eq?: Maybe<Scalars['ID']>;
+  distributionBucket_in?: Maybe<Array<Scalars['ID']>>;
+  workerId_eq?: Maybe<Scalars['Int']>;
+  workerId_gt?: Maybe<Scalars['Int']>;
+  workerId_gte?: Maybe<Scalars['Int']>;
+  workerId_lt?: Maybe<Scalars['Int']>;
+  workerId_lte?: Maybe<Scalars['Int']>;
+  workerId_in?: Maybe<Array<Scalars['Int']>>;
+  status_eq?: Maybe<DistributionBucketOperatorStatus>;
+  status_in?: Maybe<Array<DistributionBucketOperatorStatus>>;
+  metadata_eq?: Maybe<Scalars['ID']>;
+  metadata_in?: Maybe<Array<Scalars['ID']>>;
+  distributionBucket?: Maybe<DistributionBucketWhereInput>;
+  metadata?: Maybe<DistributionBucketOperatorMetadataWhereInput>;
+  AND?: Maybe<Array<DistributionBucketOperatorWhereInput>>;
+  OR?: Maybe<Array<DistributionBucketOperatorWhereInput>>;
+};
+
+export type DistributionBucketOperatorWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export enum DistributionBucketOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  FamilyAsc = 'family_ASC',
+  FamilyDesc = 'family_DESC',
+  AcceptingNewBagsAsc = 'acceptingNewBags_ASC',
+  AcceptingNewBagsDesc = 'acceptingNewBags_DESC',
+  DistributingAsc = 'distributing_ASC',
+  DistributingDesc = 'distributing_DESC'
+}
+
+export type DistributionBucketUpdateInput = {
+  family?: Maybe<Scalars['ID']>;
+  acceptingNewBags?: Maybe<Scalars['Boolean']>;
+  distributing?: Maybe<Scalars['Boolean']>;
+};
+
+export type DistributionBucketWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  family_eq?: Maybe<Scalars['ID']>;
+  family_in?: Maybe<Array<Scalars['ID']>>;
+  acceptingNewBags_eq?: Maybe<Scalars['Boolean']>;
+  acceptingNewBags_in?: Maybe<Array<Scalars['Boolean']>>;
+  distributing_eq?: Maybe<Scalars['Boolean']>;
+  distributing_in?: Maybe<Array<Scalars['Boolean']>>;
+  family?: Maybe<DistributionBucketFamilyWhereInput>;
+  operators_none?: Maybe<DistributionBucketOperatorWhereInput>;
+  operators_some?: Maybe<DistributionBucketOperatorWhereInput>;
+  operators_every?: Maybe<DistributionBucketOperatorWhereInput>;
+  bagAssignments_none?: Maybe<StorageBagDistributionAssignmentWhereInput>;
+  bagAssignments_some?: Maybe<StorageBagDistributionAssignmentWhereInput>;
+  bagAssignments_every?: Maybe<StorageBagDistributionAssignmentWhereInput>;
+  AND?: Maybe<Array<DistributionBucketWhereInput>>;
+  OR?: Maybe<Array<DistributionBucketWhereInput>>;
+};
+
+export type DistributionBucketWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type GeoCoordinates = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  latitude: Scalars['Float'];
+  longitude: Scalars['Float'];
+  boundarySourceBucketFamilyMeta?: Maybe<DistributionBucketFamilyMetadata>;
+  boundarySourceBucketFamilyMetaId?: Maybe<Scalars['String']>;
+  nodelocationmetadatacoordinates?: Maybe<Array<NodeLocationMetadata>>;
+};
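
Since `GeoCoordinates` stores plain latitude/longitude floats, distance-based node selection reduces to standard great-circle math. A hypothetical helper, not part of this diff:

```ts
// Haversine distance in km between two coordinate rows, e.g. to rank
// distributor nodes by proximity. Pure math; names are illustrative.
function haversineKm(
  a: Pick<GeoCoordinates, 'latitude' | 'longitude'>,
  b: Pick<GeoCoordinates, 'latitude' | 'longitude'>
): number {
  const R = 6371 // mean Earth radius, km
  const rad = (deg: number) => (deg * Math.PI) / 180
  const dLat = rad(b.latitude - a.latitude)
  const dLon = rad(b.longitude - a.longitude)
  const h =
    Math.sin(dLat / 2) ** 2 +
    Math.cos(rad(a.latitude)) * Math.cos(rad(b.latitude)) * Math.sin(dLon / 2) ** 2
  return 2 * R * Math.asin(Math.sqrt(h))
}
```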
+
+export type GeoCoordinatesConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<GeoCoordinatesEdge>;
+  pageInfo: PageInfo;
+};
+
+export type GeoCoordinatesCreateInput = {
+  latitude: Scalars['Float'];
+  longitude: Scalars['Float'];
+  boundarySourceBucketFamilyMeta?: Maybe<Scalars['ID']>;
+};
+
+export type GeoCoordinatesEdge = {
+  node: GeoCoordinates;
+  cursor: Scalars['String'];
+};
+
+export enum GeoCoordinatesOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  LatitudeAsc = 'latitude_ASC',
+  LatitudeDesc = 'latitude_DESC',
+  LongitudeAsc = 'longitude_ASC',
+  LongitudeDesc = 'longitude_DESC',
+  BoundarySourceBucketFamilyMetaAsc = 'boundarySourceBucketFamilyMeta_ASC',
+  BoundarySourceBucketFamilyMetaDesc = 'boundarySourceBucketFamilyMeta_DESC'
+}
+
+export type GeoCoordinatesUpdateInput = {
+  latitude?: Maybe<Scalars['Float']>;
+  longitude?: Maybe<Scalars['Float']>;
+  boundarySourceBucketFamilyMeta?: Maybe<Scalars['ID']>;
+};
+
+export type GeoCoordinatesWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  latitude_eq?: Maybe<Scalars['Float']>;
+  latitude_gt?: Maybe<Scalars['Float']>;
+  latitude_gte?: Maybe<Scalars['Float']>;
+  latitude_lt?: Maybe<Scalars['Float']>;
+  latitude_lte?: Maybe<Scalars['Float']>;
+  latitude_in?: Maybe<Array<Scalars['Float']>>;
+  longitude_eq?: Maybe<Scalars['Float']>;
+  longitude_gt?: Maybe<Scalars['Float']>;
+  longitude_gte?: Maybe<Scalars['Float']>;
+  longitude_lt?: Maybe<Scalars['Float']>;
+  longitude_lte?: Maybe<Scalars['Float']>;
+  longitude_in?: Maybe<Array<Scalars['Float']>>;
+  boundarySourceBucketFamilyMeta_eq?: Maybe<Scalars['ID']>;
+  boundarySourceBucketFamilyMeta_in?: Maybe<Array<Scalars['ID']>>;
+  boundarySourceBucketFamilyMeta?: Maybe<DistributionBucketFamilyMetadataWhereInput>;
+  nodelocationmetadatacoordinates_none?: Maybe<NodeLocationMetadataWhereInput>;
+  nodelocationmetadatacoordinates_some?: Maybe<NodeLocationMetadataWhereInput>;
+  nodelocationmetadatacoordinates_every?: Maybe<NodeLocationMetadataWhereInput>;
+  AND?: Maybe<Array<GeoCoordinatesWhereInput>>;
+  OR?: Maybe<Array<GeoCoordinatesWhereInput>>;
+};
+
+export type GeoCoordinatesWhereUniqueInput = {
+  id: Scalars['ID'];
+};
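+
+/*
+ * A minimal usage sketch, assuming the filter semantics implied by the
+ * input types above: the latitude/longitude range fields of
+ * `GeoCoordinatesWhereInput` compose into a bounding-box filter. The
+ * concrete coordinate values are illustrative assumptions.
+ *
+ *   const boundingBox: GeoCoordinatesWhereInput = {
+ *     latitude_gte: 50.0,
+ *     latitude_lte: 55.0,
+ *     longitude_gte: 14.0,
+ *     longitude_lte: 24.0,
+ *   }
+ */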
+
+
+export type Language = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** ISO 639-1 language identifier */
+  iso: Scalars['String'];
+  createdInBlock: Scalars['Int'];
+  channellanguage?: Maybe<Array<Channel>>;
+  videolanguage?: Maybe<Array<Video>>;
+};
+
+export type LanguageConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<LanguageEdge>;
+  pageInfo: PageInfo;
+};
+
+export type LanguageCreateInput = {
+  iso: Scalars['String'];
+  createdInBlock: Scalars['Float'];
+};
+
+export type LanguageEdge = {
+  node: Language;
+  cursor: Scalars['String'];
+};
+
+export enum LanguageOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsoAsc = 'iso_ASC',
+  IsoDesc = 'iso_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC'
+}
+
+export type LanguageUpdateInput = {
+  iso?: Maybe<Scalars['String']>;
+  createdInBlock?: Maybe<Scalars['Float']>;
+};
+
+export type LanguageWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  iso_eq?: Maybe<Scalars['String']>;
+  iso_contains?: Maybe<Scalars['String']>;
+  iso_startsWith?: Maybe<Scalars['String']>;
+  iso_endsWith?: Maybe<Scalars['String']>;
+  iso_in?: Maybe<Array<Scalars['String']>>;
+  createdInBlock_eq?: Maybe<Scalars['Int']>;
+  createdInBlock_gt?: Maybe<Scalars['Int']>;
+  createdInBlock_gte?: Maybe<Scalars['Int']>;
+  createdInBlock_lt?: Maybe<Scalars['Int']>;
+  createdInBlock_lte?: Maybe<Scalars['Int']>;
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>;
+  channellanguage_none?: Maybe<ChannelWhereInput>;
+  channellanguage_some?: Maybe<ChannelWhereInput>;
+  channellanguage_every?: Maybe<ChannelWhereInput>;
+  videolanguage_none?: Maybe<VideoWhereInput>;
+  videolanguage_some?: Maybe<VideoWhereInput>;
+  videolanguage_every?: Maybe<VideoWhereInput>;
+  AND?: Maybe<Array<LanguageWhereInput>>;
+  OR?: Maybe<Array<LanguageWhereInput>>;
+};
+
+export type LanguageWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export enum LiaisonJudgement {
+  Pending = 'PENDING',
+  Accepted = 'ACCEPTED'
+}
+
+export type License = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** License code defined by Joystream */
+  code?: Maybe<Scalars['Int']>;
+  /** Attribution (if required by the license) */
+  attribution?: Maybe<Scalars['String']>;
+  /** Custom license content */
+  customText?: Maybe<Scalars['String']>;
+  videolicense?: Maybe<Array<Video>>;
+};
+
+export type LicenseConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<LicenseEdge>;
+  pageInfo: PageInfo;
+};
+
+export type LicenseCreateInput = {
+  code?: Maybe<Scalars['Float']>;
+  attribution?: Maybe<Scalars['String']>;
+  customText?: Maybe<Scalars['String']>;
+};
+
+export type LicenseEdge = {
+  node: License;
+  cursor: Scalars['String'];
+};
+
+export enum LicenseOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CodeAsc = 'code_ASC',
+  CodeDesc = 'code_DESC',
+  AttributionAsc = 'attribution_ASC',
+  AttributionDesc = 'attribution_DESC',
+  CustomTextAsc = 'customText_ASC',
+  CustomTextDesc = 'customText_DESC'
+}
+
+export type LicenseUpdateInput = {
+  code?: Maybe<Scalars['Float']>;
+  attribution?: Maybe<Scalars['String']>;
+  customText?: Maybe<Scalars['String']>;
+};
+
+export type LicenseWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  code_eq?: Maybe<Scalars['Int']>;
+  code_gt?: Maybe<Scalars['Int']>;
+  code_gte?: Maybe<Scalars['Int']>;
+  code_lt?: Maybe<Scalars['Int']>;
+  code_lte?: Maybe<Scalars['Int']>;
+  code_in?: Maybe<Array<Scalars['Int']>>;
+  attribution_eq?: Maybe<Scalars['String']>;
+  attribution_contains?: Maybe<Scalars['String']>;
+  attribution_startsWith?: Maybe<Scalars['String']>;
+  attribution_endsWith?: Maybe<Scalars['String']>;
+  attribution_in?: Maybe<Array<Scalars['String']>>;
+  customText_eq?: Maybe<Scalars['String']>;
+  customText_contains?: Maybe<Scalars['String']>;
+  customText_startsWith?: Maybe<Scalars['String']>;
+  customText_endsWith?: Maybe<Scalars['String']>;
+  customText_in?: Maybe<Array<Scalars['String']>>;
+  videolicense_none?: Maybe<VideoWhereInput>;
+  videolicense_some?: Maybe<VideoWhereInput>;
+  videolicense_every?: Maybe<VideoWhereInput>;
+  AND?: Maybe<Array<LicenseWhereInput>>;
+  OR?: Maybe<Array<LicenseWhereInput>>;
+};
+
+export type LicenseWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type MembersByHandleFtsOutput = {
+  item: MembersByHandleSearchResult;
+  rank: Scalars['Float'];
+  isTypeOf: Scalars['String'];
+  highlight: Scalars['String'];
+};
+
+export type MembersByHandleSearchResult = Membership;
+
+/** Stored information about a registered user */
+export type Membership = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** The unique handle chosen by the member */
+  handle: Scalars['String'];
+  /** A URL to the member's avatar image */
+  avatarUri?: Maybe<Scalars['String']>;
+  /** Short text chosen by the member to share information about themselves */
+  about?: Maybe<Scalars['String']>;
+  /** Member's controller account ID */
+  controllerAccount: Scalars['String'];
+  /** Member's root account ID */
+  rootAccount: Scalars['String'];
+  /** Block number when the member was registered */
+  createdInBlock: Scalars['Int'];
+  /** How the member was registered */
+  entry: MembershipEntryMethod;
+  /** The type of subscription the member has purchased, if any */
+  subscription?: Maybe<Scalars['Int']>;
+  channels: Array<Channel>;
+};
+
+export type MembershipConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<MembershipEdge>;
+  pageInfo: PageInfo;
+};
+
+export type MembershipCreateInput = {
+  handle: Scalars['String'];
+  avatarUri?: Maybe<Scalars['String']>;
+  about?: Maybe<Scalars['String']>;
+  controllerAccount: Scalars['String'];
+  rootAccount: Scalars['String'];
+  createdInBlock: Scalars['Float'];
+  entry: MembershipEntryMethod;
+  subscription?: Maybe<Scalars['Float']>;
+};
+
+export type MembershipEdge = {
+  node: Membership;
+  cursor: Scalars['String'];
+};
+
+export enum MembershipEntryMethod {
+  Paid = 'PAID',
+  Screening = 'SCREENING',
+  Genesis = 'GENESIS'
+}
+
+export enum MembershipOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  HandleAsc = 'handle_ASC',
+  HandleDesc = 'handle_DESC',
+  AvatarUriAsc = 'avatarUri_ASC',
+  AvatarUriDesc = 'avatarUri_DESC',
+  AboutAsc = 'about_ASC',
+  AboutDesc = 'about_DESC',
+  ControllerAccountAsc = 'controllerAccount_ASC',
+  ControllerAccountDesc = 'controllerAccount_DESC',
+  RootAccountAsc = 'rootAccount_ASC',
+  RootAccountDesc = 'rootAccount_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+  EntryAsc = 'entry_ASC',
+  EntryDesc = 'entry_DESC',
+  SubscriptionAsc = 'subscription_ASC',
+  SubscriptionDesc = 'subscription_DESC'
+}
+
+export type MembershipUpdateInput = {
+  handle?: Maybe<Scalars['String']>;
+  avatarUri?: Maybe<Scalars['String']>;
+  about?: Maybe<Scalars['String']>;
+  controllerAccount?: Maybe<Scalars['String']>;
+  rootAccount?: Maybe<Scalars['String']>;
+  createdInBlock?: Maybe<Scalars['Float']>;
+  entry?: Maybe<MembershipEntryMethod>;
+  subscription?: Maybe<Scalars['Float']>;
+};
+
+export type MembershipWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  handle_eq?: Maybe<Scalars['String']>;
+  handle_contains?: Maybe<Scalars['String']>;
+  handle_startsWith?: Maybe<Scalars['String']>;
+  handle_endsWith?: Maybe<Scalars['String']>;
+  handle_in?: Maybe<Array<Scalars['String']>>;
+  avatarUri_eq?: Maybe<Scalars['String']>;
+  avatarUri_contains?: Maybe<Scalars['String']>;
+  avatarUri_startsWith?: Maybe<Scalars['String']>;
+  avatarUri_endsWith?: Maybe<Scalars['String']>;
+  avatarUri_in?: Maybe<Array<Scalars['String']>>;
+  about_eq?: Maybe<Scalars['String']>;
+  about_contains?: Maybe<Scalars['String']>;
+  about_startsWith?: Maybe<Scalars['String']>;
+  about_endsWith?: Maybe<Scalars['String']>;
+  about_in?: Maybe<Array<Scalars['String']>>;
+  controllerAccount_eq?: Maybe<Scalars['String']>;
+  controllerAccount_contains?: Maybe<Scalars['String']>;
+  controllerAccount_startsWith?: Maybe<Scalars['String']>;
+  controllerAccount_endsWith?: Maybe<Scalars['String']>;
+  controllerAccount_in?: Maybe<Array<Scalars['String']>>;
+  rootAccount_eq?: Maybe<Scalars['String']>;
+  rootAccount_contains?: Maybe<Scalars['String']>;
+  rootAccount_startsWith?: Maybe<Scalars['String']>;
+  rootAccount_endsWith?: Maybe<Scalars['String']>;
+  rootAccount_in?: Maybe<Array<Scalars['String']>>;
+  createdInBlock_eq?: Maybe<Scalars['Int']>;
+  createdInBlock_gt?: Maybe<Scalars['Int']>;
+  createdInBlock_gte?: Maybe<Scalars['Int']>;
+  createdInBlock_lt?: Maybe<Scalars['Int']>;
+  createdInBlock_lte?: Maybe<Scalars['Int']>;
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>;
+  entry_eq?: Maybe<MembershipEntryMethod>;
+  entry_in?: Maybe<Array<MembershipEntryMethod>>;
+  subscription_eq?: Maybe<Scalars['Int']>;
+  subscription_gt?: Maybe<Scalars['Int']>;
+  subscription_gte?: Maybe<Scalars['Int']>;
+  subscription_lt?: Maybe<Scalars['Int']>;
+  subscription_lte?: Maybe<Scalars['Int']>;
+  subscription_in?: Maybe<Array<Scalars['Int']>>;
+  channels_none?: Maybe<ChannelWhereInput>;
+  channels_some?: Maybe<ChannelWhereInput>;
+  channels_every?: Maybe<ChannelWhereInput>;
+  AND?: Maybe<Array<MembershipWhereInput>>;
+  OR?: Maybe<Array<MembershipWhereInput>>;
+};
+
+export type MembershipWhereUniqueInput = {
+  id?: Maybe<Scalars['ID']>;
+  handle?: Maybe<Scalars['String']>;
+};
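+
+/*
+ * A minimal sketch: `MembershipWhereUniqueInput` is the one unique-input
+ * type here with two alternative keys, so a membership can be resolved by
+ * `id` or by `handle` alone. The example values are assumptions.
+ *
+ *   const byHandle: MembershipWhereUniqueInput = { handle: 'alice' }
+ *   const byId: MembershipWhereUniqueInput = { id: '1' }
+ */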
+
+export type NextEntityId = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** Next deterministic ID for entities without a custom ID */
+  nextId: Scalars['Int'];
+};
+
+export type NextEntityIdConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<NextEntityIdEdge>;
+  pageInfo: PageInfo;
+};
+
+export type NextEntityIdCreateInput = {
+  nextId: Scalars['Float'];
+};
+
+export type NextEntityIdEdge = {
+  node: NextEntityId;
+  cursor: Scalars['String'];
+};
+
+export enum NextEntityIdOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NextIdAsc = 'nextId_ASC',
+  NextIdDesc = 'nextId_DESC'
+}
+
+export type NextEntityIdUpdateInput = {
+  nextId?: Maybe<Scalars['Float']>;
+};
+
+export type NextEntityIdWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  nextId_eq?: Maybe<Scalars['Int']>;
+  nextId_gt?: Maybe<Scalars['Int']>;
+  nextId_gte?: Maybe<Scalars['Int']>;
+  nextId_lt?: Maybe<Scalars['Int']>;
+  nextId_lte?: Maybe<Scalars['Int']>;
+  nextId_in?: Maybe<Array<Scalars['Int']>>;
+  AND?: Maybe<Array<NextEntityIdWhereInput>>;
+  OR?: Maybe<Array<NextEntityIdWhereInput>>;
+};
+
+export type NextEntityIdWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type NodeLocationMetadata = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** ISO 3166-1 alpha-2 country code (2 letters) */
+  countryCode?: Maybe<Scalars['String']>;
+  /** City name */
+  city?: Maybe<Scalars['String']>;
+  coordinates?: Maybe<GeoCoordinates>;
+  coordinatesId?: Maybe<Scalars['String']>;
+  distributionbucketoperatormetadatanodeLocation?: Maybe<Array<DistributionBucketOperatorMetadata>>;
+  storagebucketoperatormetadatanodeLocation?: Maybe<Array<StorageBucketOperatorMetadata>>;
+};
+
+export type NodeLocationMetadataConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<NodeLocationMetadataEdge>;
+  pageInfo: PageInfo;
+};
+
+export type NodeLocationMetadataCreateInput = {
+  countryCode?: Maybe<Scalars['String']>;
+  city?: Maybe<Scalars['String']>;
+  coordinates?: Maybe<Scalars['ID']>;
+};
+
+export type NodeLocationMetadataEdge = {
+  node: NodeLocationMetadata;
+  cursor: Scalars['String'];
+};
+
+export enum NodeLocationMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CountryCodeAsc = 'countryCode_ASC',
+  CountryCodeDesc = 'countryCode_DESC',
+  CityAsc = 'city_ASC',
+  CityDesc = 'city_DESC',
+  CoordinatesAsc = 'coordinates_ASC',
+  CoordinatesDesc = 'coordinates_DESC'
+}
+
+export type NodeLocationMetadataUpdateInput = {
+  countryCode?: Maybe<Scalars['String']>;
+  city?: Maybe<Scalars['String']>;
+  coordinates?: Maybe<Scalars['ID']>;
+};
+
+export type NodeLocationMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  countryCode_eq?: Maybe<Scalars['String']>;
+  countryCode_contains?: Maybe<Scalars['String']>;
+  countryCode_startsWith?: Maybe<Scalars['String']>;
+  countryCode_endsWith?: Maybe<Scalars['String']>;
+  countryCode_in?: Maybe<Array<Scalars['String']>>;
+  city_eq?: Maybe<Scalars['String']>;
+  city_contains?: Maybe<Scalars['String']>;
+  city_startsWith?: Maybe<Scalars['String']>;
+  city_endsWith?: Maybe<Scalars['String']>;
+  city_in?: Maybe<Array<Scalars['String']>>;
+  coordinates_eq?: Maybe<Scalars['ID']>;
+  coordinates_in?: Maybe<Array<Scalars['ID']>>;
+  coordinates?: Maybe<GeoCoordinatesWhereInput>;
+  distributionbucketoperatormetadatanodeLocation_none?: Maybe<DistributionBucketOperatorMetadataWhereInput>;
+  distributionbucketoperatormetadatanodeLocation_some?: Maybe<DistributionBucketOperatorMetadataWhereInput>;
+  distributionbucketoperatormetadatanodeLocation_every?: Maybe<DistributionBucketOperatorMetadataWhereInput>;
+  storagebucketoperatormetadatanodeLocation_none?: Maybe<StorageBucketOperatorMetadataWhereInput>;
+  storagebucketoperatormetadatanodeLocation_some?: Maybe<StorageBucketOperatorMetadataWhereInput>;
+  storagebucketoperatormetadatanodeLocation_every?: Maybe<StorageBucketOperatorMetadataWhereInput>;
+  AND?: Maybe<Array<NodeLocationMetadataWhereInput>>;
+  OR?: Maybe<Array<NodeLocationMetadataWhereInput>>;
+};
+
+export type NodeLocationMetadataWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type PageInfo = {
+  hasNextPage: Scalars['Boolean'];
+  hasPreviousPage: Scalars['Boolean'];
+  startCursor?: Maybe<Scalars['String']>;
+  endCursor?: Maybe<Scalars['String']>;
+};
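+
+/*
+ * A minimal pagination sketch over the Relay-style connections above:
+ * request pages with `first`/`after` and keep following
+ * `pageInfo.endCursor` while `pageInfo.hasNextPage` holds. `fetchPage` is
+ * a hypothetical transport callback, not part of this module.
+ *
+ *   async function allChannels(
+ *     fetchPage: (args: QueryChannelsConnectionArgs) => Promise<ChannelConnection>
+ *   ): Promise<Channel[]> {
+ *     const nodes: Channel[] = []
+ *     let after: string | null | undefined
+ *     let hasNextPage = true
+ *     while (hasNextPage) {
+ *       const page = await fetchPage({ first: 100, after })
+ *       nodes.push(...page.edges.map((e) => e.node))
+ *       hasNextPage = page.pageInfo.hasNextPage
+ *       after = page.pageInfo.endCursor
+ *     }
+ *     return nodes
+ *   }
+ */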
+
+export type ProcessorState = {
+  lastCompleteBlock: Scalars['Float'];
+  lastProcessedEvent: Scalars['String'];
+  indexerHead: Scalars['Float'];
+  chainHead: Scalars['Float'];
+};
+
+export type Query = {
+  channelCategories: Array<ChannelCategory>;
+  channelCategoryByUniqueInput?: Maybe<ChannelCategory>;
+  channelCategoriesConnection: ChannelCategoryConnection;
+  channels: Array<Channel>;
+  channelByUniqueInput?: Maybe<Channel>;
+  channelsConnection: ChannelConnection;
+  curatorGroups: Array<CuratorGroup>;
+  curatorGroupByUniqueInput?: Maybe<CuratorGroup>;
+  curatorGroupsConnection: CuratorGroupConnection;
+  dataObjects: Array<DataObject>;
+  dataObjectByUniqueInput?: Maybe<DataObject>;
+  dataObjectsConnection: DataObjectConnection;
+  distributionBucketFamilyMetadata: Array<DistributionBucketFamilyMetadata>;
+  distributionBucketFamilyMetadataByUniqueInput?: Maybe<DistributionBucketFamilyMetadata>;
+  distributionBucketFamilyMetadataConnection: DistributionBucketFamilyMetadataConnection;
+  distributionBucketFamilies: Array<DistributionBucketFamily>;
+  distributionBucketFamilyByUniqueInput?: Maybe<DistributionBucketFamily>;
+  distributionBucketFamiliesConnection: DistributionBucketFamilyConnection;
+  distributionBucketOperatorMetadata: Array<DistributionBucketOperatorMetadata>;
+  distributionBucketOperatorMetadataByUniqueInput?: Maybe<DistributionBucketOperatorMetadata>;
+  distributionBucketOperatorMetadataConnection: DistributionBucketOperatorMetadataConnection;
+  distributionBucketOperators: Array<DistributionBucketOperator>;
+  distributionBucketOperatorByUniqueInput?: Maybe<DistributionBucketOperator>;
+  distributionBucketOperatorsConnection: DistributionBucketOperatorConnection;
+  distributionBuckets: Array<DistributionBucket>;
+  distributionBucketByUniqueInput?: Maybe<DistributionBucket>;
+  distributionBucketsConnection: DistributionBucketConnection;
+  geoCoordinates: Array<GeoCoordinates>;
+  geoCoordinatesByUniqueInput?: Maybe<GeoCoordinates>;
+  geoCoordinatesConnection: GeoCoordinatesConnection;
+  languages: Array<Language>;
+  languageByUniqueInput?: Maybe<Language>;
+  languagesConnection: LanguageConnection;
+  licenses: Array<License>;
+  licenseByUniqueInput?: Maybe<License>;
+  licensesConnection: LicenseConnection;
+  memberships: Array<Membership>;
+  membershipByUniqueInput?: Maybe<Membership>;
+  membershipsConnection: MembershipConnection;
+  nextEntityIds: Array<NextEntityId>;
+  nextEntityIdByUniqueInput?: Maybe<NextEntityId>;
+  nextEntityIdsConnection: NextEntityIdConnection;
+  nodeLocationMetadata: Array<NodeLocationMetadata>;
+  nodeLocationMetadataByUniqueInput?: Maybe<NodeLocationMetadata>;
+  nodeLocationMetadataConnection: NodeLocationMetadataConnection;
+  channelCategoriesByName: Array<ChannelCategoriesByNameFtsOutput>;
+  membersByHandle: Array<MembersByHandleFtsOutput>;
+  search: Array<SearchFtsOutput>;
+  videoCategoriesByName: Array<VideoCategoriesByNameFtsOutput>;
+  storageBagDistributionAssignments: Array<StorageBagDistributionAssignment>;
+  storageBagDistributionAssignmentByUniqueInput?: Maybe<StorageBagDistributionAssignment>;
+  storageBagDistributionAssignmentsConnection: StorageBagDistributionAssignmentConnection;
+  storageBagStorageAssignments: Array<StorageBagStorageAssignment>;
+  storageBagStorageAssignmentByUniqueInput?: Maybe<StorageBagStorageAssignment>;
+  storageBagStorageAssignmentsConnection: StorageBagStorageAssignmentConnection;
+  storageBags: Array<StorageBag>;
+  storageBagByUniqueInput?: Maybe<StorageBag>;
+  storageBagsConnection: StorageBagConnection;
+  storageBucketOperatorMetadata: Array<StorageBucketOperatorMetadata>;
+  storageBucketOperatorMetadataByUniqueInput?: Maybe<StorageBucketOperatorMetadata>;
+  storageBucketOperatorMetadataConnection: StorageBucketOperatorMetadataConnection;
+  storageBuckets: Array<StorageBucket>;
+  storageBucketByUniqueInput?: Maybe<StorageBucket>;
+  storageBucketsConnection: StorageBucketConnection;
+  storageDataObjects: Array<StorageDataObject>;
+  storageDataObjectByUniqueInput?: Maybe<StorageDataObject>;
+  storageDataObjectsConnection: StorageDataObjectConnection;
+  storageSystemParameters: Array<StorageSystemParameters>;
+  storageSystemParametersByUniqueInput?: Maybe<StorageSystemParameters>;
+  storageSystemParametersConnection: StorageSystemParametersConnection;
+  videoCategories: Array<VideoCategory>;
+  videoCategoryByUniqueInput?: Maybe<VideoCategory>;
+  videoCategoriesConnection: VideoCategoryConnection;
+  videoMediaEncodings: Array<VideoMediaEncoding>;
+  videoMediaEncodingByUniqueInput?: Maybe<VideoMediaEncoding>;
+  videoMediaEncodingsConnection: VideoMediaEncodingConnection;
+  videoMediaMetadata: Array<VideoMediaMetadata>;
+  videoMediaMetadataByUniqueInput?: Maybe<VideoMediaMetadata>;
+  videoMediaMetadataConnection: VideoMediaMetadataConnection;
+  videos: Array<Video>;
+  videoByUniqueInput?: Maybe<Video>;
+  videosConnection: VideoConnection;
+  workers: Array<Worker>;
+  workerByUniqueInput?: Maybe<Worker>;
+  workersConnection: WorkerConnection;
+};
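+
+/*
+ * A minimal sketch of the shared root-field argument pattern: every list
+ * field takes `offset`/`limit`/`where`/`orderBy`, typed by the
+ * `Query*Args` types that follow. The filter values are illustrative.
+ *
+ *   const paidMembersArgs: QueryMembershipsArgs = {
+ *     limit: 10,
+ *     where: { entry_eq: MembershipEntryMethod.Paid },
+ *     orderBy: [MembershipOrderByInput.CreatedAtDesc],
+ *   }
+ */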
+
+
+export type QueryChannelCategoriesArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<ChannelCategoryWhereInput>;
+  orderBy?: Maybe<Array<ChannelCategoryOrderByInput>>;
+};
+
+
+export type QueryChannelCategoryByUniqueInputArgs = {
+  where: ChannelCategoryWhereUniqueInput;
+};
+
+
+export type QueryChannelCategoriesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<ChannelCategoryWhereInput>;
+  orderBy?: Maybe<Array<ChannelCategoryOrderByInput>>;
+};
+
+
+export type QueryChannelsArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<ChannelWhereInput>;
+  orderBy?: Maybe<Array<ChannelOrderByInput>>;
+};
+
+
+export type QueryChannelByUniqueInputArgs = {
+  where: ChannelWhereUniqueInput;
+};
+
+
+export type QueryChannelsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<ChannelWhereInput>;
+  orderBy?: Maybe<Array<ChannelOrderByInput>>;
+};
+
+
+export type QueryCuratorGroupsArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<CuratorGroupWhereInput>;
+  orderBy?: Maybe<Array<CuratorGroupOrderByInput>>;
+};
+
+
+export type QueryCuratorGroupByUniqueInputArgs = {
+  where: CuratorGroupWhereUniqueInput;
+};
+
+
+export type QueryCuratorGroupsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<CuratorGroupWhereInput>;
+  orderBy?: Maybe<Array<CuratorGroupOrderByInput>>;
+};
+
+
+export type QueryDataObjectsArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<DataObjectWhereInput>;
+  orderBy?: Maybe<Array<DataObjectOrderByInput>>;
+};
+
+
+export type QueryDataObjectByUniqueInputArgs = {
+  where: DataObjectWhereUniqueInput;
+};
+
+
+export type QueryDataObjectsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<DataObjectWhereInput>;
+  orderBy?: Maybe<Array<DataObjectOrderByInput>>;
+};
+
+
+export type QueryDistributionBucketFamilyMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<DistributionBucketFamilyMetadataWhereInput>;
+  orderBy?: Maybe<Array<DistributionBucketFamilyMetadataOrderByInput>>;
+};
+
+
+export type QueryDistributionBucketFamilyMetadataByUniqueInputArgs = {
+  where: DistributionBucketFamilyMetadataWhereUniqueInput;
+};
+
+
+export type QueryDistributionBucketFamilyMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<DistributionBucketFamilyMetadataWhereInput>;
+  orderBy?: Maybe<Array<DistributionBucketFamilyMetadataOrderByInput>>;
+};
+
+
+export type QueryDistributionBucketFamiliesArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<DistributionBucketFamilyWhereInput>;
+  orderBy?: Maybe<Array<DistributionBucketFamilyOrderByInput>>;
+};
+
+
+export type QueryDistributionBucketFamilyByUniqueInputArgs = {
+  where: DistributionBucketFamilyWhereUniqueInput;
+};
+
+
+export type QueryDistributionBucketFamiliesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<DistributionBucketFamilyWhereInput>;
+  orderBy?: Maybe<Array<DistributionBucketFamilyOrderByInput>>;
+};
+
+
+export type QueryDistributionBucketOperatorMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<DistributionBucketOperatorMetadataWhereInput>;
+  orderBy?: Maybe<Array<DistributionBucketOperatorMetadataOrderByInput>>;
+};
+
+
+export type QueryDistributionBucketOperatorMetadataByUniqueInputArgs = {
+  where: DistributionBucketOperatorMetadataWhereUniqueInput;
+};
+
+
+export type QueryDistributionBucketOperatorMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<DistributionBucketOperatorMetadataWhereInput>;
+  orderBy?: Maybe<Array<DistributionBucketOperatorMetadataOrderByInput>>;
+};
+
+
+export type QueryDistributionBucketOperatorsArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<DistributionBucketOperatorWhereInput>;
+  orderBy?: Maybe<Array<DistributionBucketOperatorOrderByInput>>;
+};
+
+
+export type QueryDistributionBucketOperatorByUniqueInputArgs = {
+  where: DistributionBucketOperatorWhereUniqueInput;
+};
+
+
+export type QueryDistributionBucketOperatorsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<DistributionBucketOperatorWhereInput>;
+  orderBy?: Maybe<Array<DistributionBucketOperatorOrderByInput>>;
+};
+
+
+export type QueryDistributionBucketsArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<DistributionBucketWhereInput>;
+  orderBy?: Maybe<Array<DistributionBucketOrderByInput>>;
+};
+
+
+export type QueryDistributionBucketByUniqueInputArgs = {
+  where: DistributionBucketWhereUniqueInput;
+};
+
+
+export type QueryDistributionBucketsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<DistributionBucketWhereInput>;
+  orderBy?: Maybe<Array<DistributionBucketOrderByInput>>;
+};
+
+
+export type QueryGeoCoordinatesArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<GeoCoordinatesWhereInput>;
+  orderBy?: Maybe<Array<GeoCoordinatesOrderByInput>>;
+};
+
+
+export type QueryGeoCoordinatesByUniqueInputArgs = {
+  where: GeoCoordinatesWhereUniqueInput;
+};
+
+
+export type QueryGeoCoordinatesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<GeoCoordinatesWhereInput>;
+  orderBy?: Maybe<Array<GeoCoordinatesOrderByInput>>;
+};
+
+
+export type QueryLanguagesArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<LanguageWhereInput>;
+  orderBy?: Maybe<Array<LanguageOrderByInput>>;
+};
+
+
+export type QueryLanguageByUniqueInputArgs = {
+  where: LanguageWhereUniqueInput;
+};
+
+
+export type QueryLanguagesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<LanguageWhereInput>;
+  orderBy?: Maybe<Array<LanguageOrderByInput>>;
+};
+
+
+export type QueryLicensesArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<LicenseWhereInput>;
+  orderBy?: Maybe<Array<LicenseOrderByInput>>;
+};
+
+
+export type QueryLicenseByUniqueInputArgs = {
+  where: LicenseWhereUniqueInput;
+};
+
+
+export type QueryLicensesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<LicenseWhereInput>;
+  orderBy?: Maybe<Array<LicenseOrderByInput>>;
+};
+
+
+export type QueryMembershipsArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<MembershipWhereInput>;
+  orderBy?: Maybe<Array<MembershipOrderByInput>>;
+};
+
+
+export type QueryMembershipByUniqueInputArgs = {
+  where: MembershipWhereUniqueInput;
+};
+
+
+export type QueryMembershipsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<MembershipWhereInput>;
+  orderBy?: Maybe<Array<MembershipOrderByInput>>;
+};
+
+
+export type QueryNextEntityIdsArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<NextEntityIdWhereInput>;
+  orderBy?: Maybe<Array<NextEntityIdOrderByInput>>;
+};
+
+
+export type QueryNextEntityIdByUniqueInputArgs = {
+  where: NextEntityIdWhereUniqueInput;
+};
+
+
+export type QueryNextEntityIdsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<NextEntityIdWhereInput>;
+  orderBy?: Maybe<Array<NextEntityIdOrderByInput>>;
+};
+
+
+export type QueryNodeLocationMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<NodeLocationMetadataWhereInput>;
+  orderBy?: Maybe<Array<NodeLocationMetadataOrderByInput>>;
+};
+
+
+export type QueryNodeLocationMetadataByUniqueInputArgs = {
+  where: NodeLocationMetadataWhereUniqueInput;
+};
+
+
+export type QueryNodeLocationMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<NodeLocationMetadataWhereInput>;
+  orderBy?: Maybe<Array<NodeLocationMetadataOrderByInput>>;
+};
+
+
+export type QueryChannelCategoriesByNameArgs = {
+  whereChannelCategory?: Maybe<ChannelCategoryWhereInput>;
+  skip?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  text: Scalars['String'];
+};
+
+
+export type QueryMembersByHandleArgs = {
+  whereMembership?: Maybe<MembershipWhereInput>;
+  skip?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  text: Scalars['String'];
+};
+
+
+export type QuerySearchArgs = {
+  whereVideo?: Maybe<VideoWhereInput>;
+  whereChannel?: Maybe<ChannelWhereInput>;
+  skip?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  text: Scalars['String'];
+};
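+
+/*
+ * A minimal sketch of the full-text search argument shape: `text` is the
+ * only required field, and the optional `where*` inputs scope results by
+ * entity type. The search term is an illustrative assumption.
+ *
+ *   const searchArgs: QuerySearchArgs = { text: 'joystream', limit: 5 }
+ */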
+
+
+export type QueryVideoCategoriesByNameArgs = {
+  whereVideoCategory?: Maybe<VideoCategoryWhereInput>;
+  skip?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  text: Scalars['String'];
+};
+
+
+export type QueryStorageBagDistributionAssignmentsArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<StorageBagDistributionAssignmentWhereInput>;
+  orderBy?: Maybe<Array<StorageBagDistributionAssignmentOrderByInput>>;
+};
+
+
+export type QueryStorageBagDistributionAssignmentByUniqueInputArgs = {
+  where: StorageBagDistributionAssignmentWhereUniqueInput;
+};
+
+
+export type QueryStorageBagDistributionAssignmentsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<StorageBagDistributionAssignmentWhereInput>;
+  orderBy?: Maybe<Array<StorageBagDistributionAssignmentOrderByInput>>;
+};
+
+
+export type QueryStorageBagStorageAssignmentsArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<StorageBagStorageAssignmentWhereInput>;
+  orderBy?: Maybe<Array<StorageBagStorageAssignmentOrderByInput>>;
+};
+
+
+export type QueryStorageBagStorageAssignmentByUniqueInputArgs = {
+  where: StorageBagStorageAssignmentWhereUniqueInput;
+};
+
+
+export type QueryStorageBagStorageAssignmentsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<StorageBagStorageAssignmentWhereInput>;
+  orderBy?: Maybe<Array<StorageBagStorageAssignmentOrderByInput>>;
+};
+
+
+export type QueryStorageBagsArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<StorageBagWhereInput>;
+  orderBy?: Maybe<Array<StorageBagOrderByInput>>;
+};
+
+
+export type QueryStorageBagByUniqueInputArgs = {
+  where: StorageBagWhereUniqueInput;
+};
+
+
+export type QueryStorageBagsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<StorageBagWhereInput>;
+  orderBy?: Maybe<Array<StorageBagOrderByInput>>;
+};
+
+
+export type QueryStorageBucketOperatorMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<StorageBucketOperatorMetadataWhereInput>;
+  orderBy?: Maybe<Array<StorageBucketOperatorMetadataOrderByInput>>;
+};
+
+
+export type QueryStorageBucketOperatorMetadataByUniqueInputArgs = {
+  where: StorageBucketOperatorMetadataWhereUniqueInput;
+};
+
+
+export type QueryStorageBucketOperatorMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<StorageBucketOperatorMetadataWhereInput>;
+  orderBy?: Maybe<Array<StorageBucketOperatorMetadataOrderByInput>>;
+};
+
+
+export type QueryStorageBucketsArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<StorageBucketWhereInput>;
+  orderBy?: Maybe<Array<StorageBucketOrderByInput>>;
+};
+
+
+export type QueryStorageBucketByUniqueInputArgs = {
+  where: StorageBucketWhereUniqueInput;
+};
+
+
+export type QueryStorageBucketsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<StorageBucketWhereInput>;
+  orderBy?: Maybe<Array<StorageBucketOrderByInput>>;
+};
+
+
+export type QueryStorageDataObjectsArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<StorageDataObjectWhereInput>;
+  orderBy?: Maybe<Array<StorageDataObjectOrderByInput>>;
+};
+
+
+export type QueryStorageDataObjectByUniqueInputArgs = {
+  where: StorageDataObjectWhereUniqueInput;
+};
+
+
+export type QueryStorageDataObjectsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<StorageDataObjectWhereInput>;
+  orderBy?: Maybe<Array<StorageDataObjectOrderByInput>>;
+};
+
+
+export type QueryStorageSystemParametersArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<StorageSystemParametersWhereInput>;
+  orderBy?: Maybe<Array<StorageSystemParametersOrderByInput>>;
+};
+
+
+export type QueryStorageSystemParametersByUniqueInputArgs = {
+  where: StorageSystemParametersWhereUniqueInput;
+};
+
+
+export type QueryStorageSystemParametersConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<StorageSystemParametersWhereInput>;
+  orderBy?: Maybe<Array<StorageSystemParametersOrderByInput>>;
+};
+
+
+export type QueryVideoCategoriesArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<VideoCategoryWhereInput>;
+  orderBy?: Maybe<Array<VideoCategoryOrderByInput>>;
+};
+
+
+export type QueryVideoCategoryByUniqueInputArgs = {
+  where: VideoCategoryWhereUniqueInput;
+};
+
+
+export type QueryVideoCategoriesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<VideoCategoryWhereInput>;
+  orderBy?: Maybe<Array<VideoCategoryOrderByInput>>;
+};
+
+
+export type QueryVideoMediaEncodingsArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<VideoMediaEncodingWhereInput>;
+  orderBy?: Maybe<Array<VideoMediaEncodingOrderByInput>>;
+};
+
+
+export type QueryVideoMediaEncodingByUniqueInputArgs = {
+  where: VideoMediaEncodingWhereUniqueInput;
+};
+
+
+export type QueryVideoMediaEncodingsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<VideoMediaEncodingWhereInput>;
+  orderBy?: Maybe<Array<VideoMediaEncodingOrderByInput>>;
+};
+
+
+export type QueryVideoMediaMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<VideoMediaMetadataWhereInput>;
+  orderBy?: Maybe<Array<VideoMediaMetadataOrderByInput>>;
+};
+
+
+export type QueryVideoMediaMetadataByUniqueInputArgs = {
+  where: VideoMediaMetadataWhereUniqueInput;
+};
+
+
+export type QueryVideoMediaMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<VideoMediaMetadataWhereInput>;
+  orderBy?: Maybe<Array<VideoMediaMetadataOrderByInput>>;
+};
+
+
+export type QueryVideosArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<VideoWhereInput>;
+  orderBy?: Maybe<Array<VideoOrderByInput>>;
+};
+
+
+export type QueryVideoByUniqueInputArgs = {
+  where: VideoWhereUniqueInput;
+};
+
+
+export type QueryVideosConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<VideoWhereInput>;
+  orderBy?: Maybe<Array<VideoOrderByInput>>;
+};
+
+
+export type QueryWorkersArgs = {
+  offset?: Maybe<Scalars['Int']>;
+  limit?: Maybe<Scalars['Int']>;
+  where?: Maybe<WorkerWhereInput>;
+  orderBy?: Maybe<Array<WorkerOrderByInput>>;
+};
+
+
+export type QueryWorkerByUniqueInputArgs = {
+  where: WorkerWhereUniqueInput;
+};
+
+
+export type QueryWorkersConnectionArgs = {
+  first?: Maybe<Scalars['Int']>;
+  after?: Maybe<Scalars['String']>;
+  last?: Maybe<Scalars['Int']>;
+  before?: Maybe<Scalars['String']>;
+  where?: Maybe<WorkerWhereInput>;
+  orderBy?: Maybe<Array<WorkerOrderByInput>>;
+};
+
+export type SearchFtsOutput = {
+  item: SearchSearchResult;
+  rank: Scalars['Float'];
+  isTypeOf: Scalars['String'];
+  highlight: Scalars['String'];
+};
+
+export type SearchSearchResult = Channel | Video;
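+
+/*
+ * A minimal sketch: `search` items carry a `Channel | Video` union, and
+ * `isTypeOf` names the concrete type, so consumers can branch on it. The
+ * casts reflect that `isTypeOf` is a plain string rather than a typed
+ * discriminant on `item`.
+ *
+ *   function describeSearchHit(hit: SearchFtsOutput): string {
+ *     return hit.isTypeOf === 'Channel'
+ *       ? `channel ${(hit.item as Channel).id}`
+ *       : `video ${(hit.item as Video).id}`
+ *   }
+ */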
+
+export type StandardDeleteResponse = {
+  id: Scalars['ID'];
+};
+
+export type StorageBag = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  objects: Array<StorageDataObject>;
+  storageAssignments: Array<StorageBagStorageAssignment>;
+  distributionAssignments: Array<StorageBagDistributionAssignment>;
+  /** Owner of the storage bag */
+  owner: StorageBagOwner;
+};
+
+export type StorageBagConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<StorageBagEdge>;
+  pageInfo: PageInfo;
+};
+
+export type StorageBagCreateInput = {
+  owner: Scalars['JSONObject'];
+};
+
+export type StorageBagDistributionAssignment = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  storageBag: StorageBag;
+  storageBagId: Scalars['String'];
+  distributionBucket: DistributionBucket;
+  distributionBucketId: Scalars['String'];
+};
+
+export type StorageBagDistributionAssignmentConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<StorageBagDistributionAssignmentEdge>;
+  pageInfo: PageInfo;
+};
+
+export type StorageBagDistributionAssignmentCreateInput = {
+  storageBag: Scalars['ID'];
+  distributionBucket: Scalars['ID'];
+};
+
+export type StorageBagDistributionAssignmentEdge = {
+  node: StorageBagDistributionAssignment;
+  cursor: Scalars['String'];
+};
+
+export enum StorageBagDistributionAssignmentOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  StorageBagAsc = 'storageBag_ASC',
+  StorageBagDesc = 'storageBag_DESC',
+  DistributionBucketAsc = 'distributionBucket_ASC',
+  DistributionBucketDesc = 'distributionBucket_DESC'
+}
+
+export type StorageBagDistributionAssignmentUpdateInput = {
+  storageBag?: Maybe<Scalars['ID']>;
+  distributionBucket?: Maybe<Scalars['ID']>;
+};
+
+export type StorageBagDistributionAssignmentWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  storageBag_eq?: Maybe<Scalars['ID']>;
+  storageBag_in?: Maybe<Array<Scalars['ID']>>;
+  distributionBucket_eq?: Maybe<Scalars['ID']>;
+  distributionBucket_in?: Maybe<Array<Scalars['ID']>>;
+  storageBag?: Maybe<StorageBagWhereInput>;
+  distributionBucket?: Maybe<DistributionBucketWhereInput>;
+  AND?: Maybe<Array<StorageBagDistributionAssignmentWhereInput>>;
+  OR?: Maybe<Array<StorageBagDistributionAssignmentWhereInput>>;
+};
+
+export type StorageBagDistributionAssignmentWhereUniqueInput = {
+  id: Scalars['ID'];
+};
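+
+/*
+ * A minimal sketch: the assignment entity is a join row between a bag and
+ * a distribution bucket, so filtering by either side goes through the
+ * nested where inputs rather than raw foreign keys. The id value is an
+ * assumption.
+ *
+ *   const inBucket: StorageBagDistributionAssignmentWhereInput = {
+ *     distributionBucket: { id_eq: '1' },
+ *   }
+ */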
+
+export type StorageBagEdge = {
+  node: StorageBag;
+  cursor: Scalars['String'];
+};
+
+export enum StorageBagOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC'
+}
+
+export type StorageBagOwner = StorageBagOwnerCouncil | StorageBagOwnerWorkingGroup | StorageBagOwnerMember | StorageBagOwnerChannel | StorageBagOwnerDao;
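+
+/*
+ * A minimal sketch: `StorageBagOwner` is a variant union where each
+ * variant carries a single optional discriminating field, so property
+ * presence is one way to narrow. The helper below is hypothetical.
+ *
+ *   function ownerLabel(owner: StorageBagOwner): string {
+ *     if ('channelId' in owner) return `channel:${owner.channelId}`
+ *     if ('memberId' in owner) return `member:${owner.memberId}`
+ *     if ('workingGroupId' in owner) return `group:${owner.workingGroupId}`
+ *     if ('daoId' in owner) return `dao:${owner.daoId}`
+ *     return 'council'
+ *   }
+ */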
+
+export type StorageBagOwnerChannel = {
+  channelId?: Maybe<Scalars['Int']>;
+};
+
+export type StorageBagOwnerChannelCreateInput = {
+  channelId?: Maybe<Scalars['Float']>;
+};
+
+export type StorageBagOwnerChannelUpdateInput = {
+  channelId?: Maybe<Scalars['Float']>;
+};
+
+export type StorageBagOwnerChannelWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  channelId_eq?: Maybe<Scalars['Int']>;
+  channelId_gt?: Maybe<Scalars['Int']>;
+  channelId_gte?: Maybe<Scalars['Int']>;
+  channelId_lt?: Maybe<Scalars['Int']>;
+  channelId_lte?: Maybe<Scalars['Int']>;
+  channelId_in?: Maybe<Array<Scalars['Int']>>;
+  AND?: Maybe<Array<StorageBagOwnerChannelWhereInput>>;
+  OR?: Maybe<Array<StorageBagOwnerChannelWhereInput>>;
+};
+
+export type StorageBagOwnerChannelWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type StorageBagOwnerCouncil = {
+  phantom?: Maybe<Scalars['Int']>;
+};
+
+export type StorageBagOwnerCouncilCreateInput = {
+  phantom?: Maybe<Scalars['Float']>;
+};
+
+export type StorageBagOwnerCouncilUpdateInput = {
+  phantom?: Maybe<Scalars['Float']>;
+};
+
+export type StorageBagOwnerCouncilWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  phantom_eq?: Maybe<Scalars['Int']>;
+  phantom_gt?: Maybe<Scalars['Int']>;
+  phantom_gte?: Maybe<Scalars['Int']>;
+  phantom_lt?: Maybe<Scalars['Int']>;
+  phantom_lte?: Maybe<Scalars['Int']>;
+  phantom_in?: Maybe<Array<Scalars['Int']>>;
+  AND?: Maybe<Array<StorageBagOwnerCouncilWhereInput>>;
+  OR?: Maybe<Array<StorageBagOwnerCouncilWhereInput>>;
+};
+
+export type StorageBagOwnerCouncilWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type StorageBagOwnerDao = {
+  daoId?: Maybe<Scalars['Int']>;
+};
+
+export type StorageBagOwnerDaoCreateInput = {
+  daoId?: Maybe<Scalars['Float']>;
+};
+
+export type StorageBagOwnerDaoUpdateInput = {
+  daoId?: Maybe<Scalars['Float']>;
+};
+
+export type StorageBagOwnerDaoWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  daoId_eq?: Maybe<Scalars['Int']>;
+  daoId_gt?: Maybe<Scalars['Int']>;
+  daoId_gte?: Maybe<Scalars['Int']>;
+  daoId_lt?: Maybe<Scalars['Int']>;
+  daoId_lte?: Maybe<Scalars['Int']>;
+  daoId_in?: Maybe<Array<Scalars['Int']>>;
+  AND?: Maybe<Array<StorageBagOwnerDaoWhereInput>>;
+  OR?: Maybe<Array<StorageBagOwnerDaoWhereInput>>;
+};
+
+export type StorageBagOwnerDaoWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type StorageBagOwnerMember = {
+  memberId?: Maybe<Scalars['Int']>;
+};
+
+export type StorageBagOwnerMemberCreateInput = {
+  memberId?: Maybe<Scalars['Float']>;
+};
+
+export type StorageBagOwnerMemberUpdateInput = {
+  memberId?: Maybe<Scalars['Float']>;
+};
+
+export type StorageBagOwnerMemberWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  memberId_eq?: Maybe<Scalars['Int']>;
+  memberId_gt?: Maybe<Scalars['Int']>;
+  memberId_gte?: Maybe<Scalars['Int']>;
+  memberId_lt?: Maybe<Scalars['Int']>;
+  memberId_lte?: Maybe<Scalars['Int']>;
+  memberId_in?: Maybe<Array<Scalars['Int']>>;
+  AND?: Maybe<Array<StorageBagOwnerMemberWhereInput>>;
+  OR?: Maybe<Array<StorageBagOwnerMemberWhereInput>>;
+};
+
+export type StorageBagOwnerMemberWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type StorageBagOwnerWorkingGroup = {
+  workingGroupId?: Maybe<Scalars['String']>;
+};
+
+export type StorageBagOwnerWorkingGroupCreateInput = {
+  workingGroupId?: Maybe<Scalars['String']>;
+};
+
+export type StorageBagOwnerWorkingGroupUpdateInput = {
+  workingGroupId?: Maybe<Scalars['String']>;
+};
+
+export type StorageBagOwnerWorkingGroupWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  workingGroupId_eq?: Maybe<Scalars['String']>;
+  workingGroupId_contains?: Maybe<Scalars['String']>;
+  workingGroupId_startsWith?: Maybe<Scalars['String']>;
+  workingGroupId_endsWith?: Maybe<Scalars['String']>;
+  workingGroupId_in?: Maybe<Array<Scalars['String']>>;
+  AND?: Maybe<Array<StorageBagOwnerWorkingGroupWhereInput>>;
+  OR?: Maybe<Array<StorageBagOwnerWorkingGroupWhereInput>>;
+};
+
+export type StorageBagOwnerWorkingGroupWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type StorageBagStorageAssignment = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  storageBag: StorageBag;
+  storageBagId: Scalars['String'];
+  storageBucket: StorageBucket;
+  storageBucketId: Scalars['String'];
+};
+
+export type StorageBagStorageAssignmentConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<StorageBagStorageAssignmentEdge>;
+  pageInfo: PageInfo;
+};
+
+export type StorageBagStorageAssignmentCreateInput = {
+  storageBag: Scalars['ID'];
+  storageBucket: Scalars['ID'];
+};
+
+export type StorageBagStorageAssignmentEdge = {
+  node: StorageBagStorageAssignment;
+  cursor: Scalars['String'];
+};
+
+export enum StorageBagStorageAssignmentOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  StorageBagAsc = 'storageBag_ASC',
+  StorageBagDesc = 'storageBag_DESC',
+  StorageBucketAsc = 'storageBucket_ASC',
+  StorageBucketDesc = 'storageBucket_DESC'
+}
+
+export type StorageBagStorageAssignmentUpdateInput = {
+  storageBag?: Maybe<Scalars['ID']>;
+  storageBucket?: Maybe<Scalars['ID']>;
+};
+
+export type StorageBagStorageAssignmentWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  storageBag_eq?: Maybe<Scalars['ID']>;
+  storageBag_in?: Maybe<Array<Scalars['ID']>>;
+  storageBucket_eq?: Maybe<Scalars['ID']>;
+  storageBucket_in?: Maybe<Array<Scalars['ID']>>;
+  storageBag?: Maybe<StorageBagWhereInput>;
+  storageBucket?: Maybe<StorageBucketWhereInput>;
+  AND?: Maybe<Array<StorageBagStorageAssignmentWhereInput>>;
+  OR?: Maybe<Array<StorageBagStorageAssignmentWhereInput>>;
+};
+
+export type StorageBagStorageAssignmentWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type StorageBagUpdateInput = {
+  owner?: Maybe<Scalars['JSONObject']>;
+};
+
+export type StorageBagWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  owner_json?: Maybe<Scalars['JSONObject']>;
+  objects_none?: Maybe<StorageDataObjectWhereInput>;
+  objects_some?: Maybe<StorageDataObjectWhereInput>;
+  objects_every?: Maybe<StorageDataObjectWhereInput>;
+  storageAssignments_none?: Maybe<StorageBagStorageAssignmentWhereInput>;
+  storageAssignments_some?: Maybe<StorageBagStorageAssignmentWhereInput>;
+  storageAssignments_every?: Maybe<StorageBagStorageAssignmentWhereInput>;
+  distirbutionAssignments_none?: Maybe<StorageBagDistributionAssignmentWhereInput>;
+  distirbutionAssignments_some?: Maybe<StorageBagDistributionAssignmentWhereInput>;
+  distirbutionAssignments_every?: Maybe<StorageBagDistributionAssignmentWhereInput>;
+  AND?: Maybe<Array<StorageBagWhereInput>>;
+  OR?: Maybe<Array<StorageBagWhereInput>>;
+};
+
+export type StorageBagWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type StorageBucket = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** Current bucket operator status */
+  operatorStatus: StorageBucketOperatorStatus;
+  operatorMetadata?: Maybe<StorageBucketOperatorMetadata>;
+  operatorMetadataId?: Maybe<Scalars['String']>;
+  /** Whether the bucket is accepting any new storage bags */
+  acceptingNewBags: Scalars['Boolean'];
+  bagAssignments: Array<StorageBagStorageAssignment>;
+  /** Bucket's data object size limit in bytes */
+  dataObjectsSizeLimit: Scalars['BigInt'];
+  /** Bucket's data object count limit */
+  dataObjectCountLimit: Scalars['BigInt'];
+};
+
+export type StorageBucketConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<StorageBucketEdge>;
+  pageInfo: PageInfo;
+};
+
+export type StorageBucketCreateInput = {
+  operatorStatus: Scalars['JSONObject'];
+  operatorMetadata?: Maybe<Scalars['ID']>;
+  acceptingNewBags: Scalars['Boolean'];
+  dataObjectsSizeLimit: Scalars['BigInt'];
+  dataObjectCountLimit: Scalars['BigInt'];
+};
+
+export type StorageBucketEdge = {
+  node: StorageBucket;
+  cursor: Scalars['String'];
+};
+
+export type StorageBucketOperatorMetadata = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** Root node endpoint */
+  nodeEndpoint?: Maybe<Scalars['String']>;
+  nodeLocation?: Maybe<NodeLocationMetadata>;
+  nodeLocationId?: Maybe<Scalars['String']>;
+  /** Additional information about the node/operator */
+  extra?: Maybe<Scalars['String']>;
+  storagebucketoperatorMetadata?: Maybe<Array<StorageBucket>>;
+};
+
+export type StorageBucketOperatorMetadataConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<StorageBucketOperatorMetadataEdge>;
+  pageInfo: PageInfo;
+};
+
+export type StorageBucketOperatorMetadataCreateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>;
+  nodeLocation?: Maybe<Scalars['ID']>;
+  extra?: Maybe<Scalars['String']>;
+};
+
+export type StorageBucketOperatorMetadataEdge = {
+  node: StorageBucketOperatorMetadata;
+  cursor: Scalars['String'];
+};
+
+export enum StorageBucketOperatorMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NodeEndpointAsc = 'nodeEndpoint_ASC',
+  NodeEndpointDesc = 'nodeEndpoint_DESC',
+  NodeLocationAsc = 'nodeLocation_ASC',
+  NodeLocationDesc = 'nodeLocation_DESC',
+  ExtraAsc = 'extra_ASC',
+  ExtraDesc = 'extra_DESC'
+}
+
+export type StorageBucketOperatorMetadataUpdateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>;
+  nodeLocation?: Maybe<Scalars['ID']>;
+  extra?: Maybe<Scalars['String']>;
+};
+
+export type StorageBucketOperatorMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  nodeEndpoint_eq?: Maybe<Scalars['String']>;
+  nodeEndpoint_contains?: Maybe<Scalars['String']>;
+  nodeEndpoint_startsWith?: Maybe<Scalars['String']>;
+  nodeEndpoint_endsWith?: Maybe<Scalars['String']>;
+  nodeEndpoint_in?: Maybe<Array<Scalars['String']>>;
+  nodeLocation_eq?: Maybe<Scalars['ID']>;
+  nodeLocation_in?: Maybe<Array<Scalars['ID']>>;
+  extra_eq?: Maybe<Scalars['String']>;
+  extra_contains?: Maybe<Scalars['String']>;
+  extra_startsWith?: Maybe<Scalars['String']>;
+  extra_endsWith?: Maybe<Scalars['String']>;
+  extra_in?: Maybe<Array<Scalars['String']>>;
+  nodeLocation?: Maybe<NodeLocationMetadataWhereInput>;
+  storagebucketoperatorMetadata_none?: Maybe<StorageBucketWhereInput>;
+  storagebucketoperatorMetadata_some?: Maybe<StorageBucketWhereInput>;
+  storagebucketoperatorMetadata_every?: Maybe<StorageBucketWhereInput>;
+  AND?: Maybe<Array<StorageBucketOperatorMetadataWhereInput>>;
+  OR?: Maybe<Array<StorageBucketOperatorMetadataWhereInput>>;
+};
+
+export type StorageBucketOperatorMetadataWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type StorageBucketOperatorStatus = StorageBucketOperatorStatusMissing | StorageBucketOperatorStatusInvited | StorageBucketOperatorStatusActive;
+
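+// Illustrative helpers (assumptions, not part of the generated output): the
+// variants of this union carry no discriminant field in the generated types,
+// so clients typically request `__typename` in the query (as the queries file
+// later in this diff does) and narrow on it:
+export type OperatorStatusWithTypename = StorageBucketOperatorStatus & {
+  __typename: 'StorageBucketOperatorStatusMissing' | 'StorageBucketOperatorStatusInvited' | 'StorageBucketOperatorStatusActive';
+};
+
+export const isOperatorActive = (status: OperatorStatusWithTypename): boolean =>
+  status.__typename === 'StorageBucketOperatorStatusActive';
+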
+export type StorageBucketOperatorStatusActive = {
+  workerId: Scalars['Int'];
+};
+
+export type StorageBucketOperatorStatusActiveCreateInput = {
+  workerId: Scalars['Float'];
+};
+
+export type StorageBucketOperatorStatusActiveUpdateInput = {
+  workerId?: Maybe<Scalars['Float']>;
+};
+
+export type StorageBucketOperatorStatusActiveWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  workerId_eq?: Maybe<Scalars['Int']>;
+  workerId_gt?: Maybe<Scalars['Int']>;
+  workerId_gte?: Maybe<Scalars['Int']>;
+  workerId_lt?: Maybe<Scalars['Int']>;
+  workerId_lte?: Maybe<Scalars['Int']>;
+  workerId_in?: Maybe<Array<Scalars['Int']>>;
+  AND?: Maybe<Array<StorageBucketOperatorStatusActiveWhereInput>>;
+  OR?: Maybe<Array<StorageBucketOperatorStatusActiveWhereInput>>;
+};
+
+export type StorageBucketOperatorStatusActiveWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type StorageBucketOperatorStatusInvited = {
+  workerId: Scalars['Int'];
+};
+
+export type StorageBucketOperatorStatusInvitedCreateInput = {
+  workerId: Scalars['Float'];
+};
+
+export type StorageBucketOperatorStatusInvitedUpdateInput = {
+  workerId?: Maybe<Scalars['Float']>;
+};
+
+export type StorageBucketOperatorStatusInvitedWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  workerId_eq?: Maybe<Scalars['Int']>;
+  workerId_gt?: Maybe<Scalars['Int']>;
+  workerId_gte?: Maybe<Scalars['Int']>;
+  workerId_lt?: Maybe<Scalars['Int']>;
+  workerId_lte?: Maybe<Scalars['Int']>;
+  workerId_in?: Maybe<Array<Scalars['Int']>>;
+  AND?: Maybe<Array<StorageBucketOperatorStatusInvitedWhereInput>>;
+  OR?: Maybe<Array<StorageBucketOperatorStatusInvitedWhereInput>>;
+};
+
+export type StorageBucketOperatorStatusInvitedWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type StorageBucketOperatorStatusMissing = {
+  phantom?: Maybe<Scalars['Int']>;
+};
+
+export type StorageBucketOperatorStatusMissingCreateInput = {
+  phantom?: Maybe<Scalars['Float']>;
+};
+
+export type StorageBucketOperatorStatusMissingUpdateInput = {
+  phantom?: Maybe<Scalars['Float']>;
+};
+
+export type StorageBucketOperatorStatusMissingWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  phantom_eq?: Maybe<Scalars['Int']>;
+  phantom_gt?: Maybe<Scalars['Int']>;
+  phantom_gte?: Maybe<Scalars['Int']>;
+  phantom_lt?: Maybe<Scalars['Int']>;
+  phantom_lte?: Maybe<Scalars['Int']>;
+  phantom_in?: Maybe<Array<Scalars['Int']>>;
+  AND?: Maybe<Array<StorageBucketOperatorStatusMissingWhereInput>>;
+  OR?: Maybe<Array<StorageBucketOperatorStatusMissingWhereInput>>;
+};
+
+export type StorageBucketOperatorStatusMissingWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export enum StorageBucketOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  OperatorMetadataAsc = 'operatorMetadata_ASC',
+  OperatorMetadataDesc = 'operatorMetadata_DESC',
+  AcceptingNewBagsAsc = 'acceptingNewBags_ASC',
+  AcceptingNewBagsDesc = 'acceptingNewBags_DESC',
+  DataObjectsSizeLimitAsc = 'dataObjectsSizeLimit_ASC',
+  DataObjectsSizeLimitDesc = 'dataObjectsSizeLimit_DESC',
+  DataObjectCountLimitAsc = 'dataObjectCountLimit_ASC',
+  DataObjectCountLimitDesc = 'dataObjectCountLimit_DESC'
+}
+
+export type StorageBucketUpdateInput = {
+  operatorStatus?: Maybe<Scalars['JSONObject']>;
+  operatorMetadata?: Maybe<Scalars['ID']>;
+  acceptingNewBags?: Maybe<Scalars['Boolean']>;
+  dataObjectsSizeLimit?: Maybe<Scalars['BigInt']>;
+  dataObjectCountLimit?: Maybe<Scalars['BigInt']>;
+};
+
+export type StorageBucketWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  operatorStatus_json?: Maybe<Scalars['JSONObject']>;
+  operatorMetadata_eq?: Maybe<Scalars['ID']>;
+  operatorMetadata_in?: Maybe<Array<Scalars['ID']>>;
+  acceptingNewBags_eq?: Maybe<Scalars['Boolean']>;
+  acceptingNewBags_in?: Maybe<Array<Scalars['Boolean']>>;
+  dataObjectsSizeLimit_eq?: Maybe<Scalars['BigInt']>;
+  dataObjectsSizeLimit_gt?: Maybe<Scalars['BigInt']>;
+  dataObjectsSizeLimit_gte?: Maybe<Scalars['BigInt']>;
+  dataObjectsSizeLimit_lt?: Maybe<Scalars['BigInt']>;
+  dataObjectsSizeLimit_lte?: Maybe<Scalars['BigInt']>;
+  dataObjectsSizeLimit_in?: Maybe<Array<Scalars['BigInt']>>;
+  dataObjectCountLimit_eq?: Maybe<Scalars['BigInt']>;
+  dataObjectCountLimit_gt?: Maybe<Scalars['BigInt']>;
+  dataObjectCountLimit_gte?: Maybe<Scalars['BigInt']>;
+  dataObjectCountLimit_lt?: Maybe<Scalars['BigInt']>;
+  dataObjectCountLimit_lte?: Maybe<Scalars['BigInt']>;
+  dataObjectCountLimit_in?: Maybe<Array<Scalars['BigInt']>>;
+  operatorMetadata?: Maybe<StorageBucketOperatorMetadataWhereInput>;
+  bagAssignments_none?: Maybe<StorageBagStorageAssignmentWhereInput>;
+  bagAssignments_some?: Maybe<StorageBagStorageAssignmentWhereInput>;
+  bagAssignments_every?: Maybe<StorageBagStorageAssignmentWhereInput>;
+  AND?: Maybe<Array<StorageBucketWhereInput>>;
+  OR?: Maybe<Array<StorageBucketWhereInput>>;
+};
+
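+// Illustrative value (an assumption, not generated code): a filter selecting
+// buckets that still accept new bags, similar in shape to the where-inputs the
+// distributor node sends when querying storage buckets.
+export const exampleBucketFilter: StorageBucketWhereInput = {
+  acceptingNewBags_eq: true,
+  id_in: ['1', '2'], // illustrative bucket ids
+};
+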
+export type StorageBucketWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type StorageDataObject = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** Whether the data object was uploaded and accepted by the storage provider */
+  isAccepted: Scalars['Boolean'];
+  /** Data object size in bytes */
+  size: Scalars['BigInt'];
+  storageBag: StorageBag;
+  storageBagId: Scalars['String'];
+  /** IPFS content hash */
+  ipfsHash: Scalars['String'];
+  /** Public key used by the storage provider to authenticate the uploader */
+  authenticationKey?: Maybe<Scalars['String']>;
+};
+
+export type StorageDataObjectConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<StorageDataObjectEdge>;
+  pageInfo: PageInfo;
+};
+
+export type StorageDataObjectCreateInput = {
+  isAccepted: Scalars['Boolean'];
+  size: Scalars['BigInt'];
+  storageBag: Scalars['ID'];
+  ipfsHash: Scalars['String'];
+  authenticationKey?: Maybe<Scalars['String']>;
+};
+
+export type StorageDataObjectEdge = {
+  node: StorageDataObject;
+  cursor: Scalars['String'];
+};
+
+export enum StorageDataObjectOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsAcceptedAsc = 'isAccepted_ASC',
+  IsAcceptedDesc = 'isAccepted_DESC',
+  SizeAsc = 'size_ASC',
+  SizeDesc = 'size_DESC',
+  StorageBagAsc = 'storageBag_ASC',
+  StorageBagDesc = 'storageBag_DESC',
+  IpfsHashAsc = 'ipfsHash_ASC',
+  IpfsHashDesc = 'ipfsHash_DESC',
+  AuthenticationKeyAsc = 'authenticationKey_ASC',
+  AuthenticationKeyDesc = 'authenticationKey_DESC'
+}
+
+export type StorageDataObjectUpdateInput = {
+  isAccepted?: Maybe<Scalars['Boolean']>;
+  size?: Maybe<Scalars['BigInt']>;
+  storageBag?: Maybe<Scalars['ID']>;
+  ipfsHash?: Maybe<Scalars['String']>;
+  authenticationKey?: Maybe<Scalars['String']>;
+};
+
+export type StorageDataObjectWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  isAccepted_eq?: Maybe<Scalars['Boolean']>;
+  isAccepted_in?: Maybe<Array<Scalars['Boolean']>>;
+  size_eq?: Maybe<Scalars['BigInt']>;
+  size_gt?: Maybe<Scalars['BigInt']>;
+  size_gte?: Maybe<Scalars['BigInt']>;
+  size_lt?: Maybe<Scalars['BigInt']>;
+  size_lte?: Maybe<Scalars['BigInt']>;
+  size_in?: Maybe<Array<Scalars['BigInt']>>;
+  storageBag_eq?: Maybe<Scalars['ID']>;
+  storageBag_in?: Maybe<Array<Scalars['ID']>>;
+  ipfsHash_eq?: Maybe<Scalars['String']>;
+  ipfsHash_contains?: Maybe<Scalars['String']>;
+  ipfsHash_startsWith?: Maybe<Scalars['String']>;
+  ipfsHash_endsWith?: Maybe<Scalars['String']>;
+  ipfsHash_in?: Maybe<Array<Scalars['String']>>;
+  authenticationKey_eq?: Maybe<Scalars['String']>;
+  authenticationKey_contains?: Maybe<Scalars['String']>;
+  authenticationKey_startsWith?: Maybe<Scalars['String']>;
+  authenticationKey_endsWith?: Maybe<Scalars['String']>;
+  authenticationKey_in?: Maybe<Array<Scalars['String']>>;
+  storageBag?: Maybe<StorageBagWhereInput>;
+  AND?: Maybe<Array<StorageDataObjectWhereInput>>;
+  OR?: Maybe<Array<StorageDataObjectWhereInput>>;
+};
+
+export type StorageDataObjectWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+/** Global storage system parameters */
+export type StorageSystemParameters = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** Blacklisted content hashes */
+  blacklist: Array<Scalars['String']>;
+};
+
+export type StorageSystemParametersConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<StorageSystemParametersEdge>;
+  pageInfo: PageInfo;
+};
+
+export type StorageSystemParametersCreateInput = {
+  blacklist: Array<Scalars['String']>;
+};
+
+export type StorageSystemParametersEdge = {
+  node: StorageSystemParameters;
+  cursor: Scalars['String'];
+};
+
+export enum StorageSystemParametersOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC'
+}
+
+export type StorageSystemParametersUpdateInput = {
+  blacklist?: Maybe<Array<Scalars['String']>>;
+};
+
+export type StorageSystemParametersWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  AND?: Maybe<Array<StorageSystemParametersWhereInput>>;
+  OR?: Maybe<Array<StorageSystemParametersWhereInput>>;
+};
+
+export type StorageSystemParametersWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type Subscription = {
+  stateSubscription: ProcessorState;
+};
+
+export type Video = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  channel?: Maybe<Channel>;
+  channelId?: Maybe<Scalars['String']>;
+  category?: Maybe<VideoCategory>;
+  categoryId?: Maybe<Scalars['String']>;
+  /** The title of the video */
+  title?: Maybe<Scalars['String']>;
+  /** The description of the Video */
+  description?: Maybe<Scalars['String']>;
+  /** Video duration in seconds */
+  duration?: Maybe<Scalars['Int']>;
+  thumbnailPhotoDataObject?: Maybe<DataObject>;
+  thumbnailPhotoDataObjectId?: Maybe<Scalars['String']>;
+  /** URLs where the asset content can be accessed (if any) */
+  thumbnailPhotoUrls: Array<Scalars['String']>;
+  /** Availability meta information */
+  thumbnailPhotoAvailability: AssetAvailability;
+  language?: Maybe<Language>;
+  languageId?: Maybe<Scalars['String']>;
+  /** Whether or not the Video contains marketing */
+  hasMarketing?: Maybe<Scalars['Boolean']>;
+  /** If the Video was published on another platform before being published on Joystream - the original publication date */
+  publishedBeforeJoystream?: Maybe<Scalars['DateTime']>;
+  /** Whether the Video is supposed to be publicly displayed */
+  isPublic?: Maybe<Scalars['Boolean']>;
+  /** Flag signaling whether a video is censored. */
+  isCensored: Scalars['Boolean'];
+  /** Whether the Video contains explicit material. */
+  isExplicit?: Maybe<Scalars['Boolean']>;
+  license?: Maybe<License>;
+  licenseId?: Maybe<Scalars['String']>;
+  mediaDataObject?: Maybe<DataObject>;
+  mediaDataObjectId?: Maybe<Scalars['String']>;
+  /** URLs where the asset content can be accessed (if any) */
+  mediaUrls: Array<Scalars['String']>;
+  /** Availability meta information */
+  mediaAvailability: AssetAvailability;
+  mediaMetadata?: Maybe<VideoMediaMetadata>;
+  mediaMetadataId?: Maybe<Scalars['String']>;
+  createdInBlock: Scalars['Int'];
+  /** Whether the video is featured */
+  isFeatured: Scalars['Boolean'];
+};
+
+export type VideoCategoriesByNameFtsOutput = {
+  item: VideoCategoriesByNameSearchResult;
+  rank: Scalars['Float'];
+  isTypeOf: Scalars['String'];
+  highlight: Scalars['String'];
+};
+
+export type VideoCategoriesByNameSearchResult = VideoCategory;
+
+export type VideoCategory = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** The name of the category */
+  name?: Maybe<Scalars['String']>;
+  videos: Array<Video>;
+  createdInBlock: Scalars['Int'];
+};
+
+export type VideoCategoryConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<VideoCategoryEdge>;
+  pageInfo: PageInfo;
+};
+
+export type VideoCategoryCreateInput = {
+  name?: Maybe<Scalars['String']>;
+  createdInBlock: Scalars['Float'];
+};
+
+export type VideoCategoryEdge = {
+  node: VideoCategory;
+  cursor: Scalars['String'];
+};
+
+export enum VideoCategoryOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NameAsc = 'name_ASC',
+  NameDesc = 'name_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC'
+}
+
+export type VideoCategoryUpdateInput = {
+  name?: Maybe<Scalars['String']>;
+  createdInBlock?: Maybe<Scalars['Float']>;
+};
+
+export type VideoCategoryWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  name_eq?: Maybe<Scalars['String']>;
+  name_contains?: Maybe<Scalars['String']>;
+  name_startsWith?: Maybe<Scalars['String']>;
+  name_endsWith?: Maybe<Scalars['String']>;
+  name_in?: Maybe<Array<Scalars['String']>>;
+  createdInBlock_eq?: Maybe<Scalars['Int']>;
+  createdInBlock_gt?: Maybe<Scalars['Int']>;
+  createdInBlock_gte?: Maybe<Scalars['Int']>;
+  createdInBlock_lt?: Maybe<Scalars['Int']>;
+  createdInBlock_lte?: Maybe<Scalars['Int']>;
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>;
+  videos_none?: Maybe<VideoWhereInput>;
+  videos_some?: Maybe<VideoWhereInput>;
+  videos_every?: Maybe<VideoWhereInput>;
+  AND?: Maybe<Array<VideoCategoryWhereInput>>;
+  OR?: Maybe<Array<VideoCategoryWhereInput>>;
+};
+
+export type VideoCategoryWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type VideoConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<VideoEdge>;
+  pageInfo: PageInfo;
+};
+
+export type VideoCreateInput = {
+  channel?: Maybe<Scalars['ID']>;
+  category?: Maybe<Scalars['ID']>;
+  title?: Maybe<Scalars['String']>;
+  description?: Maybe<Scalars['String']>;
+  duration?: Maybe<Scalars['Float']>;
+  thumbnailPhotoDataObject?: Maybe<Scalars['ID']>;
+  thumbnailPhotoUrls: Array<Scalars['String']>;
+  thumbnailPhotoAvailability: AssetAvailability;
+  language?: Maybe<Scalars['ID']>;
+  hasMarketing?: Maybe<Scalars['Boolean']>;
+  publishedBeforeJoystream?: Maybe<Scalars['DateTime']>;
+  isPublic?: Maybe<Scalars['Boolean']>;
+  isCensored: Scalars['Boolean'];
+  isExplicit?: Maybe<Scalars['Boolean']>;
+  license?: Maybe<Scalars['ID']>;
+  mediaDataObject?: Maybe<Scalars['ID']>;
+  mediaUrls: Array<Scalars['String']>;
+  mediaAvailability: AssetAvailability;
+  mediaMetadata?: Maybe<Scalars['ID']>;
+  createdInBlock: Scalars['Float'];
+  isFeatured: Scalars['Boolean'];
+};
+
+export type VideoEdge = {
+  node: Video;
+  cursor: Scalars['String'];
+};
+
+export type VideoMediaEncoding = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** Encoding of the video media object */
+  codecName?: Maybe<Scalars['String']>;
+  /** Media container format */
+  container?: Maybe<Scalars['String']>;
+  /** Content MIME type */
+  mimeMediaType?: Maybe<Scalars['String']>;
+  videomediametadataencoding?: Maybe<Array<VideoMediaMetadata>>;
+};
+
+export type VideoMediaEncodingConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<VideoMediaEncodingEdge>;
+  pageInfo: PageInfo;
+};
+
+export type VideoMediaEncodingCreateInput = {
+  codecName?: Maybe<Scalars['String']>;
+  container?: Maybe<Scalars['String']>;
+  mimeMediaType?: Maybe<Scalars['String']>;
+};
+
+export type VideoMediaEncodingEdge = {
+  node: VideoMediaEncoding;
+  cursor: Scalars['String'];
+};
+
+export enum VideoMediaEncodingOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CodecNameAsc = 'codecName_ASC',
+  CodecNameDesc = 'codecName_DESC',
+  ContainerAsc = 'container_ASC',
+  ContainerDesc = 'container_DESC',
+  MimeMediaTypeAsc = 'mimeMediaType_ASC',
+  MimeMediaTypeDesc = 'mimeMediaType_DESC'
+}
+
+export type VideoMediaEncodingUpdateInput = {
+  codecName?: Maybe<Scalars['String']>;
+  container?: Maybe<Scalars['String']>;
+  mimeMediaType?: Maybe<Scalars['String']>;
+};
+
+export type VideoMediaEncodingWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  codecName_eq?: Maybe<Scalars['String']>;
+  codecName_contains?: Maybe<Scalars['String']>;
+  codecName_startsWith?: Maybe<Scalars['String']>;
+  codecName_endsWith?: Maybe<Scalars['String']>;
+  codecName_in?: Maybe<Array<Scalars['String']>>;
+  container_eq?: Maybe<Scalars['String']>;
+  container_contains?: Maybe<Scalars['String']>;
+  container_startsWith?: Maybe<Scalars['String']>;
+  container_endsWith?: Maybe<Scalars['String']>;
+  container_in?: Maybe<Array<Scalars['String']>>;
+  mimeMediaType_eq?: Maybe<Scalars['String']>;
+  mimeMediaType_contains?: Maybe<Scalars['String']>;
+  mimeMediaType_startsWith?: Maybe<Scalars['String']>;
+  mimeMediaType_endsWith?: Maybe<Scalars['String']>;
+  mimeMediaType_in?: Maybe<Array<Scalars['String']>>;
+  videomediametadataencoding_none?: Maybe<VideoMediaMetadataWhereInput>;
+  videomediametadataencoding_some?: Maybe<VideoMediaMetadataWhereInput>;
+  videomediametadataencoding_every?: Maybe<VideoMediaMetadataWhereInput>;
+  AND?: Maybe<Array<VideoMediaEncodingWhereInput>>;
+  OR?: Maybe<Array<VideoMediaEncodingWhereInput>>;
+};
+
+export type VideoMediaEncodingWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type VideoMediaMetadata = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  encoding?: Maybe<VideoMediaEncoding>;
+  encodingId?: Maybe<Scalars['String']>;
+  /** Video media width in pixels */
+  pixelWidth?: Maybe<Scalars['Int']>;
+  /** Video media height in pixels */
+  pixelHeight?: Maybe<Scalars['Int']>;
+  /** Video media size in bytes */
+  size?: Maybe<Scalars['Int']>;
+  video?: Maybe<Video>;
+  createdInBlock: Scalars['Int'];
+};
+
+export type VideoMediaMetadataConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<VideoMediaMetadataEdge>;
+  pageInfo: PageInfo;
+};
+
+export type VideoMediaMetadataCreateInput = {
+  encoding?: Maybe<Scalars['ID']>;
+  pixelWidth?: Maybe<Scalars['Float']>;
+  pixelHeight?: Maybe<Scalars['Float']>;
+  size?: Maybe<Scalars['Float']>;
+  createdInBlock: Scalars['Float'];
+};
+
+export type VideoMediaMetadataEdge = {
+  node: VideoMediaMetadata;
+  cursor: Scalars['String'];
+};
+
+export enum VideoMediaMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  EncodingAsc = 'encoding_ASC',
+  EncodingDesc = 'encoding_DESC',
+  PixelWidthAsc = 'pixelWidth_ASC',
+  PixelWidthDesc = 'pixelWidth_DESC',
+  PixelHeightAsc = 'pixelHeight_ASC',
+  PixelHeightDesc = 'pixelHeight_DESC',
+  SizeAsc = 'size_ASC',
+  SizeDesc = 'size_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC'
+}
+
+export type VideoMediaMetadataUpdateInput = {
+  encoding?: Maybe<Scalars['ID']>;
+  pixelWidth?: Maybe<Scalars['Float']>;
+  pixelHeight?: Maybe<Scalars['Float']>;
+  size?: Maybe<Scalars['Float']>;
+  createdInBlock?: Maybe<Scalars['Float']>;
+};
+
+export type VideoMediaMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  encoding_eq?: Maybe<Scalars['ID']>;
+  encoding_in?: Maybe<Array<Scalars['ID']>>;
+  pixelWidth_eq?: Maybe<Scalars['Int']>;
+  pixelWidth_gt?: Maybe<Scalars['Int']>;
+  pixelWidth_gte?: Maybe<Scalars['Int']>;
+  pixelWidth_lt?: Maybe<Scalars['Int']>;
+  pixelWidth_lte?: Maybe<Scalars['Int']>;
+  pixelWidth_in?: Maybe<Array<Scalars['Int']>>;
+  pixelHeight_eq?: Maybe<Scalars['Int']>;
+  pixelHeight_gt?: Maybe<Scalars['Int']>;
+  pixelHeight_gte?: Maybe<Scalars['Int']>;
+  pixelHeight_lt?: Maybe<Scalars['Int']>;
+  pixelHeight_lte?: Maybe<Scalars['Int']>;
+  pixelHeight_in?: Maybe<Array<Scalars['Int']>>;
+  size_eq?: Maybe<Scalars['Int']>;
+  size_gt?: Maybe<Scalars['Int']>;
+  size_gte?: Maybe<Scalars['Int']>;
+  size_lt?: Maybe<Scalars['Int']>;
+  size_lte?: Maybe<Scalars['Int']>;
+  size_in?: Maybe<Array<Scalars['Int']>>;
+  createdInBlock_eq?: Maybe<Scalars['Int']>;
+  createdInBlock_gt?: Maybe<Scalars['Int']>;
+  createdInBlock_gte?: Maybe<Scalars['Int']>;
+  createdInBlock_lt?: Maybe<Scalars['Int']>;
+  createdInBlock_lte?: Maybe<Scalars['Int']>;
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>;
+  encoding?: Maybe<VideoMediaEncodingWhereInput>;
+  video?: Maybe<VideoWhereInput>;
+  AND?: Maybe<Array<VideoMediaMetadataWhereInput>>;
+  OR?: Maybe<Array<VideoMediaMetadataWhereInput>>;
+};
+
+export type VideoMediaMetadataWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export enum VideoOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  ChannelAsc = 'channel_ASC',
+  ChannelDesc = 'channel_DESC',
+  CategoryAsc = 'category_ASC',
+  CategoryDesc = 'category_DESC',
+  TitleAsc = 'title_ASC',
+  TitleDesc = 'title_DESC',
+  DescriptionAsc = 'description_ASC',
+  DescriptionDesc = 'description_DESC',
+  DurationAsc = 'duration_ASC',
+  DurationDesc = 'duration_DESC',
+  ThumbnailPhotoDataObjectAsc = 'thumbnailPhotoDataObject_ASC',
+  ThumbnailPhotoDataObjectDesc = 'thumbnailPhotoDataObject_DESC',
+  ThumbnailPhotoAvailabilityAsc = 'thumbnailPhotoAvailability_ASC',
+  ThumbnailPhotoAvailabilityDesc = 'thumbnailPhotoAvailability_DESC',
+  LanguageAsc = 'language_ASC',
+  LanguageDesc = 'language_DESC',
+  HasMarketingAsc = 'hasMarketing_ASC',
+  HasMarketingDesc = 'hasMarketing_DESC',
+  PublishedBeforeJoystreamAsc = 'publishedBeforeJoystream_ASC',
+  PublishedBeforeJoystreamDesc = 'publishedBeforeJoystream_DESC',
+  IsPublicAsc = 'isPublic_ASC',
+  IsPublicDesc = 'isPublic_DESC',
+  IsCensoredAsc = 'isCensored_ASC',
+  IsCensoredDesc = 'isCensored_DESC',
+  IsExplicitAsc = 'isExplicit_ASC',
+  IsExplicitDesc = 'isExplicit_DESC',
+  LicenseAsc = 'license_ASC',
+  LicenseDesc = 'license_DESC',
+  MediaDataObjectAsc = 'mediaDataObject_ASC',
+  MediaDataObjectDesc = 'mediaDataObject_DESC',
+  MediaAvailabilityAsc = 'mediaAvailability_ASC',
+  MediaAvailabilityDesc = 'mediaAvailability_DESC',
+  MediaMetadataAsc = 'mediaMetadata_ASC',
+  MediaMetadataDesc = 'mediaMetadata_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+  IsFeaturedAsc = 'isFeatured_ASC',
+  IsFeaturedDesc = 'isFeatured_DESC'
+}
+
+export type VideoUpdateInput = {
+  channel?: Maybe<Scalars['ID']>;
+  category?: Maybe<Scalars['ID']>;
+  title?: Maybe<Scalars['String']>;
+  description?: Maybe<Scalars['String']>;
+  duration?: Maybe<Scalars['Float']>;
+  thumbnailPhotoDataObject?: Maybe<Scalars['ID']>;
+  thumbnailPhotoUrls?: Maybe<Array<Scalars['String']>>;
+  thumbnailPhotoAvailability?: Maybe<AssetAvailability>;
+  language?: Maybe<Scalars['ID']>;
+  hasMarketing?: Maybe<Scalars['Boolean']>;
+  publishedBeforeJoystream?: Maybe<Scalars['DateTime']>;
+  isPublic?: Maybe<Scalars['Boolean']>;
+  isCensored?: Maybe<Scalars['Boolean']>;
+  isExplicit?: Maybe<Scalars['Boolean']>;
+  license?: Maybe<Scalars['ID']>;
+  mediaDataObject?: Maybe<Scalars['ID']>;
+  mediaUrls?: Maybe<Array<Scalars['String']>>;
+  mediaAvailability?: Maybe<AssetAvailability>;
+  mediaMetadata?: Maybe<Scalars['ID']>;
+  createdInBlock?: Maybe<Scalars['Float']>;
+  isFeatured?: Maybe<Scalars['Boolean']>;
+};
+
+export type VideoWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  channel_eq?: Maybe<Scalars['ID']>;
+  channel_in?: Maybe<Array<Scalars['ID']>>;
+  category_eq?: Maybe<Scalars['ID']>;
+  category_in?: Maybe<Array<Scalars['ID']>>;
+  title_eq?: Maybe<Scalars['String']>;
+  title_contains?: Maybe<Scalars['String']>;
+  title_startsWith?: Maybe<Scalars['String']>;
+  title_endsWith?: Maybe<Scalars['String']>;
+  title_in?: Maybe<Array<Scalars['String']>>;
+  description_eq?: Maybe<Scalars['String']>;
+  description_contains?: Maybe<Scalars['String']>;
+  description_startsWith?: Maybe<Scalars['String']>;
+  description_endsWith?: Maybe<Scalars['String']>;
+  description_in?: Maybe<Array<Scalars['String']>>;
+  duration_eq?: Maybe<Scalars['Int']>;
+  duration_gt?: Maybe<Scalars['Int']>;
+  duration_gte?: Maybe<Scalars['Int']>;
+  duration_lt?: Maybe<Scalars['Int']>;
+  duration_lte?: Maybe<Scalars['Int']>;
+  duration_in?: Maybe<Array<Scalars['Int']>>;
+  thumbnailPhotoDataObject_eq?: Maybe<Scalars['ID']>;
+  thumbnailPhotoDataObject_in?: Maybe<Array<Scalars['ID']>>;
+  thumbnailPhotoAvailability_eq?: Maybe<AssetAvailability>;
+  thumbnailPhotoAvailability_in?: Maybe<Array<AssetAvailability>>;
+  language_eq?: Maybe<Scalars['ID']>;
+  language_in?: Maybe<Array<Scalars['ID']>>;
+  hasMarketing_eq?: Maybe<Scalars['Boolean']>;
+  hasMarketing_in?: Maybe<Array<Scalars['Boolean']>>;
+  publishedBeforeJoystream_eq?: Maybe<Scalars['DateTime']>;
+  publishedBeforeJoystream_lt?: Maybe<Scalars['DateTime']>;
+  publishedBeforeJoystream_lte?: Maybe<Scalars['DateTime']>;
+  publishedBeforeJoystream_gt?: Maybe<Scalars['DateTime']>;
+  publishedBeforeJoystream_gte?: Maybe<Scalars['DateTime']>;
+  isPublic_eq?: Maybe<Scalars['Boolean']>;
+  isPublic_in?: Maybe<Array<Scalars['Boolean']>>;
+  isCensored_eq?: Maybe<Scalars['Boolean']>;
+  isCensored_in?: Maybe<Array<Scalars['Boolean']>>;
+  isExplicit_eq?: Maybe<Scalars['Boolean']>;
+  isExplicit_in?: Maybe<Array<Scalars['Boolean']>>;
+  license_eq?: Maybe<Scalars['ID']>;
+  license_in?: Maybe<Array<Scalars['ID']>>;
+  mediaDataObject_eq?: Maybe<Scalars['ID']>;
+  mediaDataObject_in?: Maybe<Array<Scalars['ID']>>;
+  mediaAvailability_eq?: Maybe<AssetAvailability>;
+  mediaAvailability_in?: Maybe<Array<AssetAvailability>>;
+  mediaMetadata_eq?: Maybe<Scalars['ID']>;
+  mediaMetadata_in?: Maybe<Array<Scalars['ID']>>;
+  createdInBlock_eq?: Maybe<Scalars['Int']>;
+  createdInBlock_gt?: Maybe<Scalars['Int']>;
+  createdInBlock_gte?: Maybe<Scalars['Int']>;
+  createdInBlock_lt?: Maybe<Scalars['Int']>;
+  createdInBlock_lte?: Maybe<Scalars['Int']>;
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>;
+  isFeatured_eq?: Maybe<Scalars['Boolean']>;
+  isFeatured_in?: Maybe<Array<Scalars['Boolean']>>;
+  channel?: Maybe<ChannelWhereInput>;
+  category?: Maybe<VideoCategoryWhereInput>;
+  thumbnailPhotoDataObject?: Maybe<DataObjectWhereInput>;
+  language?: Maybe<LanguageWhereInput>;
+  license?: Maybe<LicenseWhereInput>;
+  mediaDataObject?: Maybe<DataObjectWhereInput>;
+  mediaMetadata?: Maybe<VideoMediaMetadataWhereInput>;
+  AND?: Maybe<Array<VideoWhereInput>>;
+  OR?: Maybe<Array<VideoWhereInput>>;
+};
+
+export type VideoWhereUniqueInput = {
+  id: Scalars['ID'];
+};
+
+export type Worker = BaseGraphQlObject & {
+  id: Scalars['ID'];
+  createdAt: Scalars['DateTime'];
+  createdById: Scalars['String'];
+  updatedAt?: Maybe<Scalars['DateTime']>;
+  updatedById?: Maybe<Scalars['String']>;
+  deletedAt?: Maybe<Scalars['DateTime']>;
+  deletedById?: Maybe<Scalars['String']>;
+  version: Scalars['Int'];
+  /** Whether the worker is still active */
+  isActive: Scalars['Boolean'];
+  /** Runtime identifier */
+  workerId: Scalars['String'];
+  /** Associated working group */
+  type: WorkerType;
+  /** Custom metadata set by provider */
+  metadata?: Maybe<Scalars['String']>;
+  dataObjects: Array<DataObject>;
+};
+
+export type WorkerConnection = {
+  totalCount: Scalars['Int'];
+  edges: Array<WorkerEdge>;
+  pageInfo: PageInfo;
+};
+
+export type WorkerCreateInput = {
+  isActive: Scalars['Boolean'];
+  workerId: Scalars['String'];
+  type: WorkerType;
+  metadata?: Maybe<Scalars['String']>;
+};
+
+export type WorkerEdge = {
+  node: Worker;
+  cursor: Scalars['String'];
+};
+
+export enum WorkerOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsActiveAsc = 'isActive_ASC',
+  IsActiveDesc = 'isActive_DESC',
+  WorkerIdAsc = 'workerId_ASC',
+  WorkerIdDesc = 'workerId_DESC',
+  TypeAsc = 'type_ASC',
+  TypeDesc = 'type_DESC',
+  MetadataAsc = 'metadata_ASC',
+  MetadataDesc = 'metadata_DESC'
+}
+
+export enum WorkerType {
+  Gateway = 'GATEWAY',
+  Storage = 'STORAGE'
+}
+
+export type WorkerUpdateInput = {
+  isActive?: Maybe<Scalars['Boolean']>;
+  workerId?: Maybe<Scalars['String']>;
+  type?: Maybe<WorkerType>;
+  metadata?: Maybe<Scalars['String']>;
+};
+
+export type WorkerWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>;
+  id_in?: Maybe<Array<Scalars['ID']>>;
+  createdAt_eq?: Maybe<Scalars['DateTime']>;
+  createdAt_lt?: Maybe<Scalars['DateTime']>;
+  createdAt_lte?: Maybe<Scalars['DateTime']>;
+  createdAt_gt?: Maybe<Scalars['DateTime']>;
+  createdAt_gte?: Maybe<Scalars['DateTime']>;
+  createdById_eq?: Maybe<Scalars['ID']>;
+  createdById_in?: Maybe<Array<Scalars['ID']>>;
+  updatedAt_eq?: Maybe<Scalars['DateTime']>;
+  updatedAt_lt?: Maybe<Scalars['DateTime']>;
+  updatedAt_lte?: Maybe<Scalars['DateTime']>;
+  updatedAt_gt?: Maybe<Scalars['DateTime']>;
+  updatedAt_gte?: Maybe<Scalars['DateTime']>;
+  updatedById_eq?: Maybe<Scalars['ID']>;
+  updatedById_in?: Maybe<Array<Scalars['ID']>>;
+  deletedAt_all?: Maybe<Scalars['Boolean']>;
+  deletedAt_eq?: Maybe<Scalars['DateTime']>;
+  deletedAt_lt?: Maybe<Scalars['DateTime']>;
+  deletedAt_lte?: Maybe<Scalars['DateTime']>;
+  deletedAt_gt?: Maybe<Scalars['DateTime']>;
+  deletedAt_gte?: Maybe<Scalars['DateTime']>;
+  deletedById_eq?: Maybe<Scalars['ID']>;
+  deletedById_in?: Maybe<Array<Scalars['ID']>>;
+  isActive_eq?: Maybe<Scalars['Boolean']>;
+  isActive_in?: Maybe<Array<Scalars['Boolean']>>;
+  workerId_eq?: Maybe<Scalars['String']>;
+  workerId_contains?: Maybe<Scalars['String']>;
+  workerId_startsWith?: Maybe<Scalars['String']>;
+  workerId_endsWith?: Maybe<Scalars['String']>;
+  workerId_in?: Maybe<Array<Scalars['String']>>;
+  type_eq?: Maybe<WorkerType>;
+  type_in?: Maybe<Array<WorkerType>>;
+  metadata_eq?: Maybe<Scalars['String']>;
+  metadata_contains?: Maybe<Scalars['String']>;
+  metadata_startsWith?: Maybe<Scalars['String']>;
+  metadata_endsWith?: Maybe<Scalars['String']>;
+  metadata_in?: Maybe<Array<Scalars['String']>>;
+  dataObjects_none?: Maybe<DataObjectWhereInput>;
+  dataObjects_some?: Maybe<DataObjectWhereInput>;
+  dataObjects_every?: Maybe<DataObjectWhereInput>;
+  AND?: Maybe<Array<WorkerWhereInput>>;
+  OR?: Maybe<Array<WorkerWhereInput>>;
+};
+
+export type WorkerWhereUniqueInput = {
+  id: Scalars['ID'];
+};

+ 78 - 0
distributor-node/src/services/networking/query-node/queries/queries.graphql

@@ -0,0 +1,78 @@
+fragment DataObjectDetails on StorageDataObject {
+  id
+  size
+  ipfsHash
+  isAccepted
+  storageBag {
+    storageAssignments {
+      storageBucket {
+        id
+        operatorMetadata {
+          nodeEndpoint
+        }
+        operatorStatus {
+          __typename
+        }
+      }
+    }
+    distirbutionAssignments {
+      distributionBucket {
+        id
+        operators {
+          workerId
+          status
+        }
+      }
+    }
+  }
+}
+
+query getDataObjectDetails($id: ID!) {
+  storageDataObjectByUniqueInput(where: { id: $id }) {
+    ...DataObjectDetails
+  }
+}
+
+fragment DistirubtionBucketWithObjects on DistributionBucket {
+  id
+  bagAssignments {
+    storageBag {
+      objects {
+        id
+        size
+        ipfsHash
+      }
+    }
+  }
+}
+
+query getDistributionBucketsWithObjectsByIds($ids: [ID!]) {
+  distributionBuckets(where: { id_in: $ids }) {
+    ...DistirubtionBucketWithObjects
+  }
+}
+
+query getDistributionBucketsWithObjectsByWorkerId($workerId: Int!) {
+  distributionBuckets(where: { operators_some: { workerId_eq: $workerId, status_eq: ACTIVE } }) {
+    ...DistirubtionBucketWithObjects
+  }
+}
+
+fragment StorageBucketOperatorFields on StorageBucket {
+  id
+  operatorMetadata {
+    nodeEndpoint
+  }
+}
+
+query getActiveStorageBucketOperatorsData {
+  storageBuckets(
+    where: {
+      operatorStatus_json: { isTypeOf_eq: "StorageBucketOperatorStatusActive" }
+      operatorMetadata: { nodeEndpoint_contains: "http" }
+    }
+    limit: 9999
+  ) {
+    ...StorageBucketOperatorFields
+  }
+}
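As a rough illustration of how one of these documents could be executed outside the node, here is a hedged sketch using `graphql-request` (the endpoint and the choice of client are assumptions; the distributor node itself consumes these queries through generated client code):

import { GraphQLClient, gql } from 'graphql-request'

// Assumed local query-node endpoint
const client = new GraphQLClient('http://localhost:8081/graphql')

const GET_DATA_OBJECT_DETAILS = gql`
  query getDataObjectDetails($id: ID!) {
    storageDataObjectByUniqueInput(where: { id: $id }) {
      id
      size
      ipfsHash
      isAccepted
    }
  }
`

async function fetchDataObjectDetails(id: string): Promise<unknown> {
  const data = await client.request(GET_DATA_OBJECT_DETAILS, { id })
  return data.storageDataObjectByUniqueInput
}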

+ 145 - 0
distributor-node/src/services/networking/runtime/api.ts

@@ -0,0 +1,145 @@
+import { types } from '@joystream/types/'
+import { ApiPromise, WsProvider, SubmittableResult } from '@polkadot/api'
+import { SubmittableExtrinsic, AugmentedEvent } from '@polkadot/api/types'
+import { KeyringPair } from '@polkadot/keyring/types'
+import { Balance } from '@polkadot/types/interfaces'
+import { formatBalance } from '@polkadot/util'
+import { IEvent } from '@polkadot/types/types'
+import { DispatchError } from '@polkadot/types/interfaces/system'
+import { LoggingService } from '../../logging'
+import { Logger } from 'winston'
+
+export class ExtrinsicFailedError extends Error {}
+
+export class RuntimeApi {
+  private _api: ApiPromise
+  private logger: Logger
+
+  public isDevelopment = false
+
+  private constructor(logging: LoggingService, originalApi: ApiPromise, isDevelopment: boolean) {
+    this.isDevelopment = isDevelopment
+    this.logger = logging.createLogger('SubstrateApi')
+    this._api = originalApi
+  }
+
+  static async create(
+    logging: LoggingService,
+    apiUri: string,
+    metadataCache?: Record<string, any>
+  ): Promise<RuntimeApi> {
+    const { api, chainType } = await RuntimeApi.initApi(apiUri, metadataCache)
+    return new RuntimeApi(logging, api, chainType.isDevelopment || chainType.isLocal)
+  }
+
+  private static async initApi(apiUri: string, metadataCache?: Record<string, any>) {
+    const wsProvider: WsProvider = new WsProvider(apiUri)
+    const api = await ApiPromise.create({ provider: wsProvider, types, metadata: metadataCache })
+
+    // Initializing some api params based on pioneer/packages/react-api/Api.tsx
+    const [properties, chainType] = await Promise.all([api.rpc.system.properties(), api.rpc.system.chainType()])
+
+    const tokenSymbol = properties.tokenSymbol.unwrap()[0].toString()
+    const tokenDecimals = properties.tokenDecimals.unwrap()[0].toNumber()
+
+    // formatBalance config
+    formatBalance.setDefaults({
+      decimals: tokenDecimals,
+      unit: tokenSymbol,
+    })
+
+    return { api, properties, chainType }
+  }
+
+  public get query(): ApiPromise['query'] {
+    return this._api.query
+  }
+
+  public get tx(): ApiPromise['tx'] {
+    return this._api.tx
+  }
+
+  public get consts(): ApiPromise['consts'] {
+    return this._api.consts
+  }
+
+  public get derive(): ApiPromise['derive'] {
+    return this._api.derive
+  }
+
+  public get createType(): ApiPromise['createType'] {
+    return this._api.createType.bind(this._api)
+  }
+
+  public sudo(tx: SubmittableExtrinsic<'promise'>): SubmittableExtrinsic<'promise'> {
+    return this._api.tx.sudo.sudo(tx)
+  }
+
+  public async estimateFee(account: KeyringPair, tx: SubmittableExtrinsic<'promise'>): Promise<Balance> {
+    const paymentInfo = await tx.paymentInfo(account)
+    return paymentInfo.partialFee
+  }
+
+  public findEvent<
+    S extends keyof ApiPromise['events'] & string,
+    M extends keyof ApiPromise['events'][S] & string,
+    EventType = ApiPromise['events'][S][M] extends AugmentedEvent<'promise', infer T> ? IEvent<T> : never
+  >(result: SubmittableResult, section: S, method: M): EventType | undefined {
+    return result.findRecord(section, method)?.event as EventType | undefined
+  }
+
+  public getEvent<
+    S extends keyof ApiPromise['events'] & string,
+    M extends keyof ApiPromise['events'][S] & string,
+    EventType = ApiPromise['events'][S][M] extends AugmentedEvent<'promise', infer T> ? IEvent<T> : never
+  >(result: SubmittableResult, section: S, method: M): EventType {
+    const event = this.findEvent(result, section, method)
+    if (!event) {
+      throw new Error(`Cannot find expected ${section}.${method} event in result: ${JSON.stringify(result.toHuman())}`)
+    }
+    return (event as unknown) as EventType
+  }
+
+  sendExtrinsic(keyPair: KeyringPair, tx: SubmittableExtrinsic<'promise'>): Promise<SubmittableResult> {
+    this.logger.info(`Sending ${tx.method.section}.${tx.method.method} extrinsic from ${keyPair.address}`)
+    return new Promise((resolve, reject) => {
+      let unsubscribe: () => void
+      tx.signAndSend(keyPair, {}, (result) => {
+        // Implementation loosely based on /pioneer/packages/react-signer/src/Modal.tsx
+        if (!result || !result.status) {
+          return
+        }
+
+        if (result.status.isInBlock) {
+          unsubscribe()
+          result.events
+            .filter(({ event }) => event.section === 'system')
+            .forEach(({ event }) => {
+              if (event.method === 'ExtrinsicFailed') {
+                const dispatchError = event.data[0] as DispatchError
+                let errorMsg = dispatchError.toString()
+                if (dispatchError.isModule) {
+                  try {
+                    const { name, documentation } = this._api.registry.findMetaError(dispatchError.asModule)
+                    errorMsg = `${name} (${documentation})`
+                  } catch (e) {
+                    // This probably means we don't have this error in the metadata
+                    // In this case - continue (we'll just display dispatchError.toString())
+                  }
+                }
+                reject(new ExtrinsicFailedError(`Extrinsic execution error: ${errorMsg}`))
+              } else if (event.method === 'ExtrinsicSuccess') {
+                resolve(result)
+              }
+            })
+        } else if (result.isError) {
+          reject(new ExtrinsicFailedError('Extrinsic execution error!'))
+        }
+      })
+        .then((unsubFunc) => (unsubscribe = unsubFunc))
+        .catch((e) =>
+          reject(new ExtrinsicFailedError(`Cannot send the extrinsic: ${e.message ? e.message : JSON.stringify(e)}`))
+        )
+    })
+  }
+}
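A hedged usage sketch for `RuntimeApi` (the node URL and accounts are placeholders, and `balances.transfer` is used only because it exists on any Substrate chain; the distributor commands use storage-pallet extrinsics instead):

import { Keyring } from '@polkadot/keyring'
import { RuntimeApi } from './api'
import { LoggingService } from '../../logging'

async function example(logging: LoggingService): Promise<void> {
  const api = await RuntimeApi.create(logging, 'ws://localhost:9944')

  const keyring = new Keyring({ type: 'sr25519' })
  const alice = keyring.addFromUri('//Alice')
  const bob = keyring.addFromUri('//Bob')

  // Estimate the fee before sending
  const tx = api.tx.balances.transfer(bob.address, 1000)
  const fee = await api.estimateFee(alice, tx)
  console.log(`Estimated fee: ${fee.toString()}`)

  // Resolves once the extrinsic is in a block and ExtrinsicSuccess was emitted
  const result = await api.sendExtrinsic(alice, tx)
  const transferEvent = api.findEvent(result, 'balances', 'Transfer')
  console.log('Transfer event data:', transferEvent?.data.toHuman())
}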

+ 49 - 0
distributor-node/src/services/networking/storage-node/api.ts

@@ -0,0 +1,49 @@
+import { Configuration } from './generated'
+import { PublicApi } from './generated/api'
+import axios, { AxiosRequestConfig } from 'axios'
+import { LoggingService } from '../../logging'
+import { Logger } from 'winston'
+import { StorageNodeDownloadResponse } from '../../../types'
+import { parseAxiosError } from '../../parsers/errors'
+
+export class StorageNodeApi {
+  private logger: Logger
+  private publicApi: PublicApi
+  private endpoint: string
+
+  public constructor(endpoint: string, logging: LoggingService) {
+    const config = new Configuration({
+      basePath: endpoint,
+    })
+    this.publicApi = new PublicApi(config)
+    this.endpoint = new URL(endpoint).toString()
+    this.logger = logging.createLogger('StorageNodeApi', { endpoint })
+  }
+
+  public async isObjectAvailable(contentHash: string): Promise<boolean> {
+    this.logger.debug('Checking object availability', { contentHash })
+    try {
+      await this.publicApi.publicApiGetFileHeaders(contentHash)
+      this.logger.debug('Data object available', { contentHash })
+      return true
+    } catch (err) {
+      if (axios.isAxiosError(err)) {
+        this.logger.debug('Data object not available', { err: parseAxiosError(err) })
+        return false
+      }
+      this.logger.error('Unexpected error while requesting data object', { err })
+      throw err
+    }
+  }
+
+  public async downloadObject(contentHash: string, startAt?: number): Promise<StorageNodeDownloadResponse> {
+    this.logger.verbose('Sending download request', { contentHash, startAt })
+    const options: AxiosRequestConfig = {
+      responseType: 'stream',
+    }
+    if (startAt) {
+      // options.headers is undefined by default, so initialize it together with the Range header
+      options.headers = { Range: `bytes=${startAt}-` }
+    }
+    return this.publicApi.publicApiGetFile(contentHash, options)
+  }
+}
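A hedged sketch of using `StorageNodeApi` to pull an object to disk. The endpoint and hash are placeholders; the assumption that the response carries a readable stream in `data` follows from the `responseType: 'stream'` option above:

import fs from 'fs'
import { StorageNodeApi } from './api'
import { LoggingService } from '../../logging'

async function fetchToDisk(logging: LoggingService, contentHash: string, destPath: string): Promise<void> {
  // Endpoint is illustrative
  const api = new StorageNodeApi('http://localhost:3333/api/v1', logging)

  if (!(await api.isObjectAvailable(contentHash))) {
    throw new Error(`Object ${contentHash} is not available at this storage node`)
  }

  // Pipe the response stream into a local file
  const response = await api.downloadObject(contentHash)
  await new Promise<void>((resolve, reject) => {
    response.data.pipe(fs.createWriteStream(destPath)).on('finish', resolve).on('error', reject)
  })
}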

+ 27 - 0
distributor-node/src/services/networking/storage-node/generated/.openapi-generator-ignore

@@ -0,0 +1,27 @@
+# OpenAPI Generator Ignore
+# Generated by openapi-generator https://github.com/openapitools/openapi-generator
+
+# Use this file to prevent files from being overwritten by the generator.
+# The patterns follow closely to .gitignore or .dockerignore.
+
+# As an example, the C# client generator defines ApiClient.cs.
+# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line:
+#ApiClient.cs
+
+# You can match any string of characters against a directory, file or extension with a single asterisk (*):
+#foo/*/qux
+# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux
+
+# You can recursively match patterns against a directory, file or extension with a double asterisk (**):
+#foo/**/qux
+# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux
+
+# You can also negate patterns with an exclamation (!).
+# For example, you can ignore all files in a docs folder with the file extension .md:
+#docs/*.md
+# Then explicitly reverse the ignore rule for a single file:
+#!docs/README.md
+
+git_push.sh
+.npmignore
+.gitignore

+ 5 - 0
distributor-node/src/services/networking/storage-node/generated/.openapi-generator/FILES

@@ -0,0 +1,5 @@
+api.ts
+base.ts
+common.ts
+configuration.ts
+index.ts

+ 1 - 0
distributor-node/src/services/networking/storage-node/generated/.openapi-generator/VERSION

@@ -0,0 +1 @@
+5.2.0

+ 453 - 0
distributor-node/src/services/networking/storage-node/generated/api.ts

@@ -0,0 +1,453 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Storage node API
+ * Storage node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+import { Configuration } from './configuration';
+import globalAxios, { AxiosPromise, AxiosInstance } from 'axios';
+// Some imports not used depending on template conditions
+// @ts-ignore
+import { DUMMY_BASE_URL, assertParamExists, setApiKeyToObject, setBasicAuthToObject, setBearerAuthToObject, setOAuthToObject, setSearchParams, serializeDataIfNeeded, toPathString, createRequestFunction } from './common';
+// @ts-ignore
+import { BASE_PATH, COLLECTION_FORMATS, RequestArgs, BaseAPI, RequiredError } from './base';
+
+/**
+ * 
+ * @export
+ * @interface ErrorResponse
+ */
+export interface ErrorResponse {
+    /**
+     * 
+     * @type {string}
+     * @memberof ErrorResponse
+     */
+    type?: string;
+    /**
+     * 
+     * @type {string}
+     * @memberof ErrorResponse
+     */
+    message: string;
+}
+/**
+ * 
+ * @export
+ * @interface InlineResponse201
+ */
+export interface InlineResponse201 {
+    /**
+     * 
+     * @type {string}
+     * @memberof InlineResponse201
+     */
+    id?: string;
+}
+/**
+ * 
+ * @export
+ * @interface InlineResponse2011
+ */
+export interface InlineResponse2011 {
+    /**
+     * 
+     * @type {string}
+     * @memberof InlineResponse2011
+     */
+    token?: string;
+}
+/**
+ * 
+ * @export
+ * @interface TokenRequest
+ */
+export interface TokenRequest {
+    /**
+     * 
+     * @type {TokenRequestData}
+     * @memberof TokenRequest
+     */
+    data: TokenRequestData;
+    /**
+     * 
+     * @type {string}
+     * @memberof TokenRequest
+     */
+    signature: string;
+}
+/**
+ * 
+ * @export
+ * @interface TokenRequestData
+ */
+export interface TokenRequestData {
+    /**
+     * 
+     * @type {number}
+     * @memberof TokenRequestData
+     */
+    memberId: number;
+    /**
+     * 
+     * @type {string}
+     * @memberof TokenRequestData
+     */
+    accountId: string;
+    /**
+     * 
+     * @type {number}
+     * @memberof TokenRequestData
+     */
+    dataObjectId: number;
+    /**
+     * 
+     * @type {number}
+     * @memberof TokenRequestData
+     */
+    storageBucketId: number;
+    /**
+     * 
+     * @type {string}
+     * @memberof TokenRequestData
+     */
+    bagId: string;
+}
+
+/**
+ * PublicApi - axios parameter creator
+ * @export
+ */
+export const PublicApiAxiosParamCreator = function (configuration?: Configuration) {
+    return {
+        /**
+         * Get auth token from a server.
+         * @param {TokenRequest} [tokenRequest] Token request parameters,
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicApiAuthTokenForUploading: async (tokenRequest?: TokenRequest, options: any = {}): Promise<RequestArgs> => {
+            const localVarPath = `/authToken`;
+            // use dummy base URL string because the URL constructor only accepts absolute URLs.
+            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
+            let baseOptions;
+            if (configuration) {
+                baseOptions = configuration.baseOptions;
+            }
+
+            const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options};
+            const localVarHeaderParameter = {} as any;
+            const localVarQueryParameter = {} as any;
+
+
+    
+            localVarHeaderParameter['Content-Type'] = 'application/json';
+
+            setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
+            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
+            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
+            localVarRequestOptions.data = serializeDataIfNeeded(tokenRequest, localVarRequestOptions, configuration)
+
+            return {
+                url: toPathString(localVarUrlObj),
+                options: localVarRequestOptions,
+            };
+        },
+        /**
+         * Returns a media file.
+         * @param {string} cid Content ID
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicApiGetFile: async (cid: string, options: any = {}): Promise<RequestArgs> => {
+            // verify required parameter 'cid' is not null or undefined
+            assertParamExists('publicApiGetFile', 'cid', cid)
+            const localVarPath = `/files/{cid}`
+                .replace(`{${"cid"}}`, encodeURIComponent(String(cid)));
+            // use dummy base URL string because the URL constructor only accepts absolute URLs.
+            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
+            let baseOptions;
+            if (configuration) {
+                baseOptions = configuration.baseOptions;
+            }
+
+            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
+            const localVarHeaderParameter = {} as any;
+            const localVarQueryParameter = {} as any;
+
+
+    
+            setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
+            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
+            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
+
+            return {
+                url: toPathString(localVarUrlObj),
+                options: localVarRequestOptions,
+            };
+        },
+        /**
+         * Returns a media file headers.
+         * @param {string} cid Content ID
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicApiGetFileHeaders: async (cid: string, options: any = {}): Promise<RequestArgs> => {
+            // verify required parameter 'cid' is not null or undefined
+            assertParamExists('publicApiGetFileHeaders', 'cid', cid)
+            const localVarPath = `/files/{cid}`
+                .replace(`{${"cid"}}`, encodeURIComponent(String(cid)));
+            // use dummy base URL string because the URL constructor only accepts absolute URLs.
+            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
+            let baseOptions;
+            if (configuration) {
+                baseOptions = configuration.baseOptions;
+            }
+
+            const localVarRequestOptions = { method: 'HEAD', ...baseOptions, ...options};
+            const localVarHeaderParameter = {} as any;
+            const localVarQueryParameter = {} as any;
+
+
+    
+            setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
+            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
+            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
+
+            return {
+                url: toPathString(localVarUrlObj),
+                options: localVarRequestOptions,
+            };
+        },
+        /**
+         * Upload data
+         * @param {string} dataObjectId Data object runtime ID
+         * @param {string} storageBucketId Storage bucket ID
+         * @param {string} bagId Bag ID
+         * @param {any} [file] Data file
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicApiUploadFile: async (dataObjectId: string, storageBucketId: string, bagId: string, file?: any, options: any = {}): Promise<RequestArgs> => {
+            // verify required parameter 'dataObjectId' is not null or undefined
+            assertParamExists('publicApiUploadFile', 'dataObjectId', dataObjectId)
+            // verify required parameter 'storageBucketId' is not null or undefined
+            assertParamExists('publicApiUploadFile', 'storageBucketId', storageBucketId)
+            // verify required parameter 'bagId' is not null or undefined
+            assertParamExists('publicApiUploadFile', 'bagId', bagId)
+            const localVarPath = `/files`;
+            // use dummy base URL string because the URL constructor only accepts absolute URLs.
+            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
+            let baseOptions;
+            if (configuration) {
+                baseOptions = configuration.baseOptions;
+            }
+
+            const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options};
+            const localVarHeaderParameter = {} as any;
+            const localVarQueryParameter = {} as any;
+            const localVarFormParams = new ((configuration && configuration.formDataCtor) || FormData)();
+
+            // authentication UploadAuth required
+            await setApiKeyToObject(localVarHeaderParameter, "x-api-key", configuration)
+
+
+            if (file !== undefined) { 
+                localVarFormParams.append('file', file as any);
+            }
+    
+            if (dataObjectId !== undefined) { 
+                localVarFormParams.append('dataObjectId', dataObjectId as any);
+            }
+    
+            if (storageBucketId !== undefined) { 
+                localVarFormParams.append('storageBucketId', storageBucketId as any);
+            }
+    
+            if (bagId !== undefined) { 
+                localVarFormParams.append('bagId', bagId as any);
+            }
+    
+    
+            localVarHeaderParameter['Content-Type'] = 'multipart/form-data';
+    
+            setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
+            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
+            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
+            localVarRequestOptions.data = localVarFormParams;
+
+            return {
+                url: toPathString(localVarUrlObj),
+                options: localVarRequestOptions,
+            };
+        },
+    }
+};
+
+/**
+ * PublicApi - functional programming interface
+ * @export
+ */
+export const PublicApiFp = function(configuration?: Configuration) {
+    const localVarAxiosParamCreator = PublicApiAxiosParamCreator(configuration)
+    return {
+        /**
+         * Get auth token from a server.
+         * @param {TokenRequest} [tokenRequest] Token request parameters,
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        async publicApiAuthTokenForUploading(tokenRequest?: TokenRequest, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<InlineResponse2011>> {
+            const localVarAxiosArgs = await localVarAxiosParamCreator.publicApiAuthTokenForUploading(tokenRequest, options);
+            return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
+        },
+        /**
+         * Returns a media file.
+         * @param {string} cid Content ID
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        async publicApiGetFile(cid: string, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<any>> {
+            const localVarAxiosArgs = await localVarAxiosParamCreator.publicApiGetFile(cid, options);
+            return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
+        },
+        /**
+         * Returns a media file headers.
+         * @param {string} cid Content ID
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        async publicApiGetFileHeaders(cid: string, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<void>> {
+            const localVarAxiosArgs = await localVarAxiosParamCreator.publicApiGetFileHeaders(cid, options);
+            return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
+        },
+        /**
+         * Upload data
+         * @param {string} dataObjectId Data object runtime ID
+         * @param {string} storageBucketId Storage bucket ID
+         * @param {string} bagId Bag ID
+         * @param {any} [file] Data file
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        async publicApiUploadFile(dataObjectId: string, storageBucketId: string, bagId: string, file?: any, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<InlineResponse201>> {
+            const localVarAxiosArgs = await localVarAxiosParamCreator.publicApiUploadFile(dataObjectId, storageBucketId, bagId, file, options);
+            return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
+        },
+    }
+};
+
+/**
+ * PublicApi - factory interface
+ * @export
+ */
+export const PublicApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) {
+    const localVarFp = PublicApiFp(configuration)
+    return {
+        /**
+         * Get auth token from a server.
+         * @param {TokenRequest} [tokenRequest] Token request parameters,
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicApiAuthTokenForUploading(tokenRequest?: TokenRequest, options?: any): AxiosPromise<InlineResponse2011> {
+            return localVarFp.publicApiAuthTokenForUploading(tokenRequest, options).then((request) => request(axios, basePath));
+        },
+        /**
+         * Returns a media file.
+         * @param {string} cid Content ID
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicApiGetFile(cid: string, options?: any): AxiosPromise<any> {
+            return localVarFp.publicApiGetFile(cid, options).then((request) => request(axios, basePath));
+        },
+        /**
+         * Returns a media file headers.
+         * @param {string} cid Content ID
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicApiGetFileHeaders(cid: string, options?: any): AxiosPromise<void> {
+            return localVarFp.publicApiGetFileHeaders(cid, options).then((request) => request(axios, basePath));
+        },
+        /**
+         * Upload data
+         * @param {string} dataObjectId Data object runtime ID
+         * @param {string} storageBucketId Storage bucket ID
+         * @param {string} bagId Bag ID
+         * @param {any} [file] Data file
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicApiUploadFile(dataObjectId: string, storageBucketId: string, bagId: string, file?: any, options?: any): AxiosPromise<InlineResponse201> {
+            return localVarFp.publicApiUploadFile(dataObjectId, storageBucketId, bagId, file, options).then((request) => request(axios, basePath));
+        },
+    };
+};
+
+/**
+ * PublicApi - object-oriented interface
+ * @export
+ * @class PublicApi
+ * @extends {BaseAPI}
+ */
+export class PublicApi extends BaseAPI {
+    /**
+     * Get auth token from a server.
+     * @param {TokenRequest} [tokenRequest] Token request parameters,
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     * @memberof PublicApi
+     */
+    public publicApiAuthTokenForUploading(tokenRequest?: TokenRequest, options?: any) {
+        return PublicApiFp(this.configuration).publicApiAuthTokenForUploading(tokenRequest, options).then((request) => request(this.axios, this.basePath));
+    }
+
+    /**
+     * Returns a media file.
+     * @param {string} cid Content ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     * @memberof PublicApi
+     */
+    public publicApiGetFile(cid: string, options?: any) {
+        return PublicApiFp(this.configuration).publicApiGetFile(cid, options).then((request) => request(this.axios, this.basePath));
+    }
+
+    /**
+     * Returns a media file headers.
+     * @param {string} cid Content ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     * @memberof PublicApi
+     */
+    public publicApiGetFileHeaders(cid: string, options?: any) {
+        return PublicApiFp(this.configuration).publicApiGetFileHeaders(cid, options).then((request) => request(this.axios, this.basePath));
+    }
+
+    /**
+     * Upload data
+     * @param {string} dataObjectId Data object runtime ID
+     * @param {string} storageBucketId Storage bucket ID
+     * @param {string} bagId Bag ID
+     * @param {any} [file] Data file
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     * @memberof PublicApi
+     */
+    public publicApiUploadFile(dataObjectId: string, storageBucketId: string, bagId: string, file?: any, options?: any) {
+        return PublicApiFp(this.configuration).publicApiUploadFile(dataObjectId, storageBucketId, bagId, file, options).then((request) => request(this.axios, this.basePath));
+    }
+}
+
+

+ 71 - 0
distributor-node/src/services/networking/storage-node/generated/base.ts

@@ -0,0 +1,71 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Storage node API
+ * Storage node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+import { Configuration } from "./configuration";
+// Some imports not used depending on template conditions
+// @ts-ignore
+import globalAxios, { AxiosPromise, AxiosInstance } from 'axios';
+
+export const BASE_PATH = "http://localhost:3333/api/v1".replace(/\/+$/, "");
+
+/**
+ *
+ * @export
+ */
+export const COLLECTION_FORMATS = {
+    csv: ",",
+    ssv: " ",
+    tsv: "\t",
+    pipes: "|",
+};
+
+/**
+ *
+ * @export
+ * @interface RequestArgs
+ */
+export interface RequestArgs {
+    url: string;
+    options: any;
+}
+
+/**
+ *
+ * @export
+ * @class BaseAPI
+ */
+export class BaseAPI {
+    protected configuration: Configuration | undefined;
+
+    constructor(configuration?: Configuration, protected basePath: string = BASE_PATH, protected axios: AxiosInstance = globalAxios) {
+        if (configuration) {
+            this.configuration = configuration;
+            this.basePath = configuration.basePath || this.basePath;
+        }
+    }
+};
+
+/**
+ *
+ * @export
+ * @class RequiredError
+ * @extends {Error}
+ */
+export class RequiredError extends Error {
+    name: "RequiredError" = "RequiredError";
+    constructor(public field: string, msg?: string) {
+        super(msg);
+    }
+}

+ 138 - 0
distributor-node/src/services/networking/storage-node/generated/common.ts

@@ -0,0 +1,138 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Storage node API
+ * Storage node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+import { Configuration } from "./configuration";
+import { RequiredError, RequestArgs } from "./base";
+import { AxiosInstance } from 'axios';
+
+/**
+ *
+ * @export
+ */
+export const DUMMY_BASE_URL = 'https://example.com'
+
+/**
+ *
+ * @throws {RequiredError}
+ * @export
+ */
+export const assertParamExists = function (functionName: string, paramName: string, paramValue: unknown) {
+    if (paramValue === null || paramValue === undefined) {
+        throw new RequiredError(paramName, `Required parameter ${paramName} was null or undefined when calling ${functionName}.`);
+    }
+}
+
+/**
+ *
+ * @export
+ */
+export const setApiKeyToObject = async function (object: any, keyParamName: string, configuration?: Configuration) {
+    if (configuration && configuration.apiKey) {
+        const localVarApiKeyValue = typeof configuration.apiKey === 'function'
+            ? await configuration.apiKey(keyParamName)
+            : await configuration.apiKey;
+        object[keyParamName] = localVarApiKeyValue;
+    }
+}
+
+/**
+ *
+ * @export
+ */
+export const setBasicAuthToObject = function (object: any, configuration?: Configuration) {
+    if (configuration && (configuration.username || configuration.password)) {
+        object["auth"] = { username: configuration.username, password: configuration.password };
+    }
+}
+
+/**
+ *
+ * @export
+ */
+export const setBearerAuthToObject = async function (object: any, configuration?: Configuration) {
+    if (configuration && configuration.accessToken) {
+        const accessToken = typeof configuration.accessToken === 'function'
+            ? await configuration.accessToken()
+            : await configuration.accessToken;
+        object["Authorization"] = "Bearer " + accessToken;
+    }
+}
+
+/**
+ *
+ * @export
+ */
+export const setOAuthToObject = async function (object: any, name: string, scopes: string[], configuration?: Configuration) {
+    if (configuration && configuration.accessToken) {
+        const localVarAccessTokenValue = typeof configuration.accessToken === 'function'
+            ? await configuration.accessToken(name, scopes)
+            : await configuration.accessToken;
+        object["Authorization"] = "Bearer " + localVarAccessTokenValue;
+    }
+}
+
+/**
+ *
+ * @export
+ */
+export const setSearchParams = function (url: URL, ...objects: any[]) {
+    const searchParams = new URLSearchParams(url.search);
+    for (const object of objects) {
+        for (const key in object) {
+            if (Array.isArray(object[key])) {
+                searchParams.delete(key);
+                for (const item of object[key]) {
+                    searchParams.append(key, item);
+                }
+            } else {
+                searchParams.set(key, object[key]);
+            }
+        }
+    }
+    url.search = searchParams.toString();
+}
+
+/**
+ *
+ * @export
+ */
+export const serializeDataIfNeeded = function (value: any, requestOptions: any, configuration?: Configuration) {
+    const nonString = typeof value !== 'string';
+    const needsSerialization = nonString && configuration && configuration.isJsonMime
+        ? configuration.isJsonMime(requestOptions.headers['Content-Type'])
+        : nonString;
+    return needsSerialization
+        ? JSON.stringify(value !== undefined ? value : {})
+        : (value || "");
+}
+
+/**
+ *
+ * @export
+ */
+export const toPathString = function (url: URL) {
+    return url.pathname + url.search + url.hash
+}
+
+/**
+ *
+ * @export
+ */
+export const createRequestFunction = function (axiosArgs: RequestArgs, globalAxios: AxiosInstance, BASE_PATH: string, configuration?: Configuration) {
+    return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => {
+        const axiosRequestArgs = {...axiosArgs.options, url: (configuration?.basePath || basePath) + axiosArgs.url};
+        return axios.request(axiosRequestArgs);
+    };
+}

+ 101 - 0
distributor-node/src/services/networking/storage-node/generated/configuration.ts

@@ -0,0 +1,101 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Storage node API
+ * Storage node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+export interface ConfigurationParameters {
+    apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>);
+    username?: string;
+    password?: string;
+    accessToken?: string | Promise<string> | ((name?: string, scopes?: string[]) => string) | ((name?: string, scopes?: string[]) => Promise<string>);
+    basePath?: string;
+    baseOptions?: any;
+    formDataCtor?: new () => any;
+}
+
+export class Configuration {
+    /**
+     * parameter for apiKey security
+     * @param name security name
+     * @memberof Configuration
+     */
+    apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>);
+    /**
+     * parameter for basic security
+     *
+     * @type {string}
+     * @memberof Configuration
+     */
+    username?: string;
+    /**
+     * parameter for basic security
+     *
+     * @type {string}
+     * @memberof Configuration
+     */
+    password?: string;
+    /**
+     * parameter for oauth2 security
+     * @param name security name
+     * @param scopes oauth2 scope
+     * @memberof Configuration
+     */
+    accessToken?: string | Promise<string> | ((name?: string, scopes?: string[]) => string) | ((name?: string, scopes?: string[]) => Promise<string>);
+    /**
+     * override base path
+     *
+     * @type {string}
+     * @memberof Configuration
+     */
+    basePath?: string;
+    /**
+     * base options for axios calls
+     *
+     * @type {any}
+     * @memberof Configuration
+     */
+    baseOptions?: any;
+    /**
+     * The FormData constructor that will be used to create multipart form data
+     * requests. You can inject this here so that execution environments that
+     * do not support the FormData class can still run the generated client.
+     *
+     * @type {new () => FormData}
+     */
+    formDataCtor?: new () => any;
+
+    constructor(param: ConfigurationParameters = {}) {
+        this.apiKey = param.apiKey;
+        this.username = param.username;
+        this.password = param.password;
+        this.accessToken = param.accessToken;
+        this.basePath = param.basePath;
+        this.baseOptions = param.baseOptions;
+        this.formDataCtor = param.formDataCtor;
+    }
+
+    /**
+     * Check if the given MIME is a JSON MIME.
+     * JSON MIME examples:
+     *   application/json
+     *   application/json; charset=UTF8
+     *   APPLICATION/JSON
+     *   application/vnd.company+json
+     * @param mime - MIME (Multipurpose Internet Mail Extensions)
+     * @return True if the given MIME is JSON, false otherwise.
+     */
+    public isJsonMime(mime: string): boolean {
+        const jsonMime: RegExp = new RegExp('^(application\/json|[^;/ \t]+\/[^;/ \t]+[+]json)[ \t]*(;.*)?$', 'i');
+        return mime !== null && (jsonMime.test(mime) || mime.toLowerCase() === 'application/json-patch+json');
+    }
+}

+ 18 - 0
distributor-node/src/services/networking/storage-node/generated/index.ts

@@ -0,0 +1,18 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Storage node API
+ * Storage node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+export * from "./api";
+export * from "./configuration";
+

+ 63 - 0
distributor-node/src/services/parsers/BagIdParserService.ts

@@ -0,0 +1,63 @@
+import { BagId } from '@joystream/types/storage'
+import { registry } from '@joystream/types'
+import { createType } from '@polkadot/types'
+import { InterfaceTypes } from '@polkadot/types/types'
+import { WorkingGroup } from '@joystream/types/common'
+
+export class BagIdParserService {
+  private createType<T extends keyof InterfaceTypes>(type: T, value: any) {
+    return createType(registry, type, value)
+  }
+
+  public parseBagId(bagId: string): BagId {
+    const bagIdParts = bagId.toLowerCase().split(':')
+
+    if (bagIdParts.length > 3 || bagIdParts.length < 2) {
+      throw new Error(`Invalid bagId: ${bagId}`)
+    }
+
+    if (bagIdParts[0] === 'static') {
+      return this.parseStaticBagId(bagId, bagIdParts)
+    }
+
+    if (bagIdParts[0] === 'dynamic') {
+      return this.parseDynamicBagId()
+    }
+
+    throw new Error(`Invalid bagId: ${bagId}`)
+  }
+
+  public parseStaticBagId(bagId: string, bagIdParts: string[]): BagId {
+    // Try to construct static council bag ID.
+    if (bagIdParts[1] === 'council') {
+      if (bagIdParts.length === 2) {
+        const staticBagId = this.createType('StaticBagId', 'Council')
+        const constructedBagId = this.createType('BagId', {
+          'Static': staticBagId,
+        })
+
+        return constructedBagId
+      }
+    }
+
+    // Try to construct static working group bag ID.
+    if (bagIdParts[1] === 'wg' && bagIdParts.length === 3) {
+      const groups = Object.keys(WorkingGroup.typeDefinitions)
+      const inputGroup = bagIdParts[2]
+
+      if (groups.find((g) => g.toLocaleLowerCase() === inputGroup)) {
+        return this.createType('BagId', {
+          Static: {
+            WorkingGroup: inputGroup,
+          },
+        })
+      }
+    }
+
+    throw new Error(`Invalid bagId: ${bagId}`)
+  }
+
+  public parseDynamicBagId(): BagId {
+    throw new Error('Function not implemented.')
+  }
+}
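A short usage sketch for `BagIdParserService` (the working-group segment must match one of `WorkingGroup.typeDefinitions`; 'storage' is assumed to be one of them):

const parser = new BagIdParserService()

// Static council bag
const councilBagId = parser.parseBagId('static:council')

// Static working-group bag
const storageWgBagId = parser.parseBagId('static:wg:storage')

// Dynamic bags are not implemented yet, so this throws:
// parser.parseBagId('dynamic:member:1')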

+ 114 - 0
distributor-node/src/services/parsers/ConfigParserService.ts

@@ -0,0 +1,114 @@
+import { ValidationService } from '../validation/ValidationService'
+import { Config } from '../../types'
+import fs from 'fs'
+import path from 'path'
+import YAML from 'yaml'
+import _ from 'lodash'
+import configSchema, { bytesizeUnits } from '../validation/schemas/configSchema'
+import { JSONSchema4 } from 'json-schema'
+
+const MIN_CACHE_SIZE = 20 * Math.pow(1024, 3)
+
+export class ConfigParserService {
+  validator: ValidationService
+
+  constructor() {
+    this.validator = new ValidationService()
+  }
+
+  public resolveConfigDirectoryPaths(paths: Config['directories'], configFilePath: string): Config['directories'] {
+    return _.mapValues(paths, (v) => path.resolve(path.dirname(configFilePath), v))
+  }
+
+  private parseBytesize(bytesize: string) {
+    const intValue = parseInt(bytesize)
+    const unit = bytesize[bytesize.length - 1]
+
+    return intValue * Math.pow(1024, bytesizeUnits.indexOf(unit))
+  }
+
+  private schemaTypeOf(schema: JSONSchema4, path: string[]): JSONSchema4['type'] {
+    if (path.length === 0) {
+      return undefined
+    }
+    if (schema.properties && schema.properties[path[0]]) {
+      const item = schema.properties[path[0]]
+      if (item.type === 'object') {
+        return this.schemaTypeOf(item, path.slice(1))
+      } else {
+        return item.type
+      }
+    }
+  }
+
+  private mergeEnvConfigWith(config: Record<string, unknown>) {
+    Object.entries(process.env)
+      .filter(([k]) => k.startsWith('JOYSTREAM_DISTRIBUTOR__'))
+      .forEach(([k, v]) => {
+        const path = k
+          .replace('JOYSTREAM_DISTRIBUTOR__', '')
+          .split('__')
+          .map((k) => _.camelCase(k))
+
+        const valueType = this.schemaTypeOf(configSchema, path)
+        if (valueType === undefined) {
+          // Invalid key - skip
+        } else if (valueType === 'integer') {
+          _.set(config, path, parseInt(v || ''))
+        } else if (valueType === 'number') {
+          _.set(config, path, parseFloat(v || ''))
+        } else if (valueType === 'boolean') {
+          // Parse explicitly, so that setting an env variable to 'false' or '0' doesn't yield true
+          _.set(config, path, v === 'true' || v === '1')
+        } else if (valueType === 'array') {
+          try {
+            const parsed = JSON.parse(v || 'undefined')
+            _.set(config, path, parsed)
+          } catch (e) {
+            throw new Error(`Env value ${k} is not a valid JSON array`)
+          }
+        } else {
+          _.set(config, path, v)
+        }
+      })
+  }
+
+  public loadConfing(configPath: string): Config {
+    let inputConfig: Record<string, unknown> = {}
+    // Try to load config from file if exists
+    if (fs.existsSync(configPath)) {
+      const fileContent = fs.readFileSync(configPath).toString()
+      if (path.extname(configPath) === '.json') {
+        inputConfig = JSON.parse(fileContent)
+      } else if (path.extname(configPath) === '.yml' || path.extname(configPath) === '.yaml') {
+        inputConfig = YAML.parse(fileContent)
+      } else {
+        throw new Error('Unrecognized config format (use .yml or .json)')
+      }
+    }
+
+    // Override config with env variables
+    this.mergeEnvConfigWith(inputConfig)
+
+    // Validate the config
+    const configJson = this.validator.validate('Config', inputConfig)
+
+    // Normalize values
+    const directories = this.resolveConfigDirectoryPaths(configJson.directories, configPath)
+    const storageLimit = this.parseBytesize(configJson.limits.storage)
+
+    if (storageLimit < MIN_CACHE_SIZE) {
+      throw new Error('Cache storage limit should be at least 20G!')
+    }
+
+    const parsedConfig: Config = {
+      ...configJson,
+      directories,
+      limits: {
+        ...configJson.limits,
+        storage: storageLimit,
+      },
+    }
+
+    return parsedConfig
+  }
+}
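As an illustration of the env-override convention: double-underscore-separated segments after the `JOYSTREAM_DISTRIBUTOR__` prefix are camelCased into config paths. A hedged sketch (values are arbitrary and must still pass schema validation, including the 20G storage minimum):

// Overrides limits.storage and endpoints.queryNode from config.yml:
process.env.JOYSTREAM_DISTRIBUTOR__LIMITS__STORAGE = '100G'
process.env.JOYSTREAM_DISTRIBUTOR__ENDPOINTS__QUERY_NODE = 'http://localhost:8081/graphql'

const config = new ConfigParserService().loadConfing('./config.yml')
console.log(config.limits.storage) // 107374182400 (100G parsed to bytes)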

+ 14 - 0
distributor-node/src/services/parsers/errors.ts

@@ -0,0 +1,14 @@
+import { AxiosError } from 'axios'
+
+export function parseAxiosError(e: AxiosError) {
+  return {
+    message: e.message,
+    stack: e.stack,
+    response: {
+      data: e.response?.data,
+      status: e.response?.status,
+      statusText: e.response?.statusText,
+      headers: e.response?.headers,
+    },
+  }
+}

+ 117 - 0
distributor-node/src/services/server/ServerService.ts

@@ -0,0 +1,117 @@
+import express from 'express'
+import path from 'path'
+import cors from 'cors'
+import * as OpenApiValidator from 'express-openapi-validator'
+import { HttpError } from 'express-openapi-validator/dist/framework/types'
+import { ReadonlyConfig } from '../../types/config'
+import expressWinston from 'express-winston'
+import { LoggingService } from '../../services/logging'
+import { PublicApiController } from './controllers/public'
+import { StateCacheService } from '../cache/StateCacheService'
+import { NetworkingService } from '../networking'
+import { Logger } from 'winston'
+import { ContentService } from '../content/ContentService'
+import { Server } from 'http'
+
+const OPENAPI_SPEC_PATH = path.join(__dirname, '../../api-spec/openapi.yml')
+
+export class ServerService {
+  private config: ReadonlyConfig
+  private logger: Logger
+  private expressApp: express.Application
+  private httpServer: Server | undefined
+
+  private routeWrapper<T>(
+    handler: (req: express.Request<T>, res: express.Response, next: express.NextFunction) => Promise<void>
+  ) {
+    return async (req: express.Request<T>, res: express.Response, next: express.NextFunction) => {
+      try {
+        await handler(req, res, next)
+      } catch (err) {
+        next(err)
+      }
+    }
+  }
+
+  public constructor(
+    config: ReadonlyConfig,
+    stateCache: StateCacheService,
+    content: ContentService,
+    logging: LoggingService,
+    networking: NetworkingService
+  ) {
+    this.logger = logging.createLogger('ExpressServer')
+    this.config = config
+
+    const publicController = new PublicApiController(config, logging, networking, stateCache, content)
+
+    const app = express()
+    app.use(cors())
+    app.use(express.json())
+
+    // Request logger
+    app.use(
+      expressWinston.logger({
+        winstonInstance: this.logger,
+        level: 'http',
+      })
+    )
+
+    // Setup OpenAPiValidator
+    app.use(
+      OpenApiValidator.middleware({
+        apiSpec: OPENAPI_SPEC_PATH,
+        validateApiSpec: true,
+        validateResponses: true,
+        validateRequests: true,
+      })
+    )
+
+    // Routes
+    app.head('/api/v1/asset/:objectId', this.routeWrapper(publicController.assetHead.bind(publicController)))
+    app.get('/api/v1/asset/:objectId', this.routeWrapper(publicController.asset.bind(publicController)))
+    app.get('/api/v1/status', this.routeWrapper(publicController.status.bind(publicController)))
+    app.get('/api/v1/buckets', this.routeWrapper(publicController.buckets.bind(publicController)))
+
+    // Error logger
+    app.use(
+      expressWinston.errorLogger({
+        winstonInstance: this.logger,
+        level: 'error',
+      })
+    )
+
+    // Error handler
+    app.use((err: HttpError, req: express.Request, res: express.Response, next: express.NextFunction) => {
+      if (res.headersSent) {
+        return next(err)
+      }
+      if (err.status && err.status >= 400 && err.status < 500) {
+        res
+          .status(err.status)
+          .json({
+            type: 'request_validation',
+            message: err.message,
+            errors: err.errors,
+          })
+          .end()
+      } else {
+        res.status(err.status || 500).json({ type: 'exception', message: err.message })
+      }
+    })
+
+    this.expressApp = app
+  }
+
+  public start(): void {
+    const { port } = this.config
+    this.httpServer = this.expressApp.listen(port, () => {
+      this.logger.info(`Express server started listening on port ${port}`)
+    })
+  }
+
+  public stop(): void {
+    this.httpServer?.close()
+    this.logger.info(`Express server stopped`)
+  }
+}

+ 258 - 0
distributor-node/src/services/server/controllers/public.ts

@@ -0,0 +1,258 @@
+import * as express from 'express'
+import { Logger } from 'winston'
+import send from 'send'
+import { StateCacheService } from '../../../services/cache/StateCacheService'
+import { NetworkingService } from '../../../services/networking'
+import { AssetRouteParams, BucketsResponse, ErrorResponse, StatusResponse } from '../../../types/api'
+import { LoggingService } from '../../logging'
+import { ContentService, DEFAULT_CONTENT_TYPE } from '../../content/ContentService'
+import proxy from 'express-http-proxy'
+import { ReadonlyConfig } from '../../../types'
+
+const CACHED_MAX_AGE = 31536000
+const PENDING_MAX_AGE = 180
+
+export class PublicApiController {
+  private config: ReadonlyConfig
+  private logger: Logger
+  private networking: NetworkingService
+  private stateCache: StateCacheService
+  private content: ContentService
+
+  public constructor(
+    config: ReadonlyConfig,
+    logging: LoggingService,
+    networking: NetworkingService,
+    stateCache: StateCacheService,
+    content: ContentService
+  ) {
+    this.config = config
+    this.logger = logging.createLogger('PublicApiController')
+    this.networking = networking
+    this.stateCache = stateCache
+    this.content = content
+  }
+
+  private serveAssetFromFilesystem(
+    req: express.Request<AssetRouteParams>,
+    res: express.Response,
+    next: express.NextFunction,
+    contentHash: string
+  ): void {
+    // TODO: FIXME: Actually check if we are still supposed to serve it and just remove after responding if not
+    // TODO: Limit the number of times useContent is triggered for similar requests
+    // (for example: same ip, 3 different request within a minute = 1 request)
+    this.stateCache.useContent(contentHash)
+
+    const path = this.content.path(contentHash)
+    const stream = send(req, path, {
+      maxAge: CACHED_MAX_AGE,
+      lastModified: false,
+    })
+    const mimeType = this.stateCache.getContentMimeType(contentHash)
+
+    stream.on('headers', (res) => {
+      res.setHeader('x-cache', 'hit')
+      res.setHeader('x-data-source', 'local')
+      res.setHeader('content-disposition', 'inline')
+      res.setHeader('content-type', mimeType || DEFAULT_CONTENT_TYPE)
+    })
+
+    stream.on('error', (err) => {
+      this.logger.error('SendStream error while trying to serve an asset', { err })
+      // General error
+      const statusCode = err.status || 500
+      const errorRes: ErrorResponse = {
+        type: 'sendstream_error',
+        message: err.toString(),
+      }
+
+      res.status(statusCode).json(errorRes)
+    })
+
+    stream.pipe(res)
+  }
+
+  private async servePendingDownloadAsset(
+    req: express.Request<AssetRouteParams>,
+    res: express.Response,
+    next: express.NextFunction,
+    contentHash: string
+  ) {
+    const pendingDownload = this.stateCache.getPendingDownload(contentHash)
+    if (!pendingDownload) {
+      throw new Error('Trying to serve an asset that is not in pending-download state!')
+    }
+
+    const { promise, objectSize } = pendingDownload
+    const response = await promise
+    const source = new URL(response.config.url!)
+    const contentType = response.headers['content-type'] || DEFAULT_CONTENT_TYPE
+    res.setHeader('content-type', contentType)
+    // Allow caching the pendingDownload response only for a very short period of time and require revalidation,
+    // since the data coming from the source may not be valid
+    res.setHeader('cache-control', `max-age=${PENDING_MAX_AGE}, must-revalidate`)
+
+    // Handle request using pending download file if this makes sense in current context:
+    if (this.content.exists(contentHash)) {
+      const range = req.range(objectSize)
+      if (!range || range === -1 || range === -2 || range.length !== 1 || range.type !== 'bytes') {
+        // Range is not provided / invalid - serve data from pending download file
+        return this.servePendingDownloadAssetFromFile(req, res, next, contentHash, objectSize)
+      } else if (range[0].start === 0) {
+        // Range starts from the beginning of the content - serve data from pending download file
+        return this.servePendingDownloadAssetFromFile(req, res, next, contentHash, objectSize, range[0].end)
+      }
+    }
+
+    // Range doesn't start at the beginning of the content or the file was not found - forward the request to the source storage node
+    this.logger.verbose(`Forwarding request to ${source.href}`, { source: source.href })
+    res.setHeader('x-data-source', 'external')
+    return proxy(source.origin, { proxyReqPathResolver: () => source.pathname })(req, res, next)
+  }
+
+  private async servePendingDownloadAssetFromFile(
+    req: express.Request<AssetRouteParams>,
+    res: express.Response,
+    next: express.NextFunction,
+    contentHash: string,
+    objectSize: number,
+    rangeEnd?: number
+  ) {
+    const isRange = rangeEnd !== undefined
+    this.logger.verbose(`Serving pending download asset from file`, { contentHash, isRange, objectSize, rangeEnd })
+    const stream = this.content.createContinousReadStream(contentHash, {
+      end: isRange ? rangeEnd || 0 : objectSize - 1,
+    })
+    res.status(isRange ? 206 : 200)
+    res.setHeader('accept-ranges', 'bytes')
+    res.setHeader('x-data-source', 'local')
+    res.setHeader('content-disposition', 'inline')
+    if (isRange) {
+      res.setHeader('content-range', `bytes 0-${rangeEnd}/${objectSize}`)
+    }
+    stream.pipe(res)
+    req.on('close', () => {
+      stream.destroy()
+      res.destroy()
+    })
+  }
+
+  public async assetHead(req: express.Request<AssetRouteParams>, res: express.Response): Promise<void> {
+    const objectId = req.params.objectId
+    const contentHash = this.stateCache.getObjectContentHash(objectId)
+    const pendingDownload = contentHash && this.stateCache.getPendingDownload(contentHash)
+
+    res.setHeader('timing-allow-origin', '*')
+    res.setHeader('accept-ranges', 'bytes')
+    res.setHeader('content-disposition', 'inline')
+
+    if (contentHash && !pendingDownload && this.content.exists(contentHash)) {
+      res.status(200)
+      res.setHeader('x-cache', 'hit')
+      res.setHeader('cache-control', `max-age=${CACHED_MAX_AGE}`)
+      res.setHeader('content-type', this.stateCache.getContentMimeType(contentHash) || DEFAULT_CONTENT_TYPE)
+      res.setHeader('content-length', this.content.fileSize(contentHash))
+    } else if (contentHash && pendingDownload) {
+      res.status(200)
+      res.setHeader('x-cache', 'pending')
+      res.setHeader('cache-control', `max-age=${PENDING_MAX_AGE}, must-revalidate`)
+      res.setHeader('content-length', pendingDownload.objectSize)
+    } else {
+      const objectInfo = await this.networking.dataObjectInfo(objectId)
+      if (!objectInfo.exists) {
+        res.status(404)
+      } else if (!objectInfo.isSupported) {
+        res.status(421)
+      } else {
+        res.status(200)
+        res.setHeader('x-cache', 'miss')
+        res.setHeader('cache-control', `max-age=${PENDING_MAX_AGE}, must-revalidate`)
+        res.setHeader('content-length', objectInfo.data?.size || 0)
+      }
+    }
+
+    res.send()
+  }
+
+  public async asset(
+    req: express.Request<AssetRouteParams>,
+    res: express.Response,
+    next: express.NextFunction
+  ): Promise<void> {
+    const objectId = req.params.objectId
+    const contentHash = this.stateCache.getObjectContentHash(objectId)
+    const pendingDownload = contentHash && this.stateCache.getPendingDownload(contentHash)
+
+    this.logger.verbose('Data object requested', {
+      objectId,
+      contentHash,
+      status: pendingDownload && pendingDownload.status,
+    })
+
+    res.setHeader('timing-allow-origin', '*')
+
+    if (contentHash && !pendingDownload && this.content.exists(contentHash)) {
+      this.logger.verbose('Requested file found in filesystem', { path: this.content.path(contentHash) })
+      return this.serveAssetFromFilesystem(req, res, next, contentHash)
+    } else if (contentHash && pendingDownload) {
+      this.logger.verbose('Requested file is in pending download state', { path: this.content.path(contentHash) })
+      res.setHeader('x-cache', 'pending')
+      return this.servePendingDownloadAsset(req, res, next, contentHash)
+    } else {
+      this.logger.verbose('Requested file not found in filesystem')
+      const objectInfo = await this.networking.dataObjectInfo(objectId)
+      if (!objectInfo.exists) {
+        const errorRes: ErrorResponse = {
+          message: 'Data object does not exist',
+        }
+        res.status(404).json(errorRes)
+      } else if (!objectInfo.isSupported) {
+        const errorRes: ErrorResponse = {
+          message: 'Data object not served by this node',
+        }
+        res.status(421).json(errorRes)
+        // TODO: Try to direct to a node that supports it?
+      } else {
+        const { data: objectData } = objectInfo
+        if (!objectData) {
+          throw new Error('Missing data object data')
+        }
+        const { contentHash, size } = objectData
+
+        const downloadResponse = await this.networking.downloadDataObject({ objectData })
+
+        if (downloadResponse) {
+        // Note: Await will only wait until the file is created, so we can serve the response from it
+          await this.content.handleNewContent(contentHash, size, downloadResponse.data)
+          res.setHeader('x-cache', 'miss')
+        } else {
+          res.setHeader('x-cache', 'pending')
+        }
+        return this.servePendingDownloadAsset(req, res, next, contentHash)
+      }
+    }
+  }
+
+  public async status(req: express.Request, res: express.Response<StatusResponse>): Promise<void> {
+    const data: StatusResponse = {
+      id: this.config.id,
+      objectsInCache: this.stateCache.getCachedContentLength(),
+      storageLimit: this.config.limits.storage,
+      storageUsed: this.content.usedSpace,
+      uptime: Math.floor(process.uptime()),
+      downloadsInProgress: this.stateCache.getPendingDownloadsCount(),
+    }
+    res.status(200).json(data)
+  }
+
+  public async buckets(req: express.Request, res: express.Response<BucketsResponse>): Promise<void> {
+    res
+      .status(200)
+      .json(
+        this.config.buckets === 'all'
+          ? { allByWorkerId: this.config.workerId }
+          : { bucketIds: [...this.config.buckets] }
+      )
+  }
+}
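
The x-cache header above is the whole caching contract of the public API: 'hit' means the object is fully cached, 'pending' means a download is in progress, 'miss' means a fetch from a storage node may have just been triggered. A minimal client-side probe, assuming a Node 18+ global fetch and a hypothetical node URL (the /asset/{objectId} path follows the generated OpenAPI types further below):

const NODE_URL = 'http://localhost:3334' // hypothetical; point at a real distributor node

async function probeAsset(objectId: string): Promise<void> {
  // HEAD returns the same status code and cache headers as GET, without the body
  const res = await fetch(`${NODE_URL}/asset/${objectId}`, { method: 'HEAD' })
  if (res.status === 404) {
    console.log('Data object does not exist')
  } else if (res.status === 421) {
    console.log('Data object not served by this node')
  } else {
    console.log('x-cache:', res.headers.get('x-cache')) // 'hit' | 'pending' | 'miss'
    console.log('content-length:', res.headers.get('content-length'))
  }
}

probeAsset('1').catch(console.error)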

+ 30 - 0
distributor-node/src/services/validation/ValidationService.ts

@@ -0,0 +1,30 @@
+import Ajv from 'ajv'
+import { SchemaKey, schemas, TypeBySchemaKey } from './schemas'
+
+class ValidationError extends Error {
+  public readonly errors: string[]
+
+  public constructor(message: string, errors: string[]) {
+    super(`${message}\n\n${errors.join('\n')}`)
+    this.errors = errors
+  }
+}
+
+export class ValidationService {
+  private ajv: Ajv
+
+  public constructor() {
+    this.ajv = new Ajv({ allErrors: true, schemas })
+  }
+
+  validate<SK extends SchemaKey>(schemaKey: SK, input: unknown): TypeBySchemaKey<SK> {
+    const valid = this.ajv.validate(schemaKey, input) as boolean
+    if (!valid) {
+      throw new ValidationError(
+        `${schemaKey} is not valid`,
+        this.ajv.errors?.map((e) => `${e.dataPath}: ${e.message} (${JSON.stringify(e.params)})`) || []
+      )
+    }
+    return input as TypeBySchemaKey<SK>
+  }
+}
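
Since validate is generic over SchemaKey, a successful call narrows the result to the matching generated interface (see TypeBySchemaKey below). A usage sketch with a purely illustrative config literal:

import { ValidationService } from './ValidationService'

const validation = new ValidationService()

try {
  // Return type is ConfigJson; on failure a ValidationError listing
  // every Ajv error (allErrors: true) is thrown instead
  const config = validation.validate('Config', {
    id: 'test-node',
    endpoints: { queryNode: 'http://localhost:8081/graphql', substrateNode: 'ws://localhost:9944' },
    directories: { data: './data', cache: './cache', logs: './logs' },
    limits: {
      storage: '10G',
      maxConcurrentStorageNodeDownloads: 100,
      maxConcurrentOutboundConnections: 300,
      outboundRequestsTimeout: 5000,
    },
    port: 3334,
    keys: ['//Alice'],
    buckets: 'all',
    workerId: 0,
  })
  console.log(`Loaded config for node ${config.id}`)
} catch (e) {
  console.error(e)
}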

+ 13 - 0
distributor-node/src/services/validation/generateTypes.ts

@@ -0,0 +1,13 @@
+import fs from 'fs'
+import path from 'path'
+import { compile } from 'json-schema-to-typescript'
+import { schemas } from './schemas'
+
+// eslint-disable-next-line @typescript-eslint/no-var-requires
+const prettierConfig = require('@joystream/prettier-config')
+
+Object.entries(schemas).forEach(([schemaKey, schema]) => {
+  compile(schema, `${schemaKey}Json`, { style: prettierConfig }).then((output) =>
+    fs.writeFileSync(path.resolve(__dirname, `../../types/generated/${schemaKey}Json.d.ts`), output)
+  )
+})

+ 56 - 0
distributor-node/src/services/validation/schemas/configSchema.ts

@@ -0,0 +1,56 @@
+import { JSONSchema4 } from 'json-schema'
+import { strictObject } from './utils'
+import winston from 'winston'
+
+export const bytesizeUnits = ['B', 'K', 'M', 'G', 'T']
+export const bytesizeRegex = new RegExp(`^[0-9]+(${bytesizeUnits.join('|')})$`)
+
+export const configSchema: JSONSchema4 = {
+  type: 'object',
+  required: ['id', 'endpoints', 'directories', 'buckets', 'keys', 'port', 'workerId', 'limits'],
+  additionalProperties: false,
+  properties: {
+    id: { type: 'string' },
+    endpoints: {
+      type: 'object',
+      additionalProperties: false,
+      required: ['queryNode', 'substrateNode'],
+      properties: {
+        queryNode: { type: 'string' },
+        substrateNode: { type: 'string' },
+        elasticSearch: { type: 'string' },
+      },
+    },
+    directories: strictObject({
+      data: { type: 'string' },
+      cache: { type: 'string' },
+      logs: { type: 'string' },
+    }),
+    log: {
+      type: 'object',
+      additionalProperties: false,
+      properties: {
+        file: { type: 'string', enum: [...Object.keys(winston.config.npm.levels), 'off'] },
+        console: { type: 'string', enum: [...Object.keys(winston.config.npm.levels), 'off'] },
+        elastic: { type: 'string', enum: [...Object.keys(winston.config.npm.levels), 'off'] },
+      },
+    },
+    limits: strictObject({
+      storage: { type: 'string', pattern: bytesizeRegex.source },
+      maxConcurrentStorageNodeDownloads: { type: 'integer', minimum: 1 },
+      maxConcurrentOutboundConnections: { type: 'integer', minimum: 1 },
+      outboundRequestsTimeout: { type: 'integer', minimum: 1 },
+    }),
+    port: { type: 'integer', minimum: 0 },
+    keys: { type: 'array', items: { type: 'string' }, minItems: 1 },
+    buckets: {
+      oneOf: [
+        { type: 'array', items: { type: 'integer', minimum: 0 }, minItems: 1 },
+        { type: 'string', enum: ['all'] },
+      ],
+    },
+    workerId: { type: 'integer', minimum: 0 },
+  },
+}
+
+export default configSchema
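
bytesizeRegex therefore accepts an integer followed by exactly one unit letter, nothing else:

import { bytesizeRegex } from './configSchema'

console.log(bytesizeRegex.test('10G'))  // true
console.log(bytesizeRegex.test('512M')) // true
console.log(bytesizeRegex.test('10GB')) // false - single-letter units only
console.log(bytesizeRegex.test('1.5G')) // false - integer values only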

+ 23 - 0
distributor-node/src/services/validation/schemas/familyMetadataSchema.ts

@@ -0,0 +1,23 @@
+import { JSONSchema4 } from 'json-schema'
+
+export const familyMetadataSchema: JSONSchema4 = {
+  type: 'object',
+  additionalProperties: false,
+  properties: {
+    region: { type: 'string' },
+    description: { type: 'string' },
+    boundary: {
+      type: 'array',
+      items: {
+        type: 'object',
+        additionalProperties: false,
+        properties: {
+          latitude: { type: 'number', minimum: -90, maximum: 90 },
+          longitude: { type: 'number', minimum: -180, maximum: 180 },
+        },
+      },
+    },
+  },
+}
+
+export default familyMetadataSchema

+ 24 - 0
distributor-node/src/services/validation/schemas/index.ts

@@ -0,0 +1,24 @@
+import { ConfigJson } from '../../../types/generated/ConfigJson'
+import { OperatorMetadataJson } from '../../../types/generated/OperatorMetadataJson'
+import { FamilyMetadataJson } from '../../../types/generated/FamilyMetadataJson'
+import { configSchema } from './configSchema'
+import { familyMetadataSchema } from './familyMetadataSchema'
+import { operatorMetadataSchema } from './operatorMetadataSchema'
+
+export const schemas = {
+  Config: configSchema,
+  OperatorMetadata: operatorMetadataSchema,
+  FamilyMetadata: familyMetadataSchema,
+} as const
+
+export type SchemaKey = keyof typeof schemas & string
+
+export type TypeBySchemaKey<T extends SchemaKey> = T extends 'Config'
+  ? ConfigJson
+  : T extends 'OperatorMetadata'
+  ? OperatorMetadataJson
+  : T extends 'FamilyMetadata'
+  ? FamilyMetadataJson
+  : never
+
+export default schemas
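
The conditional type dispatches on the literal key, so each SchemaKey resolves to its generated interface at compile time; for example (import path assuming a sibling module, values illustrative):

import { TypeBySchemaKey } from './schemas'

// Resolved by the conditional type above:
export type ConfigType = TypeBySchemaKey<'Config'>             // = ConfigJson
export type FamilyMetaType = TypeBySchemaKey<'FamilyMetadata'> // = FamilyMetadataJson

const meta: TypeBySchemaKey<'OperatorMetadata'> = {
  endpoint: 'https://distributor.example.com',
  location: { countryCode: 'DE', city: 'Berlin' },
}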

+ 28 - 0
distributor-node/src/services/validation/schemas/operatorMetadataSchema.ts

@@ -0,0 +1,28 @@
+import { JSONSchema4 } from 'json-schema'
+
+export const operatorMetadataSchema: JSONSchema4 = {
+  type: 'object',
+  additionalProperties: false,
+  properties: {
+    endpoint: { type: 'string' },
+    location: {
+      type: 'object',
+      additionalProperties: false,
+      properties: {
+        countryCode: { type: 'string' },
+        city: { type: 'string' },
+        coordinates: {
+          type: 'object',
+          additionalProperties: false,
+          properties: {
+            latitude: { type: 'number', minimum: -90, maximum: 90 },
+            longitude: { type: 'number', minimum: -180, maximum: 180 },
+          },
+        },
+      },
+    },
+    extra: { type: 'string' },
+  },
+}
+
+export default operatorMetadataSchema

+ 10 - 0
distributor-node/src/services/validation/schemas/utils.ts

@@ -0,0 +1,10 @@
+import { JSONSchema4 } from 'json-schema'
+
+export function strictObject(properties: Exclude<JSONSchema4['properties'], undefined>): JSONSchema4 {
+  return {
+    type: 'object',
+    additionalProperties: false,
+    required: Object.keys(properties),
+    properties,
+  }
+}
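
strictObject only saves boilerplate: every listed property becomes required and extra properties are rejected. The call below and its expansion are equivalent:

import { strictObject } from './utils'

const viaHelper = strictObject({ data: { type: 'string' } })

// ...expands to:
const expanded = {
  type: 'object',
  additionalProperties: false,
  required: ['data'],
  properties: { data: { type: 'string' } },
}

console.log(JSON.stringify(viaHelper) === JSON.stringify(expanded)) // true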

+ 5 - 0
distributor-node/src/types/api.ts

@@ -0,0 +1,5 @@
+import { components, operations } from './generated/OpenApi'
+export type AssetRouteParams = operations['public.asset']['parameters']['path']
+export type ErrorResponse = components['schemas']['ErrorResponse']
+export type StatusResponse = components['schemas']['StatusResponse']
+export type BucketsResponse = components['schemas']['BucketsResponse']

+ 1 - 0
distributor-node/src/types/common.ts

@@ -0,0 +1 @@
+export type DeepReadonly<T> = { readonly [K in keyof T]: DeepReadonly<T[K]> }
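
The mapped type recurses through every nesting level, so mutation is rejected at any depth:

import { DeepReadonly } from './common'

type Limits = { storage: number; nested: { timeout: number } }
const limits: DeepReadonly<Limits> = { storage: 1024, nested: { timeout: 5000 } }

// Both lines fail to compile - readonly applies at every depth:
// limits.storage = 2048
// limits.nested.timeout = 10000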

+ 9 - 0
distributor-node/src/types/config.ts

@@ -0,0 +1,9 @@
+import { ConfigJson } from './generated/ConfigJson'
+import { DeepReadonly } from './common'
+
+export type Config = Omit<ConfigJson, 'limits'> & {
+  limits: Omit<ConfigJson['limits'], 'storage'> & {
+    storage: number
+  }
+}
+export type ReadonlyConfig = DeepReadonly<Config>
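
Config swaps the raw storage string from ConfigJson for a number, so the config loader has to resolve the bytesize suffix somewhere. A hypothetical parser consistent with bytesizeUnits (the node's actual loader may differ):

import { bytesizeRegex, bytesizeUnits } from '../services/validation/schemas/configSchema'

// Hypothetical helper: '10G' -> 10 * 1024^3
function parseBytesize(input: string): number {
  if (!bytesizeRegex.test(input)) {
    throw new Error(`Invalid bytesize: ${input}`)
  }
  const value = parseInt(input.slice(0, -1), 10)
  const exponent = bytesizeUnits.indexOf(input.slice(-1))
  return value * Math.pow(1024, exponent)
}

console.log(parseBytesize('10G')) // 10737418240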

+ 35 - 0
distributor-node/src/types/generated/ConfigJson.d.ts

@@ -0,0 +1,35 @@
+/* tslint:disable */
+/**
+ * This file was automatically generated by json-schema-to-typescript.
+ * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file,
+ * and run json-schema-to-typescript to regenerate this file.
+ */
+
+export interface ConfigJson {
+  id: string
+  endpoints: {
+    queryNode: string
+    substrateNode: string
+    elasticSearch?: string
+  }
+  directories: {
+    data: string
+    cache: string
+    logs: string
+  }
+  log?: {
+    file?: 'error' | 'warn' | 'info' | 'http' | 'verbose' | 'debug' | 'silly' | 'off'
+    console?: 'error' | 'warn' | 'info' | 'http' | 'verbose' | 'debug' | 'silly' | 'off'
+    elastic?: 'error' | 'warn' | 'info' | 'http' | 'verbose' | 'debug' | 'silly' | 'off'
+  }
+  limits: {
+    storage: string
+    maxConcurrentStorageNodeDownloads: number
+    maxConcurrentOutboundConnections: number
+    outboundRequestsTimeout: number
+  }
+  port: number
+  keys: [string, ...string[]]
+  buckets: [number, ...number[]] | 'all'
+  workerId: number
+}

+ 15 - 0
distributor-node/src/types/generated/FamilyMetadataJson.d.ts

@@ -0,0 +1,15 @@
+/* tslint:disable */
+/**
+ * This file was automatically generated by json-schema-to-typescript.
+ * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file,
+ * and run json-schema-to-typescript to regenerate this file.
+ */
+
+export interface FamilyMetadataJson {
+  region?: string
+  description?: string
+  boundary?: {
+    latitude?: number
+    longitude?: number
+  }[]
+}

+ 148 - 0
distributor-node/src/types/generated/OpenApi.ts

@@ -0,0 +1,148 @@
+/**
+ * This file was auto-generated by openapi-typescript.
+ * Do not make direct changes to the file.
+ */
+
+export interface paths {
+  '/status': {
+    /** Returns a JSON object describing the current node status. */
+    'get': operations['public.status']
+  }
+  '/buckets': {
+    /** Returns a list of distributed buckets */
+    'get': operations['public.buckets']
+  }
+  '/asset/{objectId}': {
+    /** Returns a media file. */
+    'get': operations['public.asset']
+    /** Returns asset response headers (cache status, content type and/or length, accepted ranges, etc.) */
+    'head': operations['public.assetHead']
+  }
+}
+
+export interface components {
+  schemas: {
+    'ErrorResponse': {
+      'type'?: string
+      'message': string
+    }
+    'StatusResponse': {
+      'id': string
+      'objectsInCache': number
+      'storageLimit': number
+      'storageUsed': number
+      'uptime': number
+      'downloadsInProgress': number
+    }
+    'BucketsResponse':
+      | {
+          'bucketIds': number[]
+        }
+      | {
+          'allByWorkerId': number
+        }
+  }
+  parameters: {
+    /** Data Object ID */
+    'ObjectId': string
+  }
+  headers: {
+    /** Describes the cache status of an object. Hit - the object is already fully fetched in the distributor node's cache. Pending - the object is still being fetched from a storage node. Miss - the object is neither in cache nor currently being fetched, though fetching from a storage node may be triggered. */
+    'X-Cache'?: 'hit' | 'pending' | 'miss'
+    /** Describes the source of the data stream. External - the request was proxied to a storage node. Local - the data is streamed from a local file. */
+    'X-Data-Source'?: 'external' | 'local'
+  }
+}
+
+export interface operations {
+  /** Returns a JSON object describing the current node status. */
+  'public.status': {
+    responses: {
+      /** OK */
+      200: {
+        content: {
+          'application/json': components['schemas']['StatusResponse']
+        }
+      }
+      /** Unexpected server error */
+      500: unknown
+    }
+  }
+  /** Returns a list of distributed buckets */
+  'public.buckets': {
+    responses: {
+      /** OK */
+      200: {
+        content: {
+          'application/json': components['schemas']['BucketsResponse']
+        }
+      }
+      /** Unexpected server error */
+      500: unknown
+    }
+  }
+  /** Returns a media file. */
+  'public.asset': {
+    parameters: {
+      path: {
+        /** Data Object ID */
+        'objectId': components['parameters']['ObjectId']
+      }
+    }
+    responses: {
+      /** Full available object data sent */
+      200: {
+        headers: {}
+        content: {
+          'image/*': string
+          'audio/*': string
+          'video/*': string
+        }
+      }
+      /** Requested partial object data sent */
+      206: {
+        headers: {}
+        content: {
+          'image/*': string
+          'audio/*': string
+          'video/*': string
+        }
+      }
+      /** Data object does not exist. */
+      404: {
+        content: {
+          'application/json': components['schemas']['ErrorResponse']
+        }
+      }
+      /** Misdirected request. Data object not supported. */
+      421: {
+        content: {
+          'application/json': components['schemas']['ErrorResponse']
+        }
+      }
+      /** Unexpected server error */
+      500: unknown
+    }
+  }
+  /** Returns asset response headers (cache status, content type and/or length, accepted ranges, etc.) */
+  'public.assetHead': {
+    parameters: {
+      path: {
+        /** Data Object ID */
+        'objectId': components['parameters']['ObjectId']
+      }
+    }
+    responses: {
+      /** Object is supported and would be sent on a GET request. */
+      200: unknown
+      /** Data object does not exist. */
+      404: unknown
+      /** Misdirected request. Data object not supported by the node. */
+      421: unknown
+      /** Unexpected server error */
+      500: unknown
+    }
+  }
+}
+
+export interface external {}
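
The generated components map can type a plain fetch wrapper end to end; a minimal sketch assuming a Node 18+ global fetch, a sibling import path, and a hypothetical base URL:

import { components } from './OpenApi'

type StatusResponse = components['schemas']['StatusResponse']

const BASE_URL = 'http://localhost:3334' // hypothetical distributor node address

async function getStatus(): Promise<StatusResponse> {
  const res = await fetch(`${BASE_URL}/status`)
  if (!res.ok) throw new Error(`Status request failed with ${res.status}`)
  return (await res.json()) as StatusResponse
}

getStatus().then((s) => console.log(`${s.id}: ${s.objectsInCache} objects cached, up ${s.uptime}s`))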

+ 19 - 0
distributor-node/src/types/generated/OperatorMetadataJson.d.ts

@@ -0,0 +1,19 @@
+/* tslint:disable */
+/**
+ * This file was automatically generated by json-schema-to-typescript.
+ * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file,
+ * and run json-schema-to-typescript to regenerate this file.
+ */
+
+export interface OperatorMetadataJson {
+  endpoint?: string
+  location?: {
+    countryCode?: string
+    city?: string
+    coordinates?: {
+      latitude?: number
+      longitude?: number
+    }
+  }
+  extra?: string
+}

Some files were not shown because too many files changed in this diff