Parcourir la source

Distributor node - initial commit

Leszek Wiesner il y a 3 ans
Parent
commit
1a65e808d4
69 fichiers modifiés avec 5828 ajouts et 10 suppressions
  1. 2 1
      devops/vscode/settings.json
  2. 1 0
      distributor-node/.eslintignore
  3. 8 0
      distributor-node/.gitignore
  4. 2 0
      distributor-node/.prettierignore
  5. 67 0
      distributor-node/README.md
  6. 3 0
      distributor-node/bin/run
  7. 3 0
      distributor-node/bin/run.cmd
  8. 13 0
      distributor-node/config/docker/distributor-dev.docker.yml
  9. 5 0
      distributor-node/config/docker/filebeat.Dockerfile
  10. 19 0
      distributor-node/config/docker/filebeat.docker.yml
  11. 76 0
      distributor-node/docker-compose.yml
  12. 7 0
      distributor-node/openapitools.json
  13. 98 0
      distributor-node/package.json
  14. 1 0
      distributor-node/src/@types/@elastic/esc-winston-format/index.d.ts
  15. 91 0
      distributor-node/src/api-spec/openapi.yml
  16. 98 0
      distributor-node/src/app/index.ts
  17. 18 0
      distributor-node/src/command-base/default.ts
  18. 56 0
      distributor-node/src/commands/start.ts
  19. 1 0
      distributor-node/src/index.ts
  20. 126 0
      distributor-node/src/services/cache/StateCacheService.ts
  21. 86 0
      distributor-node/src/services/content/ContentService.ts
  22. 75 0
      distributor-node/src/services/content/FileContinousReadStream.ts
  23. 34 0
      distributor-node/src/services/logging/LoggingService.ts
  24. 1 0
      distributor-node/src/services/logging/index.ts
  25. 179 0
      distributor-node/src/services/networking/NetworkingService.ts
  26. 27 0
      distributor-node/src/services/networking/distributor-node/generated/.openapi-generator-ignore
  27. 5 0
      distributor-node/src/services/networking/distributor-node/generated/.openapi-generator/FILES
  28. 1 0
      distributor-node/src/services/networking/distributor-node/generated/.openapi-generator/VERSION
  29. 144 0
      distributor-node/src/services/networking/distributor-node/generated/api.ts
  30. 71 0
      distributor-node/src/services/networking/distributor-node/generated/base.ts
  31. 138 0
      distributor-node/src/services/networking/distributor-node/generated/common.ts
  32. 101 0
      distributor-node/src/services/networking/distributor-node/generated/configuration.ts
  33. 18 0
      distributor-node/src/services/networking/distributor-node/generated/index.ts
  34. 1 0
      distributor-node/src/services/networking/index.ts
  35. 68 0
      distributor-node/src/services/networking/query-node/api.ts
  36. 34 0
      distributor-node/src/services/networking/query-node/codegen.yml
  37. 89 0
      distributor-node/src/services/networking/query-node/generated/queries.ts
  38. 1135 0
      distributor-node/src/services/networking/query-node/generated/schema.ts
  39. 1037 0
      distributor-node/src/services/networking/query-node/mock.graphql
  40. 42 0
      distributor-node/src/services/networking/query-node/queries/queries.graphql
  41. 1 0
      distributor-node/src/services/networking/runtime/api.ts
  42. 54 0
      distributor-node/src/services/networking/storage-node/api.ts
  43. 27 0
      distributor-node/src/services/networking/storage-node/generated/.openapi-generator-ignore
  44. 5 0
      distributor-node/src/services/networking/storage-node/generated/.openapi-generator/FILES
  45. 1 0
      distributor-node/src/services/networking/storage-node/generated/.openapi-generator/VERSION
  46. 390 0
      distributor-node/src/services/networking/storage-node/generated/api.ts
  47. 71 0
      distributor-node/src/services/networking/storage-node/generated/base.ts
  48. 138 0
      distributor-node/src/services/networking/storage-node/generated/common.ts
  49. 101 0
      distributor-node/src/services/networking/storage-node/generated/configuration.ts
  50. 18 0
      distributor-node/src/services/networking/storage-node/generated/index.ts
  51. 93 0
      distributor-node/src/services/server/ServerService.ts
  52. 154 0
      distributor-node/src/services/server/controllers/public.ts
  53. 3 0
      distributor-node/src/types/api.ts
  54. 1 0
      distributor-node/src/types/common.ts
  55. 5 0
      distributor-node/src/types/config.ts
  56. 23 0
      distributor-node/src/types/dataObject.ts
  57. 27 0
      distributor-node/src/types/generated/ConfigJson.d.ts
  58. 66 0
      distributor-node/src/types/generated/OpenApi.ts
  59. 2 0
      distributor-node/src/types/index.ts
  60. 11 0
      distributor-node/src/validation/generateTypes.ts
  61. 33 0
      distributor-node/src/validation/schemas/configSchema.ts
  62. 1 0
      distributor-node/src/validation/schemas/index.ts
  63. 10 0
      distributor-node/src/validation/schemas/utils.ts
  64. 17 0
      distributor-node/test/commands/hello.test.ts
  65. 5 0
      distributor-node/test/mocha.opts
  66. 7 0
      distributor-node/test/tsconfig.json
  67. 28 0
      distributor-node/tsconfig.json
  68. 1 0
      package.json
  69. 554 9
      yarn.lock

+ 2 - 1
devops/vscode/settings.json

@@ -5,6 +5,7 @@
     "./tests/network-tests",
     "./types",
     "./storage-node",
-    "./atlas"
+    "./atlas",
+    "./distributor-node"
   ]
 }

+ 1 - 0
distributor-node/.eslintignore

@@ -0,0 +1 @@
+src/types/generated

+ 8 - 0
distributor-node/.gitignore

@@ -0,0 +1,8 @@
+*-debug.log
+*-error.log
+/.nyc_output
+/dist
+/lib
+/package-lock.json
+/tmp
+node_modules

+ 2 - 0
distributor-node/.prettierignore

@@ -0,0 +1,2 @@
+/**/generated
+/**/mock.graphql

+ 67 - 0
distributor-node/README.md

@@ -0,0 +1,67 @@
+@joystream/distributor-cli
+==========================
+
+Joystream distributor node CLI
+
+[![oclif](https://img.shields.io/badge/cli-oclif-brightgreen.svg)](https://oclif.io)
+[![Version](https://img.shields.io/npm/v/@joystream/distributor-cli.svg)](https://npmjs.org/package/@joystream/distributor-cli)
+[![Downloads/week](https://img.shields.io/npm/dw/@joystream/distributor-cli.svg)](https://npmjs.org/package/@joystream/distributor-cli)
+[![License](https://img.shields.io/npm/l/@joystream/distributor-cli.svg)](https://github.com/Joystream/joystream/blob/master/package.json)
+
+<!-- toc -->
+* [Usage](#usage)
+* [Commands](#commands)
+<!-- tocstop -->
+# Usage
+<!-- usage -->
+```sh-session
+$ npm install -g @joystream/distributor-cli
+$ joystream-distributor COMMAND
+running command...
+$ joystream-distributor (-v|--version|version)
+@joystream/distributor-cli/0.1.0 linux-x64 node-v14.17.1
+$ joystream-distributor --help [COMMAND]
+USAGE
+  $ joystream-distributor COMMAND
+...
+```
+<!-- usagestop -->
+# Commands
+<!-- commands -->
+* [`joystream-distributor help [COMMAND]`](#joystream-distributor-help-command)
+* [`joystream-distributor start [CONFIG]`](#joystream-distributor-start-config)
+
+## `joystream-distributor help [COMMAND]`
+
+display help for joystream-distributor
+
+```
+USAGE
+  $ joystream-distributor help [COMMAND]
+
+ARGUMENTS
+  COMMAND  command to show help for
+
+OPTIONS
+  --all  see all commands in CLI
+```
+
+_See code: [@oclif/plugin-help](https://github.com/oclif/plugin-help/blob/v2.2.3/src/commands/help.ts)_
+
+## `joystream-distributor start [CONFIG]`
+
+Start the node
+
+```
+USAGE
+  $ joystream-distributor start [CONFIG]
+
+ARGUMENTS
+  CONFIG  [default: ./config.yml] Path to YAML configuration file
+
+EXAMPLE
+  $ joystream-distributor start
+```
+
+_See code: [src/commands/start.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/start.ts)_
+<!-- commandsstop -->

+ 3 - 0
distributor-node/bin/run

@@ -0,0 +1,3 @@
+#!/usr/bin/env node
+
+require('@oclif/command').run().then(require('@oclif/command/flush')).catch(require('@oclif/errors/handle'))

+ 3 - 0
distributor-node/bin/run.cmd

@@ -0,0 +1,3 @@
+@echo off
+
+node "%~dp0\run" %*

+ 13 - 0
distributor-node/config/docker/distributor-dev.docker.yml

@@ -0,0 +1,13 @@
+endpoints:
+  queryNode: http://graphql-server-mnt:4002/graphql
+  substrateNode: ws://joystream-node:9944
+directories:
+  data: /data
+  cache: /cache
+  logs: /logs
+log:
+  file: debug
+  console: debug
+port: 3334
+keys: [//Alice]
+buckets: [1]

+ 5 - 0
distributor-node/config/docker/filebeat.Dockerfile

@@ -0,0 +1,5 @@
+FROM docker.elastic.co/beats/filebeat:7.13.3
+COPY ./filebeat.docker.yml /usr/share/filebeat/filebeat.yml
+USER root
+RUN chown root:filebeat /usr/share/filebeat/filebeat.yml
+USER filebeat

+ 19 - 0
distributor-node/config/docker/filebeat.docker.yml

@@ -0,0 +1,19 @@
+setup.kibana:
+  host: 'kibana:5601'
+output.elasticsearch:
+  hosts: ['elasticsearch:9200']
+# Using log files:
+filebeat.inputs:
+  - type: log
+    enabled: true
+    paths: /logs/*.json
+    json.keys_under_root: true
+    json.overwrite_keys: true
+    json.add_error_key: true
+    json.expand_keys: true
+# Docker autodiscover alternative:
+# filebeat.autodiscover:
+#   providers:
+#     - type: docker
+#       hints.enabled: true
+#       hints.default_config.enabled: false

+ 76 - 0
distributor-node/docker-compose.yml

@@ -0,0 +1,76 @@
+version: '3.4'
+
+services:
+  distributor-node:
+    image: node:14
+    labels:
+      co.elastic.logs/enabled: true
+      co.elastic.logs/json.keys_under_root: true
+      co.elastic.logs/json.overwrite_keys: true
+      co.elastic.logs/json.add_error_key: true
+      co.elastic.logs/json.expand_keys: true
+    volumes:
+      - type: bind
+        source: ..
+        target: /joystream
+      - data:/data
+      - cache:/cache
+      - logs:/logs
+    networks:
+      - joystream
+    ports:
+      - 127.0.0.1:3334:3334
+    working_dir: /joystream/distributor-node
+    init: true
+    entrypoint: ["./bin/run"]
+    command: ["start", "./config/docker/distributor-dev.docker.yml"]
+  # Ref: https://www.elastic.co/guide/en/elasticsearch/reference/7.13/docker.html
+  elasticsearch:
+    image: docker.elastic.co/elasticsearch/elasticsearch:7.13.2
+    container_name: elasticsearch
+    environment:
+      - discovery.type=single-node
+      - bootstrap.memory_lock=true
+      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
+    ulimits:
+      memlock:
+        soft: -1
+        hard: -1
+    volumes:
+      - es-data:/usr/share/elasticsearch/data
+    ports:
+      - 127.0.0.1:9200:9200
+  # Ref: https://www.elastic.co/guide/en/kibana/7.13/docker.html
+  kibana:
+    image: docker.elastic.co/kibana/kibana:7.13.2
+    container_name: kibana
+    ports:
+      - 127.0.0.1:5601:5601
+    environment:
+      ELASTICSEARCH_HOSTS: http://elasticsearch:9200
+  # Ref: https://www.elastic.co/guide/en/beats/filebeat/current/running-on-docker.html
+  filebeat:
+    user: root
+    image: joystream/distributor-filebeat
+    build:
+      context: ./config/docker
+      dockerfile: ./filebeat.Dockerfile
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock:ro
+      - logs:/logs
+
+volumes:
+  es-data:
+    driver: local
+  logs:
+    driver: local
+  cache:
+    driver: local
+  data:
+    driver: local
+
+# Join default joystream network (from root docker-compose)
+networks:
+  joystream:
+    external: true
+    name: joystream_default

+ 7 - 0
distributor-node/openapitools.json

@@ -0,0 +1,7 @@
+{
+  "$schema": "node_modules/@openapitools/openapi-generator-cli/config.schema.json",
+  "spaces": 2,
+  "generator-cli": {
+    "version": "5.2.0"
+  }
+}

+ 98 - 0
distributor-node/package.json

@@ -0,0 +1,98 @@
+{
+  "name": "@joystream/distributor-cli",
+  "description": "Joystream distributor node CLI",
+  "version": "0.1.0",
+  "author": "Joystream contributors",
+  "bin": {
+    "joystream-distributor": "./bin/run"
+  },
+  "bugs": "https://github.com/Joystream/joystream/issues",
+  "dependencies": {
+    "@oclif/command": "^1",
+    "@oclif/config": "^1",
+    "@oclif/plugin-help": "^2",
+    "tslib": "^1",
+    "yaml": "^1.10.2",
+    "ajv": "^7",
+    "@elastic/ecs-winston-format": "^1.1.0",
+    "cross-fetch": "^3.1.4",
+    "lodash": "^4.17.21",
+    "lru-cache": "^6.0.0",
+    "express": "^4.17.1",
+    "express-winston": "^4.1.0",
+    "proper-lockfile": "^4.1.2",
+    "axios": "^0.21.1",
+    "send": "^0.17.1",
+    "read-chunk": "^3.2.0",
+    "file-type": "^16.5.1",
+    "node-cleanup": "^2.1.2"
+  },
+  "devDependencies": {
+    "@oclif/dev-cli": "^1",
+    "@oclif/test": "^1",
+    "@types/chai": "^4",
+    "@types/mocha": "^5",
+    "@types/node": "^10",
+    "chai": "^4",
+    "globby": "^10",
+    "mocha": "^5",
+    "nyc": "^14",
+    "ts-node": "^8",
+    "typescript": "^3.3",
+    "@graphql-codegen/cli": "^1.21.4",
+    "@graphql-codegen/typescript": "^1.22.0",
+    "@graphql-codegen/import-types-preset": "^1.18.1",
+    "@graphql-codegen/typescript-operations": "^1.17.16",
+    "@graphql-codegen/typescript-document-nodes": "^1.17.11",
+    "json-schema-to-typescript": "^10.1.4",
+    "openapi-typescript": "^4.0.2",
+    "@openapitools/openapi-generator-cli": "^2.3.6",
+    "@types/node-cleanup": "^2.1.1"
+  },
+  "engines": {
+    "node": ">=14.16.1"
+  },
+  "files": [
+    "/bin",
+    "/lib",
+    "/npm-shrinkwrap.json",
+    "/oclif.manifest.json"
+  ],
+  "homepage": "https://github.com/Joystream/joystream",
+  "keywords": [
+    "oclif"
+  ],
+  "license": "GPL-3.0-only",
+  "main": "lib/index.js",
+  "oclif": {
+    "commands": "./lib/commands",
+    "bin": "joystream-distributor",
+    "plugins": [
+      "@oclif/plugin-help"
+    ]
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/Joystream/joystream",
+    "directory": "distributor-node"
+  },
+  "scripts": {
+    "postpack": "rm -f oclif.manifest.json",
+    "prepack": "rm -rf lib && tsc -b && oclif-dev manifest && oclif-dev readme",
+    "test": "nyc --extension .ts mocha --forbid-only \"test/**/*.test.ts\"",
+    "version": "oclif-dev readme && git add README.md",
+    "generate:types:json-schema": "yarn ts-node ./src/validation/generateTypes.ts",
+    "generate:types:graphql": "yarn graphql-codegen -c ./src/services/networking/query-node/codegen.yml",
+    "generate:types:openapi": "yarn openapi-typescript ./src/api-spec/openapi.yml -o ./src/types/generated/OpenApi.ts -c ../prettierrc.js",
+    "generate:types:all": "yarn generate:types:json-schema && yarn generate:types:graphql && yarn generate:types:openapi",
+    "generate:api:storage-node": "yarn openapi-generator-cli generate -i ../storage-node-v2/src/api-spec/openapi.yaml -g typescript-axios -o ./src/services/networking/storage-node/generated",
+    "generate:api:distributor-node": "yarn openapi-generator-cli generate -i ./src/api-spec/openapi.yml -g typescript-axios -o ./src/services/networking/distributor-node/generated",
+    "generate:api:all": "yarn generate:api:storage-node && yarn generate:api:distributor-node",
+    "generate:all": "yarn generate:types:all && yarn generate:api:all",
+    "build": "tsc --build tsconfig.json",
+    "lint": "eslint ./src --ext .ts",
+    "format": "prettier ./ --write",
+    "checks": "tsc --noEmit --pretty && prettier ./ --check && yarn lint"
+  },
+  "types": "lib/index.d.ts"
+}

+ 1 - 0
distributor-node/src/@types/@elastic/esc-winston-format/index.d.ts

@@ -0,0 +1 @@
+declare module '@elastic/ecs-winston-format'

+ 91 - 0
distributor-node/src/api-spec/openapi.yml

@@ -0,0 +1,91 @@
+openapi: 3.0.3
+info:
+  title: Distributor node API
+  description: Distributor node API
+  contact:
+    email: info@joystream.org
+  license:
+    name: MIT
+    url: https://opensource.org/licenses/MIT
+  version: 0.1.0
+externalDocs:
+  description: Distributor node API
+  url: https://github.com/Joystream/joystream/issues/2224
+servers:
+  - url: http://localhost:3334/api/v1/
+
+tags:
+  - name: public
+    description: Public distributor node API
+
+paths:
+  /asset/{objectId}:
+    get:
+      operationId: public.asset
+      description: Returns a media file.
+      tags:
+        - public
+      parameters:
+        - name: objectId
+          required: true
+          in: path
+          description: Data Object ID
+          schema:
+            type: string
+      responses:
+        200:
+          description: Full available object data sent
+          content:
+            image/*:
+              schema:
+                type: string
+                format: binary
+            audio/*:
+              schema:
+                type: string
+                format: binary
+            video/*:
+              schema:
+                type: string
+                format: binary
+        206:
+          description: Requested partial object data sent
+          content:
+            image/*:
+              schema:
+                type: string
+                format: binary
+            audio/*:
+              schema:
+                type: string
+                format: binary
+            video/*:
+              schema:
+                type: string
+                format: binary
+        400:
+          description: Invalid request. Data object not supported.
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/ErrorResponse'
+        404:
+          description: Data object does not exist.
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/ErrorResponse'
+        500:
+          description: Unexpected server error
+
+components:
+  schemas:
+    ErrorResponse:
+      type: object
+      required:
+        - message
+      properties:
+        type:
+          type: string
+        message:
+          type: string

+ 98 - 0
distributor-node/src/app/index.ts

@@ -0,0 +1,98 @@
+import { ReadonlyConfig } from '../types'
+import { NetworkingService } from '../services/networking'
+import { LoggingService } from '../services/logging'
+import { StateCacheService } from '../services/cache/StateCacheService'
+import { ContentService } from '../services/content/ContentService'
+import { ServerService } from '../services/server/ServerService'
+import { Logger } from 'winston'
+import fs from 'fs'
+import nodeCleanup from 'node-cleanup'
+
+export class App {
+  private config: ReadonlyConfig
+  private content: ContentService
+  private stateCache: StateCacheService
+  private networking: NetworkingService
+  private server: ServerService
+  private logging: LoggingService
+  private logger: Logger
+
+  constructor(config: ReadonlyConfig) {
+    this.config = config
+    this.logging = new LoggingService(config)
+    this.stateCache = new StateCacheService(config, this.logging)
+    this.content = new ContentService(config, this.logging, this.stateCache)
+    this.networking = new NetworkingService(config, this.stateCache, this.logging)
+    this.server = new ServerService(config, this.stateCache, this.content, this.logging, this.networking)
+    this.logger = this.logging.createLogger('App')
+  }
+
+  private checkConfigDirectories(): void {
+    Object.entries(this.config.directories).forEach(([name, path]) => {
+      const dirInfo = `${name} directory (${path})`
+      if (!fs.existsSync(path)) {
+        throw new Error(`${dirInfo} doesn't exists!`)
+      }
+      try {
+        fs.accessSync(path, fs.constants.R_OK)
+      } catch (e) {
+        throw new Error(`${dirInfo} is not readable`)
+      }
+      try {
+        fs.accessSync(path, fs.constants.W_OK)
+      } catch (e) {
+        throw new Error(`${dirInfo} is not writable`)
+      }
+    })
+  }
+
+  public async start(): Promise<void> {
+    this.logger.info('Starting the app')
+    this.checkConfigDirectories()
+    this.stateCache.load()
+    const dataObjects = await this.networking.fetchSupportedDataObjects()
+    // TODO: Try to actually save as much content as possible by downloading missing data
+    await this.content.startupSync(dataObjects)
+    this.server.start()
+    nodeCleanup(this.exitHandler.bind(this))
+  }
+
+  private async exitGracefully(): Promise<void> {
+    this.logger.info('Graceful exit initialized')
+    // Async exit handler - ideally should not take more than 10 sec
+    // We can try to wait until some pending downloads are finished here etc.
+    await this.stateCache.save()
+    this.logger.info('Graceful exit succesful')
+  }
+
+  private exitCritically(): void {
+    this.logger.info('Critical exit initialized')
+    // Handling exits due to an error - only some critical, synchronous work can be done here
+    this.stateCache.saveSync()
+    this.logger.close()
+    this.logger.info('Critical exit succesful')
+  }
+
+  private exitHandler(exitCode: number | null, signal: string | null): boolean | undefined {
+    this.logger.info('Exiting')
+    this.stateCache.clearInterval()
+    if (signal) {
+      // Async exit can be executed
+      this.exitGracefully()
+        .then(() => {
+          this.logger.close()
+          process.kill(process.pid, signal)
+        })
+        .catch((err) => {
+          this.logger.error('Graceful exit error', { err })
+          this.logger.close()
+          process.kill(process.pid, signal)
+        })
+      nodeCleanup.uninstall()
+      return false
+    } else {
+      // Only synchronous work can be done here
+      this.exitCritically()
+    }
+  }
+}

+ 18 - 0
distributor-node/src/command-base/default.ts

@@ -0,0 +1,18 @@
+import Ajv from 'ajv'
+import { JSONSchema4 } from 'json-schema'
+import Command from '@oclif/command'
+import { CLIError } from '@oclif/errors/lib/errors/cli'
+
+export default abstract class DefaultCommandBase extends Command {
+  asValidatedInput<ValidInputType>(schema: JSONSchema4, input: unknown, inputName = 'Input'): ValidInputType {
+    const ajv = new Ajv({ allErrors: true })
+    const valid = ajv.validate(schema, input) as boolean
+    if (!valid) {
+      throw new CLIError(
+        `${inputName} is not valid:\n` +
+          ajv.errors?.map((e) => `${e.instancePath}: ${e.message} (${JSON.stringify(e.params)})`).join('\n')
+      )
+    }
+    return input as ValidInputType
+  }
+}

+ 56 - 0
distributor-node/src/commands/start.ts

@@ -0,0 +1,56 @@
+import fs from 'fs'
+import path from 'path'
+import YAML from 'yaml'
+import { CLIError } from '@oclif/errors'
+import DefaultCommandBase from '../command-base/default'
+import { Config, ReadonlyConfig } from '../types/config'
+import { configSchema } from '../validation/schemas'
+import { App } from '../app'
+import _ from 'lodash'
+
+export default class StartNode extends DefaultCommandBase {
+  static description = 'Start the node'
+
+  static examples = [`$ joystream-distributor start /path/to/config.yml`]
+
+  // TODO: Allow overriding config through flags
+
+  static args = [
+    {
+      name: 'config',
+      description: 'Path to YAML configuration file',
+      default: './config.yml',
+    },
+  ]
+
+  resolveDirectoryPaths(paths: Config['directories'], configFilePath: string): Config['directories'] {
+    return _.mapValues(paths, (v) => path.resolve(configFilePath, v))
+  }
+
+  getConfing(configPath: string): Config {
+    const fileContent = fs.readFileSync(configPath).toString()
+    let config: unknown
+    if (path.extname(configPath) === '.json') {
+      config = JSON.parse(fileContent)
+    } else if (path.extname(configPath) === '.yml') {
+      config = YAML.parse(fileContent)
+    } else {
+      throw new CLIError('Unrecognized config format (use .yml or .json)')
+    }
+
+    return this.asValidatedInput<Config>(configSchema, config, 'Configuration file')
+  }
+
+  async run(): Promise<void> {
+    const { args } = this.parse(StartNode)
+    const configPath = args.config
+    const config = this.getConfing(configPath)
+    config.directories = this.resolveDirectoryPaths(config.directories, configPath)
+    const app = new App(config as ReadonlyConfig)
+    app.start()
+  }
+
+  async finally(): Promise<void> {
+    /* Do nothing */
+  }
+}

+ 1 - 0
distributor-node/src/index.ts

@@ -0,0 +1 @@
+export { run } from '@oclif/command'

+ 126 - 0
distributor-node/src/services/cache/StateCacheService.ts

@@ -0,0 +1,126 @@
+import { Logger } from 'winston'
+import { ReadonlyConfig } from '../../types'
+import { LoggingService } from '../logging'
+import fs from 'fs'
+
+export interface PendingDownloadData {
+  objectSize: number
+  availableEndpoints: string[]
+  pendingAvailabilityEndpointsCount: number
+  downloadAttempts: number
+  isAttemptPending: boolean
+}
+
+export class StateCacheService {
+  private logger: Logger
+  private config: ReadonlyConfig
+  private cacheFilePath: string
+  private saveInterval: NodeJS.Timeout
+  private cacheData = {
+    lruContentHashes: new Set<string>(),
+    pendingDownloadsByContentHash: new Map<string, PendingDownloadData>(),
+    mimeTypeByContentHash: new Map<string, string>(),
+    contentHashByObjectId: new Map<string, string>(),
+  }
+
+  public constructor(config: ReadonlyConfig, logging: LoggingService, saveIntervalMs = 60 * 1000) {
+    this.logger = logging.createLogger('StateCacheService')
+    this.cacheFilePath = `${config.directories.cache}/cache.json`
+    this.config = config
+    this.saveInterval = setInterval(() => this.save(), saveIntervalMs)
+  }
+
+  public setContentMimeType(contentHash: string, mimeType: string): void {
+    this.cacheData.mimeTypeByContentHash.set(contentHash, mimeType)
+  }
+
+  public getContentMimeType(contentHash: string): string | undefined {
+    return this.cacheData.mimeTypeByContentHash.get(contentHash)
+  }
+
+  public setObjectContentHash(objectId: string, hash: string): void {
+    this.cacheData.contentHashByObjectId.set(objectId, hash)
+  }
+
+  public getObjectContentHash(objectId: string): string | undefined {
+    return this.cacheData.contentHashByObjectId.get(objectId)
+  }
+
+  public useContent(contentHash: string): void {
+    if (this.cacheData.lruContentHashes.has(contentHash)) {
+      this.cacheData.lruContentHashes.delete(contentHash)
+    }
+    this.cacheData.lruContentHashes.add(contentHash)
+  }
+
+  public newPendingDownload(contentHash: string, objectSize: number): PendingDownloadData {
+    const pendingDownload: PendingDownloadData = {
+      objectSize,
+      availableEndpoints: [],
+      pendingAvailabilityEndpointsCount: 0,
+      downloadAttempts: 0,
+      isAttemptPending: false,
+    }
+    this.cacheData.pendingDownloadsByContentHash.set(contentHash, pendingDownload)
+    return pendingDownload
+  }
+
+  public getPendingDownload(contentHash: string): PendingDownloadData | undefined {
+    return this.cacheData.pendingDownloadsByContentHash.get(contentHash)
+  }
+
+  public dropPendingDownload(contentHash: string): void {
+    this.cacheData.pendingDownloadsByContentHash.delete(contentHash)
+  }
+
+  public dropByHash(contentHash: string): void {
+    this.cacheData.mimeTypeByContentHash.delete(contentHash)
+    this.cacheData.lruContentHashes.delete(contentHash)
+  }
+
+  private serializeData() {
+    // Only serializes data we can't easily reproduce during startup
+    const { lruContentHashes, mimeTypeByContentHash } = this.cacheData
+    return JSON.stringify({
+      lruContentHashes: Array.from(lruContentHashes),
+      mimeTypeByContentHash: Array.from(mimeTypeByContentHash.entries()),
+    })
+  }
+
+  public async save(): Promise<boolean> {
+    return new Promise((resolve) => {
+      const serialized = this.serializeData()
+      const fd = fs.openSync(this.cacheFilePath, 'w')
+      fs.write(fd, serialized, (err) => {
+        fs.closeSync(fd)
+        if (err) {
+          this.logger.error('Cache file save error', { err })
+          resolve(false)
+        } else {
+          this.logger.info('Cache file updated')
+          resolve(true)
+        }
+      })
+    })
+  }
+
+  public saveSync(): void {
+    const serialized = this.serializeData()
+    fs.writeFileSync(this.cacheFilePath, serialized)
+  }
+
+  public load(): void {
+    if (fs.existsSync(this.cacheFilePath)) {
+      this.logger.info('Loading cache from file', { file: this.cacheFilePath })
+      const fileContent = JSON.parse(fs.readFileSync(this.cacheFilePath).toString())
+      this.cacheData.lruContentHashes = new Set<string>(fileContent.lruContentHashes || [])
+      this.cacheData.mimeTypeByContentHash = new Map<string, string>(fileContent.mimeTypeByContentHash || [])
+    } else {
+      this.logger.warn(`Cache file (${this.cacheFilePath}) is empty. Starting from scratch`)
+    }
+  }
+
+  public clearInterval(): void {
+    clearInterval(this.saveInterval)
+  }
+}

+ 86 - 0
distributor-node/src/services/content/ContentService.ts

@@ -0,0 +1,86 @@
+import fs from 'fs'
+import { ReadonlyConfig } from '../../types'
+import { StateCacheService } from '../cache/StateCacheService'
+import { LoggingService } from '../logging'
+import { Logger } from 'winston'
+import { FileContinousReadStream, FileContinousReadStreamOptions } from './FileContinousReadStream'
+import { DataObjectData } from '../../types/dataObject'
+import readChunk from 'read-chunk'
+import FileType from 'file-type'
+import _ from 'lodash'
+
+export const DEFAULT_CONTENT_TYPE = 'application/octet-stream'
+
+export class ContentService {
+  private config: ReadonlyConfig
+  private dataDir: string
+  private logger: Logger
+  private stateCache: StateCacheService
+
+  public constructor(config: ReadonlyConfig, logging: LoggingService, stateCache: StateCacheService) {
+    this.config = config
+    this.logger = logging.createLogger('ContentService')
+    this.stateCache = stateCache
+    this.dataDir = config.directories.data
+  }
+
+  public async startupSync(supportedObjects: DataObjectData[]): Promise<void> {
+    const dataObjectsByHash = _.groupBy(supportedObjects, (o) => o.contentHash)
+    const dataDirFiles = fs.readdirSync(this.dataDir)
+    for (const contentHash of dataDirFiles) {
+      this.logger.verbose('Checking content file', { contentHash })
+      const objectsByHash = dataObjectsByHash[contentHash] || []
+      if (!objectsByHash.length) {
+        this.drop(contentHash, 'Not supported')
+        return
+      }
+      const { size } = objectsByHash[0]
+      const fileSize = fs.statSync(this.path(contentHash)).size
+      if (fileSize !== size) {
+        this.drop(contentHash, 'Invalid file size')
+        return
+      }
+      if (!this.stateCache.getContentMimeType(contentHash)) {
+        this.stateCache.setContentMimeType(contentHash, await this.guessMimeType(contentHash))
+      }
+      objectsByHash.forEach(({ contentHash, objectId }) => {
+        this.stateCache.setObjectContentHash(objectId, contentHash)
+      })
+    }
+  }
+
+  public drop(contentHash: string, reason?: string): void {
+    this.logger.info('Dropping content', { contentHash, reason })
+    fs.unlinkSync(this.path(contentHash))
+    this.stateCache.dropByHash(contentHash)
+  }
+
+  public path(contentHash: string): string {
+    return `${this.dataDir}/${contentHash}`
+  }
+
+  public exists(contentHash: string): boolean {
+    return fs.existsSync(this.path(contentHash))
+  }
+
+  public createReadStream(contentHash: string): fs.ReadStream {
+    return fs.createReadStream(this.path(contentHash))
+  }
+
+  public createWriteStream(contentHash: string): fs.WriteStream {
+    return fs.createWriteStream(this.path(contentHash))
+  }
+
+  public createContinousReadStream(
+    contentHash: string,
+    options: FileContinousReadStreamOptions
+  ): FileContinousReadStream {
+    return new FileContinousReadStream(this.path(contentHash), options)
+  }
+
+  public async guessMimeType(contentHash: string): Promise<string> {
+    const chunk = await readChunk(this.path(contentHash), 0, 4100)
+    const guessResult = await FileType.fromBuffer(chunk)
+    return guessResult?.mime || DEFAULT_CONTENT_TYPE
+  }
+}

+ 75 - 0
distributor-node/src/services/content/FileContinousReadStream.ts

@@ -0,0 +1,75 @@
+import fs from 'fs'
+
+export interface FileContinousReadStreamOptions {
+  end: number
+  start?: number
+  chunkSize?: number
+  missingDataRetryTime?: number
+  maxRetries?: number
+}
+
+export class FileContinousReadStream {
+  private fd: number
+  private position: number
+  private end: number
+  private chunkSize: number
+  private missingDataRetryTime: number
+  private maxRetries: number
+  private finished: boolean
+
+  public constructor(path: string, options: FileContinousReadStreamOptions) {
+    this.fd = fs.openSync(path, 'r')
+    this.position = options.start || 0
+    this.end = options.end
+    this.chunkSize = options.chunkSize || 1 * 1024 * 1024 // 1 MB
+    this.missingDataRetryTime = options.missingDataRetryTime || 50 // 50 ms
+    this.maxRetries = options.maxRetries || 2400 // 2400 retries x 50 ms = 120s timeout
+    this.finished = false
+  }
+
+  private finish() {
+    fs.closeSync(this.fd)
+    this.finished = true
+  }
+
+  private readChunkSync(): Buffer | null {
+    const chunk = Buffer.alloc(this.chunkSize)
+    const readBytes = fs.readSync(this.fd, chunk, 0, this.chunkSize, this.position)
+    const newPosition = this.position + readBytes
+    if (readBytes < this.chunkSize && newPosition <= this.end) {
+      return null
+    }
+    if (newPosition > this.end) {
+      this.finish()
+      return chunk.slice(0, readBytes)
+    }
+    this.position = newPosition
+    return chunk
+  }
+
+  public readChunk(): Promise<Buffer | null> {
+    return new Promise((resolve, reject) => {
+      if (this.finished) {
+        return resolve(null)
+      }
+
+      const chunk = this.readChunkSync()
+      if (chunk === null) {
+        let retries = 0
+        const interval = setInterval(() => {
+          const chunk = this.readChunkSync()
+          if (chunk !== null) {
+            clearInterval(interval)
+            return resolve(chunk)
+          }
+          if (++retries >= this.maxRetries) {
+            clearInterval(interval)
+            return reject(new Error('Max missing data retries limit reached'))
+          }
+        }, this.missingDataRetryTime)
+      } else {
+        resolve(chunk)
+      }
+    })
+  }
+}

+ 34 - 0
distributor-node/src/services/logging/LoggingService.ts

@@ -0,0 +1,34 @@
+import winston, { Logger, LoggerOptions } from 'winston'
+import escFormat from '@elastic/ecs-winston-format'
+import { ReadonlyConfig } from '../../types'
+
+export class LoggingService {
+  private loggerOptions: LoggerOptions
+
+  public constructor(config: ReadonlyConfig) {
+    const transports: winston.LoggerOptions['transports'] = [
+      new winston.transports.File({
+        filename: `${config.directories.logs}/logs.json`,
+        level: config.log?.file || 'debug',
+      }),
+    ]
+    if (config.log?.console) {
+      transports.push(
+        new winston.transports.Console({
+          level: config.log.console,
+        })
+      )
+    }
+    this.loggerOptions = {
+      format: escFormat(),
+      transports,
+    }
+  }
+
+  public createLogger(label: string): Logger {
+    return winston.createLogger({
+      ...this.loggerOptions,
+      defaultMeta: { label },
+    })
+  }
+}

+ 1 - 0
distributor-node/src/services/logging/index.ts

@@ -0,0 +1 @@
+export { LoggingService } from './LoggingService'

+ 179 - 0
distributor-node/src/services/networking/NetworkingService.ts

@@ -0,0 +1,179 @@
+import { ReadonlyConfig } from '../../types/config'
+import { QueryNodeApi } from './query-node/api'
+import { Logger } from 'winston'
+import { LoggingService } from '../logging'
+import { DataObjectAccessPoints, DataObjectData, DataObjectInfo } from '../../types/dataObject'
+import { StorageNodeApi } from './storage-node/api'
+import { StateCacheService } from '../cache/StateCacheService'
+import { DataObjectDetailsFragment } from './query-node/generated/queries'
+import { AxiosResponse } from 'axios'
+
+export class NetworkingService {
+  private config: ReadonlyConfig
+  private queryNodeApi: QueryNodeApi
+  // private runtimeApi: RuntimeApi
+  private logging: LoggingService
+  private stateCache: StateCacheService
+  private logger: Logger
+
+  constructor(config: ReadonlyConfig, stateCache: StateCacheService, logging: LoggingService) {
+    this.config = config
+    this.logging = logging
+    this.stateCache = stateCache
+    this.logger = logging.createLogger('NetworkingManager')
+    this.queryNodeApi = new QueryNodeApi(config.endpoints.queryNode)
+    // this.runtimeApi = new RuntimeApi(config.endpoints.substrateNode)
+  }
+
+  private validateNodeEndpoint(endpoint: string): void {
+    const endpointUrl = new URL(endpoint)
+    if (endpointUrl.protocol !== 'http:' && endpointUrl.protocol !== 'https:') {
+      throw new Error(`Invalid endpoint protocol: ${endpointUrl.protocol}`)
+    }
+  }
+
+  private prepareStorageNodeEndpoints(details: DataObjectDetailsFragment) {
+    return details.storageBag.storedBy
+      .filter((b) => b.operatorStatus.__typename === 'StorageBucketOperatorStatusActive')
+      .map((b) => ({
+        bucketId: b.id,
+        endpoint: Buffer.from(b.operatorMetadata.replace('0x', ''), 'hex').toString(),
+      }))
+      .filter((b) => {
+        try {
+          this.validateNodeEndpoint(b.endpoint)
+          return true
+        } catch (e) {
+          this.logger.warn('Invalid storage endpoint detected', {
+            bucketId: b.bucketId,
+            endpoint: b.endpoint,
+            error: e.toString(),
+          })
+          return false
+        }
+      })
+  }
+
+  private parseDataObjectAccessPoints(details: DataObjectDetailsFragment): DataObjectAccessPoints {
+    return {
+      storageNodes: this.prepareStorageNodeEndpoints(details),
+      // TODO:
+      distributorNodes: [],
+    }
+  }
+
+  public async dataObjectInfo(objectId: string): Promise<DataObjectInfo> {
+    const details = await this.queryNodeApi.getDataObjectDetails(objectId)
+    if (details) {
+      this.stateCache.setObjectContentHash(objectId, details.ipfsHash)
+    }
+    return {
+      exists: !!details,
+      isSupported: this.config.buckets.some((bucketId) =>
+        details?.storageBag.distributedBy.map((b) => b.id).includes(bucketId.toString())
+      ),
+      data: details
+        ? {
+            objectId,
+            accessPoints: this.parseDataObjectAccessPoints(details),
+            contentHash: details.ipfsHash,
+            size: parseInt(details.size),
+          }
+        : undefined,
+    }
+  }
+
+  public downloadDataObject(objectData: DataObjectData): Promise<AxiosResponse<NodeJS.ReadableStream>> | null {
+    const { contentHash, accessPoints, size } = objectData
+
+    if (this.stateCache.getPendingDownload(contentHash)) {
+      return null
+    }
+
+    const pendingDownload = this.stateCache.newPendingDownload(contentHash, size)
+
+    return new Promise<AxiosResponse<NodeJS.ReadableStream>>((resolve, reject) => {
+      const storageEndpoints = accessPoints?.storageNodes.map((n) => n.endpoint)
+
+      this.logger.info('Downloading new data object', { contentHash, storageEndpoints })
+      if (!storageEndpoints || !storageEndpoints.length) {
+        return reject(new Error('No storage endpoints available to download the data object from'))
+      }
+      const availabilityPromises = storageEndpoints.map(async (endpoint) => {
+        const api = new StorageNodeApi(endpoint, this.logging)
+        const available = await api.isObjectAvailable(contentHash)
+        if (!available) {
+          throw new Error('Not avilable')
+        }
+        return endpoint
+      })
+
+      pendingDownload.pendingAvailabilityEndpointsCount = availabilityPromises.length
+      availabilityPromises.forEach((availableNodePromise) =>
+        availableNodePromise
+          .then(async (endpoint) => {
+            pendingDownload.availableEndpoints.push(endpoint)
+            if (!pendingDownload.isAttemptPending) {
+              this.attemptDataObjectDownload(contentHash)
+                .then(resolve)
+                .catch(() => {
+                  if (!pendingDownload.pendingAvailabilityEndpointsCount && !pendingDownload.isAttemptPending) {
+                    return reject(new Error('Cannot download data object from any node'))
+                  }
+                })
+            }
+          })
+          .finally(() => --pendingDownload.pendingAvailabilityEndpointsCount)
+      )
+    })
+  }
+
+  private async attemptDataObjectDownload(contentHash: string): Promise<AxiosResponse<NodeJS.ReadableStream>> {
+    const pendingDownload = this.stateCache.getPendingDownload(contentHash)
+    if (!pendingDownload) {
+      throw new Error('Attempting data object download with missing pending download data')
+    }
+    if (pendingDownload.isAttemptPending) {
+      throw new Error('Attempting data object download during an already pending attempt')
+    }
+    const endpoint = pendingDownload.availableEndpoints.shift()
+    if (!endpoint) {
+      throw new Error('Attempting data object download without any available endpoint')
+    }
+    pendingDownload.isAttemptPending = true
+    this.logger.info('Requesting data object from storage node', { contentHash, endpoint })
+    const api = new StorageNodeApi(endpoint, this.logging)
+    try {
+      const response = await api.downloadObject(contentHash)
+      ++pendingDownload.downloadAttempts
+      pendingDownload.isAttemptPending = false
+      // TODO: Validate reponse? (ie. object size etc.)
+      return response
+    } catch (e) {
+      ++pendingDownload.downloadAttempts
+      pendingDownload.isAttemptPending = false
+      if (pendingDownload.availableEndpoints.length) {
+        return this.attemptDataObjectDownload(contentHash)
+      } else {
+        throw e
+      }
+    }
+  }
+
+  async fetchSupportedDataObjects(): Promise<DataObjectData[]> {
+    const data = await this.queryNodeApi.getDistributionBucketsWithObjects(
+      this.config.buckets.map((id) => id.toString())
+    )
+    const objectsData: DataObjectData[] = []
+    data.forEach((bucket) => {
+      bucket.distributedBags.forEach((bag) => {
+        bag.objects.forEach((object) => {
+          const { ipfsHash, id, size } = object
+          objectsData.push({ contentHash: ipfsHash, objectId: id, size })
+        })
+      })
+    })
+
+    return objectsData
+  }
+}

+ 27 - 0
distributor-node/src/services/networking/distributor-node/generated/.openapi-generator-ignore

@@ -0,0 +1,27 @@
+# OpenAPI Generator Ignore
+# Generated by openapi-generator https://github.com/openapitools/openapi-generator
+
+# Use this file to prevent files from being overwritten by the generator.
+# The patterns follow closely to .gitignore or .dockerignore.
+
+# As an example, the C# client generator defines ApiClient.cs.
+# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line:
+#ApiClient.cs
+
+# You can match any string of characters against a directory, file or extension with a single asterisk (*):
+#foo/*/qux
+# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux
+
+# You can recursively match patterns against a directory, file or extension with a double asterisk (**):
+#foo/**/qux
+# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux
+
+# You can also negate patterns with an exclamation (!).
+# For example, you can ignore all files in a docs folder with the file extension .md:
+#docs/*.md
+# Then explicitly reverse the ignore rule for a single file:
+#!docs/README.md
+
+git_push.sh
+.npmignore
+.gitignore

+ 5 - 0
distributor-node/src/services/networking/distributor-node/generated/.openapi-generator/FILES

@@ -0,0 +1,5 @@
+api.ts
+base.ts
+common.ts
+configuration.ts
+index.ts

+ 1 - 0
distributor-node/src/services/networking/distributor-node/generated/.openapi-generator/VERSION

@@ -0,0 +1 @@
+5.2.0

+ 144 - 0
distributor-node/src/services/networking/distributor-node/generated/api.ts

@@ -0,0 +1,144 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Distributor node API
+ * Distributor node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+import { Configuration } from './configuration';
+import globalAxios, { AxiosPromise, AxiosInstance } from 'axios';
+// Some imports not used depending on template conditions
+// @ts-ignore
+import { DUMMY_BASE_URL, assertParamExists, setApiKeyToObject, setBasicAuthToObject, setBearerAuthToObject, setOAuthToObject, setSearchParams, serializeDataIfNeeded, toPathString, createRequestFunction } from './common';
+// @ts-ignore
+import { BASE_PATH, COLLECTION_FORMATS, RequestArgs, BaseAPI, RequiredError } from './base';
+
/**
 * Standard error response body returned by the distributor node API.
 * @export
 * @interface ErrorResponse
 */
export interface ErrorResponse {
    /**
     * Optional error type identifier.
     * @type {string}
     * @memberof ErrorResponse
     */
    type?: string;
    /**
     * Error message.
     * @type {string}
     * @memberof ErrorResponse
     */
    message: string;
}

/**
 * PublicApi - axios parameter creator
 * @export
 */
export const PublicApiAxiosParamCreator = function (configuration?: Configuration) {
    return {
        /**
         * Returns a media file.
         * @param {string} objectId Data Object ID
         * @param {*} [options] Override http request option.
         * @throws {RequiredError}
         */
        publicAsset: async (objectId: string, options: any = {}): Promise<RequestArgs> => {
            // verify required parameter 'objectId' is not null or undefined
            assertParamExists('publicAsset', 'objectId', objectId)
            const localVarPath = `/asset/{objectId}`
                .replace(`{${"objectId"}}`, encodeURIComponent(String(objectId)));
            // use dummy base URL string because the URL constructor only accepts absolute URLs.
            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
            let baseOptions;
            if (configuration) {
                baseOptions = configuration.baseOptions;
            }

            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
            const localVarHeaderParameter = {} as any;
            const localVarQueryParameter = {} as any;


    
            setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};

            return {
                url: toPathString(localVarUrlObj),
                options: localVarRequestOptions,
            };
        },
    }
};

/**
 * PublicApi - functional programming interface
 * @export
 */
export const PublicApiFp = function(configuration?: Configuration) {
    const localVarAxiosParamCreator = PublicApiAxiosParamCreator(configuration)
    return {
        /**
         * Returns a media file.
         * @param {string} objectId Data Object ID
         * @param {*} [options] Override http request option.
         * @throws {RequiredError}
         */
        async publicAsset(objectId: string, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<any>> {
            const localVarAxiosArgs = await localVarAxiosParamCreator.publicAsset(objectId, options);
            return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
        },
    }
};

/**
 * PublicApi - factory interface
 * @export
 */
export const PublicApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) {
    const localVarFp = PublicApiFp(configuration)
    return {
        /**
         * Returns a media file.
         * @param {string} objectId Data Object ID
         * @param {*} [options] Override http request option.
         * @throws {RequiredError}
         */
        publicAsset(objectId: string, options?: any): AxiosPromise<any> {
            return localVarFp.publicAsset(objectId, options).then((request) => request(axios, basePath));
        },
    };
};

/**
 * PublicApi - object-oriented interface
 * @export
 * @class PublicApi
 * @extends {BaseAPI}
 */
export class PublicApi extends BaseAPI {
    /**
     * Returns a media file.
     * @param {string} objectId Data Object ID
     * @param {*} [options] Override http request option.
     * @throws {RequiredError}
     * @memberof PublicApi
     */
    public publicAsset(objectId: string, options?: any) {
        return PublicApiFp(this.configuration).publicAsset(objectId, options).then((request) => request(this.axios, this.basePath));
    }
}
+
+

+ 71 - 0
distributor-node/src/services/networking/distributor-node/generated/base.ts

@@ -0,0 +1,71 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Distributor node API
+ * Distributor node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+import { Configuration } from "./configuration";
+// Some imports not used depending on template conditions
+// @ts-ignore
+import globalAxios, { AxiosPromise, AxiosInstance } from 'axios';
+
// Default server URL from the OpenAPI spec; trailing slashes are stripped so
// path concatenation never produces a double slash.
export const BASE_PATH = "http://localhost:3334/api/v1".replace(/\/+$/, "");

/**
 * Separators used to serialize array parameters into collection formats.
 * @export
 */
export const COLLECTION_FORMATS = {
    csv: ",",
    ssv: " ",
    tsv: "\t",
    pipes: "|",
};

/**
 * A prepared request: relative URL plus axios request options.
 * @export
 * @interface RequestArgs
 */
export interface RequestArgs {
    url: string;
    options: any;
}

/**
 * Base class of the generated API classes; holds the axios instance,
 * base path and optional configuration.
 * @export
 * @class BaseAPI
 */
export class BaseAPI {
    protected configuration: Configuration | undefined;

    constructor(configuration?: Configuration, protected basePath: string = BASE_PATH, protected axios: AxiosInstance = globalAxios) {
        if (configuration) {
            this.configuration = configuration;
            // A basePath from the configuration takes precedence over the default
            this.basePath = configuration.basePath || this.basePath;
        }
    }
};

/**
 * Error thrown when a required request parameter is null or undefined.
 * @export
 * @class RequiredError
 * @extends {Error}
 */
export class RequiredError extends Error {
    name: "RequiredError" = "RequiredError";
    constructor(public field: string, msg?: string) {
        super(msg);
    }
}

+ 138 - 0
distributor-node/src/services/networking/distributor-node/generated/common.ts

@@ -0,0 +1,138 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Distributor node API
+ * Distributor node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+import { Configuration } from "./configuration";
+import { RequiredError, RequestArgs } from "./base";
+import { AxiosInstance } from 'axios';
+
/**
 * Dummy base URL used only so the URL constructor can parse relative paths.
 * @export
 */
export const DUMMY_BASE_URL = 'https://example.com'

/**
 * Throws RequiredError when a required parameter is null or undefined.
 * @throws {RequiredError}
 * @export
 */
export const assertParamExists = function (functionName: string, paramName: string, paramValue: unknown) {
    if (paramValue === null || paramValue === undefined) {
        throw new RequiredError(paramName, `Required parameter ${paramName} was null or undefined when calling ${functionName}.`);
    }
}

/**
 * Sets the configured API key (possibly resolved asynchronously) on the given params object.
 * @export
 */
export const setApiKeyToObject = async function (object: any, keyParamName: string, configuration?: Configuration) {
    if (configuration && configuration.apiKey) {
        const localVarApiKeyValue = typeof configuration.apiKey === 'function'
            ? await configuration.apiKey(keyParamName)
            : await configuration.apiKey;
        object[keyParamName] = localVarApiKeyValue;
    }
}

/**
 * Sets basic auth credentials (axios 'auth' option) from the configuration.
 * @export
 */
export const setBasicAuthToObject = function (object: any, configuration?: Configuration) {
    if (configuration && (configuration.username || configuration.password)) {
        object["auth"] = { username: configuration.username, password: configuration.password };
    }
}

/**
 * Sets the Authorization header from the configured bearer access token.
 * @export
 */
export const setBearerAuthToObject = async function (object: any, configuration?: Configuration) {
    if (configuration && configuration.accessToken) {
        const accessToken = typeof configuration.accessToken === 'function'
            ? await configuration.accessToken()
            : await configuration.accessToken;
        object["Authorization"] = "Bearer " + accessToken;
    }
}

/**
 * Sets the Authorization header from the configured OAuth access token (resolved with name/scopes).
 * @export
 */
export const setOAuthToObject = async function (object: any, name: string, scopes: string[], configuration?: Configuration) {
    if (configuration && configuration.accessToken) {
        const localVarAccessTokenValue = typeof configuration.accessToken === 'function'
            ? await configuration.accessToken(name, scopes)
            : await configuration.accessToken;
        object["Authorization"] = "Bearer " + localVarAccessTokenValue;
    }
}

/**
 * Serializes the given objects into the URL's search params (array values are appended per item).
 * @export
 */
export const setSearchParams = function (url: URL, ...objects: any[]) {
    const searchParams = new URLSearchParams(url.search);
    for (const object of objects) {
        for (const key in object) {
            if (Array.isArray(object[key])) {
                searchParams.delete(key);
                for (const item of object[key]) {
                    searchParams.append(key, item);
                }
            } else {
                searchParams.set(key, object[key]);
            }
        }
    }
    url.search = searchParams.toString();
}

/**
 * JSON-stringifies non-string request bodies when the content type is JSON-like.
 * @export
 */
export const serializeDataIfNeeded = function (value: any, requestOptions: any, configuration?: Configuration) {
    const nonString = typeof value !== 'string';
    const needsSerialization = nonString && configuration && configuration.isJsonMime
        ? configuration.isJsonMime(requestOptions.headers['Content-Type'])
        : nonString;
    return needsSerialization
        ? JSON.stringify(value !== undefined ? value : {})
        : (value || "");
}

/**
 * Returns the path + query + hash portion of a URL (i.e. without origin).
 * @export
 */
export const toPathString = function (url: URL) {
    return url.pathname + url.search + url.hash
}

/**
 * Wraps prepared request args into a function that executes the axios request.
 * @export
 */
export const createRequestFunction = function (axiosArgs: RequestArgs, globalAxios: AxiosInstance, BASE_PATH: string, configuration?: Configuration) {
    return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => {
        const axiosRequestArgs = {...axiosArgs.options, url: (configuration?.basePath || basePath) + axiosArgs.url};
        return axios.request(axiosRequestArgs);
    };
}

+ 101 - 0
distributor-node/src/services/networking/distributor-node/generated/configuration.ts

@@ -0,0 +1,101 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Distributor node API
+ * Distributor node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
/** Constructor parameters accepted by {@link Configuration}. */
export interface ConfigurationParameters {
    apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>);
    username?: string;
    password?: string;
    accessToken?: string | Promise<string> | ((name?: string, scopes?: string[]) => string) | ((name?: string, scopes?: string[]) => Promise<string>);
    basePath?: string;
    baseOptions?: any;
    formDataCtor?: new () => any;
}

/** Runtime configuration (auth, base path, axios options) for the generated API clients. */
export class Configuration {
    /**
     * parameter for apiKey security
     * @param name security name
     * @memberof Configuration
     */
    apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>);
    /**
     * parameter for basic security
     *
     * @type {string}
     * @memberof Configuration
     */
    username?: string;
    /**
     * parameter for basic security
     *
     * @type {string}
     * @memberof Configuration
     */
    password?: string;
    /**
     * parameter for oauth2 security
     * @param name security name
     * @param scopes oauth2 scope
     * @memberof Configuration
     */
    accessToken?: string | Promise<string> | ((name?: string, scopes?: string[]) => string) | ((name?: string, scopes?: string[]) => Promise<string>);
    /**
     * override base path
     *
     * @type {string}
     * @memberof Configuration
     */
    basePath?: string;
    /**
     * base options for axios calls
     *
     * @type {any}
     * @memberof Configuration
     */
    baseOptions?: any;
    /**
     * The FormData constructor that will be used to create multipart form data
     * requests. You can inject this here so that execution environments that
     * do not support the FormData class can still run the generated client.
     *
     * @type {new () => FormData}
     */
    formDataCtor?: new () => any;

    constructor(param: ConfigurationParameters = {}) {
        this.apiKey = param.apiKey;
        this.username = param.username;
        this.password = param.password;
        this.accessToken = param.accessToken;
        this.basePath = param.basePath;
        this.baseOptions = param.baseOptions;
        this.formDataCtor = param.formDataCtor;
    }

    /**
     * Check if the given MIME is a JSON MIME.
     * JSON MIME examples:
     *   application/json
     *   application/json; charset=UTF8
     *   APPLICATION/JSON
     *   application/vnd.company+json
     * @param mime - MIME (Multipurpose Internet Mail Extensions)
     * @return True if the given MIME is JSON, false otherwise.
     */
    public isJsonMime(mime: string): boolean {
        const jsonMime: RegExp = new RegExp('^(application\/json|[^;/ \t]+\/[^;/ \t]+[+]json)[ \t]*(;.*)?$', 'i');
        return mime !== null && (jsonMime.test(mime) || mime.toLowerCase() === 'application/json-patch+json');
    }
}

+ 18 - 0
distributor-node/src/services/networking/distributor-node/generated/index.ts

@@ -0,0 +1,18 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Distributor node API
+ * Distributor node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+export * from "./api";
+export * from "./configuration";
+

+ 1 - 0
distributor-node/src/services/networking/index.ts

@@ -0,0 +1 @@
+export { NetworkingService } from './NetworkingService'

+ 68 - 0
distributor-node/src/services/networking/query-node/api.ts

@@ -0,0 +1,68 @@
+import { ApolloClient, NormalizedCacheObject, HttpLink, InMemoryCache, DocumentNode } from '@apollo/client'
+import fetch from 'cross-fetch'
+import {
+  DataObjectDetailsFragment,
+  GetDataObjectDetails,
+  GetDataObjectDetailsQuery,
+  GetDataObjectDetailsQueryVariables,
+  DistirubtionBucketsWithObjectsFragment,
+  GetDistributionBucketsWithObjectsQuery,
+  GetDistributionBucketsWithObjectsQueryVariables,
+  GetDistributionBucketsWithObjects,
+} from './generated/queries'
+import { Maybe } from './generated/schema'
+
+export class QueryNodeApi {
+  private apolloClient: ApolloClient<NormalizedCacheObject>
+
+  public constructor(endpoint: string) {
+    this.apolloClient = new ApolloClient({
+      link: new HttpLink({ uri: endpoint, fetch }),
+      cache: new InMemoryCache(),
+      defaultOptions: { query: { fetchPolicy: 'no-cache', errorPolicy: 'all' } },
+    })
+  }
+
+  // Get entity by unique input
+  protected async uniqueEntityQuery<
+    QueryT extends { [k: string]: Maybe<Record<string, unknown>> | undefined },
+    VariablesT extends Record<string, unknown>
+  >(
+    query: DocumentNode,
+    variables: VariablesT,
+    resultKey: keyof QueryT
+  ): Promise<Required<QueryT>[keyof QueryT] | null> {
+    return (await this.apolloClient.query<QueryT, VariablesT>({ query, variables })).data[resultKey] || null
+  }
+
+  // Get entities by "non-unique" input and return first result
+  protected async firstEntityQuery<
+    QueryT extends { [k: string]: unknown[] },
+    VariablesT extends Record<string, unknown>
+  >(query: DocumentNode, variables: VariablesT, resultKey: keyof QueryT): Promise<QueryT[keyof QueryT][number] | null> {
+    return (await this.apolloClient.query<QueryT, VariablesT>({ query, variables })).data[resultKey][0] || null
+  }
+
+  // Query-node: get multiple entities
+  protected async multipleEntitiesQuery<
+    QueryT extends { [k: string]: unknown[] },
+    VariablesT extends Record<string, unknown>
+  >(query: DocumentNode, variables: VariablesT, resultKey: keyof QueryT): Promise<QueryT[keyof QueryT]> {
+    return (await this.apolloClient.query<QueryT, VariablesT>({ query, variables })).data[resultKey]
+  }
+
+  public getDataObjectDetails(objectId: string): Promise<DataObjectDetailsFragment | null> {
+    return this.uniqueEntityQuery<GetDataObjectDetailsQuery, GetDataObjectDetailsQueryVariables>(
+      GetDataObjectDetails,
+      { id: objectId },
+      'storageDataObjectByUniqueInput'
+    )
+  }
+
+  public getDistributionBucketsWithObjects(ids: string[]): Promise<DistirubtionBucketsWithObjectsFragment[]> {
+    return this.multipleEntitiesQuery<
+      GetDistributionBucketsWithObjectsQuery,
+      GetDistributionBucketsWithObjectsQueryVariables
+    >(GetDistributionBucketsWithObjects, { ids }, 'distributionBuckets')
+  }
+}

+ 34 - 0
distributor-node/src/services/networking/query-node/codegen.yml

@@ -0,0 +1,34 @@
+# Paths are relative to root distributor-node directory
+overwrite: true
+
+# schema: '../../../../../query-node/generated/graphql-server/generated/schema.graphql'
+schema: 'src/services/networking/query-node/mock.graphql'
+
+documents:
+  - 'src/services/networking/query-node/queries/*.graphql'
+
+config:
+  scalars:
+    Date: Date
+  preResolveTypes: true # avoid using Pick
+  skipTypename: true # skip __typename field in typings unless it's part of the query
+
+generates:
+  src/services/networking/query-node/generated/schema.ts:
+    hooks:
+      afterOneFileWrite:
+        - prettier --write
+        - eslint --fix
+    plugins:
+      - typescript
+  src/services/networking/query-node/generated/queries.ts:
+    preset: import-types
+    presetConfig:
+      typesPath: ./schema
+    hooks:
+      afterOneFileWrite:
+        - prettier --write
+        - eslint --fix
+    plugins:
+      - typescript-operations
+      - typescript-document-nodes

+ 89 - 0
distributor-node/src/services/networking/query-node/generated/queries.ts

@@ -0,0 +1,89 @@
// NOTE(review): this file is machine-generated by graphql-codegen (see ../codegen.yml) —
// do not edit by hand, regenerate instead. The 'Distirubtion' misspelling below originates
// from the fragment name in the source .graphql documents; fixing it there and regenerating
// would rename these exports (update consumers accordingly).
import * as Types from './schema'

import gql from 'graphql-tag'
// Details of a single storage data object, including the storage buckets and
// distribution buckets that hold it (through its storage bag).
export type DataObjectDetailsFragment = {
  id: string
  size: any
  ipfsHash: string
  isAccepted: boolean
  storageBag: {
    storedBy: Array<{
      id: string
      operatorMetadata?: Types.Maybe<any>
      operatorStatus:
        | { __typename: 'StorageBucketOperatorStatusMissing' }
        | { __typename: 'StorageBucketOperatorStatusInvited' }
        | { __typename: 'StorageBucketOperatorStatusActive' }
    }>
    distributedBy: Array<{ id: string; operatorMetadata?: Types.Maybe<any> }>
  }
}

// Variables for the getDataObjectDetails query (unique lookup by id).
export type GetDataObjectDetailsQueryVariables = Types.Exact<{
  id: Types.Scalars['ID']
}>

// Result shape: the object details fragment, or null when no object matches.
export type GetDataObjectDetailsQuery = { storageDataObjectByUniqueInput?: Types.Maybe<DataObjectDetailsFragment> }

// A distribution bucket with the objects of all bags it distributes.
export type DistirubtionBucketsWithObjectsFragment = {
  id: string
  distributedBags: Array<{ objects: Array<{ id: string; size: any; ipfsHash: string }> }>
}

// Variables for the getDistributionBucketsWithObjects query (filter by bucket ids).
export type GetDistributionBucketsWithObjectsQueryVariables = Types.Exact<{
  ids?: Types.Maybe<Array<Types.Scalars['ID']> | Types.Scalars['ID']>
}>

export type GetDistributionBucketsWithObjectsQuery = {
  distributionBuckets: Array<DistirubtionBucketsWithObjectsFragment>
}

// GraphQL documents below are runtime values — keep the query/fragment text unchanged.
export const DataObjectDetails = gql`
  fragment DataObjectDetails on StorageDataObject {
    id
    size
    ipfsHash
    isAccepted
    storageBag {
      storedBy {
        id
        operatorMetadata
        operatorStatus {
          __typename
        }
      }
      distributedBy {
        id
        operatorMetadata
      }
    }
  }
`
export const DistirubtionBucketsWithObjects = gql`
  fragment DistirubtionBucketsWithObjects on DistributionBucket {
    id
    distributedBags {
      objects {
        id
        size
        ipfsHash
      }
    }
  }
`
export const GetDataObjectDetails = gql`
  query getDataObjectDetails($id: ID!) {
    storageDataObjectByUniqueInput(where: { id: $id }) {
      ...DataObjectDetails
    }
  }
  ${DataObjectDetails}
`
export const GetDistributionBucketsWithObjects = gql`
  query getDistributionBucketsWithObjects($ids: [ID!]) {
    distributionBuckets(where: { id_in: $ids }) {
      ...DistirubtionBucketsWithObjects
    }
  }
  ${DistirubtionBucketsWithObjects}
`

+ 1135 - 0
distributor-node/src/services/networking/query-node/generated/schema.ts

@@ -0,0 +1,1135 @@
// NOTE(review): machine-generated by graphql-codegen from the query-node schema
// (currently the mock schema — see codegen.yml). Do not edit by hand; regenerate instead.
/** Nullable wrapper used by all generated types. */
export type Maybe<T> = T | null
/** Identity mapped type — keeps exact object shapes for query variables. */
export type Exact<T extends { [key: string]: unknown }> = { [K in keyof T]: T[K] }
export type MakeOptional<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]?: Maybe<T[SubKey]> }
export type MakeMaybe<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]: Maybe<T[SubKey]> }
/** All built-in and custom scalars, mapped to their actual values */
export type Scalars = {
  ID: string
  String: string
  Boolean: boolean
  Int: number
  Float: number
  /** The javascript `Date` as string. Type represents date and time as the ISO Date string. */
  DateTime: any
  /** GraphQL representation of Bytes */
  Bytes: any
  /** The `JSONObject` scalar type represents JSON objects as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf). */
  JSONObject: any
  /** GraphQL representation of BigInt */
  BigInt: any
}
+
+export type BaseGraphQlObject = {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+}
+
+export type BaseModel = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+}
+
+export type BaseModelUuid = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+}
+
+export type BaseWhereInput = {
+  id_eq?: Maybe<Scalars['String']>
+  id_in?: Maybe<Array<Scalars['String']>>
+  createdAt_eq?: Maybe<Scalars['String']>
+  createdAt_lt?: Maybe<Scalars['String']>
+  createdAt_lte?: Maybe<Scalars['String']>
+  createdAt_gt?: Maybe<Scalars['String']>
+  createdAt_gte?: Maybe<Scalars['String']>
+  createdById_eq?: Maybe<Scalars['String']>
+  updatedAt_eq?: Maybe<Scalars['String']>
+  updatedAt_lt?: Maybe<Scalars['String']>
+  updatedAt_lte?: Maybe<Scalars['String']>
+  updatedAt_gt?: Maybe<Scalars['String']>
+  updatedAt_gte?: Maybe<Scalars['String']>
+  updatedById_eq?: Maybe<Scalars['String']>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['String']>
+  deletedAt_lt?: Maybe<Scalars['String']>
+  deletedAt_lte?: Maybe<Scalars['String']>
+  deletedAt_gt?: Maybe<Scalars['String']>
+  deletedAt_gte?: Maybe<Scalars['String']>
+  deletedById_eq?: Maybe<Scalars['String']>
+}
+
+export type DeleteResponse = {
+  id: Scalars['ID']
+}
+
+export type DistributionBucket = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  distributedBags: Array<StorageBag>
+  /** Distribution bucket operator metadata */
+  operatorMetadata?: Maybe<Scalars['Bytes']>
+}
+
+export type DistributionBucketConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketCreateInput = {
+  operatorMetadata?: Maybe<Scalars['Bytes']>
+}
+
+export type DistributionBucketEdge = {
+  node: DistributionBucket
+  cursor: Scalars['String']
+}
+
+export enum DistributionBucketOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  OperatorMetadataAsc = 'operatorMetadata_ASC',
+  OperatorMetadataDesc = 'operatorMetadata_DESC',
+}
+
+export type DistributionBucketUpdateInput = {
+  operatorMetadata?: Maybe<Scalars['Bytes']>
+}
+
+export type DistributionBucketWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  operatorMetadata_eq?: Maybe<Scalars['Bytes']>
+  operatorMetadata_in?: Maybe<Array<Scalars['Bytes']>>
+  distributedBags_none?: Maybe<StorageBagWhereInput>
+  distributedBags_some?: Maybe<StorageBagWhereInput>
+  distributedBags_every?: Maybe<StorageBagWhereInput>
+  AND?: Maybe<Array<DistributionBucketWhereInput>>
+  OR?: Maybe<Array<DistributionBucketWhereInput>>
+}
+
+export type DistributionBucketWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type PageInfo = {
+  hasNextPage: Scalars['Boolean']
+  hasPreviousPage: Scalars['Boolean']
+  startCursor?: Maybe<Scalars['String']>
+  endCursor?: Maybe<Scalars['String']>
+}
+
+export type ProcessorState = {
+  lastCompleteBlock: Scalars['Float']
+  lastProcessedEvent: Scalars['String']
+  indexerHead: Scalars['Float']
+  chainHead: Scalars['Float']
+}
+
+export type Query = {
+  distributionBuckets: Array<DistributionBucket>
+  distributionBucketByUniqueInput?: Maybe<DistributionBucket>
+  distributionBucketsConnection: DistributionBucketConnection
+  storageBags: Array<StorageBag>
+  storageBagByUniqueInput?: Maybe<StorageBag>
+  storageBagsConnection: StorageBagConnection
+  storageBuckets: Array<StorageBucket>
+  storageBucketByUniqueInput?: Maybe<StorageBucket>
+  storageBucketsConnection: StorageBucketConnection
+  storageDataObjects: Array<StorageDataObject>
+  storageDataObjectByUniqueInput?: Maybe<StorageDataObject>
+  storageDataObjectsConnection: StorageDataObjectConnection
+  storageSystemParameters: Array<StorageSystemParameters>
+  storageSystemParametersByUniqueInput?: Maybe<StorageSystemParameters>
+  storageSystemParametersConnection: StorageSystemParametersConnection
+}
+
+export type QueryDistributionBucketsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOrderByInput>>
+}
+
+export type QueryDistributionBucketByUniqueInputArgs = {
+  where: DistributionBucketWhereUniqueInput
+}
+
+export type QueryDistributionBucketsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOrderByInput>>
+}
+
+export type QueryStorageBagsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageBagWhereInput>
+  orderBy?: Maybe<Array<StorageBagOrderByInput>>
+}
+
+export type QueryStorageBagByUniqueInputArgs = {
+  where: StorageBagWhereUniqueInput
+}
+
+export type QueryStorageBagsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageBagWhereInput>
+  orderBy?: Maybe<Array<StorageBagOrderByInput>>
+}
+
+export type QueryStorageBucketsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageBucketWhereInput>
+  orderBy?: Maybe<Array<StorageBucketOrderByInput>>
+}
+
+export type QueryStorageBucketByUniqueInputArgs = {
+  where: StorageBucketWhereUniqueInput
+}
+
+export type QueryStorageBucketsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageBucketWhereInput>
+  orderBy?: Maybe<Array<StorageBucketOrderByInput>>
+}
+
+export type QueryStorageDataObjectsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageDataObjectWhereInput>
+  orderBy?: Maybe<Array<StorageDataObjectOrderByInput>>
+}
+
+export type QueryStorageDataObjectByUniqueInputArgs = {
+  where: StorageDataObjectWhereUniqueInput
+}
+
+export type QueryStorageDataObjectsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageDataObjectWhereInput>
+  orderBy?: Maybe<Array<StorageDataObjectOrderByInput>>
+}
+
+export type QueryStorageSystemParametersArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageSystemParametersWhereInput>
+  orderBy?: Maybe<Array<StorageSystemParametersOrderByInput>>
+}
+
+export type QueryStorageSystemParametersByUniqueInputArgs = {
+  where: StorageSystemParametersWhereUniqueInput
+}
+
+export type QueryStorageSystemParametersConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageSystemParametersWhereInput>
+  orderBy?: Maybe<Array<StorageSystemParametersOrderByInput>>
+}
+
+export type StandardDeleteResponse = {
+  id: Scalars['ID']
+}
+
+export type StorageBag = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Last time the bag contents (data objects) was updated */
+  contentsUpdatedAt?: Maybe<Scalars['DateTime']>
+  objects: Array<StorageDataObject>
+  storedBy: Array<StorageBucket>
+  distributedBy: Array<DistributionBucket>
+  /** Owner of the storage bag */
+  owner: StorageBagOwner
+}
+
+export type StorageBagConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageBagEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageBagCreateInput = {
+  contentsUpdatedAt?: Maybe<Scalars['DateTime']>
+  owner: Scalars['JSONObject']
+}
+
+export type StorageBagEdge = {
+  node: StorageBag
+  cursor: Scalars['String']
+}
+
+export enum StorageBagOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  ContentsUpdatedAtAsc = 'contentsUpdatedAt_ASC',
+  ContentsUpdatedAtDesc = 'contentsUpdatedAt_DESC',
+}
+
+export type StorageBagOwner =
+  | StorageBagOwnerCouncil
+  | StorageBagOwnerWorkingGroup
+  | StorageBagOwnerMember
+  | StorageBagOwnerChannel
+  | StorageBagOwnerDao
+
+export type StorageBagOwnerChannel = {
+  channelId?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerChannelCreateInput = {
+  channelId?: Maybe<Scalars['Float']>
+}
+
+export type StorageBagOwnerChannelUpdateInput = {
+  channelId?: Maybe<Scalars['Float']>
+}
+
+export type StorageBagOwnerChannelWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  channelId_eq?: Maybe<Scalars['Int']>
+  channelId_gt?: Maybe<Scalars['Int']>
+  channelId_gte?: Maybe<Scalars['Int']>
+  channelId_lt?: Maybe<Scalars['Int']>
+  channelId_lte?: Maybe<Scalars['Int']>
+  channelId_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<StorageBagOwnerChannelWhereInput>>
+  OR?: Maybe<Array<StorageBagOwnerChannelWhereInput>>
+}
+
+export type StorageBagOwnerChannelWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBagOwnerCouncil = {
+  phantom?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerCouncilCreateInput = {
+  phantom?: Maybe<Scalars['Float']>
+}
+
+export type StorageBagOwnerCouncilUpdateInput = {
+  phantom?: Maybe<Scalars['Float']>
+}
+
+export type StorageBagOwnerCouncilWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  phantom_eq?: Maybe<Scalars['Int']>
+  phantom_gt?: Maybe<Scalars['Int']>
+  phantom_gte?: Maybe<Scalars['Int']>
+  phantom_lt?: Maybe<Scalars['Int']>
+  phantom_lte?: Maybe<Scalars['Int']>
+  phantom_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<StorageBagOwnerCouncilWhereInput>>
+  OR?: Maybe<Array<StorageBagOwnerCouncilWhereInput>>
+}
+
+export type StorageBagOwnerCouncilWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBagOwnerDao = {
+  daoId?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerDaoCreateInput = {
+  daoId?: Maybe<Scalars['Float']>
+}
+
+export type StorageBagOwnerDaoUpdateInput = {
+  daoId?: Maybe<Scalars['Float']>
+}
+
+export type StorageBagOwnerDaoWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  daoId_eq?: Maybe<Scalars['Int']>
+  daoId_gt?: Maybe<Scalars['Int']>
+  daoId_gte?: Maybe<Scalars['Int']>
+  daoId_lt?: Maybe<Scalars['Int']>
+  daoId_lte?: Maybe<Scalars['Int']>
+  daoId_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<StorageBagOwnerDaoWhereInput>>
+  OR?: Maybe<Array<StorageBagOwnerDaoWhereInput>>
+}
+
+export type StorageBagOwnerDaoWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBagOwnerMember = {
+  memberId?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerMemberCreateInput = {
+  memberId?: Maybe<Scalars['Float']>
+}
+
+export type StorageBagOwnerMemberUpdateInput = {
+  memberId?: Maybe<Scalars['Float']>
+}
+
+export type StorageBagOwnerMemberWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  memberId_eq?: Maybe<Scalars['Int']>
+  memberId_gt?: Maybe<Scalars['Int']>
+  memberId_gte?: Maybe<Scalars['Int']>
+  memberId_lt?: Maybe<Scalars['Int']>
+  memberId_lte?: Maybe<Scalars['Int']>
+  memberId_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<StorageBagOwnerMemberWhereInput>>
+  OR?: Maybe<Array<StorageBagOwnerMemberWhereInput>>
+}
+
+export type StorageBagOwnerMemberWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBagOwnerWorkingGroup = {
+  workingGroupId?: Maybe<Scalars['String']>
+}
+
+export type StorageBagOwnerWorkingGroupCreateInput = {
+  workingGroupId?: Maybe<Scalars['String']>
+}
+
+export type StorageBagOwnerWorkingGroupUpdateInput = {
+  workingGroupId?: Maybe<Scalars['String']>
+}
+
+export type StorageBagOwnerWorkingGroupWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  workingGroupId_eq?: Maybe<Scalars['String']>
+  workingGroupId_contains?: Maybe<Scalars['String']>
+  workingGroupId_startsWith?: Maybe<Scalars['String']>
+  workingGroupId_endsWith?: Maybe<Scalars['String']>
+  workingGroupId_in?: Maybe<Array<Scalars['String']>>
+  AND?: Maybe<Array<StorageBagOwnerWorkingGroupWhereInput>>
+  OR?: Maybe<Array<StorageBagOwnerWorkingGroupWhereInput>>
+}
+
+export type StorageBagOwnerWorkingGroupWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBagUpdateInput = {
+  contentsUpdatedAt?: Maybe<Scalars['DateTime']>
+  owner?: Maybe<Scalars['JSONObject']>
+}
+
+export type StorageBagWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  contentsUpdatedAt_eq?: Maybe<Scalars['DateTime']>
+  contentsUpdatedAt_lt?: Maybe<Scalars['DateTime']>
+  contentsUpdatedAt_lte?: Maybe<Scalars['DateTime']>
+  contentsUpdatedAt_gt?: Maybe<Scalars['DateTime']>
+  contentsUpdatedAt_gte?: Maybe<Scalars['DateTime']>
+  owner_json?: Maybe<Scalars['JSONObject']>
+  objects_none?: Maybe<StorageDataObjectWhereInput>
+  objects_some?: Maybe<StorageDataObjectWhereInput>
+  objects_every?: Maybe<StorageDataObjectWhereInput>
+  storedBy_none?: Maybe<StorageBucketWhereInput>
+  storedBy_some?: Maybe<StorageBucketWhereInput>
+  storedBy_every?: Maybe<StorageBucketWhereInput>
+  distributedBy_none?: Maybe<DistributionBucketWhereInput>
+  distributedBy_some?: Maybe<DistributionBucketWhereInput>
+  distributedBy_every?: Maybe<DistributionBucketWhereInput>
+  AND?: Maybe<Array<StorageBagWhereInput>>
+  OR?: Maybe<Array<StorageBagWhereInput>>
+}
+
+export type StorageBagWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBucket = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Current bucket operator status */
+  operatorStatus: StorageBucketOperatorStatus
+  /** Storage bucket operator metadata */
+  operatorMetadata?: Maybe<Scalars['Bytes']>
+  /** Whether the bucket is accepting any new storage bags */
+  acceptingNewBags: Scalars['Boolean']
+  storedBags: Array<StorageBag>
+  /** Bucket's data object size limit in bytes */
+  dataObjectsSizeLimit: Scalars['BigInt']
+  /** Bucket's data object count limit */
+  dataObjectCountLimit: Scalars['BigInt']
+}
+
+export type StorageBucketConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageBucketEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageBucketCreateInput = {
+  operatorStatus: Scalars['JSONObject']
+  operatorMetadata?: Maybe<Scalars['Bytes']>
+  acceptingNewBags: Scalars['Boolean']
+  dataObjectsSizeLimit: Scalars['BigInt']
+  dataObjectCountLimit: Scalars['BigInt']
+}
+
+export type StorageBucketEdge = {
+  node: StorageBucket
+  cursor: Scalars['String']
+}
+
+export type StorageBucketOperatorStatus =
+  | StorageBucketOperatorStatusMissing
+  | StorageBucketOperatorStatusInvited
+  | StorageBucketOperatorStatusActive
+
+export type StorageBucketOperatorStatusActive = {
+  workerId: Scalars['Int']
+}
+
+export type StorageBucketOperatorStatusActiveCreateInput = {
+  workerId: Scalars['Float']
+}
+
+export type StorageBucketOperatorStatusActiveUpdateInput = {
+  workerId?: Maybe<Scalars['Float']>
+}
+
+export type StorageBucketOperatorStatusActiveWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  workerId_eq?: Maybe<Scalars['Int']>
+  workerId_gt?: Maybe<Scalars['Int']>
+  workerId_gte?: Maybe<Scalars['Int']>
+  workerId_lt?: Maybe<Scalars['Int']>
+  workerId_lte?: Maybe<Scalars['Int']>
+  workerId_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<StorageBucketOperatorStatusActiveWhereInput>>
+  OR?: Maybe<Array<StorageBucketOperatorStatusActiveWhereInput>>
+}
+
+export type StorageBucketOperatorStatusActiveWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBucketOperatorStatusInvited = {
+  workerId: Scalars['Int']
+}
+
+export type StorageBucketOperatorStatusInvitedCreateInput = {
+  workerId: Scalars['Float']
+}
+
+export type StorageBucketOperatorStatusInvitedUpdateInput = {
+  workerId?: Maybe<Scalars['Float']>
+}
+
+export type StorageBucketOperatorStatusInvitedWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  workerId_eq?: Maybe<Scalars['Int']>
+  workerId_gt?: Maybe<Scalars['Int']>
+  workerId_gte?: Maybe<Scalars['Int']>
+  workerId_lt?: Maybe<Scalars['Int']>
+  workerId_lte?: Maybe<Scalars['Int']>
+  workerId_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<StorageBucketOperatorStatusInvitedWhereInput>>
+  OR?: Maybe<Array<StorageBucketOperatorStatusInvitedWhereInput>>
+}
+
+export type StorageBucketOperatorStatusInvitedWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBucketOperatorStatusMissing = {
+  phantom?: Maybe<Scalars['Int']>
+}
+
+export type StorageBucketOperatorStatusMissingCreateInput = {
+  phantom?: Maybe<Scalars['Float']>
+}
+
+export type StorageBucketOperatorStatusMissingUpdateInput = {
+  phantom?: Maybe<Scalars['Float']>
+}
+
+export type StorageBucketOperatorStatusMissingWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  phantom_eq?: Maybe<Scalars['Int']>
+  phantom_gt?: Maybe<Scalars['Int']>
+  phantom_gte?: Maybe<Scalars['Int']>
+  phantom_lt?: Maybe<Scalars['Int']>
+  phantom_lte?: Maybe<Scalars['Int']>
+  phantom_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<StorageBucketOperatorStatusMissingWhereInput>>
+  OR?: Maybe<Array<StorageBucketOperatorStatusMissingWhereInput>>
+}
+
+export type StorageBucketOperatorStatusMissingWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum StorageBucketOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  OperatorMetadataAsc = 'operatorMetadata_ASC',
+  OperatorMetadataDesc = 'operatorMetadata_DESC',
+  AcceptingNewBagsAsc = 'acceptingNewBags_ASC',
+  AcceptingNewBagsDesc = 'acceptingNewBags_DESC',
+  DataObjectsSizeLimitAsc = 'dataObjectsSizeLimit_ASC',
+  DataObjectsSizeLimitDesc = 'dataObjectsSizeLimit_DESC',
+  DataObjectCountLimitAsc = 'dataObjectCountLimit_ASC',
+  DataObjectCountLimitDesc = 'dataObjectCountLimit_DESC',
+}
+
+export type StorageBucketUpdateInput = {
+  operatorStatus?: Maybe<Scalars['JSONObject']>
+  operatorMetadata?: Maybe<Scalars['Bytes']>
+  acceptingNewBags?: Maybe<Scalars['Boolean']>
+  dataObjectsSizeLimit?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit?: Maybe<Scalars['BigInt']>
+}
+
+export type StorageBucketWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  operatorStatus_json?: Maybe<Scalars['JSONObject']>
+  operatorMetadata_eq?: Maybe<Scalars['Bytes']>
+  operatorMetadata_in?: Maybe<Array<Scalars['Bytes']>>
+  acceptingNewBags_eq?: Maybe<Scalars['Boolean']>
+  acceptingNewBags_in?: Maybe<Array<Scalars['Boolean']>>
+  dataObjectsSizeLimit_eq?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_gt?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_gte?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_lt?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_lte?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_in?: Maybe<Array<Scalars['BigInt']>>
+  dataObjectCountLimit_eq?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_gt?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_gte?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_lt?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_lte?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_in?: Maybe<Array<Scalars['BigInt']>>
+  storedBags_none?: Maybe<StorageBagWhereInput>
+  storedBags_some?: Maybe<StorageBagWhereInput>
+  storedBags_every?: Maybe<StorageBagWhereInput>
+  AND?: Maybe<Array<StorageBucketWhereInput>>
+  OR?: Maybe<Array<StorageBucketWhereInput>>
+}
+
+export type StorageBucketWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageDataObject = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Whether the data object was uploaded and accepted by the storage provider */
+  isAccepted: Scalars['Boolean']
+  /** Data object size in bytes */
+  size: Scalars['BigInt']
+  storageBag: StorageBag
+  storageBagId: Scalars['String']
+  /** IPFS content hash */
+  ipfsHash: Scalars['String']
+  /** Public key used to authenticate the uploader by the storage provider */
+  authenticationKey?: Maybe<Scalars['String']>
+}
+
+export type StorageDataObjectConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageDataObjectEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageDataObjectCreateInput = {
+  isAccepted: Scalars['Boolean']
+  size: Scalars['BigInt']
+  storageBag: Scalars['ID']
+  ipfsHash: Scalars['String']
+  authenticationKey?: Maybe<Scalars['String']>
+}
+
+export type StorageDataObjectEdge = {
+  node: StorageDataObject
+  cursor: Scalars['String']
+}
+
+export enum StorageDataObjectOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsAcceptedAsc = 'isAccepted_ASC',
+  IsAcceptedDesc = 'isAccepted_DESC',
+  SizeAsc = 'size_ASC',
+  SizeDesc = 'size_DESC',
+  StorageBagAsc = 'storageBag_ASC',
+  StorageBagDesc = 'storageBag_DESC',
+  IpfsHashAsc = 'ipfsHash_ASC',
+  IpfsHashDesc = 'ipfsHash_DESC',
+  AuthenticationKeyAsc = 'authenticationKey_ASC',
+  AuthenticationKeyDesc = 'authenticationKey_DESC',
+}
+
+export type StorageDataObjectUpdateInput = {
+  isAccepted?: Maybe<Scalars['Boolean']>
+  size?: Maybe<Scalars['BigInt']>
+  storageBag?: Maybe<Scalars['ID']>
+  ipfsHash?: Maybe<Scalars['String']>
+  authenticationKey?: Maybe<Scalars['String']>
+}
+
+export type StorageDataObjectWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  isAccepted_eq?: Maybe<Scalars['Boolean']>
+  isAccepted_in?: Maybe<Array<Scalars['Boolean']>>
+  size_eq?: Maybe<Scalars['BigInt']>
+  size_gt?: Maybe<Scalars['BigInt']>
+  size_gte?: Maybe<Scalars['BigInt']>
+  size_lt?: Maybe<Scalars['BigInt']>
+  size_lte?: Maybe<Scalars['BigInt']>
+  size_in?: Maybe<Array<Scalars['BigInt']>>
+  storageBag_eq?: Maybe<Scalars['ID']>
+  storageBag_in?: Maybe<Array<Scalars['ID']>>
+  ipfsHash_eq?: Maybe<Scalars['String']>
+  ipfsHash_contains?: Maybe<Scalars['String']>
+  ipfsHash_startsWith?: Maybe<Scalars['String']>
+  ipfsHash_endsWith?: Maybe<Scalars['String']>
+  ipfsHash_in?: Maybe<Array<Scalars['String']>>
+  authenticationKey_eq?: Maybe<Scalars['String']>
+  authenticationKey_contains?: Maybe<Scalars['String']>
+  authenticationKey_startsWith?: Maybe<Scalars['String']>
+  authenticationKey_endsWith?: Maybe<Scalars['String']>
+  authenticationKey_in?: Maybe<Array<Scalars['String']>>
+  storageBag?: Maybe<StorageBagWhereInput>
+  AND?: Maybe<Array<StorageDataObjectWhereInput>>
+  OR?: Maybe<Array<StorageDataObjectWhereInput>>
+}
+
+export type StorageDataObjectWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+/** Global storage system parameters */
+export type StorageSystemParameters = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Blacklisted content hashes */
+  blacklist: Array<Scalars['String']>
+}
+
+export type StorageSystemParametersConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageSystemParametersEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageSystemParametersCreateInput = {
+  blacklist: Array<Scalars['String']>
+}
+
+export type StorageSystemParametersEdge = {
+  node: StorageSystemParameters
+  cursor: Scalars['String']
+}
+
+export enum StorageSystemParametersOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+}
+
+export type StorageSystemParametersUpdateInput = {
+  blacklist?: Maybe<Array<Scalars['String']>>
+}
+
+export type StorageSystemParametersWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  AND?: Maybe<Array<StorageSystemParametersWhereInput>>
+  OR?: Maybe<Array<StorageSystemParametersWhereInput>>
+}
+
+export type StorageSystemParametersWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type Subscription = {
+  stateSubscription: ProcessorState
+}

+ 1037 - 0
distributor-node/src/services/networking/query-node/mock.graphql

@@ -0,0 +1,1037 @@
+interface BaseGraphQLObject {
+  id: ID!
+  createdAt: DateTime!
+  createdById: String!
+  updatedAt: DateTime
+  updatedById: String
+  deletedAt: DateTime
+  deletedById: String
+  version: Int!
+}
+
+type BaseModel implements BaseGraphQLObject {
+  id: ID!
+  createdAt: DateTime!
+  createdById: String!
+  updatedAt: DateTime
+  updatedById: String
+  deletedAt: DateTime
+  deletedById: String
+  version: Int!
+}
+
+type BaseModelUUID implements BaseGraphQLObject {
+  id: ID!
+  createdAt: DateTime!
+  createdById: String!
+  updatedAt: DateTime
+  updatedById: String
+  deletedAt: DateTime
+  deletedById: String
+  version: Int!
+}
+
+input BaseWhereInput {
+  id_eq: String
+  id_in: [String!]
+  createdAt_eq: String
+  createdAt_lt: String
+  createdAt_lte: String
+  createdAt_gt: String
+  createdAt_gte: String
+  createdById_eq: String
+  updatedAt_eq: String
+  updatedAt_lt: String
+  updatedAt_lte: String
+  updatedAt_gt: String
+  updatedAt_gte: String
+  updatedById_eq: String
+  deletedAt_all: Boolean
+  deletedAt_eq: String
+  deletedAt_lt: String
+  deletedAt_lte: String
+  deletedAt_gt: String
+  deletedAt_gte: String
+  deletedById_eq: String
+}
+
+"""GraphQL representation of BigInt"""
+scalar BigInt
+
+"""GraphQL representation of Bytes"""
+scalar Bytes
+
+"""
+The javascript `Date` as string. Type represents date and time as the ISO Date string.
+"""
+scalar DateTime
+
+interface DeleteResponse {
+  id: ID!
+}
+
+type DistributionBucket implements BaseGraphQLObject {
+  id: ID!
+  createdAt: DateTime!
+  createdById: String!
+  updatedAt: DateTime
+  updatedById: String
+  deletedAt: DateTime
+  deletedById: String
+  version: Int!
+  distributedBags: [StorageBag!]!
+
+  """Distribution bucket operator metadata"""
+  operatorMetadata: Bytes
+}
+
+type DistributionBucketConnection {
+  totalCount: Int!
+  edges: [DistributionBucketEdge!]!
+  pageInfo: PageInfo!
+}
+
+input DistributionBucketCreateInput {
+  operatorMetadata: Bytes
+}
+
+type DistributionBucketEdge {
+  node: DistributionBucket!
+  cursor: String!
+}
+
+enum DistributionBucketOrderByInput {
+  createdAt_ASC
+  createdAt_DESC
+  updatedAt_ASC
+  updatedAt_DESC
+  deletedAt_ASC
+  deletedAt_DESC
+  operatorMetadata_ASC
+  operatorMetadata_DESC
+}
+
+input DistributionBucketUpdateInput {
+  operatorMetadata: Bytes
+}
+
+input DistributionBucketWhereInput {
+  id_eq: ID
+  id_in: [ID!]
+  createdAt_eq: DateTime
+  createdAt_lt: DateTime
+  createdAt_lte: DateTime
+  createdAt_gt: DateTime
+  createdAt_gte: DateTime
+  createdById_eq: ID
+  createdById_in: [ID!]
+  updatedAt_eq: DateTime
+  updatedAt_lt: DateTime
+  updatedAt_lte: DateTime
+  updatedAt_gt: DateTime
+  updatedAt_gte: DateTime
+  updatedById_eq: ID
+  updatedById_in: [ID!]
+  deletedAt_all: Boolean
+  deletedAt_eq: DateTime
+  deletedAt_lt: DateTime
+  deletedAt_lte: DateTime
+  deletedAt_gt: DateTime
+  deletedAt_gte: DateTime
+  deletedById_eq: ID
+  deletedById_in: [ID!]
+  operatorMetadata_eq: Bytes
+  operatorMetadata_in: [Bytes!]
+  distributedBags_none: StorageBagWhereInput
+  distributedBags_some: StorageBagWhereInput
+  distributedBags_every: StorageBagWhereInput
+  AND: [DistributionBucketWhereInput!]
+  OR: [DistributionBucketWhereInput!]
+}
+
+input DistributionBucketWhereUniqueInput {
+  id: ID!
+}
+
+"""
+The `JSONObject` scalar type represents JSON objects as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf).
+"""
+scalar JSONObject
+
+type PageInfo {
+  hasNextPage: Boolean!
+  hasPreviousPage: Boolean!
+  startCursor: String
+  endCursor: String
+}
+
+type ProcessorState {
+  lastCompleteBlock: Float!
+  lastProcessedEvent: String!
+  indexerHead: Float!
+  chainHead: Float!
+}
+
+type Query {
+  distributionBuckets(offset: Int, limit: Int = 50, where: DistributionBucketWhereInput, orderBy: [DistributionBucketOrderByInput!]): [DistributionBucket!]!
+  distributionBucketByUniqueInput(where: DistributionBucketWhereUniqueInput!): DistributionBucket
+  distributionBucketsConnection(first: Int, after: String, last: Int, before: String, where: DistributionBucketWhereInput, orderBy: [DistributionBucketOrderByInput!]): DistributionBucketConnection!
+  storageBags(offset: Int, limit: Int = 50, where: StorageBagWhereInput, orderBy: [StorageBagOrderByInput!]): [StorageBag!]!
+  storageBagByUniqueInput(where: StorageBagWhereUniqueInput!): StorageBag
+  storageBagsConnection(first: Int, after: String, last: Int, before: String, where: StorageBagWhereInput, orderBy: [StorageBagOrderByInput!]): StorageBagConnection!
+  storageBuckets(offset: Int, limit: Int = 50, where: StorageBucketWhereInput, orderBy: [StorageBucketOrderByInput!]): [StorageBucket!]!
+  storageBucketByUniqueInput(where: StorageBucketWhereUniqueInput!): StorageBucket
+  storageBucketsConnection(first: Int, after: String, last: Int, before: String, where: StorageBucketWhereInput, orderBy: [StorageBucketOrderByInput!]): StorageBucketConnection!
+  storageDataObjects(offset: Int, limit: Int = 50, where: StorageDataObjectWhereInput, orderBy: [StorageDataObjectOrderByInput!]): [StorageDataObject!]!
+  storageDataObjectByUniqueInput(where: StorageDataObjectWhereUniqueInput!): StorageDataObject
+  storageDataObjectsConnection(first: Int, after: String, last: Int, before: String, where: StorageDataObjectWhereInput, orderBy: [StorageDataObjectOrderByInput!]): StorageDataObjectConnection!
+  storageSystemParameters(offset: Int, limit: Int = 50, where: StorageSystemParametersWhereInput, orderBy: [StorageSystemParametersOrderByInput!]): [StorageSystemParameters!]!
+  storageSystemParametersByUniqueInput(where: StorageSystemParametersWhereUniqueInput!): StorageSystemParameters
+  storageSystemParametersConnection(first: Int, after: String, last: Int, before: String, where: StorageSystemParametersWhereInput, orderBy: [StorageSystemParametersOrderByInput!]): StorageSystemParametersConnection!
+}
+
+type StandardDeleteResponse {
+  id: ID!
+}
+
+type StorageBag implements BaseGraphQLObject {
+  id: ID!
+  createdAt: DateTime!
+  createdById: String!
+  updatedAt: DateTime
+  updatedById: String
+  deletedAt: DateTime
+  deletedById: String
+  version: Int!
+
+  """Last time the bag contents (data objects) was updated"""
+  contentsUpdatedAt: DateTime
+  objects: [StorageDataObject!]!
+  storedBy: [StorageBucket!]!
+  distributedBy: [DistributionBucket!]!
+
+  """Owner of the storage bag"""
+  owner: StorageBagOwner!
+}
+
+type StorageBagConnection {
+  totalCount: Int!
+  edges: [StorageBagEdge!]!
+  pageInfo: PageInfo!
+}
+
+input StorageBagCreateInput {
+  contentsUpdatedAt: DateTime
+  owner: JSONObject!
+}
+
+type StorageBagEdge {
+  node: StorageBag!
+  cursor: String!
+}
+
+enum StorageBagOrderByInput {
+  createdAt_ASC
+  createdAt_DESC
+  updatedAt_ASC
+  updatedAt_DESC
+  deletedAt_ASC
+  deletedAt_DESC
+  contentsUpdatedAt_ASC
+  contentsUpdatedAt_DESC
+}
+
+union StorageBagOwner = StorageBagOwnerCouncil | StorageBagOwnerWorkingGroup | StorageBagOwnerMember | StorageBagOwnerChannel | StorageBagOwnerDAO
+
+type StorageBagOwnerChannel {
+  channelId: Int
+}
+
+input StorageBagOwnerChannelCreateInput {
+  channelId: Float
+}
+
+input StorageBagOwnerChannelUpdateInput {
+  channelId: Float
+}
+
+input StorageBagOwnerChannelWhereInput {
+  id_eq: ID
+  id_in: [ID!]
+  createdAt_eq: DateTime
+  createdAt_lt: DateTime
+  createdAt_lte: DateTime
+  createdAt_gt: DateTime
+  createdAt_gte: DateTime
+  createdById_eq: ID
+  createdById_in: [ID!]
+  updatedAt_eq: DateTime
+  updatedAt_lt: DateTime
+  updatedAt_lte: DateTime
+  updatedAt_gt: DateTime
+  updatedAt_gte: DateTime
+  updatedById_eq: ID
+  updatedById_in: [ID!]
+  deletedAt_all: Boolean
+  deletedAt_eq: DateTime
+  deletedAt_lt: DateTime
+  deletedAt_lte: DateTime
+  deletedAt_gt: DateTime
+  deletedAt_gte: DateTime
+  deletedById_eq: ID
+  deletedById_in: [ID!]
+  channelId_eq: Int
+  channelId_gt: Int
+  channelId_gte: Int
+  channelId_lt: Int
+  channelId_lte: Int
+  channelId_in: [Int!]
+  AND: [StorageBagOwnerChannelWhereInput!]
+  OR: [StorageBagOwnerChannelWhereInput!]
+}
+
+input StorageBagOwnerChannelWhereUniqueInput {
+  id: ID!
+}
+
+type StorageBagOwnerCouncil {
+  phantom: Int
+}
+
+input StorageBagOwnerCouncilCreateInput {
+  phantom: Float
+}
+
+input StorageBagOwnerCouncilUpdateInput {
+  phantom: Float
+}
+
+input StorageBagOwnerCouncilWhereInput {
+  id_eq: ID
+  id_in: [ID!]
+  createdAt_eq: DateTime
+  createdAt_lt: DateTime
+  createdAt_lte: DateTime
+  createdAt_gt: DateTime
+  createdAt_gte: DateTime
+  createdById_eq: ID
+  createdById_in: [ID!]
+  updatedAt_eq: DateTime
+  updatedAt_lt: DateTime
+  updatedAt_lte: DateTime
+  updatedAt_gt: DateTime
+  updatedAt_gte: DateTime
+  updatedById_eq: ID
+  updatedById_in: [ID!]
+  deletedAt_all: Boolean
+  deletedAt_eq: DateTime
+  deletedAt_lt: DateTime
+  deletedAt_lte: DateTime
+  deletedAt_gt: DateTime
+  deletedAt_gte: DateTime
+  deletedById_eq: ID
+  deletedById_in: [ID!]
+  phantom_eq: Int
+  phantom_gt: Int
+  phantom_gte: Int
+  phantom_lt: Int
+  phantom_lte: Int
+  phantom_in: [Int!]
+  AND: [StorageBagOwnerCouncilWhereInput!]
+  OR: [StorageBagOwnerCouncilWhereInput!]
+}
+
+input StorageBagOwnerCouncilWhereUniqueInput {
+  id: ID!
+}
+
+type StorageBagOwnerDAO {
+  daoId: Int
+}
+
+input StorageBagOwnerDAOCreateInput {
+  daoId: Float
+}
+
+input StorageBagOwnerDAOUpdateInput {
+  daoId: Float
+}
+
+input StorageBagOwnerDAOWhereInput {
+  id_eq: ID
+  id_in: [ID!]
+  createdAt_eq: DateTime
+  createdAt_lt: DateTime
+  createdAt_lte: DateTime
+  createdAt_gt: DateTime
+  createdAt_gte: DateTime
+  createdById_eq: ID
+  createdById_in: [ID!]
+  updatedAt_eq: DateTime
+  updatedAt_lt: DateTime
+  updatedAt_lte: DateTime
+  updatedAt_gt: DateTime
+  updatedAt_gte: DateTime
+  updatedById_eq: ID
+  updatedById_in: [ID!]
+  deletedAt_all: Boolean
+  deletedAt_eq: DateTime
+  deletedAt_lt: DateTime
+  deletedAt_lte: DateTime
+  deletedAt_gt: DateTime
+  deletedAt_gte: DateTime
+  deletedById_eq: ID
+  deletedById_in: [ID!]
+  daoId_eq: Int
+  daoId_gt: Int
+  daoId_gte: Int
+  daoId_lt: Int
+  daoId_lte: Int
+  daoId_in: [Int!]
+  AND: [StorageBagOwnerDAOWhereInput!]
+  OR: [StorageBagOwnerDAOWhereInput!]
+}
+
+input StorageBagOwnerDAOWhereUniqueInput {
+  id: ID!
+}
+
+type StorageBagOwnerMember {
+  memberId: Int
+}
+
+input StorageBagOwnerMemberCreateInput {
+  memberId: Float
+}
+
+input StorageBagOwnerMemberUpdateInput {
+  memberId: Float
+}
+
+input StorageBagOwnerMemberWhereInput {
+  id_eq: ID
+  id_in: [ID!]
+  createdAt_eq: DateTime
+  createdAt_lt: DateTime
+  createdAt_lte: DateTime
+  createdAt_gt: DateTime
+  createdAt_gte: DateTime
+  createdById_eq: ID
+  createdById_in: [ID!]
+  updatedAt_eq: DateTime
+  updatedAt_lt: DateTime
+  updatedAt_lte: DateTime
+  updatedAt_gt: DateTime
+  updatedAt_gte: DateTime
+  updatedById_eq: ID
+  updatedById_in: [ID!]
+  deletedAt_all: Boolean
+  deletedAt_eq: DateTime
+  deletedAt_lt: DateTime
+  deletedAt_lte: DateTime
+  deletedAt_gt: DateTime
+  deletedAt_gte: DateTime
+  deletedById_eq: ID
+  deletedById_in: [ID!]
+  memberId_eq: Int
+  memberId_gt: Int
+  memberId_gte: Int
+  memberId_lt: Int
+  memberId_lte: Int
+  memberId_in: [Int!]
+  AND: [StorageBagOwnerMemberWhereInput!]
+  OR: [StorageBagOwnerMemberWhereInput!]
+}
+
+input StorageBagOwnerMemberWhereUniqueInput {
+  id: ID!
+}
+
+type StorageBagOwnerWorkingGroup {
+  workingGroupId: String
+}
+
+input StorageBagOwnerWorkingGroupCreateInput {
+  workingGroupId: String
+}
+
+input StorageBagOwnerWorkingGroupUpdateInput {
+  workingGroupId: String
+}
+
+input StorageBagOwnerWorkingGroupWhereInput {
+  id_eq: ID
+  id_in: [ID!]
+  createdAt_eq: DateTime
+  createdAt_lt: DateTime
+  createdAt_lte: DateTime
+  createdAt_gt: DateTime
+  createdAt_gte: DateTime
+  createdById_eq: ID
+  createdById_in: [ID!]
+  updatedAt_eq: DateTime
+  updatedAt_lt: DateTime
+  updatedAt_lte: DateTime
+  updatedAt_gt: DateTime
+  updatedAt_gte: DateTime
+  updatedById_eq: ID
+  updatedById_in: [ID!]
+  deletedAt_all: Boolean
+  deletedAt_eq: DateTime
+  deletedAt_lt: DateTime
+  deletedAt_lte: DateTime
+  deletedAt_gt: DateTime
+  deletedAt_gte: DateTime
+  deletedById_eq: ID
+  deletedById_in: [ID!]
+  workingGroupId_eq: String
+  workingGroupId_contains: String
+  workingGroupId_startsWith: String
+  workingGroupId_endsWith: String
+  workingGroupId_in: [String!]
+  AND: [StorageBagOwnerWorkingGroupWhereInput!]
+  OR: [StorageBagOwnerWorkingGroupWhereInput!]
+}
+
+input StorageBagOwnerWorkingGroupWhereUniqueInput {
+  id: ID!
+}
+
+input StorageBagUpdateInput {
+  contentsUpdatedAt: DateTime
+  owner: JSONObject
+}
+
+input StorageBagWhereInput {
+  id_eq: ID
+  id_in: [ID!]
+  createdAt_eq: DateTime
+  createdAt_lt: DateTime
+  createdAt_lte: DateTime
+  createdAt_gt: DateTime
+  createdAt_gte: DateTime
+  createdById_eq: ID
+  createdById_in: [ID!]
+  updatedAt_eq: DateTime
+  updatedAt_lt: DateTime
+  updatedAt_lte: DateTime
+  updatedAt_gt: DateTime
+  updatedAt_gte: DateTime
+  updatedById_eq: ID
+  updatedById_in: [ID!]
+  deletedAt_all: Boolean
+  deletedAt_eq: DateTime
+  deletedAt_lt: DateTime
+  deletedAt_lte: DateTime
+  deletedAt_gt: DateTime
+  deletedAt_gte: DateTime
+  deletedById_eq: ID
+  deletedById_in: [ID!]
+  contentsUpdatedAt_eq: DateTime
+  contentsUpdatedAt_lt: DateTime
+  contentsUpdatedAt_lte: DateTime
+  contentsUpdatedAt_gt: DateTime
+  contentsUpdatedAt_gte: DateTime
+  owner_json: JSONObject
+  objects_none: StorageDataObjectWhereInput
+  objects_some: StorageDataObjectWhereInput
+  objects_every: StorageDataObjectWhereInput
+  storedBy_none: StorageBucketWhereInput
+  storedBy_some: StorageBucketWhereInput
+  storedBy_every: StorageBucketWhereInput
+  distributedBy_none: DistributionBucketWhereInput
+  distributedBy_some: DistributionBucketWhereInput
+  distributedBy_every: DistributionBucketWhereInput
+  AND: [StorageBagWhereInput!]
+  OR: [StorageBagWhereInput!]
+}
+
+input StorageBagWhereUniqueInput {
+  id: ID!
+}
+
+type StorageBucket implements BaseGraphQLObject {
+  id: ID!
+  createdAt: DateTime!
+  createdById: String!
+  updatedAt: DateTime
+  updatedById: String
+  deletedAt: DateTime
+  deletedById: String
+  version: Int!
+
+  """Current bucket operator status"""
+  operatorStatus: StorageBucketOperatorStatus!
+
+  """Storage bucket operator metadata"""
+  operatorMetadata: Bytes
+
+  """Whether the bucket is accepting any new storage bags"""
+  acceptingNewBags: Boolean!
+  storedBags: [StorageBag!]!
+
+  """Bucket's data object size limit in bytes"""
+  dataObjectsSizeLimit: BigInt!
+
+  """Bucket's data object count limit"""
+  dataObjectCountLimit: BigInt!
+}
+
+type StorageBucketConnection {
+  totalCount: Int!
+  edges: [StorageBucketEdge!]!
+  pageInfo: PageInfo!
+}
+
+input StorageBucketCreateInput {
+  operatorStatus: JSONObject!
+  operatorMetadata: Bytes
+  acceptingNewBags: Boolean!
+  dataObjectsSizeLimit: BigInt!
+  dataObjectCountLimit: BigInt!
+}
+
+type StorageBucketEdge {
+  node: StorageBucket!
+  cursor: String!
+}
+
+union StorageBucketOperatorStatus = StorageBucketOperatorStatusMissing | StorageBucketOperatorStatusInvited | StorageBucketOperatorStatusActive
+
+type StorageBucketOperatorStatusActive {
+  workerId: Int!
+}
+
+input StorageBucketOperatorStatusActiveCreateInput {
+  workerId: Float!
+}
+
+input StorageBucketOperatorStatusActiveUpdateInput {
+  workerId: Float
+}
+
+input StorageBucketOperatorStatusActiveWhereInput {
+  id_eq: ID
+  id_in: [ID!]
+  createdAt_eq: DateTime
+  createdAt_lt: DateTime
+  createdAt_lte: DateTime
+  createdAt_gt: DateTime
+  createdAt_gte: DateTime
+  createdById_eq: ID
+  createdById_in: [ID!]
+  updatedAt_eq: DateTime
+  updatedAt_lt: DateTime
+  updatedAt_lte: DateTime
+  updatedAt_gt: DateTime
+  updatedAt_gte: DateTime
+  updatedById_eq: ID
+  updatedById_in: [ID!]
+  deletedAt_all: Boolean
+  deletedAt_eq: DateTime
+  deletedAt_lt: DateTime
+  deletedAt_lte: DateTime
+  deletedAt_gt: DateTime
+  deletedAt_gte: DateTime
+  deletedById_eq: ID
+  deletedById_in: [ID!]
+  workerId_eq: Int
+  workerId_gt: Int
+  workerId_gte: Int
+  workerId_lt: Int
+  workerId_lte: Int
+  workerId_in: [Int!]
+  AND: [StorageBucketOperatorStatusActiveWhereInput!]
+  OR: [StorageBucketOperatorStatusActiveWhereInput!]
+}
+
+input StorageBucketOperatorStatusActiveWhereUniqueInput {
+  id: ID!
+}
+
+type StorageBucketOperatorStatusInvited {
+  workerId: Int!
+}
+
+input StorageBucketOperatorStatusInvitedCreateInput {
+  workerId: Float!
+}
+
+input StorageBucketOperatorStatusInvitedUpdateInput {
+  workerId: Float
+}
+
+input StorageBucketOperatorStatusInvitedWhereInput {
+  id_eq: ID
+  id_in: [ID!]
+  createdAt_eq: DateTime
+  createdAt_lt: DateTime
+  createdAt_lte: DateTime
+  createdAt_gt: DateTime
+  createdAt_gte: DateTime
+  createdById_eq: ID
+  createdById_in: [ID!]
+  updatedAt_eq: DateTime
+  updatedAt_lt: DateTime
+  updatedAt_lte: DateTime
+  updatedAt_gt: DateTime
+  updatedAt_gte: DateTime
+  updatedById_eq: ID
+  updatedById_in: [ID!]
+  deletedAt_all: Boolean
+  deletedAt_eq: DateTime
+  deletedAt_lt: DateTime
+  deletedAt_lte: DateTime
+  deletedAt_gt: DateTime
+  deletedAt_gte: DateTime
+  deletedById_eq: ID
+  deletedById_in: [ID!]
+  workerId_eq: Int
+  workerId_gt: Int
+  workerId_gte: Int
+  workerId_lt: Int
+  workerId_lte: Int
+  workerId_in: [Int!]
+  AND: [StorageBucketOperatorStatusInvitedWhereInput!]
+  OR: [StorageBucketOperatorStatusInvitedWhereInput!]
+}
+
+input StorageBucketOperatorStatusInvitedWhereUniqueInput {
+  id: ID!
+}
+
+type StorageBucketOperatorStatusMissing {
+  phantom: Int
+}
+
+input StorageBucketOperatorStatusMissingCreateInput {
+  phantom: Float
+}
+
+input StorageBucketOperatorStatusMissingUpdateInput {
+  phantom: Float
+}
+
+input StorageBucketOperatorStatusMissingWhereInput {
+  id_eq: ID
+  id_in: [ID!]
+  createdAt_eq: DateTime
+  createdAt_lt: DateTime
+  createdAt_lte: DateTime
+  createdAt_gt: DateTime
+  createdAt_gte: DateTime
+  createdById_eq: ID
+  createdById_in: [ID!]
+  updatedAt_eq: DateTime
+  updatedAt_lt: DateTime
+  updatedAt_lte: DateTime
+  updatedAt_gt: DateTime
+  updatedAt_gte: DateTime
+  updatedById_eq: ID
+  updatedById_in: [ID!]
+  deletedAt_all: Boolean
+  deletedAt_eq: DateTime
+  deletedAt_lt: DateTime
+  deletedAt_lte: DateTime
+  deletedAt_gt: DateTime
+  deletedAt_gte: DateTime
+  deletedById_eq: ID
+  deletedById_in: [ID!]
+  phantom_eq: Int
+  phantom_gt: Int
+  phantom_gte: Int
+  phantom_lt: Int
+  phantom_lte: Int
+  phantom_in: [Int!]
+  AND: [StorageBucketOperatorStatusMissingWhereInput!]
+  OR: [StorageBucketOperatorStatusMissingWhereInput!]
+}
+
+input StorageBucketOperatorStatusMissingWhereUniqueInput {
+  id: ID!
+}
+
+enum StorageBucketOrderByInput {
+  createdAt_ASC
+  createdAt_DESC
+  updatedAt_ASC
+  updatedAt_DESC
+  deletedAt_ASC
+  deletedAt_DESC
+  operatorMetadata_ASC
+  operatorMetadata_DESC
+  acceptingNewBags_ASC
+  acceptingNewBags_DESC
+  dataObjectsSizeLimit_ASC
+  dataObjectsSizeLimit_DESC
+  dataObjectCountLimit_ASC
+  dataObjectCountLimit_DESC
+}
+
+input StorageBucketUpdateInput {
+  operatorStatus: JSONObject
+  operatorMetadata: Bytes
+  acceptingNewBags: Boolean
+  dataObjectsSizeLimit: BigInt
+  dataObjectCountLimit: BigInt
+}
+
+input StorageBucketWhereInput {
+  id_eq: ID
+  id_in: [ID!]
+  createdAt_eq: DateTime
+  createdAt_lt: DateTime
+  createdAt_lte: DateTime
+  createdAt_gt: DateTime
+  createdAt_gte: DateTime
+  createdById_eq: ID
+  createdById_in: [ID!]
+  updatedAt_eq: DateTime
+  updatedAt_lt: DateTime
+  updatedAt_lte: DateTime
+  updatedAt_gt: DateTime
+  updatedAt_gte: DateTime
+  updatedById_eq: ID
+  updatedById_in: [ID!]
+  deletedAt_all: Boolean
+  deletedAt_eq: DateTime
+  deletedAt_lt: DateTime
+  deletedAt_lte: DateTime
+  deletedAt_gt: DateTime
+  deletedAt_gte: DateTime
+  deletedById_eq: ID
+  deletedById_in: [ID!]
+  operatorStatus_json: JSONObject
+  operatorMetadata_eq: Bytes
+  operatorMetadata_in: [Bytes!]
+  acceptingNewBags_eq: Boolean
+  acceptingNewBags_in: [Boolean!]
+  dataObjectsSizeLimit_eq: BigInt
+  dataObjectsSizeLimit_gt: BigInt
+  dataObjectsSizeLimit_gte: BigInt
+  dataObjectsSizeLimit_lt: BigInt
+  dataObjectsSizeLimit_lte: BigInt
+  dataObjectsSizeLimit_in: [BigInt!]
+  dataObjectCountLimit_eq: BigInt
+  dataObjectCountLimit_gt: BigInt
+  dataObjectCountLimit_gte: BigInt
+  dataObjectCountLimit_lt: BigInt
+  dataObjectCountLimit_lte: BigInt
+  dataObjectCountLimit_in: [BigInt!]
+  storedBags_none: StorageBagWhereInput
+  storedBags_some: StorageBagWhereInput
+  storedBags_every: StorageBagWhereInput
+  AND: [StorageBucketWhereInput!]
+  OR: [StorageBucketWhereInput!]
+}
+
+input StorageBucketWhereUniqueInput {
+  id: ID!
+}
+
+type StorageDataObject implements BaseGraphQLObject {
+  id: ID!
+  createdAt: DateTime!
+  createdById: String!
+  updatedAt: DateTime
+  updatedById: String
+  deletedAt: DateTime
+  deletedById: String
+  version: Int!
+
+  """
+  Whether the data object was uploaded and accepted by the storage provider
+  """
+  isAccepted: Boolean!
+
+  """Data object size in bytes"""
+  size: BigInt!
+  storageBag: StorageBag!
+  storageBagId: String!
+
+  """IPFS content hash"""
+  ipfsHash: String!
+
+  """Public key used to authenticate the uploader by the storage provider"""
+  authenticationKey: String
+}
+
+type StorageDataObjectConnection {
+  totalCount: Int!
+  edges: [StorageDataObjectEdge!]!
+  pageInfo: PageInfo!
+}
+
+input StorageDataObjectCreateInput {
+  isAccepted: Boolean!
+  size: BigInt!
+  storageBag: ID!
+  ipfsHash: String!
+  authenticationKey: String
+}
+
+type StorageDataObjectEdge {
+  node: StorageDataObject!
+  cursor: String!
+}
+
+enum StorageDataObjectOrderByInput {
+  createdAt_ASC
+  createdAt_DESC
+  updatedAt_ASC
+  updatedAt_DESC
+  deletedAt_ASC
+  deletedAt_DESC
+  isAccepted_ASC
+  isAccepted_DESC
+  size_ASC
+  size_DESC
+  storageBag_ASC
+  storageBag_DESC
+  ipfsHash_ASC
+  ipfsHash_DESC
+  authenticationKey_ASC
+  authenticationKey_DESC
+}
+
+input StorageDataObjectUpdateInput {
+  isAccepted: Boolean
+  size: BigInt
+  storageBag: ID
+  ipfsHash: String
+  authenticationKey: String
+}
+
+input StorageDataObjectWhereInput {
+  id_eq: ID
+  id_in: [ID!]
+  createdAt_eq: DateTime
+  createdAt_lt: DateTime
+  createdAt_lte: DateTime
+  createdAt_gt: DateTime
+  createdAt_gte: DateTime
+  createdById_eq: ID
+  createdById_in: [ID!]
+  updatedAt_eq: DateTime
+  updatedAt_lt: DateTime
+  updatedAt_lte: DateTime
+  updatedAt_gt: DateTime
+  updatedAt_gte: DateTime
+  updatedById_eq: ID
+  updatedById_in: [ID!]
+  deletedAt_all: Boolean
+  deletedAt_eq: DateTime
+  deletedAt_lt: DateTime
+  deletedAt_lte: DateTime
+  deletedAt_gt: DateTime
+  deletedAt_gte: DateTime
+  deletedById_eq: ID
+  deletedById_in: [ID!]
+  isAccepted_eq: Boolean
+  isAccepted_in: [Boolean!]
+  size_eq: BigInt
+  size_gt: BigInt
+  size_gte: BigInt
+  size_lt: BigInt
+  size_lte: BigInt
+  size_in: [BigInt!]
+  storageBag_eq: ID
+  storageBag_in: [ID!]
+  ipfsHash_eq: String
+  ipfsHash_contains: String
+  ipfsHash_startsWith: String
+  ipfsHash_endsWith: String
+  ipfsHash_in: [String!]
+  authenticationKey_eq: String
+  authenticationKey_contains: String
+  authenticationKey_startsWith: String
+  authenticationKey_endsWith: String
+  authenticationKey_in: [String!]
+  storageBag: StorageBagWhereInput
+  AND: [StorageDataObjectWhereInput!]
+  OR: [StorageDataObjectWhereInput!]
+}
+
+input StorageDataObjectWhereUniqueInput {
+  id: ID!
+}
+
+"""Global storage system parameters"""
+type StorageSystemParameters implements BaseGraphQLObject {
+  id: ID!
+  createdAt: DateTime!
+  createdById: String!
+  updatedAt: DateTime
+  updatedById: String
+  deletedAt: DateTime
+  deletedById: String
+  version: Int!
+
+  """Blacklisted content hashes"""
+  blacklist: [String!]!
+}
+
+type StorageSystemParametersConnection {
+  totalCount: Int!
+  edges: [StorageSystemParametersEdge!]!
+  pageInfo: PageInfo!
+}
+
+input StorageSystemParametersCreateInput {
+  blacklist: [String!]!
+}
+
+type StorageSystemParametersEdge {
+  node: StorageSystemParameters!
+  cursor: String!
+}
+
+enum StorageSystemParametersOrderByInput {
+  createdAt_ASC
+  createdAt_DESC
+  updatedAt_ASC
+  updatedAt_DESC
+  deletedAt_ASC
+  deletedAt_DESC
+}
+
+input StorageSystemParametersUpdateInput {
+  blacklist: [String!]
+}
+
+input StorageSystemParametersWhereInput {
+  id_eq: ID
+  id_in: [ID!]
+  createdAt_eq: DateTime
+  createdAt_lt: DateTime
+  createdAt_lte: DateTime
+  createdAt_gt: DateTime
+  createdAt_gte: DateTime
+  createdById_eq: ID
+  createdById_in: [ID!]
+  updatedAt_eq: DateTime
+  updatedAt_lt: DateTime
+  updatedAt_lte: DateTime
+  updatedAt_gt: DateTime
+  updatedAt_gte: DateTime
+  updatedById_eq: ID
+  updatedById_in: [ID!]
+  deletedAt_all: Boolean
+  deletedAt_eq: DateTime
+  deletedAt_lt: DateTime
+  deletedAt_lte: DateTime
+  deletedAt_gt: DateTime
+  deletedAt_gte: DateTime
+  deletedById_eq: ID
+  deletedById_in: [ID!]
+  AND: [StorageSystemParametersWhereInput!]
+  OR: [StorageSystemParametersWhereInput!]
+}
+
+input StorageSystemParametersWhereUniqueInput {
+  id: ID!
+}
+
+type Subscription {
+  stateSubscription: ProcessorState!
+}

+ 42 - 0
distributor-node/src/services/networking/query-node/queries/queries.graphql

@@ -0,0 +1,42 @@
+fragment DataObjectDetails on StorageDataObject {
+  id
+  size
+  ipfsHash
+  isAccepted
+  storageBag {
+    storedBy {
+      id
+      operatorMetadata
+      operatorStatus {
+        __typename
+      }
+    }
+    distributedBy {
+      id
+      operatorMetadata
+    }
+  }
+}
+
+query getDataObjectDetails($id: ID!) {
+  storageDataObjectByUniqueInput(where: { id: $id }) {
+    ...DataObjectDetails
+  }
+}
+
+fragment DistributionBucketsWithObjects on DistributionBucket {
+  id
+  distributedBags {
+    objects {
+      id
+      size
+      ipfsHash
+    }
+  }
+}
+
+query getDistributionBucketsWithObjects($ids: [ID!]) {
+  distributionBuckets(where: { id_in: $ids }) {
+    ...DistributionBucketsWithObjects
+  }
+}

+ 1 - 0
distributor-node/src/services/networking/runtime/api.ts

@@ -0,0 +1 @@
+export class RuntimeApi {}

+ 54 - 0
distributor-node/src/services/networking/storage-node/api.ts

@@ -0,0 +1,54 @@
+import { Configuration } from './generated'
+import { PublicApi } from './generated/api'
+import axios, { AxiosRequestConfig, AxiosResponse } from 'axios'
+import { LoggingService } from '../../logging'
+import { Logger } from 'winston'
+
+const AXIOS_TIMEOUT = 10000
+
+export class StorageNodeApi {
+  private logger: Logger
+  private publicApi: PublicApi
+  private endpoint: string
+
+  public constructor(endpoint: string, logging: LoggingService) {
+    const axiosConfig: AxiosRequestConfig = {
+      timeout: AXIOS_TIMEOUT,
+    }
+    const config = new Configuration({
+      basePath: endpoint,
+      baseOptions: axiosConfig,
+    })
+    this.publicApi = new PublicApi(config)
+    this.endpoint = new URL(endpoint).toString()
+    this.logger = logging.createLogger('StorageNodeApi')
+  }
+
+  public async isObjectAvailable(contentHash: string): Promise<boolean> {
+    const options: AxiosRequestConfig = {
+      headers: {
+        Range: 'bytes=0-0',
+      },
+    }
+    this.logger.info('Checking object availability', { endpoint: this.endpoint, contentHash })
+    try {
+      await this.publicApi.publicApiFiles(contentHash, options)
+      this.logger.info('Data object available', { contentHash, endpoint: this.endpoint })
+      return true
+    } catch (err) {
+      if (axios.isAxiosError(err)) {
+        this.logger.info('Data object not available', { err })
+        return false
+      }
+      this.logger.error('Unexpected error while requesting data object', { err })
+      throw err
+    }
+  }
+
+  public async downloadObject(contentHash: string): Promise<AxiosResponse<NodeJS.ReadableStream>> {
+    const options: AxiosRequestConfig = {
+      responseType: 'stream',
+    }
+    return this.publicApi.publicApiFiles(contentHash, options)
+  }
+}

+ 27 - 0
distributor-node/src/services/networking/storage-node/generated/.openapi-generator-ignore

@@ -0,0 +1,27 @@
+# OpenAPI Generator Ignore
+# Generated by openapi-generator https://github.com/openapitools/openapi-generator
+
+# Use this file to prevent files from being overwritten by the generator.
+# The patterns follow closely to .gitignore or .dockerignore.
+
+# As an example, the C# client generator defines ApiClient.cs.
+# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line:
+#ApiClient.cs
+
+# You can match any string of characters against a directory, file or extension with a single asterisk (*):
+#foo/*/qux
+# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux
+
+# You can recursively match patterns against a directory, file or extension with a double asterisk (**):
+#foo/**/qux
+# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux
+
+# You can also negate patterns with an exclamation (!).
+# For example, you can ignore all files in a docs folder with the file extension .md:
+#docs/*.md
+# Then explicitly reverse the ignore rule for a single file:
+#!docs/README.md
+
+git_push.sh
+.npmignore
+.gitignore

+ 5 - 0
distributor-node/src/services/networking/storage-node/generated/.openapi-generator/FILES

@@ -0,0 +1,5 @@
+api.ts
+base.ts
+common.ts
+configuration.ts
+index.ts

+ 1 - 0
distributor-node/src/services/networking/storage-node/generated/.openapi-generator/VERSION

@@ -0,0 +1 @@
+5.2.0

+ 390 - 0
distributor-node/src/services/networking/storage-node/generated/api.ts

@@ -0,0 +1,390 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Storage node API
+ * Storage node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+import { Configuration } from './configuration';
+import globalAxios, { AxiosPromise, AxiosInstance } from 'axios';
+// Some imports not used depending on template conditions
+// @ts-ignore
+import { DUMMY_BASE_URL, assertParamExists, setApiKeyToObject, setBasicAuthToObject, setBearerAuthToObject, setOAuthToObject, setSearchParams, serializeDataIfNeeded, toPathString, createRequestFunction } from './common';
+// @ts-ignore
+import { BASE_PATH, COLLECTION_FORMATS, RequestArgs, BaseAPI, RequiredError } from './base';
+
+/**
+ * 
+ * @export
+ * @interface ErrorResponse
+ */
+export interface ErrorResponse {
+    /**
+     * 
+     * @type {string}
+     * @memberof ErrorResponse
+     */
+    type?: string;
+    /**
+     * 
+     * @type {string}
+     * @memberof ErrorResponse
+     */
+    message: string;
+}
+/**
+ * 
+ * @export
+ * @interface InlineResponse201
+ */
+export interface InlineResponse201 {
+    /**
+     * 
+     * @type {string}
+     * @memberof InlineResponse201
+     */
+    status?: string;
+}
+/**
+ * 
+ * @export
+ * @interface InlineResponse2011
+ */
+export interface InlineResponse2011 {
+    /**
+     * 
+     * @type {string}
+     * @memberof InlineResponse2011
+     */
+    token?: string;
+}
+/**
+ * 
+ * @export
+ * @interface TokenRequest
+ */
+export interface TokenRequest {
+    /**
+     * 
+     * @type {TokenRequestData}
+     * @memberof TokenRequest
+     */
+    data: TokenRequestData;
+    /**
+     * 
+     * @type {string}
+     * @memberof TokenRequest
+     */
+    signature: string;
+}
+/**
+ * 
+ * @export
+ * @interface TokenRequestData
+ */
+export interface TokenRequestData {
+    /**
+     * 
+     * @type {number}
+     * @memberof TokenRequestData
+     */
+    memberId: number;
+    /**
+     * 
+     * @type {string}
+     * @memberof TokenRequestData
+     */
+    accountId: string;
+    /**
+     * 
+     * @type {number}
+     * @memberof TokenRequestData
+     */
+    dataObjectId: number;
+    /**
+     * 
+     * @type {number}
+     * @memberof TokenRequestData
+     */
+    storageBucketId: number;
+    /**
+     * 
+     * @type {string}
+     * @memberof TokenRequestData
+     */
+    bagId: string;
+}
+
+/**
+ * PublicApi - axios parameter creator
+ * @export
+ */
+export const PublicApiAxiosParamCreator = function (configuration?: Configuration) {
+    return {
+        /**
+         * Get auth token from a server.
+         * @param {TokenRequest} [tokenRequest] Token request parameters,
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicApiAuthToken: async (tokenRequest?: TokenRequest, options: any = {}): Promise<RequestArgs> => {
+            const localVarPath = `/authToken`;
+            // use dummy base URL string because the URL constructor only accepts absolute URLs.
+            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
+            let baseOptions;
+            if (configuration) {
+                baseOptions = configuration.baseOptions;
+            }
+
+            const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options};
+            const localVarHeaderParameter = {} as any;
+            const localVarQueryParameter = {} as any;
+
+
+    
+            localVarHeaderParameter['Content-Type'] = 'application/json';
+
+            setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
+            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
+            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
+            localVarRequestOptions.data = serializeDataIfNeeded(tokenRequest, localVarRequestOptions, configuration)
+
+            return {
+                url: toPathString(localVarUrlObj),
+                options: localVarRequestOptions,
+            };
+        },
+        /**
+         * Returns a media file.
+         * @param {string} cid Content ID
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicApiFiles: async (cid: string, options: any = {}): Promise<RequestArgs> => {
+            // verify required parameter 'cid' is not null or undefined
+            assertParamExists('publicApiFiles', 'cid', cid)
+            const localVarPath = `/files/{cid}`
+                .replace(`{${"cid"}}`, encodeURIComponent(String(cid)));
+            // use dummy base URL string because the URL constructor only accepts absolute URLs.
+            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
+            let baseOptions;
+            if (configuration) {
+                baseOptions = configuration.baseOptions;
+            }
+
+            const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
+            const localVarHeaderParameter = {} as any;
+            const localVarQueryParameter = {} as any;
+
+
+    
+            setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
+            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
+            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
+
+            return {
+                url: toPathString(localVarUrlObj),
+                options: localVarRequestOptions,
+            };
+        },
+        /**
+         * Upload data
+         * @param {string} dataObjectId Data object runtime ID
+         * @param {string} storageBucketId Storage bucket ID
+         * @param {string} bagId Bag ID
+         * @param {any} [file] Data file
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicApiUpload: async (dataObjectId: string, storageBucketId: string, bagId: string, file?: any, options: any = {}): Promise<RequestArgs> => {
+            // verify required parameter 'dataObjectId' is not null or undefined
+            assertParamExists('publicApiUpload', 'dataObjectId', dataObjectId)
+            // verify required parameter 'storageBucketId' is not null or undefined
+            assertParamExists('publicApiUpload', 'storageBucketId', storageBucketId)
+            // verify required parameter 'bagId' is not null or undefined
+            assertParamExists('publicApiUpload', 'bagId', bagId)
+            const localVarPath = `/upload`;
+            // use dummy base URL string because the URL constructor only accepts absolute URLs.
+            const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
+            let baseOptions;
+            if (configuration) {
+                baseOptions = configuration.baseOptions;
+            }
+
+            const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options};
+            const localVarHeaderParameter = {} as any;
+            const localVarQueryParameter = {} as any;
+            const localVarFormParams = new ((configuration && configuration.formDataCtor) || FormData)();
+
+            // authentication UploadAuth required
+            await setApiKeyToObject(localVarHeaderParameter, "x-api-key", configuration)
+
+
+            if (file !== undefined) { 
+                localVarFormParams.append('file', file as any);
+            }
+    
+            if (dataObjectId !== undefined) { 
+                localVarFormParams.append('dataObjectId', dataObjectId as any);
+            }
+    
+            if (storageBucketId !== undefined) { 
+                localVarFormParams.append('storageBucketId', storageBucketId as any);
+            }
+    
+            if (bagId !== undefined) { 
+                localVarFormParams.append('bagId', bagId as any);
+            }
+    
+    
+            localVarHeaderParameter['Content-Type'] = 'multipart/form-data';
+    
+            setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
+            let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
+            localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
+            localVarRequestOptions.data = localVarFormParams;
+
+            return {
+                url: toPathString(localVarUrlObj),
+                options: localVarRequestOptions,
+            };
+        },
+    }
+};
+
+/**
+ * PublicApi - functional programming interface
+ * @export
+ */
+export const PublicApiFp = function(configuration?: Configuration) {
+    const localVarAxiosParamCreator = PublicApiAxiosParamCreator(configuration)
+    return {
+        /**
+         * Get auth token from a server.
+         * @param {TokenRequest} [tokenRequest] Token request parameters,
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        async publicApiAuthToken(tokenRequest?: TokenRequest, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<InlineResponse2011>> {
+            const localVarAxiosArgs = await localVarAxiosParamCreator.publicApiAuthToken(tokenRequest, options);
+            return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
+        },
+        /**
+         * Returns a media file.
+         * @param {string} cid Content ID
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        async publicApiFiles(cid: string, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<any>> {
+            const localVarAxiosArgs = await localVarAxiosParamCreator.publicApiFiles(cid, options);
+            return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
+        },
+        /**
+         * Upload data
+         * @param {string} dataObjectId Data object runtime ID
+         * @param {string} storageBucketId Storage bucket ID
+         * @param {string} bagId Bag ID
+         * @param {any} [file] Data file
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        async publicApiUpload(dataObjectId: string, storageBucketId: string, bagId: string, file?: any, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<InlineResponse201>> {
+            const localVarAxiosArgs = await localVarAxiosParamCreator.publicApiUpload(dataObjectId, storageBucketId, bagId, file, options);
+            return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
+        },
+    }
+};
+
+/**
+ * PublicApi - factory interface
+ * @export
+ */
+export const PublicApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) {
+    const localVarFp = PublicApiFp(configuration)
+    return {
+        /**
+         * Get auth token from a server.
+         * @param {TokenRequest} [tokenRequest] Token request parameters,
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicApiAuthToken(tokenRequest?: TokenRequest, options?: any): AxiosPromise<InlineResponse2011> {
+            return localVarFp.publicApiAuthToken(tokenRequest, options).then((request) => request(axios, basePath));
+        },
+        /**
+         * Returns a media file.
+         * @param {string} cid Content ID
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicApiFiles(cid: string, options?: any): AxiosPromise<any> {
+            return localVarFp.publicApiFiles(cid, options).then((request) => request(axios, basePath));
+        },
+        /**
+         * Upload data
+         * @param {string} dataObjectId Data object runtime ID
+         * @param {string} storageBucketId Storage bucket ID
+         * @param {string} bagId Bag ID
+         * @param {any} [file] Data file
+         * @param {*} [options] Override http request option.
+         * @throws {RequiredError}
+         */
+        publicApiUpload(dataObjectId: string, storageBucketId: string, bagId: string, file?: any, options?: any): AxiosPromise<InlineResponse201> {
+            return localVarFp.publicApiUpload(dataObjectId, storageBucketId, bagId, file, options).then((request) => request(axios, basePath));
+        },
+    };
+};
+
+/**
+ * PublicApi - object-oriented interface
+ * @export
+ * @class PublicApi
+ * @extends {BaseAPI}
+ */
+export class PublicApi extends BaseAPI {
+    /**
+     * Get auth token from a server.
+     * @param {TokenRequest} [tokenRequest] Token request parameters,
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     * @memberof PublicApi
+     */
+    public publicApiAuthToken(tokenRequest?: TokenRequest, options?: any) {
+        return PublicApiFp(this.configuration).publicApiAuthToken(tokenRequest, options).then((request) => request(this.axios, this.basePath));
+    }
+
+    /**
+     * Returns a media file.
+     * @param {string} cid Content ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     * @memberof PublicApi
+     */
+    public publicApiFiles(cid: string, options?: any) {
+        return PublicApiFp(this.configuration).publicApiFiles(cid, options).then((request) => request(this.axios, this.basePath));
+    }
+
+    /**
+     * Upload data
+     * @param {string} dataObjectId Data object runtime ID
+     * @param {string} storageBucketId Storage bucket ID
+     * @param {string} bagId Bag ID
+     * @param {any} [file] Data file
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     * @memberof PublicApi
+     */
+    public publicApiUpload(dataObjectId: string, storageBucketId: string, bagId: string, file?: any, options?: any) {
+        return PublicApiFp(this.configuration).publicApiUpload(dataObjectId, storageBucketId, bagId, file, options).then((request) => request(this.axios, this.basePath));
+    }
+}
+
+

+ 71 - 0
distributor-node/src/services/networking/storage-node/generated/base.ts

@@ -0,0 +1,71 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Storage node API
+ * Storage node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+import { Configuration } from "./configuration";
+// Some imports not used depending on template conditions
+// @ts-ignore
+import globalAxios, { AxiosPromise, AxiosInstance } from 'axios';
+
+export const BASE_PATH = "http://localhost:3333/api/v1".replace(/\/+$/, "");
+
+/**
+ *
+ * @export
+ */
+export const COLLECTION_FORMATS = {
+    csv: ",",
+    ssv: " ",
+    tsv: "\t",
+    pipes: "|",
+};
+
+/**
+ *
+ * @export
+ * @interface RequestArgs
+ */
+export interface RequestArgs {
+    url: string;
+    options: any;
+}
+
+/**
+ *
+ * @export
+ * @class BaseAPI
+ */
+export class BaseAPI {
+    protected configuration: Configuration | undefined;
+
+    constructor(configuration?: Configuration, protected basePath: string = BASE_PATH, protected axios: AxiosInstance = globalAxios) {
+        if (configuration) {
+            this.configuration = configuration;
+            this.basePath = configuration.basePath || this.basePath;
+        }
+    }
+};
+
+/**
+ *
+ * @export
+ * @class RequiredError
+ * @extends {Error}
+ */
+export class RequiredError extends Error {
+    name: "RequiredError" = "RequiredError";
+    constructor(public field: string, msg?: string) {
+        super(msg);
+    }
+}

+ 138 - 0
distributor-node/src/services/networking/storage-node/generated/common.ts

@@ -0,0 +1,138 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Storage node API
+ * Storage node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+import { Configuration } from "./configuration";
+import { RequiredError, RequestArgs } from "./base";
+import { AxiosInstance } from 'axios';
+
+/**
+ *
+ * @export
+ */
+export const DUMMY_BASE_URL = 'https://example.com'
+
+/**
+ *
+ * @throws {RequiredError}
+ * @export
+ */
+export const assertParamExists = function (functionName: string, paramName: string, paramValue: unknown) {
+    if (paramValue === null || paramValue === undefined) {
+        throw new RequiredError(paramName, `Required parameter ${paramName} was null or undefined when calling ${functionName}.`);
+    }
+}
+
+/**
+ *
+ * @export
+ */
+export const setApiKeyToObject = async function (object: any, keyParamName: string, configuration?: Configuration) {
+    if (configuration && configuration.apiKey) {
+        const localVarApiKeyValue = typeof configuration.apiKey === 'function'
+            ? await configuration.apiKey(keyParamName)
+            : await configuration.apiKey;
+        object[keyParamName] = localVarApiKeyValue;
+    }
+}
+
+/**
+ *
+ * @export
+ */
+export const setBasicAuthToObject = function (object: any, configuration?: Configuration) {
+    if (configuration && (configuration.username || configuration.password)) {
+        object["auth"] = { username: configuration.username, password: configuration.password };
+    }
+}
+
+/**
+ *
+ * @export
+ */
+export const setBearerAuthToObject = async function (object: any, configuration?: Configuration) {
+    if (configuration && configuration.accessToken) {
+        const accessToken = typeof configuration.accessToken === 'function'
+            ? await configuration.accessToken()
+            : await configuration.accessToken;
+        object["Authorization"] = "Bearer " + accessToken;
+    }
+}
+
+/**
+ *
+ * @export
+ */
+export const setOAuthToObject = async function (object: any, name: string, scopes: string[], configuration?: Configuration) {
+    if (configuration && configuration.accessToken) {
+        const localVarAccessTokenValue = typeof configuration.accessToken === 'function'
+            ? await configuration.accessToken(name, scopes)
+            : await configuration.accessToken;
+        object["Authorization"] = "Bearer " + localVarAccessTokenValue;
+    }
+}
+
+/**
+ *
+ * @export
+ */
+export const setSearchParams = function (url: URL, ...objects: any[]) {
+    const searchParams = new URLSearchParams(url.search);
+    for (const object of objects) {
+        for (const key in object) {
+            if (Array.isArray(object[key])) {
+                searchParams.delete(key);
+                for (const item of object[key]) {
+                    searchParams.append(key, item);
+                }
+            } else {
+                searchParams.set(key, object[key]);
+            }
+        }
+    }
+    url.search = searchParams.toString();
+}
+
+/**
+ *
+ * @export
+ */
+export const serializeDataIfNeeded = function (value: any, requestOptions: any, configuration?: Configuration) {
+    const nonString = typeof value !== 'string';
+    const needsSerialization = nonString && configuration && configuration.isJsonMime
+        ? configuration.isJsonMime(requestOptions.headers['Content-Type'])
+        : nonString;
+    return needsSerialization
+        ? JSON.stringify(value !== undefined ? value : {})
+        : (value || "");
+}
+
+/**
+ *
+ * @export
+ */
+export const toPathString = function (url: URL) {
+    return url.pathname + url.search + url.hash
+}
+
+/**
+ *
+ * @export
+ */
+export const createRequestFunction = function (axiosArgs: RequestArgs, globalAxios: AxiosInstance, BASE_PATH: string, configuration?: Configuration) {
+    return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => {
+        const axiosRequestArgs = {...axiosArgs.options, url: (configuration?.basePath || basePath) + axiosArgs.url};
+        return axios.request(axiosRequestArgs);
+    };
+}

+ 101 - 0
distributor-node/src/services/networking/storage-node/generated/configuration.ts

@@ -0,0 +1,101 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Storage node API
+ * Storage node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+export interface ConfigurationParameters {
+    apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>);
+    username?: string;
+    password?: string;
+    accessToken?: string | Promise<string> | ((name?: string, scopes?: string[]) => string) | ((name?: string, scopes?: string[]) => Promise<string>);
+    basePath?: string;
+    baseOptions?: any;
+    formDataCtor?: new () => any;
+}
+
+export class Configuration {
+    /**
+     * parameter for apiKey security
+     * @param name security name
+     * @memberof Configuration
+     */
+    apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>);
+    /**
+     * parameter for basic security
+     *
+     * @type {string}
+     * @memberof Configuration
+     */
+    username?: string;
+    /**
+     * parameter for basic security
+     *
+     * @type {string}
+     * @memberof Configuration
+     */
+    password?: string;
+    /**
+     * parameter for oauth2 security
+     * @param name security name
+     * @param scopes oauth2 scope
+     * @memberof Configuration
+     */
+    accessToken?: string | Promise<string> | ((name?: string, scopes?: string[]) => string) | ((name?: string, scopes?: string[]) => Promise<string>);
+    /**
+     * override base path
+     *
+     * @type {string}
+     * @memberof Configuration
+     */
+    basePath?: string;
+    /**
+     * base options for axios calls
+     *
+     * @type {any}
+     * @memberof Configuration
+     */
+    baseOptions?: any;
+    /**
+     * The FormData constructor that will be used to create multipart form data
+     * requests. You can inject this here so that execution environments that
+     * do not support the FormData class can still run the generated client.
+     *
+     * @type {new () => FormData}
+     */
+    formDataCtor?: new () => any;
+
+    constructor(param: ConfigurationParameters = {}) {
+        this.apiKey = param.apiKey;
+        this.username = param.username;
+        this.password = param.password;
+        this.accessToken = param.accessToken;
+        this.basePath = param.basePath;
+        this.baseOptions = param.baseOptions;
+        this.formDataCtor = param.formDataCtor;
+    }
+
+    /**
+     * Check if the given MIME is a JSON MIME.
+     * JSON MIME examples:
+     *   application/json
+     *   application/json; charset=UTF8
+     *   APPLICATION/JSON
+     *   application/vnd.company+json
+     * @param mime - MIME (Multipurpose Internet Mail Extensions)
+     * @return True if the given MIME is JSON, false otherwise.
+     */
+    public isJsonMime(mime: string): boolean {
+        const jsonMime: RegExp = new RegExp('^(application\/json|[^;/ \t]+\/[^;/ \t]+[+]json)[ \t]*(;.*)?$', 'i');
+        return mime !== null && (jsonMime.test(mime) || mime.toLowerCase() === 'application/json-patch+json');
+    }
+}

+ 18 - 0
distributor-node/src/services/networking/storage-node/generated/index.ts

@@ -0,0 +1,18 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Storage node API
+ * Storage node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+export * from "./api";
+export * from "./configuration";
+

+ 93 - 0
distributor-node/src/services/server/ServerService.ts

@@ -0,0 +1,93 @@
+import express from 'express'
+import path from 'path'
+import cors from 'cors'
+import * as OpenApiValidator from 'express-openapi-validator'
+import { HttpError } from 'express-openapi-validator/dist/framework/types'
+import { ReadonlyConfig } from '../../types/config'
+import expressWinston from 'express-winston'
+import { LoggingService } from '../../services/logging'
+import { PublicApiController } from './controllers/public'
+import { StateCacheService } from '../cache/StateCacheService'
+import { NetworkingService } from '../networking'
+import { Logger } from 'winston'
+import { ContentService } from '../content/ContentService'
+
+const OPENAPI_SPEC_PATH = path.join(__dirname, '../../api-spec/openapi.yml')
+
+export class ServerService {
+  private config: ReadonlyConfig
+  private logger: Logger
+  private expressApp: express.Application
+
+  public constructor(
+    config: ReadonlyConfig,
+    stateCache: StateCacheService,
+    content: ContentService,
+    logging: LoggingService,
+    networking: NetworkingService
+  ) {
+    this.logger = logging.createLogger('ExpressServer')
+    this.config = config
+
+    const publicController = new PublicApiController(logging, networking, stateCache, content)
+
+    const app = express()
+    app.use(cors())
+    app.use(express.json())
+
+    // Request logger
+    app.use(
+      expressWinston.logger({
+        winstonInstance: this.logger,
+      })
+    )
+
+    // Setup OpenAPiValidator
+    app.use(
+      OpenApiValidator.middleware({
+        apiSpec: OPENAPI_SPEC_PATH,
+        validateApiSpec: true,
+        validateResponses: true,
+        validateRequests: true,
+      })
+    )
+
+    // Routes
+    app.use('/api/v1/asset/:objectId', publicController.asset.bind(publicController))
+
+    // Error logger
+    app.use(
+      expressWinston.errorLogger({
+        winstonInstance: this.logger,
+      })
+    )
+
+    // Error handler
+    app.use((err: HttpError, req: express.Request, res: express.Response, next: express.NextFunction) => {
+      if (res.headersSent) {
+        return next(err)
+      }
+      if (err.status && err.status >= 400 && err.status < 500) {
+        res
+          .status(err.status)
+          .json({
+            type: 'request_validation',
+            message: err.message,
+            errors: err.errors,
+          })
+          .end()
+      } else {
+        next(err)
+      }
+    })
+
+    this.expressApp = app
+  }
+
+  public start(): void {
+    const { port } = this.config
+    this.expressApp.listen(port, () => {
+      this.logger.info(`Express server started listening on port ${port}`)
+    })
+  }
+}

+ 154 - 0
distributor-node/src/services/server/controllers/public.ts

@@ -0,0 +1,154 @@
+import * as express from 'express'
+import { Logger } from 'winston'
+import send from 'send'
+import { StateCacheService } from '../../../services/cache/StateCacheService'
+import { NetworkingService } from '../../../services/networking'
+import { ErrorResponse, RouteParams } from '../../../types/api'
+import { LoggingService } from '../../logging'
+import { ContentService, DEFAULT_CONTENT_TYPE } from '../../content/ContentService'
+
+export class PublicApiController {
+  private logger: Logger
+  private networking: NetworkingService
+  private stateCache: StateCacheService
+  private content: ContentService
+
+  public constructor(
+    logging: LoggingService,
+    networking: NetworkingService,
+    stateCache: StateCacheService,
+    content: ContentService
+  ) {
+    this.logger = logging.createLogger('PublicApiController')
+    this.networking = networking
+    this.stateCache = stateCache
+    this.content = content
+  }
+
+  private serveAvailableAsset(req: express.Request, res: express.Response, contentHash: string): void {
+    // TODO: FIXME: Actually check if we are still supposed to serve it and just remove after responding if not
+    this.stateCache.useContent(contentHash)
+
+    const path = this.content.path(contentHash)
+    const stream = send(req, path)
+    const mimeType = this.stateCache.getContentMimeType(contentHash)
+
+    stream.on('headers', (res) => {
+      res.setHeader('content-disposition', 'inline')
+      res.setHeader('content-type', mimeType || DEFAULT_CONTENT_TYPE)
+    })
+
+    stream.on('error', (err) => {
+      this.logger.error('SendStream error while trying to serve an asset', { err })
+      // General error
+      const statusCode = err.status || 500
+      const errorRes: ErrorResponse = {
+        type: 'sendstream_error',
+        message: err.toString(),
+      }
+
+      res.status(statusCode).json(errorRes)
+    })
+
+    stream.pipe(res)
+  }
+
+  private async servePendingDownloadAsset(req: express.Request, res: express.Response, contentHash: string) {
+    let closed = false
+    req.on('close', () => {
+      closed = true
+    })
+    const pendingDownload = this.stateCache.getPendingDownload(contentHash)
+    if (!pendingDownload) {
+      throw new Error('Trying to serve pending download asset that is not pending download!')
+    }
+    const { objectSize } = pendingDownload
+    const mimeType = this.stateCache.getContentMimeType(contentHash)
+    const requestedRanges = req.range(objectSize)
+    const range =
+      Array.isArray(requestedRanges) && requestedRanges.type === 'bytes' && requestedRanges.length === 1
+        ? requestedRanges[0]
+        : null
+    const start = range?.start || 0
+    const end = range?.end || objectSize
+
+    res.status(range ? 206 : 200)
+    res.setHeader('content-disposition', 'inline')
+    res.setHeader('content-type', mimeType || DEFAULT_CONTENT_TYPE)
+    res.setHeader('content-length', end - start + 1)
+    if (range) {
+      res.setHeader('content-range', `bytes ${start}-${end}/${objectSize}`)
+    }
+    const stream = this.content.createContinousReadStream(contentHash, { start, end })
+    let chunk = null
+    while ((chunk = await stream.readChunk()) !== null) {
+      if (closed) {
+        break
+      } else {
+        res.write(chunk)
+      }
+    }
+    res.end()
+  }
+
+  public async asset(req: express.Request<RouteParams<'public.asset'>>, res: express.Response): Promise<void> {
+    req.on('close', () => {
+      res.end()
+    })
+    // TODO: objectId validation
+    const objectId = req.params.objectId
+    const contentHash = this.stateCache.getObjectContentHash(objectId)
+    const pendingDownload = contentHash && this.stateCache.getPendingDownload(contentHash)
+
+    this.logger.verbose('Data object state', { contentHash, pendingDownload })
+
+    if (contentHash && !pendingDownload && this.content.exists(contentHash)) {
+      this.logger.info('Requested file found in filesystem', { path: this.content.path(contentHash) })
+      this.stateCache.useContent(contentHash)
+      this.serveAvailableAsset(req, res, contentHash)
+    } else if (contentHash && pendingDownload) {
+      this.logger.info('Requested file is in pending download state', { path: this.content.path(contentHash) })
+      this.servePendingDownloadAsset(req, res, contentHash)
+    } else {
+      this.logger.info('Requested file not found in filesystem')
+      const objectInfo = await this.networking.dataObjectInfo(objectId)
+      if (!objectInfo.exists) {
+        const errorRes: ErrorResponse = {
+          message: 'Data object does not exist',
+        }
+        res.status(404).json(errorRes)
+      } else if (!objectInfo.isSupported) {
+        const errorRes: ErrorResponse = {
+          message: 'Data object not served by this node',
+        }
+        res.status(400).json(errorRes)
+        // TODO: Redirect to other node that supports it?
+      } else {
+        const { data: objectData } = objectInfo
+        if (!objectData) {
+          throw new Error('Missing data object data')
+        }
+        const { contentHash } = objectData
+        const downloadResponse = await this.networking.downloadDataObject(objectData)
+        if (!downloadResponse) {
+          // Object should be already in pending download
+          this.servePendingDownloadAsset(req, res, contentHash)
+          return
+        }
+        const fileStream = this.content.createWriteStream(contentHash)
+        const { data, headers } = downloadResponse
+        fileStream.on('ready', () => {
+          // TODO: Determine mimeType by chunk processing if header not send?
+          const mimeType = headers['content-type'] || DEFAULT_CONTENT_TYPE
+          this.stateCache.setContentMimeType(contentHash, mimeType)
+          data.pipe(fileStream)
+          this.servePendingDownloadAsset(req, res, contentHash)
+        })
+        fileStream.on('finish', () => {
+          // TODO: Validate file?
+          this.stateCache.dropPendingDownload(contentHash)
+        })
+      }
+    }
+  }
+}

+ 3 - 0
distributor-node/src/types/api.ts

@@ -0,0 +1,3 @@
import { components, operations } from './generated/OpenApi'
// Path-parameter shape of a named OpenAPI operation,
// e.g. RouteParams<'public.asset'> = { objectId: string }
export type RouteParams<Name extends keyof operations> = operations[Name]['parameters']['path']
// JSON body used for API error responses: optional `type` tag + `message`
export type ErrorResponse = components['schemas']['ErrorResponse']

+ 1 - 0
distributor-node/src/types/common.ts

@@ -0,0 +1 @@
+export type DeepReadonly<T> = { readonly [K in keyof T]: DeepReadonly<T[K]> }

+ 5 - 0
distributor-node/src/types/config.ts

@@ -0,0 +1,5 @@
import { ConfigJson } from './generated/ConfigJson'
import { DeepReadonly } from './common'

// Runtime config shape, generated from the JSON schema in src/validation/schemas
export type Config = ConfigJson
// Immutable view of the config that is passed around the services
export type ReadonlyConfig = DeepReadonly<Config>

+ 23 - 0
distributor-node/src/types/dataObject.ts

@@ -0,0 +1,23 @@
// Storage/distributor endpoints from which a given data object can be fetched.
export type DataObjectAccessPoints = {
  storageNodes: {
    bucketId: string
    endpoint: string
  }[]
  distributorNodes: {
    bucketId: string
    endpoint: string
  }[]
}

// Core metadata of a data object; `contentHash` doubles as the local cache key.
export type DataObjectData = {
  objectId: string
  size: number
  contentHash: string
  accessPoints?: DataObjectAccessPoints
}

// Lookup result: whether the object exists at all and whether this node serves
// it (the public controller maps these to 404/400 respectively); `data` is
// only expected to be set when both flags allow serving.
export type DataObjectInfo = {
  exists: boolean
  isSupported: boolean
  data?: DataObjectData
}

+ 27 - 0
distributor-node/src/types/generated/ConfigJson.d.ts

@@ -0,0 +1,27 @@
+/* tslint:disable */
+/**
+ * This file was automatically generated by json-schema-to-typescript.
+ * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file,
+ * and run json-schema-to-typescript to regenerate this file.
+ */
+
// NOTE(review): generated from src/validation/schemas/configSchema.ts via
// json-schema-to-typescript (see src/validation/generateTypes.ts) —
// regenerate rather than editing by hand.
export interface ConfigJson {
  endpoints: {
    queryNode: string
    substrateNode: string
  }
  directories: {
    data: string
    cache: string
    logs: string
  }
  // Per-transport minimum log levels (winston npm level names)
  log?: {
    file?: 'error' | 'warn' | 'info' | 'http' | 'verbose' | 'debug' | 'silly'
    console?: 'error' | 'warn' | 'info' | 'http' | 'verbose' | 'debug' | 'silly'
    [k: string]: unknown
  }
  // Port the public HTTP API server listens on
  port: number
  // Non-empty tuples: at least one key / one bucket id required by the schema
  keys: [string, ...string[]]
  buckets: [number, ...number[]]
  [k: string]: unknown
}

+ 66 - 0
distributor-node/src/types/generated/OpenApi.ts

@@ -0,0 +1,66 @@
+/**
+ * This file was auto-generated by openapi-typescript.
+ * Do not make direct changes to the file.
+ */
+
// NOTE(review): auto-generated by openapi-typescript from the OpenAPI spec
// (see file header) — regenerate instead of editing manually.
export interface paths {
  '/asset/{objectId}': {
    /** Returns a media file. */
    'get': operations['public.asset']
  }
}

export interface components {
  schemas: {
    // Shared error payload: optional machine-readable `type` + human-readable `message`
    'ErrorResponse': {
      'type'?: string
      'message': string
    }
  }
}

export interface operations {
  /** Returns a media file. */
  'public.asset': {
    parameters: {
      path: {
        /** Data Object ID */
        'objectId': string
      }
    }
    responses: {
      /** Full available object data sent */
      200: {
        content: {
          'image/*': string
          'audio/*': string
          'video/*': string
        }
      }
      /** Requested partial object data sent */
      206: {
        content: {
          'image/*': string
          'audio/*': string
          'video/*': string
        }
      }
      /** Invalid request. Data object not supported. */
      400: {
        content: {
          'application/json': components['schemas']['ErrorResponse']
        }
      }
      /** Data object does not exist. */
      404: {
        content: {
          'application/json': components['schemas']['ErrorResponse']
        }
      }
      /** Unexpected server error */
      500: unknown
    }
  }
}

export interface external {}

+ 2 - 0
distributor-node/src/types/index.ts

@@ -0,0 +1,2 @@
+export * from './config'
+export * from './common'

+ 11 - 0
distributor-node/src/validation/generateTypes.ts

@@ -0,0 +1,11 @@
+import fs from 'fs'
+import path from 'path'
+import { compile } from 'json-schema-to-typescript'
+import { configSchema } from './schemas'
+
+// eslint-disable-next-line @typescript-eslint/no-var-requires
+const prettierConfig = require('@joystream/prettier-config')
+
+compile(configSchema, 'ConfigJson', { style: prettierConfig }).then((output) =>
+  fs.writeFileSync(path.resolve(__dirname, '../types/generated/ConfigJson.d.ts'), output)
+)

+ 33 - 0
distributor-node/src/validation/schemas/configSchema.ts

@@ -0,0 +1,33 @@
+import { JSONSchema4 } from 'json-schema'
+import { strictObject } from './utils'
+import winston from 'winston'
+
// JSON schema used to validate the distributor node's config file.
// The ConfigJson type is generated from this schema (src/validation/generateTypes.ts),
// so regenerate types after changing it.
export const configSchema: JSONSchema4 = {
  type: 'object',
  required: ['endpoints', 'directories', 'buckets', 'keys', 'port'],
  additionalProperties: false,
  properties: {
    endpoints: strictObject({
      queryNode: { type: 'string' },
      substrateNode: { type: 'string' },
    }),
    directories: strictObject({
      data: { type: 'string' },
      cache: { type: 'string' },
      logs: { type: 'string' },
    }),
    log: {
      type: 'object',
      additionalProperties: false,
      properties: {
        // Accepted values are winston's npm log level names (error..silly)
        file: { type: 'string', enum: Object.keys(winston.config.npm.levels) },
        console: { type: 'string', enum: Object.keys(winston.config.npm.levels) },
      },
    },
    port: { type: 'number' },
    // minItems: 1 — both lists must be non-empty
    keys: { type: 'array', items: { type: 'string' }, minItems: 1 },
    buckets: { type: 'array', items: { type: 'number' }, minItems: 1 },
  },
}

export default configSchema

+ 1 - 0
distributor-node/src/validation/schemas/index.ts

@@ -0,0 +1 @@
+export { configSchema } from './configSchema'

+ 10 - 0
distributor-node/src/validation/schemas/utils.ts

@@ -0,0 +1,10 @@
+import { JSONSchema4 } from 'json-schema'
+
+export function strictObject(properties: Exclude<JSONSchema4['properties'], undefined>): JSONSchema4 {
+  return {
+    type: 'object',
+    additionalProperties: false,
+    required: Object.keys(properties),
+    properties,
+  }
+}

+ 17 - 0
distributor-node/test/commands/hello.test.ts

@@ -0,0 +1,17 @@
+import { expect, test } from '@oclif/test'
+
// NOTE(review): this appears to be the untouched oclif project-template test —
// there is no `hello` command under src/commands in this commit (only `start`),
// so this suite presumably fails. Confirm and replace with real command tests
// or remove.
describe('hello', () => {
  test
    .stdout()
    .command(['hello'])
    .it('runs hello', (ctx) => {
      expect(ctx.stdout).to.contain('hello world')
    })

  test
    .stdout()
    .command(['hello', '--name', 'jeff'])
    .it('runs hello --name jeff', (ctx) => {
      expect(ctx.stdout).to.contain('hello jeff')
    })
})

+ 5 - 0
distributor-node/test/mocha.opts

@@ -0,0 +1,5 @@
+--require ts-node/register
+--watch-extensions ts
+--recursive
+--reporter spec
+--timeout 5000

+ 7 - 0
distributor-node/test/tsconfig.json

@@ -0,0 +1,7 @@
+{
+  "extends": "../tsconfig",
+  "compilerOptions": {
+    "noEmit": true
+  },
+  "references": [{ "path": ".." }]
+}

+ 28 - 0
distributor-node/tsconfig.json

@@ -0,0 +1,28 @@
+{
+  "compilerOptions": {
+    "declaration": true,
+    "importHelpers": true,
+    "module": "commonjs",
+    "outDir": "lib",
+    "rootDir": "src",
+    "strict": true,
+    "target": "es2017",
+    "skipLibCheck": true,
+    "baseUrl": ".",
+    "esModuleInterop": true,
+    "types" : [ "node", "mocha" ],
+    "resolveJsonModule": true,
+    "strictNullChecks": true,
+    "noUnusedLocals": false, // FIXME: Temporarily disabled during initial development
+    "noUnusedParameters": false, // FIXME: Temporarily disabled during initial development
+    "paths": {
+      "@polkadot/types/augment": ["../types/augment-codec/augment-types.ts"],
+      "@polkadot/api/augment": ["../types/augment-codec/augment-api.ts"]
+    },
+  },
+  "include": [
+    "src/**/*"
+  ]
+}
+
+

+ 1 - 0
package.json

@@ -18,6 +18,7 @@
     "types",
     "storage-node-v2",
     "storage-node",
+    "distributor-node",
     "storage-node/packages/*",
     "devops/eslint-config",
     "devops/prettier-config",

Fichier diff supprimé car celui-ci est trop grand
+ 554 - 9
yarn.lock


Certains fichiers n'ont pas été affichés car il y a eu trop de fichiers modifiés dans ce diff