
Merge pull request #2675 from shamil-gadelshin/storage_node_v2_sync5

Storage node v2: p2p sync, colossus.Dockerfile, and docker-compose.yml.

No PR review!! For devops purposes only.
shamil-gadelshin 3 years ago
commit 5775bc2b53
72 changed files with 9618 additions and 821 deletions
   1. .dockerignore (+1 -0)
   2. .env (+5 -7)
   3. Cargo.lock (+3 -0)
   4. apps.Dockerfile (+11 -3)
   5. colossus.Dockerfile (+23 -7)
   6. docker-compose.yml (+23 -42)
   7. metadata-protobuf/package.json (+1 -1)
   8. query-node/build.sh (+0 -1)
   9. query-node/kill-img.sh (+17 -0)
  10. query-node/package.json (+4 -4)
  11. query-node/start-img.sh (+23 -0)
  12. query-node/start.sh (+8 -2)
  13. runtime-modules/common/src/lib.rs (+1 -11)
  14. runtime-modules/common/src/storage.rs (+7 -13)
  15. runtime-modules/common/src/working_group.rs (+4 -0)
  16. runtime-modules/content/Cargo.toml (+10 -2)
  17. runtime-modules/content/src/errors.rs (+14 -1)
  18. runtime-modules/content/src/lib.rs (+325 -217)
  19. runtime-modules/content/src/permissions/mod.rs (+3 -4)
  20. runtime-modules/content/src/tests/channels.rs (+307 -43)
  21. runtime-modules/content/src/tests/mock.rs (+364 -30)
  22. runtime-modules/content/src/tests/videos.rs (+148 -17)
  23. runtime-modules/storage/src/lib.rs (+9 -3)
  24. runtime-modules/storage/src/tests/mocks.rs (+1 -0)
  25. runtime/src/lib.rs (+1 -34)
  26. storage-node-v2/.eslintignore (+1 -0)
  27. storage-node-v2/.eslintrc.js (+1 -0)
  28. storage-node-v2/README.md (+104 -69)
  29. storage-node-v2/package.json (+37 -6)
  30. storage-node-v2/scripts/generate-test-data.ts (+200 -0)
  31. storage-node-v2/scripts/operatorMetadata.json (+12 -0)
  32. storage-node-v2/src/api-spec/openapi.yaml (+94 -6)
  33. storage-node-v2/src/command-base/ApiCommandBase.ts (+40 -16)
  34. storage-node-v2/src/command-base/ExitCodes.ts (+2 -0)
  35. storage-node-v2/src/commands/dev/sync.ts (+65 -0)
  36. storage-node-v2/src/commands/dev/verify-bag-id.ts (+5 -23)
  37. storage-node-v2/src/commands/leader/update-bag.ts (+6 -18)
  38. storage-node-v2/src/commands/leader/update-dynamic-bag-policy.ts (+1 -1)
  39. storage-node-v2/src/commands/operator/set-metadata.ts (+20 -8)
  40. storage-node-v2/src/commands/server.ts (+156 -3)
  41. storage-node-v2/src/services/helpers/bagTypes.ts (+38 -27)
  42. storage-node-v2/src/services/helpers/tokenNonceKeeper.ts (+2 -2)
  43. storage-node-v2/src/services/logger.ts (+110 -12)
  44. storage-node-v2/src/services/metadata/generateTypes.ts (+20 -0)
  45. storage-node-v2/src/services/metadata/generated/OperatorMetadataJson.d.ts (+19 -0)
  46. storage-node-v2/src/services/metadata/schemas/index.ts (+12 -0)
  47. storage-node-v2/src/services/metadata/schemas/operatorMetadataSchema.ts (+29 -0)
  48. storage-node-v2/src/services/metadata/validationService.ts (+36 -0)
  49. storage-node-v2/src/services/queryNode/api.ts (+170 -0)
  50. storage-node-v2/src/services/queryNode/codegen.yml (+33 -0)
  51. storage-node-v2/src/services/queryNode/generated/queries.ts (+99 -0)
  52. storage-node-v2/src/services/queryNode/generated/schema.ts (+4752 -0)
  53. storage-node-v2/src/services/queryNode/queries/queries.graphql (+49 -0)
  54. storage-node-v2/src/services/runtime/api.ts (+27 -18)
  55. storage-node-v2/src/services/runtime/extrinsics.ts (+7 -2)
  56. storage-node-v2/src/services/runtime/transactionNonceKeeper.ts (+32 -5)
  57. storage-node-v2/src/services/sync/remoteStorageData.ts (+63 -0)
  58. storage-node-v2/src/services/sync/storageObligations.ts (+241 -0)
  59. storage-node-v2/src/services/sync/synchronizer.ts (+155 -0)
  60. storage-node-v2/src/services/sync/tasks.ts (+153 -0)
  61. storage-node-v2/src/services/sync/workingProcess.ts (+129 -0)
  62. storage-node-v2/src/services/webApi/app.ts (+100 -25)
  63. storage-node-v2/src/services/webApi/controllers/common.ts (+161 -0)
  64. storage-node-v2/src/services/webApi/controllers/publicApi.ts (+56 -125)
  65. storage-node-v2/src/services/webApi/controllers/stateApi.ts (+182 -0)
  66. storage-node-v2/tsconfig.json (+1 -0)
  67. types/augment-codec/augment-api-tx.ts (+5 -5)
  68. types/augment/all/defs.json (+2 -0)
  69. types/augment/all/types.ts (+2 -0)
  70. types/augment/augment-api-tx.ts (+5 -5)
  71. types/src/common.ts (+2 -1)
  72. yarn.lock (+869 -2)

+ 1 - 0
.dockerignore

@@ -7,3 +7,4 @@ query-node/lib
 cli/
 tests/
 devops/
+metadata-protobuf/lib

+ 5 - 7
.env

@@ -2,21 +2,18 @@ COMPOSE_PROJECT_NAME=joystream
 PROJECT_NAME=query_node
 
 # We will use a single postgres service with multiple databases
-# The env variables below are by default used by all services and should be 
+# The env variables below are by default used by all services and should be
 # overridden in local env files
 # DB config
 INDEXER_DB_NAME=query_node_indexer
 DB_NAME=query_node_processor
 DB_USER=postgres
 DB_PASS=postgres
-DB_HOST=localhost
+DB_HOST=db
 DB_PORT=5432
 DEBUG=index-builder:*
 TYPEORM_LOGGING=error
 
-DEBUG=index-builder:*
-TYPEORM_LOGGING=error
-
 ###########################
 #    Indexer options      #
 ###########################
@@ -30,9 +27,10 @@ BLOCK_HEIGHT=0
 ###############################
 
 GRAPHQL_SERVER_PORT=4002
-GRAPHQL_SERVER_HOST=localhost
+GRAPHQL_SERVER_HOST=graphql-server
+
 WARTHOG_APP_PORT=4002
-WARTHOG_APP_HOST=localhost
+WARTHOG_APP_HOST=hydra-indexer-gateway
 
 # Default configuration is to use the docker container
 WS_PROVIDER_ENDPOINT_URI=ws://joystream-node:9944/

+ 3 - 0
Cargo.lock

@@ -3828,6 +3828,9 @@ dependencies = [
  "frame-system",
  "pallet-balances",
  "pallet-common",
+ "pallet-membership",
+ "pallet-randomness-collective-flip",
+ "pallet-storage",
  "pallet-timestamp",
  "parity-scale-codec",
  "serde",

+ 11 - 3
apps.Dockerfile

@@ -1,17 +1,25 @@
+FROM mikefarah/yq as manifest-maker
+# Change metadata.source in manifest file. It's not possible to override it via flag/env.
+USER root
+ARG WS_PROVIDER_ENDPOINT_URI
+COPY ./query-node/manifest.yml /joystream/qn-manifest.yml
+RUN yq e -i ".typegen.metadata.source = \"$WS_PROVIDER_ENDPOINT_URI\"" /joystream/qn-manifest.yml
+
 FROM --platform=linux/x86-64 node:14 as builder
 
 WORKDIR /joystream
 COPY . /joystream
-RUN  rm -fr /joystream/pioneer
+COPY --from=manifest-maker /joystream/qn-manifest.yml /joystream/query-node/manifest.yml
+
+RUN rm -fr /joystream/pioneer
 
 # Do not set NODE_ENV=production until after running yarn install
 # to ensure dev dependencies are installed.
 RUN yarn --frozen-lockfile
 
 RUN yarn workspace @joystream/types build
-RUN yarn workspace @joystream/content-metadata-protobuf build:ts
+RUN yarn workspace @joystream/metadata-protobuf build
 RUN yarn workspace query-node-root build
-RUN yarn workspace storage-node build
 
 # Second stage to reduce image size, enable it when
 # all packages have correctly identified what is a devDependency and what is not.
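
For reference, the WS_PROVIDER_ENDPOINT_URI build argument consumed by the manifest-maker stage is normally supplied via docker-compose (see the args: entries in docker-compose.yml below). A standalone build would look roughly like this sketch, with an illustrative endpoint value:

    # Bake the node endpoint into the query-node manifest and build the apps image
    docker build . -f apps.Dockerfile \
      --build-arg WS_PROVIDER_ENDPOINT_URI=ws://joystream-node:9944 \
      -t joystream/apps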

+ 23 - 7
colossus.Dockerfile

@@ -2,15 +2,31 @@ FROM --platform=linux/x86-64 node:14 as builder
 
 WORKDIR /joystream
 COPY . /joystream
-RUN  rm -fr /joystream/pioneer
 
-EXPOSE 3001
-
-RUN yarn --frozen-lockfile
+RUN yarn
 
 RUN yarn workspace @joystream/types build
-RUN yarn workspace storage-node build
+RUN yarn workspace storage-node-v2 build
 
-RUN yarn
+# Use these volumes to persist uploading data and to pass the keyfile.
+VOLUME ["/data", "/keystore"]
+
+# Required variables
+ENV WS_PROVIDER_ENDPOINT_URI=ws://not-set
+ENV COLOSSUS_PORT=3333
+ENV QUERY_NODE_HOST=not-set
+ENV WORKER_ID=not-set
+# - set external key file using the `/keystore` volume
+ENV ACCOUNT_KEYFILE=
+ENV ACCOUNT_PWD=
+# Optional variables
+ENV SYNC_INTERVAL=1
+ENV ELASTIC_SEARCH_HOST=
+# - overrides account key file
+ENV ACCOUNT_URI=
+
+# Colossus node port
+EXPOSE ${COLOSSUS_PORT}
 
-ENTRYPOINT yarn colossus --dev --ws-provider $WS_PROVIDER_ENDPOINT_URI
+WORKDIR /joystream/storage-node-v2
+ENTRYPOINT yarn storage-node server --queryNodeHost ${QUERY_NODE_HOST} --port ${COLOSSUS_PORT} --uploads /data --worker ${WORKER_ID} --apiUrl ${WS_PROVIDER_ENDPOINT_URI} --sync --syncInterval=${SYNC_INTERVAL} --keyFile=${ACCOUNT_KEYFILE} --elasticSearchHost=${ELASTIC_SEARCH_HOST}
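
A rough sketch of running the resulting image outside of docker-compose, matching the ENV and VOLUME declarations above (host paths and the keyfile name are illustrative, not part of this commit):

    # /data and /keystore back the volumes declared in the Dockerfile
    docker run -d --name colossus \
      -p 127.0.0.1:3333:3333 \
      -v /srv/colossus/data:/data \
      -v /srv/colossus/keystore:/keystore \
      -e WS_PROVIDER_ENDPOINT_URI=ws://joystream-node:9944 \
      -e QUERY_NODE_HOST=graphql-server-mnt:8081 \
      -e WORKER_ID=0 \
      -e ACCOUNT_KEYFILE=/keystore/operator-key.json \
      -e ACCOUNT_PWD=changeme \
      joystream/colossus:latest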

+ 23 - 42
docker-compose.yml

@@ -18,40 +18,27 @@ services:
       - "127.0.0.1:9944:9944"
       - "127.0.0.1:9933:9933"
 
-  ipfs:
-    image: ipfs/go-ipfs:latest
-    ports:
-      - '127.0.0.1:5001:5001'
-      - '127.0.0.1:8080:8080'
-    volumes:
-      - /data/ipfs
-    entrypoint: ''
-    command: |
-      /bin/sh -c "
-        set -e
-        /usr/local/bin/start_ipfs config profile apply lowpower
-        /usr/local/bin/start_ipfs config --json Gateway.PublicGateways '{\"localhost\": null }'
-        /sbin/tini -- /usr/local/bin/start_ipfs daemon --migrate=true
-      "
-
   colossus:
-    image: joystream/apps
+    image: joystream/colossus:latest
     restart: on-failure
-    depends_on:
-      - "ipfs"
     build:
       context: .
-      dockerfile: apps.Dockerfile
+      dockerfile: colossus.Dockerfile
+    depends_on:
+      - graphql-server-mnt
+    volumes:
+      - /data
+      - /keystore
+    ports:
+      - '127.0.0.1:3333:3333'
     env_file:
       # relative to working directory where docker-compose was run from
       - .env
     environment:
-      - WS_PROVIDER_ENDPOINT_URI=${WS_PROVIDER_ENDPOINT_URI}
-    ports:
-      - '127.0.0.1:3001:3001'
-    command: colossus --dev --ws-provider ${WS_PROVIDER_ENDPOINT_URI} --ipfs-host ipfs
-    environment:
-      - DEBUG=*
+      - COLOSSUS_PORT=3333
+      - QUERY_NODE_HOST=graphql-server-mnt:8081
+      - WORKER_ID=0
+      - ACCOUNT_URI=//Alice
 
   db:
     image: postgres:12
@@ -74,12 +61,12 @@ services:
     build:
       context: .
       dockerfile: apps.Dockerfile
+      network: joystream_default
+      args:
+        - WS_PROVIDER_ENDPOINT_URI=${WS_PROVIDER_ENDPOINT_URI}
     env_file:
       # relative to working directory where docker-compose was run from
       - .env
-    environment:
-      - DB_HOST=db
-      - DB_NAME=${DB_NAME}
     ports:
       - "127.0.0.1:8081:${GRAPHQL_SERVER_PORT}"
     depends_on:
@@ -92,9 +79,6 @@ services:
     env_file:
       # relative to working directory where docker-compose was run from
       - .env
-    environment:
-      - DB_HOST=db
-      - DB_NAME=${DB_NAME}
     ports:
       - "127.0.0.1:8081:${GRAPHQL_SERVER_PORT}"
     depends_on:
@@ -112,15 +96,17 @@ services:
     build:
       context: .
       dockerfile: apps.Dockerfile
+      network: joystream_default
+      args:
+        - WS_PROVIDER_ENDPOINT_URI=${WS_PROVIDER_ENDPOINT_URI}
     env_file:
       # relative to working directory where docker-compose was run from
       - .env
     environment:
       - INDEXER_ENDPOINT_URL=http://hydra-indexer-gateway:${WARTHOG_APP_PORT}/graphql
-      - TYPEORM_HOST=db
+      - TYPEORM_HOST=${DB_HOST}
       - TYPEORM_DATABASE=${DB_NAME}
-      - DEBUG=index-builder:*
-      - WS_PROVIDER_ENDPOINT_URI=ws://joystream-node:9944
+      - WS_PROVIDER_ENDPOINT_URI=${WS_PROVIDER_ENDPOINT_URI}
     volumes:
       - ./types/augment/all/defs.json:/joystream/query-node/mappings/lib/generated/types/typedefs.json
     depends_on:
@@ -135,10 +121,8 @@ services:
       - .env
     environment:
       - INDEXER_ENDPOINT_URL=http://hydra-indexer-gateway:${WARTHOG_APP_PORT}/graphql
-      - TYPEORM_HOST=db
+      - TYPEORM_HOST=${DB_HOST}
       - TYPEORM_DATABASE=${DB_NAME}
-      - DEBUG=index-builder:*
-      - WS_PROVIDER_ENDPOINT_URI=ws://joystream-node:9944
     depends_on:
       - hydra-indexer-gateway
     volumes:
@@ -155,12 +139,9 @@ services:
       # relative to working directory where docker-compose was run from
       - .env
     environment:
-      - DB_HOST=db
       - DB_NAME=${INDEXER_DB_NAME}
       - INDEXER_WORKERS=5
       - REDIS_URI=redis://redis:6379/0
-      - DEBUG=index-builder:*
-      - WS_PROVIDER_ENDPOINT_URI=${WS_PROVIDER_ENDPOINT_URI}
       - TYPES_JSON=types.json
     depends_on:
       - db
@@ -187,7 +168,7 @@ services:
       - PORT=${WARTHOG_APP_PORT}
       - DEBUG=*
     ports:
-      - "127.0.0.1:4000:4002"
+      - "127.0.0.1:4000:${WARTHOG_APP_PORT}"
     depends_on:
       - redis
       - db
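
With these changes in place, a minimal bring-up of the new storage node from the compose file should reduce to the following (depends_on starts graphql-server-mnt automatically):

    docker-compose build colossus
    docker-compose up -d colossus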

+ 1 - 1
metadata-protobuf/package.json

@@ -20,7 +20,7 @@
   "license": "MIT",
   "private": false,
   "scripts": {
-    "build": "yarn compile && tsc",
+    "build": "yarn compile && rm -rf lib && tsc",
     "compile": "yarn ts-node ./scripts/compile.ts",
     "generate-doc": "./generate-md-doc.sh",
     "test": "env TS_NODE_COMPILER_OPTIONS='{\"module\": \"commonjs\" }' mocha --inline-diffs -r ts-node/register 'test/**/*.ts'",

+ 0 - 1
query-node/build.sh

@@ -9,7 +9,6 @@ set -a
 . ../.env
 set +a
 
-# only use this when new Hydra releases and contents of `generated/` folder needs to be refreshed
 yarn clean
 yarn codegen:noinstall
 yarn typegen # if this fails try to run this command outside of yarn workspaces

+ 17 - 0
query-node/kill-img.sh

@@ -0,0 +1,17 @@
+#!/usr/bin/env bash
+set -e
+
+SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")"
+cd $SCRIPT_PATH
+
+set -a
+. ../.env
+set +a
+
+# Only remove query-node related services
+docker-compose rm -vsf processor
+docker-compose rm -vsf graphql-server
+docker-compose rm -vsf indexer
+docker-compose rm -vsf hydra-indexer-gateway
+docker-compose rm -vsf redis
+docker-compose rm -vsf db

+ 4 - 4
query-node/package.json

@@ -28,10 +28,10 @@
     "typegen:configure": "NODE_URL=${NODE_URL:-ws://localhost:9000} envsub typegen.template.yml typegen.yml",
     "typegen": "rm -rf ./mappings/generated && hydra-typegen typegen manifest.yml --debug",
     "mappings:build": "yarn workspace query-node-mappings build",
-    "docker:build": "docker build . -f docker/Dockerfile.hydra -t hydra-kit:latest",
-    "docker:db:up": "(cd ../ && docker-compose up -d db)",
-    "docker:db:migrate": "docker run --env-file .env --env DB_HOST=db --env TYPEORM_HOST=db --network container:${PWD##*/}_db_1 hydra-kit:latest yarn db:migrate",
-    "docker:up": "docker-compose up -d"
+    "start:dev": "./start.sh",
+    "start": "./start-img.sh",
+    "kill:dev": "./kill.sh",
+    "kill": "./kill-img.sh"
   },
   "author": "",
   "license": "ISC",

+ 23 - 0
query-node/start-img.sh

@@ -0,0 +1,23 @@
+#!/usr/bin/env bash
+set -e
+
+SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")"
+cd $SCRIPT_PATH
+
+set -a
+. ../.env
+set +a
+
+# Start the joystream-node first to allow fetching Olympia metadata during build (typegen)
+docker-compose up -d joystream-node
+
+# Bring up db
+docker-compose up -d db
+
+# Setup the db
+docker run --rm --env-file ../.env --network joystream_default joystream/apps workspace query-node-root db:prepare
+docker run --rm --env-file ../.env --network joystream_default joystream/apps workspace query-node-root db:migrate
+
+# Start processor and graphql server
+docker-compose up -d processor
+docker-compose up -d graphql-server

+ 8 - 2
query-node/start.sh

@@ -17,6 +17,9 @@ docker-compose up -d joystream-node
 # Bring up db
 docker-compose up -d db
 
+# Override DB_HOST for db setup
+export DB_HOST=localhost
+
 # Make sure we use dev config for db migrations (prevents "Cannot create database..." and some other errors)
 yarn workspace query-node config:dev
 
@@ -24,7 +27,10 @@ yarn workspace query-node config:dev
 yarn workspace query-node-root db:prepare
 yarn workspace query-node-root db:migrate
 
-docker-compose up -d graphql-server-mnt
+# Set DB_HOST back to the docker service name
+export DB_HOST=db
 
-# Starting up processor will bring up all services it depends on
+# Start processor and graphql server
 docker-compose up -d processor-mnt
+docker-compose up -d graphql-server-mnt
+

+ 1 - 11
runtime-modules/common/src/lib.rs

@@ -24,6 +24,7 @@ pub type ActorId<T> = <T as MembershipTypes>::ActorId;
 
 /// HTTP Url string
 pub type Url = Vec<u8>;
+pub type AssetUrls = Vec<Url>;
 
 /// Generic trait for membership dependent pallets.
 pub trait MembershipTypes: frame_system::Trait {
@@ -65,17 +66,6 @@ pub trait StorageOwnership {
         + Ord
         + PartialEq;
 
-    /// DAO id representation.
-    type DAOId: Parameter
-        + Member
-        + BaseArithmetic
-        + Codec
-        + Default
-        + Copy
-        + MaybeSerialize
-        + Ord
-        + PartialEq;
-
     /// Content id representation.
     type ContentId: Parameter + Member + Codec + Default + Copy + MaybeSerialize + Ord + PartialEq;
 

+ 7 - 13
runtime-modules/common/src/storage.rs

@@ -16,42 +16,36 @@ pub struct ContentParameters<ContentId, DataObjectTypeId> {
 // New owner type for storage object struct
 #[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
 #[derive(Clone, Encode, Decode, PartialEq, Eq, Debug)]
-pub enum StorageObjectOwner<MemberId, ChannelId, DAOId> {
+pub enum StorageObjectOwner<MemberId, ChannelId> {
     Member(MemberId),
-    Channel(ChannelId), // acts through content directory module, where again DAOs can own channels for example
+    Channel(ChannelId), // acts through content directory module,
     #[allow(clippy::upper_case_acronyms)]
-    DAO(DAOId), // acts through upcoming `content_finance` module
-    Council,            // acts through proposal frame_system
+    Council, // acts through proposal frame_system
     WorkingGroup(WorkingGroup), // acts through new extrinsic in working group
 }
 
-impl<MemberId, ChannelId, DAOId> Default for StorageObjectOwner<MemberId, ChannelId, DAOId> {
-    fn default() -> Self {
-        Self::Council
-    }
-}
 // To be implemented by current storage data_directory runtime module.
 // Defined in 'common' package
 pub trait StorageSystem<T: crate::StorageOwnership + crate::MembershipTypes> {
     fn atomically_add_content(
-        owner: StorageObjectOwner<T::MemberId, T::ChannelId, T::DAOId>,
+        owner: StorageObjectOwner<T::MemberId, T::ChannelId>,
         content_parameters: Vec<ContentParameters<T::ContentId, T::DataObjectTypeId>>,
     ) -> DispatchResult;
 
     // Checks if given owner can add provided content to the storage frame_system
     fn can_add_content(
-        owner: StorageObjectOwner<T::MemberId, T::ChannelId, T::DAOId>,
+        owner: StorageObjectOwner<T::MemberId, T::ChannelId>,
         content_parameters: Vec<ContentParameters<T::ContentId, T::DataObjectTypeId>>,
     ) -> DispatchResult;
 
     fn atomically_remove_content(
-        owner: &StorageObjectOwner<T::MemberId, T::ChannelId, T::DAOId>,
+        owner: &StorageObjectOwner<T::MemberId, T::ChannelId>,
         content_ids: &[T::ContentId],
     ) -> DispatchResult;
 
     // Checks if given owner can remove content under given content ids from the storage frame_system
     fn can_remove_content(
-        owner: &StorageObjectOwner<T::MemberId, T::ChannelId, T::DAOId>,
+        owner: &StorageObjectOwner<T::MemberId, T::ChannelId>,
         content_ids: &[T::ContentId],
     ) -> DispatchResult;
 }

+ 4 - 0
runtime-modules/common/src/working_group.rs

@@ -11,6 +11,10 @@ use strum_macros::EnumIter;
 #[cfg_attr(feature = "std", derive(Serialize, Deserialize, EnumIter))]
 #[derive(Encode, Decode, Clone, PartialEq, Eq, Copy, Debug, PartialOrd, Ord)]
 pub enum WorkingGroup {
+    /* Reserved
+        // working_group::Instance0.
+        Reserved,
+    */
     /* Reserved
         /// Forum working group: working_group::Instance1.
         Forum,

+ 10 - 2
runtime-modules/content/Cargo.toml

@@ -13,12 +13,17 @@ sp-arithmetic = { package = 'sp-arithmetic', default-features = false, git = 'ht
 codec = { package = 'parity-scale-codec', version = '1.3.4', default-features = false, features = ['derive'] }
 serde = {version = '1.0.101', features = ['derive'], optional = true}
 common = { package = 'pallet-common', default-features = false, path = '../common'}
+storage = { package = 'pallet-storage', default-features = false, path = '../storage'}
+membership = { package = 'pallet-membership', default-features = false, path = '../membership'}
+balances = { package = 'pallet-balances', default-features = false, git = 'https://github.com/paritytech/substrate.git', rev = '2cd20966cc09b059817c3ebe12fc130cdd850d62'}
+
 
 [dev-dependencies]
 sp-io = { package = 'sp-io', default-features = false, git = 'https://github.com/paritytech/substrate.git', rev = '2cd20966cc09b059817c3ebe12fc130cdd850d62'}
 sp-core = { package = 'sp-core', default-features = false, git = 'https://github.com/paritytech/substrate.git', rev = '2cd20966cc09b059817c3ebe12fc130cdd850d62'}
-balances = { package = 'pallet-balances', default-features = false, git = 'https://github.com/paritytech/substrate.git', rev = '2cd20966cc09b059817c3ebe12fc130cdd850d62'}
 pallet-timestamp = { package = 'pallet-timestamp', default-features = false, git = 'https://github.com/paritytech/substrate.git', rev = '2cd20966cc09b059817c3ebe12fc130cdd850d62'}
+randomness-collective-flip = { package = 'pallet-randomness-collective-flip', default-features = false, git = 'https://github.com/paritytech/substrate.git', rev = '2cd20966cc09b059817c3ebe12fc130cdd850d62'}
+
 
 [features]
 default = ['std']
@@ -30,5 +35,8 @@ std = [
 	'sp-arithmetic/std',
 	'codec/std',
 	'serde',
-	'common/std'
+	'common/std',
+	'storage/std',
+	'balances/std',
+	'membership/std',
 ]

+ 14 - 1
runtime-modules/content/src/errors.rs

@@ -62,6 +62,19 @@ decl_error! {
         VideoInSeason,
 
         /// Curators can only censor non-curator group owned channels
-        CannotCensoreCuratorGroupOwnedChannels
+        CannotCensoreCuratorGroupOwnedChannels,
+
+        /// No assets to be removed have been specified
+        NoAssetsSpecified,
+
+        /// Invalid assets have been provided for the channel
+        InvalidAssetsProvided,
+
+        /// Channel contains videos
+        ChannelContainsVideos,
+
+        /// Channel contains assets
+        ChannelContainsAssets,
+
     }
 }

File diff suppressed because it is too large
+ 325 - 217
runtime-modules/content/src/lib.rs


+ 3 - 4
runtime-modules/content/src/permissions/mod.rs

@@ -5,7 +5,6 @@ pub use curator_group::*;
 pub use crate::errors::*;
 use crate::*;
 pub use codec::{Codec, Decode, Encode};
-pub use common::MembershipTypes;
 use core::fmt::Debug;
 use frame_support::{ensure, Parameter};
 #[cfg(feature = "std")]
@@ -15,7 +14,7 @@ use sp_runtime::traits::{MaybeSerializeDeserialize, Member};
 // use frame_system::ensure_root;
 
 /// Model of authentication manager.
-pub trait ContentActorAuthenticator: frame_system::Trait + MembershipTypes {
+pub trait ContentActorAuthenticator: frame_system::Trait + membership::Trait {
     /// Curator identifier
     type CuratorId: Parameter
         + Member
@@ -132,7 +131,7 @@ pub fn ensure_actor_authorized_to_create_channel<T: Trait>(
 pub fn ensure_actor_authorized_to_update_channel<T: Trait>(
     origin: T::Origin,
     actor: &ContentActor<T::CuratorGroupId, T::CuratorId, T::MemberId>,
-    owner: &ChannelOwner<T::MemberId, T::CuratorGroupId, T::DAOId>,
+    owner: &ChannelOwner<T::MemberId, T::CuratorGroupId>,
 ) -> DispatchResult {
     // Only owner of a channel can update and delete channel assets.
     // Lead can update and delete curator group owned channel assets.
@@ -199,7 +198,7 @@ pub fn ensure_actor_authorized_to_set_featured_videos<T: Trait>(
 pub fn ensure_actor_authorized_to_censor<T: Trait>(
     origin: T::Origin,
     actor: &ContentActor<T::CuratorGroupId, T::CuratorId, T::MemberId>,
-    owner: &ChannelOwner<T::MemberId, T::CuratorGroupId, T::DAOId>,
+    owner: &ChannelOwner<T::MemberId, T::CuratorGroupId>,
 ) -> DispatchResult {
     // Only lead and curators can censor channels and videos
     // Only lead can censor curator group owned channels and videos

+ 307 - 43
runtime-modules/content/src/tests/channels.rs

@@ -2,9 +2,273 @@
 
 use super::curators;
 use super::mock::*;
+use crate::sp_api_hidden_includes_decl_storage::hidden_include::traits::Currency;
 use crate::*;
 use frame_support::{assert_err, assert_ok};
 
+#[test]
+fn successful_channel_deletion() {
+    with_default_mock_builder(|| {
+        // Run to block one to see emitted events
+        run_to_block(1);
+
+        // create an account with enough balance
+        let _ = balances::Module::<Test>::deposit_creating(
+            &FIRST_MEMBER_ORIGIN,
+            <Test as balances::Trait>::Balance::from(100u32),
+        );
+
+        // 3 assets
+        let assets = NewAssets::<Test>::Upload(CreationUploadParameters {
+            object_creation_list: vec![
+                DataObjectCreationParameters {
+                    size: 3,
+                    ipfs_content_id: b"first".to_vec(),
+                },
+                DataObjectCreationParameters {
+                    size: 3,
+                    ipfs_content_id: b"second".to_vec(),
+                },
+                DataObjectCreationParameters {
+                    size: 3,
+                    ipfs_content_id: b"third".to_vec(),
+                },
+            ],
+            expected_data_size_fee: storage::DataObjectPerMegabyteFee::<Test>::get(),
+        });
+
+        let channel_id = NextChannelId::<Test>::get();
+
+        // create channel
+        create_channel_mock(
+            FIRST_MEMBER_ORIGIN,
+            ContentActor::Member(FIRST_MEMBER_ID),
+            ChannelCreationParametersRecord {
+                assets: assets,
+                meta: vec![],
+                reward_account: None,
+            },
+            Ok(()),
+        );
+
+        // attempt to delete channel with non zero assets
+        delete_channel_mock(
+            FIRST_MEMBER_ORIGIN,
+            ContentActor::Member(FIRST_MEMBER_ID),
+            channel_id,
+            Err(Error::<Test>::ChannelContainsAssets.into()),
+        );
+
+        // delete assets
+        let assets_to_delete = [0u64, 1u64, 2u64]
+            .iter()
+            .map(|&x| x)
+            .collect::<BTreeSet<_>>();
+
+        // delete channel assets
+        delete_channel_assets_mock(
+            FIRST_MEMBER_ORIGIN,
+            ContentActor::Member(FIRST_MEMBER_ID),
+            channel_id,
+            assets_to_delete,
+            Ok(()),
+        );
+
+        // successful deletion
+        delete_channel_mock(
+            FIRST_MEMBER_ORIGIN,
+            ContentActor::Member(FIRST_MEMBER_ID),
+            channel_id,
+            Ok(()),
+        );
+    })
+}
+
+#[test]
+fn successful_channel_assets_deletion() {
+    with_default_mock_builder(|| {
+        // Run to block one to see emitted events
+        run_to_block(1);
+
+        // create an account with enough balance
+        let _ = balances::Module::<Test>::deposit_creating(
+            &FIRST_MEMBER_ORIGIN,
+            <Test as balances::Trait>::Balance::from(100u32),
+        );
+
+        // 3 assets
+        let assets = NewAssets::<Test>::Upload(CreationUploadParameters {
+            object_creation_list: vec![
+                DataObjectCreationParameters {
+                    size: 3,
+                    ipfs_content_id: b"first".to_vec(),
+                },
+                DataObjectCreationParameters {
+                    size: 3,
+                    ipfs_content_id: b"second".to_vec(),
+                },
+                DataObjectCreationParameters {
+                    size: 3,
+                    ipfs_content_id: b"third".to_vec(),
+                },
+            ],
+            expected_data_size_fee: storage::DataObjectPerMegabyteFee::<Test>::get(),
+        });
+
+        let channel_id = NextChannelId::<Test>::get();
+        // create channel
+        create_channel_mock(
+            FIRST_MEMBER_ORIGIN,
+            ContentActor::Member(FIRST_MEMBER_ID),
+            ChannelCreationParametersRecord {
+                assets: assets,
+                meta: vec![],
+                reward_account: None,
+            },
+            Ok(()),
+        );
+
+        // delete assets
+        let assets_to_delete = [0u64, 1u64].iter().map(|&x| x).collect::<BTreeSet<_>>();
+
+        // delete channel assets
+        delete_channel_assets_mock(
+            FIRST_MEMBER_ORIGIN,
+            ContentActor::Member(FIRST_MEMBER_ID),
+            channel_id,
+            assets_to_delete,
+            Ok(()),
+        );
+    })
+}
+
+#[test]
+fn successful_channel_update() {
+    with_default_mock_builder(|| {
+        // Run to block one to see emitted events
+        run_to_block(1);
+
+        // create an account with enough balance
+        let _ = balances::Module::<Test>::deposit_creating(
+            &FIRST_MEMBER_ORIGIN,
+            <Test as balances::Trait>::Balance::from(100u32),
+        );
+
+        // 2 assets at creation + 2 more during update
+        let assets = NewAssets::<Test>::Upload(CreationUploadParameters {
+            object_creation_list: vec![
+                DataObjectCreationParameters {
+                    size: 3,
+                    ipfs_content_id: b"first".to_vec(),
+                },
+                DataObjectCreationParameters {
+                    size: 3,
+                    ipfs_content_id: b"second".to_vec(),
+                },
+            ],
+            expected_data_size_fee: storage::DataObjectPerMegabyteFee::<Test>::get(),
+        });
+
+        let new_assets = NewAssets::<Test>::Upload(CreationUploadParameters {
+            object_creation_list: vec![
+                DataObjectCreationParameters {
+                    size: 3,
+                    ipfs_content_id: b"first".to_vec(),
+                },
+                DataObjectCreationParameters {
+                    size: 3,
+                    ipfs_content_id: b"second".to_vec(),
+                },
+            ],
+            expected_data_size_fee: storage::DataObjectPerMegabyteFee::<Test>::get(),
+        });
+
+        let channel_id = NextChannelId::<Test>::get();
+        // create channel
+        create_channel_mock(
+            FIRST_MEMBER_ORIGIN,
+            ContentActor::Member(FIRST_MEMBER_ID),
+            ChannelCreationParametersRecord {
+                assets: assets,
+                meta: vec![],
+                reward_account: None,
+            },
+            Ok(()),
+        );
+
+        // update channel
+        update_channel_mock(
+            FIRST_MEMBER_ORIGIN,
+            ContentActor::Member(FIRST_MEMBER_ID),
+            channel_id,
+            ChannelUpdateParametersRecord {
+                assets: Some(new_assets),
+                new_meta: None,
+                reward_account: None,
+            },
+            Ok(()),
+        );
+
+        // update with 0 assets
+        update_channel_mock(
+            FIRST_MEMBER_ORIGIN,
+            ContentActor::Member(FIRST_MEMBER_ID),
+            channel_id,
+            ChannelUpdateParametersRecord {
+                assets: None,
+                new_meta: None,
+                reward_account: None,
+            },
+            Ok(()),
+        );
+    })
+}
+
+#[test]
+fn successful_channel_creation() {
+    with_default_mock_builder(|| {
+        // Run to block one to see emitted events
+        run_to_block(1);
+
+        // create an account with enough balance
+        let _ = balances::Module::<Test>::deposit_creating(
+            &FIRST_MEMBER_ORIGIN,
+            <Test as balances::Trait>::Balance::from(100u32),
+        );
+
+        // 3 assets to be uploaded
+        let assets = NewAssets::<Test>::Upload(CreationUploadParameters {
+            object_creation_list: vec![
+                DataObjectCreationParameters {
+                    size: 3,
+                    ipfs_content_id: b"first".to_vec(),
+                },
+                DataObjectCreationParameters {
+                    size: 3,
+                    ipfs_content_id: b"second".to_vec(),
+                },
+                DataObjectCreationParameters {
+                    size: 3,
+                    ipfs_content_id: b"third".to_vec(),
+                },
+            ],
+            expected_data_size_fee: storage::DataObjectPerMegabyteFee::<Test>::get(),
+        });
+
+        // create channel
+        create_channel_mock(
+            FIRST_MEMBER_ORIGIN,
+            ContentActor::Member(FIRST_MEMBER_ID),
+            ChannelCreationParametersRecord {
+                assets: assets,
+                meta: vec![],
+                reward_account: None,
+            },
+            Ok(()),
+        );
+    })
+}
+
 #[test]
 fn lead_cannot_create_channel() {
     with_default_mock_builder(|| {
@@ -12,8 +276,8 @@ fn lead_cannot_create_channel() {
             Content::create_channel(
                 Origin::signed(LEAD_ORIGIN),
                 ContentActor::Lead,
-                ChannelCreationParameters {
-                    assets: vec![],
+                ChannelCreationParametersRecord {
+                    assets: NewAssets::<Test>::Urls(vec![]),
                     meta: vec![],
                     reward_account: None,
                 }
@@ -34,8 +298,8 @@ fn curator_owned_channels() {
             Content::create_channel(
                 Origin::signed(FIRST_CURATOR_ORIGIN),
                 ContentActor::Curator(FIRST_CURATOR_GROUP_ID, FIRST_CURATOR_ID),
-                ChannelCreationParameters {
-                    assets: vec![],
+                ChannelCreationParametersRecord {
+                    assets: NewAssets::<Test>::Urls(vec![]),
                     meta: vec![],
                     reward_account: None,
                 }
@@ -51,8 +315,8 @@ fn curator_owned_channels() {
             Content::create_channel(
                 Origin::signed(SECOND_CURATOR_ORIGIN),
                 ContentActor::Curator(FIRST_CURATOR_GROUP_ID, SECOND_CURATOR_ID),
-                ChannelCreationParameters {
-                    assets: vec![],
+                ChannelCreationParametersRecord {
+                    assets: NewAssets::<Test>::Urls(vec![]),
                     meta: vec![],
                     reward_account: None,
                 }
@@ -65,8 +329,8 @@ fn curator_owned_channels() {
             Content::create_channel(
                 Origin::signed(SECOND_CURATOR_ORIGIN),
                 ContentActor::Curator(FIRST_CURATOR_GROUP_ID, FIRST_CURATOR_ID),
-                ChannelCreationParameters {
-                    assets: vec![],
+                ChannelCreationParametersRecord {
+                    assets: NewAssets::<Test>::Urls(vec![]),
                     meta: vec![],
                     reward_account: None,
                 }
@@ -80,8 +344,8 @@ fn curator_owned_channels() {
         assert_ok!(Content::create_channel(
             Origin::signed(FIRST_CURATOR_ORIGIN),
             ContentActor::Curator(FIRST_CURATOR_GROUP_ID, FIRST_CURATOR_ID),
-            ChannelCreationParameters {
-                assets: vec![],
+            ChannelCreationParametersRecord {
+                assets: NewAssets::<Test>::Urls(vec![]),
                 meta: vec![],
                 reward_account: None,
             }
@@ -94,14 +358,14 @@ fn curator_owned_channels() {
                 channel_id,
                 ChannelRecord {
                     owner: ChannelOwner::CuratorGroup(FIRST_CURATOR_GROUP_ID),
-                    videos: vec![],
-                    playlists: vec![],
-                    series: vec![],
                     is_censored: false,
                     reward_account: None,
+                    deletion_prize_source_account_id: FIRST_CURATOR_ORIGIN,
+                    num_assets: 0,
+                    num_videos: 0,
                 },
-                ChannelCreationParameters {
-                    assets: vec![],
+                ChannelCreationParametersRecord {
+                    assets: NewAssets::<Test>::Urls(vec![]),
                     meta: vec![],
                     reward_account: None,
                 }
@@ -113,7 +377,7 @@ fn curator_owned_channels() {
             Origin::signed(FIRST_CURATOR_ORIGIN),
             ContentActor::Curator(FIRST_CURATOR_GROUP_ID, FIRST_CURATOR_ID),
             channel_id,
-            ChannelUpdateParameters {
+            ChannelUpdateParametersRecord {
                 assets: None,
                 new_meta: None,
                 reward_account: None,
@@ -125,7 +389,7 @@ fn curator_owned_channels() {
             Origin::signed(LEAD_ORIGIN),
             ContentActor::Lead,
             channel_id,
-            ChannelUpdateParameters {
+            ChannelUpdateParametersRecord {
                 assets: None,
                 new_meta: None,
                 reward_account: None,
@@ -145,8 +409,8 @@ fn member_owned_channels() {
             Content::create_channel(
                 Origin::signed(UNKNOWN_ORIGIN),
                 ContentActor::Member(MEMBERS_COUNT + 1),
-                ChannelCreationParameters {
-                    assets: vec![],
+                ChannelCreationParametersRecord {
+                    assets: NewAssets::<Test>::Urls(vec![]),
                     meta: vec![],
                     reward_account: None,
                 }
@@ -160,8 +424,8 @@ fn member_owned_channels() {
         assert_ok!(Content::create_channel(
             Origin::signed(FIRST_MEMBER_ORIGIN),
             ContentActor::Member(FIRST_MEMBER_ID),
-            ChannelCreationParameters {
-                assets: vec![],
+            ChannelCreationParametersRecord {
+                assets: NewAssets::<Test>::Urls(vec![]),
                 meta: vec![],
                 reward_account: None,
             }
@@ -174,14 +438,14 @@ fn member_owned_channels() {
                 channel_id_1,
                 ChannelRecord {
                     owner: ChannelOwner::Member(FIRST_MEMBER_ID),
-                    videos: vec![],
-                    playlists: vec![],
-                    series: vec![],
                     is_censored: false,
                     reward_account: None,
+                    deletion_prize_source_account_id: FIRST_MEMBER_ORIGIN,
+                    num_assets: 0,
+                    num_videos: 0,
                 },
-                ChannelCreationParameters {
-                    assets: vec![],
+                ChannelCreationParametersRecord {
+                    assets: NewAssets::<Test>::Urls(vec![]),
                     meta: vec![],
                     reward_account: None,
                 }
@@ -194,8 +458,8 @@ fn member_owned_channels() {
         assert_ok!(Content::create_channel(
             Origin::signed(SECOND_MEMBER_ORIGIN),
             ContentActor::Member(SECOND_MEMBER_ID),
-            ChannelCreationParameters {
-                assets: vec![],
+            ChannelCreationParametersRecord {
+                assets: NewAssets::<Test>::Urls(vec![]),
                 meta: vec![],
                 reward_account: None,
             }
@@ -208,14 +472,14 @@ fn member_owned_channels() {
                 channel_id_2,
                 ChannelRecord {
                     owner: ChannelOwner::Member(SECOND_MEMBER_ID),
-                    videos: vec![],
-                    playlists: vec![],
-                    series: vec![],
                     is_censored: false,
                     reward_account: None,
+                    deletion_prize_source_account_id: SECOND_MEMBER_ORIGIN,
+                    num_assets: 0,
+                    num_videos: 0,
                 },
-                ChannelCreationParameters {
-                    assets: vec![],
+                ChannelCreationParametersRecord {
+                    assets: NewAssets::<Test>::Urls(vec![]),
                     meta: vec![],
                     reward_account: None,
                 }
@@ -227,7 +491,7 @@ fn member_owned_channels() {
             Origin::signed(FIRST_MEMBER_ORIGIN),
             ContentActor::Member(FIRST_MEMBER_ID),
             channel_id_1,
-            ChannelUpdateParameters {
+            ChannelUpdateParametersRecord {
                 assets: None,
                 new_meta: None,
                 reward_account: None,
@@ -241,13 +505,13 @@ fn member_owned_channels() {
                 channel_id_1,
                 ChannelRecord {
                     owner: ChannelOwner::Member(FIRST_MEMBER_ID),
-                    videos: vec![],
-                    playlists: vec![],
-                    series: vec![],
                     is_censored: false,
                     reward_account: None,
+                    deletion_prize_source_account_id: FIRST_MEMBER_ORIGIN,
+                    num_assets: 0,
+                    num_videos: 0,
                 },
-                ChannelUpdateParameters {
+                ChannelUpdateParametersRecord {
                     assets: None,
                     new_meta: None,
                     reward_account: None,
@@ -261,7 +525,7 @@ fn member_owned_channels() {
                 Origin::signed(FIRST_MEMBER_ORIGIN),
                 ContentActor::Member(FIRST_MEMBER_ID),
                 channel_id_2,
-                ChannelUpdateParameters {
+                ChannelUpdateParametersRecord {
                     assets: None,
                     new_meta: None,
                     reward_account: None,
@@ -282,8 +546,8 @@ fn channel_censoring() {
         assert_ok!(Content::create_channel(
             Origin::signed(FIRST_MEMBER_ORIGIN),
             ContentActor::Member(FIRST_MEMBER_ID),
-            ChannelCreationParameters {
-                assets: vec![],
+            ChannelCreationParametersRecord {
+                assets: NewAssets::<Test>::Urls(vec![]),
                 meta: vec![],
                 reward_account: None,
             }
@@ -358,8 +622,8 @@ fn channel_censoring() {
         assert_ok!(Content::create_channel(
             Origin::signed(FIRST_CURATOR_ORIGIN),
             ContentActor::Curator(group_id, FIRST_CURATOR_ID),
-            ChannelCreationParameters {
-                assets: vec![],
+            ChannelCreationParametersRecord {
+                assets: NewAssets::<Test>::Urls(vec![]),
                 meta: vec![],
                 reward_account: None,
             }

+ 364 - 30
runtime-modules/content/src/tests/mock.rs

@@ -2,25 +2,24 @@
 
 use crate::*;
 
+use frame_support::dispatch::{DispatchError, DispatchResult};
 use frame_support::traits::{OnFinalize, OnInitialize};
 use frame_support::{impl_outer_event, impl_outer_origin, parameter_types};
 use sp_core::H256;
 use sp_runtime::{
     testing::Header,
     traits::{BlakeTwo256, IdentityLookup},
-    Perbill,
+    ModuleId, Perbill,
 };
 
 use crate::ContentActorAuthenticator;
 use crate::Trait;
 use common::currency::GovernanceCurrency;
-use common::storage::StorageSystem;
 
 pub type CuratorId = <Test as ContentActorAuthenticator>::CuratorId;
 pub type CuratorGroupId = <Test as ContentActorAuthenticator>::CuratorGroupId;
 pub type MemberId = <Test as MembershipTypes>::MemberId;
 pub type ChannelId = <Test as StorageOwnership>::ChannelId;
-// pub type DAOId = <Test as StorageOwnership>::DAOId;
 
 /// Origins
 
@@ -55,11 +54,21 @@ mod content {
     pub use crate::Event;
 }
 
+mod storage_mod {
+    pub use storage::Event;
+}
+
+mod membership_mod {
+    pub use membership::Event;
+}
+
 impl_outer_event! {
     pub enum MetaEvent for Test {
         content<T>,
         frame_system<T>,
         balances<T>,
+        membership_mod<T>,
+        storage_mod<T>,
     }
 }
 
@@ -116,7 +125,6 @@ impl common::MembershipTypes for Test {
 
 impl common::StorageOwnership for Test {
     type ChannelId = u64;
-    type DAOId = u64;
     type ContentId = u64;
     type DataObjectTypeId = u64;
 }
@@ -139,6 +147,19 @@ impl GovernanceCurrency for Test {
     type Currency = balances::Module<Self>;
 }
 
+parameter_types! {
+    pub const ScreenedMemberMaxInitialBalance: u64 = 5000;
+}
+
+impl membership::Trait for Test {
+    type Event = MetaEvent;
+    type MemberId = u64;
+    type PaidTermId = u64;
+    type SubscriptionId = u64;
+    type ActorId = u64;
+    type ScreenedMemberMaxInitialBalance = ScreenedMemberMaxInitialBalance;
+}
+
 impl ContentActorAuthenticator for Test {
     type CuratorId = u64;
     type CuratorGroupId = u64;
@@ -166,39 +187,144 @@ impl ContentActorAuthenticator for Test {
     }
 }
 
-pub struct MockStorageSystem {}
+parameter_types! {
+    pub const MaxNumberOfDataObjectsPerBag: u64 = 4;
+    pub const MaxDistributionBucketFamilyNumber: u64 = 4;
+    pub const MaxDistributionBucketNumberPerFamily: u64 = 10;
+    pub const DataObjectDeletionPrize: u64 = 10;
+    pub const StorageModuleId: ModuleId = ModuleId(*b"mstorage"); // module storage
+    pub const BlacklistSizeLimit: u64 = 1;
+    pub const MaxNumberOfPendingInvitationsPerDistributionBucket: u64 = 1;
+    pub const StorageBucketsPerBagValueConstraint: storage::StorageBucketsPerBagValueConstraint =
+        storage::StorageBucketsPerBagValueConstraint {min: 3, max_min_diff: 7};
+    pub const InitialStorageBucketsNumberForDynamicBag: u64 = 3;
+    pub const MaxRandomIterationNumber: u64 = 3;
+    pub const DefaultMemberDynamicBagNumberOfStorageBuckets: u64 = 3;
+    pub const DefaultChannelDynamicBagNumberOfStorageBuckets: u64 = 4;
+    pub const DistributionBucketsPerBagValueConstraint: storage::DistributionBucketsPerBagValueConstraint =
+        storage::StorageBucketsPerBagValueConstraint {min: 3, max_min_diff: 7};
+    pub const MaxDataObjectSize: u64 = 400;
+}
 
-// Anyone can upload and delete without restriction
-impl StorageSystem<Test> for MockStorageSystem {
-    fn atomically_add_content(
-        _owner: StorageObjectOwner<Test>,
-        _content_parameters: Vec<ContentParameters<Test>>,
-    ) -> DispatchResult {
-        Ok(())
+pub const STORAGE_WG_LEADER_ACCOUNT_ID: u64 = 100001;
+pub const DEFAULT_STORAGE_PROVIDER_ACCOUNT_ID: u64 = 100002;
+pub const DEFAULT_DISTRIBUTION_PROVIDER_ACCOUNT_ID: u64 = 100003;
+pub const DISTRIBUTION_WG_LEADER_ACCOUNT_ID: u64 = 100004;
+pub const DEFAULT_STORAGE_PROVIDER_ID: u64 = 10;
+pub const ANOTHER_STORAGE_PROVIDER_ID: u64 = 11;
+pub const DEFAULT_DISTRIBUTION_PROVIDER_ID: u64 = 12;
+pub const ANOTHER_DISTRIBUTION_PROVIDER_ID: u64 = 13;
+
+impl storage::Trait for Test {
+    type Event = MetaEvent;
+    type DataObjectId = u64;
+    type StorageBucketId = u64;
+    type DistributionBucketId = u64;
+    type DistributionBucketFamilyId = u64;
+    type DistributionBucketOperatorId = u64;
+    type ChannelId = u64;
+    type MaxNumberOfDataObjectsPerBag = MaxNumberOfDataObjectsPerBag;
+    type DataObjectDeletionPrize = DataObjectDeletionPrize;
+    type BlacklistSizeLimit = BlacklistSizeLimit;
+    type ModuleId = StorageModuleId;
+    type MemberOriginValidator = ();
+    type StorageBucketsPerBagValueConstraint = StorageBucketsPerBagValueConstraint;
+    type DefaultMemberDynamicBagNumberOfStorageBuckets =
+        DefaultMemberDynamicBagNumberOfStorageBuckets;
+    type DefaultChannelDynamicBagNumberOfStorageBuckets =
+        DefaultChannelDynamicBagNumberOfStorageBuckets;
+    type Randomness = CollectiveFlip;
+    type MaxRandomIterationNumber = MaxRandomIterationNumber;
+    type MaxDistributionBucketFamilyNumber = MaxDistributionBucketFamilyNumber;
+    type MaxDistributionBucketNumberPerFamily = MaxDistributionBucketNumberPerFamily;
+    type DistributionBucketsPerBagValueConstraint = DistributionBucketsPerBagValueConstraint;
+    type MaxNumberOfPendingInvitationsPerDistributionBucket =
+        MaxNumberOfPendingInvitationsPerDistributionBucket;
+    type ContentId = u64;
+    type MaxDataObjectSize = MaxDataObjectSize;
+
+    fn ensure_storage_working_group_leader_origin(origin: Self::Origin) -> DispatchResult {
+        let account_id = ensure_signed(origin)?;
+
+        if account_id != STORAGE_WG_LEADER_ACCOUNT_ID {
+            Err(DispatchError::BadOrigin)
+        } else {
+            Ok(())
+        }
+    }
+
+    fn ensure_storage_worker_origin(origin: Self::Origin, _: u64) -> DispatchResult {
+        let account_id = ensure_signed(origin)?;
+
+        if account_id != DEFAULT_STORAGE_PROVIDER_ACCOUNT_ID {
+            Err(DispatchError::BadOrigin)
+        } else {
+            Ok(())
+        }
+    }
+
+    fn ensure_storage_worker_exists(worker_id: &u64) -> DispatchResult {
+        let allowed_storage_providers =
+            vec![DEFAULT_STORAGE_PROVIDER_ID, ANOTHER_STORAGE_PROVIDER_ID];
+
+        if !allowed_storage_providers.contains(worker_id) {
+            Err(DispatchError::Other("Invalid worker"))
+        } else {
+            Ok(())
+        }
+    }
+
+    fn ensure_distribution_working_group_leader_origin(origin: Self::Origin) -> DispatchResult {
+        let account_id = ensure_signed(origin)?;
+
+        if account_id != DISTRIBUTION_WG_LEADER_ACCOUNT_ID {
+            Err(DispatchError::BadOrigin)
+        } else {
+            Ok(())
+        }
     }
 
-    fn can_add_content(
-        _owner: StorageObjectOwner<Test>,
-        _content_parameters: Vec<ContentParameters<Test>>,
-    ) -> DispatchResult {
-        Ok(())
+    fn ensure_distribution_worker_origin(origin: Self::Origin, _: u64) -> DispatchResult {
+        let account_id = ensure_signed(origin)?;
+
+        if account_id != DEFAULT_DISTRIBUTION_PROVIDER_ACCOUNT_ID {
+            Err(DispatchError::BadOrigin)
+        } else {
+            Ok(())
+        }
     }
 
-    fn atomically_remove_content(
-        _owner: &StorageObjectOwner<Test>,
-        _content_ids: &[u64],
-    ) -> DispatchResult {
-        Ok(())
+    fn ensure_distribution_worker_exists(worker_id: &u64) -> DispatchResult {
+        let allowed_providers = vec![
+            DEFAULT_DISTRIBUTION_PROVIDER_ID,
+            ANOTHER_DISTRIBUTION_PROVIDER_ID,
+        ];
+
+        if !allowed_providers.contains(worker_id) {
+            Err(DispatchError::Other("Invalid worker"))
+        } else {
+            Ok(())
+        }
     }
+}
 
-    fn can_remove_content(
-        _owner: &StorageObjectOwner<Test>,
-        _content_ids: &[u64],
-    ) -> DispatchResult {
-        Ok(())
+pub const DEFAULT_MEMBER_ID: u64 = 100;
+pub const DEFAULT_MEMBER_ACCOUNT_ID: u64 = 101;
+
+impl common::origin::ActorOriginValidator<Origin, u64, u64> for () {
+    fn ensure_actor_origin(origin: Origin, member_id: u64) -> Result<u64, &'static str> {
+        let signed_account_id = frame_system::ensure_signed(origin)?;
+
+        if signed_account_id == DEFAULT_MEMBER_ACCOUNT_ID && member_id == DEFAULT_MEMBER_ID {
+            Ok(signed_account_id)
+        } else {
+            Err(DispatchError::BadOrigin.into())
+        }
     }
 }
 
+// Anyone can upload and delete without restriction
+
 parameter_types! {
     pub const MaxNumberOfCuratorsPerGroup: u32 = 10;
     pub const ChannelOwnershipPaymentEscrowId: [u8; 8] = *b"12345678";
@@ -234,9 +360,6 @@ impl Trait for Test {
 
     /// The maximum number of curators per group constraint
     type MaxNumberOfCuratorsPerGroup = MaxNumberOfCuratorsPerGroup;
-
-    // Type that handles asset uploads to storage frame_system
-    type StorageSystem = MockStorageSystem;
 }
 
 pub type System = frame_system::Module<Test>;
@@ -307,3 +430,214 @@ pub fn run_to_block(n: u64) {
         <System as OnInitialize<u64>>::on_initialize(System::block_number());
     }
 }
+
+pub type CollectiveFlip = randomness_collective_flip::Module<Test>;
+
+pub fn create_channel_mock(
+    sender: u64,
+    actor: ContentActor<CuratorGroupId, CuratorId, MemberId>,
+    params: ChannelCreationParameters<Test>,
+    result: DispatchResult,
+) {
+    let channel_id = Content::next_channel_id();
+
+    assert_eq!(
+        Content::create_channel(Origin::signed(sender), actor.clone(), params.clone()),
+        result.clone(),
+    );
+
+    if result.is_ok() {
+        let num_assets = match params.assets.clone() {
+            NewAssets::<Test>::Urls(v) => v.len() as u64,
+            NewAssets::<Test>::Upload(c) => c.object_creation_list.len() as u64,
+        };
+        let owner = Content::actor_to_channel_owner(&actor).unwrap();
+
+        assert_eq!(
+            System::events().last().unwrap().event,
+            MetaEvent::content(RawEvent::ChannelCreated(
+                actor.clone(),
+                channel_id,
+                ChannelRecord {
+                    owner: owner,
+                    is_censored: false,
+                    reward_account: params.reward_account,
+                    deletion_prize_source_account_id: sender,
+                    num_assets: num_assets,
+                    num_videos: 0,
+                },
+                params.clone(),
+            ))
+        );
+    }
+}
+
+pub fn update_channel_mock(
+    sender: u64,
+    actor: ContentActor<CuratorGroupId, CuratorId, MemberId>,
+    channel_id: ChannelId,
+    params: ChannelUpdateParameters<Test>,
+    result: DispatchResult,
+) {
+    let channel_pre = ChannelById::<Test>::get(channel_id.clone());
+
+    assert_eq!(
+        Content::update_channel(
+            Origin::signed(sender),
+            actor.clone(),
+            channel_id.clone(),
+            params.clone()
+        ),
+        result.clone(),
+    );
+
+    if result.is_ok() {
+        let maybe_num_assets = params.assets.clone().map_or(None, |assets| match assets {
+            NewAssets::<Test>::Urls(v) => Some(v.len() as u64),
+            NewAssets::<Test>::Upload(c) => Some(c.object_creation_list.len() as u64),
+        });
+        assert_eq!(
+            System::events().last().unwrap().event,
+            MetaEvent::content(RawEvent::ChannelUpdated(
+                actor.clone(),
+                channel_id,
+                ChannelRecord {
+                    owner: channel_pre.owner.clone(),
+                    is_censored: channel_pre.is_censored,
+                    reward_account: channel_pre.reward_account.clone(),
+                    deletion_prize_source_account_id: sender,
+                    num_assets: channel_pre.num_assets + maybe_num_assets.unwrap_or(0),
+                    num_videos: channel_pre.num_videos,
+                },
+                params.clone(),
+            ))
+        );
+    }
+}
+
+pub fn delete_channel_assets_mock(
+    sender: u64,
+    actor: ContentActor<CuratorGroupId, CuratorId, MemberId>,
+    channel_id: ChannelId,
+    assets: BTreeSet<<Test as storage::Trait>::DataObjectId>,
+    result: DispatchResult,
+) {
+    let channel_pre = ChannelById::<Test>::get(channel_id.clone());
+
+    assert_eq!(
+        Content::remove_channel_assets(
+            Origin::signed(sender),
+            actor.clone(),
+            channel_id.clone(),
+            assets.clone(),
+        ),
+        result.clone(),
+    );
+
+    if result.is_ok() {
+        let num_assets_removed = assets.len();
+        assert_eq!(
+            System::events().last().unwrap().event,
+            MetaEvent::content(RawEvent::ChannelAssetsRemoved(
+                actor.clone(),
+                channel_id,
+                assets.clone(),
+                ChannelRecord {
+                    owner: channel_pre.owner.clone(),
+                    is_censored: channel_pre.is_censored,
+                    reward_account: channel_pre.reward_account.clone(),
+                    deletion_prize_source_account_id: sender,
+                    num_assets: channel_pre.num_assets - (num_assets_removed as u64),
+                    num_videos: channel_pre.num_videos,
+                },
+            ))
+        );
+    }
+}
+
+pub fn delete_channel_mock(
+    sender: u64,
+    actor: ContentActor<CuratorGroupId, CuratorId, MemberId>,
+    channel_id: ChannelId,
+    result: DispatchResult,
+) {
+    assert_eq!(
+        Content::delete_channel(Origin::signed(sender), actor.clone(), channel_id.clone()),
+        result.clone(),
+    );
+
+    if result.is_ok() {
+        assert_eq!(
+            System::events().last().unwrap().event,
+            MetaEvent::content(RawEvent::ChannelDeleted(actor.clone(), channel_id))
+        )
+    }
+}
+
+pub fn create_video_mock(
+    sender: u64,
+    actor: ContentActor<CuratorGroupId, CuratorId, MemberId>,
+    channel_id: ChannelId,
+    params: VideoCreationParameters<Test>,
+    result: DispatchResult,
+) {
+    let video_id = Content::next_video_id();
+    let num_videos_pre = Content::channel_by_id(channel_id).num_videos;
+
+    assert_eq!(
+        Content::create_video(
+            Origin::signed(sender),
+            actor.clone(),
+            channel_id.clone(),
+            params.clone()
+        ),
+        result.clone(),
+    );
+
+    if result.is_ok() {
+        assert_eq!(
+            System::events().last().unwrap().event,
+            MetaEvent::content(RawEvent::VideoCreated(
+                actor.clone(),
+                channel_id,
+                video_id,
+                params.clone(),
+            ))
+        );
+        assert_eq!(
+            num_videos_pre + 1,
+            Content::channel_by_id(channel_id).num_videos,
+        );
+    }
+}
+pub fn update_video_mock(
+    sender: u64,
+    actor: ContentActor<CuratorGroupId, CuratorId, MemberId>,
+    video_id: <Test as Trait>::VideoId,
+    params: VideoUpdateParameters<Test>,
+    result: DispatchResult,
+) {
+    // let channel_id = Content::video_by_id(video_id.clone()).in_channel;
+    // let num_videos_pre = Content::channel_by_id(channel_id).num_videos;
+
+    assert_eq!(
+        Content::update_video(
+            Origin::signed(sender),
+            actor.clone(),
+            video_id.clone(),
+            params.clone()
+        ),
+        result.clone(),
+    );
+
+    if result.is_ok() {
+        assert_eq!(
+            System::events().last().unwrap().event,
+            MetaEvent::content(RawEvent::VideoUpdated(
+                actor.clone(),
+                video_id,
+                params.clone(),
+            ))
+        );
+    }
+}

+ 148 - 17
runtime-modules/content/src/tests/videos.rs

@@ -2,6 +2,7 @@
 
 use super::curators;
 use super::mock::*;
+use crate::sp_api_hidden_includes_decl_storage::hidden_include::traits::Currency;
 use crate::*;
 use frame_support::{assert_err, assert_ok};
 
@@ -12,8 +13,8 @@ fn create_member_channel() -> ChannelId {
     assert_ok!(Content::create_channel(
         Origin::signed(FIRST_MEMBER_ORIGIN),
         ContentActor::Member(FIRST_MEMBER_ID),
-        ChannelCreationParameters {
-            assets: vec![],
+        ChannelCreationParametersRecord {
+            assets: NewAssets::<Test>::Urls(vec![]),
             meta: vec![],
             reward_account: None,
         }
@@ -22,6 +23,136 @@ fn create_member_channel() -> ChannelId {
     channel_id
 }
 
+#[test]
+fn video_creation_successful() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        // deposit initial balance
+        let _ = balances::Module::<Test>::deposit_creating(
+            &FIRST_MEMBER_ORIGIN,
+            <Test as balances::Trait>::Balance::from(100u32),
+        );
+
+        let channel_id = NextChannelId::<Test>::get();
+
+        create_channel_mock(
+            FIRST_MEMBER_ORIGIN,
+            ContentActor::Member(FIRST_MEMBER_ID),
+            ChannelCreationParametersRecord {
+                assets: NewAssets::<Test>::Urls(vec![]),
+                meta: vec![],
+                reward_account: None,
+            },
+            Ok(()),
+        );
+
+        let params = VideoCreationParametersRecord {
+            assets: NewAssets::<Test>::Upload(CreationUploadParameters {
+                object_creation_list: vec![
+                    DataObjectCreationParameters {
+                        size: 3,
+                        ipfs_content_id: b"first".to_vec(),
+                    },
+                    DataObjectCreationParameters {
+                        size: 3,
+                        ipfs_content_id: b"second".to_vec(),
+                    },
+                    DataObjectCreationParameters {
+                        size: 3,
+                        ipfs_content_id: b"third".to_vec(),
+                    },
+                ],
+                expected_data_size_fee: storage::DataObjectPerMegabyteFee::<Test>::get(),
+            }),
+            meta: b"test".to_vec(),
+        };
+
+        create_video_mock(
+            FIRST_MEMBER_ORIGIN,
+            ContentActor::Member(FIRST_MEMBER_ID),
+            channel_id,
+            params,
+            Ok(()),
+        )
+    })
+}
+
+#[test]
+fn video_update_successful() {
+    with_default_mock_builder(|| {
+        run_to_block(1);
+
+        let _ = balances::Module::<Test>::deposit_creating(
+            &FIRST_MEMBER_ORIGIN,
+            <Test as balances::Trait>::Balance::from(100u32),
+        );
+
+        let channel_id = NextChannelId::<Test>::get();
+
+        create_channel_mock(
+            FIRST_MEMBER_ORIGIN,
+            ContentActor::Member(FIRST_MEMBER_ID),
+            ChannelCreationParametersRecord {
+                assets: NewAssets::<Test>::Urls(vec![]),
+                meta: vec![],
+                reward_account: None,
+            },
+            Ok(()),
+        );
+
+        let params = VideoCreationParametersRecord {
+            assets: NewAssets::<Test>::Upload(CreationUploadParameters {
+                object_creation_list: vec![
+                    DataObjectCreationParameters {
+                        size: 3,
+                        ipfs_content_id: b"first".to_vec(),
+                    },
+                    DataObjectCreationParameters {
+                        size: 3,
+                        ipfs_content_id: b"second".to_vec(),
+                    },
+                    DataObjectCreationParameters {
+                        size: 3,
+                        ipfs_content_id: b"third".to_vec(),
+                    },
+                ],
+                expected_data_size_fee: storage::DataObjectPerMegabyteFee::<Test>::get(),
+            }),
+            meta: b"test".to_vec(),
+        };
+
+        let video_id = Content::next_video_id();
+
+        create_video_mock(
+            FIRST_MEMBER_ORIGIN,
+            ContentActor::Member(FIRST_MEMBER_ID),
+            channel_id,
+            params,
+            Ok(()),
+        );
+
+        let update_params = VideoUpdateParametersRecord {
+            assets: Some(NewAssets::<Test>::Upload(CreationUploadParameters {
+                object_creation_list: vec![DataObjectCreationParameters {
+                    size: 3,
+                    ipfs_content_id: b"first".to_vec(),
+                }],
+                expected_data_size_fee: storage::DataObjectPerMegabyteFee::<Test>::get(),
+            })),
+            new_meta: None,
+        };
+
+        update_video_mock(
+            FIRST_MEMBER_ORIGIN,
+            ContentActor::Member(FIRST_MEMBER_ID),
+            video_id,
+            update_params,
+            Ok(()),
+        );
+    })
+}
+
 #[test]
 fn member_can_create_videos() {
     with_default_mock_builder(|| {
@@ -34,8 +165,8 @@ fn member_can_create_videos() {
             Origin::signed(FIRST_MEMBER_ORIGIN),
             ContentActor::Member(FIRST_MEMBER_ID),
             channel_id,
-            VideoCreationParameters {
-                assets: vec![NewAsset::Urls(vec![b"https://somewhere.com/".to_vec()])],
+            VideoCreationParametersRecord {
+                assets: NewAssets::<Test>::Urls(vec![vec![b"https://somewhere.com/".to_vec()]]),
                 meta: b"metablob".to_vec(),
             }
         ));
@@ -46,8 +177,8 @@ fn member_can_create_videos() {
                 ContentActor::Member(FIRST_MEMBER_ID),
                 channel_id,
                 video_id,
-                VideoCreationParameters {
-                    assets: vec![NewAsset::Urls(vec![b"https://somewhere.com/".to_vec()])],
+                VideoCreationParametersRecord {
+                    assets: NewAssets::<Test>::Urls(vec![vec![b"https://somewhere.com/".to_vec()]]),
                     meta: b"metablob".to_vec(),
                 }
             ))
@@ -62,10 +193,10 @@ fn member_can_create_videos() {
             Origin::signed(FIRST_MEMBER_ORIGIN),
             ContentActor::Member(FIRST_MEMBER_ID),
             video_id,
-            VideoUpdateParameters {
-                assets: Some(vec![NewAsset::Urls(vec![
+            VideoUpdateParametersRecord {
+                assets: Some(NewAssets::<Test>::Urls(vec![vec![
                     b"https://somewhere-else.com/".to_vec()
-                ])]),
+                ]])),
                 new_meta: Some(b"newmetablob".to_vec()),
             }
         ));
@@ -75,10 +206,10 @@ fn member_can_create_videos() {
             MetaEvent::content(RawEvent::VideoUpdated(
                 ContentActor::Member(FIRST_MEMBER_ID),
                 video_id,
-                VideoUpdateParameters {
-                    assets: Some(vec![NewAsset::Urls(vec![
+                VideoUpdateParametersRecord {
+                    assets: Some(NewAssets::<Test>::Urls(vec![vec![
                         b"https://somewhere-else.com/".to_vec()
-                    ])]),
+                    ]])),
                     new_meta: Some(b"newmetablob".to_vec()),
                 }
             ))
@@ -90,8 +221,8 @@ fn member_can_create_videos() {
                 Origin::signed(SECOND_MEMBER_ORIGIN),
                 ContentActor::Member(SECOND_MEMBER_ID),
                 channel_id,
-                VideoCreationParameters {
-                    assets: vec![],
+                VideoCreationParametersRecord {
+                    assets: NewAssets::<Test>::Urls(vec![]),
                     meta: vec![],
                 }
             ),
@@ -104,7 +235,7 @@ fn member_can_create_videos() {
                 Origin::signed(SECOND_MEMBER_ORIGIN),
                 ContentActor::Member(SECOND_MEMBER_ID),
                 video_id,
-                VideoUpdateParameters {
+                VideoUpdateParametersRecord {
                     assets: None,
                     new_meta: None,
                 }
@@ -151,8 +282,8 @@ fn curators_can_censor_videos() {
             Origin::signed(FIRST_MEMBER_ORIGIN),
             ContentActor::Member(FIRST_MEMBER_ID),
             channel_id,
-            VideoCreationParameters {
-                assets: vec![NewAsset::Urls(vec![b"https://somewhere.com/".to_vec()])],
+            VideoCreationParametersRecord {
+                assets: NewAssets::<Test>::Urls(vec![vec![b"https://somewhere.com/".to_vec()]]),
                 meta: b"metablob".to_vec(),
             }
         ));

+ 9 - 3
runtime-modules/storage/src/lib.rs

@@ -118,6 +118,8 @@
 
 // Internal Substrate warning (decl_event).
 #![allow(clippy::unused_unit)]
+// needed for step iteration over DataObjectId range
+#![feature(step_trait)]
 
 #[cfg(test)]
 mod tests;
@@ -216,6 +218,9 @@ pub trait Trait: frame_system::Trait + balances::Trait + membership::Trait {
     /// Storage event type.
     type Event: From<Event<Self>> + Into<<Self as frame_system::Trait>::Event>;
 
+    /// Content id representation.
+    type ContentId: Parameter + Member + Codec + Default + Copy + MaybeSerialize + Ord + PartialEq;
+
     /// Data object ID type.
     type DataObjectId: Parameter
         + Member
@@ -224,7 +229,8 @@ pub trait Trait: frame_system::Trait + balances::Trait + membership::Trait {
         + Default
         + Copy
         + MaybeSerialize
-        + PartialEq;
+        + PartialEq
+        + iter::Step; // needed for iteration
 
     /// Storage bucket ID type.
     type StorageBucketId: Parameter
@@ -462,7 +468,7 @@ pub type BalanceOf<T> = <T as balances::Trait>::Balance;
 #[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
 #[derive(Encode, Decode, Default, Clone, PartialEq, Eq, Debug)]
 pub struct DataObject<Balance> {
-    /// Defines whether the data object was accepted by a liaison.
+    /// Defines whether the data object was accepted by a liaison.
     pub accepted: bool,
 
     /// A reward for the data object deletion.
@@ -932,7 +938,7 @@ decl_storage! {
         /// "Max objects size for a storage bucket voucher" number limit.
         pub VoucherMaxObjectsSizeLimit get (fn voucher_max_objects_size_limit): u64;
 
-        /// "Max objects number for a storage bucket voucher" number limit.
+        /// "Max objects number for a storage  bucket voucher" number limit.
         pub VoucherMaxObjectsNumberLimit get (fn voucher_max_objects_number_limit): u64;
 
         /// DynamicBagCreationPolicy by bag type storage map.

+ 1 - 0
runtime-modules/storage/src/tests/mocks.rs

@@ -103,6 +103,7 @@ impl crate::Trait for Test {
     type MaxNumberOfPendingInvitationsPerDistributionBucket =
         MaxNumberOfPendingInvitationsPerDistributionBucket;
     type MaxDataObjectSize = MaxDataObjectSize;
+    type ContentId = u64;
 
     fn ensure_storage_working_group_leader_origin(origin: Self::Origin) -> DispatchResult {
         let account_id = ensure_signed(origin)?;

+ 1 - 34
runtime/src/lib.rs

@@ -75,7 +75,6 @@ pub use pallet_staking::StakerStatus;
 pub use proposals_codex::ProposalsConfigParameters;
 pub use working_group;
 
-use common::storage::{ContentParameters, StorageObjectOwner};
 pub use content;
 pub use content::MaxNumber;
 
@@ -443,38 +442,6 @@ impl content::Trait for Runtime {
     type SeriesId = SeriesId;
     type ChannelOwnershipTransferRequestId = ChannelOwnershipTransferRequestId;
     type MaxNumberOfCuratorsPerGroup = MaxNumberOfCuratorsPerGroup;
-    type StorageSystem = (); // TODO: Add storage integration
-}
-
-// TODO: Remove after the integration with the Content pallet.
-impl common::storage::StorageSystem<Runtime> for () {
-    fn atomically_add_content(
-        _: StorageObjectOwner<MemberId, ChannelId, DAOId>,
-        _: Vec<ContentParameters<ContentId, DataObjectTypeId>>,
-    ) -> sp_runtime::DispatchResult {
-        todo!()
-    }
-
-    fn can_add_content(
-        _: StorageObjectOwner<MemberId, ChannelId, DAOId>,
-        _: Vec<ContentParameters<ContentId, DataObjectTypeId>>,
-    ) -> sp_runtime::DispatchResult {
-        todo!()
-    }
-
-    fn atomically_remove_content(
-        _: &StorageObjectOwner<MemberId, ChannelId, DAOId>,
-        _: &[ContentId],
-    ) -> sp_runtime::DispatchResult {
-        todo!()
-    }
-
-    fn can_remove_content(
-        _: &StorageObjectOwner<MemberId, ChannelId, DAOId>,
-        _: &[ContentId],
-    ) -> sp_runtime::DispatchResult {
-        todo!()
-    }
 }
 
 impl hiring::Trait for Runtime {
@@ -531,7 +498,6 @@ impl common::MembershipTypes for Runtime {
 
 impl common::StorageOwnership for Runtime {
     type ChannelId = ChannelId;
-    type DAOId = DAOId;
     type ContentId = ContentId;
     type DataObjectTypeId = DataObjectTypeId;
 }
@@ -723,6 +689,7 @@ impl storage::Trait for Runtime {
     type MaxNumberOfPendingInvitationsPerDistributionBucket =
         MaxNumberOfPendingInvitationsPerDistributionBucket;
     type MaxDataObjectSize = MaxDataObjectSize;
+    type ContentId = ContentId;
 
     fn ensure_storage_working_group_leader_origin(origin: Self::Origin) -> DispatchResult {
         StorageWorkingGroup::ensure_origin_is_active_leader(origin)

+ 1 - 0
storage-node-v2/.eslintignore

@@ -1,2 +1,3 @@
 /lib
 .eslintrc.js
+**/generated/*

+ 1 - 0
storage-node-v2/.eslintrc.js

@@ -9,6 +9,7 @@ module.exports = {
   rules: {
     'no-console': 'warn', // use dedicated logger
     'no-unused-vars': 'off', // Required by the typescript rule below
+    'prettier/prettier': 'off', // prettier-eslint conflicts inherited from @joystream/eslint-config
     '@typescript-eslint/no-unused-vars': ['error'],
     '@typescript-eslint/no-floating-promises': 'error',
   },

+ 104 - 69
storage-node-v2/README.md

@@ -30,6 +30,7 @@ USAGE
 <!-- commands -->
 * [`storage-node dev:init`](#storage-node-devinit)
 * [`storage-node dev:multihash`](#storage-node-devmultihash)
+* [`storage-node dev:sync`](#storage-node-devsync)
 * [`storage-node dev:upload`](#storage-node-devupload)
 * [`storage-node dev:verify-bag-id`](#storage-node-devverify-bag-id)
 * [`storage-node help [COMMAND]`](#storage-node-help-command)
@@ -38,17 +39,17 @@ USAGE
 * [`storage-node leader:delete-bucket`](#storage-node-leaderdelete-bucket)
 * [`storage-node leader:invite-operator`](#storage-node-leaderinvite-operator)
 * [`storage-node leader:remove-operator`](#storage-node-leaderremove-operator)
-* [`storage-node leader:set-bucket-limits`](#storage-node-leaderset-bucket-limits)
-* [`storage-node leader:set-global-uploading-status`](#storage-node-leaderset-global-uploading-status)
+* [`storage-node leader:set-uploading-block`](#storage-node-leaderset-uploading-block)
 * [`storage-node leader:update-bag`](#storage-node-leaderupdate-bag)
 * [`storage-node leader:update-bag-limit`](#storage-node-leaderupdate-bag-limit)
 * [`storage-node leader:update-blacklist`](#storage-node-leaderupdate-blacklist)
-* [`storage-node leader:update-bucket-status`](#storage-node-leaderupdate-bucket-status)
 * [`storage-node leader:update-data-fee`](#storage-node-leaderupdate-data-fee)
 * [`storage-node leader:update-dynamic-bag-policy`](#storage-node-leaderupdate-dynamic-bag-policy)
 * [`storage-node leader:update-voucher-limits`](#storage-node-leaderupdate-voucher-limits)
 * [`storage-node operator:accept-invitation`](#storage-node-operatoraccept-invitation)
+* [`storage-node operator:set-bucket-limits`](#storage-node-operatorset-bucket-limits)
 * [`storage-node operator:set-metadata`](#storage-node-operatorset-metadata)
+* [`storage-node operator:update-bucket-status`](#storage-node-operatorupdate-bucket-status)
 * [`storage-node server`](#storage-node-server)
 
 ## `storage-node dev:init`
@@ -84,6 +85,30 @@ OPTIONS
 
 _See code: [src/commands/dev/multihash.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/dev/multihash.ts)_
 
+## `storage-node dev:sync`
+
+Synchronizes data: fixes the differences between the local data folder and the worker ID obligations from the runtime.
+
+```
+USAGE
+  $ storage-node dev:sync
+
+OPTIONS
+  -d, --uploads=uploads                                (required) Data uploading directory (absolute path).
+  -h, --help                                           show CLI help
+
+  -o, --dataSourceOperatorHost=dataSourceOperatorHost  Storage node host and port (e.g.: some.com:8081) to get data
+                                                       from.
+
+  -p, --syncWorkersNumber=syncWorkersNumber            Sync workers number (max async operations in progress).
+
+  -q, --queryNodeHost=queryNodeHost                    Query node host and port (e.g.: some.com:8081)
+
+  -w, --workerId=workerId                              (required) Storage node operator worker ID.
+```
+
+_See code: [src/commands/dev/sync.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/dev/sync.ts)_
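
For example, a one-off sync against a local setup might look like this (worker ID and paths are illustrative):

```
$ storage-node dev:sync --workerId 1 --uploads /data/uploads --queryNodeHost localhost:8081
```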
+
 ## `storage-node dev:upload`
 
 Upload data object (development mode only).
@@ -261,45 +286,25 @@ OPTIONS
 
 _See code: [src/commands/leader/remove-operator.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/remove-operator.ts)_
 
-## `storage-node leader:set-bucket-limits`
-
-Set VoucherObjectsSizeLimit and VoucherObjectsNumberLimit for the storage bucket.
-
-```
-USAGE
-  $ storage-node leader:set-bucket-limits
-
-OPTIONS
-  -h, --help               show CLI help
-  -i, --bucketId=bucketId  (required) Storage bucket ID
-  -k, --keyfile=keyfile    Key file for the account. Mandatory in non-dev environment.
-  -m, --dev                Use development mode
-  -o, --objects=objects    (required) New 'voucher object number limit' value
-  -p, --password=password  Key file password (optional).
-  -s, --size=size          (required) New 'voucher object size limit' value
-  -u, --apiUrl=apiUrl      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
-```
-
-_See code: [src/commands/leader/set-bucket-limits.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/set-bucket-limits.ts)_
-
-## `storage-node leader:set-global-uploading-status`
+## `storage-node leader:set-uploading-block`
 
 Set global uploading block. Requires storage working group leader permissions.
 
 ```
 USAGE
-  $ storage-node leader:set-global-uploading-status
+  $ storage-node leader:set-uploading-block
 
 OPTIONS
+  -d, --disable            Disables global uploading block.
+  -e, --enable             Enables global uploading block (default).
   -h, --help               show CLI help
   -k, --keyfile=keyfile    Key file for the account. Mandatory in non-dev environment.
   -m, --dev                Use development mode
   -p, --password=password  Key file password (optional).
-  -s, --set=(on|off)       (required) Sets global uploading block (on/off).
   -u, --apiUrl=apiUrl      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
 ```
 
-_See code: [src/commands/leader/set-global-uploading-status.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/set-global-uploading-status.ts)_
+_See code: [src/commands/leader/set-uploading-block.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/set-uploading-block.ts)_
 
 ## `storage-node leader:update-bag`
 
@@ -310,8 +315,8 @@ USAGE
   $ storage-node leader:update-bag
 
 OPTIONS
-  -a, --add=add
-      [default: ] ID of a bucket to add to bag
+  -b, --bucket=bucket
+      (required) Storage bucket ID
 
   -h, --help
       show CLI help
@@ -339,8 +344,8 @@ OPTIONS
   -p, --password=password
       Key file password (optional).
 
-  -r, --remove=remove
-      [default: ] ID of a bucket to remove from bag
+  -r, --remove
+      Remove a bucket from the bag
 
   -u, --apiUrl=apiUrl
       Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
@@ -376,39 +381,17 @@ USAGE
   $ storage-node leader:update-blacklist
 
 OPTIONS
-  -a, --add=add            [default: ] Content ID to add
+  -c, --cid=cid            (required) Content ID
   -h, --help               show CLI help
   -k, --keyfile=keyfile    Key file for the account. Mandatory in non-dev environment.
   -m, --dev                Use development mode
   -p, --password=password  Key file password (optional).
-  -r, --remove=remove      [default: ] Content ID to remove
+  -r, --remove             Remove a content ID from the blacklist
   -u, --apiUrl=apiUrl      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
 ```
 
 _See code: [src/commands/leader/update-blacklist.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/update-blacklist.ts)_
 
-## `storage-node leader:update-bucket-status`
-
-Update storage bucket status (accepting new bags).
-
-```
-USAGE
-  $ storage-node leader:update-bucket-status
-
-OPTIONS
-  -d, --disable            Disables accepting new bags.
-  -e, --enable             Enables accepting new bags (default).
-  -h, --help               show CLI help
-  -i, --bucketId=bucketId  (required) Storage bucket ID
-  -k, --keyfile=keyfile    Key file for the account. Mandatory in non-dev environment.
-  -m, --dev                Use development mode
-  -p, --password=password  Key file password (optional).
-  -s, --set=(on|off)       (required) Sets 'accepting new bags' parameter for the bucket (on/off).
-  -u, --apiUrl=apiUrl      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
-```
-
-_See code: [src/commands/leader/update-bucket-status.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/update-bucket-status.ts)_
-
 ## `storage-node leader:update-data-fee`
 
 Update data size fee. Requires storage working group leader permissions.
@@ -437,13 +420,14 @@ USAGE
   $ storage-node leader:update-dynamic-bag-policy
 
 OPTIONS
-  -h, --help                      show CLI help
-  -k, --keyfile=keyfile           Key file for the account. Mandatory in non-dev environment.
-  -m, --dev                       Use development mode
-  -n, --number=number             (required) New storage buckets number
-  -p, --password=password         Key file password (optional).
-  -t, --bagType=(Channel|Member)  (required) Dynamic bag type (Channel, Member).
-  -u, --apiUrl=apiUrl             Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
+  -c, --channel            Channel dynamic bag type
+  -e, --member             Member dynamic bag type (default)
+  -h, --help               show CLI help
+  -k, --keyfile=keyfile    Key file for the account. Mandatory in non-dev environment.
+  -m, --dev                Use development mode
+  -n, --number=number      (required) New storage buckets number
+  -p, --password=password  Key file password (optional).
+  -u, --apiUrl=apiUrl      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
 ```
 
 _See code: [src/commands/leader/update-dynamic-bag-policy.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/update-dynamic-bag-policy.ts)_
@@ -488,6 +472,28 @@ OPTIONS
 
 _See code: [src/commands/operator/accept-invitation.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/operator/accept-invitation.ts)_
 
+## `storage-node operator:set-bucket-limits`
+
+Set VoucherObjectsSizeLimit and VoucherObjectsNumberLimit for the storage bucket.
+
+```
+USAGE
+  $ storage-node operator:set-bucket-limits
+
+OPTIONS
+  -h, --help               show CLI help
+  -i, --bucketId=bucketId  (required) Storage bucket ID
+  -k, --keyfile=keyfile    Key file for the account. Mandatory in non-dev environment.
+  -m, --dev                Use development mode
+  -o, --objects=objects    (required) New 'voucher object number limit' value
+  -p, --password=password  Key file password (optional).
+  -s, --size=size          (required) New 'voucher object size limit' value
+  -u, --apiUrl=apiUrl      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
+  -w, --workerId=workerId  (required) Storage operator worker ID
+```
+
+_See code: [src/commands/operator/set-bucket-limits.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/operator/set-bucket-limits.ts)_
+
 ## `storage-node operator:set-metadata`
 
 Set metadata for the storage bucket.
@@ -509,23 +515,52 @@ OPTIONS
 
 _See code: [src/commands/operator/set-metadata.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/operator/set-metadata.ts)_
 
-## `storage-node server`
+## `storage-node operator:update-bucket-status`
 
-Starts the storage node server.
+Update storage bucket status (accepting new bags).
 
 ```
 USAGE
-  $ storage-node server
+  $ storage-node operator:update-bucket-status
 
 OPTIONS
-  -d, --uploads=uploads    (required) Data uploading directory (absolute path).
+  -d, --disable            Disables accepting new bags.
+  -e, --enable             Enables accepting new bags (default).
   -h, --help               show CLI help
+  -i, --bucketId=bucketId  (required) Storage bucket ID
   -k, --keyfile=keyfile    Key file for the account. Mandatory in non-dev environment.
   -m, --dev                Use development mode
-  -o, --port=port          (required) Server port.
   -p, --password=password  Key file password (optional).
   -u, --apiUrl=apiUrl      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
-  -w, --worker=worker      (required) Storage provider worker ID
+  -w, --workerId=workerId  (required) Storage operator worker ID
+```
+
+_See code: [src/commands/operator/update-bucket-status.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/operator/update-bucket-status.ts)_
+
+## `storage-node server`
+
+Starts the storage node server.
+
+```
+USAGE
+  $ storage-node server
+
+OPTIONS
+  -d, --uploads=uploads                      (required) Data uploading directory (absolute path).
+  -h, --help                                 show CLI help
+  -i, --syncInterval=syncInterval            [default: 1] Interval between synchronizations (in minutes)
+  -k, --keyfile=keyfile                      Key file for the account. Mandatory in non-dev environment.
+  -m, --dev                                  Use development mode
+  -o, --port=port                            (required) Server port.
+  -p, --password=password                    Key file password (optional).
+  -q, --queryNodeHost=queryNodeHost          Query node host and port (e.g.: some.com:8081)
+  -r, --syncWorkersNumber=syncWorkersNumber  [default: 20] Sync workers number (max async operations in progress).
+  -s, --sync                                 Enable data synchronization.
+
+  -u, --apiUrl=apiUrl                        Runtime API URL. Mandatory in non-dev environment. Default is
+                                             ws://localhost:9944
+
+  -w, --worker=worker                        (required) Storage provider worker ID
 ```
 
 _See code: [src/commands/server.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/server.ts)_
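
For example, a development-mode server with synchronization enabled might be started like this (values are illustrative):

```
$ storage-node server --dev --worker 1 --port 3333 --uploads /data/uploads --queryNodeHost localhost:8081 --sync
```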

+ 37 - 6
storage-node-v2/package.json

@@ -8,7 +8,9 @@
   },
   "bugs": "https://github.com/Joystream/joystream/issues",
   "dependencies": {
+    "@apollo/client": "^3.3.21",
     "@joystream/types": "^0.17.0",
+    "@joystream/metadata-protobuf": "^1.0.0",
     "@oclif/command": "^1",
     "@oclif/config": "^1",
     "@oclif/plugin-help": "^3",
@@ -16,34 +18,56 @@
     "@types/base64url": "^2.0.0",
     "@types/express": "4.17.13",
     "@types/file-type": "^10.9.1",
+    "@types/lodash": "^4.14.171",
     "@types/multer": "^1.4.5",
     "@types/node-cache": "^4.2.5",
+    "@types/promise-timeout": "^1.3.0",
     "@types/read-chunk": "^3.1.0",
+    "@types/rimraf": "^3.0.2",
     "@types/send": "^0.17.0",
+    "@types/superagent": "^4.1.12",
+    "@types/url-join": "^4.0.1",
+    "@types/uuid": "^8.3.1",
     "@types/winston": "^2.4.4",
+    "ajv": "^7",
     "await-lock": "^2.1.0",
     "base64url": "^3.0.1",
     "blake3": "^2.1.4",
+    "cross-fetch": "^3.1.4",
     "express": "4.17.1",
-    "express-openapi-validator": "^4.12.4",
+    "express-openapi-validator": "4.12.4",
     "express-winston": "^4.1.0",
+    "fast-folder-size": "^1.4.0",
     "file-type": "^16.5.0",
     "lodash": "^4.17.21",
     "multihashes": "^4.0.2",
     "node-cache": "^5.1.2",
     "openapi-editor": "^0.3.0",
+    "promise-timeout": "^1.3.0",
     "read-chunk": "^3.2.0",
+    "rimraf": "^3.0.2",
     "send": "^0.17.1",
+    "sleep-promise": "^9.1.0",
+    "superagent": "^6.1.0",
     "tslib": "^1",
-    "winston": "^3.3.3"
+    "url-join": "^4.0.1",
+    "uuid": "^8.3.2",
+    "winston": "^3.3.3",
+    "winston-elasticsearch": "^0.15.8"
   },
   "devDependencies": {
+    "@graphql-codegen/cli": "^1.21.4",
+    "@graphql-codegen/import-types-preset": "^1.18.1",
+    "@graphql-codegen/typescript": "^1.22.0",
+    "@graphql-codegen/typescript-document-nodes": "^1.17.11",
+    "@graphql-codegen/typescript-operations": "^1.17.16",
     "@joystream/eslint-config": "^1.0.0",
     "@oclif/dev-cli": "^1",
     "@oclif/test": "^1",
     "@types/chai": "^4",
     "@types/mocha": "^5",
     "@types/node": "^10",
+    "@types/pg": "^8.6.1",
     "@types/swagger-ui-express": "^4.1.2",
     "@typescript-eslint/eslint-plugin": "3.8.0",
     "@typescript-eslint/parser": "3.8.0",
@@ -54,6 +78,7 @@
     "globby": "^10",
     "mocha": "^5",
     "nyc": "^14",
+    "pg": "^8.7.1",
     "prettier": "^2.3.0",
     "sinon": "^11.1.1",
     "swagger-ui-express": "^4.1.6",
@@ -88,11 +113,14 @@
       "@oclif/plugin-help"
     ],
     "topics": {
-      "wg": {
-        "description": "Storage working group commands."
+      "dev": {
+        "description": "Development mode commands."
       },
-      "wg:leader": {
+      "leader": {
         "description": "Storage working group leader commands."
+      },
+      "operator": {
+        "description": "Storage provider(operator) commands."
       }
     }
   },
@@ -109,7 +137,10 @@
     "build": "tsc --build tsconfig.json",
     "format": "prettier ./src --write",
     "lint": "eslint ./src --ext .ts",
-    "api:edit": "openapi-editor --file ./src/api-spec/openapi.yaml --port 10021"
+    "api:edit": "openapi-editor --file ./src/api-spec/openapi.yaml --port 10021",
+    "generate:types:graphql": "yarn graphql-codegen -c ./src/services/queryNode/codegen.yml",
+    "generate:types:json-schema": "yarn ts-node ./src/services/metadata/generateTypes.ts",
+    "ensure": "yarn format && yarn lint --fix && yarn build"
   },
   "types": "lib/index.d.ts"
 }

+ 200 - 0
storage-node-v2/scripts/generate-test-data.ts

@@ -0,0 +1,200 @@
+#!/usr/bin/env ts-node
+
+import fs from 'fs'
+import path from 'path'
+const fsPromises = fs.promises
+import { Client, ClientConfig, QueryResult } from 'pg'
+import { exit } from 'process'
+
+async function doJob(): Promise<void> {
+  const uploadDirectory = '/Users/shamix/uploads5'
+  const fileSize = 1000
+
+  const objectNumber = 10000
+  const bagNumber = 10
+  const bucketNumber = 10
+
+  const urls = [
+    `http://localhost:3333/`,
+    `http://localhost:3334/`,
+    `http://localhost:3335/`,
+  ]
+
+  const updateDb = false
+  const generateFiles = true
+
+  if (updateDb) {
+    const config: ClientConfig = {
+      user: 'postgres',
+      password: 'postgres',
+      database: 'query_node_processor',
+      host: 'localhost'
+    }
+    const client = new Client(config)
+    await client.connect()
+
+    // Cleanup
+    await client.query('TRUNCATE storage_data_object')
+    await client.query('TRUNCATE storage_bucket CASCADE')
+    await client.query('TRUNCATE storage_bag CASCADE')
+    await client.query('TRUNCATE storage_bag_storage_bucket')
+  
+    // Generate objects
+    await createBags(client, bagNumber)
+    await createBuckets(client, bucketNumber)
+    await createBagBucketLinks(client)
+    await createBucketWorkerLinks(client)
+    await createBucketOperatorUrls(client, urls)
+    const dbTasks = createDataObjects(client, objectNumber)
+    await Promise.all(dbTasks)
+
+    await client.end()
+  }
+  
+  if (generateFiles) {
+    await createFiles(uploadDirectory, fileSize, objectNumber)
+  }
+}
+
+function createDataObjects(client: Client, objectNumber: number): Promise<QueryResult<any>>[] {
+  const tasks: Promise<QueryResult<any>>[] = []
+
+  const bagId = '1'
+  for(let i: number = 1; i <= objectNumber; i++){
+    const name = i.toString()
+
+    console.log(`Writing ${i} data object...`)
+
+    const dbTask = client.query(
+      `INSERT INTO storage_data_object(storage_bag_id, ipfs_hash, id, created_by_id, version, is_accepted, size) 
+       values(${bagId}, ${name}, ${name}, 'some', '1', true, 100)`
+    )
+
+    tasks.push(dbTask)
+  }
+
+  return tasks
+}
+
+async function createFiles(uploadDirectory: string, fileSize: number, objectNumber: number): Promise<void> {
+  const data = new Uint8Array(fileSize)
+  const tasks: Promise<void>[] = []
+  for(let i: number = 1; i <= objectNumber; i++){
+    const name = i.toString()
+
+    console.log(`Writing ${i} file...`)
+
+    const fileTask = fsPromises.writeFile(
+      path.join(uploadDirectory, name), 
+      data
+    )
+
+    tasks.push(fileTask)
+
+    if (i % 100 === 0){
+      await Promise.all(tasks)
+      tasks.length = 0
+    }
+  }
+
+  if (tasks.length > 0) {
+    await Promise.all(tasks)
+  }
+}
+
+async function createBags(client: Client, bagNumber: number): Promise<void> {
+  for(let i: number = 1; i <= bagNumber; i++){
+    const name = i.toString()
+
+    console.log(`Writing ${i} bag...`)
+
+    await client.query(
+      `INSERT INTO storage_bag(id, created_by_id, version, owner) 
+       values(${name}, 'some', '1',  '{}')`
+    )
+  }
+}
+
+async function createBuckets(client: Client, bucketNumber: number): Promise<void> {
+  const missingWorkerId = `{"isTypeOf": "StorageBucketOperatorStatusMissing"}`
+  for(let i: number = 1; i <= bucketNumber; i++){
+    const name = i.toString()
+
+    console.log(`Writing ${i} bucket...`)
+
+    await client.query(
+      `INSERT INTO storage_bucket(id, created_by_id, version, operator_status, accepting_new_bags, data_objects_size_limit,data_object_count_limit) 
+       values(${name}, 'some', '1',  '${missingWorkerId}', true, 100000000, 100000000)`
+    )
+  }
+}
+
+async function createBagBucketLinks(client: Client): Promise<void> {
+    console.log(`Writing bag to bucket links...`)
+
+    // Bucket1 to Bag1
+    await client.query(
+      `INSERT INTO storage_bag_storage_bucket(storage_bag_id, storage_bucket_id) 
+       values('1', '1')`
+    )
+    // Bucket2 to Bag1
+    await client.query(
+      `INSERT INTO storage_bag_storage_bucket(storage_bag_id, storage_bucket_id) 
+       values('1', '2')`
+    )    
+    // Bucket3 to Bag1
+    await client.query(
+      `INSERT INTO storage_bag_storage_bucket(storage_bag_id, storage_bucket_id) 
+       values('1', '3')`
+    )
+}
+
+async function createBucketWorkerLinks(client: Client): Promise<void> {
+    console.log(`Writing bucket worker links...`)
+
+    const assignedWorker0 = `{"isTypeOf": "StorageBucketOperatorStatusActive", "workerId": 0}`
+    const assignedWorker1 = `{"isTypeOf": "StorageBucketOperatorStatusActive", "workerId": 1}`
+    const assignedWorker2 = `{"isTypeOf": "StorageBucketOperatorStatusActive", "workerId": 2}`
+
+    // Bucket1 to Worker0
+    await client.query(
+      `UPDATE storage_bucket
+       SET operator_status = '${assignedWorker0}'
+       WHERE id = '1'`
+    )
+    // Bucket2 to Worker1
+    await client.query(
+      `UPDATE storage_bucket
+       SET operator_status = '${assignedWorker1}'
+       WHERE id = '2'`
+    )   
+     // Bucket3 to Worker2
+    await client.query(
+      `UPDATE storage_bucket
+       SET operator_status = '${assignedWorker2}'
+       WHERE id = '3'`
+    )
+}
+
+async function createBucketOperatorUrls(client: Client, urls: string[]): Promise<void> {
+    console.log(`Writing bucket operator URLs...`)
+
+    for (let i = 0; i < urls.length; i++) {
+      const bucketId = i + 1
+      const metadata = urls[i]
+
+      await client.query(
+        `UPDATE storage_bucket
+         SET operator_metadata = '${metadata}'
+         WHERE id = '${bucketId}'`
+      )
+    }
+}
+
+doJob().then(() => {
+  console.log('Done')
+}).catch((err) => {
+  console.log(err)
+  exit(1)
+})
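
The script interpolates fixed test values directly into SQL strings, which is acceptable for generated test data; for anything user-supplied, the pg client's parameterized form is safer. A minimal sketch of the same bag insert (values are illustrative):

```typescript
import { Client } from 'pg'

// Same insert as above, but with pg parameter placeholders instead of string interpolation.
async function insertBag(client: Client, id: string): Promise<void> {
  await client.query(
    'INSERT INTO storage_bag(id, created_by_id, version, owner) VALUES($1, $2, $3, $4)',
    [id, 'some', '1', '{}']
  )
}
```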

+ 12 - 0
storage-node-v2/scripts/operatorMetadata.json

@@ -0,0 +1,12 @@
+{
+  "endpoint": "http://localhost:3333",
+  "location": {
+    "countryCode": "US",
+    "city": "Chicago",
+    "coordinates": {
+      "latitude": 50,
+      "longitude": 50
+    }
+  },
+  "extra": "Extra"
+}

+ 94 - 6
storage-node-v2/src/api-spec/openapi.yaml

@@ -15,8 +15,10 @@ servers:
   - url: http://localhost:3333/api/v1/
 
 tags:
-  - name: public
-    description: Public storage node API
+  - name: files
+    description: Storage node Files API
+  - name: state
+    description: Storage node State API
 
 paths:
   /files/{cid}:
@@ -24,7 +26,7 @@ paths:
       operationId: publicApi.getFile
       description: Returns a media file.
       tags:
-        - public
+        - files
       parameters:
         - name: cid
           required: true
@@ -76,7 +78,7 @@ paths:
       operationId: publicApi.getFileHeaders
       description: Returns a media file headers.
       tags:
-        - public
+        - files
       parameters:
         - name: cid
           required: true
@@ -100,7 +102,7 @@ paths:
       description: Upload data
       operationId: publicApi.uploadFile
       tags:
-        - public
+        - files
       requestBody:
         content:
           multipart/form-data:
@@ -151,7 +153,7 @@ paths:
       description: Get auth token from a server.
       operationId: publicApi.authTokenForUploading
       tags:
-        - public
+        - files
       requestBody:
         description: Token request parameters,
         content:
@@ -181,6 +183,61 @@ paths:
               schema:
                 $ref: '#/components/schemas/ErrorResponse'
 
+  /state/data-objects:
+    get:
+      operationId: stateApi.getAllLocalDataObjects
+      description: Returns all local data objects.
+      tags:
+        - state
+      responses:
+        200:
+          description: Ok
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/DataObjectResponse'
+
+  /state/bags/{bagId}/data-objects:
+    get:
+      operationId: stateApi.getLocalDataObjectsByBagId
+      description: Returns local data objects for the bag.
+      tags:
+        - state
+      parameters:
+        - name: bagId
+          required: true
+          in: path
+          description: Bag ID
+          schema:
+            type: string
+      responses:
+        200:
+          description: Ok
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/DataObjectResponse'
+
+  /version:
+    get:
+      operationId: stateApi.getVersion
+      description: Returns server version.
+      tags:
+        - state
+      responses:
+        200:
+          description: Ok
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/VersionResponse'
+  /state/data:
+    get:
+      operationId: stateApi.getLocalDataStats
+      description: Returns local uploading directory stats.
+      tags:
+        - state
+      responses:
+        200:
+          description: Ok
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/DataStatsResponse'
+
 components:
   securitySchemes:
     UploadAuth:
@@ -227,3 +284,34 @@ components:
           type: string
         message:
           type: string
+    DataStatsResponse:
+      type: object
+      required:
+        - totalSize
+        - objectNumber
+      properties:
+        totalSize:
+          type: integer
+          format: int64
+        objectNumber:
+          type: integer
+          format: int64
+        tempDirSize:
+          type: integer
+          format: int64
+        tempDownloads:
+          type: integer
+          format: int64
+    VersionResponse:
+      type: object
+      required:
+        - version
+      properties:
+        version:
+          type: string
+        userAgent:
+          type: string
+    DataObjectResponse:
+      type: array
+      items:
+        type: string
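
The new state endpoints can be exercised with a plain HTTP request; a minimal sketch using cross-fetch (already in the dependencies), with the local default host assumed:

```typescript
import fetch from 'cross-fetch'

// Fetch all local data object CIDs; DataObjectResponse is a plain string array.
async function getLocalDataObjects(baseUrl = 'http://localhost:3333/api/v1'): Promise<string[]> {
  const response = await fetch(`${baseUrl}/state/data-objects`)
  return response.json()
}
```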

+ 40 - 16
storage-node-v2/src/command-base/ApiCommandBase.ts

@@ -1,6 +1,7 @@
 import { Command, flags } from '@oclif/command'
 import { createApi } from '../services/runtime/api'
 import { getAccountFromJsonFile, getAlicePair, getAccountFromUri } from '../services/runtime/accounts'
+import { parseBagId } from '../services/helpers/bagTypes'
 import { KeyringPair } from '@polkadot/keyring/types'
 import { ApiPromise } from '@polkadot/api'
 import logger from '../services/logger'
@@ -23,7 +24,7 @@ export default abstract class ApiCommandBase extends Command {
       description: 'Runtime API URL. Mandatory in non-dev environment.',
       default: 'ws://localhost:9944',
     }),
-    keyfile: flags.string({
+    keyFile: flags.string({
       char: 'k',
       description: 'Key file for the account. Mandatory in non-dev environment.',
     }),
@@ -31,10 +32,29 @@ export default abstract class ApiCommandBase extends Command {
       char: 'p',
       description: 'Key file password (optional). Could be overridden by ACCOUNT_PWD environment variable.',
     }),
-    accountURI: flags.string({
+    accountUri: flags.string({
       char: 'y',
       description:
-        'Account URI (optional). Has a priority over the keyfile and password flags. Could be overriden by ACCOUNT_URI environment variable.',
+        'Account URI (optional). Takes priority over the keyFile and password flags. Could be overridden by ACCOUNT_URI environment variable.',
+    }),
+  }
+
+  static extraFlags = {
+    bagId: flags.build({
+      parse: (value: string) => {
+        return parseBagId(value)
+      },
+      description: `Bag ID. Format: {bag_type}:{sub_type}:{id}.
+    - Bag types: 'static', 'dynamic'
+    - Sub types: 'static:council', 'static:wg', 'dynamic:member', 'dynamic:channel'
+    - Id:
+      - absent for 'static:council'
+      - working group name for 'static:wg'
+      - integer for 'dynamic:member' and 'dynamic:channel'
+    Examples:
+    - static:council
+    - static:wg:storage
+    - dynamic:member:4`,
     }),
   }
 
@@ -74,9 +94,13 @@ export default abstract class ApiCommandBase extends Command {
 
     // Some dev commands don't contain flags variables.
     const apiUrl = flags.apiUrl ?? 'ws://localhost:9944'
-    this.api = await createApi(apiUrl)
 
     logger.info(`Initialized runtime connection: ${apiUrl}`)
+    try {
+      this.api = await createApi(apiUrl)
+    } catch (err) {
+      logger.error(`Creating runtime API error: ${err.target?._url}`)
+    }
 
     await this.getApi()
   }
@@ -104,21 +128,21 @@ export default abstract class ApiCommandBase extends Command {
    * JSON-file or loads 'Alice' Keypair when in the development mode.
    *
    * @param dev - indicates the development mode (optional).
-   * @param keyfile - key file path (optional).
+   * @param keyFile - key file path (optional).
    * @param password - password for the key file (optional).
-   * @param accountURI - accountURI (optional). Overrides keyfile and password flags.
+   * @param accountUri - account URI (optional). Overrides keyFile and password flags.
    * @returns KeyringPair instance.
    */
-  getAccount(flags: { dev: boolean; keyfile?: string; password?: string; accountURI?: string }): KeyringPair {
+  getAccount(flags: { dev: boolean; keyFile?: string; password?: string; accountUri?: string }): KeyringPair {
     // Select account URI variable from flags key and environment variable.
-    let accountURI = flags.accountURI ?? ''
+    let accountUri = flags.accountUri ?? ''
     if (!_.isEmpty(process.env.ACCOUNT_URI)) {
-      if (!_.isEmpty(flags.accountURI)) {
+      if (!_.isEmpty(flags.accountUri)) {
         logger.warn(
          `Both environment variable and command line argument were provided for the account URI. Environment variable has priority.`
         )
       }
-      accountURI = process.env.ACCOUNT_URI ?? ''
+      accountUri = process.env.ACCOUNT_URI ?? ''
     }
 
     // Select password variable from flags key and environment variable.
@@ -132,18 +156,18 @@ export default abstract class ApiCommandBase extends Command {
       password = process.env.ACCOUNT_PWD ?? ''
     }
 
-    const keyfile = flags.keyfile ?? ''
+    const keyFile = flags.keyFile ?? ''
     // Create the Alice account for development mode.
     if (flags.dev) {
       return getAlicePair()
     }
     // Create an account using account URI
-    else if (!_.isEmpty(accountURI)) {
-      return getAccountFromUri(accountURI)
+    else if (!_.isEmpty(accountUri)) {
+      return getAccountFromUri(accountUri)
     }
-    // Create an account using the keyfile and password.
-    else if (!_.isEmpty(keyfile)) {
-      const account = getAccountFromJsonFile(keyfile)
+    // Create an account using the keyFile and password.
+    else if (!_.isEmpty(keyFile)) {
+      const account = getAccountFromJsonFile(keyFile)
       account.unlock(password)
 
       return account

+ 2 - 0
storage-node-v2/src/command-base/ExitCodes.ts

@@ -8,6 +8,8 @@ enum ExitCodes {
   InvalidParameters = 100,
   DevelopmentModeOnly,
   FileError,
+  InvalidWorkerId,
+  InvalidIntegerArray,
   ApiError = 200,
   UnsuccessfulRuntimeCall,
 }

+ 65 - 0
storage-node-v2/src/commands/dev/sync.ts

@@ -0,0 +1,65 @@
+import { Command, flags } from '@oclif/command'
+import { performSync } from '../../services/sync/synchronizer'
+import logger from '../../services/logger'
+
+/**
+ * CLI command:
+ * Synchronizes data: fixes the difference between node obligations and local
+ * storage.
+ *
+ * @remarks
+ * Should be run only during the development.
+ * Shell command: "dev:upload"
+ */
+export default class DevSync extends Command {
+  static description =
+    'Synchronizes data: fixes the differences between the local data folder and the worker ID obligations from the runtime.'
+
+  static flags = {
+    help: flags.help({ char: 'h' }),
+    workerId: flags.integer({
+      char: 'w',
+      required: true,
+      description: 'Storage node operator worker ID.',
+    }),
+    syncWorkersNumber: flags.integer({
+      char: 'p',
+      required: false,
+      description: 'Sync workers number (max async operations in progress).',
+    }),
+    queryNodeHost: flags.string({
+      char: 'q',
+      required: false,
+      description: 'Query node host and port (e.g.: some.com:8081)',
+    }),
+    dataSourceOperatorHost: flags.string({
+      char: 'o',
+      required: false,
+      description: 'Storage node host and port (e.g.: some.com:8081) to get data from.',
+    }),
+    uploads: flags.string({
+      char: 'd',
+      required: true,
+      description: 'Data uploading directory (absolute path).',
+    }),
+  }
+
+  async run(): Promise<void> {
+    const { flags } = this.parse(DevSync)
+
+    logger.info('Syncing...')
+
+    const queryNodeHost = flags.queryNodeHost ?? 'localhost:8081'
+    const queryNodeUrl = `http://${queryNodeHost}/graphql`
+    const syncWorkersNumber = flags.syncWorkersNumber ?? 20
+    const dataSourceOperatorHost = flags.dataSourceOperatorHost ?? 'localhost:3333'
+    const operatorUrl = `http://${dataSourceOperatorHost}/`
+
+    try {
+      await performSync(flags.workerId, syncWorkersNumber, queryNodeUrl, flags.uploads, operatorUrl)
+    } catch (err) {
+      logger.error(err)
+      logger.error(JSON.stringify(err, null, 2))
+    }
+  }
+}
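
The command is a thin wrapper around performSync; a programmatic call with the same defaults would look roughly like this (worker ID and paths are illustrative):

```typescript
import { performSync } from '../../services/sync/synchronizer'

async function syncOnce(): Promise<void> {
  // Arguments: workerId, syncWorkersNumber, queryNodeUrl, uploadsDir, operatorUrl.
  await performSync(1, 20, 'http://localhost:8081/graphql', '/data/uploads', 'http://localhost:3333/')
}
```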

+ 5 - 23
storage-node-v2/src/commands/dev/verify-bag-id.ts

@@ -1,6 +1,5 @@
-import { flags } from '@oclif/command'
+import { Command, flags } from '@oclif/command'
 import ApiCommandBase from '../../command-base/ApiCommandBase'
-import { parseBagId } from '../../services/helpers/bagTypes'
 import logger from '../../services/logger'
 
 /**
@@ -11,37 +10,20 @@ import logger from '../../services/logger'
  * Should be run only during development.
  * Shell command: "dev:verify-bag-id"
  */
-export default class DevVerifyBagId extends ApiCommandBase {
+export default class DevVerifyBagId extends Command {
   static description = 'The command verifies a bag ID supported by the storage node.'
 
   static flags = {
-    bagId: flags.string({
+    help: flags.help({ char: 'h' }),
+    bagId: ApiCommandBase.extraFlags.bagId({
       char: 'i',
       required: true,
-      description: `
-      Bag ID. Format: {bag_type}:{sub_type}:{id}.
-      - Bag types: 'static', 'dynamic'
-      - Sub types: 'static:council', 'static:wg', 'dynamic:member', 'dynamic:channel'
-      - Id: 
-        - absent for 'static:council'
-        - working group name for 'static:wg'
-        - integer for 'dynamic:member' and 'dynamic:channel'
-      Examples:
-      - static:council
-      - static:wg:storage
-      - dynamic:member:4
-      `,
     }),
-    ...ApiCommandBase.flags,
   }
 
   async run(): Promise<void> {
     const { flags } = this.parse(DevVerifyBagId)
 
-    const api = await this.getApi()
-    const parsedBagId = parseBagId(api, flags.bagId)
-
-    logger.info(`Correct bag id: ${flags.bagId}`)
-    logger.info(`Parsed: ${parsedBagId}`)
+    logger.info(`Parsed: ${flags.bagId}`)
   }
 }
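
Since parsing no longer needs a chain connection, the same check can be reproduced directly with parseBagId (import path taken from this diff; inputs mirror the documented format):

```typescript
import { parseBagId } from '../../services/helpers/bagTypes'

// Each call returns a parsed bag ID; malformed input throws.
parseBagId('static:council')
parseBagId('static:wg:storage')
parseBagId('dynamic:member:4')
```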

+ 6 - 18
storage-node-v2/src/commands/leader/update-bag.ts

@@ -1,10 +1,10 @@
 import { flags } from '@oclif/command'
 import { updateStorageBucketsForBag } from '../../services/runtime/extrinsics'
 import ApiCommandBase from '../../command-base/ApiCommandBase'
-import { parseBagId } from '../../services/helpers/bagTypes'
 import logger from '../../services/logger'
 import ExitCodes from '../../command-base/ExitCodes'
 import _ from 'lodash'
+import { CLIError } from '@oclif/errors'
 
 // Custom 'integer array' oclif flag.
 const integerArrFlags = {
@@ -12,7 +12,9 @@ const integerArrFlags = {
     parse: (value: string) => {
       const arr: number[] = value.split(',').map((v) => {
         if (!/^-?\d+$/.test(v)) {
-          throw new Error(`Expected comma-separated integers, but received: ${value}`)
+          throw new CLIError(`Expected comma-separated integers, but received: ${value}`, {
+            exit: ExitCodes.InvalidIntegerArray,
+          })
         }
         return parseInt(v)
       })
@@ -43,22 +45,9 @@ export default class LeaderUpdateBag extends ApiCommandBase {
       description: 'ID of a bucket to remove from bag',
       default: [],
     }),
-    bagId: flags.string({
+    bagId: ApiCommandBase.extraFlags.bagId({
       char: 'i',
       required: true,
-      description: `
-      Bag ID. Format: {bag_type}:{sub_type}:{id}.
-      - Bag types: 'static', 'dynamic'
-      - Sub types: 'static:council', 'static:wg', 'dynamic:member', 'dynamic:channel'
-      - Id: 
-        - absent for 'static:council'
-        - working group name for 'static:wg'
-        - integer for 'dynamic:member' and 'dynamic:channel'
-      Examples:
-      - static:council
-      - static:wg:storage
-      - dynamic:member:4
-      `,
     }),
     ...ApiCommandBase.flags,
   }
@@ -78,9 +67,8 @@ export default class LeaderUpdateBag extends ApiCommandBase {
 
     const account = this.getAccount(flags)
     const api = await this.getApi()
-    const bagId = parseBagId(api, flags.bagId)
 
-    const success = await updateStorageBucketsForBag(api, bagId, account, flags.add, flags.remove)
+    const success = await updateStorageBucketsForBag(api, flags.bagId, account, flags.add, flags.remove)
 
     this.exitAfterRuntimeCall(success)
   }
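
For reference, the custom integer-array flag above reduces to this standalone parser (a sketch of the same validation, not the exported flag):

```typescript
// Parse '1,2,3' into [1, 2, 3]; reject anything that is not a comma-separated integer list.
function parseIntegerArray(value: string): number[] {
  return value.split(',').map((v) => {
    if (!/^-?\d+$/.test(v)) {
      throw new Error(`Expected comma-separated integers, but received: ${value}`)
    }
    return parseInt(v, 10)
  })
}
```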

+ 1 - 1
storage-node-v2/src/commands/leader/update-dynamic-bag-policy.ts

@@ -44,7 +44,7 @@ export default class LeaderUpdateDynamicBagPolicy extends ApiCommandBase {
     const newNumber = flags.number
 
     const api = await this.getApi()
-    const dynamicBagType = parseDynamicBagType(api, flags.bagType)
+    const dynamicBagType = parseDynamicBagType(flags.bagType)
     const success = await updateNumberOfStorageBucketsInDynamicBagCreationPolicy(
       api,
       account,

+ 20 - 8
storage-node-v2/src/commands/operator/set-metadata.ts

@@ -2,7 +2,9 @@ import { flags } from '@oclif/command'
 import { setStorageOperatorMetadata } from '../../services/runtime/extrinsics'
 import ApiCommandBase from '../../command-base/ApiCommandBase'
 import logger from '../../services/logger'
-
+import { ValidationService } from '../../services/metadata/validationService'
+import { StorageBucketOperatorMetadata, IStorageBucketOperatorMetadata } from '@joystream/metadata-protobuf'
+import fs from 'fs'
+
 /**
  * CLI command:
  * Sets metadata for the storage bucket.
@@ -26,19 +28,29 @@ export default class OperatorSetMetadata extends ApiCommandBase {
       required: true,
       description: 'Storage bucket operator ID (storage group worker ID)',
     }),
-    metadata: flags.string({
-      char: 'm',
-      description: 'Storage bucket operator metadata',
+    endpoint: flags.string({
+      char: 'e',
+      description: 'Storage node endpoint',
+      exclusive: ['jsonFile'],
+    }),
+    jsonFile: flags.string({
+      char: 'j',
+      description: 'Path to JSON metadata file',
+      exclusive: ['endpoint'],
     }),
     ...ApiCommandBase.flags,
   }
 
   async run(): Promise<void> {
     const { flags } = this.parse(OperatorSetMetadata)
+    const { operatorId, bucketId, jsonFile, endpoint } = flags
+
+    const validation = new ValidationService()
+    const metadata: IStorageBucketOperatorMetadata = jsonFile
+      ? validation.validate('OperatorMetadata', JSON.parse(fs.readFileSync(jsonFile).toString()))
+      : { endpoint }
 
-    const operator = flags.operatorId
-    const bucket = flags.bucketId
-    const metadata = flags.metadata ?? ''
+    const encodedMetadata = '0x' + Buffer.from(StorageBucketOperatorMetadata.encode(metadata).finish()).toString('hex')
 
     logger.info('Setting the storage operator metadata...')
     if (flags.dev) {
@@ -48,7 +60,7 @@ export default class OperatorSetMetadata extends ApiCommandBase {
     const account = this.getAccount(flags)
 
     const api = await this.getApi()
-    const success = await setStorageOperatorMetadata(api, account, operator, bucket, metadata)
+    const success = await setStorageOperatorMetadata(api, account, operatorId, bucketId, encodedMetadata)
 
     this.exitAfterRuntimeCall(success)
   }
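
The validate-then-encode step can also be reproduced standalone, e.g. against the scripts/operatorMetadata.json sample above (file path is illustrative):

```typescript
import fs from 'fs'
import { StorageBucketOperatorMetadata } from '@joystream/metadata-protobuf'
import { ValidationService } from '../../services/metadata/validationService'

// Validate raw JSON against the OperatorMetadata schema, then protobuf-encode it as a hex string.
const validation = new ValidationService()
const raw = JSON.parse(fs.readFileSync('./scripts/operatorMetadata.json').toString())
const metadata = validation.validate('OperatorMetadata', raw)
const encodedMetadata = '0x' + Buffer.from(StorageBucketOperatorMetadata.encode(metadata).finish()).toString('hex')
```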

+ 156 - 3
storage-node-v2/src/commands/server.ts

@@ -1,7 +1,18 @@
 import { flags } from '@oclif/command'
 import { createApp } from '../services/webApi/app'
 import ApiCommandBase from '../command-base/ApiCommandBase'
-import logger from '../services/logger'
+import logger, { initElasticLogger } from '../services/logger'
+import { ApiPromise } from '@polkadot/api'
+import { performSync } from '../services/sync/synchronizer'
+import sleep from 'sleep-promise'
+import rimraf from 'rimraf'
+import _ from 'lodash'
+import path from 'path'
+import { promisify } from 'util'
+import { KeyringPair } from '@polkadot/keyring/types'
+import ExitCodes from './../command-base/ExitCodes'
+import { CLIError } from '@oclif/errors'
+import { Worker } from '@joystream/types/working-group'
 
 /**
  * CLI command:
@@ -29,30 +40,96 @@ export default class Server extends ApiCommandBase {
       required: true,
       description: 'Server port.',
     }),
+    sync: flags.boolean({
+      char: 's',
+      description: 'Enable data synchronization.',
+      default: false,
+    }),
+    syncInterval: flags.integer({
+      char: 'i',
+      description: 'Interval between synchronizations (in minutes)',
+      default: 1,
+    }),
+    queryNodeHost: flags.string({
+      char: 'q',
+      required: true,
+      description: 'Query node host and port (e.g.: some.com:8081)',
+    }),
+    syncWorkersNumber: flags.integer({
+      char: 'r',
+      required: false,
+      description: 'Number of sync workers (max async operations in progress).',
+      default: 20,
+    }),
+    elasticSearchHost: flags.string({
+      char: 'e',
+      required: false,
+      description: 'Elasticsearch host and port (e.g.: some.com:8081).',
+    }),
+    disableUploadAuth: flags.boolean({
+      char: 'a',
+      description: 'Disable upload authentication (should be used in testing context only).',
+      default: false,
+    }),
     ...ApiCommandBase.flags,
   }
 
   async run(): Promise<void> {
     const { flags } = this.parse(Server)
 
+    const tempDirName = 'temp'
+    await removeTempDirectory(flags.uploads, tempDirName)
+
+    let elasticUrl
+    if (!_.isEmpty(flags.elasticSearchHost)) {
+      elasticUrl = `http://${flags.elasticSearchHost}`
+      initElasticLogger(elasticUrl)
+    }
+
+    const queryNodeUrl = `http://${flags.queryNodeHost}/graphql`
+    logger.info(`Query node endpoint set: ${queryNodeUrl}`)
+
     if (flags.dev) {
       await this.ensureDevelopmentChain()
     }
 
+    if (flags.disableUploadAuth) {
+      logger.warn(`Upload authentication disabled.`)
+    }
+
+    if (flags.sync) {
+      logger.info(`Synchronization enabled.`)
+
+      runSyncWithInterval(flags.worker, queryNodeUrl, flags.uploads, flags.syncWorkersNumber, flags.syncInterval)
+    }
+
     const account = this.getAccount(flags)
     const api = await this.getApi()
 
+    await verifyWorkerId(api, flags.worker, account)
+
     try {
       const port = flags.port
       const workerId = flags.worker ?? 0
       const maxFileSize = await api.consts.storage.maxDataObjectSize.toNumber()
       logger.debug(`Max file size runtime parameter: ${maxFileSize}`)
 
-      const app = await createApp(api, account, workerId, flags.uploads, maxFileSize)
+      const app = await createApp({
+        api,
+        account,
+        workerId,
+        maxFileSize,
+        uploadsDir: flags.uploads,
+        tempDirName,
+        process: this.config,
+        queryNodeUrl,
+        enableUploadingAuth: !flags.disableUploadAuth,
+        elasticSearchEndpoint: elasticUrl,
+      })
       logger.info(`Listening on http://localhost:${port}`)
       app.listen(port)
     } catch (err) {
-      logger.error(`Error: ${err}`)
+      logger.error(`Server error: ${err}`)
     }
   }
 
@@ -60,3 +137,79 @@ export default class Server extends ApiCommandBase {
   /* eslint-disable @typescript-eslint/no-empty-function */
   async finally(): Promise<void> {}
 }
+
+/**
+ * Runs the data synchronization process.
+ *
+ * @param workerId - worker ID
+ * @param queryNodeUrl - Query Node URL for data fetching
+ * @param uploadsDirectory - data uploads directory
+ * @param syncWorkersNumber - the number of async processes for sync
+ * @param syncIntervalMinutes - the interval between sync runs (in minutes)
+ *
+ * @returns void promise.
+ */
+function runSyncWithInterval(
+  workerId: number,
+  queryNodeUrl: string,
+  uploadsDirectory: string,
+  syncWorkersNumber: number,
+  syncIntervalMinutes: number
+) {
+  setTimeout(async () => {
+    const sleepIntervalInMillis = syncIntervalMinutes * 60 * 1000
+
+    logger.info(`Sync paused for ${syncIntervalMinutes} minute(s).`)
+    await sleep(sleepIntervalInMillis)
+    logger.info(`Resuming sync...`)
+
+    try {
+      await performSync(workerId, syncWorkersNumber, queryNodeUrl, uploadsDirectory)
+    } catch (err) {
+      logger.error(`Critical sync error: ${err}`)
+    }
+
+    runSyncWithInterval(workerId, queryNodeUrl, uploadsDirectory, syncWorkersNumber, syncIntervalMinutes)
+  }, 0)
+}
+
+/**
+ * Removes the temporary directory from the uploads directory.
+ * All files in the temp directory are deleted.
+ *
+ * @param uploadsDir - data uploads directory
+ * @param tempDirName - temporary directory name within the uploading directory
+ * @returns void promise.
+ */
+async function removeTempDirectory(uploadsDir: string, tempDirName: string): Promise<void> {
+  try {
+    logger.info(`Removing temp directory ...`)
+    const tempFileUploadingDir = path.join(uploadsDir, tempDirName)
+
+    const rimrafAsync = promisify(rimraf)
+    await rimrafAsync(tempFileUploadingDir)
+  } catch (err) {
+    logger.error(`Removing temp directory error: ${err}`)
+  }
+}
+
+/**
+ * Verifies that the worker ID from the command line matches the provided Joystream account.
+ * Throws an error when they don't match.
+ *
+ * @param api - runtime API promise
+ * @param workerId - worker ID from the command line arguments
+ * @param account - Joystream account KeyringPair
+ * @returns void promise.
+ */
+async function verifyWorkerId(api: ApiPromise, workerId: number, account: KeyringPair): Promise<void> {
+  // Cast Codec type to Worker type
+  const workerObj = (await api.query.storageWorkingGroup.workerById(workerId)) as unknown
+  const worker = workerObj as Worker
+
+  if (worker.role_account_id.toString() !== account.address) {
+    throw new CLIError(`Provided worker ID doesn't match the Joystream account.`, {
+      exit: ExitCodes.InvalidWorkerId,
+    })
+  }
+}
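
The `runSyncWithInterval` scheduling above is a `setTimeout`-wrapped recursion; the same pause-then-sync cycle can be read as a plain loop. A minimal sketch (the `doSync` callback stands in for the `performSync` call and is an assumption, not the actual API):

    // Sketch: sleep for the interval, sync, repeat; errors never kill the loop.
    async function syncLoop(doSync: () => Promise<void>, intervalMinutes: number): Promise<never> {
      for (;;) {
        await new Promise<void>((resolve) => setTimeout(resolve, intervalMinutes * 60 * 1000))
        try {
          await doSync()
        } catch (err) {
          console.error(`Critical sync error: ${err}`)
        }
      }
    }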

+ 38 - 27
storage-node-v2/src/services/helpers/bagTypes.ts

@@ -1,9 +1,23 @@
 import { BagId, DynamicBagType, DynamicBagTypeKey, Static, Dynamic } from '@joystream/types/storage'
 import { WorkingGroup } from '@joystream/types/common'
-import { ApiPromise } from '@polkadot/api'
+import { registry } from '@joystream/types'
+import { createType } from '@polkadot/types'
+import { InterfaceTypes } from '@polkadot/types/types'
 import ExitCodes from '../../command-base/ExitCodes'
 import { CLIError } from '@oclif/errors'
 
+/**
+ * Special error type for bagId parsing. Extends CLIError, setting
+ * the `InvalidParameters` exit code.
+ */
+export class BagIdValidationError extends CLIError {
+  constructor(err: string) {
+    super(err, {
+      exit: ExitCodes.InvalidParameters,
+    })
+  }
+}
+
 /**
  * Parses the type string and returns the DynamicBagType instance.
  *
@@ -14,8 +28,8 @@ import { CLIError } from '@oclif/errors'
  * @param bagType - dynamic bag type string
  * @returns The DynamicBagType instance.
  */
-export function parseDynamicBagType(api: ApiPromise, bagType: DynamicBagTypeKey): DynamicBagType {
-  return api.createType('DynamicBagType', bagType)
+export function parseDynamicBagType(bagType: DynamicBagTypeKey): DynamicBagType {
+  return createJoystreamType('DynamicBagType', bagType)
 }
 
 /**
@@ -29,8 +43,8 @@ export function parseDynamicBagType(api: ApiPromise, bagType: DynamicBagTypeKey)
  * @param bagId - bag ID in string format
  * @returns The BagId instance.
  */
-export function parseBagId(api: ApiPromise, bagId: string): BagId {
-  const parser = new BagIdParser(api, bagId)
+export function parseBagId(bagId: string): BagId {
+  const parser = new BagIdParser(bagId)
 
   return parser.parse()
 }
@@ -40,19 +54,15 @@ export function parseBagId(api: ApiPromise, bagId: string): BagId {
  */
 class BagIdParser {
   bagId: string
-  api: ApiPromise
   bagIdParts: string[]
 
-  constructor(api: ApiPromise, bagId: string) {
+  constructor(bagId: string) {
     this.bagId = bagId
-    this.api = api
 
     this.bagIdParts = bagId.trim().toLowerCase().split(':')
 
     if (this.bagIdParts.length > 3 || this.bagIdParts.length < 2) {
-      throw new CLIError(`Invalid bagId: ${bagId}`, {
-        exit: ExitCodes.InvalidParameters,
-      })
+      throw new BagIdValidationError(`Invalid bagId: ${bagId}`)
     }
   }
 
@@ -69,9 +79,7 @@ class BagIdParser {
       return this.parseDynamicBagId()
     }
 
-    throw new CLIError(`Invalid bagId: ${this.bagId}`, {
-      exit: ExitCodes.InvalidParameters,
-    })
+    throw new BagIdValidationError(`Invalid bagId: ${this.bagId}`)
   }
 
   /**
@@ -81,8 +89,8 @@ class BagIdParser {
     // Try to construct static council bag ID.
     if (this.bagIdParts[1] === 'council') {
       if (this.bagIdParts.length === 2) {
-        const staticBagId: Static = this.api.createType('Static', 'Council')
-        const constructedBagId: BagId = this.api.createType('BagId', {
+        const staticBagId: Static = createJoystreamType('Static', 'Council')
+        const constructedBagId: BagId = createJoystreamType('BagId', {
           'Static': staticBagId,
         })
 
@@ -98,11 +106,11 @@ class BagIdParser {
 
         for (const group of groups) {
           if (group.toLowerCase() === actualGroup) {
-            const workingGroup: WorkingGroup = this.api.createType('WorkingGroup', group)
-            const staticBagId: Static = this.api.createType('Static', {
+            const workingGroup: WorkingGroup = createJoystreamType('WorkingGroup', group)
+            const staticBagId: Static = createJoystreamType('Static', {
               'WorkingGroup': workingGroup,
             })
-            const constructedBagId: BagId = this.api.createType('BagId', {
+            const constructedBagId: BagId = createJoystreamType('BagId', {
               'Static': staticBagId,
             })
 
@@ -112,9 +120,7 @@ class BagIdParser {
       }
     }
 
-    throw new CLIError(`Invalid static bagId: ${this.bagId}`, {
-      exit: ExitCodes.InvalidParameters,
-    })
+    throw new BagIdValidationError(`Invalid static bagId: ${this.bagId}`)
   }
 
   /**
@@ -136,8 +142,8 @@ class BagIdParser {
             const dynamic = {} as Record<DynamicBagTypeKey, number>
             dynamic[dynamicBagType as DynamicBagTypeKey] = parsedId
 
-            const dynamicBagId: Dynamic = this.api.createType('Dynamic', dynamic)
-            const constructedBagId: BagId = this.api.createType('BagId', {
+            const dynamicBagId: Dynamic = createJoystreamType('Dynamic', dynamic)
+            const constructedBagId: BagId = createJoystreamType('BagId', {
               'Dynamic': dynamicBagId,
             })
 
@@ -147,8 +153,13 @@ class BagIdParser {
       }
     }
 
-    throw new CLIError(`Invalid dynamic bagId: ${this.bagId}`, {
-      exit: ExitCodes.InvalidParameters,
-    })
+    throw new BagIdValidationError(`Invalid dynamic bagId: ${this.bagId}`)
   }
 }
+
+/**
+ * Creates a Joystream type using the type registry.
+ */
+function createJoystreamType<T extends keyof InterfaceTypes>(type: T, value: unknown): InterfaceTypes[T] {
+  return createType(registry, type, value)
+}
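
Per the parser above, bag IDs take the form `<scheme>:<part>[:<part>]`. A hedged usage sketch (the concrete working-group and dynamic-bag-type names depend on the runtime registry):

    import { parseBagId } from './bagTypes'

    // Accepted shapes; anything else throws BagIdValidationError.
    const council = parseBagId('static:council')
    const workingGroup = parseBagId('static:wg:storage') // assumes 'storage' is a registered group
    const dynamicBag = parseBagId('dynamic:member:4') // assumes 'member' is a dynamic bag type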

+ 2 - 2
storage-node-v2/src/services/helpers/tokenNonceKeeper.ts

@@ -1,7 +1,7 @@
 import NodeCache from 'node-cache'
 
 // Expiration period in seconds for the local nonce cache.
-const TokenExpirationPeriod: number = 30 * 1000 // seconds
+const TokenExpirationPeriod = 30 // seconds
 
 // Max nonce number in local cache
 const MaxNonces = 100000
@@ -17,7 +17,7 @@ const nonceCache = new NodeCache({
  * Constructs and returns an expiration time for a token.
  */
 export function getTokenExpirationTime(): number {
-  return Date.now() + TokenExpirationPeriod
+  return Date.now() + 1000 * TokenExpirationPeriod
 }
 
 /**
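
The constant stays in seconds because NodeCache's `stdTTL` option is expressed in seconds, while `Date.now()` arithmetic needs milliseconds; the conversion now happens only when building the timestamp. A short illustration (passing the constant as `stdTTL` is an assumption based on the cache setup above):

    import NodeCache from 'node-cache'

    const TokenExpirationPeriod = 30 // seconds

    // stdTTL takes seconds; Date arithmetic takes milliseconds.
    const cache = new NodeCache({ stdTTL: TokenExpirationPeriod })
    const expirationTime = Date.now() + 1000 * TokenExpirationPeriod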

+ 110 - 12
storage-node-v2/src/services/logger.ts

@@ -1,14 +1,16 @@
-import winston from 'winston'
+import winston, { transport } from 'winston'
 import expressWinston from 'express-winston'
 import { Handler, ErrorRequestHandler } from 'express'
+import { ElasticsearchTransport } from 'winston-elasticsearch'
 
 /**
  * Creates basic Winston logger options. Console output is redirected to stderr.
  *
- * @returns Winston logger
+ * @returns Winston logger options
  *
  */
-function createDefaultLogger(): winston.Logger {
+function createDefaultLoggerOptions(): winston.LoggerOptions {
+  // Levels
   const levels = {
     error: 0,
     warn: 1,
@@ -23,6 +25,7 @@ function createDefaultLogger(): winston.Logger {
     return isDevelopment ? 'debug' : 'warn'
   }
 
+  // Colors
   const colors = {
     error: 'red',
     warn: 'yellow',
@@ -30,39 +33,74 @@ function createDefaultLogger(): winston.Logger {
     http: 'magenta',
     debug: 'white',
   }
-
   winston.addColors(colors)
 
+  // Formats
   const format = winston.format.combine(
     winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss:ms' }),
-    winston.format.colorize({ all: true }),
+    winston.format.colorize(),
     winston.format.printf((info) => `${info.timestamp} ${info.level}: ${info.message}`)
   )
 
   // Redirect all logs to the stderr
   const transports = [new winston.transports.Console({ stderrLevels: Object.keys(levels) })]
 
-  return winston.createLogger({
+  return {
     level: level(),
     levels,
     format,
     transports,
-  })
+  }
+}
+
+/**
+ * Creates basic Winston logger.
+ *
+ * @returns Winston logger
+ *
+ */
+function createDefaultLogger(): winston.Logger {
+  const defaultOptions = createDefaultLoggerOptions()
+
+  return winston.createLogger(defaultOptions)
 }
 
-const Logger = createDefaultLogger()
+// Default global logger variable
+let InnerLogger = createDefaultLogger()
+
+// Enables swapping the underlying logger while keeping the default import in other modules.
+const proxy = new Proxy(InnerLogger, {
+  get(target: winston.Logger, propKey: symbol) {
+    const method = Reflect.get(target, propKey)
+    return (...args: unknown[]) => {
+      return method.apply(InnerLogger, args)
+    }
+  },
+})
+
+export default proxy
 
-export default Logger
 /**
  * Creates Express-Winston logger handler.
  *
+ * @param elasticSearchEndpoint - Elasticsearch endpoint (optional).
  * @returns  Express-Winston logger handler
  *
  */
-export function httpLogger(): Handler {
+export function httpLogger(elasticSearchEndpoint?: string): Handler {
+  const transports: winston.transport[] = [new winston.transports.Console()]
+
+  if (elasticSearchEndpoint) {
+    const esTransport = createElasticTransport(elasticSearchEndpoint)
+    transports.push(esTransport)
+  }
+
   const opts: expressWinston.LoggerOptions = {
-    transports: [new winston.transports.Console()],
-    format: winston.format.combine(winston.format.json()),
+    transports,
+    format: winston.format.combine(
+      winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss:ms' }),
+      winston.format.json()
+    ),
     meta: true,
     msg: 'HTTP {{req.method}} {{req.url}}',
     expressFormat: true,
@@ -109,3 +147,63 @@ export function createStdConsoleLogger(): winston.Logger {
     transports,
   })
 }
+/**
+ * Creates Winston logger with Elasticsearch support.
+ *
+ * @returns Winston logger
+ *
+ */
+function createElasticLogger(elasticSearchEndpoint: string): winston.Logger {
+  const loggerOptions = createDefaultLoggerOptions()
+
+  // Formats
+  loggerOptions.format = winston.format.combine(
+    winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss:ms' }),
+    winston.format.printf((info) => `${info.timestamp} ${info.level}: ${info.message}`)
+  )
+
+  // Transports
+  let transports: transport[] = []
+  if (loggerOptions.transports !== undefined) {
+    transports = Array.isArray(loggerOptions.transports) ? loggerOptions.transports : [loggerOptions.transports]
+  }
+
+  const esTransport = createElasticTransport(elasticSearchEndpoint)
+  transports.push(esTransport)
+
+  // Logger
+  const logger = winston.createLogger(loggerOptions)
+
+  // Handle logger error.
+  logger.on('error', (err) => {
+    // Allow console for logging errors of the logger.
+    /* eslint-disable no-console */
+    console.error('Error in logger caught:', err)
+  })
+
+  return logger
+}
+
+/**
+ * Updates the default system logger with Elasticsearch capabilities.
+ *
+ * @param elasticSearchEndpoint - Elasticsearch endpoint.
+ */
+export function initElasticLogger(elasticSearchEndpoint: string): void {
+  InnerLogger = createElasticLogger(elasticSearchEndpoint)
+}
+
+/**
+ * Creates a Winston logger transport for Elasticsearch.
+ *
+ * @param elasticSearchEndpoint - Elasticsearch endpoint.
+ * @returns Elasticsearch Winston transport
+ */
+function createElasticTransport(elasticSearchEndpoint: string): winston.transport {
+  const esTransportOpts = {
+    level: 'warn',
+    clientOpts: { node: elasticSearchEndpoint, maxRetries: 5 },
+    index: 'storage-node',
+  }
+  return new ElasticsearchTransport(esTransportOpts)
+}
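
The Proxy indirection lets every module keep its default `logger` import while `initElasticLogger` swaps the backing instance at runtime. A minimal sketch of the pattern (generic shape, not the actual Winston types):

    type Log = { info: (msg: string) => void }

    let inner: Log = { info: (msg) => console.log(`first: ${msg}`) }

    // The proxy forwards each call to whatever `inner` currently points at.
    const log: Log = new Proxy(inner, {
      get(_target, prop: keyof Log) {
        return (...args: unknown[]) => (inner[prop] as (...a: unknown[]) => void)(...args)
      },
    })

    log.info('handled by the first logger')
    inner = { info: (msg) => console.log(`second: ${msg}`) } // swap; importers are unaffected
    log.info('handled by the second logger')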

+ 20 - 0
storage-node-v2/src/services/metadata/generateTypes.ts

@@ -0,0 +1,20 @@
+/**
+ * Script that generates the OperatorMetadataJson type definition file.
+ */
+
+import fs from 'fs'
+import path from 'path'
+import { compile } from 'json-schema-to-typescript'
+import { schemas } from './schemas'
+
+// eslint-disable-next-line @typescript-eslint/no-var-requires
+const prettierConfig = require('@joystream/prettier-config')
+
+Object.entries(schemas).forEach(([schemaKey, schema]) => {
+  compile(schema, `${schemaKey}Json`, { style: prettierConfig }).then(
+    (output) => fs.writeFileSync(path.resolve(__dirname, `./generated/${schemaKey}Json.d.ts`), output),
+    (err) => {
+      // Surface schema compilation failures instead of swallowing them silently.
+      // eslint-disable-next-line no-console
+      console.error(`Type generation failed for ${schemaKey}:`, err)
+    }
+  )
+})

+ 19 - 0
storage-node-v2/src/services/metadata/generated/OperatorMetadataJson.d.ts

@@ -0,0 +1,19 @@
+/* tslint:disable */
+/**
+ * This file was automatically generated by json-schema-to-typescript.
+ * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file,
+ * and run json-schema-to-typescript to regenerate this file.
+ */
+
+export interface OperatorMetadataJson {
+  endpoint?: string
+  location?: {
+    countryCode?: string
+    city?: string
+    coordinates?: {
+      latitude?: number
+      longitude?: number
+    }
+  }
+  extra?: string
+}

+ 12 - 0
storage-node-v2/src/services/metadata/schemas/index.ts

@@ -0,0 +1,12 @@
+import { OperatorMetadataJson } from '../generated/OperatorMetadataJson'
+import { operatorMetadataSchema } from './operatorMetadataSchema'
+
+export const schemas = {
+  OperatorMetadata: operatorMetadataSchema,
+} as const
+
+export type SchemaKey = keyof typeof schemas & string
+
+export type TypeBySchemaKey<T extends SchemaKey> = T extends 'OperatorMetadata' ? OperatorMetadataJson : never
+
+export default schemas
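
`TypeBySchemaKey` is a conditional type mapping each schema key to its generated TypeScript type, which is what lets `validate` return a precisely typed result. For illustration:

    import { TypeBySchemaKey } from './schemas'

    // Resolves to the generated OperatorMetadataJson interface.
    type OperatorMeta = TypeBySchemaKey<'OperatorMetadata'>
    const meta: OperatorMeta = { endpoint: 'https://storage.example.com/' } // illustrative value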

+ 29 - 0
storage-node-v2/src/services/metadata/schemas/operatorMetadataSchema.ts

@@ -0,0 +1,29 @@
+import { JSONSchema4 } from 'json-schema'
+
+// Storage node operator metadata JSON schema.
+export const operatorMetadataSchema: JSONSchema4 = {
+  type: 'object',
+  additionalProperties: false,
+  properties: {
+    endpoint: { type: 'string' },
+    location: {
+      type: 'object',
+      additionalProperties: false,
+      properties: {
+        countryCode: { type: 'string' },
+        city: { type: 'string' },
+        coordinates: {
+          type: 'object',
+          additionalProperties: false,
+          properties: {
+            latitude: { type: 'number', minimum: -90, maximum: 90 },
+            longitude: { type: 'number', minimum: -180, maximum: 180 },
+          },
+        },
+      },
+    },
+    extra: { type: 'string' },
+  },
+}
+
+export default operatorMetadataSchema
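
An example document that satisfies this schema (all values are illustrative):

    const exampleOperatorMetadata = {
      endpoint: 'https://storage.example.com/',
      location: {
        countryCode: 'DE',
        city: 'Berlin',
        coordinates: { latitude: 52.52, longitude: 13.405 },
      },
      extra: 'illustrative operator notes',
    }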

+ 36 - 0
storage-node-v2/src/services/metadata/validationService.ts

@@ -0,0 +1,36 @@
+import Ajv from 'ajv'
+import { SchemaKey, schemas, TypeBySchemaKey } from './schemas'
+
+/**
+ * JSON schema validation error
+ */
+class ValidationError extends Error {
+  public readonly errors: string[]
+
+  public constructor(message: string, errors: string[]) {
+    super(`${message}\n\n${errors.join('\n')}`)
+    this.errors = errors
+  }
+}
+
+/**
+ * Validates JSON schema for the storage operator metadata
+ */
+export class ValidationService {
+  private ajv: Ajv
+
+  public constructor() {
+    this.ajv = new Ajv({ allErrors: true, schemas })
+  }
+
+  validate<SK extends SchemaKey>(schemaKey: SK, input: unknown): TypeBySchemaKey<SK> {
+    const valid = this.ajv.validate(schemaKey, input) as boolean
+    if (!valid) {
+      throw new ValidationError(
+        `${schemaKey} is not valid`,
+        this.ajv.errors?.map((e) => `${e.dataPath}: ${e.message} (${JSON.stringify(e.params)})`) || []
+      )
+    }
+    return input as TypeBySchemaKey<SK>
+  }
+}
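
Typical usage, as in the updated set-metadata command; invalid input raises a ValidationError carrying the per-field Ajv messages:

    import { ValidationService } from './validationService'

    const validation = new ValidationService()

    // Throws ValidationError on schema violations; returns a typed result otherwise.
    const metadata = validation.validate('OperatorMetadata', { endpoint: 'https://storage.example.com/' })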

+ 170 - 0
storage-node-v2/src/services/queryNode/api.ts

@@ -0,0 +1,170 @@
+import { ApolloClient, NormalizedCacheObject, HttpLink, InMemoryCache, DocumentNode } from '@apollo/client'
+import fetch from 'cross-fetch'
+import {
+  GetStorageBucketDetails,
+  GetStorageBucketDetailsQuery,
+  GetStorageBucketDetailsQueryVariables,
+  StorageBucketDetailsFragment,
+  GetStorageBagDetailsQuery,
+  GetStorageBagDetails,
+  StorageBagDetailsFragment,
+  GetStorageBagDetailsQueryVariables,
+  DataObjectDetailsFragment,
+  GetDataObjectDetailsQuery,
+  GetDataObjectDetailsQueryVariables,
+  GetDataObjectDetails,
+} from './generated/queries'
+import { Maybe, StorageBagWhereInput } from './generated/schema'
+
+/**
+ * Query node helper class. Encapsulates custom queries.
+ *
+ */
+export class QueryNodeApi {
+  private apolloClient: ApolloClient<NormalizedCacheObject>
+
+  public constructor(endpoint: string) {
+    this.apolloClient = new ApolloClient({
+      link: new HttpLink({ uri: endpoint, fetch }),
+      cache: new InMemoryCache(),
+      defaultOptions: {
+        query: { fetchPolicy: 'no-cache', errorPolicy: 'all' },
+      },
+    })
+  }
+
+  /**
+   * Get entity by unique input
+   *
+   * @param query - actual query
+   * @param variables - query parameters
+   * @param resultKey - helps result parsing
+   */
+  protected async uniqueEntityQuery<
+    QueryT extends { [k: string]: Maybe<Record<string, unknown>> | undefined },
+    VariablesT extends Record<string, unknown>
+  >(
+    query: DocumentNode,
+    variables: VariablesT,
+    resultKey: keyof QueryT
+  ): Promise<Required<QueryT>[keyof QueryT] | null> {
+    const result = await this.apolloClient.query<QueryT, VariablesT>({
+      query,
+      variables,
+    })
+
+    if (result?.data === null) {
+      return null
+    }
+
+    return result.data[resultKey]
+  }
+
+  /**
+   * Get entities by "non-unique" input and return the first result.
+   *
+   * @param query - actual query
+   * @param variables - query parameters
+   * @param resultKey - helps result parsing
+   */
+  protected async firstEntityQuery<
+    QueryT extends { [k: string]: unknown[] },
+    VariablesT extends Record<string, unknown>
+  >(query: DocumentNode, variables: VariablesT, resultKey: keyof QueryT): Promise<QueryT[keyof QueryT][number] | null> {
+    const result = await this.apolloClient.query<QueryT, VariablesT>({
+      query,
+      variables,
+    })
+
+    if (result?.data === null) {
+      return null
+    }
+    return result.data[resultKey][0]
+  }
+
+  /**
+   * Query-node: get multiple entities
+   *
+   * @param query - actual query
+   * @param variables - query parameters
+   * @param resultKey - helps result parsing
+   */
+  protected async multipleEntitiesQuery<
+    QueryT extends { [k: string]: unknown[] },
+    VariablesT extends Record<string, unknown>
+  >(query: DocumentNode, variables: VariablesT, resultKey: keyof QueryT): Promise<QueryT[keyof QueryT] | null> {
+    const result = await this.apolloClient.query<QueryT, VariablesT>({
+      query,
+      variables,
+    })
+
+    if (result?.data === null) {
+      return null
+    }
+    return result.data[resultKey]
+  }
+
+  /**
+   * Returns storage bucket info by pages.
+   *
+   * @param offset - starting record of the page
+   * @param limit - page size
+   */
+  public async getStorageBucketDetails(offset: number, limit: number): Promise<Array<StorageBucketDetailsFragment>> {
+    const result = await this.multipleEntitiesQuery<
+      GetStorageBucketDetailsQuery,
+      GetStorageBucketDetailsQueryVariables
+    >(GetStorageBucketDetails, { offset, limit }, 'storageBuckets')
+
+    if (result === null) {
+      return []
+    }
+
+    return result
+  }
+
+  /**
+   * Returns storage bag info by pages for the given buckets.
+   *
+   * @param bucketIds - query filter: bucket IDs
+   * @param offset - starting record of the page
+   * @param limit - page size
+   */
+  public async getStorageBagsDetails(
+    bucketIds: string[],
+    offset: number,
+    limit: number
+  ): Promise<Array<StorageBagDetailsFragment>> {
+    const result = await this.multipleEntitiesQuery<GetStorageBagDetailsQuery, GetStorageBagDetailsQueryVariables>(
+      GetStorageBagDetails,
+      { offset, limit, bucketIds },
+      'storageBags'
+    )
+
+    if (result === null) {
+      return []
+    }
+
+    return result
+  }
+
+  /**
+   * Returns data objects info by pages for the given bags.
+   *
+   * @param bagIds - query filter: bag IDs
+   * @param offset - starting record of the page
+   * @param limit - page size
+   */
+  public async getDataObjectDetails(
+    bagIds: string[],
+    offset: number,
+    limit: number
+  ): Promise<Array<DataObjectDetailsFragment>> {
+    const input: StorageBagWhereInput = { id_in: bagIds }
+    const result = await this.multipleEntitiesQuery<GetDataObjectDetailsQuery, GetDataObjectDetailsQueryVariables>(
+      GetDataObjectDetails,
+      { offset, limit, bagIds: input },
+      'storageDataObjects'
+    )
+
+    if (result === null) {
+      return []
+    }
+
+    return result
+  }
+}
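
The details methods are page-oriented; callers iterate until a short page signals the end. A hedged paging sketch (the page size is an arbitrary choice):

    import { QueryNodeApi } from './api'
    import { StorageBucketDetailsFragment } from './generated/queries'

    // Fetch all storage buckets, one page at a time.
    async function getAllBuckets(api: QueryNodeApi): Promise<StorageBucketDetailsFragment[]> {
      const limit = 100 // arbitrary page size
      const buckets: StorageBucketDetailsFragment[] = []
      for (let offset = 0; ; offset += limit) {
        const page = await api.getStorageBucketDetails(offset, limit)
        buckets.push(...page)
        if (page.length < limit) {
          return buckets
        }
      }
    }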

+ 33 - 0
storage-node-v2/src/services/queryNode/codegen.yml

@@ -0,0 +1,33 @@
+# Paths are relative to root package directory
+overwrite: true
+
+schema: '../query-node/generated/graphql-server/generated/schema.graphql'
+
+documents:
+  - 'src/services/queryNode/queries/*.graphql'
+
+config:
+  scalars:
+    Date: Date
+  preResolveTypes: true # avoid using Pick
+  skipTypename: true # skip __typename field in typings unless it's part of the query
+
+generates:
+  src/services/queryNode/generated/schema.ts:
+    hooks:
+      afterOneFileWrite:
+        - prettier --write
+        - eslint --fix
+    plugins:
+      - typescript
+  src/services/queryNode/generated/queries.ts:
+    preset: import-types
+    presetConfig:
+      typesPath: ./schema
+    hooks:
+      afterOneFileWrite:
+        - prettier --write
+        - eslint --fix
+    plugins:
+      - typescript-operations
+      - typescript-document-nodes

+ 99 - 0
storage-node-v2/src/services/queryNode/generated/queries.ts

@@ -0,0 +1,99 @@
+import * as Types from './schema'
+
+import gql from 'graphql-tag'
+export type StorageBucketDetailsFragment = {
+  id: string
+  operatorMetadata?: Types.Maybe<{ id: string; nodeEndpoint?: Types.Maybe<string> }>
+  operatorStatus: { workerId: number } | { workerId: number }
+}
+
+export type GetStorageBucketDetailsQueryVariables = Types.Exact<{
+  offset?: Types.Maybe<Types.Scalars['Int']>
+  limit?: Types.Maybe<Types.Scalars['Int']>
+}>
+
+export type GetStorageBucketDetailsQuery = { storageBuckets: Array<StorageBucketDetailsFragment> }
+
+export type StorageBagDetailsFragment = { id: string; storageAssignments: Array<{ storageBucket: { id: string } }> }
+
+export type GetStorageBagDetailsQueryVariables = Types.Exact<{
+  bucketIds?: Types.Maybe<Array<Types.Scalars['String']> | Types.Scalars['String']>
+  offset?: Types.Maybe<Types.Scalars['Int']>
+  limit?: Types.Maybe<Types.Scalars['Int']>
+}>
+
+export type GetStorageBagDetailsQuery = { storageBags: Array<StorageBagDetailsFragment> }
+
+export type DataObjectDetailsFragment = { ipfsHash: string; storageBag: { id: string } }
+
+export type GetDataObjectDetailsQueryVariables = Types.Exact<{
+  bagIds?: Types.Maybe<Types.StorageBagWhereInput>
+  offset?: Types.Maybe<Types.Scalars['Int']>
+  limit?: Types.Maybe<Types.Scalars['Int']>
+}>
+
+export type GetDataObjectDetailsQuery = { storageDataObjects: Array<DataObjectDetailsFragment> }
+
+export const StorageBucketDetails = gql`
+  fragment StorageBucketDetails on StorageBucket {
+    id
+    operatorMetadata {
+      id
+      nodeEndpoint
+    }
+    operatorStatus {
+      ... on StorageBucketOperatorStatusActive {
+        workerId
+      }
+      ... on StorageBucketOperatorStatusInvited {
+        workerId
+      }
+    }
+  }
+`
+export const StorageBagDetails = gql`
+  fragment StorageBagDetails on StorageBag {
+    id
+    storageAssignments {
+      storageBucket {
+        id
+      }
+    }
+  }
+`
+export const DataObjectDetails = gql`
+  fragment DataObjectDetails on StorageDataObject {
+    ipfsHash
+    storageBag {
+      id
+    }
+  }
+`
+export const GetStorageBucketDetails = gql`
+  query getStorageBucketDetails($offset: Int, $limit: Int) {
+    storageBuckets(offset: $offset, limit: $limit) {
+      ...StorageBucketDetails
+    }
+  }
+  ${StorageBucketDetails}
+`
+export const GetStorageBagDetails = gql`
+  query getStorageBagDetails($bucketIds: [String!], $offset: Int, $limit: Int) {
+    storageBags(
+      offset: $offset
+      limit: $limit
+      where: { storageAssignments_some: { storageBucketId_in: $bucketIds } }
+    ) {
+      ...StorageBagDetails
+    }
+  }
+  ${StorageBagDetails}
+`
+export const GetDataObjectDetails = gql`
+  query getDataObjectDetails($bagIds: StorageBagWhereInput, $offset: Int, $limit: Int) {
+    storageDataObjects(offset: $offset, limit: $limit, where: { storageBag: $bagIds, isAccepted_eq: true }) {
+      ...DataObjectDetails
+    }
+  }
+  ${DataObjectDetails}
+`

+ 4752 - 0
storage-node-v2/src/services/queryNode/generated/schema.ts

@@ -0,0 +1,4752 @@
+export type Maybe<T> = T | null
+export type Exact<T extends { [key: string]: unknown }> = { [K in keyof T]: T[K] }
+export type MakeOptional<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]?: Maybe<T[SubKey]> }
+export type MakeMaybe<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]: Maybe<T[SubKey]> }
+/** All built-in and custom scalars, mapped to their actual values */
+export type Scalars = {
+  ID: string
+  String: string
+  Boolean: boolean
+  Int: number
+  Float: number
+  /** The javascript `Date` as string. Type represents date and time as the ISO Date string. */
+  DateTime: any
+  /** The `JSONObject` scalar type represents JSON objects as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf). */
+  JSONObject: any
+  /** GraphQL representation of BigInt */
+  BigInt: any
+}
+
+export enum AssetAvailability {
+  Accepted = 'ACCEPTED',
+  Pending = 'PENDING',
+  Invalid = 'INVALID',
+}
+
+export type BaseGraphQlObject = {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+}
+
+export type BaseModel = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+}
+
+export type BaseModelUuid = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+}
+
+export type BaseWhereInput = {
+  id_eq?: Maybe<Scalars['String']>
+  id_in?: Maybe<Array<Scalars['String']>>
+  createdAt_eq?: Maybe<Scalars['String']>
+  createdAt_lt?: Maybe<Scalars['String']>
+  createdAt_lte?: Maybe<Scalars['String']>
+  createdAt_gt?: Maybe<Scalars['String']>
+  createdAt_gte?: Maybe<Scalars['String']>
+  createdById_eq?: Maybe<Scalars['String']>
+  updatedAt_eq?: Maybe<Scalars['String']>
+  updatedAt_lt?: Maybe<Scalars['String']>
+  updatedAt_lte?: Maybe<Scalars['String']>
+  updatedAt_gt?: Maybe<Scalars['String']>
+  updatedAt_gte?: Maybe<Scalars['String']>
+  updatedById_eq?: Maybe<Scalars['String']>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['String']>
+  deletedAt_lt?: Maybe<Scalars['String']>
+  deletedAt_lte?: Maybe<Scalars['String']>
+  deletedAt_gt?: Maybe<Scalars['String']>
+  deletedAt_gte?: Maybe<Scalars['String']>
+  deletedById_eq?: Maybe<Scalars['String']>
+}
+
+export type Channel = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  ownerMember?: Maybe<Membership>
+  ownerMemberId?: Maybe<Scalars['String']>
+  ownerCuratorGroup?: Maybe<CuratorGroup>
+  ownerCuratorGroupId?: Maybe<Scalars['String']>
+  category?: Maybe<ChannelCategory>
+  categoryId?: Maybe<Scalars['String']>
+  /** Reward account where revenue is sent if set. */
+  rewardAccount?: Maybe<Scalars['String']>
+  /** The title of the Channel */
+  title?: Maybe<Scalars['String']>
+  /** The description of a Channel */
+  description?: Maybe<Scalars['String']>
+  coverPhotoDataObject?: Maybe<DataObject>
+  coverPhotoDataObjectId?: Maybe<Scalars['String']>
+  /** URLs where the asset content can be accessed (if any) */
+  coverPhotoUrls: Array<Scalars['String']>
+  /** Availability meta information */
+  coverPhotoAvailability: AssetAvailability
+  avatarPhotoDataObject?: Maybe<DataObject>
+  avatarPhotoDataObjectId?: Maybe<Scalars['String']>
+  /** URLs where the asset content can be accessed (if any) */
+  avatarPhotoUrls: Array<Scalars['String']>
+  /** Availability meta information */
+  avatarPhotoAvailability: AssetAvailability
+  /** Flag signaling whether a channel is public. */
+  isPublic?: Maybe<Scalars['Boolean']>
+  /** Flag signaling whether a channel is censored. */
+  isCensored: Scalars['Boolean']
+  language?: Maybe<Language>
+  languageId?: Maybe<Scalars['String']>
+  videos: Array<Video>
+  createdInBlock: Scalars['Int']
+}
+
+export type ChannelCategoriesByNameFtsOutput = {
+  item: ChannelCategoriesByNameSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type ChannelCategoriesByNameSearchResult = ChannelCategory
+
+/** Category of media channel */
+export type ChannelCategory = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** The name of the category */
+  name?: Maybe<Scalars['String']>
+  channels: Array<Channel>
+  createdInBlock: Scalars['Int']
+}
+
+export type ChannelCategoryConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<ChannelCategoryEdge>
+  pageInfo: PageInfo
+}
+
+export type ChannelCategoryCreateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock: Scalars['Float']
+}
+
+export type ChannelCategoryEdge = {
+  node: ChannelCategory
+  cursor: Scalars['String']
+}
+
+export enum ChannelCategoryOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NameAsc = 'name_ASC',
+  NameDesc = 'name_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type ChannelCategoryUpdateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type ChannelCategoryWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  name_eq?: Maybe<Scalars['String']>
+  name_contains?: Maybe<Scalars['String']>
+  name_startsWith?: Maybe<Scalars['String']>
+  name_endsWith?: Maybe<Scalars['String']>
+  name_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  channels_none?: Maybe<ChannelWhereInput>
+  channels_some?: Maybe<ChannelWhereInput>
+  channels_every?: Maybe<ChannelWhereInput>
+  AND?: Maybe<Array<ChannelCategoryWhereInput>>
+  OR?: Maybe<Array<ChannelCategoryWhereInput>>
+}
+
+export type ChannelCategoryWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type ChannelConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<ChannelEdge>
+  pageInfo: PageInfo
+}
+
+export type ChannelCreateInput = {
+  ownerMember?: Maybe<Scalars['ID']>
+  ownerCuratorGroup?: Maybe<Scalars['ID']>
+  category?: Maybe<Scalars['ID']>
+  rewardAccount?: Maybe<Scalars['String']>
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  coverPhotoDataObject?: Maybe<Scalars['ID']>
+  coverPhotoUrls: Array<Scalars['String']>
+  coverPhotoAvailability: AssetAvailability
+  avatarPhotoDataObject?: Maybe<Scalars['ID']>
+  avatarPhotoUrls: Array<Scalars['String']>
+  avatarPhotoAvailability: AssetAvailability
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored: Scalars['Boolean']
+  language?: Maybe<Scalars['ID']>
+  createdInBlock: Scalars['Float']
+}
+
+export type ChannelEdge = {
+  node: Channel
+  cursor: Scalars['String']
+}
+
+export enum ChannelOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  OwnerMemberAsc = 'ownerMember_ASC',
+  OwnerMemberDesc = 'ownerMember_DESC',
+  OwnerCuratorGroupAsc = 'ownerCuratorGroup_ASC',
+  OwnerCuratorGroupDesc = 'ownerCuratorGroup_DESC',
+  CategoryAsc = 'category_ASC',
+  CategoryDesc = 'category_DESC',
+  RewardAccountAsc = 'rewardAccount_ASC',
+  RewardAccountDesc = 'rewardAccount_DESC',
+  TitleAsc = 'title_ASC',
+  TitleDesc = 'title_DESC',
+  DescriptionAsc = 'description_ASC',
+  DescriptionDesc = 'description_DESC',
+  CoverPhotoDataObjectAsc = 'coverPhotoDataObject_ASC',
+  CoverPhotoDataObjectDesc = 'coverPhotoDataObject_DESC',
+  CoverPhotoAvailabilityAsc = 'coverPhotoAvailability_ASC',
+  CoverPhotoAvailabilityDesc = 'coverPhotoAvailability_DESC',
+  AvatarPhotoDataObjectAsc = 'avatarPhotoDataObject_ASC',
+  AvatarPhotoDataObjectDesc = 'avatarPhotoDataObject_DESC',
+  AvatarPhotoAvailabilityAsc = 'avatarPhotoAvailability_ASC',
+  AvatarPhotoAvailabilityDesc = 'avatarPhotoAvailability_DESC',
+  IsPublicAsc = 'isPublic_ASC',
+  IsPublicDesc = 'isPublic_DESC',
+  IsCensoredAsc = 'isCensored_ASC',
+  IsCensoredDesc = 'isCensored_DESC',
+  LanguageAsc = 'language_ASC',
+  LanguageDesc = 'language_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type ChannelUpdateInput = {
+  ownerMember?: Maybe<Scalars['ID']>
+  ownerCuratorGroup?: Maybe<Scalars['ID']>
+  category?: Maybe<Scalars['ID']>
+  rewardAccount?: Maybe<Scalars['String']>
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  coverPhotoDataObject?: Maybe<Scalars['ID']>
+  coverPhotoUrls?: Maybe<Array<Scalars['String']>>
+  coverPhotoAvailability?: Maybe<AssetAvailability>
+  avatarPhotoDataObject?: Maybe<Scalars['ID']>
+  avatarPhotoUrls?: Maybe<Array<Scalars['String']>>
+  avatarPhotoAvailability?: Maybe<AssetAvailability>
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored?: Maybe<Scalars['Boolean']>
+  language?: Maybe<Scalars['ID']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type ChannelWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  ownerMember_eq?: Maybe<Scalars['ID']>
+  ownerMember_in?: Maybe<Array<Scalars['ID']>>
+  ownerCuratorGroup_eq?: Maybe<Scalars['ID']>
+  ownerCuratorGroup_in?: Maybe<Array<Scalars['ID']>>
+  category_eq?: Maybe<Scalars['ID']>
+  category_in?: Maybe<Array<Scalars['ID']>>
+  rewardAccount_eq?: Maybe<Scalars['String']>
+  rewardAccount_contains?: Maybe<Scalars['String']>
+  rewardAccount_startsWith?: Maybe<Scalars['String']>
+  rewardAccount_endsWith?: Maybe<Scalars['String']>
+  rewardAccount_in?: Maybe<Array<Scalars['String']>>
+  title_eq?: Maybe<Scalars['String']>
+  title_contains?: Maybe<Scalars['String']>
+  title_startsWith?: Maybe<Scalars['String']>
+  title_endsWith?: Maybe<Scalars['String']>
+  title_in?: Maybe<Array<Scalars['String']>>
+  description_eq?: Maybe<Scalars['String']>
+  description_contains?: Maybe<Scalars['String']>
+  description_startsWith?: Maybe<Scalars['String']>
+  description_endsWith?: Maybe<Scalars['String']>
+  description_in?: Maybe<Array<Scalars['String']>>
+  coverPhotoDataObject_eq?: Maybe<Scalars['ID']>
+  coverPhotoDataObject_in?: Maybe<Array<Scalars['ID']>>
+  coverPhotoAvailability_eq?: Maybe<AssetAvailability>
+  coverPhotoAvailability_in?: Maybe<Array<AssetAvailability>>
+  avatarPhotoDataObject_eq?: Maybe<Scalars['ID']>
+  avatarPhotoDataObject_in?: Maybe<Array<Scalars['ID']>>
+  avatarPhotoAvailability_eq?: Maybe<AssetAvailability>
+  avatarPhotoAvailability_in?: Maybe<Array<AssetAvailability>>
+  isPublic_eq?: Maybe<Scalars['Boolean']>
+  isPublic_in?: Maybe<Array<Scalars['Boolean']>>
+  isCensored_eq?: Maybe<Scalars['Boolean']>
+  isCensored_in?: Maybe<Array<Scalars['Boolean']>>
+  language_eq?: Maybe<Scalars['ID']>
+  language_in?: Maybe<Array<Scalars['ID']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  ownerMember?: Maybe<MembershipWhereInput>
+  ownerCuratorGroup?: Maybe<CuratorGroupWhereInput>
+  category?: Maybe<ChannelCategoryWhereInput>
+  coverPhotoDataObject?: Maybe<DataObjectWhereInput>
+  avatarPhotoDataObject?: Maybe<DataObjectWhereInput>
+  language?: Maybe<LanguageWhereInput>
+  videos_none?: Maybe<VideoWhereInput>
+  videos_some?: Maybe<VideoWhereInput>
+  videos_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<ChannelWhereInput>>
+  OR?: Maybe<Array<ChannelWhereInput>>
+}
+
+export type ChannelWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type CuratorGroup = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Curators belonging to this group */
+  curatorIds: Array<Scalars['Int']>
+  /** Is group active or not */
+  isActive: Scalars['Boolean']
+  channels: Array<Channel>
+}
+
+export type CuratorGroupConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<CuratorGroupEdge>
+  pageInfo: PageInfo
+}
+
+export type CuratorGroupCreateInput = {
+  curatorIds: Array<Scalars['Int']>
+  isActive: Scalars['Boolean']
+}
+
+export type CuratorGroupEdge = {
+  node: CuratorGroup
+  cursor: Scalars['String']
+}
+
+export enum CuratorGroupOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsActiveAsc = 'isActive_ASC',
+  IsActiveDesc = 'isActive_DESC',
+}
+
+export type CuratorGroupUpdateInput = {
+  curatorIds?: Maybe<Array<Scalars['Int']>>
+  isActive?: Maybe<Scalars['Boolean']>
+}
+
+export type CuratorGroupWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  isActive_eq?: Maybe<Scalars['Boolean']>
+  isActive_in?: Maybe<Array<Scalars['Boolean']>>
+  channels_none?: Maybe<ChannelWhereInput>
+  channels_some?: Maybe<ChannelWhereInput>
+  channels_every?: Maybe<ChannelWhereInput>
+  AND?: Maybe<Array<CuratorGroupWhereInput>>
+  OR?: Maybe<Array<CuratorGroupWhereInput>>
+}
+
+export type CuratorGroupWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+/** Manages content ids, type and storage provider decision about it */
+export type DataObject = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Content owner */
+  owner: DataObjectOwner
+  /** Content added at */
+  createdInBlock: Scalars['Int']
+  /** Content type id */
+  typeId: Scalars['Int']
+  /** Content size in bytes */
+  size: Scalars['Int']
+  liaison?: Maybe<Worker>
+  liaisonId?: Maybe<Scalars['String']>
+  /** Storage provider as liaison judgment */
+  liaisonJudgement: LiaisonJudgement
+  /** IPFS content id */
+  ipfsContentId: Scalars['String']
+  /** Joystream runtime content */
+  joystreamContentId: Scalars['String']
+  channelcoverPhotoDataObject?: Maybe<Array<Channel>>
+  channelavatarPhotoDataObject?: Maybe<Array<Channel>>
+  videothumbnailPhotoDataObject?: Maybe<Array<Video>>
+  videomediaDataObject?: Maybe<Array<Video>>
+}
+
+export type DataObjectConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DataObjectEdge>
+  pageInfo: PageInfo
+}
+
+export type DataObjectCreateInput = {
+  owner: Scalars['JSONObject']
+  createdInBlock: Scalars['Float']
+  typeId: Scalars['Float']
+  size: Scalars['Float']
+  liaison?: Maybe<Scalars['ID']>
+  liaisonJudgement: LiaisonJudgement
+  ipfsContentId: Scalars['String']
+  joystreamContentId: Scalars['String']
+}
+
+export type DataObjectEdge = {
+  node: DataObject
+  cursor: Scalars['String']
+}
+
+export enum DataObjectOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+  TypeIdAsc = 'typeId_ASC',
+  TypeIdDesc = 'typeId_DESC',
+  SizeAsc = 'size_ASC',
+  SizeDesc = 'size_DESC',
+  LiaisonAsc = 'liaison_ASC',
+  LiaisonDesc = 'liaison_DESC',
+  LiaisonJudgementAsc = 'liaisonJudgement_ASC',
+  LiaisonJudgementDesc = 'liaisonJudgement_DESC',
+  IpfsContentIdAsc = 'ipfsContentId_ASC',
+  IpfsContentIdDesc = 'ipfsContentId_DESC',
+  JoystreamContentIdAsc = 'joystreamContentId_ASC',
+  JoystreamContentIdDesc = 'joystreamContentId_DESC',
+}
+
+export type DataObjectOwner =
+  | DataObjectOwnerMember
+  | DataObjectOwnerChannel
+  | DataObjectOwnerDao
+  | DataObjectOwnerCouncil
+  | DataObjectOwnerWorkingGroup
+
+export type DataObjectOwnerChannel = {
+  /** Channel identifier */
+  channel: Scalars['Int']
+  /** Variant needs to have at least one property. This value is not used. */
+  dummy?: Maybe<Scalars['Int']>
+}
+
+export type DataObjectOwnerChannelCreateInput = {
+  channel: Scalars['Float']
+  dummy?: Maybe<Scalars['Float']>
+}
+
+export type DataObjectOwnerChannelUpdateInput = {
+  channel?: Maybe<Scalars['Float']>
+  dummy?: Maybe<Scalars['Float']>
+}
+
+export type DataObjectOwnerChannelWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  channel_eq?: Maybe<Scalars['Int']>
+  channel_gt?: Maybe<Scalars['Int']>
+  channel_gte?: Maybe<Scalars['Int']>
+  channel_lt?: Maybe<Scalars['Int']>
+  channel_lte?: Maybe<Scalars['Int']>
+  channel_in?: Maybe<Array<Scalars['Int']>>
+  dummy_eq?: Maybe<Scalars['Int']>
+  dummy_gt?: Maybe<Scalars['Int']>
+  dummy_gte?: Maybe<Scalars['Int']>
+  dummy_lt?: Maybe<Scalars['Int']>
+  dummy_lte?: Maybe<Scalars['Int']>
+  dummy_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<DataObjectOwnerChannelWhereInput>>
+  OR?: Maybe<Array<DataObjectOwnerChannelWhereInput>>
+}
+
+export type DataObjectOwnerChannelWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DataObjectOwnerCouncil = {
+  /** Variant needs to have at least one property. This value is not used. */
+  dummy?: Maybe<Scalars['Int']>
+}
+
+export type DataObjectOwnerCouncilCreateInput = {
+  dummy?: Maybe<Scalars['Float']>
+}
+
+export type DataObjectOwnerCouncilUpdateInput = {
+  dummy?: Maybe<Scalars['Float']>
+}
+
+export type DataObjectOwnerCouncilWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  dummy_eq?: Maybe<Scalars['Int']>
+  dummy_gt?: Maybe<Scalars['Int']>
+  dummy_gte?: Maybe<Scalars['Int']>
+  dummy_lt?: Maybe<Scalars['Int']>
+  dummy_lte?: Maybe<Scalars['Int']>
+  dummy_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<DataObjectOwnerCouncilWhereInput>>
+  OR?: Maybe<Array<DataObjectOwnerCouncilWhereInput>>
+}
+
+export type DataObjectOwnerCouncilWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DataObjectOwnerDao = {
+  /** DAO identifier */
+  dao: Scalars['Int']
+}
+
+export type DataObjectOwnerDaoCreateInput = {
+  dao: Scalars['Float']
+}
+
+export type DataObjectOwnerDaoUpdateInput = {
+  dao?: Maybe<Scalars['Float']>
+}
+
+export type DataObjectOwnerDaoWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  dao_eq?: Maybe<Scalars['Int']>
+  dao_gt?: Maybe<Scalars['Int']>
+  dao_gte?: Maybe<Scalars['Int']>
+  dao_lt?: Maybe<Scalars['Int']>
+  dao_lte?: Maybe<Scalars['Int']>
+  dao_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<DataObjectOwnerDaoWhereInput>>
+  OR?: Maybe<Array<DataObjectOwnerDaoWhereInput>>
+}
+
+export type DataObjectOwnerDaoWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DataObjectOwnerMember = {
+  /** Member identifier */
+  member: Scalars['Int']
+  /** Variant needs to have at least one property. This value is not used. */
+  dummy?: Maybe<Scalars['Int']>
+}
+
+export type DataObjectOwnerMemberCreateInput = {
+  member: Scalars['Float']
+  dummy?: Maybe<Scalars['Float']>
+}
+
+export type DataObjectOwnerMemberUpdateInput = {
+  member?: Maybe<Scalars['Float']>
+  dummy?: Maybe<Scalars['Float']>
+}
+
+export type DataObjectOwnerMemberWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  member_eq?: Maybe<Scalars['Int']>
+  member_gt?: Maybe<Scalars['Int']>
+  member_gte?: Maybe<Scalars['Int']>
+  member_lt?: Maybe<Scalars['Int']>
+  member_lte?: Maybe<Scalars['Int']>
+  member_in?: Maybe<Array<Scalars['Int']>>
+  dummy_eq?: Maybe<Scalars['Int']>
+  dummy_gt?: Maybe<Scalars['Int']>
+  dummy_gte?: Maybe<Scalars['Int']>
+  dummy_lt?: Maybe<Scalars['Int']>
+  dummy_lte?: Maybe<Scalars['Int']>
+  dummy_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<DataObjectOwnerMemberWhereInput>>
+  OR?: Maybe<Array<DataObjectOwnerMemberWhereInput>>
+}
+
+export type DataObjectOwnerMemberWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DataObjectOwnerWorkingGroup = {
+  /** Working group identifier */
+  workingGroup: Scalars['Int']
+}
+
+export type DataObjectOwnerWorkingGroupCreateInput = {
+  workingGroup: Scalars['Float']
+}
+
+export type DataObjectOwnerWorkingGroupUpdateInput = {
+  workingGroup?: Maybe<Scalars['Float']>
+}
+
+export type DataObjectOwnerWorkingGroupWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  workingGroup_eq?: Maybe<Scalars['Int']>
+  workingGroup_gt?: Maybe<Scalars['Int']>
+  workingGroup_gte?: Maybe<Scalars['Int']>
+  workingGroup_lt?: Maybe<Scalars['Int']>
+  workingGroup_lte?: Maybe<Scalars['Int']>
+  workingGroup_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<DataObjectOwnerWorkingGroupWhereInput>>
+  OR?: Maybe<Array<DataObjectOwnerWorkingGroupWhereInput>>
+}
+
+export type DataObjectOwnerWorkingGroupWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DataObjectUpdateInput = {
+  owner?: Maybe<Scalars['JSONObject']>
+  createdInBlock?: Maybe<Scalars['Float']>
+  typeId?: Maybe<Scalars['Float']>
+  size?: Maybe<Scalars['Float']>
+  liaison?: Maybe<Scalars['ID']>
+  liaisonJudgement?: Maybe<LiaisonJudgement>
+  ipfsContentId?: Maybe<Scalars['String']>
+  joystreamContentId?: Maybe<Scalars['String']>
+}
+
+export type DataObjectWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  owner_json?: Maybe<Scalars['JSONObject']>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  typeId_eq?: Maybe<Scalars['Int']>
+  typeId_gt?: Maybe<Scalars['Int']>
+  typeId_gte?: Maybe<Scalars['Int']>
+  typeId_lt?: Maybe<Scalars['Int']>
+  typeId_lte?: Maybe<Scalars['Int']>
+  typeId_in?: Maybe<Array<Scalars['Int']>>
+  size_eq?: Maybe<Scalars['Int']>
+  size_gt?: Maybe<Scalars['Int']>
+  size_gte?: Maybe<Scalars['Int']>
+  size_lt?: Maybe<Scalars['Int']>
+  size_lte?: Maybe<Scalars['Int']>
+  size_in?: Maybe<Array<Scalars['Int']>>
+  liaison_eq?: Maybe<Scalars['ID']>
+  liaison_in?: Maybe<Array<Scalars['ID']>>
+  liaisonJudgement_eq?: Maybe<LiaisonJudgement>
+  liaisonJudgement_in?: Maybe<Array<LiaisonJudgement>>
+  ipfsContentId_eq?: Maybe<Scalars['String']>
+  ipfsContentId_contains?: Maybe<Scalars['String']>
+  ipfsContentId_startsWith?: Maybe<Scalars['String']>
+  ipfsContentId_endsWith?: Maybe<Scalars['String']>
+  ipfsContentId_in?: Maybe<Array<Scalars['String']>>
+  joystreamContentId_eq?: Maybe<Scalars['String']>
+  joystreamContentId_contains?: Maybe<Scalars['String']>
+  joystreamContentId_startsWith?: Maybe<Scalars['String']>
+  joystreamContentId_endsWith?: Maybe<Scalars['String']>
+  joystreamContentId_in?: Maybe<Array<Scalars['String']>>
+  liaison?: Maybe<WorkerWhereInput>
+  channelcoverPhotoDataObject_none?: Maybe<ChannelWhereInput>
+  channelcoverPhotoDataObject_some?: Maybe<ChannelWhereInput>
+  channelcoverPhotoDataObject_every?: Maybe<ChannelWhereInput>
+  channelavatarPhotoDataObject_none?: Maybe<ChannelWhereInput>
+  channelavatarPhotoDataObject_some?: Maybe<ChannelWhereInput>
+  channelavatarPhotoDataObject_every?: Maybe<ChannelWhereInput>
+  videothumbnailPhotoDataObject_none?: Maybe<VideoWhereInput>
+  videothumbnailPhotoDataObject_some?: Maybe<VideoWhereInput>
+  videothumbnailPhotoDataObject_every?: Maybe<VideoWhereInput>
+  videomediaDataObject_none?: Maybe<VideoWhereInput>
+  videomediaDataObject_some?: Maybe<VideoWhereInput>
+  videomediaDataObject_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<DataObjectWhereInput>>
+  OR?: Maybe<Array<DataObjectWhereInput>>
+}
+
+export type DataObjectWhereUniqueInput = {
+  id: Scalars['ID']
+}
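+
+// Illustrative sketch only (not part of the codegen output): a filter object
+// a storage node might build from DataObjectWhereInput above when looking up
+// data objects to sync. The CIDs and block number are placeholder values.
+export const exampleDataObjectFilter: DataObjectWhereInput = {
+  ipfsContentId_in: ['QmExampleHash1', 'QmExampleHash2'], // placeholder CIDs
+  createdInBlock_gt: 100, // hypothetical cutoff block
+}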
+
+export type DeleteResponse = {
+  id: Scalars['ID']
+}
+
+export type DistributionBucket = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  family: DistributionBucketFamily
+  familyId: Scalars['String']
+  operators: Array<DistributionBucketOperator>
+  /** Whether the bucket is accepting any new bags */
+  acceptingNewBags: Scalars['Boolean']
+  /** Whether the bucket is currently distributing content */
+  distributing: Scalars['Boolean']
+  bagAssignments: Array<StorageBagDistributionAssignment>
+}
+
+export type DistributionBucketConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketCreateInput = {
+  family: Scalars['ID']
+  acceptingNewBags: Scalars['Boolean']
+  distributing: Scalars['Boolean']
+}
+
+export type DistributionBucketEdge = {
+  node: DistributionBucket
+  cursor: Scalars['String']
+}
+
+export type DistributionBucketFamily = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  metadata?: Maybe<DistributionBucketFamilyMetadata>
+  metadataId?: Maybe<Scalars['String']>
+  buckets: Array<DistributionBucket>
+}
+
+export type DistributionBucketFamilyConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketFamilyEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketFamilyCreateInput = {
+  metadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketFamilyEdge = {
+  node: DistributionBucketFamily
+  cursor: Scalars['String']
+}
+
+export type DistributionBucketFamilyMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Name of the geographical region covered by the family (e.g. us-east-1) */
+  region?: Maybe<Scalars['String']>
+  /** Optional, more specific description of the region covered by the family */
+  description?: Maybe<Scalars['String']>
+  boundary: Array<GeoCoordinates>
+  distributionbucketfamilymetadata?: Maybe<Array<DistributionBucketFamily>>
+}
+
+export type DistributionBucketFamilyMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketFamilyMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketFamilyMetadataCreateInput = {
+  region?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+}
+
+export type DistributionBucketFamilyMetadataEdge = {
+  node: DistributionBucketFamilyMetadata
+  cursor: Scalars['String']
+}
+
+export enum DistributionBucketFamilyMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  RegionAsc = 'region_ASC',
+  RegionDesc = 'region_DESC',
+  DescriptionAsc = 'description_ASC',
+  DescriptionDesc = 'description_DESC',
+}
+
+export type DistributionBucketFamilyMetadataUpdateInput = {
+  region?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+}
+
+export type DistributionBucketFamilyMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  region_eq?: Maybe<Scalars['String']>
+  region_contains?: Maybe<Scalars['String']>
+  region_startsWith?: Maybe<Scalars['String']>
+  region_endsWith?: Maybe<Scalars['String']>
+  region_in?: Maybe<Array<Scalars['String']>>
+  description_eq?: Maybe<Scalars['String']>
+  description_contains?: Maybe<Scalars['String']>
+  description_startsWith?: Maybe<Scalars['String']>
+  description_endsWith?: Maybe<Scalars['String']>
+  description_in?: Maybe<Array<Scalars['String']>>
+  boundary_none?: Maybe<GeoCoordinatesWhereInput>
+  boundary_some?: Maybe<GeoCoordinatesWhereInput>
+  boundary_every?: Maybe<GeoCoordinatesWhereInput>
+  distributionbucketfamilymetadata_none?: Maybe<DistributionBucketFamilyWhereInput>
+  distributionbucketfamilymetadata_some?: Maybe<DistributionBucketFamilyWhereInput>
+  distributionbucketfamilymetadata_every?: Maybe<DistributionBucketFamilyWhereInput>
+  AND?: Maybe<Array<DistributionBucketFamilyMetadataWhereInput>>
+  OR?: Maybe<Array<DistributionBucketFamilyMetadataWhereInput>>
+}
+
+export type DistributionBucketFamilyMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum DistributionBucketFamilyOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  MetadataAsc = 'metadata_ASC',
+  MetadataDesc = 'metadata_DESC',
+}
+
+export type DistributionBucketFamilyUpdateInput = {
+  metadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketFamilyWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  metadata_eq?: Maybe<Scalars['ID']>
+  metadata_in?: Maybe<Array<Scalars['ID']>>
+  metadata?: Maybe<DistributionBucketFamilyMetadataWhereInput>
+  buckets_none?: Maybe<DistributionBucketWhereInput>
+  buckets_some?: Maybe<DistributionBucketWhereInput>
+  buckets_every?: Maybe<DistributionBucketWhereInput>
+  AND?: Maybe<Array<DistributionBucketFamilyWhereInput>>
+  OR?: Maybe<Array<DistributionBucketFamilyWhereInput>>
+}
+
+export type DistributionBucketFamilyWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DistributionBucketOperator = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  distributionBucket: DistributionBucket
+  distributionBucketId: Scalars['String']
+  /** ID of the distribution group worker */
+  workerId: Scalars['Int']
+  /** Current operator status */
+  status: DistributionBucketOperatorStatus
+  metadata?: Maybe<DistributionBucketOperatorMetadata>
+  metadataId?: Maybe<Scalars['String']>
+}
+
+export type DistributionBucketOperatorConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketOperatorEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketOperatorCreateInput = {
+  distributionBucket: Scalars['ID']
+  workerId: Scalars['Float']
+  status: DistributionBucketOperatorStatus
+  metadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketOperatorEdge = {
+  node: DistributionBucketOperator
+  cursor: Scalars['String']
+}
+
+export type DistributionBucketOperatorMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Root distributor node API endpoint */
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<NodeLocationMetadata>
+  nodeLocationId?: Maybe<Scalars['String']>
+  /** Additional information about the node/operator */
+  extra?: Maybe<Scalars['String']>
+  distributionbucketoperatormetadata?: Maybe<Array<DistributionBucketOperator>>
+}
+
+export type DistributionBucketOperatorMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketOperatorMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketOperatorMetadataCreateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<Scalars['ID']>
+  extra?: Maybe<Scalars['String']>
+}
+
+export type DistributionBucketOperatorMetadataEdge = {
+  node: DistributionBucketOperatorMetadata
+  cursor: Scalars['String']
+}
+
+export enum DistributionBucketOperatorMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NodeEndpointAsc = 'nodeEndpoint_ASC',
+  NodeEndpointDesc = 'nodeEndpoint_DESC',
+  NodeLocationAsc = 'nodeLocation_ASC',
+  NodeLocationDesc = 'nodeLocation_DESC',
+  ExtraAsc = 'extra_ASC',
+  ExtraDesc = 'extra_DESC',
+}
+
+export type DistributionBucketOperatorMetadataUpdateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<Scalars['ID']>
+  extra?: Maybe<Scalars['String']>
+}
+
+export type DistributionBucketOperatorMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  nodeEndpoint_eq?: Maybe<Scalars['String']>
+  nodeEndpoint_contains?: Maybe<Scalars['String']>
+  nodeEndpoint_startsWith?: Maybe<Scalars['String']>
+  nodeEndpoint_endsWith?: Maybe<Scalars['String']>
+  nodeEndpoint_in?: Maybe<Array<Scalars['String']>>
+  nodeLocation_eq?: Maybe<Scalars['ID']>
+  nodeLocation_in?: Maybe<Array<Scalars['ID']>>
+  extra_eq?: Maybe<Scalars['String']>
+  extra_contains?: Maybe<Scalars['String']>
+  extra_startsWith?: Maybe<Scalars['String']>
+  extra_endsWith?: Maybe<Scalars['String']>
+  extra_in?: Maybe<Array<Scalars['String']>>
+  nodeLocation?: Maybe<NodeLocationMetadataWhereInput>
+  distributionbucketoperatormetadata_none?: Maybe<DistributionBucketOperatorWhereInput>
+  distributionbucketoperatormetadata_some?: Maybe<DistributionBucketOperatorWhereInput>
+  distributionbucketoperatormetadata_every?: Maybe<DistributionBucketOperatorWhereInput>
+  AND?: Maybe<Array<DistributionBucketOperatorMetadataWhereInput>>
+  OR?: Maybe<Array<DistributionBucketOperatorMetadataWhereInput>>
+}
+
+export type DistributionBucketOperatorMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum DistributionBucketOperatorOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  DistributionBucketAsc = 'distributionBucket_ASC',
+  DistributionBucketDesc = 'distributionBucket_DESC',
+  WorkerIdAsc = 'workerId_ASC',
+  WorkerIdDesc = 'workerId_DESC',
+  StatusAsc = 'status_ASC',
+  StatusDesc = 'status_DESC',
+  MetadataAsc = 'metadata_ASC',
+  MetadataDesc = 'metadata_DESC',
+}
+
+export enum DistributionBucketOperatorStatus {
+  Invited = 'INVITED',
+  Active = 'ACTIVE',
+}
+
+export type DistributionBucketOperatorUpdateInput = {
+  distributionBucket?: Maybe<Scalars['ID']>
+  workerId?: Maybe<Scalars['Float']>
+  status?: Maybe<DistributionBucketOperatorStatus>
+  metadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketOperatorWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  distributionBucket_eq?: Maybe<Scalars['ID']>
+  distributionBucket_in?: Maybe<Array<Scalars['ID']>>
+  workerId_eq?: Maybe<Scalars['Int']>
+  workerId_gt?: Maybe<Scalars['Int']>
+  workerId_gte?: Maybe<Scalars['Int']>
+  workerId_lt?: Maybe<Scalars['Int']>
+  workerId_lte?: Maybe<Scalars['Int']>
+  workerId_in?: Maybe<Array<Scalars['Int']>>
+  status_eq?: Maybe<DistributionBucketOperatorStatus>
+  status_in?: Maybe<Array<DistributionBucketOperatorStatus>>
+  metadata_eq?: Maybe<Scalars['ID']>
+  metadata_in?: Maybe<Array<Scalars['ID']>>
+  distributionBucket?: Maybe<DistributionBucketWhereInput>
+  metadata?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  AND?: Maybe<Array<DistributionBucketOperatorWhereInput>>
+  OR?: Maybe<Array<DistributionBucketOperatorWhereInput>>
+}
+
+export type DistributionBucketOperatorWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum DistributionBucketOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  FamilyAsc = 'family_ASC',
+  FamilyDesc = 'family_DESC',
+  AcceptingNewBagsAsc = 'acceptingNewBags_ASC',
+  AcceptingNewBagsDesc = 'acceptingNewBags_DESC',
+  DistributingAsc = 'distributing_ASC',
+  DistributingDesc = 'distributing_DESC',
+}
+
+export type DistributionBucketUpdateInput = {
+  family?: Maybe<Scalars['ID']>
+  acceptingNewBags?: Maybe<Scalars['Boolean']>
+  distributing?: Maybe<Scalars['Boolean']>
+}
+
+export type DistributionBucketWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  family_eq?: Maybe<Scalars['ID']>
+  family_in?: Maybe<Array<Scalars['ID']>>
+  acceptingNewBags_eq?: Maybe<Scalars['Boolean']>
+  acceptingNewBags_in?: Maybe<Array<Scalars['Boolean']>>
+  distributing_eq?: Maybe<Scalars['Boolean']>
+  distributing_in?: Maybe<Array<Scalars['Boolean']>>
+  family?: Maybe<DistributionBucketFamilyWhereInput>
+  operators_none?: Maybe<DistributionBucketOperatorWhereInput>
+  operators_some?: Maybe<DistributionBucketOperatorWhereInput>
+  operators_every?: Maybe<DistributionBucketOperatorWhereInput>
+  bagAssignments_none?: Maybe<StorageBagDistributionAssignmentWhereInput>
+  bagAssignments_some?: Maybe<StorageBagDistributionAssignmentWhereInput>
+  bagAssignments_every?: Maybe<StorageBagDistributionAssignmentWhereInput>
+  AND?: Maybe<Array<DistributionBucketWhereInput>>
+  OR?: Maybe<Array<DistributionBucketWhereInput>>
+}
+
+export type DistributionBucketWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type GeoCoordinates = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  latitude: Scalars['Float']
+  longitude: Scalars['Float']
+  boundarySourceBucketFamilyMeta?: Maybe<DistributionBucketFamilyMetadata>
+  boundarySourceBucketFamilyMetaId?: Maybe<Scalars['String']>
+  nodelocationmetadatacoordinates?: Maybe<Array<NodeLocationMetadata>>
+}
+
+export type GeoCoordinatesConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<GeoCoordinatesEdge>
+  pageInfo: PageInfo
+}
+
+export type GeoCoordinatesCreateInput = {
+  latitude: Scalars['Float']
+  longitude: Scalars['Float']
+  boundarySourceBucketFamilyMeta?: Maybe<Scalars['ID']>
+}
+
+export type GeoCoordinatesEdge = {
+  node: GeoCoordinates
+  cursor: Scalars['String']
+}
+
+export enum GeoCoordinatesOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  LatitudeAsc = 'latitude_ASC',
+  LatitudeDesc = 'latitude_DESC',
+  LongitudeAsc = 'longitude_ASC',
+  LongitudeDesc = 'longitude_DESC',
+  BoundarySourceBucketFamilyMetaAsc = 'boundarySourceBucketFamilyMeta_ASC',
+  BoundarySourceBucketFamilyMetaDesc = 'boundarySourceBucketFamilyMeta_DESC',
+}
+
+export type GeoCoordinatesUpdateInput = {
+  latitude?: Maybe<Scalars['Float']>
+  longitude?: Maybe<Scalars['Float']>
+  boundarySourceBucketFamilyMeta?: Maybe<Scalars['ID']>
+}
+
+export type GeoCoordinatesWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  latitude_eq?: Maybe<Scalars['Float']>
+  latitude_gt?: Maybe<Scalars['Float']>
+  latitude_gte?: Maybe<Scalars['Float']>
+  latitude_lt?: Maybe<Scalars['Float']>
+  latitude_lte?: Maybe<Scalars['Float']>
+  latitude_in?: Maybe<Array<Scalars['Float']>>
+  longitude_eq?: Maybe<Scalars['Float']>
+  longitude_gt?: Maybe<Scalars['Float']>
+  longitude_gte?: Maybe<Scalars['Float']>
+  longitude_lt?: Maybe<Scalars['Float']>
+  longitude_lte?: Maybe<Scalars['Float']>
+  longitude_in?: Maybe<Array<Scalars['Float']>>
+  boundarySourceBucketFamilyMeta_eq?: Maybe<Scalars['ID']>
+  boundarySourceBucketFamilyMeta_in?: Maybe<Array<Scalars['ID']>>
+  boundarySourceBucketFamilyMeta?: Maybe<DistributionBucketFamilyMetadataWhereInput>
+  nodelocationmetadatacoordinates_none?: Maybe<NodeLocationMetadataWhereInput>
+  nodelocationmetadatacoordinates_some?: Maybe<NodeLocationMetadataWhereInput>
+  nodelocationmetadatacoordinates_every?: Maybe<NodeLocationMetadataWhereInput>
+  AND?: Maybe<Array<GeoCoordinatesWhereInput>>
+  OR?: Maybe<Array<GeoCoordinatesWhereInput>>
+}
+
+export type GeoCoordinatesWhereUniqueInput = {
+  id: Scalars['ID']
+}
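+
+// Illustrative sketch only: GeoCoordinatesWhereInput supports range operators
+// on latitude/longitude, so a bounding-box query can be expressed directly.
+// The coordinates below are arbitrary placeholders.
+export const exampleBoundingBox: GeoCoordinatesWhereInput = {
+  latitude_gte: 40.0,
+  latitude_lte: 60.0,
+  longitude_gte: -10.0,
+  longitude_lte: 30.0,
+}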
+
+export type Language = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** ISO 639-1 language identifier */
+  iso: Scalars['String']
+  createdInBlock: Scalars['Int']
+  channellanguage?: Maybe<Array<Channel>>
+  videolanguage?: Maybe<Array<Video>>
+}
+
+export type LanguageConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<LanguageEdge>
+  pageInfo: PageInfo
+}
+
+export type LanguageCreateInput = {
+  iso: Scalars['String']
+  createdInBlock: Scalars['Float']
+}
+
+export type LanguageEdge = {
+  node: Language
+  cursor: Scalars['String']
+}
+
+export enum LanguageOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsoAsc = 'iso_ASC',
+  IsoDesc = 'iso_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type LanguageUpdateInput = {
+  iso?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type LanguageWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  iso_eq?: Maybe<Scalars['String']>
+  iso_contains?: Maybe<Scalars['String']>
+  iso_startsWith?: Maybe<Scalars['String']>
+  iso_endsWith?: Maybe<Scalars['String']>
+  iso_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  channellanguage_none?: Maybe<ChannelWhereInput>
+  channellanguage_some?: Maybe<ChannelWhereInput>
+  channellanguage_every?: Maybe<ChannelWhereInput>
+  videolanguage_none?: Maybe<VideoWhereInput>
+  videolanguage_some?: Maybe<VideoWhereInput>
+  videolanguage_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<LanguageWhereInput>>
+  OR?: Maybe<Array<LanguageWhereInput>>
+}
+
+export type LanguageWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum LiaisonJudgement {
+  Pending = 'PENDING',
+  Accepted = 'ACCEPTED',
+}
+
+export type License = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** License code defined by Joystream */
+  code?: Maybe<Scalars['Int']>
+  /** Attribution (if required by the license) */
+  attribution?: Maybe<Scalars['String']>
+  /** Custom license content */
+  customText?: Maybe<Scalars['String']>
+  videolicense?: Maybe<Array<Video>>
+}
+
+export type LicenseConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<LicenseEdge>
+  pageInfo: PageInfo
+}
+
+export type LicenseCreateInput = {
+  code?: Maybe<Scalars['Float']>
+  attribution?: Maybe<Scalars['String']>
+  customText?: Maybe<Scalars['String']>
+}
+
+export type LicenseEdge = {
+  node: License
+  cursor: Scalars['String']
+}
+
+export enum LicenseOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CodeAsc = 'code_ASC',
+  CodeDesc = 'code_DESC',
+  AttributionAsc = 'attribution_ASC',
+  AttributionDesc = 'attribution_DESC',
+  CustomTextAsc = 'customText_ASC',
+  CustomTextDesc = 'customText_DESC',
+}
+
+export type LicenseUpdateInput = {
+  code?: Maybe<Scalars['Float']>
+  attribution?: Maybe<Scalars['String']>
+  customText?: Maybe<Scalars['String']>
+}
+
+export type LicenseWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  code_eq?: Maybe<Scalars['Int']>
+  code_gt?: Maybe<Scalars['Int']>
+  code_gte?: Maybe<Scalars['Int']>
+  code_lt?: Maybe<Scalars['Int']>
+  code_lte?: Maybe<Scalars['Int']>
+  code_in?: Maybe<Array<Scalars['Int']>>
+  attribution_eq?: Maybe<Scalars['String']>
+  attribution_contains?: Maybe<Scalars['String']>
+  attribution_startsWith?: Maybe<Scalars['String']>
+  attribution_endsWith?: Maybe<Scalars['String']>
+  attribution_in?: Maybe<Array<Scalars['String']>>
+  customText_eq?: Maybe<Scalars['String']>
+  customText_contains?: Maybe<Scalars['String']>
+  customText_startsWith?: Maybe<Scalars['String']>
+  customText_endsWith?: Maybe<Scalars['String']>
+  customText_in?: Maybe<Array<Scalars['String']>>
+  videolicense_none?: Maybe<VideoWhereInput>
+  videolicense_some?: Maybe<VideoWhereInput>
+  videolicense_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<LicenseWhereInput>>
+  OR?: Maybe<Array<LicenseWhereInput>>
+}
+
+export type LicenseWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type MembersByHandleFtsOutput = {
+  item: MembersByHandleSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type MembersByHandleSearchResult = Membership
+
+/** Stored information about a registered user */
+export type Membership = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** The unique handle chosen by the member */
+  handle: Scalars['String']
+  /** A URL to the member's avatar image */
+  avatarUri?: Maybe<Scalars['String']>
+  /** Short text chosen by the member to share information about themselves */
+  about?: Maybe<Scalars['String']>
+  /** Member's controller account id */
+  controllerAccount: Scalars['String']
+  /** Member's root account id */
+  rootAccount: Scalars['String']
+  /** Block number when the member was registered */
+  createdInBlock: Scalars['Int']
+  /** How the member was registered */
+  entry: MembershipEntryMethod
+  /** The type of subscription the member has purchased, if any. */
+  subscription?: Maybe<Scalars['Int']>
+  channels: Array<Channel>
+}
+
+export type MembershipConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<MembershipEdge>
+  pageInfo: PageInfo
+}
+
+export type MembershipCreateInput = {
+  handle: Scalars['String']
+  avatarUri?: Maybe<Scalars['String']>
+  about?: Maybe<Scalars['String']>
+  controllerAccount: Scalars['String']
+  rootAccount: Scalars['String']
+  createdInBlock: Scalars['Float']
+  entry: MembershipEntryMethod
+  subscription?: Maybe<Scalars['Float']>
+}
+
+export type MembershipEdge = {
+  node: Membership
+  cursor: Scalars['String']
+}
+
+export enum MembershipEntryMethod {
+  Paid = 'PAID',
+  Screening = 'SCREENING',
+  Genesis = 'GENESIS',
+}
+
+export enum MembershipOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  HandleAsc = 'handle_ASC',
+  HandleDesc = 'handle_DESC',
+  AvatarUriAsc = 'avatarUri_ASC',
+  AvatarUriDesc = 'avatarUri_DESC',
+  AboutAsc = 'about_ASC',
+  AboutDesc = 'about_DESC',
+  ControllerAccountAsc = 'controllerAccount_ASC',
+  ControllerAccountDesc = 'controllerAccount_DESC',
+  RootAccountAsc = 'rootAccount_ASC',
+  RootAccountDesc = 'rootAccount_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+  EntryAsc = 'entry_ASC',
+  EntryDesc = 'entry_DESC',
+  SubscriptionAsc = 'subscription_ASC',
+  SubscriptionDesc = 'subscription_DESC',
+}
+
+export type MembershipUpdateInput = {
+  handle?: Maybe<Scalars['String']>
+  avatarUri?: Maybe<Scalars['String']>
+  about?: Maybe<Scalars['String']>
+  controllerAccount?: Maybe<Scalars['String']>
+  rootAccount?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+  entry?: Maybe<MembershipEntryMethod>
+  subscription?: Maybe<Scalars['Float']>
+}
+
+export type MembershipWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  handle_eq?: Maybe<Scalars['String']>
+  handle_contains?: Maybe<Scalars['String']>
+  handle_startsWith?: Maybe<Scalars['String']>
+  handle_endsWith?: Maybe<Scalars['String']>
+  handle_in?: Maybe<Array<Scalars['String']>>
+  avatarUri_eq?: Maybe<Scalars['String']>
+  avatarUri_contains?: Maybe<Scalars['String']>
+  avatarUri_startsWith?: Maybe<Scalars['String']>
+  avatarUri_endsWith?: Maybe<Scalars['String']>
+  avatarUri_in?: Maybe<Array<Scalars['String']>>
+  about_eq?: Maybe<Scalars['String']>
+  about_contains?: Maybe<Scalars['String']>
+  about_startsWith?: Maybe<Scalars['String']>
+  about_endsWith?: Maybe<Scalars['String']>
+  about_in?: Maybe<Array<Scalars['String']>>
+  controllerAccount_eq?: Maybe<Scalars['String']>
+  controllerAccount_contains?: Maybe<Scalars['String']>
+  controllerAccount_startsWith?: Maybe<Scalars['String']>
+  controllerAccount_endsWith?: Maybe<Scalars['String']>
+  controllerAccount_in?: Maybe<Array<Scalars['String']>>
+  rootAccount_eq?: Maybe<Scalars['String']>
+  rootAccount_contains?: Maybe<Scalars['String']>
+  rootAccount_startsWith?: Maybe<Scalars['String']>
+  rootAccount_endsWith?: Maybe<Scalars['String']>
+  rootAccount_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  entry_eq?: Maybe<MembershipEntryMethod>
+  entry_in?: Maybe<Array<MembershipEntryMethod>>
+  subscription_eq?: Maybe<Scalars['Int']>
+  subscription_gt?: Maybe<Scalars['Int']>
+  subscription_gte?: Maybe<Scalars['Int']>
+  subscription_lt?: Maybe<Scalars['Int']>
+  subscription_lte?: Maybe<Scalars['Int']>
+  subscription_in?: Maybe<Array<Scalars['Int']>>
+  channels_none?: Maybe<ChannelWhereInput>
+  channels_some?: Maybe<ChannelWhereInput>
+  channels_every?: Maybe<ChannelWhereInput>
+  AND?: Maybe<Array<MembershipWhereInput>>
+  OR?: Maybe<Array<MembershipWhereInput>>
+}
+
+export type MembershipWhereUniqueInput = {
+  id?: Maybe<Scalars['ID']>
+  handle?: Maybe<Scalars['String']>
+}
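+
+// Illustrative sketch only: unlike the other *WhereUniqueInput types above,
+// MembershipWhereUniqueInput exposes two alternative keys, so a membership
+// can be fetched by id or by handle (supply exactly one). Placeholder values:
+export const membershipByHandle: MembershipWhereUniqueInput = { handle: 'alice' }
+export const membershipById: MembershipWhereUniqueInput = { id: '42' }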
+
+export type NextEntityId = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Next deterministic ID for entities without a custom ID */
+  nextId: Scalars['Int']
+}
+
+export type NextEntityIdConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<NextEntityIdEdge>
+  pageInfo: PageInfo
+}
+
+export type NextEntityIdCreateInput = {
+  nextId: Scalars['Float']
+}
+
+export type NextEntityIdEdge = {
+  node: NextEntityId
+  cursor: Scalars['String']
+}
+
+export enum NextEntityIdOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NextIdAsc = 'nextId_ASC',
+  NextIdDesc = 'nextId_DESC',
+}
+
+export type NextEntityIdUpdateInput = {
+  nextId?: Maybe<Scalars['Float']>
+}
+
+export type NextEntityIdWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  nextId_eq?: Maybe<Scalars['Int']>
+  nextId_gt?: Maybe<Scalars['Int']>
+  nextId_gte?: Maybe<Scalars['Int']>
+  nextId_lt?: Maybe<Scalars['Int']>
+  nextId_lte?: Maybe<Scalars['Int']>
+  nextId_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<NextEntityIdWhereInput>>
+  OR?: Maybe<Array<NextEntityIdWhereInput>>
+}
+
+export type NextEntityIdWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type NodeLocationMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** ISO 3166-1 alpha-2 country code (2 letters) */
+  countryCode?: Maybe<Scalars['String']>
+  /** City name */
+  city?: Maybe<Scalars['String']>
+  coordinates?: Maybe<GeoCoordinates>
+  coordinatesId?: Maybe<Scalars['String']>
+  distributionbucketoperatormetadatanodeLocation?: Maybe<Array<DistributionBucketOperatorMetadata>>
+  storagebucketoperatormetadatanodeLocation?: Maybe<Array<StorageBucketOperatorMetadata>>
+}
+
+export type NodeLocationMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<NodeLocationMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type NodeLocationMetadataCreateInput = {
+  countryCode?: Maybe<Scalars['String']>
+  city?: Maybe<Scalars['String']>
+  coordinates?: Maybe<Scalars['ID']>
+}
+
+export type NodeLocationMetadataEdge = {
+  node: NodeLocationMetadata
+  cursor: Scalars['String']
+}
+
+export enum NodeLocationMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CountryCodeAsc = 'countryCode_ASC',
+  CountryCodeDesc = 'countryCode_DESC',
+  CityAsc = 'city_ASC',
+  CityDesc = 'city_DESC',
+  CoordinatesAsc = 'coordinates_ASC',
+  CoordinatesDesc = 'coordinates_DESC',
+}
+
+export type NodeLocationMetadataUpdateInput = {
+  countryCode?: Maybe<Scalars['String']>
+  city?: Maybe<Scalars['String']>
+  coordinates?: Maybe<Scalars['ID']>
+}
+
+export type NodeLocationMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  countryCode_eq?: Maybe<Scalars['String']>
+  countryCode_contains?: Maybe<Scalars['String']>
+  countryCode_startsWith?: Maybe<Scalars['String']>
+  countryCode_endsWith?: Maybe<Scalars['String']>
+  countryCode_in?: Maybe<Array<Scalars['String']>>
+  city_eq?: Maybe<Scalars['String']>
+  city_contains?: Maybe<Scalars['String']>
+  city_startsWith?: Maybe<Scalars['String']>
+  city_endsWith?: Maybe<Scalars['String']>
+  city_in?: Maybe<Array<Scalars['String']>>
+  coordinates_eq?: Maybe<Scalars['ID']>
+  coordinates_in?: Maybe<Array<Scalars['ID']>>
+  coordinates?: Maybe<GeoCoordinatesWhereInput>
+  distributionbucketoperatormetadatanodeLocation_none?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  distributionbucketoperatormetadatanodeLocation_some?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  distributionbucketoperatormetadatanodeLocation_every?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  storagebucketoperatormetadatanodeLocation_none?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  storagebucketoperatormetadatanodeLocation_some?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  storagebucketoperatormetadatanodeLocation_every?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  AND?: Maybe<Array<NodeLocationMetadataWhereInput>>
+  OR?: Maybe<Array<NodeLocationMetadataWhereInput>>
+}
+
+export type NodeLocationMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type PageInfo = {
+  hasNextPage: Scalars['Boolean']
+  hasPreviousPage: Scalars['Boolean']
+  startCursor?: Maybe<Scalars['String']>
+  endCursor?: Maybe<Scalars['String']>
+}
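+
+// Illustrative sketch only: Relay-style cursor pagination over the generated
+// *Connection types, driven by PageInfo above. `fetchPage` is a hypothetical
+// caller-supplied helper (no such function is generated in this file).
+export async function drainConnection(
+  fetchPage: (after?: string) => Promise<{ edges: Array<{ cursor: string }>; pageInfo: PageInfo }>
+): Promise<number> {
+  let after: string | undefined
+  let total = 0
+  for (;;) {
+    const page = await fetchPage(after)
+    total += page.edges.length
+    if (!page.pageInfo.hasNextPage || !page.pageInfo.endCursor) break
+    after = page.pageInfo.endCursor
+  }
+  return total
+}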
+
+export type ProcessorState = {
+  lastCompleteBlock: Scalars['Float']
+  lastProcessedEvent: Scalars['String']
+  indexerHead: Scalars['Float']
+  chainHead: Scalars['Float']
+}
+
+export type Query = {
+  channelCategories: Array<ChannelCategory>
+  channelCategoryByUniqueInput?: Maybe<ChannelCategory>
+  channelCategoriesConnection: ChannelCategoryConnection
+  channels: Array<Channel>
+  channelByUniqueInput?: Maybe<Channel>
+  channelsConnection: ChannelConnection
+  curatorGroups: Array<CuratorGroup>
+  curatorGroupByUniqueInput?: Maybe<CuratorGroup>
+  curatorGroupsConnection: CuratorGroupConnection
+  dataObjects: Array<DataObject>
+  dataObjectByUniqueInput?: Maybe<DataObject>
+  dataObjectsConnection: DataObjectConnection
+  distributionBucketFamilyMetadata: Array<DistributionBucketFamilyMetadata>
+  distributionBucketFamilyMetadataByUniqueInput?: Maybe<DistributionBucketFamilyMetadata>
+  distributionBucketFamilyMetadataConnection: DistributionBucketFamilyMetadataConnection
+  distributionBucketFamilies: Array<DistributionBucketFamily>
+  distributionBucketFamilyByUniqueInput?: Maybe<DistributionBucketFamily>
+  distributionBucketFamiliesConnection: DistributionBucketFamilyConnection
+  distributionBucketOperatorMetadata: Array<DistributionBucketOperatorMetadata>
+  distributionBucketOperatorMetadataByUniqueInput?: Maybe<DistributionBucketOperatorMetadata>
+  distributionBucketOperatorMetadataConnection: DistributionBucketOperatorMetadataConnection
+  distributionBucketOperators: Array<DistributionBucketOperator>
+  distributionBucketOperatorByUniqueInput?: Maybe<DistributionBucketOperator>
+  distributionBucketOperatorsConnection: DistributionBucketOperatorConnection
+  distributionBuckets: Array<DistributionBucket>
+  distributionBucketByUniqueInput?: Maybe<DistributionBucket>
+  distributionBucketsConnection: DistributionBucketConnection
+  geoCoordinates: Array<GeoCoordinates>
+  geoCoordinatesByUniqueInput?: Maybe<GeoCoordinates>
+  geoCoordinatesConnection: GeoCoordinatesConnection
+  languages: Array<Language>
+  languageByUniqueInput?: Maybe<Language>
+  languagesConnection: LanguageConnection
+  licenses: Array<License>
+  licenseByUniqueInput?: Maybe<License>
+  licensesConnection: LicenseConnection
+  memberships: Array<Membership>
+  membershipByUniqueInput?: Maybe<Membership>
+  membershipsConnection: MembershipConnection
+  nextEntityIds: Array<NextEntityId>
+  nextEntityIdByUniqueInput?: Maybe<NextEntityId>
+  nextEntityIdsConnection: NextEntityIdConnection
+  nodeLocationMetadata: Array<NodeLocationMetadata>
+  nodeLocationMetadataByUniqueInput?: Maybe<NodeLocationMetadata>
+  nodeLocationMetadataConnection: NodeLocationMetadataConnection
+  channelCategoriesByName: Array<ChannelCategoriesByNameFtsOutput>
+  membersByHandle: Array<MembersByHandleFtsOutput>
+  search: Array<SearchFtsOutput>
+  videoCategoriesByName: Array<VideoCategoriesByNameFtsOutput>
+  storageBagDistributionAssignments: Array<StorageBagDistributionAssignment>
+  storageBagDistributionAssignmentByUniqueInput?: Maybe<StorageBagDistributionAssignment>
+  storageBagDistributionAssignmentsConnection: StorageBagDistributionAssignmentConnection
+  storageBagStorageAssignments: Array<StorageBagStorageAssignment>
+  storageBagStorageAssignmentByUniqueInput?: Maybe<StorageBagStorageAssignment>
+  storageBagStorageAssignmentsConnection: StorageBagStorageAssignmentConnection
+  storageBags: Array<StorageBag>
+  storageBagByUniqueInput?: Maybe<StorageBag>
+  storageBagsConnection: StorageBagConnection
+  storageBucketOperatorMetadata: Array<StorageBucketOperatorMetadata>
+  storageBucketOperatorMetadataByUniqueInput?: Maybe<StorageBucketOperatorMetadata>
+  storageBucketOperatorMetadataConnection: StorageBucketOperatorMetadataConnection
+  storageBuckets: Array<StorageBucket>
+  storageBucketByUniqueInput?: Maybe<StorageBucket>
+  storageBucketsConnection: StorageBucketConnection
+  storageDataObjects: Array<StorageDataObject>
+  storageDataObjectByUniqueInput?: Maybe<StorageDataObject>
+  storageDataObjectsConnection: StorageDataObjectConnection
+  storageSystemParameters: Array<StorageSystemParameters>
+  storageSystemParametersByUniqueInput?: Maybe<StorageSystemParameters>
+  storageSystemParametersConnection: StorageSystemParametersConnection
+  videoCategories: Array<VideoCategory>
+  videoCategoryByUniqueInput?: Maybe<VideoCategory>
+  videoCategoriesConnection: VideoCategoryConnection
+  videoMediaEncodings: Array<VideoMediaEncoding>
+  videoMediaEncodingByUniqueInput?: Maybe<VideoMediaEncoding>
+  videoMediaEncodingsConnection: VideoMediaEncodingConnection
+  videoMediaMetadata: Array<VideoMediaMetadata>
+  videoMediaMetadataByUniqueInput?: Maybe<VideoMediaMetadata>
+  videoMediaMetadataConnection: VideoMediaMetadataConnection
+  videos: Array<Video>
+  videoByUniqueInput?: Maybe<Video>
+  videosConnection: VideoConnection
+  workers: Array<Worker>
+  workerByUniqueInput?: Maybe<Worker>
+  workersConnection: WorkerConnection
+}
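+
+// Illustrative sketch only: every root field on Query above has a matching
+// Query*Args type below (offset/limit/where/orderBy for list fields;
+// first/after/last/before for connections). A typed argument object for the
+// `dataObjects` field might look like this (placeholder values):
+export const exampleDataObjectsArgs: QueryDataObjectsArgs = {
+  limit: 50, // hypothetical page size
+  where: { liaisonJudgement_eq: LiaisonJudgement.Accepted },
+}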
+
+export type QueryChannelCategoriesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<ChannelCategoryWhereInput>
+  orderBy?: Maybe<Array<ChannelCategoryOrderByInput>>
+}
+
+export type QueryChannelCategoryByUniqueInputArgs = {
+  where: ChannelCategoryWhereUniqueInput
+}
+
+export type QueryChannelCategoriesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<ChannelCategoryWhereInput>
+  orderBy?: Maybe<Array<ChannelCategoryOrderByInput>>
+}
+
+export type QueryChannelsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<ChannelWhereInput>
+  orderBy?: Maybe<Array<ChannelOrderByInput>>
+}
+
+export type QueryChannelByUniqueInputArgs = {
+  where: ChannelWhereUniqueInput
+}
+
+export type QueryChannelsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<ChannelWhereInput>
+  orderBy?: Maybe<Array<ChannelOrderByInput>>
+}
+
+export type QueryCuratorGroupsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<CuratorGroupWhereInput>
+  orderBy?: Maybe<Array<CuratorGroupOrderByInput>>
+}
+
+export type QueryCuratorGroupByUniqueInputArgs = {
+  where: CuratorGroupWhereUniqueInput
+}
+
+export type QueryCuratorGroupsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<CuratorGroupWhereInput>
+  orderBy?: Maybe<Array<CuratorGroupOrderByInput>>
+}
+
+export type QueryDataObjectsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DataObjectWhereInput>
+  orderBy?: Maybe<Array<DataObjectOrderByInput>>
+}
+
+export type QueryDataObjectByUniqueInputArgs = {
+  where: DataObjectWhereUniqueInput
+}
+
+export type QueryDataObjectsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DataObjectWhereInput>
+  orderBy?: Maybe<Array<DataObjectOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketFamilyMetadataWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyMetadataOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyMetadataByUniqueInputArgs = {
+  where: DistributionBucketFamilyMetadataWhereUniqueInput
+}
+
+export type QueryDistributionBucketFamilyMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketFamilyMetadataWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyMetadataOrderByInput>>
+}
+
+export type QueryDistributionBucketFamiliesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketFamilyWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyByUniqueInputArgs = {
+  where: DistributionBucketFamilyWhereUniqueInput
+}
+
+export type QueryDistributionBucketFamiliesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketFamilyWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyOrderByInput>>
+}
+
+export type QueryDistributionBucketOperatorMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOperatorMetadataOrderByInput>>
+}
+
+export type QueryDistributionBucketOperatorMetadataByUniqueInputArgs = {
+  where: DistributionBucketOperatorMetadataWhereUniqueInput
+}
+
+export type QueryDistributionBucketOperatorMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOperatorMetadataOrderByInput>>
+}
+
+export type QueryDistributionBucketOperatorsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketOperatorWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOperatorOrderByInput>>
+}
+
+export type QueryDistributionBucketOperatorByUniqueInputArgs = {
+  where: DistributionBucketOperatorWhereUniqueInput
+}
+
+export type QueryDistributionBucketOperatorsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketOperatorWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOperatorOrderByInput>>
+}
+
+export type QueryDistributionBucketsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOrderByInput>>
+}
+
+export type QueryDistributionBucketByUniqueInputArgs = {
+  where: DistributionBucketWhereUniqueInput
+}
+
+export type QueryDistributionBucketsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOrderByInput>>
+}
+
+export type QueryGeoCoordinatesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<GeoCoordinatesWhereInput>
+  orderBy?: Maybe<Array<GeoCoordinatesOrderByInput>>
+}
+
+export type QueryGeoCoordinatesByUniqueInputArgs = {
+  where: GeoCoordinatesWhereUniqueInput
+}
+
+export type QueryGeoCoordinatesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<GeoCoordinatesWhereInput>
+  orderBy?: Maybe<Array<GeoCoordinatesOrderByInput>>
+}
+
+export type QueryLanguagesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<LanguageWhereInput>
+  orderBy?: Maybe<Array<LanguageOrderByInput>>
+}
+
+export type QueryLanguageByUniqueInputArgs = {
+  where: LanguageWhereUniqueInput
+}
+
+export type QueryLanguagesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<LanguageWhereInput>
+  orderBy?: Maybe<Array<LanguageOrderByInput>>
+}
+
+export type QueryLicensesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<LicenseWhereInput>
+  orderBy?: Maybe<Array<LicenseOrderByInput>>
+}
+
+export type QueryLicenseByUniqueInputArgs = {
+  where: LicenseWhereUniqueInput
+}
+
+export type QueryLicensesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<LicenseWhereInput>
+  orderBy?: Maybe<Array<LicenseOrderByInput>>
+}
+
+export type QueryMembershipsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<MembershipWhereInput>
+  orderBy?: Maybe<Array<MembershipOrderByInput>>
+}
+
+export type QueryMembershipByUniqueInputArgs = {
+  where: MembershipWhereUniqueInput
+}
+
+export type QueryMembershipsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<MembershipWhereInput>
+  orderBy?: Maybe<Array<MembershipOrderByInput>>
+}
+
+export type QueryNextEntityIdsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<NextEntityIdWhereInput>
+  orderBy?: Maybe<Array<NextEntityIdOrderByInput>>
+}
+
+export type QueryNextEntityIdByUniqueInputArgs = {
+  where: NextEntityIdWhereUniqueInput
+}
+
+export type QueryNextEntityIdsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<NextEntityIdWhereInput>
+  orderBy?: Maybe<Array<NextEntityIdOrderByInput>>
+}
+
+export type QueryNodeLocationMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<NodeLocationMetadataWhereInput>
+  orderBy?: Maybe<Array<NodeLocationMetadataOrderByInput>>
+}
+
+export type QueryNodeLocationMetadataByUniqueInputArgs = {
+  where: NodeLocationMetadataWhereUniqueInput
+}
+
+export type QueryNodeLocationMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<NodeLocationMetadataWhereInput>
+  orderBy?: Maybe<Array<NodeLocationMetadataOrderByInput>>
+}
+
+export type QueryChannelCategoriesByNameArgs = {
+  whereChannelCategory?: Maybe<ChannelCategoryWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
+
+export type QueryMembersByHandleArgs = {
+  whereMembership?: Maybe<MembershipWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
+
+export type QuerySearchArgs = {
+  whereVideo?: Maybe<VideoWhereInput>
+  whereChannel?: Maybe<ChannelWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
+
+export type QueryVideoCategoriesByNameArgs = {
+  whereVideoCategory?: Maybe<VideoCategoryWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
+
+export type QueryStorageBagDistributionAssignmentsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageBagDistributionAssignmentWhereInput>
+  orderBy?: Maybe<Array<StorageBagDistributionAssignmentOrderByInput>>
+}
+
+export type QueryStorageBagDistributionAssignmentByUniqueInputArgs = {
+  where: StorageBagDistributionAssignmentWhereUniqueInput
+}
+
+export type QueryStorageBagDistributionAssignmentsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageBagDistributionAssignmentWhereInput>
+  orderBy?: Maybe<Array<StorageBagDistributionAssignmentOrderByInput>>
+}
+
+export type QueryStorageBagStorageAssignmentsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageBagStorageAssignmentWhereInput>
+  orderBy?: Maybe<Array<StorageBagStorageAssignmentOrderByInput>>
+}
+
+export type QueryStorageBagStorageAssignmentByUniqueInputArgs = {
+  where: StorageBagStorageAssignmentWhereUniqueInput
+}
+
+export type QueryStorageBagStorageAssignmentsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageBagStorageAssignmentWhereInput>
+  orderBy?: Maybe<Array<StorageBagStorageAssignmentOrderByInput>>
+}
+
+export type QueryStorageBagsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageBagWhereInput>
+  orderBy?: Maybe<Array<StorageBagOrderByInput>>
+}
+
+export type QueryStorageBagByUniqueInputArgs = {
+  where: StorageBagWhereUniqueInput
+}
+
+export type QueryStorageBagsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageBagWhereInput>
+  orderBy?: Maybe<Array<StorageBagOrderByInput>>
+}
+
+export type QueryStorageBucketOperatorMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  orderBy?: Maybe<Array<StorageBucketOperatorMetadataOrderByInput>>
+}
+
+export type QueryStorageBucketOperatorMetadataByUniqueInputArgs = {
+  where: StorageBucketOperatorMetadataWhereUniqueInput
+}
+
+export type QueryStorageBucketOperatorMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  orderBy?: Maybe<Array<StorageBucketOperatorMetadataOrderByInput>>
+}
+
+export type QueryStorageBucketsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageBucketWhereInput>
+  orderBy?: Maybe<Array<StorageBucketOrderByInput>>
+}
+
+export type QueryStorageBucketByUniqueInputArgs = {
+  where: StorageBucketWhereUniqueInput
+}
+
+export type QueryStorageBucketsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageBucketWhereInput>
+  orderBy?: Maybe<Array<StorageBucketOrderByInput>>
+}
+
+export type QueryStorageDataObjectsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageDataObjectWhereInput>
+  orderBy?: Maybe<Array<StorageDataObjectOrderByInput>>
+}
+
+export type QueryStorageDataObjectByUniqueInputArgs = {
+  where: StorageDataObjectWhereUniqueInput
+}
+
+export type QueryStorageDataObjectsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageDataObjectWhereInput>
+  orderBy?: Maybe<Array<StorageDataObjectOrderByInput>>
+}
+
+export type QueryStorageSystemParametersArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageSystemParametersWhereInput>
+  orderBy?: Maybe<Array<StorageSystemParametersOrderByInput>>
+}
+
+export type QueryStorageSystemParametersByUniqueInputArgs = {
+  where: StorageSystemParametersWhereUniqueInput
+}
+
+export type QueryStorageSystemParametersConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageSystemParametersWhereInput>
+  orderBy?: Maybe<Array<StorageSystemParametersOrderByInput>>
+}
+
+export type QueryVideoCategoriesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoCategoryWhereInput>
+  orderBy?: Maybe<Array<VideoCategoryOrderByInput>>
+}
+
+export type QueryVideoCategoryByUniqueInputArgs = {
+  where: VideoCategoryWhereUniqueInput
+}
+
+export type QueryVideoCategoriesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoCategoryWhereInput>
+  orderBy?: Maybe<Array<VideoCategoryOrderByInput>>
+}
+
+export type QueryVideoMediaEncodingsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoMediaEncodingWhereInput>
+  orderBy?: Maybe<Array<VideoMediaEncodingOrderByInput>>
+}
+
+export type QueryVideoMediaEncodingByUniqueInputArgs = {
+  where: VideoMediaEncodingWhereUniqueInput
+}
+
+export type QueryVideoMediaEncodingsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoMediaEncodingWhereInput>
+  orderBy?: Maybe<Array<VideoMediaEncodingOrderByInput>>
+}
+
+export type QueryVideoMediaMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoMediaMetadataWhereInput>
+  orderBy?: Maybe<Array<VideoMediaMetadataOrderByInput>>
+}
+
+export type QueryVideoMediaMetadataByUniqueInputArgs = {
+  where: VideoMediaMetadataWhereUniqueInput
+}
+
+export type QueryVideoMediaMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoMediaMetadataWhereInput>
+  orderBy?: Maybe<Array<VideoMediaMetadataOrderByInput>>
+}
+
+export type QueryVideosArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoWhereInput>
+  orderBy?: Maybe<Array<VideoOrderByInput>>
+}
+
+export type QueryVideoByUniqueInputArgs = {
+  where: VideoWhereUniqueInput
+}
+
+export type QueryVideosConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoWhereInput>
+  orderBy?: Maybe<Array<VideoOrderByInput>>
+}
+
+export type QueryWorkersArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<WorkerWhereInput>
+  orderBy?: Maybe<Array<WorkerOrderByInput>>
+}
+
+export type QueryWorkerByUniqueInputArgs = {
+  where: WorkerWhereUniqueInput
+}
+
+export type QueryWorkersConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<WorkerWhereInput>
+  orderBy?: Maybe<Array<WorkerOrderByInput>>
+}
+
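Every entity above follows the same query triplet: a plain list query with offset/limit arguments, a `byUniqueInput` lookup, and a Relay-style `*Connection` with cursor paging. A minimal sketch of the two paging styles, illustrative only: it assumes a query node reachable at the URL below, the `graphql-request` client, and the usual `hasNextPage`/`endCursor` fields on `PageInfo` (defined earlier in this file).

import { gql, GraphQLClient } from 'graphql-request'

// Assumed endpoint, for illustration only.
const client = new GraphQLClient('http://localhost:8081/graphql')

// Plain list query: offset/limit paging (matches QueryStorageBagsArgs).
const storageBagsQuery = gql`
  query GetBags($limit: Int, $offset: Int) {
    storageBags(limit: $limit, offset: $offset) {
      id
    }
  }
`

// Relay-style connection: cursor paging (matches QueryStorageBagsConnectionArgs).
const storageBagsConnectionQuery = gql`
  query GetBagsPage($first: Int, $after: String) {
    storageBagsConnection(first: $first, after: $after) {
      edges {
        node {
          id
        }
      }
      pageInfo {
        hasNextPage
        endCursor
      }
    }
  }
`

// Drains the connection page by page; 100 nodes per request is arbitrary.
async function listAllBagIds(): Promise<string[]> {
  const ids: string[] = []
  let after: string | undefined
  for (;;) {
    const data = await client.request(storageBagsConnectionQuery, { first: 100, after })
    const { edges, pageInfo } = data.storageBagsConnection
    ids.push(...edges.map((e: { node: { id: string } }) => e.node.id))
    if (!pageInfo.hasNextPage) break
    after = pageInfo.endCursor
  }
  return ids
}
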
+export type SearchFtsOutput = {
+  item: SearchSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type SearchSearchResult = Channel | Video
+
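SearchFtsOutput wraps each full-text hit: `item` is the Channel | Video union, `rank` is the match score, and `highlight` is the matched fragment. A minimal consumer sketch, assuming `isTypeOf` carries the concrete type name of `item` and that Channel and Video both inherit `id` from BaseGraphQlObject (so the union's common property is readable without narrowing):

function formatSearchHit(hit: SearchFtsOutput): string {
  // `id` is common to both union members; `isTypeOf` tells channels from videos.
  return `${hit.isTypeOf} ${hit.item.id} (rank ${hit.rank}): ${hit.highlight}`
}
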
+export type StandardDeleteResponse = {
+  id: Scalars['ID']
+}
+
+export type StorageBag = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  objects: Array<StorageDataObject>
+  storageAssignments: Array<StorageBagStorageAssignment>
+  distributionAssignments: Array<StorageBagDistributionAssignment>
+  /** Owner of the storage bag */
+  owner: StorageBagOwner
+}
+
+export type StorageBagConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageBagEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageBagCreateInput = {
+  owner: Scalars['JSONObject']
+}
+
+export type StorageBagDistributionAssignment = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  storageBag: StorageBag
+  storageBagId?: Maybe<Scalars['String']>
+  distributionBucket: DistributionBucket
+  distributionBucketId?: Maybe<Scalars['String']>
+}
+
+export type StorageBagDistributionAssignmentConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageBagDistributionAssignmentEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageBagDistributionAssignmentCreateInput = {
+  storageBag: Scalars['ID']
+  distributionBucket: Scalars['ID']
+  storageBagId?: Maybe<Scalars['String']>
+  distributionBucketId?: Maybe<Scalars['String']>
+}
+
+export type StorageBagDistributionAssignmentEdge = {
+  node: StorageBagDistributionAssignment
+  cursor: Scalars['String']
+}
+
+export enum StorageBagDistributionAssignmentOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  StorageBagAsc = 'storageBag_ASC',
+  StorageBagDesc = 'storageBag_DESC',
+  DistributionBucketAsc = 'distributionBucket_ASC',
+  DistributionBucketDesc = 'distributionBucket_DESC',
+  StorageBagIdAsc = 'storageBagId_ASC',
+  StorageBagIdDesc = 'storageBagId_DESC',
+  DistributionBucketIdAsc = 'distributionBucketId_ASC',
+  DistributionBucketIdDesc = 'distributionBucketId_DESC',
+}
+
+export type StorageBagDistributionAssignmentUpdateInput = {
+  storageBag?: Maybe<Scalars['ID']>
+  distributionBucket?: Maybe<Scalars['ID']>
+  storageBagId?: Maybe<Scalars['String']>
+  distributionBucketId?: Maybe<Scalars['String']>
+}
+
+export type StorageBagDistributionAssignmentWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  storageBag_eq?: Maybe<Scalars['ID']>
+  storageBag_in?: Maybe<Array<Scalars['ID']>>
+  distributionBucket_eq?: Maybe<Scalars['ID']>
+  distributionBucket_in?: Maybe<Array<Scalars['ID']>>
+  storageBagId_eq?: Maybe<Scalars['String']>
+  storageBagId_contains?: Maybe<Scalars['String']>
+  storageBagId_startsWith?: Maybe<Scalars['String']>
+  storageBagId_endsWith?: Maybe<Scalars['String']>
+  storageBagId_in?: Maybe<Array<Scalars['String']>>
+  distributionBucketId_eq?: Maybe<Scalars['String']>
+  distributionBucketId_contains?: Maybe<Scalars['String']>
+  distributionBucketId_startsWith?: Maybe<Scalars['String']>
+  distributionBucketId_endsWith?: Maybe<Scalars['String']>
+  distributionBucketId_in?: Maybe<Array<Scalars['String']>>
+  storageBag?: Maybe<StorageBagWhereInput>
+  distributionBucket?: Maybe<DistributionBucketWhereInput>
+  AND?: Maybe<Array<StorageBagDistributionAssignmentWhereInput>>
+  OR?: Maybe<Array<StorageBagDistributionAssignmentWhereInput>>
+}
+
+export type StorageBagDistributionAssignmentWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBagEdge = {
+  node: StorageBag
+  cursor: Scalars['String']
+}
+
+export enum StorageBagOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+}
+
+export type StorageBagOwner =
+  | StorageBagOwnerCouncil
+  | StorageBagOwnerWorkingGroup
+  | StorageBagOwnerMember
+  | StorageBagOwnerChannel
+  | StorageBagOwnerDao
+
+export type StorageBagOwnerChannel = {
+  channelId?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerChannelCreateInput = {
+  channelId?: Maybe<Scalars['Float']>
+}
+
+export type StorageBagOwnerChannelUpdateInput = {
+  channelId?: Maybe<Scalars['Float']>
+}
+
+export type StorageBagOwnerChannelWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  channelId_eq?: Maybe<Scalars['Int']>
+  channelId_gt?: Maybe<Scalars['Int']>
+  channelId_gte?: Maybe<Scalars['Int']>
+  channelId_lt?: Maybe<Scalars['Int']>
+  channelId_lte?: Maybe<Scalars['Int']>
+  channelId_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<StorageBagOwnerChannelWhereInput>>
+  OR?: Maybe<Array<StorageBagOwnerChannelWhereInput>>
+}
+
+export type StorageBagOwnerChannelWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBagOwnerCouncil = {
+  phantom?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerCouncilCreateInput = {
+  phantom?: Maybe<Scalars['Float']>
+}
+
+export type StorageBagOwnerCouncilUpdateInput = {
+  phantom?: Maybe<Scalars['Float']>
+}
+
+export type StorageBagOwnerCouncilWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  phantom_eq?: Maybe<Scalars['Int']>
+  phantom_gt?: Maybe<Scalars['Int']>
+  phantom_gte?: Maybe<Scalars['Int']>
+  phantom_lt?: Maybe<Scalars['Int']>
+  phantom_lte?: Maybe<Scalars['Int']>
+  phantom_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<StorageBagOwnerCouncilWhereInput>>
+  OR?: Maybe<Array<StorageBagOwnerCouncilWhereInput>>
+}
+
+export type StorageBagOwnerCouncilWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBagOwnerDao = {
+  daoId?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerDaoCreateInput = {
+  daoId?: Maybe<Scalars['Float']>
+}
+
+export type StorageBagOwnerDaoUpdateInput = {
+  daoId?: Maybe<Scalars['Float']>
+}
+
+export type StorageBagOwnerDaoWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  daoId_eq?: Maybe<Scalars['Int']>
+  daoId_gt?: Maybe<Scalars['Int']>
+  daoId_gte?: Maybe<Scalars['Int']>
+  daoId_lt?: Maybe<Scalars['Int']>
+  daoId_lte?: Maybe<Scalars['Int']>
+  daoId_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<StorageBagOwnerDaoWhereInput>>
+  OR?: Maybe<Array<StorageBagOwnerDaoWhereInput>>
+}
+
+export type StorageBagOwnerDaoWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBagOwnerMember = {
+  memberId?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerMemberCreateInput = {
+  memberId?: Maybe<Scalars['Float']>
+}
+
+export type StorageBagOwnerMemberUpdateInput = {
+  memberId?: Maybe<Scalars['Float']>
+}
+
+export type StorageBagOwnerMemberWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  memberId_eq?: Maybe<Scalars['Int']>
+  memberId_gt?: Maybe<Scalars['Int']>
+  memberId_gte?: Maybe<Scalars['Int']>
+  memberId_lt?: Maybe<Scalars['Int']>
+  memberId_lte?: Maybe<Scalars['Int']>
+  memberId_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<StorageBagOwnerMemberWhereInput>>
+  OR?: Maybe<Array<StorageBagOwnerMemberWhereInput>>
+}
+
+export type StorageBagOwnerMemberWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBagOwnerWorkingGroup = {
+  workingGroupId?: Maybe<Scalars['String']>
+}
+
+export type StorageBagOwnerWorkingGroupCreateInput = {
+  workingGroupId?: Maybe<Scalars['String']>
+}
+
+export type StorageBagOwnerWorkingGroupUpdateInput = {
+  workingGroupId?: Maybe<Scalars['String']>
+}
+
+export type StorageBagOwnerWorkingGroupWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  workingGroupId_eq?: Maybe<Scalars['String']>
+  workingGroupId_contains?: Maybe<Scalars['String']>
+  workingGroupId_startsWith?: Maybe<Scalars['String']>
+  workingGroupId_endsWith?: Maybe<Scalars['String']>
+  workingGroupId_in?: Maybe<Array<Scalars['String']>>
+  AND?: Maybe<Array<StorageBagOwnerWorkingGroupWhereInput>>
+  OR?: Maybe<Array<StorageBagOwnerWorkingGroupWhereInput>>
+}
+
+export type StorageBagOwnerWorkingGroupWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBagStorageAssignment = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  storageBag: StorageBag
+  storageBagId?: Maybe<Scalars['String']>
+  storageBucket: StorageBucket
+  storageBucketId?: Maybe<Scalars['String']>
+}
+
+export type StorageBagStorageAssignmentConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageBagStorageAssignmentEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageBagStorageAssignmentCreateInput = {
+  storageBag: Scalars['ID']
+  storageBucket: Scalars['ID']
+  storageBagId?: Maybe<Scalars['String']>
+  storageBucketId?: Maybe<Scalars['String']>
+}
+
+export type StorageBagStorageAssignmentEdge = {
+  node: StorageBagStorageAssignment
+  cursor: Scalars['String']
+}
+
+export enum StorageBagStorageAssignmentOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  StorageBagAsc = 'storageBag_ASC',
+  StorageBagDesc = 'storageBag_DESC',
+  StorageBucketAsc = 'storageBucket_ASC',
+  StorageBucketDesc = 'storageBucket_DESC',
+  StorageBagIdAsc = 'storageBagId_ASC',
+  StorageBagIdDesc = 'storageBagId_DESC',
+  StorageBucketIdAsc = 'storageBucketId_ASC',
+  StorageBucketIdDesc = 'storageBucketId_DESC',
+}
+
+export type StorageBagStorageAssignmentUpdateInput = {
+  storageBag?: Maybe<Scalars['ID']>
+  storageBucket?: Maybe<Scalars['ID']>
+  storageBagId?: Maybe<Scalars['String']>
+  storageBucketId?: Maybe<Scalars['String']>
+}
+
+export type StorageBagStorageAssignmentWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  storageBag_eq?: Maybe<Scalars['ID']>
+  storageBag_in?: Maybe<Array<Scalars['ID']>>
+  storageBucket_eq?: Maybe<Scalars['ID']>
+  storageBucket_in?: Maybe<Array<Scalars['ID']>>
+  storageBagId_eq?: Maybe<Scalars['String']>
+  storageBagId_contains?: Maybe<Scalars['String']>
+  storageBagId_startsWith?: Maybe<Scalars['String']>
+  storageBagId_endsWith?: Maybe<Scalars['String']>
+  storageBagId_in?: Maybe<Array<Scalars['String']>>
+  storageBucketId_eq?: Maybe<Scalars['String']>
+  storageBucketId_contains?: Maybe<Scalars['String']>
+  storageBucketId_startsWith?: Maybe<Scalars['String']>
+  storageBucketId_endsWith?: Maybe<Scalars['String']>
+  storageBucketId_in?: Maybe<Array<Scalars['String']>>
+  storageBag?: Maybe<StorageBagWhereInput>
+  storageBucket?: Maybe<StorageBucketWhereInput>
+  AND?: Maybe<Array<StorageBagStorageAssignmentWhereInput>>
+  OR?: Maybe<Array<StorageBagStorageAssignmentWhereInput>>
+}
+
+export type StorageBagStorageAssignmentWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBagUpdateInput = {
+  owner?: Maybe<Scalars['JSONObject']>
+}
+
+export type StorageBagWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  owner_json?: Maybe<Scalars['JSONObject']>
+  objects_none?: Maybe<StorageDataObjectWhereInput>
+  objects_some?: Maybe<StorageDataObjectWhereInput>
+  objects_every?: Maybe<StorageDataObjectWhereInput>
+  storageAssignments_none?: Maybe<StorageBagStorageAssignmentWhereInput>
+  storageAssignments_some?: Maybe<StorageBagStorageAssignmentWhereInput>
+  storageAssignments_every?: Maybe<StorageBagStorageAssignmentWhereInput>
+  distributionAssignments_none?: Maybe<StorageBagDistributionAssignmentWhereInput>
+  distributionAssignments_some?: Maybe<StorageBagDistributionAssignmentWhereInput>
+  distributionAssignments_every?: Maybe<StorageBagDistributionAssignmentWhereInput>
+  AND?: Maybe<Array<StorageBagWhereInput>>
+  OR?: Maybe<Array<StorageBagWhereInput>>
+}
+
+export type StorageBagWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
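A minimal lookup sketch for the `byUniqueInput` form; the variable shape mirrors StorageBagWhereUniqueInput, and the client and endpoint are the same illustrative assumptions as in the paging sketch above:

import { gql, GraphQLClient } from 'graphql-request'

const client = new GraphQLClient('http://localhost:8081/graphql') // assumed endpoint

const bagQuery = gql`
  query GetBag($bagId: ID!) {
    storageBagByUniqueInput(where: { id: $bagId }) {
      id
      objects {
        id
        size
        ipfsHash
        isAccepted
      }
    }
  }
`

// Returns the IPFS hashes of a bag's objects, or [] when the bag is unknown
// (byUniqueInput resolves to null rather than throwing).
async function getBagObjectHashes(bagId: string): Promise<string[]> {
  const data = await client.request(bagQuery, { bagId })
  const bag = data.storageBagByUniqueInput
  return bag ? bag.objects.map((o: { ipfsHash: string }) => o.ipfsHash) : []
}
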
+export type StorageBucket = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Current bucket operator status */
+  operatorStatus: StorageBucketOperatorStatus
+  operatorMetadata?: Maybe<StorageBucketOperatorMetadata>
+  operatorMetadataId?: Maybe<Scalars['String']>
+  /** Whether the bucket is accepting any new storage bags */
+  acceptingNewBags: Scalars['Boolean']
+  bagAssignments: Array<StorageBagStorageAssignment>
+  /** Bucket's data object size limit in bytes */
+  dataObjectsSizeLimit: Scalars['BigInt']
+  /** Bucket's data object count limit */
+  dataObjectCountLimit: Scalars['BigInt']
+  /** Number of assigned data objects */
+  dataObjectsCount: Scalars['BigInt']
+  /** Total size of assigned data objects */
+  dataObjectsSize: Scalars['BigInt']
+}
+
+export type StorageBucketConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageBucketEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageBucketCreateInput = {
+  operatorStatus: Scalars['JSONObject']
+  operatorMetadata?: Maybe<Scalars['ID']>
+  acceptingNewBags: Scalars['Boolean']
+  dataObjectsSizeLimit: Scalars['BigInt']
+  dataObjectCountLimit: Scalars['BigInt']
+  dataObjectsCount: Scalars['BigInt']
+  dataObjectsSize: Scalars['BigInt']
+}
+
+export type StorageBucketEdge = {
+  node: StorageBucket
+  cursor: Scalars['String']
+}
+
+export type StorageBucketOperatorMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Root node endpoint */
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<NodeLocationMetadata>
+  nodeLocationId?: Maybe<Scalars['String']>
+  /** Additional information about the node/operator */
+  extra?: Maybe<Scalars['String']>
+  storagebucketoperatorMetadata?: Maybe<Array<StorageBucket>>
+}
+
+export type StorageBucketOperatorMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageBucketOperatorMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageBucketOperatorMetadataCreateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<Scalars['ID']>
+  extra?: Maybe<Scalars['String']>
+}
+
+export type StorageBucketOperatorMetadataEdge = {
+  node: StorageBucketOperatorMetadata
+  cursor: Scalars['String']
+}
+
+export enum StorageBucketOperatorMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NodeEndpointAsc = 'nodeEndpoint_ASC',
+  NodeEndpointDesc = 'nodeEndpoint_DESC',
+  NodeLocationAsc = 'nodeLocation_ASC',
+  NodeLocationDesc = 'nodeLocation_DESC',
+  ExtraAsc = 'extra_ASC',
+  ExtraDesc = 'extra_DESC',
+}
+
+export type StorageBucketOperatorMetadataUpdateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<Scalars['ID']>
+  extra?: Maybe<Scalars['String']>
+}
+
+export type StorageBucketOperatorMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  nodeEndpoint_eq?: Maybe<Scalars['String']>
+  nodeEndpoint_contains?: Maybe<Scalars['String']>
+  nodeEndpoint_startsWith?: Maybe<Scalars['String']>
+  nodeEndpoint_endsWith?: Maybe<Scalars['String']>
+  nodeEndpoint_in?: Maybe<Array<Scalars['String']>>
+  nodeLocation_eq?: Maybe<Scalars['ID']>
+  nodeLocation_in?: Maybe<Array<Scalars['ID']>>
+  extra_eq?: Maybe<Scalars['String']>
+  extra_contains?: Maybe<Scalars['String']>
+  extra_startsWith?: Maybe<Scalars['String']>
+  extra_endsWith?: Maybe<Scalars['String']>
+  extra_in?: Maybe<Array<Scalars['String']>>
+  nodeLocation?: Maybe<NodeLocationMetadataWhereInput>
+  storagebucketoperatorMetadata_none?: Maybe<StorageBucketWhereInput>
+  storagebucketoperatorMetadata_some?: Maybe<StorageBucketWhereInput>
+  storagebucketoperatorMetadata_every?: Maybe<StorageBucketWhereInput>
+  AND?: Maybe<Array<StorageBucketOperatorMetadataWhereInput>>
+  OR?: Maybe<Array<StorageBucketOperatorMetadataWhereInput>>
+}
+
+export type StorageBucketOperatorMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBucketOperatorStatus =
+  | StorageBucketOperatorStatusMissing
+  | StorageBucketOperatorStatusInvited
+  | StorageBucketOperatorStatusActive
+
+export type StorageBucketOperatorStatusActive = {
+  workerId: Scalars['Int']
+}
+
+export type StorageBucketOperatorStatusActiveCreateInput = {
+  workerId: Scalars['Float']
+}
+
+export type StorageBucketOperatorStatusActiveUpdateInput = {
+  workerId?: Maybe<Scalars['Float']>
+}
+
+export type StorageBucketOperatorStatusActiveWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  workerId_eq?: Maybe<Scalars['Int']>
+  workerId_gt?: Maybe<Scalars['Int']>
+  workerId_gte?: Maybe<Scalars['Int']>
+  workerId_lt?: Maybe<Scalars['Int']>
+  workerId_lte?: Maybe<Scalars['Int']>
+  workerId_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<StorageBucketOperatorStatusActiveWhereInput>>
+  OR?: Maybe<Array<StorageBucketOperatorStatusActiveWhereInput>>
+}
+
+export type StorageBucketOperatorStatusActiveWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBucketOperatorStatusInvited = {
+  workerId: Scalars['Int']
+}
+
+export type StorageBucketOperatorStatusInvitedCreateInput = {
+  workerId: Scalars['Float']
+}
+
+export type StorageBucketOperatorStatusInvitedUpdateInput = {
+  workerId?: Maybe<Scalars['Float']>
+}
+
+export type StorageBucketOperatorStatusInvitedWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  workerId_eq?: Maybe<Scalars['Int']>
+  workerId_gt?: Maybe<Scalars['Int']>
+  workerId_gte?: Maybe<Scalars['Int']>
+  workerId_lt?: Maybe<Scalars['Int']>
+  workerId_lte?: Maybe<Scalars['Int']>
+  workerId_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<StorageBucketOperatorStatusInvitedWhereInput>>
+  OR?: Maybe<Array<StorageBucketOperatorStatusInvitedWhereInput>>
+}
+
+export type StorageBucketOperatorStatusInvitedWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBucketOperatorStatusMissing = {
+  phantom?: Maybe<Scalars['Int']>
+}
+
+export type StorageBucketOperatorStatusMissingCreateInput = {
+  phantom?: Maybe<Scalars['Float']>
+}
+
+export type StorageBucketOperatorStatusMissingUpdateInput = {
+  phantom?: Maybe<Scalars['Float']>
+}
+
+export type StorageBucketOperatorStatusMissingWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  phantom_eq?: Maybe<Scalars['Int']>
+  phantom_gt?: Maybe<Scalars['Int']>
+  phantom_gte?: Maybe<Scalars['Int']>
+  phantom_lt?: Maybe<Scalars['Int']>
+  phantom_lte?: Maybe<Scalars['Int']>
+  phantom_in?: Maybe<Array<Scalars['Int']>>
+  AND?: Maybe<Array<StorageBucketOperatorStatusMissingWhereInput>>
+  OR?: Maybe<Array<StorageBucketOperatorStatusMissingWhereInput>>
+}
+
+export type StorageBucketOperatorStatusMissingWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
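StorageBucketOperatorStatus is a three-way union whose Invited and Active members are structurally identical (both carry only `workerId`), so they cannot be told apart from the generated types alone. A minimal narrowing sketch, assuming the query also selected `__typename`, which the types above do not include:

type TaggedOperatorStatus =
  | ({ __typename: 'StorageBucketOperatorStatusMissing' } & StorageBucketOperatorStatusMissing)
  | ({ __typename: 'StorageBucketOperatorStatusInvited' } & StorageBucketOperatorStatusInvited)
  | ({ __typename: 'StorageBucketOperatorStatusActive' } & StorageBucketOperatorStatusActive)

function describeOperatorStatus(status: TaggedOperatorStatus): string {
  switch (status.__typename) {
    case 'StorageBucketOperatorStatusMissing':
      return 'no operator assigned'
    case 'StorageBucketOperatorStatusInvited':
      return `worker ${status.workerId} invited`
    case 'StorageBucketOperatorStatusActive':
      return `operated by worker ${status.workerId}`
  }
}
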
+export enum StorageBucketOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  OperatorMetadataAsc = 'operatorMetadata_ASC',
+  OperatorMetadataDesc = 'operatorMetadata_DESC',
+  AcceptingNewBagsAsc = 'acceptingNewBags_ASC',
+  AcceptingNewBagsDesc = 'acceptingNewBags_DESC',
+  DataObjectsSizeLimitAsc = 'dataObjectsSizeLimit_ASC',
+  DataObjectsSizeLimitDesc = 'dataObjectsSizeLimit_DESC',
+  DataObjectCountLimitAsc = 'dataObjectCountLimit_ASC',
+  DataObjectCountLimitDesc = 'dataObjectCountLimit_DESC',
+  DataObjectsCountAsc = 'dataObjectsCount_ASC',
+  DataObjectsCountDesc = 'dataObjectsCount_DESC',
+  DataObjectsSizeAsc = 'dataObjectsSize_ASC',
+  DataObjectsSizeDesc = 'dataObjectsSize_DESC',
+}
+
+export type StorageBucketUpdateInput = {
+  operatorStatus?: Maybe<Scalars['JSONObject']>
+  operatorMetadata?: Maybe<Scalars['ID']>
+  acceptingNewBags?: Maybe<Scalars['Boolean']>
+  dataObjectsSizeLimit?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit?: Maybe<Scalars['BigInt']>
+  dataObjectsCount?: Maybe<Scalars['BigInt']>
+  dataObjectsSize?: Maybe<Scalars['BigInt']>
+}
+
+export type StorageBucketWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  operatorStatus_json?: Maybe<Scalars['JSONObject']>
+  operatorMetadata_eq?: Maybe<Scalars['ID']>
+  operatorMetadata_in?: Maybe<Array<Scalars['ID']>>
+  acceptingNewBags_eq?: Maybe<Scalars['Boolean']>
+  acceptingNewBags_in?: Maybe<Array<Scalars['Boolean']>>
+  dataObjectsSizeLimit_eq?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_gt?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_gte?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_lt?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_lte?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_in?: Maybe<Array<Scalars['BigInt']>>
+  dataObjectCountLimit_eq?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_gt?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_gte?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_lt?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_lte?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_in?: Maybe<Array<Scalars['BigInt']>>
+  dataObjectsCount_eq?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_gt?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_gte?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_lt?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_lte?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_in?: Maybe<Array<Scalars['BigInt']>>
+  dataObjectsSize_eq?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_gt?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_gte?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_lt?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_lte?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_in?: Maybe<Array<Scalars['BigInt']>>
+  operatorMetadata?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  bagAssignments_none?: Maybe<StorageBagStorageAssignmentWhereInput>
+  bagAssignments_some?: Maybe<StorageBagStorageAssignmentWhereInput>
+  bagAssignments_every?: Maybe<StorageBagStorageAssignmentWhereInput>
+  AND?: Maybe<Array<StorageBucketWhereInput>>
+  OR?: Maybe<Array<StorageBucketWhereInput>>
+}
+
+export type StorageBucketWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageDataObject = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Whether the data object was uploaded and accepted by the storage provider */
+  isAccepted: Scalars['Boolean']
+  /** Data object size in bytes */
+  size: Scalars['BigInt']
+  storageBag: StorageBag
+  storageBagId: Scalars['String']
+  /** IPFS content hash */
+  ipfsHash: Scalars['String']
+}
+
+export type StorageDataObjectConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageDataObjectEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageDataObjectCreateInput = {
+  isAccepted: Scalars['Boolean']
+  size: Scalars['BigInt']
+  storageBag: Scalars['ID']
+  ipfsHash: Scalars['String']
+}
+
+export type StorageDataObjectEdge = {
+  node: StorageDataObject
+  cursor: Scalars['String']
+}
+
+export enum StorageDataObjectOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsAcceptedAsc = 'isAccepted_ASC',
+  IsAcceptedDesc = 'isAccepted_DESC',
+  SizeAsc = 'size_ASC',
+  SizeDesc = 'size_DESC',
+  StorageBagAsc = 'storageBag_ASC',
+  StorageBagDesc = 'storageBag_DESC',
+  IpfsHashAsc = 'ipfsHash_ASC',
+  IpfsHashDesc = 'ipfsHash_DESC',
+}
+
+export type StorageDataObjectUpdateInput = {
+  isAccepted?: Maybe<Scalars['Boolean']>
+  size?: Maybe<Scalars['BigInt']>
+  storageBag?: Maybe<Scalars['ID']>
+  ipfsHash?: Maybe<Scalars['String']>
+}
+
+export type StorageDataObjectWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  isAccepted_eq?: Maybe<Scalars['Boolean']>
+  isAccepted_in?: Maybe<Array<Scalars['Boolean']>>
+  size_eq?: Maybe<Scalars['BigInt']>
+  size_gt?: Maybe<Scalars['BigInt']>
+  size_gte?: Maybe<Scalars['BigInt']>
+  size_lt?: Maybe<Scalars['BigInt']>
+  size_lte?: Maybe<Scalars['BigInt']>
+  size_in?: Maybe<Array<Scalars['BigInt']>>
+  storageBag_eq?: Maybe<Scalars['ID']>
+  storageBag_in?: Maybe<Array<Scalars['ID']>>
+  ipfsHash_eq?: Maybe<Scalars['String']>
+  ipfsHash_contains?: Maybe<Scalars['String']>
+  ipfsHash_startsWith?: Maybe<Scalars['String']>
+  ipfsHash_endsWith?: Maybe<Scalars['String']>
+  ipfsHash_in?: Maybe<Array<Scalars['String']>>
+  storageBag?: Maybe<StorageBagWhereInput>
+  AND?: Maybe<Array<StorageDataObjectWhereInput>>
+  OR?: Maybe<Array<StorageDataObjectWhereInput>>
+}
+
+export type StorageDataObjectWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+/** Global storage system parameters */
+export type StorageSystemParameters = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Blacklisted content hashes */
+  blacklist: Array<Scalars['String']>
+  /** How many buckets can be assigned to store a bag */
+  storageBucketsPerBagLimit: Scalars['Int']
+  /** How many buckets can be assigned to distribute a bag */
+  distributionBucketsPerBagLimit: Scalars['Int']
+  /** Whether the uploading is globally blocked */
+  uploadingBlocked: Scalars['Boolean']
+  /** Additional fee for storing 1 MB of data */
+  dataObjectFeePerMb: Scalars['BigInt']
+  /** Global max. number of objects a storage bucket can store (can also be further limited by the provider) */
+  storageBucketMaxObjectsCountLimit: Scalars['BigInt']
+  /** Global max. size of objects a storage bucket can store (can also be further limited by the provider) */
+  storageBucketMaxObjectsSizeLimit: Scalars['BigInt']
+}
+
+export type StorageSystemParametersConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageSystemParametersEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageSystemParametersCreateInput = {
+  blacklist: Array<Scalars['String']>
+  storageBucketsPerBagLimit: Scalars['Float']
+  distributionBucketsPerBagLimit: Scalars['Float']
+  uploadingBlocked: Scalars['Boolean']
+  dataObjectFeePerMb: Scalars['BigInt']
+  storageBucketMaxObjectsCountLimit: Scalars['BigInt']
+  storageBucketMaxObjectsSizeLimit: Scalars['BigInt']
+}
+
+export type StorageSystemParametersEdge = {
+  node: StorageSystemParameters
+  cursor: Scalars['String']
+}
+
+export enum StorageSystemParametersOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  StorageBucketsPerBagLimitAsc = 'storageBucketsPerBagLimit_ASC',
+  StorageBucketsPerBagLimitDesc = 'storageBucketsPerBagLimit_DESC',
+  DistributionBucketsPerBagLimitAsc = 'distributionBucketsPerBagLimit_ASC',
+  DistributionBucketsPerBagLimitDesc = 'distributionBucketsPerBagLimit_DESC',
+  UploadingBlockedAsc = 'uploadingBlocked_ASC',
+  UploadingBlockedDesc = 'uploadingBlocked_DESC',
+  DataObjectFeePerMbAsc = 'dataObjectFeePerMb_ASC',
+  DataObjectFeePerMbDesc = 'dataObjectFeePerMb_DESC',
+  StorageBucketMaxObjectsCountLimitAsc = 'storageBucketMaxObjectsCountLimit_ASC',
+  StorageBucketMaxObjectsCountLimitDesc = 'storageBucketMaxObjectsCountLimit_DESC',
+  StorageBucketMaxObjectsSizeLimitAsc = 'storageBucketMaxObjectsSizeLimit_ASC',
+  StorageBucketMaxObjectsSizeLimitDesc = 'storageBucketMaxObjectsSizeLimit_DESC',
+}
+
+export type StorageSystemParametersUpdateInput = {
+  blacklist?: Maybe<Array<Scalars['String']>>
+  storageBucketsPerBagLimit?: Maybe<Scalars['Float']>
+  distributionBucketsPerBagLimit?: Maybe<Scalars['Float']>
+  uploadingBlocked?: Maybe<Scalars['Boolean']>
+  dataObjectFeePerMb?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit?: Maybe<Scalars['BigInt']>
+}
+
+export type StorageSystemParametersWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  storageBucketsPerBagLimit_eq?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_gt?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_gte?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_lt?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_lte?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_in?: Maybe<Array<Scalars['Int']>>
+  distributionBucketsPerBagLimit_eq?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_gt?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_gte?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_lt?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_lte?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_in?: Maybe<Array<Scalars['Int']>>
+  uploadingBlocked_eq?: Maybe<Scalars['Boolean']>
+  uploadingBlocked_in?: Maybe<Array<Scalars['Boolean']>>
+  dataObjectFeePerMb_eq?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_gt?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_gte?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_lt?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_lte?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_in?: Maybe<Array<Scalars['BigInt']>>
+  storageBucketMaxObjectsCountLimit_eq?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_gt?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_gte?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_lt?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_lte?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_in?: Maybe<Array<Scalars['BigInt']>>
+  storageBucketMaxObjectsSizeLimit_eq?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_gt?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_gte?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_lt?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_lte?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_in?: Maybe<Array<Scalars['BigInt']>>
+  AND?: Maybe<Array<StorageSystemParametersWhereInput>>
+  OR?: Maybe<Array<StorageSystemParametersWhereInput>>
+}
+
+export type StorageSystemParametersWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type Subscription = {
+  stateSubscription: ProcessorState
+}
+
+export type Video = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  channel?: Maybe<Channel>
+  channelId?: Maybe<Scalars['String']>
+  category?: Maybe<VideoCategory>
+  categoryId?: Maybe<Scalars['String']>
+  /** The title of the video */
+  title?: Maybe<Scalars['String']>
+  /** The description of the Video */
+  description?: Maybe<Scalars['String']>
+  /** Video duration in seconds */
+  duration?: Maybe<Scalars['Int']>
+  thumbnailPhotoDataObject?: Maybe<DataObject>
+  thumbnailPhotoDataObjectId?: Maybe<Scalars['String']>
+  /** URLs where the asset content can be accessed (if any) */
+  thumbnailPhotoUrls: Array<Scalars['String']>
+  /** Availability meta information */
+  thumbnailPhotoAvailability: AssetAvailability
+  language?: Maybe<Language>
+  languageId?: Maybe<Scalars['String']>
+  /** Whether or not Video contains marketing */
+  hasMarketing?: Maybe<Scalars['Boolean']>
+  /** If the Video was published on another platform before being published on Joystream - the original publication date */
+  publishedBeforeJoystream?: Maybe<Scalars['DateTime']>
+  /** Whether the Video is supposed to be publicly displayed */
+  isPublic?: Maybe<Scalars['Boolean']>
+  /** Flag signaling whether a video is censored. */
+  isCensored: Scalars['Boolean']
+  /** Whether the Video contains explicit material. */
+  isExplicit?: Maybe<Scalars['Boolean']>
+  license?: Maybe<License>
+  licenseId?: Maybe<Scalars['String']>
+  mediaDataObject?: Maybe<DataObject>
+  mediaDataObjectId?: Maybe<Scalars['String']>
+  /** URLs where the asset content can be accessed (if any) */
+  mediaUrls: Array<Scalars['String']>
+  /** Availability meta information */
+  mediaAvailability: AssetAvailability
+  mediaMetadata?: Maybe<VideoMediaMetadata>
+  mediaMetadataId?: Maybe<Scalars['String']>
+  createdInBlock: Scalars['Int']
+  /** Is video featured or not */
+  isFeatured: Scalars['Boolean']
+}
+
+export type VideoCategoriesByNameFtsOutput = {
+  item: VideoCategoriesByNameSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type VideoCategoriesByNameSearchResult = VideoCategory
+
+export type VideoCategory = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** The name of the category */
+  name?: Maybe<Scalars['String']>
+  videos: Array<Video>
+  createdInBlock: Scalars['Int']
+}
+
+export type VideoCategoryConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoCategoryEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoCategoryCreateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock: Scalars['Float']
+}
+
+export type VideoCategoryEdge = {
+  node: VideoCategory
+  cursor: Scalars['String']
+}
+
+export enum VideoCategoryOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NameAsc = 'name_ASC',
+  NameDesc = 'name_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type VideoCategoryUpdateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type VideoCategoryWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  name_eq?: Maybe<Scalars['String']>
+  name_contains?: Maybe<Scalars['String']>
+  name_startsWith?: Maybe<Scalars['String']>
+  name_endsWith?: Maybe<Scalars['String']>
+  name_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  videos_none?: Maybe<VideoWhereInput>
+  videos_some?: Maybe<VideoWhereInput>
+  videos_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<VideoCategoryWhereInput>>
+  OR?: Maybe<Array<VideoCategoryWhereInput>>
+}
+
+export type VideoCategoryWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type VideoConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoCreateInput = {
+  channel?: Maybe<Scalars['ID']>
+  category?: Maybe<Scalars['ID']>
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  duration?: Maybe<Scalars['Float']>
+  thumbnailPhotoDataObject?: Maybe<Scalars['ID']>
+  thumbnailPhotoUrls: Array<Scalars['String']>
+  thumbnailPhotoAvailability: AssetAvailability
+  language?: Maybe<Scalars['ID']>
+  hasMarketing?: Maybe<Scalars['Boolean']>
+  publishedBeforeJoystream?: Maybe<Scalars['DateTime']>
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored: Scalars['Boolean']
+  isExplicit?: Maybe<Scalars['Boolean']>
+  license?: Maybe<Scalars['ID']>
+  mediaDataObject?: Maybe<Scalars['ID']>
+  mediaUrls: Array<Scalars['String']>
+  mediaAvailability: AssetAvailability
+  mediaMetadata?: Maybe<Scalars['ID']>
+  createdInBlock: Scalars['Float']
+  isFeatured: Scalars['Boolean']
+}
+
+export type VideoEdge = {
+  node: Video
+  cursor: Scalars['String']
+}
+
+export type VideoMediaEncoding = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Encoding of the video media object */
+  codecName?: Maybe<Scalars['String']>
+  /** Media container format */
+  container?: Maybe<Scalars['String']>
+  /** Content MIME type */
+  mimeMediaType?: Maybe<Scalars['String']>
+  videomediametadataencoding?: Maybe<Array<VideoMediaMetadata>>
+}
+
+export type VideoMediaEncodingConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoMediaEncodingEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoMediaEncodingCreateInput = {
+  codecName?: Maybe<Scalars['String']>
+  container?: Maybe<Scalars['String']>
+  mimeMediaType?: Maybe<Scalars['String']>
+}
+
+export type VideoMediaEncodingEdge = {
+  node: VideoMediaEncoding
+  cursor: Scalars['String']
+}
+
+export enum VideoMediaEncodingOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CodecNameAsc = 'codecName_ASC',
+  CodecNameDesc = 'codecName_DESC',
+  ContainerAsc = 'container_ASC',
+  ContainerDesc = 'container_DESC',
+  MimeMediaTypeAsc = 'mimeMediaType_ASC',
+  MimeMediaTypeDesc = 'mimeMediaType_DESC',
+}
+
+export type VideoMediaEncodingUpdateInput = {
+  codecName?: Maybe<Scalars['String']>
+  container?: Maybe<Scalars['String']>
+  mimeMediaType?: Maybe<Scalars['String']>
+}
+
+export type VideoMediaEncodingWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  codecName_eq?: Maybe<Scalars['String']>
+  codecName_contains?: Maybe<Scalars['String']>
+  codecName_startsWith?: Maybe<Scalars['String']>
+  codecName_endsWith?: Maybe<Scalars['String']>
+  codecName_in?: Maybe<Array<Scalars['String']>>
+  container_eq?: Maybe<Scalars['String']>
+  container_contains?: Maybe<Scalars['String']>
+  container_startsWith?: Maybe<Scalars['String']>
+  container_endsWith?: Maybe<Scalars['String']>
+  container_in?: Maybe<Array<Scalars['String']>>
+  mimeMediaType_eq?: Maybe<Scalars['String']>
+  mimeMediaType_contains?: Maybe<Scalars['String']>
+  mimeMediaType_startsWith?: Maybe<Scalars['String']>
+  mimeMediaType_endsWith?: Maybe<Scalars['String']>
+  mimeMediaType_in?: Maybe<Array<Scalars['String']>>
+  videomediametadataencoding_none?: Maybe<VideoMediaMetadataWhereInput>
+  videomediametadataencoding_some?: Maybe<VideoMediaMetadataWhereInput>
+  videomediametadataencoding_every?: Maybe<VideoMediaMetadataWhereInput>
+  AND?: Maybe<Array<VideoMediaEncodingWhereInput>>
+  OR?: Maybe<Array<VideoMediaEncodingWhereInput>>
+}
+
+export type VideoMediaEncodingWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type VideoMediaMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  encoding?: Maybe<VideoMediaEncoding>
+  encodingId?: Maybe<Scalars['String']>
+  /** Video media width in pixels */
+  pixelWidth?: Maybe<Scalars['Int']>
+  /** Video media height in pixels */
+  pixelHeight?: Maybe<Scalars['Int']>
+  /** Video media size in bytes */
+  size?: Maybe<Scalars['Int']>
+  video?: Maybe<Video>
+  createdInBlock: Scalars['Int']
+}
+
+export type VideoMediaMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoMediaMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoMediaMetadataCreateInput = {
+  encoding?: Maybe<Scalars['ID']>
+  pixelWidth?: Maybe<Scalars['Float']>
+  pixelHeight?: Maybe<Scalars['Float']>
+  size?: Maybe<Scalars['Float']>
+  createdInBlock: Scalars['Float']
+}
+
+export type VideoMediaMetadataEdge = {
+  node: VideoMediaMetadata
+  cursor: Scalars['String']
+}
+
+export enum VideoMediaMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  EncodingAsc = 'encoding_ASC',
+  EncodingDesc = 'encoding_DESC',
+  PixelWidthAsc = 'pixelWidth_ASC',
+  PixelWidthDesc = 'pixelWidth_DESC',
+  PixelHeightAsc = 'pixelHeight_ASC',
+  PixelHeightDesc = 'pixelHeight_DESC',
+  SizeAsc = 'size_ASC',
+  SizeDesc = 'size_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type VideoMediaMetadataUpdateInput = {
+  encoding?: Maybe<Scalars['ID']>
+  pixelWidth?: Maybe<Scalars['Float']>
+  pixelHeight?: Maybe<Scalars['Float']>
+  size?: Maybe<Scalars['Float']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type VideoMediaMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  encoding_eq?: Maybe<Scalars['ID']>
+  encoding_in?: Maybe<Array<Scalars['ID']>>
+  pixelWidth_eq?: Maybe<Scalars['Int']>
+  pixelWidth_gt?: Maybe<Scalars['Int']>
+  pixelWidth_gte?: Maybe<Scalars['Int']>
+  pixelWidth_lt?: Maybe<Scalars['Int']>
+  pixelWidth_lte?: Maybe<Scalars['Int']>
+  pixelWidth_in?: Maybe<Array<Scalars['Int']>>
+  pixelHeight_eq?: Maybe<Scalars['Int']>
+  pixelHeight_gt?: Maybe<Scalars['Int']>
+  pixelHeight_gte?: Maybe<Scalars['Int']>
+  pixelHeight_lt?: Maybe<Scalars['Int']>
+  pixelHeight_lte?: Maybe<Scalars['Int']>
+  pixelHeight_in?: Maybe<Array<Scalars['Int']>>
+  size_eq?: Maybe<Scalars['Int']>
+  size_gt?: Maybe<Scalars['Int']>
+  size_gte?: Maybe<Scalars['Int']>
+  size_lt?: Maybe<Scalars['Int']>
+  size_lte?: Maybe<Scalars['Int']>
+  size_in?: Maybe<Array<Scalars['Int']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  encoding?: Maybe<VideoMediaEncodingWhereInput>
+  video?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<VideoMediaMetadataWhereInput>>
+  OR?: Maybe<Array<VideoMediaMetadataWhereInput>>
+}
+
+export type VideoMediaMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum VideoOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  ChannelAsc = 'channel_ASC',
+  ChannelDesc = 'channel_DESC',
+  CategoryAsc = 'category_ASC',
+  CategoryDesc = 'category_DESC',
+  TitleAsc = 'title_ASC',
+  TitleDesc = 'title_DESC',
+  DescriptionAsc = 'description_ASC',
+  DescriptionDesc = 'description_DESC',
+  DurationAsc = 'duration_ASC',
+  DurationDesc = 'duration_DESC',
+  ThumbnailPhotoDataObjectAsc = 'thumbnailPhotoDataObject_ASC',
+  ThumbnailPhotoDataObjectDesc = 'thumbnailPhotoDataObject_DESC',
+  ThumbnailPhotoAvailabilityAsc = 'thumbnailPhotoAvailability_ASC',
+  ThumbnailPhotoAvailabilityDesc = 'thumbnailPhotoAvailability_DESC',
+  LanguageAsc = 'language_ASC',
+  LanguageDesc = 'language_DESC',
+  HasMarketingAsc = 'hasMarketing_ASC',
+  HasMarketingDesc = 'hasMarketing_DESC',
+  PublishedBeforeJoystreamAsc = 'publishedBeforeJoystream_ASC',
+  PublishedBeforeJoystreamDesc = 'publishedBeforeJoystream_DESC',
+  IsPublicAsc = 'isPublic_ASC',
+  IsPublicDesc = 'isPublic_DESC',
+  IsCensoredAsc = 'isCensored_ASC',
+  IsCensoredDesc = 'isCensored_DESC',
+  IsExplicitAsc = 'isExplicit_ASC',
+  IsExplicitDesc = 'isExplicit_DESC',
+  LicenseAsc = 'license_ASC',
+  LicenseDesc = 'license_DESC',
+  MediaDataObjectAsc = 'mediaDataObject_ASC',
+  MediaDataObjectDesc = 'mediaDataObject_DESC',
+  MediaAvailabilityAsc = 'mediaAvailability_ASC',
+  MediaAvailabilityDesc = 'mediaAvailability_DESC',
+  MediaMetadataAsc = 'mediaMetadata_ASC',
+  MediaMetadataDesc = 'mediaMetadata_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+  IsFeaturedAsc = 'isFeatured_ASC',
+  IsFeaturedDesc = 'isFeatured_DESC',
+}
+
+export type VideoUpdateInput = {
+  channel?: Maybe<Scalars['ID']>
+  category?: Maybe<Scalars['ID']>
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  duration?: Maybe<Scalars['Float']>
+  thumbnailPhotoDataObject?: Maybe<Scalars['ID']>
+  thumbnailPhotoUrls?: Maybe<Array<Scalars['String']>>
+  thumbnailPhotoAvailability?: Maybe<AssetAvailability>
+  language?: Maybe<Scalars['ID']>
+  hasMarketing?: Maybe<Scalars['Boolean']>
+  publishedBeforeJoystream?: Maybe<Scalars['DateTime']>
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored?: Maybe<Scalars['Boolean']>
+  isExplicit?: Maybe<Scalars['Boolean']>
+  license?: Maybe<Scalars['ID']>
+  mediaDataObject?: Maybe<Scalars['ID']>
+  mediaUrls?: Maybe<Array<Scalars['String']>>
+  mediaAvailability?: Maybe<AssetAvailability>
+  mediaMetadata?: Maybe<Scalars['ID']>
+  createdInBlock?: Maybe<Scalars['Float']>
+  isFeatured?: Maybe<Scalars['Boolean']>
+}
+
+export type VideoWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  channel_eq?: Maybe<Scalars['ID']>
+  channel_in?: Maybe<Array<Scalars['ID']>>
+  category_eq?: Maybe<Scalars['ID']>
+  category_in?: Maybe<Array<Scalars['ID']>>
+  title_eq?: Maybe<Scalars['String']>
+  title_contains?: Maybe<Scalars['String']>
+  title_startsWith?: Maybe<Scalars['String']>
+  title_endsWith?: Maybe<Scalars['String']>
+  title_in?: Maybe<Array<Scalars['String']>>
+  description_eq?: Maybe<Scalars['String']>
+  description_contains?: Maybe<Scalars['String']>
+  description_startsWith?: Maybe<Scalars['String']>
+  description_endsWith?: Maybe<Scalars['String']>
+  description_in?: Maybe<Array<Scalars['String']>>
+  duration_eq?: Maybe<Scalars['Int']>
+  duration_gt?: Maybe<Scalars['Int']>
+  duration_gte?: Maybe<Scalars['Int']>
+  duration_lt?: Maybe<Scalars['Int']>
+  duration_lte?: Maybe<Scalars['Int']>
+  duration_in?: Maybe<Array<Scalars['Int']>>
+  thumbnailPhotoDataObject_eq?: Maybe<Scalars['ID']>
+  thumbnailPhotoDataObject_in?: Maybe<Array<Scalars['ID']>>
+  thumbnailPhotoAvailability_eq?: Maybe<AssetAvailability>
+  thumbnailPhotoAvailability_in?: Maybe<Array<AssetAvailability>>
+  language_eq?: Maybe<Scalars['ID']>
+  language_in?: Maybe<Array<Scalars['ID']>>
+  hasMarketing_eq?: Maybe<Scalars['Boolean']>
+  hasMarketing_in?: Maybe<Array<Scalars['Boolean']>>
+  publishedBeforeJoystream_eq?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_lt?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_lte?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_gt?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_gte?: Maybe<Scalars['DateTime']>
+  isPublic_eq?: Maybe<Scalars['Boolean']>
+  isPublic_in?: Maybe<Array<Scalars['Boolean']>>
+  isCensored_eq?: Maybe<Scalars['Boolean']>
+  isCensored_in?: Maybe<Array<Scalars['Boolean']>>
+  isExplicit_eq?: Maybe<Scalars['Boolean']>
+  isExplicit_in?: Maybe<Array<Scalars['Boolean']>>
+  license_eq?: Maybe<Scalars['ID']>
+  license_in?: Maybe<Array<Scalars['ID']>>
+  mediaDataObject_eq?: Maybe<Scalars['ID']>
+  mediaDataObject_in?: Maybe<Array<Scalars['ID']>>
+  mediaAvailability_eq?: Maybe<AssetAvailability>
+  mediaAvailability_in?: Maybe<Array<AssetAvailability>>
+  mediaMetadata_eq?: Maybe<Scalars['ID']>
+  mediaMetadata_in?: Maybe<Array<Scalars['ID']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  isFeatured_eq?: Maybe<Scalars['Boolean']>
+  isFeatured_in?: Maybe<Array<Scalars['Boolean']>>
+  channel?: Maybe<ChannelWhereInput>
+  category?: Maybe<VideoCategoryWhereInput>
+  thumbnailPhotoDataObject?: Maybe<DataObjectWhereInput>
+  language?: Maybe<LanguageWhereInput>
+  license?: Maybe<LicenseWhereInput>
+  mediaDataObject?: Maybe<DataObjectWhereInput>
+  mediaMetadata?: Maybe<VideoMediaMetadataWhereInput>
+  AND?: Maybe<Array<VideoWhereInput>>
+  OR?: Maybe<Array<VideoWhereInput>>
+}
+
+export type VideoWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type Worker = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Signals whether the worker is still active */
+  isActive: Scalars['Boolean']
+  /** Runtime identifier */
+  workerId: Scalars['String']
+  /** Associated working group */
+  type: WorkerType
+  /** Custom metadata set by provider */
+  metadata?: Maybe<Scalars['String']>
+  dataObjects: Array<DataObject>
+}
+
+export type WorkerConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<WorkerEdge>
+  pageInfo: PageInfo
+}
+
+export type WorkerCreateInput = {
+  isActive: Scalars['Boolean']
+  workerId: Scalars['String']
+  type: WorkerType
+  metadata?: Maybe<Scalars['String']>
+}
+
+export type WorkerEdge = {
+  node: Worker
+  cursor: Scalars['String']
+}
+
+export enum WorkerOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsActiveAsc = 'isActive_ASC',
+  IsActiveDesc = 'isActive_DESC',
+  WorkerIdAsc = 'workerId_ASC',
+  WorkerIdDesc = 'workerId_DESC',
+  TypeAsc = 'type_ASC',
+  TypeDesc = 'type_DESC',
+  MetadataAsc = 'metadata_ASC',
+  MetadataDesc = 'metadata_DESC',
+}
+
+export enum WorkerType {
+  Gateway = 'GATEWAY',
+  Storage = 'STORAGE',
+}
+
+export type WorkerUpdateInput = {
+  isActive?: Maybe<Scalars['Boolean']>
+  workerId?: Maybe<Scalars['String']>
+  type?: Maybe<WorkerType>
+  metadata?: Maybe<Scalars['String']>
+}
+
+export type WorkerWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  isActive_eq?: Maybe<Scalars['Boolean']>
+  isActive_in?: Maybe<Array<Scalars['Boolean']>>
+  workerId_eq?: Maybe<Scalars['String']>
+  workerId_contains?: Maybe<Scalars['String']>
+  workerId_startsWith?: Maybe<Scalars['String']>
+  workerId_endsWith?: Maybe<Scalars['String']>
+  workerId_in?: Maybe<Array<Scalars['String']>>
+  type_eq?: Maybe<WorkerType>
+  type_in?: Maybe<Array<WorkerType>>
+  metadata_eq?: Maybe<Scalars['String']>
+  metadata_contains?: Maybe<Scalars['String']>
+  metadata_startsWith?: Maybe<Scalars['String']>
+  metadata_endsWith?: Maybe<Scalars['String']>
+  metadata_in?: Maybe<Array<Scalars['String']>>
+  dataObjects_none?: Maybe<DataObjectWhereInput>
+  dataObjects_some?: Maybe<DataObjectWhereInput>
+  dataObjects_every?: Maybe<DataObjectWhereInput>
+  AND?: Maybe<Array<WorkerWhereInput>>
+  OR?: Maybe<Array<WorkerWhereInput>>
+}
+
+export type WorkerWhereUniqueInput = {
+  id: Scalars['ID']
+}
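
The generated filter types above all follow one convention: scalar fields get _eq and _in suffixes (plus _lt/_lte/_gt/_gte for ordered types and _contains/_startsWith/_endsWith for strings), and whole filters compose via AND/OR. A minimal sketch of building such a filter; the bag IDs, import path, and the BigInt cast are illustrative assumptions, not values from this PR:

import { StorageDataObjectWhereInput } from './generated/schema' // path illustrative

// Accepted data objects larger than 1 MiB in either of two (illustrative) bags.
const where: StorageDataObjectWhereInput = {
  isAccepted_eq: true,
  size_gt: '1048576' as any, // the BigInt scalar's TS mapping depends on codegen config
  OR: [{ storageBag_eq: 'dynamic:channel:1' }, { storageBag_eq: 'static:council' }],
}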

+ 49 - 0
storage-node-v2/src/services/queryNode/queries/queries.graphql

@@ -0,0 +1,49 @@
+fragment StorageBucketDetails on StorageBucket {
+  id
+  operatorMetadata {
+    id
+    nodeEndpoint
+  }
+  operatorStatus {
+    ... on StorageBucketOperatorStatusActive {
+      workerId
+    }
+    ... on StorageBucketOperatorStatusInvited {
+      workerId
+    }
+  }
+}
+
+query getStorageBucketDetails($offset: Int, $limit: Int) {
+  storageBuckets(offset: $offset, limit: $limit) {
+    ...StorageBucketDetails
+  }
+}
+
+fragment StorageBagDetails on StorageBag {
+  id
+  storageAssignments {
+    storageBucket {
+      id
+    }
+  }
+}
+
+query getStorageBagDetails($bucketIds: [String!], $offset: Int, $limit: Int) {
+  storageBags(offset: $offset, limit: $limit, where: { storageAssignments_some: { storageBucketId_in: $bucketIds } }) {
+    ...StorageBagDetails
+  }
+}
+
+fragment DataObjectDetails on StorageDataObject {
+  ipfsHash
+  storageBag {
+    id
+  }
+}
+
+query getDataObjectDetails($bagIds: StorageBagWhereInput, $offset: Int, $limit: Int) {
+  storageDataObjects(offset: $offset, limit: $limit, where: { storageBag: $bagIds, isAccepted_eq: true }) {
+    ...DataObjectDetails
+  }
+}
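
These queries are consumed through the QueryNodeApi wrapper and are always fetched in pages. A minimal sketch of listing the buckets operated by a given worker, mirroring getStorageBucketIdsByWorkerId in storageObligations.ts below; the endpoint URL and import path are illustrative:

import { QueryNodeApi } from '../api' // path illustrative

async function listBucketIds(workerId: number): Promise<string[]> {
  const api = new QueryNodeApi('http://localhost:8081/graphql') // illustrative endpoint
  const page = await api.getStorageBucketDetails(0, 1000) // first page only; see the paging helper below
  return page.filter((bucket) => bucket.operatorStatus?.workerId === workerId).map((bucket) => bucket.id)
}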

+ 27 - 18
storage-node-v2/src/services/runtime/api.ts

@@ -6,7 +6,7 @@ import { TypeRegistry } from '@polkadot/types'
 import { KeyringPair } from '@polkadot/keyring/types'
 import { SubmittableExtrinsic, AugmentedEvent } from '@polkadot/api/types'
 import { DispatchError, DispatchResult } from '@polkadot/types/interfaces/system'
-import { getNonce } from './transactionNonceKeeper'
+import { getTransactionNonce, resetTransactionNonceCache } from './transactionNonceKeeper'
 import logger from '../../services/logger'
 import ExitCodes from '../../command-base/ExitCodes'
 import { CLIError } from '@oclif/errors'
@@ -24,8 +24,14 @@ export class ExtrinsicFailedError extends CLIError {}
  */
 export async function createApi(apiUrl: string): Promise<ApiPromise> {
   const provider = new WsProvider(apiUrl)
+  provider.on('error', (err) => logger.error(`Api provider error: ${err.target?._url}`))
 
-  return await ApiPromise.create({ provider, types })
+  const api = new ApiPromise({ provider, types })
+  await api.isReadyOrError
+
+  api.on('error', (err) => logger.error(`Api promise error: ${err.target?._url}`))
+
+  return api
 }
 
 /**
@@ -148,22 +154,25 @@ export async function sendAndFollowNamedTx<T>(
   sudoCall = false,
   eventParser: ((result: ISubmittableResult) => T) | null = null
 ): Promise<T | void> {
-  logger.debug(`Sending ${tx.method.section}.${tx.method.method} extrinsic...`)
-
-  if (sudoCall) {
-    tx = api.tx.sudo.sudo(tx)
+  try {
+    logger.debug(`Sending ${tx.method.section}.${tx.method.method} extrinsic...`)
+    if (sudoCall) {
+      tx = api.tx.sudo.sudo(tx)
+    }
+    const nonce = await getTransactionNonce(api, account)
+
+    const result = await sendExtrinsic(api, account, tx, nonce)
+    let eventResult: T | void
+    if (eventParser) {
+      eventResult = eventParser(result)
+    }
+    logger.debug(`Extrinsic successful!`)
+
+    return eventResult
+  } catch (err) {
+    await resetTransactionNonceCache()
+    throw err
   }
-  const nonce = await getNonce(api, account)
-
-  const result = await sendExtrinsic(api, account, tx, nonce)
-
-  let eventResult: T | void
-  if (eventParser) {
-    eventResult = eventParser(result)
-  }
-  logger.debug(`Extrinsic successful!`)
-
-  return eventResult
 }
 
 /**
@@ -202,7 +211,7 @@ export function getEvent<
   const event = result.findRecord(section, eventName)?.event as EventType | undefined
 
   if (!event) {
-    throw new Error(`Cannot find expected ${section}.${eventName} event in result: ${result.toHuman()}`)
+    throw new ExtrinsicFailedError(`Cannot find expected ${section}.${eventName} event in result: ${result.toHuman()}`)
   }
   return event as EventType
 }
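
Because sendAndFollowNamedTx now resets the nonce cache before rethrowing, a caller can retry a failed submission and get a fresh nonce from the chain. A minimal retry sketch; sendWithRetry is a hypothetical helper (not part of this PR), and the (api, account, tx) parameter order is assumed from the call sites:

import { ApiPromise } from '@polkadot/api'
import { KeyringPair } from '@polkadot/keyring/types'
import { SubmittableExtrinsic } from '@polkadot/api/types'
import { sendAndFollowNamedTx } from './api'

// Hypothetical helper: retry once the nonce cache has been reset by the catch block above.
async function sendWithRetry(
  api: ApiPromise,
  account: KeyringPair,
  tx: SubmittableExtrinsic<'promise'>,
  attempts = 3
): Promise<void> {
  for (let i = 1; i <= attempts; i++) {
    try {
      await sendAndFollowNamedTx(api, account, tx)
      return
    } catch (err) {
      if (i === attempts) throw err // nonce cache is already clean for the next attempt
    }
  }
}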

+ 7 - 2
storage-node-v2/src/services/runtime/extrinsics.ts

@@ -4,6 +4,7 @@ import { KeyringPair } from '@polkadot/keyring/types'
 import { ApiPromise } from '@polkadot/api'
 import { BagId, DynamicBagType } from '@joystream/types/storage'
 import logger from '../../services/logger'
+import { timeout } from 'promise-timeout'
 
 /**
  * Creates storage bucket.
@@ -269,9 +270,13 @@ export async function inviteStorageBucketOperator(
  * after logging.
  * @returns promise with a success flag.
  */
-async function extrinsicWrapper(extrinsic: () => Promise<void>, throwErr = false): Promise<boolean> {
+async function extrinsicWrapper(
+  extrinsic: () => Promise<void>,
+  throwErr = false,
+  timeoutMs = 10000 // 10s - default extrinsic timeout
+): Promise<boolean> {
   try {
-    await extrinsic()
+    await timeout(extrinsic(), timeoutMs)
   } catch (err) {
     logger.error(`Api Error: ${err}`)
 

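For context, promise-timeout's timeout(promise, ms) rejects with a dedicated TimeoutError when the wrapped promise does not settle in time, which lets callers distinguish a slow extrinsic from a dispatch failure. A small sketch of that pattern (helper name illustrative):

import { timeout, TimeoutError } from 'promise-timeout'

// Illustrative wrapper: surface timeouts distinctly from other errors.
async function withDeadline<T>(work: Promise<T>, ms = 10000): Promise<T> {
  try {
    return await timeout(work, ms)
  } catch (err) {
    if (err instanceof TimeoutError) {
      throw new Error(`Operation did not settle within ${ms} ms`)
    }
    throw err
  }
}
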
+ 32 - 5
storage-node-v2/src/services/runtime/transactionNonceKeeper.ts

@@ -4,8 +4,18 @@ import BN from 'bn.js'
 import AwaitLock from 'await-lock'
 import { ApiPromise } from '@polkadot/api'
 import logger from '../logger'
+import NodeCache from 'node-cache'
 
-let nonce: Index | null = null
+// Expiration period in seconds for the nonce cache.
+const NonceExpirationPeriod = 180 // seconds
+
+// Local in-memory cache for nonces.
+const nonceCache = new NodeCache({
+  stdTTL: NonceExpirationPeriod,
+  deleteOnExpire: true,
+})
+
+const nonceEntryName = 'transaction_nonce'
 const lock = new AwaitLock()
 
 /**
@@ -16,19 +26,36 @@ const lock = new AwaitLock()
  * @returns promise with transaction nonce for a given account.
  *
  */
-export async function getNonce(api: ApiPromise, account: KeyringPair): Promise<Index> {
+export async function getTransactionNonce(api: ApiPromise, account: KeyringPair): Promise<Index> {
   await lock.acquireAsync()
   try {
-    if (nonce === null) {
+    let nonce: Index | undefined = nonceCache.get(nonceEntryName)
+    if (nonce === undefined) {
       nonce = await api.rpc.system.accountNextIndex(account.address)
     } else {
       nonce = nonce.add(new BN(1)) as Index
     }
+
+    nonceCache.set(nonceEntryName, nonce)
+
+    logger.debug(`Last transaction nonce: ${nonce}`)
+    return nonce as Index
   } finally {
     lock.release()
   }
+}
+
+/**
+ * Drops the transaction nonce cache.
+ *
+ * @returns empty promise.
+ *
+ */
+export async function resetTransactionNonceCache(): Promise<void> {
+  await lock.acquireAsync()
+  nonceCache.del(nonceEntryName)
 
-  logger.debug(`Last transaction nonce:${nonce}`)
+  logger.debug(`Transaction nonce cache was dropped.`)
 
-  return nonce as Index
+  lock.release()
 }
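
The lock-plus-cache combination serializes concurrent nonce requests: the first caller reads accountNextIndex from the chain, and every later caller within the TTL receives the cached value incremented by one. A sketch of the expected behavior, assuming api and account are already initialized:

import { ApiPromise } from '@polkadot/api'
import { KeyringPair } from '@polkadot/keyring/types'
import { getTransactionNonce } from './transactionNonceKeeper'

async function nonceDemo(api: ApiPromise, account: KeyringPair): Promise<void> {
  // The two calls are serialized by the lock, so the second is served
  // from the cache as first + 1 (Index is a BN-based type).
  const [first, second] = await Promise.all([
    getTransactionNonce(api, account),
    getTransactionNonce(api, account),
  ])
  console.log(second.eq(first.addn(1))) // expected: true
}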

+ 63 - 0
storage-node-v2/src/services/sync/remoteStorageData.ts

@@ -0,0 +1,63 @@
+import superagent from 'superagent'
+import urljoin from 'url-join'
+import logger from '../logger'
+import NodeCache from 'node-cache'
+
+// Expiration period in seconds for the local cache.
+const ExpirationPeriod: number = 5 * 60 // 5 minutes (in seconds)
+
+// Max data entries in local cache
+const MaxEntries = 10000
+
+// Local in-memory cache for CIDs by operator URL.
+const availableCidsCache = new NodeCache({
+  stdTTL: ExpirationPeriod,
+  deleteOnExpire: true,
+  maxKeys: MaxEntries,
+})
+
+// Local in-memory cache for faulty operator URLs. Prevents fetching from
+// offline storage nodes.
+const badOperatorUrls = new NodeCache({
+  stdTTL: ExpirationPeriod,
+  deleteOnExpire: true,
+  maxKeys: MaxEntries,
+})
+
+/**
+ * Queries the remote storage node for the data object IDs it currently stores.
+ * It caches the result (including errors) for some limited time.
+ *
+ * @param operatorUrl - remote storage node URL
+ */
+export async function getRemoteDataObjects(operatorUrl: string): Promise<string[]> {
+  const url = urljoin(operatorUrl, 'api/v1/state/data-objects')
+
+  const faultyOperator = badOperatorUrls.has(operatorUrl)
+  if (faultyOperator) {
+    logger.debug(`Sync - cached error for ${url}, skipping ....`)
+    return []
+  }
+
+  const cachedData = availableCidsCache.get<string[]>(url)
+  if (cachedData) {
+    logger.debug(`Sync - getting available data from cache for ${url}`)
+    return cachedData
+  }
+
+  try {
+    logger.debug(`Sync - fetching available data for ${url}`)
+    const timeoutMs = 120 * 1000 // 2 min
+    const response = await superagent.get(url).timeout(timeoutMs)
+
+    const data = response.body
+    availableCidsCache.set(url, data, ExpirationPeriod)
+
+    return data
+  } catch (err) {
+    logger.error(`Sync - fetching data error from ${url}: ${err}`)
+    badOperatorUrls.set(operatorUrl, null, ExpirationPeriod)
+  }
+
+  return []
+}
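
Within the five-minute TTL, repeat calls for the same operator are served from availableCidsCache, and an operator that failed once is skipped via badOperatorUrls until its entry expires. For example (URL illustrative):

import { getRemoteDataObjects } from './remoteStorageData'

async function cacheDemo(): Promise<void> {
  const operatorUrl = 'http://operator.example:3333' // illustrative
  const first = await getRemoteDataObjects(operatorUrl) // network fetch, or [] on error
  const second = await getRemoteDataObjects(operatorUrl) // cache hit (or cached-error skip)
  console.log(first.length, second.length)
}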

+ 241 - 0
storage-node-v2/src/services/sync/storageObligations.ts

@@ -0,0 +1,241 @@
+import { QueryNodeApi } from '../queryNode/api'
+import logger from '../logger'
+import { u8aToString, hexToU8a } from '@polkadot/util'
+import {
+  StorageBagDetailsFragment,
+  StorageBucketDetailsFragment,
+  DataObjectDetailsFragment,
+} from '../queryNode/generated/queries'
+
+/**
+ * Defines storage provider data obligations.
+ */
+export type DataObligations = {
+  /**
+   * All storage buckets in the system.
+   */
+  storageBuckets: StorageBucket[]
+
+  /**
+   * Assigned bags for the storage provider.
+   */
+  bags: Bag[]
+
+  /**
+   * Assigned data objects for the storage provider.
+   */
+  dataObjects: DataObject[]
+}
+
+/**
+ * Storage bucket abstraction.
+ */
+type StorageBucket = {
+  /**
+   * Storage bucket ID
+   */
+  id: string
+
+  /**
+   * Storage operator URL
+   */
+  operatorUrl: string
+
+  /**
+   * Storage working group ID.
+   */
+  workerId: number
+}
+
+/**
+ * Storage bag abstraction.
+ */
+type Bag = {
+  /**
+   * Storage bag ID
+   */
+  id: string
+
+  /**
+   * Assigned storage bucket IDs.
+   */
+  buckets: string[]
+}
+
+/**
+ * Data object abstraction.
+ */
+type DataObject = {
+  /**
+   * Content ID (IPFS hash)
+   */
+  cid: string
+
+  /**
+   * Assigned bag ID
+   */
+  bagId: string
+}
+
+/**
+ * Get storage provider obligations (e.g. assigned data objects) from the
+ * runtime (Query Node).
+ *
+ * @param queryNodeUrl - Query Node URL
+ * @param workerId - worker ID
+ * @returns promise for the DataObligations
+ */
+export async function getStorageObligationsFromRuntime(
+  queryNodeUrl: string,
+  workerId: number
+): Promise<DataObligations> {
+  const api = new QueryNodeApi(queryNodeUrl)
+
+  const allBuckets = await getAllBuckets(api)
+
+  const bucketIds = allBuckets
+    .filter((bucket) => bucket.operatorStatus?.workerId === workerId)
+    .map((bucket) => bucket.id)
+  const assignedBags = await getAllAssignedBags(api, bucketIds)
+
+  const bagIds = assignedBags.map((bag) => bag.id)
+  const assignedDataObjects = await getAllAssignedDataObjects(api, bagIds)
+
+  const model: DataObligations = {
+    storageBuckets: allBuckets.map((bucket) => ({
+      id: bucket.id,
+      operatorUrl: extractOperatorUrl(bucket.operatorMetadata?.nodeEndpoint),
+      workerId: bucket.operatorStatus?.workerId,
+    })),
+    bags: assignedBags.map((bag) => ({
+      id: bag.id,
+      buckets: bag.storageAssignments.map((bucketInBag) => bucketInBag.storageBucket.id),
+    })),
+    dataObjects: assignedDataObjects.map((dataObject) => ({
+      cid: dataObject.ipfsHash,
+      bagId: dataObject.storageBag.id,
+    })),
+  }
+
+  return model
+}
+
+/**
+ * Get storage bucket IDs assigned to the worker.
+ *
+ * @param queryNodeUrl - Query Node URL
+ * @param workerId - worker ID
+ * @returns storage bucket IDs
+ */
+export async function getStorageBucketIdsByWorkerId(queryNodeUrl: string, workerId: number): Promise<string[]> {
+  const api = new QueryNodeApi(queryNodeUrl)
+  const allBuckets = await getAllBuckets(api)
+
+  const bucketIds = allBuckets
+    .filter((bucket) => bucket.operatorStatus?.workerId === workerId)
+    .map((bucket) => bucket.id)
+
+  return bucketIds
+}
+
+/**
+ * Get IDs of the data objects assigned to the bag ID.
+ *
+ * @param queryNodeUrl - Query Node URL
+ * @param bagId - bag ID
+ * @returns data object IDs
+ */
+export async function getDataObjectIDsByBagId(queryNodeUrl: string, bagId: string): Promise<string[]> {
+  const api = new QueryNodeApi(queryNodeUrl)
+  const dataObjects = await getAllAssignedDataObjects(api, [bagId])
+
+  return dataObjects.map((obj) => obj.ipfsHash)
+}
+
+/**
+ * Get all storage buckets registered in the runtime (Query Node).
+ *
+ * @param api - initialized QueryNodeApi instance
+ * @returns storage buckets data
+ */
+async function getAllBuckets(api: QueryNodeApi): Promise<StorageBucketDetailsFragment[]> {
+  return await getAllObjectsWithPaging(
+    'all storage buckets',
+    async (offset, limit) => await api.getStorageBucketDetails(offset, limit)
+  )
+}
+
+/**
+ * Get all data objects assigned to storage provider.
+ *
+ * @param api - initialized QueryNodeApi instance
+ * @param bagIds - assigned storage bags' IDs
+ * @returns data object details
+ */
+async function getAllAssignedDataObjects(api: QueryNodeApi, bagIds: string[]): Promise<DataObjectDetailsFragment[]> {
+  return await getAllObjectsWithPaging(
+    'assigned data objects',
+    async (offset, limit) => await api.getDataObjectDetails(bagIds, offset, limit)
+  )
+}
+
+/**
+ * Get all bags assigned to storage provider.
+ *
+ * @param api - initialized QueryNodeApi instance
+ * @param bucketIds - assigned storage provider buckets' IDs
+ * @returns storage bag data
+ */
+async function getAllAssignedBags(api: QueryNodeApi, bucketIds: string[]): Promise<StorageBagDetailsFragment[]> {
+  return await getAllObjectsWithPaging(
+    'assigned bags',
+    async (offset, limit) => await api.getStorageBagsDetails(bucketIds, offset, limit)
+  )
+}
+
+/**
+ * Generic paged-query helper for the QueryNode. It fetches data page by
+ * page using a record offset and limit (hardcoded to 1000).
+ *
+ * @param objectName - object name (type) to get from the QueryNode
+ * @param query - actual query function
+ * @returns combined results from all pages
+ */
+async function getAllObjectsWithPaging<T>(
+  objectName: string,
+  query: (offset: number, limit: number) => Promise<T[]>
+): Promise<T[]> {
+  const result = []
+  const limit = 1000
+  let offset = 0
+
+  let resultPart = []
+  do {
+    logger.debug(`Sync - getting ${objectName}: offset = ${offset}, limit = ${limit}`)
+    resultPart = await query(offset, limit)
+    offset += limit
+    result.push(...resultPart)
+
+    if (resultPart.length < limit) break
+  } while (resultPart.length > 0)
+
+  return result
+}
+
+/**
+ * Extract storage operator URL from the encoded metadata
+ *
+ * @param encodedString - encoded storage operator metadata
+ * @returns storage operator URL
+ */
+function extractOperatorUrl(encodedString: string | undefined | null): string {
+  try {
+    if (encodedString) {
+      return u8aToString(hexToU8a(encodedString))
+    }
+  } catch (err) {
+    logger.error(`Sync - ${err}`)
+  }
+
+  return ''
+}
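
The paging helper requests offsets 0, 1000, 2000, ... and stops after the first short page. A self-contained sketch of the same loop against a fake query of 2500 records:

// Standalone illustration of the paging loop in getAllObjectsWithPaging.
async function pagingDemo(): Promise<void> {
  const total = 2500
  const fakeQuery = async (offset: number, limit: number): Promise<number[]> =>
    Array.from({ length: Math.min(limit, Math.max(total - offset, 0)) }, (_, i) => offset + i)

  const result: number[] = []
  const limit = 1000
  let offset = 0
  let part: number[] = []
  do {
    part = await fakeQuery(offset, limit) // pages of 1000, 1000, 500
    offset += limit
    result.push(...part)
    if (part.length < limit) break
  } while (part.length > 0)

  console.log(result.length) // 2500
}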

+ 155 - 0
storage-node-v2/src/services/sync/synchronizer.ts

@@ -0,0 +1,155 @@
+import { getStorageObligationsFromRuntime, DataObligations } from './storageObligations'
+import logger from '../../services/logger'
+import { SyncTask, DownloadFileTask, DeleteLocalFileTask, PrepareDownloadFileTask } from './tasks'
+import { WorkingStack, TaskProcessorSpawner, TaskSink } from './workingProcess'
+import _ from 'lodash'
+import fs from 'fs'
+const fsPromises = fs.promises
+
+/**
+ * Runs the data synchronization workflow. It compares the current node's
+ * storage obligations with the local storage and fixes the difference.
+ * The sync process uses the QueryNode for defining storage obligations and
+ * remote storage nodes' URL for data obtaining.
+ *
+ * @param workerId - current storage provider ID
+ * @param asyncWorkersNumber - maximum parallel downloads number
+ * @param queryNodeUrl - Query Node endpoint URL
+ * @param uploadDirectory - local directory to get file names from
+ * @param operatorUrl - (optional) defines the data source URL. If not set
+ * the source URL is resolved for each data object separately using the Query
+ * Node information about the storage providers.
+ */
+export async function performSync(
+  workerId: number,
+  asyncWorkersNumber: number,
+  queryNodeUrl: string,
+  uploadDirectory: string,
+  operatorUrl?: string
+): Promise<void> {
+  logger.info('Started syncing...')
+  const [model, files] = await Promise.all([
+    getStorageObligationsFromRuntime(queryNodeUrl, workerId),
+    getLocalFileNames(uploadDirectory),
+  ])
+
+  const requiredCids = model.dataObjects.map((obj) => obj.cid)
+
+  const added = _.difference(requiredCids, files)
+  const deleted = _.difference(files, requiredCids)
+
+  logger.debug(`Sync - added objects: ${added.length}`)
+  logger.debug(`Sync - deleted objects: ${deleted.length}`)
+
+  const workingStack = new WorkingStack()
+  const deletedTasks = deleted.map((fileName) => new DeleteLocalFileTask(uploadDirectory, fileName))
+
+  let addedTasks: SyncTask[]
+  if (operatorUrl === undefined) {
+    addedTasks = await getPrepareDownloadTasks(model, added, uploadDirectory, workingStack)
+  } else {
+    addedTasks = await getDownloadTasks(operatorUrl, added, uploadDirectory)
+  }
+
+  logger.debug(`Sync - started processing...`)
+
+  const processSpawner = new TaskProcessorSpawner(workingStack, asyncWorkersNumber)
+
+  await workingStack.add(addedTasks)
+  await workingStack.add(deletedTasks)
+
+  await processSpawner.process()
+  logger.info('Sync ended.')
+}
+
+/**
+ * Returns file names from the local directory.
+ *
+ * @param directory - local directory to get file names from
+ */
+async function getLocalFileNames(directory: string): Promise<string[]> {
+  return fsPromises.readdir(directory)
+}
+
+/**
+ * Creates the download preparation tasks.
+ *
+ * @param dataObligations - defines the current data obligations for the node
+ * @param addedCids - data object IDs to download
+ * @param uploadDirectory - local directory for data uploading
+ * @param taskSink - a destination for the newly created tasks
+ */
+async function getPrepareDownloadTasks(
+  dataObligations: DataObligations,
+  addedCids: string[],
+  uploadDirectory: string,
+  taskSink: TaskSink
+): Promise<PrepareDownloadFileTask[]> {
+  const cidMap = new Map()
+  for (const entry of dataObligations.dataObjects) {
+    cidMap.set(entry.cid, entry.bagId)
+  }
+
+  const bucketMap = new Map()
+  for (const entry of dataObligations.storageBuckets) {
+    bucketMap.set(entry.id, entry.operatorUrl)
+  }
+
+  const bagMap = new Map()
+  for (const entry of dataObligations.bags) {
+    const operatorUrls = []
+
+    for (const bucket of entry.buckets) {
+      if (bucketMap.has(bucket)) {
+        const operatorUrl = bucketMap.get(bucket)
+        if (operatorUrl) {
+          operatorUrls.push(operatorUrl)
+        }
+      }
+    }
+
+    bagMap.set(entry.id, operatorUrls)
+  }
+
+  const tasks = addedCids.map((cid) => {
+    let operatorUrls: string[] = [] // can be empty after lookup
+    if (cidMap.has(cid)) {
+      const bagId = cidMap.get(cid)
+      if (bagMap.has(bagId)) {
+        operatorUrls = bagMap.get(bagId)
+      }
+    }
+
+    return new PrepareDownloadFileTask(operatorUrls, cid, uploadDirectory, taskSink)
+  })
+
+  return tasks
+}
+
+/**
+ * Creates the download file tasks.
+ *
+ * @param operatorUrl - defines the data source URL.
+ * @param addedCids - data object IDs to download
+ * @param uploadDirectory - local directory for data uploading
+ */
+async function getDownloadTasks(
+  operatorUrl: string,
+  addedCids: string[],
+  uploadDirectory: string
+): Promise<DownloadFileTask[]> {
+  const addedTasks = addedCids.map((fileName) => new DownloadFileTask(operatorUrl, fileName, uploadDirectory))
+
+  return addedTasks
+}
+
+/**
+ * Returns local data objects info.
+ *
+ * @param uploadDirectory - local directory to get file names from
+ */
+export async function getLocalDataObjects(uploadDirectory: string): Promise<string[]> {
+  const localCids = await getLocalFileNames(uploadDirectory)
+
+  return localCids
+}
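
In operation the server is expected to invoke performSync on a schedule; a minimal sketch of such a runner, where the interval and worker count are illustrative rather than values taken from this PR:

import sleep from 'sleep-promise'
import { performSync } from './synchronizer'

// Hypothetical periodic runner; the concrete numbers are illustrative.
async function runSyncLoop(workerId: number, queryNodeUrl: string, uploadsDir: string): Promise<never> {
  while (true) {
    try {
      await performSync(workerId, 20, queryNodeUrl, uploadsDir)
    } catch (err) {
      console.error(`Sync failed: ${err}`) // a failed iteration should not kill the loop
    }
    await sleep(60 * 1000)
  }
}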

+ 153 - 0
storage-node-v2/src/services/sync/tasks.ts

@@ -0,0 +1,153 @@
+import fs from 'fs'
+import path from 'path'
+import { pipeline } from 'stream'
+import { promisify } from 'util'
+import superagent from 'superagent'
+import urljoin from 'url-join'
+import { v4 as uuidv4 } from 'uuid'
+import logger from '../../services/logger'
+import _ from 'lodash'
+import { getRemoteDataObjects } from './remoteStorageData'
+import { TaskSink } from './workingProcess'
+const fsPromises = fs.promises
+
+/**
+ * Defines synchronization task abstraction.
+ */
+export interface SyncTask {
+  /**
+   * Returns human-friendly task description.
+   */
+  description(): string
+
+  /**
+   * Performs the task.
+   */
+  execute(): Promise<void>
+}
+
+/**
+ * Deletes the file in the local storage by its name.
+ */
+export class DeleteLocalFileTask implements SyncTask {
+  uploadsDirectory: string
+  filename: string
+
+  constructor(uploadsDirectory: string, filename: string) {
+    this.uploadsDirectory = uploadsDirectory
+    this.filename = filename
+  }
+
+  description(): string {
+    return `Sync - deleting local file: ${this.filename} ....`
+  }
+
+  async execute(): Promise<void> {
+    const fullPath = path.join(this.uploadsDirectory, this.filename)
+    return fsPromises.unlink(fullPath)
+  }
+}
+
+/**
+ * Download the file from the remote storage node to the local storage.
+ */
+export class DownloadFileTask implements SyncTask {
+  id: string
+  uploadsDirectory: string
+  url: string
+
+  constructor(baseUrl: string, id: string, uploadsDirectory: string) {
+    this.id = id
+    this.uploadsDirectory = uploadsDirectory
+    this.url = urljoin(baseUrl, 'api/v1/files', id)
+  }
+
+  description(): string {
+    return `Sync - downloading file: ${this.url} to ${this.uploadsDirectory} ....`
+  }
+
+  async execute(): Promise<void> {
+    const streamPipeline = promisify(pipeline)
+    const filepath = path.join(this.uploadsDirectory, this.id)
+
+    try {
+      const timeoutMs = 30 * 60 * 1000 // 30 min for large files (~ 10 GB)
+      // Casting because of:
+      // https://stackoverflow.com/questions/38478034/pipe-superagent-response-to-express-response
+      const request = superagent.get(this.url).timeout(timeoutMs) as unknown as NodeJS.ReadableStream
+
+      // We create a temp file first to mitigate partial downloads on an app (or remote node) crash.
+      // Such partial downloads will be cleaned up during the next sync iteration.
+      const tempFilePath = path.join(this.uploadsDirectory, uuidv4())
+      const fileStream = fs.createWriteStream(tempFilePath)
+      await streamPipeline(request, fileStream)
+
+      await fsPromises.rename(tempFilePath, filepath)
+    } catch (err) {
+      logger.error(`Sync - fetching data error for ${this.url}: ${err}`)
+      try {
+        logger.warn(`Cleaning up file ${filepath}`)
+        await fsPromises.unlink(filepath)
+      } catch (err) {
+        logger.error(`Sync - cannot cleanup file ${filepath}: ${err}`)
+      }
+    }
+  }
+}
+
+/**
+ * Resolve remote storage node URLs and creates file downloading tasks (DownloadFileTask).
+ */
+export class PrepareDownloadFileTask implements SyncTask {
+  cid: string
+  operatorUrlCandidates: string[]
+  taskSink: TaskSink
+  uploadsDirectory: string
+
+  constructor(operatorUrlCandidates: string[], cid: string, uploadsDirectory: string, taskSink: TaskSink) {
+    this.cid = cid
+    this.taskSink = taskSink
+    this.operatorUrlCandidates = operatorUrlCandidates
+    this.uploadsDirectory = uploadsDirectory
+  }
+
+  description(): string {
+    return `Sync - preparing for download of: ${this.cid} ....`
+  }
+
+  async execute(): Promise<void> {
+    // Create an array of operator URL indices for random URL selection.
+    // We cannot use the original array because we shouldn't modify the original
+    // data, and cloning it seems like a heavy operation.
+    const operatorUrlIndices: number[] = [...Array(this.operatorUrlCandidates.length).keys()]
+
+    while (!_.isEmpty(operatorUrlIndices)) {
+      const randomUrlIndex = _.sample(operatorUrlIndices)
+      if (randomUrlIndex === undefined) {
+        logger.warn(`Sync - cannot get a random URL`)
+        break
+      }
+
+      const randomUrl = this.operatorUrlCandidates[randomUrlIndex]
+      logger.debug(`Sync - random storage node URL was chosen ${randomUrl}`)
+
+      // Remove the chosen index so the same URL is not tried twice.
+      _.remove(operatorUrlIndices, (index) => index === randomUrlIndex)
+
+      try {
+        const chosenBaseUrl = randomUrl
+        const remoteOperatorCids: string[] = await getRemoteDataObjects(chosenBaseUrl)
+
+        if (remoteOperatorCids.includes(this.cid)) {
+          const newTask = new DownloadFileTask(chosenBaseUrl, this.cid, this.uploadsDirectory)
+
+          return this.taskSink.add([newTask])
+        }
+      } catch (err) {
+        logger.error(`Sync - fetching data error for ${this.cid}: ${err}`)
+      }
+    }
+
+    logger.warn(`Sync - cannot get operator URLs for ${this.cid}`)
+  }
+}

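The temp-file-then-rename pattern in `DownloadFileTask.execute()` is the core crash-safety idea of the sync download: the object only appears under its final name once it is fully written. Below is a minimal standalone sketch of the same pattern; `atomicDownload` and its arguments are illustrative, not part of this PR.

```ts
import fs from 'fs'
import path from 'path'
import superagent from 'superagent'
import { pipeline } from 'stream'
import { promisify } from 'util'
import { v4 as uuidv4 } from 'uuid'

const streamPipeline = promisify(pipeline)

// Streams `url` into a uniquely named temp file, then renames it into place.
// A crash mid-download leaves only an orphaned temp file (cleaned up by the
// next sync run), never a truncated object under its final name.
async function atomicDownload(url: string, uploadsDir: string, filename: string): Promise<void> {
  const tempPath = path.join(uploadsDir, uuidv4())
  const request = superagent.get(url) as unknown as NodeJS.ReadableStream
  await streamPipeline(request, fs.createWriteStream(tempPath))
  await fs.promises.rename(tempPath, path.join(uploadsDir, filename))
}
```

Note that the rename is only atomic when the temp file lives on the same filesystem as the target, which is why the task writes the temp file into the uploads directory itself.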
+ 129 - 0
storage-node-v2/src/services/sync/workingProcess.ts

@@ -0,0 +1,129 @@
+import AwaitLock from 'await-lock'
+import sleep from 'sleep-promise'
+import { SyncTask } from './tasks'
+import logger from '../../services/logger'
+
+/**
+ * Defines task destination abstraction.
+ */
+export interface TaskSink {
+  /**
+   * Adds a task array to the pending tasks collection.
+   *
+   * @param tasks tasks to add.
+   */
+  add(tasks: SyncTask[]): Promise<void>
+}
+
+/**
+ * Defines task source abstraction.
+ */
+export interface TaskSource {
+  /**
+   * Gets the next task from the pending tasks collection.
+   *
+   * @returns next task or null if empty.
+   */
+  get(): Promise<SyncTask | null>
+}
+
+/**
+ * Defines a pending tasks collection with LIFO semantics.
+ */
+export class WorkingStack implements TaskSink, TaskSource {
+  workingStack: SyncTask[]
+  lock: AwaitLock
+
+  constructor() {
+    this.workingStack = []
+    this.lock = new AwaitLock()
+  }
+
+  async get(): Promise<SyncTask | null> {
+    await this.lock.acquireAsync()
+    const task = this.workingStack.pop()
+    this.lock.release()
+
+    if (task !== undefined) {
+      return task
+    } else {
+      return null
+    }
+  }
+
+  async add(tasks: SyncTask[]): Promise<void> {
+    await this.lock.acquireAsync()
+
+    if (tasks !== null) {
+      this.workingStack.push(...tasks)
+    }
+    this.lock.release()
+  }
+}
+
+/**
+ * Defines working process. It consumes and executes tasks from the pending
+ * tasks source.
+ */
+export class TaskProcessor {
+  taskSource: TaskSource
+  exitOnCompletion: boolean
+
+  constructor(taskSource: TaskSource, exitOnCompletion = true) {
+    this.taskSource = taskSource
+    this.exitOnCompletion = exitOnCompletion
+  }
+
+  /**
+   * Starts the task processor: picks tasks one by one from the pending task
+   * source and executes them. On an empty source it either exits or pauses
+   * and retries, depending on the `exitOnCompletion` setting.
+   *
+   * @returns empty promise
+   */
+  async process(): Promise<void> {
+    while (true) {
+      const task = await this.taskSource.get()
+
+      if (task !== null) {
+        logger.debug(task.description())
+        await task.execute()
+      } else {
+        if (this.exitOnCompletion) {
+          return
+        }
+
+        await sleep(3000)
+      }
+    }
+  }
+}
+
+/**
+ * Manages a pack of task processors: runs multiple instances and waits for
+ * their completion.
+ */
+export class TaskProcessorSpawner {
+  processNumber: number
+  taskSource: TaskSource
+  constructor(taskSource: TaskSource, processNumber: number) {
+    this.taskSource = taskSource
+    this.processNumber = processNumber
+  }
+
+  /**
+   * Starts the task processor pack and waits for its completion.
+   *
+   * @returns empty promise
+   */
+  async process(): Promise<void> {
+    const processes = []
+
+    for (let i = 0; i < this.processNumber; i++) {
+      const processor = new TaskProcessor(this.taskSource)
+      processes.push(processor.process())
+    }
+
+    await Promise.all(processes)
+  }
+}

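To see how the pieces above compose, here is a hedged usage sketch: a trivial `SyncTask` implementation fed through a `WorkingStack` and drained by a `TaskProcessorSpawner`. The `LogTask` class is made up for illustration.

```ts
import { WorkingStack, TaskProcessorSpawner } from './workingProcess'
import { SyncTask } from './tasks'

// A trivial task used only to demonstrate the processing pipeline.
class LogTask implements SyncTask {
  constructor(private message: string) {}

  description(): string {
    return `Logging: ${this.message}`
  }

  async execute(): Promise<void> {
    console.log(this.message)
  }
}

async function demo(): Promise<void> {
  const stack = new WorkingStack()
  // WorkingStack pops tasks in LIFO order, so 'third' runs first.
  await stack.add([new LogTask('first'), new LogTask('second'), new LogTask('third')])

  // Three concurrent processors; each exits once the stack is drained
  // (TaskProcessor defaults to exitOnCompletion = true).
  await new TaskProcessorSpawner(stack, 3).process()
}

demo().catch(console.error)
```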
+ 100 - 25
storage-node-v2/src/services/webApi/app.ts

@@ -3,45 +3,96 @@ import path from 'path'
 import cors from 'cors'
 import { Express, NextFunction } from 'express-serve-static-core'
 import * as OpenApiValidator from 'express-openapi-validator'
-import { HttpError, OpenAPIV3 } from 'express-openapi-validator/dist/framework/types'
+import { HttpError, OpenAPIV3, ValidateSecurityOpts } from 'express-openapi-validator/dist/framework/types'
 import { KeyringPair } from '@polkadot/keyring/types'
 import { ApiPromise } from '@polkadot/api'
 import { RequestData, verifyTokenSignature, parseUploadToken, UploadToken } from '../helpers/auth'
 import { checkRemoveNonce } from '../../services/helpers/tokenNonceKeeper'
 import { httpLogger, errorLogger } from '../../services/logger'
 
+/**
+ * Web application parameters.
+ */
+export type AppConfig = {
+  /**
+   * Runtime API promise
+   */
+  api: ApiPromise
+
+  /**
+   * KeyringPair instance
+   */
+  account: KeyringPair
+
+  /**
+   * Storage provider ID (worker ID)
+   */
+  workerId: number
+
+  /**
+   * Directory for file uploads
+   */
+  uploadsDir: string
+
+  /**
+   * Directory within the `uploadsDir` for temporary file uploads
+   */
+  tempDirName: string
+
+  /**
+   * Environment configuration
+   */
+  process: {
+    version: string
+    userAgent: string
+  }
+
+  /**
+   * Query Node endpoint URL
+   */
+  queryNodeUrl: string
+
+  /**
+   * Enables uploading auth-schema validation
+   */
+  enableUploadingAuth: boolean
+
+  /**
+   * ElasticSearch logging endpoint URL
+   */
+  elasticSearchEndpoint?: string
+
+  /**
+   * Max file size for uploading limit.
+   */
+  maxFileSize: number
+}
+
 /**
  * Creates Express web application. Uses the OAS spec file for the API.
  *
- * @param api - runtime API promise
- * @param account - KeyringPair instance
- * @param workerId - storage provider ID (worker ID)
- * @param uploadsDir - directory for the file uploading
- * @param maxFileSize - max allowed file size
+ * @param config - web app configuration parameters
  * @returns Express promise.
  */
-export async function createApp(
-  api: ApiPromise,
-  account: KeyringPair,
-  workerId: number,
-  uploadsDir: string,
-  maxFileSize: number
-): Promise<Express> {
+export async function createApp(config: AppConfig): Promise<Express> {
   const spec = path.join(__dirname, './../../api-spec/openapi.yaml')
+  const tempFileUploadingDir = path.join(config.uploadsDir, config.tempDirName)
 
   const app = express()
 
   app.use(cors())
   app.use(express.json())
-  app.use(httpLogger())
+  app.use(httpLogger(config.elasticSearchEndpoint))
 
   app.use(
     // Set parameters for each request.
     (req: express.Request, res: express.Response, next: NextFunction) => {
-      res.locals.uploadsDir = uploadsDir
-      res.locals.storageProviderAccount = account
-      res.locals.workerId = workerId
-      res.locals.api = api
+      res.locals.uploadsDir = config.uploadsDir
+      res.locals.tempFileUploadingDir = tempFileUploadingDir
+      res.locals.storageProviderAccount = config.account
+      res.locals.workerId = config.workerId
+      res.locals.api = config.api
+      res.locals.config = config.process
+      res.locals.queryNodeUrl = config.queryNodeUrl
       next()
     },
     // Setup OpenAPiValidator
@@ -55,20 +106,16 @@ export async function createApp(
         resolver: OpenApiValidator.resolvers.modulePathResolver,
       },
       fileUploader: {
-        dest: uploadsDir,
+        dest: tempFileUploadingDir,
         // Busboy library settings
         limits: {
           // For multipart forms, the max number of file fields (Default: Infinity)
           files: 1,
           // For multipart forms, the max file size (in bytes) (Default: Infinity)
-          fileSize: maxFileSize,
-        },
-      },
-      validateSecurity: {
-        handlers: {
-          UploadAuth: validateUpload(api, account),
+          fileSize: config.maxFileSize,
         },
       },
+      validateSecurity: setupUploadingValidation(config.enableUploadingAuth, config.api, config.account),
     })
   ) // Required signature.
 
@@ -101,6 +148,34 @@ export async function createApp(
   return app
 }
 
+/**
+ * Sets up the uploading validation: returns the 'validation security'
+ * configuration when auth is enabled, or false to disable the validation.
+ *
+ * @param enableUploadingAuth - enables uploading auth-schema validation
+ * @param api - runtime API promise
+ * @param account - KeyringPair instance
+ *
+ * @returns false (disabled validation) or validation options.
+ */
+function setupUploadingValidation(
+  enableUploadingAuth: boolean,
+  api: ApiPromise,
+  account: KeyringPair
+): boolean | ValidateSecurityOpts {
+  if (enableUploadingAuth) {
+    const opts = {
+      handlers: {
+        UploadAuth: validateUpload(api, account),
+      },
+    }
+
+    return opts
+  }
+
+  return false
+}
+
 // Defines a signature for an upload validation function.
 type ValidateUploadFunction = (
   req: express.Request,

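With the flat parameter list replaced by `AppConfig`, starting the server looks roughly like the sketch below. All literal values are placeholders; in the PR they come from the `server` command's flags, and `api`/`account` are assumed to be constructed elsewhere.

```ts
import { ApiPromise } from '@polkadot/api'
import { KeyringPair } from '@polkadot/keyring/types'
import { createApp, AppConfig } from './services/webApi/app'

async function startServer(api: ApiPromise, account: KeyringPair): Promise<void> {
  const config: AppConfig = {
    api,
    account,
    workerId: 1, // placeholder worker ID
    uploadsDir: '/data/uploads', // placeholder path
    tempDirName: 'temp',
    process: { version: '2.0.0', userAgent: 'storage-node' },
    queryNodeUrl: 'http://localhost:8081/graphql', // placeholder endpoint
    enableUploadingAuth: false,
    maxFileSize: 10 * 1024 * 1024 * 1024, // 10 GB, matching the sync timeout assumption
  }

  const app = await createApp(config)
  app.listen(3333, () => console.log('Storage node listening on port 3333'))
}
```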
+ 161 - 0
storage-node-v2/src/services/webApi/controllers/common.ts

@@ -0,0 +1,161 @@
+import * as express from 'express'
+import { CLIError } from '@oclif/errors'
+import { ExtrinsicFailedError } from '../../runtime/api'
+import { BagIdValidationError } from '../../helpers/bagTypes'
+
+/**
+ * Dedicated error for the web api requests.
+ */
+export class WebApiError extends CLIError {
+  httpStatusCode: number
+
+  constructor(err: string, httpStatusCode: number) {
+    super(err)
+
+    this.httpStatusCode = httpStatusCode
+  }
+}
+
+/**
+ * Dedicated server error for the web api requests.
+ */
+export class ServerError extends WebApiError {
+  constructor(err: string) {
+    super(err, 500)
+  }
+}
+
+/**
+ * Returns a directory for file uploading from the response.
+ *
+ * @remarks
+ * This is a helper function. It parses the response object for a variable and
+ * throws an error on failure.
+ */
+export function getUploadsDir(res: express.Response): string {
+  if (res.locals.uploadsDir) {
+    return res.locals.uploadsDir
+  }
+
+  throw new ServerError('No upload directory path loaded.')
+}
+
+/**
+ * Returns a directory for temporary file uploading from the response.
+ *
+ * @remarks
+ * This is a helper function. It parses the response object for a variable and
+ * throws an error on failure.
+ */
+export function getTempFileUploadingDir(res: express.Response): string {
+  if (res.locals.tempFileUploadingDir) {
+    return res.locals.tempFileUploadingDir
+  }
+
+  throw new ServerError('No temporary uploading directory path loaded.')
+}
+
+/**
+ * Returns worker ID from the response.
+ *
+ * @remarks
+ * This is a helper function. It parses the response object for a variable and
+ * throws an error on failure.
+ */
+export function getWorkerId(res: express.Response): number {
+  if (res.locals.workerId || res.locals.workerId === 0) {
+    return res.locals.workerId
+  }
+
+  throw new ServerError('No Joystream worker ID loaded.')
+}
+
+/**
+ * Returns the QueryNode URL from the starting parameters.
+ *
+ * @remarks
+ * This is a helper function. It parses the response object for a variable and
+ * throws an error on failure.
+ */
+export function getQueryNodeUrl(res: express.Response): string {
+  if (res.locals.queryNodeUrl) {
+    return res.locals.queryNodeUrl
+  }
+
+  throw new ServerError('No Query Node URL loaded.')
+}
+
+/**
+ * Returns a command config.
+ *
+ * @remarks
+ * This is a helper function. It parses the response object for a variable and
+ * throws an error on failure.
+ */
+export function getCommandConfig(res: express.Response): {
+  version: string
+  userAgent: string
+} {
+  if (res.locals.config) {
+    return res.locals.config
+  }
+
+  throw new ServerError('Cannot load command config.')
+}
+
+/**
+ * Handles errors and sends a response.
+ *
+ * @param res - Response instance
+ * @param err - error
+ * @param errorType - defines request type
+ * @returns void promise.
+ */
+export function sendResponseWithError(res: express.Response, err: Error, errorType: string): void {
+  const message = isNofileError(err) ? `File not found.` : err.toString()
+
+  res.status(getHttpStatusCodeByError(err)).json({
+    type: errorType,
+    message,
+  })
+}
+
+/**
+ * Checks the error for 'no-file' error (ENOENT).
+ *
+ * @param err - error
+ * @returns true when error code contains 'ENOENT'.
+ */
+function isNofileError(err: Error): boolean {
+  return err.toString().includes('ENOENT')
+}
+
+/**
+ * Get the status code by error.
+ *
+ * @param err - error
+ * @returns HTTP status code
+ */
+export function getHttpStatusCodeByError(err: Error): number {
+  if (isNofileError(err)) {
+    return 404
+  }
+
+  if (err instanceof ExtrinsicFailedError) {
+    return 400
+  }
+
+  if (err instanceof WebApiError) {
+    return err.httpStatusCode
+  }
+
+  if (err instanceof CLIError) {
+    return 400
+  }
+
+  if (err instanceof BagIdValidationError) {
+    return 400
+  }
+
+  return 500
+}

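Extracting these helpers into `common.ts` lets every controller share the same error flow: throw a `WebApiError` (or a subclass) carrying an HTTP status, and let `sendResponseWithError` translate it. A hypothetical controller, not part of the PR, showing the intended shape:

```ts
import * as express from 'express'
import { WebApiError, sendResponseWithError } from './common'

export async function getObject(req: express.Request, res: express.Response): Promise<void> {
  try {
    const id = req.params.id || ''
    if (id.length === 0) {
      // Mapped to HTTP 400 by getHttpStatusCodeByError().
      throw new WebApiError('No object ID provided.', 400)
    }

    res.status(200).json({ id })
  } catch (err) {
    // Sets the status via getHttpStatusCodeByError() and tags the error type.
    sendResponseWithError(res, err, 'object_request')
  }
}
```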
+ 56 - 125
storage-node-v2/src/services/webApi/controllers/publicApi.ts

@@ -1,5 +1,4 @@
 import { acceptPendingDataObjects } from '../../runtime/extrinsics'
-import { ExtrinsicFailedError } from '../../runtime/api'
 import {
   RequestData,
   UploadTokenRequest,
@@ -10,7 +9,6 @@ import {
 import { hashFile } from '../../../services/helpers/hashing'
 import { createNonce, getTokenExpirationTime } from '../../../services/helpers/tokenNonceKeeper'
 import { getFileInfo } from '../../../services/helpers/fileInfo'
-import { parseBagId } from '../../helpers/bagTypes'
 import { BagId } from '@joystream/types/storage'
 import logger from '../../../services/logger'
 import { KeyringPair } from '@polkadot/keyring/types'
@@ -19,32 +17,23 @@ import * as express from 'express'
 import fs from 'fs'
 import path from 'path'
 import send from 'send'
-import { CLIError } from '@oclif/errors'
 import { hexToString } from '@polkadot/util'
+import { parseBagId } from '../../helpers/bagTypes'
+import { timeout } from 'promise-timeout'
+import {
+  getUploadsDir,
+  getWorkerId,
+  getQueryNodeUrl,
+  WebApiError,
+  ServerError,
+  getCommandConfig,
+  sendResponseWithError,
+  getHttpStatusCodeByError,
+} from './common'
+import { getStorageBucketIdsByWorkerId } from '../../../services/sync/storageObligations'
+import { Membership } from '@joystream/types/members'
 const fsPromises = fs.promises
 
-/**
- * Dedicated error for the web api requests.
- */
-export class WebApiError extends CLIError {
-  httpStatusCode: number
-
-  constructor(err: string, httpStatusCode: number) {
-    super(err)
-
-    this.httpStatusCode = httpStatusCode
-  }
-}
-
-/**
- * Dedicated server error for the web api requests.
- */
-export class ServerError extends WebApiError {
-  constructor(err: string) {
-    super(err, 500)
-  }
-}
-
 /**
  * A public endpoint: serves files by CID.
  */
@@ -109,22 +98,26 @@ export async function uploadFile(req: express.Request, res: express.Response): P
     const fileObj = getFileObject(req)
     cleanupFileName = fileObj.path
 
-    const api = getApi(res)
-    await verifyFileMimeType(fileObj.path)
+    const queryNodeUrl = getQueryNodeUrl(res)
+    const workerId = getWorkerId(res)
 
-    const hash = await hashFile(fileObj.path)
-    const bagId = parseBagId(api, uploadRequest.bagId)
+    const [, hash] = await Promise.all([
+      verifyBucketId(queryNodeUrl, workerId, uploadRequest.storageBucketId),
+      hashFile(fileObj.path),
+    ])
 
+    const api = getApi(res)
+    const bagId = parseBagId(uploadRequest.bagId)
     const accepted = await verifyDataObjectInfo(api, bagId, uploadRequest.dataObjectId, fileObj.size, hash)
 
     // Prepare new file name
-    const newPath = fileObj.path.replace(fileObj.filename, hash)
+    const uploadsDir = getUploadsDir(res)
+    const newPath = path.join(uploadsDir, hash)
 
     // Overwrites existing file.
     await fsPromises.rename(fileObj.path, newPath)
     cleanupFileName = newPath
 
-    const workerId = getWorkerId(res)
     if (!accepted) {
       await acceptPendingDataObjects(api, bagId, getAccount(res), workerId, uploadRequest.storageBucketId, [
         uploadRequest.dataObjectId,
@@ -175,7 +168,7 @@ export async function authTokenForUploading(req: express.Request, res: express.R
  *
  * @remarks
  * This is a helper function. It parses the request object for a variable and
- * throws an error on failier.
+ * throws an error on failure.
  */
 function getFileObject(req: express.Request): Express.Multer.File {
   if (req.file) {
@@ -190,42 +183,12 @@ function getFileObject(req: express.Request): Express.Multer.File {
   throw new WebApiError('No file uploaded', 400)
 }
 
-/**
- * Returns worker ID from the response.
- *
- * @remarks
- * This is a helper function. It parses the response object for a variable and
- * throws an error on failure.
- */
-function getWorkerId(res: express.Response): number {
-  if (res.locals.workerId || res.locals.workerId === 0) {
-    return res.locals.workerId
-  }
-
-  throw new ServerError('No Joystream worker ID loaded.')
-}
-
-/**
- * Returns a directory for file uploading from the response.
- *
- * @remarks
- * This is a helper function. It parses the response object for a variable and
- * throws an error on failier.
- */
-function getUploadsDir(res: express.Response): string {
-  if (res.locals.uploadsDir) {
-    return res.locals.uploadsDir
-  }
-
-  throw new ServerError('No upload directory path loaded.')
-}
-
 /**
  * Returns a KeyPair instance from the response.
  *
  * @remarks
  * This is a helper function. It parses the response object for a variable and
- * throws an error on failier.
+ * throws an error on failure.
  */
 function getAccount(res: express.Response): KeyringPair {
   if (res.locals.storageProviderAccount) {
@@ -240,7 +203,7 @@ function getAccount(res: express.Response): KeyringPair {
  *
  * @remarks
  * This is a helper function. It parses the response object for a variable and
- * throws an error on failier.
+ * throws an error on failure.
  */
 function getApi(res: express.Response): ApiPromise {
   if (res.locals.api) {
@@ -255,7 +218,7 @@ function getApi(res: express.Response): ApiPromise {
  *
  * @remarks
  * This is a helper function. It parses the request object for a variable and
- * throws an error on failier.
+ * throws an error on failure.
  */
 function getCid(req: express.Request): string {
   const cid = req.params.cid || ''
@@ -271,7 +234,7 @@ function getCid(req: express.Request): string {
  *
  * @remarks
  * This is a helper function. It parses the request object for a variable and
- * throws an error on failier.
+ * throws an error on failure.
  */
 function getTokenRequest(req: express.Request): UploadTokenRequest {
   const tokenRequest = req.body as UploadTokenRequest
@@ -297,7 +260,10 @@ async function validateTokenRequest(api: ApiPromise, tokenRequest: UploadTokenRe
     throw new WebApiError('Invalid upload token request signature.', 401)
   }
 
-  const membership = await api.query.members.membershipById(tokenRequest.data.memberId)
+  const membershipPromise = api.query.members.membershipById(tokenRequest.data.memberId)
+
+  const membership = (await timeout(membershipPromise, 5000)) as Membership
+
   if (membership.controller_account.toString() !== tokenRequest.data.accountId) {
     throw new WebApiError(`Provided controller account and member id don't match.`, 401)
   }
@@ -359,73 +325,38 @@ async function cleanupFileOnError(cleanupFileName: string, error: string): Promi
 }
 
 /**
- * Verifies the mime type of the file by its content. It throws an exception
- * if the mime type differs from allowed list ('image/', 'video/', 'audio/').
- *
- * @param filePath - file path to detect mime types
- * @param error - external error
- * @returns void promise.
+ * A public endpoint: returns the server version.
  */
-async function verifyFileMimeType(filePath: string): Promise<void> {
-  const allowedMimeTypes = ['image/', 'video/', 'audio/']
-
-  const fileInfo = await getFileInfo(filePath)
-  const correctMimeType = allowedMimeTypes.some((allowedType) => fileInfo.mimeType.startsWith(allowedType))
+export async function getVersion(req: express.Request, res: express.Response): Promise<void> {
+  try {
+    const config = getCommandConfig(res)
 
-  if (!correctMimeType) {
-    throw new WebApiError(`Incorrect mime type detected: ${fileInfo.mimeType}`, 400)
+    // Copy from an object, because the actual object could contain more data.
+    res.status(200).json({
+      version: config.version,
+      userAgent: config.userAgent,
+    })
+  } catch (err) {
+    res.status(500).json({
+      type: 'version',
+      message: err.toString(),
+    })
   }
 }
 
 /**
- * Handles errors and sends a response.
+ * Validates the storage bucket ID obligations for the worker (storage provider).
+ * It throws an error when storage bucket doesn't belong to the worker.
  *
- * @param res - Response instance
- * @param err - error
- * @param errorType - defines request type
+ * @param queryNodeUrl - Query Node URL
+ * @param workerId - worker (storage provider) ID
+ * @param bucketId - storage bucket ID
  * @returns void promise.
  */
-function sendResponseWithError(res: express.Response, err: Error, errorType: string): void {
-  const message = isNofileError(err) ? `File not found.` : err.toString()
+async function verifyBucketId(queryNodeUrl: string, workerId: number, bucketId: number): Promise<void> {
+  const bucketIds = await getStorageBucketIdsByWorkerId(queryNodeUrl, workerId)
 
-  res.status(getHttpStatusCodeByError(err)).json({
-    type: errorType,
-    message,
-  })
-}
-
-/**
- * Checks the error for 'no-file' error (ENOENT).
- *
- * @param err - error
- * @returns true when error code contains 'ENOENT'.
- */
-function isNofileError(err: Error): boolean {
-  return err.toString().includes('ENOENT')
-}
-
-/**
- * Get the status code by error.
- *
- * @param err - error
- * @returns HTTP status code
- */
-function getHttpStatusCodeByError(err: Error): number {
-  if (isNofileError(err)) {
-    return 404
-  }
-
-  if (err instanceof ExtrinsicFailedError) {
-    return 400
-  }
-
-  if (err instanceof WebApiError) {
-    return err.httpStatusCode
-  }
-
-  if (err instanceof CLIError) {
-    return 400
+  if (!bucketIds.includes(bucketId.toString())) {
+    throw new WebApiError('Incorrect storage bucket ID.', 400)
   }
-
-  return 500
 }

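One behavioral change above deserves a note: the membership query is now wrapped in `timeout()` from `promise-timeout`, so a stalled runtime query fails the request after 5 seconds instead of hanging it. A standalone sketch of that pattern; the helper name and error message are illustrative:

```ts
import { timeout, TimeoutError } from 'promise-timeout'

// Rejects after `ms` milliseconds if the wrapped promise hasn't settled,
// e.g. a runtime storage query against an unresponsive node.
async function queryWithTimeout<T>(promise: Promise<T>, ms: number): Promise<T> {
  try {
    return await timeout(promise, ms)
  } catch (err) {
    if (err instanceof TimeoutError) {
      throw new Error(`Query did not complete within ${ms} ms`)
    }
    throw err
  }
}
```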
+ 182 - 0
storage-node-v2/src/services/webApi/controllers/stateApi.ts

@@ -0,0 +1,182 @@
+import { getLocalDataObjects } from '../../../services/sync/synchronizer'
+import * as express from 'express'
+import _ from 'lodash'
+import { getDataObjectIDsByBagId } from '../../sync/storageObligations'
+import {
+  getUploadsDir,
+  getTempFileUploadingDir,
+  getQueryNodeUrl,
+  WebApiError,
+  getCommandConfig,
+  sendResponseWithError,
+} from './common'
+import fastFolderSize from 'fast-folder-size'
+import { promisify } from 'util'
+import fs from 'fs'
+import path from 'path'
+import NodeCache from 'node-cache'
+const fsPromises = fs.promises
+
+// Expiration period for the local cache (NodeCache stdTTL is in seconds).
+const ExpirationPeriod = 30
+
+// Local in-memory cache for data
+const dataCache = new NodeCache({
+  stdTTL: ExpirationPeriod,
+  deleteOnExpire: true,
+})
+
+/**
+ * A public endpoint: returns all local data objects.
+ */
+export async function getAllLocalDataObjects(req: express.Request, res: express.Response): Promise<void> {
+  try {
+    const uploadsDir = getUploadsDir(res)
+    const tempFileDir = getTempFileUploadingDir(res)
+
+    const cids = await getCachedLocalDataObjects(uploadsDir, tempFileDir)
+
+    res.status(200).json(cids)
+  } catch (err) {
+    sendResponseWithError(res, err, 'all_data_objects')
+  }
+}
+
+/**
+ * A public endpoint: serves local data uploading directory stats.
+ *
+ *  @return total size and count of the data objects.
+ */
+export async function getLocalDataStats(req: express.Request, res: express.Response): Promise<void> {
+  try {
+    const uploadsDir = getUploadsDir(res)
+    const tempFileDir = getTempFileUploadingDir(res)
+    const fastFolderSizeAsync = promisify(fastFolderSize)
+
+    const tempFolderExists = fs.existsSync(tempFileDir)
+    const statsPromise = fsPromises.readdir(uploadsDir)
+    const sizePromise = fastFolderSizeAsync(uploadsDir)
+
+    const [stats, totalSize] = await Promise.all([statsPromise, sizePromise])
+
+    let objectNumber = stats.length
+    let tempDownloads = 0
+    let tempDirSize = 0
+    if (tempFolderExists) {
+      if (objectNumber > 0) {
+        objectNumber--
+      }
+
+      const tempDirStatsPromise = fsPromises.readdir(tempFileDir)
+      const tempDirSizePromise = fastFolderSizeAsync(tempFileDir)
+
+      const [tempDirStats, tempSize] = await Promise.all([tempDirStatsPromise, tempDirSizePromise])
+
+      tempDirSize = tempSize ?? 0
+      tempDownloads = tempDirStats.length
+    }
+
+    res.status(200).json({
+      objectNumber,
+      totalSize,
+      tempDownloads,
+      tempDirSize,
+    })
+  } catch (err) {
+    sendResponseWithError(res, err, 'local_data_stats')
+  }
+}
+
+/**
+ * A public endpoint: returns local data objects for the bag.
+ */
+export async function getLocalDataObjectsByBagId(req: express.Request, res: express.Response): Promise<void> {
+  try {
+    const uploadsDir = getUploadsDir(res)
+    const tempFileDir = getTempFileUploadingDir(res)
+
+    const queryNodeUrl = getQueryNodeUrl(res)
+    const bagId = getBagId(req)
+
+    const [cids, requiredCids] = await Promise.all([
+      getCachedLocalDataObjects(uploadsDir, tempFileDir),
+      getCachedDataObjectsObligations(queryNodeUrl, bagId),
+    ])
+
+    const localDataForBag = _.intersection(cids, requiredCids)
+
+    res.status(200).json(localDataForBag)
+  } catch (err) {
+    sendResponseWithError(res, err, 'data_objects_by_bag')
+  }
+}
+
+/**
+ * A public endpoint: returns the server version.
+ */
+export async function getVersion(req: express.Request, res: express.Response): Promise<void> {
+  try {
+    const config = getCommandConfig(res)
+
+    // Copy from an object, because the actual object could contain more data.
+    res.status(200).json({
+      version: config.version,
+      userAgent: config.userAgent,
+    })
+  } catch (err) {
+    sendResponseWithError(res, err, 'version')
+  }
+}
+
+/**
+ * Returns Bag ID from the request.
+ *
+ * @remarks
+ * This is a helper function. It parses the request object for a variable and
+ * throws an error on failure.
+ */
+function getBagId(req: express.Request): string {
+  const bagId = req.params.bagId || ''
+  if (bagId.length > 0) {
+    return bagId
+  }
+
+  throw new WebApiError('No bagId provided.', 400)
+}
+
+/**
+ * Returns cached data object IDs from the local data storage. Data could be
+ * stale until the cache expires.
+ */
+async function getCachedLocalDataObjects(uploadsDir: string, tempDirName: string): Promise<string[]> {
+  const entryName = 'local_data_object'
+
+  if (!dataCache.has(entryName)) {
+    let data = await getLocalDataObjects(uploadsDir)
+
+    // Filter out the temporary directory name.
+    const tempDirectoryName = path.parse(tempDirName).name
+    data = data.filter((cid) => cid !== tempDirectoryName)
+
+    dataCache.set(entryName, data)
+  }
+  return dataCache.get(entryName) ?? []
+}
+
+/**
+ * Returns cached data object ID obligations for the bag from the Query Node.
+ * Data could be stale until the cache expires.
+ */
+async function getCachedDataObjectsObligations(queryNodeUrl: string, bagId: string): Promise<string[]> {
+  const entryName = 'data_object_obligations'
+
+  if (!dataCache.has(entryName)) {
+    const data = await getDataObjectIDsByBagId(queryNodeUrl, bagId)
+
+    dataCache.set(entryName, data)
+  }
+
+  return dataCache.get(entryName) ?? []
+}

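Both cache helpers above follow the same check-populate-read sequence against a shared `NodeCache`. A generic sketch of that cache-through idiom (the key and loader are illustrative); note that `stdTTL` is expressed in seconds:

```ts
import NodeCache from 'node-cache'

// Entries live for 30 seconds (stdTTL is in seconds) and are evicted on expiry.
const cache = new NodeCache({ stdTTL: 30, deleteOnExpire: true })

// Computes the value at most once per TTL window; callers within the window
// get the cached copy.
async function cached<T>(key: string, loader: () => Promise<T[]>): Promise<T[]> {
  if (!cache.has(key)) {
    cache.set(key, await loader())
  }

  return cache.get<T[]>(key) ?? []
}
```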
+ 1 - 0
storage-node-v2/tsconfig.json

@@ -6,6 +6,7 @@
     "outDir": "lib",
     "rootDir": "src",
     "strict": true,
+    "strictNullChecks": true,
     "target": "es2017",
     "skipLibCheck": true,
     "baseUrl": ".",

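The `strictNullChecks` flag enabled here is what forces the `?? []` fallbacks in the cache helpers above: under the flag, values typed `T | undefined` cannot be used as plain `T`, so the compiler rejects code that ignores the `undefined` case. A tiny illustration:

```ts
function firstItem(items: string[] | undefined): string {
  // Without strictNullChecks, `items[0]` compiles and can crash at runtime
  // when items is undefined; with the flag on, the guard below is mandatory.
  if (items === undefined || items.length === 0) {
    return ''
  }

  return items[0]
}
```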
+ 5 - 5
types/augment-codec/augment-api-tx.ts

@@ -677,12 +677,12 @@ declare module '@polkadot/api/types/submittable' {
        * Create 'Begin review working group leader applications' proposal type.
        * This proposal uses `begin_applicant_review()` extrinsic from the Joystream `working group` module.
        **/
-      createBeginReviewWorkingGroupLeaderApplicationsProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, openingId: OpeningId | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, OpeningId, WorkingGroup]>;
+      createBeginReviewWorkingGroupLeaderApplicationsProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, openingId: OpeningId | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Reserved' | 'Forum' | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, OpeningId, WorkingGroup]>;
       /**
        * Create 'decrease working group leader stake' proposal type.
        * This proposal uses `decrease_stake()` extrinsic from the `working-group`  module.
        **/
-      createDecreaseWorkingGroupLeaderStakeProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, workerId: WorkerId | AnyNumber | Uint8Array, decreasingStake: BalanceOf | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, WorkerId, BalanceOf, WorkingGroup]>;
+      createDecreaseWorkingGroupLeaderStakeProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, workerId: WorkerId | AnyNumber | Uint8Array, decreasingStake: BalanceOf | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Reserved' | 'Forum' | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, WorkerId, BalanceOf, WorkingGroup]>;
       /**
        * Create 'Fill working group leader opening' proposal type.
        * This proposal uses `fill_opening()` extrinsic from the Joystream `working group` module.
@@ -707,17 +707,17 @@ declare module '@polkadot/api/types/submittable' {
        * Create 'set working group leader reward' proposal type.
        * This proposal uses `update_reward_amount()` extrinsic from the `working-group`  module.
        **/
-      createSetWorkingGroupLeaderRewardProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, workerId: WorkerId | AnyNumber | Uint8Array, rewardAmount: BalanceOfMint | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, WorkerId, BalanceOfMint, WorkingGroup]>;
+      createSetWorkingGroupLeaderRewardProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, workerId: WorkerId | AnyNumber | Uint8Array, rewardAmount: BalanceOfMint | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Reserved' | 'Forum' | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, WorkerId, BalanceOfMint, WorkingGroup]>;
       /**
        * Create 'Set working group mint capacity' proposal type.
        * This proposal uses `set_mint_capacity()` extrinsic from the `working-group`  module.
        **/
-      createSetWorkingGroupMintCapacityProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, mintBalance: BalanceOfMint | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, BalanceOfMint, WorkingGroup]>;
+      createSetWorkingGroupMintCapacityProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, mintBalance: BalanceOfMint | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Reserved' | 'Forum' | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, BalanceOfMint, WorkingGroup]>;
       /**
        * Create 'slash working group leader stake' proposal type.
        * This proposal uses `slash_stake()` extrinsic from the `working-group`  module.
        **/
-      createSlashWorkingGroupLeaderStakeProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, workerId: WorkerId | AnyNumber | Uint8Array, slashingStake: BalanceOf | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, WorkerId, BalanceOf, WorkingGroup]>;
+      createSlashWorkingGroupLeaderStakeProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, workerId: WorkerId | AnyNumber | Uint8Array, slashingStake: BalanceOf | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Reserved' | 'Forum' | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, WorkerId, BalanceOf, WorkingGroup]>;
       /**
        * Create 'Spending' proposal type.
        * This proposal uses `spend_from_council_mint()` extrinsic from the `governance::council`  module.

+ 2 - 0
types/augment/all/defs.json

@@ -82,6 +82,8 @@
     },
     "WorkingGroup": {
         "_enum": [
+            "Reserved",
+            "Forum",
             "Storage",
             "Content",
             "Operations",

+ 2 - 0
types/augment/all/types.ts

@@ -1404,6 +1404,8 @@ export interface WorkerOf extends Struct {
 
 /** @name WorkingGroup */
 export interface WorkingGroup extends Enum {
+  readonly isReserved: boolean;
+  readonly isForum: boolean;
   readonly isStorage: boolean;
   readonly isContent: boolean;
   readonly isOperations: boolean;

+ 5 - 5
types/augment/augment-api-tx.ts

@@ -677,12 +677,12 @@ declare module '@polkadot/api/types/submittable' {
        * Create 'Begin review working group leader applications' proposal type.
        * This proposal uses `begin_applicant_review()` extrinsic from the Joystream `working group` module.
        **/
-      createBeginReviewWorkingGroupLeaderApplicationsProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, openingId: OpeningId | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, OpeningId, WorkingGroup]>;
+      createBeginReviewWorkingGroupLeaderApplicationsProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, openingId: OpeningId | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Reserved' | 'Forum' | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, OpeningId, WorkingGroup]>;
       /**
        * Create 'decrease working group leader stake' proposal type.
        * This proposal uses `decrease_stake()` extrinsic from the `working-group`  module.
        **/
-      createDecreaseWorkingGroupLeaderStakeProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, workerId: WorkerId | AnyNumber | Uint8Array, decreasingStake: BalanceOf | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, WorkerId, BalanceOf, WorkingGroup]>;
+      createDecreaseWorkingGroupLeaderStakeProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, workerId: WorkerId | AnyNumber | Uint8Array, decreasingStake: BalanceOf | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Reserved' | 'Forum' | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, WorkerId, BalanceOf, WorkingGroup]>;
       /**
        * Create 'Fill working group leader opening' proposal type.
        * This proposal uses `fill_opening()` extrinsic from the Joystream `working group` module.
@@ -707,17 +707,17 @@ declare module '@polkadot/api/types/submittable' {
        * Create 'set working group leader reward' proposal type.
        * This proposal uses `update_reward_amount()` extrinsic from the `working-group`  module.
        **/
-      createSetWorkingGroupLeaderRewardProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, workerId: WorkerId | AnyNumber | Uint8Array, rewardAmount: BalanceOfMint | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, WorkerId, BalanceOfMint, WorkingGroup]>;
+      createSetWorkingGroupLeaderRewardProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, workerId: WorkerId | AnyNumber | Uint8Array, rewardAmount: BalanceOfMint | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Reserved' | 'Forum' | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, WorkerId, BalanceOfMint, WorkingGroup]>;
       /**
        * Create 'Set working group mint capacity' proposal type.
        * This proposal uses `set_mint_capacity()` extrinsic from the `working-group`  module.
        **/
-      createSetWorkingGroupMintCapacityProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, mintBalance: BalanceOfMint | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, BalanceOfMint, WorkingGroup]>;
+      createSetWorkingGroupMintCapacityProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, mintBalance: BalanceOfMint | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Reserved' | 'Forum' | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, BalanceOfMint, WorkingGroup]>;
       /**
        * Create 'slash working group leader stake' proposal type.
        * This proposal uses `slash_stake()` extrinsic from the `working-group`  module.
        **/
-      createSlashWorkingGroupLeaderStakeProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, workerId: WorkerId | AnyNumber | Uint8Array, slashingStake: BalanceOf | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, WorkerId, BalanceOf, WorkingGroup]>;
+      createSlashWorkingGroupLeaderStakeProposal: AugmentedSubmittable<(memberId: MemberId | AnyNumber | Uint8Array, title: Bytes | string | Uint8Array, description: Bytes | string | Uint8Array, stakeBalance: Option<BalanceOf> | null | object | string | Uint8Array, workerId: WorkerId | AnyNumber | Uint8Array, slashingStake: BalanceOf | AnyNumber | Uint8Array, workingGroup: WorkingGroup | 'Reserved' | 'Forum' | 'Storage' | 'Content' | 'Operations' | 'Gateway' | number | Uint8Array) => SubmittableExtrinsic<ApiType>, [MemberId, Bytes, Bytes, Option<BalanceOf>, WorkerId, BalanceOf, WorkingGroup]>;
       /**
        * Create 'Spending' proposal type.
        * This proposal uses `spend_from_council_mint()` extrinsic from the `governance::council`  module.

+ 2 - 1
types/src/common.ts

@@ -107,7 +107,8 @@ export class InputValidationLengthConstraint
 }
 
 export const WorkingGroupDef = {
-  // TODO: Forum: Null,
+  Reserved: Null,
+  Forum: Null,
   Storage: Null,
   Content: Null,
   Operations: Null,

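Note that `Reserved` and `Forum` are prepended, not appended: for a Substrate `_enum`, a variant's position is its SCALE discriminant, which is why defs.json, both augment files, and `WorkingGroupDef` all change together in this PR. A sketch restating the registry with the resulting indices made explicit (assuming `Null` from `@polkadot/types`, which the surrounding file already imports):

```ts
import { Null } from '@polkadot/types'

// Variant order defines the on-chain discriminant:
// Reserved = 0, Forum = 1, Storage = 2, Content = 3, Operations = 4, Gateway = 5.
// Reordering any variant would silently decode existing chain state as the
// wrong working group.
export const WorkingGroupDef = {
  Reserved: Null,
  Forum: Null,
  Storage: Null,
  Content: Null,
  Operations: Null,
  Gateway: Null,
}

export type WorkingGroupKey = keyof typeof WorkingGroupDef
```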
File diff suppressed because it is too large
+ 869 - 2
yarn.lock


Some files were not shown because too many files changed in this diff