
Merge branch 'giza' into giza_feature_NewContentFeaturesAndNFT

ignazio 3 years ago
parent commit f62058baba
93 changed files with 8261 additions and 382 deletions
  1. +1 -1  .github/workflows/run-network-tests.yml
  2. +4 -4  Cargo.lock
  3. +0 -0  chain-metadata.json
  4. +2 -2  cli/package.json
  5. +5 -0  devops/kubernetes/orion/.gitignore
  6. +25 -0  devops/kubernetes/orion/Pulumi.yaml
  7. +119 -0  devops/kubernetes/orion/README.md
  8. +154 -0  devops/kubernetes/orion/index.ts
  9. +100 -0  devops/kubernetes/orion/mongo.ts
  10. +13 -0  devops/kubernetes/orion/package.json
  11. +18 -0  devops/kubernetes/orion/tsconfig.json
  12. +1 -0  distributor-node/config.yml
  13. +1 -1  distributor-node/package.json
  14. +1 -1  node/Cargo.toml
  15. +2 -2  package.json
  16. +8 -0  query-node/mappings/common.ts
  17. +6 -11  query-node/mappings/content/utils.ts
  18. +1 -1  query-node/mappings/package.json
  19. +3 -2  query-node/mappings/storage/index.ts
  20. +47 -45  query-node/mappings/storage/metadata.ts
  21. +36 -0  reproduce-giza-issue.sh
  22. +1 -1  runtime-modules/storage/Cargo.toml
  23. +74 -6  runtime-modules/storage/src/lib.rs
  24. +202 -0  runtime-modules/storage/src/tests/mod.rs
  25. +1 -1  runtime/Cargo.toml
  26. +1 -1  runtime/src/lib.rs
  27. +5 -4  start.sh
  28. +2 -2  storage-node/package.json
  29. +1 -1  tests/network-tests/.env
  30. +32 -0  tests/network-tests/codegen.yml
  31. +7 -0  tests/network-tests/openapitools.json
  32. +18 -3  tests/network-tests/package.json
  33. +48 -0  tests/network-tests/run-full-tests.sh
  34. +1 -1  tests/network-tests/run-test-scenario.sh
  35. +1 -1  tests/network-tests/run-tests.sh
  36. +145 -142  tests/network-tests/src/Api.ts
  37. +102 -65  tests/network-tests/src/QueryNodeApi.ts
  38. +26 -10  tests/network-tests/src/Scenario.ts
  39. +27 -0  tests/network-tests/src/apis/distributorNode/.openapi-generator-ignore
  40. +5 -0  tests/network-tests/src/apis/distributorNode/.openapi-generator/FILES
  41. +1 -0  tests/network-tests/src/apis/distributorNode/.openapi-generator/VERSION
  42. +410 -0  tests/network-tests/src/apis/distributorNode/api.ts
  43. +74 -0  tests/network-tests/src/apis/distributorNode/base.ts
  44. +150 -0  tests/network-tests/src/apis/distributorNode/common.ts
  45. +108 -0  tests/network-tests/src/apis/distributorNode/configuration.ts
  46. +16 -0  tests/network-tests/src/apis/distributorNode/index.ts
  47. +27 -0  tests/network-tests/src/apis/storageNode/.openapi-generator-ignore
  48. +5 -0  tests/network-tests/src/apis/storageNode/.openapi-generator/FILES
  49. +1 -0  tests/network-tests/src/apis/storageNode/.openapi-generator/VERSION
  50. +738 -0  tests/network-tests/src/apis/storageNode/api.ts
  51. +74 -0  tests/network-tests/src/apis/storageNode/base.ts
  52. +150 -0  tests/network-tests/src/apis/storageNode/common.ts
  53. +108 -0  tests/network-tests/src/apis/storageNode/configuration.ts
  54. +16 -0  tests/network-tests/src/apis/storageNode/index.ts
  55. +54 -0  tests/network-tests/src/cli/base.ts
  56. +48 -0  tests/network-tests/src/cli/distributor.ts
  57. +47 -0  tests/network-tests/src/cli/joystream.ts
  58. +66 -0  tests/network-tests/src/cli/storage.ts
  59. +126 -0  tests/network-tests/src/cli/utils.ts
  60. +1 -1  tests/network-tests/src/fixtures/membershipModule.ts
  61. +13 -18  tests/network-tests/src/fixtures/proposalsModule.ts
  62. +115 -15  tests/network-tests/src/fixtures/workingGroupModule.ts
  63. +71 -0  tests/network-tests/src/flows/clis/createChannel.ts
  64. +39 -0  tests/network-tests/src/flows/clis/initDistributionBucket.ts
  65. +53 -0  tests/network-tests/src/flows/clis/initStorageBucket.ts
  66. +23 -0  tests/network-tests/src/flows/membership/makeAliceMember.ts
  67. +6 -6  tests/network-tests/src/flows/proposals/manageLeaderRole.ts
  68. +3 -0  tests/network-tests/src/flows/proposals/workingGroupMintCapacityProposal.ts
  69. +227 -0  tests/network-tests/src/flows/storagev2/initDistribution.ts
  70. +159 -0  tests/network-tests/src/flows/storagev2/initStorage.ts
  71. +3 -0  tests/network-tests/src/flows/workingGroup/manageWorkerAsWorker.ts
  72. +3 -0  tests/network-tests/src/flows/workingGroup/workerPayout.ts
  73. +158 -0  tests/network-tests/src/graphql/generated/queries.ts
  74. +3715 -0  tests/network-tests/src/graphql/generated/schema.ts
  75. +74 -0  tests/network-tests/src/graphql/queries/storagev2.graphql
  76. +18 -24  tests/network-tests/src/scenarios/combined.ts
  77. +16 -0  tests/network-tests/src/scenarios/giza-issue-reproduction-setup.ts
  78. +16 -0  tests/network-tests/src/scenarios/init-storage-and-distribution.ts
  79. +28 -0  tests/network-tests/src/scenarios/proposals.ts
  80. +7 -0  tests/network-tests/src/scenarios/setup-new-chain.ts
  81. +2 -2  tests/network-tests/src/sender.ts
  82. +19 -0  tests/network-tests/src/utils.ts
  83. +2 -1  types/augment/all/defs.json
  84. +1 -0  types/augment/all/types.ts
  85. +2 -2  types/package.json
  86. +2 -0  types/src/storage.ts
  87. +1 -1  utils/api-scripts/package.json
  88. +1 -1  utils/chain-spec-builder/Cargo.toml
  89. +1 -1  utils/migration-scripts/package.json
  90. +1 -1  utils/migration-scripts/src/logging.ts
  91. +2 -0  utils/migration-scripts/src/sumer-giza/AssetsManager.ts
  92. +1 -0  utils/migration-scripts/src/sumer-giza/ContentMigration.ts
  93. +13 -1  yarn.lock

+ 1 - 1
.github/workflows/run-network-tests.yml

@@ -140,7 +140,7 @@ jobs:
       - name: Ensure tests are runnable
         run: yarn workspace network-tests build
       - name: Execute network tests
-        run: tests/network-tests/run-tests.sh full
+        run: tests/network-tests/run-full-tests.sh
 
   new_chain_setup:
     name: Initialize new chain

+ 4 - 4
Cargo.lock

@@ -731,7 +731,7 @@ dependencies = [
 
 [[package]]
 name = "chain-spec-builder"
-version = "3.3.0"
+version = "3.3.1"
 dependencies = [
  "ansi_term 0.12.1",
  "enum-utils",
@@ -2332,7 +2332,7 @@ dependencies = [
 
 [[package]]
 name = "joystream-node"
-version = "5.13.0"
+version = "5.14.0"
 dependencies = [
  "frame-benchmarking",
  "frame-benchmarking-cli",
@@ -2393,7 +2393,7 @@ dependencies = [
 
 [[package]]
 name = "joystream-node-runtime"
-version = "9.13.0"
+version = "9.14.0"
 dependencies = [
  "frame-benchmarking",
  "frame-executive",
@@ -4207,7 +4207,7 @@ dependencies = [
 
 [[package]]
 name = "pallet-storage"
-version = "4.0.0"
+version = "4.0.1"
 dependencies = [
  "frame-benchmarking",
  "frame-support",

File diff suppressed because it is too large
+ 0 - 0
chain-metadata.json


+ 2 - 2
cli/package.json

@@ -11,7 +11,7 @@
     "@apidevtools/json-schema-ref-parser": "^9.0.6",
     "@ffprobe-installer/ffprobe": "^1.1.0",
     "@joystream/metadata-protobuf": "^1.0.0",
-    "@joystream/types": "^0.17.1",
+    "@joystream/types": "^0.17.2",
     "@oclif/command": "^1.5.19",
     "@oclif/config": "^1.14.0",
     "@oclif/plugin-autocomplete": "^0.2.0",
@@ -143,6 +143,6 @@
   "types": "lib/index.d.ts",
   "volta": {
     "node": "14.16.1",
-    "yarn": "1.22.4"
+    "yarn": "1.22.15"
   }
 }

+ 5 - 0
devops/kubernetes/orion/.gitignore

@@ -0,0 +1,5 @@
+/bin/
+/node_modules/
+kubeconfig*
+package-lock.json
+Pulumi.*.yaml

+ 25 - 0
devops/kubernetes/orion/Pulumi.yaml

@@ -0,0 +1,25 @@
+name: orion
+runtime: nodejs
+description: A Pulumi program to deploy Orion service to Kubernetes
+template:
+  config:
+    aws:profile:
+      default: joystream-user
+    aws:region:
+      default: us-east-1
+    isMinikube:
+      description: Whether you are deploying to minikube
+      default: false
+    queryNodeEndpoint:
+      description: Full URL for Query node endpoint
+    isLoadBalancerReady:
+      description: Whether the load balancer service is ready and has been assigned an IP
+      default: false
+    storage:
+      description: Amount of storage in gigabytes for ipfs volume
+      default: 40
+    orionImage:
+      description: The Orion image to use for running the orion node
+      default: joystream/orion:latest
+    contentSecret:
+      description: Orion featured content secret

+ 119 - 0
devops/kubernetes/orion/README.md

@@ -0,0 +1,119 @@
+# Amazon Kubernetes Cluster: Orion
+
+Deploy Orion to a Kubernetes cluster
+
+## Deploying the App
+
+To deploy your infrastructure, follow the below steps.
+
+### Prerequisites
+
+1. [Install Pulumi](https://www.pulumi.com/docs/get-started/install/)
+1. [Install Node.js](https://nodejs.org/en/download/)
+1. Install a package manager for Node.js, such as [npm](https://www.npmjs.com/get-npm) or [Yarn](https://yarnpkg.com/en/docs/install).
+1. [Configure AWS Credentials](https://www.pulumi.com/docs/intro/cloud-providers/aws/setup/)
+1. Optional (for debugging): [Install kubectl](https://kubernetes.io/docs/tasks/tools/)
+
+### Steps
+
+After cloning this repo, from this working directory, run these commands:
+
+1. Install the required Node.js packages:
+
+   This installs the dependent packages [needed](https://www.pulumi.com/docs/intro/concepts/how-pulumi-works/) for our Pulumi program.
+
+   ```bash
+   $ npm install
+   ```
+
+1. Create a new stack, which is an isolated deployment target for this example:
+
+   This will initialize the Pulumi program in TypeScript.
+
+   ```bash
+   $ pulumi stack init
+   ```
+
+1. Set the required configuration variables in `Pulumi.<stack>.yaml`
+
+   ```bash
+   $ pulumi config set-all --plaintext queryNodeEndpoint='http://host.minikube.internal:8081/graphql' \
+    --plaintext isMinikube=true --plaintext orionImage='joystream/orion:latest' \
+    --plaintext contentSecret='password123' \
+    --plaintext aws:region=us-east-1 --plaintext aws:profile=joystream-user
+   ```
+
+   If you want to build the stack on AWS set the `isMinikube` config to `false`
+
+   ```bash
+   $ pulumi config set isMinikube false
+   ```
+
+   You can also set the `storage` config parameter if required. Check `Pulumi.yaml` file for additional parameters.
+
+1. Stand up the EKS cluster:
+
+   Running `pulumi up -y` will deploy the EKS cluster. Note that provisioning a
+   new EKS cluster takes 10-15 minutes.
+
+1. Once the stack is up and running, we will modify the Caddy config to get an SSL certificate for the load balancer
+
+   Modify the config variable `isLoadBalancerReady`
+
+   ```bash
+   $ pulumi config set isLoadBalancerReady true
+   ```
+
+   Run `pulumi up -y` to update the Caddy config
+
+1. Access the Kubernetes Cluster using `kubectl`
+
+   To access your new Kubernetes cluster using `kubectl`, we need to set up the
+   `kubeconfig` file and download `kubectl`. We can leverage the Pulumi
+   stack output in the CLI, as Pulumi facilitates exporting these objects for us.
+
+   ```bash
+   $ pulumi stack output kubeconfig --show-secrets > kubeconfig
+   $ export KUBECONFIG=$PWD/kubeconfig
+   $ kubectl get nodes
+   ```
+
+   We can also use the stack output to query the cluster for our newly created Deployment:
+
+   ```bash
+   $ kubectl get deployment $(pulumi stack output deploymentName) --namespace=$(pulumi stack output namespaceName)
+   $ kubectl get service $(pulumi stack output serviceName) --namespace=$(pulumi stack output namespaceName)
+   ```
+
+   To get logs
+
+   ```bash
+   $ kubectl config set-context --current --namespace=$(pulumi stack output namespaceName)
+   $ kubectl get pods
+   $ kubectl logs <PODNAME> --all-containers
+   ```
+
+   To run a command on a pod
+
+   ```bash
+   $ kubectl exec ${POD_NAME} -c ${CONTAINER_NAME} -- ${CMD} ${ARG1}
+   ```
+
+   To see the complete Pulumi stack output
+
+   ```bash
+   $ pulumi stack output
+   ```
+
+   To open an interactive shell in a container
+
+   ```bash
+   $ kubectl exec --stdin --tty <PODNAME> -c orion -- /bin/bash
+   ```
+
+1. Once you've finished experimenting, tear down your stack's resources by destroying and removing it:
+
+   ```bash
+   $ pulumi destroy --yes
+   $ pulumi stack rm --yes
+   ```

+ 154 - 0
devops/kubernetes/orion/index.ts

@@ -0,0 +1,154 @@
+import * as awsx from '@pulumi/awsx'
+import * as eks from '@pulumi/eks'
+import * as k8s from '@pulumi/kubernetes'
+import * as pulumi from '@pulumi/pulumi'
+import { CaddyServiceDeployment } from 'pulumi-common'
+import { MongoDBServiceDeployment } from './mongo'
+
+const awsConfig = new pulumi.Config('aws')
+const config = new pulumi.Config()
+
+const name = 'orion'
+
+const queryNodeHost = config.require('queryNodeEndpoint')
+const lbReady = config.get('isLoadBalancerReady') === 'true'
+const orionImage = config.get('orionImage') || `joystream/orion:latest`
+const contentSecret = config.require('contentSecret')
+const storage = parseInt(config.get('storage') || '40')
+const isMinikube = config.getBoolean('isMinikube')
+
+export let kubeconfig: pulumi.Output<any>
+let provider: k8s.Provider
+
+if (isMinikube) {
+  provider = new k8s.Provider('local', {})
+} else {
+  // Create a VPC for our cluster.
+  const vpc = new awsx.ec2.Vpc('orion-vpc', { numberOfAvailabilityZones: 2, numberOfNatGateways: 1 })
+
+  // Create an EKS cluster with the default configuration.
+  const cluster = new eks.Cluster('eksctl-orion-node', {
+    vpcId: vpc.id,
+    subnetIds: vpc.publicSubnetIds,
+    instanceType: 't2.medium',
+    providerCredentialOpts: {
+      profileName: awsConfig.get('profile'),
+    },
+  })
+  provider = cluster.provider
+
+  // Export the cluster's kubeconfig.
+  kubeconfig = cluster.kubeconfig
+}
+
+const resourceOptions = { provider: provider }
+
+// Create a Kubernetes Namespace
+const ns = new k8s.core.v1.Namespace(name, {}, resourceOptions)
+
+// Export the Namespace name
+export const namespaceName = ns.metadata.name
+
+const appLabels = { appClass: name }
+
+const mongoDb = new MongoDBServiceDeployment(
+  'mongo-db',
+  {
+    namespaceName: namespaceName,
+    storage: storage,
+  },
+  resourceOptions
+)
+
+// Create a Deployment
+const deployment = new k8s.apps.v1.Deployment(
+  name,
+  {
+    metadata: {
+      namespace: namespaceName,
+      labels: appLabels,
+    },
+    spec: {
+      replicas: 1,
+      selector: { matchLabels: appLabels },
+      template: {
+        metadata: {
+          labels: appLabels,
+        },
+        spec: {
+          containers: [
+            {
+              name: 'orion',
+              image: orionImage,
+              imagePullPolicy: 'IfNotPresent',
+              env: [
+                {
+                  name: 'ORION_PORT',
+                  value: '6116',
+                },
+                {
+                  name: 'ORION_MONGO_HOSTNAME',
+                  value: mongoDb.service.metadata.name,
+                },
+                {
+                  name: 'ORION_FEATURED_CONTENT_SECRET',
+                  value: contentSecret,
+                },
+                {
+                  name: 'ORION_QUERY_NODE_URL',
+                  value: queryNodeHost,
+                },
+              ],
+              ports: [{ containerPort: 6116 }],
+            },
+          ],
+        },
+      },
+    },
+  },
+  resourceOptions
+)
+
+// Create a LoadBalancer Service for the Deployment
+const service = new k8s.core.v1.Service(
+  name,
+  {
+    metadata: {
+      labels: appLabels,
+      namespace: namespaceName,
+      name: 'orion-node',
+    },
+    spec: {
+      type: isMinikube ? 'NodePort' : 'ClusterIP',
+      ports: [{ name: 'port-1', port: 6116 }],
+      selector: appLabels,
+    },
+  },
+  resourceOptions
+)
+
+// Export the Service name
+export const serviceName = service.metadata.name
+
+// Export the Deployment name
+export const deploymentName = deployment.metadata.name
+
+const caddyEndpoints = [
+  ` {
+    reverse_proxy orion-node:6116
+}`,
+]
+
+export let endpoint1: pulumi.Output<string> = pulumi.interpolate``
+export let endpoint2: pulumi.Output<string> = pulumi.interpolate``
+
+if (!isMinikube) {
+  const caddy = new CaddyServiceDeployment(
+    'caddy-proxy',
+    { lbReady, namespaceName: namespaceName, caddyEndpoints },
+    resourceOptions
+  )
+
+  endpoint1 = pulumi.interpolate`${caddy.primaryEndpoint}`
+  endpoint2 = pulumi.interpolate`${caddy.secondaryEndpoint}`
+}

+ 100 - 0
devops/kubernetes/orion/mongo.ts

@@ -0,0 +1,100 @@
+import * as k8s from '@pulumi/kubernetes'
+import * as pulumi from '@pulumi/pulumi'
+
+/**
+ * ServiceDeployment is an example abstraction that uses a class to fold together the common pattern of a
+ * Kubernetes Deployment and its associated Service object.
+ * This class deploys a MongoDB instance on a Persistent Volume
+ */
+export class MongoDBServiceDeployment extends pulumi.ComponentResource {
+  public readonly deployment: k8s.apps.v1.Deployment
+  public readonly service: k8s.core.v1.Service
+
+  constructor(name: string, args: ServiceDeploymentArgs, opts?: pulumi.ComponentResourceOptions) {
+    super('mongodb:service:MongoDBServiceDeployment', name, {}, opts)
+
+    const databaseLabels = { app: name }
+    const pvcName = `${name}-pvc`
+
+    const pvc = new k8s.core.v1.PersistentVolumeClaim(
+      pvcName,
+      {
+        metadata: {
+          labels: databaseLabels,
+          namespace: args.namespaceName,
+          name: pvcName,
+        },
+        spec: {
+          accessModes: ['ReadWriteOnce'],
+          resources: {
+            requests: {
+              storage: `${args.storage}Gi`,
+            },
+          },
+        },
+      },
+      { parent: this }
+    )
+
+    this.deployment = new k8s.apps.v1.Deployment(
+      name,
+      {
+        metadata: {
+          namespace: args.namespaceName,
+          labels: databaseLabels,
+        },
+        spec: {
+          selector: { matchLabels: databaseLabels },
+          template: {
+            metadata: { labels: databaseLabels },
+            spec: {
+              containers: [
+                {
+                  name: 'mongo-db',
+                  image: 'library/mongo:4.4',
+                  volumeMounts: [
+                    {
+                      name: 'mongo-data',
+                      mountPath: '/data/db',
+                      subPath: 'mongo',
+                    },
+                  ],
+                },
+              ],
+              volumes: [
+                {
+                  name: 'mongo-data',
+                  persistentVolumeClaim: {
+                    claimName: pvcName,
+                  },
+                },
+              ],
+            },
+          },
+        },
+      },
+      { parent: this }
+    )
+
+    this.service = new k8s.core.v1.Service(
+      name,
+      {
+        metadata: {
+          namespace: args.namespaceName,
+          labels: this.deployment.metadata.labels,
+          name: name,
+        },
+        spec: {
+          ports: [{ port: 27017 }],
+          selector: this.deployment.spec.template.metadata.labels,
+        },
+      },
+      { parent: this }
+    )
+  }
+}
+
+export interface ServiceDeploymentArgs {
+  namespaceName: pulumi.Output<string>
+  storage: number
+}

+ 13 - 0
devops/kubernetes/orion/package.json

@@ -0,0 +1,13 @@
+{
+  "name": "eks-cluster",
+  "devDependencies": {
+    "@types/node": "^10.0.0"
+  },
+  "dependencies": {
+    "@pulumi/awsx": "^0.30.0",
+    "@pulumi/eks": "^0.31.0",
+    "@pulumi/kubernetes": "^3.0.0",
+    "@pulumi/pulumi": "^3.0.0",
+    "pulumi-common": "file:../pulumi-common"
+  }
+}

+ 18 - 0
devops/kubernetes/orion/tsconfig.json

@@ -0,0 +1,18 @@
+{
+    "compilerOptions": {
+        "strict": true,
+        "outDir": "bin",
+        "target": "es2016",
+        "module": "commonjs",
+        "moduleResolution": "node",
+        "sourceMap": true,
+        "experimentalDecorators": true,
+        "pretty": true,
+        "noFallthroughCasesInSwitch": true,
+        "noImplicitReturns": true,
+        "forceConsistentCasingInFileNames": true
+    },
+    "files": [
+        "index.ts"
+    ]
+}

+ 1 - 0
distributor-node/config.yml

@@ -34,6 +34,7 @@ operatorApi:
   hmacSecret: this-is-not-so-secret
 keys:
   - suri: //Alice
+  - suri: //testing//worker//Distribution//0
   # - mnemonic: "escape naive annual throw tragic achieve grunt verify cram note harvest problem"
   #   type: ed25519
   # - keyfile: "/path/to/keyfile.json"

+ 1 - 1
distributor-node/package.json

@@ -11,7 +11,7 @@
     "@apollo/client": "^3.2.5",
     "@elastic/ecs-winston-format": "^1.1.0",
     "@joystream/metadata-protobuf": "^1.0.0",
-    "@joystream/types": "^0.17.1",
+    "@joystream/types": "^0.17.2",
     "@oclif/command": "^1",
     "@oclif/config": "^1",
     "@oclif/plugin-help": "^3",

+ 1 - 1
node/Cargo.toml

@@ -3,7 +3,7 @@ authors = ['Joystream contributors']
 build = 'build.rs'
 edition = '2018'
 name = 'joystream-node'
-version = '5.13.0'
+version = '5.14.0'
 default-run = "joystream-node"
 
 [[bin]]

+ 2 - 2
package.json

@@ -65,10 +65,10 @@
   },
   "engines": {
     "node": ">=14.0.0",
-    "yarn": "^1.22.0"
+    "yarn": "^1.22.15"
   },
   "volta": {
     "node": "14.18.0",
-    "yarn": "1.22.4"
+    "yarn": "1.22.15"
   }
 }

+ 8 - 0
query-node/mappings/common.ts

@@ -6,6 +6,7 @@ import { metaToObject } from '@joystream/metadata-protobuf/utils'
 import { AnyMetadataClass, DecodedMetadataObject } from '@joystream/metadata-protobuf/types'
 
 export const CURRENT_NETWORK = Network.GIZA
+
 /*
   Simple logger enabling error and informational reporting.
 
@@ -223,3 +224,10 @@ export async function getById<T extends BaseModel>(
 
   return result
 }
+
+export function deterministicEntityId(createdInEvent: SubstrateEvent, additionalIdentifier?: string | number): string {
+  return (
+    `${createdInEvent.blockNumber}-${createdInEvent.indexInBlock}` +
+    (additionalIdentifier ? `-${additionalIdentifier}` : '')
+  )
+}
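
For reference, a minimal standalone sketch of the IDs this helper produces (the sample values are illustrative; `blockNumber` and `indexInBlock` come from the Hydra `SubstrateEvent`):

```ts
// Self-contained model of deterministicEntityId, assuming only the two
// SubstrateEvent fields the helper actually reads.
interface EventLike {
  blockNumber: number
  indexInBlock: number
}

function deterministicEntityId(event: EventLike, additionalIdentifier?: string | number): string {
  return `${event.blockNumber}-${event.indexInBlock}` + (additionalIdentifier ? `-${additionalIdentifier}` : '')
}

const event: EventLike = { blockNumber: 1000, indexInBlock: 2 }
console.log(deterministicEntityId(event)) // "1000-2"
console.log(deterministicEntityId(event, 'coordinates')) // "1000-2-coordinates"
```

Because the ID is derived purely from the event's position in the chain, every reindexing run produces the same entity IDs, which the mappings below rely on.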

+ 6 - 11
query-node/mappings/content/utils.ts

@@ -1,5 +1,5 @@
 import { DatabaseManager, EventContext, StoreContext } from '@joystream/hydra-common'
-import { FindConditions, Raw } from 'typeorm'
+import { FindConditions } from 'typeorm'
 import {
   IVideoMetadata,
   IPublishedBeforeJoystream,
@@ -8,7 +8,7 @@ import {
   IChannelMetadata,
 } from '@joystream/metadata-protobuf'
 import { integrateMeta, isSet, isValidLanguageCode } from '@joystream/metadata-protobuf/utils'
-import { invalidMetadata, inconsistentState, logger } from '../common'
+import { invalidMetadata, inconsistentState, logger, deterministicEntityId } from '../common'
 import {
   // primary entities
   CuratorGroup,
@@ -209,9 +209,8 @@ async function processVideoMediaEncoding(
   const encoding =
     existingVideoMediaEncoding ||
     new VideoMediaEncoding({
+      id: deterministicEntityId(event),
       createdAt: new Date(event.blockTimestamp),
-      createdById: '1',
-      updatedById: '1',
     })
   // integrate media encoding-related data
   integrateMeta(encoding, metadata, ['codecName', 'container', 'mimeMediaType'])
@@ -231,10 +230,9 @@ async function processVideoMediaMetadata(
   const videoMedia =
     existingVideoMedia ||
     new VideoMediaMetadata({
+      id: deterministicEntityId(event),
       createdInBlock: event.blockNumber,
       createdAt: new Date(event.blockTimestamp),
-      createdById: '1',
-      updatedById: '1',
     })
 
   // integrate media-related data
@@ -364,13 +362,11 @@ async function processLanguage(
 
   // create new language
   const newLanguage = new Language({
+    id: deterministicEntityId(event),
     iso: languageIso,
     createdInBlock: event.blockNumber,
     createdAt: new Date(event.blockTimestamp),
     updatedAt: new Date(event.blockTimestamp),
-    // TODO: remove these lines after Hydra auto-fills the values when cascading save (remove them on all places)
-    createdById: '1',
-    updatedById: '1',
   })
 
   await store.save<Language>(newLanguage)
@@ -397,9 +393,8 @@ async function updateVideoLicense(
     license =
       previousLicense ||
       new License({
+        id: deterministicEntityId(event),
         createdAt: new Date(event.blockTimestamp),
-        createdById: '1',
-        updatedById: '1',
       })
     license.updatedAt = new Date(event.blockTimestamp)
     integrateMeta(license, licenseMetadata, ['attribution', 'code', 'customText'])

+ 1 - 1
query-node/mappings/package.json

@@ -21,7 +21,7 @@
     "@joystream/hydra-common": "3.1.0-alpha.16",
     "@joystream/hydra-db-utils": "3.1.0-alpha.16",
     "@joystream/metadata-protobuf": "^1.0.0",
-    "@joystream/types": "^0.17.1",
+    "@joystream/types": "^0.17.2",
     "@joystream/warthog": "2.41.2",
     "@apollo/client": "^3.2.5"
   },

+ 3 - 2
query-node/mappings/storage/index.ts

@@ -76,6 +76,7 @@ export async function storage_StorageOperatorMetadataSet({ event, store }: Event
   const [bucketId, , metadataBytes] = new Storage.StorageOperatorMetadataSetEvent(event).params
   const storageBucket = await getStorageBucketWithOperatorMetadata(store, bucketId.toString())
   storageBucket.operatorMetadata = await processStorageOperatorMetadata(
+    event,
     store,
     storageBucket.operatorMetadata,
     metadataBytes
@@ -276,7 +277,7 @@ export async function storage_DistributionBucketFamilyMetadataSet({
   const [familyId, metadataBytes] = new Storage.DistributionBucketFamilyMetadataSetEvent(event).params
 
   const family = await getDistributionBucketFamilyWithMetadata(store, familyId.toString())
-  family.metadata = await processDistributionBucketFamilyMetadata(store, family.metadata, metadataBytes)
+  family.metadata = await processDistributionBucketFamilyMetadata(event, store, family.metadata, metadataBytes)
 
   await store.save<DistributionBucketFamily>(family)
 }
@@ -426,7 +427,7 @@ export async function storage_DistributionBucketMetadataSet({
   const [workerId, bucketId, metadataBytes] = new Storage.DistributionBucketMetadataSetEvent(event).params
 
   const operator = await getDistributionBucketOperatorWithMetadata(store, distributionOperatorId(bucketId, workerId))
-  operator.metadata = await processDistributionOperatorMetadata(store, operator.metadata, metadataBytes)
+  operator.metadata = await processDistributionOperatorMetadata(event, store, operator.metadata, metadataBytes)
 
   await store.save<DistributionBucketOperator>(operator)
 }

+ 47 - 45
query-node/mappings/storage/metadata.ts

@@ -1,4 +1,4 @@
-import { DatabaseManager } from '@joystream/hydra-common'
+import { DatabaseManager, SubstrateEvent } from '@joystream/hydra-common'
 import {
   DistributionBucketFamilyMetadata,
   DistributionBucketOperatorMetadata,
@@ -11,7 +11,7 @@ import {
   GeographicalAreaSubdivistion,
   DistributionBucketFamilyGeographicArea,
 } from 'query-node/dist/model'
-import { deserializeMetadata, invalidMetadata } from '../common'
+import { deserializeMetadata, deterministicEntityId, invalidMetadata } from '../common'
 import { Bytes } from '@polkadot/types'
 import {
   DistributionBucketOperatorMetadata as DistributionBucketOperatorMetadataProto,
@@ -21,6 +21,7 @@ import {
   GeographicalArea as GeographicalAreaProto,
 } from '@joystream/metadata-protobuf'
 import { isSet, isEmptyObject, isValidCountryCode, isValidSubdivisionCode } from '@joystream/metadata-protobuf/utils'
+import _ from 'lodash'
 
 const protobufContinentToGraphlContinent: { [key in GeographicalAreaProto.Continent]: Continent } = {
   [GeographicalAreaProto.Continent.AF]: Continent.AF,
@@ -33,11 +34,12 @@ const protobufContinentToGraphlContinent: { [key in GeographicalAreaProto.Contin
 }
 
 async function processNodeLocationMetadata(
+  event: SubstrateEvent,
   store: DatabaseManager,
   current: NodeLocationMetadata | undefined,
   meta: INodeLocationMetadata
 ): Promise<NodeLocationMetadata> {
-  const nodeLocation = current || new NodeLocationMetadata()
+  const nodeLocation = current || new NodeLocationMetadata({ id: deterministicEntityId(event) })
   if (isSet(meta.city)) {
     nodeLocation.city = meta.city
   }
@@ -45,7 +47,7 @@ async function processNodeLocationMetadata(
     if (isEmptyObject(meta.coordinates)) {
       nodeLocation.coordinates = null as any
     } else {
-      const coordinates = current?.coordinates || new GeoCoordinates()
+      const coordinates = current?.coordinates || new GeoCoordinates({ id: deterministicEntityId(event) })
       coordinates.latitude = meta.coordinates.latitude || coordinates.latitude || 0
       coordinates.longitude = meta.coordinates.longitude || coordinates.longitude || 0
       await store.save<GeoCoordinates>(coordinates)
@@ -65,6 +67,7 @@ async function processNodeLocationMetadata(
 }
 
 export async function processDistributionOperatorMetadata(
+  event: SubstrateEvent,
   store: DatabaseManager,
   current: DistributionBucketOperatorMetadata | undefined,
   metadataBytes: Bytes
@@ -73,14 +76,14 @@ export async function processDistributionOperatorMetadata(
   if (!meta) {
     return current
   }
-  const metadataEntity = current || new DistributionBucketOperatorMetadata()
+  const metadataEntity = current || new DistributionBucketOperatorMetadata({ id: deterministicEntityId(event) })
   if (isSet(meta.endpoint)) {
     metadataEntity.nodeEndpoint = meta.endpoint
   }
   if (isSet(meta.location)) {
     metadataEntity.nodeLocation = isEmptyObject(meta.location)
       ? (null as any)
-      : await processNodeLocationMetadata(store, metadataEntity.nodeLocation, meta.location)
+      : await processNodeLocationMetadata(event, store, metadataEntity.nodeLocation, meta.location)
   }
   if (isSet(meta.extra)) {
     metadataEntity.extra = meta.extra
@@ -92,6 +95,7 @@ export async function processDistributionOperatorMetadata(
 }
 
 export async function processStorageOperatorMetadata(
+  event: SubstrateEvent,
   store: DatabaseManager,
   current: StorageBucketOperatorMetadata | undefined,
   metadataBytes: Bytes
@@ -100,14 +104,14 @@ export async function processStorageOperatorMetadata(
   if (!meta) {
     return current
   }
-  const metadataEntity = current || new StorageBucketOperatorMetadata()
+  const metadataEntity = current || new StorageBucketOperatorMetadata({ id: deterministicEntityId(event) })
   if (isSet(meta.endpoint)) {
     metadataEntity.nodeEndpoint = meta.endpoint || (null as any)
   }
   if (isSet(meta.location)) {
     metadataEntity.nodeLocation = isEmptyObject(meta.location)
       ? (null as any)
-      : await processNodeLocationMetadata(store, metadataEntity.nodeLocation, meta.location)
+      : await processNodeLocationMetadata(event, store, metadataEntity.nodeLocation, meta.location)
   }
   if (isSet(meta.extra)) {
     metadataEntity.extra = meta.extra || (null as any)
@@ -119,6 +123,7 @@ export async function processStorageOperatorMetadata(
 }
 
 export async function processDistributionBucketFamilyMetadata(
+  event: SubstrateEvent,
   store: DatabaseManager,
   current: DistributionBucketFamilyMetadata | undefined,
   metadataBytes: Bytes
@@ -127,7 +132,7 @@ export async function processDistributionBucketFamilyMetadata(
   if (!meta) {
     return current
   }
-  const metadataEntity = current || new DistributionBucketFamilyMetadata()
+  const metadataEntity = current || new DistributionBucketFamilyMetadata({ id: deterministicEntityId(event) })
   if (isSet(meta.region)) {
     metadataEntity.region = meta.region || (null as any)
   }
@@ -138,7 +143,7 @@ export async function processDistributionBucketFamilyMetadata(
     metadataEntity.latencyTestTargets = meta.latencyTestTargets.filter((t) => t)
   }
 
-  await store.save<DistributionBucketOperatorMetadata>(metadataEntity)
+  await store.save<DistributionBucketFamilyMetadata>(metadataEntity)
 
   // Update areas after metadata is saved (since we need an id to reference)
   if (isSet(meta.areas)) {
@@ -146,45 +151,42 @@ export async function processDistributionBucketFamilyMetadata(
     await Promise.all(metadataEntity.areas?.map((a) => store.remove<DistributionBucketFamilyGeographicArea>(a)) || [])
     // Save new areas
     await Promise.all(
-      meta.areas
-        .filter((a) => !isEmptyObject(a))
-        .map(async (a) => {
-          const area = new DistributionBucketFamilyGeographicArea({
-            distributionBucketFamilyMetadata: metadataEntity,
-          })
+      _.uniqWith(
+        meta.areas.filter((a) => !isEmptyObject(a)),
+        _.isEqual
+      ).map(async (a, i) => {
+        const area = new DistributionBucketFamilyGeographicArea({
+          id: `${metadataEntity.id}-${i}`,
+          distributionBucketFamilyMetadata: metadataEntity,
+        })
 
-          if (a.continent) {
-            const continent = new GeographicalAreaContinent()
-            continent.code = protobufContinentToGraphlContinent[a.continent]
-            if (!continent.code) {
-              return invalidMetadata(`Unrecognized continent enum variant: ${a.continent}`)
-            }
-            area.id = `${metadataEntity.id}-C-${continent.code}`
-            area.area = continent
+        if (a.continent) {
+          const continent = new GeographicalAreaContinent()
+          continent.code = protobufContinentToGraphlContinent[a.continent]
+          if (!continent.code) {
+            return invalidMetadata(`Unrecognized continent enum variant: ${a.continent}`)
           }
-
-          if (a.countryCode) {
-            if (!isValidCountryCode(a.countryCode)) {
-              return invalidMetadata(`Invalid country code: ${a.countryCode}`)
-            }
-            const country = new GeographicalAreaCountry()
-            country.code = a.countryCode
-            area.id = `${metadataEntity.id}-c-${country.code}`
-            area.area = country
+          area.area = continent
+        } else if (a.countryCode) {
+          if (!isValidCountryCode(a.countryCode)) {
+            return invalidMetadata(`Invalid country code: ${a.countryCode}`)
           }
-
-          if (a.subdivisionCode) {
-            if (!isValidSubdivisionCode(a.subdivisionCode)) {
-              return invalidMetadata(`Invalid subdivision code: ${a.subdivisionCode}`)
-            }
-            const subdivision = new GeographicalAreaSubdivistion()
-            subdivision.code = a.subdivisionCode
-            area.id = `${metadataEntity.id}-s-${subdivision.code}`
-            area.area = subdivision
+          const country = new GeographicalAreaCountry()
+          country.code = a.countryCode
+          area.area = country
+        } else if (a.subdivisionCode) {
+          if (!isValidSubdivisionCode(a.subdivisionCode)) {
+            return invalidMetadata(`Invalid subdivision code: ${a.subdivisionCode}`)
           }
-
-          await store.save<DistributionBucketFamilyGeographicArea>(area)
-        })
+          const subdivision = new GeographicalAreaSubdivistion()
+          subdivision.code = a.subdivisionCode
+          area.area = subdivision
+        } else {
+          return
+        }
+
+        await store.save<DistributionBucketFamilyGeographicArea>(area)
+      })
     )
   }
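
The rewritten area-processing loop above first deduplicates the decoded `meta.areas` with `_.uniqWith(…, _.isEqual)` and then derives stable per-area IDs from the list index. A minimal sketch of that deduplication step (the sample area objects are illustrative):

```ts
import _ from 'lodash'

// Hypothetical decoded `meta.areas` payload containing a deep-equal duplicate
const areas = [{ countryCode: 'DE' }, { continent: 'EU' }, { countryCode: 'DE' }]

// uniqWith with _.isEqual removes duplicates by structural comparison,
// so the second 'DE' entry is dropped before IDs are assigned
const uniqueAreas = _.uniqWith(areas, _.isEqual)
console.log(uniqueAreas) // [ { countryCode: 'DE' }, { continent: 'EU' } ]
// Each remaining area then gets the id `${metadataEntity.id}-${index}`
```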
 

+ 36 - 0
reproduce-giza-issue.sh

@@ -0,0 +1,36 @@
+#!/usr/bin/env bash
+set -e
+
+## Run a local development chain
+docker-compose up -d joystream-node
+
+# Init the chain with 2 storage buckets that have high limits set
+# The DynamicBagPolicy for Channel should be "numberOfStorageBuckets: 2" after this step is done
+./tests/network-tests/run-test-scenario.sh giza-issue-reproduction-setup
+
+# Set env for CLI's
+export AUTO_CONFIRM=true
+export ACCOUNT_URI=//testing//worker//Storage//0 # Storage lead account uri for storage CLI
+
+# Setup the CLI:
+yarn joystream-cli api:setUri ws://localhost:9944
+yarn joystream-cli account:choose --address 5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY
+yarn joystream-cli api:setQueryNodeEndpoint http://localhost:8081/graphql
+
+# Set very low limits for storage bucket 0
+yarn storage-node leader:set-bucket-limits -i 0 -s 100 -o 1
+# Create a channel (the transaction will fail due to low limits of bucket 0)
+yarn joystream-cli content:createChannel --context Member -i ./cli/examples/content/CreateChannel.json || true
+# Update DynamicBagPolicy to 1 storage bucket per channel bag
+yarn storage-node leader:update-dynamic-bag-policy -t Channel -n 1
+# Disable storage bucket 0
+yarn storage-node leader:update-bucket-status -i 0 --set off
+# Create a channel (the transaction still fails, which is unexpected)
+yarn joystream-cli content:createChannel --context Member -i ./cli/examples/content/CreateChannel.json || true
+# Increase limits of bucket 0
+yarn storage-node leader:set-bucket-limits -i 0 -s 1000000000 -o 1000
+# Create a channel
+yarn joystream-cli content:createChannel --context Member -i ./cli/examples/content/CreateChannel.json
+# Notice that the channel bag gets assigned to both buckets 0 and 1, even though:
+# 1. Bucket 0 is disabled
+# 2. DynamicBagPolicy for Channel has "numberOfStorageBuckets: 1"

+ 1 - 1
runtime-modules/storage/Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = 'pallet-storage'
-version = '4.0.0'
+version = '4.0.1'
 authors = ['Joystream contributors']
 edition = '2018'
 

+ 74 - 6
runtime-modules/storage/src/lib.rs

@@ -824,6 +824,26 @@ pub struct StorageBucketRecord<WorkerId, AccountId> {
 
     /// Defines limits for a bucket.
     pub voucher: Voucher,
+
+    /// Number of assigned bags.
+    pub assigned_bags: u64,
+}
+
+impl<WorkerId, AccountId> StorageBucketRecord<WorkerId, AccountId> {
+    // Increment the assigned bags number.
+    fn register_bag_assignment(&mut self) {
+        self.assigned_bags = self.assigned_bags.saturating_add(1);
+    }
+
+    // Decrement the assigned bags number.
+    fn unregister_bag_assignment(&mut self) {
+        self.assigned_bags = self.assigned_bags.saturating_sub(1);
+    }
+
+    // Checks the bag assignment number. Returns true if it equals zero.
+    fn no_bags_assigned(&self) -> bool {
+        self.assigned_bags == 0
+    }
 }
 
 // Helper-struct for the data object uploading.
@@ -1537,6 +1557,9 @@ decl_module! {
                 Error::<T>::CannotDeleteNonEmptyStorageBucket
             );
 
+            // Check that no assigned bags are left.
+            ensure!(bucket.no_bags_assigned(), Error::<T>::StorageBucketIsBoundToBag);
+
             //
             // == MUTATION SAFE ==
             //
@@ -1716,6 +1739,7 @@ decl_module! {
                 operator_status,
                 accepting_new_bags,
                 voucher,
+                assigned_bags: 0,
             };
 
             let storage_bucket_id = Self::next_storage_bucket_id();
@@ -1773,6 +1797,9 @@ decl_module! {
                 );
             }
 
+            // Update bag counters.
+            Self::change_bag_assignments_for_storage_buckets(&add_buckets, &remove_buckets);
+
             Bags::<T>::mutate(&bag_id, |bag| {
                 bag.update_storage_buckets(&mut add_buckets.clone(), &remove_buckets);
             });
@@ -2223,7 +2250,10 @@ decl_module! {
                 bag.update_distribution_buckets(&mut add_buckets_ids.clone(), &remove_buckets_ids);
             });
 
-            Self::change_bag_assignments(&add_buckets_ids, &remove_buckets_ids);
+            Self::change_bag_assignments_for_distribution_buckets(
+                &add_buckets_ids,
+                &remove_buckets_ids
+            );
 
             Self::deposit_event(
                 RawEvent::DistributionBucketsUpdatedForBag(
@@ -2645,6 +2675,8 @@ impl<T: Trait> DataObjectStorage<T> for Module<T> {
 
         let bag_id: BagId<T> = dynamic_bag_id.clone().into();
 
+        let deleted_dynamic_bag = Self::dynamic_bag(&dynamic_bag_id);
+
         //
         // == MUTATION SAFE ==
         //
@@ -2655,6 +2687,16 @@ impl<T: Trait> DataObjectStorage<T> for Module<T> {
 
         <Bags<T>>::remove(&bag_id);
 
+        Self::change_bag_assignments_for_distribution_buckets(
+            &BTreeSet::new(),
+            &deleted_dynamic_bag.distributed_by,
+        );
+
+        Self::change_bag_assignments_for_storage_buckets(
+            &BTreeSet::new(),
+            &deleted_dynamic_bag.stored_by,
+        );
+
         Self::deposit_event(RawEvent::DynamicBagDeleted(
             deletion_prize_account_id,
             dynamic_bag_id,
@@ -2704,6 +2746,7 @@ impl<T: Trait> DataObjectStorage<T> for Module<T> {
         //
         // == MUTATION SAFE ==
         //
+
         Self::create_dynamic_bag_inner(
             &dynamic_bag_id,
             &deletion_prize,
@@ -2753,10 +2796,6 @@ impl<T: Trait> Module<T> {
         storage_buckets: &BTreeSet<T::StorageBucketId>,
         distribution_buckets: &BTreeSet<DistributionBucketId<T>>,
     ) -> DispatchResult {
-        //
-        // = MUTATION SAFE =
-        //
-
         if let Some(deletion_prize) = deletion_prize.clone() {
             <StorageTreasury<T>>::deposit(&deletion_prize.account_id, deletion_prize.prize)?;
         }
@@ -2772,6 +2811,13 @@ impl<T: Trait> Module<T> {
 
         <Bags<T>>::insert(&bag_id, bag);
 
+        Self::change_bag_assignments_for_distribution_buckets(
+            &distribution_buckets,
+            &BTreeSet::new(),
+        );
+
+        Self::change_bag_assignments_for_storage_buckets(&storage_buckets, &BTreeSet::new());
+
         Self::deposit_event(RawEvent::DynamicBagCreated(
             dynamic_bag_id.clone(),
             deletion_prize.clone(),
@@ -3826,7 +3872,7 @@ impl<T: Trait> Module<T> {
     }
 
     // Add and/or remove distribution buckets assignments to bags.
-    fn change_bag_assignments(
+    fn change_bag_assignments_for_distribution_buckets(
         add_buckets: &BTreeSet<DistributionBucketId<T>>,
         remove_buckets: &BTreeSet<DistributionBucketId<T>>,
     ) {
@@ -3861,6 +3907,28 @@ impl<T: Trait> Module<T> {
         }
     }
 
+    // Add and/or remove storage buckets assignments to bags.
+    fn change_bag_assignments_for_storage_buckets(
+        add_buckets: &BTreeSet<T::StorageBucketId>,
+        remove_buckets: &BTreeSet<T::StorageBucketId>,
+    ) {
+        for bucket_id in add_buckets.iter() {
+            if StorageBucketById::<T>::contains_key(bucket_id) {
+                StorageBucketById::<T>::mutate(bucket_id, |bucket| {
+                    bucket.register_bag_assignment();
+                })
+            }
+        }
+
+        for bucket_id in remove_buckets.iter() {
+            if StorageBucketById::<T>::contains_key(bucket_id) {
+                StorageBucketById::<T>::mutate(bucket_id, |bucket| {
+                    bucket.unregister_bag_assignment();
+                })
+            }
+        }
+    }
+
     // Checks distribution buckets for bag assignment number. Returns true only if all 'assigned_bags' are
     // zero.
     fn no_bags_assigned(family_id: &T::DistributionBucketFamilyId) -> bool {
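
The counter maintenance above encodes a simple invariant: every bag assignment increments `assigned_bags`, every removal decrements it (saturating at zero), and bucket deletion is refused while the counter is non-zero. A TypeScript sketch of the same invariant, for illustration only (the pallet code above is the authoritative implementation):

```ts
// Toy model of the assigned-bags counter added to StorageBucketRecord
class StorageBucket {
  private assignedBags = 0

  registerBagAssignment(): void {
    this.assignedBags += 1
  }

  unregisterBagAssignment(): void {
    // Mirrors saturating_sub: never underflows below zero
    this.assignedBags = Math.max(0, this.assignedBags - 1)
  }

  noBagsAssigned(): boolean {
    return this.assignedBags === 0
  }
}

function deleteBucket(bucket: StorageBucket): void {
  if (!bucket.noBagsAssigned()) {
    // Same guard as Error::<T>::StorageBucketIsBoundToBag in the pallet
    throw new Error('StorageBucketIsBoundToBag')
  }
  // ...proceed with deletion
}
```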

+ 202 - 0
runtime-modules/storage/src/tests/mod.rs

@@ -329,6 +329,49 @@ fn update_storage_buckets_for_bags_succeeded() {
     });
 }
 
+#[test]
+fn update_storage_buckets_for_bags_succeeded_with_additional_checks_on_adding_and_removing() {
+    build_test_externalities().execute_with(|| {
+        set_default_update_storage_buckets_per_bag_limit();
+
+        let static_bag_id = StaticBagId::Council;
+        let bag_id: BagId<Test> = static_bag_id.into();
+
+        let bucket_id = CreateStorageBucketFixture::default()
+            .with_origin(RawOrigin::Signed(STORAGE_WG_LEADER_ACCOUNT_ID))
+            .call_and_assert(Ok(()))
+            .unwrap();
+
+        let add_buckets_ids = BTreeSet::from_iter(vec![bucket_id]);
+
+        UpdateStorageBucketForBagsFixture::default()
+            .with_origin(RawOrigin::Signed(STORAGE_WG_LEADER_ACCOUNT_ID))
+            .with_bag_id(bag_id.clone())
+            .with_add_bucket_ids(add_buckets_ids.clone())
+            .call_and_assert(Ok(()));
+
+        // Add check
+        let bag = Storage::bag(&bag_id);
+        assert_eq!(bag.stored_by, add_buckets_ids);
+
+        let bucket = Storage::storage_bucket_by_id(&bucket_id);
+        assert_eq!(bucket.assigned_bags, 1);
+
+        // Remove check
+        UpdateStorageBucketForBagsFixture::default()
+            .with_origin(RawOrigin::Signed(STORAGE_WG_LEADER_ACCOUNT_ID))
+            .with_bag_id(bag_id.clone())
+            .with_remove_bucket_ids(add_buckets_ids.clone())
+            .call_and_assert(Ok(()));
+
+        let bag = Storage::bag(&bag_id);
+        assert_eq!(bag.stored_by.len(), 0);
+
+        let bucket = Storage::storage_bucket_by_id(&bucket_id);
+        assert_eq!(bucket.assigned_bags, 0);
+    });
+}
+
 #[test]
 fn update_storage_buckets_for_bags_fails_with_non_existing_dynamic_bag() {
     build_test_externalities().execute_with(|| {
@@ -2620,6 +2663,119 @@ fn delete_dynamic_bags_succeeded() {
     });
 }
 
+#[test]
+fn delete_dynamic_bags_succeeded_with_assigned_distribution_buckets() {
+    build_test_externalities().execute_with(|| {
+        let initial_balance = 1000;
+        let deletion_prize_value = 77;
+        increase_account_balance(&DEFAULT_MEMBER_ACCOUNT_ID, initial_balance);
+
+        let distribution_buckets_number = 10;
+        let family_policy_number1 = 2;
+        let family_policy_number2 = 3;
+
+        create_storage_buckets(DEFAULT_STORAGE_BUCKETS_NUMBER);
+        let (family1, _) =
+            create_distribution_bucket_family_with_buckets(distribution_buckets_number);
+        let (family2, _) =
+            create_distribution_bucket_family_with_buckets(distribution_buckets_number);
+
+        let family_policy = BTreeMap::from_iter(vec![
+            (family1, family_policy_number1),
+            (family2, family_policy_number2),
+        ]);
+
+        UpdateFamiliesInDynamicBagCreationPolicyFixture::default()
+            .with_origin(RawOrigin::Signed(DISTRIBUTION_WG_LEADER_ACCOUNT_ID))
+            .with_families(family_policy)
+            .call_and_assert(Ok(()));
+
+        let dynamic_bag_id = DynamicBagId::<Test>::Member(DEFAULT_MEMBER_ID);
+        CreateDynamicBagFixture::default()
+            .with_bag_id(dynamic_bag_id.clone())
+            .with_deletion_prize(DynamicBagDeletionPrize::<Test> {
+                account_id: DEFAULT_MEMBER_ACCOUNT_ID,
+                prize: deletion_prize_value,
+            })
+            .call_and_assert(Ok(()));
+
+        let bag = Storage::dynamic_bag(&dynamic_bag_id);
+
+        let total_distributed_buckets_number = family_policy_number1 + family_policy_number2;
+        assert_eq!(
+            bag.distributed_by.len(),
+            total_distributed_buckets_number as usize
+        );
+
+        let distributed_by_bag = bag.distributed_by.clone();
+        for distribution_bucket_id in &distributed_by_bag {
+            let bucket = Storage::distribution_bucket_by_family_id_by_index(
+                distribution_bucket_id.distribution_bucket_family_id,
+                distribution_bucket_id.distribution_bucket_index,
+            );
+
+            assert_eq!(bucket.assigned_bags, 1);
+        }
+
+        DeleteDynamicBagFixture::default()
+            .with_bag_id(dynamic_bag_id.clone())
+            .with_deletion_account_id(DEFAULT_MEMBER_ACCOUNT_ID)
+            .call_and_assert(Ok(()));
+
+        for distribution_bucket_id in &distributed_by_bag {
+            let bucket = Storage::distribution_bucket_by_family_id_by_index(
+                distribution_bucket_id.distribution_bucket_family_id,
+                distribution_bucket_id.distribution_bucket_index,
+            );
+
+            assert_eq!(bucket.assigned_bags, 0);
+        }
+    });
+}
+
+#[test]
+fn delete_dynamic_bags_succeeded_with_assigned_storage_buckets() {
+    build_test_externalities().execute_with(|| {
+        let initial_balance = 1000;
+        let deletion_prize_value = 77;
+        increase_account_balance(&DEFAULT_MEMBER_ACCOUNT_ID, initial_balance);
+
+        let storage_buckets_number = DefaultMemberDynamicBagNumberOfStorageBuckets::get();
+        create_storage_buckets(storage_buckets_number);
+
+        let dynamic_bag_id = DynamicBagId::<Test>::Member(DEFAULT_MEMBER_ID);
+        CreateDynamicBagFixture::default()
+            .with_bag_id(dynamic_bag_id.clone())
+            .with_deletion_prize(DynamicBagDeletionPrize::<Test> {
+                account_id: DEFAULT_MEMBER_ACCOUNT_ID,
+                prize: deletion_prize_value,
+            })
+            .call_and_assert(Ok(()));
+
+        let bag = Storage::dynamic_bag(&dynamic_bag_id);
+
+        assert_eq!(bag.stored_by.len(), storage_buckets_number as usize);
+
+        let stored_by_bag = bag.stored_by.clone();
+        for bucket_id in &stored_by_bag {
+            let bucket = Storage::storage_bucket_by_id(bucket_id);
+
+            assert_eq!(bucket.assigned_bags, 1);
+        }
+
+        DeleteDynamicBagFixture::default()
+            .with_bag_id(dynamic_bag_id.clone())
+            .with_deletion_account_id(DEFAULT_MEMBER_ACCOUNT_ID)
+            .call_and_assert(Ok(()));
+
+        for bucket_id in &stored_by_bag {
+            let bucket = Storage::storage_bucket_by_id(bucket_id);
+
+            assert_eq!(bucket.assigned_bags, 0);
+        }
+    });
+}
+
 #[test]
 fn delete_dynamic_bags_fails_with_non_existent_dynamic_bag() {
     build_test_externalities().execute_with(|| {
@@ -2731,6 +2887,20 @@ fn delete_storage_bucket_fails_with_non_empty_bucket() {
     });
 }
 
+#[test]
+fn delete_storage_bucket_fails_with_assigned_bag() {
+    build_test_externalities().execute_with(|| {
+        let bag_id = BagId::<Test>::Static(StaticBagId::Council);
+
+        let bucket_id = create_default_storage_bucket_and_assign_to_bag(bag_id.clone());
+
+        DeleteStorageBucketFixture::default()
+            .with_origin(RawOrigin::Signed(STORAGE_WG_LEADER_ACCOUNT_ID))
+            .with_storage_bucket_id(bucket_id)
+            .call_and_assert(Err(Error::<Test>::StorageBucketIsBoundToBag.into()));
+    });
+}
+
 #[test]
 fn remove_storage_bucket_operator_succeeded() {
     build_test_externalities().execute_with(|| {
@@ -3131,8 +3301,25 @@ fn create_dynamic_bag_succeeded() {
         run_to_block(starting_block);
 
         let dynamic_bag_id = DynamicBagId::<Test>::Member(DEFAULT_MEMBER_ID);
+        let distribution_buckets_number = 10;
+        let family_policy_number1 = 2;
+        let family_policy_number2 = 3;
 
         create_storage_buckets(DEFAULT_STORAGE_BUCKETS_NUMBER);
+        let (family1, _) =
+            create_distribution_bucket_family_with_buckets(distribution_buckets_number);
+        let (family2, _) =
+            create_distribution_bucket_family_with_buckets(distribution_buckets_number);
+
+        let family_policy = BTreeMap::from_iter(vec![
+            (family1, family_policy_number1),
+            (family2, family_policy_number2),
+        ]);
+
+        UpdateFamiliesInDynamicBagCreationPolicyFixture::default()
+            .with_origin(RawOrigin::Signed(DISTRIBUTION_WG_LEADER_ACCOUNT_ID))
+            .with_families(family_policy)
+            .call_and_assert(Ok(()));
 
         let deletion_prize_value = 100;
         let deletion_prize_account_id = DEFAULT_MEMBER_ACCOUNT_ID;
@@ -3174,6 +3361,21 @@ fn create_dynamic_bag_succeeded() {
             creation_policy.number_of_storage_buckets as usize
         );
 
+        let total_distributed_buckets_number = family_policy_number1 + family_policy_number2;
+        assert_eq!(
+            bag.distributed_by.len(),
+            total_distributed_buckets_number as usize
+        );
+
+        for distribution_bucket_id in &bag.distributed_by {
+            let bucket = Storage::distribution_bucket_by_family_id_by_index(
+                distribution_bucket_id.distribution_bucket_family_id,
+                distribution_bucket_id.distribution_bucket_index,
+            );
+
+            assert_eq!(bucket.assigned_bags, 1);
+        }
+
         assert_eq!(bag.deletion_prize.unwrap(), deletion_prize_value);
 
         // post-check balances

+ 1 - 1
runtime/Cargo.toml

@@ -4,7 +4,7 @@ edition = '2018'
 name = 'joystream-node-runtime'
 # Follow convention: https://github.com/Joystream/substrate-runtime-joystream/issues/1
 # {Authoring}.{Spec}.{Impl} of the RuntimeVersion
-version = '9.13.0'
+version = '9.14.0'
 
 [dependencies]
 # Third-party dependencies

+ 1 - 1
runtime/src/lib.rs

@@ -84,7 +84,7 @@ pub const VERSION: RuntimeVersion = RuntimeVersion {
     spec_name: create_runtime_str!("joystream-node"),
     impl_name: create_runtime_str!("joystream-node"),
     authoring_version: 9,
-    spec_version: 13,
+    spec_version: 14,
     impl_version: 0,
     apis: crate::runtime_api::EXPORTED_RUNTIME_API_VERSIONS,
     transaction_version: 1,

+ 5 - 4
start.sh

@@ -26,6 +26,10 @@ docker-compose up -d joystream-node
 
 ## Init the chain with some state
 export SKIP_MOCK_CONTENT=true
+HOST_IP=$(tests/network-tests/get-host-ip.sh)
+export COLOSSUS_1_URL="http://${HOST_IP}:3333"
+export COLOSSUS_1_TRANSACTOR_KEY=$(docker run --rm --pull=always docker.io/parity/subkey:2.0.1 inspect ${COLOSSUS_1_TRANSACTOR_URI} --output-type json | jq .ss58Address -r)
+export DISTRIBUTOR_1_URL="http://${HOST_IP}:3334"
 ./tests/network-tests/run-test-scenario.sh ${INIT_CHAIN_SCENARIO}
 
 ## Set sudo as the membership screening authority
@@ -34,9 +38,6 @@ yarn workspace api-scripts set-sudo-as-screening-auth
 ## Member faucet
 docker-compose up -d faucet
 
-## Storage Infrastructure Configuration
-./storage-playground-config.sh
-
 ## Query Node Infrastructure
 ./query-node/start.sh
 
@@ -58,4 +59,4 @@ else
   while true; do
     read
   done
-fi
+fi
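
The `subkey inspect` pipeline added above derives the transactor's SS58 address from its secret URI. For reference, the equivalent derivation with `@polkadot/keyring` (a sketch, assuming subkey's sr25519 and generic-substrate SS58 defaults):

```ts
import { Keyring } from '@polkadot/keyring'
import { cryptoWaitReady } from '@polkadot/util-crypto'

// Derives the SS58 address for a secret URI such as //Alice or
// //testing//worker//Storage//0, matching what `subkey inspect` prints.
async function deriveAddress(uri: string): Promise<string> {
  await cryptoWaitReady() // WASM crypto must be initialized first
  const keyring = new Keyring({ type: 'sr25519', ss58Format: 42 })
  return keyring.addFromUri(uri).address
}
```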

+ 2 - 2
storage-node/package.json

@@ -11,7 +11,7 @@
     "@apollo/client": "^3.3.21",
     "@elastic/ecs-winston-format": "^1.3.1",
     "@joystream/metadata-protobuf": "^1.0.0",
-    "@joystream/types": "^0.17.1",
+    "@joystream/types": "^0.17.2",
     "@oclif/command": "^1",
     "@oclif/config": "^1",
     "@oclif/plugin-help": "^3",
@@ -94,7 +94,7 @@
   },
   "volta": {
     "node": "14.16.1",
-    "yarn": "1.22.5"
+    "yarn": "1.22.15"
   },
   "files": [
     "/bin",

+ 1 - 1
tests/network-tests/.env

@@ -37,7 +37,7 @@ WORKING_GROUP_ROLE_STAKE=10
 # Reward interval for working group tests
 LONG_REWARD_INTERVAL=99999
 # First reward interval for working group reward test
-SHORT_FIRST_REWARD_INTERVAL=3
+SHORT_FIRST_REWARD_INTERVAL=6
 # Reward interval for working group reward test
 SHORT_REWARD_INTERVAL=3
 # Payout amount for working group tests

+ 32 - 0
tests/network-tests/codegen.yml

@@ -0,0 +1,32 @@
+overwrite: true
+
+schema: '../../query-node/generated/graphql-server/generated/schema.graphql'
+
+documents:
+  - './src/graphql/queries/*.graphql'
+
+config:
+  scalars:
+    Date: Date
+  preResolveTypes: true # avoid using Pick
+  skipTypename: true # skip __typename field in typings unless it's part of the query
+
+generates:
+  src/graphql/generated/schema.ts:
+    hooks:
+      afterOneFileWrite:
+        - prettier --write
+        - eslint --fix
+    plugins:
+      - typescript
+  src/graphql/generated/queries.ts:
+    preset: import-types
+    presetConfig:
+      typesPath: ./schema
+    hooks:
+      afterOneFileWrite:
+        - prettier --write
+        - eslint --fix
+    plugins:
+      - typescript-operations
+      - typescript-document-nodes
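
A hedged sketch of how the generated artifacts are typically consumed in the tests (the operation and type names below are assumptions for illustration; the actual exports depend on the operation names in `src/graphql/queries/*.graphql`):

```ts
import { ApolloClient, HttpLink, InMemoryCache } from '@apollo/client'
// Generated by graphql-codegen per the config above; names are hypothetical
import { GetStorageBucketsQuery, GetStorageBuckets } from './src/graphql/generated/queries'

const client = new ApolloClient({
  link: new HttpLink({ uri: 'http://localhost:8081/graphql' }),
  cache: new InMemoryCache(),
})

// The typescript-operations plugin yields a typed result for each query
async function fetchBuckets(): Promise<GetStorageBucketsQuery> {
  const { data } = await client.query<GetStorageBucketsQuery>({ query: GetStorageBuckets })
  return data
}
```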

+ 7 - 0
tests/network-tests/openapitools.json

@@ -0,0 +1,7 @@
+{
+  "$schema": "node_modules/@openapitools/openapi-generator-cli/config.schema.json",
+  "spaces": 2,
+  "generator-cli": {
+    "version": "5.2.1"
+  }
+}

+ 18 - 3
tests/network-tests/package.json

@@ -9,7 +9,12 @@
     "node-ts-strict": "node -r ts-node/register --unhandled-rejections=strict",
     "lint": "eslint . --quiet --ext .ts",
     "checks": "tsc --noEmit --pretty && prettier ./ --check && yarn lint",
-    "format": "prettier ./ --write "
+    "format": "prettier ./ --write ",
+    "generate:api:storage-node": "yarn openapi-generator-cli generate -i ../../storage-node/src/api-spec/openapi.yaml -g typescript-axios -o ./src/apis/storageNode",
+    "generate:api:distributor-node": "yarn openapi-generator-cli generate -i ../../distributor-node/src/api-spec/public.yml -g typescript-axios -o ./src/apis/distributorNode",
+    "generate:api:all": "yarn generate:api:storage-node && yarn generate:api:distributor-node && yarn format",
+    "generate:types:graphql": "graphql-codegen",
+    "generate:all": "yarn generate:types:graphql && yarn generate:api:all"
   },
   "dependencies": {
     "@apollo/client": "^3.2.5",
@@ -23,7 +28,11 @@
     "bn.js": "^4.11.8",
     "dotenv": "^8.2.0",
     "fs": "^0.0.1-security",
-    "uuid": "^7.0.3"
+    "uuid": "^7.0.3",
+    "axios": "^0.21.1",
+    "bmp-js": "^0.1.0",
+    "@types/bmp-js": "^0.1.0",
+    "node-cleanup": "^2.1.2"
   },
   "devDependencies": {
     "@polkadot/ts": "^0.4.8",
@@ -32,7 +41,13 @@
     "chai": "^4.2.0",
     "prettier": "^2.2.1",
     "ts-node": "^10.2.1",
-    "typescript": "^4.4.3"
+    "typescript": "^4.4.3",
+    "@openapitools/openapi-generator-cli": "^2.3.6",
+    "@graphql-codegen/cli": "^1.21.4",
+    "@graphql-codegen/typescript": "^1.22.0",
+    "@graphql-codegen/import-types-preset": "^1.18.1",
+    "@graphql-codegen/typescript-operations": "^1.17.16",
+    "@graphql-codegen/typescript-document-nodes": "^1.17.11"
   },
   "volta": {
     "extends": "../../package.json"

+ 48 - 0
tests/network-tests/run-full-tests.sh

@@ -0,0 +1,48 @@
+#!/usr/bin/env bash
+set -e
+
+SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")"
+cd $SCRIPT_PATH
+
+CONTAINER_ID=$(./run-test-node-docker.sh)
+
+function cleanup() {
+    docker logs ${CONTAINER_ID} --tail 15
+    docker-compose -f ../../docker-compose.yml down -v
+}
+
+trap cleanup EXIT
+
+sleep 3
+
+# Display runtime version
+yarn workspace api-scripts tsnode-strict src/status.ts | grep Runtime
+
+# Start any other services we want
+# docker-compose -f ../../docker-compose.yml up -d colossus-1
+
+# Start a query-node
+../../query-node/start.sh
+
+# Run proposals tests first, since they require no leads hired
+./run-test-scenario.sh proposals
+
+# Setup storage & distribution
+HOST_IP=$(./get-host-ip.sh)
+# Because proposals tests hire and then fire each lead,
+# we need to override COLOSSUS_1_WORKER_ID (0 => 1) and DISTRIBUTOR_1_WORKER_ID (0 => 1)
+export COLOSSUS_1_URL="http://${HOST_IP}:3333"
+export COLOSSUS_1_WORKER_ID=1
+export COLOSSUS_1_WORKER_URI=//testing//worker//Storage//${COLOSSUS_1_WORKER_ID}
+export COLOSSUS_1_TRANSACTOR_KEY=$(docker run --rm --pull=always docker.io/parity/subkey:2.0.1 inspect ${COLOSSUS_1_TRANSACTOR_URI} --output-type json | jq .ss58Address -r)
+export DISTRIBUTOR_1_URL="http://${HOST_IP}:3334"
+export DISTRIBUTOR_1_WORKER_ID=1
+export DISTRIBUTOR_1_ACCOUNT_URI=//testing//worker//Distribution//${DISTRIBUTOR_1_WORKER_ID}
+REUSE_KEYS=true ./run-test-scenario.sh init-storage-and-distribution
+
+# Start colossus & argus
+docker-compose -f ../../docker-compose.yml up -d colossus-1
+docker-compose -f ../../docker-compose.yml up -d distributor-1
+
+# Run combined tests reusing the existing keys
+REUSE_KEYS=true ./run-test-scenario.sh combined
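
colossus and argus take a moment to come up after docker-compose starts them, so a readiness poll before the combined scenario can avoid flaky first requests. A hypothetical helper (not part of this diff), assuming the distributor's public API is served under /api/v1 as in the generated base.ts:

    import { Configuration, DefaultApi } from './src/apis/distributorNode'

    async function waitForDistributor(url: string, retries = 30): Promise<void> {
      const api = new DefaultApi(new Configuration({ basePath: `${url}/api/v1` }))
      for (let i = 0; i < retries; i++) {
        try {
          await api.publicStatus() // any 2xx answer means the node is serving
          return
        } catch {
          await new Promise((resolve) => setTimeout(resolve, 1000))
        }
      }
      throw new Error(`Distributor node at ${url} did not become ready`)
    }

    // e.g. await waitForDistributor(process.env.DISTRIBUTOR_1_URL || 'http://localhost:3334')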

+ 1 - 1
tests/network-tests/run-test-scenario.sh

@@ -10,4 +10,4 @@ SCENARIO=$1
 SCENARIO=${SCENARIO:=full}
 
 # Execute the tests
-time DEBUG=integration-tests* yarn workspace network-tests node-ts-strict src/scenarios/${SCENARIO}.ts
+time DEBUG=integration-tests*,-integration-tests:query-node-api:* yarn workspace network-tests node-ts-strict src/scenarios/${SCENARIO}.ts
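
The extended DEBUG value relies on the debug package's comma-separated pattern list: integration-tests* enables every test namespace, while the leading "-" excludes the chatty query-node-api namespaces introduced in QueryNodeApi below. For illustration (the namespace names here are made up; the matching rule is what matters):

    import Debug from 'debug'

    const flowLog = Debug('integration-tests:flow:createChannel')
    const queryLog = Debug('integration-tests:query-node-api:query')

    flowLog('printed - matched by integration-tests*')
    queryLog('suppressed - matched by -integration-tests:query-node-api:*')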

+ 1 - 1
tests/network-tests/run-tests.sh

@@ -40,4 +40,4 @@ yarn workspace api-scripts tsnode-strict src/status.ts | grep Runtime
 # keys for workers and members..
 
 # Second scenario..
-# ./run-test-scenario.sh $2
+# ./run-test-scenario.sh $2

+ 145 - 142
tests/network-tests/src/Api.ts

@@ -1,21 +1,20 @@
-import { ApiPromise, WsProvider, Keyring } from '@polkadot/api'
+import { ApiPromise, WsProvider, Keyring, SubmittableResult } from '@polkadot/api'
 import { Bytes, Option, u32, Vec, StorageKey } from '@polkadot/types'
-import { Codec, ISubmittableResult } from '@polkadot/types/types'
+import { Codec, ISubmittableResult, IEvent } from '@polkadot/types/types'
 import { KeyringPair } from '@polkadot/keyring/types'
 import { MemberId, PaidMembershipTerms, PaidTermId } from '@joystream/types/members'
 import { Mint, MintId } from '@joystream/types/mint'
 import {
   Application,
-  ApplicationIdToWorkerIdMap,
   Worker,
   WorkerId,
   OpeningPolicyCommitment,
   Opening as WorkingGroupOpening,
 } from '@joystream/types/working-group'
 import { ElectionStake, Seat } from '@joystream/types/council'
-import { AccountInfo, Balance, BalanceOf, BlockNumber, EventRecord } from '@polkadot/types/interfaces'
+import { AccountInfo, Balance, BalanceOf, BlockNumber, EventRecord, AccountId } from '@polkadot/types/interfaces'
 import BN from 'bn.js'
-import { SubmittableExtrinsic } from '@polkadot/api/types'
+import { AugmentedEvent, SubmittableExtrinsic } from '@polkadot/api/types'
 import { Sender, LogLevel } from './sender'
 import { Utils } from './utils'
 import { Stake, StakedState, StakeId } from '@joystream/types/stake'
@@ -39,8 +38,27 @@ import { metadataToBytes } from '../../../cli/lib/helpers/serialization'
 import { assert } from 'chai'
 import { WorkingGroups } from './WorkingGroups'
 
-type AnyMetadata = {
-  serializeBinary(): Uint8Array
+const workingGroupNameByGroup: { [key in WorkingGroups]: string } = {
+  'distributionWorkingGroup': 'Distribution',
+  'storageWorkingGroup': 'Storage',
+  'contentWorkingGroup': 'Content',
+  'gatewayWorkingGroup': 'Gateway',
+  'operationsWorkingGroupAlpha': 'OperationsAlpha',
+  'operationsWorkingGroupBeta': 'OperationsBeta',
+  'operationsWorkingGroupGamma': 'OperationsGamma',
+}
+
+type EventSection = keyof ApiPromise['events'] & string
+type EventMethod<Section extends EventSection> = keyof ApiPromise['events'][Section] & string
+type EventType<
+  Section extends EventSection,
+  Method extends EventMethod<Section>
+> = ApiPromise['events'][Section][Method] extends AugmentedEvent<'promise', infer T> ? IEvent<T> : never
+
+export type KeyGenInfo = {
+  start: number
+  final: number
+  custom: string[]
 }
 
 export class ApiFactory {
@@ -48,9 +66,14 @@ export class ApiFactory {
   private readonly keyring: Keyring
   // number used as part of key derivation path
   private keyId = 0
+  // stores names of the created custom keys
+  private customKeys: string[] = []
   // mapping from account address to key id.
   // To be able to re-derive keypair externally when mini-secret is known.
   readonly addressesToKeyId: Map<string, number> = new Map()
+  // mapping from account address to suri.
+  // To be able to get the suri of a known key for the purpose of, for example, interacting with the CLIs
+  readonly addressesToSuri: Map<string, string>
   // mini secret used in SURI key derivation path
   private readonly miniSecret: string
 
@@ -95,6 +118,7 @@ export class ApiFactory {
     this.keyring.addFromUri(sudoAccountUri)
     this.miniSecret = miniSecret
     this.addressesToKeyId = new Map()
+    this.addressesToSuri = new Map()
     this.keyId = 0
   }
 
@@ -106,30 +130,52 @@ export class ApiFactory {
     const keys: { key: KeyringPair; id: number }[] = []
     for (let i = 0; i < n; i++) {
       const id = this.keyId++
-      const key = this.createCustomKeyPair(`${id}`)
+      const key = this.createKeyPair(`${id}`)
       keys.push({ key, id })
       this.addressesToKeyId.set(key.address, id)
     }
     return keys
   }
 
+  private createKeyPair(suriPath: string, isCustom = false): KeyringPair {
+    if (isCustom) {
+      this.customKeys.push(suriPath)
+    }
+    const uri = `${this.miniSecret}//testing//${suriPath}`
+    const pair = this.keyring.addFromUri(uri)
+    this.addressesToSuri.set(pair.address, uri)
+    return pair
+  }
+
   public createCustomKeyPair(customPath: string): KeyringPair {
-    const uri = `${this.miniSecret}//testing//${customPath}`
-    return this.keyring.addFromUri(uri)
+    return this.createKeyPair(customPath, true)
   }
 
-  public keyGenInfo(): { start: number; final: number } {
+  public keyGenInfo(): KeyGenInfo {
     const start = 0
     const final = this.keyId
     return {
       start,
       final,
+      custom: this.customKeys,
     }
   }
 
   public getAllGeneratedAccounts(): { [k: string]: number } {
     return Object.fromEntries(this.addressesToKeyId)
   }
+
+  public getKeypair(address: AccountId | string): KeyringPair {
+    return this.keyring.getPair(address)
+  }
+
+  public getSuri(address: AccountId | string): string {
+    const suri = this.addressesToSuri.get(address.toString())
+    if (!suri) {
+      throw new Error(`Suri for address ${address} not available!`)
+    }
+    return suri
+  }
 }
 
 export class Api {
@@ -146,11 +192,44 @@ export class Api {
     this.sender = new Sender(api, keyring, label)
   }
 
-  // expose only direct ability to query chain
-  get query() {
+  public get query(): ApiPromise['query'] {
     return this.api.query
   }
 
+  public get consts(): ApiPromise['consts'] {
+    return this.api.consts
+  }
+
+  public get tx(): ApiPromise['tx'] {
+    return this.api.tx
+  }
+
+  public signAndSend(tx: SubmittableExtrinsic<'promise'>, account: string | AccountId): Promise<ISubmittableResult> {
+    return this.sender.signAndSend(tx, account)
+  }
+
+  public signAndSendMany(
+    txs: SubmittableExtrinsic<'promise'>[],
+    account: string | AccountId
+  ): Promise<ISubmittableResult[]> {
+    return Promise.all(txs.map((tx) => this.sender.signAndSend(tx, account)))
+  }
+
+  public signAndSendManyByMany(
+    txs: SubmittableExtrinsic<'promise'>[],
+    accounts: string[] | AccountId[]
+  ): Promise<ISubmittableResult[]> {
+    return Promise.all(txs.map((tx, i) => this.sender.signAndSend(tx, accounts[i])))
+  }
+
+  public getKeypair(address: string | AccountId): KeyringPair {
+    return this.factory.getKeypair(address)
+  }
+
+  public getSuri(address: string | AccountId): string {
+    return this.factory.getSuri(address)
+  }
+
   public enableDebugTxLogs(): void {
     this.sender.setLogLevel(LogLevel.Debug)
   }
@@ -167,7 +246,7 @@ export class Api {
     return this.factory.createCustomKeyPair(path)
   }
 
-  public keyGenInfo(): { start: number; final: number } {
+  public keyGenInfo(): KeyGenInfo {
     return this.factory.keyGenInfo()
   }
 
@@ -177,24 +256,7 @@ export class Api {
 
   // Well known WorkingGroup enum defined in runtime
   public getWorkingGroupString(workingGroup: WorkingGroups): string {
-    switch (workingGroup) {
-      case WorkingGroups.Storage:
-        return 'Storage'
-      case WorkingGroups.Content:
-        return 'Content'
-      case WorkingGroups.Gateway:
-        return 'Gateway'
-      case WorkingGroups.OperationsAlpha:
-        return 'OperationsAlpha'
-      case WorkingGroups.OperationsBeta:
-        return 'OperationsBeta'
-      case WorkingGroups.OperationsGamma:
-        return 'OperationsGamma'
-      case WorkingGroups.Distribution:
-        return 'Distribution'
-      default:
-        throw new Error(`Invalid working group string representation: ${workingGroup}`)
-    }
+    return workingGroupNameByGroup[workingGroup]
   }
 
   public async makeSudoCall(tx: SubmittableExtrinsic<'promise'>): Promise<ISubmittableResult> {
@@ -832,79 +894,52 @@ export class Api {
     return this.getBlockDuration().muln(durationInBlocks).toNumber()
   }
 
-  public findEventRecord(events: EventRecord[], section: string, method: string): EventRecord | undefined {
-    return events.find((record) => record.event.section === section && record.event.method === method)
-  }
-
-  public findMemberRegisteredEvent(events: EventRecord[]): MemberId | undefined {
-    const record = this.findEventRecord(events, 'members', 'MemberRegistered')
-    if (record) {
-      return record.event.data[0] as MemberId
-    }
-  }
-
-  public findProposalCreatedEvent(events: EventRecord[]): ProposalId | undefined {
-    const record = this.findEventRecord(events, 'proposalsEngine', 'ProposalCreated')
-    if (record) {
-      return record.event.data[1] as ProposalId
+  public findEvent<S extends EventSection, M extends EventMethod<S>>(
+    result: SubmittableResult | EventRecord[],
+    section: S,
+    method: M
+  ): EventType<S, M> | undefined {
+    if (Array.isArray(result)) {
+      return result.find(({ event }) => event.section === section && event.method === method)?.event as
+        | EventType<S, M>
+        | undefined
     }
-  }
-
-  public findOpeningAddedEvent(events: EventRecord[], workingGroup: WorkingGroups): OpeningId | undefined {
-    const record = this.findEventRecord(events, workingGroup, 'OpeningAdded')
-    if (record) {
-      return record.event.data[0] as OpeningId
-    }
-  }
-
-  public findLeaderSetEvent(events: EventRecord[], workingGroup: WorkingGroups): WorkerId | undefined {
-    const record = this.findEventRecord(events, workingGroup, 'LeaderSet')
-    if (record) {
-      return (record.event.data as unknown) as WorkerId
-    }
-  }
-
-  public findBeganApplicationReviewEvent(
-    events: EventRecord[],
-    workingGroup: WorkingGroups
-  ): ApplicationId | undefined {
-    const record = this.findEventRecord(events, workingGroup, 'BeganApplicationReview')
-    if (record) {
-      return (record.event.data as unknown) as ApplicationId
-    }
-  }
-
-  public findTerminatedLeaderEvent(events: EventRecord[], workingGroup: WorkingGroups): EventRecord | undefined {
-    return this.findEventRecord(events, workingGroup, 'TerminatedLeader')
-  }
-
-  public findWorkerRewardAmountUpdatedEvent(
-    events: EventRecord[],
-    workingGroup: WorkingGroups,
-    workerId: WorkerId
-  ): WorkerId | undefined {
-    const record = this.findEventRecord(events, workingGroup, 'WorkerRewardAmountUpdated')
-    if (record) {
-      const id = (record.event.data[0] as unknown) as WorkerId
-      if (id.eq(workerId)) {
-        return workerId
-      }
+    return result.findRecord(section, method)?.event as EventType<S, M> | undefined
+  }
+
+  public getEvent<S extends EventSection, M extends EventMethod<S>>(
+    result: SubmittableResult | EventRecord[],
+    section: S,
+    method: M
+  ): EventType<S, M> {
+    const event = this.findEvent(result, section, method)
+    if (!event) {
+      throw new Error(
+        `Cannot find expected ${section}.${method} event in result: ${JSON.stringify(
+          Array.isArray(result) ? result.map((e) => e.toHuman()) : result.toHuman()
+        )}`
+      )
     }
-  }
-
-  public findStakeDecreasedEvent(events: EventRecord[], workingGroup: WorkingGroups): EventRecord | undefined {
-    return this.findEventRecord(events, workingGroup, 'StakeDecreased')
-  }
-
-  public findStakeSlashedEvent(events: EventRecord[], workingGroup: WorkingGroups): EventRecord | undefined {
-    return this.findEventRecord(events, workingGroup, 'StakeSlashed')
-  }
-
-  public findMintCapacityChangedEvent(events: EventRecord[], workingGroup: WorkingGroups): BN | undefined {
-    const record = this.findEventRecord(events, workingGroup, 'MintCapacityChanged')
-    if (record) {
-      return (record.event.data[1] as unknown) as BN
+    return event
+  }
+
+  public findEvents<S extends EventSection, M extends EventMethod<S>>(
+    result: SubmittableResult | EventRecord[],
+    section: S,
+    method: M,
+    expectedCount?: number
+  ): EventType<S, M>[] {
+    const events = Array.isArray(result)
+      ? result.filter(({ event }) => event.section === section && event.method === method).map(({ event }) => event)
+      : result.filterRecords(section, method).map((r) => r.event)
+    if (expectedCount && events.length !== expectedCount) {
+      throw new Error(
+        `Unexpected count of ${section}.${method} events in result: ${JSON.stringify(
+          Array.isArray(result) ? result.map((e) => e.toHuman()) : result.toHuman()
+        )}. ` + `Expected: ${expectedCount}, Got: ${events.length}`
+      )
     }
+    return (events.sort((a, b) => new BN(a.index).cmp(new BN(b.index))) as unknown) as EventType<S, M>[]
   }
 
   // Subscribe to system events, resolves to an InvertedPromise or rejects if subscription fails.
@@ -939,26 +974,6 @@ export class Api {
     return invertedPromise
   }
 
-  public findOpeningFilledEvent(
-    events: EventRecord[],
-    workingGroup: WorkingGroups
-  ): ApplicationIdToWorkerIdMap | undefined {
-    const record = this.findEventRecord(events, workingGroup, 'OpeningFilled')
-    if (record) {
-      return (record.event.data[1] as unknown) as ApplicationIdToWorkerIdMap
-    }
-  }
-
-  public findApplicationReviewBeganEvent(
-    events: EventRecord[],
-    workingGroup: WorkingGroups
-  ): ApplicationId | undefined {
-    const record = this.findEventRecord(events, workingGroup, 'BeganApplicationReview')
-    if (record) {
-      return (record.event.data as unknown) as ApplicationId
-    }
-  }
-
   public async getWorkingGroupMintCapacity(module: WorkingGroups): Promise<BN> {
     const mintId: MintId = await this.api.query[module].mint<MintId>()
     const mint: Mint = await this.api.query.minting.mints<Mint>(mintId)
@@ -1733,13 +1748,11 @@ export class Api {
   }
 
   public async getWorkerRoleAccounts(workerIds: WorkerId[], module: WorkingGroups): Promise<string[]> {
-    const entries: [StorageKey, Worker][] = await this.api.query[module].workerById.entries<Worker>()
+    const workers = await this.api.query[module].workerById.multi<Worker>(workerIds)
 
-    return entries
-      .filter(([idKey]) => {
-        return workerIds.includes(idKey.args[0] as WorkerId)
-      })
-      .map(([, worker]) => worker.role_account_id.toString())
+    return workers.map((worker) => {
+      return worker.role_account_id.toString()
+    })
   }
 
   public async getStake(id: StakeId): Promise<Stake> {
@@ -1822,13 +1835,8 @@ export class Api {
 
     const result = await this.sender.signAndSend(tx, memberControllerAccount)
 
-    const record = this.findEventRecord(result.events, 'content', 'ChannelCreated')
-    if (record) {
-      return record.event.data[1] as ChannelId
-    }
-
-    // TODO: get error from 'result'
-    throw new Error('Failed to create channel')
+    const event = this.getEvent(result.events, 'content', 'ChannelCreated')
+    return event.data[1]
   }
 
   // Create a mock video, throws on failure
@@ -1847,13 +1855,8 @@ export class Api {
 
     const result = await this.sender.signAndSend(tx, memberControllerAccount)
 
-    const record = this.findEventRecord(result.events, 'content', 'VideoCreated')
-    if (record) {
-      return record.event.data[2] as VideoId
-    }
-
-    // TODO: get error from 'result'
-    throw new Error('Failed to create video')
+    const event = this.getEvent(result.events, 'content', 'VideoCreated')
+    return event.data[2]
   }
 
   async createChannelCategoryAsLead(name: string): Promise<ISubmittableResult> {
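
The long list of single-purpose find*Event helpers is replaced by three generic ones whose return types are inferred from the augmented event metadata (EventType<S, M>). A sketch of the resulting call-site pattern; tx and account are placeholders, and the event names are ones the old helpers already handled:

    const result = await api.signAndSend(tx, account)

    // getEvent throws with a readable event dump when the event is missing,
    // so fixtures no longer need their own "Failed to ..." error paths
    const registered = api.getEvent(result.events, 'members', 'MemberRegistered')
    const memberId = registered.data[0] // typed from the event definition, no casts

    // findEvents can additionally assert an exact event count
    const created = api.findEvents(result.events, 'content', 'ChannelCreated', 1)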

+ 102 - 65
tests/network-tests/src/QueryNodeApi.ts

@@ -1,85 +1,122 @@
-import { gql, ApolloClient, ApolloQueryResult, NormalizedCacheObject } from '@apollo/client'
+import { ApolloClient, DocumentNode, NormalizedCacheObject } from '@apollo/client/core'
+import { extendDebug, Debugger } from './Debugger'
+import {
+  StorageDataObjectFieldsFragment,
+  GetDataObjectsByIdsQuery,
+  GetDataObjectsByIdsQueryVariables,
+  GetDataObjectsByIds,
+  ChannelFieldsFragment,
+  GetChannelById,
+  GetChannelByIdQuery,
+  GetChannelByIdQueryVariables,
+} from './graphql/generated/queries'
+import { Maybe } from './graphql/generated/schema'
+import { OperationDefinitionNode } from 'graphql'
+import { Utils } from './utils'
 
 export class QueryNodeApi {
   private readonly queryNodeProvider: ApolloClient<NormalizedCacheObject>
+  private readonly debug: Debugger.Debugger
+  private readonly queryDebug: Debugger.Debugger
+  private readonly tryDebug: Debugger.Debugger
 
   constructor(queryNodeProvider: ApolloClient<NormalizedCacheObject>) {
     this.queryNodeProvider = queryNodeProvider
+    this.debug = extendDebug('query-node-api')
+    this.queryDebug = this.debug.extend('query')
+    this.tryDebug = this.debug.extend('try')
   }
 
-  public async getChannelbyHandle(handle: string): Promise<ApolloQueryResult<any>> {
-    const GET_CHANNEL_BY_TITLE = gql`
-      query($handle: String!) {
-        channels(where: { handle_eq: $handle }) {
-          handle
-          description
-          coverPhotoUrl
-          avatarPhotoUrl
-          isPublic
-          isCurated
-          videos {
-            title
-            description
-            duration
-            thumbnailUrl
-            isExplicit
-            isPublic
-          }
-        }
+  public async tryQueryWithTimeout<QueryResultT>(
+    query: () => Promise<QueryResultT>,
+    assertResultIsValid: (res: QueryResultT) => void,
+    retryTimeMs = 18000,
+    retries = 6
+  ): Promise<QueryResultT> {
+    const label = query.toString().replace(/^.*\.([A-Za-z0-9]+\(.*\))$/g, '$1')
+    const debug = this.tryDebug.extend(label)
+    let retryCounter = 0
+    const retry = async (error: any) => {
+      if (retryCounter === retries) {
+        debug(`Max number of query retries (${retries}) reached!`)
+        throw error
+      }
+      debug(`Retrying query in ${retryTimeMs}ms...`)
+      ++retryCounter
+      await Utils.wait(retryTimeMs)
+    }
+    while (true) {
+      let result: QueryResultT
+      try {
+        result = await query()
+      } catch (e) {
+        debug(`Query node unreachable`)
+        await retry(e)
+        continue
+      }
+
+      try {
+        assertResultIsValid(result)
+      } catch (e) {
+        debug(`Unexpected query result${e instanceof Error ? ` (${e.message})` : ''}`)
+        await retry(e)
+        continue
       }
-    `
 
-    return await this.queryNodeProvider.query({ query: GET_CHANNEL_BY_TITLE, variables: { handle } })
+      return result
+    }
   }
 
-  public async performFullTextSearchOnChannelTitle(text: string): Promise<ApolloQueryResult<any>> {
-    const FULL_TEXT_SEARCH_ON_CHANNEL_TITLE = gql`
-      query($text: String!) {
-        search(text: $text) {
-          item {
-            ... on Channel {
-              handle
-              description
-            }
-          }
-        }
-      }
-    `
+  private debugQuery(query: DocumentNode, args: Record<string, unknown>): void {
+    const queryDef = query.definitions.find((d) => d.kind === 'OperationDefinition') as OperationDefinitionNode
+    this.queryDebug(`${queryDef.name!.value}(${JSON.stringify(args)})`)
+  }
 
-    return await this.queryNodeProvider.query({ query: FULL_TEXT_SEARCH_ON_CHANNEL_TITLE, variables: { text } })
+  // Query entity by unique input
+  private async uniqueEntityQuery<
+    QueryT extends { [k: string]: Maybe<Record<string, unknown>> | undefined },
+    VariablesT extends Record<string, unknown>
+  >(
+    query: DocumentNode,
+    variables: VariablesT,
+    resultKey: keyof QueryT
+  ): Promise<Required<QueryT>[keyof QueryT] | null> {
+    this.debugQuery(query, variables)
+    return (await this.queryNodeProvider.query<QueryT, VariablesT>({ query, variables })).data[resultKey] || null
   }
 
-  public async performFullTextSearchOnVideoTitle(text: string): Promise<ApolloQueryResult<any>> {
-    const FULL_TEXT_SEARCH_ON_VIDEO_TITLE = gql`
-      query($text: String!) {
-        search(text: $text) {
-          item {
-            ... on Video {
-              title
-            }
-          }
-        }
-      }
-    `
+  // Query entities by "non-unique" input and return first result
+  private async firstEntityQuery<QueryT extends { [k: string]: unknown[] }, VariablesT extends Record<string, unknown>>(
+    query: DocumentNode,
+    variables: VariablesT,
+    resultKey: keyof QueryT
+  ): Promise<QueryT[keyof QueryT][number] | null> {
+    this.debugQuery(query, variables)
+    return (await this.queryNodeProvider.query<QueryT, VariablesT>({ query, variables })).data[resultKey][0] || null
+  }
 
-    return await this.queryNodeProvider.query({ query: FULL_TEXT_SEARCH_ON_VIDEO_TITLE, variables: { text } })
+  // Query multiple entities
+  private async multipleEntitiesQuery<
+    QueryT extends { [k: string]: unknown[] },
+    VariablesT extends Record<string, unknown>
+  >(query: DocumentNode, variables: VariablesT, resultKey: keyof QueryT): Promise<QueryT[keyof QueryT]> {
+    this.debugQuery(query, variables)
+    return (await this.queryNodeProvider.query<QueryT, VariablesT>({ query, variables })).data[resultKey]
   }
 
-  public async performWhereQueryByVideoTitle(title: string): Promise<ApolloQueryResult<any>> {
-    const WHERE_QUERY_ON_VIDEO_TITLE = gql`
-      query($title: String!) {
-        videos(where: { title_eq: $title }) {
-          media {
-            location {
-              __typename
-              ... on JoystreamMediaLocation {
-                dataObjectId
-              }
-            }
-          }
-        }
-      }
-    `
-    return await this.queryNodeProvider.query({ query: WHERE_QUERY_ON_VIDEO_TITLE, variables: { title } })
+  public async channelById(id: string): Promise<Maybe<ChannelFieldsFragment>> {
+    return this.uniqueEntityQuery<GetChannelByIdQuery, GetChannelByIdQueryVariables>(
+      GetChannelById,
+      { id },
+      'channelByUniqueInput'
+    )
+  }
+
+  public async getDataObjectsByIds(ids: string[]): Promise<StorageDataObjectFieldsFragment[]> {
+    return this.multipleEntitiesQuery<GetDataObjectsByIdsQuery, GetDataObjectsByIdsQueryVariables>(
+      GetDataObjectsByIds,
+      { ids },
+      'storageDataObjects'
+    )
   }
 }
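
tryQueryWithTimeout exists because the query node indexes events asynchronously: a fixture polls (by default up to 6 retries, 18 s apart) until the assertion stops throwing. Typical usage, assuming a QueryNodeApi instance named query and a channelId obtained from a previous extrinsic:

    const channel = await query.tryQueryWithTimeout(
      () => query.channelById(channelId.toString()),
      (qnChannel) => {
        // throwing here triggers a retry rather than an immediate failure
        if (!qnChannel) {
          throw new Error(`Channel ${channelId.toString()} not yet indexed`)
        }
      }
    )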

+ 26 - 10
tests/network-tests/src/Scenario.ts

@@ -1,5 +1,5 @@
 import { WsProvider } from '@polkadot/api'
-import { ApiFactory, Api } from './Api'
+import { ApiFactory, Api, KeyGenInfo } from './Api'
 import { QueryNodeApi } from './QueryNodeApi'
 import { config } from 'dotenv'
 import { ApolloClient, InMemoryCache, HttpLink } from '@apollo/client'
@@ -9,7 +9,7 @@ import { Job } from './Job'
 import { JobManager } from './JobManager'
 import { ResourceManager } from './Resources'
 import fetch from 'cross-fetch'
-import fs from 'fs'
+import fs, { readFileSync } from 'fs'
 
 export type ScenarioProps = {
   env: NodeJS.ProcessEnv
@@ -17,22 +17,29 @@ export type ScenarioProps = {
   job: (label: string, flows: Flow[] | Flow) => Job
 }
 
+const OUTPUT_FILE_PATH = 'output.json'
+
+type TestsOutput = {
+  accounts: { [k: string]: number }
+  keyIds: KeyGenInfo
+  miniSecret: string
+}
+
 function writeOutput(api: Api, miniSecret: string) {
-  const outputFilename = 'output.json'
-  console.error('Writing generated account to', outputFilename)
+  console.error('Writing generated accounts to', OUTPUT_FILE_PATH)
   // account to key ids
   const accounts = api.getAllGeneratedAccounts()
 
   // first and last key id used to generate keys in this scenario
   const keyIds = api.keyGenInfo()
 
-  const output = {
+  const output: TestsOutput = {
     accounts,
     keyIds,
     miniSecret,
   }
 
-  fs.writeFileSync(outputFilename, JSON.stringify(output, undefined, 2))
+  fs.writeFileSync(OUTPUT_FILE_PATH, JSON.stringify(output, undefined, 2))
 }
 
 export async function scenario(scene: (props: ScenarioProps) => Promise<void>): Promise<void> {
@@ -53,12 +60,21 @@ export async function scenario(scene: (props: ScenarioProps) => Promise<void>):
 
   const api = apiFactory.getApi('Key Generation')
 
-  // Generate all key ids before START_KEY_ID
-  const startKeyId = parseInt(env.START_KEY_ID || '0')
-  if (startKeyId) {
-    api.createKeyPairs(startKeyId)
+  // Generate all key ids based on REUSE_KEYS or START_KEY_ID (if provided)
+  const reuseKeys = Boolean(env.REUSE_KEYS)
+  let startKeyId: number
+  let customKeys: string[] = []
+  if (reuseKeys) {
+    const output = JSON.parse(readFileSync(OUTPUT_FILE_PATH).toString()) as TestsOutput
+    startKeyId = output.keyIds.final
+    customKeys = output.keyIds.custom
+  } else {
+    startKeyId = parseInt(env.START_KEY_ID || '0')
   }
 
+  api.createKeyPairs(startKeyId)
+  customKeys.forEach((k) => api.createCustomKeyPair(k))
+
   const queryNodeUrl: string = env.QUERY_NODE_URL || 'http://127.0.0.1:8081/graphql'
 
   const queryNodeProvider = new ApolloClient({
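
With REUSE_KEYS set, a scenario deterministically re-creates every keypair recorded by a previous run, which is what lets run-full-tests.sh chain proposals, init-storage-and-distribution and combined against the same chain state. A hypothetical output.json (addresses and the custom path are invented; the shape matches TestsOutput above):

    const exampleOutput = {
      accounts: { '5Gw3s...': 0, '5FHne...': 1 }, // address -> key id
      keyIds: { start: 0, final: 2, custom: ['myCustomKey'] },
      miniSecret: '<mini secret used in the //testing// derivation path>',
    }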

+ 27 - 0
tests/network-tests/src/apis/distributorNode/.openapi-generator-ignore

@@ -0,0 +1,27 @@
+# OpenAPI Generator Ignore
+# Generated by openapi-generator https://github.com/openapitools/openapi-generator
+
+# Use this file to prevent files from being overwritten by the generator.
+# The patterns follow closely to .gitignore or .dockerignore.
+
+# As an example, the C# client generator defines ApiClient.cs.
+# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line:
+#ApiClient.cs
+
+# You can match any string of characters against a directory, file or extension with a single asterisk (*):
+#foo/*/qux
+# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux
+
+# You can recursively match patterns against a directory, file or extension with a double asterisk (**):
+#foo/**/qux
+# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux
+
+# You can also negate patterns with an exclamation (!).
+# For example, you can ignore all files in a docs folder with the file extension .md:
+#docs/*.md
+# Then explicitly reverse the ignore rule for a single file:
+#!docs/README.md
+
+git_push.sh
+.npmignore
+.gitignore

+ 5 - 0
tests/network-tests/src/apis/distributorNode/.openapi-generator/FILES

@@ -0,0 +1,5 @@
+api.ts
+base.ts
+common.ts
+configuration.ts
+index.ts

+ 1 - 0
tests/network-tests/src/apis/distributorNode/.openapi-generator/VERSION

@@ -0,0 +1 @@
+5.2.1

+ 410 - 0
tests/network-tests/src/apis/distributorNode/api.ts

@@ -0,0 +1,410 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Distributor node public API
+ * Distributor node public API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+import { Configuration } from './configuration'
+import globalAxios, { AxiosPromise, AxiosInstance } from 'axios'
+// Some imports not used depending on template conditions
+// @ts-ignore
+import {
+  DUMMY_BASE_URL,
+  assertParamExists,
+  setApiKeyToObject,
+  setBasicAuthToObject,
+  setBearerAuthToObject,
+  setOAuthToObject,
+  setSearchParams,
+  serializeDataIfNeeded,
+  toPathString,
+  createRequestFunction,
+} from './common'
+// @ts-ignore
+import { BASE_PATH, COLLECTION_FORMATS, RequestArgs, BaseAPI, RequiredError } from './base'
+
+/**
+ * @type BucketsResponse
+ * @export
+ */
+export type BucketsResponse = BucketsResponseOneOf | BucketsResponseOneOf1
+
+/**
+ *
+ * @export
+ * @interface BucketsResponseOneOf
+ */
+export interface BucketsResponseOneOf {
+  /**
+   *
+   * @type {Array<string>}
+   * @memberof BucketsResponseOneOf
+   */
+  bucketIds: Array<string>
+}
+/**
+ *
+ * @export
+ * @interface BucketsResponseOneOf1
+ */
+export interface BucketsResponseOneOf1 {
+  /**
+   *
+   * @type {number}
+   * @memberof BucketsResponseOneOf1
+   */
+  allByWorkerId: number
+}
+/**
+ *
+ * @export
+ * @interface ErrorResponse
+ */
+export interface ErrorResponse {
+  /**
+   *
+   * @type {string}
+   * @memberof ErrorResponse
+   */
+  type?: string
+  /**
+   *
+   * @type {string}
+   * @memberof ErrorResponse
+   */
+  message: string
+}
+/**
+ *
+ * @export
+ * @interface StatusResponse
+ */
+export interface StatusResponse {
+  /**
+   *
+   * @type {string}
+   * @memberof StatusResponse
+   */
+  id: string
+  /**
+   *
+   * @type {number}
+   * @memberof StatusResponse
+   */
+  objectsInCache: number
+  /**
+   *
+   * @type {number}
+   * @memberof StatusResponse
+   */
+  storageLimit: number
+  /**
+   *
+   * @type {number}
+   * @memberof StatusResponse
+   */
+  storageUsed: number
+  /**
+   *
+   * @type {number}
+   * @memberof StatusResponse
+   */
+  uptime: number
+  /**
+   *
+   * @type {number}
+   * @memberof StatusResponse
+   */
+  downloadsInProgress: number
+}
+
+/**
+ * DefaultApi - axios parameter creator
+ * @export
+ */
+export const DefaultApiAxiosParamCreator = function (configuration?: Configuration) {
+  return {
+    /**
+     * Returns a media file.
+     * @param {string} objectId Data Object ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    publicAsset: async (objectId: string, options: any = {}): Promise<RequestArgs> => {
+      // verify required parameter 'objectId' is not null or undefined
+      assertParamExists('publicAsset', 'objectId', objectId)
+      const localVarPath = `/assets/{objectId}`.replace(`{${'objectId'}}`, encodeURIComponent(String(objectId)))
+      // use dummy base URL string because the URL constructor only accepts absolute URLs.
+      const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL)
+      let baseOptions
+      if (configuration) {
+        baseOptions = configuration.baseOptions
+      }
+
+      const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options }
+      const localVarHeaderParameter = {} as any
+      const localVarQueryParameter = {} as any
+
+      setSearchParams(localVarUrlObj, localVarQueryParameter, options.query)
+      let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}
+      localVarRequestOptions.headers = { ...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers }
+
+      return {
+        url: toPathString(localVarUrlObj),
+        options: localVarRequestOptions,
+      }
+    },
+    /**
+     * Returns asset response headers (cache status, content type and/or length, accepted ranges etc.)
+     * @param {string} objectId Data Object ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    publicAssetHead: async (objectId: string, options: any = {}): Promise<RequestArgs> => {
+      // verify required parameter 'objectId' is not null or undefined
+      assertParamExists('publicAssetHead', 'objectId', objectId)
+      const localVarPath = `/assets/{objectId}`.replace(`{${'objectId'}}`, encodeURIComponent(String(objectId)))
+      // use dummy base URL string because the URL constructor only accepts absolute URLs.
+      const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL)
+      let baseOptions
+      if (configuration) {
+        baseOptions = configuration.baseOptions
+      }
+
+      const localVarRequestOptions = { method: 'HEAD', ...baseOptions, ...options }
+      const localVarHeaderParameter = {} as any
+      const localVarQueryParameter = {} as any
+
+      setSearchParams(localVarUrlObj, localVarQueryParameter, options.query)
+      let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}
+      localVarRequestOptions.headers = { ...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers }
+
+      return {
+        url: toPathString(localVarUrlObj),
+        options: localVarRequestOptions,
+      }
+    },
+    /**
+     * Returns list of distributed buckets
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    publicBuckets: async (options: any = {}): Promise<RequestArgs> => {
+      const localVarPath = `/buckets`
+      // use dummy base URL string because the URL constructor only accepts absolute URLs.
+      const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL)
+      let baseOptions
+      if (configuration) {
+        baseOptions = configuration.baseOptions
+      }
+
+      const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options }
+      const localVarHeaderParameter = {} as any
+      const localVarQueryParameter = {} as any
+
+      setSearchParams(localVarUrlObj, localVarQueryParameter, options.query)
+      let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}
+      localVarRequestOptions.headers = { ...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers }
+
+      return {
+        url: toPathString(localVarUrlObj),
+        options: localVarRequestOptions,
+      }
+    },
+    /**
+     * Returns json object describing current node status.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    publicStatus: async (options: any = {}): Promise<RequestArgs> => {
+      const localVarPath = `/status`
+      // use dummy base URL string because the URL constructor only accepts absolute URLs.
+      const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL)
+      let baseOptions
+      if (configuration) {
+        baseOptions = configuration.baseOptions
+      }
+
+      const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options }
+      const localVarHeaderParameter = {} as any
+      const localVarQueryParameter = {} as any
+
+      setSearchParams(localVarUrlObj, localVarQueryParameter, options.query)
+      let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}
+      localVarRequestOptions.headers = { ...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers }
+
+      return {
+        url: toPathString(localVarUrlObj),
+        options: localVarRequestOptions,
+      }
+    },
+  }
+}
+
+/**
+ * DefaultApi - functional programming interface
+ * @export
+ */
+export const DefaultApiFp = function (configuration?: Configuration) {
+  const localVarAxiosParamCreator = DefaultApiAxiosParamCreator(configuration)
+  return {
+    /**
+     * Returns a media file.
+     * @param {string} objectId Data Object ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    async publicAsset(
+      objectId: string,
+      options?: any
+    ): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<any>> {
+      const localVarAxiosArgs = await localVarAxiosParamCreator.publicAsset(objectId, options)
+      return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)
+    },
+    /**
+     * Returns asset response headers (cache status, content type and/or length, accepted ranges etc.)
+     * @param {string} objectId Data Object ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    async publicAssetHead(
+      objectId: string,
+      options?: any
+    ): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<void>> {
+      const localVarAxiosArgs = await localVarAxiosParamCreator.publicAssetHead(objectId, options)
+      return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)
+    },
+    /**
+     * Returns list of distributed buckets
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    async publicBuckets(
+      options?: any
+    ): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<BucketsResponse>> {
+      const localVarAxiosArgs = await localVarAxiosParamCreator.publicBuckets(options)
+      return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)
+    },
+    /**
+     * Returns json object describing current node status.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    async publicStatus(
+      options?: any
+    ): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<StatusResponse>> {
+      const localVarAxiosArgs = await localVarAxiosParamCreator.publicStatus(options)
+      return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)
+    },
+  }
+}
+
+/**
+ * DefaultApi - factory interface
+ * @export
+ */
+export const DefaultApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) {
+  const localVarFp = DefaultApiFp(configuration)
+  return {
+    /**
+     * Returns a media file.
+     * @param {string} objectId Data Object ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    publicAsset(objectId: string, options?: any): AxiosPromise<any> {
+      return localVarFp.publicAsset(objectId, options).then((request) => request(axios, basePath))
+    },
+    /**
+     * Returns asset response headers (cache status, content type and/or length, accepted ranges etc.)
+     * @param {string} objectId Data Object ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    publicAssetHead(objectId: string, options?: any): AxiosPromise<void> {
+      return localVarFp.publicAssetHead(objectId, options).then((request) => request(axios, basePath))
+    },
+    /**
+     * Returns list of distributed buckets
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    publicBuckets(options?: any): AxiosPromise<BucketsResponse> {
+      return localVarFp.publicBuckets(options).then((request) => request(axios, basePath))
+    },
+    /**
+     * Returns json object describing current node status.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    publicStatus(options?: any): AxiosPromise<StatusResponse> {
+      return localVarFp.publicStatus(options).then((request) => request(axios, basePath))
+    },
+  }
+}
+
+/**
+ * DefaultApi - object-oriented interface
+ * @export
+ * @class DefaultApi
+ * @extends {BaseAPI}
+ */
+export class DefaultApi extends BaseAPI {
+  /**
+   * Returns a media file.
+   * @param {string} objectId Data Object ID
+   * @param {*} [options] Override http request option.
+   * @throws {RequiredError}
+   * @memberof DefaultApi
+   */
+  public publicAsset(objectId: string, options?: any) {
+    return DefaultApiFp(this.configuration)
+      .publicAsset(objectId, options)
+      .then((request) => request(this.axios, this.basePath))
+  }
+
+  /**
+   * Returns asset response headers (cache status, content type and/or length, accepted ranges etc.)
+   * @param {string} objectId Data Object ID
+   * @param {*} [options] Override http request option.
+   * @throws {RequiredError}
+   * @memberof DefaultApi
+   */
+  public publicAssetHead(objectId: string, options?: any) {
+    return DefaultApiFp(this.configuration)
+      .publicAssetHead(objectId, options)
+      .then((request) => request(this.axios, this.basePath))
+  }
+
+  /**
+   * Returns list of distributed buckets
+   * @param {*} [options] Override http request option.
+   * @throws {RequiredError}
+   * @memberof DefaultApi
+   */
+  public publicBuckets(options?: any) {
+    return DefaultApiFp(this.configuration)
+      .publicBuckets(options)
+      .then((request) => request(this.axios, this.basePath))
+  }
+
+  /**
+   * Returns json object describing current node status.
+   * @param {*} [options] Override http request option.
+   * @throws {RequiredError}
+   * @memberof DefaultApi
+   */
+  public publicStatus(options?: any) {
+    return DefaultApiFp(this.configuration)
+      .publicStatus(options)
+      .then((request) => request(this.axios, this.basePath))
+  }
+}
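
All four operations are thin wrappers over axios, so standard axios request options pass straight through the options parameter. A sketch of fetching a raw asset as binary data (responseType is a stock axios option, not part of the generated signature):

    import { DefaultApi } from './src/apis/distributorNode'

    const api = new DefaultApi()

    async function downloadAsset(objectId: string): Promise<Buffer> {
      const res = await api.publicAsset(objectId, { responseType: 'arraybuffer' })
      return Buffer.from(res.data) // ArrayBuffer (or Buffer under Node) in this mode
    }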

+ 74 - 0
tests/network-tests/src/apis/distributorNode/base.ts

@@ -0,0 +1,74 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Distributor node public API
+ * Distributor node public API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+import { Configuration } from './configuration'
+// Some imports not used depending on template conditions
+// @ts-ignore
+import globalAxios, { AxiosPromise, AxiosInstance } from 'axios'
+
+export const BASE_PATH = 'http://localhost:3334/api/v1'.replace(/\/+$/, '')
+
+/**
+ *
+ * @export
+ */
+export const COLLECTION_FORMATS = {
+  csv: ',',
+  ssv: ' ',
+  tsv: '\t',
+  pipes: '|',
+}
+
+/**
+ *
+ * @export
+ * @interface RequestArgs
+ */
+export interface RequestArgs {
+  url: string
+  options: any
+}
+
+/**
+ *
+ * @export
+ * @class BaseAPI
+ */
+export class BaseAPI {
+  protected configuration: Configuration | undefined
+
+  constructor(
+    configuration?: Configuration,
+    protected basePath: string = BASE_PATH,
+    protected axios: AxiosInstance = globalAxios
+  ) {
+    if (configuration) {
+      this.configuration = configuration
+      this.basePath = configuration.basePath || this.basePath
+    }
+  }
+}
+
+/**
+ *
+ * @export
+ * @class RequiredError
+ * @extends {Error}
+ */
+export class RequiredError extends Error {
+  name: 'RequiredError' = 'RequiredError'
+  constructor(public field: string, msg?: string) {
+    super(msg)
+  }
+}

+ 150 - 0
tests/network-tests/src/apis/distributorNode/common.ts

@@ -0,0 +1,150 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Distributor node public API
+ * Distributor node public API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+import { Configuration } from './configuration'
+import { RequiredError, RequestArgs } from './base'
+import { AxiosInstance } from 'axios'
+
+/**
+ *
+ * @export
+ */
+export const DUMMY_BASE_URL = 'https://example.com'
+
+/**
+ *
+ * @throws {RequiredError}
+ * @export
+ */
+export const assertParamExists = function (functionName: string, paramName: string, paramValue: unknown) {
+  if (paramValue === null || paramValue === undefined) {
+    throw new RequiredError(
+      paramName,
+      `Required parameter ${paramName} was null or undefined when calling ${functionName}.`
+    )
+  }
+}
+
+/**
+ *
+ * @export
+ */
+export const setApiKeyToObject = async function (object: any, keyParamName: string, configuration?: Configuration) {
+  if (configuration && configuration.apiKey) {
+    const localVarApiKeyValue =
+      typeof configuration.apiKey === 'function' ? await configuration.apiKey(keyParamName) : await configuration.apiKey
+    object[keyParamName] = localVarApiKeyValue
+  }
+}
+
+/**
+ *
+ * @export
+ */
+export const setBasicAuthToObject = function (object: any, configuration?: Configuration) {
+  if (configuration && (configuration.username || configuration.password)) {
+    object['auth'] = { username: configuration.username, password: configuration.password }
+  }
+}
+
+/**
+ *
+ * @export
+ */
+export const setBearerAuthToObject = async function (object: any, configuration?: Configuration) {
+  if (configuration && configuration.accessToken) {
+    const accessToken =
+      typeof configuration.accessToken === 'function'
+        ? await configuration.accessToken()
+        : await configuration.accessToken
+    object['Authorization'] = 'Bearer ' + accessToken
+  }
+}
+
+/**
+ *
+ * @export
+ */
+export const setOAuthToObject = async function (
+  object: any,
+  name: string,
+  scopes: string[],
+  configuration?: Configuration
+) {
+  if (configuration && configuration.accessToken) {
+    const localVarAccessTokenValue =
+      typeof configuration.accessToken === 'function'
+        ? await configuration.accessToken(name, scopes)
+        : await configuration.accessToken
+    object['Authorization'] = 'Bearer ' + localVarAccessTokenValue
+  }
+}
+
+/**
+ *
+ * @export
+ */
+export const setSearchParams = function (url: URL, ...objects: any[]) {
+  const searchParams = new URLSearchParams(url.search)
+  for (const object of objects) {
+    for (const key in object) {
+      if (Array.isArray(object[key])) {
+        searchParams.delete(key)
+        for (const item of object[key]) {
+          searchParams.append(key, item)
+        }
+      } else {
+        searchParams.set(key, object[key])
+      }
+    }
+  }
+  url.search = searchParams.toString()
+}
+
+/**
+ *
+ * @export
+ */
+export const serializeDataIfNeeded = function (value: any, requestOptions: any, configuration?: Configuration) {
+  const nonString = typeof value !== 'string'
+  const needsSerialization =
+    nonString && configuration && configuration.isJsonMime
+      ? configuration.isJsonMime(requestOptions.headers['Content-Type'])
+      : nonString
+  return needsSerialization ? JSON.stringify(value !== undefined ? value : {}) : value || ''
+}
+
+/**
+ *
+ * @export
+ */
+export const toPathString = function (url: URL) {
+  return url.pathname + url.search + url.hash
+}
+
+/**
+ *
+ * @export
+ */
+export const createRequestFunction = function (
+  axiosArgs: RequestArgs,
+  globalAxios: AxiosInstance,
+  BASE_PATH: string,
+  configuration?: Configuration
+) {
+  return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => {
+    const axiosRequestArgs = { ...axiosArgs.options, url: (configuration?.basePath || basePath) + axiosArgs.url }
+    return axios.request(axiosRequestArgs)
+  }
+}

+ 108 - 0
tests/network-tests/src/apis/distributorNode/configuration.ts

@@ -0,0 +1,108 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Distributor node public API
+ * Distributor node public API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+export interface ConfigurationParameters {
+  apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>)
+  username?: string
+  password?: string
+  accessToken?:
+    | string
+    | Promise<string>
+    | ((name?: string, scopes?: string[]) => string)
+    | ((name?: string, scopes?: string[]) => Promise<string>)
+  basePath?: string
+  baseOptions?: any
+  formDataCtor?: new () => any
+}
+
+export class Configuration {
+  /**
+   * parameter for apiKey security
+   * @param name security name
+   * @memberof Configuration
+   */
+  apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>)
+  /**
+   * parameter for basic security
+   *
+   * @type {string}
+   * @memberof Configuration
+   */
+  username?: string
+  /**
+   * parameter for basic security
+   *
+   * @type {string}
+   * @memberof Configuration
+   */
+  password?: string
+  /**
+   * parameter for oauth2 security
+   * @param name security name
+   * @param scopes oauth2 scope
+   * @memberof Configuration
+   */
+  accessToken?:
+    | string
+    | Promise<string>
+    | ((name?: string, scopes?: string[]) => string)
+    | ((name?: string, scopes?: string[]) => Promise<string>)
+  /**
+   * override base path
+   *
+   * @type {string}
+   * @memberof Configuration
+   */
+  basePath?: string
+  /**
+   * base options for axios calls
+   *
+   * @type {any}
+   * @memberof Configuration
+   */
+  baseOptions?: any
+  /**
+   * The FormData constructor that will be used to create multipart form data
+   * requests. You can inject this here so that execution environments that
+   * do not support the FormData class can still run the generated client.
+   *
+   * @type {new () => FormData}
+   */
+  formDataCtor?: new () => any
+
+  constructor(param: ConfigurationParameters = {}) {
+    this.apiKey = param.apiKey
+    this.username = param.username
+    this.password = param.password
+    this.accessToken = param.accessToken
+    this.basePath = param.basePath
+    this.baseOptions = param.baseOptions
+    this.formDataCtor = param.formDataCtor
+  }
+
+  /**
+   * Check if the given MIME is a JSON MIME.
+   * JSON MIME examples:
+   *   application/json
+   *   application/json; charset=UTF8
+   *   APPLICATION/JSON
+   *   application/vnd.company+json
+   * @param mime - MIME (Multipurpose Internet Mail Extensions)
+   * @return True if the given MIME is JSON, false otherwise.
+   */
+  public isJsonMime(mime: string): boolean {
+    const jsonMime: RegExp = new RegExp('^(application/json|[^;/ \t]+/[^;/ \t]+[+]json)[ \t]*(;.*)?$', 'i')
+    return mime !== null && (jsonMime.test(mime) || mime.toLowerCase() === 'application/json-patch+json')
+  }
+}

+ 16 - 0
tests/network-tests/src/apis/distributorNode/index.ts

@@ -0,0 +1,16 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Distributor node public API
+ * Distributor node public API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+export * from './api'
+export * from './configuration'

+ 27 - 0
tests/network-tests/src/apis/storageNode/.openapi-generator-ignore

@@ -0,0 +1,27 @@
+# OpenAPI Generator Ignore
+# Generated by openapi-generator https://github.com/openapitools/openapi-generator
+
+# Use this file to prevent files from being overwritten by the generator.
+# The patterns follow closely to .gitignore or .dockerignore.
+
+# As an example, the C# client generator defines ApiClient.cs.
+# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line:
+#ApiClient.cs
+
+# You can match any string of characters against a directory, file or extension with a single asterisk (*):
+#foo/*/qux
+# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux
+
+# You can recursively match patterns against a directory, file or extension with a double asterisk (**):
+#foo/**/qux
+# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux
+
+# You can also negate patterns with an exclamation (!).
+# For example, you can ignore all files in a docs folder with the file extension .md:
+#docs/*.md
+# Then explicitly reverse the ignore rule for a single file:
+#!docs/README.md
+
+git_push.sh
+.npmignore
+.gitignore

+ 5 - 0
tests/network-tests/src/apis/storageNode/.openapi-generator/FILES

@@ -0,0 +1,5 @@
+api.ts
+base.ts
+common.ts
+configuration.ts
+index.ts

+ 1 - 0
tests/network-tests/src/apis/storageNode/.openapi-generator/VERSION

@@ -0,0 +1 @@
+5.2.1

+ 738 - 0
tests/network-tests/src/apis/storageNode/api.ts

@@ -0,0 +1,738 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Storage node API
+ * Storage node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+import { Configuration } from './configuration'
+import globalAxios, { AxiosPromise, AxiosInstance } from 'axios'
+// Some imports not used depending on template conditions
+// @ts-ignore
+import {
+  DUMMY_BASE_URL,
+  assertParamExists,
+  setApiKeyToObject,
+  setBasicAuthToObject,
+  setBearerAuthToObject,
+  setOAuthToObject,
+  setSearchParams,
+  serializeDataIfNeeded,
+  toPathString,
+  createRequestFunction,
+} from './common'
+// @ts-ignore
+import { BASE_PATH, COLLECTION_FORMATS, RequestArgs, BaseAPI, RequiredError } from './base'
+
+/**
+ *
+ * @export
+ * @interface DataStatsResponse
+ */
+export interface DataStatsResponse {
+  /**
+   *
+   * @type {number}
+   * @memberof DataStatsResponse
+   */
+  totalSize: number
+  /**
+   *
+   * @type {number}
+   * @memberof DataStatsResponse
+   */
+  objectNumber: number
+  /**
+   *
+   * @type {number}
+   * @memberof DataStatsResponse
+   */
+  tempDirSize?: number
+  /**
+   *
+   * @type {number}
+   * @memberof DataStatsResponse
+   */
+  tempDownloads?: number
+}
+/**
+ *
+ * @export
+ * @interface ErrorResponse
+ */
+export interface ErrorResponse {
+  /**
+   *
+   * @type {string}
+   * @memberof ErrorResponse
+   */
+  type?: string
+  /**
+   *
+   * @type {string}
+   * @memberof ErrorResponse
+   */
+  message: string
+}
+/**
+ *
+ * @export
+ * @interface InlineResponse201
+ */
+export interface InlineResponse201 {
+  /**
+   *
+   * @type {string}
+   * @memberof InlineResponse201
+   */
+  id?: string
+}
+/**
+ *
+ * @export
+ * @interface TokenRequest
+ */
+export interface TokenRequest {
+  /**
+   *
+   * @type {TokenRequestData}
+   * @memberof TokenRequest
+   */
+  data: TokenRequestData
+  /**
+   *
+   * @type {string}
+   * @memberof TokenRequest
+   */
+  signature: string
+}
+/**
+ *
+ * @export
+ * @interface TokenRequestData
+ */
+export interface TokenRequestData {
+  /**
+   *
+   * @type {number}
+   * @memberof TokenRequestData
+   */
+  memberId: number
+  /**
+   *
+   * @type {string}
+   * @memberof TokenRequestData
+   */
+  accountId: string
+  /**
+   *
+   * @type {number}
+   * @memberof TokenRequestData
+   */
+  dataObjectId: number
+  /**
+   *
+   * @type {number}
+   * @memberof TokenRequestData
+   */
+  storageBucketId: number
+  /**
+   *
+   * @type {string}
+   * @memberof TokenRequestData
+   */
+  bagId: string
+}
+/**
+ *
+ * @export
+ * @interface VersionResponse
+ */
+export interface VersionResponse {
+  /**
+   *
+   * @type {string}
+   * @memberof VersionResponse
+   */
+  version: string
+  /**
+   *
+   * @type {string}
+   * @memberof VersionResponse
+   */
+  userAgent?: string
+}
+
+/**
+ * FilesApi - axios parameter creator
+ * @export
+ */
+export const FilesApiAxiosParamCreator = function (configuration?: Configuration) {
+  return {
+    /**
+     * Returns a media file.
+     * @param {string} id Data object ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    filesApiGetFile: async (id: string, options: any = {}): Promise<RequestArgs> => {
+      // verify required parameter 'id' is not null or undefined
+      assertParamExists('filesApiGetFile', 'id', id)
+      const localVarPath = `/files/{id}`.replace(`{${'id'}}`, encodeURIComponent(String(id)))
+      // use dummy base URL string because the URL constructor only accepts absolute URLs.
+      const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL)
+      let baseOptions
+      if (configuration) {
+        baseOptions = configuration.baseOptions
+      }
+
+      const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options }
+      const localVarHeaderParameter = {} as any
+      const localVarQueryParameter = {} as any
+
+      setSearchParams(localVarUrlObj, localVarQueryParameter, options.query)
+      let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}
+      localVarRequestOptions.headers = { ...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers }
+
+      return {
+        url: toPathString(localVarUrlObj),
+        options: localVarRequestOptions,
+      }
+    },
+    /**
+     * Returns media file headers.
+     * @param {string} id Data object ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    filesApiGetFileHeaders: async (id: string, options: any = {}): Promise<RequestArgs> => {
+      // verify required parameter 'id' is not null or undefined
+      assertParamExists('filesApiGetFileHeaders', 'id', id)
+      const localVarPath = `/files/{id}`.replace(`{${'id'}}`, encodeURIComponent(String(id)))
+      // use dummy base URL string because the URL constructor only accepts absolute URLs.
+      const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL)
+      let baseOptions
+      if (configuration) {
+        baseOptions = configuration.baseOptions
+      }
+
+      const localVarRequestOptions = { method: 'HEAD', ...baseOptions, ...options }
+      const localVarHeaderParameter = {} as any
+      const localVarQueryParameter = {} as any
+
+      setSearchParams(localVarUrlObj, localVarQueryParameter, options.query)
+      let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}
+      localVarRequestOptions.headers = { ...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers }
+
+      return {
+        url: toPathString(localVarUrlObj),
+        options: localVarRequestOptions,
+      }
+    },
+    /**
+     * Upload data
+     * @param {string} dataObjectId Data object runtime ID
+     * @param {string} storageBucketId Storage bucket ID
+     * @param {string} bagId Bag ID
+     * @param {any} [file] Data file
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    filesApiUploadFile: async (
+      dataObjectId: string,
+      storageBucketId: string,
+      bagId: string,
+      file?: any,
+      options: any = {}
+    ): Promise<RequestArgs> => {
+      // verify required parameter 'dataObjectId' is not null or undefined
+      assertParamExists('filesApiUploadFile', 'dataObjectId', dataObjectId)
+      // verify required parameter 'storageBucketId' is not null or undefined
+      assertParamExists('filesApiUploadFile', 'storageBucketId', storageBucketId)
+      // verify required parameter 'bagId' is not null or undefined
+      assertParamExists('filesApiUploadFile', 'bagId', bagId)
+      const localVarPath = `/files`
+      // use dummy base URL string because the URL constructor only accepts absolute URLs.
+      const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL)
+      let baseOptions
+      if (configuration) {
+        baseOptions = configuration.baseOptions
+      }
+
+      const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options }
+      const localVarHeaderParameter = {} as any
+      const localVarQueryParameter = {} as any
+      const localVarFormParams = new ((configuration && configuration.formDataCtor) || FormData)()
+
+      if (file !== undefined) {
+        localVarFormParams.append('file', file as any)
+      }
+
+      if (dataObjectId !== undefined) {
+        localVarFormParams.append('dataObjectId', dataObjectId as any)
+      }
+
+      if (storageBucketId !== undefined) {
+        localVarFormParams.append('storageBucketId', storageBucketId as any)
+      }
+
+      if (bagId !== undefined) {
+        localVarFormParams.append('bagId', bagId as any)
+      }
+
+      localVarHeaderParameter['Content-Type'] = 'multipart/form-data'
+
+      setSearchParams(localVarUrlObj, localVarQueryParameter, options.query)
+      let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}
+      localVarRequestOptions.headers = { ...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers }
+      localVarRequestOptions.data = localVarFormParams
+
+      return {
+        url: toPathString(localVarUrlObj),
+        options: localVarRequestOptions,
+      }
+    },
+  }
+}
+
+/**
+ * FilesApi - functional programming interface
+ * @export
+ */
+export const FilesApiFp = function (configuration?: Configuration) {
+  const localVarAxiosParamCreator = FilesApiAxiosParamCreator(configuration)
+  return {
+    /**
+     * Returns a media file.
+     * @param {string} id Data object ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    async filesApiGetFile(
+      id: string,
+      options?: any
+    ): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<any>> {
+      const localVarAxiosArgs = await localVarAxiosParamCreator.filesApiGetFile(id, options)
+      return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)
+    },
+    /**
+     * Returns media file headers.
+     * @param {string} id Data object ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    async filesApiGetFileHeaders(
+      id: string,
+      options?: any
+    ): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<void>> {
+      const localVarAxiosArgs = await localVarAxiosParamCreator.filesApiGetFileHeaders(id, options)
+      return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)
+    },
+    /**
+     * Upload data
+     * @param {string} dataObjectId Data object runtime ID
+     * @param {string} storageBucketId Storage bucket ID
+     * @param {string} bagId Bag ID
+     * @param {any} [file] Data file
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    async filesApiUploadFile(
+      dataObjectId: string,
+      storageBucketId: string,
+      bagId: string,
+      file?: any,
+      options?: any
+    ): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<InlineResponse201>> {
+      const localVarAxiosArgs = await localVarAxiosParamCreator.filesApiUploadFile(
+        dataObjectId,
+        storageBucketId,
+        bagId,
+        file,
+        options
+      )
+      return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)
+    },
+  }
+}
+
+/**
+ * FilesApi - factory interface
+ * @export
+ */
+export const FilesApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) {
+  const localVarFp = FilesApiFp(configuration)
+  return {
+    /**
+     * Returns a media file.
+     * @param {string} id Data object ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    filesApiGetFile(id: string, options?: any): AxiosPromise<any> {
+      return localVarFp.filesApiGetFile(id, options).then((request) => request(axios, basePath))
+    },
+    /**
+     * Returns media file headers.
+     * @param {string} id Data object ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    filesApiGetFileHeaders(id: string, options?: any): AxiosPromise<void> {
+      return localVarFp.filesApiGetFileHeaders(id, options).then((request) => request(axios, basePath))
+    },
+    /**
+     * Upload data
+     * @param {string} dataObjectId Data object runtime ID
+     * @param {string} storageBucketId Storage bucket ID
+     * @param {string} bagId Bag ID
+     * @param {any} [file] Data file
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    filesApiUploadFile(
+      dataObjectId: string,
+      storageBucketId: string,
+      bagId: string,
+      file?: any,
+      options?: any
+    ): AxiosPromise<InlineResponse201> {
+      return localVarFp
+        .filesApiUploadFile(dataObjectId, storageBucketId, bagId, file, options)
+        .then((request) => request(axios, basePath))
+    },
+  }
+}
+
+/**
+ * FilesApi - object-oriented interface
+ * @export
+ * @class FilesApi
+ * @extends {BaseAPI}
+ */
+export class FilesApi extends BaseAPI {
+  /**
+   * Returns a media file.
+   * @param {string} id Data object ID
+   * @param {*} [options] Override http request option.
+   * @throws {RequiredError}
+   * @memberof FilesApi
+   */
+  public filesApiGetFile(id: string, options?: any) {
+    return FilesApiFp(this.configuration)
+      .filesApiGetFile(id, options)
+      .then((request) => request(this.axios, this.basePath))
+  }
+
+  /**
+   * Returns media file headers.
+   * @param {string} id Data object ID
+   * @param {*} [options] Override http request option.
+   * @throws {RequiredError}
+   * @memberof FilesApi
+   */
+  public filesApiGetFileHeaders(id: string, options?: any) {
+    return FilesApiFp(this.configuration)
+      .filesApiGetFileHeaders(id, options)
+      .then((request) => request(this.axios, this.basePath))
+  }
+
+  /**
+   * Upload data
+   * @param {string} dataObjectId Data object runtime ID
+   * @param {string} storageBucketId Storage bucket ID
+   * @param {string} bagId Bag ID
+   * @param {any} [file] Data file
+   * @param {*} [options] Override http request option.
+   * @throws {RequiredError}
+   * @memberof FilesApi
+   */
+  public filesApiUploadFile(dataObjectId: string, storageBucketId: string, bagId: string, file?: any, options?: any) {
+    return FilesApiFp(this.configuration)
+      .filesApiUploadFile(dataObjectId, storageBucketId, bagId, file, options)
+      .then((request) => request(this.axios, this.basePath))
+  }
+}
+
+/**
+ * StateApi - axios parameter creator
+ * @export
+ */
+export const StateApiAxiosParamCreator = function (configuration?: Configuration) {
+  return {
+    /**
+     * Returns all local data objects.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    stateApiGetAllLocalDataObjects: async (options: any = {}): Promise<RequestArgs> => {
+      const localVarPath = `/state/data-objects`
+      // use dummy base URL string because the URL constructor only accepts absolute URLs.
+      const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL)
+      let baseOptions
+      if (configuration) {
+        baseOptions = configuration.baseOptions
+      }
+
+      const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options }
+      const localVarHeaderParameter = {} as any
+      const localVarQueryParameter = {} as any
+
+      setSearchParams(localVarUrlObj, localVarQueryParameter, options.query)
+      let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}
+      localVarRequestOptions.headers = { ...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers }
+
+      return {
+        url: toPathString(localVarUrlObj),
+        options: localVarRequestOptions,
+      }
+    },
+    /**
+     * Returns local data objects for the bag.
+     * @param {string} bagId Bag ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    stateApiGetLocalDataObjectsByBagId: async (bagId: string, options: any = {}): Promise<RequestArgs> => {
+      // verify required parameter 'bagId' is not null or undefined
+      assertParamExists('stateApiGetLocalDataObjectsByBagId', 'bagId', bagId)
+      const localVarPath = `/state/bags/{bagId}/data-objects`.replace(`{${'bagId'}}`, encodeURIComponent(String(bagId)))
+      // use dummy base URL string because the URL constructor only accepts absolute URLs.
+      const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL)
+      let baseOptions
+      if (configuration) {
+        baseOptions = configuration.baseOptions
+      }
+
+      const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options }
+      const localVarHeaderParameter = {} as any
+      const localVarQueryParameter = {} as any
+
+      setSearchParams(localVarUrlObj, localVarQueryParameter, options.query)
+      let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}
+      localVarRequestOptions.headers = { ...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers }
+
+      return {
+        url: toPathString(localVarUrlObj),
+        options: localVarRequestOptions,
+      }
+    },
+    /**
+     * Returns local uploading directory stats.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    stateApiGetLocalDataStats: async (options: any = {}): Promise<RequestArgs> => {
+      const localVarPath = `/state/data`
+      // use dummy base URL string because the URL constructor only accepts absolute URLs.
+      const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL)
+      let baseOptions
+      if (configuration) {
+        baseOptions = configuration.baseOptions
+      }
+
+      const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options }
+      const localVarHeaderParameter = {} as any
+      const localVarQueryParameter = {} as any
+
+      setSearchParams(localVarUrlObj, localVarQueryParameter, options.query)
+      let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}
+      localVarRequestOptions.headers = { ...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers }
+
+      return {
+        url: toPathString(localVarUrlObj),
+        options: localVarRequestOptions,
+      }
+    },
+    /**
+     * Returns server version.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    stateApiGetVersion: async (options: any = {}): Promise<RequestArgs> => {
+      const localVarPath = `/version`
+      // use dummy base URL string because the URL constructor only accepts absolute URLs.
+      const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL)
+      let baseOptions
+      if (configuration) {
+        baseOptions = configuration.baseOptions
+      }
+
+      const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options }
+      const localVarHeaderParameter = {} as any
+      const localVarQueryParameter = {} as any
+
+      setSearchParams(localVarUrlObj, localVarQueryParameter, options.query)
+      let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}
+      localVarRequestOptions.headers = { ...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers }
+
+      return {
+        url: toPathString(localVarUrlObj),
+        options: localVarRequestOptions,
+      }
+    },
+  }
+}
+
+/**
+ * StateApi - functional programming interface
+ * @export
+ */
+export const StateApiFp = function (configuration?: Configuration) {
+  const localVarAxiosParamCreator = StateApiAxiosParamCreator(configuration)
+  return {
+    /**
+     * Returns all local data objects.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    async stateApiGetAllLocalDataObjects(
+      options?: any
+    ): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<string>>> {
+      const localVarAxiosArgs = await localVarAxiosParamCreator.stateApiGetAllLocalDataObjects(options)
+      return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)
+    },
+    /**
+     * Returns local data objects for the bag.
+     * @param {string} bagId Bag ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    async stateApiGetLocalDataObjectsByBagId(
+      bagId: string,
+      options?: any
+    ): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<string>>> {
+      const localVarAxiosArgs = await localVarAxiosParamCreator.stateApiGetLocalDataObjectsByBagId(bagId, options)
+      return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)
+    },
+    /**
+     * Returns local uploading directory stats.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    async stateApiGetLocalDataStats(
+      options?: any
+    ): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<DataStatsResponse>> {
+      const localVarAxiosArgs = await localVarAxiosParamCreator.stateApiGetLocalDataStats(options)
+      return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)
+    },
+    /**
+     * Returns server version.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    async stateApiGetVersion(
+      options?: any
+    ): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<VersionResponse>> {
+      const localVarAxiosArgs = await localVarAxiosParamCreator.stateApiGetVersion(options)
+      return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration)
+    },
+  }
+}
+
+/**
+ * StateApi - factory interface
+ * @export
+ */
+export const StateApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) {
+  const localVarFp = StateApiFp(configuration)
+  return {
+    /**
+     * Returns all local data objects.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    stateApiGetAllLocalDataObjects(options?: any): AxiosPromise<Array<string>> {
+      return localVarFp.stateApiGetAllLocalDataObjects(options).then((request) => request(axios, basePath))
+    },
+    /**
+     * Returns local data objects for the bag.
+     * @param {string} bagId Bag ID
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    stateApiGetLocalDataObjectsByBagId(bagId: string, options?: any): AxiosPromise<Array<string>> {
+      return localVarFp.stateApiGetLocalDataObjectsByBagId(bagId, options).then((request) => request(axios, basePath))
+    },
+    /**
+     * Returns local uploading directory stats.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    stateApiGetLocalDataStats(options?: any): AxiosPromise<DataStatsResponse> {
+      return localVarFp.stateApiGetLocalDataStats(options).then((request) => request(axios, basePath))
+    },
+    /**
+     * Returns server version.
+     * @param {*} [options] Override http request option.
+     * @throws {RequiredError}
+     */
+    stateApiGetVersion(options?: any): AxiosPromise<VersionResponse> {
+      return localVarFp.stateApiGetVersion(options).then((request) => request(axios, basePath))
+    },
+  }
+}
+
+/**
+ * StateApi - object-oriented interface
+ * @export
+ * @class StateApi
+ * @extends {BaseAPI}
+ */
+export class StateApi extends BaseAPI {
+  /**
+   * Returns all local data objects.
+   * @param {*} [options] Override http request option.
+   * @throws {RequiredError}
+   * @memberof StateApi
+   */
+  public stateApiGetAllLocalDataObjects(options?: any) {
+    return StateApiFp(this.configuration)
+      .stateApiGetAllLocalDataObjects(options)
+      .then((request) => request(this.axios, this.basePath))
+  }
+
+  /**
+   * Returns local data objects for the bag.
+   * @param {string} bagId Bag ID
+   * @param {*} [options] Override http request option.
+   * @throws {RequiredError}
+   * @memberof StateApi
+   */
+  public stateApiGetLocalDataObjectsByBagId(bagId: string, options?: any) {
+    return StateApiFp(this.configuration)
+      .stateApiGetLocalDataObjectsByBagId(bagId, options)
+      .then((request) => request(this.axios, this.basePath))
+  }
+
+  /**
+   * Returns local uploading directory stats.
+   * @param {*} [options] Override http request option.
+   * @throws {RequiredError}
+   * @memberof StateApi
+   */
+  public stateApiGetLocalDataStats(options?: any) {
+    return StateApiFp(this.configuration)
+      .stateApiGetLocalDataStats(options)
+      .then((request) => request(this.axios, this.basePath))
+  }
+
+  /**
+   * Returns server version.
+   * @param {*} [options] Override http request option.
+   * @throws {RequiredError}
+   * @memberof StateApi
+   */
+  public stateApiGetVersion(options?: any) {
+    return StateApiFp(this.configuration)
+      .stateApiGetVersion(options)
+      .then((request) => request(this.axios, this.basePath))
+  }
+}
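
A minimal usage sketch of the generated client above, assuming a storage node reachable at the BASE_PATH default from base.ts (http://localhost:3333/api/v1); the import path and the data object id '0' are placeholders, not part of this changeset:

// Sketch only: probing a storage node with the generated client.
// Import path assumes a caller in tests/network-tests/src.
import { Configuration, FilesApi, StateApi } from './apis/storageNode'

async function probeStorageNode(): Promise<void> {
  const config = new Configuration({ basePath: 'http://localhost:3333/api/v1' })
  const stateApi = new StateApi(config)
  const filesApi = new FilesApi(config)

  const { data: version } = await stateApi.stateApiGetVersion()
  console.log(`storage node ${version.version} (${version.userAgent ?? 'unknown agent'})`)

  const { data: stats } = await stateApi.stateApiGetLocalDataStats()
  console.log(`${stats.objectNumber} objects, ${stats.totalSize} bytes`)

  // HEAD request: the response carries headers only, no file body.
  await filesApi.filesApiGetFileHeaders('0')
}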

+ 74 - 0
tests/network-tests/src/apis/storageNode/base.ts

@@ -0,0 +1,74 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Storage node API
+ * Storage node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+import { Configuration } from './configuration'
+// Some imports not used depending on template conditions
+// @ts-ignore
+import globalAxios, { AxiosPromise, AxiosInstance } from 'axios'
+
+export const BASE_PATH = 'http://localhost:3333/api/v1'.replace(/\/+$/, '')
+
+/**
+ *
+ * @export
+ */
+export const COLLECTION_FORMATS = {
+  csv: ',',
+  ssv: ' ',
+  tsv: '\t',
+  pipes: '|',
+}
+
+/**
+ *
+ * @export
+ * @interface RequestArgs
+ */
+export interface RequestArgs {
+  url: string
+  options: any
+}
+
+/**
+ *
+ * @export
+ * @class BaseAPI
+ */
+export class BaseAPI {
+  protected configuration: Configuration | undefined
+
+  constructor(
+    configuration?: Configuration,
+    protected basePath: string = BASE_PATH,
+    protected axios: AxiosInstance = globalAxios
+  ) {
+    if (configuration) {
+      this.configuration = configuration
+      this.basePath = configuration.basePath || this.basePath
+    }
+  }
+}
+
+/**
+ *
+ * @export
+ * @class RequiredError
+ * @extends {Error}
+ */
+export class RequiredError extends Error {
+  name: 'RequiredError' = 'RequiredError'
+  constructor(public field: string, msg?: string) {
+    super(msg)
+  }
+}

+ 150 - 0
tests/network-tests/src/apis/storageNode/common.ts

@@ -0,0 +1,150 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Storage node API
+ * Storage node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+import { Configuration } from './configuration'
+import { RequiredError, RequestArgs } from './base'
+import { AxiosInstance } from 'axios'
+
+/**
+ *
+ * @export
+ */
+export const DUMMY_BASE_URL = 'https://example.com'
+
+/**
+ *
+ * @throws {RequiredError}
+ * @export
+ */
+export const assertParamExists = function (functionName: string, paramName: string, paramValue: unknown) {
+  if (paramValue === null || paramValue === undefined) {
+    throw new RequiredError(
+      paramName,
+      `Required parameter ${paramName} was null or undefined when calling ${functionName}.`
+    )
+  }
+}
+
+/**
+ *
+ * @export
+ */
+export const setApiKeyToObject = async function (object: any, keyParamName: string, configuration?: Configuration) {
+  if (configuration && configuration.apiKey) {
+    const localVarApiKeyValue =
+      typeof configuration.apiKey === 'function' ? await configuration.apiKey(keyParamName) : await configuration.apiKey
+    object[keyParamName] = localVarApiKeyValue
+  }
+}
+
+/**
+ *
+ * @export
+ */
+export const setBasicAuthToObject = function (object: any, configuration?: Configuration) {
+  if (configuration && (configuration.username || configuration.password)) {
+    object['auth'] = { username: configuration.username, password: configuration.password }
+  }
+}
+
+/**
+ *
+ * @export
+ */
+export const setBearerAuthToObject = async function (object: any, configuration?: Configuration) {
+  if (configuration && configuration.accessToken) {
+    const accessToken =
+      typeof configuration.accessToken === 'function'
+        ? await configuration.accessToken()
+        : await configuration.accessToken
+    object['Authorization'] = 'Bearer ' + accessToken
+  }
+}
+
+/**
+ *
+ * @export
+ */
+export const setOAuthToObject = async function (
+  object: any,
+  name: string,
+  scopes: string[],
+  configuration?: Configuration
+) {
+  if (configuration && configuration.accessToken) {
+    const localVarAccessTokenValue =
+      typeof configuration.accessToken === 'function'
+        ? await configuration.accessToken(name, scopes)
+        : await configuration.accessToken
+    object['Authorization'] = 'Bearer ' + localVarAccessTokenValue
+  }
+}
+
+/**
+ *
+ * @export
+ */
+export const setSearchParams = function (url: URL, ...objects: any[]) {
+  const searchParams = new URLSearchParams(url.search)
+  for (const object of objects) {
+    for (const key in object) {
+      if (Array.isArray(object[key])) {
+        searchParams.delete(key)
+        for (const item of object[key]) {
+          searchParams.append(key, item)
+        }
+      } else {
+        searchParams.set(key, object[key])
+      }
+    }
+  }
+  url.search = searchParams.toString()
+}
+
+/**
+ *
+ * @export
+ */
+export const serializeDataIfNeeded = function (value: any, requestOptions: any, configuration?: Configuration) {
+  const nonString = typeof value !== 'string'
+  const needsSerialization =
+    nonString && configuration && configuration.isJsonMime
+      ? configuration.isJsonMime(requestOptions.headers['Content-Type'])
+      : nonString
+  return needsSerialization ? JSON.stringify(value !== undefined ? value : {}) : value || ''
+}
+
+/**
+ *
+ * @export
+ */
+export const toPathString = function (url: URL) {
+  return url.pathname + url.search + url.hash
+}
+
+/**
+ *
+ * @export
+ */
+export const createRequestFunction = function (
+  axiosArgs: RequestArgs,
+  globalAxios: AxiosInstance,
+  BASE_PATH: string,
+  configuration?: Configuration
+) {
+  return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => {
+    const axiosRequestArgs = { ...axiosArgs.options, url: (configuration?.basePath || basePath) + axiosArgs.url }
+    return axios.request(axiosRequestArgs)
+  }
+}
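
A small illustration of how these helpers compose; the values are arbitrary. setSearchParams overwrites scalar keys and appends every element of array values, and toPathString strips the dummy origin back off:

// Illustration with arbitrary values (not from the real API surface).
import { DUMMY_BASE_URL, setSearchParams, toPathString } from './common'

const url = new URL('/state/data-objects', DUMMY_BASE_URL)
setSearchParams(url, { limit: 10 }, { ids: ['1', '2'] })
console.log(toPathString(url)) // "/state/data-objects?limit=10&ids=1&ids=2"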

+ 108 - 0
tests/network-tests/src/apis/storageNode/configuration.ts

@@ -0,0 +1,108 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Storage node API
+ * Storage node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+export interface ConfigurationParameters {
+  apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>)
+  username?: string
+  password?: string
+  accessToken?:
+    | string
+    | Promise<string>
+    | ((name?: string, scopes?: string[]) => string)
+    | ((name?: string, scopes?: string[]) => Promise<string>)
+  basePath?: string
+  baseOptions?: any
+  formDataCtor?: new () => any
+}
+
+export class Configuration {
+  /**
+   * parameter for apiKey security
+   * @param name security name
+   * @memberof Configuration
+   */
+  apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>)
+  /**
+   * parameter for basic security
+   *
+   * @type {string}
+   * @memberof Configuration
+   */
+  username?: string
+  /**
+   * parameter for basic security
+   *
+   * @type {string}
+   * @memberof Configuration
+   */
+  password?: string
+  /**
+   * parameter for oauth2 security
+   * @param name security name
+   * @param scopes oauth2 scope
+   * @memberof Configuration
+   */
+  accessToken?:
+    | string
+    | Promise<string>
+    | ((name?: string, scopes?: string[]) => string)
+    | ((name?: string, scopes?: string[]) => Promise<string>)
+  /**
+   * override base path
+   *
+   * @type {string}
+   * @memberof Configuration
+   */
+  basePath?: string
+  /**
+   * base options for axios calls
+   *
+   * @type {any}
+   * @memberof Configuration
+   */
+  baseOptions?: any
+  /**
+   * The FormData constructor that will be used to create multipart form data
+   * requests. You can inject this here so that execution environments that
+   * do not support the FormData class can still run the generated client.
+   *
+   * @type {new () => FormData}
+   */
+  formDataCtor?: new () => any
+
+  constructor(param: ConfigurationParameters = {}) {
+    this.apiKey = param.apiKey
+    this.username = param.username
+    this.password = param.password
+    this.accessToken = param.accessToken
+    this.basePath = param.basePath
+    this.baseOptions = param.baseOptions
+    this.formDataCtor = param.formDataCtor
+  }
+
+  /**
+   * Check if the given MIME is a JSON MIME.
+   * JSON MIME examples:
+   *   application/json
+   *   application/json; charset=UTF8
+   *   APPLICATION/JSON
+   *   application/vnd.company+json
+   * @param mime - MIME (Multipurpose Internet Mail Extensions)
+   * @return True if the given MIME is JSON, false otherwise.
+   */
+  public isJsonMime(mime: string): boolean {
+    const jsonMime: RegExp = new RegExp('^(application/json|[^;/ \t]+/[^;/ \t]+[+]json)[ \t]*(;.*)?$', 'i')
+    return mime !== null && (jsonMime.test(mime) || mime.toLowerCase() === 'application/json-patch+json')
+  }
+}
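
For reference, a few inputs and the results isJsonMime produces under the regular expression above:

import { Configuration } from './configuration'

const config = new Configuration()
config.isJsonMime('application/json') // true
config.isJsonMime('application/json; charset=UTF8') // true
config.isJsonMime('application/vnd.company+json') // true
config.isJsonMime('application/json-patch+json') // true (also matched explicitly)
config.isJsonMime('text/html') // false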

+ 16 - 0
tests/network-tests/src/apis/storageNode/index.ts

@@ -0,0 +1,16 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Storage node API
+ * Storage node API
+ *
+ * The version of the OpenAPI document: 0.1.0
+ * Contact: info@joystream.org
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+export * from './api'
+export * from './configuration'

+ 54 - 0
tests/network-tests/src/cli/base.ts

@@ -0,0 +1,54 @@
+import path from 'path'
+import { execFile } from 'child_process'
+import { promisify } from 'util'
+import { Sender } from '../sender'
+
+export type CommandResult = { stdout: string; stderr: string; out: string }
+
+export abstract class CLI {
+  protected env: Record<string, string>
+  protected readonly rootPath: string
+  protected readonly binPath: string
+  protected defaultArgs: string[]
+
+  constructor(rootPath: string, defaultEnv: Record<string, string> = {}, defaultArgs: string[] = []) {
+    this.rootPath = rootPath
+    this.binPath = path.resolve(rootPath, './bin/run')
+    this.env = {
+      ...process.env,
+      AUTO_CONFIRM: 'true',
+      FORCE_COLOR: '0',
+      ...defaultEnv,
+    }
+    this.defaultArgs = [...defaultArgs]
+  }
+
+  protected getArgs(customArgs: string[]): string[] {
+    return [...this.defaultArgs, ...customArgs]
+  }
+
+  protected getFlagStringValue(args: string[], flag: string, alias?: string): string | undefined {
+    const flagIndex = args.lastIndexOf(flag)
+    const aliasIndex = alias ? args.lastIndexOf(alias) : -1
+    const flagOrAliasIndex = Math.max(flagIndex, aliasIndex)
+    if (flagOrAliasIndex === -1) {
+      return undefined
+    }
+    const nextArg = args[flagOrAliasIndex + 1]
+    return nextArg
+  }
+
+  async run(command: string, customArgs: string[] = [], lockKeys: string[] = []): Promise<CommandResult> {
+    const pExecFile = promisify(execFile)
+    const { env } = this
+    const { stdout, stderr } = await Sender.asyncLock.acquire(
+      lockKeys.map((k) => `nonce-${k}`),
+      () =>
+        pExecFile(this.binPath, [command, ...this.getArgs(customArgs)], {
+          env,
+          cwd: this.rootPath,
+        })
+    )
+    return { stdout, stderr, out: stdout.trim() }
+  }
+}
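
A hypothetical subclass, for illustration only; the env var, flag and command below are assumptions, not part of this changeset:

import { CLI, CommandResult } from './base'

class ExampleCLI extends CLI {
  constructor(rootPath: string) {
    // Inherits the AUTO_CONFIRM / FORCE_COLOR defaults, adds one override
    super(rootPath, { EXAMPLE_LOG_LEVEL: 'debug' }, ['--json'])
  }

  // Serialize on the signing key so concurrent runs don't race on nonces
  async version(signerAddress: string): Promise<CommandResult> {
    return this.run('version', [], [signerAddress])
  }
}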

+ 48 - 0
tests/network-tests/src/cli/distributor.ts

@@ -0,0 +1,48 @@
+import path from 'path'
+import { spawn } from 'child_process'
+import { DistributorNodeConfiguration } from '@joystream/distributor-cli/src/types/generated/ConfigJson'
+import { CLI, CommandResult } from './base'
+import { WorkerId } from '@joystream/types/working-group'
+import { ProcessManager } from './utils'
+import Keyring from '@polkadot/keyring'
+
+const CLI_ROOT_PATH = path.resolve(__dirname, '../../../../distributor-node')
+
+export class DistributorCLI extends CLI {
+  protected keys: string[]
+
+  constructor(keyUris: string[]) {
+    const keys: DistributorNodeConfiguration['keys'] = keyUris.map((suri) => ({
+      suri,
+    })) as DistributorNodeConfiguration['keys']
+    const defaultEnv = {
+      JOYSTREAM_DISTRIBUTOR__KEYS: JSON.stringify(keys),
+    }
+    super(CLI_ROOT_PATH, defaultEnv)
+    const keyring = new Keyring({ type: 'sr25519' })
+    keyUris.forEach((uri) => keyring.addFromUri(uri))
+    this.keys = keyring.getPairs().map((p) => p.address)
+  }
+
+  async run(command: string, customArgs: string[] = [], keyLocks?: string[]): Promise<CommandResult> {
+    return super.run(command, customArgs, keyLocks || this.keys)
+  }
+
+  async spawnServer(
+    operatorId: number | WorkerId,
+    port = 3334,
+    buckets: number[] | 'all' = 'all'
+  ): Promise<ProcessManager> {
+    const { env } = this
+    const serverEnv = {
+      ...env,
+      JOYSTREAM_DISTRIBUTOR__PORT: port.toString(),
+      JOYSTREAM_DISTRIBUTOR__WORKER_ID: operatorId.toString(),
+      JOYSTREAM_DISTRIBUTOR__BUCKETS: buckets === 'all' ? 'all' : JSON.stringify(buckets),
+    }
+    const serverProcess = spawn(this.binPath, ['start'], { env: serverEnv, cwd: this.rootPath })
+    const serverManager = new ProcessManager('Distributor node server', serverProcess, 'stdout')
+    await serverManager.untilOutput(`listening on port ${port}`)
+    return serverManager
+  }
+}
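
A sketch of spinning up a distributor node in a test with the class above; the key URI and worker id are assumptions. spawnServer resolves once the node reports it is listening on the chosen port:

import { DistributorCLI } from './distributor'

async function withDistributorNode(): Promise<void> {
  const cli = new DistributorCLI(['//Alice'])
  const server = await cli.spawnServer(0, 3334, 'all')
  try {
    server.expectAlive()
    // ... exercise distribution endpoints here ...
  } finally {
    server.kill()
  }
}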

+ 47 - 0
tests/network-tests/src/cli/joystream.ts

@@ -0,0 +1,47 @@
+import { KeyringPair } from '@polkadot/keyring/types'
+import path from 'path'
+import { CLI, CommandResult } from './base'
+import { TmpFileManager } from './utils'
+import { ChannelInputParameters } from '@joystream/cli/src/Types'
+
+const CLI_ROOT_PATH = path.resolve(__dirname, '../../../../cli')
+
+export class JoystreamCLI extends CLI {
+  protected keys: string[] = []
+  protected tmpFileManager: TmpFileManager
+
+  constructor(tmpFileManager: TmpFileManager) {
+    const defaultEnv = {
+      HOME: tmpFileManager.tmpDataDir,
+    }
+    super(CLI_ROOT_PATH, defaultEnv)
+    this.tmpFileManager = tmpFileManager
+  }
+
+  async init(): Promise<void> {
+    await this.run('api:setUri', [process.env.NODE_URL || 'ws://127.0.0.1:9944'])
+    await this.run('api:setQueryNodeEndpoint', [process.env.QUERY_NODE_URL || 'http://127.0.0.1:8081/graphql'])
+  }
+
+  async importKey(pair: KeyringPair): Promise<void> {
+    const jsonFile = this.tmpFileManager.jsonFile(pair.toJson())
+    await this.run('account:import', [
+      '--backupFilePath',
+      jsonFile,
+      '--name',
+      `Account${this.keys.length}`,
+      '--password',
+      '',
+    ])
+    this.keys.push(pair.address)
+  }
+
+  async run(command: string, customArgs: string[] = [], keyLocks?: string[]): Promise<CommandResult> {
+    return super.run(command, customArgs, keyLocks || this.keys)
+  }
+
+  async createChannel(inputData: ChannelInputParameters, args: string[]): Promise<CommandResult> {
+    const jsonFile = this.tmpFileManager.jsonFile(inputData)
+    return this.run('content:createChannel', ['--input', jsonFile, ...args])
+  }
+}
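
Typical test usage of the wrapper above, as a sketch; the key URI and the channel field set are assumptions, and NODE_URL / QUERY_NODE_URL fall back to localhost defaults in init():

import { Keyring } from '@polkadot/keyring'
import { TmpFileManager } from './utils'
import { JoystreamCLI } from './joystream'

async function createTestChannel(): Promise<void> {
  const tmpFileManager = new TmpFileManager()
  const cli = new JoystreamCLI(tmpFileManager)
  await cli.init()

  const pair = new Keyring({ type: 'sr25519' }).addFromUri('//Alice')
  await cli.importKey(pair)

  const { out } = await cli.createChannel({ title: 'Test channel' }, ['--context', 'Member'])
  console.log(out)
}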

+ 66 - 0
tests/network-tests/src/cli/storage.ts

@@ -0,0 +1,66 @@
+import path from 'path'
+import { CLI, CommandResult } from './base'
+import { spawn } from 'child_process'
+import { v4 as uuid } from 'uuid'
+import { WorkerId } from '@joystream/types/working-group'
+import os from 'os'
+import { ProcessManager } from './utils'
+import fs from 'fs'
+import { Keyring } from '@polkadot/keyring'
+
+const CLI_ROOT_PATH = path.resolve(__dirname, '../../../../storage-node')
+
+export class StorageCLI extends CLI {
+  constructor(defaultSuri?: string) {
+    super(CLI_ROOT_PATH, undefined, defaultSuri ? ['--accountUri', defaultSuri] : [])
+  }
+
+  setDefaultSuri(defaultSuri: string): void {
+    this.defaultArgs = ['--accountUri', defaultSuri]
+  }
+
+  async run(command: string, customArgs: string[] = []): Promise<CommandResult> {
+    const args = this.getArgs(customArgs)
+    const accountUri = this.getFlagStringValue(args, '--accountUri', '-y')
+    if (!accountUri) {
+      throw new Error('Missing accountUri')
+    }
+    const accountKey = new Keyring({ type: 'sr25519' }).createFromUri(accountUri).address
+    return super.run(command, args, [accountKey])
+  }
+
+  async spawnServer(
+    operatorId: number | WorkerId,
+    port = 3333,
+    sync = true,
+    syncInterval = 1
+  ): Promise<ProcessManager> {
+    const queryNodeHost = new URL(process.env.QUERY_NODE_URL || '').host
+    const apiUrl = new URL(process.env.NODE_URL || '').toString()
+    const uploadsDir = path.join(os.tmpdir(), uuid())
+    fs.mkdirSync(uploadsDir)
+    const { env } = this
+    const args = [
+      ...this.defaultArgs,
+      '--worker',
+      operatorId.toString(),
+      '--port',
+      port.toString(),
+      '--queryNodeHost',
+      queryNodeHost,
+      '--apiUrl',
+      apiUrl,
+      '--uploads',
+      uploadsDir,
+    ]
+    if (sync) {
+      args.push('--sync')
+      args.push('--syncInterval')
+      args.push(syncInterval.toString())
+    }
+    const serverProcess = spawn(this.binPath, ['server', ...args], { env, cwd: this.rootPath })
+    const serverListener = new ProcessManager('Storage node server', serverProcess, 'stderr')
+    await serverListener.untilOutput('Listening')
+    return serverListener
+  }
+}
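
A sketch of starting a storage node server for tests; QUERY_NODE_URL and NODE_URL must be set, since spawnServer derives its endpoints from them, and the suri and worker id are assumptions:

import { StorageCLI } from './storage'

async function withStorageNode(): Promise<void> {
  const cli = new StorageCLI('//Alice')
  const server = await cli.spawnServer(0, 3333, true, 1)
  try {
    server.expectOutput('Listening', 'stderr')
    // ... upload/download against http://localhost:3333 here ...
  } finally {
    server.kill()
  }
}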

+ 126 - 0
tests/network-tests/src/cli/utils.ts

@@ -0,0 +1,126 @@
+import fs, { mkdirSync, rmSync } from 'fs'
+import path from 'path'
+import { v4 as uuid } from 'uuid'
+import { ChildProcessWithoutNullStreams } from 'child_process'
+import { Utils } from '../utils'
+import _ from 'lodash'
+import bmp from 'bmp-js'
+import nodeCleanup from 'node-cleanup'
+
+export class TmpFileManager {
+  tmpDataDir: string
+
+  constructor(baseDir?: string) {
+    this.tmpDataDir = path.join(
+      baseDir || process.env.DATA_PATH || path.join(__filename, '../../../data'),
+      'joystream-testing',
+      uuid()
+    )
+    mkdirSync(this.tmpDataDir, { recursive: true })
+    nodeCleanup(() => {
+      rmSync(this.tmpDataDir, { recursive: true, force: true })
+    })
+  }
+
+  public jsonFile(value: unknown): string {
+    const tmpFilePath = path.join(this.tmpDataDir, `${uuid()}.json`)
+    fs.writeFileSync(tmpFilePath, JSON.stringify(value))
+    return tmpFilePath
+  }
+
+  public randomImgFile(width: number, height: number): string {
+    const data = Buffer.from(Array.from({ length: width * height * 3 }, () => _.random(0, 255)))
+    const rawBmp = bmp.encode({ width, height, data })
+    const tmpFilePath = path.join(this.tmpDataDir, `${uuid()}.bmp`)
+    fs.writeFileSync(tmpFilePath, rawBmp.data)
+    return tmpFilePath
+  }
+}
+
+type OutputType = 'stdout' | 'stderr'
+
+export class ProcessManager {
+  private label: string
+  private stdout = ''
+  private stderr = ''
+  private subprocess: ChildProcessWithoutNullStreams
+  private defaultOutput: OutputType
+  private onStdoutListener: (chunk: Uint8Array) => void
+  private onStderrListener: (chunk: Uint8Array) => void
+
+  constructor(
+    label: string,
+    subprocess: ChildProcessWithoutNullStreams,
+    defaultOutput: OutputType = 'stdout',
+    maxOutputSize = 1024 * 1024 * 10
+  ) {
+    this.label = label
+    this.defaultOutput = defaultOutput
+    this.subprocess = subprocess
+    const onDataListener = (outputType: OutputType) => (chunk: Uint8Array) => {
+      const chunkStr = Buffer.from(chunk).toString()
+      this[outputType] += chunkStr
+      if (this[outputType].length > maxOutputSize) {
+        this[outputType] = this[outputType].slice(-maxOutputSize)
+      }
+    }
+    this.onStdoutListener = onDataListener('stdout')
+    this.onStderrListener = onDataListener('stderr')
+
+    subprocess.stdout.on('data', this.onStdoutListener)
+    subprocess.stderr.on('data', this.onStderrListener)
+    nodeCleanup(() => {
+      console.log(this.recentOutput())
+      subprocess.kill()
+    })
+  }
+
+  private recentOutput() {
+    const length = parseInt(process.env.SUBPROCESSES_FINAL_LOG_LENGTH || '20')
+    return (
+      `\n\nLast STDOUT of ${this.label}:\n ${this.stdout.split('\n').slice(-length).join('\n')}\n\n` +
+      `Last STDERR of ${this.label}:\n ${this.stderr.split('\n').slice(-length).join('\n')}\n\n`
+    )
+  }
+
+  kill(): void {
+    this.subprocess.kill()
+  }
+
+  expectAlive(): void {
+    if (this.subprocess.exitCode !== null) {
+      throw new Error(`Process ${this.label} exited unexpectedly with code: ${this.subprocess.exitCode}`)
+    }
+  }
+
+  expectOutput(expected: string, outputType?: OutputType): void {
+    const outT = outputType || this.defaultOutput
+    if (!this[outT].includes(expected)) {
+      throw new Error(`Expected output: "${expected}" missing in ${this.label} process (${outT})`)
+    }
+  }
+
+  async untilOutput(
+    expected: string,
+    outputType?: 'stderr' | 'stdout',
+    failOnExit = true,
+    timeoutMs = 120000,
+    waitMs = 1000
+  ): Promise<void> {
+    const start = Date.now()
+    while (true) {
+      try {
+        this.expectOutput(expected, outputType)
+        return
+      } catch (e) {
+        if (failOnExit) {
+          this.expectAlive()
+        }
+        if (Date.now() - start + waitMs >= timeoutMs) {
+          throw new Error(`untilOutput timeout reached. ${(e as Error).message}`)
+        }
+        await Utils.wait(waitMs)
+      }
+    }
+  }
+}
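
A sketch of supervising an arbitrary child process with ProcessManager; the command is a placeholder. untilOutput polls the captured stream (1s interval, 120s timeout by default) and fails fast if the process exits:

import { spawn } from 'child_process'
import { ProcessManager } from './utils'

async function waitUntilReady(): Promise<ProcessManager> {
  const child = spawn('node', ['server.js']) // hypothetical entrypoint
  const manager = new ProcessManager('Example server', child, 'stdout')
  await manager.untilOutput('Listening')
  return manager
}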

+ 1 - 1
tests/network-tests/src/fixtures/membershipModule.ts

@@ -42,7 +42,7 @@ export class BuyMembershipHappyCaseFixture extends BaseFixture {
         )
       )
     )
-      .map(({ events }) => this.api.findMemberRegisteredEvent(events))
+      .map((r) => this.api.findEvent(r, 'members', 'MemberRegistered')?.data[0])
       .filter((id) => id !== undefined) as MemberId[]
 
     this.debug(`Registered ${this.memberIds.length} new members`)

+ 13 - 18
tests/network-tests/src/fixtures/proposalsModule.ts

@@ -68,7 +68,7 @@ export class CreateWorkingGroupLeaderOpeningFixture extends BaseFixture {
       workingGroup: this.workingGroup,
     })
 
-    this.result = this.api.findProposalCreatedEvent(result.events)
+    this.result = this.api.findEvent(result, 'proposalsEngine', 'ProposalCreated')?.data[1]
   }
 }
 
@@ -110,7 +110,7 @@ export class BeginWorkingGroupLeaderApplicationReviewFixture extends BaseFixture
       this.workingGroup
     )
 
-    this.result = this.api.findProposalCreatedEvent(result.events)
+    this.result = this.api.findEvent(result, 'proposalsEngine', 'ProposalCreated')?.data[1]
   }
 }
 
@@ -176,7 +176,7 @@ export class FillLeaderOpeningProposalFixture extends BaseFixture {
       workingGroup: workingGroupString,
     })
 
-    this.result = this.api.findProposalCreatedEvent(result.events)
+    this.result = this.api.findEvent(result, 'proposalsEngine', 'ProposalCreated')?.data[1]
   }
 }
 
@@ -223,7 +223,7 @@ export class TerminateLeaderRoleProposalFixture extends BaseFixture {
       this.slash,
       workingGroupString
     )
-    this.result = this.api.findProposalCreatedEvent(result.events)
+    this.result = this.api.findEvent(result, 'proposalsEngine', 'ProposalCreated')?.data[1]
   }
 }
 
@@ -269,7 +269,7 @@ export class SetLeaderRewardProposalFixture extends BaseFixture {
       workingGroupString
     )
 
-    this.result = this.api.findProposalCreatedEvent(result.events)
+    this.result = this.api.findEvent(result, 'proposalsEngine', 'ProposalCreated')?.data[1]
   }
 }
 
@@ -315,7 +315,7 @@ export class DecreaseLeaderStakeProposalFixture extends BaseFixture {
       workingGroupString
     )
 
-    this.result = this.api.findProposalCreatedEvent(result.events)
+    this.result = this.api.findEvent(result, 'proposalsEngine', 'ProposalCreated')?.data[1]
   }
 }
 
@@ -359,7 +359,7 @@ export class SlashLeaderProposalFixture extends BaseFixture {
       this.slashAmount,
       workingGroupString
     )
-    this.result = this.api.findProposalCreatedEvent(result.events)
+    this.result = this.api.findEvent(result, 'proposalsEngine', 'ProposalCreated')?.data[1]
   }
 }
 
@@ -401,7 +401,7 @@ export class WorkingGroupMintCapacityProposalFixture extends BaseFixture {
       this.mintCapacity,
       workingGroupString
     )
-    this.result = this.api.findProposalCreatedEvent(result.events)
+    this.result = this.api.findEvent(result, 'proposalsEngine', 'ProposalCreated')?.data[1]
   }
 }
 
@@ -471,8 +471,7 @@ export class ElectionParametersProposalFixture extends BaseFixture {
       proposedMinVotingStake
     )
 
-    const proposalNumber = this.api.findProposalCreatedEvent(proposalCreationResult.events) as ProposalId
-    assert.notEqual(proposalNumber, undefined)
+    const proposalNumber = this.api.getEvent(proposalCreationResult, 'proposalsEngine', 'ProposalCreated').data[1]
 
     const approveProposalFixture = new VoteForProposalFixture(this.api, proposalNumber)
     await approveProposalFixture.execute()
@@ -563,8 +562,7 @@ export class SpendingProposalFixture extends BaseFixture {
       this.spendingBalance,
       fundingRecipient
     )
-    const proposalNumber: ProposalId = this.api.findProposalCreatedEvent(result.events) as ProposalId
-    assert.notEqual(proposalNumber, undefined)
+    const proposalNumber = this.api.getEvent(result, 'proposalsEngine', 'ProposalCreated').data[1]
 
     // Approving spending proposal
     const balanceBeforeMinting: BN = await this.api.getBalance(fundingRecipient)
@@ -609,8 +607,7 @@ export class TextProposalFixture extends BaseFixture {
 
     // Proposal creation
     const result = await this.api.proposeText(this.proposer, proposalStake, proposalTitle, description, proposalText)
-    const proposalNumber = this.api.findProposalCreatedEvent(result.events) as ProposalId
-    assert.notEqual(proposalNumber, undefined)
+    const proposalNumber = this.api.getEvent(result, 'proposalsEngine', 'ProposalCreated').data[1]
 
     // Approving text proposal
     const approveProposalFixture = new VoteForProposalFixture(this.api, proposalNumber)
@@ -651,8 +648,7 @@ export class ValidatorCountProposalFixture extends BaseFixture {
       proposalStake,
       this.proposedValidatorCount
     )
-    const proposalNumber: ProposalId = this.api.findProposalCreatedEvent(result.events) as ProposalId
-    assert.notEqual(proposalNumber, undefined)
+    const proposalNumber = this.api.getEvent(result, 'proposalsEngine', 'ProposalCreated').data[1]
 
     // Approving the proposal
     const approveProposalFixture = new VoteForProposalFixture(this.api, proposalNumber)
@@ -700,8 +696,7 @@ export class UpdateRuntimeFixture extends BaseFixture {
       'runtime to test proposal functionality' + uuid().substring(0, 8),
       runtime
     )
-    const proposalNumber: ProposalId = this.api.findProposalCreatedEvent(result.events) as ProposalId
-    assert.notEqual(proposalNumber, undefined)
+    const proposalNumber = this.api.getEvent(result, 'proposalsEngine', 'ProposalCreated').data[1]
 
     // Approving runtime update proposal
     const approveProposalFixture = new VoteForProposalFixture(this.api, proposalNumber)
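
The hunks above all converge on one pattern: the per-event helpers (findProposalCreatedEvent, findMemberRegisteredEvent, ...) are replaced by a generic lookup on the Api. A condensed sketch of the two variants, as implied by this changeset; findEvent returns undefined when the event is absent, while getEvent is evidently expected to throw, which is why the old assert.notEqual(..., undefined) checks are dropped:

// Condensed from the hunks above; `result` is the extrinsic submission result.
// Optional lookup: yields undefined when the event was not emitted.
const maybeId = this.api.findEvent(result, 'proposalsEngine', 'ProposalCreated')?.data[1]
// Strict lookup: assumed to throw on a missing event, so no explicit
// undefined-check is needed afterwards.
const proposalId = this.api.getEvent(result, 'proposalsEngine', 'ProposalCreated').data[1]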

+ 115 - 15
tests/network-tests/src/fixtures/workingGroupModule.ts

@@ -5,10 +5,11 @@ import { WorkingGroups } from '../WorkingGroups'
 import { KeyringPair } from '@polkadot/keyring/types'
 import { v4 as uuid } from 'uuid'
 import { RewardRelationship } from '@joystream/types/recurring-rewards'
-import { Application, ApplicationIdToWorkerIdMap, Worker, WorkerId } from '@joystream/types/working-group'
+import { Application, Worker, WorkerId } from '@joystream/types/working-group'
 import { Utils } from '../utils'
 import { ApplicationId, Opening as HiringOpening, OpeningId } from '@joystream/types/hiring'
-import { BaseFixture } from '../Fixture'
+import { BaseFixture, FixtureRunner } from '../Fixture'
+import { BuyMembershipHappyCaseFixture } from './membershipModule'
 
 export class AddWorkerOpeningFixture extends BaseFixture {
   private applicationStake: BN
@@ -77,7 +78,7 @@ export class AddWorkerOpeningFixture extends BaseFixture {
     )
 
     // We don't assert, we allow potential failure
-    this.result = this.api.findOpeningAddedEvent(result.events, this.module)
+    this.result = this.api.findEvent(result, this.module, 'OpeningAdded')?.data[0]
   }
 }
 
@@ -129,7 +130,7 @@ export class SudoAddLeaderOpeningFixture extends BaseFixture {
     )
 
     // We don't assert, we allow potential failure
-    this.result = this.api.findOpeningAddedEvent(result.events, this.module)
+    this.result = this.api.findEvent(result, this.module, 'OpeningAdded')?.data[0]
   }
 }
 
@@ -271,7 +272,7 @@ export class BeginApplicationReviewFixture extends BaseFixture {
     // const beginApplicantReviewPromise: Promise<ApplicationId> = this.api.expectApplicationReviewBegan()
     const result = await this.api.beginApplicantReview(leadAccount, this.openingId, this.module)
 
-    assert.notEqual(this.api.findApplicationReviewBeganEvent(result.events, this.module), undefined)
+    this.api.getEvent(result, this.module, 'BeganApplicationReview')
   }
 }
 
@@ -353,11 +354,7 @@ export class FillOpeningFixture extends BaseFixture {
       this.payoutInterval,
       this.module
     )
-    const applicationIdToWorkerIdMap = this.api.findOpeningFilledEvent(
-      result.events,
-      this.module
-    ) as ApplicationIdToWorkerIdMap
-    assert.notEqual(applicationIdToWorkerIdMap, undefined)
+    const applicationIdToWorkerIdMap = this.api.getEvent(result, this.module, 'OpeningFilled').data[1]
 
     this.workerIds = []
     applicationIdToWorkerIdMap.forEach((workerId) => this.workerIds.push(workerId))
@@ -413,11 +410,7 @@ export class SudoFillLeaderOpeningFixture extends BaseFixture {
     )
 
     // Assertions
-    const applicationIdToWorkerIdMap = this.api.findOpeningFilledEvent(
-      result.events,
-      this.module
-    ) as ApplicationIdToWorkerIdMap
-    assert.notEqual(applicationIdToWorkerIdMap, undefined)
+    const applicationIdToWorkerIdMap = this.api.getEvent(result, this.module, 'OpeningFilled').data[1]
     assert.equal(applicationIdToWorkerIdMap.size, 1)
 
     applicationIdToWorkerIdMap.forEach(async (workerId, applicationId) => {
@@ -721,3 +714,110 @@ export class AwaitPayoutFixture extends BaseFixture {
     )
   }
 }
+
+type HireWorkersConfig = {
+  applicationStake: BN
+  roleStake: BN
+  firstRewardInterval: BN
+  rewardInterval: BN
+  payoutAmount: BN
+  unstakingPeriod: BN
+  openingActivationDelay: BN
+}
+
+export class HireWorkersFixture extends BaseFixture {
+  private numberOfWorkers: number
+  private config: HireWorkersConfig
+  private module: WorkingGroups
+  private workerIds: WorkerId[] = []
+
+  constructor(api: Api, numberOfWorkers: number, module: WorkingGroups, config?: Partial<HireWorkersConfig>) {
+    super(api)
+    this.numberOfWorkers = numberOfWorkers
+    this.module = module
+    this.config = {
+      applicationStake: config?.applicationStake || new BN(process.env.WORKING_GROUP_APPLICATION_STAKE!),
+      roleStake: config?.roleStake || new BN(process.env.WORKING_GROUP_ROLE_STAKE!),
+      firstRewardInterval: config?.firstRewardInterval || new BN(process.env.SHORT_FIRST_REWARD_INTERVAL!),
+      rewardInterval: config?.rewardInterval || new BN(process.env.SHORT_REWARD_INTERVAL!),
+      payoutAmount: config?.payoutAmount || new BN(process.env.PAYOUT_AMOUNT!),
+      unstakingPeriod: config?.unstakingPeriod || new BN(process.env.STORAGE_WORKING_GROUP_UNSTAKING_PERIOD!),
+      openingActivationDelay: config?.openingActivationDelay || new BN(0),
+    }
+  }
+
+  public getHiredWorkers(): WorkerId[] {
+    if (!this.executed) {
+      throw new Error('Fixture not yet executed!')
+    }
+    return this.workerIds
+  }
+
+  public async execute(): Promise<void> {
+    const { api, module } = this
+    const {
+      applicationStake,
+      roleStake,
+      openingActivationDelay,
+      unstakingPeriod,
+      firstRewardInterval,
+      rewardInterval,
+      payoutAmount,
+    } = this.config
+
+    const lead = await api.getGroupLead(module)
+    assert(lead)
+
+    const paidTermsId = api.createPaidTermId(new BN(process.env.MEMBERSHIP_PAID_TERMS!))
+    const newMembers = api.createKeyPairs(this.numberOfWorkers).map(({ key }) => key.address)
+
+    const memberSetFixture = new BuyMembershipHappyCaseFixture(api, newMembers, paidTermsId)
+    // Register the new set of members
+    await new FixtureRunner(memberSetFixture).run()
+    const applicants = newMembers
+
+    const addWorkerOpeningFixture = new AddWorkerOpeningFixture(
+      api,
+      applicationStake,
+      roleStake,
+      openingActivationDelay,
+      unstakingPeriod,
+      module
+    )
+    // Add worker opening
+    await new FixtureRunner(addWorkerOpeningFixture).run()
+
+    // First apply for worker opening
+    const applyForWorkerOpeningFixture = new ApplyForOpeningFixture(
+      api,
+      applicants,
+      applicationStake,
+      roleStake,
+      addWorkerOpeningFixture.getCreatedOpeningId() as OpeningId,
+      module
+    )
+    await new FixtureRunner(applyForWorkerOpeningFixture).run()
+    const applicationIds = applyForWorkerOpeningFixture.getApplicationIds()
+
+    // Begin application review
+    const beginApplicationReviewFixture = new BeginApplicationReviewFixture(
+      api,
+      addWorkerOpeningFixture.getCreatedOpeningId() as OpeningId,
+      module
+    )
+    await new FixtureRunner(beginApplicationReviewFixture).run()
+
+    // Fill worker opening
+    const fillOpeningFixture = new FillOpeningFixture(
+      api,
+      applicationIds,
+      addWorkerOpeningFixture.getCreatedOpeningId() as OpeningId,
+      firstRewardInterval,
+      rewardInterval,
+      payoutAmount,
+      module
+    )
+    await new FixtureRunner(fillOpeningFixture).run()
+    this.workerIds = fillOpeningFixture.getWorkerIds()
+  }
+}
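
A short sketch of how a flow might consume the new fixture (same-file imports assumed; the group and worker count are arbitrary, and the default config requires the WORKING_GROUP_* environment variables shown above to be set):

// Sketch: hiring a batch of workers via HireWorkersFixture.
async function hireThreeWorkers(api: Api): Promise<WorkerId[]> {
  const hireFixture = new HireWorkersFixture(api, 3, WorkingGroups.Distribution)
  await new FixtureRunner(hireFixture).run()
  return hireFixture.getHiredWorkers()
}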

+ 71 - 0
tests/network-tests/src/flows/clis/createChannel.ts

@@ -0,0 +1,71 @@
+import { FlowProps } from '../../Flow'
+import { extendDebug } from '../../Debugger'
+import { JoystreamCLI } from '../../cli/joystream'
+import { BuyMembershipHappyCaseFixture } from '../../fixtures/membershipModule'
+import { BN } from '@polkadot/util'
+import { FixtureRunner } from '../../Fixture'
+import { TmpFileManager } from '../../cli/utils'
+import { assert } from 'chai'
+import { Utils } from '../../utils'
+import { statSync } from 'fs'
+
+export default async function createChannel({ api, env, query }: FlowProps): Promise<void> {
+  const debug = extendDebug('flow:createChannel')
+  debug('Started')
+
+  // Create channel owner membership
+  const [channelOwnerKeypair] = await api.createKeyPairs(1)
+  const paidTermId = api.createPaidTermId(new BN(+(env.MEMBERSHIP_PAID_TERMS || 0)))
+  const buyMembershipFixture = new BuyMembershipHappyCaseFixture(api, [channelOwnerKeypair.key.address], paidTermId)
+  await new FixtureRunner(buyMembershipFixture).run()
+
+  // Send some funds to pay the deletion_prize
+  const channelOwnerBalance = api.consts.storage.dataObjectDeletionPrize.muln(2)
+  await api.treasuryTransferBalance(channelOwnerKeypair.key.address, channelOwnerBalance)
+
+  // Create Joystream CLI
+  const tmpFileManager = new TmpFileManager()
+  const joystreamCli = new JoystreamCLI(tmpFileManager)
+
+  // Init CLI, import & select channel owner key
+  await joystreamCli.init()
+  await joystreamCli.importKey(channelOwnerKeypair.key)
+
+  // Create channel
+  const avatarPhotoPath = tmpFileManager.randomImgFile(300, 300)
+  const coverPhotoPath = tmpFileManager.randomImgFile(1920, 500)
+  const channelInput = {
+    title: 'Test channel',
+    avatarPhotoPath,
+    coverPhotoPath,
+    description: 'This is a test channel',
+    isPublic: true,
+    language: 'en',
+    rewardAccount: channelOwnerKeypair.key.address,
+  }
+  const { out: createChannelOut } = await joystreamCli.createChannel(channelInput, ['--context', 'Member'])
+
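+  // Extract the created channel id from the CLI's success message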
+  const channelIdMatch = /Channel with id ([0-9]+) successfully created/.exec(createChannelOut)
+  if (!channelIdMatch) {
+    throw new Error(`No channel id found in output:\n${createChannelOut}`)
+  }
+  const [, channelId] = channelIdMatch
+
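+  // Wait for the query node to index the channel, then verify its fields and assets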
+  await query.tryQueryWithTimeout(
+    () => query.channelById(channelId),
+    (channel) => {
+      Utils.assert(channel, 'Channel not found')
+      assert.equal(channel.title, channelInput.title)
+      assert.equal(channel.description, channelInput.description)
+      assert.equal(channel.isPublic, channelInput.isPublic)
+      assert.equal(channel.language?.iso, channelInput.language)
+      assert.equal(channel.rewardAccount, channelInput.rewardAccount)
+      assert.equal(channel.avatarPhoto?.type.__typename, 'DataObjectTypeChannelAvatar')
+      assert.equal(channel.avatarPhoto?.size, statSync(avatarPhotoPath).size)
+      assert.equal(channel.coverPhoto?.type.__typename, 'DataObjectTypeChannelCoverPhoto')
+      assert.equal(channel.coverPhoto?.size, statSync(coverPhotoPath).size)
+    }
+  )
+
+  debug('Done')
+}

+ 39 - 0
tests/network-tests/src/flows/clis/initDistributionBucket.ts

@@ -0,0 +1,39 @@
+import { FlowProps } from '../../Flow'
+import { extendDebug } from '../../Debugger'
+import { WorkingGroups } from '../../WorkingGroups'
+import { DistributorCLI } from '../../cli/distributor'
+
+export default async function initDistributionBucket({ api }: FlowProps): Promise<void> {
+  const debug = extendDebug('flow:initDistributionBucketViaCLI')
+  debug('Started')
+
+  const leaderId = await api.getLeadWorkerId(WorkingGroups.Distribution)
+  const leader = await api.getGroupLead(WorkingGroups.Distribution)
+  if (!leaderId || !leader) {
+    throw new Error('Active distribution leader is required in this flow!')
+  }
+  const operatorId = leaderId.toString()
+  const leaderSuri = api.getSuri(leader.role_account_id)
+
+  const cli = new DistributorCLI([leaderSuri])
+
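+  // Distribution bucket bootstrap sequence: raise the per-bag bucket limit,
+  // create a bucket family with one bucket that accepts bags, attach the bucket
+  // to the council bag, make new Channel bags use this family, switch the bucket
+  // into distributing mode, then invite the operator, accept the invitation and
+  // publish the operator's endpoint.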
+  await cli.run('leader:set-buckets-per-bag-limit', ['--limit', '10'])
+  const { out: familyId } = await cli.run('leader:create-bucket-family')
+  const { out: bucketIndex } = await cli.run('leader:create-bucket', ['--familyId', familyId, '--acceptingBags', 'yes'])
+  const bucketId = `${familyId}:${bucketIndex}`
+  await cli.run('leader:update-bag', ['--bagId', 'static:council', '--familyId', familyId, '--add', bucketIndex])
+  await cli.run('leader:update-dynamic-bag-policy', ['--type', 'Channel', '--policy', `${familyId}:1`])
+  await cli.run('leader:update-bucket-mode', ['--bucketId', bucketId, '--mode', 'on'])
+  await cli.run('leader:invite-bucket-operator', ['--bucketId', bucketId, '--workerId', operatorId])
+  await cli.run('operator:accept-invitation', ['--bucketId', bucketId, '--workerId', operatorId])
+  await cli.run('operator:set-metadata', [
+    '--bucketId',
+    bucketId,
+    '--workerId',
+    operatorId,
+    '--endpoint',
+    'http://localhost:3334',
+  ])
+
+  debug('Done')
+}

+ 53 - 0
tests/network-tests/src/flows/clis/initStorageBucket.ts

@@ -0,0 +1,53 @@
+import { FlowProps } from '../../Flow'
+import { extendDebug } from '../../Debugger'
+import { WorkingGroups } from '../../WorkingGroups'
+import { StorageCLI } from '../../cli/storage'
+
+export default async function initStorageBucket({ api }: FlowProps): Promise<void> {
+  const debug = extendDebug('flow:initStorageBucketViaCLI')
+  debug('Started')
+
+  const leaderId = await api.getLeadWorkerId(WorkingGroups.Storage)
+  const leader = await api.getGroupLead(WorkingGroups.Storage)
+  if (!leaderId || !leader) {
+    throw new Error('Active storage leader is required in this flow!')
+  }
+  const leaderSuri = api.getSuri(leader.role_account_id)
+  const transactorKey = '5DkE5YD8m5Yzno6EH2RTBnH268TDnnibZMEMjxwYemU4XevU' // //Colossus1
+
+  const operatorId = leaderId.toString()
+
+  const cli = new StorageCLI(leaderSuri)
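+  // Storage bucket bootstrap sequence: raise the per-bag bucket limit and the
+  // voucher limits, create a bucket with the leader invited as its operator,
+  // accept the invitation with a dedicated transactor account, attach the bucket
+  // to the council bag, make new Channel bags use one storage bucket, publish
+  // the operator's endpoint and zero the data size fee.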
+  await cli.run('leader:update-bag-limit', ['--limit', '10'])
+  await cli.run('leader:update-voucher-limits', ['--objects', '1000', '--size', '10000000000'])
+  const { out: bucketId } = await cli.run('leader:create-bucket', [
+    '--invited',
+    operatorId,
+    '--allow',
+    '--number',
+    '1000',
+    '--size',
+    '10000000000',
+  ])
+  await cli.run('operator:accept-invitation', [
+    '--workerId',
+    operatorId,
+    '--bucketId',
+    bucketId,
+    '--transactorAccountId',
+    transactorKey,
+  ])
+  await cli.run('leader:update-bag', ['--add', bucketId, '--bagId', 'static:council'])
+  await cli.run('leader:update-dynamic-bag-policy', ['--bagType', 'Channel', '--number', '1'])
+  await cli.run('operator:set-metadata', [
+    '--bucketId',
+    bucketId,
+    '--operatorId',
+    operatorId,
+    '--endpoint',
+    'http://localhost:3333',
+  ])
+  await cli.run('leader:update-data-fee', ['-f', '0'])
+
+  debug('Done')
+}

+ 23 - 0
tests/network-tests/src/flows/membership/makeAliceMember.ts

@@ -0,0 +1,23 @@
+import { FlowProps } from '../../Flow'
+import { BuyMembershipHappyCaseFixture } from '../../fixtures/membershipModule'
+import { PaidTermId } from '@joystream/types/members'
+import BN from 'bn.js'
+import { extendDebug } from '../../Debugger'
+import { FixtureRunner } from '../../Fixture'
+
+export default async function makeAliceMember({ api, env }: FlowProps): Promise<void> {
+  const debug = extendDebug('flow:makeAliceMember')
+  debug('Started')
+
+  const paidTerms: PaidTermId = api.createPaidTermId(new BN(+env.MEMBERSHIP_PAID_TERMS!))
+
+  // Assert membership can be bought if sufficient funds are available
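+  // The hardcoded address below is the well-known //Alice development account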
+  const happyCaseFixture = new BuyMembershipHappyCaseFixture(
+    api,
+    ['5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY'],
+    paidTerms
+  )
+  await new FixtureRunner(happyCaseFixture).run()
+
+  debug('Done')
+}

+ 6 - 6
tests/network-tests/src/flows/proposals/manageLeaderRole.ts

@@ -15,7 +15,6 @@ import {
 } from '../../fixtures/proposalsModule'
 import { ApplyForOpeningFixture } from '../../fixtures/workingGroupModule'
 import { PaidTermId } from '@joystream/types/members'
-import { OpeningId } from '@joystream/types/hiring'
 import { ProposalId } from '@joystream/types/proposals'
 import { WorkerId } from '@joystream/types/working-group'
 import { assert } from 'chai'
@@ -30,6 +29,9 @@ export default {
   content: async function ({ api, env, lock }: FlowProps): Promise<void> {
     return manageLeaderRole(api, env, WorkingGroups.Content, lock)
   },
+  distribution: async function ({ api, env, lock }: FlowProps): Promise<void> {
+    return manageLeaderRole(api, env, WorkingGroups.Distribution, lock)
+  },
 }
 
 async function manageLeaderRole(api: Api, env: NodeJS.ProcessEnv, group: WorkingGroups, lock: ResourceLocker) {
@@ -84,8 +86,7 @@ async function manageLeaderRole(api: Api, env: NodeJS.ProcessEnv, group: Working
 
   await new FixtureRunner(voteForCreateOpeningProposalFixture).run()
 
-  const openingId = api.findOpeningAddedEvent(voteForCreateOpeningProposalFixture.events, group) as OpeningId
-  assert(openingId)
+  const openingId = api.getEvent(voteForCreateOpeningProposalFixture.events, group, 'OpeningAdded').data[0]
 
   const applyForLeaderOpeningFixture = new ApplyForOpeningFixture(
     api,
@@ -151,9 +152,8 @@ async function manageLeaderRole(api: Api, env: NodeJS.ProcessEnv, group: Working
 
   const leadId = (await api.getLeadWorkerId(group)) as WorkerId
   assert(leadId)
-  const workerId = api.findWorkerRewardAmountUpdatedEvent(voteForeLeaderRewardFixture.events, group, leadId) as WorkerId
-  assert(workerId)
-  assert(leadId!.eq(workerId))
+  const workerId = api.getEvent(voteForeLeaderRewardFixture.events, group, 'WorkerRewardAmountUpdated').data[0]
+  assert(workerId.eq(leadId))
   const rewardRelationship = await api.getWorkerRewardRelationship(leadId!, group)
   assert(rewardRelationship.amount_per_payout.eq(alteredPayoutAmount))
 

+ 3 - 0
tests/network-tests/src/flows/proposals/workingGroupMintCapacityProposal.ts

@@ -20,6 +20,9 @@ export default {
   content: async function ({ api, env, lock }: FlowProps): Promise<void> {
     return workingGroupMintCapactiy(api, env, WorkingGroups.Content, lock)
   },
+  distribution: async function ({ api, env, lock }: FlowProps): Promise<void> {
+    return workingGroupMintCapactiy(api, env, WorkingGroups.Distribution, lock)
+  },
 }
 
 async function workingGroupMintCapactiy(api: Api, env: NodeJS.ProcessEnv, group: WorkingGroups, lock: ResourceLocker) {

+ 227 - 0
tests/network-tests/src/flows/storagev2/initDistribution.ts

@@ -0,0 +1,227 @@
+import { FlowProps } from '../../Flow'
+import { extendDebug } from '../../Debugger'
+import { WorkingGroups } from '../../WorkingGroups'
+import {
+  DistributionBucketFamilyMetadata,
+  DistributionBucketOperatorMetadata,
+  IDistributionBucketFamilyMetadata,
+  IDistributionBucketOperatorMetadata,
+} from '@joystream/metadata-protobuf'
+import { CreateInterface, createType } from '@joystream/types'
+import { BagId, DistributionBucketFamilyId, DynamicBagId, StaticBagId } from '@joystream/types/storage'
+import { SubmittableExtrinsic } from '@polkadot/api/types'
+import _ from 'lodash'
+import { Utils } from '../../utils'
+import { WorkerId } from '@joystream/types/working-group'
+
+type DistributionBucketConfig = {
+  metadata: IDistributionBucketOperatorMetadata
+  staticBags?: CreateInterface<StaticBagId>[]
+  operatorId: number
+}
+
+type DistributionFamilyConfig = {
+  metadata?: IDistributionBucketFamilyMetadata
+  buckets: DistributionBucketConfig[]
+  dynamicBagPolicy: {
+    [K in keyof typeof DynamicBagId.typeDefinitions]?: number
+  }
+}
+
+type InitDistributionConfig = {
+  families: DistributionFamilyConfig[]
+}
+
+export const allStaticBags: CreateInterface<StaticBagId>[] = [
+  'Council',
+  { WorkingGroup: 'Content' },
+  { WorkingGroup: 'Distribution' },
+  { WorkingGroup: 'Gateway' },
+  { WorkingGroup: 'OperationsAlpha' },
+  { WorkingGroup: 'OperationsBeta' },
+  { WorkingGroup: 'OperationsGamma' },
+  { WorkingGroup: 'Storage' },
+]
+
+export const singleBucketConfig: InitDistributionConfig = {
+  families: [
+    {
+      metadata: { region: 'All' },
+      dynamicBagPolicy: {
+        'Channel': 1,
+        'Member': 1,
+      },
+      buckets: [
+        {
+          metadata: { endpoint: process.env.DISTRIBUTOR_1_URL || 'http://localhost:3334' },
+          staticBags: allStaticBags,
+          operatorId: parseInt(process.env.DISTRIBUTOR_1_WORKER_ID || '0'),
+        },
+      ],
+    },
+  ],
+}
+
+export const doubleBucketConfig: InitDistributionConfig = {
+  families: [
+    {
+      metadata: { region: 'Region 1' },
+      dynamicBagPolicy: {
+        'Channel': 1,
+        'Member': 1,
+      },
+      buckets: [
+        {
+          metadata: { endpoint: process.env.DISTRIBUTOR_1_URL || 'http://localhost:3334' },
+          staticBags: allStaticBags,
+          operatorId: parseInt(process.env.DISTRIBUTOR_1_WORKER_ID || '0'),
+        },
+      ],
+    },
+    {
+      metadata: { region: 'Region 2' },
+      dynamicBagPolicy: {
+        'Channel': 1,
+        'Member': 1,
+      },
+      buckets: [
+        {
+          metadata: { endpoint: process.env.DISTRIBUTOR_2_URL || 'http://localhost:3336' },
+          staticBags: allStaticBags,
+          operatorId: parseInt(process.env.DISTRIBUTOR_2_WORKER_ID || '1'),
+        },
+      ],
+    },
+  ],
+}
+
+export default function createFlow({ families }: InitDistributionConfig) {
+  return async function initDistribution({ api }: FlowProps): Promise<void> {
+    const debug = extendDebug('flow:initDistribution')
+    debug('Started')
+
+    // Get the distribution working group lead
+    const distributionLeaderId = await api.getLeadWorkerId(WorkingGroups.Distribution)
+    const distributionLeader = await api.getGroupLead(WorkingGroups.Distribution)
+    if (!distributionLeaderId || !distributionLeader) {
+      throw new Error('Active distributor leader is required in this flow!')
+    }
+
+    const distributionLeaderKey = distributionLeader.role_account_id.toString()
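+    // Total bucket count across all families; also reused as the buckets-per-bag limit below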
+    const totalBucketsNum = families.reduce((a, b) => a + b.buckets.length, 0)
+
+    // Hire operators
+    // const hireWorkersFixture = new HireWorkesFixture(api, totalBucketsNum, WorkingGroups.Distribution)
+    // await new FixtureRunner(hireWorkersFixture).run()
+    // const operatorIds = hireWorkersFixture.getHiredWorkers()
+
+    const operatorIds = families.reduce(
+      (ids, { buckets }) => ids.concat(buckets.map((b) => createType('WorkerId', b.operatorId))),
+      [] as WorkerId[]
+    )
+    const operatorKeys = await api.getWorkerRoleAccounts(operatorIds, WorkingGroups.Distribution)
+
+    // Create families, set buckets per bag limit
+    const createFamilyTxs = families.map(() => api.tx.storage.createDistributionBucketFamily())
+    const setBucketsPerBagLimitTx = api.tx.storage.updateDistributionBucketsPerBagLimit(totalBucketsNum)
+    const [createFamilyResults] = await Promise.all([
+      api.signAndSendMany(createFamilyTxs, distributionLeaderKey),
+      api.signAndSendMany([setBucketsPerBagLimitTx], distributionLeaderKey),
+    ])
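+    // Family ids are assigned sequentially on-chain, so the ascending sort lines
+    // each id up with its position in the `families` config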
+    const familyIds = createFamilyResults
+      .map((r) => api.getEvent(r, 'storage', 'DistributionBucketFamilyCreated').data[0])
+      .sort((a, b) => a.cmp(b))
+    const familyById = new Map<number, DistributionFamilyConfig>()
+    familyIds.forEach((id, i) => familyById.set(id.toNumber(), families[i]))
+
+    // Create buckets, update family metadata, set dynamic bag policies
+    const createBucketTxs = families.reduce(
+      (txs, { buckets }, familyIndex) =>
+        txs.concat(buckets.map(() => api.tx.storage.createDistributionBucket(familyIds[familyIndex], true))),
+      [] as SubmittableExtrinsic<'promise'>[]
+    )
+    const updateFamilyMetadataTxs = familyIds.map((id, i) => {
+      const metadataBytes = Utils.metadataToBytes(DistributionBucketFamilyMetadata, families[i].metadata)
+      return api.tx.storage.setDistributionBucketFamilyMetadata(id, metadataBytes)
+    })
+    const dynamicBagPolicies = new Map<string, [DistributionBucketFamilyId, number][]>()
+    familyIds.forEach((familyId, index) => {
+      const family = families[index]
+      Object.entries(family.dynamicBagPolicy).forEach(([bagType, bucketsN]) => {
+        const current = dynamicBagPolicies.get(bagType) || []
+        dynamicBagPolicies.set(bagType, [...current, [familyId, bucketsN]])
+      })
+    })
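+    // Note: lodash's _.entries returns a Map's [key, value] pairs directly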
+    const updateDynamicBagPolicyTxs = _.entries(dynamicBagPolicies).map(([bagType, policyEntries]) =>
+      api.tx.storage.updateFamiliesInDynamicBagCreationPolicy(
+        bagType as keyof typeof DynamicBagId.typeDefinitions,
+        createType('BTreeMap<DistributionBucketFamilyId, u32>', new Map(policyEntries))
+      )
+    )
+    const [createBucketResults] = await Promise.all([
+      api.signAndSendMany(createBucketTxs, distributionLeaderKey),
+      api.signAndSendMany(updateFamilyMetadataTxs, distributionLeaderKey),
+      api.signAndSendMany(updateDynamicBagPolicyTxs, distributionLeaderKey),
+    ])
+    const bucketIds = createBucketResults
+      .map((r) => {
+        const [, , bucketId] = api.getEvent(r, 'storage', 'DistributionBucketCreated').data
+        return bucketId
+      })
+      .sort(
+        (a, b) =>
+          a.distribution_bucket_family_id.cmp(b.distribution_bucket_family_id) ||
+          a.distribution_bucket_index.cmp(b.distribution_bucket_index)
+      )
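+    // A bucket id is a (familyId, bucketIndex) pair; the code below relies on
+    // indexes starting at 0 within each family to map ids back onto the config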
+    const bucketById = new Map<string, DistributionBucketConfig>()
+    bucketIds.forEach((bucketId) => {
+      const familyId = bucketId.distribution_bucket_family_id.toNumber()
+      const bucketIndex = bucketId.distribution_bucket_index.toNumber()
+      const family = familyById.get(familyId)
+      if (!family) {
+        throw new Error(`familyById not found: ${familyId}`)
+      }
+      bucketById.set(bucketId.toString(), family.buckets[bucketIndex])
+    })
+
+    // Invite bucket operators
+    const bucketInviteTxs = bucketIds.map((bucketId, i) =>
+      api.tx.storage.inviteDistributionBucketOperator(bucketId, operatorIds[i])
+    )
+    await api.signAndSendMany(bucketInviteTxs, distributionLeaderKey)
+
+    // Accept invitations
+    const acceptInvitationTxs = bucketIds.map((bucketId, i) =>
+      api.tx.storage.acceptDistributionBucketInvitation(operatorIds[i], bucketId)
+    )
+    await api.signAndSendManyByMany(acceptInvitationTxs, operatorKeys)
+
+    // Set bucket operator metadata and attach the configured static bags
+    const bucketSetupPromises = _.flatten(
+      bucketIds.map((bucketId, i) => {
+        const operatorId = operatorIds[i]
+        const operatorKey = operatorKeys[i]
+        const bucketConfig = bucketById.get(bucketId.toString())
+        if (!bucketConfig) {
+          throw new Error('Bucket config not found')
+        }
+        const metadataBytes = Utils.metadataToBytes(DistributionBucketOperatorMetadata, bucketConfig.metadata)
+        const setMetaTx = api.tx.storage.setDistributionOperatorMetadata(operatorId, bucketId, metadataBytes)
+        const setMetaPromise = api.signAndSendMany([setMetaTx], operatorKey)
+        const updateBagTxs = (bucketConfig.staticBags || []).map((sBagId) => {
+          return api.tx.storage.updateDistributionBucketsForBag(
+            createType<BagId, 'BagId'>('BagId', { Static: sBagId }),
+            bucketId.distribution_bucket_family_id,
+            createType('BTreeSet<DistributionBucketIndex>', [bucketId.distribution_bucket_index]),
+            createType('BTreeSet<DistributionBucketIndex>', [])
+          )
+        })
+        const updateBagsPromise = api.signAndSendMany(updateBagTxs, distributionLeaderKey)
+        return [updateBagsPromise, setMetaPromise]
+      })
+    )
+    await Promise.all(bucketSetupPromises)
+
+    debug('Done')
+  }
+}
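+
+// A minimal usage sketch (hypothetical scenario wiring — the `job` helper and
+// import path are assumptions, not part of this file):
+//
+//   import initDistribution, { singleBucketConfig } from './initDistribution'
+//   job('initialize distribution', initDistribution(singleBucketConfig))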

+ 159 - 0
tests/network-tests/src/flows/storagev2/initStorage.ts

@@ -0,0 +1,159 @@
+import { FlowProps } from '../../Flow'
+import { extendDebug } from '../../Debugger'
+import { WorkingGroups } from '../../WorkingGroups'
+import { IStorageBucketOperatorMetadata, StorageBucketOperatorMetadata } from '@joystream/metadata-protobuf'
+import { CreateInterface, createType } from '@joystream/types'
+import { BagId, DynamicBagId, StaticBagId } from '@joystream/types/storage'
+import _ from 'lodash'
+import { Utils } from '../../utils'
+import BN from 'bn.js'
+
+type StorageBucketConfig = {
+  metadata: IStorageBucketOperatorMetadata
+  staticBags?: CreateInterface<StaticBagId>[]
+  storageLimit: BN
+  objectsLimit: number
+  operatorId: number
+  transactorKey: string
+}
+
+type InitStorageConfig = {
+  buckets: StorageBucketConfig[]
+  dynamicBagPolicy: {
+    [K in keyof typeof DynamicBagId.typeDefinitions]?: number
+  }
+}
+
+export const allStaticBags: CreateInterface<StaticBagId>[] = [
+  'Council',
+  { WorkingGroup: 'Content' },
+  { WorkingGroup: 'Distribution' },
+  { WorkingGroup: 'Gateway' },
+  { WorkingGroup: 'OperationsAlpha' },
+  { WorkingGroup: 'OperationsBeta' },
+  { WorkingGroup: 'OperationsGamma' },
+  { WorkingGroup: 'Storage' },
+]
+
+export const singleBucketConfig: InitStorageConfig = {
+  dynamicBagPolicy: {
+    'Channel': 1,
+    'Member': 1,
+  },
+  buckets: [
+    {
+      metadata: { endpoint: process.env.COLOSSUS_1_URL || 'http://localhost:3333' },
+      staticBags: allStaticBags,
+      operatorId: parseInt(process.env.COLOSSUS_1_WORKER_ID || '0'),
+      storageLimit: new BN(1_000_000_000_000),
+      objectsLimit: 1000000000,
+      transactorKey: process.env.COLOSSUS_1_TRANSACTOR_KEY || '5DkE5YD8m5Yzno6EH2RTBnH268TDnnibZMEMjxwYemU4XevU', // //Colossus1
+    },
+  ],
+}
+
+export const doubleBucketConfig: InitStorageConfig = {
+  dynamicBagPolicy: {
+    'Channel': 2,
+    'Member': 2,
+  },
+  buckets: [
+    {
+      metadata: { endpoint: process.env.COLOSSUS_1_URL || 'http://localhost:3333' },
+      staticBags: allStaticBags,
+      operatorId: parseInt(process.env.COLOSSUS_1_WORKER_ID || '0'),
+      storageLimit: new BN(1_000_000_000_000),
+      objectsLimit: 1000000000,
+      transactorKey: process.env.COLOSSUS_1_TRANSACTOR_KEY || '5DkE5YD8m5Yzno6EH2RTBnH268TDnnibZMEMjxwYemU4XevU', // //Colossus1
+    },
+    {
+      metadata: { endpoint: process.env.STORAGE_2_URL || 'http://localhost:3335' },
+      staticBags: allStaticBags,
+      operatorId: parseInt(process.env.STORAGE_2_WORKER_ID || '1'),
+      storageLimit: new BN(1_000_000_000_000),
+      objectsLimit: 1000000000,
+      transactorKey: process.env.COLOSSUS_2_TRANSACTOR_KEY || '5FbzYmQ3HogiEEDSXPYJe58yCcmSh3vsZLodTdBB6YuLDAj7', // //Colossus2
+    },
+  ],
+}
+
+export default function createFlow({ buckets, dynamicBagPolicy }: InitStorageConfig) {
+  return async function initStorage({ api }: FlowProps): Promise<void> {
+    const debug = extendDebug('flow:initStorage')
+    debug('Started')
+
+    // Get the storage working group lead
+    const storageLeaderId = await api.getLeadWorkerId(WorkingGroups.Storage)
+    const storageLeader = await api.getGroupLead(WorkingGroups.Storage)
+    if (!storageLeaderId || !storageLeader) {
+      throw new Error('Active storage leader is required in this flow!')
+    }
+
+    const storageLeaderKey = storageLeader.role_account_id.toString()
+    // Compute the max storage limit without mutating the shared bucket config array
+    const maxStorageLimit = buckets.map((b) => b.storageLimit).reduce((a, b) => BN.max(a, b))
+    const maxObjectsLimit = Math.max(...buckets.map((b) => b.objectsLimit))
+
+    // Hire operators
+    // const hireWorkersFixture = new HireWorkesFixture(api, totalBucketsNum, WorkingGroups.Distribution)
+    // await new FixtureRunner(hireWorkersFixture).run()
+    // const operatorIds = hireWorkersFixture.getHiredWorkers()
+
+    const operatorIds = buckets.map((b) => createType('WorkerId', b.operatorId))
+    const operatorKeys = await api.getWorkerRoleAccounts(operatorIds, WorkingGroups.Storage)
+
+    // Set global limits and policies
+    const updateDynamicBagPolicyTxs = _.entries(dynamicBagPolicy).map(([bagType, numberOfBuckets]) =>
+      api.tx.storage.updateNumberOfStorageBucketsInDynamicBagCreationPolicy(
+        bagType as keyof typeof DynamicBagId.typeDefinitions,
+        numberOfBuckets
+      )
+    )
+    const setMaxVoucherLimitsTx = api.tx.storage.updateStorageBucketsVoucherMaxLimits(maxStorageLimit, maxObjectsLimit)
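+    // Allow at least 5 buckets per bag, even when fewer buckets are configured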
+    const setBucketPerBagLimitTx = api.tx.storage.updateStorageBucketsPerBagLimit(Math.max(5, buckets.length))
+
+    await api.signAndSendMany(
+      [...updateDynamicBagPolicyTxs, setMaxVoucherLimitsTx, setBucketPerBagLimitTx],
+      storageLeaderKey
+    )
+
+    // Create buckets
+    const createBucketTxs = buckets.map((b, i) =>
+      api.tx.storage.createStorageBucket(operatorIds[i], true, b.storageLimit, b.objectsLimit)
+    )
+    const createBucketResults = await api.signAndSendMany(createBucketTxs, storageLeaderKey)
+    const bucketById = new Map<number, StorageBucketConfig>()
+    createBucketResults.forEach((res, i) => {
+      const bucketId = api.getEvent(res, 'storage', 'StorageBucketCreated').data[0]
+      bucketById.set(bucketId.toNumber(), buckets[i])
+    })
+
+    // Accept invitations
+    const acceptInvitationTxs = Array.from(bucketById.entries()).map(([bucketId, bucketConfig], i) =>
+      api.tx.storage.acceptStorageBucketInvitation(operatorIds[i], bucketId, bucketConfig.transactorKey)
+    )
+    await api.signAndSendManyByMany(acceptInvitationTxs, operatorKeys)
+
+    // Set bucket operator metadata and attach the configured static bags
+    const bucketSetupPromises = _.flatten(
+      Array.from(bucketById.entries()).map(([bucketId, bucketConfig], i) => {
+        const operatorId = operatorIds[i]
+        const operatorKey = operatorKeys[i]
+        const metadataBytes = Utils.metadataToBytes(StorageBucketOperatorMetadata, bucketConfig.metadata)
+        const setMetaTx = api.tx.storage.setStorageOperatorMetadata(operatorId, bucketId, metadataBytes)
+        const setMetaPromise = api.signAndSendMany([setMetaTx], operatorKey)
+        const updateBagTxs = (bucketConfig.staticBags || []).map((sBagId) => {
+          return api.tx.storage.updateStorageBucketsForBag(
+            createType<BagId, 'BagId'>('BagId', { Static: sBagId }),
+            createType('BTreeSet<StorageBucketId>', [bucketId]),
+            createType('BTreeSet<StorageBucketId>', [])
+          )
+        })
+        const updateBagsPromise = api.signAndSendMany(updateBagTxs, storageLeaderKey)
+        return [updateBagsPromise, setMetaPromise]
+      })
+    )
+    await Promise.all(bucketSetupPromises)
+
+    debug('Done')
+  }
+}

+ 3 - 0
tests/network-tests/src/flows/workingGroup/manageWorkerAsWorker.ts

@@ -23,6 +23,9 @@ export default {
   content: async function ({ api, env }: FlowProps): Promise<void> {
     return manageWorkerAsWorker(api, env, WorkingGroups.Content)
   },
+  distribution: async function ({ api, env }: FlowProps): Promise<void> {
+    return manageWorkerAsWorker(api, env, WorkingGroups.Distribution)
+  },
 }
 
 // Manage worker as worker

+ 3 - 0
tests/network-tests/src/flows/workingGroup/workerPayout.ts

@@ -29,6 +29,9 @@ export default {
   content: async function ({ api, env, lock }: FlowProps): Promise<void> {
     return workerPayouts(api, env, WorkingGroups.Content, lock)
   },
+  distribution: async function ({ api, env, lock }: FlowProps): Promise<void> {
+    return workerPayouts(api, env, WorkingGroups.Distribution, lock)
+  },
 }
 
 async function workerPayouts(api: Api, env: NodeJS.ProcessEnv, group: WorkingGroups, lock: ResourceLocker) {

+ 158 - 0
tests/network-tests/src/graphql/generated/queries.ts

@@ -0,0 +1,158 @@
+import * as Types from './schema'
+
+import gql from 'graphql-tag'
+type DataObjectTypeFields_DataObjectTypeChannelAvatar_Fragment = {
+  __typename: 'DataObjectTypeChannelAvatar'
+  channel?: Types.Maybe<{ id: string }>
+}
+
+type DataObjectTypeFields_DataObjectTypeChannelCoverPhoto_Fragment = {
+  __typename: 'DataObjectTypeChannelCoverPhoto'
+  channel?: Types.Maybe<{ id: string }>
+}
+
+type DataObjectTypeFields_DataObjectTypeVideoMedia_Fragment = {
+  __typename: 'DataObjectTypeVideoMedia'
+  video?: Types.Maybe<{ id: string }>
+}
+
+type DataObjectTypeFields_DataObjectTypeVideoThumbnail_Fragment = {
+  __typename: 'DataObjectTypeVideoThumbnail'
+  video?: Types.Maybe<{ id: string }>
+}
+
+type DataObjectTypeFields_DataObjectTypeUnknown_Fragment = { __typename: 'DataObjectTypeUnknown' }
+
+export type DataObjectTypeFieldsFragment =
+  | DataObjectTypeFields_DataObjectTypeChannelAvatar_Fragment
+  | DataObjectTypeFields_DataObjectTypeChannelCoverPhoto_Fragment
+  | DataObjectTypeFields_DataObjectTypeVideoMedia_Fragment
+  | DataObjectTypeFields_DataObjectTypeVideoThumbnail_Fragment
+  | DataObjectTypeFields_DataObjectTypeUnknown_Fragment
+
+export type StorageDataObjectFieldsFragment = {
+  id: string
+  ipfsHash: string
+  isAccepted: boolean
+  size: any
+  deletionPrize: any
+  unsetAt?: Types.Maybe<any>
+  storageBagId: string
+  type:
+    | DataObjectTypeFields_DataObjectTypeChannelAvatar_Fragment
+    | DataObjectTypeFields_DataObjectTypeChannelCoverPhoto_Fragment
+    | DataObjectTypeFields_DataObjectTypeVideoMedia_Fragment
+    | DataObjectTypeFields_DataObjectTypeVideoThumbnail_Fragment
+    | DataObjectTypeFields_DataObjectTypeUnknown_Fragment
+}
+
+export type ChannelFieldsFragment = {
+  title?: Types.Maybe<string>
+  description?: Types.Maybe<string>
+  isPublic?: Types.Maybe<boolean>
+  rewardAccount?: Types.Maybe<string>
+  isCensored: boolean
+  language?: Types.Maybe<{ iso: string }>
+  ownerMember?: Types.Maybe<{ id: string }>
+  ownerCuratorGroup?: Types.Maybe<{ id: string }>
+  category?: Types.Maybe<{ name?: Types.Maybe<string> }>
+  avatarPhoto?: Types.Maybe<StorageDataObjectFieldsFragment>
+  coverPhoto?: Types.Maybe<StorageDataObjectFieldsFragment>
+}
+
+export type GetDataObjectsByIdsQueryVariables = Types.Exact<{
+  ids?: Types.Maybe<Array<Types.Scalars['ID']> | Types.Scalars['ID']>
+}>
+
+export type GetDataObjectsByIdsQuery = { storageDataObjects: Array<StorageDataObjectFieldsFragment> }
+
+export type GetChannelByIdQueryVariables = Types.Exact<{
+  id: Types.Scalars['ID']
+}>
+
+export type GetChannelByIdQuery = { channelByUniqueInput?: Types.Maybe<ChannelFieldsFragment> }
+
+export const DataObjectTypeFields = gql`
+  fragment DataObjectTypeFields on DataObjectType {
+    __typename
+    ... on DataObjectTypeChannelAvatar {
+      channel {
+        id
+      }
+    }
+    ... on DataObjectTypeChannelCoverPhoto {
+      channel {
+        id
+      }
+    }
+    ... on DataObjectTypeVideoThumbnail {
+      video {
+        id
+      }
+    }
+    ... on DataObjectTypeVideoMedia {
+      video {
+        id
+      }
+    }
+  }
+`
+export const StorageDataObjectFields = gql`
+  fragment StorageDataObjectFields on StorageDataObject {
+    id
+    ipfsHash
+    isAccepted
+    size
+    type {
+      ...DataObjectTypeFields
+    }
+    deletionPrize
+    unsetAt
+    storageBagId
+  }
+  ${DataObjectTypeFields}
+`
+export const ChannelFields = gql`
+  fragment ChannelFields on Channel {
+    title
+    description
+    isPublic
+    language {
+      iso
+    }
+    rewardAccount
+    isCensored
+    ownerMember {
+      id
+    }
+    ownerCuratorGroup {
+      id
+    }
+    category {
+      name
+    }
+    avatarPhoto {
+      ...StorageDataObjectFields
+    }
+    coverPhoto {
+      ...StorageDataObjectFields
+    }
+  }
+  ${StorageDataObjectFields}
+`
+export const GetDataObjectsByIds = gql`
+  query getDataObjectsByIds($ids: [ID!]) {
+    storageDataObjects(where: { id_in: $ids }) {
+      ...StorageDataObjectFields
+    }
+  }
+  ${StorageDataObjectFields}
+`
+export const GetChannelById = gql`
+  query getChannelById($id: ID!) {
+    channelByUniqueInput(where: { id: $id }) {
+      ...ChannelFields
+    }
+  }
+  ${ChannelFields}
+`

+ 3715 - 0
tests/network-tests/src/graphql/generated/schema.ts

@@ -0,0 +1,3715 @@
+export type Maybe<T> = T | null
+export type Exact<T extends { [key: string]: unknown }> = { [K in keyof T]: T[K] }
+export type MakeOptional<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]?: Maybe<T[SubKey]> }
+export type MakeMaybe<T, K extends keyof T> = Omit<T, K> & { [SubKey in K]: Maybe<T[SubKey]> }
+/** All built-in and custom scalars, mapped to their actual values */
+export type Scalars = {
+  ID: string
+  String: string
+  Boolean: boolean
+  Int: number
+  Float: number
+  /** The javascript `Date` as string. Type represents date and time as the ISO Date string. */
+  DateTime: any
+  /** GraphQL representation of BigInt */
+  BigInt: any
+  /** The `JSONObject` scalar type represents JSON objects as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf). */
+  JSONObject: any
+}
+
+export type BaseGraphQlObject = {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+}
+
+export type BaseModel = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+}
+
+export type BaseModelUuid = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+}
+
+export type BaseWhereInput = {
+  id_eq?: Maybe<Scalars['String']>
+  id_in?: Maybe<Array<Scalars['String']>>
+  createdAt_eq?: Maybe<Scalars['String']>
+  createdAt_lt?: Maybe<Scalars['String']>
+  createdAt_lte?: Maybe<Scalars['String']>
+  createdAt_gt?: Maybe<Scalars['String']>
+  createdAt_gte?: Maybe<Scalars['String']>
+  createdById_eq?: Maybe<Scalars['String']>
+  updatedAt_eq?: Maybe<Scalars['String']>
+  updatedAt_lt?: Maybe<Scalars['String']>
+  updatedAt_lte?: Maybe<Scalars['String']>
+  updatedAt_gt?: Maybe<Scalars['String']>
+  updatedAt_gte?: Maybe<Scalars['String']>
+  updatedById_eq?: Maybe<Scalars['String']>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['String']>
+  deletedAt_lt?: Maybe<Scalars['String']>
+  deletedAt_lte?: Maybe<Scalars['String']>
+  deletedAt_gt?: Maybe<Scalars['String']>
+  deletedAt_gte?: Maybe<Scalars['String']>
+  deletedById_eq?: Maybe<Scalars['String']>
+}
+
+export type Channel = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  ownerMember?: Maybe<Membership>
+  ownerMemberId?: Maybe<Scalars['String']>
+  ownerCuratorGroup?: Maybe<CuratorGroup>
+  ownerCuratorGroupId?: Maybe<Scalars['String']>
+  category?: Maybe<ChannelCategory>
+  categoryId?: Maybe<Scalars['String']>
+  /** Reward account where revenue is sent if set. */
+  rewardAccount?: Maybe<Scalars['String']>
+  /** The title of the Channel */
+  title?: Maybe<Scalars['String']>
+  /** The description of a Channel */
+  description?: Maybe<Scalars['String']>
+  coverPhoto?: Maybe<StorageDataObject>
+  coverPhotoId?: Maybe<Scalars['String']>
+  avatarPhoto?: Maybe<StorageDataObject>
+  avatarPhotoId?: Maybe<Scalars['String']>
+  /** Flag signaling whether a channel is public. */
+  isPublic?: Maybe<Scalars['Boolean']>
+  /** Flag signaling whether a channel is censored. */
+  isCensored: Scalars['Boolean']
+  language?: Maybe<Language>
+  languageId?: Maybe<Scalars['String']>
+  videos: Array<Video>
+  /** Number of the block the channel was created in */
+  createdInBlock: Scalars['Int']
+  collaborators: Array<Membership>
+}
+
+export type ChannelCategoriesByNameFtsOutput = {
+  item: ChannelCategoriesByNameSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type ChannelCategoriesByNameSearchResult = ChannelCategory
+
+/** Category of media channel */
+export type ChannelCategory = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** The name of the category */
+  name?: Maybe<Scalars['String']>
+  channels: Array<Channel>
+  createdInBlock: Scalars['Int']
+}
+
+export type ChannelCategoryConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<ChannelCategoryEdge>
+  pageInfo: PageInfo
+}
+
+export type ChannelCategoryCreateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock: Scalars['Float']
+}
+
+export type ChannelCategoryEdge = {
+  node: ChannelCategory
+  cursor: Scalars['String']
+}
+
+export enum ChannelCategoryOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NameAsc = 'name_ASC',
+  NameDesc = 'name_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type ChannelCategoryUpdateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type ChannelCategoryWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  name_eq?: Maybe<Scalars['String']>
+  name_contains?: Maybe<Scalars['String']>
+  name_startsWith?: Maybe<Scalars['String']>
+  name_endsWith?: Maybe<Scalars['String']>
+  name_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  channels_none?: Maybe<ChannelWhereInput>
+  channels_some?: Maybe<ChannelWhereInput>
+  channels_every?: Maybe<ChannelWhereInput>
+  AND?: Maybe<Array<ChannelCategoryWhereInput>>
+  OR?: Maybe<Array<ChannelCategoryWhereInput>>
+}
+
+export type ChannelCategoryWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type ChannelConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<ChannelEdge>
+  pageInfo: PageInfo
+}
+
+export type ChannelCreateInput = {
+  ownerMember?: Maybe<Scalars['ID']>
+  ownerCuratorGroup?: Maybe<Scalars['ID']>
+  category?: Maybe<Scalars['ID']>
+  rewardAccount?: Maybe<Scalars['String']>
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  coverPhoto?: Maybe<Scalars['ID']>
+  avatarPhoto?: Maybe<Scalars['ID']>
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored: Scalars['Boolean']
+  language?: Maybe<Scalars['ID']>
+  createdInBlock: Scalars['Float']
+}
+
+export type ChannelEdge = {
+  node: Channel
+  cursor: Scalars['String']
+}
+
+export enum ChannelOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  OwnerMemberAsc = 'ownerMember_ASC',
+  OwnerMemberDesc = 'ownerMember_DESC',
+  OwnerCuratorGroupAsc = 'ownerCuratorGroup_ASC',
+  OwnerCuratorGroupDesc = 'ownerCuratorGroup_DESC',
+  CategoryAsc = 'category_ASC',
+  CategoryDesc = 'category_DESC',
+  RewardAccountAsc = 'rewardAccount_ASC',
+  RewardAccountDesc = 'rewardAccount_DESC',
+  TitleAsc = 'title_ASC',
+  TitleDesc = 'title_DESC',
+  DescriptionAsc = 'description_ASC',
+  DescriptionDesc = 'description_DESC',
+  CoverPhotoAsc = 'coverPhoto_ASC',
+  CoverPhotoDesc = 'coverPhoto_DESC',
+  AvatarPhotoAsc = 'avatarPhoto_ASC',
+  AvatarPhotoDesc = 'avatarPhoto_DESC',
+  IsPublicAsc = 'isPublic_ASC',
+  IsPublicDesc = 'isPublic_DESC',
+  IsCensoredAsc = 'isCensored_ASC',
+  IsCensoredDesc = 'isCensored_DESC',
+  LanguageAsc = 'language_ASC',
+  LanguageDesc = 'language_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type ChannelUpdateInput = {
+  ownerMember?: Maybe<Scalars['ID']>
+  ownerCuratorGroup?: Maybe<Scalars['ID']>
+  category?: Maybe<Scalars['ID']>
+  rewardAccount?: Maybe<Scalars['String']>
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  coverPhoto?: Maybe<Scalars['ID']>
+  avatarPhoto?: Maybe<Scalars['ID']>
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored?: Maybe<Scalars['Boolean']>
+  language?: Maybe<Scalars['ID']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type ChannelWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  rewardAccount_eq?: Maybe<Scalars['String']>
+  rewardAccount_contains?: Maybe<Scalars['String']>
+  rewardAccount_startsWith?: Maybe<Scalars['String']>
+  rewardAccount_endsWith?: Maybe<Scalars['String']>
+  rewardAccount_in?: Maybe<Array<Scalars['String']>>
+  title_eq?: Maybe<Scalars['String']>
+  title_contains?: Maybe<Scalars['String']>
+  title_startsWith?: Maybe<Scalars['String']>
+  title_endsWith?: Maybe<Scalars['String']>
+  title_in?: Maybe<Array<Scalars['String']>>
+  description_eq?: Maybe<Scalars['String']>
+  description_contains?: Maybe<Scalars['String']>
+  description_startsWith?: Maybe<Scalars['String']>
+  description_endsWith?: Maybe<Scalars['String']>
+  description_in?: Maybe<Array<Scalars['String']>>
+  isPublic_eq?: Maybe<Scalars['Boolean']>
+  isPublic_in?: Maybe<Array<Scalars['Boolean']>>
+  isCensored_eq?: Maybe<Scalars['Boolean']>
+  isCensored_in?: Maybe<Array<Scalars['Boolean']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  ownerMember?: Maybe<MembershipWhereInput>
+  ownerCuratorGroup?: Maybe<CuratorGroupWhereInput>
+  category?: Maybe<ChannelCategoryWhereInput>
+  coverPhoto?: Maybe<StorageDataObjectWhereInput>
+  avatarPhoto?: Maybe<StorageDataObjectWhereInput>
+  language?: Maybe<LanguageWhereInput>
+  videos_none?: Maybe<VideoWhereInput>
+  videos_some?: Maybe<VideoWhereInput>
+  videos_every?: Maybe<VideoWhereInput>
+  collaborators_none?: Maybe<MembershipWhereInput>
+  collaborators_some?: Maybe<MembershipWhereInput>
+  collaborators_every?: Maybe<MembershipWhereInput>
+  AND?: Maybe<Array<ChannelWhereInput>>
+  OR?: Maybe<Array<ChannelWhereInput>>
+}
+
+export type ChannelWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum Continent {
+  Af = 'AF',
+  Na = 'NA',
+  Oc = 'OC',
+  An = 'AN',
+  As = 'AS',
+  Eu = 'EU',
+  Sa = 'SA',
+}
+
+export type CuratorGroup = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Curators belonging to this group */
+  curatorIds: Array<Scalars['Int']>
+  /** Is group active or not */
+  isActive: Scalars['Boolean']
+  channels: Array<Channel>
+}
+
+export type CuratorGroupConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<CuratorGroupEdge>
+  pageInfo: PageInfo
+}
+
+export type CuratorGroupCreateInput = {
+  curatorIds: Array<Scalars['Int']>
+  isActive: Scalars['Boolean']
+}
+
+export type CuratorGroupEdge = {
+  node: CuratorGroup
+  cursor: Scalars['String']
+}
+
+export enum CuratorGroupOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsActiveAsc = 'isActive_ASC',
+  IsActiveDesc = 'isActive_DESC',
+}
+
+export type CuratorGroupUpdateInput = {
+  curatorIds?: Maybe<Array<Scalars['Int']>>
+  isActive?: Maybe<Scalars['Boolean']>
+}
+
+export type CuratorGroupWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  curatorIds_containsAll?: Maybe<Array<Scalars['Int']>>
+  curatorIds_containsNone?: Maybe<Array<Scalars['Int']>>
+  curatorIds_containsAny?: Maybe<Array<Scalars['Int']>>
+  isActive_eq?: Maybe<Scalars['Boolean']>
+  isActive_in?: Maybe<Array<Scalars['Boolean']>>
+  channels_none?: Maybe<ChannelWhereInput>
+  channels_some?: Maybe<ChannelWhereInput>
+  channels_every?: Maybe<ChannelWhereInput>
+  AND?: Maybe<Array<CuratorGroupWhereInput>>
+  OR?: Maybe<Array<CuratorGroupWhereInput>>
+}
+
+export type CuratorGroupWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DataObjectType =
+  | DataObjectTypeChannelAvatar
+  | DataObjectTypeChannelCoverPhoto
+  | DataObjectTypeVideoMedia
+  | DataObjectTypeVideoThumbnail
+  | DataObjectTypeUnknown
+
+export type DataObjectTypeChannelAvatar = {
+  /** Related channel entity */
+  channel?: Maybe<Channel>
+}
+
+export type DataObjectTypeChannelCoverPhoto = {
+  /** Related channel entity */
+  channel?: Maybe<Channel>
+}
+
+export type DataObjectTypeUnknown = {
+  phantom?: Maybe<Scalars['Int']>
+}
+
+export type DataObjectTypeVideoMedia = {
+  /** Related video entity */
+  video?: Maybe<Video>
+}
+
+export type DataObjectTypeVideoThumbnail = {
+  /** Related video entity */
+  video?: Maybe<Video>
+}
+
+export type DeleteResponse = {
+  id: Scalars['ID']
+}
+
+export type DistributionBucket = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  family: DistributionBucketFamily
+  familyId: Scalars['String']
+  /** Bucket index within the family */
+  bucketIndex: Scalars['Int']
+  operators: Array<DistributionBucketOperator>
+  /** Whether the bucket is accepting any new bags */
+  acceptingNewBags: Scalars['Boolean']
+  /** Whether the bucket is currently distributing content */
+  distributing: Scalars['Boolean']
+  bags: Array<StorageBag>
+}
+
+export type DistributionBucketConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketCreateInput = {
+  family: Scalars['ID']
+  bucketIndex: Scalars['Float']
+  acceptingNewBags: Scalars['Boolean']
+  distributing: Scalars['Boolean']
+}
+
+export type DistributionBucketEdge = {
+  node: DistributionBucket
+  cursor: Scalars['String']
+}
+
+export type DistributionBucketFamily = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  metadata?: Maybe<DistributionBucketFamilyMetadata>
+  metadataId?: Maybe<Scalars['String']>
+  buckets: Array<DistributionBucket>
+}
+
+export type DistributionBucketFamilyConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketFamilyEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketFamilyCreateInput = {
+  metadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketFamilyEdge = {
+  node: DistributionBucketFamily
+  cursor: Scalars['String']
+}
+
+export type DistributionBucketFamilyGeographicArea = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Geographical area (continent / country / subdivision) */
+  area: GeographicalArea
+  distributionBucketFamilyMetadata: DistributionBucketFamilyMetadata
+  distributionBucketFamilyMetadataId: Scalars['String']
+}
+
+export type DistributionBucketFamilyGeographicAreaConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketFamilyGeographicAreaEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketFamilyGeographicAreaCreateInput = {
+  area: Scalars['JSONObject']
+  distributionBucketFamilyMetadata: Scalars['ID']
+}
+
+export type DistributionBucketFamilyGeographicAreaEdge = {
+  node: DistributionBucketFamilyGeographicArea
+  cursor: Scalars['String']
+}
+
+export enum DistributionBucketFamilyGeographicAreaOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  DistributionBucketFamilyMetadataAsc = 'distributionBucketFamilyMetadata_ASC',
+  DistributionBucketFamilyMetadataDesc = 'distributionBucketFamilyMetadata_DESC',
+}
+
+export type DistributionBucketFamilyGeographicAreaUpdateInput = {
+  area?: Maybe<Scalars['JSONObject']>
+  distributionBucketFamilyMetadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketFamilyGeographicAreaWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  area_json?: Maybe<Scalars['JSONObject']>
+  distributionBucketFamilyMetadata?: Maybe<DistributionBucketFamilyMetadataWhereInput>
+  AND?: Maybe<Array<DistributionBucketFamilyGeographicAreaWhereInput>>
+  OR?: Maybe<Array<DistributionBucketFamilyGeographicAreaWhereInput>>
+}
+
+export type DistributionBucketFamilyGeographicAreaWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DistributionBucketFamilyMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Name of the geographical region covered by the family (ie.: us-east-1) */
+  region?: Maybe<Scalars['String']>
+  /** Optional, more specific description of the region covered by the family */
+  description?: Maybe<Scalars['String']>
+  areas: Array<DistributionBucketFamilyGeographicArea>
+  /** List of targets (hosts/ips) best suited latency measurements for the family */
+  latencyTestTargets?: Maybe<Array<Scalars['String']>>
+  distributionbucketfamilymetadata?: Maybe<Array<DistributionBucketFamily>>
+}
+
+export type DistributionBucketFamilyMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketFamilyMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketFamilyMetadataCreateInput = {
+  region?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  latencyTestTargets?: Maybe<Array<Scalars['String']>>
+}
+
+export type DistributionBucketFamilyMetadataEdge = {
+  node: DistributionBucketFamilyMetadata
+  cursor: Scalars['String']
+}
+
+export enum DistributionBucketFamilyMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  RegionAsc = 'region_ASC',
+  RegionDesc = 'region_DESC',
+  DescriptionAsc = 'description_ASC',
+  DescriptionDesc = 'description_DESC',
+}
+
+export type DistributionBucketFamilyMetadataUpdateInput = {
+  region?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  latencyTestTargets?: Maybe<Array<Scalars['String']>>
+}
+
+export type DistributionBucketFamilyMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  region_eq?: Maybe<Scalars['String']>
+  region_contains?: Maybe<Scalars['String']>
+  region_startsWith?: Maybe<Scalars['String']>
+  region_endsWith?: Maybe<Scalars['String']>
+  region_in?: Maybe<Array<Scalars['String']>>
+  description_eq?: Maybe<Scalars['String']>
+  description_contains?: Maybe<Scalars['String']>
+  description_startsWith?: Maybe<Scalars['String']>
+  description_endsWith?: Maybe<Scalars['String']>
+  description_in?: Maybe<Array<Scalars['String']>>
+  latencyTestTargets_containsAll?: Maybe<Array<Scalars['String']>>
+  latencyTestTargets_containsNone?: Maybe<Array<Scalars['String']>>
+  latencyTestTargets_containsAny?: Maybe<Array<Scalars['String']>>
+  areas_none?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>
+  areas_some?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>
+  areas_every?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>
+  distributionbucketfamilymetadata_none?: Maybe<DistributionBucketFamilyWhereInput>
+  distributionbucketfamilymetadata_some?: Maybe<DistributionBucketFamilyWhereInput>
+  distributionbucketfamilymetadata_every?: Maybe<DistributionBucketFamilyWhereInput>
+  AND?: Maybe<Array<DistributionBucketFamilyMetadataWhereInput>>
+  OR?: Maybe<Array<DistributionBucketFamilyMetadataWhereInput>>
+}
+
+export type DistributionBucketFamilyMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum DistributionBucketFamilyOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  MetadataAsc = 'metadata_ASC',
+  MetadataDesc = 'metadata_DESC',
+}
+
+export type DistributionBucketFamilyUpdateInput = {
+  metadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketFamilyWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  metadata?: Maybe<DistributionBucketFamilyMetadataWhereInput>
+  buckets_none?: Maybe<DistributionBucketWhereInput>
+  buckets_some?: Maybe<DistributionBucketWhereInput>
+  buckets_every?: Maybe<DistributionBucketWhereInput>
+  AND?: Maybe<Array<DistributionBucketFamilyWhereInput>>
+  OR?: Maybe<Array<DistributionBucketFamilyWhereInput>>
+}
+
+export type DistributionBucketFamilyWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type DistributionBucketOperator = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  distributionBucket: DistributionBucket
+  distributionBucketId: Scalars['String']
+  /** ID of the distribution group worker */
+  workerId: Scalars['Int']
+  /** Current operator status */
+  status: DistributionBucketOperatorStatus
+  metadata?: Maybe<DistributionBucketOperatorMetadata>
+  metadataId?: Maybe<Scalars['String']>
+}
+
+export type DistributionBucketOperatorConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketOperatorEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketOperatorCreateInput = {
+  distributionBucket: Scalars['ID']
+  workerId: Scalars['Float']
+  status: DistributionBucketOperatorStatus
+  metadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketOperatorEdge = {
+  node: DistributionBucketOperator
+  cursor: Scalars['String']
+}
+
+export type DistributionBucketOperatorMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Root distributor node API endpoint */
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<NodeLocationMetadata>
+  nodeLocationId?: Maybe<Scalars['String']>
+  /** Additional information about the node/operator */
+  extra?: Maybe<Scalars['String']>
+  distributionbucketoperatormetadata?: Maybe<Array<DistributionBucketOperator>>
+}
+
+export type DistributionBucketOperatorMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<DistributionBucketOperatorMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type DistributionBucketOperatorMetadataCreateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<Scalars['ID']>
+  extra?: Maybe<Scalars['String']>
+}
+
+export type DistributionBucketOperatorMetadataEdge = {
+  node: DistributionBucketOperatorMetadata
+  cursor: Scalars['String']
+}
+
+export enum DistributionBucketOperatorMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NodeEndpointAsc = 'nodeEndpoint_ASC',
+  NodeEndpointDesc = 'nodeEndpoint_DESC',
+  NodeLocationAsc = 'nodeLocation_ASC',
+  NodeLocationDesc = 'nodeLocation_DESC',
+  ExtraAsc = 'extra_ASC',
+  ExtraDesc = 'extra_DESC',
+}
+
+export type DistributionBucketOperatorMetadataUpdateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<Scalars['ID']>
+  extra?: Maybe<Scalars['String']>
+}
+
+export type DistributionBucketOperatorMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  nodeEndpoint_eq?: Maybe<Scalars['String']>
+  nodeEndpoint_contains?: Maybe<Scalars['String']>
+  nodeEndpoint_startsWith?: Maybe<Scalars['String']>
+  nodeEndpoint_endsWith?: Maybe<Scalars['String']>
+  nodeEndpoint_in?: Maybe<Array<Scalars['String']>>
+  extra_eq?: Maybe<Scalars['String']>
+  extra_contains?: Maybe<Scalars['String']>
+  extra_startsWith?: Maybe<Scalars['String']>
+  extra_endsWith?: Maybe<Scalars['String']>
+  extra_in?: Maybe<Array<Scalars['String']>>
+  nodeLocation?: Maybe<NodeLocationMetadataWhereInput>
+  distributionbucketoperatormetadata_none?: Maybe<DistributionBucketOperatorWhereInput>
+  distributionbucketoperatormetadata_some?: Maybe<DistributionBucketOperatorWhereInput>
+  distributionbucketoperatormetadata_every?: Maybe<DistributionBucketOperatorWhereInput>
+  AND?: Maybe<Array<DistributionBucketOperatorMetadataWhereInput>>
+  OR?: Maybe<Array<DistributionBucketOperatorMetadataWhereInput>>
+}
+
+export type DistributionBucketOperatorMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum DistributionBucketOperatorOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  DistributionBucketAsc = 'distributionBucket_ASC',
+  DistributionBucketDesc = 'distributionBucket_DESC',
+  WorkerIdAsc = 'workerId_ASC',
+  WorkerIdDesc = 'workerId_DESC',
+  StatusAsc = 'status_ASC',
+  StatusDesc = 'status_DESC',
+  MetadataAsc = 'metadata_ASC',
+  MetadataDesc = 'metadata_DESC',
+}
+
+export enum DistributionBucketOperatorStatus {
+  Invited = 'INVITED',
+  Active = 'ACTIVE',
+}
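+
+// Illustrative only: combining the status enum above with the operator
+// where-input declared below to select the active operators of a single
+// bucket. The bucket id is a placeholder.
+const activeOperatorsOfBucket: DistributionBucketOperatorWhereInput = {
+  status_eq: DistributionBucketOperatorStatus.Active,
+  distributionBucket: { id_eq: '0' },
+}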
+
+export type DistributionBucketOperatorUpdateInput = {
+  distributionBucket?: Maybe<Scalars['ID']>
+  workerId?: Maybe<Scalars['Float']>
+  status?: Maybe<DistributionBucketOperatorStatus>
+  metadata?: Maybe<Scalars['ID']>
+}
+
+export type DistributionBucketOperatorWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  workerId_eq?: Maybe<Scalars['Int']>
+  workerId_gt?: Maybe<Scalars['Int']>
+  workerId_gte?: Maybe<Scalars['Int']>
+  workerId_lt?: Maybe<Scalars['Int']>
+  workerId_lte?: Maybe<Scalars['Int']>
+  workerId_in?: Maybe<Array<Scalars['Int']>>
+  status_eq?: Maybe<DistributionBucketOperatorStatus>
+  status_in?: Maybe<Array<DistributionBucketOperatorStatus>>
+  distributionBucket?: Maybe<DistributionBucketWhereInput>
+  metadata?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  AND?: Maybe<Array<DistributionBucketOperatorWhereInput>>
+  OR?: Maybe<Array<DistributionBucketOperatorWhereInput>>
+}
+
+export type DistributionBucketOperatorWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum DistributionBucketOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  FamilyAsc = 'family_ASC',
+  FamilyDesc = 'family_DESC',
+  BucketIndexAsc = 'bucketIndex_ASC',
+  BucketIndexDesc = 'bucketIndex_DESC',
+  AcceptingNewBagsAsc = 'acceptingNewBags_ASC',
+  AcceptingNewBagsDesc = 'acceptingNewBags_DESC',
+  DistributingAsc = 'distributing_ASC',
+  DistributingDesc = 'distributing_DESC',
+}
+
+export type DistributionBucketUpdateInput = {
+  family?: Maybe<Scalars['ID']>
+  bucketIndex?: Maybe<Scalars['Float']>
+  acceptingNewBags?: Maybe<Scalars['Boolean']>
+  distributing?: Maybe<Scalars['Boolean']>
+}
+
+export type DistributionBucketWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  bucketIndex_eq?: Maybe<Scalars['Int']>
+  bucketIndex_gt?: Maybe<Scalars['Int']>
+  bucketIndex_gte?: Maybe<Scalars['Int']>
+  bucketIndex_lt?: Maybe<Scalars['Int']>
+  bucketIndex_lte?: Maybe<Scalars['Int']>
+  bucketIndex_in?: Maybe<Array<Scalars['Int']>>
+  acceptingNewBags_eq?: Maybe<Scalars['Boolean']>
+  acceptingNewBags_in?: Maybe<Array<Scalars['Boolean']>>
+  distributing_eq?: Maybe<Scalars['Boolean']>
+  distributing_in?: Maybe<Array<Scalars['Boolean']>>
+  family?: Maybe<DistributionBucketFamilyWhereInput>
+  operators_none?: Maybe<DistributionBucketOperatorWhereInput>
+  operators_some?: Maybe<DistributionBucketOperatorWhereInput>
+  operators_every?: Maybe<DistributionBucketOperatorWhereInput>
+  bags_none?: Maybe<StorageBagWhereInput>
+  bags_some?: Maybe<StorageBagWhereInput>
+  bags_every?: Maybe<StorageBagWhereInput>
+  AND?: Maybe<Array<DistributionBucketWhereInput>>
+  OR?: Maybe<Array<DistributionBucketWhereInput>>
+}
+
+export type DistributionBucketWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type GeoCoordinates = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  latitude: Scalars['Float']
+  longitude: Scalars['Float']
+  nodelocationmetadatacoordinates?: Maybe<Array<NodeLocationMetadata>>
+}
+
+export type GeoCoordinatesConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<GeoCoordinatesEdge>
+  pageInfo: PageInfo
+}
+
+export type GeoCoordinatesCreateInput = {
+  latitude: Scalars['Float']
+  longitude: Scalars['Float']
+}
+
+export type GeoCoordinatesEdge = {
+  node: GeoCoordinates
+  cursor: Scalars['String']
+}
+
+export enum GeoCoordinatesOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  LatitudeAsc = 'latitude_ASC',
+  LatitudeDesc = 'latitude_DESC',
+  LongitudeAsc = 'longitude_ASC',
+  LongitudeDesc = 'longitude_DESC',
+}
+
+export type GeoCoordinatesUpdateInput = {
+  latitude?: Maybe<Scalars['Float']>
+  longitude?: Maybe<Scalars['Float']>
+}
+
+export type GeoCoordinatesWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  latitude_eq?: Maybe<Scalars['Float']>
+  latitude_gt?: Maybe<Scalars['Float']>
+  latitude_gte?: Maybe<Scalars['Float']>
+  latitude_lt?: Maybe<Scalars['Float']>
+  latitude_lte?: Maybe<Scalars['Float']>
+  latitude_in?: Maybe<Array<Scalars['Float']>>
+  longitude_eq?: Maybe<Scalars['Float']>
+  longitude_gt?: Maybe<Scalars['Float']>
+  longitude_gte?: Maybe<Scalars['Float']>
+  longitude_lt?: Maybe<Scalars['Float']>
+  longitude_lte?: Maybe<Scalars['Float']>
+  longitude_in?: Maybe<Array<Scalars['Float']>>
+  nodelocationmetadatacoordinates_none?: Maybe<NodeLocationMetadataWhereInput>
+  nodelocationmetadatacoordinates_some?: Maybe<NodeLocationMetadataWhereInput>
+  nodelocationmetadatacoordinates_every?: Maybe<NodeLocationMetadataWhereInput>
+  AND?: Maybe<Array<GeoCoordinatesWhereInput>>
+  OR?: Maybe<Array<GeoCoordinatesWhereInput>>
+}
+
+export type GeoCoordinatesWhereUniqueInput = {
+  id: Scalars['ID']
+}
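+
+// Illustrative only: the comparison operators of GeoCoordinatesWhereInput
+// compose into a bounding-box filter over node coordinates. The bounds are
+// placeholders roughly covering central Europe.
+const centralEuropeBox: GeoCoordinatesWhereInput = {
+  latitude_gte: 45,
+  latitude_lte: 55,
+  longitude_gte: 5,
+  longitude_lte: 20,
+}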
+
+export type GeographicalArea = GeographicalAreaContinent | GeographicalAreaCountry | GeographicalAreaSubdivistion
+
+export type GeographicalAreaContinent = {
+  code?: Maybe<Continent>
+}
+
+export type GeographicalAreaContinentCreateInput = {
+  code?: Maybe<Continent>
+}
+
+export type GeographicalAreaContinentUpdateInput = {
+  code?: Maybe<Continent>
+}
+
+export type GeographicalAreaContinentWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  code_eq?: Maybe<Continent>
+  code_in?: Maybe<Array<Continent>>
+  AND?: Maybe<Array<GeographicalAreaContinentWhereInput>>
+  OR?: Maybe<Array<GeographicalAreaContinentWhereInput>>
+}
+
+export type GeographicalAreaContinentWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type GeographicalAreaCountry = {
+  /** ISO 3166-1 alpha-2 country code */
+  code?: Maybe<Scalars['String']>
+}
+
+export type GeographicalAreaSubdivistion = {
+  /** ISO 3166-2 subdivision code */
+  code?: Maybe<Scalars['String']>
+}
+
+export type Language = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** ISO 639-1 language identifier */
+  iso: Scalars['String']
+  createdInBlock: Scalars['Int']
+  channellanguage?: Maybe<Array<Channel>>
+  videolanguage?: Maybe<Array<Video>>
+}
+
+export type LanguageConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<LanguageEdge>
+  pageInfo: PageInfo
+}
+
+export type LanguageCreateInput = {
+  iso: Scalars['String']
+  createdInBlock: Scalars['Float']
+}
+
+export type LanguageEdge = {
+  node: Language
+  cursor: Scalars['String']
+}
+
+export enum LanguageOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsoAsc = 'iso_ASC',
+  IsoDesc = 'iso_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type LanguageUpdateInput = {
+  iso?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type LanguageWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  iso_eq?: Maybe<Scalars['String']>
+  iso_contains?: Maybe<Scalars['String']>
+  iso_startsWith?: Maybe<Scalars['String']>
+  iso_endsWith?: Maybe<Scalars['String']>
+  iso_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  channellanguage_none?: Maybe<ChannelWhereInput>
+  channellanguage_some?: Maybe<ChannelWhereInput>
+  channellanguage_every?: Maybe<ChannelWhereInput>
+  videolanguage_none?: Maybe<VideoWhereInput>
+  videolanguage_some?: Maybe<VideoWhereInput>
+  videolanguage_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<LanguageWhereInput>>
+  OR?: Maybe<Array<LanguageWhereInput>>
+}
+
+export type LanguageWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type License = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** License code defined by Joystream */
+  code?: Maybe<Scalars['Int']>
+  /** Attribution (if required by the license) */
+  attribution?: Maybe<Scalars['String']>
+  /** Custom license content */
+  customText?: Maybe<Scalars['String']>
+  videolicense?: Maybe<Array<Video>>
+}
+
+export type LicenseConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<LicenseEdge>
+  pageInfo: PageInfo
+}
+
+export type LicenseCreateInput = {
+  code?: Maybe<Scalars['Float']>
+  attribution?: Maybe<Scalars['String']>
+  customText?: Maybe<Scalars['String']>
+}
+
+export type LicenseEdge = {
+  node: License
+  cursor: Scalars['String']
+}
+
+export enum LicenseOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CodeAsc = 'code_ASC',
+  CodeDesc = 'code_DESC',
+  AttributionAsc = 'attribution_ASC',
+  AttributionDesc = 'attribution_DESC',
+  CustomTextAsc = 'customText_ASC',
+  CustomTextDesc = 'customText_DESC',
+}
+
+export type LicenseUpdateInput = {
+  code?: Maybe<Scalars['Float']>
+  attribution?: Maybe<Scalars['String']>
+  customText?: Maybe<Scalars['String']>
+}
+
+export type LicenseWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  code_eq?: Maybe<Scalars['Int']>
+  code_gt?: Maybe<Scalars['Int']>
+  code_gte?: Maybe<Scalars['Int']>
+  code_lt?: Maybe<Scalars['Int']>
+  code_lte?: Maybe<Scalars['Int']>
+  code_in?: Maybe<Array<Scalars['Int']>>
+  attribution_eq?: Maybe<Scalars['String']>
+  attribution_contains?: Maybe<Scalars['String']>
+  attribution_startsWith?: Maybe<Scalars['String']>
+  attribution_endsWith?: Maybe<Scalars['String']>
+  attribution_in?: Maybe<Array<Scalars['String']>>
+  customText_eq?: Maybe<Scalars['String']>
+  customText_contains?: Maybe<Scalars['String']>
+  customText_startsWith?: Maybe<Scalars['String']>
+  customText_endsWith?: Maybe<Scalars['String']>
+  customText_in?: Maybe<Array<Scalars['String']>>
+  videolicense_none?: Maybe<VideoWhereInput>
+  videolicense_some?: Maybe<VideoWhereInput>
+  videolicense_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<LicenseWhereInput>>
+  OR?: Maybe<Array<LicenseWhereInput>>
+}
+
+export type LicenseWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type MembersByHandleFtsOutput = {
+  item: MembersByHandleSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type MembersByHandleSearchResult = Membership
+
+/** Stored information about a registered user */
+export type Membership = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** The unique handle chosen by the member */
+  handle: Scalars['String']
+  /** A URL to the member's avatar image */
+  avatarUri?: Maybe<Scalars['String']>
+  /** Short text chosen by the member to share information about themselves */
+  about?: Maybe<Scalars['String']>
+  /** Member's controller account id */
+  controllerAccount: Scalars['String']
+  /** Member's root account id */
+  rootAccount: Scalars['String']
+  /** Block number when the member was registered */
+  createdInBlock: Scalars['Int']
+  /** How the member was registered */
+  entry: MembershipEntryMethod
+  /** The type of subscription the member has purchased, if any. */
+  subscription?: Maybe<Scalars['Int']>
+  channels: Array<Channel>
+  collaboratorInChannels: Array<Channel>
+}
+
+export type MembershipConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<MembershipEdge>
+  pageInfo: PageInfo
+}
+
+export type MembershipCreateInput = {
+  handle: Scalars['String']
+  avatarUri?: Maybe<Scalars['String']>
+  about?: Maybe<Scalars['String']>
+  controllerAccount: Scalars['String']
+  rootAccount: Scalars['String']
+  createdInBlock: Scalars['Float']
+  entry: MembershipEntryMethod
+  subscription?: Maybe<Scalars['Float']>
+}
+
+export type MembershipEdge = {
+  node: Membership
+  cursor: Scalars['String']
+}
+
+export enum MembershipEntryMethod {
+  Paid = 'PAID',
+  Screening = 'SCREENING',
+  Genesis = 'GENESIS',
+}
+
+export enum MembershipOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  HandleAsc = 'handle_ASC',
+  HandleDesc = 'handle_DESC',
+  AvatarUriAsc = 'avatarUri_ASC',
+  AvatarUriDesc = 'avatarUri_DESC',
+  AboutAsc = 'about_ASC',
+  AboutDesc = 'about_DESC',
+  ControllerAccountAsc = 'controllerAccount_ASC',
+  ControllerAccountDesc = 'controllerAccount_DESC',
+  RootAccountAsc = 'rootAccount_ASC',
+  RootAccountDesc = 'rootAccount_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+  EntryAsc = 'entry_ASC',
+  EntryDesc = 'entry_DESC',
+  SubscriptionAsc = 'subscription_ASC',
+  SubscriptionDesc = 'subscription_DESC',
+}
+
+export type MembershipUpdateInput = {
+  handle?: Maybe<Scalars['String']>
+  avatarUri?: Maybe<Scalars['String']>
+  about?: Maybe<Scalars['String']>
+  controllerAccount?: Maybe<Scalars['String']>
+  rootAccount?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+  entry?: Maybe<MembershipEntryMethod>
+  subscription?: Maybe<Scalars['Float']>
+}
+
+export type MembershipWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  handle_eq?: Maybe<Scalars['String']>
+  handle_contains?: Maybe<Scalars['String']>
+  handle_startsWith?: Maybe<Scalars['String']>
+  handle_endsWith?: Maybe<Scalars['String']>
+  handle_in?: Maybe<Array<Scalars['String']>>
+  avatarUri_eq?: Maybe<Scalars['String']>
+  avatarUri_contains?: Maybe<Scalars['String']>
+  avatarUri_startsWith?: Maybe<Scalars['String']>
+  avatarUri_endsWith?: Maybe<Scalars['String']>
+  avatarUri_in?: Maybe<Array<Scalars['String']>>
+  about_eq?: Maybe<Scalars['String']>
+  about_contains?: Maybe<Scalars['String']>
+  about_startsWith?: Maybe<Scalars['String']>
+  about_endsWith?: Maybe<Scalars['String']>
+  about_in?: Maybe<Array<Scalars['String']>>
+  controllerAccount_eq?: Maybe<Scalars['String']>
+  controllerAccount_contains?: Maybe<Scalars['String']>
+  controllerAccount_startsWith?: Maybe<Scalars['String']>
+  controllerAccount_endsWith?: Maybe<Scalars['String']>
+  controllerAccount_in?: Maybe<Array<Scalars['String']>>
+  rootAccount_eq?: Maybe<Scalars['String']>
+  rootAccount_contains?: Maybe<Scalars['String']>
+  rootAccount_startsWith?: Maybe<Scalars['String']>
+  rootAccount_endsWith?: Maybe<Scalars['String']>
+  rootAccount_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  entry_eq?: Maybe<MembershipEntryMethod>
+  entry_in?: Maybe<Array<MembershipEntryMethod>>
+  subscription_eq?: Maybe<Scalars['Int']>
+  subscription_gt?: Maybe<Scalars['Int']>
+  subscription_gte?: Maybe<Scalars['Int']>
+  subscription_lt?: Maybe<Scalars['Int']>
+  subscription_lte?: Maybe<Scalars['Int']>
+  subscription_in?: Maybe<Array<Scalars['Int']>>
+  channels_none?: Maybe<ChannelWhereInput>
+  channels_some?: Maybe<ChannelWhereInput>
+  channels_every?: Maybe<ChannelWhereInput>
+  collaboratorInChannels_none?: Maybe<ChannelWhereInput>
+  collaboratorInChannels_some?: Maybe<ChannelWhereInput>
+  collaboratorInChannels_every?: Maybe<ChannelWhereInput>
+  AND?: Maybe<Array<MembershipWhereInput>>
+  OR?: Maybe<Array<MembershipWhereInput>>
+}
+
+export type MembershipWhereUniqueInput = {
+  id?: Maybe<Scalars['ID']>
+  handle?: Maybe<Scalars['String']>
+}
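+
+// Illustrative only: unlike the id-only unique inputs elsewhere in this
+// schema, MembershipWhereUniqueInput accepts either key, so
+// membershipByUniqueInput can resolve a member by id or by handle.
+// The handle is a placeholder.
+const membershipByHandle: MembershipWhereUniqueInput = { handle: 'alice' }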
+
+export type NodeLocationMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** ISO 3166-1 alpha-2 country code (2 letters) */
+  countryCode?: Maybe<Scalars['String']>
+  /** City name */
+  city?: Maybe<Scalars['String']>
+  coordinates?: Maybe<GeoCoordinates>
+  coordinatesId?: Maybe<Scalars['String']>
+  distributionbucketoperatormetadatanodeLocation?: Maybe<Array<DistributionBucketOperatorMetadata>>
+  storagebucketoperatormetadatanodeLocation?: Maybe<Array<StorageBucketOperatorMetadata>>
+}
+
+export type NodeLocationMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<NodeLocationMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type NodeLocationMetadataCreateInput = {
+  countryCode?: Maybe<Scalars['String']>
+  city?: Maybe<Scalars['String']>
+  coordinates?: Maybe<Scalars['ID']>
+}
+
+export type NodeLocationMetadataEdge = {
+  node: NodeLocationMetadata
+  cursor: Scalars['String']
+}
+
+export enum NodeLocationMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CountryCodeAsc = 'countryCode_ASC',
+  CountryCodeDesc = 'countryCode_DESC',
+  CityAsc = 'city_ASC',
+  CityDesc = 'city_DESC',
+  CoordinatesAsc = 'coordinates_ASC',
+  CoordinatesDesc = 'coordinates_DESC',
+}
+
+export type NodeLocationMetadataUpdateInput = {
+  countryCode?: Maybe<Scalars['String']>
+  city?: Maybe<Scalars['String']>
+  coordinates?: Maybe<Scalars['ID']>
+}
+
+export type NodeLocationMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  countryCode_eq?: Maybe<Scalars['String']>
+  countryCode_contains?: Maybe<Scalars['String']>
+  countryCode_startsWith?: Maybe<Scalars['String']>
+  countryCode_endsWith?: Maybe<Scalars['String']>
+  countryCode_in?: Maybe<Array<Scalars['String']>>
+  city_eq?: Maybe<Scalars['String']>
+  city_contains?: Maybe<Scalars['String']>
+  city_startsWith?: Maybe<Scalars['String']>
+  city_endsWith?: Maybe<Scalars['String']>
+  city_in?: Maybe<Array<Scalars['String']>>
+  coordinates?: Maybe<GeoCoordinatesWhereInput>
+  distributionbucketoperatormetadatanodeLocation_none?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  distributionbucketoperatormetadatanodeLocation_some?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  distributionbucketoperatormetadatanodeLocation_every?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  storagebucketoperatormetadatanodeLocation_none?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  storagebucketoperatormetadatanodeLocation_some?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  storagebucketoperatormetadatanodeLocation_every?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  AND?: Maybe<Array<NodeLocationMetadataWhereInput>>
+  OR?: Maybe<Array<NodeLocationMetadataWhereInput>>
+}
+
+export type NodeLocationMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type PageInfo = {
+  hasNextPage: Scalars['Boolean']
+  hasPreviousPage: Scalars['Boolean']
+  startCursor?: Maybe<Scalars['String']>
+  endCursor?: Maybe<Scalars['String']>
+}
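+
+// Illustrative only: every *Connection type above pairs its edges with this
+// PageInfo, following the Relay cursor-pagination convention. A minimal
+// generic walker, assuming the caller supplies a fetchPage callback (the
+// generator itself emits no runtime helpers):
+async function fetchAllNodes<TNode>(
+  fetchPage: (after?: string) => Promise<{ edges: Array<{ node: TNode; cursor: string }>; pageInfo: PageInfo }>
+): Promise<TNode[]> {
+  const nodes: TNode[] = []
+  let after: string | undefined
+  let hasNextPage = true
+  while (hasNextPage) {
+    const { edges, pageInfo } = await fetchPage(after)
+    nodes.push(...edges.map((e) => e.node))
+    hasNextPage = pageInfo.hasNextPage
+    after = pageInfo.endCursor || undefined
+  }
+  return nodes
+}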
+
+export type ProcessorState = {
+  lastCompleteBlock: Scalars['Float']
+  lastProcessedEvent: Scalars['String']
+  indexerHead: Scalars['Float']
+  chainHead: Scalars['Float']
+}
+
+export type Query = {
+  channelCategories: Array<ChannelCategory>
+  channelCategoryByUniqueInput?: Maybe<ChannelCategory>
+  channelCategoriesConnection: ChannelCategoryConnection
+  channels: Array<Channel>
+  channelByUniqueInput?: Maybe<Channel>
+  channelsConnection: ChannelConnection
+  curatorGroups: Array<CuratorGroup>
+  curatorGroupByUniqueInput?: Maybe<CuratorGroup>
+  curatorGroupsConnection: CuratorGroupConnection
+  distributionBucketFamilyGeographicAreas: Array<DistributionBucketFamilyGeographicArea>
+  distributionBucketFamilyGeographicAreaByUniqueInput?: Maybe<DistributionBucketFamilyGeographicArea>
+  distributionBucketFamilyGeographicAreasConnection: DistributionBucketFamilyGeographicAreaConnection
+  distributionBucketFamilyMetadata: Array<DistributionBucketFamilyMetadata>
+  distributionBucketFamilyMetadataByUniqueInput?: Maybe<DistributionBucketFamilyMetadata>
+  distributionBucketFamilyMetadataConnection: DistributionBucketFamilyMetadataConnection
+  distributionBucketFamilies: Array<DistributionBucketFamily>
+  distributionBucketFamilyByUniqueInput?: Maybe<DistributionBucketFamily>
+  distributionBucketFamiliesConnection: DistributionBucketFamilyConnection
+  distributionBucketOperatorMetadata: Array<DistributionBucketOperatorMetadata>
+  distributionBucketOperatorMetadataByUniqueInput?: Maybe<DistributionBucketOperatorMetadata>
+  distributionBucketOperatorMetadataConnection: DistributionBucketOperatorMetadataConnection
+  distributionBucketOperators: Array<DistributionBucketOperator>
+  distributionBucketOperatorByUniqueInput?: Maybe<DistributionBucketOperator>
+  distributionBucketOperatorsConnection: DistributionBucketOperatorConnection
+  distributionBuckets: Array<DistributionBucket>
+  distributionBucketByUniqueInput?: Maybe<DistributionBucket>
+  distributionBucketsConnection: DistributionBucketConnection
+  geoCoordinates: Array<GeoCoordinates>
+  geoCoordinatesByUniqueInput?: Maybe<GeoCoordinates>
+  geoCoordinatesConnection: GeoCoordinatesConnection
+  languages: Array<Language>
+  languageByUniqueInput?: Maybe<Language>
+  languagesConnection: LanguageConnection
+  licenses: Array<License>
+  licenseByUniqueInput?: Maybe<License>
+  licensesConnection: LicenseConnection
+  memberships: Array<Membership>
+  membershipByUniqueInput?: Maybe<Membership>
+  membershipsConnection: MembershipConnection
+  nodeLocationMetadata: Array<NodeLocationMetadata>
+  nodeLocationMetadataByUniqueInput?: Maybe<NodeLocationMetadata>
+  nodeLocationMetadataConnection: NodeLocationMetadataConnection
+  channelCategoriesByName: Array<ChannelCategoriesByNameFtsOutput>
+  membersByHandle: Array<MembersByHandleFtsOutput>
+  search: Array<SearchFtsOutput>
+  videoCategoriesByName: Array<VideoCategoriesByNameFtsOutput>
+  storageBags: Array<StorageBag>
+  storageBagByUniqueInput?: Maybe<StorageBag>
+  storageBagsConnection: StorageBagConnection
+  storageBucketOperatorMetadata: Array<StorageBucketOperatorMetadata>
+  storageBucketOperatorMetadataByUniqueInput?: Maybe<StorageBucketOperatorMetadata>
+  storageBucketOperatorMetadataConnection: StorageBucketOperatorMetadataConnection
+  storageBuckets: Array<StorageBucket>
+  storageBucketByUniqueInput?: Maybe<StorageBucket>
+  storageBucketsConnection: StorageBucketConnection
+  storageDataObjects: Array<StorageDataObject>
+  storageDataObjectByUniqueInput?: Maybe<StorageDataObject>
+  storageDataObjectsConnection: StorageDataObjectConnection
+  storageSystemParameters: Array<StorageSystemParameters>
+  storageSystemParametersByUniqueInput?: Maybe<StorageSystemParameters>
+  storageSystemParametersConnection: StorageSystemParametersConnection
+  videoCategories: Array<VideoCategory>
+  videoCategoryByUniqueInput?: Maybe<VideoCategory>
+  videoCategoriesConnection: VideoCategoryConnection
+  videoMediaEncodings: Array<VideoMediaEncoding>
+  videoMediaEncodingByUniqueInput?: Maybe<VideoMediaEncoding>
+  videoMediaEncodingsConnection: VideoMediaEncodingConnection
+  videoMediaMetadata: Array<VideoMediaMetadata>
+  videoMediaMetadataByUniqueInput?: Maybe<VideoMediaMetadata>
+  videoMediaMetadataConnection: VideoMediaMetadataConnection
+  videos: Array<Video>
+  videoByUniqueInput?: Maybe<Video>
+  videosConnection: VideoConnection
+  workers: Array<Worker>
+  workerByUniqueInput?: Maybe<Worker>
+  workersConnection: WorkerConnection
+}
+
+export type QueryChannelCategoriesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<ChannelCategoryWhereInput>
+  orderBy?: Maybe<Array<ChannelCategoryOrderByInput>>
+}
+
+export type QueryChannelCategoryByUniqueInputArgs = {
+  where: ChannelCategoryWhereUniqueInput
+}
+
+export type QueryChannelCategoriesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<ChannelCategoryWhereInput>
+  orderBy?: Maybe<Array<ChannelCategoryOrderByInput>>
+}
+
+export type QueryChannelsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<ChannelWhereInput>
+  orderBy?: Maybe<Array<ChannelOrderByInput>>
+}
+
+export type QueryChannelByUniqueInputArgs = {
+  where: ChannelWhereUniqueInput
+}
+
+export type QueryChannelsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<ChannelWhereInput>
+  orderBy?: Maybe<Array<ChannelOrderByInput>>
+}
+
+export type QueryCuratorGroupsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<CuratorGroupWhereInput>
+  orderBy?: Maybe<Array<CuratorGroupOrderByInput>>
+}
+
+export type QueryCuratorGroupByUniqueInputArgs = {
+  where: CuratorGroupWhereUniqueInput
+}
+
+export type QueryCuratorGroupsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<CuratorGroupWhereInput>
+  orderBy?: Maybe<Array<CuratorGroupOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyGeographicAreasArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyGeographicAreaOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyGeographicAreaByUniqueInputArgs = {
+  where: DistributionBucketFamilyGeographicAreaWhereUniqueInput
+}
+
+export type QueryDistributionBucketFamilyGeographicAreasConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketFamilyGeographicAreaWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyGeographicAreaOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketFamilyMetadataWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyMetadataOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyMetadataByUniqueInputArgs = {
+  where: DistributionBucketFamilyMetadataWhereUniqueInput
+}
+
+export type QueryDistributionBucketFamilyMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketFamilyMetadataWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyMetadataOrderByInput>>
+}
+
+export type QueryDistributionBucketFamiliesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketFamilyWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyOrderByInput>>
+}
+
+export type QueryDistributionBucketFamilyByUniqueInputArgs = {
+  where: DistributionBucketFamilyWhereUniqueInput
+}
+
+export type QueryDistributionBucketFamiliesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketFamilyWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketFamilyOrderByInput>>
+}
+
+export type QueryDistributionBucketOperatorMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOperatorMetadataOrderByInput>>
+}
+
+export type QueryDistributionBucketOperatorMetadataByUniqueInputArgs = {
+  where: DistributionBucketOperatorMetadataWhereUniqueInput
+}
+
+export type QueryDistributionBucketOperatorMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketOperatorMetadataWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOperatorMetadataOrderByInput>>
+}
+
+export type QueryDistributionBucketOperatorsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketOperatorWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOperatorOrderByInput>>
+}
+
+export type QueryDistributionBucketOperatorByUniqueInputArgs = {
+  where: DistributionBucketOperatorWhereUniqueInput
+}
+
+export type QueryDistributionBucketOperatorsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketOperatorWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOperatorOrderByInput>>
+}
+
+export type QueryDistributionBucketsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<DistributionBucketWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOrderByInput>>
+}
+
+export type QueryDistributionBucketByUniqueInputArgs = {
+  where: DistributionBucketWhereUniqueInput
+}
+
+export type QueryDistributionBucketsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<DistributionBucketWhereInput>
+  orderBy?: Maybe<Array<DistributionBucketOrderByInput>>
+}
+
+export type QueryGeoCoordinatesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<GeoCoordinatesWhereInput>
+  orderBy?: Maybe<Array<GeoCoordinatesOrderByInput>>
+}
+
+export type QueryGeoCoordinatesByUniqueInputArgs = {
+  where: GeoCoordinatesWhereUniqueInput
+}
+
+export type QueryGeoCoordinatesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<GeoCoordinatesWhereInput>
+  orderBy?: Maybe<Array<GeoCoordinatesOrderByInput>>
+}
+
+export type QueryLanguagesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<LanguageWhereInput>
+  orderBy?: Maybe<Array<LanguageOrderByInput>>
+}
+
+export type QueryLanguageByUniqueInputArgs = {
+  where: LanguageWhereUniqueInput
+}
+
+export type QueryLanguagesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<LanguageWhereInput>
+  orderBy?: Maybe<Array<LanguageOrderByInput>>
+}
+
+export type QueryLicensesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<LicenseWhereInput>
+  orderBy?: Maybe<Array<LicenseOrderByInput>>
+}
+
+export type QueryLicenseByUniqueInputArgs = {
+  where: LicenseWhereUniqueInput
+}
+
+export type QueryLicensesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<LicenseWhereInput>
+  orderBy?: Maybe<Array<LicenseOrderByInput>>
+}
+
+export type QueryMembershipsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<MembershipWhereInput>
+  orderBy?: Maybe<Array<MembershipOrderByInput>>
+}
+
+export type QueryMembershipByUniqueInputArgs = {
+  where: MembershipWhereUniqueInput
+}
+
+export type QueryMembershipsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<MembershipWhereInput>
+  orderBy?: Maybe<Array<MembershipOrderByInput>>
+}
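+
+// Illustrative only: first-page variables for a membershipsConnection query,
+// typed with the args object above; paired with a walker like fetchAllNodes
+// (sketched next to PageInfo) this pages through all matching memberships.
+// The handle prefix is a placeholder.
+const membershipsPageVars: QueryMembershipsConnectionArgs = {
+  first: 100,
+  where: { handle_startsWith: 'alice' },
+  orderBy: [MembershipOrderByInput.HandleAsc],
+}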
+
+export type QueryNodeLocationMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<NodeLocationMetadataWhereInput>
+  orderBy?: Maybe<Array<NodeLocationMetadataOrderByInput>>
+}
+
+export type QueryNodeLocationMetadataByUniqueInputArgs = {
+  where: NodeLocationMetadataWhereUniqueInput
+}
+
+export type QueryNodeLocationMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<NodeLocationMetadataWhereInput>
+  orderBy?: Maybe<Array<NodeLocationMetadataOrderByInput>>
+}
+
+export type QueryChannelCategoriesByNameArgs = {
+  whereChannelCategory?: Maybe<ChannelCategoryWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
+
+export type QueryMembersByHandleArgs = {
+  whereMembership?: Maybe<MembershipWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
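+
+// Illustrative only: arguments for the membersByHandle full-text search;
+// results arrive as MembersByHandleFtsOutput values carrying a relevance
+// rank and a highlight snippet. The search text is a placeholder.
+const membersSearchArgs: QueryMembersByHandleArgs = {
+  text: 'alice',
+  limit: 10,
+}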
+
+export type QuerySearchArgs = {
+  whereVideo?: Maybe<VideoWhereInput>
+  whereChannel?: Maybe<ChannelWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
+
+export type QueryVideoCategoriesByNameArgs = {
+  whereVideoCategory?: Maybe<VideoCategoryWhereInput>
+  skip?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  text: Scalars['String']
+}
+
+export type QueryStorageBagsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageBagWhereInput>
+  orderBy?: Maybe<Array<StorageBagOrderByInput>>
+}
+
+export type QueryStorageBagByUniqueInputArgs = {
+  where: StorageBagWhereUniqueInput
+}
+
+export type QueryStorageBagsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageBagWhereInput>
+  orderBy?: Maybe<Array<StorageBagOrderByInput>>
+}
+
+export type QueryStorageBucketOperatorMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  orderBy?: Maybe<Array<StorageBucketOperatorMetadataOrderByInput>>
+}
+
+export type QueryStorageBucketOperatorMetadataByUniqueInputArgs = {
+  where: StorageBucketOperatorMetadataWhereUniqueInput
+}
+
+export type QueryStorageBucketOperatorMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  orderBy?: Maybe<Array<StorageBucketOperatorMetadataOrderByInput>>
+}
+
+export type QueryStorageBucketsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageBucketWhereInput>
+  orderBy?: Maybe<Array<StorageBucketOrderByInput>>
+}
+
+export type QueryStorageBucketByUniqueInputArgs = {
+  where: StorageBucketWhereUniqueInput
+}
+
+export type QueryStorageBucketsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageBucketWhereInput>
+  orderBy?: Maybe<Array<StorageBucketOrderByInput>>
+}
+
+export type QueryStorageDataObjectsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageDataObjectWhereInput>
+  orderBy?: Maybe<Array<StorageDataObjectOrderByInput>>
+}
+
+export type QueryStorageDataObjectByUniqueInputArgs = {
+  where: StorageDataObjectWhereUniqueInput
+}
+
+export type QueryStorageDataObjectsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageDataObjectWhereInput>
+  orderBy?: Maybe<Array<StorageDataObjectOrderByInput>>
+}
+
+export type QueryStorageSystemParametersArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<StorageSystemParametersWhereInput>
+  orderBy?: Maybe<Array<StorageSystemParametersOrderByInput>>
+}
+
+export type QueryStorageSystemParametersByUniqueInputArgs = {
+  where: StorageSystemParametersWhereUniqueInput
+}
+
+export type QueryStorageSystemParametersConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<StorageSystemParametersWhereInput>
+  orderBy?: Maybe<Array<StorageSystemParametersOrderByInput>>
+}
+
+export type QueryVideoCategoriesArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoCategoryWhereInput>
+  orderBy?: Maybe<Array<VideoCategoryOrderByInput>>
+}
+
+export type QueryVideoCategoryByUniqueInputArgs = {
+  where: VideoCategoryWhereUniqueInput
+}
+
+export type QueryVideoCategoriesConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoCategoryWhereInput>
+  orderBy?: Maybe<Array<VideoCategoryOrderByInput>>
+}
+
+export type QueryVideoMediaEncodingsArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoMediaEncodingWhereInput>
+  orderBy?: Maybe<Array<VideoMediaEncodingOrderByInput>>
+}
+
+export type QueryVideoMediaEncodingByUniqueInputArgs = {
+  where: VideoMediaEncodingWhereUniqueInput
+}
+
+export type QueryVideoMediaEncodingsConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoMediaEncodingWhereInput>
+  orderBy?: Maybe<Array<VideoMediaEncodingOrderByInput>>
+}
+
+export type QueryVideoMediaMetadataArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoMediaMetadataWhereInput>
+  orderBy?: Maybe<Array<VideoMediaMetadataOrderByInput>>
+}
+
+export type QueryVideoMediaMetadataByUniqueInputArgs = {
+  where: VideoMediaMetadataWhereUniqueInput
+}
+
+export type QueryVideoMediaMetadataConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoMediaMetadataWhereInput>
+  orderBy?: Maybe<Array<VideoMediaMetadataOrderByInput>>
+}
+
+export type QueryVideosArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<VideoWhereInput>
+  orderBy?: Maybe<Array<VideoOrderByInput>>
+}
+
+export type QueryVideoByUniqueInputArgs = {
+  where: VideoWhereUniqueInput
+}
+
+export type QueryVideosConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<VideoWhereInput>
+  orderBy?: Maybe<Array<VideoOrderByInput>>
+}
+
+export type QueryWorkersArgs = {
+  offset?: Maybe<Scalars['Int']>
+  limit?: Maybe<Scalars['Int']>
+  where?: Maybe<WorkerWhereInput>
+  orderBy?: Maybe<Array<WorkerOrderByInput>>
+}
+
+export type QueryWorkerByUniqueInputArgs = {
+  where: WorkerWhereUniqueInput
+}
+
+export type QueryWorkersConnectionArgs = {
+  first?: Maybe<Scalars['Int']>
+  after?: Maybe<Scalars['String']>
+  last?: Maybe<Scalars['Int']>
+  before?: Maybe<Scalars['String']>
+  where?: Maybe<WorkerWhereInput>
+  orderBy?: Maybe<Array<WorkerOrderByInput>>
+}
+
+export type SearchFtsOutput = {
+  item: SearchSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type SearchSearchResult = Channel | Video
+
+export type StandardDeleteResponse = {
+  id: Scalars['ID']
+}
+
+export type StorageBag = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  objects: Array<StorageDataObject>
+  storageBuckets: Array<StorageBucket>
+  distributionBuckets: Array<DistributionBucket>
+  /** Owner of the storage bag */
+  owner: StorageBagOwner
+}
+
+export type StorageBagConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageBagEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageBagCreateInput = {
+  owner: Scalars['JSONObject']
+}
+
+export type StorageBagEdge = {
+  node: StorageBag
+  cursor: Scalars['String']
+}
+
+export enum StorageBagOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+}
+
+export type StorageBagOwner =
+  | StorageBagOwnerCouncil
+  | StorageBagOwnerWorkingGroup
+  | StorageBagOwnerMember
+  | StorageBagOwnerChannel
+  | StorageBagOwnerDao
+
+export type StorageBagOwnerChannel = {
+  channelId?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerCouncil = {
+  phantom?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerDao = {
+  daoId?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerMember = {
+  memberId?: Maybe<Scalars['Int']>
+}
+
+export type StorageBagOwnerWorkingGroup = {
+  workingGroupId?: Maybe<Scalars['String']>
+}
+
+export type StorageBagUpdateInput = {
+  owner?: Maybe<Scalars['JSONObject']>
+}
+
+export type StorageBagWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  owner_json?: Maybe<Scalars['JSONObject']>
+  objects_none?: Maybe<StorageDataObjectWhereInput>
+  objects_some?: Maybe<StorageDataObjectWhereInput>
+  objects_every?: Maybe<StorageDataObjectWhereInput>
+  storageBuckets_none?: Maybe<StorageBucketWhereInput>
+  storageBuckets_some?: Maybe<StorageBucketWhereInput>
+  storageBuckets_every?: Maybe<StorageBucketWhereInput>
+  distributionBuckets_none?: Maybe<DistributionBucketWhereInput>
+  distributionBuckets_some?: Maybe<DistributionBucketWhereInput>
+  distributionBuckets_every?: Maybe<DistributionBucketWhereInput>
+  AND?: Maybe<Array<StorageBagWhereInput>>
+  OR?: Maybe<Array<StorageBagWhereInput>>
+}
+
+export type StorageBagWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBucket = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Current bucket operator status */
+  operatorStatus: StorageBucketOperatorStatus
+  operatorMetadata?: Maybe<StorageBucketOperatorMetadata>
+  operatorMetadataId?: Maybe<Scalars['String']>
+  /** Whether the bucket is accepting any new storage bags */
+  acceptingNewBags: Scalars['Boolean']
+  bags: Array<StorageBag>
+  /** Bucket's data object size limit in bytes */
+  dataObjectsSizeLimit: Scalars['BigInt']
+  /** Bucket's data object count limit */
+  dataObjectCountLimit: Scalars['BigInt']
+  /** Number of assigned data objects */
+  dataObjectsCount: Scalars['BigInt']
+  /** Total size of assigned data objects */
+  dataObjectsSize: Scalars['BigInt']
+}
+
+export type StorageBucketConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageBucketEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageBucketCreateInput = {
+  operatorStatus: Scalars['JSONObject']
+  operatorMetadata?: Maybe<Scalars['ID']>
+  acceptingNewBags: Scalars['Boolean']
+  dataObjectsSizeLimit: Scalars['String']
+  dataObjectCountLimit: Scalars['String']
+  dataObjectsCount: Scalars['String']
+  dataObjectsSize: Scalars['String']
+}
+
+export type StorageBucketEdge = {
+  node: StorageBucket
+  cursor: Scalars['String']
+}
+
+export type StorageBucketOperatorMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Root node endpoint */
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<NodeLocationMetadata>
+  nodeLocationId?: Maybe<Scalars['String']>
+  /** Additional information about the node/operator */
+  extra?: Maybe<Scalars['String']>
+  storagebucketoperatorMetadata?: Maybe<Array<StorageBucket>>
+}
+
+export type StorageBucketOperatorMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageBucketOperatorMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageBucketOperatorMetadataCreateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<Scalars['ID']>
+  extra?: Maybe<Scalars['String']>
+}
+
+export type StorageBucketOperatorMetadataEdge = {
+  node: StorageBucketOperatorMetadata
+  cursor: Scalars['String']
+}
+
+export enum StorageBucketOperatorMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NodeEndpointAsc = 'nodeEndpoint_ASC',
+  NodeEndpointDesc = 'nodeEndpoint_DESC',
+  NodeLocationAsc = 'nodeLocation_ASC',
+  NodeLocationDesc = 'nodeLocation_DESC',
+  ExtraAsc = 'extra_ASC',
+  ExtraDesc = 'extra_DESC',
+}
+
+export type StorageBucketOperatorMetadataUpdateInput = {
+  nodeEndpoint?: Maybe<Scalars['String']>
+  nodeLocation?: Maybe<Scalars['ID']>
+  extra?: Maybe<Scalars['String']>
+}
+
+export type StorageBucketOperatorMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  nodeEndpoint_eq?: Maybe<Scalars['String']>
+  nodeEndpoint_contains?: Maybe<Scalars['String']>
+  nodeEndpoint_startsWith?: Maybe<Scalars['String']>
+  nodeEndpoint_endsWith?: Maybe<Scalars['String']>
+  nodeEndpoint_in?: Maybe<Array<Scalars['String']>>
+  extra_eq?: Maybe<Scalars['String']>
+  extra_contains?: Maybe<Scalars['String']>
+  extra_startsWith?: Maybe<Scalars['String']>
+  extra_endsWith?: Maybe<Scalars['String']>
+  extra_in?: Maybe<Array<Scalars['String']>>
+  nodeLocation?: Maybe<NodeLocationMetadataWhereInput>
+  storagebucketoperatorMetadata_none?: Maybe<StorageBucketWhereInput>
+  storagebucketoperatorMetadata_some?: Maybe<StorageBucketWhereInput>
+  storagebucketoperatorMetadata_every?: Maybe<StorageBucketWhereInput>
+  AND?: Maybe<Array<StorageBucketOperatorMetadataWhereInput>>
+  OR?: Maybe<Array<StorageBucketOperatorMetadataWhereInput>>
+}
+
+export type StorageBucketOperatorMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageBucketOperatorStatus =
+  | StorageBucketOperatorStatusMissing
+  | StorageBucketOperatorStatusInvited
+  | StorageBucketOperatorStatusActive
+
+export type StorageBucketOperatorStatusActive = {
+  workerId: Scalars['Int']
+  transactorAccountId: Scalars['String']
+}
+
+export type StorageBucketOperatorStatusInvited = {
+  workerId: Scalars['Int']
+}
+
+export type StorageBucketOperatorStatusMissing = {
+  phantom?: Maybe<Scalars['Int']>
+}
+
+export enum StorageBucketOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  OperatorMetadataAsc = 'operatorMetadata_ASC',
+  OperatorMetadataDesc = 'operatorMetadata_DESC',
+  AcceptingNewBagsAsc = 'acceptingNewBags_ASC',
+  AcceptingNewBagsDesc = 'acceptingNewBags_DESC',
+  DataObjectsSizeLimitAsc = 'dataObjectsSizeLimit_ASC',
+  DataObjectsSizeLimitDesc = 'dataObjectsSizeLimit_DESC',
+  DataObjectCountLimitAsc = 'dataObjectCountLimit_ASC',
+  DataObjectCountLimitDesc = 'dataObjectCountLimit_DESC',
+  DataObjectsCountAsc = 'dataObjectsCount_ASC',
+  DataObjectsCountDesc = 'dataObjectsCount_DESC',
+  DataObjectsSizeAsc = 'dataObjectsSize_ASC',
+  DataObjectsSizeDesc = 'dataObjectsSize_DESC',
+}
+
+export type StorageBucketUpdateInput = {
+  operatorStatus?: Maybe<Scalars['JSONObject']>
+  operatorMetadata?: Maybe<Scalars['ID']>
+  acceptingNewBags?: Maybe<Scalars['Boolean']>
+  dataObjectsSizeLimit?: Maybe<Scalars['String']>
+  dataObjectCountLimit?: Maybe<Scalars['String']>
+  dataObjectsCount?: Maybe<Scalars['String']>
+  dataObjectsSize?: Maybe<Scalars['String']>
+}
+
+export type StorageBucketWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  operatorStatus_json?: Maybe<Scalars['JSONObject']>
+  acceptingNewBags_eq?: Maybe<Scalars['Boolean']>
+  acceptingNewBags_in?: Maybe<Array<Scalars['Boolean']>>
+  dataObjectsSizeLimit_eq?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_gt?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_gte?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_lt?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_lte?: Maybe<Scalars['BigInt']>
+  dataObjectsSizeLimit_in?: Maybe<Array<Scalars['BigInt']>>
+  dataObjectCountLimit_eq?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_gt?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_gte?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_lt?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_lte?: Maybe<Scalars['BigInt']>
+  dataObjectCountLimit_in?: Maybe<Array<Scalars['BigInt']>>
+  dataObjectsCount_eq?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_gt?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_gte?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_lt?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_lte?: Maybe<Scalars['BigInt']>
+  dataObjectsCount_in?: Maybe<Array<Scalars['BigInt']>>
+  dataObjectsSize_eq?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_gt?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_gte?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_lt?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_lte?: Maybe<Scalars['BigInt']>
+  dataObjectsSize_in?: Maybe<Array<Scalars['BigInt']>>
+  operatorMetadata?: Maybe<StorageBucketOperatorMetadataWhereInput>
+  bags_none?: Maybe<StorageBagWhereInput>
+  bags_some?: Maybe<StorageBagWhereInput>
+  bags_every?: Maybe<StorageBagWhereInput>
+  AND?: Maybe<Array<StorageBucketWhereInput>>
+  OR?: Maybe<Array<StorageBucketWhereInput>>
+}
+
+export type StorageBucketWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type StorageDataObject = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Whether the data object was uploaded and accepted by the storage provider */
+  isAccepted: Scalars['Boolean']
+  /** Data object size in bytes */
+  size: Scalars['BigInt']
+  storageBag: StorageBag
+  storageBagId: Scalars['String']
+  /** IPFS content hash */
+  ipfsHash: Scalars['String']
+  /** The type of the asset that the data object represents (if known) */
+  type: DataObjectType
+  /** Prize for removing the data object */
+  deletionPrize: Scalars['BigInt']
+  /** If the object is no longer used as an asset - the time at which it was unset (if known) */
+  unsetAt?: Maybe<Scalars['DateTime']>
+  channelcoverPhoto?: Maybe<Array<Channel>>
+  channelavatarPhoto?: Maybe<Array<Channel>>
+  videothumbnailPhoto?: Maybe<Array<Video>>
+  videomedia?: Maybe<Array<Video>>
+}
+
+export type StorageDataObjectConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageDataObjectEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageDataObjectCreateInput = {
+  isAccepted: Scalars['Boolean']
+  size: Scalars['String']
+  storageBag: Scalars['ID']
+  ipfsHash: Scalars['String']
+  type: Scalars['JSONObject']
+  deletionPrize: Scalars['String']
+  unsetAt?: Maybe<Scalars['DateTime']>
+}
+
+export type StorageDataObjectEdge = {
+  node: StorageDataObject
+  cursor: Scalars['String']
+}
+
+export enum StorageDataObjectOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsAcceptedAsc = 'isAccepted_ASC',
+  IsAcceptedDesc = 'isAccepted_DESC',
+  SizeAsc = 'size_ASC',
+  SizeDesc = 'size_DESC',
+  StorageBagAsc = 'storageBag_ASC',
+  StorageBagDesc = 'storageBag_DESC',
+  IpfsHashAsc = 'ipfsHash_ASC',
+  IpfsHashDesc = 'ipfsHash_DESC',
+  DeletionPrizeAsc = 'deletionPrize_ASC',
+  DeletionPrizeDesc = 'deletionPrize_DESC',
+  UnsetAtAsc = 'unsetAt_ASC',
+  UnsetAtDesc = 'unsetAt_DESC',
+}
+
+export type StorageDataObjectUpdateInput = {
+  isAccepted?: Maybe<Scalars['Boolean']>
+  size?: Maybe<Scalars['String']>
+  storageBag?: Maybe<Scalars['ID']>
+  ipfsHash?: Maybe<Scalars['String']>
+  type?: Maybe<Scalars['JSONObject']>
+  deletionPrize?: Maybe<Scalars['String']>
+  unsetAt?: Maybe<Scalars['DateTime']>
+}
+
+export type StorageDataObjectWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  isAccepted_eq?: Maybe<Scalars['Boolean']>
+  isAccepted_in?: Maybe<Array<Scalars['Boolean']>>
+  size_eq?: Maybe<Scalars['BigInt']>
+  size_gt?: Maybe<Scalars['BigInt']>
+  size_gte?: Maybe<Scalars['BigInt']>
+  size_lt?: Maybe<Scalars['BigInt']>
+  size_lte?: Maybe<Scalars['BigInt']>
+  size_in?: Maybe<Array<Scalars['BigInt']>>
+  ipfsHash_eq?: Maybe<Scalars['String']>
+  ipfsHash_contains?: Maybe<Scalars['String']>
+  ipfsHash_startsWith?: Maybe<Scalars['String']>
+  ipfsHash_endsWith?: Maybe<Scalars['String']>
+  ipfsHash_in?: Maybe<Array<Scalars['String']>>
+  type_json?: Maybe<Scalars['JSONObject']>
+  deletionPrize_eq?: Maybe<Scalars['BigInt']>
+  deletionPrize_gt?: Maybe<Scalars['BigInt']>
+  deletionPrize_gte?: Maybe<Scalars['BigInt']>
+  deletionPrize_lt?: Maybe<Scalars['BigInt']>
+  deletionPrize_lte?: Maybe<Scalars['BigInt']>
+  deletionPrize_in?: Maybe<Array<Scalars['BigInt']>>
+  unsetAt_eq?: Maybe<Scalars['DateTime']>
+  unsetAt_lt?: Maybe<Scalars['DateTime']>
+  unsetAt_lte?: Maybe<Scalars['DateTime']>
+  unsetAt_gt?: Maybe<Scalars['DateTime']>
+  unsetAt_gte?: Maybe<Scalars['DateTime']>
+  storageBag?: Maybe<StorageBagWhereInput>
+  channelcoverPhoto_none?: Maybe<ChannelWhereInput>
+  channelcoverPhoto_some?: Maybe<ChannelWhereInput>
+  channelcoverPhoto_every?: Maybe<ChannelWhereInput>
+  channelavatarPhoto_none?: Maybe<ChannelWhereInput>
+  channelavatarPhoto_some?: Maybe<ChannelWhereInput>
+  channelavatarPhoto_every?: Maybe<ChannelWhereInput>
+  videothumbnailPhoto_none?: Maybe<VideoWhereInput>
+  videothumbnailPhoto_some?: Maybe<VideoWhereInput>
+  videothumbnailPhoto_every?: Maybe<VideoWhereInput>
+  videomedia_none?: Maybe<VideoWhereInput>
+  videomedia_some?: Maybe<VideoWhereInput>
+  videomedia_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<StorageDataObjectWhereInput>>
+  OR?: Maybe<Array<StorageDataObjectWhereInput>>
+}
+
+export type StorageDataObjectWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+/** Global storage system parameters */
+export type StorageSystemParameters = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Blacklisted content hashes */
+  blacklist: Array<Scalars['String']>
+  /** How many buckets can be assigned to store a bag */
+  storageBucketsPerBagLimit: Scalars['Int']
+  /** How many buckets can be assigned to distribute a bag */
+  distributionBucketsPerBagLimit: Scalars['Int']
+  /** Whether uploading is globally blocked */
+  uploadingBlocked: Scalars['Boolean']
+  /** Additional fee for storing 1 MB of data */
+  dataObjectFeePerMb: Scalars['BigInt']
+  /** Global max. number of objects a storage bucket can store (can also be further limited by the provider) */
+  storageBucketMaxObjectsCountLimit: Scalars['BigInt']
+  /** Global max. size of objects a storage bucket can store (can also be further limited by the provider) */
+  storageBucketMaxObjectsSizeLimit: Scalars['BigInt']
+  /** ID to be assigned to the next data object created */
+  nextDataObjectId: Scalars['BigInt']
+}
+
+export type StorageSystemParametersConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<StorageSystemParametersEdge>
+  pageInfo: PageInfo
+}
+
+export type StorageSystemParametersCreateInput = {
+  blacklist: Array<Scalars['String']>
+  storageBucketsPerBagLimit: Scalars['Float']
+  distributionBucketsPerBagLimit: Scalars['Float']
+  uploadingBlocked: Scalars['Boolean']
+  dataObjectFeePerMb: Scalars['String']
+  storageBucketMaxObjectsCountLimit: Scalars['String']
+  storageBucketMaxObjectsSizeLimit: Scalars['String']
+  nextDataObjectId: Scalars['String']
+}
+
+export type StorageSystemParametersEdge = {
+  node: StorageSystemParameters
+  cursor: Scalars['String']
+}
+
+export enum StorageSystemParametersOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  StorageBucketsPerBagLimitAsc = 'storageBucketsPerBagLimit_ASC',
+  StorageBucketsPerBagLimitDesc = 'storageBucketsPerBagLimit_DESC',
+  DistributionBucketsPerBagLimitAsc = 'distributionBucketsPerBagLimit_ASC',
+  DistributionBucketsPerBagLimitDesc = 'distributionBucketsPerBagLimit_DESC',
+  UploadingBlockedAsc = 'uploadingBlocked_ASC',
+  UploadingBlockedDesc = 'uploadingBlocked_DESC',
+  DataObjectFeePerMbAsc = 'dataObjectFeePerMb_ASC',
+  DataObjectFeePerMbDesc = 'dataObjectFeePerMb_DESC',
+  StorageBucketMaxObjectsCountLimitAsc = 'storageBucketMaxObjectsCountLimit_ASC',
+  StorageBucketMaxObjectsCountLimitDesc = 'storageBucketMaxObjectsCountLimit_DESC',
+  StorageBucketMaxObjectsSizeLimitAsc = 'storageBucketMaxObjectsSizeLimit_ASC',
+  StorageBucketMaxObjectsSizeLimitDesc = 'storageBucketMaxObjectsSizeLimit_DESC',
+  NextDataObjectIdAsc = 'nextDataObjectId_ASC',
+  NextDataObjectIdDesc = 'nextDataObjectId_DESC',
+}
+
+export type StorageSystemParametersUpdateInput = {
+  blacklist?: Maybe<Array<Scalars['String']>>
+  storageBucketsPerBagLimit?: Maybe<Scalars['Float']>
+  distributionBucketsPerBagLimit?: Maybe<Scalars['Float']>
+  uploadingBlocked?: Maybe<Scalars['Boolean']>
+  dataObjectFeePerMb?: Maybe<Scalars['String']>
+  storageBucketMaxObjectsCountLimit?: Maybe<Scalars['String']>
+  storageBucketMaxObjectsSizeLimit?: Maybe<Scalars['String']>
+  nextDataObjectId?: Maybe<Scalars['String']>
+}
+
+export type StorageSystemParametersWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  blacklist_containsAll?: Maybe<Array<Scalars['String']>>
+  blacklist_containsNone?: Maybe<Array<Scalars['String']>>
+  blacklist_containsAny?: Maybe<Array<Scalars['String']>>
+  storageBucketsPerBagLimit_eq?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_gt?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_gte?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_lt?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_lte?: Maybe<Scalars['Int']>
+  storageBucketsPerBagLimit_in?: Maybe<Array<Scalars['Int']>>
+  distributionBucketsPerBagLimit_eq?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_gt?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_gte?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_lt?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_lte?: Maybe<Scalars['Int']>
+  distributionBucketsPerBagLimit_in?: Maybe<Array<Scalars['Int']>>
+  uploadingBlocked_eq?: Maybe<Scalars['Boolean']>
+  uploadingBlocked_in?: Maybe<Array<Scalars['Boolean']>>
+  dataObjectFeePerMb_eq?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_gt?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_gte?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_lt?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_lte?: Maybe<Scalars['BigInt']>
+  dataObjectFeePerMb_in?: Maybe<Array<Scalars['BigInt']>>
+  storageBucketMaxObjectsCountLimit_eq?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_gt?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_gte?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_lt?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_lte?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsCountLimit_in?: Maybe<Array<Scalars['BigInt']>>
+  storageBucketMaxObjectsSizeLimit_eq?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_gt?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_gte?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_lt?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_lte?: Maybe<Scalars['BigInt']>
+  storageBucketMaxObjectsSizeLimit_in?: Maybe<Array<Scalars['BigInt']>>
+  nextDataObjectId_eq?: Maybe<Scalars['BigInt']>
+  nextDataObjectId_gt?: Maybe<Scalars['BigInt']>
+  nextDataObjectId_gte?: Maybe<Scalars['BigInt']>
+  nextDataObjectId_lt?: Maybe<Scalars['BigInt']>
+  nextDataObjectId_lte?: Maybe<Scalars['BigInt']>
+  nextDataObjectId_in?: Maybe<Array<Scalars['BigInt']>>
+  AND?: Maybe<Array<StorageSystemParametersWhereInput>>
+  OR?: Maybe<Array<StorageSystemParametersWhereInput>>
+}
+
+export type StorageSystemParametersWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type Subscription = {
+  stateSubscription: ProcessorState
+}
+
+export type Video = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  channel: Channel
+  channelId: Scalars['String']
+  category?: Maybe<VideoCategory>
+  categoryId?: Maybe<Scalars['String']>
+  /** The title of the video */
+  title?: Maybe<Scalars['String']>
+  /** The description of the Video */
+  description?: Maybe<Scalars['String']>
+  /** Video duration in seconds */
+  duration?: Maybe<Scalars['Int']>
+  thumbnailPhoto?: Maybe<StorageDataObject>
+  thumbnailPhotoId?: Maybe<Scalars['String']>
+  language?: Maybe<Language>
+  languageId?: Maybe<Scalars['String']>
+  /** Whether or not the Video contains marketing */
+  hasMarketing?: Maybe<Scalars['Boolean']>
+  /** If the Video was published on another platform before being published on Joystream - the original publication date */
+  publishedBeforeJoystream?: Maybe<Scalars['DateTime']>
+  /** Whether the Video is supposed to be publicly displayed */
+  isPublic?: Maybe<Scalars['Boolean']>
+  /** Flag signaling whether a video is censored. */
+  isCensored: Scalars['Boolean']
+  /** Whether the Video contains explicit material. */
+  isExplicit?: Maybe<Scalars['Boolean']>
+  license?: Maybe<License>
+  licenseId?: Maybe<Scalars['String']>
+  media?: Maybe<StorageDataObject>
+  mediaId?: Maybe<Scalars['String']>
+  mediaMetadata?: Maybe<VideoMediaMetadata>
+  mediaMetadataId?: Maybe<Scalars['String']>
+  createdInBlock: Scalars['Int']
+  /** Whether the video is featured */
+  isFeatured: Scalars['Boolean']
+}
+
+export type VideoCategoriesByNameFtsOutput = {
+  item: VideoCategoriesByNameSearchResult
+  rank: Scalars['Float']
+  isTypeOf: Scalars['String']
+  highlight: Scalars['String']
+}
+
+export type VideoCategoriesByNameSearchResult = VideoCategory
+
+export type VideoCategory = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** The name of the category */
+  name?: Maybe<Scalars['String']>
+  videos: Array<Video>
+  createdInBlock: Scalars['Int']
+}
+
+export type VideoCategoryConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoCategoryEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoCategoryCreateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock: Scalars['Float']
+}
+
+export type VideoCategoryEdge = {
+  node: VideoCategory
+  cursor: Scalars['String']
+}
+
+export enum VideoCategoryOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  NameAsc = 'name_ASC',
+  NameDesc = 'name_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type VideoCategoryUpdateInput = {
+  name?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type VideoCategoryWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  name_eq?: Maybe<Scalars['String']>
+  name_contains?: Maybe<Scalars['String']>
+  name_startsWith?: Maybe<Scalars['String']>
+  name_endsWith?: Maybe<Scalars['String']>
+  name_in?: Maybe<Array<Scalars['String']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  videos_none?: Maybe<VideoWhereInput>
+  videos_some?: Maybe<VideoWhereInput>
+  videos_every?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<VideoCategoryWhereInput>>
+  OR?: Maybe<Array<VideoCategoryWhereInput>>
+}
+
+export type VideoCategoryWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type VideoConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoCreateInput = {
+  channel: Scalars['ID']
+  category?: Maybe<Scalars['ID']>
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  duration?: Maybe<Scalars['Float']>
+  thumbnailPhoto?: Maybe<Scalars['ID']>
+  language?: Maybe<Scalars['ID']>
+  hasMarketing?: Maybe<Scalars['Boolean']>
+  publishedBeforeJoystream?: Maybe<Scalars['DateTime']>
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored: Scalars['Boolean']
+  isExplicit?: Maybe<Scalars['Boolean']>
+  license?: Maybe<Scalars['ID']>
+  media?: Maybe<Scalars['ID']>
+  mediaMetadata?: Maybe<Scalars['ID']>
+  createdInBlock: Scalars['Float']
+  isFeatured: Scalars['Boolean']
+}
+
+export type VideoEdge = {
+  node: Video
+  cursor: Scalars['String']
+}
+
+export type VideoMediaEncoding = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Encoding of the video media object */
+  codecName?: Maybe<Scalars['String']>
+  /** Media container format */
+  container?: Maybe<Scalars['String']>
+  /** Content MIME type */
+  mimeMediaType?: Maybe<Scalars['String']>
+  videomediametadataencoding?: Maybe<Array<VideoMediaMetadata>>
+}
+
+export type VideoMediaEncodingConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoMediaEncodingEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoMediaEncodingCreateInput = {
+  codecName?: Maybe<Scalars['String']>
+  container?: Maybe<Scalars['String']>
+  mimeMediaType?: Maybe<Scalars['String']>
+}
+
+export type VideoMediaEncodingEdge = {
+  node: VideoMediaEncoding
+  cursor: Scalars['String']
+}
+
+export enum VideoMediaEncodingOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  CodecNameAsc = 'codecName_ASC',
+  CodecNameDesc = 'codecName_DESC',
+  ContainerAsc = 'container_ASC',
+  ContainerDesc = 'container_DESC',
+  MimeMediaTypeAsc = 'mimeMediaType_ASC',
+  MimeMediaTypeDesc = 'mimeMediaType_DESC',
+}
+
+export type VideoMediaEncodingUpdateInput = {
+  codecName?: Maybe<Scalars['String']>
+  container?: Maybe<Scalars['String']>
+  mimeMediaType?: Maybe<Scalars['String']>
+}
+
+export type VideoMediaEncodingWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  codecName_eq?: Maybe<Scalars['String']>
+  codecName_contains?: Maybe<Scalars['String']>
+  codecName_startsWith?: Maybe<Scalars['String']>
+  codecName_endsWith?: Maybe<Scalars['String']>
+  codecName_in?: Maybe<Array<Scalars['String']>>
+  container_eq?: Maybe<Scalars['String']>
+  container_contains?: Maybe<Scalars['String']>
+  container_startsWith?: Maybe<Scalars['String']>
+  container_endsWith?: Maybe<Scalars['String']>
+  container_in?: Maybe<Array<Scalars['String']>>
+  mimeMediaType_eq?: Maybe<Scalars['String']>
+  mimeMediaType_contains?: Maybe<Scalars['String']>
+  mimeMediaType_startsWith?: Maybe<Scalars['String']>
+  mimeMediaType_endsWith?: Maybe<Scalars['String']>
+  mimeMediaType_in?: Maybe<Array<Scalars['String']>>
+  videomediametadataencoding_none?: Maybe<VideoMediaMetadataWhereInput>
+  videomediametadataencoding_some?: Maybe<VideoMediaMetadataWhereInput>
+  videomediametadataencoding_every?: Maybe<VideoMediaMetadataWhereInput>
+  AND?: Maybe<Array<VideoMediaEncodingWhereInput>>
+  OR?: Maybe<Array<VideoMediaEncodingWhereInput>>
+}
+
+export type VideoMediaEncodingWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type VideoMediaMetadata = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  encoding?: Maybe<VideoMediaEncoding>
+  encodingId?: Maybe<Scalars['String']>
+  /** Video media width in pixels */
+  pixelWidth?: Maybe<Scalars['Int']>
+  /** Video media height in pixels */
+  pixelHeight?: Maybe<Scalars['Int']>
+  /** Video media size in bytes */
+  size?: Maybe<Scalars['BigInt']>
+  video?: Maybe<Video>
+  createdInBlock: Scalars['Int']
+}
+
+export type VideoMediaMetadataConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<VideoMediaMetadataEdge>
+  pageInfo: PageInfo
+}
+
+export type VideoMediaMetadataCreateInput = {
+  encoding?: Maybe<Scalars['ID']>
+  pixelWidth?: Maybe<Scalars['Float']>
+  pixelHeight?: Maybe<Scalars['Float']>
+  size?: Maybe<Scalars['String']>
+  createdInBlock: Scalars['Float']
+}
+
+export type VideoMediaMetadataEdge = {
+  node: VideoMediaMetadata
+  cursor: Scalars['String']
+}
+
+export enum VideoMediaMetadataOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  EncodingAsc = 'encoding_ASC',
+  EncodingDesc = 'encoding_DESC',
+  PixelWidthAsc = 'pixelWidth_ASC',
+  PixelWidthDesc = 'pixelWidth_DESC',
+  PixelHeightAsc = 'pixelHeight_ASC',
+  PixelHeightDesc = 'pixelHeight_DESC',
+  SizeAsc = 'size_ASC',
+  SizeDesc = 'size_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+}
+
+export type VideoMediaMetadataUpdateInput = {
+  encoding?: Maybe<Scalars['ID']>
+  pixelWidth?: Maybe<Scalars['Float']>
+  pixelHeight?: Maybe<Scalars['Float']>
+  size?: Maybe<Scalars['String']>
+  createdInBlock?: Maybe<Scalars['Float']>
+}
+
+export type VideoMediaMetadataWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  pixelWidth_eq?: Maybe<Scalars['Int']>
+  pixelWidth_gt?: Maybe<Scalars['Int']>
+  pixelWidth_gte?: Maybe<Scalars['Int']>
+  pixelWidth_lt?: Maybe<Scalars['Int']>
+  pixelWidth_lte?: Maybe<Scalars['Int']>
+  pixelWidth_in?: Maybe<Array<Scalars['Int']>>
+  pixelHeight_eq?: Maybe<Scalars['Int']>
+  pixelHeight_gt?: Maybe<Scalars['Int']>
+  pixelHeight_gte?: Maybe<Scalars['Int']>
+  pixelHeight_lt?: Maybe<Scalars['Int']>
+  pixelHeight_lte?: Maybe<Scalars['Int']>
+  pixelHeight_in?: Maybe<Array<Scalars['Int']>>
+  size_eq?: Maybe<Scalars['BigInt']>
+  size_gt?: Maybe<Scalars['BigInt']>
+  size_gte?: Maybe<Scalars['BigInt']>
+  size_lt?: Maybe<Scalars['BigInt']>
+  size_lte?: Maybe<Scalars['BigInt']>
+  size_in?: Maybe<Array<Scalars['BigInt']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  encoding?: Maybe<VideoMediaEncodingWhereInput>
+  video?: Maybe<VideoWhereInput>
+  AND?: Maybe<Array<VideoMediaMetadataWhereInput>>
+  OR?: Maybe<Array<VideoMediaMetadataWhereInput>>
+}
+
+export type VideoMediaMetadataWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export enum VideoOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  ChannelAsc = 'channel_ASC',
+  ChannelDesc = 'channel_DESC',
+  CategoryAsc = 'category_ASC',
+  CategoryDesc = 'category_DESC',
+  TitleAsc = 'title_ASC',
+  TitleDesc = 'title_DESC',
+  DescriptionAsc = 'description_ASC',
+  DescriptionDesc = 'description_DESC',
+  DurationAsc = 'duration_ASC',
+  DurationDesc = 'duration_DESC',
+  ThumbnailPhotoAsc = 'thumbnailPhoto_ASC',
+  ThumbnailPhotoDesc = 'thumbnailPhoto_DESC',
+  LanguageAsc = 'language_ASC',
+  LanguageDesc = 'language_DESC',
+  HasMarketingAsc = 'hasMarketing_ASC',
+  HasMarketingDesc = 'hasMarketing_DESC',
+  PublishedBeforeJoystreamAsc = 'publishedBeforeJoystream_ASC',
+  PublishedBeforeJoystreamDesc = 'publishedBeforeJoystream_DESC',
+  IsPublicAsc = 'isPublic_ASC',
+  IsPublicDesc = 'isPublic_DESC',
+  IsCensoredAsc = 'isCensored_ASC',
+  IsCensoredDesc = 'isCensored_DESC',
+  IsExplicitAsc = 'isExplicit_ASC',
+  IsExplicitDesc = 'isExplicit_DESC',
+  LicenseAsc = 'license_ASC',
+  LicenseDesc = 'license_DESC',
+  MediaAsc = 'media_ASC',
+  MediaDesc = 'media_DESC',
+  MediaMetadataAsc = 'mediaMetadata_ASC',
+  MediaMetadataDesc = 'mediaMetadata_DESC',
+  CreatedInBlockAsc = 'createdInBlock_ASC',
+  CreatedInBlockDesc = 'createdInBlock_DESC',
+  IsFeaturedAsc = 'isFeatured_ASC',
+  IsFeaturedDesc = 'isFeatured_DESC',
+}
+
+export type VideoUpdateInput = {
+  channel?: Maybe<Scalars['ID']>
+  category?: Maybe<Scalars['ID']>
+  title?: Maybe<Scalars['String']>
+  description?: Maybe<Scalars['String']>
+  duration?: Maybe<Scalars['Float']>
+  thumbnailPhoto?: Maybe<Scalars['ID']>
+  language?: Maybe<Scalars['ID']>
+  hasMarketing?: Maybe<Scalars['Boolean']>
+  publishedBeforeJoystream?: Maybe<Scalars['DateTime']>
+  isPublic?: Maybe<Scalars['Boolean']>
+  isCensored?: Maybe<Scalars['Boolean']>
+  isExplicit?: Maybe<Scalars['Boolean']>
+  license?: Maybe<Scalars['ID']>
+  media?: Maybe<Scalars['ID']>
+  mediaMetadata?: Maybe<Scalars['ID']>
+  createdInBlock?: Maybe<Scalars['Float']>
+  isFeatured?: Maybe<Scalars['Boolean']>
+}
+
+export type VideoWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  title_eq?: Maybe<Scalars['String']>
+  title_contains?: Maybe<Scalars['String']>
+  title_startsWith?: Maybe<Scalars['String']>
+  title_endsWith?: Maybe<Scalars['String']>
+  title_in?: Maybe<Array<Scalars['String']>>
+  description_eq?: Maybe<Scalars['String']>
+  description_contains?: Maybe<Scalars['String']>
+  description_startsWith?: Maybe<Scalars['String']>
+  description_endsWith?: Maybe<Scalars['String']>
+  description_in?: Maybe<Array<Scalars['String']>>
+  duration_eq?: Maybe<Scalars['Int']>
+  duration_gt?: Maybe<Scalars['Int']>
+  duration_gte?: Maybe<Scalars['Int']>
+  duration_lt?: Maybe<Scalars['Int']>
+  duration_lte?: Maybe<Scalars['Int']>
+  duration_in?: Maybe<Array<Scalars['Int']>>
+  hasMarketing_eq?: Maybe<Scalars['Boolean']>
+  hasMarketing_in?: Maybe<Array<Scalars['Boolean']>>
+  publishedBeforeJoystream_eq?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_lt?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_lte?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_gt?: Maybe<Scalars['DateTime']>
+  publishedBeforeJoystream_gte?: Maybe<Scalars['DateTime']>
+  isPublic_eq?: Maybe<Scalars['Boolean']>
+  isPublic_in?: Maybe<Array<Scalars['Boolean']>>
+  isCensored_eq?: Maybe<Scalars['Boolean']>
+  isCensored_in?: Maybe<Array<Scalars['Boolean']>>
+  isExplicit_eq?: Maybe<Scalars['Boolean']>
+  isExplicit_in?: Maybe<Array<Scalars['Boolean']>>
+  createdInBlock_eq?: Maybe<Scalars['Int']>
+  createdInBlock_gt?: Maybe<Scalars['Int']>
+  createdInBlock_gte?: Maybe<Scalars['Int']>
+  createdInBlock_lt?: Maybe<Scalars['Int']>
+  createdInBlock_lte?: Maybe<Scalars['Int']>
+  createdInBlock_in?: Maybe<Array<Scalars['Int']>>
+  isFeatured_eq?: Maybe<Scalars['Boolean']>
+  isFeatured_in?: Maybe<Array<Scalars['Boolean']>>
+  channel?: Maybe<ChannelWhereInput>
+  category?: Maybe<VideoCategoryWhereInput>
+  thumbnailPhoto?: Maybe<StorageDataObjectWhereInput>
+  language?: Maybe<LanguageWhereInput>
+  license?: Maybe<LicenseWhereInput>
+  media?: Maybe<StorageDataObjectWhereInput>
+  mediaMetadata?: Maybe<VideoMediaMetadataWhereInput>
+  AND?: Maybe<Array<VideoWhereInput>>
+  OR?: Maybe<Array<VideoWhereInput>>
+}
+
+export type VideoWhereUniqueInput = {
+  id: Scalars['ID']
+}
+
+export type Worker = BaseGraphQlObject & {
+  id: Scalars['ID']
+  createdAt: Scalars['DateTime']
+  createdById: Scalars['String']
+  updatedAt?: Maybe<Scalars['DateTime']>
+  updatedById?: Maybe<Scalars['String']>
+  deletedAt?: Maybe<Scalars['DateTime']>
+  deletedById?: Maybe<Scalars['String']>
+  version: Scalars['Int']
+  /** Whether the worker is still active */
+  isActive: Scalars['Boolean']
+  /** Runtime identifier */
+  workerId: Scalars['String']
+  /** Associated working group */
+  type: WorkerType
+  /** Custom metadata set by provider */
+  metadata?: Maybe<Scalars['String']>
+}
+
+export type WorkerConnection = {
+  totalCount: Scalars['Int']
+  edges: Array<WorkerEdge>
+  pageInfo: PageInfo
+}
+
+export type WorkerCreateInput = {
+  isActive: Scalars['Boolean']
+  workerId: Scalars['String']
+  type: WorkerType
+  metadata?: Maybe<Scalars['String']>
+}
+
+export type WorkerEdge = {
+  node: Worker
+  cursor: Scalars['String']
+}
+
+export enum WorkerOrderByInput {
+  CreatedAtAsc = 'createdAt_ASC',
+  CreatedAtDesc = 'createdAt_DESC',
+  UpdatedAtAsc = 'updatedAt_ASC',
+  UpdatedAtDesc = 'updatedAt_DESC',
+  DeletedAtAsc = 'deletedAt_ASC',
+  DeletedAtDesc = 'deletedAt_DESC',
+  IsActiveAsc = 'isActive_ASC',
+  IsActiveDesc = 'isActive_DESC',
+  WorkerIdAsc = 'workerId_ASC',
+  WorkerIdDesc = 'workerId_DESC',
+  TypeAsc = 'type_ASC',
+  TypeDesc = 'type_DESC',
+  MetadataAsc = 'metadata_ASC',
+  MetadataDesc = 'metadata_DESC',
+}
+
+export enum WorkerType {
+  Gateway = 'GATEWAY',
+  Storage = 'STORAGE',
+}
+
+export type WorkerUpdateInput = {
+  isActive?: Maybe<Scalars['Boolean']>
+  workerId?: Maybe<Scalars['String']>
+  type?: Maybe<WorkerType>
+  metadata?: Maybe<Scalars['String']>
+}
+
+export type WorkerWhereInput = {
+  id_eq?: Maybe<Scalars['ID']>
+  id_in?: Maybe<Array<Scalars['ID']>>
+  createdAt_eq?: Maybe<Scalars['DateTime']>
+  createdAt_lt?: Maybe<Scalars['DateTime']>
+  createdAt_lte?: Maybe<Scalars['DateTime']>
+  createdAt_gt?: Maybe<Scalars['DateTime']>
+  createdAt_gte?: Maybe<Scalars['DateTime']>
+  createdById_eq?: Maybe<Scalars['ID']>
+  createdById_in?: Maybe<Array<Scalars['ID']>>
+  updatedAt_eq?: Maybe<Scalars['DateTime']>
+  updatedAt_lt?: Maybe<Scalars['DateTime']>
+  updatedAt_lte?: Maybe<Scalars['DateTime']>
+  updatedAt_gt?: Maybe<Scalars['DateTime']>
+  updatedAt_gte?: Maybe<Scalars['DateTime']>
+  updatedById_eq?: Maybe<Scalars['ID']>
+  updatedById_in?: Maybe<Array<Scalars['ID']>>
+  deletedAt_all?: Maybe<Scalars['Boolean']>
+  deletedAt_eq?: Maybe<Scalars['DateTime']>
+  deletedAt_lt?: Maybe<Scalars['DateTime']>
+  deletedAt_lte?: Maybe<Scalars['DateTime']>
+  deletedAt_gt?: Maybe<Scalars['DateTime']>
+  deletedAt_gte?: Maybe<Scalars['DateTime']>
+  deletedById_eq?: Maybe<Scalars['ID']>
+  deletedById_in?: Maybe<Array<Scalars['ID']>>
+  isActive_eq?: Maybe<Scalars['Boolean']>
+  isActive_in?: Maybe<Array<Scalars['Boolean']>>
+  workerId_eq?: Maybe<Scalars['String']>
+  workerId_contains?: Maybe<Scalars['String']>
+  workerId_startsWith?: Maybe<Scalars['String']>
+  workerId_endsWith?: Maybe<Scalars['String']>
+  workerId_in?: Maybe<Array<Scalars['String']>>
+  type_eq?: Maybe<WorkerType>
+  type_in?: Maybe<Array<WorkerType>>
+  metadata_eq?: Maybe<Scalars['String']>
+  metadata_contains?: Maybe<Scalars['String']>
+  metadata_startsWith?: Maybe<Scalars['String']>
+  metadata_endsWith?: Maybe<Scalars['String']>
+  metadata_in?: Maybe<Array<Scalars['String']>>
+  AND?: Maybe<Array<WorkerWhereInput>>
+  OR?: Maybe<Array<WorkerWhereInput>>
+}
+
+export type WorkerWhereUniqueInput = {
+  id: Scalars['ID']
+}
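For orientation, the generated filter and argument types above compose as in this minimal TypeScript sketch (an illustration only — it assumes the codegen output is exported from a local './graphql' module; that path is a placeholder, not part of this diff):

import { QueryVideosArgs, VideoWhereInput, VideoOrderByInput } from './graphql'

// Typed filter: public, non-censored videos created after block 100.
const where: VideoWhereInput = {
  isPublic_eq: true,
  isCensored_eq: false,
  createdInBlock_gt: 100,
}

// Typed arguments for the `videos` query: first 20 results, newest first.
const videosArgs: QueryVideosArgs = {
  limit: 20,
  offset: 0,
  where,
  orderBy: [VideoOrderByInput.CreatedAtDesc],
}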

+ 74 - 0
tests/network-tests/src/graphql/queries/storagev2.graphql

@@ -0,0 +1,74 @@
+fragment DataObjectTypeFields on DataObjectType {
+  __typename
+  ... on DataObjectTypeChannelAvatar {
+    channel {
+      id
+    }
+  }
+  ... on DataObjectTypeChannelCoverPhoto {
+    channel {
+      id
+    }
+  }
+  ... on DataObjectTypeVideoThumbnail {
+    video {
+      id
+    }
+  }
+  ... on DataObjectTypeVideoMedia {
+    video {
+      id
+    }
+  }
+}
+
+fragment StorageDataObjectFields on StorageDataObject {
+  id
+  ipfsHash
+  isAccepted
+  size
+  type {
+    ...DataObjectTypeFields
+  }
+  deletionPrize
+  unsetAt
+  storageBagId
+}
+
+fragment ChannelFields on Channel {
+  title
+  description
+  isPublic
+  language {
+    iso
+  }
+  rewardAccount
+  isCensored
+  ownerMember {
+    id
+  }
+  ownerCuratorGroup {
+    id
+  }
+  category {
+    name
+  }
+  avatarPhoto {
+    ...StorageDataObjectFields
+  }
+  coverPhoto {
+    ...StorageDataObjectFields
+  }
+}
+
+query getDataObjectsByIds($ids: [ID!]) {
+  storageDataObjects(where: { id_in: $ids }) {
+    ...StorageDataObjectFields
+  }
+}
+
+query getChannelById($id: ID!) {
+  channelByUniqueInput(where: { id: $id }) {
+    ...ChannelFields
+  }
+}

+ 18 - 24
tests/network-tests/src/scenarios/full.ts → tests/network-tests/src/scenarios/combined.ts

@@ -1,50 +1,44 @@
 import creatingMemberships from '../flows/membership/creatingMemberships'
-import councilSetup from '../flows/council/setup'
 import leaderSetup from '../flows/workingGroup/leaderSetup'
-import electionParametersProposal from '../flows/proposals/electionParametersProposal'
-import manageLeaderRole from '../flows/proposals/manageLeaderRole'
-import spendingProposal from '../flows/proposals/spendingProposal'
-import textProposal from '../flows/proposals/textProposal'
-import validatorCountProposal from '../flows/proposals/validatorCountProposal'
-import wgMintCapacityProposal from '../flows/proposals/workingGroupMintCapacityProposal'
 import atLeastValueBug from '../flows/workingGroup/atLeastValueBug'
 import { manageWorkerFlow } from '../flows/workingGroup/manageWorkerAsLead'
 import manageWorkerAsWorker from '../flows/workingGroup/manageWorkerAsWorker'
 import workerPayout from '../flows/workingGroup/workerPayout'
+import initDistributionBucket from '../flows/clis/initDistributionBucket'
+import initStorageBucket from '../flows/clis/initStorageBucket'
+import createChannel from '../flows/clis/createChannel'
 import { scenario } from '../Scenario'
 import { WorkingGroups } from '../WorkingGroups'
 
 scenario(async ({ job }) => {
+  // These tests assume:
+  // - a pre-existing storage setup (including a hired lead)
+  // - an existing council
   job('creating members', creatingMemberships)
 
-  const councilJob = job('council setup', councilSetup)
-
-  const proposalsJob = job('proposals', [
-    electionParametersProposal,
-    spendingProposal,
-    textProposal,
-    validatorCountProposal,
-    wgMintCapacityProposal.storage,
-    wgMintCapacityProposal.content,
-    manageLeaderRole.storage,
-    manageLeaderRole.content,
-  ]).requires(councilJob)
-
   const leadSetupJob = job('setup leads', [
-    leaderSetup(WorkingGroups.Storage),
-    leaderSetup(WorkingGroups.Content),
-  ]).after(proposalsJob)
+    leaderSetup(WorkingGroups.Storage, true),
+    leaderSetup(WorkingGroups.Content, true),
+    leaderSetup(WorkingGroups.Distribution, true),
+  ])
 
   // Testing the bug on one working group instance is sufficient
   job('at least value bug', atLeastValueBug).requires(leadSetupJob)
 
   // tests minting payouts (requires council to set mint capacity)
-  job('worker payouts', [workerPayout.storage, workerPayout.content]).requires(leadSetupJob).requires(councilJob)
+  job('worker payouts', [workerPayout.storage, workerPayout.content, workerPayout.distribution]).requires(leadSetupJob)
 
   job('working group tests', [
     manageWorkerFlow(WorkingGroups.Storage),
     manageWorkerAsWorker.storage,
     manageWorkerFlow(WorkingGroups.Content),
     manageWorkerAsWorker.content,
+    manageWorkerFlow(WorkingGroups.Distribution),
+    manageWorkerAsWorker.distribution,
   ]).requires(leadSetupJob)
+
+  const createChannelJob = job('create channel via CLI', createChannel)
+  job('init storage and distribution buckets via CLI', [initDistributionBucket, initStorageBucket]).after(
+    createChannelJob
+  )
 })

+ 16 - 0
tests/network-tests/src/scenarios/giza-issue-reproduction-setup.ts

@@ -0,0 +1,16 @@
+import makeAliceMember from '../flows/membership/makeAliceMember'
+import leaderSetup from '../flows/workingGroup/leaderSetup'
+import { hireWorkersFlow } from '../flows/workingGroup/manageWorkerAsLead'
+import updateAccountsFlow from '../misc/updateAllWorkerRoleAccountsFlow'
+import initStorage, { doubleBucketConfig as storageConfig } from '../flows/storagev2/initStorage'
+import { WorkingGroups } from '../WorkingGroups'
+import { scenario } from '../Scenario'
+
+scenario(async ({ job }) => {
+  job('Make Alice a member', makeAliceMember)
+
+  const leads = job('Set Storage Lead', leaderSetup(WorkingGroups.Storage))
+  const workers = job('Hire Storage Worker', hireWorkersFlow(WorkingGroups.Storage, 1)).after(leads)
+  const updateWorkerAccounts = job('Update worker accounts', updateAccountsFlow).after(workers)
+  job('initialize storage system (2 buckets)', initStorage(storageConfig)).requires(updateWorkerAccounts)
+})

+ 16 - 0
tests/network-tests/src/scenarios/init-storage-and-distribution.ts

@@ -0,0 +1,16 @@
+import leaderSetup from '../flows/workingGroup/leaderSetup'
+import initStorage, { singleBucketConfig as defaultStorageConfig } from '../flows/storagev2/initStorage'
+import initDistribution, { singleBucketConfig as defaultDistributionConfig } from '../flows/storagev2/initDistribution'
+import { scenario } from '../Scenario'
+import { WorkingGroups } from '../WorkingGroups'
+import updateAccountsFlow from '../misc/updateAllWorkerRoleAccountsFlow'
+
+scenario(async ({ job }) => {
+  const setupLead = job('setup leads', [
+    leaderSetup(WorkingGroups.Distribution, true),
+    leaderSetup(WorkingGroups.Storage, true),
+  ])
+  const updateWorkerAccounts = job('Update worker accounts', updateAccountsFlow).after(setupLead)
+  job('initialize storage system', initStorage(defaultStorageConfig)).after(updateWorkerAccounts)
+  job('initialize distribution system', initDistribution(defaultDistributionConfig)).after(updateWorkerAccounts)
+})

+ 28 - 0
tests/network-tests/src/scenarios/proposals.ts

@@ -0,0 +1,28 @@
+import creatingMemberships from '../flows/membership/creatingMemberships'
+import councilSetup from '../flows/council/setup'
+import electionParametersProposal from '../flows/proposals/electionParametersProposal'
+import manageLeaderRole from '../flows/proposals/manageLeaderRole'
+import spendingProposal from '../flows/proposals/spendingProposal'
+import textProposal from '../flows/proposals/textProposal'
+import validatorCountProposal from '../flows/proposals/validatorCountProposal'
+import wgMintCapacityProposal from '../flows/proposals/workingGroupMintCapacityProposal'
+import { scenario } from '../Scenario'
+
+scenario(async ({ job }) => {
+  job('creating members', creatingMemberships)
+
+  const councilJob = job('council setup', councilSetup)
+
+  job('proposals', [
+    electionParametersProposal,
+    spendingProposal,
+    textProposal,
+    validatorCountProposal,
+    wgMintCapacityProposal.storage,
+    wgMintCapacityProposal.content,
+    wgMintCapacityProposal.distribution,
+    manageLeaderRole.storage,
+    manageLeaderRole.content,
+    manageLeaderRole.distribution,
+  ]).requires(councilJob)
+})

+ 7 - 0
tests/network-tests/src/scenarios/setup-new-chain.ts

@@ -2,6 +2,8 @@ import assignCouncil from '../flows/council/assign'
 import leaderSetup from '../flows/workingGroup/leaderSetup'
 import mockContentFlow from '../misc/mockContentFlow'
 import updateAccountsFlow from '../misc/updateAllWorkerRoleAccountsFlow'
+import initStorage, { singleBucketConfig as defaultStorageConfig } from '../flows/storagev2/initStorage'
+import initDistribution, { singleBucketConfig as defaultDistributionConfig } from '../flows/storagev2/initDistribution'
 import { AllWorkingGroups } from '../WorkingGroups'
 import { scenario } from '../Scenario'
 
@@ -16,6 +18,11 @@ scenario(async ({ job }) => {
 
   const updateWorkerAccounts = job('Update worker accounts', updateAccountsFlow).after(leads)
 
+  if (!process.env.SKIP_STORAGE_AND_DISTRIBUTION) {
+    job('initialize storage system', initStorage(defaultStorageConfig)).requires(updateWorkerAccounts)
+    job('initialize distribution system', initDistribution(defaultDistributionConfig)).requires(updateWorkerAccounts)
+  }
+
   // Create some mock content in content directory - without assets or any real metadata
   job('Create Mock Content', mockContentFlow).after(updateWorkerAccounts)
 

+ 2 - 2
tests/network-tests/src/sender.ts

@@ -17,7 +17,7 @@ export enum LogLevel {
 
 export class Sender {
   private readonly api: ApiPromise
-  private static readonly asyncLock: AsyncLock = new AsyncLock({ maxPending: 2048 })
+  static readonly asyncLock: AsyncLock = new AsyncLock({ maxPending: 2048 })
   private readonly keyring: Keyring
   private readonly debug: Debugger.Debugger
   private logs: LogLevel = LogLevel.None
@@ -114,7 +114,7 @@ export class Sender {
    // Instead, use a single lock for all calls, forcing transactions to be submitted in the same
    // order in which signAndSend was called; otherwise the chance of race conditions increases.
    // In rare cases this has led some tests to fail occasionally in the past.
-    await Sender.asyncLock.acquire('tx-queue', async () => {
+    await Sender.asyncLock.acquire(['tx-queue', `nonce-${account.toString()}`], async () => {
       const nonce = await this.api.rpc.system.accountNextIndex(senderKeyPair.address)
       const signedTx = tx.sign(senderKeyPair, { nonce })
       sentTx = signedTx.toHuman()
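
The change above passes an array of keys to acquire(): the async-lock package accepts either a single key or an array, and runs the callback only once every listed key is free, releasing all of them afterwards. A minimal sketch of the pattern, assuming the async-lock npm package (illustrative, not the framework's actual sender):

import AsyncLock from 'async-lock'

const lock = new AsyncLock({ maxPending: 2048 })

async function send(account: string, submit: () => Promise<void>): Promise<void> {
  // The callback starts only when both keys are free: the global 'tx-queue' key
  // keeps overall submission order stable, while the per-account key guards the
  // nonce lookup against concurrent transactions from the same account.
  await lock.acquire(['tx-queue', `nonce-${account}`], async () => {
    await submit()
  })
}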

+ 19 - 0
tests/network-tests/src/utils.ts

@@ -5,6 +5,10 @@ import BN from 'bn.js'
 import fs from 'fs'
 import { decodeAddress } from '@polkadot/keyring'
 import { Seat } from '@joystream/types/council'
+import { metaToObject } from '@joystream/metadata-protobuf/utils'
+import { AnyMetadataClass, DecodedMetadataObject } from '@joystream/metadata-protobuf/types'
+import { createType } from '@joystream/types'
+import { Bytes } from '@polkadot/types'
 
 export class Utils {
   private static LENGTH_ADDRESS = 32 + 1 // publicKey + prefix
@@ -50,4 +54,19 @@ export class Utils {
   public static camelToSnakeCase(key: string): string {
     return key.replace(/[A-Z]/g, (letter) => `_${letter.toLowerCase()}`)
   }
+
+  public static metadataToBytes<T>(metaClass: AnyMetadataClass<T>, obj: T): Bytes {
+    return createType('Bytes', '0x' + Buffer.from(metaClass.encode(obj).finish()).toString('hex'))
+  }
+
+  public static metadataFromBytes<T>(metaClass: AnyMetadataClass<T>, bytes: Bytes): DecodedMetadataObject<T> {
+    // We use `toObject()` to get rid of .prototype defaults for optional fields
+    return metaToObject(metaClass, metaClass.decode(bytes.toU8a(true)))
+  }
+
+  public static assert(condition: any, msg?: string): asserts condition {
+    if (!condition) {
+      throw new Error(msg || 'Assertion failed')
+    }
+  }
 }
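
A possible round trip through the new helpers, assuming ChannelMetadata is exported by @joystream/metadata-protobuf as used elsewhere in this repo (import paths and field names are illustrative):

import { ChannelMetadata } from '@joystream/metadata-protobuf'
import { Utils } from './utils'

// Encode a metadata object into a SCALE `Bytes` value for an extrinsic...
const bytes = Utils.metadataToBytes(ChannelMetadata, { title: 'Test channel' })
// ...and decode it back into a plain object for assertions
const decoded = Utils.metadataFromBytes(ChannelMetadata, bytes)
Utils.assert(decoded.title === 'Test channel', 'Metadata round-trip failed')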

+ 2 - 1
types/augment/all/defs.json

@@ -469,7 +469,8 @@
     "StorageBucket": {
         "operator_status": "StorageBucketOperatorStatus",
         "accepting_new_bags": "bool",
-        "voucher": "Voucher"
+        "voucher": "Voucher",
+        "assigned_bags": "u64"
     },
     "StaticBagId": {
         "_enum": {

+ 1 - 0
types/augment/all/types.ts

@@ -1059,6 +1059,7 @@ export interface StorageBucket extends Struct {
   readonly operator_status: StorageBucketOperatorStatus;
   readonly accepting_new_bags: bool;
   readonly voucher: Voucher;
+  readonly assigned_bags: u64;
 }
 
 /** @name StorageBucketId */

+ 2 - 2
types/package.json

@@ -1,6 +1,6 @@
 {
   "name": "@joystream/types",
-  "version": "0.17.1",
+  "version": "0.17.2",
   "description": "Types for Joystream Substrate Runtime - Giza release",
   "main": "index.js",
   "types": "index.d.ts",
@@ -65,6 +65,6 @@
   "homepage": "https://github.com/Joystream/joystream",
   "volta": {
     "node": "14.16.1",
-    "yarn": "1.22.4"
+    "yarn": "1.22.15"
   }
 }

+ 2 - 0
types/src/storage.ts

@@ -172,6 +172,7 @@ export type IStorageBucket = {
   operator_status: StorageBucketOperatorStatus
   accepting_new_bags: bool
   voucher: Voucher
+  assigned_bags: u64
 }
 
 export class StorageBucket
@@ -179,6 +180,7 @@ export class StorageBucket
     operator_status: StorageBucketOperatorStatus,
     accepting_new_bags: bool,
     voucher: Voucher,
+    assigned_bags: u64,
   })
   implements IStorageBucket {}
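
With the new field, the per-bucket bag counter can be read straight off a decoded bucket. A sketch, assuming the storage pallet exposes a storageBucketById map on this runtime (the api setup and bucketId are placeholders):

import { ApiPromise, WsProvider } from '@polkadot/api'

async function printAssignedBags(bucketId: number): Promise<void> {
  const api = await ApiPromise.create({ provider: new WsProvider('ws://localhost:9944') })
  // Assumes `api.query.storage.storageBucketById` exists; with the augmented
  // types above, the decoded struct now carries the `assigned_bags` counter
  const bucket: any = await api.query.storage.storageBucketById(bucketId)
  console.log(`Bucket ${bucketId} has ${bucket.assigned_bags.toNumber()} assigned bags`)
  await api.disconnect()
}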
 

+ 1 - 1
utils/api-scripts/package.json

@@ -11,7 +11,7 @@
     "tsnode-strict": "node -r ts-node/register --unhandled-rejections=strict"
   },
   "dependencies": {
-    "@joystream/types": "^0.17.1",
+    "@joystream/types": "^0.17.2",
     "@polkadot/api": "5.9.1",
     "@polkadot/types": "5.9.1",
     "@polkadot/keyring": "7.3.1",

+ 1 - 1
utils/chain-spec-builder/Cargo.toml

@@ -3,7 +3,7 @@ authors = ['Joystream contributors']
 build = 'build.rs'
 edition = '2018'
 name = 'chain-spec-builder'
-version = '3.3.0'
+version = '3.3.1'
 
 [dependencies]
 ansi_term = "0.12.1"

+ 1 - 1
utils/migration-scripts/package.json

@@ -12,7 +12,7 @@
     "@oclif/config": "^1",
     "@oclif/plugin-help": "^3.2.3",
     "tslib": "^1",
-    "@joystream/types": "^0.17.1",
+    "@joystream/types": "^0.17.2",
     "@polkadot/api": "5.9.1",
     "@polkadot/types": "5.9.1",
     "@polkadot/keyring": "7.3.1",

+ 1 - 1
utils/migration-scripts/src/logging.ts

@@ -10,7 +10,7 @@ winston.addColors(colors)
 
 export function createLogger(label: string): Logger {
   return winston.createLogger({
-    level: 'debug',
+    level: process.env.DEBUG ? 'debug' : 'info',
     transports: [new winston.transports.Console()],
     defaultMeta: { label },
     format: winston.format.combine(
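
With this change the migration scripts stay at the 'info' level unless DEBUG is set in the environment. A usage sketch (the label is arbitrary):

import { createLogger } from './logging'

const logger = createLogger('migration')
logger.debug('printed only when the DEBUG environment variable is set')
logger.info('always printed')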

+ 2 - 0
utils/migration-scripts/src/sumer-giza/AssetsManager.ts

@@ -202,9 +202,11 @@ export class AssetsManager {
     let lastError: Error | undefined
     for (const endpoint of endpoints) {
       try {
+        this.logger.debug(`Trying to fetch asset ${contentId} from ${endpoint}...`)
         const tmpAssetPath = await this.fetchAsset(endpoint, contentId, expectedSize)
         return tmpAssetPath
       } catch (e) {
+        this.logger.debug(`Fetching ${contentId} from ${endpoint} failed: ${(e as Error).message}`)
         lastError = e as Error
         continue
       }
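
The loop above tries each endpoint in turn, logs every failure at debug level, and only surfaces the last error once all endpoints are exhausted. A generic sketch of the same pattern (names are illustrative, not the actual AssetsManager code):

async function fetchFromAnyEndpoint<T>(
  endpoints: string[],
  fetchOne: (endpoint: string) => Promise<T>,
  log: (msg: string) => void
): Promise<T> {
  let lastError: Error | undefined
  for (const endpoint of endpoints) {
    try {
      log(`Trying ${endpoint}...`)
      return await fetchOne(endpoint)
    } catch (e) {
      log(`Fetching from ${endpoint} failed: ${(e as Error).message}`)
      lastError = e as Error
    }
  }
  throw lastError ?? new Error('No endpoints available')
}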

+ 1 - 0
utils/migration-scripts/src/sumer-giza/ContentMigration.ts

@@ -55,6 +55,7 @@ export class ContentMigration {
     const forcedChannelOwner = await this.getForcedChannelOwner()
     const assetsManager = await AssetsManager.create({
       api,
+      queryNodeApi,
       config,
     })
     const { idsMap: channelsMap, videoIds } = await new ChannelMigration({

+ 13 - 1
yarn.lock

@@ -3131,7 +3131,7 @@
     yaml-validator "^3.0.0"
 
 "@joystream/types@link:types":
-  version "0.17.0"
+  version "0.17.1"
   dependencies:
     "@polkadot/api" "5.9.1"
     "@polkadot/keyring" "7.3.1"
@@ -5695,6 +5695,13 @@
   resolved "https://registry.yarnpkg.com/@types/bluebird/-/bluebird-3.5.36.tgz#00d9301d4dc35c2f6465a8aec634bb533674c652"
   integrity sha512-HBNx4lhkxN7bx6P0++W8E289foSu8kO8GCk2unhuVggO+cE7rh9DhZUyPhUxNRG9m+5B5BTKxZQ5ZP92x/mx9Q==
 
+"@types/bmp-js@^0.1.0":
+  version "0.1.0"
+  resolved "https://registry.yarnpkg.com/@types/bmp-js/-/bmp-js-0.1.0.tgz#301afe2bb3ac7ef0f18465966e4166f0491b3332"
+  integrity sha512-uMU85ROcmlY1f4mVPTlNodRXa6Z5f0AIxvv5b0pvjty3KNg7ljf5lNSspHgaF6iFDCiGpLQmJna+VwEpUC9TyA==
+  dependencies:
+    "@types/node" "*"
+
 "@types/bn.js@^4.11.5", "@types/bn.js@^4.11.6":
   version "4.11.6"
   resolved "https://registry.yarnpkg.com/@types/bn.js/-/bn.js-4.11.6.tgz#c306c70d9358aaea33cd4eda092a742b9505967c"
@@ -9527,6 +9534,11 @@ bluebird@~3.4.1:
   resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.4.7.tgz#f72d760be09b7f76d08ed8fae98b289a8d05fab3"
   integrity sha1-9y12C+Cbf3bQjtj66Ysomo0F+rM=
 
+bmp-js@^0.1.0:
+  version "0.1.0"
+  resolved "https://registry.yarnpkg.com/bmp-js/-/bmp-js-0.1.0.tgz#e05a63f796a6c1ff25f4771ec7adadc148c07233"
+  integrity sha1-4Fpj95amwf8l9Hcex62twUjAcjM=
+
 bn.js@4.12.0, bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.11.8, bn.js@^4.11.9, bn.js@^4.12.0, bn.js@^5.0.0, bn.js@^5.1.1, bn.js@^5.1.2, bn.js@^5.1.3, bn.js@^5.2.0:
   version "4.12.0"
   resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.12.0.tgz#775b3f278efbb9718eec7361f483fb36fbbfea88"

Some files were not shown because too many files were changed in this diff