Browse Source

Merge https://github.com/Joystream/joystream into vnft_auction

iorveth 3 years ago
parent
commit
a70e64db41
100 changed files with 4579 additions and 766 deletions
  1. 1 0
      .dockerignore
  2. 3 7
      .github/workflows/create-ami.yml
  3. 152 0
      .github/workflows/create-release.yml
  4. 154 47
      .github/workflows/joystream-node-docker.yml
  5. 43 0
      .github/workflows/query-node.yml
  6. 1 1
      .github/workflows/run-network-tests.yml
  7. 2 2
      Cargo.lock
  8. 1 1
      apps.Dockerfile
  9. 16 0
      colossus.Dockerfile
  10. 2 2
      devops/infrastructure/README.md
  11. 50 0
      devops/infrastructure/build-arm64-playbook.yml
  12. 1 1
      devops/infrastructure/deploy-config.sample.cfg
  13. 5 6
      devops/infrastructure/deploy-infra.sh
  14. 4 4
      devops/infrastructure/deploy-single-node.sh
  15. 5 0
      devops/infrastructure/github-action-playbook.yml
  16. 6 0
      devops/infrastructure/node-network/.gitignore
  17. 27 0
      devops/infrastructure/node-network/Pulumi.yaml
  18. 125 0
      devops/infrastructure/node-network/README.md
  19. 29 0
      devops/infrastructure/node-network/configMap.ts
  20. 342 0
      devops/infrastructure/node-network/index.ts
  21. 44 0
      devops/infrastructure/node-network/json_modify.py
  22. 161 0
      devops/infrastructure/node-network/nfsVolume.ts
  23. 15 0
      devops/infrastructure/node-network/package.json
  24. 18 0
      devops/infrastructure/node-network/tsconfig.json
  25. 18 0
      devops/infrastructure/node-network/utils.ts
  26. 95 0
      devops/infrastructure/node-network/validator.ts
  27. 134 0
      devops/infrastructure/pulumi-common/caddy.ts
  28. 1 0
      devops/infrastructure/pulumi-common/index.ts
  29. 11 0
      devops/infrastructure/pulumi-common/package.json
  30. 15 0
      devops/infrastructure/pulumi-common/tsconfig.json
  31. 6 0
      devops/infrastructure/query-node/.gitignore
  32. 19 0
      devops/infrastructure/query-node/Pulumi.yaml
  33. 117 0
      devops/infrastructure/query-node/README.md
  34. 29 0
      devops/infrastructure/query-node/configMap.ts
  35. 461 0
      devops/infrastructure/query-node/index.ts
  36. 18 0
      devops/infrastructure/query-node/package.json
  37. 73 0
      devops/infrastructure/query-node/s3Helpers.ts
  38. 18 0
      devops/infrastructure/query-node/tsconfig.json
  39. 4 3
      devops/infrastructure/requirements.yml
  40. 12 1
      devops/infrastructure/roles/admin/tasks/main.yml
  41. 115 0
      devops/infrastructure/single-instance-docker.yml
  42. 0 0
      devops/infrastructure/single-node-playbook.yml
  43. 5 0
      devops/infrastructure/storage-node/.gitignore
  44. 33 0
      devops/infrastructure/storage-node/Pulumi.yaml
  45. 120 0
      devops/infrastructure/storage-node/README.md
  46. 236 0
      devops/infrastructure/storage-node/index.ts
  47. 14 0
      devops/infrastructure/storage-node/package.json
  48. 18 0
      devops/infrastructure/storage-node/tsconfig.json
  49. 50 0
      joystream-node-armv7.Dockerfile
  50. 1 1
      node/Cargo.toml
  51. 1 0
      package.json
  52. 2 0
      pioneer/packages/apps/src/SideBar/index.tsx
  53. 325 0
      pioneer/packages/apps/src/SidebarBanner.tsx
  54. 2 0
      pioneer/packages/joy-election/src/index.tsx
  55. 13 22
      pioneer/packages/joy-forum/src/ForumRoot.tsx
  56. 78 14
      pioneer/packages/joy-forum/src/ViewThread.tsx
  57. 2 0
      pioneer/packages/joy-forum/src/index.tsx
  58. 4 2
      pioneer/packages/joy-forum/src/style.ts
  59. 1 1
      pioneer/packages/joy-proposals/src/Proposal/VotingSection.tsx
  60. 2 0
      pioneer/packages/joy-proposals/src/index.tsx
  61. 2 0
      pioneer/packages/joy-roles/src/index.tsx
  62. BIN
      pioneer/packages/joy-utils/src/assets/coin-illustration.png
  63. BIN
      pioneer/packages/joy-utils/src/assets/coin-illustration1.png
  64. 138 0
      pioneer/packages/joy-utils/src/react/components/FMReminderBanner.tsx
  65. 6 17
      query-node/README.md
  66. 273 102
      query-node/generated/graphql-server/generated/binding.ts
  67. 304 392
      query-node/generated/graphql-server/generated/classes.ts
  68. 277 104
      query-node/generated/graphql-server/generated/schema.graphql
  69. 2 0
      query-node/generated/graphql-server/model/index.ts
  70. 2 2
      query-node/generated/graphql-server/package.json
  71. 9 1
      query-node/generated/graphql-server/src/index.ts
  72. 5 1
      query-node/generated/graphql-server/src/modules/channel-category/channel-category.model.ts
  73. 1 1
      query-node/generated/graphql-server/src/modules/channel-category/channel-category.resolver.ts
  74. 1 1
      query-node/generated/graphql-server/src/modules/channel-category/channel-category.service.ts
  75. 39 4
      query-node/generated/graphql-server/src/modules/channel/channel.model.ts
  76. 1 1
      query-node/generated/graphql-server/src/modules/channel/channel.resolver.ts
  77. 1 1
      query-node/generated/graphql-server/src/modules/channel/channel.service.ts
  78. 6 1
      query-node/generated/graphql-server/src/modules/curator-group/curator-group.model.ts
  79. 1 1
      query-node/generated/graphql-server/src/modules/curator-group/curator-group.resolver.ts
  80. 1 1
      query-node/generated/graphql-server/src/modules/curator-group/curator-group.service.ts
  81. 31 4
      query-node/generated/graphql-server/src/modules/data-object/data-object.model.ts
  82. 1 1
      query-node/generated/graphql-server/src/modules/data-object/data-object.resolver.ts
  83. 1 1
      query-node/generated/graphql-server/src/modules/data-object/data-object.service.ts
  84. 12 2
      query-node/generated/graphql-server/src/modules/language/language.model.ts
  85. 1 1
      query-node/generated/graphql-server/src/modules/language/language.resolver.ts
  86. 1 1
      query-node/generated/graphql-server/src/modules/language/language.service.ts
  87. 7 1
      query-node/generated/graphql-server/src/modules/license/license.model.ts
  88. 1 1
      query-node/generated/graphql-server/src/modules/license/license.resolver.ts
  89. 1 1
      query-node/generated/graphql-server/src/modules/license/license.service.ts
  90. 6 1
      query-node/generated/graphql-server/src/modules/membership/membership.model.ts
  91. 1 1
      query-node/generated/graphql-server/src/modules/membership/membership.resolver.ts
  92. 1 1
      query-node/generated/graphql-server/src/modules/membership/membership.service.ts
  93. 14 0
      query-node/generated/graphql-server/src/modules/next-entity-id/next-entity-id.model.ts
  94. 128 0
      query-node/generated/graphql-server/src/modules/next-entity-id/next-entity-id.resolver.ts
  95. 28 0
      query-node/generated/graphql-server/src/modules/next-entity-id/next-entity-id.service.ts
  96. 6 1
      query-node/generated/graphql-server/src/modules/video-category/video-category.model.ts
  97. 1 1
      query-node/generated/graphql-server/src/modules/video-category/video-category.resolver.ts
  98. 1 1
      query-node/generated/graphql-server/src/modules/video-category/video-category.service.ts
  99. 7 1
      query-node/generated/graphql-server/src/modules/video-media-encoding/video-media-encoding.model.ts
  100. 1 1
      query-node/generated/graphql-server/src/modules/video-media-encoding/video-media-encoding.resolver.ts

+ 1 - 0
.dockerignore

@@ -6,3 +6,4 @@ query-node/**/dist
 query-node/lib
 cli/
 tests/
+devops/

+ 3 - 7
.github/workflows/create-ami.yml

@@ -1,11 +1,7 @@
-name: Build code and create AMI
+name: Create AWS AMI
 
 on:
-  push:
-    branches:
-      - master
-      - olympia
-      - test_branch
+  workflow_dispatch:
 
 jobs:
   build:
@@ -45,7 +41,7 @@ jobs:
         parameter-overrides: "KeyName=${{ env.KEY_NAME }}"
 
     - name: Install Ansible dependencies
-      run: pipx inject ansible-base boto3 botocore
+      run: pipx inject ansible-core boto3 botocore
 
     - name: Run playbook
       uses: dawidd6/action-ansible-playbook@v2

+ 152 - 0
.github/workflows/create-release.yml

@@ -0,0 +1,152 @@
+name: Create release with node binaries
+
+on:
+  workflow_dispatch:
+    inputs:
+      name:
+        description: 'Release name (v9.3.0 - Antioch)'
+        required: true
+      tag:
+        description: 'Tag (v9.3.0)'
+        required: true
+
+env:
+  REPOSITORY: joystream/node
+
+jobs:
+  build-mac-binary:
+    runs-on: macos-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+
+      - id: compute_shasum
+        name: Compute runtime code shasum
+        run: |
+          export RUNTIME_CODE_SHASUM=`scripts/runtime-code-shasum.sh`
+          echo "::set-output name=shasum::${RUNTIME_CODE_SHASUM}"
+
+      - name: Run Setup
+        run: |
+          ./setup.sh
+
+      - name: Build binaries
+        run: |
+          yarn cargo-build
+
+      - name: Tar the binary
+        run: |
+          tar czvf joystream-node-macos.tar.gz -C ./target/release joystream-node
+
+      - name: Temporarily save node binary
+        uses: actions/upload-artifact@v2
+        with:
+          name: joystream-node-macos-${{ steps.compute_shasum.outputs.shasum }}
+          path: joystream-node-macos.tar.gz
+          retention-days: 1
+
+  build-rpi-binary:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+
+      - id: compute_shasum
+        name: Compute runtime code shasum
+        run: |
+          export RUNTIME_CODE_SHASUM=`scripts/runtime-code-shasum.sh`
+          echo "::set-output name=shasum::${RUNTIME_CODE_SHASUM}"
+
+      - name: Run Setup
+        run: |
+          ./setup.sh
+
+      - name: Build binaries
+        run: |
+          export WORKSPACE_ROOT=`cargo metadata --offline --no-deps --format-version 1 | jq .workspace_root -r`
+          sudo chmod a+w $WORKSPACE_ROOT
+          ./scripts/raspberry-cross-build.sh
+
+      - name: Tar the binary
+        run: |
+          tar czvf joystream-node-rpi.tar.gz -C ./target/arm-unknown-linux-gnueabihf/release joystream-node
+
+      - name: Temporarily save node binary
+        uses: actions/upload-artifact@v2
+        with:
+          name: joystream-node-rpi-${{ steps.compute_shasum.outputs.shasum }}
+          path: joystream-node-rpi.tar.gz
+          retention-days: 1
+
+  create-release:
+    runs-on: ubuntu-latest
+    needs: [build-mac-binary, build-rpi-binary]
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+
+      - id: compute_shasum
+        name: Compute runtime code shasum
+        run: |
+          export RUNTIME_CODE_SHASUM=`scripts/runtime-code-shasum.sh`
+          echo "::set-output name=shasum::${RUNTIME_CODE_SHASUM}"
+
+      - id: extract_binaries
+        name: Copy binaries & wasm file from docker images
+        run: |
+          IMAGE=${{ env.REPOSITORY }}:${{ steps.compute_shasum.outputs.shasum }}
+
+          docker run -d --entrypoint tail --name temp-container-joystream-node $IMAGE-amd64 -f /dev/null
+
+          RESULT=$(docker exec temp-container-joystream-node b2sum -l 256 runtime.compact.wasm | awk '{print $1}')
+          VERSION_AND_COMMIT=$(docker exec temp-container-joystream-node /joystream/node --version | awk '{print $2}' | cut -d- -f -2)
+          echo "::set-output name=blob_hash::${RESULT}"
+          echo "::set-output name=version_and_commit::${VERSION_AND_COMMIT}"
+
+          docker cp temp-container-joystream-node:/joystream/runtime.compact.wasm ./joystream_runtime_${{ github.event.inputs.tag }}.wasm
+          docker cp temp-container-joystream-node:/joystream/node ./joystream-node
+          tar -czvf joystream-node-$VERSION_AND_COMMIT-x86_64-linux-gnu.tar.gz joystream-node
+
+          docker rm --force temp-container-joystream-node
+
+          docker cp $(docker create --rm $IMAGE-arm64):/joystream/node ./joystream-node
+          tar -czvf joystream-node-$VERSION_AND_COMMIT-arm64-linux-gnu.tar.gz joystream-node
+
+          docker cp $(docker create --rm $IMAGE-arm):/joystream/node ./joystream-node
+          tar -czvf joystream-node-$VERSION_AND_COMMIT-armv7-linux-gnu.tar.gz joystream-node
+
+      - name: Retrieve saved MacOS binary
+        uses: actions/download-artifact@v2
+        with:
+          name: joystream-node-macos-${{ steps.compute_shasum.outputs.shasum }}
+
+      - name: Retrieve saved RPi binary
+        uses: actions/download-artifact@v2
+        with:
+          name: joystream-node-rpi-${{ steps.compute_shasum.outputs.shasum }}
+
+      - name: Rename MacOS and RPi tar
+        run: |
+          mv joystream-node-macos.tar.gz joystream-node-${{ steps.extract_binaries.outputs.version_and_commit }}-x86_64-macos.tar.gz
+          mv joystream-node-rpi.tar.gz joystream-node-${{ steps.extract_binaries.outputs.version_and_commit }}-rpi.tar.gz
+
+      - name: Release
+        uses: softprops/action-gh-release@v1
+        with:
+          files: |
+            *.tar.gz
+            *.wasm
+          tag_name: ${{ github.event.inputs.tag }}
+          name: ${{ github.event.inputs.name }}
+          draft: true
+          body: 'Verify wasm hash:
+            ```
+            $ b2sum -l 256 joystream_runtime_${{ github.event.inputs.tag }}.wasm
+            ```
+
+            This should be the output
+
+            ```
+            ${{ steps.extract_binaries.outputs.blob_hash }}
+            ```
+            '

+ 154 - 47
.github/workflows/joystream-node-docker.yml

@@ -1,13 +1,22 @@
 name: joystream-node-docker
+
 on: push
 
+env:
+  REPOSITORY: joystream/node
+  KEY_NAME: joystream-github-action-key
+
 jobs:
-  build:
-    name: Build joystream/node Docker image
-    if: github.repository == 'Joystream/joystream'
+  push-amd64:
+    name: Build joystream/node Docker image for amd64
     runs-on: ubuntu-latest
+    outputs:
+      tag_shasum: ${{ steps.compute_shasum.outputs.shasum }}
+      image_exists: ${{ steps.compute_main_image_exists.outputs.image_exists }}
     steps:
-      - uses: actions/checkout@v1
+      - name: Checkout
+        uses: actions/checkout@v2
+
       - uses: actions/setup-node@v1
         with:
           node-version: '14.x'
@@ -18,62 +27,160 @@ jobs:
           export RUNTIME_CODE_SHASUM=`scripts/runtime-code-shasum.sh`
           echo "::set-output name=shasum::${RUNTIME_CODE_SHASUM}"
 
-      - name: Setup cache directory
-        run: mkdir ~/docker-images
-
-      - name: Cache docker images
-        uses: actions/cache@v2
-        env:
-          cache-name: joystream-node-docker
+      - name: Login to DockerHub
+        uses: docker/login-action@v1
         with:
-          path: ~/docker-images
-          key: ${{ env.cache-name }}-${{ steps.compute_shasum.outputs.shasum }}
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_PASSWORD }}
 
-      - name: Check if we have cached image
-        continue-on-error: true
+      - name: Check if we already have the manifest on Dockerhub
+        id: compute_main_image_exists
+        # Will output 0 if the image exists and 1 if it does not exist
         run: |
-          if [ -f ~/docker-images/joystream-node-docker-image.tar.gz ]; then
-            docker load --input ~/docker-images/joystream-node-docker-image.tar.gz
-            cp ~/docker-images/joystream-node-docker-image.tar.gz .
-          fi
+          export IMAGE_EXISTS=$(docker manifest inspect ${{ env.REPOSITORY }}:${{ steps.compute_shasum.outputs.shasum }} > /dev/null ; echo $?)
+          echo "::set-output name=image_exists::${IMAGE_EXISTS}"
 
       - name: Check if we have pre-built image on Dockerhub
-        continue-on-error: true
+        id: compute_image_exists
+        # Will output 0 if the image exists and 1 if it does not exist
+        run: |
+          export IMAGE_EXISTS=$(docker manifest inspect ${{ env.REPOSITORY }}:${{ steps.compute_shasum.outputs.shasum }}-amd64 > /dev/null ; echo $?)
+          echo "::set-output name=image_exists::${IMAGE_EXISTS}"
+
+      - name: Build and push
+        uses: docker/build-push-action@v2
+        with:
+          context: .
+          file: joystream-node.Dockerfile
+          platforms: linux/amd64
+          push: true
+          tags: ${{ env.REPOSITORY }}:${{ steps.compute_shasum.outputs.shasum }}-amd64
+        if: ${{ steps.compute_image_exists.outputs.image_exists == 1 }}
+
+  push-arm:
+    name: Build joystream/node Docker image for arm
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        platform: ['linux/arm64', 'linux/arm/v7']
+        include:
+          - platform: 'linux/arm64'
+            platform_tag: 'arm64'
+            file: 'joystream-node.Dockerfile'
+          - platform: 'linux/arm/v7'
+            platform_tag: 'arm'
+            file: 'joystream-node-armv7.Dockerfile'
+    env:
+      STACK_NAME: joystream-ga-docker-${{ github.run_number }}-${{ matrix.platform_tag }}
+    steps:
+      - name: Extract branch name
+        shell: bash
+        run: echo "##[set-output name=branch;]$(echo ${GITHUB_REF#refs/heads/})"
+        id: extract_branch
+
+      - name: Checkout
+        uses: actions/checkout@v2
+
+      - uses: actions/setup-node@v1
+        with:
+          node-version: '14.x'
+
+      - name: Install Ansible dependencies
+        run: pipx inject ansible-core boto3 botocore
+
+      - id: compute_shasum
+        name: Compute runtime code shasum
         run: |
-          if ! [ -f joystream-node-docker-image.tar.gz ]; then
-            docker pull joystream/node:${{ steps.compute_shasum.outputs.shasum }}
-            docker image tag joystream/node:${{ steps.compute_shasum.outputs.shasum }} joystream/node:latest
-            docker save --output joystream-node-docker-image.tar joystream/node:latest
-            gzip joystream-node-docker-image.tar
-            cp joystream-node-docker-image.tar.gz ~/docker-images/
-          fi
-
-      - name: Build new joystream/node image
+          export RUNTIME_CODE_SHASUM=`scripts/runtime-code-shasum.sh`
+          echo "::set-output name=shasum::${RUNTIME_CODE_SHASUM}"
+
+      - name: Login to DockerHub
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_PASSWORD }}
+
+      - name: Check if we have pre-built image on Dockerhub
+        id: compute_image_exists
+        # Will output 0 if the image exists and 1 if it does not exist
         run: |
-          if ! [ -f joystream-node-docker-image.tar.gz ]; then
-            docker build . --file joystream-node.Dockerfile --tag joystream/node
-            docker save --output joystream-node-docker-image.tar joystream/node
-            gzip joystream-node-docker-image.tar
-            cp joystream-node-docker-image.tar.gz ~/docker-images/
-            echo "NEW_BUILD=true" >> $GITHUB_ENV
-          fi
-
-      - name: Save joystream/node image to Artifacts
-        uses: actions/upload-artifact@v2
+          export IMAGE_EXISTS=$(docker manifest inspect ${{ env.REPOSITORY }}:${{ steps.compute_shasum.outputs.shasum }}-${{ matrix.platform_tag }} > /dev/null ; echo $?)
+          echo "::set-output name=image_exists::${IMAGE_EXISTS}"
+
+      - name: Configure AWS credentials
+        uses: aws-actions/configure-aws-credentials@v1
+        with:
+          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+          aws-region: us-east-1
+        if: ${{ steps.compute_image_exists.outputs.image_exists == 1 }}
+
+      - name: Deploy to AWS CloudFormation
+        uses: aws-actions/aws-cloudformation-github-deploy@v1
+        id: deploy_stack
+        with:
+          name: ${{ env.STACK_NAME }}
+          template: devops/infrastructure/single-instance-docker.yml
+          no-fail-on-empty-changeset: '1'
+          parameter-overrides: 'KeyName=${{ env.KEY_NAME }},EC2AMI=ami-00d1ab6b335f217cf,EC2InstanceType=t4g.xlarge'
+        if: ${{ steps.compute_image_exists.outputs.image_exists == 1 }}
+
+      - name: Run playbook
+        uses: dawidd6/action-ansible-playbook@v2
         with:
-          name: ${{ steps.compute_shasum.outputs.shasum }}-joystream-node-docker-image.tar.gz
-          path: joystream-node-docker-image.tar.gz
+          playbook: build-arm64-playbook.yml
+          directory: devops/infrastructure
+          requirements: requirements.yml
+          key: ${{ secrets.SSH_PRIVATE_KEY }}
+          inventory: |
+            [all]
+            ${{ steps.deploy_stack.outputs.PublicIp }}
+          options: |
+            --extra-vars "git_repo=https://github.com/${{ github.repository }} \
+                          branch_name=${{ steps.extract_branch.outputs.branch }} \
+                          docker_username=${{ secrets.DOCKERHUB_USERNAME }} \
+                          docker_password=${{ secrets.DOCKERHUB_PASSWORD }} \
+                          tag_name=${{ steps.compute_shasum.outputs.shasum }}-${{ matrix.platform_tag }} \
+                          repository=${{ env.REPOSITORY }} dockerfile=${{ matrix.file }} \
+                          stack_name=${{ env.STACK_NAME }} platform=${{ matrix.platform }}"
+        if: ${{ steps.compute_image_exists.outputs.image_exists == 1 }}
 
+  push-manifest:
+    name: Create manifest from all the arch images
+    needs: [push-amd64, push-arm]
+    # Only run this job if the image does not exist with tag equal to the shasum
+    if: needs.push-amd64.outputs.image_exists == 1
+    runs-on: ubuntu-latest
+    env:
+      TAG_SHASUM: ${{ needs.push-amd64.outputs.tag_shasum }}
+    steps:
       - name: Login to DockerHub
         uses: docker/login-action@v1
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_PASSWORD }}
-        if: env.NEW_BUILD
 
-      - name: Publish new image to DockerHub
+      - name: Create manifest for multi-arch images
+        run: |
+          # get artifacts from previous steps
+          IMAGE=${{ env.REPOSITORY }}:${{ env.TAG_SHASUM }}
+          echo $IMAGE
+          docker pull $IMAGE-amd64
+          docker pull $IMAGE-arm64
+          docker pull $IMAGE-arm
+          docker manifest create $IMAGE $IMAGE-amd64 $IMAGE-arm64 $IMAGE-arm
+          docker manifest annotate $IMAGE $IMAGE-amd64 --arch amd64
+          docker manifest annotate $IMAGE $IMAGE-arm64 --arch arm64
+          docker manifest annotate $IMAGE $IMAGE-arm --arch arm
+          docker manifest push $IMAGE
+
+      - name: Create manifest with latest tag for master
+        if: github.ref == 'refs/heads/master'
         run: |
-          docker image tag joystream/node joystream/node:${{ steps.compute_shasum.outputs.shasum }}
-          docker push joystream/node:${{ steps.compute_shasum.outputs.shasum }}
-        if: env.NEW_BUILD
-  
+          IMAGE=${{ env.REPOSITORY }}:${{ env.TAG_SHASUM }}
+          LATEST_TAG=${{ env.REPOSITORY }}:latest
+          docker manifest create $LATEST_TAG $IMAGE-amd64 $IMAGE-arm64 $IMAGE-arm
+          docker manifest annotate $LATEST_TAG $IMAGE-amd64 --arch amd64
+          docker manifest annotate $LATEST_TAG $IMAGE-arm64 --arch arm64
+          docker manifest annotate $LATEST_TAG $IMAGE-arm --arch arm
+          docker manifest push $LATEST_TAG

+ 43 - 0
.github/workflows/query-node.yml

@@ -0,0 +1,43 @@
+name: query-node
+on: [pull_request, push]
+
+jobs:
+  query_node_build_ubuntu:
+    name: Ubuntu Checks
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        node-version: [14.x]
+    steps:
+    - uses: actions/checkout@v1
+    - name: Use Node.js ${{ matrix.node-version }}
+      uses: actions/setup-node@v1
+      with:
+        node-version: ${{ matrix.node-version }}
+    - name: checks
+      run: |
+        yarn install --frozen-lockfile
+        yarn workspace @joystream/types build
+        yarn workspace @joystream/content-metadata-protobuf build:ts
+        ./query-node/build.sh
+        yarn workspace query-node-mappings checks --quiet
+
+  query_node_build_osx:
+    name: MacOS Checks
+    runs-on: macos-latest
+    strategy:
+      matrix:
+        node-version: [14.x]
+    steps:
+    - uses: actions/checkout@v1
+    - name: Use Node.js ${{ matrix.node-version }}
+      uses: actions/setup-node@v1
+      with:
+        node-version: ${{ matrix.node-version }}
+    - name: checks
+      run: |
+        yarn install --frozen-lockfile --network-timeout 120000
+        yarn workspace @joystream/types build
+        yarn workspace @joystream/content-metadata-protobuf build:ts
+        ./query-node/build.sh
+        yarn workspace query-node-mappings checks --quiet

+ 1 - 1
.github/workflows/run-network-tests.yml

@@ -103,7 +103,7 @@ jobs:
       - name: Ensure tests are runnable
         run: yarn workspace network-tests build
       - name: Execute network tests
-        run: RUNTIME=antioch tests/network-tests/run-tests.sh full
+        run: RUNTIME=sumer tests/network-tests/run-tests.sh full
 
   basic_runtime:
     name: Integration Tests (New Chain)

+ 2 - 2
Cargo.lock

@@ -2332,7 +2332,7 @@ dependencies = [
 
 [[package]]
 name = "joystream-node"
-version = "5.5.0"
+version = "5.6.0"
 dependencies = [
  "frame-benchmarking",
  "frame-benchmarking-cli",
@@ -2393,7 +2393,7 @@ dependencies = [
 
 [[package]]
 name = "joystream-node-runtime"
-version = "9.7.0"
+version = "9.8.0"
 dependencies = [
  "frame-benchmarking",
  "frame-executive",

+ 1 - 1
apps.Dockerfile

@@ -1,4 +1,4 @@
-FROM node:14 as builder
+FROM --platform=linux/x86-64 node:14 as builder
 
 WORKDIR /joystream
 COPY . /joystream

+ 16 - 0
colossus.Dockerfile

@@ -0,0 +1,16 @@
+FROM --platform=linux/x86-64 node:14 as builder
+
+WORKDIR /joystream
+COPY . /joystream
+RUN  rm -fr /joystream/pioneer
+
+EXPOSE 3001
+
+RUN yarn --frozen-lockfile
+
+RUN yarn workspace @joystream/types build
+RUN yarn workspace storage-node build
+
+RUN yarn
+
+ENTRYPOINT yarn colossus --dev --ws-provider $WS_PROVIDER_ENDPOINT_URI

+ 2 - 2
devops/infrastructure/README.md

@@ -26,10 +26,10 @@ On Mac run the command:
 Follow [the official installation guide](https://docs.ansible.com/ansible/latest/installation_guide/intro_installation.html) for your system.
 
 # How to run
-Edit the file `bash-config.cfg` and update parameters like AWS_KEY_PAIR_NAME, KEY_PATH
+Copy and edit the file `deploy-config.sample.cfg` and update parameters like AWS_KEY_PAIR_NAME, KEY_PATH
 Run the `deploy-infra.sh` script to deploy the infrastructure
 
 ```
 cd devops/infrastructure
-./deploy-infra.sh
+./deploy-infra.sh your-deploy-config.cfg
 ```

+ 50 - 0
devops/infrastructure/build-arm64-playbook.yml

@@ -0,0 +1,50 @@
+---
+# Setup joystream code, build docker image
+
+- name: Build image and push to docker hub
+  hosts: all
+
+  tasks:
+    - block:
+        - name: Get code from git repo
+          include_role:
+            name: common
+            tasks_from: get-code-git
+
+        - name: Install Docker Module for Python
+          pip:
+            name: docker
+
+        - name: Log into DockerHub
+          community.docker.docker_login:
+            username: '{{ docker_username }}'
+            password: '{{ docker_password }}'
+
+        - name: Build an image and push it to a private repo
+          community.docker.docker_image:
+            build:
+              path: ./joystream
+              dockerfile: '{{ dockerfile }}'
+              platform: '{{ platform }}'
+            name: '{{ repository }}'
+            tag: '{{ tag_name }}'
+            push: yes
+            source: build
+          async: 7200
+          poll: 0
+          register: build_result
+
+        - name: Check on build async task
+          async_status:
+            jid: '{{ build_result.ansible_job_id }}'
+          register: job_result
+          until: job_result.finished
+          retries: 72
+          delay: 100
+
+      always:
+        - name: Delete the stack
+          amazon.aws.cloudformation:
+            stack_name: '{{ stack_name }}'
+            state: 'absent'
+          delegate_to: localhost

+ 1 - 1
devops/infrastructure/bash-config.sample.cfg → devops/infrastructure/deploy-config.sample.cfg

@@ -10,7 +10,7 @@ VALIDATOR_EC2_INSTANCE_TYPE=t2.micro
 BUILD_EC2_INSTANCE_TYPE=t2.xlarge
 RPC_EC2_INSTANCE_TYPE=t2.micro
 
-# Set a prebuilt AMI if required
+# prebuilt AMI with joystream-node, chain-spec and subkey already built
 EC2_AMI_ID="ami-08ffec5991ca99db9"
 
 ACCOUNT_ID=$(aws sts get-caller-identity --profile $CLI_PROFILE --query Account --output text)

+ 5 - 6
devops/infrastructure/deploy-infra.sh

@@ -73,16 +73,15 @@ if [ $? -eq 0 ]; then
 
   if [ -z "$EC2_AMI_ID" ]
   then
-    echo -e "\n\n=========== Configuring the node servers ==========="
+    echo -e "\n\n=========== Compile joystream-node on build server ==========="
     ansible-playbook -i $INVENTORY_PATH --private-key $KEY_PATH build-code.yml \
       --extra-vars "branch_name=$BRANCH_NAME git_repo=$GIT_REPO build_local_code=$BUILD_LOCAL_CODE data_path=data-$NEW_STACK_NAME"
-  fi
 
-  echo -e "\n\n=========== Configuring the Build server ==========="
-  ansible-playbook -i $INVENTORY_PATH --private-key $KEY_PATH setup-admin.yml \
-    --extra-vars "local_dir=$LOCAL_CODE_PATH build_local_code=$BUILD_LOCAL_CODE"
+    echo -e "\n\n=========== Install additional utils on build server ==========="
+    ansible-playbook -i $INVENTORY_PATH --private-key $KEY_PATH setup-admin.yml
+  fi
 
-  echo -e "\n\n=========== Configuring the chain spec file and Pioneer app ==========="
+  echo -e "\n\n=========== Configure and start new validators, rpc node and pioneer ==========="
   ansible-playbook -i $INVENTORY_PATH --private-key $KEY_PATH chain-spec-pioneer.yml \
     --extra-vars "local_dir=$LOCAL_CODE_PATH network_suffix=$NETWORK_SUFFIX
                   data_path=data-$NEW_STACK_NAME bucket_name=$BUCKET_NAME number_of_validators=$NUMBER_OF_VALIDATORS"

+ 4 - 4
devops/infrastructure/deploy-single-node.sh

@@ -6,7 +6,7 @@ source common.sh
 
 if [ -z "$1" ]; then
   echo "ERROR: Configuration file not passed"
-  echo "Please use ./deploy-infra.sh PATH/TO/CONFIG to run this script"
+  echo "Please use ./deploy-single-node.sh PATH/TO/CONFIG to run this script"
   exit 1
 else
   echo "Using $1 file for config"
@@ -24,7 +24,7 @@ if [ ! -f "$KEY_PATH" ]; then
 fi
 
 # # Deploy the CloudFormation template
-echo -e "\n\n=========== Deploying single instance ==========="
+echo -e "\n\n=========== Deploying single node ==========="
 aws cloudformation deploy \
   --region $REGION \
   --profile $CLI_PROFILE \
@@ -45,7 +45,7 @@ if [ $? -eq 0 ]; then
 
   echo -e "New Node Public IP: $SERVER_IP"
 
-  echo -e "\n\n=========== Configuring the chain spec file and Pioneer app ==========="
-  ansible-playbook -i $SERVER_IP, --private-key $KEY_PATH new-node-playbook.yml \
+  echo -e "\n\n=========== Configuring node ==========="
+  ansible-playbook -i $SERVER_IP, --private-key $KEY_PATH single-node-playbook.yml \
     --extra-vars "binary_file=$BINARY_FILE chain_spec_file=$CHAIN_SPEC_FILE"
 fi

+ 5 - 0
devops/infrastructure/github-action-playbook.yml

@@ -16,6 +16,11 @@
           name: common
           tasks_from: run-setup-build
 
+      - name: Install subkey
+        include_role:
+          name: admin
+          tasks_from: main
+
       - name: Basic AMI Creation
         amazon.aws.ec2_ami:
           instance_id: "{{ instance_id }}"

+ 6 - 0
devops/infrastructure/node-network/.gitignore

@@ -0,0 +1,6 @@
+/bin/
+/node_modules/
+kubeconfig.yml
+package-lock.json
+.env
+Pulumi.*.yaml

+ 27 - 0
devops/infrastructure/node-network/Pulumi.yaml

@@ -0,0 +1,27 @@
+name: node-network
+runtime: nodejs
+description: Kubernetes IaC for Joystream RPC and Validator nodes
+template:
+  config:
+    aws:profile:
+      default: joystream-user
+    aws:region:
+      default: us-east-1
+    isMinikube:
+      description: Whether you are deploying to minikube
+      default: false
+    numberOfValidators:
+      description: Number of validators as starting nodes
+      default: 2
+    networkSuffix:
+      description: Suffix to attach to the network id and name
+      default: 8129
+    isLoadBalancerReady:
+      description: Whether the load balancer service is ready and has been assigned an IP
+      default: false
+    nodeImage:
+      description: Docker image with tag to be used as validator and RPC nodes
+      default: 'joystream/node:latest'
+    encryptionKey:
+      description: Key to encrypt the 7z containing secrets with
+      default: '1234'

+ 125 - 0
devops/infrastructure/node-network/README.md

@@ -0,0 +1,125 @@
+# Node Network automated deployment
+
+Deploys a Joystream node network on an EKS Kubernetes cluster
+
+## Deploying the App
+
+To deploy your infrastructure, follow the below steps.
+
+### Prerequisites
+
+1. [Install Pulumi](https://www.pulumi.com/docs/get-started/install/)
+1. [Install Node.js](https://nodejs.org/en/download/)
+1. Install a package manager for Node.js, such as [npm](https://www.npmjs.com/get-npm) or [Yarn](https://yarnpkg.com/en/docs/install).
+1. [Configure AWS Credentials](https://www.pulumi.com/docs/intro/cloud-providers/aws/setup/)
+1. Optional (for debugging): [Install kubectl](https://kubernetes.io/docs/tasks/tools/)
+
+### Steps
+
+After cloning this repo, from this working directory, run these commands:
+
+1. Install the required Node.js packages:
+
+   This installs the dependent packages [needed](https://www.pulumi.com/docs/intro/concepts/how-pulumi-works/) for our Pulumi program.
+
+   ```bash
+   $ npm install
+   ```
+
+1. Create a new stack, which is an isolated deployment target for this example:
+
+   This will initialize the Pulumi program in TypeScript.
+
+   ```bash
+   $ pulumi stack init
+   ```
+
+1. Set the required configuration variables in `Pulumi.<stack>.yaml`
+
+   ```bash
+   $ pulumi config set-all --plaintext aws:region=us-east-1 --plaintext aws:profile=joystream-user \
+    --plaintext numberOfValidators=2 --plaintext isMinikube=true --plaintext networkSuffix=8122 \
+    --plaintext nodeImage=joystream/node:latest --plaintext encryptionKey=password
+   ```
+
+   If you want to build the stack on AWS set the `isMinikube` config to `false`
+
+   ```bash
+   $ pulumi config set isMinikube false
+   ```
+
+1. Stand up the Kubernetes cluster:
+
+   Running `pulumi up -y` will deploy the EKS cluster. Note, provisioning a
+   new EKS cluster takes between 10-15 minutes.
+
+1. Once the stack is up and running, we will modify the Caddy config to get SSL certificate for the load balancer for AWS
+
+   Modify the config variable `isLoadBalancerReady`
+
+   ```bash
+   $ pulumi config set isLoadBalancerReady true
+   ```
+
+   Run `pulumi up -y` to update the Caddy config
+
+1. You can now access the endpoints using `pulumi stack output endpoint1` or `pulumi stack output endpoint2`
+
+   The ws-rpc endpoint is `https://<ENDPOINT>/ws-rpc` and http-rpc endpoint is `https://<ENDPOINT>/http-rpc`
+
+1. If you are using Minikube, run `minikube service node-network -n $(pulumi stack output namespaceName)`
+
+   This will setup a proxy for your `node-network` service, which can then be accessed at
+   the URL given in the output
+
+1. Access the Kubernetes Cluster using `kubectl`
+
+   To access your new Kubernetes cluster using `kubectl`, we need to set up the
+   `kubeconfig` file and download `kubectl`. We can leverage the Pulumi
+   stack output in the CLI, as Pulumi facilitates exporting these objects for us.
+
+   ```bash
+   $ pulumi stack output kubeconfig --show-secrets > kubeconfig
+   $ export KUBECONFIG=$PWD/kubeconfig
+   $ kubectl get nodes
+   ```
+
+   We can also use the stack output to query the cluster for our newly created Deployment:
+
+   ```bash
+   $ kubectl get deployment $(pulumi stack output deploymentName) --namespace=$(pulumi stack output namespaceName)
+   $ kubectl get service $(pulumi stack output serviceName) --namespace=$(pulumi stack output namespaceName)
+   ```
+
+   To get logs
+
+   ```bash
+   $ kubectl config set-context --current --namespace=$(pulumi stack output namespaceName)
+   $ kubectl get pods
+   $ kubectl logs <PODNAME> --all-containers
+   ```
+
+   To see complete pulumi stack output
+
+   ```bash
+   $ pulumi stack output
+   ```
+
+   To execute a command
+
+   ```bash
+   $ kubectl exec --stdin --tty <PODNAME> -c <CONTAINER> -- /bin/bash
+   ```
+
+1. To get the chain-data and secrets, run the below command
+
+   ```bash
+   $ kubectl cp $(kubectl get pods | grep rpc-node | awk '{print $1}'):/chain-data/chain-data.7z ./chain-data.7z
+   ```
+
+1. Once you've finished experimenting, tear down your stack's resources by destroying and removing it:
+
+   ```bash
+   $ pulumi destroy --yes
+   $ pulumi stack rm --yes
+   ```

+ 29 - 0
devops/infrastructure/node-network/configMap.ts

@@ -0,0 +1,29 @@
+import * as pulumi from '@pulumi/pulumi'
+import * as k8s from '@pulumi/kubernetes'
+import * as fs from 'fs'
+
+export class configMapFromFile extends pulumi.ComponentResource {
+  public readonly configName?: pulumi.Output<string>
+
+  constructor(name: string, args: ConfigMapArgs, opts: pulumi.ComponentResourceOptions = {}) {
+    super('pkg:node-network:configMap', name, {}, opts)
+
+    this.configName = new k8s.core.v1.ConfigMap(
+      name,
+      {
+        metadata: {
+          namespace: args.namespaceName,
+        },
+        data: {
+          'fileData': fs.readFileSync(args.filePath).toString(),
+        },
+      },
+      opts
+    ).metadata.apply((m) => m.name)
+  }
+}
+
+export interface ConfigMapArgs {
+  filePath: string
+  namespaceName: pulumi.Output<string>
+}

+ 342 - 0
devops/infrastructure/node-network/index.ts

@@ -0,0 +1,342 @@
+import * as awsx from '@pulumi/awsx'
+import * as eks from '@pulumi/eks'
+import * as pulumi from '@pulumi/pulumi'
+import * as k8s from '@pulumi/kubernetes'
+import { configMapFromFile } from './configMap'
+import { CaddyServiceDeployment } from 'pulumi-common'
+import { getSubkeyContainers } from './utils'
+import { ValidatorServiceDeployment } from './validator'
+import { NFSServiceDeployment } from './nfsVolume'
+// const { exec } = require('child_process')
+
+const config = new pulumi.Config()
+const awsConfig = new pulumi.Config('aws')
+const isMinikube = config.getBoolean('isMinikube')
+
+export let kubeconfig: pulumi.Output<any>
+
+let provider: k8s.Provider
+
+if (isMinikube) {
+  provider = new k8s.Provider('local', {})
+} else {
+  // Create a VPC for our cluster.
+  const vpc = new awsx.ec2.Vpc('joystream-node-vpc', { numberOfAvailabilityZones: 2 })
+
+  // Create an EKS cluster with the default configuration.
+  const cluster = new eks.Cluster('eksctl-node-network', {
+    vpcId: vpc.id,
+    subnetIds: vpc.publicSubnetIds,
+    desiredCapacity: 2,
+    maxSize: 2,
+    instanceType: 't2.medium',
+    providerCredentialOpts: {
+      profileName: awsConfig.get('profile'),
+    },
+  })
+  provider = cluster.provider
+
+  // Export the cluster's kubeconfig.
+  kubeconfig = cluster.kubeconfig
+}
+
+const resourceOptions = { provider: provider }
+
+const name = 'node-network'
+
+// Create a Kubernetes Namespace
+const ns = new k8s.core.v1.Namespace(name, {}, resourceOptions)
+
+// Export the Namespace name
+export const namespaceName = ns.metadata.name
+
+const appLabels = { appClass: name }
+
+const networkSuffix = config.get('networkSuffix') || '8129'
+const numberOfValidators = config.getNumber('numberOfValidators') || 1
+const chainDataPath = '/chain-data'
+const chainSpecPath = `${chainDataPath}/chainspec-raw.json`
+const nodeImage = config.get('nodeImage') || 'joystream/node:latest'
+const encryptKey = config.get('encryptionKey') || '1234'
+
+const subkeyContainers = getSubkeyContainers(numberOfValidators, chainDataPath)
+let pvcClaimName: pulumi.Output<any>
+
+if (isMinikube) {
+  const pvc = new k8s.core.v1.PersistentVolumeClaim(
+    `${name}-pvc`,
+    {
+      metadata: {
+        labels: appLabels,
+        namespace: namespaceName,
+        name: `${name}-pvc`,
+      },
+      spec: {
+        accessModes: ['ReadWriteMany'],
+        resources: {
+          requests: {
+            storage: `1Gi`,
+          },
+        },
+      },
+    },
+    resourceOptions
+  )
+
+  const pv = new k8s.core.v1.PersistentVolume(`${name}-pv`, {
+    metadata: {
+      labels: { ...appLabels, type: 'local' },
+      namespace: namespaceName,
+      name: `${name}-pv`,
+    },
+    spec: {
+      accessModes: ['ReadWriteMany'],
+      capacity: {
+        storage: `1Gi`,
+      },
+      hostPath: {
+        path: '/mnt/data/',
+      },
+    },
+  })
+  pvcClaimName = pvc.metadata.apply((m) => m.name)
+} else {
+  const nfsVolume = new NFSServiceDeployment('nfs-server', { namespace: namespaceName }, resourceOptions)
+  pvcClaimName = nfsVolume.pvc.metadata.apply((m) => m.name)
+}
+
+const jsonModifyConfig = new configMapFromFile(
+  'json-modify-config',
+  {
+    filePath: 'json_modify.py',
+    namespaceName: namespaceName,
+  },
+  resourceOptions
+).configName
+
+const chainDataPrepareJob = new k8s.batch.v1.Job(
+  'chain-data',
+  {
+    metadata: {
+      namespace: namespaceName,
+    },
+    spec: {
+      backoffLimit: 0,
+      template: {
+        spec: {
+          containers: [
+            ...subkeyContainers,
+            {
+              name: 'builder-node',
+              image: nodeImage,
+              command: ['/bin/sh', '-c'],
+              args: [
+                `/joystream/chain-spec-builder generate -a ${numberOfValidators} \
+                --chain-spec-path ${chainDataPath}/chainspec.json --deployment live \
+                --endowed 1 --keystore-path ${chainDataPath}/data > ${chainDataPath}/seeds.txt`,
+              ],
+              volumeMounts: [
+                {
+                  name: 'config-data',
+                  mountPath: chainDataPath,
+                },
+              ],
+            },
+            {
+              name: 'json-modify',
+              image: 'python',
+              command: ['python'],
+              args: [
+                '/scripts/json_modify.py',
+                '--path',
+                `${chainDataPath}`,
+                '--prefix',
+                networkSuffix,
+                '--validators',
+                `${numberOfValidators}`,
+              ],
+              volumeMounts: [
+                {
+                  mountPath: '/scripts/json_modify.py',
+                  name: 'json-modify-script',
+                  subPath: 'fileData',
+                },
+                {
+                  name: 'config-data',
+                  mountPath: chainDataPath,
+                },
+              ],
+            },
+            {
+              name: 'raw-chain-spec',
+              image: nodeImage,
+              command: ['/bin/sh', '-c'],
+              args: [`/joystream/node build-spec --chain ${chainDataPath}/chainspec.json --raw > ${chainSpecPath}`],
+              volumeMounts: [
+                {
+                  name: 'config-data',
+                  mountPath: chainDataPath,
+                },
+              ],
+            },
+            {
+              name: '7z',
+              image: 'danielwhatmuff/7z-docker',
+              command: ['/bin/sh', '-c'],
+              args: [`7z a -p${encryptKey} ${chainDataPath}/chain-data.7z ${chainDataPath}/*`],
+              volumeMounts: [
+                {
+                  name: 'config-data',
+                  mountPath: chainDataPath,
+                },
+              ],
+            },
+          ],
+          volumes: [
+            {
+              name: 'json-modify-script',
+              configMap: {
+                name: jsonModifyConfig,
+              },
+            },
+            {
+              name: 'config-data',
+              persistentVolumeClaim: {
+                claimName: pvcClaimName,
+              },
+            },
+          ],
+          restartPolicy: 'Never',
+        },
+      },
+    },
+  },
+  { ...resourceOptions }
+)
+
+// Create N validator service deployments
+const validators = []
+
+for (let i = 1; i <= numberOfValidators; i++) {
+  const validator = new ValidatorServiceDeployment(
+    `node-${i}`,
+    { namespace: namespaceName, index: i, chainSpecPath, dataPath: chainDataPath, pvc: pvcClaimName, nodeImage },
+    { ...resourceOptions, dependsOn: chainDataPrepareJob }
+  )
+  validators.push(validator)
+}
+
+const deployment = new k8s.apps.v1.Deployment(
+  `rpc-node`,
+  {
+    metadata: {
+      namespace: namespaceName,
+      labels: appLabels,
+    },
+    spec: {
+      replicas: 1,
+      selector: { matchLabels: appLabels },
+      template: {
+        metadata: {
+          labels: appLabels,
+        },
+        spec: {
+          initContainers: [],
+          containers: [
+            {
+              name: 'rpc-node',
+              image: nodeImage,
+              ports: [
+                { name: 'rpc-9944', containerPort: 9944 },
+                { name: 'rpc-9933', containerPort: 9933 },
+                { name: 'rpc-30333', containerPort: 30333 },
+              ],
+              args: [
+                '--chain',
+                chainSpecPath,
+                '--ws-external',
+                '--rpc-cors',
+                'all',
+                '--pruning',
+                'archive',
+                '--ws-max-connections',
+                '512',
+                '--telemetry-url',
+                'wss://telemetry.joystream.org/submit/ 0',
+                '--telemetry-url',
+                'wss://telemetry.polkadot.io/submit/ 0',
+              ],
+              volumeMounts: [
+                {
+                  name: 'config-data',
+                  mountPath: chainDataPath,
+                },
+              ],
+            },
+          ],
+          volumes: [
+            {
+              name: 'config-data',
+              persistentVolumeClaim: {
+                claimName: pvcClaimName,
+              },
+            },
+          ],
+        },
+      },
+    },
+  },
+  { ...resourceOptions, dependsOn: validators }
+)
+
+// Export the Deployment name
+export const deploymentName = deployment.metadata.name
+
+// Create a Service for the RPC Node
+const service = new k8s.core.v1.Service(
+  name,
+  {
+    metadata: {
+      labels: appLabels,
+      namespace: namespaceName,
+      name: 'node-network',
+    },
+    spec: {
+      type: isMinikube ? 'NodePort' : 'ClusterIP',
+      ports: [
+        { name: 'port-1', port: 9944 },
+        { name: 'port-2', port: 9933 },
+      ],
+      selector: appLabels,
+    },
+  },
+  resourceOptions
+)
+
+// Export the Service name and public LoadBalancer Endpoint
+export const serviceName = service.metadata.name
+
+const lbReady = config.get('isLoadBalancerReady') === 'true'
+
+const caddyEndpoints = [
+  `/ws-rpc {
+  reverse_proxy node-network:9944
+}`,
+  `/http-rpc {
+  reverse_proxy node-network:9933
+}`,
+]
+
+export let endpoint1: pulumi.Output<string>
+export let endpoint2: pulumi.Output<string>
+
+if (!isMinikube) {
+  const caddy = new CaddyServiceDeployment(
+    'caddy-proxy',
+    { lbReady, namespaceName: namespaceName, isMinikube, caddyEndpoints },
+    resourceOptions
+  )
+
+  endpoint1 = pulumi.interpolate`${caddy.primaryEndpoint}`
+  endpoint2 = pulumi.interpolate`${caddy.secondaryEndpoint}`
+}

+ 44 - 0
devops/infrastructure/node-network/json_modify.py

@@ -0,0 +1,44 @@
+#!/usr/bin/python
+import argparse
+import json
+
+def main(chain_path, prefix, number_of_validators):
+    chain_spec_path = f"{chain_path}/chainspec.json"
+    print(f"Updating chain spec file {chain_spec_path}")
+    number_of_validators = int(number_of_validators)
+
+    with open(chain_spec_path) as f:
+        data = json.load(f)
+
+    response = {
+        "name": f'{data["name"]} {prefix}',
+        "id": f'{data["id"]}_{prefix}',
+        "protocolId": f'{data["protocolId"]}{prefix}'
+    }
+
+    boot_node_list = data["bootNodes"]
+    for i in range(1, number_of_validators + 1):
+        public_key = open(f"{chain_path}/publickey{i}").read().replace('\n', '')
+        boot_node = f"/dns4/node-{i}/tcp/30333/p2p/{public_key}"
+        boot_node_list.append(boot_node)
+
+    telemetry_endpoints = data["telemetryEndpoints"]
+    telemetry_endpoints.append([
+        "/dns/telemetry.joystream.org/tcp/443/x-parity-wss/%2Fsubmit%2F", 0])
+
+    response["bootNodes"] = boot_node_list
+    response["telemetryEndpoints"] = telemetry_endpoints
+
+    data.update(response)
+    with open(chain_spec_path, 'w') as outfile:
+        json.dump(data, outfile, indent=4)
+    print("Chain spec file updated")
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(description='Modify Chain Spec file')
+    parser.add_argument('--path', required=True, help="Path to chain data")
+    parser.add_argument('--prefix', required=True, help="Network prefix")
+    parser.add_argument('--validators', required=True, help="Number of Validators")
+    args = parser.parse_args()
+    print(args.path)
+    main(chain_path=args.path, prefix=args.prefix, number_of_validators=args.validators)

+ 161 - 0
devops/infrastructure/node-network/nfsVolume.ts

@@ -0,0 +1,161 @@
+import * as k8s from '@pulumi/kubernetes'
+import * as k8stypes from '@pulumi/kubernetes/types/input'
+import * as pulumi from '@pulumi/pulumi'
+
+/**
+ * NFSServiceDeployment is an abstraction that uses cloud resources to create a PVC
+ * which is then used by an NFS container, enabling users to then use this NFS server
+ * as a shared file system without depending on creating custom cloud resources
+ */
+export class NFSServiceDeployment extends pulumi.ComponentResource {
+  public readonly deployment: k8s.apps.v1.Deployment
+  public readonly service: k8s.core.v1.Service
+  public readonly pvc: k8s.core.v1.PersistentVolumeClaim
+
+  constructor(name: string, args: ServiceDeploymentArgs, opts?: pulumi.ComponentResourceOptions) {
+    super('k8sjs:service:NFSServiceDeployment', name, {}, opts)
+
+    const nfsLabels = { role: 'nfs-server' }
+    const claimName = 'pvcfornfs'
+
+    // Deploys a cloud block storage which will be used as base storage for NFS server
+    const pvcNFS = new k8s.core.v1.PersistentVolumeClaim(
+      claimName,
+      {
+        metadata: {
+          labels: nfsLabels,
+          namespace: args.namespace,
+          name: claimName,
+        },
+        spec: {
+          accessModes: ['ReadWriteOnce'],
+          resources: {
+            requests: {
+              storage: `1Gi`,
+            },
+          },
+        },
+      },
+      { parent: this }
+    )
+
+    const container: k8stypes.core.v1.Container = {
+      name: 'nfs-server',
+      image: 'gcr.io/google_containers/volume-nfs:0.8',
+      ports: [
+        { name: 'nfs', containerPort: 2049 },
+        { name: 'mountd', containerPort: 20048 },
+        { name: 'rpcbind', containerPort: 111 },
+      ],
+      command: ['/bin/sh', '-c'],
+      args: ['chmod 777 /exports && /usr/local/bin/run_nfs.sh /exports'],
+      securityContext: { 'privileged': true },
+      volumeMounts: [
+        {
+          name: 'nfsstore',
+          mountPath: '/exports',
+        },
+      ],
+    }
+
+    this.deployment = new k8s.apps.v1.Deployment(
+      `nfs-server`,
+      {
+        metadata: {
+          namespace: args.namespace,
+          labels: nfsLabels,
+          name: 'nfs-server',
+        },
+        spec: {
+          replicas: 1,
+          selector: { matchLabels: nfsLabels },
+          template: {
+            metadata: {
+              labels: nfsLabels,
+            },
+            spec: {
+              containers: [container],
+              volumes: [
+                {
+                  name: 'nfsstore',
+                  persistentVolumeClaim: {
+                    claimName,
+                  },
+                },
+              ],
+            },
+          },
+        },
+      },
+      { parent: this }
+    )
+
+    this.service = new k8s.core.v1.Service(
+      'nfs-server',
+      {
+        metadata: {
+          namespace: args.namespace,
+          name: 'nfs-server',
+        },
+        spec: {
+          ports: [
+            { name: 'nfs', port: 2049 },
+            { name: 'mountd', port: 20048 },
+            { name: 'rpcbind', port: 111 },
+          ],
+          selector: nfsLabels,
+        },
+      },
+      { parent: this }
+    )
+
+    const ip = this.service.spec.apply((v) => v.clusterIP)
+
+    const pv = new k8s.core.v1.PersistentVolume(
+      `${name}-pv`,
+      {
+        metadata: {
+          labels: nfsLabels,
+          namespace: args.namespace,
+          name: `${name}-pvc`,
+        },
+        spec: {
+          accessModes: ['ReadWriteMany'],
+          capacity: {
+            storage: `1Gi`,
+          },
+          nfs: {
+            server: ip, //pulumi.interpolate`nfs-server.${namespaceName}.svc.cluster.local`,
+            path: '/',
+          },
+        },
+      },
+      { parent: this, dependsOn: this.service }
+    )
+
+    this.pvc = new k8s.core.v1.PersistentVolumeClaim(
+      `${name}-pvc`,
+      {
+        metadata: {
+          namespace: args.namespace,
+          name: `${name}-pvc`,
+        },
+        spec: {
+          accessModes: ['ReadWriteMany'],
+          resources: {
+            requests: {
+              storage: `1Gi`,
+            },
+          },
+          storageClassName: '',
+          selector: { matchLabels: nfsLabels },
+        },
+      },
+      { parent: this, dependsOn: pv }
+    )
+  }
+}
+
+export interface ServiceDeploymentArgs {
+  namespace: pulumi.Output<string>
+}

+ 15 - 0
devops/infrastructure/node-network/package.json

@@ -0,0 +1,15 @@
+{
+  "name": "joystream-node-network",
+  "devDependencies": {
+    "@types/node": "^10.0.0"
+  },
+  "dependencies": {
+    "@pulumi/aws": "^4.0.0",
+    "@pulumi/awsx": "^0.30.0",
+    "@pulumi/eks": "^0.31.0",
+    "@pulumi/kubernetes": "^3.0.0",
+    "@pulumi/pulumi": "^3.0.0",
+    "@pulumi/docker": "^3.0.0",
+    "pulumi-common": "file:../pulumi-common"
+  }
+}

+ 18 - 0
devops/infrastructure/node-network/tsconfig.json

@@ -0,0 +1,18 @@
+{
+    "compilerOptions": {
+        "strict": true,
+        "outDir": "bin",
+        "target": "es2016",
+        "module": "commonjs",
+        "moduleResolution": "node",
+        "sourceMap": true,
+        "experimentalDecorators": true,
+        "pretty": true,
+        "noFallthroughCasesInSwitch": true,
+        "noImplicitReturns": true,
+        "forceConsistentCasingInFileNames": true
+    },
+    "files": [
+        "index.ts"
+    ]
+}

+ 18 - 0
devops/infrastructure/node-network/utils.ts

@@ -0,0 +1,18 @@
+export const getSubkeyContainers = (validators: number, dataPath: string) => {
+  const result = []
+  for (let i = 1; i <= validators; i++) {
+    result.push({
+      name: `subkey-node-${i}`,
+      image: 'parity/subkey:latest',
+      command: ['/bin/sh', '-c'],
+      args: [`subkey generate-node-key > ${dataPath}/privatekey${i} 2> ${dataPath}/publickey${i}`],
+      volumeMounts: [
+        {
+          name: 'config-data',
+          mountPath: dataPath,
+        },
+      ],
+    })
+  }
+  return result
+}

+ 95 - 0
devops/infrastructure/node-network/validator.ts

@@ -0,0 +1,95 @@
+import * as k8s from '@pulumi/kubernetes'
+import * as k8stypes from '@pulumi/kubernetes/types/input'
+import * as pulumi from '@pulumi/pulumi'
+
+/**
+ * ValidatorServiceDeployment is an example abstraction that uses a class to fold together the common pattern of a
+ * Kubernetes Deployment and its associated Service object.
+ */
+export class ValidatorServiceDeployment extends pulumi.ComponentResource {
+  public readonly deployment: k8s.apps.v1.Deployment
+  public readonly service: k8s.core.v1.Service
+  public readonly ipAddress?: pulumi.Output<string>
+
+  constructor(name: string, args: ServiceDeploymentArgs, opts?: pulumi.ComponentResourceOptions) {
+    super('k8sjs:service:ValidatorServiceDeployment', name, {}, opts)
+
+    const labels = { app: name }
+    const container: k8stypes.core.v1.Container = {
+      name: `joystream-node-${args.index}`,
+      image: args.nodeImage,
+      args: [
+        '--chain',
+        args.chainSpecPath,
+        '--pruning',
+        'archive',
+        '--node-key-file',
+        `${args.dataPath}/privatekey${args.index}`,
+        '--keystore-path',
+        `${args.dataPath}/data/auth-${args.index - 1}`,
+        '--validator',
+        '--log',
+        'runtime,txpool,transaction-pool,trace=sync',
+      ],
+      volumeMounts: [
+        {
+          name: 'config-data',
+          mountPath: args.dataPath,
+        },
+      ],
+    }
+    this.deployment = new k8s.apps.v1.Deployment(
+      name,
+      {
+        metadata: {
+          namespace: args.namespace,
+          labels: labels,
+        },
+        spec: {
+          selector: { matchLabels: labels },
+          replicas: 1,
+          template: {
+            metadata: { labels: labels },
+            spec: {
+              containers: [container],
+              volumes: [
+                {
+                  name: 'config-data',
+                  persistentVolumeClaim: {
+                    claimName: args.pvc,
+                  },
+                },
+              ],
+            },
+          },
+        },
+      },
+      { parent: this }
+    )
+
+    this.service = new k8s.core.v1.Service(
+      name,
+      {
+        metadata: {
+          name: name,
+          namespace: args.namespace,
+          labels: this.deployment.metadata.labels,
+        },
+        spec: {
+          ports: [{ name: 'port-1', port: 30333 }],
+          selector: this.deployment.spec.template.metadata.labels,
+        },
+      },
+      { parent: this }
+    )
+  }
+}
+
+export interface ServiceDeploymentArgs {
+  namespace: pulumi.Output<string>
+  index: number
+  chainSpecPath: string
+  dataPath: string
+  nodeImage: string
+  pvc: pulumi.OutputInstance<any>
+}

+ 134 - 0
devops/infrastructure/pulumi-common/caddy.ts

@@ -0,0 +1,134 @@
+import * as k8s from '@pulumi/kubernetes'
+import * as pulumi from '@pulumi/pulumi'
+import * as dns from 'dns'
+
+/**
+ * CaddyServiceDeployment is an abstraction that uses a class to fold together the common pattern of a
+ * Kubernetes Deployment and its associated Service object.
+ */
+export class CaddyServiceDeployment extends pulumi.ComponentResource {
+  public readonly deployment: k8s.apps.v1.Deployment
+  public readonly service: k8s.core.v1.Service
+  public readonly hostname?: pulumi.Output<string>
+  public readonly primaryEndpoint?: pulumi.Output<string>
+  public readonly secondaryEndpoint?: pulumi.Output<string>
+
+  constructor(name: string, args: ServiceDeploymentArgs, opts?: pulumi.ComponentResourceOptions) {
+    super('caddy:service:CaddyServiceDeployment', name, {}, opts)
+
+    const labels = { app: name }
+    let volumes: pulumi.Input<pulumi.Input<k8s.types.input.core.v1.Volume>[]> = []
+    let caddyVolumeMounts: pulumi.Input<pulumi.Input<k8s.types.input.core.v1.VolumeMount>[]> = []
+
+    async function lookupPromise(url: string): Promise<dns.LookupAddress[]> {
+      return new Promise((resolve, reject) => {
+        dns.lookup(url, { all: true }, (err: any, addresses: dns.LookupAddress[]) => {
+          if (err) reject(err)
+          resolve(addresses)
+        })
+      })
+    }
+
+    this.service = new k8s.core.v1.Service(
+      name,
+      {
+        metadata: {
+          name: name,
+          namespace: args.namespaceName,
+          labels: labels,
+        },
+        spec: {
+          type: args.isMinikube ? 'NodePort' : 'LoadBalancer',
+          ports: [
+            { name: 'http', port: 80 },
+            { name: 'https', port: 443 },
+          ],
+          selector: labels,
+        },
+      },
+      { parent: this }
+    )
+
+    this.hostname = this.service.status.loadBalancer.ingress[0].hostname
+
+    if (args.lbReady) {
+      let caddyConfig: pulumi.Output<string>
+      const lbIps: pulumi.Output<dns.LookupAddress[]> = this.hostname.apply((dnsName) => {
+        return lookupPromise(dnsName)
+      })
+
+      function getProxyString(ipAddress: pulumi.Output<string>) {
+        let result: pulumi.Output<string> = pulumi.interpolate``
+        for (const endpoint of args.caddyEndpoints) {
+          result = pulumi.interpolate`${ipAddress}.nip.io${endpoint}\n${result}`
+        }
+        return result
+      }
+
+      caddyConfig = pulumi.interpolate`${getProxyString(lbIps[0].address)}
+        ${getProxyString(lbIps[1].address)}`
+
+      this.primaryEndpoint = pulumi.interpolate`${lbIps[0].address}.nip.io`
+      this.secondaryEndpoint = pulumi.interpolate`${lbIps[1].address}.nip.io`
+
+      const keyConfig = new k8s.core.v1.ConfigMap(
+        name,
+        {
+          metadata: { namespace: args.namespaceName, labels: labels },
+          data: { 'fileData': caddyConfig },
+        },
+        { parent: this }
+      )
+      const keyConfigName = keyConfig.metadata.apply((m) => m.name)
+
+      caddyVolumeMounts.push({
+        mountPath: '/etc/caddy/Caddyfile',
+        name: 'caddy-volume',
+        subPath: 'fileData',
+      })
+      volumes.push({
+        name: 'caddy-volume',
+        configMap: {
+          name: keyConfigName,
+        },
+      })
+    }
+
+    this.deployment = new k8s.apps.v1.Deployment(
+      name,
+      {
+        metadata: { namespace: args.namespaceName, labels: labels },
+        spec: {
+          selector: { matchLabels: labels },
+          replicas: 1,
+          template: {
+            metadata: { labels: labels },
+            spec: {
+              containers: [
+                {
+                  name: 'caddy',
+                  image: 'caddy',
+                  ports: [
+                    { name: 'caddy-http', containerPort: 80 },
+                    { name: 'caddy-https', containerPort: 443 },
+                  ],
+                  volumeMounts: caddyVolumeMounts,
+                },
+              ],
+              volumes,
+            },
+          },
+        },
+      },
+      { parent: this }
+    )
+  }
+}
+
+export interface ServiceDeploymentArgs {
+  namespaceName: pulumi.Output<string>
+  // Endpoints are caddyConfig strings concatenated after IP.nip.io
+  caddyEndpoints: string[]
+  lbReady?: boolean
+  isMinikube?: boolean
+}

+ 1 - 0
devops/infrastructure/pulumi-common/index.ts

@@ -0,0 +1 @@
+export { CaddyServiceDeployment } from './caddy'

+ 11 - 0
devops/infrastructure/pulumi-common/package.json

@@ -0,0 +1,11 @@
+{
+  "name": "pulumi-common",
+  "version": "1.0.0",
+  "devDependencies": {
+    "@types/node": "^10.0.0"
+  },
+  "dependencies": {
+    "@pulumi/kubernetes": "^3.0.0",
+    "@pulumi/pulumi": "^3.0.0"
+  }
+}

+ 15 - 0
devops/infrastructure/pulumi-common/tsconfig.json

@@ -0,0 +1,15 @@
+{
+  "compilerOptions": {
+    "strict": true,
+    "target": "es2016",
+    "module": "commonjs",
+    "moduleResolution": "node",
+    "sourceMap": true,
+    "experimentalDecorators": true,
+    "pretty": true,
+    "composite": true,
+    "noFallthroughCasesInSwitch": true,
+    "noImplicitReturns": true,
+    "forceConsistentCasingInFileNames": true
+  }
+}

+ 6 - 0
devops/infrastructure/query-node/.gitignore

@@ -0,0 +1,6 @@
+/bin/
+/node_modules/
+kubeconfig.yml
+package-lock.json
+.env
+Pulumi.*.yaml

+ 19 - 0
devops/infrastructure/query-node/Pulumi.yaml

@@ -0,0 +1,19 @@
+name: query-node
+runtime: nodejs
+description: Kubernetes IaC for Query Node
+template:
+  config:
+    aws:profile:
+      default: joystream-user
+    aws:region:
+      default: us-east-1
+    isMinikube:
+      description: Whether you are deploying to minikube
+      default: false
+    isLoadBalancerReady:
+      description: Whether the load balancer service is ready and has been assigned an IP
+      default: false
+    membersFilePath:
+      description: Path to members.json file for processor initialization
+    workersFilePath:
+      description: Path to workers.json file for processor initialization

+ 117 - 0
devops/infrastructure/query-node/README.md

@@ -0,0 +1,117 @@
+# Query Node automated deployment
+
+Deploys an EKS Kubernetes cluster with query node
+
+## Deploying the App
+
+To deploy your infrastructure, follow the below steps.
+
+### Prerequisites
+
+1. [Install Pulumi](https://www.pulumi.com/docs/get-started/install/)
+1. [Install Node.js](https://nodejs.org/en/download/)
+1. Install a package manager for Node.js, such as [npm](https://www.npmjs.com/get-npm) or [Yarn](https://yarnpkg.com/en/docs/install).
+1. [Configure AWS Credentials](https://www.pulumi.com/docs/intro/cloud-providers/aws/setup/)
+1. Optional (for debugging): [Install kubectl](https://kubernetes.io/docs/tasks/tools/)
+
+### Steps
+
+After cloning this repo, from this working directory, run these commands:
+
+1. Install the required Node.js packages:
+
+   This installs the dependent packages [needed](https://www.pulumi.com/docs/intro/concepts/how-pulumi-works/) for our Pulumi program.
+
+   ```bash
+   $ npm install
+   ```
+
+1. Create a new stack, which is an isolated deployment target for this example:
+
+   This will initialize the Pulumi program in TypeScript.
+
+   ```bash
+   $ pulumi stack init
+   ```
+
+1. Set the required configuration variables in `Pulumi.<stack>.yaml`
+
+   ```bash
+   $ pulumi config set-all --plaintext aws:region=us-east-1 --plaintext aws:profile=joystream-user \
+    --plaintext workersFilePath=<PATH> --plaintext membersFilePath=<PATH> --plaintext isMinikube=true
+   ```
+
+   If you want to build the stack on AWS set the `isMinikube` config to `false`
+
+   ```bash
+   $ pulumi config set isMinikube false
+   ```
+
+1. Create a `.env` file in this directory (`cp ../../../.env ./.env`) and set the database and other variables in it
+
+   Make sure to set `GRAPHQL_SERVER_PORT=4001`
+
+1. Stand up the Kubernetes cluster:
+
+   Running `pulumi up -y` will deploy the EKS cluster. Note, provisioning a
+   new EKS cluster takes between 10-15 minutes.
+
+1. Once the stack is up and running, we will modify the Caddy config to get SSL certificate for the load balancer
+
+   Modify the config variable `isLoadBalancerReady`
+
+   ```bash
+   $ pulumi config set isLoadBalancerReady true
+   ```
+
+   Run `pulumi up -y` to update the Caddy config
+
+1. You can now access the endpoints using `pulumi stack output endpoint1` or `pulumi stack output endpoint2`
+
+   The GraphQl server is accessible at `https://<ENDPOINT>/server/graphql` and indexer at `https://<ENDPOINT>/indexer/graphql`
+
+1. Access the Kubernetes Cluster using `kubectl`
+
+   To access your new Kubernetes cluster using `kubectl`, we need to set up the
+   `kubeconfig` file and download `kubectl`. We can leverage the Pulumi
+   stack output in the CLI, as Pulumi facilitates exporting these objects for us.
+
+   ```bash
+   $ pulumi stack output kubeconfig --show-secrets > kubeconfig
+   $ export KUBECONFIG=$PWD/kubeconfig
+   $ kubectl get nodes
+   ```
+
+   We can also use the stack output to query the cluster for our newly created Deployment:
+
+   ```bash
+   $ kubectl get deployment $(pulumi stack output deploymentName) --namespace=$(pulumi stack output namespaceName)
+   $ kubectl get service $(pulumi stack output serviceName) --namespace=$(pulumi stack output namespaceName)
+   ```
+
+   To get logs
+
+   ```bash
+   $ kubectl config set-context --current --namespace=$(pulumi stack output namespaceName)
+   $ kubectl get pods
+   $ kubectl logs <PODNAME> --all-containers
+   ```
+
+   To see complete pulumi stack output
+
+   ```bash
+   $ pulumi stack output
+   ```
+
+   To execute a command
+
+   ```bash
+   $ kubectl exec --stdin --tty <PODNAME> -c colossus -- /bin/bash
+   ```
+
+1. Once you've finished experimenting, tear down your stack's resources by destroying and removing it:
+
+   ```bash
+   $ pulumi destroy --yes
+   $ pulumi stack rm --yes
+   ```

+ 29 - 0
devops/infrastructure/query-node/configMap.ts

@@ -0,0 +1,29 @@
+import * as pulumi from '@pulumi/pulumi'
+import * as k8s from '@pulumi/kubernetes'
+import * as fs from 'fs'
+
+export class configMapFromFile extends pulumi.ComponentResource {
+  public readonly configName?: pulumi.Output<string>
+
+  constructor(name: string, args: ConfigMapArgs, opts: pulumi.ComponentResourceOptions = {}) {
+    super('pkg:query-node:configMap', name, {}, opts)
+
+    this.configName = new k8s.core.v1.ConfigMap(
+      name,
+      {
+        metadata: {
+          namespace: args.namespaceName,
+        },
+        data: {
+          'fileData': fs.readFileSync(args.filePath).toString(),
+        },
+      },
+      opts
+    ).metadata.apply((m) => m.name)
+  }
+}
+
+export interface ConfigMapArgs {
+  filePath: string
+  namespaceName: pulumi.Output<string>
+}

+ 461 - 0
devops/infrastructure/query-node/index.ts

@@ -0,0 +1,461 @@
+import * as awsx from '@pulumi/awsx'
+import * as eks from '@pulumi/eks'
+import * as docker from '@pulumi/docker'
+import * as pulumi from '@pulumi/pulumi'
+import { configMapFromFile } from './configMap'
+import * as k8s from '@pulumi/kubernetes'
+import * as s3Helpers from './s3Helpers'
+import { CaddyServiceDeployment } from 'pulumi-common'
+
+require('dotenv').config()
+
+const config = new pulumi.Config()
+const awsConfig = new pulumi.Config('aws')
+const isMinikube = config.getBoolean('isMinikube')
+export let kubeconfig: pulumi.Output<any>
+export let joystreamAppsImage: pulumi.Output<string>
+let provider: k8s.Provider
+
+if (isMinikube) {
+  provider = new k8s.Provider('local', {})
+
+  // Create image from local app
+  joystreamAppsImage = new docker.Image('joystream/apps', {
+    build: {
+      context: '../../../',
+      dockerfile: '../../../apps.Dockerfile',
+    },
+    imageName: 'joystream/apps:latest',
+    skipPush: true,
+  }).baseImageName
+  // joystreamAppsImage = pulumi.interpolate`joystream/apps`
+} else {
+  // Create a VPC for our cluster.
+  const vpc = new awsx.ec2.Vpc('query-node-vpc', { numberOfAvailabilityZones: 2, numberOfNatGateways: 1 })
+
+  // Create an EKS cluster with the default configuration.
+  const cluster = new eks.Cluster('eksctl-query-node', {
+    vpcId: vpc.id,
+    subnetIds: vpc.publicSubnetIds,
+    desiredCapacity: 3,
+    maxSize: 3,
+    instanceType: 't2.large',
+    providerCredentialOpts: {
+      profileName: awsConfig.get('profile'),
+    },
+  })
+  provider = cluster.provider
+
+  // Export the cluster's kubeconfig.
+  kubeconfig = cluster.kubeconfig
+
+  // Create a repository
+  const repo = new awsx.ecr.Repository('joystream/apps')
+
+  joystreamAppsImage = repo.buildAndPushImage({
+    dockerfile: '../../../apps.Dockerfile',
+    context: '../../../',
+  })
+}
+
+const resourceOptions = { provider: provider }
+
+const name = 'query-node'
+
+// Create a Kubernetes Namespace
+// const ns = new k8s.core.v1.Namespace(name, {}, { provider: cluster.provider })
+const ns = new k8s.core.v1.Namespace(name, {}, resourceOptions)
+
+// Export the Namespace name
+export const namespaceName = ns.metadata.name
+
+const appLabels = { appClass: name }
+
+// Create a Deployment
+const databaseLabels = { app: 'postgres-db' }
+
+const pvc = new k8s.core.v1.PersistentVolumeClaim(
+  `db-pvc`,
+  {
+    metadata: {
+      labels: databaseLabels,
+      namespace: namespaceName,
+      name: `db-pvc`,
+    },
+    spec: {
+      accessModes: ['ReadWriteOnce'],
+      resources: {
+        requests: {
+          storage: `10Gi`,
+        },
+      },
+    },
+  },
+  resourceOptions
+)
+
+const databaseDeployment = new k8s.apps.v1.Deployment(
+  'postgres-db',
+  {
+    metadata: {
+      namespace: namespaceName,
+      labels: databaseLabels,
+    },
+    spec: {
+      selector: { matchLabels: databaseLabels },
+      template: {
+        metadata: { labels: databaseLabels },
+        spec: {
+          containers: [
+            {
+              name: 'postgres-db',
+              image: 'postgres:12',
+              env: [
+                { name: 'POSTGRES_USER', value: process.env.DB_USER! },
+                { name: 'POSTGRES_PASSWORD', value: process.env.DB_PASS! },
+                { name: 'POSTGRES_DB', value: process.env.INDEXER_DB_NAME! },
+              ],
+              ports: [{ containerPort: 5432 }],
+              volumeMounts: [
+                {
+                  name: 'postgres-data',
+                  mountPath: '/var/lib/postgresql/data',
+                  subPath: 'postgres',
+                },
+              ],
+            },
+          ],
+          volumes: [
+            {
+              name: 'postgres-data',
+              persistentVolumeClaim: {
+                claimName: `db-pvc`,
+              },
+            },
+          ],
+        },
+      },
+    },
+  },
+  resourceOptions
+)
+
+const databaseService = new k8s.core.v1.Service(
+  'postgres-db',
+  {
+    metadata: {
+      namespace: namespaceName,
+      labels: databaseDeployment.metadata.labels,
+      name: 'postgres-db',
+    },
+    spec: {
+      ports: [{ port: 5432 }],
+      selector: databaseDeployment.spec.template.metadata.labels,
+    },
+  },
+  resourceOptions
+)
+
+const migrationJob = new k8s.batch.v1.Job(
+  'db-migration',
+  {
+    metadata: {
+      namespace: namespaceName,
+    },
+    spec: {
+      backoffLimit: 0,
+      template: {
+        spec: {
+          containers: [
+            {
+              name: 'db-migration',
+              image: joystreamAppsImage,
+              imagePullPolicy: 'IfNotPresent',
+              resources: { requests: { cpu: '100m', memory: '100Mi' } },
+              env: [
+                {
+                  name: 'WARTHOG_DB_HOST',
+                  value: 'postgres-db',
+                },
+                {
+                  name: 'DB_HOST',
+                  value: 'postgres-db',
+                },
+                { name: 'DB_NAME', value: process.env.DB_NAME! },
+                { name: 'DB_PASS', value: process.env.DB_PASS! },
+              ],
+              command: ['/bin/sh', '-c'],
+              args: ['yarn workspace query-node-root db:prepare; yarn workspace query-node-root db:migrate'],
+            },
+          ],
+          restartPolicy: 'Never',
+        },
+      },
+    },
+  },
+  { ...resourceOptions, dependsOn: databaseService }
+)
+
+const membersFilePath = config.get('membersFilePath')
+  ? config.get('membersFilePath')!
+  : '../../../query-node/mappings/bootstrap/data/members.json'
+const workersFilePath = config.get('workersFilePath')
+  ? config.get('workersFilePath')!
+  : '../../../query-node/mappings/bootstrap/data/workers.json'
+
+const dataBucket = new s3Helpers.FileBucket('bootstrap-data', {
+  files: [
+    { path: membersFilePath, name: 'members.json' },
+    { path: workersFilePath, name: 'workers.json' },
+  ],
+  policy: s3Helpers.publicReadPolicy,
+})
+
+const membersUrl = dataBucket.getUrlForFile('members.json')
+const workersUrl = dataBucket.getUrlForFile('workers.json')
+
+const dataPath = '/joystream/query-node/mappings/bootstrap/data'
+
+const processorJob = new k8s.batch.v1.Job(
+  'processor-migration',
+  {
+    metadata: {
+      namespace: namespaceName,
+    },
+    spec: {
+      backoffLimit: 0,
+      template: {
+        spec: {
+          initContainers: [
+            {
+              name: 'curl-init',
+              image: 'appropriate/curl',
+              command: ['/bin/sh', '-c'],
+              args: [
+                pulumi.interpolate`curl -o ${dataPath}/workers.json ${workersUrl}; curl -o ${dataPath}/members.json ${membersUrl}; ls -al ${dataPath};`,
+              ],
+              volumeMounts: [
+                {
+                  name: 'bootstrap-data',
+                  mountPath: dataPath,
+                },
+              ],
+            },
+          ],
+          containers: [
+            {
+              name: 'processor-migration',
+              image: joystreamAppsImage,
+              imagePullPolicy: 'IfNotPresent',
+              env: [
+                {
+                  name: 'INDEXER_ENDPOINT_URL',
+                  value: `http://localhost:${process.env.WARTHOG_APP_PORT}/graphql`,
+                },
+                { name: 'TYPEORM_HOST', value: 'postgres-db' },
+                { name: 'TYPEORM_DATABASE', value: process.env.DB_NAME! },
+                { name: 'DEBUG', value: 'index-builder:*' },
+                { name: 'PROCESSOR_POLL_INTERVAL', value: '1000' },
+              ],
+              volumeMounts: [
+                {
+                  name: 'bootstrap-data',
+                  mountPath: dataPath,
+                },
+              ],
+              args: ['workspace', 'query-node-root', 'processor:bootstrap'],
+            },
+          ],
+          restartPolicy: 'Never',
+          volumes: [
+            {
+              name: 'bootstrap-data',
+              emptyDir: {},
+            },
+          ],
+        },
+      },
+    },
+  },
+  { ...resourceOptions, dependsOn: migrationJob }
+)
+
+const defsConfig = new configMapFromFile(
+  'defs-config',
+  {
+    filePath: '../../../types/augment/all/defs.json',
+    namespaceName: namespaceName,
+  },
+  resourceOptions
+).configName
+
+const deployment = new k8s.apps.v1.Deployment(
+  name,
+  {
+    metadata: {
+      namespace: namespaceName,
+      labels: appLabels,
+    },
+    spec: {
+      replicas: 1,
+      selector: { matchLabels: appLabels },
+      template: {
+        metadata: {
+          labels: appLabels,
+        },
+        spec: {
+          containers: [
+            {
+              name: 'redis',
+              image: 'redis:6.0-alpine',
+              ports: [{ containerPort: 6379 }],
+            },
+            {
+              name: 'indexer',
+              image: 'joystream/hydra-indexer:2.1.0-beta.9',
+              env: [
+                { name: 'DB_HOST', value: 'postgres-db' },
+                { name: 'DB_NAME', value: process.env.INDEXER_DB_NAME! },
+                { name: 'DB_PASS', value: process.env.DB_PASS! },
+                { name: 'INDEXER_WORKERS', value: '5' },
+                { name: 'REDIS_URI', value: 'redis://localhost:6379/0' },
+                { name: 'DEBUG', value: 'index-builder:*' },
+                { name: 'WS_PROVIDER_ENDPOINT_URI', value: process.env.WS_PROVIDER_ENDPOINT_URI! },
+                { name: 'TYPES_JSON', value: 'types.json' },
+                { name: 'PGUSER', value: process.env.DB_USER! },
+                { name: 'BLOCK_HEIGHT', value: process.env.BLOCK_HEIGHT! },
+              ],
+              volumeMounts: [
+                {
+                  mountPath: '/home/hydra/packages/hydra-indexer/types.json',
+                  name: 'indexer-volume',
+                  subPath: 'fileData',
+                },
+              ],
+              command: ['/bin/sh', '-c'],
+              args: ['yarn db:bootstrap && yarn start:prod'],
+            },
+            {
+              name: 'hydra-indexer-gateway',
+              image: 'joystream/hydra-indexer-gateway:2.1.0-beta.5',
+              env: [
+                { name: 'WARTHOG_STARTER_DB_DATABASE', value: process.env.INDEXER_DB_NAME! },
+                { name: 'WARTHOG_STARTER_DB_HOST', value: 'postgres-db' },
+                { name: 'WARTHOG_STARTER_DB_PASSWORD', value: process.env.DB_PASS! },
+                { name: 'WARTHOG_STARTER_DB_PORT', value: process.env.DB_PORT! },
+                { name: 'WARTHOG_STARTER_DB_USERNAME', value: process.env.DB_USER! },
+                { name: 'WARTHOG_STARTER_REDIS_URI', value: 'redis://localhost:6379/0' },
+                { name: 'WARTHOG_APP_PORT', value: process.env.WARTHOG_APP_PORT! },
+                { name: 'PORT', value: process.env.WARTHOG_APP_PORT! },
+                { name: 'DEBUG', value: '*' },
+              ],
+              ports: [{ containerPort: 4002 }],
+            },
+            {
+              name: 'processor',
+              image: joystreamAppsImage,
+              imagePullPolicy: 'IfNotPresent',
+              env: [
+                {
+                  name: 'INDEXER_ENDPOINT_URL',
+                  value: `http://localhost:${process.env.WARTHOG_APP_PORT}/graphql`,
+                },
+                { name: 'TYPEORM_HOST', value: 'postgres-db' },
+                { name: 'TYPEORM_DATABASE', value: process.env.DB_NAME! },
+                { name: 'DEBUG', value: 'index-builder:*' },
+                { name: 'PROCESSOR_POLL_INTERVAL', value: '1000' },
+              ],
+              volumeMounts: [
+                {
+                  mountPath: '/joystream/query-node/mappings/lib/generated/types/typedefs.json',
+                  name: 'processor-volume',
+                  subPath: 'fileData',
+                },
+              ],
+              command: ['/bin/sh', '-c'],
+              args: ['cd query-node && yarn hydra-processor run -e ../.env'],
+            },
+            {
+              name: 'graphql-server',
+              image: joystreamAppsImage,
+              imagePullPolicy: 'IfNotPresent',
+              env: [
+                { name: 'DB_HOST', value: 'postgres-db' },
+                { name: 'DB_PASS', value: process.env.DB_PASS! },
+                { name: 'DB_USER', value: process.env.DB_USER! },
+                { name: 'DB_PORT', value: process.env.DB_PORT! },
+                { name: 'DB_NAME', value: process.env.DB_NAME! },
+                { name: 'GRAPHQL_SERVER_HOST', value: process.env.GRAPHQL_SERVER_HOST! },
+                { name: 'GRAPHQL_SERVER_PORT', value: process.env.GRAPHQL_SERVER_PORT! },
+              ],
+              ports: [{ name: 'graph-ql-port', containerPort: Number(process.env.GRAPHQL_SERVER_PORT!) }],
+              args: ['workspace', 'query-node-root', 'query-node:start:prod'],
+            },
+          ],
+          volumes: [
+            {
+              name: 'processor-volume',
+              configMap: {
+                name: defsConfig,
+              },
+            },
+            {
+              name: 'indexer-volume',
+              configMap: {
+                name: defsConfig,
+              },
+            },
+          ],
+        },
+      },
+    },
+  },
+  { ...resourceOptions, dependsOn: processorJob }
+)
+
+// Export the Deployment name
+export const deploymentName = deployment.metadata.name
+
+// Create a LoadBalancer Service for the NGINX Deployment
+const service = new k8s.core.v1.Service(
+  name,
+  {
+    metadata: {
+      labels: appLabels,
+      namespace: namespaceName,
+      name: 'query-node',
+    },
+    spec: {
+      ports: [
+        { name: 'port-1', port: 8081, targetPort: 'graph-ql-port' },
+        { name: 'port-2', port: 4000, targetPort: 4002 },
+      ],
+      selector: appLabels,
+    },
+  },
+  resourceOptions
+)
+
+// Export the Service name and public LoadBalancer Endpoint
+export const serviceName = service.metadata.name
+
+const caddyEndpoints = [
+  `/indexer/* {
+    uri strip_prefix /indexer
+    reverse_proxy query-node:4000
+}`,
+  `/server/* {
+    uri strip_prefix /server
+    reverse_proxy query-node:8081
+}`,
+]
+
+const lbReady = config.get('isLoadBalancerReady') === 'true'
+const caddy = new CaddyServiceDeployment(
+  'caddy-proxy',
+  { lbReady, namespaceName: namespaceName, isMinikube, caddyEndpoints },
+  resourceOptions
+)
+
+export const endpoint1 = caddy.primaryEndpoint
+export const endpoint2 = caddy.secondaryEndpoint

+ 18 - 0
devops/infrastructure/query-node/package.json

@@ -0,0 +1,18 @@
+{
+  "name": "query-node",
+  "devDependencies": {
+    "@types/node": "^10.0.0"
+  },
+  "dependencies": {
+    "@pulumi/aws": "^4.0.0",
+    "@pulumi/awsx": "^0.30.0",
+    "@pulumi/eks": "^0.31.0",
+    "@pulumi/kubernetes": "^3.0.0",
+    "@pulumi/pulumi": "^3.0.0",
+    "@pulumi/docker": "^3.0.0",
+    "dotenv": "^10.0.0",
+    "mime": "^2.5.2",
+    "@types/mime": "^2.0.0",
+    "pulumi-common": "file:../pulumi-common"
+  }
+}

+ 73 - 0
devops/infrastructure/query-node/s3Helpers.ts

@@ -0,0 +1,73 @@
+import * as fs from 'fs'
+import * as mime from 'mime'
+
+import * as aws from '@pulumi/aws'
+import * as pulumi from '@pulumi/pulumi'
+
+interface FileObject {
+  name: string
+  path: string
+}
+
+export interface FileBucketOpts {
+  files: FileObject[]
+  policy?: (bucket: aws.s3.Bucket) => pulumi.Output<string>
+}
+
+export class FileBucket {
+  public readonly bucket: aws.s3.Bucket
+  public readonly files: { [key: string]: aws.s3.BucketObject }
+  public readonly policy: aws.s3.BucketPolicy | undefined
+
+  private readonly fileContents: { [key: string]: string }
+
+  constructor(bucketName: string, opts: FileBucketOpts) {
+    this.bucket = new aws.s3.Bucket(bucketName)
+    this.fileContents = {}
+    this.files = {}
+    for (const file of opts.files) {
+      this.fileContents[file.name] = fs.readFileSync(file.path).toString()
+      this.files[file.name] = new aws.s3.BucketObject(file.name, {
+        bucket: this.bucket,
+        source: new pulumi.asset.FileAsset(file.path),
+        contentType: mime.getType(file.path) || undefined,
+      })
+    }
+
+    if (opts.policy !== undefined) {
+      // Set the access policy for the bucket so all objects are readable
+      this.policy = new aws.s3.BucketPolicy(`bucketPolicy`, {
+        bucket: this.bucket.bucket,
+        // policy: this.bucket.bucket.apply(publicReadPolicyForBucket)
+        policy: opts.policy(this.bucket),
+      })
+    }
+  }
+
+  getUrlForFile(file: string): pulumi.Output<string> {
+    if (!(file in this.files)) {
+      throw new Error(`Bucket does not have file '${file}'`)
+    }
+
+    return pulumi.all([this.bucket.bucketDomainName, this.files[file].id]).apply(([domain, id]) => `${domain}/${id}`)
+  }
+}
+
+// Create an S3 Bucket Policy to allow public read of all objects in bucket
+export function publicReadPolicy(bucket: aws.s3.Bucket): pulumi.Output<string> {
+  return bucket.bucket.apply((bucketName) =>
+    JSON.stringify({
+      Version: '2012-10-17',
+      Statement: [
+        {
+          Effect: 'Allow',
+          Principal: '*',
+          Action: ['s3:GetObject'],
+          Resource: [
+            `arn:aws:s3:::${bucketName}/*`, // policy refers to bucket name explicitly
+          ],
+        },
+      ],
+    })
+  )
+}

+ 18 - 0
devops/infrastructure/query-node/tsconfig.json

@@ -0,0 +1,18 @@
+{
+    "compilerOptions": {
+        "strict": true,
+        "outDir": "bin",
+        "target": "es2016",
+        "module": "commonjs",
+        "moduleResolution": "node",
+        "sourceMap": true,
+        "experimentalDecorators": true,
+        "pretty": true,
+        "noFallthroughCasesInSwitch": true,
+        "noImplicitReturns": true,
+        "forceConsistentCasingInFileNames": true
+    },
+    "files": [
+        "index.ts"
+    ]
+}

+ 4 - 3
devops/infrastructure/requirements.yml

@@ -1,6 +1,7 @@
 ---
 roles:
-- caddy_ansible.caddy_ansible
+  - caddy_ansible.caddy_ansible
 collections:
-- community.aws
-- amazon.aws
+  - community.aws
+  - amazon.aws
+  - community.docker

+ 12 - 1
devops/infrastructure/roles/admin/tasks/main.yml

@@ -7,7 +7,7 @@
 
 - name: Copy bash_profile content to bashrc for non-interactive sessions
   blockinfile:
-    block: "{{ bash_data.stdout }}"
+    block: '{{ bash_data.stdout }}'
     path: ~/.bashrc
     insertbefore: BOF
 
@@ -16,3 +16,14 @@
 
 - name: Install subkey
   shell: cargo install --force subkey --git https://github.com/paritytech/substrate --version 2.0.1 --locked
+  async: 3600
+  poll: 0
+  register: install_result
+
+- name: Check whether install subkey task has finished
+  async_status:
+    jid: '{{ install_result.ansible_job_id }}'
+  register: job_result
+  until: job_result.finished
+  retries: 36
+  delay: 100

+ 115 - 0
devops/infrastructure/single-instance-docker.yml

@@ -0,0 +1,115 @@
+AWSTemplateFormatVersion: 2010-09-09
+
+Parameters:
+  EC2InstanceType:
+    Type: String
+    Default: t2.xlarge
+  EC2AMI:
+    Type: String
+    Default: 'ami-09e67e426f25ce0d7'
+  KeyName:
+    Description: Name of an existing EC2 KeyPair to enable SSH access to the instance
+    Type: 'AWS::EC2::KeyPair::KeyName'
+    Default: 'joystream-key'
+    ConstraintDescription: must be the name of an existing EC2 KeyPair.
+
+Resources:
+  SecurityGroup:
+    Type: AWS::EC2::SecurityGroup
+    Properties:
+      GroupDescription: !Sub 'Internal Security group for validator nodes ${AWS::StackName}'
+      SecurityGroupIngress:
+        - IpProtocol: tcp
+          FromPort: 22
+          ToPort: 22
+          CidrIp: 0.0.0.0/0
+      Tags:
+        - Key: Name
+          Value: !Sub '${AWS::StackName}_validator'
+
+  InstanceLaunchTemplate:
+    Type: AWS::EC2::LaunchTemplate
+    Metadata:
+      AWS::CloudFormation::Init:
+        config:
+          packages:
+            apt:
+              wget: []
+              unzip: []
+    Properties:
+      LaunchTemplateName: !Sub 'LaunchTemplate_${AWS::StackName}'
+      LaunchTemplateData:
+        ImageId: !Ref EC2AMI
+        InstanceType: !Ref EC2InstanceType
+        KeyName: !Ref KeyName
+        SecurityGroupIds:
+          - !GetAtt SecurityGroup.GroupId
+        BlockDeviceMappings:
+          - DeviceName: /dev/sda1
+            Ebs:
+              VolumeSize: '30'
+        UserData:
+          Fn::Base64: !Sub |
+            #!/bin/bash -xe
+
+            # send script output to /tmp so we can debug boot failures
+            exec > /tmp/userdata.log 2>&1
+
+            # Update all packages
+            apt-get update -y
+
+            # Install the updates
+            apt-get upgrade -y
+
+            apt-get install -y apt-transport-https ca-certificates curl gnupg lsb-release
+
+            curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
+
+            echo "deb [arch=arm64 signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
+
+            apt-get update -y
+
+            apt-get install -y docker-ce docker-ce-cli containerd.io
+
+            usermod -aG docker ubuntu
+
+            # Get latest cfn scripts and install them;
+            apt-get install -y python3-setuptools
+            mkdir -p /opt/aws/bin
+            wget https://s3.amazonaws.com/cloudformation-examples/aws-cfn-bootstrap-py3-latest.tar.gz
+            python3 -m easy_install --script-dir /opt/aws/bin aws-cfn-bootstrap-py3-latest.tar.gz
+
+            apt-get install -y python3-pip
+
+            /opt/aws/bin/cfn-signal -e $? -r "Instance Created" '${WaitHandle}'
+
+  Instance:
+    Type: AWS::EC2::Instance
+    Properties:
+      LaunchTemplate:
+        LaunchTemplateId: !Ref InstanceLaunchTemplate
+        Version: !GetAtt InstanceLaunchTemplate.LatestVersionNumber
+      Tags:
+        - Key: Name
+          Value: !Sub '${AWS::StackName}_1'
+
+  WaitHandle:
+    Type: AWS::CloudFormation::WaitConditionHandle
+
+  WaitCondition:
+    Type: AWS::CloudFormation::WaitCondition
+    Properties:
+      Handle: !Ref 'WaitHandle'
+      Timeout: '600'
+      Count: 1
+
+Outputs:
+  PublicIp:
+    Description: The DNS name for the created instance
+    Value: !Sub '${Instance.PublicIp}'
+    Export:
+      Name: !Sub '${AWS::StackName}PublicIp'
+
+  InstanceId:
+    Description: The Instance ID
+    Value: !Ref Instance

+ 0 - 0
devops/infrastructure/new-node-playbook.yml → devops/infrastructure/single-node-playbook.yml


+ 5 - 0
devops/infrastructure/storage-node/.gitignore

@@ -0,0 +1,5 @@
+/bin/
+/node_modules/
+kubeconfig.yml
+package-lock.json
+Pulumi.*.yaml

+ 33 - 0
devops/infrastructure/storage-node/Pulumi.yaml

@@ -0,0 +1,33 @@
+name: eks-cluster
+runtime: nodejs
+description: A Pulumi program to deploy a storage node to a cloud environment
+template:
+  config:
+    aws:profile:
+      default: joystream-user
+    aws:region:
+      default: us-east-1
+    wsProviderEndpointURI:
+      description: Chain RPC endpoint
+      default: 'wss://rome-rpc-endpoint.joystream.org:9944/'
+    isAnonymous:
+      description: Whether you are deploying an anonymous storage node
+      default: true
+    isLoadBalancerReady:
+      description: Whether the load balancer service is ready and has been assigned an IP
+      default: false
+    colossusPort:
+      description: Port that is exposed for the colossus container
+      default: 3000
+    storage:
+      description: Amount of storage in gigabytes for ipfs volume
+      default: 40
+    providerId:
+      description: StorageProviderId assigned to you in working group
+    keyFile:
+      description: Path to JSON key export file to use as the storage provider (role account)
+    publicURL:
+      description: API Public URL to announce
+    passphrase:
+      description: Optional passphrase to use to decrypt the key-file
+      secret: true

+ 120 - 0
devops/infrastructure/storage-node/README.md

@@ -0,0 +1,120 @@
+# Amazon EKS Cluster: Hello World!
+
+This example deploys an EKS Kubernetes cluster with a custom IPFS image
+
+## Deploying the App
+
+To deploy your infrastructure, follow the below steps.
+
+### Prerequisites
+
+1. [Install Pulumi](https://www.pulumi.com/docs/get-started/install/)
+1. [Install Node.js](https://nodejs.org/en/download/)
+1. Install a package manager for Node.js, such as [npm](https://www.npmjs.com/get-npm) or [Yarn](https://yarnpkg.com/en/docs/install).
+1. [Configure AWS Credentials](https://www.pulumi.com/docs/intro/cloud-providers/aws/setup/)
+1. Optional (for debugging): [Install kubectl](https://kubernetes.io/docs/tasks/tools/)
+
+### Steps
+
+After cloning this repo, from this working directory, run these commands:
+
+1. Install the required Node.js packages:
+
+   This installs the dependent packages [needed](https://www.pulumi.com/docs/intro/concepts/how-pulumi-works/) for our Pulumi program.
+
+   ```bash
+   $ npm install
+   ```
+
+1. Create a new stack, which is an isolated deployment target for this example:
+
+   This will initialize the Pulumi program in TypeScript.
+
+   ```bash
+   $ pulumi stack init
+   ```
+
+1. Set the required configuration variables in `Pulumi.<stack>.yaml`
+
+   ```bash
+   $ pulumi config set-all --plaintext aws:region=us-east-1 --plaintext aws:profile=joystream-user \
+    --plaintext wsProviderEndpointURI='wss://rome-rpc-endpoint.joystream.org:9944/' \
+    --plaintext isAnonymous=true
+   ```
+
+   If running for production use the below mentioned config
+
+   ```bash
+   $ pulumi config set-all --plaintext aws:region=us-east-1 --plaintext aws:profile=joystream-user \
+    --plaintext wsProviderEndpointURI='wss://rome-rpc-endpoint.joystream.org:9944/' --plaintext isAnonymous=false \
+    --plaintext providerId=<ID> --plaintext keyFile=<PATH> --plaintext publicURL=<DOMAIN> --secret passphrase=<PASSPHRASE>
+   ```
+
+   You can also set the `storage` and the `colossusPort` config parameters if required
+
+1. Stand up the EKS cluster:
+
+   Running `pulumi up -y` will deploy the EKS cluster. Note, provisioning a
+   new EKS cluster takes between 10-15 minutes.
+
+1. Once the stack is up and running, we will modify the Caddy config to get SSL certificate for the load balancer
+
+   Modify the config variable `isLoadBalancerReady`
+
+   ```bash
+   $ pulumi config set isLoadBalancerReady true
+   ```
+
+   Run `pulumi up -y` to update the Caddy config
+
+1. Access the Kubernetes Cluster using `kubectl`
+
+   To access your new Kubernetes cluster using `kubectl`, we need to set up the
+   `kubeconfig` file and download `kubectl`. We can leverage the Pulumi
+   stack output in the CLI, as Pulumi facilitates exporting these objects for us.
+
+   ```bash
+   $ pulumi stack output kubeconfig --show-secrets > kubeconfig
+   $ export KUBECONFIG=$PWD/kubeconfig
+   $ kubectl get nodes
+   ```
+
+   We can also use the stack output to query the cluster for our newly created Deployment:
+
+   ```bash
+   $ kubectl get deployment $(pulumi stack output deploymentName) --namespace=$(pulumi stack output namespaceName)
+   $ kubectl get service $(pulumi stack output serviceName) --namespace=$(pulumi stack output namespaceName)
+   ```
+
+   To get logs
+
+   ```bash
+   $ kubectl config set-context --current --namespace=$(pulumi stack output namespaceName)
+   $ kubectl get pods
+   $ kubectl logs <PODNAME> --all-containers
+   ```
+
+   To run a command on a pod
+
+   ```bash
+   $ kubectl exec ${POD_NAME} -c ${CONTAINER_NAME} -- ${CMD} ${ARG1}
+   ```
+
+   To see complete pulumi stack output
+
+   ```bash
+   $ pulumi stack output
+   ```
+
+   To execute a command
+
+   ```bash
+   $ kubectl exec --stdin --tty <PODNAME> -c colossus -- /bin/bash
+   ```
+
+1. Once you've finished experimenting, tear down your stack's resources by destroying and removing it:
+
+   ```bash
+   $ pulumi destroy --yes
+   $ pulumi stack rm --yes
+   ```

+ 236 - 0
devops/infrastructure/storage-node/index.ts

@@ -0,0 +1,236 @@
+import * as awsx from '@pulumi/awsx'
+import * as aws from '@pulumi/aws'
+import * as eks from '@pulumi/eks'
+import * as k8s from '@pulumi/kubernetes'
+import * as pulumi from '@pulumi/pulumi'
+import { CaddyServiceDeployment } from 'pulumi-common'
+import * as fs from 'fs'
+
+const awsConfig = new pulumi.Config('aws')
+const config = new pulumi.Config()
+
+const wsProviderEndpointURI = config.require('wsProviderEndpointURI')
+const isAnonymous = config.require('isAnonymous') === 'true'
+const lbReady = config.get('isLoadBalancerReady') === 'true'
+const name = 'storage-node'
+const colossusPort = parseInt(config.get('colossusPort') || '3000')
+const storage = parseInt(config.get('storage') || '40')
+
+let additionalParams: string[] | pulumi.Input<string>[] = []
+let volumeMounts: pulumi.Input<pulumi.Input<k8s.types.input.core.v1.VolumeMount>[]> = []
+let volumes: pulumi.Input<pulumi.Input<k8s.types.input.core.v1.Volume>[]> = []
+
+// Create a VPC for our cluster.
+const vpc = new awsx.ec2.Vpc('storage-node-vpc', { numberOfAvailabilityZones: 2, numberOfNatGateways: 1 })
+
+// Create an EKS cluster with the default configuration.
+const cluster = new eks.Cluster('eksctl-storage-node', {
+  vpcId: vpc.id,
+  subnetIds: vpc.publicSubnetIds,
+  instanceType: 't2.medium',
+  providerCredentialOpts: {
+    profileName: awsConfig.get('profile'),
+  },
+})
+
+// Export the cluster's kubeconfig.
+export const kubeconfig = cluster.kubeconfig
+
+// Create a repository
+const repo = new awsx.ecr.Repository('colossus-image')
+
+// Build an image and publish it to our ECR repository.
+export const colossusImage = repo.buildAndPushImage({
+  dockerfile: '../../../colossus.Dockerfile',
+  context: '../../../',
+})
+
+const resourceOptions = { provider: cluster.provider }
+
+// Create a Kubernetes Namespace
+const ns = new k8s.core.v1.Namespace(name, {}, resourceOptions)
+
+// Export the Namespace name
+export const namespaceName = ns.metadata.name
+
+const appLabels = { appClass: name }
+
+const pvc = new k8s.core.v1.PersistentVolumeClaim(
+  `${name}-pvc`,
+  {
+    metadata: {
+      labels: appLabels,
+      namespace: namespaceName,
+      name: `${name}-pvc`,
+    },
+    spec: {
+      accessModes: ['ReadWriteOnce'],
+      resources: {
+        requests: {
+          storage: `${storage}Gi`,
+        },
+      },
+    },
+  },
+  resourceOptions
+)
+
+volumes.push({
+  name: 'ipfs-data',
+  persistentVolumeClaim: {
+    claimName: `${name}-pvc`,
+  },
+})
+
+const caddyEndpoints = [
+  ` {
+    reverse_proxy storage-node:${colossusPort}
+}`,
+]
+
+const caddy = new CaddyServiceDeployment(
+  'caddy-proxy',
+  { lbReady, namespaceName: namespaceName, caddyEndpoints },
+  resourceOptions
+)
+
+export const endpoint1 = caddy.primaryEndpoint
+export const endpoint2 = caddy.secondaryEndpoint
+
+export let appLink: pulumi.Output<string>
+
+if (lbReady) {
+  appLink = pulumi.interpolate`https://${endpoint1}`
+
+  if (!isAnonymous) {
+    const remoteKeyFilePath = '/joystream/key-file.json'
+    const providerId = config.require('providerId')
+    const keyFile = config.require('keyFile')
+    const publicUrl = config.get('publicURL') ? config.get('publicURL')! : appLink
+
+    const keyConfig = new k8s.core.v1.ConfigMap('key-config', {
+      metadata: { namespace: namespaceName, labels: appLabels },
+      data: { 'fileData': fs.readFileSync(keyFile).toString() },
+    })
+    const keyConfigName = keyConfig.metadata.apply((m) => m.name)
+
+    additionalParams = ['--provider-id', providerId, '--key-file', remoteKeyFilePath, '--public-url', publicUrl]
+
+    volumeMounts.push({
+      mountPath: remoteKeyFilePath,
+      name: 'keyfile-volume',
+      subPath: 'fileData',
+    })
+
+    volumes.push({
+      name: 'keyfile-volume',
+      configMap: {
+        name: keyConfigName,
+      },
+    })
+
+    const passphrase = config.get('passphrase')
+    if (passphrase) {
+      additionalParams.push('--passphrase', passphrase)
+    }
+  }
+}
+
+if (isAnonymous) {
+  additionalParams.push('--anonymous')
+}
+
+// Create a Deployment
+const deployment = new k8s.apps.v1.Deployment(
+  name,
+  {
+    metadata: {
+      namespace: namespaceName,
+      labels: appLabels,
+    },
+    spec: {
+      replicas: 1,
+      selector: { matchLabels: appLabels },
+      template: {
+        metadata: {
+          labels: appLabels,
+        },
+        spec: {
+          hostname: 'ipfs',
+          containers: [
+            {
+              name: 'ipfs',
+              image: 'ipfs/go-ipfs:latest',
+              ports: [{ containerPort: 5001 }, { containerPort: 8080 }],
+              command: ['/bin/sh', '-c'],
+              args: [
+                'set -e; \
+                /usr/local/bin/start_ipfs config profile apply lowpower; \
+                /usr/local/bin/start_ipfs config --json Gateway.PublicGateways \'{"localhost": null }\'; \
+                /usr/local/bin/start_ipfs config Datastore.StorageMax 200GB; \
+                /sbin/tini -- /usr/local/bin/start_ipfs daemon --migrate=true',
+              ],
+              volumeMounts: [
+                {
+                  name: 'ipfs-data',
+                  mountPath: '/data/ipfs',
+                },
+              ],
+            },
+            {
+              name: 'colossus',
+              image: colossusImage,
+              env: [
+                {
+                  name: 'WS_PROVIDER_ENDPOINT_URI',
+                  // example 'wss://18.209.241.63.nip.io/'
+                  value: wsProviderEndpointURI,
+                },
+                {
+                  name: 'DEBUG',
+                  value: 'joystream:*',
+                },
+              ],
+              volumeMounts,
+              command: [
+                'yarn',
+                'colossus',
+                '--ws-provider',
+                wsProviderEndpointURI,
+                '--ipfs-host',
+                'ipfs',
+                ...additionalParams,
+              ],
+              ports: [{ containerPort: colossusPort }],
+            },
+          ],
+          volumes,
+        },
+      },
+    },
+  },
+  resourceOptions
+)
+
+// Create a LoadBalancer Service for the Deployment
+const service = new k8s.core.v1.Service(
+  name,
+  {
+    metadata: {
+      labels: appLabels,
+      namespace: namespaceName,
+      name: 'storage-node',
+    },
+    spec: {
+      ports: [{ name: 'port-1', port: colossusPort }],
+      selector: appLabels,
+    },
+  },
+  resourceOptions
+)
+
+// Export the Service name
+export const serviceName = service.metadata.name
+
+// Export the Deployment name
+export const deploymentName = deployment.metadata.name

+ 14 - 0
devops/infrastructure/storage-node/package.json

@@ -0,0 +1,14 @@
+{
+  "name": "eks-cluster",
+  "devDependencies": {
+    "@types/node": "^10.0.0"
+  },
+  "dependencies": {
+    "@pulumi/aws": "^4.0.0",
+    "@pulumi/awsx": "^0.30.0",
+    "@pulumi/eks": "^0.31.0",
+    "@pulumi/kubernetes": "^3.0.0",
+    "@pulumi/pulumi": "^3.0.0",
+    "pulumi-common": "file:../pulumi-common"
+  }
+}

+ 18 - 0
devops/infrastructure/storage-node/tsconfig.json

@@ -0,0 +1,18 @@
+{
+    "compilerOptions": {
+        "strict": true,
+        "outDir": "bin",
+        "target": "es2016",
+        "module": "commonjs",
+        "moduleResolution": "node",
+        "sourceMap": true,
+        "experimentalDecorators": true,
+        "pretty": true,
+        "noFallthroughCasesInSwitch": true,
+        "noImplicitReturns": true,
+        "forceConsistentCasingInFileNames": true
+    },
+    "files": [
+        "index.ts"
+    ]
+}

+ 50 - 0
joystream-node-armv7.Dockerfile

@@ -0,0 +1,50 @@
+FROM rust:1.52.1-buster AS rust
+RUN rustup self update
+RUN rustup install nightly-2021-03-24 --force
+RUN rustup default nightly-2021-03-24
+RUN rustup target add wasm32-unknown-unknown --toolchain nightly-2021-03-24
+RUN rustup component add --toolchain nightly-2021-03-24 clippy
+RUN apt-get update && \
+  apt-get install -y curl git gcc xz-utils sudo pkg-config unzip clang llvm libc6-dev
+
+FROM rust AS builder
+LABEL description="Compiles all workspace artifacts"
+WORKDIR /joystream
+COPY . /joystream
+
+# Build all cargo crates
+# Ensure our tests and linter pass before actual build
+ENV WASM_BUILD_TOOLCHAIN=nightly-2021-03-24
+RUN apt-get install -y libprotobuf-dev protobuf-compiler
+RUN BUILD_DUMMY_WASM_BINARY=1 cargo clippy --release --all -- -D warnings && \
+    cargo test --release --all && \
+    cargo build --target armv7-unknown-linux-gnueabihf --release
+
+FROM ubuntu:21.04
+LABEL description="Joystream node"
+WORKDIR /joystream
+COPY --from=builder /joystream/target/armv7-unknown-linux-gnueabihf/release/joystream-node /joystream/node
+COPY --from=builder /joystream/target/armv7-unknown-linux-gnueabihf/release/wbuild/joystream-node-runtime/joystream_node_runtime.compact.wasm /joystream/runtime.compact.wasm
+COPY --from=builder /joystream/target/armv7-unknown-linux-gnueabihf/release/chain-spec-builder /joystream/chain-spec-builder
+
+# confirm it works
+RUN /joystream/node --version
+
+# https://manpages.debian.org/stretch/coreutils/b2sum.1.en.html
+# RUN apt-get install coreutils
+# print the blake2 256 hash of the wasm blob
+RUN b2sum -l 256 /joystream/runtime.compact.wasm
+# print the blake2 512 hash of the wasm blob
+RUN b2sum -l 512 /joystream/runtime.compact.wasm
+
+EXPOSE 30333 9933 9944
+
+# Use these volumes to persist chain state and keystore, eg.:
+# --base-path /data
+# optionally separate keystore (otherwise it will be stored in the base path)
+# --keystore-path /keystore
+# if base-path isn't specified, chain state is stored inside container in ~/.local/share/joystream-node/
+# which is not ideal
+VOLUME ["/data", "/keystore"]
+
+ENTRYPOINT ["/joystream/node"]

+ 1 - 1
node/Cargo.toml

@@ -3,7 +3,7 @@ authors = ['Joystream contributors']
 build = 'build.rs'
 edition = '2018'
 name = 'joystream-node'
-version = '5.5.0'
+version = '5.6.0'
 default-run = "joystream-node"
 
 [[bin]]

+ 1 - 0
package.json

@@ -37,6 +37,7 @@
     "@polkadot/util": "^6.0.5",
     "@polkadot/util-crypto": "^6.0.5",
     "@polkadot/wasm-crypto": "^4.0.2",
+    "warthog": "https://github.com/Joystream/warthog/releases/download/v2.37.2-sumer/joystream-warthog-v2.37.2-sumer.tgz",
     "babel-core": "^7.0.0-bridge.0",
     "typescript": "^3.9.7",
     "bn.js": "^5.1.2",

+ 2 - 0
pioneer/packages/apps/src/SideBar/index.tsx

@@ -14,6 +14,7 @@ import NetworkModal from '../modals/Network';
 import { useTranslation } from '../translate';
 import ChainInfo from './ChainInfo';
 import Item from './Item';
+import SidebarBanner from '../SidebarBanner';
 
 interface Props {
   className?: string;
@@ -100,6 +101,7 @@ function SideBar ({ className = '', collapse, handleResize, isCollapsed, isMenuO
                 )
             ))}
             <Menu.Divider hidden />
+            <SidebarBanner isSidebarCollapsed={isCollapsed}/>
           </div>
           <div className={`apps--SideBar-collapse ${isCollapsed ? 'collapsed' : 'expanded'}`}>
             <Button

+ 325 - 0
pioneer/packages/apps/src/SidebarBanner.tsx

@@ -0,0 +1,325 @@
+import React, { useState, useEffect } from 'react';
+import usePromise from '@polkadot/joy-utils/react/hooks/usePromise';
+import styled from 'styled-components';
+import { Segment, Loader, Button } from 'semantic-ui-react';
+
+const COUNTER_BORDER_RADIUS_VALUE = 2;
+
+const BannerContainer = styled.div<{ isCollapsed?: boolean }>`
+  ${({ isCollapsed }) => isCollapsed ? `
+    min-height: 222px;
+    max-height: 222px;
+  ` : `
+    min-height: 322px;
+    max-height: 322px;
+    padding: 16px;
+  `}
+  width: 100%;
+  display: flex;
+  flex-direction: column;
+  justify-content: center;
+  align-items: center;
+  background-color: #4038FF;
+`;
+
+const BannerTitle = styled.h1`
+  padding-right: 1px;
+  font-family: Lato;
+  font-size: 16px;
+  font-weight: 800;
+  line-height: 20px;
+  letter-spacing: 0em;
+  color: white;
+`;
+
+const BannerSubtitle = styled.h2`
+  margin-top: 16px;
+  font-family: Lato;
+  font-size: 14px;
+  font-weight: 400;
+  line-height: 18px;
+  letter-spacing: 0em;
+  color: #E0E1FF;
+`;
+
+const BannerLink = styled.a`
+  margin-top: 8px;
+  font-size: 12px;
+  font-weight: 600;
+  line-height: 16px;
+  letter-spacing: 0em;
+  text-align: center;
+  text-decoration: underline;
+  color: #B4BBFF !important;
+`;
+
+const BannerButton = styled(Button)`
+  width: 100% !important;
+  margin-top: 8px !important;
+`;
+
+const ProgressContainer = styled.div<{ isCollapsed ?: boolean }>`
+  width: 100%;
+  ${({ isCollapsed }) => isCollapsed ? `
+    margin-top: 3px;
+  ` : `
+    margin-top: 8px;
+  `}
+`;
+
+const CounterContainer = styled.div<{ isCollapsed ?: boolean }>`
+  width: 100%;
+  ${({ isCollapsed }) => isCollapsed ? `
+    height: 120px;
+    flex-direction: column;
+  ` : `
+    height: 64px;
+  `}
+  padding: 8px;
+  display: flex;
+  align-items: center;
+  justify-content: ${({ children }) => children && children > 1 ? 'space-between' : 'center'};
+  background-color: #261EE4;
+  border-top-left-radius: ${COUNTER_BORDER_RADIUS_VALUE}px;
+  border-top-right-radius: ${COUNTER_BORDER_RADIUS_VALUE}px;
+`;
+
+const CounterItem = styled.div<{ isCollapsed ?: boolean }>`
+  ${({ isCollapsed }) => isCollapsed ? `
+    width: 43px;
+  ` : `
+    width: 56px;
+  `}
+  height: 48px;
+  display: flex;
+  flex-direction: column;
+  align-items: center;
+  justify-content: space-between;
+`;
+
+const CounterItemNumber = styled.p`
+  margin: 0;
+  font-size: 32px;
+  font-weight: 700;
+  line-height: 32px;
+  letter-spacing: 0em;
+  color: white;
+`;
+
+const CounterItemText = styled.p`
+  margin: 0;
+  font-size: 10px;
+  font-weight: 600;
+  line-height: 16px;
+  letter-spacing: 0em;
+  color: white;
+`;
+
+const Progress = styled.div<{ isCollapsed?: boolean }>`
+  width: 100%;
+  height: 6px;
+  background-color: #5252FF;
+  ${({ isCollapsed }) => !isCollapsed && `
+    border-bottom-left-radius: ${COUNTER_BORDER_RADIUS_VALUE}px;
+    border-bottom-right-radius: ${COUNTER_BORDER_RADIUS_VALUE}px;
+  `}
+`;
+
+const ProgressBar = styled.div<{ isCollapsed?: boolean }>`
+  width: 0%;
+  height: 100%;
+  background-color: white;
+  ${({ isCollapsed }) => !isCollapsed && `
+    border-bottom-left-radius: ${COUNTER_BORDER_RADIUS_VALUE}px;
+    border-bottom-right-radius: ${COUNTER_BORDER_RADIUS_VALUE}px;
+  `}
+`;
+
+const ErrorText = styled.h1`
+  font-size: 14px;
+  letter-spacing: 0em;
+  font-weight: 600;
+  color: white;
+`;
+
+const DatesContainer = styled.div`
+  width: 100%;
+  display: flex;
+  justify-content: space-between;
+  margin-top: 8px;
+`;
+
+const DateText = styled.p<{ isCollapsed?: boolean }>`
+  font-size: 12px;
+  line-height: 16px;
+  letter-spacing: 0em;
+  color: #E0E1FF;
+  ${({ isCollapsed }) => isCollapsed && `
+    margin-top: 8px;
+  `}
+`;
+
+const StyledLoader = styled(Loader)`
+  ::before {
+    border-color: rgba(255,255,255,.15) !important;
+  }
+
+  ::after {
+    border-color: white transparent transparent !important;
+  }
+`;
+
+const FM_DATA_URL = 'https://raw.githubusercontent.com/Joystream/founding-members/main/data/fm-info.json';
+const MILLISECONDS_TO_DAYS = 1000 * 60 * 60 * 24;
+
+type FoundingMembersData = {
+  scoringPeriodsFull: {
+    currentScoringPeriod: {
+      started: string;
+      ends: string;
+    }
+  }
+}
+
+const numberToDateString = (number: number) => {
+  const remainingTime: Array<[number, string]> = [];
+
+  const weeks = Math.floor(number / 7);
+  const days = Math.floor(number - (weeks * 7));
+  const hours = Math.floor((number - ((weeks * 7) + days)) * 24);
+
+  if (weeks) {
+    remainingTime.push([weeks, weeks === 1 ? 'WEEK' : 'WEEKS']);
+
+    if (days) {
+      remainingTime.push([days, days === 1 ? 'DAY' : 'DAYS']);
+    }
+
+    return remainingTime;
+  }
+
+  if (days) {
+    remainingTime.push([days, days === 1 ? 'DAY' : 'DAYS']);
+  }
+
+  if (hours) {
+    remainingTime.push([hours, hours === 1 ? 'HOUR' : 'HOURS']);
+  }
+
+  return remainingTime;
+};
+
+const SidebarBanner = ({ isSidebarCollapsed } : { isSidebarCollapsed: boolean}) => {
+  const [foundingMembersData, foundingMembersDataError] = usePromise<FoundingMembersData | undefined>(
+    () => fetch(FM_DATA_URL).then((res) => res.json().then((data) => data as FoundingMembersData)), undefined, []
+  );
+  const [dates, setDates] = useState<{ started: Date, ends: Date }>();
+  const [progress, setProgress] = useState<number>(0);
+  const [remainingTime, setRemainingTime] = useState<number>();
+
+  useEffect(() => {
+    if (foundingMembersData && !foundingMembersDataError) {
+      const scoringPeriodStartedDate = new Date(foundingMembersData.scoringPeriodsFull.currentScoringPeriod.started);
+      const scoringPeriodEndedDate = new Date(foundingMembersData.scoringPeriodsFull.currentScoringPeriod.ends);
+      const now = new Date();
+
+      // calculate the elapsed time from start of scoring period until now
+      const timeDifferenceBetweenDates = Math.abs(scoringPeriodEndedDate.getTime() - scoringPeriodStartedDate.getTime()) / MILLISECONDS_TO_DAYS;
+      const timePassedUntilNow = Math.abs(now.getTime() - scoringPeriodStartedDate.getTime()) / MILLISECONDS_TO_DAYS;
+      const progressPercentage = (timePassedUntilNow / timeDifferenceBetweenDates) * 100;
+
+      // calculate the amount of days remaining until the end of the scoring period
+      const remainingTime = Math.abs(scoringPeriodEndedDate.getTime() - now.getTime()) / MILLISECONDS_TO_DAYS;
+
+      setRemainingTime(remainingTime);
+
+      setDates({
+        started: scoringPeriodStartedDate,
+        ends: scoringPeriodEndedDate
+      });
+
+      setProgress(progressPercentage > 100 ? 100 : progressPercentage);
+    }
+  }, [foundingMembersData]);
+
+  const Loading = ({ isCollapsed } : { isCollapsed ?: boolean}) => (
+    <Segment>
+      <StyledLoader active size={isCollapsed ? 'small' : 'medium'} />
+    </Segment>
+  );
+
+  const Error = () => (
+    <ErrorText> Error.. </ErrorText>
+  );
+
+  if (isSidebarCollapsed) {
+    return (
+      <BannerContainer isCollapsed={true}>
+        <BannerSubtitle>Scoring period ends in:</BannerSubtitle>
+        <ProgressContainer isCollapsed={true}>
+          <CounterContainer isCollapsed={true}>
+            {remainingTime
+              ? numberToDateString(remainingTime).map(([amountOfTime, timePeriodString], index) => (
+                <CounterItem key={`${index}-${amountOfTime}-${timePeriodString}`}>
+                  <CounterItemNumber>{amountOfTime}</CounterItemNumber>
+                  <CounterItemText>{timePeriodString}</CounterItemText>
+                </CounterItem>
+              ))
+              : <Loading isCollapsed={true}/>
+            }
+            {!remainingTime && foundingMembersDataError ? <Error /> : null}
+          </CounterContainer>
+          <Progress isCollapsed={true}>
+            <ProgressBar isCollapsed={true} style={{ width: `${progress}%` }}/>
+          </Progress>
+        </ProgressContainer>
+        <DateText isCollapsed={true} >{dates?.ends.toLocaleString('default', { month: 'short' })} {dates?.ends.getDate()}</DateText>
+      </BannerContainer>
+    );
+  }
+
+  return (
+    <BannerContainer>
+      <BannerTitle>Report your activity to earn FM points</BannerTitle>
+      <BannerSubtitle>Current scoring period ends in:</BannerSubtitle>
+      <ProgressContainer>
+        <CounterContainer>
+          {remainingTime
+            ? numberToDateString(remainingTime).map(([amountOfTime, timePeriodString], index) => (
+              <CounterItem key={`${index}-${amountOfTime}-${timePeriodString}`}>
+                <CounterItemNumber>{amountOfTime}</CounterItemNumber>
+                <CounterItemText>{timePeriodString}</CounterItemText>
+              </CounterItem>
+            ))
+            : <Loading />
+          }
+          {!remainingTime && foundingMembersDataError ? <Error /> : null}
+        </CounterContainer>
+        <Progress>
+          <ProgressBar style={{ width: `${progress}%` }}/>
+        </Progress>
+        <DatesContainer>
+          <DateText>{dates?.started.toLocaleString('default', { month: 'short' })} {dates?.started.getDate()}</DateText>
+          <DateText>{dates?.ends.toLocaleString('default', { month: 'short' })} {dates?.ends.getDate()}</DateText>
+        </DatesContainer>
+      </ProgressContainer>
+      <BannerButton
+        color='black'
+        href='https://www.joystream.org/founding-members/form/'
+        target='_blank'
+        rel='noopener noreferrer'
+      >
+        Report now
+      </BannerButton>
+      <BannerLink
+        href='https://github.com/Joystream/founding-members/blob/main/SUBMISSION-GUIDELINES.md'
+        target='_blank'
+        rel='noopener noreferrer'
+      >
+        Learn more...
+      </BannerLink>
+    </BannerContainer>
+  );
+};
+
+export default SidebarBanner;

+ 2 - 0
pioneer/packages/joy-election/src/index.tsx

@@ -22,6 +22,7 @@ import Reveals from './Reveals';
 import { queryToProp } from '@polkadot/joy-utils/functions/misc';
 import { Seat } from '@joystream/types/council';
 import { ApiProps } from '@polkadot/react-api/types';
+import FMReminderBanner from '@polkadot/joy-utils/react/components/FMReminderBanner';
 
 const ElectionMain = styled.main`${style}`;
 
@@ -67,6 +68,7 @@ class App extends React.PureComponent<Props, State> {
 
     return (
       <ElectionMain className='election--App'>
+        <FMReminderBanner contextualTitle='Council'/>
         <header>
           <Tabs basePath={basePath} items={tabs} />
         </header>

+ 13 - 22
pioneer/packages/joy-forum/src/ForumRoot.tsx

@@ -1,7 +1,6 @@
 import React, { useState, useEffect } from 'react';
 import { Link } from 'react-router-dom';
 import styled from 'styled-components';
-import { orderBy } from 'lodash';
 import BN from 'bn.js';
 
 import { Section } from '@polkadot/joy-utils/react/components';
@@ -63,36 +62,28 @@ const InnerRecentActivity: React.FC<RecentActivityProps> = ({ nextPostId, api })
       if (!nextPostId) return;
 
       const newId = (id: number | BN) => api.createType('PostId', id);
-      const apiCalls: Promise<Post>[] = [];
-      let id = newId(1);
+      let id = newId(nextPostId.toNumber() - 1);
 
-      while (nextPostId.gt(id)) {
-        apiCalls.push(api.query.forum.postById(id) as Promise<Post>);
-        id = newId(id.add(newId(1)));
-      }
+      const threadsIdsLookup = {} as Record<number, boolean>;
+      const recentUniquePosts = new Array<Post>();
 
-      const allPosts = await Promise.all(apiCalls);
-      const sortedPosts = orderBy(
-        allPosts,
-        [(x) => x.id.toNumber()],
-        ['desc']
-      );
+      while (id.gt(newId(0))) {
+        const post = await api.query.forum.postById(id) as Post;
 
-      const threadsIdsLookup = {} as Record<number, boolean>;
-      const postsWithUniqueThreads = sortedPosts.reduce((acc, post) => {
         const threadId = post.thread_id.toNumber();
 
-        if (threadsIdsLookup[threadId]) return acc;
+        id = newId(id.toNumber() - 1);
+
+        if (threadsIdsLookup[threadId]) continue;
 
         threadsIdsLookup[threadId] = true;
 
-        return [
-          ...acc,
-          post
-        ];
-      }, [] as Post[]);
+        recentUniquePosts.push(post);
 
-      const recentUniquePosts = postsWithUniqueThreads.slice(0, RecentActivityPostsCount);
+        if (recentUniquePosts.length === RecentActivityPostsCount) {
+          break;
+        }
+      }
 
       setRecentPosts(recentUniquePosts);
       setLoaded(true);

+ 78 - 14
pioneer/packages/joy-forum/src/ViewThread.tsx

@@ -5,7 +5,7 @@ import styled from 'styled-components';
 import { Table, Button, Label, Icon } from 'semantic-ui-react';
 import BN from 'bn.js';
 
-import { ThreadId } from '@joystream/types/common';
+import { PostId, ThreadId } from '@joystream/types/common';
 import { Category, Thread, Post } from '@joystream/types/forum';
 import { Pagination, RepliesPerPage, CategoryCrumbs, TimeAgoDate, usePagination, useQueryParam, ReplyIdxQueryParam, ReplyEditIdQueryParam } from './utils';
 import { ViewReply } from './ViewReply';
@@ -22,6 +22,7 @@ import MemberPreview from '@polkadot/joy-utils/react/components/MemberByAccountP
 import { formatDate } from '@polkadot/joy-utils/functions/date';
 import { NewReply, EditReply } from './EditReply';
 import { useApi } from '@polkadot/react-hooks';
+import { ApiPromise } from '@polkadot/api/promise';
 
 type ThreadTitleProps = {
   thread: Thread;
@@ -124,6 +125,77 @@ type ViewThreadProps = ApiProps & InnerViewThreadProps & {
   nextPostId?: ThreadId;
 };
 
+const POSTS_THREAD_MAP_CACHE_KEY = 'postsThreadMap';
+
+async function refreshPostsInThreadCache (nextPostId: PostId, api: ApiPromise) {
+  const newId = (id: number | BN) => api.createType('PostId', id);
+  const apiCalls: Promise<Post>[] = [];
+  let idToFetch = newId(1);
+
+  let postsToThread = getPostsIdsInThreadCache();
+  const nextThreadId = await api.query.forum.nextThreadId() as ThreadId;
+
+  if (postsToThread.size >= nextThreadId.toNumber()) { // invalid cache
+    postsToThread = new Map<number, number[]>();
+  }
+
+  if (postsToThread.size > 0) {
+    const lastPostIdInCache = Math.max(...Array.from(postsToThread.values()).flat());
+
+    idToFetch = newId(lastPostIdInCache + 1);
+    const lastPost = await api.query.forum.postById(lastPostIdInCache) as Post;
+
+    if (lastPost) {
+      const postsInThread = postsToThread.get(lastPost.thread_id.toNumber());
+
+      if (!postsInThread || !postsInThread.includes(lastPostIdInCache)) { // cache doesn't match the data in chain
+        postsToThread = new Map<number, number[]>();
+      }
+    } else {
+      postsToThread = new Map<number, number[]>();
+    }
+  }
+
+  const lastPostId = nextPostId.sub(new BN(1));
+
+  while (lastPostId.gte(idToFetch)) {
+    apiCalls.push(api.query.forum.postById(idToFetch) as Promise<Post>);
+    idToFetch = newId(idToFetch.add(newId(1)));
+  }
+
+  const newPosts = await Promise.all<Post>(apiCalls);
+
+  const newPostsToThread = new Map<number, number[]>();
+
+  newPosts.forEach((newPost) => {
+    const previousNewPostIds = newPostsToThread.get(newPost.thread_id.toNumber()) ?? [];
+
+    newPostsToThread.set(newPost.thread_id.toNumber(), [...previousNewPostIds, newPost.id.toNumber()]);
+  });
+
+  if (postsToThread.size > 0) {
+    newPostsToThread.forEach((postIds, threadId) => {
+      const existingPostIds = postsToThread.get(threadId) ?? [];
+
+      postsToThread.set(threadId, [...existingPostIds, ...postIds]);
+    });
+  } else {
+    postsToThread = newPostsToThread;
+  }
+
+  localStorage.setItem(POSTS_THREAD_MAP_CACHE_KEY, JSON.stringify([...postsToThread]));
+}
+
+function getPostsIdsInThreadCache (): Map<number, number[]> {
+  const serializedMap = localStorage.getItem(POSTS_THREAD_MAP_CACHE_KEY);
+
+  if (!serializedMap) {
+    return new Map<number, number[]>();
+  }
+
+  return new Map<number, number[]>(JSON.parse(serializedMap));
+}
+
 function InnerViewThread (props: ViewThreadProps) {
   const [showModerateForm, setShowModerateForm] = useState(false);
   const [displayedPosts, setDisplayedPosts] = useState<Post[]>([]);
@@ -154,20 +226,12 @@ function InnerViewThread (props: ViewThreadProps) {
     const loadPosts = async () => {
       if (!nextPostId || totalPostsInThread === 0 || thread.isEmpty) return;
 
-      const newId = (id: number | BN) => api.createType('PostId', id);
-      const apiCalls: Promise<Post>[] = [];
-      let id = newId(1);
-
-      while (nextPostId.gt(id)) {
-        apiCalls.push(api.query.forum.postById(id) as Promise<Post>);
-        id = newId(id.add(newId(1)));
-      }
+      await refreshPostsInThreadCache(nextPostId, api);
+      const mapPostToThread = getPostsIdsInThreadCache();
+      const postIdsInThread = mapPostToThread.get(thread.id.toNumber()) as number[];
+      const postsInThisThread = await Promise.all(postIdsInThread
+        ? postIdsInThread.map((postId: number) => api.query.forum.postById(postId)) : []) as Post[];
 
-      const allPosts = await Promise.all<Post>(apiCalls);
-      const postsInThisThread = allPosts.filter((item) =>
-        !item.isEmpty &&
-        item.thread_id.eq(thread.id)
-      );
       const sortedPosts = orderBy(
         postsInThisThread,
         [(x) => x.nr_in_thread.toNumber()],

+ 2 - 0
pioneer/packages/joy-forum/src/index.tsx

@@ -16,6 +16,7 @@ import { CategoryList, ViewCategoryById } from './CategoryList';
 import { ViewThreadById } from './ViewThread';
 import { LegacyPagingRedirect } from './LegacyPagingRedirect';
 import ForumRoot from './ForumRoot';
+import FMReminderBanner from '@polkadot/joy-utils/react/components/FMReminderBanner';
 
 const ForumMain = styled.main`${style}`;
 
@@ -29,6 +30,7 @@ class App extends React.PureComponent<Props> {
       <ForumProvider>
         <ForumSudoProvider>
           <ForumMain className='forum--App'>
+            <FMReminderBanner contextualTitle='Forum'/>
             <Switch>
               <Route path={`${basePath}/categories/new`} component={NewCategory} />
               {/* routes for handling legacy format of forum paging within the routing path */}

+ 4 - 2
pioneer/packages/joy-forum/src/style.ts

@@ -1,12 +1,14 @@
 import { css } from 'styled-components';
 
 export default css`
-  padding-top: 1.5rem;
-
   .ui.segment {
     background-color: #fff;
   }
 
+  .ui.breadcrumb {
+    margin-top: 2rem;
+  }
+
   .ForumPageTitle {
     display: flex;
     margin-top: 1rem;

+ 1 - 1
pioneer/packages/joy-proposals/src/Proposal/VotingSection.tsx

@@ -99,7 +99,7 @@ export default function VotingSection ({
 
   return (
     <>
-      <Header as='h3'>Sumbit your vote</Header>
+      <Header as='h3'>Submit your vote</Header>
       <Divider />
       <VoteButtons>
         { VoteKinds.map((voteKind) =>

+ 2 - 0
pioneer/packages/joy-proposals/src/index.tsx

@@ -27,6 +27,7 @@ import { SignalForm,
 import { RouteProps as AppMainRouteProps } from '@polkadot/apps-routing/types';
 import style from './style';
 import { HistoricalProposalFromId } from './Proposal/ProposalFromId';
+import FMReminderBanner from '@polkadot/joy-utils/react/components/FMReminderBanner';
 
 const ProposalsMain = styled.main`${style}`;
 
@@ -58,6 +59,7 @@ function App (props: Props): React.ReactElement<Props> {
 
   return (
     <ProposalsMain className='proposal--App'>
+      <FMReminderBanner contextualTitle='Proposals'/>
       <StyledHeader>
         <Tabs
           basePath={basePath}

+ 2 - 0
pioneer/packages/joy-roles/src/index.tsx

@@ -17,6 +17,7 @@ import { OpportunityController, OpportunityView } from './tabs/Opportunity.contr
 import { OpportunitiesController, OpportunitiesView } from './tabs/Opportunities.controller';
 import { ApplyController, ApplyView } from './flows/apply.controller';
 import { MyRolesController, MyRolesView } from './tabs/MyRoles.controller';
+import FMReminderBanner from '@polkadot/joy-utils/react/components/FMReminderBanner';
 
 import './index.sass';
 
@@ -74,6 +75,7 @@ export const App: React.FC<Props> = (props: Props) => {
 
   return (
     <main className='roles--App'>
+      <FMReminderBanner contextualTitle='Working Groups'/>
       <header>
         <Tabs
           basePath={basePath}

BIN
pioneer/packages/joy-utils/src/assets/coin-illustration.png


BIN
pioneer/packages/joy-utils/src/assets/coin-illustration1.png


+ 138 - 0
pioneer/packages/joy-utils/src/react/components/FMReminderBanner.tsx

@@ -0,0 +1,138 @@
+import React from 'react';
+import styled from 'styled-components';
+import { Button, Icon } from 'semantic-ui-react';
+import CoinIllustration from '../../assets/coin-illustration.png';
+import CoinIllustrationSmall from '../../assets/coin-illustration1.png';
+
+const Container = styled.div`
+  height: auto;
+  margin: 2em 0 0 0;
+  display: flex;
+  justify-content: center;
+  align-items: center;
+`;
+
+const Banner = styled.div`
+  height: 89px;
+  width: 100%;
+  display: flex;
+  align-items: center;
+  justify-content: space-between;
+  padding: 1.5em;
+  background-color: #262626;
+  box-shadow: inset 0px 0px 0px 1px rgba(34, 36, 38, 0.22);
+  border-radius: 4px;
+  background-image: url(${CoinIllustration});
+  background-position: 90% 0;
+  background-repeat: no-repeat;
+  background-size: contain;
+
+  @media(max-width: 1450px){
+    height: 109px;
+  }
+
+  @media(max-width: 1200px){
+    background-image: none;
+  }
+
+  @media(max-width: 800px){
+    flex-direction: column;
+    align-items: initial;
+    height: auto;
+  }
+
+  @media (max-width: 425px){
+    background-image: url(${CoinIllustrationSmall});
+    padding-top: 7em;
+    background-position: left 0;
+    background-size: 200px;
+  }
+`;
+
+const TextContainer = styled.div``;
+
+const BannerTitle = styled.h1`
+  font-family: Lato;
+  font-size: 16px;
+  font-style: normal;
+  font-weight: 900;
+  line-height: 20px;
+  letter-spacing: 0em;
+  color: white;
+  margin-bottom: 7px;
+`;
+
+const BannerText = styled.p`
+  font-size: 14px;
+  font-style: normal;
+  font-weight: 400;
+  line-height: 20px;
+  letter-spacing: 0.0033em;
+  color: #FFFFFFDE;
+
+  a {
+    text-decoration: underline;
+    color: inherit;
+  }
+`;
+
+const BannerButton = styled(Button)`
+  background-color: #4038FF !important;
+  color: white !important;
+  min-width: 155px !important;
+  width: 155px !important;
+  min-height: 36px !important;
+  height: 36px !important;
+
+  .icon {
+    background-color: #3D35F2 !important;
+  }
+
+  margin-left: 260px !important;
+
+  @media(max-width: 1200px){
+    margin-left: 30px !important;
+  }
+
+  @media(max-width: 800px){
+    margin: 20px 0 0 0 !important;
+  }
+`;
+
+interface Props {
+  contextualTitle: 'Council' | 'Working Groups' | 'Proposals' | 'Forum';
+}
+
+const FMReminderBanner = ({ contextualTitle } : Props) => {
+  return (
+    <Container>
+      <Banner>
+        <TextContainer>
+          <BannerTitle>Report your {contextualTitle} activity to earn Founding Members points!</BannerTitle>
+          <BannerText>
+            Only activity that&apos;s been reported is eligible for earning FM points.
+            <a
+              href='https://github.com/Joystream/founding-members/blob/main/SUBMISSION-GUIDELINES.md'
+              target='_blank'
+              rel='noopener noreferrer'
+            >
+              Learn more about reporting your activity...
+            </a>
+          </BannerText>
+        </TextContainer>
+        <BannerButton
+          icon
+          labelPosition='right'
+          href='https://www.joystream.org/founding-members/form/'
+          target='_blank'
+          rel='noopener noreferrer'
+        >
+            Report Now
+          <Icon name='arrow right' />
+        </BannerButton>
+      </Banner>
+    </Container>
+  );
+};
+
+export default FMReminderBanner;

+ 6 - 17
query-node/README.md

@@ -60,22 +60,11 @@ The simplest way to run an indexer locally is to run `docker-compose-indexer.yml
 
 Follow the links for more information about the [indexer](https://github.com/Joystream/hydra/tree/master/packages/hydra-indexer/README.md) service and [indexer-api-gateway](https://github.com/Joystream/hydra/tree/master/packages/hydra-indexer-gateway/README.md).
 
-
-
-# Tmp command order
-TODO: remove after integration tests are finished and query node runs without any issues
-```
-# build everything
-yarn
-yarn build
-
+## GraphQL Playground assets URL
+Query node's user interface, GraphQL Playground, expects to be served at `/graphql`.
+If you are serving the files at a path like `/query/server/graphql` (via an nginx proxy, aliasing, etc.), you will need to provide
+the base URL to the query node server via the `GRAPHQL_PLAYGROUND_CDN` environment variable.
 ```
-
-running the processor:
-```
-cp types/augment/all/defs.json query-node/mappings/lib/generated/types/typedefs.json
-docker-compose up -d db
-yarn workspace query-node-root db:create
-yarn workspace query-node-root db:migrate
-
+# use the following when serving playground at `/query/server/graphql`
+GRAPHQL_PLAYGROUND_CDN="query/server" yarn workspace query-node-root query-node:start:dev 
 ```

+ 273 - 102
query-node/generated/graphql-server/generated/binding.ts

@@ -6,49 +6,49 @@ import { IResolvers } from 'graphql-tools/dist/Interfaces'
 import * as schema from  './schema.graphql'
 
 export interface Query {
-    curatorGroups: <T = Array<CuratorGroup>>(args: { offset?: Int | null, limit?: Int | null, where?: CuratorGroupWhereInput | null, orderBy?: CuratorGroupOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    curatorGroups: <T = Array<CuratorGroup>>(args: { offset?: Int | null, limit?: Int | null, where?: CuratorGroupWhereInput | null, orderBy?: Array<CuratorGroupOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     curatorGroupByUniqueInput: <T = CuratorGroup | null>(args: { where: CuratorGroupWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
-    curatorGroupsConnection: <T = CuratorGroupConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: CuratorGroupWhereInput | null, orderBy?: CuratorGroupOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
-    dataObjects: <T = Array<DataObject>>(args: { offset?: Int | null, limit?: Int | null, where?: DataObjectWhereInput | null, orderBy?: DataObjectOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    curatorGroupsConnection: <T = CuratorGroupConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: CuratorGroupWhereInput | null, orderBy?: Array<CuratorGroupOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    dataObjects: <T = Array<DataObject>>(args: { offset?: Int | null, limit?: Int | null, where?: DataObjectWhereInput | null, orderBy?: Array<DataObjectOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     dataObjectByUniqueInput: <T = DataObject | null>(args: { where: DataObjectWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
-    dataObjectsConnection: <T = DataObjectConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: DataObjectWhereInput | null, orderBy?: DataObjectOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
-    featuredVideos: <T = Array<FeaturedVideo>>(args: { offset?: Int | null, limit?: Int | null, where?: FeaturedVideoWhereInput | null, orderBy?: FeaturedVideoOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
-    featuredVideoByUniqueInput: <T = FeaturedVideo | null>(args: { where: FeaturedVideoWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
-    featuredVideosConnection: <T = FeaturedVideoConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: FeaturedVideoWhereInput | null, orderBy?: FeaturedVideoOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
-    channelCategories: <T = Array<ChannelCategory>>(args: { offset?: Int | null, limit?: Int | null, where?: ChannelCategoryWhereInput | null, orderBy?: ChannelCategoryOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    dataObjectsConnection: <T = DataObjectConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: DataObjectWhereInput | null, orderBy?: Array<DataObjectOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    channelCategories: <T = Array<ChannelCategory>>(args: { offset?: Int | null, limit?: Int | null, where?: ChannelCategoryWhereInput | null, orderBy?: Array<ChannelCategoryOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     channelCategoryByUniqueInput: <T = ChannelCategory | null>(args: { where: ChannelCategoryWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
-    channelCategoriesConnection: <T = ChannelCategoryConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: ChannelCategoryWhereInput | null, orderBy?: ChannelCategoryOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
-    channels: <T = Array<Channel>>(args: { offset?: Int | null, limit?: Int | null, where?: ChannelWhereInput | null, orderBy?: ChannelOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    channelCategoriesConnection: <T = ChannelCategoryConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: ChannelCategoryWhereInput | null, orderBy?: Array<ChannelCategoryOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    channels: <T = Array<Channel>>(args: { offset?: Int | null, limit?: Int | null, where?: ChannelWhereInput | null, orderBy?: Array<ChannelOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     channelByUniqueInput: <T = Channel | null>(args: { where: ChannelWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
-    channelsConnection: <T = ChannelConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: ChannelWhereInput | null, orderBy?: ChannelOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
-    languages: <T = Array<Language>>(args: { offset?: Int | null, limit?: Int | null, where?: LanguageWhereInput | null, orderBy?: LanguageOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    channelsConnection: <T = ChannelConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: ChannelWhereInput | null, orderBy?: Array<ChannelOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    languages: <T = Array<Language>>(args: { offset?: Int | null, limit?: Int | null, where?: LanguageWhereInput | null, orderBy?: Array<LanguageOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     languageByUniqueInput: <T = Language | null>(args: { where: LanguageWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
-    languagesConnection: <T = LanguageConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: LanguageWhereInput | null, orderBy?: LanguageOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
-    licenses: <T = Array<License>>(args: { offset?: Int | null, limit?: Int | null, where?: LicenseWhereInput | null, orderBy?: LicenseOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    languagesConnection: <T = LanguageConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: LanguageWhereInput | null, orderBy?: Array<LanguageOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    licenses: <T = Array<License>>(args: { offset?: Int | null, limit?: Int | null, where?: LicenseWhereInput | null, orderBy?: Array<LicenseOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     licenseByUniqueInput: <T = License | null>(args: { where: LicenseWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
-    licensesConnection: <T = LicenseConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: LicenseWhereInput | null, orderBy?: LicenseOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
-    memberships: <T = Array<Membership>>(args: { offset?: Int | null, limit?: Int | null, where?: MembershipWhereInput | null, orderBy?: MembershipOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    licensesConnection: <T = LicenseConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: LicenseWhereInput | null, orderBy?: Array<LicenseOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    memberships: <T = Array<Membership>>(args: { offset?: Int | null, limit?: Int | null, where?: MembershipWhereInput | null, orderBy?: Array<MembershipOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     membershipByUniqueInput: <T = Membership | null>(args: { where: MembershipWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
-    membershipsConnection: <T = MembershipConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: MembershipWhereInput | null, orderBy?: MembershipOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    membershipsConnection: <T = MembershipConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: MembershipWhereInput | null, orderBy?: Array<MembershipOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    nextEntityIds: <T = Array<NextEntityId>>(args: { offset?: Int | null, limit?: Int | null, where?: NextEntityIdWhereInput | null, orderBy?: Array<NextEntityIdOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    nextEntityIdByUniqueInput: <T = NextEntityId | null>(args: { where: NextEntityIdWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
+    nextEntityIdsConnection: <T = NextEntityIdConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: NextEntityIdWhereInput | null, orderBy?: Array<NextEntityIdOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     channelCategoriesByName: <T = Array<ChannelCategoriesByNameFTSOutput>>(args: { whereChannelCategory?: ChannelCategoryWhereInput | null, skip?: Int | null, limit?: Int | null, text: String }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     membersByHandle: <T = Array<MembersByHandleFTSOutput>>(args: { whereMembership?: MembershipWhereInput | null, skip?: Int | null, limit?: Int | null, text: String }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     search: <T = Array<SearchFTSOutput>>(args: { whereVideo?: VideoWhereInput | null, whereChannel?: ChannelWhereInput | null, skip?: Int | null, limit?: Int | null, text: String }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     videoCategoriesByName: <T = Array<VideoCategoriesByNameFTSOutput>>(args: { whereVideoCategory?: VideoCategoryWhereInput | null, skip?: Int | null, limit?: Int | null, text: String }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
-    videoCategories: <T = Array<VideoCategory>>(args: { offset?: Int | null, limit?: Int | null, where?: VideoCategoryWhereInput | null, orderBy?: VideoCategoryOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    videoCategories: <T = Array<VideoCategory>>(args: { offset?: Int | null, limit?: Int | null, where?: VideoCategoryWhereInput | null, orderBy?: Array<VideoCategoryOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     videoCategoryByUniqueInput: <T = VideoCategory | null>(args: { where: VideoCategoryWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
-    videoCategoriesConnection: <T = VideoCategoryConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: VideoCategoryWhereInput | null, orderBy?: VideoCategoryOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
-    videoMediaEncodings: <T = Array<VideoMediaEncoding>>(args: { offset?: Int | null, limit?: Int | null, where?: VideoMediaEncodingWhereInput | null, orderBy?: VideoMediaEncodingOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    videoCategoriesConnection: <T = VideoCategoryConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: VideoCategoryWhereInput | null, orderBy?: Array<VideoCategoryOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    videoMediaEncodings: <T = Array<VideoMediaEncoding>>(args: { offset?: Int | null, limit?: Int | null, where?: VideoMediaEncodingWhereInput | null, orderBy?: Array<VideoMediaEncodingOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     videoMediaEncodingByUniqueInput: <T = VideoMediaEncoding | null>(args: { where: VideoMediaEncodingWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
-    videoMediaEncodingsConnection: <T = VideoMediaEncodingConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: VideoMediaEncodingWhereInput | null, orderBy?: VideoMediaEncodingOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
-    videoMediaMetadata: <T = Array<VideoMediaMetadata>>(args: { offset?: Int | null, limit?: Int | null, where?: VideoMediaMetadataWhereInput | null, orderBy?: VideoMediaMetadataOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    videoMediaEncodingsConnection: <T = VideoMediaEncodingConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: VideoMediaEncodingWhereInput | null, orderBy?: Array<VideoMediaEncodingOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    videoMediaMetadata: <T = Array<VideoMediaMetadata>>(args: { offset?: Int | null, limit?: Int | null, where?: VideoMediaMetadataWhereInput | null, orderBy?: Array<VideoMediaMetadataOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     videoMediaMetadataByUniqueInput: <T = VideoMediaMetadata | null>(args: { where: VideoMediaMetadataWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
-    videoMediaMetadataConnection: <T = VideoMediaMetadataConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: VideoMediaMetadataWhereInput | null, orderBy?: VideoMediaMetadataOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
-    videos: <T = Array<Video>>(args: { offset?: Int | null, limit?: Int | null, where?: VideoWhereInput | null, orderBy?: VideoOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    videoMediaMetadataConnection: <T = VideoMediaMetadataConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: VideoMediaMetadataWhereInput | null, orderBy?: Array<VideoMediaMetadataOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    videos: <T = Array<Video>>(args: { offset?: Int | null, limit?: Int | null, where?: VideoWhereInput | null, orderBy?: Array<VideoOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     videoByUniqueInput: <T = Video | null>(args: { where: VideoWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
-    videosConnection: <T = VideoConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: VideoWhereInput | null, orderBy?: VideoOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
-    workers: <T = Array<Worker>>(args: { offset?: Int | null, limit?: Int | null, where?: WorkerWhereInput | null, orderBy?: WorkerOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    videosConnection: <T = VideoConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: VideoWhereInput | null, orderBy?: Array<VideoOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    workers: <T = Array<Worker>>(args: { offset?: Int | null, limit?: Int | null, where?: WorkerWhereInput | null, orderBy?: Array<WorkerOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     workerByUniqueInput: <T = Worker | null>(args: { where: WorkerWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
-    workersConnection: <T = WorkerConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: WorkerWhereInput | null, orderBy?: WorkerOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> 
+    workersConnection: <T = WorkerConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: WorkerWhereInput | null, orderBy?: Array<WorkerOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> 
   }
 
 export interface Mutation {}
@@ -106,6 +106,8 @@ export type DataObjectOrderByInput =   'createdAt_ASC' |
   'typeId_DESC' |
   'size_ASC' |
   'size_DESC' |
+  'liaison_ASC' |
+  'liaison_DESC' |
   'liaisonId_ASC' |
   'liaisonId_DESC' |
   'liaisonJudgement_ASC' |
@@ -115,15 +117,6 @@ export type DataObjectOrderByInput =   'createdAt_ASC' |
   'joystreamContentId_ASC' |
   'joystreamContentId_DESC'
 
-export type FeaturedVideoOrderByInput =   'createdAt_ASC' |
-  'createdAt_DESC' |
-  'updatedAt_ASC' |
-  'updatedAt_DESC' |
-  'deletedAt_ASC' |
-  'deletedAt_DESC' |
-  'videoId_ASC' |
-  'videoId_DESC'
-
 export type ChannelCategoryOrderByInput =   'createdAt_ASC' |
   'createdAt_DESC' |
   'updatedAt_ASC' |
@@ -141,10 +134,16 @@ export type ChannelOrderByInput =   'createdAt_ASC' |
   'updatedAt_DESC' |
   'deletedAt_ASC' |
   'deletedAt_DESC' |
+  'ownerMember_ASC' |
+  'ownerMember_DESC' |
   'ownerMemberId_ASC' |
   'ownerMemberId_DESC' |
+  'ownerCuratorGroup_ASC' |
+  'ownerCuratorGroup_DESC' |
   'ownerCuratorGroupId_ASC' |
   'ownerCuratorGroupId_DESC' |
+  'category_ASC' |
+  'category_DESC' |
   'categoryId_ASC' |
   'categoryId_DESC' |
   'rewardAccount_ASC' |
@@ -153,10 +152,14 @@ export type ChannelOrderByInput =   'createdAt_ASC' |
   'title_DESC' |
   'description_ASC' |
   'description_DESC' |
+  'coverPhotoDataObject_ASC' |
+  'coverPhotoDataObject_DESC' |
   'coverPhotoDataObjectId_ASC' |
   'coverPhotoDataObjectId_DESC' |
   'coverPhotoAvailability_ASC' |
   'coverPhotoAvailability_DESC' |
+  'avatarPhotoDataObject_ASC' |
+  'avatarPhotoDataObject_DESC' |
   'avatarPhotoDataObjectId_ASC' |
   'avatarPhotoDataObjectId_DESC' |
   'avatarPhotoAvailability_ASC' |
@@ -165,6 +168,8 @@ export type ChannelOrderByInput =   'createdAt_ASC' |
   'isPublic_DESC' |
   'isCensored_ASC' |
   'isCensored_DESC' |
+  'language_ASC' |
+  'language_DESC' |
   'languageId_ASC' |
   'languageId_DESC' |
   'createdInBlock_ASC' |
@@ -224,6 +229,15 @@ export type MembershipOrderByInput =   'createdAt_ASC' |
   'subscription_ASC' |
   'subscription_DESC'
 
+export type NextEntityIdOrderByInput =   'createdAt_ASC' |
+  'createdAt_DESC' |
+  'updatedAt_ASC' |
+  'updatedAt_DESC' |
+  'deletedAt_ASC' |
+  'deletedAt_DESC' |
+  'nextId_ASC' |
+  'nextId_DESC'
+
 export type VideoCategoryOrderByInput =   'createdAt_ASC' |
   'createdAt_DESC' |
   'updatedAt_ASC' |
@@ -254,6 +268,8 @@ export type VideoMediaMetadataOrderByInput =   'createdAt_ASC' |
   'updatedAt_DESC' |
   'deletedAt_ASC' |
   'deletedAt_DESC' |
+  'encoding_ASC' |
+  'encoding_DESC' |
   'encodingId_ASC' |
   'encodingId_DESC' |
   'pixelWidth_ASC' |
@@ -271,8 +287,12 @@ export type VideoOrderByInput =   'createdAt_ASC' |
   'updatedAt_DESC' |
   'deletedAt_ASC' |
   'deletedAt_DESC' |
+  'channel_ASC' |
+  'channel_DESC' |
   'channelId_ASC' |
   'channelId_DESC' |
+  'category_ASC' |
+  'category_DESC' |
   'categoryId_ASC' |
   'categoryId_DESC' |
   'title_ASC' |
@@ -281,10 +301,14 @@ export type VideoOrderByInput =   'createdAt_ASC' |
   'description_DESC' |
   'duration_ASC' |
   'duration_DESC' |
+  'thumbnailPhotoDataObject_ASC' |
+  'thumbnailPhotoDataObject_DESC' |
   'thumbnailPhotoDataObjectId_ASC' |
   'thumbnailPhotoDataObjectId_DESC' |
   'thumbnailPhotoAvailability_ASC' |
   'thumbnailPhotoAvailability_DESC' |
+  'language_ASC' |
+  'language_DESC' |
   'languageId_ASC' |
   'languageId_DESC' |
   'hasMarketing_ASC' |
@@ -297,12 +321,18 @@ export type VideoOrderByInput =   'createdAt_ASC' |
   'isCensored_DESC' |
   'isExplicit_ASC' |
   'isExplicit_DESC' |
+  'license_ASC' |
+  'license_DESC' |
   'licenseId_ASC' |
   'licenseId_DESC' |
+  'mediaDataObject_ASC' |
+  'mediaDataObject_DESC' |
   'mediaDataObjectId_ASC' |
   'mediaDataObjectId_DESC' |
   'mediaAvailability_ASC' |
   'mediaAvailability_DESC' |
+  'mediaMetadata_ASC' |
+  'mediaMetadata_DESC' |
   'mediaMetadataId_ASC' |
   'mediaMetadataId_DESC' |
   'createdInBlock_ASC' |
@@ -387,8 +417,16 @@ export interface CuratorGroupWhereInput {
   deletedAt_gte?: DateTime | null
   deletedById_eq?: ID_Input | null
   deletedById_in?: ID_Output[] | ID_Output | null
+  curatorIds_containsAll?: Int[] | Int | null
+  curatorIds_containsNone?: Int[] | Int | null
+  curatorIds_containsAny?: Int[] | Int | null
   isActive_eq?: Boolean | null
   isActive_in?: Boolean[] | Boolean | null
+  channels_none?: ChannelWhereInput | null
+  channels_some?: ChannelWhereInput | null
+  channels_every?: ChannelWhereInput | null
+  AND?: CuratorGroupWhereInput[] | CuratorGroupWhereInput | null
+  OR?: CuratorGroupWhereInput[] | CuratorGroupWhereInput | null
 }
 
 export interface CuratorGroupWhereUniqueInput {
@@ -400,6 +438,7 @@ export interface DataObjectCreateInput {
   createdInBlock: Float
   typeId: Float
   size: Float
+  liaison?: ID_Input | null
   liaisonId?: ID_Input | null
   liaisonJudgement: LiaisonJudgement
   ipfsContentId: String
@@ -445,6 +484,8 @@ export interface DataObjectOwnerCouncilWhereInput {
   dummy_lt?: Int | null
   dummy_lte?: Int | null
   dummy_in?: Int[] | Int | null
+  AND?: DataObjectOwnerCouncilWhereInput[] | DataObjectOwnerCouncilWhereInput | null
+  OR?: DataObjectOwnerCouncilWhereInput[] | DataObjectOwnerCouncilWhereInput | null
 }
 
 export interface DataObjectOwnerCouncilWhereUniqueInput {
@@ -490,6 +531,8 @@ export interface DataObjectOwnerDaoWhereInput {
   dao_lt?: Int | null
   dao_lte?: Int | null
   dao_in?: Int[] | Int | null
+  AND?: DataObjectOwnerDaoWhereInput[] | DataObjectOwnerDaoWhereInput | null
+  OR?: DataObjectOwnerDaoWhereInput[] | DataObjectOwnerDaoWhereInput | null
 }
 
 export interface DataObjectOwnerDaoWhereUniqueInput {
@@ -543,6 +586,8 @@ export interface DataObjectOwnerChannelWhereInput {
   dummy_lt?: Int | null
   dummy_lte?: Int | null
   dummy_in?: Int[] | Int | null
+  AND?: DataObjectOwnerChannelWhereInput[] | DataObjectOwnerChannelWhereInput | null
+  OR?: DataObjectOwnerChannelWhereInput[] | DataObjectOwnerChannelWhereInput | null
 }
 
 export interface DataObjectOwnerChannelWhereUniqueInput {
@@ -596,6 +641,8 @@ export interface DataObjectOwnerMemberWhereInput {
   dummy_lt?: Int | null
   dummy_lte?: Int | null
   dummy_in?: Int[] | Int | null
+  AND?: DataObjectOwnerMemberWhereInput[] | DataObjectOwnerMemberWhereInput | null
+  OR?: DataObjectOwnerMemberWhereInput[] | DataObjectOwnerMemberWhereInput | null
 }
 
 export interface DataObjectOwnerMemberWhereUniqueInput {
@@ -641,6 +688,8 @@ export interface DataObjectOwnerWorkingGroupWhereInput {
   workingGroup_lt?: Int | null
   workingGroup_lte?: Int | null
   workingGroup_in?: Int[] | Int | null
+  AND?: DataObjectOwnerWorkingGroupWhereInput[] | DataObjectOwnerWorkingGroupWhereInput | null
+  OR?: DataObjectOwnerWorkingGroupWhereInput[] | DataObjectOwnerWorkingGroupWhereInput | null
 }
 
 export interface DataObjectOwnerWorkingGroupWhereUniqueInput {
@@ -652,6 +701,7 @@ export interface DataObjectUpdateInput {
   createdInBlock?: Float | null
   typeId?: Float | null
   size?: Float | null
+  liaison?: ID_Input | null
   liaisonId?: ID_Input | null
   liaisonJudgement?: LiaisonJudgement | null
   ipfsContentId?: String | null
@@ -716,53 +766,27 @@ export interface DataObjectWhereInput {
   joystreamContentId_startsWith?: String | null
   joystreamContentId_endsWith?: String | null
   joystreamContentId_in?: String[] | String | null
+  liaison?: WorkerWhereInput | null
+  channelcoverPhotoDataObject_none?: ChannelWhereInput | null
+  channelcoverPhotoDataObject_some?: ChannelWhereInput | null
+  channelcoverPhotoDataObject_every?: ChannelWhereInput | null
+  channelavatarPhotoDataObject_none?: ChannelWhereInput | null
+  channelavatarPhotoDataObject_some?: ChannelWhereInput | null
+  channelavatarPhotoDataObject_every?: ChannelWhereInput | null
+  videothumbnailPhotoDataObject_none?: VideoMediaMetadataWhereInput | null
+  videothumbnailPhotoDataObject_some?: VideoMediaMetadataWhereInput | null
+  videothumbnailPhotoDataObject_every?: VideoMediaMetadataWhereInput | null
+  videomediaDataObject_none?: VideoMediaMetadataWhereInput | null
+  videomediaDataObject_some?: VideoMediaMetadataWhereInput | null
+  videomediaDataObject_every?: VideoMediaMetadataWhereInput | null
+  AND?: DataObjectWhereInput[] | DataObjectWhereInput | null
+  OR?: DataObjectWhereInput[] | DataObjectWhereInput | null
 }
 
 export interface DataObjectWhereUniqueInput {
   id: ID_Output
 }
 
-export interface FeaturedVideoCreateInput {
-  videoId: ID_Output
-}
-
-export interface FeaturedVideoUpdateInput {
-  videoId?: ID_Input | null
-}
-
-export interface FeaturedVideoWhereInput {
-  id_eq?: ID_Input | null
-  id_in?: ID_Output[] | ID_Output | null
-  createdAt_eq?: DateTime | null
-  createdAt_lt?: DateTime | null
-  createdAt_lte?: DateTime | null
-  createdAt_gt?: DateTime | null
-  createdAt_gte?: DateTime | null
-  createdById_eq?: ID_Input | null
-  createdById_in?: ID_Output[] | ID_Output | null
-  updatedAt_eq?: DateTime | null
-  updatedAt_lt?: DateTime | null
-  updatedAt_lte?: DateTime | null
-  updatedAt_gt?: DateTime | null
-  updatedAt_gte?: DateTime | null
-  updatedById_eq?: ID_Input | null
-  updatedById_in?: ID_Output[] | ID_Output | null
-  deletedAt_all?: Boolean | null
-  deletedAt_eq?: DateTime | null
-  deletedAt_lt?: DateTime | null
-  deletedAt_lte?: DateTime | null
-  deletedAt_gt?: DateTime | null
-  deletedAt_gte?: DateTime | null
-  deletedById_eq?: ID_Input | null
-  deletedById_in?: ID_Output[] | ID_Output | null
-  videoId_eq?: ID_Input | null
-  videoId_in?: ID_Output[] | ID_Output | null
-}
-
-export interface FeaturedVideoWhereUniqueInput {
-  id: ID_Output
-}
-
 export interface ChannelCategoryCreateInput {
   name?: String | null
   createdInBlock: Float
@@ -809,6 +833,11 @@ export interface ChannelCategoryWhereInput {
   createdInBlock_lt?: Int | null
   createdInBlock_lte?: Int | null
   createdInBlock_in?: Int[] | Int | null
+  channels_none?: ChannelWhereInput | null
+  channels_some?: ChannelWhereInput | null
+  channels_every?: ChannelWhereInput | null
+  AND?: ChannelCategoryWhereInput[] | ChannelCategoryWhereInput | null
+  OR?: ChannelCategoryWhereInput[] | ChannelCategoryWhereInput | null
 }
 
 export interface ChannelCategoryWhereUniqueInput {
@@ -816,39 +845,51 @@ export interface ChannelCategoryWhereUniqueInput {
 }
 
 export interface ChannelCreateInput {
+  ownerMember?: ID_Input | null
   ownerMemberId?: ID_Input | null
+  ownerCuratorGroup?: ID_Input | null
   ownerCuratorGroupId?: ID_Input | null
+  category?: ID_Input | null
   categoryId?: ID_Input | null
   rewardAccount?: String | null
   title?: String | null
   description?: String | null
+  coverPhotoDataObject?: ID_Input | null
   coverPhotoDataObjectId?: ID_Input | null
   coverPhotoUrls: Array<String>
   coverPhotoAvailability: AssetAvailability
+  avatarPhotoDataObject?: ID_Input | null
   avatarPhotoDataObjectId?: ID_Input | null
   avatarPhotoUrls: Array<String>
   avatarPhotoAvailability: AssetAvailability
   isPublic?: Boolean | null
   isCensored: Boolean
+  language?: ID_Input | null
   languageId?: ID_Input | null
   createdInBlock: Float
 }
 
 export interface ChannelUpdateInput {
+  ownerMember?: ID_Input | null
   ownerMemberId?: ID_Input | null
+  ownerCuratorGroup?: ID_Input | null
   ownerCuratorGroupId?: ID_Input | null
+  category?: ID_Input | null
   categoryId?: ID_Input | null
   rewardAccount?: String | null
   title?: String | null
   description?: String | null
+  coverPhotoDataObject?: ID_Input | null
   coverPhotoDataObjectId?: ID_Input | null
   coverPhotoUrls?: String[] | String | null
   coverPhotoAvailability?: AssetAvailability | null
+  avatarPhotoDataObject?: ID_Input | null
   avatarPhotoDataObjectId?: ID_Input | null
   avatarPhotoUrls?: String[] | String | null
   avatarPhotoAvailability?: AssetAvailability | null
   isPublic?: Boolean | null
   isCensored?: Boolean | null
+  language?: ID_Input | null
   languageId?: ID_Input | null
   createdInBlock?: Float | null
 }
@@ -901,10 +942,16 @@ export interface ChannelWhereInput {
   description_in?: String[] | String | null
   coverPhotoDataObjectId_eq?: ID_Input | null
   coverPhotoDataObjectId_in?: ID_Output[] | ID_Output | null
+  coverPhotoUrls_containsAll?: String[] | String | null
+  coverPhotoUrls_containsNone?: String[] | String | null
+  coverPhotoUrls_containsAny?: String[] | String | null
   coverPhotoAvailability_eq?: AssetAvailability | null
   coverPhotoAvailability_in?: AssetAvailability[] | AssetAvailability | null
   avatarPhotoDataObjectId_eq?: ID_Input | null
   avatarPhotoDataObjectId_in?: ID_Output[] | ID_Output | null
+  avatarPhotoUrls_containsAll?: String[] | String | null
+  avatarPhotoUrls_containsNone?: String[] | String | null
+  avatarPhotoUrls_containsAny?: String[] | String | null
   avatarPhotoAvailability_eq?: AssetAvailability | null
   avatarPhotoAvailability_in?: AssetAvailability[] | AssetAvailability | null
   isPublic_eq?: Boolean | null
@@ -919,6 +966,17 @@ export interface ChannelWhereInput {
   createdInBlock_lt?: Int | null
   createdInBlock_lte?: Int | null
   createdInBlock_in?: Int[] | Int | null
+  ownerMember?: MembershipWhereInput | null
+  ownerCuratorGroup?: CuratorGroupWhereInput | null
+  category?: ChannelCategoryWhereInput | null
+  coverPhotoDataObject?: DataObjectWhereInput | null
+  avatarPhotoDataObject?: DataObjectWhereInput | null
+  language?: LanguageWhereInput | null
+  videos_none?: VideoWhereInput | null
+  videos_some?: VideoWhereInput | null
+  videos_every?: VideoWhereInput | null
+  AND?: ChannelWhereInput[] | ChannelWhereInput | null
+  OR?: ChannelWhereInput[] | ChannelWhereInput | null
 }
 
 export interface ChannelWhereUniqueInput {
@@ -971,6 +1029,14 @@ export interface LanguageWhereInput {
   createdInBlock_lt?: Int | null
   createdInBlock_lte?: Int | null
   createdInBlock_in?: Int[] | Int | null
+  channellanguage_none?: ChannelWhereInput | null
+  channellanguage_some?: ChannelWhereInput | null
+  channellanguage_every?: ChannelWhereInput | null
+  videolanguage_none?: VideoWhereInput | null
+  videolanguage_some?: VideoWhereInput | null
+  videolanguage_every?: VideoWhereInput | null
+  AND?: LanguageWhereInput[] | LanguageWhereInput | null
+  OR?: LanguageWhereInput[] | LanguageWhereInput | null
 }
 
 export interface LanguageWhereUniqueInput {
@@ -1030,6 +1096,11 @@ export interface LicenseWhereInput {
   customText_startsWith?: String | null
   customText_endsWith?: String | null
   customText_in?: String[] | String | null
+  videolanguage_none?: VideoWhereInput | null
+  videolanguage_some?: VideoWhereInput | null
+  videolanguage_every?: VideoWhereInput | null
+  AND?: LicenseWhereInput[] | LicenseWhereInput | null
+  OR?: LicenseWhereInput[] | LicenseWhereInput | null
 }
 
 export interface LicenseWhereUniqueInput {
@@ -1122,6 +1193,11 @@ export interface MembershipWhereInput {
   subscription_lt?: Int | null
   subscription_lte?: Int | null
   subscription_in?: Int[] | Int | null
+  channels_none?: ChannelWhereInput | null
+  channels_some?: ChannelWhereInput | null
+  channels_every?: ChannelWhereInput | null
+  AND?: MembershipWhereInput[] | MembershipWhereInput | null
+  OR?: MembershipWhereInput[] | MembershipWhereInput | null
 }
 
 export interface MembershipWhereUniqueInput {
@@ -1129,6 +1205,53 @@ export interface MembershipWhereUniqueInput {
   handle?: String | null
 }
 
+export interface NextEntityIdCreateInput {
+  nextId: Float
+}
+
+export interface NextEntityIdUpdateInput {
+  nextId?: Float | null
+}
+
+export interface NextEntityIdWhereInput {
+  id_eq?: ID_Input | null
+  id_in?: ID_Output[] | ID_Output | null
+  createdAt_eq?: DateTime | null
+  createdAt_lt?: DateTime | null
+  createdAt_lte?: DateTime | null
+  createdAt_gt?: DateTime | null
+  createdAt_gte?: DateTime | null
+  createdById_eq?: ID_Input | null
+  createdById_in?: ID_Output[] | ID_Output | null
+  updatedAt_eq?: DateTime | null
+  updatedAt_lt?: DateTime | null
+  updatedAt_lte?: DateTime | null
+  updatedAt_gt?: DateTime | null
+  updatedAt_gte?: DateTime | null
+  updatedById_eq?: ID_Input | null
+  updatedById_in?: ID_Output[] | ID_Output | null
+  deletedAt_all?: Boolean | null
+  deletedAt_eq?: DateTime | null
+  deletedAt_lt?: DateTime | null
+  deletedAt_lte?: DateTime | null
+  deletedAt_gt?: DateTime | null
+  deletedAt_gte?: DateTime | null
+  deletedById_eq?: ID_Input | null
+  deletedById_in?: ID_Output[] | ID_Output | null
+  nextId_eq?: Float | null
+  nextId_gt?: Float | null
+  nextId_gte?: Float | null
+  nextId_lt?: Float | null
+  nextId_lte?: Float | null
+  nextId_in?: Float[] | Float | null
+  AND?: NextEntityIdWhereInput[] | NextEntityIdWhereInput | null
+  OR?: NextEntityIdWhereInput[] | NextEntityIdWhereInput | null
+}
+
+export interface NextEntityIdWhereUniqueInput {
+  id: ID_Output
+}
+
 export interface VideoCategoryCreateInput {
   name?: String | null
   createdInBlock: Float
@@ -1175,6 +1298,11 @@ export interface VideoCategoryWhereInput {
   createdInBlock_lt?: Int | null
   createdInBlock_lte?: Int | null
   createdInBlock_in?: Int[] | Int | null
+  videos_none?: VideoWhereInput | null
+  videos_some?: VideoWhereInput | null
+  videos_every?: VideoWhereInput | null
+  AND?: VideoCategoryWhereInput[] | VideoCategoryWhereInput | null
+  OR?: VideoCategoryWhereInput[] | VideoCategoryWhereInput | null
 }
 
 export interface VideoCategoryWhereUniqueInput {
@@ -1182,24 +1310,31 @@ export interface VideoCategoryWhereUniqueInput {
 }
 
 export interface VideoCreateInput {
+  channel?: ID_Input | null
   channelId?: ID_Input | null
+  category?: ID_Input | null
   categoryId?: ID_Input | null
   title?: String | null
   description?: String | null
   duration?: Float | null
+  thumbnailPhotoDataObject?: ID_Input | null
   thumbnailPhotoDataObjectId?: ID_Input | null
   thumbnailPhotoUrls: Array<String>
   thumbnailPhotoAvailability: AssetAvailability
+  language?: ID_Input | null
   languageId?: ID_Input | null
   hasMarketing?: Boolean | null
   publishedBeforeJoystream?: DateTime | null
   isPublic?: Boolean | null
   isCensored: Boolean
   isExplicit?: Boolean | null
+  license?: ID_Input | null
   licenseId?: ID_Input | null
+  mediaDataObject?: ID_Input | null
   mediaDataObjectId?: ID_Input | null
   mediaUrls: Array<String>
   mediaAvailability: AssetAvailability
+  mediaMetadata?: ID_Input | null
   mediaMetadataId?: ID_Input | null
   createdInBlock: Float
   isFeatured: Boolean
@@ -1257,6 +1392,11 @@ export interface VideoMediaEncodingWhereInput {
   mimeMediaType_startsWith?: String | null
   mimeMediaType_endsWith?: String | null
   mimeMediaType_in?: String[] | String | null
+  videomediametadataencoding_none?: VideoMediaMetadataWhereInput | null
+  videomediametadataencoding_some?: VideoMediaMetadataWhereInput | null
+  videomediametadataencoding_every?: VideoMediaMetadataWhereInput | null
+  AND?: VideoMediaEncodingWhereInput[] | VideoMediaEncodingWhereInput | null
+  OR?: VideoMediaEncodingWhereInput[] | VideoMediaEncodingWhereInput | null
 }
 
 export interface VideoMediaEncodingWhereUniqueInput {
@@ -1264,6 +1404,7 @@ export interface VideoMediaEncodingWhereUniqueInput {
 }
 
 export interface VideoMediaMetadataCreateInput {
+  encoding?: ID_Input | null
   encodingId?: ID_Input | null
   pixelWidth?: Float | null
   pixelHeight?: Float | null
@@ -1272,6 +1413,7 @@ export interface VideoMediaMetadataCreateInput {
 }
 
 export interface VideoMediaMetadataUpdateInput {
+  encoding?: ID_Input | null
   encodingId?: ID_Input | null
   pixelWidth?: Float | null
   pixelHeight?: Float | null
@@ -1330,6 +1472,10 @@ export interface VideoMediaMetadataWhereInput {
   createdInBlock_lt?: Int | null
   createdInBlock_lte?: Int | null
   createdInBlock_in?: Int[] | Int | null
+  encoding?: VideoMediaEncodingWhereInput | null
+  video?: VideoWhereInput | null
+  AND?: VideoMediaMetadataWhereInput[] | VideoMediaMetadataWhereInput | null
+  OR?: VideoMediaMetadataWhereInput[] | VideoMediaMetadataWhereInput | null
 }
 
 export interface VideoMediaMetadataWhereUniqueInput {
@@ -1337,24 +1483,31 @@ export interface VideoMediaMetadataWhereUniqueInput {
 }
 
 export interface VideoUpdateInput {
+  channel?: ID_Input | null
   channelId?: ID_Input | null
+  category?: ID_Input | null
   categoryId?: ID_Input | null
   title?: String | null
   description?: String | null
   duration?: Float | null
+  thumbnailPhotoDataObject?: ID_Input | null
   thumbnailPhotoDataObjectId?: ID_Input | null
   thumbnailPhotoUrls?: String[] | String | null
   thumbnailPhotoAvailability?: AssetAvailability | null
+  language?: ID_Input | null
   languageId?: ID_Input | null
   hasMarketing?: Boolean | null
   publishedBeforeJoystream?: DateTime | null
   isPublic?: Boolean | null
   isCensored?: Boolean | null
   isExplicit?: Boolean | null
+  license?: ID_Input | null
   licenseId?: ID_Input | null
+  mediaDataObject?: ID_Input | null
   mediaDataObjectId?: ID_Input | null
   mediaUrls?: String[] | String | null
   mediaAvailability?: AssetAvailability | null
+  mediaMetadata?: ID_Input | null
   mediaMetadataId?: ID_Input | null
   createdInBlock?: Float | null
   isFeatured?: Boolean | null
@@ -1407,6 +1560,9 @@ export interface VideoWhereInput {
   duration_in?: Int[] | Int | null
   thumbnailPhotoDataObjectId_eq?: ID_Input | null
   thumbnailPhotoDataObjectId_in?: ID_Output[] | ID_Output | null
+  thumbnailPhotoUrls_containsAll?: String[] | String | null
+  thumbnailPhotoUrls_containsNone?: String[] | String | null
+  thumbnailPhotoUrls_containsAny?: String[] | String | null
   thumbnailPhotoAvailability_eq?: AssetAvailability | null
   thumbnailPhotoAvailability_in?: AssetAvailability[] | AssetAvailability | null
   languageId_eq?: ID_Input | null
@@ -1428,6 +1584,9 @@ export interface VideoWhereInput {
   licenseId_in?: ID_Output[] | ID_Output | null
   mediaDataObjectId_eq?: ID_Input | null
   mediaDataObjectId_in?: ID_Output[] | ID_Output | null
+  mediaUrls_containsAll?: String[] | String | null
+  mediaUrls_containsNone?: String[] | String | null
+  mediaUrls_containsAny?: String[] | String | null
   mediaAvailability_eq?: AssetAvailability | null
   mediaAvailability_in?: AssetAvailability[] | AssetAvailability | null
   mediaMetadataId_eq?: ID_Input | null
@@ -1440,6 +1599,15 @@ export interface VideoWhereInput {
   createdInBlock_in?: Int[] | Int | null
   isFeatured_eq?: Boolean | null
   isFeatured_in?: Boolean[] | Boolean | null
+  channel?: ChannelWhereInput | null
+  category?: VideoCategoryWhereInput | null
+  thumbnailPhotoDataObject?: DataObjectWhereInput | null
+  language?: LanguageWhereInput | null
+  license?: LicenseWhereInput | null
+  mediaDataObject?: DataObjectWhereInput | null
+  mediaMetadata?: VideoMediaMetadataWhereInput | null
+  AND?: VideoWhereInput[] | VideoWhereInput | null
+  OR?: VideoWhereInput[] | VideoWhereInput | null
 }
 
 export interface VideoWhereUniqueInput {
@@ -1499,6 +1667,11 @@ export interface WorkerWhereInput {
   metadata_startsWith?: String | null
   metadata_endsWith?: String | null
   metadata_in?: String[] | String | null
+  dataObjects_none?: DataObjectWhereInput | null
+  dataObjects_some?: DataObjectWhereInput | null
+  dataObjects_every?: DataObjectWhereInput | null
+  AND?: WorkerWhereInput[] | WorkerWhereInput | null
+  OR?: WorkerWhereInput[] | WorkerWhereInput | null
 }
 
 export interface WorkerWhereUniqueInput {
@@ -1628,30 +1801,6 @@ export interface DataObjectOwnerWorkingGroup {
   workingGroup: Int
 }
 
-export interface FeaturedVideo extends BaseGraphQLObject {
-  id: ID_Output
-  createdAt: DateTime
-  createdById: String
-  updatedAt?: DateTime | null
-  updatedById?: String | null
-  deletedAt?: DateTime | null
-  deletedById?: String | null
-  version: Int
-  video: Video
-  videoId: String
-}
-
-export interface FeaturedVideoConnection {
-  totalCount: Int
-  edges: Array<FeaturedVideoEdge>
-  pageInfo: PageInfo
-}
-
-export interface FeaturedVideoEdge {
-  node: FeaturedVideo
-  cursor: String
-}
-
 export interface Channel extends BaseGraphQLObject {
   id: ID_Output
   createdAt: DateTime
@@ -1827,6 +1976,29 @@ export interface MembershipEdge {
   cursor: String
 }
 
+export interface NextEntityId extends BaseGraphQLObject {
+  id: ID_Output
+  createdAt: DateTime
+  createdById: String
+  updatedAt?: DateTime | null
+  updatedById?: String | null
+  deletedAt?: DateTime | null
+  deletedById?: String | null
+  version: Int
+  nextId: Float
+}
+
+export interface NextEntityIdConnection {
+  totalCount: Int
+  edges: Array<NextEntityIdEdge>
+  pageInfo: PageInfo
+}
+
+export interface NextEntityIdEdge {
+  node: NextEntityId
+  cursor: String
+}
+
 export interface PageInfo {
   hasNextPage: Boolean
   hasPreviousPage: Boolean
@@ -1889,7 +2061,6 @@ export interface Video extends BaseGraphQLObject {
   mediaMetadataId?: String | null
   createdInBlock: Int
   isFeatured: Boolean
-  featured?: FeaturedVideo | null
 }
 
 export interface VideoCategoriesByNameFTSOutput {

File diff suppressed because it is too large
+ 304 - 392
query-node/generated/graphql-server/generated/classes.ts


+ 277 - 104
query-node/generated/graphql-server/generated/schema.graphql

@@ -136,8 +136,16 @@ input CuratorGroupWhereInput {
   deletedAt_gte: DateTime
   deletedById_eq: ID
   deletedById_in: [ID!]
+  curatorIds_containsAll: [Int!]
+  curatorIds_containsNone: [Int!]
+  curatorIds_containsAny: [Int!]
   isActive_eq: Boolean
   isActive_in: [Boolean!]
+  channels_none: ChannelWhereInput
+  channels_some: ChannelWhereInput
+  channels_every: ChannelWhereInput
+  AND: [CuratorGroupWhereInput!]
+  OR: [CuratorGroupWhereInput!]
 }
 
 input CuratorGroupWhereUniqueInput {
@@ -194,6 +202,7 @@ input DataObjectCreateInput {
   createdInBlock: Float!
   typeId: Float!
   size: Float!
+  liaison: ID
   liaisonId: ID
   liaisonJudgement: LiaisonJudgement!
   ipfsContentId: String!
@@ -218,6 +227,8 @@ enum DataObjectOrderByInput {
   typeId_DESC
   size_ASC
   size_DESC
+  liaison_ASC
+  liaison_DESC
   liaisonId_ASC
   liaisonId_DESC
   liaisonJudgement_ASC
@@ -274,6 +285,8 @@ input DataObjectOwnerCouncilWhereInput {
   dummy_lt: Int
   dummy_lte: Int
   dummy_in: [Int!]
+  AND: [DataObjectOwnerCouncilWhereInput!]
+  OR: [DataObjectOwnerCouncilWhereInput!]
 }
 
 input DataObjectOwnerCouncilWhereUniqueInput {
@@ -324,6 +337,8 @@ input DataObjectOwnerDaoWhereInput {
   dao_lt: Int
   dao_lte: Int
   dao_in: [Int!]
+  AND: [DataObjectOwnerDaoWhereInput!]
+  OR: [DataObjectOwnerDaoWhereInput!]
 }
 
 input DataObjectOwnerDaoWhereUniqueInput {
@@ -385,6 +400,8 @@ input DataObjectOwnerChannelWhereInput {
   dummy_lt: Int
   dummy_lte: Int
   dummy_in: [Int!]
+  AND: [DataObjectOwnerChannelWhereInput!]
+  OR: [DataObjectOwnerChannelWhereInput!]
 }
 
 input DataObjectOwnerChannelWhereUniqueInput {
@@ -446,6 +463,8 @@ input DataObjectOwnerMemberWhereInput {
   dummy_lt: Int
   dummy_lte: Int
   dummy_in: [Int!]
+  AND: [DataObjectOwnerMemberWhereInput!]
+  OR: [DataObjectOwnerMemberWhereInput!]
 }
 
 input DataObjectOwnerMemberWhereUniqueInput {
@@ -496,6 +515,8 @@ input DataObjectOwnerWorkingGroupWhereInput {
   workingGroup_lt: Int
   workingGroup_lte: Int
   workingGroup_in: [Int!]
+  AND: [DataObjectOwnerWorkingGroupWhereInput!]
+  OR: [DataObjectOwnerWorkingGroupWhereInput!]
 }
 
 input DataObjectOwnerWorkingGroupWhereUniqueInput {
@@ -507,6 +528,7 @@ input DataObjectUpdateInput {
   createdInBlock: Float
   typeId: Float
   size: Float
+  liaison: ID
   liaisonId: ID
   liaisonJudgement: LiaisonJudgement
   ipfsContentId: String
@@ -571,6 +593,21 @@ input DataObjectWhereInput {
   joystreamContentId_startsWith: String
   joystreamContentId_endsWith: String
   joystreamContentId_in: [String!]
+  liaison: WorkerWhereInput
+  channelcoverPhotoDataObject_none: ChannelWhereInput
+  channelcoverPhotoDataObject_some: ChannelWhereInput
+  channelcoverPhotoDataObject_every: ChannelWhereInput
+  channelavatarPhotoDataObject_none: ChannelWhereInput
+  channelavatarPhotoDataObject_some: ChannelWhereInput
+  channelavatarPhotoDataObject_every: ChannelWhereInput
+  videothumbnailPhotoDataObject_none: VideoMediaMetadataWhereInput
+  videothumbnailPhotoDataObject_some: VideoMediaMetadataWhereInput
+  videothumbnailPhotoDataObject_every: VideoMediaMetadataWhereInput
+  videomediaDataObject_none: VideoMediaMetadataWhereInput
+  videomediaDataObject_some: VideoMediaMetadataWhereInput
+  videomediaDataObject_every: VideoMediaMetadataWhereInput
+  AND: [DataObjectWhereInput!]
+  OR: [DataObjectWhereInput!]
 }
 
 input DataObjectWhereUniqueInput {
@@ -586,82 +623,6 @@ interface DeleteResponse {
   id: ID!
 }
 
-type FeaturedVideo implements BaseGraphQLObject {
-  id: ID!
-  createdAt: DateTime!
-  createdById: String!
-  updatedAt: DateTime
-  updatedById: String
-  deletedAt: DateTime
-  deletedById: String
-  version: Int!
-  video: Video!
-  videoId: String!
-}
-
-type FeaturedVideoConnection {
-  totalCount: Int!
-  edges: [FeaturedVideoEdge!]!
-  pageInfo: PageInfo!
-}
-
-input FeaturedVideoCreateInput {
-  videoId: ID!
-}
-
-type FeaturedVideoEdge {
-  node: FeaturedVideo!
-  cursor: String!
-}
-
-enum FeaturedVideoOrderByInput {
-  createdAt_ASC
-  createdAt_DESC
-  updatedAt_ASC
-  updatedAt_DESC
-  deletedAt_ASC
-  deletedAt_DESC
-  videoId_ASC
-  videoId_DESC
-}
-
-input FeaturedVideoUpdateInput {
-  videoId: ID
-}
-
-input FeaturedVideoWhereInput {
-  id_eq: ID
-  id_in: [ID!]
-  createdAt_eq: DateTime
-  createdAt_lt: DateTime
-  createdAt_lte: DateTime
-  createdAt_gt: DateTime
-  createdAt_gte: DateTime
-  createdById_eq: ID
-  createdById_in: [ID!]
-  updatedAt_eq: DateTime
-  updatedAt_lt: DateTime
-  updatedAt_lte: DateTime
-  updatedAt_gt: DateTime
-  updatedAt_gte: DateTime
-  updatedById_eq: ID
-  updatedById_in: [ID!]
-  deletedAt_all: Boolean
-  deletedAt_eq: DateTime
-  deletedAt_lt: DateTime
-  deletedAt_lte: DateTime
-  deletedAt_gt: DateTime
-  deletedAt_gte: DateTime
-  deletedById_eq: ID
-  deletedById_in: [ID!]
-  videoId_eq: ID
-  videoId_in: [ID!]
-}
-
-input FeaturedVideoWhereUniqueInput {
-  id: ID!
-}
-
 type Channel implements BaseGraphQLObject {
   id: ID!
   createdAt: DateTime!
@@ -810,6 +771,11 @@ input ChannelCategoryWhereInput {
   createdInBlock_lt: Int
   createdInBlock_lte: Int
   createdInBlock_in: [Int!]
+  channels_none: ChannelWhereInput
+  channels_some: ChannelWhereInput
+  channels_every: ChannelWhereInput
+  AND: [ChannelCategoryWhereInput!]
+  OR: [ChannelCategoryWhereInput!]
 }
 
 input ChannelCategoryWhereUniqueInput {
@@ -823,20 +789,26 @@ type ChannelConnection {
 }
 
 input ChannelCreateInput {
+  ownerMember: ID
   ownerMemberId: ID
+  ownerCuratorGroup: ID
   ownerCuratorGroupId: ID
+  category: ID
   categoryId: ID
   rewardAccount: String
   title: String
   description: String
+  coverPhotoDataObject: ID
   coverPhotoDataObjectId: ID
   coverPhotoUrls: [String!]!
   coverPhotoAvailability: AssetAvailability!
+  avatarPhotoDataObject: ID
   avatarPhotoDataObjectId: ID
   avatarPhotoUrls: [String!]!
   avatarPhotoAvailability: AssetAvailability!
   isPublic: Boolean
   isCensored: Boolean!
+  language: ID
   languageId: ID
   createdInBlock: Float!
 }
@@ -853,10 +825,16 @@ enum ChannelOrderByInput {
   updatedAt_DESC
   deletedAt_ASC
   deletedAt_DESC
+  ownerMember_ASC
+  ownerMember_DESC
   ownerMemberId_ASC
   ownerMemberId_DESC
+  ownerCuratorGroup_ASC
+  ownerCuratorGroup_DESC
   ownerCuratorGroupId_ASC
   ownerCuratorGroupId_DESC
+  category_ASC
+  category_DESC
   categoryId_ASC
   categoryId_DESC
   rewardAccount_ASC
@@ -865,10 +843,14 @@ enum ChannelOrderByInput {
   title_DESC
   description_ASC
   description_DESC
+  coverPhotoDataObject_ASC
+  coverPhotoDataObject_DESC
   coverPhotoDataObjectId_ASC
   coverPhotoDataObjectId_DESC
   coverPhotoAvailability_ASC
   coverPhotoAvailability_DESC
+  avatarPhotoDataObject_ASC
+  avatarPhotoDataObject_DESC
   avatarPhotoDataObjectId_ASC
   avatarPhotoDataObjectId_DESC
   avatarPhotoAvailability_ASC
@@ -877,6 +859,8 @@ enum ChannelOrderByInput {
   isPublic_DESC
   isCensored_ASC
   isCensored_DESC
+  language_ASC
+  language_DESC
   languageId_ASC
   languageId_DESC
   createdInBlock_ASC
@@ -884,20 +868,26 @@ enum ChannelOrderByInput {
 }
 
 input ChannelUpdateInput {
+  ownerMember: ID
   ownerMemberId: ID
+  ownerCuratorGroup: ID
   ownerCuratorGroupId: ID
+  category: ID
   categoryId: ID
   rewardAccount: String
   title: String
   description: String
+  coverPhotoDataObject: ID
   coverPhotoDataObjectId: ID
   coverPhotoUrls: [String!]
   coverPhotoAvailability: AssetAvailability
+  avatarPhotoDataObject: ID
   avatarPhotoDataObjectId: ID
   avatarPhotoUrls: [String!]
   avatarPhotoAvailability: AssetAvailability
   isPublic: Boolean
   isCensored: Boolean
+  language: ID
   languageId: ID
   createdInBlock: Float
 }
@@ -950,10 +940,16 @@ input ChannelWhereInput {
   description_in: [String!]
   coverPhotoDataObjectId_eq: ID
   coverPhotoDataObjectId_in: [ID!]
+  coverPhotoUrls_containsAll: [String!]
+  coverPhotoUrls_containsNone: [String!]
+  coverPhotoUrls_containsAny: [String!]
   coverPhotoAvailability_eq: AssetAvailability
   coverPhotoAvailability_in: [AssetAvailability!]
   avatarPhotoDataObjectId_eq: ID
   avatarPhotoDataObjectId_in: [ID!]
+  avatarPhotoUrls_containsAll: [String!]
+  avatarPhotoUrls_containsNone: [String!]
+  avatarPhotoUrls_containsAny: [String!]
   avatarPhotoAvailability_eq: AssetAvailability
   avatarPhotoAvailability_in: [AssetAvailability!]
   isPublic_eq: Boolean
@@ -968,6 +964,17 @@ input ChannelWhereInput {
   createdInBlock_lt: Int
   createdInBlock_lte: Int
   createdInBlock_in: [Int!]
+  ownerMember: MembershipWhereInput
+  ownerCuratorGroup: CuratorGroupWhereInput
+  category: ChannelCategoryWhereInput
+  coverPhotoDataObject: DataObjectWhereInput
+  avatarPhotoDataObject: DataObjectWhereInput
+  language: LanguageWhereInput
+  videos_none: VideoWhereInput
+  videos_some: VideoWhereInput
+  videos_every: VideoWhereInput
+  AND: [ChannelWhereInput!]
+  OR: [ChannelWhereInput!]
 }
 
 input ChannelWhereUniqueInput {
@@ -1066,6 +1073,14 @@ input LanguageWhereInput {
   createdInBlock_lt: Int
   createdInBlock_lte: Int
   createdInBlock_in: [Int!]
+  channellanguage_none: ChannelWhereInput
+  channellanguage_some: ChannelWhereInput
+  channellanguage_every: ChannelWhereInput
+  videolanguage_none: VideoWhereInput
+  videolanguage_some: VideoWhereInput
+  videolanguage_every: VideoWhereInput
+  AND: [LanguageWhereInput!]
+  OR: [LanguageWhereInput!]
 }
 
 input LanguageWhereUniqueInput {
@@ -1177,6 +1192,11 @@ input LicenseWhereInput {
   customText_startsWith: String
   customText_endsWith: String
   customText_in: [String!]
+  videolanguage_none: VideoWhereInput
+  videolanguage_some: VideoWhereInput
+  videolanguage_every: VideoWhereInput
+  AND: [LicenseWhereInput!]
+  OR: [LicenseWhereInput!]
 }
 
 input LicenseWhereUniqueInput {
@@ -1357,6 +1377,11 @@ input MembershipWhereInput {
   subscription_lt: Int
   subscription_lte: Int
   subscription_in: [Int!]
+  channels_none: ChannelWhereInput
+  channels_some: ChannelWhereInput
+  channels_every: ChannelWhereInput
+  AND: [MembershipWhereInput!]
+  OR: [MembershipWhereInput!]
 }
 
 input MembershipWhereUniqueInput {
@@ -1364,6 +1389,89 @@ input MembershipWhereUniqueInput {
   handle: String
 }
 
+type NextEntityId implements BaseGraphQLObject {
+  id: ID!
+  createdAt: DateTime!
+  createdById: String!
+  updatedAt: DateTime
+  updatedById: String
+  deletedAt: DateTime
+  deletedById: String
+  version: Int!
+
+  """Next deterministic id for entities without custom id"""
+  nextId: Float!
+}
+
+type NextEntityIdConnection {
+  totalCount: Int!
+  edges: [NextEntityIdEdge!]!
+  pageInfo: PageInfo!
+}
+
+input NextEntityIdCreateInput {
+  nextId: Float!
+}
+
+type NextEntityIdEdge {
+  node: NextEntityId!
+  cursor: String!
+}
+
+enum NextEntityIdOrderByInput {
+  createdAt_ASC
+  createdAt_DESC
+  updatedAt_ASC
+  updatedAt_DESC
+  deletedAt_ASC
+  deletedAt_DESC
+  nextId_ASC
+  nextId_DESC
+}
+
+input NextEntityIdUpdateInput {
+  nextId: Float
+}
+
+input NextEntityIdWhereInput {
+  id_eq: ID
+  id_in: [ID!]
+  createdAt_eq: DateTime
+  createdAt_lt: DateTime
+  createdAt_lte: DateTime
+  createdAt_gt: DateTime
+  createdAt_gte: DateTime
+  createdById_eq: ID
+  createdById_in: [ID!]
+  updatedAt_eq: DateTime
+  updatedAt_lt: DateTime
+  updatedAt_lte: DateTime
+  updatedAt_gt: DateTime
+  updatedAt_gte: DateTime
+  updatedById_eq: ID
+  updatedById_in: [ID!]
+  deletedAt_all: Boolean
+  deletedAt_eq: DateTime
+  deletedAt_lt: DateTime
+  deletedAt_lte: DateTime
+  deletedAt_gt: DateTime
+  deletedAt_gte: DateTime
+  deletedById_eq: ID
+  deletedById_in: [ID!]
+  nextId_eq: Float
+  nextId_gt: Float
+  nextId_gte: Float
+  nextId_lt: Float
+  nextId_lte: Float
+  nextId_in: [Float!]
+  AND: [NextEntityIdWhereInput!]
+  OR: [NextEntityIdWhereInput!]
+}
+
+input NextEntityIdWhereUniqueInput {
+  id: ID!
+}
+
 type PageInfo {
   hasNextPage: Boolean!
   hasPreviousPage: Boolean!
@@ -1379,49 +1487,49 @@ type ProcessorState {
 }
 
 type Query {
-  curatorGroups(offset: Int, limit: Int = 50, where: CuratorGroupWhereInput, orderBy: CuratorGroupOrderByInput): [CuratorGroup!]!
+  curatorGroups(offset: Int, limit: Int = 50, where: CuratorGroupWhereInput, orderBy: [CuratorGroupOrderByInput!]): [CuratorGroup!]!
   curatorGroupByUniqueInput(where: CuratorGroupWhereUniqueInput!): CuratorGroup
-  curatorGroupsConnection(first: Int, after: String, last: Int, before: String, where: CuratorGroupWhereInput, orderBy: CuratorGroupOrderByInput): CuratorGroupConnection!
-  dataObjects(offset: Int, limit: Int = 50, where: DataObjectWhereInput, orderBy: DataObjectOrderByInput): [DataObject!]!
+  curatorGroupsConnection(first: Int, after: String, last: Int, before: String, where: CuratorGroupWhereInput, orderBy: [CuratorGroupOrderByInput!]): CuratorGroupConnection!
+  dataObjects(offset: Int, limit: Int = 50, where: DataObjectWhereInput, orderBy: [DataObjectOrderByInput!]): [DataObject!]!
   dataObjectByUniqueInput(where: DataObjectWhereUniqueInput!): DataObject
-  dataObjectsConnection(first: Int, after: String, last: Int, before: String, where: DataObjectWhereInput, orderBy: DataObjectOrderByInput): DataObjectConnection!
-  featuredVideos(offset: Int, limit: Int = 50, where: FeaturedVideoWhereInput, orderBy: FeaturedVideoOrderByInput): [FeaturedVideo!]!
-  featuredVideoByUniqueInput(where: FeaturedVideoWhereUniqueInput!): FeaturedVideo
-  featuredVideosConnection(first: Int, after: String, last: Int, before: String, where: FeaturedVideoWhereInput, orderBy: FeaturedVideoOrderByInput): FeaturedVideoConnection!
-  channelCategories(offset: Int, limit: Int = 50, where: ChannelCategoryWhereInput, orderBy: ChannelCategoryOrderByInput): [ChannelCategory!]!
+  dataObjectsConnection(first: Int, after: String, last: Int, before: String, where: DataObjectWhereInput, orderBy: [DataObjectOrderByInput!]): DataObjectConnection!
+  channelCategories(offset: Int, limit: Int = 50, where: ChannelCategoryWhereInput, orderBy: [ChannelCategoryOrderByInput!]): [ChannelCategory!]!
   channelCategoryByUniqueInput(where: ChannelCategoryWhereUniqueInput!): ChannelCategory
-  channelCategoriesConnection(first: Int, after: String, last: Int, before: String, where: ChannelCategoryWhereInput, orderBy: ChannelCategoryOrderByInput): ChannelCategoryConnection!
-  channels(offset: Int, limit: Int = 50, where: ChannelWhereInput, orderBy: ChannelOrderByInput): [Channel!]!
+  channelCategoriesConnection(first: Int, after: String, last: Int, before: String, where: ChannelCategoryWhereInput, orderBy: [ChannelCategoryOrderByInput!]): ChannelCategoryConnection!
+  channels(offset: Int, limit: Int = 50, where: ChannelWhereInput, orderBy: [ChannelOrderByInput!]): [Channel!]!
   channelByUniqueInput(where: ChannelWhereUniqueInput!): Channel
-  channelsConnection(first: Int, after: String, last: Int, before: String, where: ChannelWhereInput, orderBy: ChannelOrderByInput): ChannelConnection!
-  languages(offset: Int, limit: Int = 50, where: LanguageWhereInput, orderBy: LanguageOrderByInput): [Language!]!
+  channelsConnection(first: Int, after: String, last: Int, before: String, where: ChannelWhereInput, orderBy: [ChannelOrderByInput!]): ChannelConnection!
+  languages(offset: Int, limit: Int = 50, where: LanguageWhereInput, orderBy: [LanguageOrderByInput!]): [Language!]!
   languageByUniqueInput(where: LanguageWhereUniqueInput!): Language
-  languagesConnection(first: Int, after: String, last: Int, before: String, where: LanguageWhereInput, orderBy: LanguageOrderByInput): LanguageConnection!
-  licenses(offset: Int, limit: Int = 50, where: LicenseWhereInput, orderBy: LicenseOrderByInput): [License!]!
+  languagesConnection(first: Int, after: String, last: Int, before: String, where: LanguageWhereInput, orderBy: [LanguageOrderByInput!]): LanguageConnection!
+  licenses(offset: Int, limit: Int = 50, where: LicenseWhereInput, orderBy: [LicenseOrderByInput!]): [License!]!
   licenseByUniqueInput(where: LicenseWhereUniqueInput!): License
-  licensesConnection(first: Int, after: String, last: Int, before: String, where: LicenseWhereInput, orderBy: LicenseOrderByInput): LicenseConnection!
-  memberships(offset: Int, limit: Int = 50, where: MembershipWhereInput, orderBy: MembershipOrderByInput): [Membership!]!
+  licensesConnection(first: Int, after: String, last: Int, before: String, where: LicenseWhereInput, orderBy: [LicenseOrderByInput!]): LicenseConnection!
+  memberships(offset: Int, limit: Int = 50, where: MembershipWhereInput, orderBy: [MembershipOrderByInput!]): [Membership!]!
   membershipByUniqueInput(where: MembershipWhereUniqueInput!): Membership
-  membershipsConnection(first: Int, after: String, last: Int, before: String, where: MembershipWhereInput, orderBy: MembershipOrderByInput): MembershipConnection!
+  membershipsConnection(first: Int, after: String, last: Int, before: String, where: MembershipWhereInput, orderBy: [MembershipOrderByInput!]): MembershipConnection!
+  nextEntityIds(offset: Int, limit: Int = 50, where: NextEntityIdWhereInput, orderBy: [NextEntityIdOrderByInput!]): [NextEntityId!]!
+  nextEntityIdByUniqueInput(where: NextEntityIdWhereUniqueInput!): NextEntityId
+  nextEntityIdsConnection(first: Int, after: String, last: Int, before: String, where: NextEntityIdWhereInput, orderBy: [NextEntityIdOrderByInput!]): NextEntityIdConnection!
   channelCategoriesByName(whereChannelCategory: ChannelCategoryWhereInput, skip: Int = 0, limit: Int = 5, text: String!): [ChannelCategoriesByNameFTSOutput!]!
   membersByHandle(whereMembership: MembershipWhereInput, skip: Int = 0, limit: Int = 5, text: String!): [MembersByHandleFTSOutput!]!
   search(whereVideo: VideoWhereInput, whereChannel: ChannelWhereInput, skip: Int = 0, limit: Int = 5, text: String!): [SearchFTSOutput!]!
   videoCategoriesByName(whereVideoCategory: VideoCategoryWhereInput, skip: Int = 0, limit: Int = 5, text: String!): [VideoCategoriesByNameFTSOutput!]!
-  videoCategories(offset: Int, limit: Int = 50, where: VideoCategoryWhereInput, orderBy: VideoCategoryOrderByInput): [VideoCategory!]!
+  videoCategories(offset: Int, limit: Int = 50, where: VideoCategoryWhereInput, orderBy: [VideoCategoryOrderByInput!]): [VideoCategory!]!
   videoCategoryByUniqueInput(where: VideoCategoryWhereUniqueInput!): VideoCategory
-  videoCategoriesConnection(first: Int, after: String, last: Int, before: String, where: VideoCategoryWhereInput, orderBy: VideoCategoryOrderByInput): VideoCategoryConnection!
-  videoMediaEncodings(offset: Int, limit: Int = 50, where: VideoMediaEncodingWhereInput, orderBy: VideoMediaEncodingOrderByInput): [VideoMediaEncoding!]!
+  videoCategoriesConnection(first: Int, after: String, last: Int, before: String, where: VideoCategoryWhereInput, orderBy: [VideoCategoryOrderByInput!]): VideoCategoryConnection!
+  videoMediaEncodings(offset: Int, limit: Int = 50, where: VideoMediaEncodingWhereInput, orderBy: [VideoMediaEncodingOrderByInput!]): [VideoMediaEncoding!]!
   videoMediaEncodingByUniqueInput(where: VideoMediaEncodingWhereUniqueInput!): VideoMediaEncoding
-  videoMediaEncodingsConnection(first: Int, after: String, last: Int, before: String, where: VideoMediaEncodingWhereInput, orderBy: VideoMediaEncodingOrderByInput): VideoMediaEncodingConnection!
-  videoMediaMetadata(offset: Int, limit: Int = 50, where: VideoMediaMetadataWhereInput, orderBy: VideoMediaMetadataOrderByInput): [VideoMediaMetadata!]!
+  videoMediaEncodingsConnection(first: Int, after: String, last: Int, before: String, where: VideoMediaEncodingWhereInput, orderBy: [VideoMediaEncodingOrderByInput!]): VideoMediaEncodingConnection!
+  videoMediaMetadata(offset: Int, limit: Int = 50, where: VideoMediaMetadataWhereInput, orderBy: [VideoMediaMetadataOrderByInput!]): [VideoMediaMetadata!]!
   videoMediaMetadataByUniqueInput(where: VideoMediaMetadataWhereUniqueInput!): VideoMediaMetadata
-  videoMediaMetadataConnection(first: Int, after: String, last: Int, before: String, where: VideoMediaMetadataWhereInput, orderBy: VideoMediaMetadataOrderByInput): VideoMediaMetadataConnection!
-  videos(offset: Int, limit: Int = 50, where: VideoWhereInput, orderBy: VideoOrderByInput): [Video!]!
+  videoMediaMetadataConnection(first: Int, after: String, last: Int, before: String, where: VideoMediaMetadataWhereInput, orderBy: [VideoMediaMetadataOrderByInput!]): VideoMediaMetadataConnection!
+  videos(offset: Int, limit: Int = 50, where: VideoWhereInput, orderBy: [VideoOrderByInput!]): [Video!]!
   videoByUniqueInput(where: VideoWhereUniqueInput!): Video
-  videosConnection(first: Int, after: String, last: Int, before: String, where: VideoWhereInput, orderBy: VideoOrderByInput): VideoConnection!
-  workers(offset: Int, limit: Int = 50, where: WorkerWhereInput, orderBy: WorkerOrderByInput): [Worker!]!
+  videosConnection(first: Int, after: String, last: Int, before: String, where: VideoWhereInput, orderBy: [VideoOrderByInput!]): VideoConnection!
+  workers(offset: Int, limit: Int = 50, where: WorkerWhereInput, orderBy: [WorkerOrderByInput!]): [Worker!]!
   workerByUniqueInput(where: WorkerWhereUniqueInput!): Worker
-  workersConnection(first: Int, after: String, last: Int, before: String, where: WorkerWhereInput, orderBy: WorkerOrderByInput): WorkerConnection!
+  workersConnection(first: Int, after: String, last: Int, before: String, where: WorkerWhereInput, orderBy: [WorkerOrderByInput!]): WorkerConnection!
 }
 
 type SearchFTSOutput {
@@ -1506,7 +1614,6 @@ type Video implements BaseGraphQLObject {
 
   """Is video featured or not"""
   isFeatured: Boolean!
-  featured: FeaturedVideo
 }
 
 type VideoCategoriesByNameFTSOutput {
@@ -1604,6 +1711,11 @@ input VideoCategoryWhereInput {
   createdInBlock_lt: Int
   createdInBlock_lte: Int
   createdInBlock_in: [Int!]
+  videos_none: VideoWhereInput
+  videos_some: VideoWhereInput
+  videos_every: VideoWhereInput
+  AND: [VideoCategoryWhereInput!]
+  OR: [VideoCategoryWhereInput!]
 }
 
 input VideoCategoryWhereUniqueInput {
@@ -1617,24 +1729,31 @@ type VideoConnection {
 }
 
 input VideoCreateInput {
+  channel: ID
   channelId: ID
+  category: ID
   categoryId: ID
   title: String
   description: String
   duration: Float
+  thumbnailPhotoDataObject: ID
   thumbnailPhotoDataObjectId: ID
   thumbnailPhotoUrls: [String!]!
   thumbnailPhotoAvailability: AssetAvailability!
+  language: ID
   languageId: ID
   hasMarketing: Boolean
   publishedBeforeJoystream: DateTime
   isPublic: Boolean
   isCensored: Boolean!
   isExplicit: Boolean
+  license: ID
   licenseId: ID
+  mediaDataObject: ID
   mediaDataObjectId: ID
   mediaUrls: [String!]!
   mediaAvailability: AssetAvailability!
+  mediaMetadata: ID
   mediaMetadataId: ID
   createdInBlock: Float!
   isFeatured: Boolean!
@@ -1744,6 +1863,11 @@ input VideoMediaEncodingWhereInput {
   mimeMediaType_startsWith: String
   mimeMediaType_endsWith: String
   mimeMediaType_in: [String!]
+  videomediametadataencoding_none: VideoMediaMetadataWhereInput
+  videomediametadataencoding_some: VideoMediaMetadataWhereInput
+  videomediametadataencoding_every: VideoMediaMetadataWhereInput
+  AND: [VideoMediaEncodingWhereInput!]
+  OR: [VideoMediaEncodingWhereInput!]
 }
 
 input VideoMediaEncodingWhereUniqueInput {
@@ -1781,6 +1905,7 @@ type VideoMediaMetadataConnection {
 }
 
 input VideoMediaMetadataCreateInput {
+  encoding: ID
   encodingId: ID
   pixelWidth: Float
   pixelHeight: Float
@@ -1800,6 +1925,8 @@ enum VideoMediaMetadataOrderByInput {
   updatedAt_DESC
   deletedAt_ASC
   deletedAt_DESC
+  encoding_ASC
+  encoding_DESC
   encodingId_ASC
   encodingId_DESC
   pixelWidth_ASC
@@ -1813,6 +1940,7 @@ enum VideoMediaMetadataOrderByInput {
 }
 
 input VideoMediaMetadataUpdateInput {
+  encoding: ID
   encodingId: ID
   pixelWidth: Float
   pixelHeight: Float
@@ -1871,6 +1999,10 @@ input VideoMediaMetadataWhereInput {
   createdInBlock_lt: Int
   createdInBlock_lte: Int
   createdInBlock_in: [Int!]
+  encoding: VideoMediaEncodingWhereInput
+  video: VideoWhereInput
+  AND: [VideoMediaMetadataWhereInput!]
+  OR: [VideoMediaMetadataWhereInput!]
 }
 
 input VideoMediaMetadataWhereUniqueInput {
@@ -1884,8 +2016,12 @@ enum VideoOrderByInput {
   updatedAt_DESC
   deletedAt_ASC
   deletedAt_DESC
+  channel_ASC
+  channel_DESC
   channelId_ASC
   channelId_DESC
+  category_ASC
+  category_DESC
   categoryId_ASC
   categoryId_DESC
   title_ASC
@@ -1894,10 +2030,14 @@ enum VideoOrderByInput {
   description_DESC
   duration_ASC
   duration_DESC
+  thumbnailPhotoDataObject_ASC
+  thumbnailPhotoDataObject_DESC
   thumbnailPhotoDataObjectId_ASC
   thumbnailPhotoDataObjectId_DESC
   thumbnailPhotoAvailability_ASC
   thumbnailPhotoAvailability_DESC
+  language_ASC
+  language_DESC
   languageId_ASC
   languageId_DESC
   hasMarketing_ASC
@@ -1910,12 +2050,18 @@ enum VideoOrderByInput {
   isCensored_DESC
   isExplicit_ASC
   isExplicit_DESC
+  license_ASC
+  license_DESC
   licenseId_ASC
   licenseId_DESC
+  mediaDataObject_ASC
+  mediaDataObject_DESC
   mediaDataObjectId_ASC
   mediaDataObjectId_DESC
   mediaAvailability_ASC
   mediaAvailability_DESC
+  mediaMetadata_ASC
+  mediaMetadata_DESC
   mediaMetadataId_ASC
   mediaMetadataId_DESC
   createdInBlock_ASC
@@ -1925,24 +2071,31 @@ enum VideoOrderByInput {
 }
 
 input VideoUpdateInput {
+  channel: ID
   channelId: ID
+  category: ID
   categoryId: ID
   title: String
   description: String
   duration: Float
+  thumbnailPhotoDataObject: ID
   thumbnailPhotoDataObjectId: ID
   thumbnailPhotoUrls: [String!]
   thumbnailPhotoAvailability: AssetAvailability
+  language: ID
   languageId: ID
   hasMarketing: Boolean
   publishedBeforeJoystream: DateTime
   isPublic: Boolean
   isCensored: Boolean
   isExplicit: Boolean
+  license: ID
   licenseId: ID
+  mediaDataObject: ID
   mediaDataObjectId: ID
   mediaUrls: [String!]
   mediaAvailability: AssetAvailability
+  mediaMetadata: ID
   mediaMetadataId: ID
   createdInBlock: Float
   isFeatured: Boolean
@@ -1995,6 +2148,9 @@ input VideoWhereInput {
   duration_in: [Int!]
   thumbnailPhotoDataObjectId_eq: ID
   thumbnailPhotoDataObjectId_in: [ID!]
+  thumbnailPhotoUrls_containsAll: [String!]
+  thumbnailPhotoUrls_containsNone: [String!]
+  thumbnailPhotoUrls_containsAny: [String!]
   thumbnailPhotoAvailability_eq: AssetAvailability
   thumbnailPhotoAvailability_in: [AssetAvailability!]
   languageId_eq: ID
@@ -2016,6 +2172,9 @@ input VideoWhereInput {
   licenseId_in: [ID!]
   mediaDataObjectId_eq: ID
   mediaDataObjectId_in: [ID!]
+  mediaUrls_containsAll: [String!]
+  mediaUrls_containsNone: [String!]
+  mediaUrls_containsAny: [String!]
   mediaAvailability_eq: AssetAvailability
   mediaAvailability_in: [AssetAvailability!]
   mediaMetadataId_eq: ID
@@ -2028,6 +2187,15 @@ input VideoWhereInput {
   createdInBlock_in: [Int!]
   isFeatured_eq: Boolean
   isFeatured_in: [Boolean!]
+  channel: ChannelWhereInput
+  category: VideoCategoryWhereInput
+  thumbnailPhotoDataObject: DataObjectWhereInput
+  language: LanguageWhereInput
+  license: LicenseWhereInput
+  mediaDataObject: DataObjectWhereInput
+  mediaMetadata: VideoMediaMetadataWhereInput
+  AND: [VideoWhereInput!]
+  OR: [VideoWhereInput!]
 }
 
 input VideoWhereUniqueInput {
@@ -2144,6 +2312,11 @@ input WorkerWhereInput {
   metadata_startsWith: String
   metadata_endsWith: String
   metadata_in: [String!]
+  dataObjects_none: DataObjectWhereInput
+  dataObjects_some: DataObjectWhereInput
+  dataObjects_every: DataObjectWhereInput
+  AND: [WorkerWhereInput!]
+  OR: [WorkerWhereInput!]
 }
 
 input WorkerWhereUniqueInput {

+ 2 - 0
query-node/generated/graphql-server/model/index.ts

@@ -12,6 +12,8 @@ import { License } from '../src/modules/license/license.model';
 export { License };
 import { Membership } from '../src/modules/membership/membership.model';
 export { Membership };
+import { NextEntityId } from '../src/modules/next-entity-id/next-entity-id.model';
+export { NextEntityId };
 import { Video } from '../src/modules/video/video.model';
 export { Video };
 import { VideoCategory } from '../src/modules/video-category/video-category.model';

+ 2 - 2
query-node/generated/graphql-server/package.json

@@ -59,11 +59,11 @@
       "**/generated/*"
     ]
   },
-  "hydra": "https://github.com/metmirr/warthog/releases/download/v2.23.0/warthog-v2.23.0.tgz",
+  "hydra": "https://github.com/Joystream/warthog/releases/download/v2.37.0/joystream-warthog-v2.37.0.tgz",
   "dependencies": {
     "dotenv": "^8.2.0",
     "reflect-metadata": "^0.1.13",
-    "warthog": "https://github.com/metmirr/warthog/releases/download/v2.23.0/warthog-v2.23.0.tgz",
+    "warthog": "https://github.com/Joystream/warthog/releases/download/v2.37.0/joystream-warthog-v2.37.0.tgz",
     "@types/bn.js": "^4.11.6",
     "bn.js": "^5.1.3",
     "lodash": "^4.17.15",

+ 9 - 1
query-node/generated/graphql-server/src/index.ts

@@ -8,6 +8,7 @@ import { Logger } from '../src/logger';
 
 import { buildServerSchema, getServer } from './server';
 import { startPgSubsribers } from './pubsub';
+import { queryTemplates } from './queryTemplates'
 
 
 class CustomNamingStrategy extends SnakeNamingStrategy {
@@ -22,7 +23,14 @@ class CustomNamingStrategy extends SnakeNamingStrategy {
 async function bootstrap() {
   await loadConfig();
 
-  const server = getServer({}, { namingStrategy: new CustomNamingStrategy() });
+  const appOptions = {
+    playgroundConfig: {
+      queryTemplates,
+      cdnUrl: process.env.GRAPHQL_PLAYGROUND_CDN || '',
+    }
+  }
+
+  const server = getServer(appOptions, { namingStrategy: new CustomNamingStrategy() });
 
   // Create database tables. Warthog migrate command does not support CustomNamingStrategy thats why
   // we have this code

+ 5 - 1
query-node/generated/graphql-server/src/modules/channel-category/channel-category.model.ts

@@ -10,7 +10,11 @@ export class ChannelCategory extends BaseModel {
   })
   name?: string;
 
-  @OneToMany(() => Channel, (param: Channel) => param.category)
+  @OneToMany(() => Channel, (param: Channel) => param.category, {
+    modelName: 'ChannelCategory',
+    relModelName: 'Channel',
+    propertyName: 'channels',
+  })
   channels?: Channel[];
 
   @IntField({})

+ 1 - 1
query-node/generated/graphql-server/src/modules/channel-category/channel-category.resolver.ts

@@ -77,7 +77,7 @@ export class ChannelCategoryConnectionWhereArgs extends ConnectionPageInputOptio
   where?: ChannelCategoryWhereInput;
 
   @Field(() => ChannelCategoryOrderByEnum, { nullable: true })
-  orderBy?: ChannelCategoryOrderByEnum;
+  orderBy?: [ChannelCategoryOrderByEnum];
 }
 
 @Resolver(ChannelCategory)

+ 1 - 1
query-node/generated/graphql-server/src/modules/channel-category/channel-category.service.ts

@@ -13,7 +13,7 @@ export class ChannelCategoryService extends BaseService<ChannelCategory> {
 
   async find<W extends WhereInput>(
     where?: any,
-    orderBy?: string,
+    orderBy?: string | string[],
     limit?: number,
     offset?: number,
     fields?: string[]

+ 39 - 4
query-node/generated/graphql-server/src/modules/channel/channel.model.ts

@@ -22,16 +22,33 @@ export { AssetAvailability };
 
 @Model({ api: {} })
 export class Channel extends BaseModel {
-  @ManyToOne(() => Membership, (param: Membership) => param.channels, { skipGraphQLField: true, nullable: true, cascade: ["insert", "update"] })
+  @ManyToOne(() => Membership, (param: Membership) => param.channels, {
+    skipGraphQLField: true,
+    nullable: true,
+    cascade: ["insert", "update"],
+    modelName: 'Channel',
+    relModelName: 'Membership',
+    propertyName: 'ownerMember',
+  })
   ownerMember?: Membership;
 
-  @ManyToOne(() => CuratorGroup, (param: CuratorGroup) => param.channels, { skipGraphQLField: true, nullable: true, cascade: ["insert", "update"]})
+  @ManyToOne(() => CuratorGroup, (param: CuratorGroup) => param.channels, {
+    skipGraphQLField: true,
+    nullable: true,
+    cascade: ["insert", "update"],
+    modelName: 'Channel',
+    relModelName: 'CuratorGroup',
+    propertyName: 'ownerCuratorGroup',
+  })
   ownerCuratorGroup?: CuratorGroup;
 
   @ManyToOne(() => ChannelCategory, (param: ChannelCategory) => param.channels, {
     skipGraphQLField: true,
     nullable: true,
     cascade: ["insert", "update"],
+    modelName: 'Channel',
+    relModelName: 'ChannelCategory',
+    propertyName: 'category',
   })
   category?: ChannelCategory;
 
@@ -57,6 +74,9 @@ export class Channel extends BaseModel {
     skipGraphQLField: true,
     nullable: true,
     cascade: ["insert", "update"],
+    modelName: 'Channel',
+    relModelName: 'DataObject',
+    propertyName: 'coverPhotoDataObject',
   })
   coverPhotoDataObject?: DataObject;
 
@@ -75,6 +95,9 @@ export class Channel extends BaseModel {
     skipGraphQLField: true,
     nullable: true,
     cascade: ["insert", "update"],
+    modelName: 'Channel',
+    relModelName: 'DataObject',
+    propertyName: 'avatarPhotoDataObject',
   })
   avatarPhotoDataObject?: DataObject;
 
@@ -100,10 +123,22 @@ export class Channel extends BaseModel {
   })
   isCensored!: boolean;
 
-  @ManyToOne(() => Language, (param: Language) => param.channellanguage, { skipGraphQLField: true, nullable: true, cascade: ["insert", "update"] })
+  @ManyToOne(() => Language, (param: Language) => param.channellanguage, {
+    skipGraphQLField: true,
+    nullable: true,
+    cascade: ["insert", "update"],
+    modelName: 'Channel',
+    relModelName: 'Language',
+    propertyName: 'language',
+  })
   language?: Language;
 
-  @OneToMany(() => Video, (param: Video) => param.channel, { cascade: ["insert", "update"] })
+  @OneToMany(() => Video, (param: Video) => param.channel, {
+    cascade: ["insert", "update"],
+    modelName: 'Channel',
+    relModelName: 'Video',
+    propertyName: 'videos',
+  })
   videos?: Video[];
 
   @IntField({})

+ 1 - 1
query-node/generated/graphql-server/src/modules/channel/channel.resolver.ts

@@ -82,7 +82,7 @@ export class ChannelConnectionWhereArgs extends ConnectionPageInputOptions {
   where?: ChannelWhereInput;
 
   @Field(() => ChannelOrderByEnum, { nullable: true })
-  orderBy?: ChannelOrderByEnum;
+  orderBy?: [ChannelOrderByEnum];
 }
 
 @Resolver(Channel)

+ 1 - 1
query-node/generated/graphql-server/src/modules/channel/channel.service.ts

@@ -13,7 +13,7 @@ export class ChannelService extends BaseService<Channel> {
 
   async find<W extends WhereInput>(
     where?: any,
-    orderBy?: string,
+    orderBy?: string | string[],
     limit?: number,
     offset?: number,
     fields?: string[]

+ 6 - 1
query-node/generated/graphql-server/src/modules/curator-group/curator-group.model.ts

@@ -15,7 +15,12 @@ export class CuratorGroup extends BaseModel {
   })
   isActive!: boolean;
 
-  @OneToMany(() => Channel, (param: Channel) => param.ownerCuratorGroup, { cascade: ["insert", "update"] })
+  @OneToMany(() => Channel, (param: Channel) => param.ownerCuratorGroup, { 
+    cascade: ["insert", "update"],
+    modelName: 'CuratorGroup',
+    relModelName: 'Channel',
+    propertyName: 'channels',
+  })
   channels?: Channel[];
 
   constructor(init?: Partial<CuratorGroup>) {

+ 1 - 1
query-node/generated/graphql-server/src/modules/curator-group/curator-group.resolver.ts

@@ -77,7 +77,7 @@ export class CuratorGroupConnectionWhereArgs extends ConnectionPageInputOptions
   where?: CuratorGroupWhereInput;
 
   @Field(() => CuratorGroupOrderByEnum, { nullable: true })
-  orderBy?: CuratorGroupOrderByEnum;
+  orderBy?: [CuratorGroupOrderByEnum];
 }
 
 @Resolver(CuratorGroup)

+ 1 - 1
query-node/generated/graphql-server/src/modules/curator-group/curator-group.service.ts

@@ -13,7 +13,7 @@ export class CuratorGroupService extends BaseService<CuratorGroup> {
 
   async find<W extends WhereInput>(
     where?: any,
-    orderBy?: string,
+    orderBy?: string | string[],
     limit?: number,
     offset?: number,
     fields?: string[]

+ 31 - 4
query-node/generated/graphql-server/src/modules/data-object/data-object.model.ts

@@ -44,6 +44,9 @@ export class DataObject extends BaseModel {
   @ManyToOne(() => Worker, (param: Worker) => param.dataObjects, {
     skipGraphQLField: true,
     nullable: true,
+    modelName: 'DataObject',
+    relModelName: 'Worker',
+    propertyName: 'liaison',
   })
   liaison?: Worker;
 
@@ -62,16 +65,40 @@ export class DataObject extends BaseModel {
   })
   joystreamContentId!: string;
 
-  @OneToMany(() => Channel, (param: Channel) => param.coverPhotoDataObject, { nullable: true, cascade: ["insert", "update"] })
+  @OneToMany(() => Channel, (param: Channel) => param.coverPhotoDataObject, {
+    nullable: true,
+    cascade: ["insert", "update"],
+    modelName: 'DataObject',
+    relModelName: 'Channel',
+    propertyName: 'channelcoverPhotoDataObject',
+  })
   channelcoverPhotoDataObject?: Channel[];
 
-  @OneToMany(() => Channel, (param: Channel) => param.avatarPhotoDataObject, { nullable: true, cascade: ["insert", "update"] })
+  @OneToMany(() => Channel, (param: Channel) => param.avatarPhotoDataObject, {
+    nullable: true,
+    cascade: ["insert", "update"],
+    modelName: 'DataObject',
+    relModelName: 'Channel',
+    propertyName: 'channelavatarPhotoDataObject',
+  })
   channelavatarPhotoDataObject?: Channel[];
 
-  @OneToMany(() => Video, (param: Video) => param.thumbnailPhotoDataObject, { nullable: true, cascade: ["insert", "update"] })
+  @OneToMany(() => Video, (param: Video) => param.thumbnailPhotoDataObject, {
+    nullable: true,
+    cascade: ["insert", "update"],
+    modelName: 'DataObject',
+    relModelName: 'Video',
+    propertyName: 'videothumbnailPhotoDataObject',
+  })
   videothumbnailPhotoDataObject?: Video[];
 
-  @OneToMany(() => Video, (param: Video) => param.mediaDataObject, { nullable: true, cascade: ["insert", "update"] })
+  @OneToMany(() => Video, (param: Video) => param.mediaDataObject, {
+    nullable: true,
+    cascade: ["insert", "update"],
+    modelName: 'DataObject',
+    relModelName: 'Video',
+    propertyName: 'videomediaDataObject',
+  })
   videomediaDataObject?: Video[];
 
   constructor(init?: Partial<DataObject>) {

+ 1 - 1
query-node/generated/graphql-server/src/modules/data-object/data-object.resolver.ts

@@ -79,7 +79,7 @@ export class DataObjectConnectionWhereArgs extends ConnectionPageInputOptions {
   where?: DataObjectWhereInput;
 
   @Field(() => DataObjectOrderByEnum, { nullable: true })
-  orderBy?: DataObjectOrderByEnum;
+  orderBy?: [DataObjectOrderByEnum];
 }
 
 @Resolver(DataObject)

+ 1 - 1
query-node/generated/graphql-server/src/modules/data-object/data-object.service.ts

@@ -13,7 +13,7 @@ export class DataObjectService extends BaseService<DataObject> {
 
   async find<W extends WhereInput>(
     where?: any,
-    orderBy?: string,
+    orderBy?: string | string[],
     limit?: number,
     offset?: number,
     fields?: string[]

+ 12 - 2
query-node/generated/graphql-server/src/modules/language/language.model.ts

@@ -13,10 +13,20 @@ export class Language extends BaseModel {
   @IntField({})
   createdInBlock!: number;
 
-  @OneToMany(() => Channel, (param: Channel) => param.language, { nullable: true, cascade: ["insert", "update"] })
+  @OneToMany(() => Channel, (param: Channel) => param.language, {
+    nullable: true, cascade: ["insert", "update"],
+    modelName: 'Language',
+    relModelName: 'Channel',
+    propertyName: 'channellanguage',
+  })
   channellanguage?: Channel[];
 
-  @OneToMany(() => Video, (param: Video) => param.language, { nullable: true, cascade: ["insert", "update"] })
+  @OneToMany(() => Video, (param: Video) => param.language, {
+    nullable: true, cascade: ["insert", "update"],
+    modelName: 'Language',
+    relModelName: 'Video',
+    propertyName: 'videolanguage',
+  })
   videolanguage?: Video[];
 
   constructor(init?: Partial<Language>) {

+ 1 - 1
query-node/generated/graphql-server/src/modules/language/language.resolver.ts

@@ -78,7 +78,7 @@ export class LanguageConnectionWhereArgs extends ConnectionPageInputOptions {
   where?: LanguageWhereInput;
 
   @Field(() => LanguageOrderByEnum, { nullable: true })
-  orderBy?: LanguageOrderByEnum;
+  orderBy?: [LanguageOrderByEnum];
 }
 
 @Resolver(Language)

+ 1 - 1
query-node/generated/graphql-server/src/modules/language/language.service.ts

@@ -13,7 +13,7 @@ export class LanguageService extends BaseService<Language> {
 
   async find<W extends WhereInput>(
     where?: any,
-    orderBy?: string,
+    orderBy?: string | string[],
     limit?: number,
     offset?: number,
     fields?: string[]

+ 7 - 1
query-node/generated/graphql-server/src/modules/license/license.model.ts

@@ -22,7 +22,13 @@ export class License extends BaseModel {
   })
   customText?: string;
 
-  @OneToMany(() => Video, (param: Video) => param.license, { nullable: true, cascade: ["insert", "update"] })
+  @OneToMany(() => Video, (param: Video) => param.license, { 
+    nullable: true,
+    cascade: ["insert", "update"],
+    modelName: 'License',
+    relModelName: 'Video',
+    propertyName: 'videolicense',
+  })
   videolicense?: Video[];
 
   constructor(init?: Partial<License>) {

+ 1 - 1
query-node/generated/graphql-server/src/modules/license/license.resolver.ts

@@ -77,7 +77,7 @@ export class LicenseConnectionWhereArgs extends ConnectionPageInputOptions {
   where?: LicenseWhereInput;
 
   @Field(() => LicenseOrderByEnum, { nullable: true })
-  orderBy?: LicenseOrderByEnum;
+  orderBy?: [LicenseOrderByEnum];
 }
 
 @Resolver(License)

+ 1 - 1
query-node/generated/graphql-server/src/modules/license/license.service.ts

@@ -13,7 +13,7 @@ export class LicenseService extends BaseService<License> {
 
   async find<W extends WhereInput>(
     where?: any,
-    orderBy?: string,
+    orderBy?: string | string[],
     limit?: number,
     offset?: number,
     fields?: string[]

+ 6 - 1
query-node/generated/graphql-server/src/modules/membership/membership.model.ts

@@ -51,7 +51,12 @@ export class Membership extends BaseModel {
   })
   subscription?: number;
 
-  @OneToMany(() => Channel, (param: Channel) => param.ownerMember, { cascade: ["insert", "update"] })
+  @OneToMany(() => Channel, (param: Channel) => param.ownerMember, {
+    cascade: ["insert", "update"],
+    modelName: 'Membership',
+    relModelName: 'Channel',
+    propertyName: 'channels',
+  })
   channels?: Channel[];
 
   constructor(init?: Partial<Membership>) {

+ 1 - 1
query-node/generated/graphql-server/src/modules/membership/membership.resolver.ts

@@ -77,7 +77,7 @@ export class MembershipConnectionWhereArgs extends ConnectionPageInputOptions {
   where?: MembershipWhereInput;
 
   @Field(() => MembershipOrderByEnum, { nullable: true })
-  orderBy?: MembershipOrderByEnum;
+  orderBy?: [MembershipOrderByEnum];
 }
 
 @Resolver(Membership)

+ 1 - 1
query-node/generated/graphql-server/src/modules/membership/membership.service.ts

@@ -13,7 +13,7 @@ export class MembershipService extends BaseService<Membership> {
 
   async find<W extends WhereInput>(
     where?: any,
-    orderBy?: string,
+    orderBy?: string | string[],
     limit?: number,
     offset?: number,
     fields?: string[]

+ 14 - 0
query-node/generated/graphql-server/src/modules/next-entity-id/next-entity-id.model.ts

@@ -0,0 +1,14 @@
+import { BaseModel, FloatField, Model, StringField } from 'warthog';
+
+@Model({ api: {} })
+export class NextEntityId extends BaseModel {
+  @FloatField({
+    description: `Next deterministic id for entities without custom id`,
+  })
+  nextId!: number;
+
+  constructor(init?: Partial<NextEntityId>) {
+    super();
+    Object.assign(this, init);
+  }
+}

+ 128 - 0
query-node/generated/graphql-server/src/modules/next-entity-id/next-entity-id.resolver.ts

@@ -0,0 +1,128 @@
+import {
+  Arg,
+  Args,
+  Mutation,
+  Query,
+  Root,
+  Resolver,
+  FieldResolver,
+  ObjectType,
+  Field,
+  Int,
+  ArgsType,
+  Info,
+} from 'type-graphql';
+import graphqlFields from 'graphql-fields';
+import { Inject } from 'typedi';
+import { Min } from 'class-validator';
+import { Fields, StandardDeleteResponse, UserId, PageInfo, RawFields } from 'warthog';
+
+import {
+  NextEntityIdCreateInput,
+  NextEntityIdCreateManyArgs,
+  NextEntityIdUpdateArgs,
+  NextEntityIdWhereArgs,
+  NextEntityIdWhereInput,
+  NextEntityIdWhereUniqueInput,
+  NextEntityIdOrderByEnum,
+} from '../../../generated';
+
+import { NextEntityId } from './next-entity-id.model';
+import { NextEntityIdService } from './next-entity-id.service';
+
+@ObjectType()
+export class NextEntityIdEdge {
+  @Field(() => NextEntityId, { nullable: false })
+  node!: NextEntityId;
+
+  @Field(() => String, { nullable: false })
+  cursor!: string;
+}
+
+@ObjectType()
+export class NextEntityIdConnection {
+  @Field(() => Int, { nullable: false })
+  totalCount!: number;
+
+  @Field(() => [NextEntityIdEdge], { nullable: false })
+  edges!: NextEntityIdEdge[];
+
+  @Field(() => PageInfo, { nullable: false })
+  pageInfo!: PageInfo;
+}
+
+@ArgsType()
+export class ConnectionPageInputOptions {
+  @Field(() => Int, { nullable: true })
+  @Min(0)
+  first?: number;
+
+  @Field(() => String, { nullable: true })
+  after?: string; // V3: TODO: should we make a RelayCursor scalar?
+
+  @Field(() => Int, { nullable: true })
+  @Min(0)
+  last?: number;
+
+  @Field(() => String, { nullable: true })
+  before?: string;
+}
+
+@ArgsType()
+export class NextEntityIdConnectionWhereArgs extends ConnectionPageInputOptions {
+  @Field(() => NextEntityIdWhereInput, { nullable: true })
+  where?: NextEntityIdWhereInput;
+
+  @Field(() => NextEntityIdOrderByEnum, { nullable: true })
+  orderBy?: [NextEntityIdOrderByEnum];
+}
+
+@Resolver(NextEntityId)
+export class NextEntityIdResolver {
+  constructor(@Inject('NextEntityIdService') public readonly service: NextEntityIdService) {}
+
+  @Query(() => [NextEntityId])
+  async nextEntityIds(
+    @Args() { where, orderBy, limit, offset }: NextEntityIdWhereArgs,
+    @Fields() fields: string[]
+  ): Promise<NextEntityId[]> {
+    return this.service.find<NextEntityIdWhereInput>(where, orderBy, limit, offset, fields);
+  }
+
+  @Query(() => NextEntityId, { nullable: true })
+  async nextEntityIdByUniqueInput(
+    @Arg('where') where: NextEntityIdWhereUniqueInput,
+    @Fields() fields: string[]
+  ): Promise<NextEntityId | null> {
+    const result = await this.service.find(where, undefined, 1, 0, fields);
+    return result && result.length >= 1 ? result[0] : null;
+  }
+
+  @Query(() => NextEntityIdConnection)
+  async nextEntityIdsConnection(
+    @Args() { where, orderBy, ...pageOptions }: NextEntityIdConnectionWhereArgs,
+    @Info() info: any
+  ): Promise<NextEntityIdConnection> {
+    const rawFields = graphqlFields(info, {}, { excludedFields: ['__typename'] });
+
+    let result: any = {
+      totalCount: 0,
+      edges: [],
+      pageInfo: {
+        hasNextPage: false,
+        hasPreviousPage: false,
+      },
+    };
+    // If the related database table does not have any records then an error is thrown to the client
+    // by warthog
+    try {
+      result = await this.service.findConnection<NextEntityIdWhereInput>(where, orderBy, pageOptions, rawFields);
+    } catch (err) {
+      console.log(err);
+      // TODO: should continue to return this on `Error: Items is empty` or throw the error
+      if (!(err.message as string).includes('Items is empty')) throw err;
+    }
+
+    return result as Promise<NextEntityIdConnection>;
+  }
+}

+ 28 - 0
query-node/generated/graphql-server/src/modules/next-entity-id/next-entity-id.service.ts

@@ -0,0 +1,28 @@
+import { Service } from 'typedi';
+import { Repository } from 'typeorm';
+import { InjectRepository } from 'typeorm-typedi-extensions';
+import { BaseService, WhereInput } from 'warthog';
+
+import { NextEntityId } from './next-entity-id.model';
+
+@Service('NextEntityIdService')
+export class NextEntityIdService extends BaseService<NextEntityId> {
+  constructor(@InjectRepository(NextEntityId) protected readonly repository: Repository<NextEntityId>) {
+    super(NextEntityId, repository);
+  }
+
+  async find<W extends WhereInput>(
+    where?: any,
+    orderBy?: string | string[],
+    limit?: number,
+    offset?: number,
+    fields?: string[]
+  ): Promise<NextEntityId[]> {
+    let f = fields;
+    if (f == undefined) {
+      f = [];
+    }
+
+    return super.find<W>(where, orderBy, limit, offset, f);
+  }
+}

+ 6 - 1
query-node/generated/graphql-server/src/modules/video-category/video-category.model.ts

@@ -10,7 +10,12 @@ export class VideoCategory extends BaseModel {
   })
   name?: string;
 
-  @OneToMany(() => Video, (param: Video) => param.category, { cascade: ["insert", "update"] })
+  @OneToMany(() => Video, (param: Video) => param.category, {
+    cascade: ["insert", "update"],
+    modelName: 'VideoCategory',
+    relModelName: 'Video',
+    propertyName: 'videos',
+  })
   videos?: Video[];
 
   @IntField({})

+ 1 - 1
query-node/generated/graphql-server/src/modules/video-category/video-category.resolver.ts

@@ -77,7 +77,7 @@ export class VideoCategoryConnectionWhereArgs extends ConnectionPageInputOptions
   where?: VideoCategoryWhereInput;
 
   @Field(() => VideoCategoryOrderByEnum, { nullable: true })
-  orderBy?: VideoCategoryOrderByEnum;
+  orderBy?: [VideoCategoryOrderByEnum];
 }
 
 @Resolver(VideoCategory)

+ 1 - 1
query-node/generated/graphql-server/src/modules/video-category/video-category.service.ts

@@ -13,7 +13,7 @@ export class VideoCategoryService extends BaseService<VideoCategory> {
 
   async find<W extends WhereInput>(
     where?: any,
-    orderBy?: string,
+    orderBy?: string | string[],
     limit?: number,
     offset?: number,
     fields?: string[]

+ 7 - 1
query-node/generated/graphql-server/src/modules/video-media-encoding/video-media-encoding.model.ts

@@ -22,7 +22,13 @@ export class VideoMediaEncoding extends BaseModel {
   })
   mimeMediaType?: string;
 
-  @OneToMany(() => VideoMediaMetadata, (param: VideoMediaMetadata) => param.encoding, { nullable: true, cascade: ["insert", "update"] })
+  @OneToMany(() => VideoMediaMetadata, (param: VideoMediaMetadata) => param.encoding, {
+    nullable: true,
+    cascade: ["insert", "update"],
+    modelName: 'VideoMediaEncoding',
+    relModelName: 'VideoMediaMetadata',
+    propertyName: 'videomediametadataencoding',
+  })
   videomediametadataencoding?: VideoMediaMetadata[];
 
   constructor(init?: Partial<VideoMediaEncoding>) {

+ 1 - 1
query-node/generated/graphql-server/src/modules/video-media-encoding/video-media-encoding.resolver.ts

@@ -77,7 +77,7 @@ export class VideoMediaEncodingConnectionWhereArgs extends ConnectionPageInputOp
   where?: VideoMediaEncodingWhereInput;
 
   @Field(() => VideoMediaEncodingOrderByEnum, { nullable: true })
-  orderBy?: VideoMediaEncodingOrderByEnum;
+  orderBy?: [VideoMediaEncodingOrderByEnum];
 }
 
 @Resolver(VideoMediaEncoding)

Some files were not shown because too many files changed in this diff