Browse Source

Merge branch 'master' into runtime/increase-max-wg-mint-capacity

Mokhtar Naamani 3 years ago
parent
commit
07f5918ddd
100 changed files with 4494 additions and 1410 deletions
  1. 1 1
      .github/workflows/create-ami.yml
  2. 152 0
      .github/workflows/create-release.yml
  3. 154 47
      .github/workflows/joystream-node-docker.yml
  4. 43 0
      .github/workflows/query-node.yml
  5. 1 1
      apps.Dockerfile
  6. 50 0
      devops/infrastructure/build-arm64-playbook.yml
  7. 6 0
      devops/infrastructure/node-network/.gitignore
  8. 21 0
      devops/infrastructure/node-network/Pulumi.yaml
  9. 113 0
      devops/infrastructure/node-network/README.md
  10. 135 0
      devops/infrastructure/node-network/caddy.ts
  11. 29 0
      devops/infrastructure/node-network/configMap.ts
  12. 312 0
      devops/infrastructure/node-network/index.ts
  13. 44 0
      devops/infrastructure/node-network/json_modify.py
  14. 161 0
      devops/infrastructure/node-network/nfsVolume.ts
  15. 14 0
      devops/infrastructure/node-network/package.json
  16. 18 0
      devops/infrastructure/node-network/tsconfig.json
  17. 18 0
      devops/infrastructure/node-network/utils.ts
  18. 94 0
      devops/infrastructure/node-network/validator.ts
  19. 6 0
      devops/infrastructure/query-node/.gitignore
  20. 19 0
      devops/infrastructure/query-node/Pulumi.yaml
  21. 117 0
      devops/infrastructure/query-node/README.md
  22. 137 0
      devops/infrastructure/query-node/caddy.ts
  23. 29 0
      devops/infrastructure/query-node/configMap.ts
  24. 452 0
      devops/infrastructure/query-node/index.ts
  25. 17 0
      devops/infrastructure/query-node/package.json
  26. 73 0
      devops/infrastructure/query-node/s3Helpers.ts
  27. 18 0
      devops/infrastructure/query-node/tsconfig.json
  28. 4 3
      devops/infrastructure/requirements.yml
  29. 115 0
      devops/infrastructure/single-instance-docker.yml
  30. 50 0
      joystream-node-armv7.Dockerfile
  31. 1 0
      package.json
  32. 1 1
      pioneer/packages/joy-proposals/src/Proposal/VotingSection.tsx
  33. 6 17
      query-node/README.md
  34. 273 102
      query-node/generated/graphql-server/generated/binding.ts
  35. 304 392
      query-node/generated/graphql-server/generated/classes.ts
  36. 277 104
      query-node/generated/graphql-server/generated/schema.graphql
  37. 2 0
      query-node/generated/graphql-server/model/index.ts
  38. 2 2
      query-node/generated/graphql-server/package.json
  39. 9 1
      query-node/generated/graphql-server/src/index.ts
  40. 5 1
      query-node/generated/graphql-server/src/modules/channel-category/channel-category.model.ts
  41. 1 1
      query-node/generated/graphql-server/src/modules/channel-category/channel-category.resolver.ts
  42. 1 1
      query-node/generated/graphql-server/src/modules/channel-category/channel-category.service.ts
  43. 39 4
      query-node/generated/graphql-server/src/modules/channel/channel.model.ts
  44. 1 1
      query-node/generated/graphql-server/src/modules/channel/channel.resolver.ts
  45. 1 1
      query-node/generated/graphql-server/src/modules/channel/channel.service.ts
  46. 6 1
      query-node/generated/graphql-server/src/modules/curator-group/curator-group.model.ts
  47. 1 1
      query-node/generated/graphql-server/src/modules/curator-group/curator-group.resolver.ts
  48. 1 1
      query-node/generated/graphql-server/src/modules/curator-group/curator-group.service.ts
  49. 31 4
      query-node/generated/graphql-server/src/modules/data-object/data-object.model.ts
  50. 1 1
      query-node/generated/graphql-server/src/modules/data-object/data-object.resolver.ts
  51. 1 1
      query-node/generated/graphql-server/src/modules/data-object/data-object.service.ts
  52. 12 2
      query-node/generated/graphql-server/src/modules/language/language.model.ts
  53. 1 1
      query-node/generated/graphql-server/src/modules/language/language.resolver.ts
  54. 1 1
      query-node/generated/graphql-server/src/modules/language/language.service.ts
  55. 7 1
      query-node/generated/graphql-server/src/modules/license/license.model.ts
  56. 1 1
      query-node/generated/graphql-server/src/modules/license/license.resolver.ts
  57. 1 1
      query-node/generated/graphql-server/src/modules/license/license.service.ts
  58. 6 1
      query-node/generated/graphql-server/src/modules/membership/membership.model.ts
  59. 1 1
      query-node/generated/graphql-server/src/modules/membership/membership.resolver.ts
  60. 1 1
      query-node/generated/graphql-server/src/modules/membership/membership.service.ts
  61. 14 0
      query-node/generated/graphql-server/src/modules/next-entity-id/next-entity-id.model.ts
  62. 128 0
      query-node/generated/graphql-server/src/modules/next-entity-id/next-entity-id.resolver.ts
  63. 28 0
      query-node/generated/graphql-server/src/modules/next-entity-id/next-entity-id.service.ts
  64. 6 1
      query-node/generated/graphql-server/src/modules/video-category/video-category.model.ts
  65. 1 1
      query-node/generated/graphql-server/src/modules/video-category/video-category.resolver.ts
  66. 1 1
      query-node/generated/graphql-server/src/modules/video-category/video-category.service.ts
  67. 7 1
      query-node/generated/graphql-server/src/modules/video-media-encoding/video-media-encoding.model.ts
  68. 1 1
      query-node/generated/graphql-server/src/modules/video-media-encoding/video-media-encoding.resolver.ts
  69. 1 1
      query-node/generated/graphql-server/src/modules/video-media-encoding/video-media-encoding.service.ts
  70. 9 1
      query-node/generated/graphql-server/src/modules/video-media-metadata/video-media-metadata.model.ts
  71. 1 1
      query-node/generated/graphql-server/src/modules/video-media-metadata/video-media-metadata.resolver.ts
  72. 1 1
      query-node/generated/graphql-server/src/modules/video-media-metadata/video-media-metadata.service.ts
  73. 45 5
      query-node/generated/graphql-server/src/modules/video/video.model.ts
  74. 1 1
      query-node/generated/graphql-server/src/modules/video/video.resolver.ts
  75. 1 1
      query-node/generated/graphql-server/src/modules/video/video.service.ts
  76. 5 1
      query-node/generated/graphql-server/src/modules/worker/worker.model.ts
  77. 1 1
      query-node/generated/graphql-server/src/modules/worker/worker.resolver.ts
  78. 1 1
      query-node/generated/graphql-server/src/modules/worker/worker.service.ts
  79. 72 0
      query-node/generated/graphql-server/src/queryTemplates.ts
  80. 1 0
      query-node/mappings/.eslintignore
  81. 19 0
      query-node/mappings/.eslintrc.js
  82. 1 0
      query-node/mappings/.prettierignore
  83. 3 0
      query-node/mappings/.prettierrc.js
  84. 37 39
      query-node/mappings/bootstrap/index.ts
  85. 9 11
      query-node/mappings/bootstrap/members.ts
  86. 12 3
      query-node/mappings/bootstrap/workers.ts
  87. 5 2
      query-node/mappings/package.json
  88. 75 38
      query-node/mappings/src/common.ts
  89. 72 114
      query-node/mappings/src/content/channel.ts
  90. 23 32
      query-node/mappings/src/content/curatorGroup.ts
  91. 127 91
      query-node/mappings/src/content/utils.ts
  92. 183 184
      query-node/mappings/src/content/video.ts
  93. 1 1
      query-node/mappings/src/eventFix.ts
  94. 34 53
      query-node/mappings/src/membership.ts
  95. 60 56
      query-node/mappings/src/storage.ts
  96. 98 64
      query-node/mappings/src/workingGroup.ts
  97. 1 1
      query-node/package.json
  98. 8 0
      query-node/schema.graphql
  99. 0 2
      scripts/raspberry-cross-build.sh
  100. 3 1
      scripts/runtime-code-shasum.sh

+ 1 - 1
.github/workflows/create-ami.yml

@@ -45,7 +45,7 @@ jobs:
         parameter-overrides: "KeyName=${{ env.KEY_NAME }}"
 
     - name: Install Ansible dependencies
-      run: pipx inject ansible-base boto3 botocore
+      run: pipx inject ansible-core boto3 botocore
 
     - name: Run playbook
       uses: dawidd6/action-ansible-playbook@v2

+ 152 - 0
.github/workflows/create-release.yml

@@ -0,0 +1,152 @@
+name: Create release with node binaries
+
+on:
+  workflow_dispatch:
+    inputs:
+      name:
+        description: 'Release name (v9.3.0 - Antioch)'
+        required: true
+      tag:
+        description: 'Tag (v9.3.0)'
+        required: true
+
+env:
+  REPOSITORY: joystream/node
+
+jobs:
+  build-mac-binary:
+    runs-on: macos-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+
+      - id: compute_shasum
+        name: Compute runtime code shasum
+        run: |
+          export RUNTIME_CODE_SHASUM=`scripts/runtime-code-shasum.sh`
+          echo "::set-output name=shasum::${RUNTIME_CODE_SHASUM}"
+
+      - name: Run Setup
+        run: |
+          ./setup.sh
+
+      - name: Build binaries
+        run: |
+          yarn cargo-build
+
+      - name: Tar the binary
+        run: |
+          tar czvf joystream-node-macos.tar.gz -C ./target/release joystream-node
+
+      - name: Temporarily save node binary
+        uses: actions/upload-artifact@v2
+        with:
+          name: joystream-node-macos-${{ steps.compute_shasum.outputs.shasum }}
+          path: joystream-node-macos.tar.gz
+          retention-days: 1
+
+  build-rpi-binary:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+
+      - id: compute_shasum
+        name: Compute runtime code shasum
+        run: |
+          export RUNTIME_CODE_SHASUM=`scripts/runtime-code-shasum.sh`
+          echo "::set-output name=shasum::${RUNTIME_CODE_SHASUM}"
+
+      - name: Run Setup
+        run: |
+          ./setup.sh
+
+      - name: Build binaries
+        run: |
+          export WORKSPACE_ROOT=`cargo metadata --offline --no-deps --format-version 1 | jq .workspace_root -r`
+          sudo chmod a+w $WORKSPACE_ROOT
+          ./scripts/raspberry-cross-build.sh
+
+      - name: Tar the binary
+        run: |
+          tar czvf joystream-node-rpi.tar.gz -C ./target/arm-unknown-linux-gnueabihf/release joystream-node
+
+      - name: Temporarily save node binary
+        uses: actions/upload-artifact@v2
+        with:
+          name: joystream-node-rpi-${{ steps.compute_shasum.outputs.shasum }}
+          path: joystream-node-rpi.tar.gz
+          retention-days: 1
+
+  create-release:
+    runs-on: ubuntu-latest
+    needs: [build-mac-binary, build-rpi-binary]
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+
+      - id: compute_shasum
+        name: Compute runtime code shasum
+        run: |
+          export RUNTIME_CODE_SHASUM=`scripts/runtime-code-shasum.sh`
+          echo "::set-output name=shasum::${RUNTIME_CODE_SHASUM}"
+
+      - id: extract_binaries
+        name: Copy binaries & wasm file from docker images
+        run: |
+          IMAGE=${{ env.REPOSITORY }}:${{ steps.compute_shasum.outputs.shasum }}
+
+          docker run -d --entrypoint tail --name temp-container-joystream-node $IMAGE-amd64 -f /dev/null
+
+          RESULT=$(docker exec temp-container-joystream-node b2sum -l 256 runtime.compact.wasm | awk '{print $1}')
+          VERSION_AND_COMMIT=$(docker exec temp-container-joystream-node /joystream/node --version | awk '{print $2}' | cut -d- -f -2)
+          echo "::set-output name=blob_hash::${RESULT}"
+          echo "::set-output name=version_and_commit::${VERSION_AND_COMMIT}"
+
+          docker cp temp-container-joystream-node:/joystream/runtime.compact.wasm ./joystream_runtime_${{ github.event.inputs.tag }}.wasm
+          docker cp temp-container-joystream-node:/joystream/node ./joystream-node
+          tar -czvf joystream-node-$VERSION_AND_COMMIT-x86_64-linux-gnu.tar.gz joystream-node
+
+          docker rm --force temp-container-joystream-node
+
+          docker cp $(docker create --rm $IMAGE-arm64):/joystream/node ./joystream-node
+          tar -czvf joystream-node-$VERSION_AND_COMMIT-arm64-linux-gnu.tar.gz joystream-node
+
+          docker cp $(docker create --rm $IMAGE-arm):/joystream/node ./joystream-node
+          tar -czvf joystream-node-$VERSION_AND_COMMIT-armv7-linux-gnu.tar.gz joystream-node
+
+      - name: Retrieve saved MacOS binary
+        uses: actions/download-artifact@v2
+        with:
+          name: joystream-node-macos-${{ steps.compute_shasum.outputs.shasum }}
+
+      - name: Retrieve saved RPi binary
+        uses: actions/download-artifact@v2
+        with:
+          name: joystream-node-rpi-${{ steps.compute_shasum.outputs.shasum }}
+
+      - name: Rename MacOS and RPi tar
+        run: |
+          mv joystream-node-macos.tar.gz joystream-node-${{ steps.extract_binaries.outputs.version_and_commit }}-x86_64-macos.tar.gz
+          mv joystream-node-rpi.tar.gz joystream-node-${{ steps.extract_binaries.outputs.version_and_commit }}-rpi.tar.gz
+
+      - name: Release
+        uses: softprops/action-gh-release@v1
+        with:
+          files: |
+            *.tar.gz
+            *.wasm
+          tag_name: ${{ github.event.inputs.tag }}
+          name: ${{ github.event.inputs.name }}
+          draft: true
+          body: 'Verify wasm hash:
+            ```
+            $ b2sum -l 256 joystream_runtime_${{ github.event.inputs.tag }}.wasm
+            ```
+
+            This should be the output
+
+            ```
+            ${{ steps.extract_binaries.outputs.blob_hash }}
+            ```
+            '

+ 154 - 47
.github/workflows/joystream-node-docker.yml

@@ -1,13 +1,22 @@
 name: joystream-node-docker
+
 on: push
 
+env:
+  REPOSITORY: joystream/node
+  KEY_NAME: joystream-github-action-key
+
 jobs:
-  build:
-    name: Build joystream/node Docker image
-    if: github.repository == 'Joystream/joystream'
+  push-amd64:
+    name: Build joystream/node Docker image for amd64
     runs-on: ubuntu-latest
+    outputs:
+      tag_shasum: ${{ steps.compute_shasum.outputs.shasum }}
+      image_exists: ${{ steps.compute_main_image_exists.outputs.image_exists }}
     steps:
-      - uses: actions/checkout@v1
+      - name: Checkout
+        uses: actions/checkout@v2
+
       - uses: actions/setup-node@v1
         with:
           node-version: '14.x'
@@ -18,62 +27,160 @@ jobs:
           export RUNTIME_CODE_SHASUM=`scripts/runtime-code-shasum.sh`
           echo "::set-output name=shasum::${RUNTIME_CODE_SHASUM}"
 
-      - name: Setup cache directory
-        run: mkdir ~/docker-images
-
-      - name: Cache docker images
-        uses: actions/cache@v2
-        env:
-          cache-name: joystream-node-docker
+      - name: Login to DockerHub
+        uses: docker/login-action@v1
         with:
-          path: ~/docker-images
-          key: ${{ env.cache-name }}-${{ steps.compute_shasum.outputs.shasum }}
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_PASSWORD }}
 
-      - name: Check if we have cached image
-        continue-on-error: true
+      - name: Check if we already have the manifest on Dockerhub
+        id: compute_main_image_exists
+        # Will output 0 if the image exists and 1 if it does not exist
         run: |
-          if [ -f ~/docker-images/joystream-node-docker-image.tar.gz ]; then
-            docker load --input ~/docker-images/joystream-node-docker-image.tar.gz
-            cp ~/docker-images/joystream-node-docker-image.tar.gz .
-          fi
+          export IMAGE_EXISTS=$(docker manifest inspect ${{ env.REPOSITORY }}:${{ steps.compute_shasum.outputs.shasum }} > /dev/null ; echo $?)
+          echo "::set-output name=image_exists::${IMAGE_EXISTS}"
 
       - name: Check if we have pre-built image on Dockerhub
-        continue-on-error: true
+        id: compute_image_exists
+        # Will output 0 if the image exists and 1 if it does not exist
+        run: |
+          export IMAGE_EXISTS=$(docker manifest inspect ${{ env.REPOSITORY }}:${{ steps.compute_shasum.outputs.shasum }}-amd64 > /dev/null ; echo $?)
+          echo "::set-output name=image_exists::${IMAGE_EXISTS}"
+
+      - name: Build and push
+        uses: docker/build-push-action@v2
+        with:
+          context: .
+          file: joystream-node.Dockerfile
+          platforms: linux/amd64
+          push: true
+          tags: ${{ env.REPOSITORY }}:${{ steps.compute_shasum.outputs.shasum }}-amd64
+        if: ${{ steps.compute_image_exists.outputs.image_exists == 1 }}
+
+  push-arm:
+    name: Build joystream/node Docker image for arm
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        platform: ['linux/arm64', 'linux/arm/v7']
+        include:
+          - platform: 'linux/arm64'
+            platform_tag: 'arm64'
+            file: 'joystream-node.Dockerfile'
+          - platform: 'linux/arm/v7'
+            platform_tag: 'arm'
+            file: 'joystream-node-armv7.Dockerfile'
+    env:
+      STACK_NAME: joystream-ga-docker-${{ github.run_number }}-${{ matrix.platform_tag }}
+    steps:
+      - name: Extract branch name
+        shell: bash
+        run: echo "##[set-output name=branch;]$(echo ${GITHUB_REF#refs/heads/})"
+        id: extract_branch
+
+      - name: Checkout
+        uses: actions/checkout@v2
+
+      - uses: actions/setup-node@v1
+        with:
+          node-version: '14.x'
+
+      - name: Install Ansible dependencies
+        run: pipx inject ansible-core boto3 botocore
+
+      - id: compute_shasum
+        name: Compute runtime code shasum
         run: |
-          if ! [ -f joystream-node-docker-image.tar.gz ]; then
-            docker pull joystream/node:${{ steps.compute_shasum.outputs.shasum }}
-            docker image tag joystream/node:${{ steps.compute_shasum.outputs.shasum }} joystream/node:latest
-            docker save --output joystream-node-docker-image.tar joystream/node:latest
-            gzip joystream-node-docker-image.tar
-            cp joystream-node-docker-image.tar.gz ~/docker-images/
-          fi
-
-      - name: Build new joystream/node image
+          export RUNTIME_CODE_SHASUM=`scripts/runtime-code-shasum.sh`
+          echo "::set-output name=shasum::${RUNTIME_CODE_SHASUM}"
+
+      - name: Login to DockerHub
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_PASSWORD }}
+
+      - name: Check if we have pre-built image on Dockerhub
+        id: compute_image_exists
+        # Will output 0 if the image exists and 1 if it does not exist
         run: |
-          if ! [ -f joystream-node-docker-image.tar.gz ]; then
-            docker build . --file joystream-node.Dockerfile --tag joystream/node
-            docker save --output joystream-node-docker-image.tar joystream/node
-            gzip joystream-node-docker-image.tar
-            cp joystream-node-docker-image.tar.gz ~/docker-images/
-            echo "NEW_BUILD=true" >> $GITHUB_ENV
-          fi
-
-      - name: Save joystream/node image to Artifacts
-        uses: actions/upload-artifact@v2
+          export IMAGE_EXISTS=$(docker manifest inspect ${{ env.REPOSITORY }}:${{ steps.compute_shasum.outputs.shasum }}-${{ matrix.platform_tag }} > /dev/null ; echo $?)
+          echo "::set-output name=image_exists::${IMAGE_EXISTS}"
+
+      - name: Configure AWS credentials
+        uses: aws-actions/configure-aws-credentials@v1
+        with:
+          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+          aws-region: us-east-1
+        if: ${{ steps.compute_image_exists.outputs.image_exists == 1 }}
+
+      - name: Deploy to AWS CloudFormation
+        uses: aws-actions/aws-cloudformation-github-deploy@v1
+        id: deploy_stack
+        with:
+          name: ${{ env.STACK_NAME }}
+          template: devops/infrastructure/single-instance-docker.yml
+          no-fail-on-empty-changeset: '1'
+          parameter-overrides: 'KeyName=${{ env.KEY_NAME }},EC2AMI=ami-00d1ab6b335f217cf,EC2InstanceType=t4g.xlarge'
+        if: ${{ steps.compute_image_exists.outputs.image_exists == 1 }}
+
+      - name: Run playbook
+        uses: dawidd6/action-ansible-playbook@v2
         with:
-          name: ${{ steps.compute_shasum.outputs.shasum }}-joystream-node-docker-image.tar.gz
-          path: joystream-node-docker-image.tar.gz
+          playbook: build-arm64-playbook.yml
+          directory: devops/infrastructure
+          requirements: requirements.yml
+          key: ${{ secrets.SSH_PRIVATE_KEY }}
+          inventory: |
+            [all]
+            ${{ steps.deploy_stack.outputs.PublicIp }}
+          options: |
+            --extra-vars "git_repo=https://github.com/${{ github.repository }} \
+                          branch_name=${{ steps.extract_branch.outputs.branch }} \
+                          docker_username=${{ secrets.DOCKERHUB_USERNAME }} \
+                          docker_password=${{ secrets.DOCKERHUB_PASSWORD }} \
+                          tag_name=${{ steps.compute_shasum.outputs.shasum }}-${{ matrix.platform_tag }} \
+                          repository=${{ env.REPOSITORY }} dockerfile=${{ matrix.file }} \
+                          stack_name=${{ env.STACK_NAME }} platform=${{ matrix.platform }}"
+        if: ${{ steps.compute_image_exists.outputs.image_exists == 1 }}
 
+  push-manifest:
+    name: Create manifest using both the arch images
+    needs: [push-amd64, push-arm]
+    # Only run this job if the image does not exist with tag equal to the shasum
+    if: needs.push-amd64.outputs.image_exists == 1
+    runs-on: ubuntu-latest
+    env:
+      TAG_SHASUM: ${{ needs.push-amd64.outputs.tag_shasum }}
+    steps:
       - name: Login to DockerHub
         uses: docker/login-action@v1
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_PASSWORD }}
-        if: env.NEW_BUILD
 
-      - name: Publish new image to DockerHub
+      - name: Create manifest for multi-arch images
+        run: |
+          # get artifacts from previous steps
+          IMAGE=${{ env.REPOSITORY }}:${{ env.TAG_SHASUM }}
+          echo $IMAGE
+          docker pull $IMAGE-amd64
+          docker pull $IMAGE-arm64
+          docker pull $IMAGE-arm
+          docker manifest create $IMAGE $IMAGE-amd64 $IMAGE-arm64 $IMAGE-arm
+          docker manifest annotate $IMAGE $IMAGE-amd64 --arch amd64
+          docker manifest annotate $IMAGE $IMAGE-arm64 --arch arm64
+          docker manifest annotate $IMAGE $IMAGE-arm --arch arm
+          docker manifest push $IMAGE
+
+      - name: Create manifest with latest tag for master
+        if: github.ref == 'refs/heads/master'
         run: |
-          docker image tag joystream/node joystream/node:${{ steps.compute_shasum.outputs.shasum }}
-          docker push joystream/node:${{ steps.compute_shasum.outputs.shasum }}
-        if: env.NEW_BUILD
-  
+          IMAGE=${{ env.REPOSITORY }}:${{ env.TAG_SHASUM }}
+          LATEST_TAG=${{ env.REPOSITORY }}:latest
+          docker manifest create $LATEST_TAG $IMAGE-amd64 $IMAGE-arm64 $IMAGE-arm
+          docker manifest annotate $LATEST_TAG $IMAGE-amd64 --arch amd64
+          docker manifest annotate $LATEST_TAG $IMAGE-arm64 --arch arm64
+          docker manifest annotate $LATEST_TAG $IMAGE-arm --arch arm
+          docker manifest push $LATEST_TAG

+ 43 - 0
.github/workflows/query-node.yml

@@ -0,0 +1,43 @@
+name: query-node
+on: [pull_request, push]
+
+jobs:
+  query_node_build_ubuntu:
+    name: Ubuntu Checks
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        node-version: [14.x]
+    steps:
+    - uses: actions/checkout@v1
+    - name: Use Node.js ${{ matrix.node-version }}
+      uses: actions/setup-node@v1
+      with:
+        node-version: ${{ matrix.node-version }}
+    - name: checks
+      run: |
+        yarn install --frozen-lockfile
+        yarn workspace @joystream/types build
+        yarn workspace @joystream/content-metadata-protobuf build:ts
+        ./query-node/build.sh
+        yarn workspace query-node-mappings checks --quiet
+
+  query_node_build_osx:
+    name: MacOS Checks
+    runs-on: macos-latest
+    strategy:
+      matrix:
+        node-version: [14.x]
+    steps:
+    - uses: actions/checkout@v1
+    - name: Use Node.js ${{ matrix.node-version }}
+      uses: actions/setup-node@v1
+      with:
+        node-version: ${{ matrix.node-version }}
+    - name: checks
+      run: |
+        yarn install --frozen-lockfile --network-timeout 120000
+        yarn workspace @joystream/types build
+        yarn workspace @joystream/content-metadata-protobuf build:ts
+        ./query-node/build.sh
+        yarn workspace query-node-mappings checks --quiet

+ 1 - 1
apps.Dockerfile

@@ -1,4 +1,4 @@
-FROM node:14 as builder
+FROM --platform=linux/x86-64 node:14 as builder
 
 WORKDIR /joystream
 COPY . /joystream

+ 50 - 0
devops/infrastructure/build-arm64-playbook.yml

@@ -0,0 +1,50 @@
+---
+# Setup joystream code, build docker image
+
+- name: Build image and push to docker hub
+  hosts: all
+
+  tasks:
+    - block:
+        - name: Get code from git repo
+          include_role:
+            name: common
+            tasks_from: get-code-git
+
+        - name: Install Docker Module for Python
+          pip:
+            name: docker
+
+        - name: Log into DockerHub
+          community.docker.docker_login:
+            username: '{{ docker_username }}'
+            password: '{{ docker_password }}'
+
+        - name: Build an image and push it to a private repo
+          community.docker.docker_image:
+            build:
+              path: ./joystream
+              dockerfile: '{{ dockerfile }}'
+              platform: '{{ platform }}'
+            name: '{{ repository }}'
+            tag: '{{ tag_name }}'
+            push: yes
+            source: build
+          async: 7200
+          poll: 0
+          register: build_result
+
+        - name: Check on build async task
+          async_status:
+            jid: '{{ build_result.ansible_job_id }}'
+          register: job_result
+          until: job_result.finished
+          retries: 72
+          delay: 100
+
+      always:
+        - name: Delete the stack
+          amazon.aws.cloudformation:
+            stack_name: '{{ stack_name }}'
+            state: 'absent'
+          delegate_to: localhost

+ 6 - 0
devops/infrastructure/node-network/.gitignore

@@ -0,0 +1,6 @@
+/bin/
+/node_modules/
+kubeconfig.yml
+package-lock.json
+.env
+Pulumi.*.yaml

+ 21 - 0
devops/infrastructure/node-network/Pulumi.yaml

@@ -0,0 +1,21 @@
+name: node-network
+runtime: nodejs
+description: Kubernetes IaC for Joystream RPC and Validator nodes
+template:
+  config:
+    aws:profile:
+      default: joystream-user
+    aws:region:
+      default: us-east-1
+    isMinikube:
+      description: Whether you are deploying to minikube
+      default: false
+    numberOfValidators:
+      description: Number of validators as starting nodes
+      default: 2
+    networkSuffix:
+      description: Suffix to attach to the network id and name
+      default: 8129
+    isLoadBalancerReady:
+      description: Whether the load balancer service is ready and has been assigned an IP
+      default: false

+ 113 - 0
devops/infrastructure/node-network/README.md

@@ -0,0 +1,113 @@
+# Node network automated deployment
+
+Deploys a Joystream node network on EKS Kubernetes cluster
+
+## Deploying the App
+
+To deploy your infrastructure, follow the below steps.
+
+### Prerequisites
+
+1. [Install Pulumi](https://www.pulumi.com/docs/get-started/install/)
+1. [Install Node.js](https://nodejs.org/en/download/)
+1. Install a package manager for Node.js, such as [npm](https://www.npmjs.com/get-npm) or [Yarn](https://yarnpkg.com/en/docs/install).
+1. [Configure AWS Credentials](https://www.pulumi.com/docs/intro/cloud-providers/aws/setup/)
+1. Optional (for debugging): [Install kubectl](https://kubernetes.io/docs/tasks/tools/)
+
+### Steps
+
+After cloning this repo, from this working directory, run these commands:
+
+1. Install the required Node.js packages:
+
+   This installs the dependent packages [needed](https://www.pulumi.com/docs/intro/concepts/how-pulumi-works/) for our Pulumi program.
+
+   ```bash
+   $ npm install
+   ```
+
+1. Create a new stack, which is an isolated deployment target for this example:
+
+   This will initialize the Pulumi program in TypeScript.
+
+   ```bash
+   $ pulumi stack init
+   ```
+
+1. Set the required configuration variables in `Pulumi.<stack>.yaml`
+
+   ```bash
+   $ pulumi config set-all --plaintext aws:region=us-east-1 --plaintext aws:profile=joystream-user \
+    --plaintext numberOfValidators=2 --plaintext isMinikube=true --plaintext networkSuffix=8122
+   ```
+
+   If you want to build the stack on AWS set the `isMinikube` config to `false`
+
+   ```bash
+   $ pulumi config set isMinikube false
+   ```
+
+1. Stand up the Kubernetes cluster:
+
+   Running `pulumi up -y` will deploy the EKS cluster. Note, provisioning a
+   new EKS cluster takes between 10-15 minutes.
+
+1. Once the stack is up and running, we will modify the Caddy config to get an SSL certificate for the load balancer on AWS
+
+   Modify the config variable `isLoadBalancerReady`
+
+   ```bash
+   $ pulumi config set isLoadBalancerReady true
+   ```
+
+   Run `pulumi up -y` to update the Caddy config
+
+1. You can now access the endpoints using `pulumi stack output endpoint1` or `pulumi stack output endpoint2`
+
+   The ws-rpc endpoint is `https://<ENDPOINT>/ws-rpc` and http-rpc endpoint is `https://<ENDPOINT>/http-rpc`
+
+1. Access the Kubernetes Cluster using `kubectl`
+
+   To access your new Kubernetes cluster using `kubectl`, we need to set up the
+   `kubeconfig` file and download `kubectl`. We can leverage the Pulumi
+   stack output in the CLI, as Pulumi facilitates exporting these objects for us.
+
+   ```bash
+   $ pulumi stack output kubeconfig --show-secrets > kubeconfig
+   $ export KUBECONFIG=$PWD/kubeconfig
+   $ kubectl get nodes
+   ```
+
+   We can also use the stack output to query the cluster for our newly created Deployment:
+
+   ```bash
+   $ kubectl get deployment $(pulumi stack output deploymentName) --namespace=$(pulumi stack output namespaceName)
+   $ kubectl get service $(pulumi stack output serviceName) --namespace=$(pulumi stack output namespaceName)
+   ```
+
+   To get logs
+
+   ```bash
+   $ kubectl config set-context --current --namespace=$(pulumi stack output namespaceName)
+   $ kubectl get pods
+   $ kubectl logs <PODNAME> --all-containers
+   ```
+
+   To see complete pulumi stack output
+
+   ```bash
+   $ pulumi stack output
+   ```
+
+   To execute a command
+
+   ```bash
+   $ kubectl exec --stdin --tty <PODNAME> -c colossus -- /bin/bash
+   ```
+
+1. Once you've finished experimenting, tear down your stack's resources by destroying and removing it:
+
+   ```bash
+   $ pulumi destroy --yes
+   $ pulumi stack rm --yes
+   ```

+ 135 - 0
devops/infrastructure/node-network/caddy.ts

@@ -0,0 +1,135 @@
+import * as k8s from '@pulumi/kubernetes'
+import * as pulumi from '@pulumi/pulumi'
+import * as dns from 'dns'
+
+/**
+ * CaddyServiceDeployment is an abstraction that uses a class to fold together the common pattern of a
+ * Kubernetes Deployment and its associated Service object.
+ */
+export class CaddyServiceDeployment extends pulumi.ComponentResource {
+  public readonly deployment: k8s.apps.v1.Deployment
+  public readonly service: k8s.core.v1.Service
+  public readonly hostname?: pulumi.Output<string>
+  public readonly primaryEndpoint?: pulumi.Output<string>
+  public readonly secondaryEndpoint?: pulumi.Output<string>
+
+  constructor(name: string, args: ServiceDeploymentArgs, opts?: pulumi.ComponentResourceOptions) {
+    super('k8sjs:service:ServiceDeployment', name, {}, opts)
+
+    const labels = { app: name }
+    let volumes: pulumi.Input<pulumi.Input<k8s.types.input.core.v1.Volume>[]> = []
+    let caddyVolumeMounts: pulumi.Input<pulumi.Input<k8s.types.input.core.v1.VolumeMount>[]> = []
+
+    async function lookupPromise(url: string): Promise<dns.LookupAddress[]> {
+      return new Promise((resolve, reject) => {
+        dns.lookup(url, { all: true }, (err: any, addresses: dns.LookupAddress[]) => {
+          if (err) reject(err)
+          resolve(addresses)
+        })
+      })
+    }
+
+    this.service = new k8s.core.v1.Service(
+      name,
+      {
+        metadata: {
+          name: name,
+          namespace: args.namespaceName,
+          labels: labels,
+        },
+        spec: {
+          type: args.isMinikube ? 'NodePort' : 'LoadBalancer',
+          ports: [
+            { name: 'http', port: 80 },
+            { name: 'https', port: 443 },
+          ],
+          selector: labels,
+        },
+      },
+      { parent: this }
+    )
+
+    this.hostname = this.service.status.loadBalancer.ingress[0].hostname
+
+    if (args.lbReady) {
+      let caddyConfig: pulumi.Output<string>
+      const lbIps: pulumi.Output<dns.LookupAddress[]> = this.hostname.apply((dnsName) => {
+        return lookupPromise(dnsName)
+      })
+
+      function getProxyString(ipAddress: pulumi.Output<string>) {
+        return pulumi.interpolate`${ipAddress}.nip.io/ws-rpc {
+          reverse_proxy node-network:9944
+        }
+
+        ${ipAddress}.nip.io/http-rpc {
+          reverse_proxy node-network:9933
+        }
+        `
+      }
+
+      caddyConfig = pulumi.interpolate`${getProxyString(lbIps[0].address)}
+        ${getProxyString(lbIps[1].address)}`
+
+      this.primaryEndpoint = pulumi.interpolate`${lbIps[0].address}.nip.io`
+      this.secondaryEndpoint = pulumi.interpolate`${lbIps[1].address}.nip.io`
+
+      const keyConfig = new k8s.core.v1.ConfigMap(
+        name,
+        {
+          metadata: { namespace: args.namespaceName, labels: labels },
+          data: { 'fileData': caddyConfig },
+        },
+        { parent: this }
+      )
+      const keyConfigName = keyConfig.metadata.apply((m) => m.name)
+
+      caddyVolumeMounts.push({
+        mountPath: '/etc/caddy/Caddyfile',
+        name: 'caddy-volume',
+        subPath: 'fileData',
+      })
+      volumes.push({
+        name: 'caddy-volume',
+        configMap: {
+          name: keyConfigName,
+        },
+      })
+    }
+
+    this.deployment = new k8s.apps.v1.Deployment(
+      name,
+      {
+        metadata: { namespace: args.namespaceName, labels: labels },
+        spec: {
+          selector: { matchLabels: labels },
+          replicas: 1,
+          template: {
+            metadata: { labels: labels },
+            spec: {
+              containers: [
+                {
+                  name: 'caddy',
+                  image: 'caddy',
+                  ports: [
+                    { name: 'caddy-http', containerPort: 80 },
+                    { name: 'caddy-https', containerPort: 443 },
+                  ],
+                  volumeMounts: caddyVolumeMounts,
+                },
+              ],
+              volumes,
+            },
+          },
+        },
+      },
+      { parent: this }
+    )
+  }
+}
+
+export interface ServiceDeploymentArgs {
+  namespaceName: pulumi.Output<string>
+  lbReady?: boolean
+  isMinikube?: boolean
+}

+ 29 - 0
devops/infrastructure/node-network/configMap.ts

@@ -0,0 +1,29 @@
+import * as pulumi from '@pulumi/pulumi'
+import * as k8s from '@pulumi/kubernetes'
+import * as fs from 'fs'
+
+export class configMapFromFile extends pulumi.ComponentResource {
+  public readonly configName?: pulumi.Output<string>
+
+  constructor(name: string, args: ConfigMapArgs, opts: pulumi.ComponentResourceOptions = {}) {
+    super('pkg:node-network:configMap', name, {}, opts)
+
+    this.configName = new k8s.core.v1.ConfigMap(
+      name,
+      {
+        metadata: {
+          namespace: args.namespaceName,
+        },
+        data: {
+          'fileData': fs.readFileSync(args.filePath).toString(),
+        },
+      },
+      opts
+    ).metadata.apply((m) => m.name)
+  }
+}
+
+export interface ConfigMapArgs {
+  filePath: string
+  namespaceName: pulumi.Output<string>
+}

+ 312 - 0
devops/infrastructure/node-network/index.ts

@@ -0,0 +1,312 @@
+import * as awsx from '@pulumi/awsx'
+import * as eks from '@pulumi/eks'
+import * as pulumi from '@pulumi/pulumi'
+import * as k8s from '@pulumi/kubernetes'
+import { configMapFromFile } from './configMap'
+import { CaddyServiceDeployment } from './caddy'
+import { getSubkeyContainers } from './utils'
+import { ValidatorServiceDeployment } from './validator'
+import { NFSServiceDeployment } from './nfsVolume'
+// const { exec } = require('child_process')
+
+const config = new pulumi.Config()
+const awsConfig = new pulumi.Config('aws')
+const isMinikube = config.getBoolean('isMinikube')
+
+export let kubeconfig: pulumi.Output<any>
+
+let provider: k8s.Provider
+
+if (isMinikube) {
+  provider = new k8s.Provider('local', {})
+} else {
+  // Create a VPC for our cluster.
+  const vpc = new awsx.ec2.Vpc('joystream-node-vpc', { numberOfAvailabilityZones: 2 })
+
+  // Create an EKS cluster with the default configuration.
+  const cluster = new eks.Cluster('eksctl-node-network', {
+    vpcId: vpc.id,
+    subnetIds: vpc.publicSubnetIds,
+    desiredCapacity: 2,
+    maxSize: 2,
+    instanceType: 't2.medium',
+    providerCredentialOpts: {
+      profileName: awsConfig.get('profile'),
+    },
+  })
+  provider = cluster.provider
+
+  // Export the cluster's kubeconfig.
+  kubeconfig = cluster.kubeconfig
+}
+
+const resourceOptions = { provider: provider }
+
+const name = 'node-network'
+
+// Create a Kubernetes Namespace
+const ns = new k8s.core.v1.Namespace(name, {}, resourceOptions)
+
+// Export the Namespace name
+export const namespaceName = ns.metadata.name
+
+const appLabels = { appClass: name }
+
+const networkSuffix = config.get('networkSuffix') || '8129'
+const numberOfValidators = config.getNumber('numberOfValidators') || 1
+const chainDataPath = '/chain-data'
+const chainSpecPath = `${chainDataPath}/chainspec-raw.json`
+
+const subkeyContainers = getSubkeyContainers(numberOfValidators, chainDataPath)
+let pvcClaimName: pulumi.Output<any>
+
+if (isMinikube) {
+  const pvc = new k8s.core.v1.PersistentVolumeClaim(
+    `${name}-pvc`,
+    {
+      metadata: {
+        labels: appLabels,
+        namespace: namespaceName,
+        name: `${name}-pvc`,
+      },
+      spec: {
+        accessModes: ['ReadWriteMany'],
+        resources: {
+          requests: {
+            storage: `1Gi`,
+          },
+        },
+      },
+    },
+    resourceOptions
+  )
+
+  const pv = new k8s.core.v1.PersistentVolume(`${name}-pv`, {
+    metadata: {
+      labels: { ...appLabels, type: 'local' },
+      namespace: namespaceName,
+      name: `${name}-pv`,
+    },
+    spec: {
+      accessModes: ['ReadWriteMany'],
+      capacity: {
+        storage: `1Gi`,
+      },
+      hostPath: {
+        path: '/mnt/data/',
+      },
+    },
+  })
+  pvcClaimName = pvc.metadata.apply((m) => m.name)
+} else {
+  const nfsVolume = new NFSServiceDeployment('nfs-server', { namespace: namespaceName }, resourceOptions)
+  pvcClaimName = nfsVolume.pvc.metadata.apply((m) => m.name)
+}
+
+const jsonModifyConfig = new configMapFromFile(
+  'json-modify-config',
+  {
+    filePath: 'json_modify.py',
+    namespaceName: namespaceName,
+  },
+  resourceOptions
+).configName
+
+const chainDataPrepareJob = new k8s.batch.v1.Job(
+  'chain-data',
+  {
+    metadata: {
+      namespace: namespaceName,
+    },
+    spec: {
+      backoffLimit: 0,
+      template: {
+        spec: {
+          containers: [
+            ...subkeyContainers,
+            {
+              name: 'builder-node',
+              image: 'joystream/node:latest',
+              command: ['/bin/sh', '-c'],
+              args: [
+                `/joystream/chain-spec-builder generate -a ${numberOfValidators} \
+                --chain-spec-path ${chainDataPath}/chainspec.json --deployment live \
+                --endowed 1 --keystore-path ${chainDataPath}/data > ${chainDataPath}/seeds.txt`,
+              ],
+              volumeMounts: [
+                {
+                  name: 'config-data',
+                  mountPath: chainDataPath,
+                },
+              ],
+            },
+            {
+              name: 'json-modify',
+              image: 'python',
+              command: ['python'],
+              args: [
+                '/scripts/json_modify.py',
+                '--path',
+                `${chainDataPath}`,
+                '--prefix',
+                networkSuffix,
+                '--validators',
+                `${numberOfValidators}`,
+              ],
+              volumeMounts: [
+                {
+                  mountPath: '/scripts/json_modify.py',
+                  name: 'json-modify-script',
+                  subPath: 'fileData',
+                },
+                {
+                  name: 'config-data',
+                  mountPath: chainDataPath,
+                },
+              ],
+            },
+            {
+              name: 'raw-chain-spec',
+              image: 'joystream/node:latest',
+              command: ['/bin/sh', '-c'],
+              args: [`/joystream/node build-spec --chain ${chainDataPath}/chainspec.json --raw > ${chainSpecPath}`],
+              volumeMounts: [
+                {
+                  name: 'config-data',
+                  mountPath: chainDataPath,
+                },
+              ],
+            },
+          ],
+          volumes: [
+            {
+              name: 'json-modify-script',
+              configMap: {
+                name: jsonModifyConfig,
+              },
+            },
+            {
+              name: 'config-data',
+              persistentVolumeClaim: {
+                claimName: pvcClaimName,
+              },
+            },
+          ],
+          restartPolicy: 'Never',
+        },
+      },
+    },
+  },
+  { ...resourceOptions }
+)
+
+// Create N validator service deployments
+const validators = []
+
+for (let i = 1; i <= numberOfValidators; i++) {
+  const validator = new ValidatorServiceDeployment(
+    `node-${i}`,
+    { namespace: namespaceName, index: i, chainSpecPath, dataPath: chainDataPath, pvc: pvcClaimName },
+    { ...resourceOptions, dependsOn: chainDataPrepareJob }
+  )
+  validators.push(validator)
+}
+
+const deployment = new k8s.apps.v1.Deployment(
+  `rpc-node`,
+  {
+    metadata: {
+      namespace: namespaceName,
+      labels: appLabels,
+    },
+    spec: {
+      replicas: 1,
+      selector: { matchLabels: appLabels },
+      template: {
+        metadata: {
+          labels: appLabels,
+        },
+        spec: {
+          initContainers: [],
+          containers: [
+            {
+              name: 'rpc-node',
+              image: 'joystream/node:latest',
+              ports: [
+                { name: 'rpc-9944', containerPort: 9944 },
+                { name: 'rpc-9933', containerPort: 9933 },
+                { name: 'rpc-30333', containerPort: 30333 },
+              ],
+              args: [
+                '--chain',
+                chainSpecPath,
+                '--ws-external',
+                '--rpc-cors',
+                'all',
+                '--pruning',
+                'archive',
+                '--ws-max-connections',
+                '512',
+                '--telemetry-url',
+                'wss://telemetry.joystream.org/submit/ 0',
+                '--telemetry-url',
+                'wss://telemetry.polkadot.io/submit/ 0',
+              ],
+              volumeMounts: [
+                {
+                  name: 'config-data',
+                  mountPath: chainDataPath,
+                },
+              ],
+            },
+          ],
+          volumes: [
+            {
+              name: 'config-data',
+              persistentVolumeClaim: {
+                claimName: pvcClaimName,
+              },
+            },
+          ],
+        },
+      },
+    },
+  },
+  { ...resourceOptions, dependsOn: validators }
+)
+
+// Export the Deployment name
+export const deploymentName = deployment.metadata.name
+
+// Create a Service for the RPC Node
+const service = new k8s.core.v1.Service(
+  name,
+  {
+    metadata: {
+      labels: appLabels,
+      namespace: namespaceName,
+      name: 'node-network',
+    },
+    spec: {
+      ports: [
+        { name: 'port-1', port: 9944 },
+        { name: 'port-2', port: 9933 },
+      ],
+      selector: appLabels,
+    },
+  },
+  resourceOptions
+)
+
+// Export the Service name and public LoadBalancer Endpoint
+export const serviceName = service.metadata.name
+
+const lbReady = config.get('isLoadBalancerReady') === 'true'
+const caddy = new CaddyServiceDeployment(
+  'caddy-proxy',
+  { lbReady, namespaceName: namespaceName, isMinikube },
+  resourceOptions
+)
+
+export const endpoint1 = caddy.primaryEndpoint
+export const endpoint2 = caddy.secondaryEndpoint

+ 44 - 0
devops/infrastructure/node-network/json_modify.py

@@ -0,0 +1,44 @@
+#!/usr/bin/python
+import argparse
+import json
+
+def main(chain_path, prefix, number_of_validators):
+    chain_spec_path = f"{chain_path}/chainspec.json"
+    print(f"Updating chain spec file {chain_spec_path}")
+    number_of_validators = int(number_of_validators)
+
+    with open(chain_spec_path) as f:
+        data = json.load(f)
+
+    response = {
+        "name": f'{data["name"]} {prefix}',
+        "id": f'{data["id"]}_{prefix}',
+        "protocolId": f'{data["protocolId"]}{prefix}'
+    }
+
+    boot_node_list = data["bootNodes"]
+    for i in range(1, number_of_validators + 1):
+        public_key = open(f"{chain_path}/publickey{i}").read().replace('\n', '')
+        boot_node = f"/dns4/node-{i}/tcp/30333/p2p/{public_key}"
+        boot_node_list.append(boot_node)
+
+    telemetry_endpoints = data["telemetryEndpoints"]
+    telemetry_endpoints.append([
+        "/dns/telemetry.joystream.org/tcp/443/x-parity-wss/%2Fsubmit%2F", 0])
+
+    response["bootNodes"] = boot_node_list
+    response["telemetryEndpoints"] = telemetry_endpoints
+
+    data.update(response)
+    with open(chain_spec_path, 'w') as outfile:
+        json.dump(data, outfile, indent=4)
+    print("Chain spec file updated")
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(description='Modify Chain Spec file')
+    parser.add_argument('--path', required=True, help="Path to chain data")
+    parser.add_argument('--prefix', required=True, help="Network prefix")
+    parser.add_argument('--validators', required=True, help="Number of Validators")
+    args = parser.parse_args()
+    print(args.path)
+    main(chain_path=args.path, prefix=args.prefix, number_of_validators=args.validators)

+ 161 - 0
devops/infrastructure/node-network/nfsVolume.ts

@@ -0,0 +1,161 @@
+import * as k8s from '@pulumi/kubernetes'
+import * as k8stypes from '@pulumi/kubernetes/types/input'
+import * as pulumi from '@pulumi/pulumi'
+
+/**
+ * NFSServiceDeployment is an abstraction that uses cloud resources to create a PVC,
+ * which is then used by an NFS container, enabling users to use this NFS server
+ * as a shared file system without having to create custom cloud resources.
+ */
+export class NFSServiceDeployment extends pulumi.ComponentResource {
+  public readonly deployment: k8s.apps.v1.Deployment
+  public readonly service: k8s.core.v1.Service
+  public readonly pvc: k8s.core.v1.PersistentVolumeClaim
+
+  constructor(name: string, args: ServiceDeploymentArgs, opts?: pulumi.ComponentResourceOptions) {
+    super('k8sjs:service:NFSServiceDeployment', name, {}, opts)
+
+    const nfsLabels = { role: 'nfs-server' }
+    const claimName = 'pvcfornfs'
+
+    // Deploys a cloud block storage which will be used as base storage for NFS server
+    const pvcNFS = new k8s.core.v1.PersistentVolumeClaim(
+      claimName,
+      {
+        metadata: {
+          labels: nfsLabels,
+          namespace: args.namespace,
+          name: claimName,
+        },
+        spec: {
+          accessModes: ['ReadWriteOnce'],
+          resources: {
+            requests: {
+              storage: `1Gi`,
+            },
+          },
+        },
+      },
+      { parent: this }
+    )
+
+    const container: k8stypes.core.v1.Container = {
+      name: 'nfs-server',
+      image: 'gcr.io/google_containers/volume-nfs:0.8',
+      ports: [
+        { name: 'nfs', containerPort: 2049 },
+        { name: 'mountd', containerPort: 20048 },
+        { name: 'rpcbind', containerPort: 111 },
+      ],
+      command: ['/bin/sh', '-c'],
+      args: ['chmod 777 /exports && /usr/local/bin/run_nfs.sh /exports'],
+      securityContext: { 'privileged': true },
+      volumeMounts: [
+        {
+          name: 'nfsstore',
+          mountPath: '/exports',
+        },
+      ],
+    }
+
+    this.deployment = new k8s.apps.v1.Deployment(
+      `nfs-server`,
+      {
+        metadata: {
+          namespace: args.namespace,
+          labels: nfsLabels,
+          name: 'nfs-server',
+        },
+        spec: {
+          replicas: 1,
+          selector: { matchLabels: nfsLabels },
+          template: {
+            metadata: {
+              labels: nfsLabels,
+            },
+            spec: {
+              containers: [container],
+              volumes: [
+                {
+                  name: 'nfsstore',
+                  persistentVolumeClaim: {
+                    claimName,
+                  },
+                },
+              ],
+            },
+          },
+        },
+      },
+      { parent: this }
+    )
+
+    this.service = new k8s.core.v1.Service(
+      'nfs-server',
+      {
+        metadata: {
+          namespace: args.namespace,
+          name: 'nfs-server',
+        },
+        spec: {
+          ports: [
+            { name: 'nfs', port: 2049 },
+            { name: 'mountd', port: 20048 },
+            { name: 'rpcbind', port: 111 },
+          ],
+          selector: nfsLabels,
+        },
+      },
+      { parent: this }
+    )
+
+    const ip = this.service.spec.apply((v) => v.clusterIP)
+
+    const pv = new k8s.core.v1.PersistentVolume(
+      `${name}-pv`,
+      {
+        metadata: {
+          labels: nfsLabels,
+          namespace: args.namespace,
+          name: `${name}-pvc`,
+        },
+        spec: {
+          accessModes: ['ReadWriteMany'],
+          capacity: {
+            storage: `1Gi`,
+          },
+          nfs: {
+            server: ip, //pulumi.interpolate`nfs-server.${namespaceName}.svc.cluster.local`,
+            path: '/',
+          },
+        },
+      },
+      { parent: this, dependsOn: this.service }
+    )
+
+    this.pvc = new k8s.core.v1.PersistentVolumeClaim(
+      `${name}-pvc`,
+      {
+        metadata: {
+          namespace: args.namespace,
+          name: `${name}-pvc`,
+        },
+        spec: {
+          accessModes: ['ReadWriteMany'],
+          resources: {
+            requests: {
+              storage: `1Gi`,
+            },
+          },
+          storageClassName: '',
+          selector: { matchLabels: nfsLabels },
+        },
+      },
+      { parent: this, dependsOn: pv }
+    )
+  }
+}
+
+export interface ServiceDeploymentArgs {
+  namespace: pulumi.Output<string>
+}

+ 14 - 0
devops/infrastructure/node-network/package.json

@@ -0,0 +1,14 @@
+{
+  "name": "joystream-node-network",
+  "devDependencies": {
+    "@types/node": "^10.0.0"
+  },
+  "dependencies": {
+    "@pulumi/aws": "^4.0.0",
+    "@pulumi/awsx": "^0.30.0",
+    "@pulumi/eks": "^0.31.0",
+    "@pulumi/kubernetes": "^3.0.0",
+    "@pulumi/pulumi": "^3.0.0",
+    "@pulumi/docker": "^3.0.0"
+  }
+}

+ 18 - 0
devops/infrastructure/node-network/tsconfig.json

@@ -0,0 +1,18 @@
+{
+    "compilerOptions": {
+        "strict": true,
+        "outDir": "bin",
+        "target": "es2016",
+        "module": "commonjs",
+        "moduleResolution": "node",
+        "sourceMap": true,
+        "experimentalDecorators": true,
+        "pretty": true,
+        "noFallthroughCasesInSwitch": true,
+        "noImplicitReturns": true,
+        "forceConsistentCasingInFileNames": true
+    },
+    "files": [
+        "index.ts"
+    ]
+}

+ 18 - 0
devops/infrastructure/node-network/utils.ts

@@ -0,0 +1,18 @@
+export const getSubkeyContainers = (validators: number, dataPath: string) => {
+  const result = []
+  for (let i = 1; i <= validators; i++) {
+    result.push({
+      name: `subkey-node-${i}`,
+      image: 'parity/subkey:latest',
+      command: ['/bin/sh', '-c'],
+      args: [`subkey generate-node-key > ${dataPath}/privatekey${i} 2> ${dataPath}/publickey${i}`],
+      volumeMounts: [
+        {
+          name: 'config-data',
+          mountPath: dataPath,
+        },
+      ],
+    })
+  }
+  return result
+}

+ 94 - 0
devops/infrastructure/node-network/validator.ts

@@ -0,0 +1,94 @@
+import * as k8s from '@pulumi/kubernetes'
+import * as k8stypes from '@pulumi/kubernetes/types/input'
+import * as pulumi from '@pulumi/pulumi'
+
+/**
+ * ValidatorServiceDeployment is an example abstraction that uses a class to fold together the common pattern of a
+ * Kubernetes Deployment and its associated Service object.
+ */
+export class ValidatorServiceDeployment extends pulumi.ComponentResource {
+  public readonly deployment: k8s.apps.v1.Deployment
+  public readonly service: k8s.core.v1.Service
+  public readonly ipAddress?: pulumi.Output<string>
+
+  constructor(name: string, args: ServiceDeploymentArgs, opts?: pulumi.ComponentResourceOptions) {
+    super('k8sjs:service:ValidatorServiceDeployment', name, {}, opts)
+
+    const labels = { app: name }
+    const container: k8stypes.core.v1.Container = {
+      name: `joystream-node-${args.index}`,
+      image: 'joystream/node:latest',
+      args: [
+        '--chain',
+        args.chainSpecPath,
+        '--pruning',
+        'archive',
+        '--node-key-file',
+        `${args.dataPath}/privatekey${args.index}`,
+        '--keystore-path',
+        `${args.dataPath}/data/auth-${args.index - 1}`,
+        '--validator',
+        '--log',
+        'runtime,txpool,transaction-pool,trace=sync',
+      ],
+      volumeMounts: [
+        {
+          name: 'config-data',
+          mountPath: args.dataPath,
+        },
+      ],
+    }
+    this.deployment = new k8s.apps.v1.Deployment(
+      name,
+      {
+        metadata: {
+          namespace: args.namespace,
+          labels: labels,
+        },
+        spec: {
+          selector: { matchLabels: labels },
+          replicas: 1,
+          template: {
+            metadata: { labels: labels },
+            spec: {
+              containers: [container],
+              volumes: [
+                {
+                  name: 'config-data',
+                  persistentVolumeClaim: {
+                    claimName: args.pvc,
+                  },
+                },
+              ],
+            },
+          },
+        },
+      },
+      { parent: this }
+    )
+
+    this.service = new k8s.core.v1.Service(
+      name,
+      {
+        metadata: {
+          name: name,
+          namespace: args.namespace,
+          labels: this.deployment.metadata.labels,
+        },
+        spec: {
+          ports: [{ name: 'port-1', port: 30333 }],
+          selector: this.deployment.spec.template.metadata.labels,
+        },
+      },
+      { parent: this }
+    )
+  }
+}
+
+export interface ServiceDeploymentArgs {
+  namespace: pulumi.Output<string>
+  index: number
+  chainSpecPath: string
+  dataPath: string
+  pvc: pulumi.OutputInstance<any>
+}

+ 6 - 0
devops/infrastructure/query-node/.gitignore

@@ -0,0 +1,6 @@
+/bin/
+/node_modules/
+kubeconfig.yml
+package-lock.json
+.env
+Pulumi.*.yaml

+ 19 - 0
devops/infrastructure/query-node/Pulumi.yaml

@@ -0,0 +1,19 @@
+name: query-node
+runtime: nodejs
+description: Kubernetes IaC for Query Node
+template:
+  config:
+    aws:profile:
+      default: joystream-user
+    aws:region:
+      default: us-east-1
+    isMinikube:
+      description: Whether you are deploying to minikube
+      default: false
+    isLoadBalancerReady:
+      description: Whether the load balancer service is ready and has been assigned an IP
+      default: false
+    membersFilePath:
+      description: Path to members.json file for processor initialization
+    workersFilePath:
+      description: Path to workers.json file for processor initialization

+ 117 - 0
devops/infrastructure/query-node/README.md

@@ -0,0 +1,117 @@
+# Query Node automated deployment
+
+Deploys an EKS Kubernetes cluster with query node
+
+## Deploying the App
+
+To deploy your infrastructure, follow the below steps.
+
+### Prerequisites
+
+1. [Install Pulumi](https://www.pulumi.com/docs/get-started/install/)
+1. [Install Node.js](https://nodejs.org/en/download/)
+1. Install a package manager for Node.js, such as [npm](https://www.npmjs.com/get-npm) or [Yarn](https://yarnpkg.com/en/docs/install).
+1. [Configure AWS Credentials](https://www.pulumi.com/docs/intro/cloud-providers/aws/setup/)
+1. Optional (for debugging): [Install kubectl](https://kubernetes.io/docs/tasks/tools/)
+
+### Steps
+
+After cloning this repo, from this working directory, run these commands:
+
+1. Install the required Node.js packages:
+
+   This installs the dependent packages [needed](https://www.pulumi.com/docs/intro/concepts/how-pulumi-works/) for our Pulumi program.
+
+   ```bash
+   $ npm install
+   ```
+
+1. Create a new stack, which is an isolated deployment target for this example:
+
+   This will initialize the Pulumi program in TypeScript.
+
+   ```bash
+   $ pulumi stack init
+   ```
+
+1. Set the required configuration variables in `Pulumi.<stack>.yaml`
+
+   ```bash
+   $ pulumi config set-all --plaintext aws:region=us-east-1 --plaintext aws:profile=joystream-user \
+    --plaintext workersFilePath=<PATH> --plaintext membersFilePath=<PATH> --plaintext isMinikube=true
+   ```
+
+   If you want to build the stack on AWS set the `isMinikube` config to `false`
+
+   ```bash
+   $ pulumi config set isMinikube false
+   ```
+
+1. Create a `.env` file in this directory (`cp ../../../.env ./.env`) and set the database and other variables in it
+
+   Make sure to set `GRAPHQL_SERVER_PORT=4001`
+
+1. Stand up the Kubernetes cluster:
+
+   Running `pulumi up -y` will deploy the EKS cluster. Note, provisioning a
+   new EKS cluster takes between 10-15 minutes.
+
+1. Once the stack is up and running, we will modify the Caddy config to get an SSL certificate for the load balancer
+
+   Modify the config variable `isLoadBalancerReady`
+
+   ```bash
+   $ pulumi config set isLoadBalancerReady true
+   ```
+
+   Run `pulumi up -y` to update the Caddy config
+
+1. You can now access the endpoints using `pulumi stack output endpoint1` or `pulumi stack output endpoint2`
+
+   The GraphQL server is accessible at `https://<ENDPOINT>/server/graphql` and the indexer at `https://<ENDPOINT>/indexer/graphql`
+
+1. Access the Kubernetes Cluster using `kubectl`
+
+   To access your new Kubernetes cluster using `kubectl`, we need to set up the
+   `kubeconfig` file and download `kubectl`. We can leverage the Pulumi
+   stack output in the CLI, as Pulumi facilitates exporting these objects for us.
+
+   ```bash
+   $ pulumi stack output kubeconfig --show-secrets > kubeconfig
+   $ export KUBECONFIG=$PWD/kubeconfig
+   $ kubectl get nodes
+   ```
+
+   We can also use the stack output to query the cluster for our newly created Deployment:
+
+   ```bash
+   $ kubectl get deployment $(pulumi stack output deploymentName) --namespace=$(pulumi stack output namespaceName)
+   $ kubectl get service $(pulumi stack output serviceName) --namespace=$(pulumi stack output namespaceName)
+   ```
+
+   To get logs
+
+   ```bash
+   $ kubectl config set-context --current --namespace=$(pulumi stack output namespaceName)
+   $ kubectl get pods
+   $ kubectl logs <PODNAME> --all-containers
+   ```
+
+   To see complete pulumi stack output
+
+   ```bash
+   $ pulumi stack output
+   ```
+
+   To execute a command
+
+   ```bash
+   $ kubectl exec --stdin --tty <PODNAME> -c colossus -- /bin/bash
+   ```
+
+1. Once you've finished experimenting, tear down your stack's resources by destroying and removing it:
+
+   ```bash
+   $ pulumi destroy --yes
+   $ pulumi stack rm --yes
+   ```

+ 137 - 0
devops/infrastructure/query-node/caddy.ts

@@ -0,0 +1,137 @@
+import * as k8s from '@pulumi/kubernetes'
+import * as pulumi from '@pulumi/pulumi'
+import * as dns from 'dns'
+
+/**
+ * CaddyServiceDeployment is an abstraction that uses a class to fold together the common pattern of a
+ * Kubernetes Deployment and its associated Service object.
+ */
+export class CaddyServiceDeployment extends pulumi.ComponentResource {
+  public readonly deployment: k8s.apps.v1.Deployment
+  public readonly service: k8s.core.v1.Service
+  public readonly hostname?: pulumi.Output<string>
+  public readonly primaryEndpoint?: pulumi.Output<string>
+  public readonly secondaryEndpoint?: pulumi.Output<string>
+
+  constructor(name: string, args: ServiceDeploymentArgs, opts?: pulumi.ComponentResourceOptions) {
+    super('k8sjs:service:ServiceDeployment', name, {}, opts)
+
+    const labels = { app: name }
+    let volumes: pulumi.Input<pulumi.Input<k8s.types.input.core.v1.Volume>[]> = []
+    let caddyVolumeMounts: pulumi.Input<pulumi.Input<k8s.types.input.core.v1.VolumeMount>[]> = []
+
+    async function lookupPromise(url: string): Promise<dns.LookupAddress[]> {
+      return new Promise((resolve, reject) => {
+        dns.lookup(url, { all: true }, (err: any, addresses: dns.LookupAddress[]) => {
+          if (err) reject(err)
+          resolve(addresses)
+        })
+      })
+    }
+
+    this.service = new k8s.core.v1.Service(
+      name,
+      {
+        metadata: {
+          name: name,
+          namespace: args.namespaceName,
+          labels: labels,
+        },
+        spec: {
+          type: 'LoadBalancer',
+          ports: [
+            { name: 'http', port: 80 },
+            { name: 'https', port: 443 },
+          ],
+          selector: labels,
+        },
+      },
+      { parent: this }
+    )
+
+    this.hostname = this.service.status.loadBalancer.ingress[0].hostname
+
+    if (args.lbReady) {
+      let caddyConfig: pulumi.Output<string>
+      const lbIps: pulumi.Output<dns.LookupAddress[]> = this.hostname.apply((dnsName) => {
+        return lookupPromise(dnsName)
+      })
+
+      function getProxyString(ipAddress: pulumi.Output<string>) {
+        return pulumi.interpolate`${ipAddress}.nip.io/indexer/* {
+          uri strip_prefix /indexer
+          reverse_proxy query-node:4000
+        }
+
+        ${ipAddress}.nip.io/server/* {
+          uri strip_prefix /server
+          reverse_proxy query-node:8081
+        }
+        `
+      }
+
+      caddyConfig = pulumi.interpolate`${getProxyString(lbIps[0].address)}
+        ${getProxyString(lbIps[1].address)}`
+
+      this.primaryEndpoint = pulumi.interpolate`${lbIps[0].address}.nip.io`
+      this.secondaryEndpoint = pulumi.interpolate`${lbIps[1].address}.nip.io`
+
+      const keyConfig = new k8s.core.v1.ConfigMap(
+        name,
+        {
+          metadata: { namespace: args.namespaceName, labels: labels },
+          data: { 'fileData': caddyConfig },
+        },
+        { parent: this }
+      )
+      const keyConfigName = keyConfig.metadata.apply((m) => m.name)
+
+      caddyVolumeMounts.push({
+        mountPath: '/etc/caddy/Caddyfile',
+        name: 'caddy-volume',
+        subPath: 'fileData',
+      })
+      volumes.push({
+        name: 'caddy-volume',
+        configMap: {
+          name: keyConfigName,
+        },
+      })
+    }
+
+    this.deployment = new k8s.apps.v1.Deployment(
+      name,
+      {
+        metadata: { namespace: args.namespaceName, labels: labels },
+        spec: {
+          selector: { matchLabels: labels },
+          replicas: 1,
+          template: {
+            metadata: { labels: labels },
+            spec: {
+              containers: [
+                {
+                  name: 'caddy',
+                  image: 'caddy',
+                  ports: [
+                    { name: 'caddy-http', containerPort: 80 },
+                    { name: 'caddy-https', containerPort: 443 },
+                  ],
+                  volumeMounts: caddyVolumeMounts,
+                },
+              ],
+              volumes,
+            },
+          },
+        },
+      },
+      { parent: this }
+    )
+  }
+}
+
+export interface ServiceDeploymentArgs {
+  namespaceName: pulumi.Output<string>
+  lbReady?: boolean
+  isMinikube?: boolean
+}

+ 29 - 0
devops/infrastructure/query-node/configMap.ts

@@ -0,0 +1,29 @@
+import * as pulumi from '@pulumi/pulumi'
+import * as k8s from '@pulumi/kubernetes'
+import * as fs from 'fs'
+
+export class configMapFromFile extends pulumi.ComponentResource {
+  public readonly configName?: pulumi.Output<string>
+
+  constructor(name: string, args: ConfigMapArgs, opts: pulumi.ComponentResourceOptions = {}) {
+    super('pkg:query-node:configMap', name, {}, opts)
+
+    this.configName = new k8s.core.v1.ConfigMap(
+      name,
+      {
+        metadata: {
+          namespace: args.namespaceName,
+        },
+        data: {
+          'fileData': fs.readFileSync(args.filePath).toString(),
+        },
+      },
+      opts
+    ).metadata.apply((m) => m.name)
+  }
+}
+
+export interface ConfigMapArgs {
+  filePath: string
+  namespaceName: pulumi.Output<string>
+}

+ 452 - 0
devops/infrastructure/query-node/index.ts

@@ -0,0 +1,452 @@
+import * as awsx from '@pulumi/awsx'
+import * as eks from '@pulumi/eks'
+import * as docker from '@pulumi/docker'
+import * as pulumi from '@pulumi/pulumi'
+import { configMapFromFile } from './configMap'
+import * as k8s from '@pulumi/kubernetes'
+import * as s3Helpers from './s3Helpers'
+import { CaddyServiceDeployment } from './caddy'
+import { workers } from 'cluster'
+// import * as fs from 'fs'
+
+require('dotenv').config()
+
+const config = new pulumi.Config()
+const awsConfig = new pulumi.Config('aws')
+const isMinikube = config.getBoolean('isMinikube')
+export let kubeconfig: pulumi.Output<any>
+export let joystreamAppsImage: pulumi.Output<string>
+let provider: k8s.Provider
+
+if (isMinikube) {
+  provider = new k8s.Provider('local', {})
+
+  // Create image from local app
+  joystreamAppsImage = new docker.Image('joystream/apps', {
+    build: {
+      context: '../../../',
+      dockerfile: '../../../apps.Dockerfile',
+    },
+    imageName: 'joystream/apps:latest',
+    skipPush: true,
+  }).baseImageName
+  // joystreamAppsImage = pulumi.interpolate`joystream/apps`
+} else {
+  // Create a VPC for our cluster.
+  const vpc = new awsx.ec2.Vpc('query-node-vpc', { numberOfAvailabilityZones: 2 })
+
+  // Create an EKS cluster with the default configuration.
+  const cluster = new eks.Cluster('eksctl-my-cluster', {
+    vpcId: vpc.id,
+    subnetIds: vpc.publicSubnetIds,
+    desiredCapacity: 3,
+    maxSize: 3,
+    instanceType: 't2.large',
+    providerCredentialOpts: {
+      profileName: awsConfig.get('profile'),
+    },
+  })
+  provider = cluster.provider
+
+  // Export the cluster's kubeconfig.
+  kubeconfig = cluster.kubeconfig
+
+  // Create a repository
+  const repo = new awsx.ecr.Repository('joystream/apps')
+
+  joystreamAppsImage = repo.buildAndPushImage({
+    dockerfile: '../../../apps.Dockerfile',
+    context: '../../../',
+  })
+}
+
+const resourceOptions = { provider: provider }
+
+const name = 'query-node'
+
+// Create a Kubernetes Namespace
+// const ns = new k8s.core.v1.Namespace(name, {}, { provider: cluster.provider })
+const ns = new k8s.core.v1.Namespace(name, {}, resourceOptions)
+
+// Export the Namespace name
+export const namespaceName = ns.metadata.name
+
+const appLabels = { appClass: name }
+
+// Labels shared by the PostgreSQL database resources (PVC, Deployment, Service)
+const databaseLabels = { app: 'postgres-db' }
+
+const pvc = new k8s.core.v1.PersistentVolumeClaim(
+  `db-pvc`,
+  {
+    metadata: {
+      labels: databaseLabels,
+      namespace: namespaceName,
+      name: `db-pvc`,
+    },
+    spec: {
+      accessModes: ['ReadWriteOnce'],
+      resources: {
+        requests: {
+          storage: `10Gi`,
+        },
+      },
+    },
+  },
+  resourceOptions
+)
+
+const databaseDeployment = new k8s.apps.v1.Deployment(
+  'postgres-db',
+  {
+    metadata: {
+      namespace: namespaceName,
+      labels: databaseLabels,
+    },
+    spec: {
+      selector: { matchLabels: databaseLabels },
+      template: {
+        metadata: { labels: databaseLabels },
+        spec: {
+          containers: [
+            {
+              name: 'postgres-db',
+              image: 'postgres:12',
+              env: [
+                { name: 'POSTGRES_USER', value: process.env.DB_USER! },
+                { name: 'POSTGRES_PASSWORD', value: process.env.DB_PASS! },
+                { name: 'POSTGRES_DB', value: process.env.INDEXER_DB_NAME! },
+              ],
+              ports: [{ containerPort: 5432 }],
+              volumeMounts: [
+                {
+                  name: 'postgres-data',
+                  mountPath: '/var/lib/postgresql/data',
+                  subPath: 'postgres',
+                },
+              ],
+            },
+          ],
+          volumes: [
+            {
+              name: 'postgres-data',
+              persistentVolumeClaim: {
+                claimName: `db-pvc`,
+              },
+            },
+          ],
+        },
+      },
+    },
+  },
+  resourceOptions
+)
+
+const databaseService = new k8s.core.v1.Service(
+  'postgres-db',
+  {
+    metadata: {
+      namespace: namespaceName,
+      labels: databaseDeployment.metadata.labels,
+      name: 'postgres-db',
+    },
+    spec: {
+      ports: [{ port: 5432 }],
+      selector: databaseDeployment.spec.template.metadata.labels,
+    },
+  },
+  resourceOptions
+)
+
+const migrationJob = new k8s.batch.v1.Job(
+  'db-migration',
+  {
+    metadata: {
+      namespace: namespaceName,
+    },
+    spec: {
+      backoffLimit: 0,
+      template: {
+        spec: {
+          containers: [
+            {
+              name: 'db-migration',
+              image: joystreamAppsImage,
+              imagePullPolicy: 'IfNotPresent',
+              resources: { requests: { cpu: '100m', memory: '100Mi' } },
+              env: [
+                {
+                  name: 'WARTHOG_DB_HOST',
+                  value: 'postgres-db',
+                },
+                {
+                  name: 'DB_HOST',
+                  value: 'postgres-db',
+                },
+                { name: 'DB_NAME', value: process.env.DB_NAME! },
+                { name: 'DB_PASS', value: process.env.DB_PASS! },
+              ],
+              command: ['/bin/sh', '-c'],
+              args: ['yarn workspace query-node-root db:prepare; yarn workspace query-node-root db:migrate'],
+            },
+          ],
+          restartPolicy: 'Never',
+        },
+      },
+    },
+  },
+  { ...resourceOptions, dependsOn: databaseService }
+)
+
+const membersFilePath = config.get('membersFilePath')
+  ? config.get('membersFilePath')!
+  : '../../../query-node/mappings/bootstrap/data/members.json'
+const workersFilePath = config.get('workersFilePath')
+  ? config.get('workersFilePath')!
+  : '../../../query-node/mappings/bootstrap/data/workers.json'
+
+const dataBucket = new s3Helpers.FileBucket('bootstrap-data', {
+  files: [
+    { path: membersFilePath, name: 'members.json' },
+    { path: workersFilePath, name: 'workers.json' },
+  ],
+  policy: s3Helpers.publicReadPolicy,
+})
+
+const membersUrl = dataBucket.getUrlForFile('members.json')
+const workersUrl = dataBucket.getUrlForFile('workers.json')
+
+const dataPath = '/joystream/query-node/mappings/bootstrap/data'
+
+const processorJob = new k8s.batch.v1.Job(
+  'processor-migration',
+  {
+    metadata: {
+      namespace: namespaceName,
+    },
+    spec: {
+      backoffLimit: 0,
+      template: {
+        spec: {
+          initContainers: [
+            {
+              name: 'curl-init',
+              image: 'appropriate/curl',
+              command: ['/bin/sh', '-c'],
+              args: [
+                pulumi.interpolate`curl -o ${dataPath}/workers.json ${workersUrl}; curl -o ${dataPath}/members.json ${membersUrl}; ls -al ${dataPath};`,
+              ],
+              volumeMounts: [
+                {
+                  name: 'bootstrap-data',
+                  mountPath: dataPath,
+                },
+              ],
+            },
+          ],
+          containers: [
+            {
+              name: 'processor-migration',
+              image: joystreamAppsImage,
+              imagePullPolicy: 'IfNotPresent',
+              env: [
+                {
+                  name: 'INDEXER_ENDPOINT_URL',
+                  value: `http://localhost:${process.env.WARTHOG_APP_PORT}/graphql`,
+                },
+                { name: 'TYPEORM_HOST', value: 'postgres-db' },
+                { name: 'TYPEORM_DATABASE', value: process.env.DB_NAME! },
+                { name: 'DEBUG', value: 'index-builder:*' },
+                { name: 'PROCESSOR_POLL_INTERVAL', value: '1000' },
+              ],
+              volumeMounts: [
+                {
+                  name: 'bootstrap-data',
+                  mountPath: dataPath,
+                },
+              ],
+              args: ['workspace', 'query-node-root', 'processor:bootstrap'],
+            },
+          ],
+          restartPolicy: 'Never',
+          volumes: [
+            {
+              name: 'bootstrap-data',
+              emptyDir: {},
+            },
+          ],
+        },
+      },
+    },
+  },
+  { ...resourceOptions, dependsOn: migrationJob }
+)
+
+const defsConfig = new configMapFromFile(
+  'defs-config',
+  {
+    filePath: '../../../types/augment/all/defs.json',
+    namespaceName: namespaceName,
+  },
+  resourceOptions
+).configName
+
+const deployment = new k8s.apps.v1.Deployment(
+  name,
+  {
+    metadata: {
+      namespace: namespaceName,
+      labels: appLabels,
+    },
+    spec: {
+      replicas: 1,
+      selector: { matchLabels: appLabels },
+      template: {
+        metadata: {
+          labels: appLabels,
+        },
+        spec: {
+          containers: [
+            {
+              name: 'redis',
+              image: 'redis:6.0-alpine',
+              ports: [{ containerPort: 6379 }],
+            },
+            {
+              name: 'indexer',
+              image: 'joystream/hydra-indexer:2.1.0-beta.9',
+              env: [
+                { name: 'DB_HOST', value: 'postgres-db' },
+                { name: 'DB_NAME', value: process.env.INDEXER_DB_NAME! },
+                { name: 'DB_PASS', value: process.env.DB_PASS! },
+                { name: 'INDEXER_WORKERS', value: '5' },
+                { name: 'REDIS_URI', value: 'redis://localhost:6379/0' },
+                { name: 'DEBUG', value: 'index-builder:*' },
+                { name: 'WS_PROVIDER_ENDPOINT_URI', value: process.env.WS_PROVIDER_ENDPOINT_URI! },
+                { name: 'TYPES_JSON', value: 'types.json' },
+                { name: 'PGUSER', value: process.env.DB_USER! },
+                { name: 'BLOCK_HEIGHT', value: process.env.BLOCK_HEIGHT! },
+              ],
+              volumeMounts: [
+                {
+                  mountPath: '/home/hydra/packages/hydra-indexer/types.json',
+                  name: 'indexer-volume',
+                  subPath: 'fileData',
+                },
+              ],
+              command: ['/bin/sh', '-c'],
+              args: ['yarn db:bootstrap && yarn start:prod'],
+            },
+            {
+              name: 'hydra-indexer-gateway',
+              image: 'joystream/hydra-indexer-gateway:2.1.0-beta.5',
+              env: [
+                { name: 'WARTHOG_STARTER_DB_DATABASE', value: process.env.INDEXER_DB_NAME! },
+                { name: 'WARTHOG_STARTER_DB_HOST', value: 'postgres-db' },
+                { name: 'WARTHOG_STARTER_DB_PASSWORD', value: process.env.DB_PASS! },
+                { name: 'WARTHOG_STARTER_DB_PORT', value: process.env.DB_PORT! },
+                { name: 'WARTHOG_STARTER_DB_USERNAME', value: process.env.DB_USER! },
+                { name: 'WARTHOG_STARTER_REDIS_URI', value: 'redis://localhost:6379/0' },
+                { name: 'WARTHOG_APP_PORT', value: process.env.WARTHOG_APP_PORT! },
+                { name: 'PORT', value: process.env.WARTHOG_APP_PORT! },
+                { name: 'DEBUG', value: '*' },
+              ],
+              ports: [{ containerPort: 4002 }],
+            },
+            {
+              name: 'processor',
+              image: joystreamAppsImage,
+              imagePullPolicy: 'IfNotPresent',
+              env: [
+                {
+                  name: 'INDEXER_ENDPOINT_URL',
+                  value: `http://localhost:${process.env.WARTHOG_APP_PORT}/graphql`,
+                },
+                { name: 'TYPEORM_HOST', value: 'postgres-db' },
+                { name: 'TYPEORM_DATABASE', value: process.env.DB_NAME! },
+                { name: 'DEBUG', value: 'index-builder:*' },
+                { name: 'PROCESSOR_POLL_INTERVAL', value: '1000' },
+              ],
+              volumeMounts: [
+                {
+                  mountPath: '/joystream/query-node/mappings/lib/generated/types/typedefs.json',
+                  name: 'processor-volume',
+                  subPath: 'fileData',
+                },
+              ],
+              command: ['/bin/sh', '-c'],
+              args: ['cd query-node && yarn hydra-processor run -e ../.env'],
+            },
+            {
+              name: 'graphql-server',
+              image: joystreamAppsImage,
+              imagePullPolicy: 'IfNotPresent',
+              env: [
+                { name: 'DB_HOST', value: 'postgres-db' },
+                { name: 'DB_PASS', value: process.env.DB_PASS! },
+                { name: 'DB_USER', value: process.env.DB_USER! },
+                { name: 'DB_PORT', value: process.env.DB_PORT! },
+                { name: 'DB_NAME', value: process.env.DB_NAME! },
+                { name: 'GRAPHQL_SERVER_HOST', value: process.env.GRAPHQL_SERVER_HOST! },
+                { name: 'GRAPHQL_SERVER_PORT', value: process.env.GRAPHQL_SERVER_PORT! },
+              ],
+              ports: [{ name: 'graph-ql-port', containerPort: Number(process.env.GRAPHQL_SERVER_PORT!) }],
+              args: ['workspace', 'query-node-root', 'query-node:start:prod'],
+            },
+          ],
+          volumes: [
+            {
+              name: 'processor-volume',
+              configMap: {
+                name: defsConfig,
+              },
+            },
+            {
+              name: 'indexer-volume',
+              configMap: {
+                name: defsConfig,
+              },
+            },
+          ],
+        },
+      },
+    },
+  },
+  { ...resourceOptions, dependsOn: processorJob }
+)
+
+// Export the Deployment name
+export const deploymentName = deployment.metadata.name
+
+// Create a Service exposing the query-node GraphQL server and indexer gateway ports
+const service = new k8s.core.v1.Service(
+  name,
+  {
+    metadata: {
+      labels: appLabels,
+      namespace: namespaceName,
+      name: 'query-node',
+    },
+    spec: {
+      ports: [
+        { name: 'port-1', port: 8081, targetPort: 'graph-ql-port' },
+        { name: 'port-2', port: 4000, targetPort: 4002 },
+      ],
+      selector: appLabels,
+    },
+  },
+  resourceOptions
+)
+
+// Export the Service name and public LoadBalancer Endpoint
+export const serviceName = service.metadata.name
+
+// When "done", this will print the public IP.
+// export let serviceHostname: pulumi.Output<string>
+
+// serviceHostname = service.status.loadBalancer.ingress[0].hostname
+const lbReady = config.get('isLoadBalancerReady') === 'true'
+const caddy = new CaddyServiceDeployment('caddy-proxy', { lbReady, namespaceName: namespaceName }, resourceOptions)
+
+export const endpoint1 = caddy.primaryEndpoint
+export const endpoint2 = caddy.secondaryEndpoint

+ 17 - 0
devops/infrastructure/query-node/package.json

@@ -0,0 +1,17 @@
+{
+  "name": "query-node",
+  "devDependencies": {
+    "@types/node": "^10.0.0"
+  },
+  "dependencies": {
+    "@pulumi/aws": "^4.0.0",
+    "@pulumi/awsx": "^0.30.0",
+    "@pulumi/eks": "^0.31.0",
+    "@pulumi/kubernetes": "^3.0.0",
+    "@pulumi/pulumi": "^3.0.0",
+    "@pulumi/docker": "^3.0.0",
+    "dotenv": "^10.0.0",
+    "mime": "^2.5.2",
+    "@types/mime": "^2.0.0"
+  }
+}

+ 73 - 0
devops/infrastructure/query-node/s3Helpers.ts

@@ -0,0 +1,73 @@
+import * as fs from 'fs'
+import * as mime from 'mime'
+
+import * as aws from '@pulumi/aws'
+import * as pulumi from '@pulumi/pulumi'
+
+interface FileObject {
+  name: string
+  path: string
+}
+
+export interface FileBucketOpts {
+  files: FileObject[]
+  policy?: (bucket: aws.s3.Bucket) => pulumi.Output<string>
+}
+
+export class FileBucket {
+  public readonly bucket: aws.s3.Bucket
+  public readonly files: { [key: string]: aws.s3.BucketObject }
+  public readonly policy: aws.s3.BucketPolicy | undefined
+
+  private readonly fileContents: { [key: string]: string }
+
+  constructor(bucketName: string, opts: FileBucketOpts) {
+    this.bucket = new aws.s3.Bucket(bucketName)
+    this.fileContents = {}
+    this.files = {}
+    for (const file of opts.files) {
+      this.fileContents[file.name] = fs.readFileSync(file.path).toString()
+      this.files[file.name] = new aws.s3.BucketObject(file.name, {
+        bucket: this.bucket,
+        source: new pulumi.asset.FileAsset(file.path),
+        contentType: mime.getType(file.path) || undefined,
+      })
+    }
+
+    if (opts.policy !== undefined) {
+      // Set the access policy for the bucket so all objects are readable
+      this.policy = new aws.s3.BucketPolicy(`bucketPolicy`, {
+        bucket: this.bucket.bucket,
+        // policy: this.bucket.bucket.apply(publicReadPolicyForBucket)
+        policy: opts.policy(this.bucket),
+      })
+    }
+  }
+
+  getUrlForFile(file: string): pulumi.Output<string> {
+    if (!(file in this.files)) {
+      throw new Error(`Bucket does not have file '${file}'`)
+    }
+
+    return pulumi.all([this.bucket.bucketDomainName, this.files[file].id]).apply(([domain, id]) => `${domain}/${id}`)
+  }
+}
+
+// Create an S3 Bucket Policy to allow public read of all objects in bucket
+export function publicReadPolicy(bucket: aws.s3.Bucket): pulumi.Output<string> {
+  return bucket.bucket.apply((bucketName) =>
+    JSON.stringify({
+      Version: '2012-10-17',
+      Statement: [
+        {
+          Effect: 'Allow',
+          Principal: '*',
+          Action: ['s3:GetObject'],
+          Resource: [
+            `arn:aws:s3:::${bucketName}/*`, // policy refers to bucket name explicitly
+          ],
+        },
+      ],
+    })
+  )
+}

+ 18 - 0
devops/infrastructure/query-node/tsconfig.json

@@ -0,0 +1,18 @@
+{
+    "compilerOptions": {
+        "strict": true,
+        "outDir": "bin",
+        "target": "es2016",
+        "module": "commonjs",
+        "moduleResolution": "node",
+        "sourceMap": true,
+        "experimentalDecorators": true,
+        "pretty": true,
+        "noFallthroughCasesInSwitch": true,
+        "noImplicitReturns": true,
+        "forceConsistentCasingInFileNames": true
+    },
+    "files": [
+        "index.ts"
+    ]
+}

+ 4 - 3
devops/infrastructure/requirements.yml

@@ -1,6 +1,7 @@
 ---
 roles:
-- caddy_ansible.caddy_ansible
+  - caddy_ansible.caddy_ansible
 collections:
-- community.aws
-- amazon.aws
+  - community.aws
+  - amazon.aws
+  - community.docker

+ 115 - 0
devops/infrastructure/single-instance-docker.yml

@@ -0,0 +1,115 @@
+AWSTemplateFormatVersion: 2010-09-09
+
+Parameters:
+  EC2InstanceType:
+    Type: String
+    Default: t2.xlarge
+  EC2AMI:
+    Type: String
+    Default: 'ami-09e67e426f25ce0d7'
+  KeyName:
+    Description: Name of an existing EC2 KeyPair to enable SSH access to the instance
+    Type: 'AWS::EC2::KeyPair::KeyName'
+    Default: 'joystream-key'
+    ConstraintDescription: must be the name of an existing EC2 KeyPair.
+
+Resources:
+  SecurityGroup:
+    Type: AWS::EC2::SecurityGroup
+    Properties:
+      GroupDescription: !Sub 'Internal Security group for validator nodes ${AWS::StackName}'
+      SecurityGroupIngress:
+        - IpProtocol: tcp
+          FromPort: 22
+          ToPort: 22
+          CidrIp: 0.0.0.0/0
+      Tags:
+        - Key: Name
+          Value: !Sub '${AWS::StackName}_validator'
+
+  InstanceLaunchTemplate:
+    Type: AWS::EC2::LaunchTemplate
+    Metadata:
+      AWS::CloudFormation::Init:
+        config:
+          packages:
+            apt:
+              wget: []
+              unzip: []
+    Properties:
+      LaunchTemplateName: !Sub 'LaunchTemplate_${AWS::StackName}'
+      LaunchTemplateData:
+        ImageId: !Ref EC2AMI
+        InstanceType: !Ref EC2InstanceType
+        KeyName: !Ref KeyName
+        SecurityGroupIds:
+          - !GetAtt SecurityGroup.GroupId
+        BlockDeviceMappings:
+          - DeviceName: /dev/sda1
+            Ebs:
+              VolumeSize: '30'
+        UserData:
+          Fn::Base64: !Sub |
+            #!/bin/bash -xe
+
+            # send script output to /tmp so we can debug boot failures
+            exec > /tmp/userdata.log 2>&1
+
+            # Update all packages
+            apt-get update -y
+
+            # Install the updates
+            apt-get upgrade -y
+
+            apt-get install -y apt-transport-https ca-certificates curl gnupg lsb-release
+
+            curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
+
+            echo "deb [arch=arm64 signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
+
+            apt-get update -y
+
+            apt-get install -y docker-ce docker-ce-cli containerd.io
+
+            usermod -aG docker ubuntu
+
+            # Get latest cfn scripts and install them;
+            apt-get install -y python3-setuptools
+            mkdir -p /opt/aws/bin
+            wget https://s3.amazonaws.com/cloudformation-examples/aws-cfn-bootstrap-py3-latest.tar.gz
+            python3 -m easy_install --script-dir /opt/aws/bin aws-cfn-bootstrap-py3-latest.tar.gz
+
+            apt-get install -y python3-pip
+
+            /opt/aws/bin/cfn-signal -e $? -r "Instance Created" '${WaitHandle}'
+
+  Instance:
+    Type: AWS::EC2::Instance
+    Properties:
+      LaunchTemplate:
+        LaunchTemplateId: !Ref InstanceLaunchTemplate
+        Version: !GetAtt InstanceLaunchTemplate.LatestVersionNumber
+      Tags:
+        - Key: Name
+          Value: !Sub '${AWS::StackName}_1'
+
+  WaitHandle:
+    Type: AWS::CloudFormation::WaitConditionHandle
+
+  WaitCondition:
+    Type: AWS::CloudFormation::WaitCondition
+    Properties:
+      Handle: !Ref 'WaitHandle'
+      Timeout: '600'
+      Count: 1
+
+Outputs:
+  PublicIp:
+    Description: The DNS name for the created instance
+    Value: !Sub '${Instance.PublicIp}'
+    Export:
+      Name: !Sub '${AWS::StackName}PublicIp'
+
+  InstanceId:
+    Description: The Instance ID
+    Value: !Ref Instance

+ 50 - 0
joystream-node-armv7.Dockerfile

@@ -0,0 +1,50 @@
+FROM rust:1.52.1-buster AS rust
+RUN rustup self update
+RUN rustup install nightly-2021-03-24 --force
+RUN rustup default nightly-2021-03-24
+RUN rustup target add wasm32-unknown-unknown --toolchain nightly-2021-03-24
+RUN rustup component add --toolchain nightly-2021-03-24 clippy
+RUN apt-get update && \
+  apt-get install -y curl git gcc xz-utils sudo pkg-config unzip clang llvm libc6-dev
+
+FROM rust AS builder
+LABEL description="Compiles all workspace artifacts"
+WORKDIR /joystream
+COPY . /joystream
+
+# Build all cargo crates
+# Ensure our tests and linter pass before actual build
+ENV WASM_BUILD_TOOLCHAIN=nightly-2021-03-24
+RUN apt-get install -y libprotobuf-dev protobuf-compiler
+RUN BUILD_DUMMY_WASM_BINARY=1 cargo clippy --release --all -- -D warnings && \
+    cargo test --release --all && \
+    cargo build --target armv7-unknown-linux-gnueabihf --release
+
+FROM ubuntu:21.04
+LABEL description="Joystream node"
+WORKDIR /joystream
+COPY --from=builder /joystream/target/armv7-unknown-linux-gnueabihf/release/joystream-node /joystream/node
+COPY --from=builder /joystream/target/armv7-unknown-linux-gnueabihf/release/wbuild/joystream-node-runtime/joystream_node_runtime.compact.wasm /joystream/runtime.compact.wasm
+COPY --from=builder /joystream/target/armv7-unknown-linux-gnueabihf/release/chain-spec-builder /joystream/chain-spec-builder
+
+# confirm it works
+RUN /joystream/node --version
+
+# https://manpages.debian.org/stretch/coreutils/b2sum.1.en.html
+# RUN apt-get install coreutils
+# print the blake2 256 hash of the wasm blob
+RUN b2sum -l 256 /joystream/runtime.compact.wasm
+# print the blake2 512 hash of the wasm blob
+RUN b2sum -l 512 /joystream/runtime.compact.wasm
+
+EXPOSE 30333 9933 9944
+
+# Use these volumes to persist chain state and keystore, e.g.:
+# --base-path /data
+# optionally separate keystore (otherwise it will be stored in the base path)
+# --keystore-path /keystore
+# if base-path isn't specified, chain state is stored inside container in ~/.local/share/joystream-node/
+# which is not ideal
+VOLUME ["/data", "/keystore"]
+
+ENTRYPOINT ["/joystream/node"]

+ 1 - 0
package.json

@@ -37,6 +37,7 @@
     "@polkadot/util": "^6.0.5",
     "@polkadot/util-crypto": "^6.0.5",
     "@polkadot/wasm-crypto": "^4.0.2",
+    "warthog": "https://github.com/Joystream/warthog/releases/download/v2.37.2-sumer/joystream-warthog-v2.37.2-sumer.tgz",
     "babel-core": "^7.0.0-bridge.0",
     "typescript": "^3.9.7",
     "bn.js": "^5.1.2",

+ 1 - 1
pioneer/packages/joy-proposals/src/Proposal/VotingSection.tsx

@@ -99,7 +99,7 @@ export default function VotingSection ({
 
   return (
     <>
-      <Header as='h3'>Sumbit your vote</Header>
+      <Header as='h3'>Submit your vote</Header>
       <Divider />
       <VoteButtons>
         { VoteKinds.map((voteKind) =>

+ 6 - 17
query-node/README.md

@@ -60,22 +60,11 @@ The simplest way to run an indexer locally is to run `docker-compose-indexer.yml
 
 Follow the links for more information about the [indexer](https://github.com/Joystream/hydra/tree/master/packages/hydra-indexer/README.md) service and [indexer-api-gateway](https://github.com/Joystream/hydra/tree/master/packages/hydra-indexer-gateway/README.md).
 
-
-
-# Tmp command order
-TODO: remove after integration tests are finished and query node runs without any issues
-```
-# build everything
-yarn
-yarn build
-
+## GraphQL Playground assets url
+Query node's user interface, GraphQL Playground, expects to be served at `/graphql`.
+If you are serving the files at a path like `/query/server/graphql` via an nginx proxy, aliasing, etc., you will need to provide
+the base URL to the query node server via the `GRAPHQL_PLAYGROUND_CDN` environment variable.
 ```
-
-running the processor:
-```
-cp types/augment/all/defs.json query-node/mappings/lib/generated/types/typedefs.json
-docker-compose up -d db
-yarn workspace query-node-root db:create
-yarn workspace query-node-root db:migrate
-
+# use the following when serving playground at `/query/server/graphql`
+GRAPHQL_PLAYGROUND_CDN="query/server" yarn workspace query-node-root query-node:start:dev 
 ```

+ 273 - 102
query-node/generated/graphql-server/generated/binding.ts

@@ -6,49 +6,49 @@ import { IResolvers } from 'graphql-tools/dist/Interfaces'
 import * as schema from  './schema.graphql'
 
 export interface Query {
-    curatorGroups: <T = Array<CuratorGroup>>(args: { offset?: Int | null, limit?: Int | null, where?: CuratorGroupWhereInput | null, orderBy?: CuratorGroupOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    curatorGroups: <T = Array<CuratorGroup>>(args: { offset?: Int | null, limit?: Int | null, where?: CuratorGroupWhereInput | null, orderBy?: Array<CuratorGroupOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     curatorGroupByUniqueInput: <T = CuratorGroup | null>(args: { where: CuratorGroupWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
-    curatorGroupsConnection: <T = CuratorGroupConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: CuratorGroupWhereInput | null, orderBy?: CuratorGroupOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
-    dataObjects: <T = Array<DataObject>>(args: { offset?: Int | null, limit?: Int | null, where?: DataObjectWhereInput | null, orderBy?: DataObjectOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    curatorGroupsConnection: <T = CuratorGroupConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: CuratorGroupWhereInput | null, orderBy?: Array<CuratorGroupOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    dataObjects: <T = Array<DataObject>>(args: { offset?: Int | null, limit?: Int | null, where?: DataObjectWhereInput | null, orderBy?: Array<DataObjectOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     dataObjectByUniqueInput: <T = DataObject | null>(args: { where: DataObjectWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
-    dataObjectsConnection: <T = DataObjectConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: DataObjectWhereInput | null, orderBy?: DataObjectOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
-    featuredVideos: <T = Array<FeaturedVideo>>(args: { offset?: Int | null, limit?: Int | null, where?: FeaturedVideoWhereInput | null, orderBy?: FeaturedVideoOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
-    featuredVideoByUniqueInput: <T = FeaturedVideo | null>(args: { where: FeaturedVideoWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
-    featuredVideosConnection: <T = FeaturedVideoConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: FeaturedVideoWhereInput | null, orderBy?: FeaturedVideoOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
-    channelCategories: <T = Array<ChannelCategory>>(args: { offset?: Int | null, limit?: Int | null, where?: ChannelCategoryWhereInput | null, orderBy?: ChannelCategoryOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    dataObjectsConnection: <T = DataObjectConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: DataObjectWhereInput | null, orderBy?: Array<DataObjectOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    channelCategories: <T = Array<ChannelCategory>>(args: { offset?: Int | null, limit?: Int | null, where?: ChannelCategoryWhereInput | null, orderBy?: Array<ChannelCategoryOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     channelCategoryByUniqueInput: <T = ChannelCategory | null>(args: { where: ChannelCategoryWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
-    channelCategoriesConnection: <T = ChannelCategoryConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: ChannelCategoryWhereInput | null, orderBy?: ChannelCategoryOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
-    channels: <T = Array<Channel>>(args: { offset?: Int | null, limit?: Int | null, where?: ChannelWhereInput | null, orderBy?: ChannelOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    channelCategoriesConnection: <T = ChannelCategoryConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: ChannelCategoryWhereInput | null, orderBy?: Array<ChannelCategoryOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    channels: <T = Array<Channel>>(args: { offset?: Int | null, limit?: Int | null, where?: ChannelWhereInput | null, orderBy?: Array<ChannelOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     channelByUniqueInput: <T = Channel | null>(args: { where: ChannelWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
-    channelsConnection: <T = ChannelConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: ChannelWhereInput | null, orderBy?: ChannelOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
-    languages: <T = Array<Language>>(args: { offset?: Int | null, limit?: Int | null, where?: LanguageWhereInput | null, orderBy?: LanguageOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    channelsConnection: <T = ChannelConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: ChannelWhereInput | null, orderBy?: Array<ChannelOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    languages: <T = Array<Language>>(args: { offset?: Int | null, limit?: Int | null, where?: LanguageWhereInput | null, orderBy?: Array<LanguageOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     languageByUniqueInput: <T = Language | null>(args: { where: LanguageWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
-    languagesConnection: <T = LanguageConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: LanguageWhereInput | null, orderBy?: LanguageOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
-    licenses: <T = Array<License>>(args: { offset?: Int | null, limit?: Int | null, where?: LicenseWhereInput | null, orderBy?: LicenseOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    languagesConnection: <T = LanguageConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: LanguageWhereInput | null, orderBy?: Array<LanguageOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    licenses: <T = Array<License>>(args: { offset?: Int | null, limit?: Int | null, where?: LicenseWhereInput | null, orderBy?: Array<LicenseOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     licenseByUniqueInput: <T = License | null>(args: { where: LicenseWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
-    licensesConnection: <T = LicenseConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: LicenseWhereInput | null, orderBy?: LicenseOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
-    memberships: <T = Array<Membership>>(args: { offset?: Int | null, limit?: Int | null, where?: MembershipWhereInput | null, orderBy?: MembershipOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    licensesConnection: <T = LicenseConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: LicenseWhereInput | null, orderBy?: Array<LicenseOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    memberships: <T = Array<Membership>>(args: { offset?: Int | null, limit?: Int | null, where?: MembershipWhereInput | null, orderBy?: Array<MembershipOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     membershipByUniqueInput: <T = Membership | null>(args: { where: MembershipWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
-    membershipsConnection: <T = MembershipConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: MembershipWhereInput | null, orderBy?: MembershipOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    membershipsConnection: <T = MembershipConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: MembershipWhereInput | null, orderBy?: Array<MembershipOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    nextEntityIds: <T = Array<NextEntityId>>(args: { offset?: Int | null, limit?: Int | null, where?: NextEntityIdWhereInput | null, orderBy?: Array<NextEntityIdOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    nextEntityIdByUniqueInput: <T = NextEntityId | null>(args: { where: NextEntityIdWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
+    nextEntityIdsConnection: <T = NextEntityIdConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: NextEntityIdWhereInput | null, orderBy?: Array<NextEntityIdOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     channelCategoriesByName: <T = Array<ChannelCategoriesByNameFTSOutput>>(args: { whereChannelCategory?: ChannelCategoryWhereInput | null, skip?: Int | null, limit?: Int | null, text: String }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     membersByHandle: <T = Array<MembersByHandleFTSOutput>>(args: { whereMembership?: MembershipWhereInput | null, skip?: Int | null, limit?: Int | null, text: String }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     search: <T = Array<SearchFTSOutput>>(args: { whereVideo?: VideoWhereInput | null, whereChannel?: ChannelWhereInput | null, skip?: Int | null, limit?: Int | null, text: String }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     videoCategoriesByName: <T = Array<VideoCategoriesByNameFTSOutput>>(args: { whereVideoCategory?: VideoCategoryWhereInput | null, skip?: Int | null, limit?: Int | null, text: String }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
-    videoCategories: <T = Array<VideoCategory>>(args: { offset?: Int | null, limit?: Int | null, where?: VideoCategoryWhereInput | null, orderBy?: VideoCategoryOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    videoCategories: <T = Array<VideoCategory>>(args: { offset?: Int | null, limit?: Int | null, where?: VideoCategoryWhereInput | null, orderBy?: Array<VideoCategoryOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     videoCategoryByUniqueInput: <T = VideoCategory | null>(args: { where: VideoCategoryWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
-    videoCategoriesConnection: <T = VideoCategoryConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: VideoCategoryWhereInput | null, orderBy?: VideoCategoryOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
-    videoMediaEncodings: <T = Array<VideoMediaEncoding>>(args: { offset?: Int | null, limit?: Int | null, where?: VideoMediaEncodingWhereInput | null, orderBy?: VideoMediaEncodingOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    videoCategoriesConnection: <T = VideoCategoryConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: VideoCategoryWhereInput | null, orderBy?: Array<VideoCategoryOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    videoMediaEncodings: <T = Array<VideoMediaEncoding>>(args: { offset?: Int | null, limit?: Int | null, where?: VideoMediaEncodingWhereInput | null, orderBy?: Array<VideoMediaEncodingOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     videoMediaEncodingByUniqueInput: <T = VideoMediaEncoding | null>(args: { where: VideoMediaEncodingWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
-    videoMediaEncodingsConnection: <T = VideoMediaEncodingConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: VideoMediaEncodingWhereInput | null, orderBy?: VideoMediaEncodingOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
-    videoMediaMetadata: <T = Array<VideoMediaMetadata>>(args: { offset?: Int | null, limit?: Int | null, where?: VideoMediaMetadataWhereInput | null, orderBy?: VideoMediaMetadataOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    videoMediaEncodingsConnection: <T = VideoMediaEncodingConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: VideoMediaEncodingWhereInput | null, orderBy?: Array<VideoMediaEncodingOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    videoMediaMetadata: <T = Array<VideoMediaMetadata>>(args: { offset?: Int | null, limit?: Int | null, where?: VideoMediaMetadataWhereInput | null, orderBy?: Array<VideoMediaMetadataOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     videoMediaMetadataByUniqueInput: <T = VideoMediaMetadata | null>(args: { where: VideoMediaMetadataWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
-    videoMediaMetadataConnection: <T = VideoMediaMetadataConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: VideoMediaMetadataWhereInput | null, orderBy?: VideoMediaMetadataOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
-    videos: <T = Array<Video>>(args: { offset?: Int | null, limit?: Int | null, where?: VideoWhereInput | null, orderBy?: VideoOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    videoMediaMetadataConnection: <T = VideoMediaMetadataConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: VideoMediaMetadataWhereInput | null, orderBy?: Array<VideoMediaMetadataOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    videos: <T = Array<Video>>(args: { offset?: Int | null, limit?: Int | null, where?: VideoWhereInput | null, orderBy?: Array<VideoOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     videoByUniqueInput: <T = Video | null>(args: { where: VideoWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
-    videosConnection: <T = VideoConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: VideoWhereInput | null, orderBy?: VideoOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
-    workers: <T = Array<Worker>>(args: { offset?: Int | null, limit?: Int | null, where?: WorkerWhereInput | null, orderBy?: WorkerOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    videosConnection: <T = VideoConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: VideoWhereInput | null, orderBy?: Array<VideoOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
+    workers: <T = Array<Worker>>(args: { offset?: Int | null, limit?: Int | null, where?: WorkerWhereInput | null, orderBy?: Array<WorkerOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> ,
     workerByUniqueInput: <T = Worker | null>(args: { where: WorkerWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T | null> ,
-    workersConnection: <T = WorkerConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: WorkerWhereInput | null, orderBy?: WorkerOrderByInput | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> 
+    workersConnection: <T = WorkerConnection>(args: { first?: Int | null, after?: String | null, last?: Int | null, before?: String | null, where?: WorkerWhereInput | null, orderBy?: Array<WorkerOrderByInput> | null }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> 
   }
 
 export interface Mutation {}
@@ -106,6 +106,8 @@ export type DataObjectOrderByInput =   'createdAt_ASC' |
   'typeId_DESC' |
   'size_ASC' |
   'size_DESC' |
+  'liaison_ASC' |
+  'liaison_DESC' |
   'liaisonId_ASC' |
   'liaisonId_DESC' |
   'liaisonJudgement_ASC' |
@@ -115,15 +117,6 @@ export type DataObjectOrderByInput =   'createdAt_ASC' |
   'joystreamContentId_ASC' |
   'joystreamContentId_DESC'
 
-export type FeaturedVideoOrderByInput =   'createdAt_ASC' |
-  'createdAt_DESC' |
-  'updatedAt_ASC' |
-  'updatedAt_DESC' |
-  'deletedAt_ASC' |
-  'deletedAt_DESC' |
-  'videoId_ASC' |
-  'videoId_DESC'
-
 export type ChannelCategoryOrderByInput =   'createdAt_ASC' |
   'createdAt_DESC' |
   'updatedAt_ASC' |
@@ -141,10 +134,16 @@ export type ChannelOrderByInput =   'createdAt_ASC' |
   'updatedAt_DESC' |
   'deletedAt_ASC' |
   'deletedAt_DESC' |
+  'ownerMember_ASC' |
+  'ownerMember_DESC' |
   'ownerMemberId_ASC' |
   'ownerMemberId_DESC' |
+  'ownerCuratorGroup_ASC' |
+  'ownerCuratorGroup_DESC' |
   'ownerCuratorGroupId_ASC' |
   'ownerCuratorGroupId_DESC' |
+  'category_ASC' |
+  'category_DESC' |
   'categoryId_ASC' |
   'categoryId_DESC' |
   'rewardAccount_ASC' |
@@ -153,10 +152,14 @@ export type ChannelOrderByInput =   'createdAt_ASC' |
   'title_DESC' |
   'description_ASC' |
   'description_DESC' |
+  'coverPhotoDataObject_ASC' |
+  'coverPhotoDataObject_DESC' |
   'coverPhotoDataObjectId_ASC' |
   'coverPhotoDataObjectId_DESC' |
   'coverPhotoAvailability_ASC' |
   'coverPhotoAvailability_DESC' |
+  'avatarPhotoDataObject_ASC' |
+  'avatarPhotoDataObject_DESC' |
   'avatarPhotoDataObjectId_ASC' |
   'avatarPhotoDataObjectId_DESC' |
   'avatarPhotoAvailability_ASC' |
@@ -165,6 +168,8 @@ export type ChannelOrderByInput =   'createdAt_ASC' |
   'isPublic_DESC' |
   'isCensored_ASC' |
   'isCensored_DESC' |
+  'language_ASC' |
+  'language_DESC' |
   'languageId_ASC' |
   'languageId_DESC' |
   'createdInBlock_ASC' |
@@ -224,6 +229,15 @@ export type MembershipOrderByInput =   'createdAt_ASC' |
   'subscription_ASC' |
   'subscription_DESC'
 
+export type NextEntityIdOrderByInput =   'createdAt_ASC' |
+  'createdAt_DESC' |
+  'updatedAt_ASC' |
+  'updatedAt_DESC' |
+  'deletedAt_ASC' |
+  'deletedAt_DESC' |
+  'nextId_ASC' |
+  'nextId_DESC'
+
 export type VideoCategoryOrderByInput =   'createdAt_ASC' |
   'createdAt_DESC' |
   'updatedAt_ASC' |
@@ -254,6 +268,8 @@ export type VideoMediaMetadataOrderByInput =   'createdAt_ASC' |
   'updatedAt_DESC' |
   'deletedAt_ASC' |
   'deletedAt_DESC' |
+  'encoding_ASC' |
+  'encoding_DESC' |
   'encodingId_ASC' |
   'encodingId_DESC' |
   'pixelWidth_ASC' |
@@ -271,8 +287,12 @@ export type VideoOrderByInput =   'createdAt_ASC' |
   'updatedAt_DESC' |
   'deletedAt_ASC' |
   'deletedAt_DESC' |
+  'channel_ASC' |
+  'channel_DESC' |
   'channelId_ASC' |
   'channelId_DESC' |
+  'category_ASC' |
+  'category_DESC' |
   'categoryId_ASC' |
   'categoryId_DESC' |
   'title_ASC' |
@@ -281,10 +301,14 @@ export type VideoOrderByInput =   'createdAt_ASC' |
   'description_DESC' |
   'duration_ASC' |
   'duration_DESC' |
+  'thumbnailPhotoDataObject_ASC' |
+  'thumbnailPhotoDataObject_DESC' |
   'thumbnailPhotoDataObjectId_ASC' |
   'thumbnailPhotoDataObjectId_DESC' |
   'thumbnailPhotoAvailability_ASC' |
   'thumbnailPhotoAvailability_DESC' |
+  'language_ASC' |
+  'language_DESC' |
   'languageId_ASC' |
   'languageId_DESC' |
   'hasMarketing_ASC' |
@@ -297,12 +321,18 @@ export type VideoOrderByInput =   'createdAt_ASC' |
   'isCensored_DESC' |
   'isExplicit_ASC' |
   'isExplicit_DESC' |
+  'license_ASC' |
+  'license_DESC' |
   'licenseId_ASC' |
   'licenseId_DESC' |
+  'mediaDataObject_ASC' |
+  'mediaDataObject_DESC' |
   'mediaDataObjectId_ASC' |
   'mediaDataObjectId_DESC' |
   'mediaAvailability_ASC' |
   'mediaAvailability_DESC' |
+  'mediaMetadata_ASC' |
+  'mediaMetadata_DESC' |
   'mediaMetadataId_ASC' |
   'mediaMetadataId_DESC' |
   'createdInBlock_ASC' |
@@ -387,8 +417,16 @@ export interface CuratorGroupWhereInput {
   deletedAt_gte?: DateTime | null
   deletedById_eq?: ID_Input | null
   deletedById_in?: ID_Output[] | ID_Output | null
+  curatorIds_containsAll?: Int[] | Int | null
+  curatorIds_containsNone?: Int[] | Int | null
+  curatorIds_containsAny?: Int[] | Int | null
   isActive_eq?: Boolean | null
   isActive_in?: Boolean[] | Boolean | null
+  channels_none?: ChannelWhereInput | null
+  channels_some?: ChannelWhereInput | null
+  channels_every?: ChannelWhereInput | null
+  AND?: CuratorGroupWhereInput[] | CuratorGroupWhereInput | null
+  OR?: CuratorGroupWhereInput[] | CuratorGroupWhereInput | null
 }
 
 export interface CuratorGroupWhereUniqueInput {
@@ -400,6 +438,7 @@ export interface DataObjectCreateInput {
   createdInBlock: Float
   typeId: Float
   size: Float
+  liaison?: ID_Input | null
   liaisonId?: ID_Input | null
   liaisonJudgement: LiaisonJudgement
   ipfsContentId: String
@@ -445,6 +484,8 @@ export interface DataObjectOwnerCouncilWhereInput {
   dummy_lt?: Int | null
   dummy_lte?: Int | null
   dummy_in?: Int[] | Int | null
+  AND?: DataObjectOwnerCouncilWhereInput[] | DataObjectOwnerCouncilWhereInput | null
+  OR?: DataObjectOwnerCouncilWhereInput[] | DataObjectOwnerCouncilWhereInput | null
 }
 
 export interface DataObjectOwnerCouncilWhereUniqueInput {
@@ -490,6 +531,8 @@ export interface DataObjectOwnerDaoWhereInput {
   dao_lt?: Int | null
   dao_lte?: Int | null
   dao_in?: Int[] | Int | null
+  AND?: DataObjectOwnerDaoWhereInput[] | DataObjectOwnerDaoWhereInput | null
+  OR?: DataObjectOwnerDaoWhereInput[] | DataObjectOwnerDaoWhereInput | null
 }
 
 export interface DataObjectOwnerDaoWhereUniqueInput {
@@ -543,6 +586,8 @@ export interface DataObjectOwnerChannelWhereInput {
   dummy_lt?: Int | null
   dummy_lte?: Int | null
   dummy_in?: Int[] | Int | null
+  AND?: DataObjectOwnerChannelWhereInput[] | DataObjectOwnerChannelWhereInput | null
+  OR?: DataObjectOwnerChannelWhereInput[] | DataObjectOwnerChannelWhereInput | null
 }
 
 export interface DataObjectOwnerChannelWhereUniqueInput {
@@ -596,6 +641,8 @@ export interface DataObjectOwnerMemberWhereInput {
   dummy_lt?: Int | null
   dummy_lte?: Int | null
   dummy_in?: Int[] | Int | null
+  AND?: DataObjectOwnerMemberWhereInput[] | DataObjectOwnerMemberWhereInput | null
+  OR?: DataObjectOwnerMemberWhereInput[] | DataObjectOwnerMemberWhereInput | null
 }
 
 export interface DataObjectOwnerMemberWhereUniqueInput {
@@ -641,6 +688,8 @@ export interface DataObjectOwnerWorkingGroupWhereInput {
   workingGroup_lt?: Int | null
   workingGroup_lte?: Int | null
   workingGroup_in?: Int[] | Int | null
+  AND?: DataObjectOwnerWorkingGroupWhereInput[] | DataObjectOwnerWorkingGroupWhereInput | null
+  OR?: DataObjectOwnerWorkingGroupWhereInput[] | DataObjectOwnerWorkingGroupWhereInput | null
 }
 
 export interface DataObjectOwnerWorkingGroupWhereUniqueInput {
@@ -652,6 +701,7 @@ export interface DataObjectUpdateInput {
   createdInBlock?: Float | null
   typeId?: Float | null
   size?: Float | null
+  liaison?: ID_Input | null
   liaisonId?: ID_Input | null
   liaisonJudgement?: LiaisonJudgement | null
   ipfsContentId?: String | null
@@ -716,53 +766,27 @@ export interface DataObjectWhereInput {
   joystreamContentId_startsWith?: String | null
   joystreamContentId_endsWith?: String | null
   joystreamContentId_in?: String[] | String | null
+  liaison?: WorkerWhereInput | null
+  channelcoverPhotoDataObject_none?: ChannelWhereInput | null
+  channelcoverPhotoDataObject_some?: ChannelWhereInput | null
+  channelcoverPhotoDataObject_every?: ChannelWhereInput | null
+  channelavatarPhotoDataObject_none?: ChannelWhereInput | null
+  channelavatarPhotoDataObject_some?: ChannelWhereInput | null
+  channelavatarPhotoDataObject_every?: ChannelWhereInput | null
+  videothumbnailPhotoDataObject_none?: VideoMediaMetadataWhereInput | null
+  videothumbnailPhotoDataObject_some?: VideoMediaMetadataWhereInput | null
+  videothumbnailPhotoDataObject_every?: VideoMediaMetadataWhereInput | null
+  videomediaDataObject_none?: VideoMediaMetadataWhereInput | null
+  videomediaDataObject_some?: VideoMediaMetadataWhereInput | null
+  videomediaDataObject_every?: VideoMediaMetadataWhereInput | null
+  AND?: DataObjectWhereInput[] | DataObjectWhereInput | null
+  OR?: DataObjectWhereInput[] | DataObjectWhereInput | null
 }
 
 export interface DataObjectWhereUniqueInput {
   id: ID_Output
 }
 
-export interface FeaturedVideoCreateInput {
-  videoId: ID_Output
-}
-
-export interface FeaturedVideoUpdateInput {
-  videoId?: ID_Input | null
-}
-
-export interface FeaturedVideoWhereInput {
-  id_eq?: ID_Input | null
-  id_in?: ID_Output[] | ID_Output | null
-  createdAt_eq?: DateTime | null
-  createdAt_lt?: DateTime | null
-  createdAt_lte?: DateTime | null
-  createdAt_gt?: DateTime | null
-  createdAt_gte?: DateTime | null
-  createdById_eq?: ID_Input | null
-  createdById_in?: ID_Output[] | ID_Output | null
-  updatedAt_eq?: DateTime | null
-  updatedAt_lt?: DateTime | null
-  updatedAt_lte?: DateTime | null
-  updatedAt_gt?: DateTime | null
-  updatedAt_gte?: DateTime | null
-  updatedById_eq?: ID_Input | null
-  updatedById_in?: ID_Output[] | ID_Output | null
-  deletedAt_all?: Boolean | null
-  deletedAt_eq?: DateTime | null
-  deletedAt_lt?: DateTime | null
-  deletedAt_lte?: DateTime | null
-  deletedAt_gt?: DateTime | null
-  deletedAt_gte?: DateTime | null
-  deletedById_eq?: ID_Input | null
-  deletedById_in?: ID_Output[] | ID_Output | null
-  videoId_eq?: ID_Input | null
-  videoId_in?: ID_Output[] | ID_Output | null
-}
-
-export interface FeaturedVideoWhereUniqueInput {
-  id: ID_Output
-}
-
 export interface ChannelCategoryCreateInput {
   name?: String | null
   createdInBlock: Float
@@ -809,6 +833,11 @@ export interface ChannelCategoryWhereInput {
   createdInBlock_lt?: Int | null
   createdInBlock_lte?: Int | null
   createdInBlock_in?: Int[] | Int | null
+  channels_none?: ChannelWhereInput | null
+  channels_some?: ChannelWhereInput | null
+  channels_every?: ChannelWhereInput | null
+  AND?: ChannelCategoryWhereInput[] | ChannelCategoryWhereInput | null
+  OR?: ChannelCategoryWhereInput[] | ChannelCategoryWhereInput | null
 }
 
 export interface ChannelCategoryWhereUniqueInput {
@@ -816,39 +845,51 @@ export interface ChannelCategoryWhereUniqueInput {
 }
 
 export interface ChannelCreateInput {
+  ownerMember?: ID_Input | null
   ownerMemberId?: ID_Input | null
+  ownerCuratorGroup?: ID_Input | null
   ownerCuratorGroupId?: ID_Input | null
+  category?: ID_Input | null
   categoryId?: ID_Input | null
   rewardAccount?: String | null
   title?: String | null
   description?: String | null
+  coverPhotoDataObject?: ID_Input | null
   coverPhotoDataObjectId?: ID_Input | null
   coverPhotoUrls: Array<String>
   coverPhotoAvailability: AssetAvailability
+  avatarPhotoDataObject?: ID_Input | null
   avatarPhotoDataObjectId?: ID_Input | null
   avatarPhotoUrls: Array<String>
   avatarPhotoAvailability: AssetAvailability
   isPublic?: Boolean | null
   isCensored: Boolean
+  language?: ID_Input | null
   languageId?: ID_Input | null
   createdInBlock: Float
 }
 
 export interface ChannelUpdateInput {
+  ownerMember?: ID_Input | null
   ownerMemberId?: ID_Input | null
+  ownerCuratorGroup?: ID_Input | null
   ownerCuratorGroupId?: ID_Input | null
+  category?: ID_Input | null
   categoryId?: ID_Input | null
   rewardAccount?: String | null
   title?: String | null
   description?: String | null
+  coverPhotoDataObject?: ID_Input | null
   coverPhotoDataObjectId?: ID_Input | null
   coverPhotoUrls?: String[] | String | null
   coverPhotoAvailability?: AssetAvailability | null
+  avatarPhotoDataObject?: ID_Input | null
   avatarPhotoDataObjectId?: ID_Input | null
   avatarPhotoUrls?: String[] | String | null
   avatarPhotoAvailability?: AssetAvailability | null
   isPublic?: Boolean | null
   isCensored?: Boolean | null
+  language?: ID_Input | null
   languageId?: ID_Input | null
   createdInBlock?: Float | null
 }
@@ -901,10 +942,16 @@ export interface ChannelWhereInput {
   description_in?: String[] | String | null
   coverPhotoDataObjectId_eq?: ID_Input | null
   coverPhotoDataObjectId_in?: ID_Output[] | ID_Output | null
+  coverPhotoUrls_containsAll?: String[] | String | null
+  coverPhotoUrls_containsNone?: String[] | String | null
+  coverPhotoUrls_containsAny?: String[] | String | null
   coverPhotoAvailability_eq?: AssetAvailability | null
   coverPhotoAvailability_in?: AssetAvailability[] | AssetAvailability | null
   avatarPhotoDataObjectId_eq?: ID_Input | null
   avatarPhotoDataObjectId_in?: ID_Output[] | ID_Output | null
+  avatarPhotoUrls_containsAll?: String[] | String | null
+  avatarPhotoUrls_containsNone?: String[] | String | null
+  avatarPhotoUrls_containsAny?: String[] | String | null
   avatarPhotoAvailability_eq?: AssetAvailability | null
   avatarPhotoAvailability_in?: AssetAvailability[] | AssetAvailability | null
   isPublic_eq?: Boolean | null
@@ -919,6 +966,17 @@ export interface ChannelWhereInput {
   createdInBlock_lt?: Int | null
   createdInBlock_lte?: Int | null
   createdInBlock_in?: Int[] | Int | null
+  ownerMember?: MembershipWhereInput | null
+  ownerCuratorGroup?: CuratorGroupWhereInput | null
+  category?: ChannelCategoryWhereInput | null
+  coverPhotoDataObject?: DataObjectWhereInput | null
+  avatarPhotoDataObject?: DataObjectWhereInput | null
+  language?: LanguageWhereInput | null
+  videos_none?: VideoWhereInput | null
+  videos_some?: VideoWhereInput | null
+  videos_every?: VideoWhereInput | null
+  AND?: ChannelWhereInput[] | ChannelWhereInput | null
+  OR?: ChannelWhereInput[] | ChannelWhereInput | null
 }
 
 export interface ChannelWhereUniqueInput {
@@ -971,6 +1029,14 @@ export interface LanguageWhereInput {
   createdInBlock_lt?: Int | null
   createdInBlock_lte?: Int | null
   createdInBlock_in?: Int[] | Int | null
+  channellanguage_none?: ChannelWhereInput | null
+  channellanguage_some?: ChannelWhereInput | null
+  channellanguage_every?: ChannelWhereInput | null
+  videolanguage_none?: VideoWhereInput | null
+  videolanguage_some?: VideoWhereInput | null
+  videolanguage_every?: VideoWhereInput | null
+  AND?: LanguageWhereInput[] | LanguageWhereInput | null
+  OR?: LanguageWhereInput[] | LanguageWhereInput | null
 }
 
 export interface LanguageWhereUniqueInput {
@@ -1030,6 +1096,11 @@ export interface LicenseWhereInput {
   customText_startsWith?: String | null
   customText_endsWith?: String | null
   customText_in?: String[] | String | null
+  videolanguage_none?: VideoWhereInput | null
+  videolanguage_some?: VideoWhereInput | null
+  videolanguage_every?: VideoWhereInput | null
+  AND?: LicenseWhereInput[] | LicenseWhereInput | null
+  OR?: LicenseWhereInput[] | LicenseWhereInput | null
 }
 
 export interface LicenseWhereUniqueInput {
@@ -1122,6 +1193,11 @@ export interface MembershipWhereInput {
   subscription_lt?: Int | null
   subscription_lte?: Int | null
   subscription_in?: Int[] | Int | null
+  channels_none?: ChannelWhereInput | null
+  channels_some?: ChannelWhereInput | null
+  channels_every?: ChannelWhereInput | null
+  AND?: MembershipWhereInput[] | MembershipWhereInput | null
+  OR?: MembershipWhereInput[] | MembershipWhereInput | null
 }
 
 export interface MembershipWhereUniqueInput {
@@ -1129,6 +1205,53 @@ export interface MembershipWhereUniqueInput {
   handle?: String | null
 }
 
+export interface NextEntityIdCreateInput {
+  nextId: Float
+}
+
+export interface NextEntityIdUpdateInput {
+  nextId?: Float | null
+}
+
+export interface NextEntityIdWhereInput {
+  id_eq?: ID_Input | null
+  id_in?: ID_Output[] | ID_Output | null
+  createdAt_eq?: DateTime | null
+  createdAt_lt?: DateTime | null
+  createdAt_lte?: DateTime | null
+  createdAt_gt?: DateTime | null
+  createdAt_gte?: DateTime | null
+  createdById_eq?: ID_Input | null
+  createdById_in?: ID_Output[] | ID_Output | null
+  updatedAt_eq?: DateTime | null
+  updatedAt_lt?: DateTime | null
+  updatedAt_lte?: DateTime | null
+  updatedAt_gt?: DateTime | null
+  updatedAt_gte?: DateTime | null
+  updatedById_eq?: ID_Input | null
+  updatedById_in?: ID_Output[] | ID_Output | null
+  deletedAt_all?: Boolean | null
+  deletedAt_eq?: DateTime | null
+  deletedAt_lt?: DateTime | null
+  deletedAt_lte?: DateTime | null
+  deletedAt_gt?: DateTime | null
+  deletedAt_gte?: DateTime | null
+  deletedById_eq?: ID_Input | null
+  deletedById_in?: ID_Output[] | ID_Output | null
+  nextId_eq?: Float | null
+  nextId_gt?: Float | null
+  nextId_gte?: Float | null
+  nextId_lt?: Float | null
+  nextId_lte?: Float | null
+  nextId_in?: Float[] | Float | null
+  AND?: NextEntityIdWhereInput[] | NextEntityIdWhereInput | null
+  OR?: NextEntityIdWhereInput[] | NextEntityIdWhereInput | null
+}
+
+export interface NextEntityIdWhereUniqueInput {
+  id: ID_Output
+}
+
 export interface VideoCategoryCreateInput {
   name?: String | null
   createdInBlock: Float
@@ -1175,6 +1298,11 @@ export interface VideoCategoryWhereInput {
   createdInBlock_lt?: Int | null
   createdInBlock_lte?: Int | null
   createdInBlock_in?: Int[] | Int | null
+  videos_none?: VideoWhereInput | null
+  videos_some?: VideoWhereInput | null
+  videos_every?: VideoWhereInput | null
+  AND?: VideoCategoryWhereInput[] | VideoCategoryWhereInput | null
+  OR?: VideoCategoryWhereInput[] | VideoCategoryWhereInput | null
 }
 
 export interface VideoCategoryWhereUniqueInput {
@@ -1182,24 +1310,31 @@ export interface VideoCategoryWhereUniqueInput {
 }
 
 export interface VideoCreateInput {
+  channel?: ID_Input | null
   channelId?: ID_Input | null
+  category?: ID_Input | null
   categoryId?: ID_Input | null
   title?: String | null
   description?: String | null
   duration?: Float | null
+  thumbnailPhotoDataObject?: ID_Input | null
   thumbnailPhotoDataObjectId?: ID_Input | null
   thumbnailPhotoUrls: Array<String>
   thumbnailPhotoAvailability: AssetAvailability
+  language?: ID_Input | null
   languageId?: ID_Input | null
   hasMarketing?: Boolean | null
   publishedBeforeJoystream?: DateTime | null
   isPublic?: Boolean | null
   isCensored: Boolean
   isExplicit?: Boolean | null
+  license?: ID_Input | null
   licenseId?: ID_Input | null
+  mediaDataObject?: ID_Input | null
   mediaDataObjectId?: ID_Input | null
   mediaUrls: Array<String>
   mediaAvailability: AssetAvailability
+  mediaMetadata?: ID_Input | null
   mediaMetadataId?: ID_Input | null
   createdInBlock: Float
   isFeatured: Boolean
@@ -1257,6 +1392,11 @@ export interface VideoMediaEncodingWhereInput {
   mimeMediaType_startsWith?: String | null
   mimeMediaType_endsWith?: String | null
   mimeMediaType_in?: String[] | String | null
+  videomediametadataencoding_none?: VideoMediaMetadataWhereInput | null
+  videomediametadataencoding_some?: VideoMediaMetadataWhereInput | null
+  videomediametadataencoding_every?: VideoMediaMetadataWhereInput | null
+  AND?: VideoMediaEncodingWhereInput[] | VideoMediaEncodingWhereInput | null
+  OR?: VideoMediaEncodingWhereInput[] | VideoMediaEncodingWhereInput | null
 }
 
 export interface VideoMediaEncodingWhereUniqueInput {
@@ -1264,6 +1404,7 @@ export interface VideoMediaEncodingWhereUniqueInput {
 }
 
 export interface VideoMediaMetadataCreateInput {
+  encoding?: ID_Input | null
   encodingId?: ID_Input | null
   pixelWidth?: Float | null
   pixelHeight?: Float | null
@@ -1272,6 +1413,7 @@ export interface VideoMediaMetadataCreateInput {
 }
 
 export interface VideoMediaMetadataUpdateInput {
+  encoding?: ID_Input | null
   encodingId?: ID_Input | null
   pixelWidth?: Float | null
   pixelHeight?: Float | null
@@ -1330,6 +1472,10 @@ export interface VideoMediaMetadataWhereInput {
   createdInBlock_lt?: Int | null
   createdInBlock_lte?: Int | null
   createdInBlock_in?: Int[] | Int | null
+  encoding?: VideoMediaEncodingWhereInput | null
+  video?: VideoWhereInput | null
+  AND?: VideoMediaMetadataWhereInput[] | VideoMediaMetadataWhereInput | null
+  OR?: VideoMediaMetadataWhereInput[] | VideoMediaMetadataWhereInput | null
 }
 
 export interface VideoMediaMetadataWhereUniqueInput {
@@ -1337,24 +1483,31 @@ export interface VideoMediaMetadataWhereUniqueInput {
 }
 
 export interface VideoUpdateInput {
+  channel?: ID_Input | null
   channelId?: ID_Input | null
+  category?: ID_Input | null
   categoryId?: ID_Input | null
   title?: String | null
   description?: String | null
   duration?: Float | null
+  thumbnailPhotoDataObject?: ID_Input | null
   thumbnailPhotoDataObjectId?: ID_Input | null
   thumbnailPhotoUrls?: String[] | String | null
   thumbnailPhotoAvailability?: AssetAvailability | null
+  language?: ID_Input | null
   languageId?: ID_Input | null
   hasMarketing?: Boolean | null
   publishedBeforeJoystream?: DateTime | null
   isPublic?: Boolean | null
   isCensored?: Boolean | null
   isExplicit?: Boolean | null
+  license?: ID_Input | null
   licenseId?: ID_Input | null
+  mediaDataObject?: ID_Input | null
   mediaDataObjectId?: ID_Input | null
   mediaUrls?: String[] | String | null
   mediaAvailability?: AssetAvailability | null
+  mediaMetadata?: ID_Input | null
   mediaMetadataId?: ID_Input | null
   createdInBlock?: Float | null
   isFeatured?: Boolean | null
@@ -1407,6 +1560,9 @@ export interface VideoWhereInput {
   duration_in?: Int[] | Int | null
   thumbnailPhotoDataObjectId_eq?: ID_Input | null
   thumbnailPhotoDataObjectId_in?: ID_Output[] | ID_Output | null
+  thumbnailPhotoUrls_containsAll?: String[] | String | null
+  thumbnailPhotoUrls_containsNone?: String[] | String | null
+  thumbnailPhotoUrls_containsAny?: String[] | String | null
   thumbnailPhotoAvailability_eq?: AssetAvailability | null
   thumbnailPhotoAvailability_in?: AssetAvailability[] | AssetAvailability | null
   languageId_eq?: ID_Input | null
@@ -1428,6 +1584,9 @@ export interface VideoWhereInput {
   licenseId_in?: ID_Output[] | ID_Output | null
   mediaDataObjectId_eq?: ID_Input | null
   mediaDataObjectId_in?: ID_Output[] | ID_Output | null
+  mediaUrls_containsAll?: String[] | String | null
+  mediaUrls_containsNone?: String[] | String | null
+  mediaUrls_containsAny?: String[] | String | null
   mediaAvailability_eq?: AssetAvailability | null
   mediaAvailability_in?: AssetAvailability[] | AssetAvailability | null
   mediaMetadataId_eq?: ID_Input | null
@@ -1440,6 +1599,15 @@ export interface VideoWhereInput {
   createdInBlock_in?: Int[] | Int | null
   isFeatured_eq?: Boolean | null
   isFeatured_in?: Boolean[] | Boolean | null
+  channel?: ChannelWhereInput | null
+  category?: VideoCategoryWhereInput | null
+  thumbnailPhotoDataObject?: DataObjectWhereInput | null
+  language?: LanguageWhereInput | null
+  license?: LicenseWhereInput | null
+  mediaDataObject?: DataObjectWhereInput | null
+  mediaMetadata?: VideoMediaMetadataWhereInput | null
+  AND?: VideoWhereInput[] | VideoWhereInput | null
+  OR?: VideoWhereInput[] | VideoWhereInput | null
 }
 
 export interface VideoWhereUniqueInput {
@@ -1499,6 +1667,11 @@ export interface WorkerWhereInput {
   metadata_startsWith?: String | null
   metadata_endsWith?: String | null
   metadata_in?: String[] | String | null
+  dataObjects_none?: DataObjectWhereInput | null
+  dataObjects_some?: DataObjectWhereInput | null
+  dataObjects_every?: DataObjectWhereInput | null
+  AND?: WorkerWhereInput[] | WorkerWhereInput | null
+  OR?: WorkerWhereInput[] | WorkerWhereInput | null
 }
 
 export interface WorkerWhereUniqueInput {
@@ -1628,30 +1801,6 @@ export interface DataObjectOwnerWorkingGroup {
   workingGroup: Int
 }
 
-export interface FeaturedVideo extends BaseGraphQLObject {
-  id: ID_Output
-  createdAt: DateTime
-  createdById: String
-  updatedAt?: DateTime | null
-  updatedById?: String | null
-  deletedAt?: DateTime | null
-  deletedById?: String | null
-  version: Int
-  video: Video
-  videoId: String
-}
-
-export interface FeaturedVideoConnection {
-  totalCount: Int
-  edges: Array<FeaturedVideoEdge>
-  pageInfo: PageInfo
-}
-
-export interface FeaturedVideoEdge {
-  node: FeaturedVideo
-  cursor: String
-}
-
 export interface Channel extends BaseGraphQLObject {
   id: ID_Output
   createdAt: DateTime
@@ -1827,6 +1976,29 @@ export interface MembershipEdge {
   cursor: String
 }
 
+export interface NextEntityId extends BaseGraphQLObject {
+  id: ID_Output
+  createdAt: DateTime
+  createdById: String
+  updatedAt?: DateTime | null
+  updatedById?: String | null
+  deletedAt?: DateTime | null
+  deletedById?: String | null
+  version: Int
+  nextId: Float
+}
+
+export interface NextEntityIdConnection {
+  totalCount: Int
+  edges: Array<NextEntityIdEdge>
+  pageInfo: PageInfo
+}
+
+export interface NextEntityIdEdge {
+  node: NextEntityId
+  cursor: String
+}
+
 export interface PageInfo {
   hasNextPage: Boolean
   hasPreviousPage: Boolean
@@ -1889,7 +2061,6 @@ export interface Video extends BaseGraphQLObject {
   mediaMetadataId?: String | null
   createdInBlock: Int
   isFeatured: Boolean
-  featured?: FeaturedVideo | null
 }
 
 export interface VideoCategoriesByNameFTSOutput {

File diff suppressed because it is too large
+ 304 - 392
query-node/generated/graphql-server/generated/classes.ts


+ 277 - 104
query-node/generated/graphql-server/generated/schema.graphql

@@ -136,8 +136,16 @@ input CuratorGroupWhereInput {
   deletedAt_gte: DateTime
   deletedById_eq: ID
   deletedById_in: [ID!]
+  curatorIds_containsAll: [Int!]
+  curatorIds_containsNone: [Int!]
+  curatorIds_containsAny: [Int!]
   isActive_eq: Boolean
   isActive_in: [Boolean!]
+  channels_none: ChannelWhereInput
+  channels_some: ChannelWhereInput
+  channels_every: ChannelWhereInput
+  AND: [CuratorGroupWhereInput!]
+  OR: [CuratorGroupWhereInput!]
 }
 
 input CuratorGroupWhereUniqueInput {
@@ -194,6 +202,7 @@ input DataObjectCreateInput {
   createdInBlock: Float!
   typeId: Float!
   size: Float!
+  liaison: ID
   liaisonId: ID
   liaisonJudgement: LiaisonJudgement!
   ipfsContentId: String!
@@ -218,6 +227,8 @@ enum DataObjectOrderByInput {
   typeId_DESC
   size_ASC
   size_DESC
+  liaison_ASC
+  liaison_DESC
   liaisonId_ASC
   liaisonId_DESC
   liaisonJudgement_ASC
@@ -274,6 +285,8 @@ input DataObjectOwnerCouncilWhereInput {
   dummy_lt: Int
   dummy_lte: Int
   dummy_in: [Int!]
+  AND: [DataObjectOwnerCouncilWhereInput!]
+  OR: [DataObjectOwnerCouncilWhereInput!]
 }
 
 input DataObjectOwnerCouncilWhereUniqueInput {
@@ -324,6 +337,8 @@ input DataObjectOwnerDaoWhereInput {
   dao_lt: Int
   dao_lte: Int
   dao_in: [Int!]
+  AND: [DataObjectOwnerDaoWhereInput!]
+  OR: [DataObjectOwnerDaoWhereInput!]
 }
 
 input DataObjectOwnerDaoWhereUniqueInput {
@@ -385,6 +400,8 @@ input DataObjectOwnerChannelWhereInput {
   dummy_lt: Int
   dummy_lte: Int
   dummy_in: [Int!]
+  AND: [DataObjectOwnerChannelWhereInput!]
+  OR: [DataObjectOwnerChannelWhereInput!]
 }
 
 input DataObjectOwnerChannelWhereUniqueInput {
@@ -446,6 +463,8 @@ input DataObjectOwnerMemberWhereInput {
   dummy_lt: Int
   dummy_lte: Int
   dummy_in: [Int!]
+  AND: [DataObjectOwnerMemberWhereInput!]
+  OR: [DataObjectOwnerMemberWhereInput!]
 }
 
 input DataObjectOwnerMemberWhereUniqueInput {
@@ -496,6 +515,8 @@ input DataObjectOwnerWorkingGroupWhereInput {
   workingGroup_lt: Int
   workingGroup_lte: Int
   workingGroup_in: [Int!]
+  AND: [DataObjectOwnerWorkingGroupWhereInput!]
+  OR: [DataObjectOwnerWorkingGroupWhereInput!]
 }
 
 input DataObjectOwnerWorkingGroupWhereUniqueInput {
@@ -507,6 +528,7 @@ input DataObjectUpdateInput {
   createdInBlock: Float
   typeId: Float
   size: Float
+  liaison: ID
   liaisonId: ID
   liaisonJudgement: LiaisonJudgement
   ipfsContentId: String
@@ -571,6 +593,21 @@ input DataObjectWhereInput {
   joystreamContentId_startsWith: String
   joystreamContentId_endsWith: String
   joystreamContentId_in: [String!]
+  liaison: WorkerWhereInput
+  channelcoverPhotoDataObject_none: ChannelWhereInput
+  channelcoverPhotoDataObject_some: ChannelWhereInput
+  channelcoverPhotoDataObject_every: ChannelWhereInput
+  channelavatarPhotoDataObject_none: ChannelWhereInput
+  channelavatarPhotoDataObject_some: ChannelWhereInput
+  channelavatarPhotoDataObject_every: ChannelWhereInput
+  videothumbnailPhotoDataObject_none: VideoMediaMetadataWhereInput
+  videothumbnailPhotoDataObject_some: VideoMediaMetadataWhereInput
+  videothumbnailPhotoDataObject_every: VideoMediaMetadataWhereInput
+  videomediaDataObject_none: VideoMediaMetadataWhereInput
+  videomediaDataObject_some: VideoMediaMetadataWhereInput
+  videomediaDataObject_every: VideoMediaMetadataWhereInput
+  AND: [DataObjectWhereInput!]
+  OR: [DataObjectWhereInput!]
 }
 
 input DataObjectWhereUniqueInput {
@@ -586,82 +623,6 @@ interface DeleteResponse {
   id: ID!
 }
 
-type FeaturedVideo implements BaseGraphQLObject {
-  id: ID!
-  createdAt: DateTime!
-  createdById: String!
-  updatedAt: DateTime
-  updatedById: String
-  deletedAt: DateTime
-  deletedById: String
-  version: Int!
-  video: Video!
-  videoId: String!
-}
-
-type FeaturedVideoConnection {
-  totalCount: Int!
-  edges: [FeaturedVideoEdge!]!
-  pageInfo: PageInfo!
-}
-
-input FeaturedVideoCreateInput {
-  videoId: ID!
-}
-
-type FeaturedVideoEdge {
-  node: FeaturedVideo!
-  cursor: String!
-}
-
-enum FeaturedVideoOrderByInput {
-  createdAt_ASC
-  createdAt_DESC
-  updatedAt_ASC
-  updatedAt_DESC
-  deletedAt_ASC
-  deletedAt_DESC
-  videoId_ASC
-  videoId_DESC
-}
-
-input FeaturedVideoUpdateInput {
-  videoId: ID
-}
-
-input FeaturedVideoWhereInput {
-  id_eq: ID
-  id_in: [ID!]
-  createdAt_eq: DateTime
-  createdAt_lt: DateTime
-  createdAt_lte: DateTime
-  createdAt_gt: DateTime
-  createdAt_gte: DateTime
-  createdById_eq: ID
-  createdById_in: [ID!]
-  updatedAt_eq: DateTime
-  updatedAt_lt: DateTime
-  updatedAt_lte: DateTime
-  updatedAt_gt: DateTime
-  updatedAt_gte: DateTime
-  updatedById_eq: ID
-  updatedById_in: [ID!]
-  deletedAt_all: Boolean
-  deletedAt_eq: DateTime
-  deletedAt_lt: DateTime
-  deletedAt_lte: DateTime
-  deletedAt_gt: DateTime
-  deletedAt_gte: DateTime
-  deletedById_eq: ID
-  deletedById_in: [ID!]
-  videoId_eq: ID
-  videoId_in: [ID!]
-}
-
-input FeaturedVideoWhereUniqueInput {
-  id: ID!
-}
-
 type Channel implements BaseGraphQLObject {
   id: ID!
   createdAt: DateTime!
@@ -810,6 +771,11 @@ input ChannelCategoryWhereInput {
   createdInBlock_lt: Int
   createdInBlock_lte: Int
   createdInBlock_in: [Int!]
+  channels_none: ChannelWhereInput
+  channels_some: ChannelWhereInput
+  channels_every: ChannelWhereInput
+  AND: [ChannelCategoryWhereInput!]
+  OR: [ChannelCategoryWhereInput!]
 }
 
 input ChannelCategoryWhereUniqueInput {
@@ -823,20 +789,26 @@ type ChannelConnection {
 }
 
 input ChannelCreateInput {
+  ownerMember: ID
   ownerMemberId: ID
+  ownerCuratorGroup: ID
   ownerCuratorGroupId: ID
+  category: ID
   categoryId: ID
   rewardAccount: String
   title: String
   description: String
+  coverPhotoDataObject: ID
   coverPhotoDataObjectId: ID
   coverPhotoUrls: [String!]!
   coverPhotoAvailability: AssetAvailability!
+  avatarPhotoDataObject: ID
   avatarPhotoDataObjectId: ID
   avatarPhotoUrls: [String!]!
   avatarPhotoAvailability: AssetAvailability!
   isPublic: Boolean
   isCensored: Boolean!
+  language: ID
   languageId: ID
   createdInBlock: Float!
 }
@@ -853,10 +825,16 @@ enum ChannelOrderByInput {
   updatedAt_DESC
   deletedAt_ASC
   deletedAt_DESC
+  ownerMember_ASC
+  ownerMember_DESC
   ownerMemberId_ASC
   ownerMemberId_DESC
+  ownerCuratorGroup_ASC
+  ownerCuratorGroup_DESC
   ownerCuratorGroupId_ASC
   ownerCuratorGroupId_DESC
+  category_ASC
+  category_DESC
   categoryId_ASC
   categoryId_DESC
   rewardAccount_ASC
@@ -865,10 +843,14 @@ enum ChannelOrderByInput {
   title_DESC
   description_ASC
   description_DESC
+  coverPhotoDataObject_ASC
+  coverPhotoDataObject_DESC
   coverPhotoDataObjectId_ASC
   coverPhotoDataObjectId_DESC
   coverPhotoAvailability_ASC
   coverPhotoAvailability_DESC
+  avatarPhotoDataObject_ASC
+  avatarPhotoDataObject_DESC
   avatarPhotoDataObjectId_ASC
   avatarPhotoDataObjectId_DESC
   avatarPhotoAvailability_ASC
@@ -877,6 +859,8 @@ enum ChannelOrderByInput {
   isPublic_DESC
   isCensored_ASC
   isCensored_DESC
+  language_ASC
+  language_DESC
   languageId_ASC
   languageId_DESC
   createdInBlock_ASC
@@ -884,20 +868,26 @@ enum ChannelOrderByInput {
 }
 
 input ChannelUpdateInput {
+  ownerMember: ID
   ownerMemberId: ID
+  ownerCuratorGroup: ID
   ownerCuratorGroupId: ID
+  category: ID
   categoryId: ID
   rewardAccount: String
   title: String
   description: String
+  coverPhotoDataObject: ID
   coverPhotoDataObjectId: ID
   coverPhotoUrls: [String!]
   coverPhotoAvailability: AssetAvailability
+  avatarPhotoDataObject: ID
   avatarPhotoDataObjectId: ID
   avatarPhotoUrls: [String!]
   avatarPhotoAvailability: AssetAvailability
   isPublic: Boolean
   isCensored: Boolean
+  language: ID
   languageId: ID
   createdInBlock: Float
 }
@@ -950,10 +940,16 @@ input ChannelWhereInput {
   description_in: [String!]
   coverPhotoDataObjectId_eq: ID
   coverPhotoDataObjectId_in: [ID!]
+  coverPhotoUrls_containsAll: [String!]
+  coverPhotoUrls_containsNone: [String!]
+  coverPhotoUrls_containsAny: [String!]
   coverPhotoAvailability_eq: AssetAvailability
   coverPhotoAvailability_in: [AssetAvailability!]
   avatarPhotoDataObjectId_eq: ID
   avatarPhotoDataObjectId_in: [ID!]
+  avatarPhotoUrls_containsAll: [String!]
+  avatarPhotoUrls_containsNone: [String!]
+  avatarPhotoUrls_containsAny: [String!]
   avatarPhotoAvailability_eq: AssetAvailability
   avatarPhotoAvailability_in: [AssetAvailability!]
   isPublic_eq: Boolean
@@ -968,6 +964,17 @@ input ChannelWhereInput {
   createdInBlock_lt: Int
   createdInBlock_lte: Int
   createdInBlock_in: [Int!]
+  ownerMember: MembershipWhereInput
+  ownerCuratorGroup: CuratorGroupWhereInput
+  category: ChannelCategoryWhereInput
+  coverPhotoDataObject: DataObjectWhereInput
+  avatarPhotoDataObject: DataObjectWhereInput
+  language: LanguageWhereInput
+  videos_none: VideoWhereInput
+  videos_some: VideoWhereInput
+  videos_every: VideoWhereInput
+  AND: [ChannelWhereInput!]
+  OR: [ChannelWhereInput!]
 }
 
 input ChannelWhereUniqueInput {
@@ -1066,6 +1073,14 @@ input LanguageWhereInput {
   createdInBlock_lt: Int
   createdInBlock_lte: Int
   createdInBlock_in: [Int!]
+  channellanguage_none: ChannelWhereInput
+  channellanguage_some: ChannelWhereInput
+  channellanguage_every: ChannelWhereInput
+  videolanguage_none: VideoWhereInput
+  videolanguage_some: VideoWhereInput
+  videolanguage_every: VideoWhereInput
+  AND: [LanguageWhereInput!]
+  OR: [LanguageWhereInput!]
 }
 
 input LanguageWhereUniqueInput {
@@ -1177,6 +1192,11 @@ input LicenseWhereInput {
   customText_startsWith: String
   customText_endsWith: String
   customText_in: [String!]
+  videolanguage_none: VideoWhereInput
+  videolanguage_some: VideoWhereInput
+  videolanguage_every: VideoWhereInput
+  AND: [LicenseWhereInput!]
+  OR: [LicenseWhereInput!]
 }
 
 input LicenseWhereUniqueInput {
@@ -1357,6 +1377,11 @@ input MembershipWhereInput {
   subscription_lt: Int
   subscription_lte: Int
   subscription_in: [Int!]
+  channels_none: ChannelWhereInput
+  channels_some: ChannelWhereInput
+  channels_every: ChannelWhereInput
+  AND: [MembershipWhereInput!]
+  OR: [MembershipWhereInput!]
 }
 
 input MembershipWhereUniqueInput {
@@ -1364,6 +1389,89 @@ input MembershipWhereUniqueInput {
   handle: String
 }
 
+type NextEntityId implements BaseGraphQLObject {
+  id: ID!
+  createdAt: DateTime!
+  createdById: String!
+  updatedAt: DateTime
+  updatedById: String
+  deletedAt: DateTime
+  deletedById: String
+  version: Int!
+
+  """Next deterministic id for entities without custom id"""
+  nextId: Float!
+}
+
+type NextEntityIdConnection {
+  totalCount: Int!
+  edges: [NextEntityIdEdge!]!
+  pageInfo: PageInfo!
+}
+
+input NextEntityIdCreateInput {
+  nextId: Float!
+}
+
+type NextEntityIdEdge {
+  node: NextEntityId!
+  cursor: String!
+}
+
+enum NextEntityIdOrderByInput {
+  createdAt_ASC
+  createdAt_DESC
+  updatedAt_ASC
+  updatedAt_DESC
+  deletedAt_ASC
+  deletedAt_DESC
+  nextId_ASC
+  nextId_DESC
+}
+
+input NextEntityIdUpdateInput {
+  nextId: Float
+}
+
+input NextEntityIdWhereInput {
+  id_eq: ID
+  id_in: [ID!]
+  createdAt_eq: DateTime
+  createdAt_lt: DateTime
+  createdAt_lte: DateTime
+  createdAt_gt: DateTime
+  createdAt_gte: DateTime
+  createdById_eq: ID
+  createdById_in: [ID!]
+  updatedAt_eq: DateTime
+  updatedAt_lt: DateTime
+  updatedAt_lte: DateTime
+  updatedAt_gt: DateTime
+  updatedAt_gte: DateTime
+  updatedById_eq: ID
+  updatedById_in: [ID!]
+  deletedAt_all: Boolean
+  deletedAt_eq: DateTime
+  deletedAt_lt: DateTime
+  deletedAt_lte: DateTime
+  deletedAt_gt: DateTime
+  deletedAt_gte: DateTime
+  deletedById_eq: ID
+  deletedById_in: [ID!]
+  nextId_eq: Float
+  nextId_gt: Float
+  nextId_gte: Float
+  nextId_lt: Float
+  nextId_lte: Float
+  nextId_in: [Float!]
+  AND: [NextEntityIdWhereInput!]
+  OR: [NextEntityIdWhereInput!]
+}
+
+input NextEntityIdWhereUniqueInput {
+  id: ID!
+}
+
 type PageInfo {
   hasNextPage: Boolean!
   hasPreviousPage: Boolean!
@@ -1379,49 +1487,49 @@ type ProcessorState {
 }
 
 type Query {
-  curatorGroups(offset: Int, limit: Int = 50, where: CuratorGroupWhereInput, orderBy: CuratorGroupOrderByInput): [CuratorGroup!]!
+  curatorGroups(offset: Int, limit: Int = 50, where: CuratorGroupWhereInput, orderBy: [CuratorGroupOrderByInput!]): [CuratorGroup!]!
   curatorGroupByUniqueInput(where: CuratorGroupWhereUniqueInput!): CuratorGroup
-  curatorGroupsConnection(first: Int, after: String, last: Int, before: String, where: CuratorGroupWhereInput, orderBy: CuratorGroupOrderByInput): CuratorGroupConnection!
-  dataObjects(offset: Int, limit: Int = 50, where: DataObjectWhereInput, orderBy: DataObjectOrderByInput): [DataObject!]!
+  curatorGroupsConnection(first: Int, after: String, last: Int, before: String, where: CuratorGroupWhereInput, orderBy: [CuratorGroupOrderByInput!]): CuratorGroupConnection!
+  dataObjects(offset: Int, limit: Int = 50, where: DataObjectWhereInput, orderBy: [DataObjectOrderByInput!]): [DataObject!]!
   dataObjectByUniqueInput(where: DataObjectWhereUniqueInput!): DataObject
-  dataObjectsConnection(first: Int, after: String, last: Int, before: String, where: DataObjectWhereInput, orderBy: DataObjectOrderByInput): DataObjectConnection!
-  featuredVideos(offset: Int, limit: Int = 50, where: FeaturedVideoWhereInput, orderBy: FeaturedVideoOrderByInput): [FeaturedVideo!]!
-  featuredVideoByUniqueInput(where: FeaturedVideoWhereUniqueInput!): FeaturedVideo
-  featuredVideosConnection(first: Int, after: String, last: Int, before: String, where: FeaturedVideoWhereInput, orderBy: FeaturedVideoOrderByInput): FeaturedVideoConnection!
-  channelCategories(offset: Int, limit: Int = 50, where: ChannelCategoryWhereInput, orderBy: ChannelCategoryOrderByInput): [ChannelCategory!]!
+  dataObjectsConnection(first: Int, after: String, last: Int, before: String, where: DataObjectWhereInput, orderBy: [DataObjectOrderByInput!]): DataObjectConnection!
+  channelCategories(offset: Int, limit: Int = 50, where: ChannelCategoryWhereInput, orderBy: [ChannelCategoryOrderByInput!]): [ChannelCategory!]!
   channelCategoryByUniqueInput(where: ChannelCategoryWhereUniqueInput!): ChannelCategory
-  channelCategoriesConnection(first: Int, after: String, last: Int, before: String, where: ChannelCategoryWhereInput, orderBy: ChannelCategoryOrderByInput): ChannelCategoryConnection!
-  channels(offset: Int, limit: Int = 50, where: ChannelWhereInput, orderBy: ChannelOrderByInput): [Channel!]!
+  channelCategoriesConnection(first: Int, after: String, last: Int, before: String, where: ChannelCategoryWhereInput, orderBy: [ChannelCategoryOrderByInput!]): ChannelCategoryConnection!
+  channels(offset: Int, limit: Int = 50, where: ChannelWhereInput, orderBy: [ChannelOrderByInput!]): [Channel!]!
   channelByUniqueInput(where: ChannelWhereUniqueInput!): Channel
-  channelsConnection(first: Int, after: String, last: Int, before: String, where: ChannelWhereInput, orderBy: ChannelOrderByInput): ChannelConnection!
-  languages(offset: Int, limit: Int = 50, where: LanguageWhereInput, orderBy: LanguageOrderByInput): [Language!]!
+  channelsConnection(first: Int, after: String, last: Int, before: String, where: ChannelWhereInput, orderBy: [ChannelOrderByInput!]): ChannelConnection!
+  languages(offset: Int, limit: Int = 50, where: LanguageWhereInput, orderBy: [LanguageOrderByInput!]): [Language!]!
   languageByUniqueInput(where: LanguageWhereUniqueInput!): Language
-  languagesConnection(first: Int, after: String, last: Int, before: String, where: LanguageWhereInput, orderBy: LanguageOrderByInput): LanguageConnection!
-  licenses(offset: Int, limit: Int = 50, where: LicenseWhereInput, orderBy: LicenseOrderByInput): [License!]!
+  languagesConnection(first: Int, after: String, last: Int, before: String, where: LanguageWhereInput, orderBy: [LanguageOrderByInput!]): LanguageConnection!
+  licenses(offset: Int, limit: Int = 50, where: LicenseWhereInput, orderBy: [LicenseOrderByInput!]): [License!]!
   licenseByUniqueInput(where: LicenseWhereUniqueInput!): License
-  licensesConnection(first: Int, after: String, last: Int, before: String, where: LicenseWhereInput, orderBy: LicenseOrderByInput): LicenseConnection!
-  memberships(offset: Int, limit: Int = 50, where: MembershipWhereInput, orderBy: MembershipOrderByInput): [Membership!]!
+  licensesConnection(first: Int, after: String, last: Int, before: String, where: LicenseWhereInput, orderBy: [LicenseOrderByInput!]): LicenseConnection!
+  memberships(offset: Int, limit: Int = 50, where: MembershipWhereInput, orderBy: [MembershipOrderByInput!]): [Membership!]!
   membershipByUniqueInput(where: MembershipWhereUniqueInput!): Membership
-  membershipsConnection(first: Int, after: String, last: Int, before: String, where: MembershipWhereInput, orderBy: MembershipOrderByInput): MembershipConnection!
+  membershipsConnection(first: Int, after: String, last: Int, before: String, where: MembershipWhereInput, orderBy: [MembershipOrderByInput!]): MembershipConnection!
+  nextEntityIds(offset: Int, limit: Int = 50, where: NextEntityIdWhereInput, orderBy: [NextEntityIdOrderByInput!]): [NextEntityId!]!
+  nextEntityIdByUniqueInput(where: NextEntityIdWhereUniqueInput!): NextEntityId
+  nextEntityIdsConnection(first: Int, after: String, last: Int, before: String, where: NextEntityIdWhereInput, orderBy: [NextEntityIdOrderByInput!]): NextEntityIdConnection!
   channelCategoriesByName(whereChannelCategory: ChannelCategoryWhereInput, skip: Int = 0, limit: Int = 5, text: String!): [ChannelCategoriesByNameFTSOutput!]!
   membersByHandle(whereMembership: MembershipWhereInput, skip: Int = 0, limit: Int = 5, text: String!): [MembersByHandleFTSOutput!]!
   search(whereVideo: VideoWhereInput, whereChannel: ChannelWhereInput, skip: Int = 0, limit: Int = 5, text: String!): [SearchFTSOutput!]!
   videoCategoriesByName(whereVideoCategory: VideoCategoryWhereInput, skip: Int = 0, limit: Int = 5, text: String!): [VideoCategoriesByNameFTSOutput!]!
-  videoCategories(offset: Int, limit: Int = 50, where: VideoCategoryWhereInput, orderBy: VideoCategoryOrderByInput): [VideoCategory!]!
+  videoCategories(offset: Int, limit: Int = 50, where: VideoCategoryWhereInput, orderBy: [VideoCategoryOrderByInput!]): [VideoCategory!]!
   videoCategoryByUniqueInput(where: VideoCategoryWhereUniqueInput!): VideoCategory
-  videoCategoriesConnection(first: Int, after: String, last: Int, before: String, where: VideoCategoryWhereInput, orderBy: VideoCategoryOrderByInput): VideoCategoryConnection!
-  videoMediaEncodings(offset: Int, limit: Int = 50, where: VideoMediaEncodingWhereInput, orderBy: VideoMediaEncodingOrderByInput): [VideoMediaEncoding!]!
+  videoCategoriesConnection(first: Int, after: String, last: Int, before: String, where: VideoCategoryWhereInput, orderBy: [VideoCategoryOrderByInput!]): VideoCategoryConnection!
+  videoMediaEncodings(offset: Int, limit: Int = 50, where: VideoMediaEncodingWhereInput, orderBy: [VideoMediaEncodingOrderByInput!]): [VideoMediaEncoding!]!
   videoMediaEncodingByUniqueInput(where: VideoMediaEncodingWhereUniqueInput!): VideoMediaEncoding
-  videoMediaEncodingsConnection(first: Int, after: String, last: Int, before: String, where: VideoMediaEncodingWhereInput, orderBy: VideoMediaEncodingOrderByInput): VideoMediaEncodingConnection!
-  videoMediaMetadata(offset: Int, limit: Int = 50, where: VideoMediaMetadataWhereInput, orderBy: VideoMediaMetadataOrderByInput): [VideoMediaMetadata!]!
+  videoMediaEncodingsConnection(first: Int, after: String, last: Int, before: String, where: VideoMediaEncodingWhereInput, orderBy: [VideoMediaEncodingOrderByInput!]): VideoMediaEncodingConnection!
+  videoMediaMetadata(offset: Int, limit: Int = 50, where: VideoMediaMetadataWhereInput, orderBy: [VideoMediaMetadataOrderByInput!]): [VideoMediaMetadata!]!
   videoMediaMetadataByUniqueInput(where: VideoMediaMetadataWhereUniqueInput!): VideoMediaMetadata
-  videoMediaMetadataConnection(first: Int, after: String, last: Int, before: String, where: VideoMediaMetadataWhereInput, orderBy: VideoMediaMetadataOrderByInput): VideoMediaMetadataConnection!
-  videos(offset: Int, limit: Int = 50, where: VideoWhereInput, orderBy: VideoOrderByInput): [Video!]!
+  videoMediaMetadataConnection(first: Int, after: String, last: Int, before: String, where: VideoMediaMetadataWhereInput, orderBy: [VideoMediaMetadataOrderByInput!]): VideoMediaMetadataConnection!
+  videos(offset: Int, limit: Int = 50, where: VideoWhereInput, orderBy: [VideoOrderByInput!]): [Video!]!
   videoByUniqueInput(where: VideoWhereUniqueInput!): Video
-  videosConnection(first: Int, after: String, last: Int, before: String, where: VideoWhereInput, orderBy: VideoOrderByInput): VideoConnection!
-  workers(offset: Int, limit: Int = 50, where: WorkerWhereInput, orderBy: WorkerOrderByInput): [Worker!]!
+  videosConnection(first: Int, after: String, last: Int, before: String, where: VideoWhereInput, orderBy: [VideoOrderByInput!]): VideoConnection!
+  workers(offset: Int, limit: Int = 50, where: WorkerWhereInput, orderBy: [WorkerOrderByInput!]): [Worker!]!
   workerByUniqueInput(where: WorkerWhereUniqueInput!): Worker
-  workersConnection(first: Int, after: String, last: Int, before: String, where: WorkerWhereInput, orderBy: WorkerOrderByInput): WorkerConnection!
+  workersConnection(first: Int, after: String, last: Int, before: String, where: WorkerWhereInput, orderBy: [WorkerOrderByInput!]): WorkerConnection!
 }
 
 type SearchFTSOutput {
@@ -1506,7 +1614,6 @@ type Video implements BaseGraphQLObject {
 
   """Is video featured or not"""
   isFeatured: Boolean!
-  featured: FeaturedVideo
 }
 
 type VideoCategoriesByNameFTSOutput {
@@ -1604,6 +1711,11 @@ input VideoCategoryWhereInput {
   createdInBlock_lt: Int
   createdInBlock_lte: Int
   createdInBlock_in: [Int!]
+  videos_none: VideoWhereInput
+  videos_some: VideoWhereInput
+  videos_every: VideoWhereInput
+  AND: [VideoCategoryWhereInput!]
+  OR: [VideoCategoryWhereInput!]
 }
 
 input VideoCategoryWhereUniqueInput {
@@ -1617,24 +1729,31 @@ type VideoConnection {
 }
 
 input VideoCreateInput {
+  channel: ID
   channelId: ID
+  category: ID
   categoryId: ID
   title: String
   description: String
   duration: Float
+  thumbnailPhotoDataObject: ID
   thumbnailPhotoDataObjectId: ID
   thumbnailPhotoUrls: [String!]!
   thumbnailPhotoAvailability: AssetAvailability!
+  language: ID
   languageId: ID
   hasMarketing: Boolean
   publishedBeforeJoystream: DateTime
   isPublic: Boolean
   isCensored: Boolean!
   isExplicit: Boolean
+  license: ID
   licenseId: ID
+  mediaDataObject: ID
   mediaDataObjectId: ID
   mediaUrls: [String!]!
   mediaAvailability: AssetAvailability!
+  mediaMetadata: ID
   mediaMetadataId: ID
   createdInBlock: Float!
   isFeatured: Boolean!
@@ -1744,6 +1863,11 @@ input VideoMediaEncodingWhereInput {
   mimeMediaType_startsWith: String
   mimeMediaType_endsWith: String
   mimeMediaType_in: [String!]
+  videomediametadataencoding_none: VideoMediaMetadataWhereInput
+  videomediametadataencoding_some: VideoMediaMetadataWhereInput
+  videomediametadataencoding_every: VideoMediaMetadataWhereInput
+  AND: [VideoMediaEncodingWhereInput!]
+  OR: [VideoMediaEncodingWhereInput!]
 }
 
 input VideoMediaEncodingWhereUniqueInput {
@@ -1781,6 +1905,7 @@ type VideoMediaMetadataConnection {
 }
 
 input VideoMediaMetadataCreateInput {
+  encoding: ID
   encodingId: ID
   pixelWidth: Float
   pixelHeight: Float
@@ -1800,6 +1925,8 @@ enum VideoMediaMetadataOrderByInput {
   updatedAt_DESC
   deletedAt_ASC
   deletedAt_DESC
+  encoding_ASC
+  encoding_DESC
   encodingId_ASC
   encodingId_DESC
   pixelWidth_ASC
@@ -1813,6 +1940,7 @@ enum VideoMediaMetadataOrderByInput {
 }
 
 input VideoMediaMetadataUpdateInput {
+  encoding: ID
   encodingId: ID
   pixelWidth: Float
   pixelHeight: Float
@@ -1871,6 +1999,10 @@ input VideoMediaMetadataWhereInput {
   createdInBlock_lt: Int
   createdInBlock_lte: Int
   createdInBlock_in: [Int!]
+  encoding: VideoMediaEncodingWhereInput
+  video: VideoWhereInput
+  AND: [VideoMediaMetadataWhereInput!]
+  OR: [VideoMediaMetadataWhereInput!]
 }
 
 input VideoMediaMetadataWhereUniqueInput {
@@ -1884,8 +2016,12 @@ enum VideoOrderByInput {
   updatedAt_DESC
   deletedAt_ASC
   deletedAt_DESC
+  channel_ASC
+  channel_DESC
   channelId_ASC
   channelId_DESC
+  category_ASC
+  category_DESC
   categoryId_ASC
   categoryId_DESC
   title_ASC
@@ -1894,10 +2030,14 @@ enum VideoOrderByInput {
   description_DESC
   duration_ASC
   duration_DESC
+  thumbnailPhotoDataObject_ASC
+  thumbnailPhotoDataObject_DESC
   thumbnailPhotoDataObjectId_ASC
   thumbnailPhotoDataObjectId_DESC
   thumbnailPhotoAvailability_ASC
   thumbnailPhotoAvailability_DESC
+  language_ASC
+  language_DESC
   languageId_ASC
   languageId_DESC
   hasMarketing_ASC
@@ -1910,12 +2050,18 @@ enum VideoOrderByInput {
   isCensored_DESC
   isExplicit_ASC
   isExplicit_DESC
+  license_ASC
+  license_DESC
   licenseId_ASC
   licenseId_DESC
+  mediaDataObject_ASC
+  mediaDataObject_DESC
   mediaDataObjectId_ASC
   mediaDataObjectId_DESC
   mediaAvailability_ASC
   mediaAvailability_DESC
+  mediaMetadata_ASC
+  mediaMetadata_DESC
   mediaMetadataId_ASC
   mediaMetadataId_DESC
   createdInBlock_ASC
@@ -1925,24 +2071,31 @@ enum VideoOrderByInput {
 }
 
 input VideoUpdateInput {
+  channel: ID
   channelId: ID
+  category: ID
   categoryId: ID
   title: String
   description: String
   duration: Float
+  thumbnailPhotoDataObject: ID
   thumbnailPhotoDataObjectId: ID
   thumbnailPhotoUrls: [String!]
   thumbnailPhotoAvailability: AssetAvailability
+  language: ID
   languageId: ID
   hasMarketing: Boolean
   publishedBeforeJoystream: DateTime
   isPublic: Boolean
   isCensored: Boolean
   isExplicit: Boolean
+  license: ID
   licenseId: ID
+  mediaDataObject: ID
   mediaDataObjectId: ID
   mediaUrls: [String!]
   mediaAvailability: AssetAvailability
+  mediaMetadata: ID
   mediaMetadataId: ID
   createdInBlock: Float
   isFeatured: Boolean
@@ -1995,6 +2148,9 @@ input VideoWhereInput {
   duration_in: [Int!]
   thumbnailPhotoDataObjectId_eq: ID
   thumbnailPhotoDataObjectId_in: [ID!]
+  thumbnailPhotoUrls_containsAll: [String!]
+  thumbnailPhotoUrls_containsNone: [String!]
+  thumbnailPhotoUrls_containsAny: [String!]
   thumbnailPhotoAvailability_eq: AssetAvailability
   thumbnailPhotoAvailability_in: [AssetAvailability!]
   languageId_eq: ID
@@ -2016,6 +2172,9 @@ input VideoWhereInput {
   licenseId_in: [ID!]
   mediaDataObjectId_eq: ID
   mediaDataObjectId_in: [ID!]
+  mediaUrls_containsAll: [String!]
+  mediaUrls_containsNone: [String!]
+  mediaUrls_containsAny: [String!]
   mediaAvailability_eq: AssetAvailability
   mediaAvailability_in: [AssetAvailability!]
   mediaMetadataId_eq: ID
@@ -2028,6 +2187,15 @@ input VideoWhereInput {
   createdInBlock_in: [Int!]
   isFeatured_eq: Boolean
   isFeatured_in: [Boolean!]
+  channel: ChannelWhereInput
+  category: VideoCategoryWhereInput
+  thumbnailPhotoDataObject: DataObjectWhereInput
+  language: LanguageWhereInput
+  license: LicenseWhereInput
+  mediaDataObject: DataObjectWhereInput
+  mediaMetadata: VideoMediaMetadataWhereInput
+  AND: [VideoWhereInput!]
+  OR: [VideoWhereInput!]
 }
 
 input VideoWhereUniqueInput {
@@ -2144,6 +2312,11 @@ input WorkerWhereInput {
   metadata_startsWith: String
   metadata_endsWith: String
   metadata_in: [String!]
+  dataObjects_none: DataObjectWhereInput
+  dataObjects_some: DataObjectWhereInput
+  dataObjects_every: DataObjectWhereInput
+  AND: [WorkerWhereInput!]
+  OR: [WorkerWhereInput!]
 }
 
 input WorkerWhereUniqueInput {

+ 2 - 0
query-node/generated/graphql-server/model/index.ts

@@ -12,6 +12,8 @@ import { License } from '../src/modules/license/license.model';
 export { License };
 import { Membership } from '../src/modules/membership/membership.model';
 export { Membership };
+import { NextEntityId } from '../src/modules/next-entity-id/next-entity-id.model';
+export { NextEntityId };
 import { Video } from '../src/modules/video/video.model';
 export { Video };
 import { VideoCategory } from '../src/modules/video-category/video-category.model';

+ 2 - 2
query-node/generated/graphql-server/package.json

@@ -59,11 +59,11 @@
       "**/generated/*"
     ]
   },
-  "hydra": "https://github.com/metmirr/warthog/releases/download/v2.23.0/warthog-v2.23.0.tgz",
+  "hydra": "https://github.com/Joystream/warthog/releases/download/v2.37.0/joystream-warthog-v2.37.0.tgz",
   "dependencies": {
     "dotenv": "^8.2.0",
     "reflect-metadata": "^0.1.13",
-    "warthog": "https://github.com/metmirr/warthog/releases/download/v2.23.0/warthog-v2.23.0.tgz",
+    "warthog": "https://github.com/Joystream/warthog/releases/download/v2.37.0/joystream-warthog-v2.37.0.tgz",
     "@types/bn.js": "^4.11.6",
     "bn.js": "^5.1.3",
     "lodash": "^4.17.15",

+ 9 - 1
query-node/generated/graphql-server/src/index.ts

@@ -8,6 +8,7 @@ import { Logger } from '../src/logger';
 
 import { buildServerSchema, getServer } from './server';
 import { startPgSubsribers } from './pubsub';
+import { queryTemplates } from './queryTemplates';
 
 
 class CustomNamingStrategy extends SnakeNamingStrategy {
@@ -22,7 +23,14 @@ class CustomNamingStrategy extends SnakeNamingStrategy {
 async function bootstrap() {
   await loadConfig();
 
-  const server = getServer({}, { namingStrategy: new CustomNamingStrategy() });
+  const appOptions = {
+    playgroundConfig: {
+      queryTemplates,
+      cdnUrl: process.env.GRAPHQL_PLAYGROUND_CDN || '',
+    }
+  }
+
+  const server = getServer(appOptions, { namingStrategy: new CustomNamingStrategy() });
 
   // Create database tables. Warthog migrate command does not support CustomNamingStrategy thats why
   // we have this code

+ 5 - 1
query-node/generated/graphql-server/src/modules/channel-category/channel-category.model.ts

@@ -10,7 +10,11 @@ export class ChannelCategory extends BaseModel {
   })
   name?: string;
 
-  @OneToMany(() => Channel, (param: Channel) => param.category)
+  @OneToMany(() => Channel, (param: Channel) => param.category, {
+    modelName: 'ChannelCategory',
+    relModelName: 'Channel',
+    propertyName: 'channels',
+  })
   channels?: Channel[];
 
   @IntField({})

+ 1 - 1
query-node/generated/graphql-server/src/modules/channel-category/channel-category.resolver.ts

@@ -77,7 +77,7 @@ export class ChannelCategoryConnectionWhereArgs extends ConnectionPageInputOptio
   where?: ChannelCategoryWhereInput;
 
   @Field(() => ChannelCategoryOrderByEnum, { nullable: true })
-  orderBy?: ChannelCategoryOrderByEnum;
+  orderBy?: [ChannelCategoryOrderByEnum];
 }
 
 @Resolver(ChannelCategory)

+ 1 - 1
query-node/generated/graphql-server/src/modules/channel-category/channel-category.service.ts

@@ -13,7 +13,7 @@ export class ChannelCategoryService extends BaseService<ChannelCategory> {
 
   async find<W extends WhereInput>(
     where?: any,
-    orderBy?: string,
+    orderBy?: string | string[],
     limit?: number,
     offset?: number,
     fields?: string[]

+ 39 - 4
query-node/generated/graphql-server/src/modules/channel/channel.model.ts

@@ -22,16 +22,33 @@ export { AssetAvailability };
 
 @Model({ api: {} })
 export class Channel extends BaseModel {
-  @ManyToOne(() => Membership, (param: Membership) => param.channels, { skipGraphQLField: true, nullable: true, cascade: ["insert", "update"] })
+  @ManyToOne(() => Membership, (param: Membership) => param.channels, {
+    skipGraphQLField: true,
+    nullable: true,
+    cascade: ["insert", "update"],
+    modelName: 'Channel',
+    relModelName: 'Membership',
+    propertyName: 'ownerMember',
+  })
   ownerMember?: Membership;
 
-  @ManyToOne(() => CuratorGroup, (param: CuratorGroup) => param.channels, { skipGraphQLField: true, nullable: true, cascade: ["insert", "update"]})
+  @ManyToOne(() => CuratorGroup, (param: CuratorGroup) => param.channels, {
+    skipGraphQLField: true,
+    nullable: true,
+    cascade: ["insert", "update"],
+    modelName: 'Channel',
+    relModelName: 'CuratorGroup',
+    propertyName: 'ownerCuratorGroup',
+  })
   ownerCuratorGroup?: CuratorGroup;
 
   @ManyToOne(() => ChannelCategory, (param: ChannelCategory) => param.channels, {
     skipGraphQLField: true,
     nullable: true,
     cascade: ["insert", "update"],
+    modelName: 'Channel',
+    relModelName: 'ChannelCategory',
+    propertyName: 'category',
   })
   category?: ChannelCategory;
 
@@ -57,6 +74,9 @@ export class Channel extends BaseModel {
     skipGraphQLField: true,
     nullable: true,
     cascade: ["insert", "update"],
+    modelName: 'Channel',
+    relModelName: 'DataObject',
+    propertyName: 'coverPhotoDataObject',
   })
   coverPhotoDataObject?: DataObject;
 
@@ -75,6 +95,9 @@ export class Channel extends BaseModel {
     skipGraphQLField: true,
     nullable: true,
     cascade: ["insert", "update"],
+    modelName: 'Channel',
+    relModelName: 'DataObject',
+    propertyName: 'avatarPhotoDataObject',
   })
   avatarPhotoDataObject?: DataObject;
 
@@ -100,10 +123,22 @@ export class Channel extends BaseModel {
   })
   isCensored!: boolean;
 
-  @ManyToOne(() => Language, (param: Language) => param.channellanguage, { skipGraphQLField: true, nullable: true, cascade: ["insert", "update"] })
+  @ManyToOne(() => Language, (param: Language) => param.channellanguage, {
+    skipGraphQLField: true,
+    nullable: true,
+    cascade: ["insert", "update"],
+    modelName: 'Channel',
+    relModelName: 'Language',
+    propertyName: 'language',
+  })
   language?: Language;
 
-  @OneToMany(() => Video, (param: Video) => param.channel, { cascade: ["insert", "update"] })
+  @OneToMany(() => Video, (param: Video) => param.channel, {
+    cascade: ["insert", "update"],
+    modelName: 'Channel',
+    relModelName: 'Video',
+    propertyName: 'videos',
+  })
   videos?: Video[];
 
   @IntField({})

+ 1 - 1
query-node/generated/graphql-server/src/modules/channel/channel.resolver.ts

@@ -82,7 +82,7 @@ export class ChannelConnectionWhereArgs extends ConnectionPageInputOptions {
   where?: ChannelWhereInput;
 
   @Field(() => ChannelOrderByEnum, { nullable: true })
-  orderBy?: ChannelOrderByEnum;
+  orderBy?: [ChannelOrderByEnum];
 }
 
 @Resolver(Channel)

+ 1 - 1
query-node/generated/graphql-server/src/modules/channel/channel.service.ts

@@ -13,7 +13,7 @@ export class ChannelService extends BaseService<Channel> {
 
   async find<W extends WhereInput>(
     where?: any,
-    orderBy?: string,
+    orderBy?: string | string[],
     limit?: number,
     offset?: number,
     fields?: string[]

+ 6 - 1
query-node/generated/graphql-server/src/modules/curator-group/curator-group.model.ts

@@ -15,7 +15,12 @@ export class CuratorGroup extends BaseModel {
   })
   isActive!: boolean;
 
-  @OneToMany(() => Channel, (param: Channel) => param.ownerCuratorGroup, { cascade: ["insert", "update"] })
+  @OneToMany(() => Channel, (param: Channel) => param.ownerCuratorGroup, { 
+    cascade: ["insert", "update"],
+    modelName: 'CuratorGroup',
+    relModelName: 'Channel',
+    propertyName: 'channels',
+  })
   channels?: Channel[];
 
   constructor(init?: Partial<CuratorGroup>) {

+ 1 - 1
query-node/generated/graphql-server/src/modules/curator-group/curator-group.resolver.ts

@@ -77,7 +77,7 @@ export class CuratorGroupConnectionWhereArgs extends ConnectionPageInputOptions
   where?: CuratorGroupWhereInput;
 
   @Field(() => CuratorGroupOrderByEnum, { nullable: true })
-  orderBy?: CuratorGroupOrderByEnum;
+  orderBy?: [CuratorGroupOrderByEnum];
 }
 
 @Resolver(CuratorGroup)

+ 1 - 1
query-node/generated/graphql-server/src/modules/curator-group/curator-group.service.ts

@@ -13,7 +13,7 @@ export class CuratorGroupService extends BaseService<CuratorGroup> {
 
   async find<W extends WhereInput>(
     where?: any,
-    orderBy?: string,
+    orderBy?: string | string[],
     limit?: number,
     offset?: number,
     fields?: string[]

+ 31 - 4
query-node/generated/graphql-server/src/modules/data-object/data-object.model.ts

@@ -44,6 +44,9 @@ export class DataObject extends BaseModel {
   @ManyToOne(() => Worker, (param: Worker) => param.dataObjects, {
     skipGraphQLField: true,
     nullable: true,
+    modelName: 'DataObject',
+    relModelName: 'Worker',
+    propertyName: 'liaison',
   })
   liaison?: Worker;
 
@@ -62,16 +65,40 @@ export class DataObject extends BaseModel {
   })
   joystreamContentId!: string;
 
-  @OneToMany(() => Channel, (param: Channel) => param.coverPhotoDataObject, { nullable: true, cascade: ["insert", "update"] })
+  @OneToMany(() => Channel, (param: Channel) => param.coverPhotoDataObject, {
+    nullable: true,
+    cascade: ["insert", "update"],
+    modelName: 'DataObject',
+    relModelName: 'Channel',
+    propertyName: 'channelcoverPhotoDataObject',
+  })
   channelcoverPhotoDataObject?: Channel[];
 
-  @OneToMany(() => Channel, (param: Channel) => param.avatarPhotoDataObject, { nullable: true, cascade: ["insert", "update"] })
+  @OneToMany(() => Channel, (param: Channel) => param.avatarPhotoDataObject, {
+    nullable: true,
+    cascade: ["insert", "update"],
+    modelName: 'DataObject',
+    relModelName: 'Channel',
+    propertyName: 'channelavatarPhotoDataObject',
+  })
   channelavatarPhotoDataObject?: Channel[];
 
-  @OneToMany(() => Video, (param: Video) => param.thumbnailPhotoDataObject, { nullable: true, cascade: ["insert", "update"] })
+  @OneToMany(() => Video, (param: Video) => param.thumbnailPhotoDataObject, {
+    nullable: true,
+    cascade: ["insert", "update"],
+    modelName: 'DataObject',
+    relModelName: 'Video',
+    propertyName: 'videothumbnailPhotoDataObject',
+  })
   videothumbnailPhotoDataObject?: Video[];
 
-  @OneToMany(() => Video, (param: Video) => param.mediaDataObject, { nullable: true, cascade: ["insert", "update"] })
+  @OneToMany(() => Video, (param: Video) => param.mediaDataObject, {
+    nullable: true,
+    cascade: ["insert", "update"],
+    modelName: 'DataObject',
+    relModelName: 'Video',
+    propertyName: 'videomediaDataObject',
+  })
   videomediaDataObject?: Video[];
 
   constructor(init?: Partial<DataObject>) {

+ 1 - 1
query-node/generated/graphql-server/src/modules/data-object/data-object.resolver.ts

@@ -79,7 +79,7 @@ export class DataObjectConnectionWhereArgs extends ConnectionPageInputOptions {
   where?: DataObjectWhereInput;
 
   @Field(() => DataObjectOrderByEnum, { nullable: true })
-  orderBy?: DataObjectOrderByEnum;
+  orderBy?: [DataObjectOrderByEnum];
 }
 
 @Resolver(DataObject)

+ 1 - 1
query-node/generated/graphql-server/src/modules/data-object/data-object.service.ts

@@ -13,7 +13,7 @@ export class DataObjectService extends BaseService<DataObject> {
 
   async find<W extends WhereInput>(
     where?: any,
-    orderBy?: string,
+    orderBy?: string | string[],
     limit?: number,
     offset?: number,
     fields?: string[]

+ 12 - 2
query-node/generated/graphql-server/src/modules/language/language.model.ts

@@ -13,10 +13,20 @@ export class Language extends BaseModel {
   @IntField({})
   createdInBlock!: number;
 
-  @OneToMany(() => Channel, (param: Channel) => param.language, { nullable: true, cascade: ["insert", "update"] })
+  @OneToMany(() => Channel, (param: Channel) => param.language, {
+    nullable: true, cascade: ["insert", "update"],
+    modelName: 'Language',
+    relModelName: 'Channel',
+    propertyName: 'channellanguage',
+  })
   channellanguage?: Channel[];
 
-  @OneToMany(() => Video, (param: Video) => param.language, { nullable: true, cascade: ["insert", "update"] })
+  @OneToMany(() => Video, (param: Video) => param.language, {
+    nullable: true, cascade: ["insert", "update"],
+    modelName: 'Language',
+    relModelName: 'Video',
+    propertyName: 'videolanguage',
+  })
   videolanguage?: Video[];
 
   constructor(init?: Partial<Language>) {

+ 1 - 1
query-node/generated/graphql-server/src/modules/language/language.resolver.ts

@@ -78,7 +78,7 @@ export class LanguageConnectionWhereArgs extends ConnectionPageInputOptions {
   where?: LanguageWhereInput;
 
   @Field(() => LanguageOrderByEnum, { nullable: true })
-  orderBy?: LanguageOrderByEnum;
+  orderBy?: [LanguageOrderByEnum];
 }
 
 @Resolver(Language)

+ 1 - 1
query-node/generated/graphql-server/src/modules/language/language.service.ts

@@ -13,7 +13,7 @@ export class LanguageService extends BaseService<Language> {
 
   async find<W extends WhereInput>(
     where?: any,
-    orderBy?: string,
+    orderBy?: string | string[],
     limit?: number,
     offset?: number,
     fields?: string[]

+ 7 - 1
query-node/generated/graphql-server/src/modules/license/license.model.ts

@@ -22,7 +22,13 @@ export class License extends BaseModel {
   })
   customText?: string;
 
-  @OneToMany(() => Video, (param: Video) => param.license, { nullable: true, cascade: ["insert", "update"] })
+  @OneToMany(() => Video, (param: Video) => param.license, { 
+    nullable: true,
+    cascade: ["insert", "update"],
+    modelName: 'License',
+    relModelName: 'Video',
+    propertyName: 'videolicense',
+  })
   videolicense?: Video[];
 
   constructor(init?: Partial<License>) {

+ 1 - 1
query-node/generated/graphql-server/src/modules/license/license.resolver.ts

@@ -77,7 +77,7 @@ export class LicenseConnectionWhereArgs extends ConnectionPageInputOptions {
   where?: LicenseWhereInput;
 
   @Field(() => LicenseOrderByEnum, { nullable: true })
-  orderBy?: LicenseOrderByEnum;
+  orderBy?: [LicenseOrderByEnum];
 }
 
 @Resolver(License)

+ 1 - 1
query-node/generated/graphql-server/src/modules/license/license.service.ts

@@ -13,7 +13,7 @@ export class LicenseService extends BaseService<License> {
 
   async find<W extends WhereInput>(
     where?: any,
-    orderBy?: string,
+    orderBy?: string | string[],
     limit?: number,
     offset?: number,
     fields?: string[]

+ 6 - 1
query-node/generated/graphql-server/src/modules/membership/membership.model.ts

@@ -51,7 +51,12 @@ export class Membership extends BaseModel {
   })
   subscription?: number;
 
-  @OneToMany(() => Channel, (param: Channel) => param.ownerMember, { cascade: ["insert", "update"] })
+  @OneToMany(() => Channel, (param: Channel) => param.ownerMember, {
+    cascade: ["insert", "update"],
+    modelName: 'Membership',
+    relModelName: 'Channel',
+    propertyName: 'channels',
+  })
   channels?: Channel[];
 
   constructor(init?: Partial<Membership>) {

+ 1 - 1
query-node/generated/graphql-server/src/modules/membership/membership.resolver.ts

@@ -77,7 +77,7 @@ export class MembershipConnectionWhereArgs extends ConnectionPageInputOptions {
   where?: MembershipWhereInput;
 
   @Field(() => MembershipOrderByEnum, { nullable: true })
-  orderBy?: MembershipOrderByEnum;
+  orderBy?: [MembershipOrderByEnum];
 }
 
 @Resolver(Membership)

+ 1 - 1
query-node/generated/graphql-server/src/modules/membership/membership.service.ts

@@ -13,7 +13,7 @@ export class MembershipService extends BaseService<Membership> {
 
   async find<W extends WhereInput>(
     where?: any,
-    orderBy?: string,
+    orderBy?: string | string[],
     limit?: number,
     offset?: number,
     fields?: string[]

+ 14 - 0
query-node/generated/graphql-server/src/modules/next-entity-id/next-entity-id.model.ts

@@ -0,0 +1,14 @@
+import { BaseModel, FloatField, Model, StringField } from 'warthog';
+
+@Model({ api: {} })
+export class NextEntityId extends BaseModel {
+  @FloatField({
+    description: `Next deterministic id for entities without custom id`,
+  })
+  nextId!: number;
+
+  constructor(init?: Partial<NextEntityId>) {
+    super();
+    Object.assign(this, init);
+  }
+}

+ 128 - 0
query-node/generated/graphql-server/src/modules/next-entity-id/next-entity-id.resolver.ts

@@ -0,0 +1,128 @@
+import {
+  Arg,
+  Args,
+  Mutation,
+  Query,
+  Root,
+  Resolver,
+  FieldResolver,
+  ObjectType,
+  Field,
+  Int,
+  ArgsType,
+  Info,
+} from 'type-graphql';
+import graphqlFields from 'graphql-fields';
+import { Inject } from 'typedi';
+import { Min } from 'class-validator';
+import { Fields, StandardDeleteResponse, UserId, PageInfo, RawFields } from 'warthog';
+
+import {
+  NextEntityIdCreateInput,
+  NextEntityIdCreateManyArgs,
+  NextEntityIdUpdateArgs,
+  NextEntityIdWhereArgs,
+  NextEntityIdWhereInput,
+  NextEntityIdWhereUniqueInput,
+  NextEntityIdOrderByEnum,
+} from '../../../generated';
+
+import { NextEntityId } from './next-entity-id.model';
+import { NextEntityIdService } from './next-entity-id.service';
+
+@ObjectType()
+export class NextEntityIdEdge {
+  @Field(() => NextEntityId, { nullable: false })
+  node!: NextEntityId;
+
+  @Field(() => String, { nullable: false })
+  cursor!: string;
+}
+
+@ObjectType()
+export class NextEntityIdConnection {
+  @Field(() => Int, { nullable: false })
+  totalCount!: number;
+
+  @Field(() => [NextEntityIdEdge], { nullable: false })
+  edges!: NextEntityIdEdge[];
+
+  @Field(() => PageInfo, { nullable: false })
+  pageInfo!: PageInfo;
+}
+
+@ArgsType()
+export class ConnectionPageInputOptions {
+  @Field(() => Int, { nullable: true })
+  @Min(0)
+  first?: number;
+
+  @Field(() => String, { nullable: true })
+  after?: string; // V3: TODO: should we make a RelayCursor scalar?
+
+  @Field(() => Int, { nullable: true })
+  @Min(0)
+  last?: number;
+
+  @Field(() => String, { nullable: true })
+  before?: string;
+}
+
+@ArgsType()
+export class NextEntityIdConnectionWhereArgs extends ConnectionPageInputOptions {
+  @Field(() => NextEntityIdWhereInput, { nullable: true })
+  where?: NextEntityIdWhereInput;
+
+  @Field(() => NextEntityIdOrderByEnum, { nullable: true })
+  orderBy?: [NextEntityIdOrderByEnum];
+}
+
+@Resolver(NextEntityId)
+export class NextEntityIdResolver {
+  constructor(@Inject('NextEntityIdService') public readonly service: NextEntityIdService) {}
+
+  @Query(() => [NextEntityId])
+  async nextEntityIds(
+    @Args() { where, orderBy, limit, offset }: NextEntityIdWhereArgs,
+    @Fields() fields: string[]
+  ): Promise<NextEntityId[]> {
+    return this.service.find<NextEntityIdWhereInput>(where, orderBy, limit, offset, fields);
+  }
+
+  @Query(() => NextEntityId, { nullable: true })
+  async nextEntityIdByUniqueInput(
+    @Arg('where') where: NextEntityIdWhereUniqueInput,
+    @Fields() fields: string[]
+  ): Promise<NextEntityId | null> {
+    const result = await this.service.find(where, undefined, 1, 0, fields);
+    return result && result.length >= 1 ? result[0] : null;
+  }
+
+  @Query(() => NextEntityIdConnection)
+  async nextEntityIdsConnection(
+    @Args() { where, orderBy, ...pageOptions }: NextEntityIdConnectionWhereArgs,
+    @Info() info: any
+  ): Promise<NextEntityIdConnection> {
+    const rawFields = graphqlFields(info, {}, { excludedFields: ['__typename'] });
+
+    let result: any = {
+      totalCount: 0,
+      edges: [],
+      pageInfo: {
+        hasNextPage: false,
+        hasPreviousPage: false,
+      },
+    };
+    // If the related database table does not have any records then an error is thrown to the client
+    // by warthog
+    try {
+      result = await this.service.findConnection<NextEntityIdWhereInput>(where, orderBy, pageOptions, rawFields);
+    } catch (err) {
+      console.log(err);
+      // TODO: should continue to return this on `Error: Items is empty` or throw the error
+      if (!(err.message as string).includes('Items is empty')) throw err;
+    }
+
+    return result as Promise<NextEntityIdConnection>;
+  }
+}

+ 28 - 0
query-node/generated/graphql-server/src/modules/next-entity-id/next-entity-id.service.ts

@@ -0,0 +1,28 @@
+import { Service } from 'typedi';
+import { Repository } from 'typeorm';
+import { InjectRepository } from 'typeorm-typedi-extensions';
+import { BaseService, WhereInput } from 'warthog';
+
+import { NextEntityId } from './next-entity-id.model';
+
+@Service('NextEntityIdService')
+export class NextEntityIdService extends BaseService<NextEntityId> {
+  constructor(@InjectRepository(NextEntityId) protected readonly repository: Repository<NextEntityId>) {
+    super(NextEntityId, repository);
+  }
+
+  async find<W extends WhereInput>(
+    where?: any,
+    orderBy?: string | string[],
+    limit?: number,
+    offset?: number,
+    fields?: string[]
+  ): Promise<NextEntityId[]> {
+    let f = fields;
+    if (f == undefined) {
+      f = [];
+    }
+
+    return super.find<W>(where, orderBy, limit, offset, f);
+  }
+}

+ 6 - 1
query-node/generated/graphql-server/src/modules/video-category/video-category.model.ts

@@ -10,7 +10,12 @@ export class VideoCategory extends BaseModel {
   })
   name?: string;
 
-  @OneToMany(() => Video, (param: Video) => param.category, { cascade: ["insert", "update"] })
+  @OneToMany(() => Video, (param: Video) => param.category, {
+    cascade: ["insert", "update"],
+    modelName: 'VideoCategory',
+    relModelName: 'Video',
+    propertyName: 'videos',
+  })
   videos?: Video[];
 
   @IntField({})

+ 1 - 1
query-node/generated/graphql-server/src/modules/video-category/video-category.resolver.ts

@@ -77,7 +77,7 @@ export class VideoCategoryConnectionWhereArgs extends ConnectionPageInputOptions
   where?: VideoCategoryWhereInput;
 
   @Field(() => VideoCategoryOrderByEnum, { nullable: true })
-  orderBy?: VideoCategoryOrderByEnum;
+  orderBy?: [VideoCategoryOrderByEnum];
 }
 
 @Resolver(VideoCategory)

+ 1 - 1
query-node/generated/graphql-server/src/modules/video-category/video-category.service.ts

@@ -13,7 +13,7 @@ export class VideoCategoryService extends BaseService<VideoCategory> {
 
   async find<W extends WhereInput>(
     where?: any,
-    orderBy?: string,
+    orderBy?: string | string[],
     limit?: number,
     offset?: number,
     fields?: string[]

+ 7 - 1
query-node/generated/graphql-server/src/modules/video-media-encoding/video-media-encoding.model.ts

@@ -22,7 +22,13 @@ export class VideoMediaEncoding extends BaseModel {
   })
   mimeMediaType?: string;
 
-  @OneToMany(() => VideoMediaMetadata, (param: VideoMediaMetadata) => param.encoding, { nullable: true, cascade: ["insert", "update"] })
+  @OneToMany(() => VideoMediaMetadata, (param: VideoMediaMetadata) => param.encoding, {
+    nullable: true,
+    cascade: ["insert", "update"],
+    modelName: 'VideoMediaEncoding',
+    relModelName: 'VideoMediaMetadata',
+    propertyName: 'videomediametadataencoding',
+  })
   videomediametadataencoding?: VideoMediaMetadata[];
 
   constructor(init?: Partial<VideoMediaEncoding>) {

+ 1 - 1
query-node/generated/graphql-server/src/modules/video-media-encoding/video-media-encoding.resolver.ts

@@ -77,7 +77,7 @@ export class VideoMediaEncodingConnectionWhereArgs extends ConnectionPageInputOp
   where?: VideoMediaEncodingWhereInput;
 
   @Field(() => VideoMediaEncodingOrderByEnum, { nullable: true })
-  orderBy?: VideoMediaEncodingOrderByEnum;
+  orderBy?: [VideoMediaEncodingOrderByEnum];
 }
 
 @Resolver(VideoMediaEncoding)

+ 1 - 1
query-node/generated/graphql-server/src/modules/video-media-encoding/video-media-encoding.service.ts

@@ -13,7 +13,7 @@ export class VideoMediaEncodingService extends BaseService<VideoMediaEncoding> {
 
   async find<W extends WhereInput>(
     where?: any,
-    orderBy?: string,
+    orderBy?: string | string[],
     limit?: number,
     offset?: number,
     fields?: string[]

+ 9 - 1
query-node/generated/graphql-server/src/modules/video-media-metadata/video-media-metadata.model.ts

@@ -9,6 +9,9 @@ export class VideoMediaMetadata extends BaseModel {
     skipGraphQLField: true,
     nullable: true,
     cascade: ["insert", "update"],
+    modelName: 'VideoMediaMetadata',
+    relModelName: 'VideoMediaEncoding',
+    propertyName: 'encoding',
   })
   encoding?: VideoMediaEncoding;
 
@@ -34,7 +37,12 @@ export class VideoMediaMetadata extends BaseModel {
   })
   size?: number;
 
-  @OneToOne(() => Video, (param: Video) => param.mediaMetadata, { nullable: true, cascade: ["insert", "update"] })
+  @OneToOne(() => Video, (param: Video) => param.mediaMetadata, { 
+    nullable: true, cascade: ["insert", "update"],
+    modelName: 'VideoMediaMetadata',
+    relModelName: 'Video',
+    propertyName: 'video',
+  })
   video?: Video;
 
   @IntField({})

+ 1 - 1
query-node/generated/graphql-server/src/modules/video-media-metadata/video-media-metadata.resolver.ts

@@ -78,7 +78,7 @@ export class VideoMediaMetadataConnectionWhereArgs extends ConnectionPageInputOp
   where?: VideoMediaMetadataWhereInput;
 
   @Field(() => VideoMediaMetadataOrderByEnum, { nullable: true })
-  orderBy?: VideoMediaMetadataOrderByEnum;
+  orderBy?: [VideoMediaMetadataOrderByEnum];
 }
 
 @Resolver(VideoMediaMetadata)

+ 1 - 1
query-node/generated/graphql-server/src/modules/video-media-metadata/video-media-metadata.service.ts

@@ -13,7 +13,7 @@ export class VideoMediaMetadataService extends BaseService<VideoMediaMetadata> {
 
   async find<W extends WhereInput>(
     where?: any,
-    orderBy?: string,
+    orderBy?: string | string[],
     limit?: number,
     offset?: number,
     fields?: string[]

+ 45 - 5
query-node/generated/graphql-server/src/modules/video/video.model.ts

@@ -24,10 +24,24 @@ export { AssetAvailability };
 
 @Model({ api: {} })
 export class Video extends BaseModel {
-  @ManyToOne(() => Channel, (param: Channel) => param.videos, { skipGraphQLField: true, nullable: true, cascade: ["insert", "update"] })
+  @ManyToOne(() => Channel, (param: Channel) => param.videos, {
+    skipGraphQLField: true,
+    nullable: true,
+    cascade: ["insert", "update"],
+    modelName: 'Video',
+    relModelName: 'Channel',
+    propertyName: 'channel',
+  })
   channel!: Channel;
 
-  @ManyToOne(() => VideoCategory, (param: VideoCategory) => param.videos, { skipGraphQLField: true, nullable: true, cascade: ["insert", "update"] })
+  @ManyToOne(() => VideoCategory, (param: VideoCategory) => param.videos, {
+    skipGraphQLField: true,
+    nullable: true,
+    cascade: ["insert", "update"],
+    modelName: 'Video',
+    relModelName: 'VideoCategory',
+    propertyName: 'category',
+  })
   category?: VideoCategory;
 
   @StringField({
@@ -52,6 +66,9 @@ export class Video extends BaseModel {
     skipGraphQLField: true,
     nullable: true,
     cascade: ["insert", "update"],
+    modelName: 'Video',
+    relModelName: 'DataObject',
+    propertyName: 'thumbnailPhotoDataObject',
   })
   thumbnailPhotoDataObject?: DataObject;
 
@@ -66,7 +83,14 @@ export class Video extends BaseModel {
   })
   thumbnailPhotoAvailability!: AssetAvailability;
 
-  @ManyToOne(() => Language, (param: Language) => param.videolanguage, { skipGraphQLField: true, nullable: true, cascade: ["insert", "update"] })
+  @ManyToOne(() => Language, (param: Language) => param.videolanguage, {
+    skipGraphQLField: true,
+    nullable: true,
+    cascade: ["insert", "update"],
+    modelName: 'Video',
+    relModelName: 'Language',
+    propertyName: 'language',
+  })
   language?: Language;
 
   @BooleanField({
@@ -98,13 +122,23 @@ export class Video extends BaseModel {
   })
   isExplicit?: boolean;
 
-  @ManyToOne(() => License, (param: License) => param.videolicense, { skipGraphQLField: true, nullable: true, cascade: ["insert", "update"] })
+  @ManyToOne(() => License, (param: License) => param.videolicense, {
+    skipGraphQLField: true,
+    nullable: true,
+    cascade: ["insert", "update"],
+    modelName: 'Video',
+    relModelName: 'License',
+    propertyName: 'license',
+  })
   license?: License;
 
   @ManyToOne(() => DataObject, (param: DataObject) => param.videomediaDataObject, {
     skipGraphQLField: true,
     nullable: true,
     cascade: ["insert", "update"],
+    modelName: 'Video',
+    relModelName: 'DataObject',
+    propertyName: 'mediaDataObject',
   })
   mediaDataObject?: DataObject;
 
@@ -119,7 +153,13 @@ export class Video extends BaseModel {
   })
   mediaAvailability!: AssetAvailability;
 
-  @OneToOneJoin(() => VideoMediaMetadata, (param: VideoMediaMetadata) => param.video, { nullable: true, cascade: ["insert", "update"] })
+  @OneToOneJoin(() => VideoMediaMetadata, (param: VideoMediaMetadata) => param.video, {
+    nullable: true,
+    cascade: ["insert", "update"],
+    modelName: 'Video',
+    relModelName: 'VideoMediaMetadata',
+    propertyName: 'mediaMetadata',
+  })
   mediaMetadata?: VideoMediaMetadata;
 
   @IntField({})

+ 1 - 1
query-node/generated/graphql-server/src/modules/video/video.resolver.ts

@@ -82,7 +82,7 @@ export class VideoConnectionWhereArgs extends ConnectionPageInputOptions {
   where?: VideoWhereInput;
 
   @Field(() => VideoOrderByEnum, { nullable: true })
-  orderBy?: VideoOrderByEnum;
+  orderBy?: [VideoOrderByEnum];
 }
 
 @Resolver(Video)

+ 1 - 1
query-node/generated/graphql-server/src/modules/video/video.service.ts

@@ -13,7 +13,7 @@ export class VideoService extends BaseService<Video> {
 
   async find<W extends WhereInput>(
     where?: any,
-    orderBy?: string,
+    orderBy?: string | string[],
     limit?: number,
     offset?: number,
     fields?: string[]

+ 5 - 1
query-node/generated/graphql-server/src/modules/worker/worker.model.ts

@@ -28,7 +28,11 @@ export class Worker extends BaseModel {
   })
   metadata?: string;
 
-  @OneToMany(() => DataObject, (param: DataObject) => param.liaison)
+  @OneToMany(() => DataObject, (param: DataObject) => param.liaison, {
+    modelName: 'Worker',
+    relModelName: 'DataObject',
+    propertyName: 'dataObjects',
+  })
   dataObjects?: DataObject[];
 
   constructor(init?: Partial<Worker>) {

+ 1 - 1
query-node/generated/graphql-server/src/modules/worker/worker.resolver.ts

@@ -77,7 +77,7 @@ export class WorkerConnectionWhereArgs extends ConnectionPageInputOptions {
   where?: WorkerWhereInput;
 
   @Field(() => WorkerOrderByEnum, { nullable: true })
-  orderBy?: WorkerOrderByEnum;
+  orderBy?: [WorkerOrderByEnum];
 }
 
 @Resolver(Worker)

+ 1 - 1
query-node/generated/graphql-server/src/modules/worker/worker.service.ts

@@ -13,7 +13,7 @@ export class WorkerService extends BaseService<Worker> {
 
   async find<W extends WhereInput>(
     where?: any,
-    orderBy?: string,
+    orderBy?: string | string[],
     limit?: number,
     offset?: number,
     fields?: string[]

+ 72 - 0
query-node/generated/graphql-server/src/queryTemplates.ts

@@ -0,0 +1,72 @@
+import { IQueryTemplate, queryTemplateUtils } from '@apollographql/graphql-playground-react/lib/components/Playground/QueryTemplates/templateUtils'
+
+// fields that will be ignored by autofill
+const commonIgnoredFields = [
+  'deletedAt',
+  'createdById',
+  'updatedById',
+  'deletedById',
+  'version',
+]
+
+const dataObjectIgnoredFields = [
+  ...commonIgnoredFields,
+
+  // dataObject's `owner` is problematic because it's variant and will need some special handling
+  'owner',
+]
+
+const exampleDate = `"2018-01-31 23:59"`
+
+export const queryTemplates: IQueryTemplate[] = [
+  ...queryTemplateUtils.getOneGetAllTemplates('video', 'videos', 'videos', dataObjectIgnoredFields),
+  {
+    title: 'Featured videos',
+    description: 'Get all featured videos.',
+    ignoredFields: commonIgnoredFields,
+    query: `query {
+      ${queryTemplateUtils.descriptionMarker}
+      videos(where: { isFeatured_eq: true }) { ${queryTemplateUtils.allPropsMarker} }
+    }`,
+  }, {
+    title: 'All recent videos',
+    description: 'Get all videos after created or updated after the given date.',
+    ignoredFields: commonIgnoredFields,
+    query: `query {
+      ${queryTemplateUtils.descriptionMarker}
+      videos(where: {
+        createdAt_gt: ${exampleDate},
+        updatedAt_gt: ${exampleDate},
+      }) { ${queryTemplateUtils.allPropsMarker} }
+    }`,
+  },
+
+  ...queryTemplateUtils.getOneGetAllTemplates('video category', 'video categories', 'videoCategories', commonIgnoredFields),
+  {
+    title: `All videos in category`,
+    description: `Get all videos associated with the given video category.`,
+    ignoredFields: commonIgnoredFields,
+    query: `query {
+      ${queryTemplateUtils.descriptionMarker}
+      videos(where: { categoryId_eq: 1 }) { ${queryTemplateUtils.allPropsMarker} }
+    }`,
+  },
+  ...queryTemplateUtils.getOneGetAllTemplates('channel', 'channels', 'channels', dataObjectIgnoredFields),
+  ...queryTemplateUtils.getOneGetAllTemplates('channel category', 'channels categories', 'channelCategories', commonIgnoredFields),
+
+  {
+    title: `Channel's videos`,
+    description: `Get all videos associated with the given channel.`,
+    ignoredFields: commonIgnoredFields,
+    query: `query {
+      ${queryTemplateUtils.descriptionMarker}
+      videos(where: { channelId_eq: 1 }) { ${queryTemplateUtils.allPropsMarker} }
+    }`,
+  },
+
+  ...queryTemplateUtils.getOneGetAllTemplates('asset', 'assets', 'dataObjects', dataObjectIgnoredFields),
+  ...queryTemplateUtils.getOneGetAllTemplates('membership', 'memberships', 'memberships', commonIgnoredFields),
+
+  ...queryTemplateUtils.getOneGetAllTemplates('curator group', 'curator groups', 'curatorGroups', commonIgnoredFields),
+  ...queryTemplateUtils.getOneGetAllTemplates('worker', 'workers', 'workers', dataObjectIgnoredFields),
+].map(queryTemplateUtils.formatQuery)

+ 1 - 0
query-node/mappings/.eslintignore

@@ -0,0 +1 @@
+lib/

+ 19 - 0
query-node/mappings/.eslintrc.js

@@ -0,0 +1,19 @@
+module.exports = {
+  extends: ['@joystream/eslint-config'],
+  env: {
+    node: true,
+  },
+  rules: {
+    '@typescript-eslint/naming-convention': 'off',
+    '@typescript-eslint/ban-types': ["error",
+      {
+        "types": {
+          // enable usage of `Object` data type in TS; it has it's meaning(!) and it's disabled
+          // by default only beacuse people tend to misuse it
+          "Object": false,
+        },
+        "extendDefaults": true
+      }
+    ]
+  },
+}

+ 1 - 0
query-node/mappings/.prettierignore

@@ -0,0 +1 @@
+lib/

+ 3 - 0
query-node/mappings/.prettierrc.js

@@ -0,0 +1,3 @@
+module.exports = {
+  ...require('@joystream/prettier-config'),
+}

+ 37 - 39
query-node/mappings/bootstrap/index.ts

@@ -1,11 +1,11 @@
 import { createDBConnection } from '@dzlzv/hydra-processor/lib/db'
 import { DatabaseManager, makeDatabaseManager } from '@dzlzv/hydra-db-utils'
-import { Connection, getManager } from 'typeorm'
+import { Connection, getManager, FindConditions } from 'typeorm'
 
-import { bootMembers, IBootstrapMember } from './members';
-import { bootWorkers, IBootstrapWorker, IBootstrapWorkers } from './workers';
+import { bootMembers, IBootstrapMember } from './members'
+import { bootWorkers, IBootstrapWorker, IBootstrapWorkers } from './workers'
 import { Worker, WorkerType } from 'query-node'
-import { FindConditions } from 'typeorm'
+
 import fs from 'fs'
 import path from 'path'
 
@@ -14,58 +14,56 @@ init()
 
 // bootstrap flow
 async function init() {
-    // prepare database and import data
-    const [databaseManager, connection] = await createDatabaseManager()
+  // prepare database and import data
+  const [databaseManager, connection] = await createDatabaseManager()
 
-    // escape if db is already initialized
-    if (await isDbInitialized(databaseManager)) {
-        await connection.close()
-        return
-    }
+  // escape if db is already initialized
+  if (await isDbInitialized(databaseManager)) {
+    await connection.close()
+    return
+  }
 
-    // load import data
-    const data = loadData()
+  // load import data
+  const data = loadData()
 
-    // bootstrap entities
-    await bootMembers(databaseManager, data.members)
-    await bootWorkers(databaseManager, data.workers)
+  // bootstrap entities
+  await bootMembers(databaseManager, data.members)
+  await bootWorkers(databaseManager, data.workers)
 
-    await connection.close()
+  await connection.close()
 }
 
 async function isDbInitialized(db: DatabaseManager): Promise<boolean> {
-    // simple way to check if db is bootstrapped already - check if there is at least 1 storage provider
-    const membership = await db.get(Worker, {
-        where: {
-          type: WorkerType.STORAGE,
-        } as FindConditions<Worker>
-    })
+  // simple way to check if db is bootstrapped already - check if there is at least 1 storage provider
+  const membership = await db.get(Worker, {
+    where: {
+      type: WorkerType.STORAGE,
+    } as FindConditions<Worker>,
+  })
 
-    return !!membership
+  return !!membership
 }
 
 async function createDatabaseManager(): Promise<[DatabaseManager, Connection]> {
-    // paths in `entities` should be the same as `entities` set in `manifest.yml`
-    const entities = [
-        'generated/graphql-server/dist/**/*.model.js'
-    ]
+  // paths in `entities` should be the same as `entities` set in `manifest.yml`
+  const entities = ['generated/graphql-server/dist/**/*.model.js']
 
-    // connect to db and create manager
-    const connection = await createDBConnection(entities)
-    const entityManager = getManager(connection.name)
-    const databaseManager = makeDatabaseManager(entityManager)
+  // connect to db and create manager
+  const connection = await createDBConnection(entities)
+  const entityManager = getManager(connection.name)
+  const databaseManager = makeDatabaseManager(entityManager)
 
-    return [databaseManager, connection]
+  return [databaseManager, connection]
 }
 
 interface IBootstrapData {
-    members: IBootstrapMember[]
-    workers: IBootstrapWorkers
+  members: IBootstrapMember[]
+  workers: IBootstrapWorkers
 }
 
 function loadData(): IBootstrapData {
-    return {
-        members: JSON.parse(fs.readFileSync(process.env.BOOTSTRAP_DATA_FOLDER + '/members.json').toString()),
-        workers: JSON.parse(fs.readFileSync(process.env.BOOTSTRAP_DATA_FOLDER + '/workers.json').toString()),
-    }
+  return {
+    members: JSON.parse(fs.readFileSync(process.env.BOOTSTRAP_DATA_FOLDER + '/members.json').toString()),
+    workers: JSON.parse(fs.readFileSync(process.env.BOOTSTRAP_DATA_FOLDER + '/workers.json').toString()),
+  }
 }

+ 9 - 11
query-node/mappings/bootstrap/members.ts

@@ -1,21 +1,19 @@
-//import { Connection } from 'typeorm'
+// import { Connection } from 'typeorm'
 import { DatabaseManager } from '@dzlzv/hydra-db-utils'
-import {
-  logger,
-} from '../src/common'
+import { logger } from '../src/common'
 import { MembershipEntryMethod, Membership } from 'query-node'
 
 export interface IBootstrapMember {
   member_id: number
-  root_account: string,
-  controller_account: string,
-  handle: string,
-  avatar_uri: string,
-  about: string,
+  root_account: string
+  controller_account: string
+  handle: string
+  avatar_uri: string
+  about: string
   registered_at_time: number
 }
 
-//export async function bootMembers(members: IBootstrapMember[], db: Connection): Promise<void> {
+// export async function bootMembers(members: IBootstrapMember[], db: Connection): Promise<void> {
 export async function bootMembers(db: DatabaseManager, members: IBootstrapMember[]): Promise<void> {
   for (const rawMember of members) {
     // create new membership
@@ -39,6 +37,6 @@ export async function bootMembers(db: DatabaseManager, members: IBootstrapMember
     await db.save<Membership>(member)
 
     // emit log event
-    logger.info('Member has been bootstrapped', {id: rawMember.member_id})
+    logger.info('Member has been bootstrapped', { id: rawMember.member_id })
   }
 }

+ 12 - 3
query-node/mappings/bootstrap/workers.ts

@@ -1,6 +1,6 @@
 import { DatabaseManager } from '@dzlzv/hydra-db-utils'
 import { Worker, WorkerType } from 'query-node'
-import {logger} from '../src/common'
+import { logger, getNextId } from '../src/common'
 
 export interface IBootstrapWorkers {
   storage: IBootstrapWorker[]
@@ -9,6 +9,7 @@ export interface IBootstrapWorkers {
 
 export interface IBootstrapWorker {
   id: string
+  created_at: string
 }
 
 export async function bootWorkers(db: DatabaseManager, workers: IBootstrapWorkers): Promise<void> {
@@ -16,7 +17,11 @@ export async function bootWorkers(db: DatabaseManager, workers: IBootstrapWorker
   await bootWorkersInGroup(db, workers.gateway, WorkerType.GATEWAY)
 }
 
-export async function bootWorkersInGroup(db: DatabaseManager, workers: IBootstrapWorker[], workerType: WorkerType): Promise<void> {
+export async function bootWorkersInGroup(
+  db: DatabaseManager,
+  workers: IBootstrapWorker[],
+  workerType: WorkerType
+): Promise<void> {
   if (!workers) {
     return
   }
@@ -25,15 +30,19 @@ export async function bootWorkersInGroup(db: DatabaseManager, workers: IBootstra
     // create new membership
     const worker = new Worker({
       // main data
+      id: await getNextId(db),
       workerId: rawWorker.id,
       type: workerType,
       isActive: true,
+
+      createdAt: new Date(rawWorker.created_at),
+      updatedAt: new Date(rawWorker.created_at),
     })
 
     // save worker
     await db.save<Worker>(worker)
 
     // emit log event
-    logger.info('Worker has been bootstrapped', {id: rawWorker.id, workerType})
+    logger.info('Worker has been bootstrapped', { id: rawWorker.id, workerType })
   }
 }

+ 5 - 2
query-node/mappings/package.json

@@ -7,8 +7,10 @@
   "scripts": {
     "build": "rm -rf lib && tsc --build tsconfig.json && yarn copy-types",
     "copy-types": "cp ../../types/augment/all/defs.json lib/generated/types/typedefs.json",
-    "lint": "echo \"Skippinng\"",
-    "clean": "rm -rf lib"
+    "clean": "rm -rf lib",
+    "lint": "eslint . --quiet --ext .ts",
+    "checks": "tsc --noEmit --pretty && prettier ./ --check && yarn lint",
+    "format": "prettier ./ --write "
   },
   "dependencies": {
     "@polkadot/types": "4.2.1",
@@ -21,6 +23,7 @@
     "warthog": "https://github.com/metmirr/warthog/releases/download/v2.23.0/warthog-v2.23.0.tgz"
   },
   "devDependencies": {
+    "prettier": "^2.2.1",
     "ts-node": "^9.0.0",
     "typescript": "^3.8"
   }

+ 75 - 38
query-node/mappings/src/common.ts

@@ -1,18 +1,11 @@
-import { SubstrateEvent } from '@dzlzv/hydra-common'
+import { SubstrateEvent, SubstrateExtrinsic, ExtrinsicArg } from '@dzlzv/hydra-common'
 import { DatabaseManager } from '@dzlzv/hydra-db-utils'
-import { u64 } from '@polkadot/types/primitive';
-import { SubstrateExtrinsic, ExtrinsicArg } from '@dzlzv/hydra-common'
+import { u64, Bytes } from '@polkadot/types/primitive'
+import { fixBlockTimestamp } from './eventFix'
 
 // Asset
-import {
-  DataObjectOwner,
-  DataObject,
-  LiaisonJudgement,
-  Network,
-} from 'query-node'
-import {
-  ContentParameters,
-} from '@joystream/types/augment'
+import { DataObjectOwner, DataObject, LiaisonJudgement, Network, NextEntityId } from 'query-node'
+import { ContentParameters } from '@joystream/types/augment'
 
 import { ContentParameters as Custom_ContentParameters } from '@joystream/types/storage'
 import { registry } from '@joystream/types'
@@ -28,7 +21,7 @@ export function inconsistentState(extraInfo: string, data?: unknown): never {
   // log error
   logger.error(errorMessage, data)
 
-  throw errorMessage
+  throw new Error(errorMessage)
 }
 
 /*
@@ -41,26 +34,50 @@ export function invalidMetadata(extraInfo: string, data?: unknown): void {
   logger.info(errorMessage, data)
 }
 
+/*
+  Creates a predictable and unique ID for the given content.
+*/
+export async function getNextId(db: DatabaseManager): Promise<string> {
+  // load or create record
+  const existingRecord = (await db.get(NextEntityId, {})) || new NextEntityId({ id: '0', nextId: 1 })
+
+  // remember id
+  const entityId = existingRecord.nextId
+
+  // increment id
+  existingRecord.nextId = existingRecord.nextId + 1
+
+  // save record
+  await db.save<NextEntityId>(existingRecord)
+
+  return entityId.toString()
+}
+
 /*
   Prepares data object from content parameters.
 */
 export async function prepareDataObject(
+  db: DatabaseManager,
   contentParameters: ContentParameters,
-  blockNumber: number,
-  owner: typeof DataObjectOwner,
+  event: SubstrateEvent,
+  owner: typeof DataObjectOwner
 ): Promise<DataObject> {
   // convert generic content parameters coming from processor to custom Joystream data type
   const customContentParameters = new Custom_ContentParameters(registry, contentParameters.toJSON() as any)
 
   const dataObject = new DataObject({
+    id: await getNextId(db),
     owner,
-    createdInBlock: blockNumber,
+    createdInBlock: event.blockNumber,
     typeId: contentParameters.type_id.toNumber(),
     size: customContentParameters.size_in_bytes.toNumber(),
     liaisonJudgement: LiaisonJudgement.PENDING, // judgement is pending at start; liaison id is set when content is accepted/rejected
-    ipfsContentId: contentParameters.ipfs_content_id.toUtf8(),
+    ipfsContentId: convertBytesToString(contentParameters.ipfs_content_id),
     joystreamContentId: customContentParameters.content_id.encode(),
 
+    createdAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
+    updatedAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
+
     createdById: '1',
     updatedById: '1',
   })
@@ -68,7 +85,7 @@ export async function prepareDataObject(
   return dataObject
 }
 
-/////////////////// Sudo extrinsic calls ///////////////////////////////////////
+/// ///////////////// Sudo extrinsic calls ///////////////////////////////////////
 
 // soft-peg interface for typegen-generated `*Call` types
 export interface IGenericExtrinsicObject<T> {
@@ -92,16 +109,18 @@ export function extractExtrinsicArgs<DataParams, EventObject extends IGenericExt
 
   // in ideal world this parameter would not be needed, but there is no way to associate parameters
   // used in sudo to extrinsic parameters without it
-  argsIndeces: Record<keyof DataParams, number>,
-): EventObject['args'] { // this is equal to DataParams but only this notation works properly
+  argsIndeces: Record<keyof DataParams, number>
+): EventObject['args'] {
+  // this is equal to DataParams but only this notation works properly
   // escape when extrinsic info is not available
   if (!rawEvent.extrinsic) {
-    throw 'Invalid event - no extrinsic set' // this should never happen
+    throw new Error('Invalid event - no extrinsic set') // this should never happen
   }
 
   // regural extrinsic call?
-  if (rawEvent.extrinsic.section != 'sudo') {
-    return (new callFactory(rawEvent)).args
+  if (rawEvent.extrinsic.section !== 'sudo') {
+    // eslint-disable-next-line new-cap
+    return new callFactory(rawEvent).args
   }
 
   // sudo extrinsic call
@@ -110,7 +129,7 @@ export function extractExtrinsicArgs<DataParams, EventObject extends IGenericExt
 
   // convert naming convention (underscore_names to camelCase)
   const clearArgs = Object.keys(callArgs.args).reduce((acc, key) => {
-    const formattedName = key.replace(/_([a-z])/g, tmp => tmp[1].toUpperCase())
+    const formattedName = key.replace(/_([a-z])/g, (tmp) => tmp[1].toUpperCase())
 
     acc[formattedName] = callArgs.args[key]
 
@@ -119,19 +138,20 @@ export function extractExtrinsicArgs<DataParams, EventObject extends IGenericExt
 
   // prepare partial event object
   const partialEvent = {
-    extrinsic: {
+    extrinsic: ({
       args: Object.keys(argsIndeces).reduce((acc, key) => {
-        acc[(argsIndeces)[key]] = {
-          value: clearArgs[key]
+        acc[argsIndeces[key]] = {
+          value: clearArgs[key],
         }
 
         return acc
       }, [] as unknown[]),
-    } as unknown as SubstrateExtrinsic
+    } as unknown) as SubstrateExtrinsic,
   } as SubstrateEvent
 
   // create event object and extract processed args
-  const finalArgs = (new callFactory(partialEvent)).args
+  // eslint-disable-next-line new-cap
+  const finalArgs = new callFactory(partialEvent).args
 
   return finalArgs
 }
@@ -141,28 +161,29 @@ export function extractExtrinsicArgs<DataParams, EventObject extends IGenericExt
 */
 export function extractSudoCallParameters<DataParams>(rawEvent: SubstrateEvent): ISudoCallArgs<DataParams> {
   if (!rawEvent.extrinsic) {
-    throw 'Invalid event - no extrinsic set' // this should never happen
+    throw new Error('Invalid event - no extrinsic set') // this should never happen
   }
 
   // see Substrate's sudo frame for more info about sudo extrinsics and `call` argument index
-  const argIndex = false
-    || (rawEvent.extrinsic.method == 'sudoAs' && 1) // who, *call*
-    || (rawEvent.extrinsic.method == 'sudo' && 0) // *call*
-    || (rawEvent.extrinsic.method == 'sudoUncheckedWeight' && 0) // *call*, _weight
+  const argIndex =
+    false ||
+    (rawEvent.extrinsic.method === 'sudoAs' && 1) || // who, *call*
+    (rawEvent.extrinsic.method === 'sudo' && 0) || // *call*
+    (rawEvent.extrinsic.method === 'sudoUncheckedWeight' && 0) // *call*, _weight
 
   // ensure `call` argument was found
   if (argIndex === false) {
     // this could possibly happen in sometime in future if new sudo options are introduced in Substrate
-    throw 'Not implemented situation with sudo'
+    throw new Error('Not implemented situation with sudo')
   }
 
   // typecast call arguments
-  const callArgs = rawEvent.extrinsic.args[argIndex].value as unknown as ISudoCallArgs<DataParams>
+  const callArgs = (rawEvent.extrinsic.args[argIndex].value as unknown) as ISudoCallArgs<DataParams>
 
   return callArgs
 }
 
-/////////////////// Logger /////////////////////////////////////////////////////
+/// ///////////////// Logger /////////////////////////////////////////////////////
 
 /*
   Simple logger enabling error and informational reporting.
@@ -171,7 +192,6 @@ export function extractSudoCallParameters<DataParams>(rawEvent: SubstrateEvent):
   Hydra will provide logger instance and relevant code using `Logger` should be refactored.
 */
 class Logger {
-
   /*
     Log significant event.
   */
@@ -188,3 +208,20 @@ class Logger {
 }
 
 export const logger = new Logger()
+
+/*
+  Helper for converting Bytes type to string
+*/
+export function convertBytesToString(b: Bytes | null): string {
+  if (!b) {
+    return ''
+  }
+
+  const text = Buffer.from(b.toU8a(true)).toString()
+
+  // prevent utf-8 null character
+  // eslint-disable-next-line no-control-regex
+  const result = text.replace(/\u0000/g, '')
+
+  return result
+}

+ 72 - 114
query-node/mappings/src/content/channel.ts

@@ -1,11 +1,11 @@
 import { fixBlockTimestamp } from '../eventFix'
 import { SubstrateEvent } from '@dzlzv/hydra-common'
 import { DatabaseManager } from '@dzlzv/hydra-db-utils'
-import ISO6391 from 'iso-639-1';
+import ISO6391 from 'iso-639-1'
 import { FindConditions, In } from 'typeorm'
 
-import { AccountId } from "@polkadot/types/interfaces";
-import { Option } from '@polkadot/types/codec';
+import { AccountId } from '@polkadot/types/interfaces'
+import { Option } from '@polkadot/types/codec'
 import { Content } from '../../../generated/types'
 import {
   readProtobuf,
@@ -14,36 +14,22 @@ import {
   convertContentActorToDataObjectOwner,
 } from './utils'
 
-import {
-  Channel,
-  ChannelCategory,
-  DataObject,
-} from 'query-node'
-import {
-  inconsistentState,
-  logger,
-} from '../common'
-
-import {
-  AssetAvailability,
-} from 'query-node'
+import { Channel, ChannelCategory, DataObject, AssetAvailability } from 'query-node'
+import { inconsistentState, logger } from '../common'
 
 // eslint-disable-next-line @typescript-eslint/naming-convention
 export async function content_ChannelCreated(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
   // read event data
-  const {channelId, channelCreationParameters, contentActor} = new Content.ChannelCreatedEvent(event).data
+  const { channelId, channelCreationParameters, contentActor } = new Content.ChannelCreatedEvent(event).data
 
   // read metadata
-  const protobufContent = await readProtobufWithAssets(
-    new Channel(),
-    {
-      metadata: channelCreationParameters.meta,
-      db,
-      blockNumber: event.blockNumber,
-      assets: channelCreationParameters.assets,
-      contentOwner: convertContentActorToDataObjectOwner(contentActor, channelId.toNumber()),
-    }
-  )
+  const protobufContent = await readProtobufWithAssets(new Channel(), {
+    metadata: channelCreationParameters.meta,
+    db,
+    event,
+    assets: channelCreationParameters.assets,
+    contentOwner: convertContentActorToDataObjectOwner(contentActor, channelId.toNumber()),
+  })
 
   // create entity
   const channel = new Channel({
@@ -64,30 +50,23 @@ export async function content_ChannelCreated(db: DatabaseManager, event: Substra
     updatedAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
 
     // prepare channel owner (handles fields `ownerMember` and `ownerCuratorGroup`)
-    ...await convertContentActorToChannelOwner(db, contentActor),
+    ...(await convertContentActorToChannelOwner(db, contentActor)),
 
     // integrate metadata
-    ...protobufContent
+    ...protobufContent,
   })
 
   // save entity
   await db.save<Channel>(channel)
 
   // emit log event
-  logger.info('Channel has been created', {id: channel.id})
+  logger.info('Channel has been created', { id: channel.id })
 }
 
 // eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_ChannelUpdated(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
+export async function content_ChannelUpdated(db: DatabaseManager, event: SubstrateEvent) {
   // read event data
-  const {
-    channelId,
-    channelUpdateParameters,
-    contentActor,
-  } = new Content.ChannelUpdatedEvent(event).data
+  const { channelId, channelUpdateParameters, contentActor } = new Content.ChannelUpdatedEvent(event).data
 
   // load channel
   const channel = await db.get(Channel, { where: { id: channelId.toString() } as FindConditions<Channel> })
@@ -102,19 +81,16 @@ export async function content_ChannelUpdated(
 
   //  update metadata if it was changed
   if (newMetadata) {
-    const protobufContent = await readProtobufWithAssets(
-      new Channel(),
-      {
-        metadata: newMetadata,
-        db,
-        blockNumber: event.blockNumber,
-        assets: channelUpdateParameters.assets.unwrapOr([]),
-        contentOwner: convertContentActorToDataObjectOwner(contentActor, channelId.toNumber()),
-      }
-    )
+    const protobufContent = await readProtobufWithAssets(new Channel(), {
+      metadata: newMetadata,
+      db,
+      event,
+      assets: channelUpdateParameters.assets.unwrapOr([]),
+      contentOwner: convertContentActorToDataObjectOwner(contentActor, channelId.toNumber()),
+    })
 
     // update all fields read from protobuf
-    for (let [key, value] of Object.entries(protobufContent)) {
+    for (const [key, value] of Object.entries(protobufContent)) {
       channel[key] = value
     }
   }
@@ -135,20 +111,19 @@ export async function content_ChannelUpdated(
   await db.save<Channel>(channel)
 
   // emit log event
-  logger.info('Channel has been updated', {id: channel.id})
+  logger.info('Channel has been updated', { id: channel.id })
 }
 
-export async function content_ChannelAssetsRemoved(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
+export async function content_ChannelAssetsRemoved(db: DatabaseManager, event: SubstrateEvent) {
   // read event data
-  const {contentId: contentIds} = new Content.ChannelAssetsRemovedEvent(event).data
+  const { contentId: contentIds } = new Content.ChannelAssetsRemovedEvent(event).data
 
   // load channel
-  const assets = await db.getMany(DataObject, { where: {
-    id: In(contentIds.toArray().map(item => item.toString()))
-  } as FindConditions<DataObject>})
+  const assets = await db.getMany(DataObject, {
+    where: {
+      id: In(contentIds.toArray().map((item) => item.toString())),
+    } as FindConditions<DataObject>,
+  })
 
   // delete assets
   for (const asset of assets) {
@@ -156,16 +131,13 @@ export async function content_ChannelAssetsRemoved(
   }
 
   // emit log event
-  logger.info('Channel assets have been removed', {ids: contentIds})
+  logger.info('Channel assets have been removed', { ids: contentIds })
 }
 
 // eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_ChannelCensorshipStatusUpdated(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
+export async function content_ChannelCensorshipStatusUpdated(db: DatabaseManager, event: SubstrateEvent) {
   // read event data
-  const {channelId, isCensored} = new Content.ChannelCensorshipStatusUpdatedEvent(event).data
+  const { channelId, isCensored } = new Content.ChannelCensorshipStatusUpdatedEvent(event).data
 
   // load event
   const channel = await db.get(Channel, { where: { id: channelId.toString() } as FindConditions<Channel> })
@@ -176,7 +148,7 @@ export async function content_ChannelCensorshipStatusUpdated(
   }
 
   // update channel
-  channel.isCensored = isCensored.isTrue;
+  channel.isCensored = isCensored.isTrue
 
   // set last update time
   channel.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
@@ -185,28 +157,22 @@ export async function content_ChannelCensorshipStatusUpdated(
   await db.save<Channel>(channel)
 
   // emit log event
-  logger.info('Channel censorship status has been updated', {id: channelId, isCensored: isCensored.isTrue})
+  logger.info('Channel censorship status has been updated', { id: channelId, isCensored: isCensored.isTrue })
 }
 
-/////////////////// ChannelCategory ////////////////////////////////////////////
+/// ///////////////// ChannelCategory ////////////////////////////////////////////
 
 // eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_ChannelCategoryCreated(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
+export async function content_ChannelCategoryCreated(db: DatabaseManager, event: SubstrateEvent) {
   // read event data
-  const {channelCategoryCreationParameters, channelCategoryId} = new Content.ChannelCategoryCreatedEvent(event).data
+  const { channelCategoryCreationParameters, channelCategoryId } = new Content.ChannelCategoryCreatedEvent(event).data
 
   // read metadata
-  const protobufContent = await readProtobuf(
-    new ChannelCategory(),
-    {
-      metadata: channelCategoryCreationParameters.meta,
-      db,
-      blockNumber: event.blockNumber,
-    }
-  )
+  const protobufContent = await readProtobuf(new ChannelCategory(), {
+    metadata: channelCategoryCreationParameters.meta,
+    db,
+    event,
+  })
 
   // create new channel category
   const channelCategory = new ChannelCategory({
@@ -220,31 +186,27 @@ export async function content_ChannelCategoryCreated(
     updatedAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
 
     // integrate metadata
-    ...protobufContent
+    ...protobufContent,
   })
 
   // save channel
   await db.save<ChannelCategory>(channelCategory)
 
   // emit log event
-  logger.info('Channel category has been created', {id: channelCategory.id})
+  logger.info('Channel category has been created', { id: channelCategory.id })
 }
 
 // eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_ChannelCategoryUpdated(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
+export async function content_ChannelCategoryUpdated(db: DatabaseManager, event: SubstrateEvent) {
   // read event data
-  const {
-    channelCategoryId,
-    channelCategoryUpdateParameters,
-  } = new Content.ChannelCategoryUpdatedEvent(event).data
+  const { channelCategoryId, channelCategoryUpdateParameters } = new Content.ChannelCategoryUpdatedEvent(event).data
 
   // load channel category
-  const channelCategory = await db.get(ChannelCategory, { where: {
-    id: channelCategoryId.toString()
-  } as FindConditions<ChannelCategory> })
+  const channelCategory = await db.get(ChannelCategory, {
+    where: {
+      id: channelCategoryId.toString(),
+    } as FindConditions<ChannelCategory>,
+  })
 
   // ensure channel exists
   if (!channelCategory) {
@@ -252,17 +214,14 @@ export async function content_ChannelCategoryUpdated(
   }
 
   // read metadata
-  const protobufContent = await readProtobuf(
-    new ChannelCategory(),
-    {
-      metadata: channelCategoryUpdateParameters.new_meta,
-      db,
-      blockNumber: event.blockNumber,
-    }
-  )
+  const protobufContent = await readProtobuf(new ChannelCategory(), {
+    metadata: channelCategoryUpdateParameters.new_meta,
+    db,
+    event,
+  })
 
   // update all fields read from protobuf
-  for (let [key, value] of Object.entries(protobufContent)) {
+  for (const [key, value] of Object.entries(protobufContent)) {
     channelCategory[key] = value
   }
 
@@ -273,21 +232,20 @@ export async function content_ChannelCategoryUpdated(
   await db.save<ChannelCategory>(channelCategory)
 
   // emit log event
-  logger.info('Channel category has been updated', {id: channelCategory.id})
+  logger.info('Channel category has been updated', { id: channelCategory.id })
 }
 
 // eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_ChannelCategoryDeleted(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
+export async function content_ChannelCategoryDeleted(db: DatabaseManager, event: SubstrateEvent) {
   // read event data
-  const {channelCategoryId} = new Content.ChannelCategoryDeletedEvent(event).data
+  const { channelCategoryId } = new Content.ChannelCategoryDeletedEvent(event).data
 
   // load channel category
-  const channelCategory = await db.get(ChannelCategory, { where: {
-    id: channelCategoryId.toString()
-  } as FindConditions<ChannelCategory> })
+  const channelCategory = await db.get(ChannelCategory, {
+    where: {
+      id: channelCategoryId.toString(),
+    } as FindConditions<ChannelCategory>,
+  })
 
   // ensure channel category exists
   if (!channelCategory) {
@@ -298,10 +256,10 @@ export async function content_ChannelCategoryDeleted(
   await db.remove<ChannelCategory>(channelCategory)
 
   // emit log event
-  logger.info('Channel category has been deleted', {id: channelCategory.id})
+  logger.info('Channel category has been deleted', { id: channelCategory.id })
 }
 
-/////////////////// Helpers ////////////////////////////////////////////////////
+/// ///////////////// Helpers ////////////////////////////////////////////////////
 
 function handleChannelRewardAccountChange(
   channel: Channel, // will be modified inside of the function!

+ 23 - 32
query-node/mappings/src/content/curatorGroup.ts

@@ -6,17 +6,11 @@ import { FindConditions } from 'typeorm'
 import { CuratorGroup } from 'query-node'
 import { Content } from '../../../generated/types'
 
-import {
-  inconsistentState,
-  logger,
-} from '../common'
-
-export async function content_CuratorGroupCreated(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
+import { inconsistentState, logger } from '../common'
+
+export async function content_CuratorGroupCreated(db: DatabaseManager, event: SubstrateEvent) {
   // read event data
-  const {curatorGroupId} = new Content.CuratorGroupCreatedEvent(event).data
+  const { curatorGroupId } = new Content.CuratorGroupCreatedEvent(event).data
 
   // create new curator group
   const curatorGroup = new CuratorGroup({
@@ -34,18 +28,17 @@ export async function content_CuratorGroupCreated(
   await db.save<CuratorGroup>(curatorGroup)
 
   // emit log event
-  logger.info('Curator group has been created', {id: curatorGroupId})
+  logger.info('Curator group has been created', { id: curatorGroupId })
 }
 
-export async function content_CuratorGroupStatusSet(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
+export async function content_CuratorGroupStatusSet(db: DatabaseManager, event: SubstrateEvent) {
   // read event data
-  const {curatorGroupId, bool: isActive} = new Content.CuratorGroupStatusSetEvent(event).data
+  const { curatorGroupId, bool: isActive } = new Content.CuratorGroupStatusSetEvent(event).data
 
   // load curator group
-  const curatorGroup = await db.get(CuratorGroup, { where: { id: curatorGroupId.toString() } as FindConditions<CuratorGroup>})
+  const curatorGroup = await db.get(CuratorGroup, {
+    where: { id: curatorGroupId.toString() } as FindConditions<CuratorGroup>,
+  })
 
   // ensure curator group exists
   if (!curatorGroup) {
@@ -62,18 +55,17 @@ export async function content_CuratorGroupStatusSet(
   await db.save<CuratorGroup>(curatorGroup)
 
   // emit log event
-  logger.info('Curator group status has been set', {id: curatorGroupId, isActive})
+  logger.info('Curator group status has been set', { id: curatorGroupId, isActive })
 }
 
-export async function content_CuratorAdded(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
+export async function content_CuratorAdded(db: DatabaseManager, event: SubstrateEvent) {
   // read event data
-  const {curatorGroupId, curatorId} = new Content.CuratorAddedEvent(event).data
+  const { curatorGroupId, curatorId } = new Content.CuratorAddedEvent(event).data
 
   // load curator group
-  const curatorGroup = await db.get(CuratorGroup, { where: { id: curatorGroupId.toString() } as FindConditions<CuratorGroup>})
+  const curatorGroup = await db.get(CuratorGroup, {
+    where: { id: curatorGroupId.toString() } as FindConditions<CuratorGroup>,
+  })
 
   // ensure curator group exists
   if (!curatorGroup) {
@@ -90,18 +82,17 @@ export async function content_CuratorAdded(
   await db.save<CuratorGroup>(curatorGroup)
 
   // emit log event
-  logger.info('Curator has been added to curator group', {id: curatorGroupId, curatorId})
+  logger.info('Curator has been added to curator group', { id: curatorGroupId, curatorId })
 }
 
-export async function content_CuratorRemoved(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
+export async function content_CuratorRemoved(db: DatabaseManager, event: SubstrateEvent) {
   // read event data
-  const {curatorGroupId, curatorId} = new Content.CuratorAddedEvent(event).data
+  const { curatorGroupId, curatorId } = new Content.CuratorAddedEvent(event).data
 
   // load curator group
-  const curatorGroup = await db.get(CuratorGroup, { where: { id: curatorGroupId.toString() } as FindConditions<CuratorGroup>})
+  const curatorGroup = await db.get(CuratorGroup, {
+    where: { id: curatorGroupId.toString() } as FindConditions<CuratorGroup>,
+  })
 
   // ensure curator group exists
   if (!curatorGroup) {
@@ -122,5 +113,5 @@ export async function content_CuratorRemoved(
   await db.save<CuratorGroup>(curatorGroup)
 
   // emit log event
-  logger.info('Curator has been removed from curator group', {id: curatorGroupId, curatorId})
+  logger.info('Curator has been removed from curator group', { id: curatorGroupId, curatorId })
 }

+ 127 - 91
query-node/mappings/src/content/utils.ts

@@ -12,7 +12,8 @@ import { Bytes } from '@polkadot/types'
 import ISO6391 from 'iso-639-1'
 import { u64 } from '@polkadot/types/primitive'
 import { FindConditions } from 'typeorm'
-import * as jspb from "google-protobuf"
+import * as jspb from 'google-protobuf'
+import { fixBlockTimestamp } from '../eventFix'
 
 // protobuf definitions
 import {
@@ -25,17 +26,9 @@ import {
   VideoCategoryMetadata,
 } from '@joystream/content-metadata-protobuf'
 
-import {
-  Content,
-} from '../../../generated/types'
-
-import {
-  invalidMetadata,
-  inconsistentState,
-  logger,
-  prepareDataObject,
-} from '../common'
+import { Content } from '../../../generated/types'
 
+import { invalidMetadata, inconsistentState, logger, prepareDataObject, getNextId } from '../common'
 
 import {
   // primary entities
@@ -58,17 +51,11 @@ import {
   DataObject,
   LiaisonJudgement,
   AssetAvailability,
-
   Membership,
 } from 'query-node'
 
 // Joystream types
-import {
-  ChannelId,
-  ContentParameters,
-  NewAsset,
-  ContentActor,
-} from '@joystream/types/augment'
+import { ChannelId, ContentParameters, NewAsset, ContentActor } from '@joystream/types/augment'
 
 import { ContentParameters as Custom_ContentParameters } from '@joystream/types/storage'
 import { registry } from '@joystream/types'
@@ -92,7 +79,7 @@ function isAssetInStorage(dataObject: AssetStorageOrUrls): dataObject is DataObj
 export interface IReadProtobufArguments {
   metadata: Bytes
   db: DatabaseManager
-  blockNumber: number
+  event: SubstrateEvent
 }
 
 export interface IReadProtobufArgumentsWithAssets extends IReadProtobufArguments {
@@ -107,7 +94,6 @@ export interface IReadProtobufArgumentsWithAssets extends IReadProtobufArguments
   Change - set the new value
 */
 export class PropertyChange<T> {
-
   static newUnset<T>(): PropertyChange<T> {
     return new PropertyChange<T>('unset')
   }
@@ -123,11 +109,10 @@ export class PropertyChange<T> {
   /*
     Determines property change from the given object property.
   */
-  static fromObjectProperty<
-    T,
-    Key extends string,
-    ChangedObject extends {[key in Key]?: T}
-  >(object: ChangedObject, key: Key): PropertyChange<T> {
+  static fromObjectProperty<T, Key extends string, ChangedObject extends { [key in Key]?: T }>(
+    object: ChangedObject,
+    key: Key
+  ): PropertyChange<T> {
     if (!(key in object)) {
       return PropertyChange.newNoChange<T>()
     }
@@ -148,21 +133,19 @@ export class PropertyChange<T> {
   }
 
   public isUnset(): boolean {
-    return this.type == 'unset'
+    return this.type === 'unset'
   }
 
   public isNoChange(): boolean {
-    return this.type == 'nochange'
+    return this.type === 'nochange'
   }
 
   public isValue(): boolean {
-    return this.type == 'change'
+    return this.type === 'change'
   }
 
   public getValue(): T | undefined {
-    return this.type == 'change'
-      ? this.value
-      : undefined
+    return this.type === 'change' ? this.value : undefined
   }
 
   /*
@@ -198,7 +181,7 @@ export interface RawVideoMetadata {
 */
 export async function readProtobuf<T extends ChannelCategory | VideoCategory>(
   type: T,
-  parameters: IReadProtobufArguments,
+  parameters: IReadProtobufArguments
 ): Promise<Partial<T>> {
   // true option here is crucial, it indicates that we want just the underlying bytes (by default it will also include bytes encoding the length)
   const metaU8a = parameters.metadata.toU8a(true)
@@ -220,8 +203,8 @@ export async function readProtobuf<T extends ChannelCategory | VideoCategory>(
   }
 
   // this should never happen
-  logger.error('Not implemented metadata type', {type})
-  throw `Not implemented metadata type`
+  logger.error('Not implemented metadata type', { type })
+  throw new Error(`Not implemented metadata type`)
 }
 
 /*
@@ -231,7 +214,7 @@ export async function readProtobuf<T extends ChannelCategory | VideoCategory>(
 
 export async function readProtobufWithAssets<T extends Channel | Video>(
   type: T,
-  parameters: IReadProtobufArgumentsWithAssets,
+  parameters: IReadProtobufArgumentsWithAssets
 ): Promise<Partial<T>> {
   // true option here is crucial, it indicates that we want just the underlying bytes (by default it will also include bytes encoding the length)
   const metaU8a = parameters.metadata.toU8a(true)
@@ -240,16 +223,15 @@ export async function readProtobufWithAssets<T extends Channel | Video>(
   if (type instanceof Channel) {
     const meta = ChannelMetadata.deserializeBinary(metaU8a)
     const metaAsObject = convertMetadataToObject<ChannelMetadata.AsObject>(meta)
-    const result = metaAsObject as any as Partial<Channel>
+    const result = (metaAsObject as any) as Partial<Channel>
 
     // prepare cover photo asset if needed
     if ('coverPhoto' in metaAsObject) {
       const asset = await extractAsset({
-        //assetIndex: metaAsObject.coverPhoto,
         assetIndex: metaAsObject.coverPhoto,
         assets: parameters.assets,
         db: parameters.db,
-        blockNumber: parameters.blockNumber,
+        event: parameters.event,
         contentOwner: parameters.contentOwner,
       })
       integrateAsset('coverPhoto', result, asset) // changes `result` inline!
@@ -262,7 +244,7 @@ export async function readProtobufWithAssets<T extends Channel | Video>(
         assetIndex: metaAsObject.avatarPhoto,
         assets: parameters.assets,
         db: parameters.db,
-        blockNumber: parameters.blockNumber,
+        event: parameters.event,
         contentOwner: parameters.contentOwner,
       })
       integrateAsset('avatarPhoto', result, asset) // changes `result` inline!
@@ -271,7 +253,7 @@ export async function readProtobufWithAssets<T extends Channel | Video>(
 
     // prepare language if needed
     if ('language' in metaAsObject) {
-      const language = await prepareLanguage(metaAsObject.language, parameters.db, parameters.blockNumber)
+      const language = await prepareLanguage(metaAsObject.language, parameters.db, parameters.event)
       delete metaAsObject.language // make sure temporary value will not interfere
       language.integrateInto(result, 'language')
     }
@@ -283,7 +265,7 @@ export async function readProtobufWithAssets<T extends Channel | Video>(
   if (type instanceof Video) {
     const meta = VideoMetadata.deserializeBinary(metaU8a)
     const metaAsObject = convertMetadataToObject<VideoMetadata.AsObject>(meta)
-    const result = metaAsObject as any as Partial<Video>
+    const result = (metaAsObject as any) as Partial<Video>
 
     // prepare video category if needed
     if ('category' in metaAsObject) {
@@ -299,7 +281,11 @@ export async function readProtobufWithAssets<T extends Channel | Video>(
 
       // NOTE: type hack - `RawVideoMetadata` is inserted instead of VideoMediaMetadata - it should be edited in `video.ts`
       //       see `integrateVideoMetadata()` in `video.ts` for more info
-      result.mediaMetadata = prepareVideoMetadata(metaAsObject, videoSize, parameters.blockNumber) as unknown as VideoMediaMetadata
+      result.mediaMetadata = (prepareVideoMetadata(
+        metaAsObject,
+        videoSize,
+        parameters.event.blockNumber
+      ) as unknown) as VideoMediaMetadata
 
       // remove extra values
       delete metaAsObject.mediaType
@@ -309,7 +295,7 @@ export async function readProtobufWithAssets<T extends Channel | Video>(
 
     // prepare license if needed
     if ('license' in metaAsObject) {
-      result.license = await prepareLicense(metaAsObject.license)
+      result.license = await prepareLicense(parameters.db, metaAsObject.license, parameters.event)
     }
 
     // prepare thumbnail photo asset if needed
@@ -318,7 +304,7 @@ export async function readProtobufWithAssets<T extends Channel | Video>(
         assetIndex: metaAsObject.thumbnailPhoto,
         assets: parameters.assets,
         db: parameters.db,
-        blockNumber: parameters.blockNumber,
+        event: parameters.event,
         contentOwner: parameters.contentOwner,
       })
       integrateAsset('thumbnailPhoto', result, asset) // changes `result` inline!
@@ -331,7 +317,7 @@ export async function readProtobufWithAssets<T extends Channel | Video>(
         assetIndex: metaAsObject.video,
         assets: parameters.assets,
         db: parameters.db,
-        blockNumber: parameters.blockNumber,
+        event: parameters.event,
         contentOwner: parameters.contentOwner,
       })
       integrateAsset('media', result, asset) // changes `result` inline!
@@ -340,7 +326,7 @@ export async function readProtobufWithAssets<T extends Channel | Video>(
 
     // prepare language if needed
     if ('language' in metaAsObject) {
-      const language = await prepareLanguage(metaAsObject.language, parameters.db, parameters.blockNumber)
+      const language = await prepareLanguage(metaAsObject.language, parameters.db, parameters.event)
       delete metaAsObject.language // make sure temporary value will not interfere
       language.integrateInto(result, 'language')
     }
@@ -355,13 +341,16 @@ export async function readProtobufWithAssets<T extends Channel | Video>(
   }
 
   // this should never happen
-  logger.error('Not implemented metadata type', {type})
-  throw `Not implemented metadata type`
+  logger.error('Not implemented metadata type', { type })
+  throw new Error(`Not implemented metadata type`)
 }
 
-export async function convertContentActorToChannelOwner(db: DatabaseManager, contentActor: ContentActor): Promise<{
-  ownerMember?: Membership,
-  ownerCuratorGroup?: CuratorGroup,
+export async function convertContentActorToChannelOwner(
+  db: DatabaseManager,
+  contentActor: ContentActor
+): Promise<{
+  ownerMember?: Membership
+  ownerCuratorGroup?: CuratorGroup
 }> {
   if (contentActor.isMember) {
     const memberId = contentActor.asMember.toNumber()
@@ -380,7 +369,9 @@ export async function convertContentActorToChannelOwner(db: DatabaseManager, con
 
   if (contentActor.isCurator) {
     const curatorGroupId = contentActor.asCurator[0].toNumber()
-    const curatorGroup = await db.get(CuratorGroup, { where: { id: curatorGroupId.toString() } as FindConditions<CuratorGroup> })
+    const curatorGroup = await db.get(CuratorGroup, {
+      where: { id: curatorGroupId.toString() } as FindConditions<CuratorGroup>,
+    })
 
     // ensure curator group exists
     if (!curatorGroup) {
@@ -395,11 +386,14 @@ export async function convertContentActorToChannelOwner(db: DatabaseManager, con
 
   // TODO: contentActor.isLead
 
-  logger.error('Not implemented ContentActor type', {contentActor: contentActor.toString()})
-  throw 'Not-implemented ContentActor type used'
+  logger.error('Not implemented ContentActor type', { contentActor: contentActor.toString() })
+  throw new Error('Not-implemented ContentActor type used')
 }
 
-export function convertContentActorToDataObjectOwner(contentActor: ContentActor, channelId: number): typeof DataObjectOwner {
+export function convertContentActorToDataObjectOwner(
+  contentActor: ContentActor,
+  channelId: number
+): typeof DataObjectOwner {
   const owner = new DataObjectOwnerChannel()
   owner.channel = channelId
 
@@ -421,25 +415,26 @@ export function convertContentActorToDataObjectOwner(contentActor: ContentActor,
   }
 
   logger.error('Not implemented ContentActor type', {contentActor: contentActor.toString()})
-  throw 'Not-implemented ContentActor type used'
+  throw new Error('Not-implemented ContentActor type used')
   */
 }
 
-function handlePublishedBeforeJoystream(video: Partial<Video>, metadata: PublishedBeforeJoystreamMetadata.AsObject): PropertyChange<Date> {
+function handlePublishedBeforeJoystream(
+  video: Partial<Video>,
+  metadata: PublishedBeforeJoystreamMetadata.AsObject
+): PropertyChange<Date> {
   // is publish being unset
   if ('isPublished' in metadata && !metadata.isPublished) {
     return PropertyChange.newUnset()
   }
 
   // try to parse timestamp from publish date
-  const timestamp = metadata.date
-    ? Date.parse(metadata.date)
-    : NaN
+  const timestamp = metadata.date ? Date.parse(metadata.date) : NaN
 
   // ensure date is valid
   if (isNaN(timestamp)) {
     invalidMetadata(`Invalid date used for publishedBeforeJoystream`, {
-      timestamp
+      timestamp,
     })
     return PropertyChange.newNoChange()
   }
@@ -451,7 +446,7 @@ function handlePublishedBeforeJoystream(video: Partial<Video>, metadata: Publish
 interface IConvertAssetParameters {
   rawAsset: NewAsset
   db: DatabaseManager
-  blockNumber: number
+  event: SubstrateEvent
   contentOwner: typeof DataObjectOwner
 }
 
@@ -461,7 +456,7 @@ interface IConvertAssetParameters {
 async function convertAsset(parameters: IConvertAssetParameters): Promise<AssetStorageOrUrls> {
   // is asset describing list of URLs?
   if (parameters.rawAsset.isUrls) {
-    const urls = parameters.rawAsset.asUrls.toArray().map(item => item.toString())
+    const urls = parameters.rawAsset.asUrls.toArray().map((item) => item.toString())
 
     return urls
   }
@@ -470,7 +465,12 @@ async function convertAsset(parameters: IConvertAssetParameters): Promise<AssetS
 
   // prepare data object
   const contentParameters: ContentParameters = parameters.rawAsset.asUpload
-  const dataObject = await prepareDataObject(contentParameters, parameters.blockNumber, parameters.contentOwner)
+  const dataObject = await prepareDataObject(
+    parameters.db,
+    contentParameters,
+    parameters.event,
+    parameters.contentOwner
+  )
 
   return dataObject
 }
@@ -479,7 +479,7 @@ interface IExtractAssetParameters {
   assetIndex: number | undefined
   assets: NewAsset[]
   db: DatabaseManager
-  blockNumber: number
+  event: SubstrateEvent
   contentOwner: typeof DataObjectOwner
 }
 
@@ -505,7 +505,7 @@ async function extractAsset(parameters: IExtractAssetParameters): Promise<Proper
   const asset = await convertAsset({
     rawAsset: parameters.assets[parameters.assetIndex],
     db: parameters.db,
-    blockNumber: parameters.blockNumber,
+    event: parameters.event,
     contentOwner: parameters.contentOwner,
   })
 
@@ -570,7 +570,7 @@ function extractVideoSize(assets: NewAsset[], assetIndex: number | undefined): n
 
   // ensure asset index is valid
   if (assetIndex > assets.length) {
-    invalidMetadata(`Non-existing asset video size extraction requested`, {assetsProvided: assets.length, assetIndex})
+    invalidMetadata(`Non-existing asset video size extraction requested`, { assetsProvided: assets.length, assetIndex })
     return undefined
   }
 
@@ -591,7 +591,11 @@ function extractVideoSize(assets: NewAsset[], assetIndex: number | undefined): n
   return videoSize
 }
 
-async function prepareLanguage(languageIso: string | undefined, db: DatabaseManager, blockNumber: number): Promise<PropertyChange<Language>> {
+async function prepareLanguage(
+  languageIso: string | undefined,
+  db: DatabaseManager,
+  event: SubstrateEvent
+): Promise<PropertyChange<Language>> {
   // is language being unset?
   if (languageIso === undefined) {
     return PropertyChange.newUnset()
@@ -614,11 +618,14 @@ async function prepareLanguage(languageIso: string | undefined, db: DatabaseMana
     return PropertyChange.newChange(language)
   }
 
-
   // create new language
   const newLanguage = new Language({
+    id: await getNextId(db),
     iso: languageIso,
-    createdInBlock: blockNumber,
+    createdInBlock: event.blockNumber,
+
+    createdAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
+    updatedAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
 
     // TODO: remove these lines after Hydra auto-fills the values when cascading save (remove them on all places)
     createdById: '1',
@@ -630,7 +637,11 @@ async function prepareLanguage(languageIso: string | undefined, db: DatabaseMana
   return PropertyChange.newChange(newLanguage)
 }
 
-async function prepareLicense(licenseProtobuf: LicenseMetadata.AsObject | undefined): Promise<License | undefined> {
+async function prepareLicense(
+  db: DatabaseManager,
+  licenseProtobuf: LicenseMetadata.AsObject | undefined,
+  event: SubstrateEvent
+): Promise<License | undefined> {
   // NOTE: Deletion of any previous license should take place in appropriate event handling function
   //       and not here even it might appear so.
 
@@ -647,6 +658,10 @@ async function prepareLicense(licenseProtobuf: LicenseMetadata.AsObject | undefi
   // crete new license
   const license = new License({
     ...licenseProtobuf,
+    id: await getNextId(db),
+
+    createdAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
+    updatedAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
 
     createdById: '1',
     updatedById: '1',
@@ -660,39 +675,60 @@ async function prepareLicense(licenseProtobuf: LicenseMetadata.AsObject | undefi
   Empty object means deletion is requested.
 */
 function isLicenseEmpty(licenseObject: LicenseMetadata.AsObject): boolean {
-    let somePropertySet = Object.entries(licenseObject).reduce((acc, [key, value]) => {
-        return acc || value !== undefined
-    }, false)
+  const somePropertySet = Object.entries(licenseObject).reduce((acc, [key, value]) => {
+    return acc || value !== undefined
+  }, false)
 
-    return !somePropertySet
+  return !somePropertySet
 }
 
-
-function prepareVideoMetadata(videoProtobuf: VideoMetadata.AsObject, videoSize: number | undefined, blockNumber: number): RawVideoMetadata {
+function prepareVideoMetadata(
+  videoProtobuf: VideoMetadata.AsObject,
+  videoSize: number | undefined,
+  blockNumber: number
+): RawVideoMetadata {
   const rawMeta = {
     encoding: {
-      codecName: PropertyChange.fromObjectProperty<string, 'codecName', MediaTypeMetadata.AsObject>(videoProtobuf.mediaType || {}, 'codecName'),
-      container: PropertyChange.fromObjectProperty<string, 'container', MediaTypeMetadata.AsObject>(videoProtobuf.mediaType || {}, 'container'),
-      mimeMediaType: PropertyChange.fromObjectProperty<string, 'mimeMediaType', MediaTypeMetadata.AsObject>(videoProtobuf.mediaType || {}, 'mimeMediaType'),
+      codecName: PropertyChange.fromObjectProperty<string, 'codecName', MediaTypeMetadata.AsObject>(
+        videoProtobuf.mediaType || {},
+        'codecName'
+      ),
+      container: PropertyChange.fromObjectProperty<string, 'container', MediaTypeMetadata.AsObject>(
+        videoProtobuf.mediaType || {},
+        'container'
+      ),
+      mimeMediaType: PropertyChange.fromObjectProperty<string, 'mimeMediaType', MediaTypeMetadata.AsObject>(
+        videoProtobuf.mediaType || {},
+        'mimeMediaType'
+      ),
     },
-    pixelWidth: PropertyChange.fromObjectProperty<number, 'mediaPixelWidth', VideoMetadata.AsObject>(videoProtobuf, 'mediaPixelWidth'),
-    pixelHeight: PropertyChange.fromObjectProperty<number, 'mediaPixelHeight', VideoMetadata.AsObject>(videoProtobuf, 'mediaPixelHeight'),
-    size: videoSize === undefined
-      ? PropertyChange.newNoChange()
-      : PropertyChange.newChange(videoSize)
+    pixelWidth: PropertyChange.fromObjectProperty<number, 'mediaPixelWidth', VideoMetadata.AsObject>(
+      videoProtobuf,
+      'mediaPixelWidth'
+    ),
+    pixelHeight: PropertyChange.fromObjectProperty<number, 'mediaPixelHeight', VideoMetadata.AsObject>(
+      videoProtobuf,
+      'mediaPixelHeight'
+    ),
+    size: videoSize === undefined ? PropertyChange.newNoChange() : PropertyChange.newChange(videoSize),
   } as RawVideoMetadata
 
   return rawMeta
 }
 
-async function prepareVideoCategory(categoryId: number | undefined, db: DatabaseManager): Promise<PropertyChange<VideoCategory>> {
+async function prepareVideoCategory(
+  categoryId: number | undefined,
+  db: DatabaseManager
+): Promise<PropertyChange<VideoCategory>> {
   // is category being unset?
   if (categoryId === undefined) {
     return PropertyChange.newUnset()
   }
 
   // load video category
-  const category = await db.get(VideoCategory, { where: { id: categoryId.toString() } as FindConditions<VideoCategory> })
+  const category = await db.get(VideoCategory, {
+    where: { id: categoryId.toString() } as FindConditions<VideoCategory>,
+  })
 
   // ensure video category exists
   if (!category) {
@@ -710,15 +746,15 @@ function convertMetadataToObject<T extends Object>(metadata: jspb.Message): T {
   for (const key in metaAsObject) {
     const funcNameBase = key.charAt(0).toUpperCase() + key.slice(1)
     const hasFuncName = 'has' + funcNameBase
-    const isSet = funcNameBase == 'PersonsList' // there is no `VideoMetadata.hasPersonsList` method from unkown reason -> create exception
-      ? true
-      : metadata[hasFuncName]()
+    const isSet =
+      funcNameBase === 'PersonsList' // there is no `VideoMetadata.hasPersonsList` method from unkown reason -> create exception
+        ? true
+        : metadata[hasFuncName]()
 
     if (!isSet) {
       continue
     }
 
-
     const getFuncName = 'get' + funcNameBase
     const value = metadata[getFuncName]()
 

+ 183 - 184
query-node/mappings/src/content/video.ts

@@ -4,61 +4,38 @@ import { SubstrateEvent } from '@dzlzv/hydra-common'
 import { DatabaseManager } from '@dzlzv/hydra-db-utils'
 import { FindConditions, In } from 'typeorm'
 
-import {
-  Content,
-} from '../../../generated/types'
+import { Content } from '../../../generated/types'
 
-import {
-  inconsistentState,
-  logger,
-} from '../common'
+import { inconsistentState, logger, getNextId } from '../common'
 
-import {
-  convertContentActorToDataObjectOwner,
-  readProtobuf,
-  readProtobufWithAssets,
-  RawVideoMetadata,
-} from './utils'
+import { convertContentActorToDataObjectOwner, readProtobuf, readProtobufWithAssets, RawVideoMetadata } from './utils'
 
-// primary entities
 import {
   AssetAvailability,
   Channel,
+  License,
   Video,
   VideoCategory,
   VideoMediaEncoding,
   VideoMediaMetadata,
 } from 'query-node'
 
-// secondary entities
-import { License } from 'query-node'
-
 // Joystream types
-import {
-  ChannelId,
-} from '@joystream/types/augment'
+import { ChannelId } from '@joystream/types/augment'
 
 // eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_VideoCategoryCreated(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
+export async function content_VideoCategoryCreated(db: DatabaseManager, event: SubstrateEvent) {
   // read event data
-  const {
-    videoCategoryId,
-    videoCategoryCreationParameters,
-    contentActor,
-  } = new Content.VideoCategoryCreatedEvent(event).data
+  const { videoCategoryId, videoCategoryCreationParameters, contentActor } = new Content.VideoCategoryCreatedEvent(
+    event
+  ).data
 
   // read metadata
-  const protobufContent = await readProtobuf(
-    new VideoCategory(),
-    {
-      metadata: videoCategoryCreationParameters.meta,
-      db,
-      blockNumber: event.blockNumber,
-    }
-  )
+  const protobufContent = await readProtobuf(new VideoCategory(), {
+    metadata: videoCategoryCreationParameters.meta,
+    db,
+    event,
+  })
 
   // create new video category
   const videoCategory = new VideoCategory({
@@ -72,30 +49,27 @@ export async function content_VideoCategoryCreated(
     updatedAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
 
     // integrate metadata
-    ...protobufContent
+    ...protobufContent,
   })
 
   // save video category
   await db.save<VideoCategory>(videoCategory)
 
   // emit log event
-  logger.info('Video category has been created', {id: videoCategoryId})
+  logger.info('Video category has been created', { id: videoCategoryId })
 }
 
 // eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_VideoCategoryUpdated(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
+export async function content_VideoCategoryUpdated(db: DatabaseManager, event: SubstrateEvent) {
   // read event data
-  const {
-    videoCategoryId,
-    videoCategoryUpdateParameters,
-    contentActor,
-  } = new Content.VideoCategoryUpdatedEvent(event).data
+  const { videoCategoryId, videoCategoryUpdateParameters, contentActor } = new Content.VideoCategoryUpdatedEvent(
+    event
+  ).data
 
   // load video category
-  const videoCategory = await db.get(VideoCategory, { where: { id: videoCategoryId.toString() } as FindConditions<VideoCategory> })
+  const videoCategory = await db.get(VideoCategory, {
+    where: { id: videoCategoryId.toString() } as FindConditions<VideoCategory>,
+  })
 
   // ensure video category exists
   if (!videoCategory) {
@@ -103,17 +77,14 @@ export async function content_VideoCategoryUpdated(
   }
 
   // read metadata
-  const protobufContent = await readProtobuf(
-    new VideoCategory(),
-    {
-      metadata: videoCategoryUpdateParameters.new_meta,
-      db,
-      blockNumber: event.blockNumber,
-    }
-  )
+  const protobufContent = await readProtobuf(new VideoCategory(), {
+    metadata: videoCategoryUpdateParameters.new_meta,
+    db,
+    event,
+  })
 
   // update all fields read from protobuf
-  for (let [key, value] of Object.entries(protobufContent)) {
+  for (const [key, value] of Object.entries(protobufContent)) {
     videoCategory[key] = value
   }
 
@@ -124,19 +95,18 @@ export async function content_VideoCategoryUpdated(
   await db.save<VideoCategory>(videoCategory)
 
   // emit log event
-  logger.info('Video category has been updated', {id: videoCategoryId})
+  logger.info('Video category has been updated', { id: videoCategoryId })
 }
 
 // eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_VideoCategoryDeleted(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
+export async function content_VideoCategoryDeleted(db: DatabaseManager, event: SubstrateEvent) {
   // read event data
-  const {videoCategoryId} = new Content.VideoCategoryDeletedEvent(event).data
+  const { videoCategoryId } = new Content.VideoCategoryDeletedEvent(event).data
 
   // load video category
-  const videoCategory = await db.get(VideoCategory, { where: { id: videoCategoryId.toString() } as FindConditions<VideoCategory> })
+  const videoCategory = await db.get(VideoCategory, {
+    where: { id: videoCategoryId.toString() } as FindConditions<VideoCategory>,
+  })
 
   // ensure video category exists
   if (!videoCategory) {
@@ -147,35 +117,24 @@ export async function content_VideoCategoryDeleted(
   await db.remove<VideoCategory>(videoCategory)
 
   // emit log event
-  logger.info('Video category has been deleted', {id: videoCategoryId})
+  logger.info('Video category has been deleted', { id: videoCategoryId })
 }
 
-/////////////////// Video //////////////////////////////////////////////////////
+/// ///////////////// Video //////////////////////////////////////////////////////
 
 // eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_VideoCreated(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
+export async function content_VideoCreated(db: DatabaseManager, event: SubstrateEvent) {
   // read event data
-  const {
-    channelId,
-    videoId,
-    videoCreationParameters,
-    contentActor,
-  } = new Content.VideoCreatedEvent(event).data
+  const { channelId, videoId, videoCreationParameters, contentActor } = new Content.VideoCreatedEvent(event).data
 
   // read metadata
-  const protobufContent = await readProtobufWithAssets(
-    new Video(),
-    {
-      metadata: videoCreationParameters.meta,
-      db,
-      blockNumber: event.blockNumber,
-      assets: videoCreationParameters.assets,
-      contentOwner: convertContentActorToDataObjectOwner(contentActor, channelId.toNumber()),
-    }
-  )
+  const protobufContent = await readProtobufWithAssets(new Video(), {
+    metadata: videoCreationParameters.meta,
+    db,
+    event,
+    assets: videoCreationParameters.assets,
+    contentOwner: convertContentActorToDataObjectOwner(contentActor, channelId.toNumber()),
+  })
 
   // load channel
   const channel = await db.get(Channel, { where: { id: channelId.toString() } as FindConditions<Channel> })
@@ -186,10 +145,11 @@ export async function content_VideoCreated(
   }
 
   // prepare video media metadata (if any)
-  const fixedProtobuf = integrateVideoMediaMetadata(null, protobufContent, event.blockNumber)
+  const fixedProtobuf = await integrateVideoMediaMetadata(db, null, protobufContent, event)
 
   const licenseIsEmpty = fixedProtobuf.license && !Object.keys(fixedProtobuf.license).length
-  if (licenseIsEmpty) { // license deletion was requested - ignore it and consider it empty
+  if (licenseIsEmpty) {
+    // license deletion was requested - ignore it and consider it empty
     delete fixedProtobuf.license
   }
 
@@ -208,36 +168,31 @@ export async function content_VideoCreated(
     mediaUrls: [],
     mediaAvailability: AssetAvailability.INVALID,
 
-
     // fill in auto-generated fields
     createdAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
     updatedAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
 
     // integrate metadata
-    ...fixedProtobuf
+    ...fixedProtobuf,
   })
 
   // save video
   await db.save<Video>(video)
 
   // emit log event
-  logger.info('Video has been created', {id: videoId})
+  logger.info('Video has been created', { id: videoId })
 }
 
 // eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_VideoUpdated(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
+export async function content_VideoUpdated(db: DatabaseManager, event: SubstrateEvent) {
   // read event data
-  const {
-    videoId,
-    videoUpdateParameters,
-    contentActor,
-  } = new Content.VideoUpdatedEvent(event).data
+  const { videoId, videoUpdateParameters, contentActor } = new Content.VideoUpdatedEvent(event).data
 
   // load video
-  const video = await db.get(Video, { where: { id: videoId.toString() } as FindConditions<Video>, relations: ['channel', 'license'] })
+  const video = await db.get(Video, {
+    where: { id: videoId.toString() } as FindConditions<Video>,
+    relations: ['channel', 'license'],
+  })
 
   // ensure video exists
   if (!video) {
@@ -252,32 +207,30 @@ export async function content_VideoUpdated(
 
   // update metadata if it was changed
   if (newMetadata) {
-    const protobufContent = await readProtobufWithAssets(
-      new Video(),
-      {
-        metadata: newMetadata,
-        db,
-        blockNumber: event.blockNumber,
-        assets: videoUpdateParameters.assets.unwrapOr([]),
-        contentOwner: convertContentActorToDataObjectOwner(contentActor, (new BN(video.channel.id)).toNumber()),
-      }
-    )
+    const protobufContent = await readProtobufWithAssets(new Video(), {
+      metadata: newMetadata,
+      db,
+      event,
+      assets: videoUpdateParameters.assets.unwrapOr([]),
+      contentOwner: convertContentActorToDataObjectOwner(contentActor, new BN(video.channel.id).toNumber()),
+    })
 
     // prepare video media metadata (if any)
-    const fixedProtobuf = integrateVideoMediaMetadata(video, protobufContent, event.blockNumber)
+    const fixedProtobuf = await integrateVideoMediaMetadata(db, video, protobufContent, event)
 
     // remember original license
     const originalLicense = video.license
 
     // update all fields read from protobuf
-    for (let [key, value] of Object.entries(fixedProtobuf)) {
+    for (const [key, value] of Object.entries(fixedProtobuf)) {
       video[key] = value
     }
 
     // license has changed - plan old license delete
-    if (originalLicense && video.license != originalLicense) {
-      ([video.license, licenseToDelete] = handleLicenseUpdate(originalLicense, video.license))
-    } else if (!Object.keys(video.license || {}).length) { // license deletion was requested event no license exists?
+    if (originalLicense && video.license !== originalLicense) {
+      ;[video.license, licenseToDelete] = handleLicenseUpdate(originalLicense, video.license)
+    } else if (!Object.keys(video.license || {}).length) {
+      // license deletion was requested even though no license exists?
       delete video.license // ensure license is empty
     }
   }
@@ -294,16 +247,13 @@ export async function content_VideoUpdated(
   }
 
   // emit log event
-  logger.info('Video has been updated', {id: videoId})
+  logger.info('Video has been updated', { id: videoId })
 }
 
 // eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_VideoDeleted(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
+export async function content_VideoDeleted(db: DatabaseManager, event: SubstrateEvent) {
   // read event data
-  const {videoId} = new Content.VideoDeletedEvent(event).data
+  const { videoId } = new Content.VideoDeletedEvent(event).data
 
   // load video
   const video = await db.get(Video, { where: { id: videoId.toString() } as FindConditions<Video> })
@@ -317,17 +267,13 @@ export async function content_VideoDeleted(
   await db.remove<Video>(video)
 
   // emit log event
-  logger.info('Video has been deleted', {id: videoId})
+  logger.info('Video has been deleted', { id: videoId })
 }
 
-
 // eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_VideoCensorshipStatusUpdated(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
+export async function content_VideoCensorshipStatusUpdated(db: DatabaseManager, event: SubstrateEvent) {
   // read event data
-  const {videoId, isCensored} = new Content.VideoCensorshipStatusUpdatedEvent(event).data
+  const { videoId, isCensored } = new Content.VideoCensorshipStatusUpdatedEvent(event).data
 
   // load video
   const video = await db.get(Video, { where: { id: videoId.toString() } as FindConditions<Video> })
@@ -338,7 +284,7 @@ export async function content_VideoCensorshipStatusUpdated(
   }
 
   // update video
-  video.isCensored = isCensored.isTrue;
+  video.isCensored = isCensored.isTrue
 
   // set last update time
   video.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
@@ -347,77 +293,84 @@ export async function content_VideoCensorshipStatusUpdated(
   await db.save<Video>(video)
 
   // emit log event
-  logger.info('Video censorship status has been updated', {id: videoId, isCensored: isCensored.isTrue})
+  logger.info('Video censorship status has been updated', { id: videoId, isCensored: isCensored.isTrue })
 }
 
 // eslint-disable-next-line @typescript-eslint/naming-convention
-export async function content_FeaturedVideosSet(
-  db: DatabaseManager,
-  event: SubstrateEvent
-) {
+export async function content_FeaturedVideosSet(db: DatabaseManager, event: SubstrateEvent) {
   // read event data
-  const {videoId: videoIds} = new Content.FeaturedVideosSetEvent(event).data
+  const { videoId: videoIds } = new Content.FeaturedVideosSetEvent(event).data
 
   // load old featured videos
   const existingFeaturedVideos = await db.getMany(Video, { where: { isFeatured: true } as FindConditions<Video> })
 
   // comparsion utility
-  const isSame = (videoIdA: string) => (videoIdB: string) => videoIdA == videoIdB
+  const isSame = (videoIdA: string) => (videoIdB: string) => videoIdA === videoIdB
 
   // calculate diff sets
-  const toRemove = existingFeaturedVideos.filter(existingFV =>
-    !videoIds
-      .map(item => item.toHex())
-      .some(isSame(existingFV.id))
+  const toRemove = existingFeaturedVideos.filter(
+    (existingFV) => !videoIds.map((item) => item.toString()).some(isSame(existingFV.id))
   )
-  const toAdd = videoIds.filter(video =>
-    !existingFeaturedVideos
-      .map(item => item.id)
-      .some(isSame(video.toHex()))
+  const toAdd = videoIds.filter(
+    (video) => !existingFeaturedVideos.map((item) => item.id).some(isSame(video.toString()))
   )
 
+  // escape if no featured video needs to be added or removed
+  if (!toRemove.length && !toAdd.length) {
+    // emit log event
+    logger.info('Featured videos unchanged')
+
+    return
+  }
+
   // mark previously featured videos as not-featured
-  for (let video of toRemove) {
-    video.isFeatured = false;
+  await Promise.all(
+    toRemove.map(async (video) => {
+      video.isFeatured = false
 
-    // set last update time
-    video.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
+      // set last update time
+      video.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
 
-    await db.save<Video>(video)
-  }
+      await db.save<Video>(video)
+    })
+  )
 
   // escape if no featured video needs to be added
-  if (!toAdd) {
+  if (!toAdd.length) {
     // emit log event
-    logger.info('Featured videos unchanged')
+    logger.info('Some featured videos have been unset.', { videoIds: toRemove.map((item) => item.id.toString()) })
 
     return
   }
 
   // read videos previously not-featured videos that are meant to be featured
-  const videosToAdd = await db.getMany(Video, { where: {
-    id: In(toAdd.map(item => item.toString()))
-  } as FindConditions<Video> })
+  const videosToAdd = await db.getMany(Video, {
+    where: {
+      id: In(toAdd.map((item) => item.toString())),
+    } as FindConditions<Video>,
+  })
 
-  if (videosToAdd.length != toAdd.length) {
+  if (videosToAdd.length !== toAdd.length) {
     return inconsistentState('At least one non-existing video featuring requested', toAdd)
   }
 
   // mark previously not-featured videos as featured
-  for (let video of videosToAdd) {
-    video.isFeatured = true;
+  await Promise.all(
+    videosToAdd.map(async (video) => {
+      video.isFeatured = true
 
-    // set last update time
-    video.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
+      // set last update time
+      video.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
 
-    await db.save<Video>(video)
-  }
+      await db.save<Video>(video)
+    })
+  )
 
   // emit log event
-  logger.info('New featured videos have been set', {videoIds})
+  logger.info('New featured videos have been set', { videoIds })
 }
 
-/////////////////// Helpers ////////////////////////////////////////////////////
+/// ///////////////// Helpers ////////////////////////////////////////////////////
 
 /*
   Integrates video metadata-related data into existing data (if any) or creates a new record.
@@ -425,24 +378,31 @@ export async function content_FeaturedVideosSet(
   NOTE: type hack - `RawVideoMetadata` is accepted for `metadata` instead of `Partial<Video>`
         see `prepareVideoMetadata()` in `utils.ts` for more info
 */
-function integrateVideoMediaMetadata(
+async function integrateVideoMediaMetadata(
+  db: DatabaseManager,
   existingRecord: Video | null,
   metadata: Partial<Video>,
-  blockNumber: number,
-): Partial<Video> {
+  event: SubstrateEvent
+): Promise<Partial<Video>> {
   if (!metadata.mediaMetadata) {
     return metadata
   }
 
+  const now = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
+
   // fix TS type
-  const rawMediaMetadata = metadata.mediaMetadata as unknown as RawVideoMetadata
+  const rawMediaMetadata = (metadata.mediaMetadata as unknown) as RawVideoMetadata
 
   // ensure encoding object
-  const encoding = (existingRecord && existingRecord.mediaMetadata && existingRecord.mediaMetadata.encoding)
-    || new VideoMediaEncoding({
-        createdById: '1',
-        updatedById: '1',
-      })
+  const encoding =
+    (existingRecord && existingRecord.mediaMetadata && existingRecord.mediaMetadata.encoding) ||
+    new VideoMediaEncoding({
+      createdAt: now,
+      updatedAt: now,
+
+      createdById: '1',
+      updatedById: '1',
+    })
 
   // integrate media encoding-related data
   rawMediaMetadata.encoding.codecName.integrateInto(encoding, 'codecName')
@@ -450,12 +410,17 @@ function integrateVideoMediaMetadata(
   rawMediaMetadata.encoding.mimeMediaType.integrateInto(encoding, 'mimeMediaType')
 
   // ensure media metadata object
-  const mediaMetadata = (existingRecord && existingRecord.mediaMetadata) || new VideoMediaMetadata({
-    createdInBlock: blockNumber,
+  const mediaMetadata =
+    (existingRecord && existingRecord.mediaMetadata) ||
+    new VideoMediaMetadata({
+      createdInBlock: event.blockNumber,
 
-    createdById: '1',
-    updatedById: '1',
-  })
+      createdAt: now,
+      updatedAt: now,
+
+      createdById: '1',
+      updatedById: '1',
+    })
 
   // integrate media-related data
   rawMediaMetadata.pixelWidth.integrateInto(mediaMetadata, 'pixelWidth')
@@ -465,9 +430,41 @@ function integrateVideoMediaMetadata(
   // connect encoding to media metadata object
   mediaMetadata.encoding = encoding
 
+  // ensure predictable ids
+  if (!mediaMetadata.encoding.id) {
+    mediaMetadata.encoding.id = await getNextId(db)
+  }
+  if (!mediaMetadata.id) {
+    mediaMetadata.id = await getNextId(db)
+  }
+
+  /// ///////////////// update updatedAt if needed ///////////////////////////////
+
+  const encodingNoChange =
+    true &&
+    rawMediaMetadata.encoding.codecName.isNoChange() &&
+    rawMediaMetadata.encoding.container.isNoChange() &&
+    rawMediaMetadata.encoding.mimeMediaType.isNoChange()
+  const mediaMetadataNoChange =
+    encodingNoChange &&
+    rawMediaMetadata.encoding.codecName.isNoChange() &&
+    rawMediaMetadata.encoding.container.isNoChange() &&
+    rawMediaMetadata.encoding.mimeMediaType.isNoChange()
+
+  if (!encodingNoChange) {
+    // encoding changed?
+    mediaMetadata.encoding.updatedAt = now
+  }
+  if (!mediaMetadataNoChange) {
+    // metadata changed?
+    mediaMetadata.updatedAt = now
+  }
+
+  /// ////////////////////////////////////////////////////////////////////////////
+
   return {
     ...metadata,
-    mediaMetadata
+    mediaMetadata,
   }
 }
 
@@ -479,17 +476,19 @@ function handleLicenseUpdate(originalLicense, newLicense): [License | undefined,
     return [undefined, null]
   }
 
-  if (!originalLicense) { // && !isNewEmpty
+  if (!originalLicense) {
+    // && !isNewEmpty
     return [newLicense, null]
   }
 
-  if (!isNewEmpty) { // && originalLicense
+  if (!isNewEmpty) {
+    // && originalLicense
     return [
       new License({
         ...originalLicense,
         ...newLicense,
       }),
-      null
+      null,
     ]
   }
 

+ 1 - 1
query-node/mappings/src/eventFix.ts

@@ -2,5 +2,5 @@ import BN from 'bn.js'
 
 // Workaround for https://github.com/Joystream/hydra/issues/326 . This file can be removed after it's fixed
 export function fixBlockTimestamp(blockTimestamp: unknown): BN {
-    return new BN(blockTimestamp as string)
+  return new BN(blockTimestamp as string)
 }

+ 34 - 53
query-node/mappings/src/membership.ts

@@ -1,5 +1,4 @@
 import { fixBlockTimestamp } from './eventFix'
-import BN from 'bn.js'
 import { Bytes } from '@polkadot/types'
 import { MemberId } from '@joystream/types/members'
 import { SubstrateEvent } from '@dzlzv/hydra-common'
@@ -7,6 +6,7 @@ import { DatabaseManager } from '@dzlzv/hydra-db-utils'
 import { FindConditions } from 'typeorm'
 
 import {
+  convertBytesToString,
   inconsistentState,
   logger,
   extractExtrinsicArgs,
@@ -20,15 +20,11 @@ import { EntryMethod } from '@joystream/types/augment'
 export async function members_MemberRegistered(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
   // read event data
   const { accountId, memberId, entryMethod } = new Members.MemberRegisteredEvent(event).data
-  const { avatarUri, about, handle } = extractExtrinsicArgs(
-    event,
-    Members.BuyMembershipCall,
-    {
-      handle: 1,
-      avatarUri: 2,
-      about: 3,
-    },
-  )
+  const { avatarUri, about, handle } = extractExtrinsicArgs(event, Members.BuyMembershipCall, {
+    handle: 1,
+    avatarUri: 2,
+    about: 3,
+  })
 
   // create new membership
   const member = new Membership({
@@ -51,7 +47,7 @@ export async function members_MemberRegistered(db: DatabaseManager, event: Subst
   await db.save<Membership>(member)
 
   // emit log event
-  logger.info('Member has been registered', {ids: memberId})
+  logger.info('Member has been registered', { ids: memberId })
 }
 
 // eslint-disable-next-line @typescript-eslint/naming-convention
@@ -59,9 +55,9 @@ export async function members_MemberUpdatedAboutText(db: DatabaseManager, event:
   // read event data
   const { text, memberId } = isUpdateMembershipExtrinsic(event)
     ? unpackUpdateMembershipOptions(
-        extractExtrinsicArgs(event, Members.UpdateMembershipCall, {memberId: 0, about: 3})
+        extractExtrinsicArgs(event, Members.UpdateMembershipCall, { memberId: 0, about: 3 })
       )
-    : extractExtrinsicArgs(event, Members.ChangeMemberAboutTextCall, {memberId: 0, text: 1})
+    : extractExtrinsicArgs(event, Members.ChangeMemberAboutTextCall, { memberId: 0, text: 1 })
 
   // load member
   const member = await db.get(Membership, { where: { id: memberId.toString() } as FindConditions<Membership> })
@@ -81,7 +77,7 @@ export async function members_MemberUpdatedAboutText(db: DatabaseManager, event:
   await db.save<Membership>(member)
 
   // emit log event
-  logger.info("Member's about text has been updated", {ids: memberId})
+  logger.info("Member's about text has been updated", { ids: memberId })
 }
 
 // eslint-disable-next-line @typescript-eslint/naming-convention
@@ -89,9 +85,9 @@ export async function members_MemberUpdatedAvatar(db: DatabaseManager, event: Su
   // read event data
   const { uri, memberId } = isUpdateMembershipExtrinsic(event)
     ? unpackUpdateMembershipOptions(
-        extractExtrinsicArgs(event, Members.UpdateMembershipCall, {memberId: 0, avatarUri: 2})
+        extractExtrinsicArgs(event, Members.UpdateMembershipCall, { memberId: 0, avatarUri: 2 })
       )
-    : extractExtrinsicArgs(event, Members.ChangeMemberAvatarCall, {memberId: 0, uri: 1})
+    : extractExtrinsicArgs(event, Members.ChangeMemberAvatarCall, { memberId: 0, uri: 1 })
 
   // load member
   const member = await db.get(Membership, { where: { id: memberId.toString() } as FindConditions<Membership> })
@@ -111,7 +107,7 @@ export async function members_MemberUpdatedAvatar(db: DatabaseManager, event: Su
   await db.save<Membership>(member)
 
   // emit log event
-  logger.info("Member's avatar has been updated", {ids: memberId})
+  logger.info("Member's avatar has been updated", { ids: memberId })
 }
 
 // eslint-disable-next-line @typescript-eslint/naming-convention
@@ -119,9 +115,9 @@ export async function members_MemberUpdatedHandle(db: DatabaseManager, event: Su
   // read event data
   const { handle, memberId } = isUpdateMembershipExtrinsic(event)
     ? unpackUpdateMembershipOptions(
-        extractExtrinsicArgs(event, Members.UpdateMembershipCall, {memberId: 0, handle: 1})
+        extractExtrinsicArgs(event, Members.UpdateMembershipCall, { memberId: 0, handle: 1 })
       )
-    : extractExtrinsicArgs(event, Members.ChangeMemberHandleCall, {memberId: 0, handle: 1})
+    : extractExtrinsicArgs(event, Members.ChangeMemberHandleCall, { memberId: 0, handle: 1 })
 
   // load member
   const member = await db.get(Membership, { where: { id: memberId.toString() } as FindConditions<Membership> })
@@ -141,13 +137,16 @@ export async function members_MemberUpdatedHandle(db: DatabaseManager, event: Su
   await db.save<Membership>(member)
 
   // emit log event
-  logger.info("Member's avatar has been updated", {ids: memberId})
+  logger.info("Member's avatar has been updated", { ids: memberId })
 }
 
 // eslint-disable-next-line @typescript-eslint/naming-convention
 export async function members_MemberSetRootAccount(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
   // read event data
-  const { newRootAccount, memberId } = extractExtrinsicArgs(event, Members.SetRootAccountCall, {memberId: 0, newRootAccount: 1})
+  const { newRootAccount, memberId } = extractExtrinsicArgs(event, Members.SetRootAccountCall, {
+    memberId: 0,
+    newRootAccount: 1,
+  })
 
   // load member
   const member = await db.get(Membership, { where: { id: memberId.toString() } as FindConditions<Membership> })
@@ -167,17 +166,16 @@ export async function members_MemberSetRootAccount(db: DatabaseManager, event: S
   await db.save<Membership>(member)
 
   // emit log event
-  logger.info("Member's root has been updated", {ids: memberId})
+  logger.info("Member's root has been updated", { ids: memberId })
 }
 
 // eslint-disable-next-line @typescript-eslint/naming-convention
 export async function members_MemberSetControllerAccount(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
   // read event data
-  const { newControllerAccount, memberId } = extractExtrinsicArgs(
-    event,
-    Members.SetControllerAccountCall,
-    {memberId: 0, newControllerAccount: 1},
-  )
+  const { newControllerAccount, memberId } = extractExtrinsicArgs(event, Members.SetControllerAccountCall, {
+    memberId: 0,
+    newControllerAccount: 1,
+  })
 
   // load member
   const member = await db.get(Membership, { where: { id: memberId.toString() } as FindConditions<Membership> })
@@ -197,28 +195,10 @@ export async function members_MemberSetControllerAccount(db: DatabaseManager, ev
   await db.save<Membership>(member)
 
   // emit log event
-  logger.info("Member's controller has been updated", {ids: memberId})
+  logger.info("Member's controller has been updated", { ids: memberId })
 }
 
-/////////////////// Helpers ////////////////////////////////////////////////////
-
-/*
-  Helper for converting Bytes type to string
-*/
-function convertBytesToString(b: Bytes | null): string {
-  if (!b) {
-    return ''
-  }
-
-  const result = Buffer.from(b.toU8a(true)).toString()
-
-  // prevent utf-8 null character
-  if (result.match(/^\0$/)) {
-    return ''
-  }
-
-  return result
-}
+/// ///////////////// Helpers ////////////////////////////////////////////////////
 
 function convertEntryMethod(entryMethod: EntryMethod): MembershipEntryMethod {
   // paid membership?
@@ -237,31 +217,32 @@ function convertEntryMethod(entryMethod: EntryMethod): MembershipEntryMethod {
   }
 
   // should never happen
-  logger.error('Not implemented entry method', {entryMethod: entryMethod.toString()})
-  throw 'Not implemented entry method'
+  logger.error('Not implemented entry method', { entryMethod: entryMethod.toString() })
+  throw new Error('Not implemented entry method')
 }
 
 /*
   Returns true if event is emitted inside of `update_membership` extrinsic.
 */
 function isUpdateMembershipExtrinsic(event: SubstrateEvent): boolean {
-  if (!event.extrinsic) { // this should never happen
+  if (!event.extrinsic) {
+    // this should never happen
     return false
   }
 
-  if (event.extrinsic.method == 'updateMembership') {
+  if (event.extrinsic.method === 'updateMembership') {
     return true
   }
 
   // no sudo was used to update membership -> this is not updateMembership
-  if (event.extrinsic.section != 'sudo') {
+  if (event.extrinsic.section !== 'sudo') {
     return false
   }
 
   const sudoCallParameters = extractSudoCallParameters<unknown[]>(event)
 
   // very trivial check if update_membership extrinsic was used
-  return sudoCallParameters.args.length == 4 // memberId, handle, avatarUri, about
+  return sudoCallParameters.args.length === 4 // memberId, handle, avatarUri, about
 }
 
 interface IUnpackedUpdateMembershipOptions {

+ 60 - 56
query-node/mappings/src/storage.ts

@@ -3,20 +3,10 @@ import { SubstrateEvent } from '@dzlzv/hydra-common'
 import { DatabaseManager } from '@dzlzv/hydra-db-utils'
 import { FindConditions, In } from 'typeorm'
 
-import {
-  inconsistentState,
-  logger,
-  prepareDataObject,
-} from './common'
+import { inconsistentState, logger, prepareDataObject } from './common'
 
-import {
-  DataDirectory,
-} from '../../generated/types'
-import {
-  ContentId,
-  ContentParameters,
-  StorageObjectOwner,
-} from '@joystream/types/augment'
+import { DataDirectory } from '../../generated/types'
+import { ContentId, ContentParameters, StorageObjectOwner } from '@joystream/types/augment'
 
 import { ContentId as Custom_ContentId, ContentParameters as Custom_ContentParameters } from '@joystream/types/storage'
 import { registry } from '@joystream/types'
@@ -25,7 +15,6 @@ import {
   Channel,
   Video,
   AssetAvailability,
-
   DataObject,
   DataObjectOwner,
   DataObjectOwnerMember,
@@ -40,12 +29,12 @@ import {
 
 export async function dataDirectory_ContentAdded(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
   // read event data
-  const {contentParameters, storageObjectOwner} = new DataDirectory.ContentAddedEvent(event).data
+  const { contentParameters, storageObjectOwner } = new DataDirectory.ContentAddedEvent(event).data
 
   // save all content objects
-  for (let parameters of contentParameters) {
+  for (const parameters of contentParameters) {
     const owner = convertStorageObjectOwner(storageObjectOwner)
-    const dataObject = await prepareDataObject(parameters, event.blockNumber, owner)
+    const dataObject = await prepareDataObject(db, parameters, event, owner)
 
     // fill in auto-generated fields
     dataObject.createdAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
@@ -55,23 +44,27 @@ export async function dataDirectory_ContentAdded(db: DatabaseManager, event: Sub
   }
 
   // emit log event
-  logger.info("Storage content has beed added", {ids: contentParameters.map(item => encodeContentId(item.content_id))})
+  logger.info('Storage content has beed added', {
+    ids: contentParameters.map((item) => encodeContentId(item.content_id)),
+  })
 }
 
 export async function dataDirectory_ContentRemoved(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
   // read event data
-  const {contentId: contentIds} = new DataDirectory.ContentRemovedEvent(event).data
+  const { contentId: contentIds } = new DataDirectory.ContentRemovedEvent(event).data
 
   // load assets
-  const dataObjects = await db.getMany(DataObject, { where: {
-    joystreamContentId: In(contentIds.map(item => encodeContentId(item)))
-  } as FindConditions<DataObject> })
+  const dataObjects = await db.getMany(DataObject, {
+    where: {
+      joystreamContentId: In(contentIds.map((item) => encodeContentId(item))),
+    } as FindConditions<DataObject>,
+  })
 
   // store dataObject ids before they are deleted (for logging purposes)
-  const dataObjectIds = dataObjects.map(item => item.id)
+  const dataObjectIds = dataObjects.map((item) => item.id)
 
   // remove assets from database
-  for (let item of dataObjects) {
+  for (const item of dataObjects) {
     // ensure dataObject is nowhere used to prevent db constraint error
     await disconnectDataObjectRelations(db, item)
 
@@ -80,16 +73,18 @@ export async function dataDirectory_ContentRemoved(db: DatabaseManager, event: S
   }
 
   // emit log event
-  logger.info("Storage content have been removed", {id: contentIds, dataObjectIds})
+  logger.info('Storage content have been removed', { id: contentIds, dataObjectIds })
 }
 
 export async function dataDirectory_ContentAccepted(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
   // read event data
-  const {contentId, storageProviderId} = new DataDirectory.ContentAcceptedEvent(event).data
+  const { contentId, storageProviderId } = new DataDirectory.ContentAcceptedEvent(event).data
   const encodedContentId = encodeContentId(contentId)
 
   // load asset
-  const dataObject = await db.get(DataObject, { where: { joystreamContentId: encodedContentId } as FindConditions<DataObject>})
+  const dataObject = await db.get(DataObject, {
+    where: { joystreamContentId: encodedContentId } as FindConditions<DataObject>,
+  })
 
   // ensure object exists
   if (!dataObject) {
@@ -101,7 +96,7 @@ export async function dataDirectory_ContentAccepted(db: DatabaseManager, event:
     where: {
       workerId: storageProviderId.toString(),
       type: WorkerType.STORAGE,
-    } as FindConditions<Worker>
+    } as FindConditions<Worker>,
   })
 
   // ensure object exists
@@ -120,14 +115,14 @@ export async function dataDirectory_ContentAccepted(db: DatabaseManager, event:
   await db.save<DataObject>(dataObject)
 
   // emit log event
-  logger.info("Storage content has been accepted", {id: encodedContentId})
+  logger.info('Storage content has been accepted', { id: encodedContentId })
 
   // update asset availability for all connected channels and videos
   // this will not be needed after redudant AssetAvailability will be removed (after some Hydra upgrades)
   await updateConnectedAssets(db, dataObject)
 }
 
-/////////////////// Updating connected entities ////////////////////////////////
+/// ///////////////// Updating connected entities ////////////////////////////////
 
 async function updateConnectedAssets(db: DatabaseManager, dataObject: DataObject) {
   await updateSingleConnectedAsset(db, new Channel(), 'avatarPhoto', dataObject)
@@ -137,22 +132,25 @@ async function updateConnectedAssets(db: DatabaseManager, dataObject: DataObject
   await updateSingleConnectedAsset(db, new Video(), 'media', dataObject)
 }
 
-//async function updateSingleConnectedAsset(db: DatabaseManager, type: typeof Channel | typeof Video, propertyName: string, dataObject: DataObject) {
-async function updateSingleConnectedAsset<T extends Channel | Video>(db: DatabaseManager, type: T, propertyName: string, dataObject: DataObject) {
+// async function updateSingleConnectedAsset(db: DatabaseManager, type: typeof Channel | typeof Video, propertyName: string, dataObject: DataObject) {
+async function updateSingleConnectedAsset<T extends Channel | Video>(
+  db: DatabaseManager,
+  type: T,
+  propertyName: string,
+  dataObject: DataObject
+) {
   // prepare lookup condition
   const condition = {
     where: {
-      [propertyName + 'DataObject']: dataObject
-    }
+      [propertyName + 'DataObject']: dataObject,
+    },
   } // as FindConditions<T>
 
   // NOTE: we don't need to retrieve multiple channels/videos via `db.getMany()` because dataObject
   //       is allowed to be associated only with one channel/video in runtime
 
   // in therory the following condition(s) can be generalized `... db.get(type, ...` but in practice it doesn't work :-\
-  const item = type instanceof Channel
-    ? await db.get(Channel, condition)
-    : await db.get(Video, condition)
+  const item = type instanceof Channel ? await db.get(Channel, condition) : await db.get(Video, condition)
 
   // escape when no dataObject association found
   if (!item) {
@@ -165,17 +163,17 @@ async function updateSingleConnectedAsset<T extends Channel | Video>(db: Databas
     await db.save<Channel>(item)
 
     // emit log event
-    logger.info("Channel using Content has been accepted", {
+    logger.info('Channel using Content has been accepted', {
       channelId: item.id.toString(),
-      joystreamContentId: dataObject.joystreamContentId
+      joystreamContentId: dataObject.joystreamContentId,
     })
   } else {
     await db.save<Video>(item)
 
     // emit log event
-    logger.info("Video using Content has been accepted", {
+    logger.info('Video using Content has been accepted', {
       videoId: item.id.toString(),
-      joystreamContentId: dataObject.joystreamContentId
+      joystreamContentId: dataObject.joystreamContentId,
     })
   }
 }
@@ -189,21 +187,24 @@ async function disconnectDataObjectRelations(db: DatabaseManager, dataObject: Da
   await disconnectSingleDataObjectRelation(db, new Video(), 'media', dataObject)
 }
 
-async function disconnectSingleDataObjectRelation<T extends Channel | Video>(db: DatabaseManager, type: T, propertyName: string, dataObject: DataObject) {
+async function disconnectSingleDataObjectRelation<T extends Channel | Video>(
+  db: DatabaseManager,
+  type: T,
+  propertyName: string,
+  dataObject: DataObject
+) {
   // prepare lookup condition
   const condition = {
     where: {
-      [propertyName + 'DataObject']: dataObject
-    }
+      [propertyName + 'DataObject']: dataObject,
+    },
   } // as FindConditions<T>
 
   // NOTE: we don't need to retrieve multiple channels/videos via `db.getMany()` because dataObject
   //       is allowed to be associated only with one channel/video in runtime
 
   // in therory the following condition(s) can be generalized `... db.get(type, ...` but in practice it doesn't work :-\
-  const item = type instanceof Channel
-    ? await db.get(Channel, condition)
-    : await db.get(Video, condition)
+  const item = type instanceof Channel ? await db.get(Channel, condition) : await db.get(Video, condition)
 
   // escape when no dataObject association found
   if (!item) {
@@ -217,20 +218,23 @@ async function disconnectSingleDataObjectRelation<T extends Channel | Video>(db:
     await db.save<Channel>(item)
 
     // emit log event
-    logger.info("Content has been disconnected from Channel", {
+    logger.info('Content has been disconnected from Channel', {
       channelId: item.id.toString(),
-      joystreamContentId: dataObject.joystreamContentId})
-  } else { // type instanceof Video
+      joystreamContentId: dataObject.joystreamContentId,
+    })
+  } else {
+    // type instanceof Video
     await db.save<Video>(item)
 
     // emit log event
-    logger.info("Content has been disconnected from Video", {
+    logger.info('Content has been disconnected from Video', {
       videoId: item.id.toString(),
-      joystreamContentId: dataObject.joystreamContentId})
+      joystreamContentId: dataObject.joystreamContentId,
+    })
   }
 }
 
-/////////////////// Helpers ////////////////////////////////////////////////////
+/// ///////////////// Helpers ////////////////////////////////////////////////////
 
 function convertStorageObjectOwner(objectOwner: StorageObjectOwner): typeof DataObjectOwner {
   if (objectOwner.isMember) {
@@ -265,12 +269,12 @@ function convertStorageObjectOwner(objectOwner: StorageObjectOwner): typeof Data
     return owner
   }
 
-  logger.error('Not implemented StorageObjectOwner type', {objectOwner: objectOwner.toString()})
-  throw 'Not implemented StorageObjectOwner type'
+  logger.error('Not implemented StorageObjectOwner type', { objectOwner: objectOwner.toString() })
+  throw new Error('Not implemented StorageObjectOwner type')
 }
 
 function encodeContentId(contentId: ContentId) {
-  const customContentId = new Custom_ContentId(registry, contentId);
+  const customContentId = new Custom_ContentId(registry, contentId)
 
   return customContentId.encode()
 }

+ 98 - 64
query-node/mappings/src/workingGroup.ts

@@ -2,40 +2,30 @@ import { SubstrateEvent } from '@dzlzv/hydra-common'
 import { DatabaseManager } from '@dzlzv/hydra-db-utils'
 import { FindConditions } from 'typeorm'
 import { Bytes } from '@polkadot/types'
+import { fixBlockTimestamp } from './eventFix'
 
-import {
-  inconsistentState,
-  logger,
-} from './common'
-
-import {
-  Channel,
-  Worker,
-  WorkerType,
-} from 'query-node'
-import {
-  GatewayWorkingGroup,
-  StorageWorkingGroup,
-} from '../../generated/types'
-import {
-  ApplicationId,
-  ApplicationIdToWorkerIdMap,
-  WorkerId,
-} from "@joystream/types/augment";
-
-/////////////////// Storage working group //////////////////////////////////////
+import { convertBytesToString, inconsistentState, logger, getNextId } from './common'
+
+import { Channel, Worker, WorkerType } from 'query-node'
+import { GatewayWorkingGroup, StorageWorkingGroup } from '../../generated/types'
+import { ApplicationId, ApplicationIdToWorkerIdMap, WorkerId } from '@joystream/types/augment'
+
+/// ///////////////// Storage working group //////////////////////////////////////
 
 export async function storageWorkingGroup_OpeningFilled(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
   // read event data
-  const {applicationIdToWorkerIdMap} = new StorageWorkingGroup.OpeningFilledEvent(event).data
+  const { applicationIdToWorkerIdMap } = new StorageWorkingGroup.OpeningFilledEvent(event).data
 
   // call generic processing
-  await workingGroup_OpeningFilled(db, WorkerType.STORAGE, applicationIdToWorkerIdMap)
+  await workingGroup_OpeningFilled(db, WorkerType.STORAGE, applicationIdToWorkerIdMap, event)
 }
 
-export async function storageWorkingGroup_WorkerStorageUpdated(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
+export async function storageWorkingGroup_WorkerStorageUpdated(
+  db: DatabaseManager,
+  event: SubstrateEvent
+): Promise<void> {
   // read event data
-  const {workerId, bytes: newMetadata} = new StorageWorkingGroup.WorkerStorageUpdatedEvent(event).data
+  const { workerId, bytes: newMetadata } = new StorageWorkingGroup.WorkerStorageUpdatedEvent(event).data
 
   // call generic processing
   await workingGroup_WorkerStorageUpdated(db, WorkerType.STORAGE, workerId, newMetadata)
@@ -43,41 +33,44 @@ export async function storageWorkingGroup_WorkerStorageUpdated(db: DatabaseManag
 
 export async function storageWorkingGroup_TerminatedWorker(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
   // read event data
-  const {workerId} = new StorageWorkingGroup.TerminatedWorkerEvent(event).data
+  const { workerId } = new StorageWorkingGroup.TerminatedWorkerEvent(event).data
 
   // call generic processing
-  await workingGroup_TerminatedWorker(db, WorkerType.STORAGE, workerId)
+  await workingGroup_TerminatedWorker(db, event, WorkerType.STORAGE, workerId)
 }
 
 export async function storageWorkingGroup_WorkerExited(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
   // read event data
-  const {workerId} = new StorageWorkingGroup.WorkerExitedEvent(event).data
+  const { workerId } = new StorageWorkingGroup.WorkerExitedEvent(event).data
 
   // call generic processing
-  await workingGroup_WorkerExited(db, WorkerType.STORAGE, workerId)
+  await workingGroup_WorkerExited(db, event, WorkerType.STORAGE, workerId)
 }
 
 export async function storageWorkingGroup_TerminatedLeader(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
   // read event data
-  const {workerId} = new StorageWorkingGroup.TerminatedLeaderEvent(event).data
+  const { workerId } = new StorageWorkingGroup.TerminatedLeaderEvent(event).data
 
   // call generic processing
-  await workingGroup_TerminatedLeader(db, WorkerType.STORAGE, workerId)
+  await workingGroup_TerminatedLeader(db, event, WorkerType.STORAGE, workerId)
 }
 
-/////////////////// Gateway working group //////////////////////////////////////
+/// ///////////////// Gateway working group //////////////////////////////////////
 
 export async function gatewayWorkingGroup_OpeningFilled(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
   // read event data
-  const {applicationIdToWorkerIdMap} = new GatewayWorkingGroup.OpeningFilledEvent(event).data
+  const { applicationIdToWorkerIdMap } = new GatewayWorkingGroup.OpeningFilledEvent(event).data
 
   // call generic processing
-  await workingGroup_OpeningFilled(db, WorkerType.GATEWAY, applicationIdToWorkerIdMap)
+  await workingGroup_OpeningFilled(db, WorkerType.GATEWAY, applicationIdToWorkerIdMap, event)
 }
 
-export async function gatewayWorkingGroup_WorkerStorageUpdated(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
+export async function gatewayWorkingGroup_WorkerStorageUpdated(
+  db: DatabaseManager,
+  event: SubstrateEvent
+): Promise<void> {
   // read event data
-  const {workerId, bytes: newMetadata} = new GatewayWorkingGroup.WorkerStorageUpdatedEvent(event).data
+  const { workerId, bytes: newMetadata } = new GatewayWorkingGroup.WorkerStorageUpdatedEvent(event).data
 
   // call generic processing
   await workingGroup_WorkerStorageUpdated(db, WorkerType.GATEWAY, workerId, newMetadata)
@@ -85,52 +78,58 @@ export async function gatewayWorkingGroup_WorkerStorageUpdated(db: DatabaseManag
 
 export async function gatewayWorkingGroup_TerminatedWorker(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
   // read event data
-  const {workerId} = new GatewayWorkingGroup.TerminatedWorkerEvent(event).data
+  const { workerId } = new GatewayWorkingGroup.TerminatedWorkerEvent(event).data
 
   // call generic processing
-  await workingGroup_TerminatedWorker(db, WorkerType.GATEWAY, workerId)
+  await workingGroup_TerminatedWorker(db, event, WorkerType.GATEWAY, workerId)
 }
 
 export async function gatewayWorkingGroup_WorkerExited(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
   // read event data
-  const {workerId} = new GatewayWorkingGroup.WorkerExitedEvent(event).data
+  const { workerId } = new GatewayWorkingGroup.WorkerExitedEvent(event).data
 
   // call generic processing
-  await workingGroup_WorkerExited(db, WorkerType.GATEWAY, workerId)
+  await workingGroup_WorkerExited(db, event, WorkerType.GATEWAY, workerId)
 }
 
 export async function gatewayWorkingGroup_TerminatedLeader(db: DatabaseManager, event: SubstrateEvent): Promise<void> {
   // read event data
-  const {workerId} = new GatewayWorkingGroup.TerminatedLeaderEvent(event).data
+  const { workerId } = new GatewayWorkingGroup.TerminatedLeaderEvent(event).data
 
   // call generic processing
-  await workingGroup_TerminatedLeader(db, WorkerType.GATEWAY, workerId)
+  await workingGroup_TerminatedLeader(db, event, WorkerType.GATEWAY, workerId)
 }
 
-/////////////////// Generic working group processing ///////////////////////////
+/// ///////////////// Generic working group processing ///////////////////////////
 
 export async function workingGroup_OpeningFilled(
   db: DatabaseManager,
   workerType: WorkerType,
-  applicationIdToWorkerIdMap: ApplicationIdToWorkerIdMap
+  applicationIdToWorkerIdMap: ApplicationIdToWorkerIdMap,
+  event: SubstrateEvent
 ): Promise<void> {
   const workerIds = [...applicationIdToWorkerIdMap.values()]
 
   for (const workerId of workerIds) {
-    await createWorker(db, workerId, workerType)
+    await createWorker(db, workerId, workerType, event)
   }
 
   // emit log event
-  logger.info("Workers have been created", {ids: workerIds.map(item => item.toString()), workerType})
+  logger.info('Workers have been created', { ids: workerIds.map((item) => item.toString()), workerType })
 }
 
-export async function workingGroup_WorkerStorageUpdated(db: DatabaseManager, workerType: WorkerType, workerId: WorkerId, newMetadata: Bytes): Promise<void> {
+export async function workingGroup_WorkerStorageUpdated(
+  db: DatabaseManager,
+  workerType: WorkerType,
+  workerId: WorkerId,
+  newMetadata: Bytes
+): Promise<void> {
   // load worker
   const worker = await db.get(Worker, {
     where: {
       workerId: workerId.toString(),
       type: workerType,
-    } as FindConditions<Worker>
+    } as FindConditions<Worker>,
   })
 
   // ensure worker exists
@@ -138,58 +137,88 @@ export async function workingGroup_WorkerStorageUpdated(db: DatabaseManager, wor
     return inconsistentState('Non-existing worker update requested', workerId)
   }
 
-  worker.metadata = newMetadata.toUtf8()
+  worker.metadata = convertBytesToString(newMetadata)
 
   await db.save<Worker>(worker)
 
   // emit log event
-  logger.info("Worker has been updated", {workerId, workerType})
+  logger.info('Worker has been updated', { workerId, workerType })
 }
 
-export async function workingGroup_TerminatedWorker(db: DatabaseManager, workerType: WorkerType, workerId: WorkerId): Promise<void> {
+export async function workingGroup_TerminatedWorker(
+  db: DatabaseManager,
+  event: SubstrateEvent,
+  workerType: WorkerType,
+  workerId: WorkerId
+): Promise<void> {
   // do removal logic
-  await deactivateWorker(db, workerType, workerId)
+  await deactivateWorker(db, event, workerType, workerId)
 
   // emit log event
-  logger.info("Worker has been removed (worker terminated)", {workerId, workerType})
+  logger.info('Worker has been removed (worker terminated)', { workerId, workerType })
 }
 
-export async function workingGroup_WorkerExited(db: DatabaseManager, workerType: WorkerType, workerId: WorkerId): Promise<void> {
+export async function workingGroup_WorkerExited(
+  db: DatabaseManager,
+  event: SubstrateEvent,
+  workerType: WorkerType,
+  workerId: WorkerId
+): Promise<void> {
   // do removal logic
-  await deactivateWorker(db, workerType, workerId)
+  await deactivateWorker(db, event, workerType, workerId)
 
   // emit log event
-  logger.info("Worker has been removed (worker exited)", {workerId, workerType})
+  logger.info('Worker has been removed (worker exited)', { workerId, workerType })
 }
 
-export async function workingGroup_TerminatedLeader(db: DatabaseManager, workerType: WorkerType, workerId: WorkerId): Promise<void> {
+export async function workingGroup_TerminatedLeader(
+  db: DatabaseManager,
+  event: SubstrateEvent,
+  workerType: WorkerType,
+  workerId: WorkerId
+): Promise<void> {
   // do removal logic
-  await deactivateWorker(db, workerType, workerId)
+  await deactivateWorker(db, event, workerType, workerId)
 
   // emit log event
-  logger.info("Working group leader has been removed (worker exited)", {workerId, workerType})
+  logger.info('Working group leader has been removed (worker exited)', { workerId, workerType })
 }
 
-/////////////////// Helpers ////////////////////////////////////////////////////
+/// ///////////////// Helpers ////////////////////////////////////////////////////
 
-async function createWorker(db: DatabaseManager, workerId: WorkerId, workerType: WorkerType): Promise<void> {
-  // create new worker
+async function createWorker(
+  db: DatabaseManager,
+  workerId: WorkerId,
+  workerType: WorkerType,
+  event: SubstrateEvent
+): Promise<void> {
+  // create entity
   const newWorker = new Worker({
+    id: await getNextId(db),
     workerId: workerId.toString(),
     type: workerType,
     isActive: true,
+
+    createdAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
+    updatedAt: new Date(fixBlockTimestamp(event.blockTimestamp).toNumber()),
   })
 
+  // save worker
   await db.save<Worker>(newWorker)
 }
 
-async function deactivateWorker(db: DatabaseManager, workerType: WorkerType, workerId: WorkerId) {
+async function deactivateWorker(
+  db: DatabaseManager,
+  event: SubstrateEvent,
+  workerType: WorkerType,
+  workerId: WorkerId
+) {
   // load worker
   const worker = await db.get(Worker, {
     where: {
       workerId: workerId.toString(),
       type: workerType,
-    } as FindConditions<Worker>
+    } as FindConditions<Worker>,
   })
 
   // ensure worker exists
@@ -197,7 +226,12 @@ async function deactivateWorker(db: DatabaseManager, workerType: WorkerType, wor
     return inconsistentState('Non-existing worker deletion requested', workerId)
   }
 
+  // update worker
   worker.isActive = false
 
+  // set last update time
+  worker.updatedAt = new Date(fixBlockTimestamp(event.blockTimestamp).toNumber())
+
+  // save worker
   await db.save<Worker>(worker)
 }

+ 1 - 1
query-node/package.json

@@ -5,7 +5,7 @@
   "scripts": {
     "build": "./build.sh",
     "rebuild": "yarn db:drop && yarn clean:query-node && yarn codegen:query-node && yarn db:prepare && yarn db:migrate",
-    "lint": "echo \"Skippinng\"",
+    "lint": "yarn workspace query-node-mappings lint",
     "clean": "rm -rf ./generated",
     "clean:query-node": "rm -rf ./generated/graphql-server",
     "processor:bootstrap": "./bootstrap.sh",

+ 8 - 0
query-node/schema.graphql

@@ -400,3 +400,11 @@ type Worker @entity {
 
   dataObjects: [DataObject!]! @derivedFrom(field: "liaison")
 }
+
+type NextEntityId @entity {
+  "Unique identifier"
+  id: ID!
+
+  "Next deterministic id for entities without custom id"
+  nextId: Int!
+}

+ 0 - 2
scripts/raspberry-cross-build.sh

@@ -14,5 +14,3 @@ docker run \
     --volume ${HOME}/.cargo/registry:/home/cross/.cargo/registry \
     joystream/rust-raspberry \
     build --release -p joystream-node
-
-ls -l target/arm-unknown-linux-gnueabihf/joystream-node

+ 3 - 1
scripts/runtime-code-shasum.sh

@@ -19,4 +19,6 @@ ${TAR} -c --sort=name --owner=root:0 --group=root:0 --mode 644 --mtime='UTC 2020
     runtime \
     runtime-modules \
     utils/chain-spec-builder \
-    joystream-node.Dockerfile | shasum | cut -d " " -f 1
+    joystream-node.Dockerfile \
+    node \
+    joystream-node-armv7.Dockerfile | shasum | cut -d " " -f 1

Some files were not shown because too many files changed in this diff