Эх сурвалжийг харах

Merge branch 'giza_feature_NewContentFeaturesAndNFT' into olympia

Leszek Wiesner 3 жил өмнө
parent
commit
8cd0d2d850
100 өөрчлөгдсөн 2530 нэмэгдсэн , 907 устгасан
  1. 1 0
      .dockerignore
  2. 41 17
      .env
  3. 60 53
      .github/workflows/create-ami.yml
  4. 152 0
      .github/workflows/create-release.yml
  5. 68 0
      .github/workflows/deploy-playground.yml
  6. 2 2
      .github/workflows/integration-tests.yml
  7. 32 32
      .github/workflows/joystream-cli.yml
  8. 163 47
      .github/workflows/joystream-node-docker.yml
  9. 43 0
      .github/workflows/query-node.yml
  10. 176 0
      .github/workflows/run-network-tests.yml
  11. 5 3
      .github/workflows/storage-node.yml
  12. 4 0
      .gitignore
  13. 9 2
      Cargo.lock
  14. 5 2
      README.md
  15. 2 3
      apps.Dockerfile
  16. 0 35
      build-docker-images.sh
  17. 24 0
      build-node-docker.sh
  18. 2 1
      build-packages.sh
  19. 0 0
      chain-metadata.json
  20. 1 1
      cli/examples/content/CreateVideo.json
  21. 13 9
      cli/package.json
  22. 8 0
      cli/scripts/content-test.sh
  23. 40 130
      cli/src/Api.ts
  24. 2 1
      cli/src/ExitCodes.ts
  25. 140 0
      cli/src/QueryNodeApi.ts
  26. 49 22
      cli/src/Types.ts
  27. 56 42
      cli/src/base/AccountsCommandBase.ts
  28. 103 73
      cli/src/base/ApiCommandBase.ts
  29. 107 43
      cli/src/base/ContentDirectoryCommandBase.ts
  30. 3 0
      cli/src/base/DefaultCommandBase.ts
  31. 3 3
      cli/src/base/StateAwareCommandBase.ts
  32. 199 83
      cli/src/base/UploadCommandBase.ts
  33. 6 15
      cli/src/base/WorkingGroupsCommandBase.ts
  34. 1 1
      cli/src/commands/account/create.ts
  35. 9 5
      cli/src/commands/account/export.ts
  36. 1 1
      cli/src/commands/account/forget.ts
  37. 7 3
      cli/src/commands/account/import.ts
  38. 1 1
      cli/src/commands/account/info.ts
  39. 14 10
      cli/src/commands/account/list.ts
  40. 1 1
      cli/src/commands/account/transferTokens.ts
  41. 3 3
      cli/src/commands/api/getQueryNodeEndpoint.ts
  42. 2 2
      cli/src/commands/api/inspect.ts
  43. 7 10
      cli/src/commands/api/setQueryNodeEndpoint.ts
  44. 3 6
      cli/src/commands/api/setUri.ts
  45. 5 7
      cli/src/commands/content/addCuratorToGroup.ts
  46. 7 13
      cli/src/commands/content/channel.ts
  47. 4 3
      cli/src/commands/content/channels.ts
  48. 45 24
      cli/src/commands/content/createChannel.ts
  49. 2 2
      cli/src/commands/content/createChannelCategory.ts
  50. 3 7
      cli/src/commands/content/createCuratorGroup.ts
  51. 48 38
      cli/src/commands/content/createVideo.ts
  52. 2 2
      cli/src/commands/content/createVideoCategory.ts
  53. 1 1
      cli/src/commands/content/curatorGroup.ts
  54. 101 0
      cli/src/commands/content/deleteChannel.ts
  55. 1 1
      cli/src/commands/content/deleteChannelCategory.ts
  56. 80 0
      cli/src/commands/content/deleteVideo.ts
  57. 1 1
      cli/src/commands/content/deleteVideoCategory.ts
  58. 40 0
      cli/src/commands/content/removeChannelAssets.ts
  59. 5 7
      cli/src/commands/content/removeCuratorFromGroup.ts
  60. 18 7
      cli/src/commands/content/reuploadAssets.ts
  61. 5 7
      cli/src/commands/content/setCuratorGroupStatus.ts
  62. 1 1
      cli/src/commands/content/setFeaturedVideos.ts
  63. 88 17
      cli/src/commands/content/updateChannel.ts
  64. 2 2
      cli/src/commands/content/updateChannelCategory.ts
  65. 1 1
      cli/src/commands/content/updateChannelCensorshipStatus.ts
  66. 69 17
      cli/src/commands/content/updateVideo.ts
  67. 2 2
      cli/src/commands/content/updateVideoCategory.ts
  68. 1 1
      cli/src/commands/content/updateVideoCensorshipStatus.ts
  69. 2 2
      cli/src/commands/content/video.ts
  70. 4 6
      cli/src/commands/content/videos.ts
  71. 13 14
      cli/src/commands/working-groups/createOpening.ts
  72. 1 1
      cli/src/commands/working-groups/decreaseWorkerStake.ts
  73. 2 2
      cli/src/commands/working-groups/evictWorker.ts
  74. 5 7
      cli/src/commands/working-groups/fillOpening.ts
  75. 1 1
      cli/src/commands/working-groups/increaseStake.ts
  76. 2 2
      cli/src/commands/working-groups/leaveRole.ts
  77. 2 2
      cli/src/commands/working-groups/slashWorker.ts
  78. 4 4
      cli/src/commands/working-groups/updateRewardAccount.ts
  79. 3 3
      cli/src/commands/working-groups/updateRoleAccount.ts
  80. 2 2
      cli/src/commands/working-groups/updateRoleStorage.ts
  81. 2 2
      cli/src/commands/working-groups/updateWorkerReward.ts
  82. 65 0
      cli/src/graphql/queries/storage.graphql
  83. 2 4
      cli/src/helpers/JsonSchemaPrompt.ts
  84. 5 5
      cli/src/helpers/display.ts
  85. 2 2
      cli/src/helpers/validation.ts
  86. 0 22
      cli/src/json-schemas/Assets.schema.json
  87. 7 0
      cli/src/schemas/ContentDirectory.ts
  88. 34 0
      cli/src/schemas/json/Assets.schema.json
  89. 0 0
      cli/src/schemas/json/WorkingGroupOpening.schema.json
  90. 30 0
      cli/src/schemas/typings/Assets.schema.d.ts
  91. 0 0
      cli/src/schemas/typings/WorkingGroupOpening.schema.d.ts
  92. 41 0
      colossus.Dockerfile
  93. 0 0
      devops/aws/.gitignore
  94. 9 3
      devops/aws/README.md
  95. 0 0
      devops/aws/ansible.cfg
  96. 45 0
      devops/aws/build-arm64-playbook.yml
  97. 0 0
      devops/aws/build-code.yml
  98. 0 0
      devops/aws/chain-spec-pioneer.yml
  99. 13 0
      devops/aws/cloudformation/infrastructure.yml
  100. 134 0
      devops/aws/cloudformation/single-instance-docker.yml

+ 1 - 0
.dockerignore

@@ -7,3 +7,4 @@ query-node/lib
 cli/
 tests/
 !tests/integration-tests/proposal-parameters.json
+devops/

+ 41 - 17
.env

@@ -9,33 +9,57 @@ INDEXER_DB_NAME=query_node_indexer
 DB_NAME=query_node_processor
 DB_USER=postgres
 DB_PASS=postgres
+# This value will not be used by query-node docker containers.
+# When running query-node with docker these services will always use the db service
 DB_HOST=localhost
 DB_PORT=5432
 DEBUG=index-builder:*
 TYPEORM_LOGGING=error
 
-DEBUG=index-builder:*
-TYPEORM_LOGGING=error
-
-###########################
-#    Indexer options      #
-###########################
-
+## Indexer options
 # Block height to start indexing from.
 # Note, that if there are already some indexed events, this setting is ignored
 BLOCK_HEIGHT=0
 
-###############################
-#    Processor GraphQL API    #
-###############################
+# Query node GraphQL server port
+GRAPHQL_SERVER_PORT=8081
 
-GRAPHQL_SERVER_PORT=4002
+# Query node playground subscription endpoint
+GRAPHQL_PLAYGROUND_SUBSCRIPTION_ENDPOINT=ws://localhost:8081/graphql
+
+# Hydra indexer gateway GraphQL server port
+HYDRA_INDEXER_GATEWAY_PORT=4000
+
+# Default GraphQL server host. It is required during "query-node config:dev"
 GRAPHQL_SERVER_HOST=localhost
-WARTHOG_APP_PORT=4002
-WARTHOG_APP_HOST=localhost
 
-# Default configuration is to use the docker container
-WS_PROVIDER_ENDPOINT_URI=ws://joystream-node:9944/
+# Websocket RPC endpoint containers will use.
+JOYSTREAM_NODE_WS=ws://joystream-node:9944/
+
+# Query node which colossus will use
+COLOSSUS_QUERY_NODE_URL=http://graphql-server:${GRAPHQL_SERVER_PORT}/graphql
+
+# Query node which distributor will use
+DISTRIBUTOR_QUERY_NODE_URL=http://graphql-server:${GRAPHQL_SERVER_PORT}/graphql
+
+# Indexer gateway used by processor. If you don't use the local indexer set this to a remote gateway
+PROCESSOR_INDEXER_GATEWAY=http://hydra-indexer-gateway:${HYDRA_INDEXER_GATEWAY_PORT}/graphql
+
+# Colossus services identities
+COLOSSUS_1_WORKER_ID=0
+COLOSSUS_1_WORKER_URI=//testing//worker//Storage//${COLOSSUS_1_WORKER_ID}
+COLOSSUS_1_TRANSACTOR_URI=//Colossus1
+
+COLOSSUS_2_WORKER_ID=1
+COLOSSUS_2_WORKER_URI=//testing//worker//Storage//${COLOSSUS_2_WORKER_ID}
+COLOSSUS_2_TRANSACTOR_URI=//Colossus2
+
+# Distributor node services identities
+DISTRIBUTOR_1_WORKER_ID=0
+DISTRIBUTOR_1_ACCOUNT_URI=//testing//worker//Distribution//${DISTRIBUTOR_1_WORKER_ID}
+
+DISTRIBUTOR_2_WORKER_ID=1
+DISTRIBUTOR_2_ACCOUNT_URI=//testing//worker//Distribution//${DISTRIBUTOR_2_WORKER_ID}
 
-# If running joystream-node on host machine you can use following address to reach it instead
-# WS_PROVIDER_ENDPOINT_URI=ws://host.docker.internal:9944/
+# joystream/node docker image tag
+JOYSTREAM_NODE_TAG=latest

+ 60 - 53
.github/workflows/create-ami.yml

@@ -1,63 +1,70 @@
-name: Build code and create AMI
+# Creates an AWS AMI (system image) with compiled joystream-node and subkey
+# 
+name: Create AWS AMI
 
 on:
-  push:
-    branches:
-      - master
-      - olympia
-      - test_branch
+  workflow_dispatch:
 
 jobs:
   build:
     name: Build the code and run setup
     runs-on: ubuntu-latest
     env:
-      STACK_NAME: joystream-github-action-${{ github.run_number }}
+      STACK_NAME: create-joystream-node-ami-ga-${{ github.run_number }}
       KEY_NAME: joystream-github-action-key
     steps:
-    - name: Extract branch name
-      shell: bash
-      run: echo "##[set-output name=branch;]$(echo ${GITHUB_REF#refs/heads/})"
-      id: extract_branch
-
-    - name: Set AMI Name environment variable
-      shell: bash
-      run: echo "ami_name=joystream-${{ steps.extract_branch.outputs.branch }}-${{ github.run_number }}" >> $GITHUB_ENV
-      id: ami_name
-
-    - name: Checkout
-      uses: actions/checkout@v2
-
-    - name: Configure AWS credentials
-      uses: aws-actions/configure-aws-credentials@v1
-      with:
-        aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
-        aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-        aws-region: us-east-1
-
-    - name: Deploy to AWS CloudFormation
-      uses: aws-actions/aws-cloudformation-github-deploy@v1
-      id: deploy_stack
-      with:
-        name: ${{ env.STACK_NAME }}
-        template: devops/infrastructure/single-instance.yml
-        no-fail-on-empty-changeset: "1"
-        parameter-overrides: "KeyName=${{ env.KEY_NAME }}"
-
-    - name: Install Ansible dependencies
-      run: pipx inject ansible-base boto3 botocore
-
-    - name: Run playbook
-      uses: dawidd6/action-ansible-playbook@v2
-      with:
-        playbook: github-action-playbook.yml
-        directory: devops/infrastructure
-        requirements: requirements.yml
-        key: ${{ secrets.SSH_PRIVATE_KEY }}
-        inventory: |
-          [all]
-          ${{ steps.deploy_stack.outputs.PublicIp }}
-        options: |
-          --extra-vars "git_repo=https://github.com/${{ github.repository }} \
-                        branch_name=${{ steps.extract_branch.outputs.branch }} instance_id=${{ steps.deploy_stack.outputs.InstanceId }}
-                        stack_name=${{ env.STACK_NAME }} ami_name=${{ env.ami_name }}"
+      - name: Extract branch name
+        shell: bash
+        run: echo "##[set-output name=branch;]$(echo ${GITHUB_REF#refs/heads/})"
+        id: extract_branch
+
+      - name: Set AMI Name environment variable
+        shell: bash
+        run: echo "ami_name=joystream-node-${{ steps.extract_branch.outputs.branch }}-${{ github.run_number }}" >> $GITHUB_ENV
+        id: ami_name
+
+      - name: Checkout
+        uses: actions/checkout@v2
+
+      - name: Configure AWS credentials
+        uses: aws-actions/configure-aws-credentials@v1
+        with:
+          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+          aws-region: us-east-1
+
+      - name: Deploy to AWS CloudFormation
+        uses: aws-actions/aws-cloudformation-github-deploy@v1
+        id: deploy_stack
+        with:
+          name: ${{ env.STACK_NAME }}
+          template: devops/aws/cloudformation/single-instance.yml
+          no-fail-on-empty-changeset: '1'
+          parameter-overrides: 'KeyName=${{ env.KEY_NAME }}'
+
+      - name: Install Ansible dependencies
+        run: pipx inject ansible-core boto3 botocore
+
+      - name: Run playbook
+        uses: dawidd6/action-ansible-playbook@v2
+        with:
+          playbook: create-joystream-node-ami-playbook.yml
+          directory: devops/aws
+          requirements: requirements.yml
+          key: ${{ secrets.SSH_PRIVATE_KEY }}
+          inventory: |
+            [all]
+            ${{ steps.deploy_stack.outputs.PublicIp }}
+          options: |
+            --extra-vars "git_repo=https://github.com/${{ github.repository }} \
+                          branch_name=${{ steps.extract_branch.outputs.branch }} instance_id=${{ steps.deploy_stack.outputs.InstanceId }}
+                          ami_name=${{ env.ami_name }}"
+
+      - name: Delete CloudFormation Stack
+        if: always()
+        continue-on-error: true
+        run: |
+          echo "Deleting ${{ env.STACK_NAME }} stack"
+          aws cloudformation delete-stack --stack-name ${{ env.STACK_NAME }}
+          echo "Waiting for ${{ env.STACK_NAME }} to be deleted..."
+          aws cloudformation wait stack-delete-complete --stack-name ${{ env.STACK_NAME }}

+ 152 - 0
.github/workflows/create-release.yml

@@ -0,0 +1,152 @@
+name: Create release with node binaries
+
+on:
+  workflow_dispatch:
+    inputs:
+      name:
+        description: 'Release name (v9.3.0 - Antioch)'
+        required: true
+      tag:
+        description: 'Tag (v9.3.0)'
+        required: true
+
+env:
+  REPOSITORY: joystream/node
+
+jobs:
+  build-mac-binary:
+    runs-on: macos-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+
+      - id: compute_shasum
+        name: Compute runtime code shasum
+        run: |
+          export RUNTIME_CODE_SHASUM=`scripts/runtime-code-shasum.sh`
+          echo "::set-output name=shasum::${RUNTIME_CODE_SHASUM}"
+
+      - name: Run Setup
+        run: |
+          ./setup.sh
+
+      - name: Build binaries
+        run: |
+          yarn cargo-build
+
+      - name: Tar the binary
+        run: |
+          tar czvf joystream-node-macos.tar.gz -C ./target/release joystream-node
+
+      - name: Temporarily save node binary
+        uses: actions/upload-artifact@v2
+        with:
+          name: joystream-node-macos-${{ steps.compute_shasum.outputs.shasum }}
+          path: joystream-node-macos.tar.gz
+          retention-days: 1
+
+  build-rpi-binary:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+
+      - id: compute_shasum
+        name: Compute runtime code shasum
+        run: |
+          export RUNTIME_CODE_SHASUM=`scripts/runtime-code-shasum.sh`
+          echo "::set-output name=shasum::${RUNTIME_CODE_SHASUM}"
+
+      - name: Run Setup
+        run: |
+          ./setup.sh
+
+      - name: Build binaries
+        run: |
+          export WORKSPACE_ROOT=`cargo metadata --offline --no-deps --format-version 1 | jq .workspace_root -r`
+          sudo chmod a+w $WORKSPACE_ROOT
+          ./scripts/raspberry-cross-build.sh
+
+      - name: Tar the binary
+        run: |
+          tar czvf joystream-node-rpi.tar.gz -C ./target/arm-unknown-linux-gnueabihf/release joystream-node
+
+      - name: Temporarily save node binary
+        uses: actions/upload-artifact@v2
+        with:
+          name: joystream-node-rpi-${{ steps.compute_shasum.outputs.shasum }}
+          path: joystream-node-rpi.tar.gz
+          retention-days: 1
+
+  create-release:
+    runs-on: ubuntu-latest
+    needs: [build-mac-binary, build-rpi-binary]
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+
+      - id: compute_shasum
+        name: Compute runtime code shasum
+        run: |
+          export RUNTIME_CODE_SHASUM=`scripts/runtime-code-shasum.sh`
+          echo "::set-output name=shasum::${RUNTIME_CODE_SHASUM}"
+
+      - id: extract_binaries
+        name: Copy binaries & wasm file from docker images
+        run: |
+          IMAGE=${{ env.REPOSITORY }}:${{ steps.compute_shasum.outputs.shasum }}
+
+          docker run -d --entrypoint tail --name temp-container-joystream-node $IMAGE-amd64 -f /dev/null
+
+          RESULT=$(docker exec temp-container-joystream-node b2sum -l 256 runtime.compact.wasm | awk '{print $1}')
+          VERSION_AND_COMMIT=$(docker exec temp-container-joystream-node /joystream/node --version | awk '{print $2}' | cut -d- -f -2)
+          echo "::set-output name=blob_hash::${RESULT}"
+          echo "::set-output name=version_and_commit::${VERSION_AND_COMMIT}"
+
+          docker cp temp-container-joystream-node:/joystream/runtime.compact.wasm ./joystream_runtime_${{ github.event.inputs.tag }}.wasm
+          docker cp temp-container-joystream-node:/joystream/node ./joystream-node
+          tar -czvf joystream-node-$VERSION_AND_COMMIT-x86_64-linux-gnu.tar.gz joystream-node
+
+          docker rm --force temp-container-joystream-node
+
+          docker cp $(docker create --rm $IMAGE-arm64):/joystream/node ./joystream-node
+          tar -czvf joystream-node-$VERSION_AND_COMMIT-arm64-linux-gnu.tar.gz joystream-node
+
+          docker cp $(docker create --rm $IMAGE-arm):/joystream/node ./joystream-node
+          tar -czvf joystream-node-$VERSION_AND_COMMIT-armv7-linux-gnu.tar.gz joystream-node
+
+      - name: Retrieve saved MacOS binary
+        uses: actions/download-artifact@v2
+        with:
+          name: joystream-node-macos-${{ steps.compute_shasum.outputs.shasum }}
+
+      - name: Retrieve saved RPi binary
+        uses: actions/download-artifact@v2
+        with:
+          name: joystream-node-rpi-${{ steps.compute_shasum.outputs.shasum }}
+
+      - name: Rename MacOS and RPi tar
+        run: |
+          mv joystream-node-macos.tar.gz joystream-node-${{ steps.extract_binaries.outputs.version_and_commit }}-x86_64-macos.tar.gz
+          mv joystream-node-rpi.tar.gz joystream-node-${{ steps.extract_binaries.outputs.version_and_commit }}-rpi.tar.gz
+
+      - name: Release
+        uses: softprops/action-gh-release@v1
+        with:
+          files: |
+            *.tar.gz
+            *.wasm
+          tag_name: ${{ github.event.inputs.tag }}
+          name: ${{ github.event.inputs.name }}
+          draft: true
+          body: 'Verify wasm hash:
+            ```
+            $ b2sum -l 256 joystream_runtime_${{ github.event.inputs.tag }}.wasm
+            ```
+
+            This should be the output
+
+            ```
+            ${{ steps.extract_binaries.outputs.blob_hash }}
+            ```
+            '

+ 68 - 0
.github/workflows/deploy-playground.yml

@@ -0,0 +1,68 @@
+name: Deploy Playground
+
+on:
+  workflow_dispatch:
+    inputs:
+      gitRepo:
+        description: 'Code repository'
+        required: false
+        default: 'https://github.com/Joystream/joystream.git'
+      branchName:
+        description: 'Branch to deploy'
+        required: false
+        default: 'master'
+      keyName:
+        description: 'SSH key pair on AWS'
+        required: false
+        default: 'joystream-github-action-key'
+      instanceType:
+        description: 'AWS EC2 instance type (t2.micro, t2.large)'
+        required: false
+        default: 't2.micro'
+
+defaults:
+  run:
+    working-directory: devops/aws
+
+jobs:
+  deploy-playground:
+    name: Create an EC2 instance and configure docker-compose stack
+    runs-on: ubuntu-latest
+    env:
+      STACK_NAME: joystream-playground-${{ github.event.inputs.branchName }}-${{ github.run_number }}
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+
+      - name: Install Ansible dependencies
+        run: pipx inject ansible-core boto3 botocore
+
+      - name: Configure AWS credentials
+        uses: aws-actions/configure-aws-credentials@v1
+        with:
+          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+          aws-region: us-east-1
+
+      - name: Deploy to AWS CloudFormation
+        uses: aws-actions/aws-cloudformation-github-deploy@v1
+        id: deploy_stack
+        with:
+          name: ${{ env.STACK_NAME }}
+          template: devops/aws/cloudformation/single-instance-docker.yml
+          no-fail-on-empty-changeset: '1'
+          parameter-overrides: 'KeyName=${{ github.event.inputs.keyName }},EC2InstanceType=${{ github.event.inputs.instanceType }}'
+
+      - name: Run playbook
+        uses: dawidd6/action-ansible-playbook@v2
+        with:
+          playbook: deploy-playground-playbook.yml
+          directory: devops/aws
+          requirements: requirements.yml
+          key: ${{ secrets.SSH_PRIVATE_KEY }}
+          inventory: |
+            [all]
+            ${{ steps.deploy_stack.outputs.PublicIp }}
+          options: |
+            --extra-vars "git_repo=${{ github.event.inputs.gitRepo }} \
+                          branch_name=${{ github.event.inputs.branchName }}"

+ 2 - 2
.github/workflows/integration-tests.yml

@@ -19,8 +19,8 @@ jobs:
         yarn install --frozen-lockfile
         yarn workspace @joystream/types build
         yarn workspace @joystream/metadata-protobuf build
+        yarn workspace @joystream/cli build
         yarn workspace integration-tests checks --quiet
-        yarn workspace query-node-root lint
 
   network_build_osx:
     name: MacOS Checks
@@ -39,5 +39,5 @@ jobs:
         yarn install --frozen-lockfile --network-timeout 120000
         yarn workspace @joystream/types build
         yarn workspace @joystream/metadata-protobuf build
+        yarn workspace @joystream/cli build
         yarn workspace integration-tests checks --quiet
-        yarn workspace query-node-root lint

+ 32 - 32
.github/workflows/joystream-cli.yml

@@ -9,22 +9,22 @@ jobs:
       matrix:
         node-version: [14.x]
     steps:
-    - uses: actions/checkout@v1
-    - name: Use Node.js ${{ matrix.node-version }}
-      uses: actions/setup-node@v1
-      with:
-        node-version: ${{ matrix.node-version }}
-    - name: checks
-      run: |
-        yarn install --frozen-lockfile
-        yarn workspace @joystream/types build
-        yarn workspace @joystream/metadata-protobuf build
-        yarn workspace @joystream/cli checks --quiet
-    - name: yarn pack test
-      run: |
-        yarn workspace @joystream/cli pack --filename cli-pack-test.tgz
-        tar zxvf ./cli/cli-pack-test.tgz -C cli
-        cd ./cli/package && yarn link
+      - uses: actions/checkout@v1
+      - name: Use Node.js ${{ matrix.node-version }}
+        uses: actions/setup-node@v1
+        with:
+          node-version: ${{ matrix.node-version }}
+      - name: checks
+        run: |
+          yarn install --frozen-lockfile
+          yarn workspace @joystream/types build
+          yarn workspace @joystream/metadata-protobuf build
+          yarn workspace @joystream/cli checks --quiet
+      - name: yarn pack test
+        run: |
+          yarn workspace @joystream/cli pack --filename cli-pack-test.tgz
+          tar zxvf ./cli/cli-pack-test.tgz -C cli
+          cd ./cli/package && yarn link
 
   cli_build_osx:
     name: MacOS Checks
@@ -33,19 +33,19 @@ jobs:
       matrix:
         node-version: [14.x]
     steps:
-    - uses: actions/checkout@v1
-    - name: Use Node.js ${{ matrix.node-version }}
-      uses: actions/setup-node@v1
-      with:
-        node-version: ${{ matrix.node-version }}
-    - name: checks
-      run: |
-        yarn install --frozen-lockfile --network-timeout 120000
-        yarn workspace @joystream/types build
-        yarn workspace @joystream/metadata-protobuf build
-        yarn workspace @joystream/cli checks --quiet
-    - name: yarn pack test
-      run: |
-        yarn workspace @joystream/cli pack --filename cli-pack-test.tgz
-        tar zxvf ./cli/cli-pack-test.tgz -C cli
-        cd ./cli/package && yarn link
+      - uses: actions/checkout@v1
+      - name: Use Node.js ${{ matrix.node-version }}
+        uses: actions/setup-node@v1
+        with:
+          node-version: ${{ matrix.node-version }}
+      - name: checks
+        run: |
+          yarn install --frozen-lockfile --network-timeout 120000
+          yarn workspace @joystream/types build
+          yarn workspace @joystream/metadata-protobuf build
+          yarn workspace @joystream/cli checks --quiet
+      - name: yarn pack test
+        run: |
+          yarn workspace @joystream/cli pack --filename cli-pack-test.tgz
+          tar zxvf ./cli/cli-pack-test.tgz -C cli
+          cd ./cli/package && yarn link

+ 163 - 47
.github/workflows/joystream-node-docker.yml

@@ -1,13 +1,22 @@
 name: joystream-node-docker
+
 on: push
 
+env:
+  REPOSITORY: joystream/node
+  KEY_NAME: joystream-github-action-key
+
 jobs:
-  build:
-    name: Build joystream/node Docker image
-    if: github.repository == 'Joystream/joystream'
+  push-amd64:
+    name: Build joystream/node Docker image for amd64
     runs-on: ubuntu-latest
+    outputs:
+      tag_shasum: ${{ steps.compute_shasum.outputs.shasum }}
+      image_exists: ${{ steps.compute_main_image_exists.outputs.image_exists }}
     steps:
-      - uses: actions/checkout@v1
+      - name: Checkout
+        uses: actions/checkout@v2
+
       - uses: actions/setup-node@v1
         with:
           node-version: '14.x'
@@ -18,62 +27,169 @@ jobs:
           export RUNTIME_CODE_SHASUM=`scripts/runtime-code-shasum.sh`
           echo "::set-output name=shasum::${RUNTIME_CODE_SHASUM}"
 
-      - name: Setup cache directory
-        run: mkdir ~/docker-images
-
-      - name: Cache docker images
-        uses: actions/cache@v2
-        env:
-          cache-name: joystream-node-docker
+      - name: Login to DockerHub
+        uses: docker/login-action@v1
         with:
-          path: ~/docker-images
-          key: ${{ env.cache-name }}-${{ steps.compute_shasum.outputs.shasum }}
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_PASSWORD }}
 
-      - name: Check if we have cached image
-        continue-on-error: true
+      - name: Check if we have already have the manifest on Dockerhub
+        id: compute_main_image_exists
+        # Will output 0 if image exists and 1 if does not exists
         run: |
-          if [ -f ~/docker-images/joystream-node-docker-image.tar.gz ]; then
-            docker load --input ~/docker-images/joystream-node-docker-image.tar.gz
-            cp ~/docker-images/joystream-node-docker-image.tar.gz .
-          fi
+          export IMAGE_EXISTS=$(docker manifest inspect ${{ env.REPOSITORY }}:${{ steps.compute_shasum.outputs.shasum }} > /dev/null ; echo $?)
+          echo "::set-output name=image_exists::${IMAGE_EXISTS}"
 
       - name: Check if we have pre-built image on Dockerhub
-        continue-on-error: true
+        id: compute_image_exists
+        # Will output 0 if image exists and 1 if does not exists
         run: |
-          if ! [ -f joystream-node-docker-image.tar.gz ]; then
-            docker pull joystream/node:${{ steps.compute_shasum.outputs.shasum }}
-            docker image tag joystream/node:${{ steps.compute_shasum.outputs.shasum }} joystream/node:latest
-            docker save --output joystream-node-docker-image.tar joystream/node:latest
-            gzip joystream-node-docker-image.tar
-            cp joystream-node-docker-image.tar.gz ~/docker-images/
-          fi
-
-      - name: Build new joystream/node image
+          export IMAGE_EXISTS=$(docker manifest inspect ${{ env.REPOSITORY }}:${{ steps.compute_shasum.outputs.shasum }}-amd64 > /dev/null ; echo $?)
+          echo "::set-output name=image_exists::${IMAGE_EXISTS}"
+
+      - name: Build and push
+        uses: docker/build-push-action@v2
+        with:
+          context: .
+          file: joystream-node.Dockerfile
+          platforms: linux/amd64
+          push: true
+          tags: ${{ env.REPOSITORY }}:${{ steps.compute_shasum.outputs.shasum }}-amd64
+        if: ${{ steps.compute_image_exists.outputs.image_exists == 1 }}
+
+  push-arm:
+    name: Build joystream/node Docker image for arm
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        platform: ['linux/arm64', 'linux/arm/v7']
+        include:
+          - platform: 'linux/arm64'
+            platform_tag: 'arm64'
+            file: 'joystream-node.Dockerfile'
+          - platform: 'linux/arm/v7'
+            platform_tag: 'arm'
+            file: 'joystream-node-armv7.Dockerfile'
+    env:
+      STACK_NAME: build-joystream-node-docker-ga-${{ github.run_number }}-${{ matrix.platform_tag }}
+    steps:
+      - name: Extract branch name
+        shell: bash
+        run: echo "##[set-output name=branch;]$(echo ${GITHUB_REF#refs/heads/})"
+        id: extract_branch
+
+      - name: Checkout
+        uses: actions/checkout@v2
+
+      - uses: actions/setup-node@v1
+        with:
+          node-version: '14.x'
+
+      - name: Install Ansible dependencies
+        run: pipx inject ansible-core boto3 botocore
+
+      - id: compute_shasum
+        name: Compute runtime code shasum
         run: |
-          if ! [ -f joystream-node-docker-image.tar.gz ]; then
-            docker build . --file joystream-node.Dockerfile --tag joystream/node
-            docker save --output joystream-node-docker-image.tar joystream/node
-            gzip joystream-node-docker-image.tar
-            cp joystream-node-docker-image.tar.gz ~/docker-images/
-            echo "NEW_BUILD=true" >> $GITHUB_ENV
-          fi
-
-      - name: Save joystream/node image to Artifacts
-        uses: actions/upload-artifact@v2
+          export RUNTIME_CODE_SHASUM=`scripts/runtime-code-shasum.sh`
+          echo "::set-output name=shasum::${RUNTIME_CODE_SHASUM}"
+
+      - name: Login to DockerHub
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_PASSWORD }}
+
+      - name: Check if we have pre-built image on Dockerhub
+        id: compute_image_exists
+        # Will output 0 if image exists and 1 if does not exists
+        run: |
+          export IMAGE_EXISTS=$(docker manifest inspect ${{ env.REPOSITORY }}:${{ steps.compute_shasum.outputs.shasum }}-${{ matrix.platform_tag }} > /dev/null ; echo $?)
+          echo "::set-output name=image_exists::${IMAGE_EXISTS}"
+
+      - name: Configure AWS credentials
+        uses: aws-actions/configure-aws-credentials@v1
         with:
-          name: ${{ steps.compute_shasum.outputs.shasum }}-joystream-node-docker-image.tar.gz
-          path: joystream-node-docker-image.tar.gz
+          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+          aws-region: us-east-1
+        if: ${{ steps.compute_image_exists.outputs.image_exists == 1 }}
 
+      - name: Deploy to AWS CloudFormation
+        uses: aws-actions/aws-cloudformation-github-deploy@v1
+        id: deploy_stack
+        with:
+          name: ${{ env.STACK_NAME }}
+          template: devops/aws/cloudformation/single-instance-docker.yml
+          no-fail-on-empty-changeset: '1'
+          parameter-overrides: 'KeyName=${{ env.KEY_NAME }},EC2AMI=ami-00d1ab6b335f217cf,EC2InstanceType=t4g.xlarge'
+        if: ${{ steps.compute_image_exists.outputs.image_exists == 1 }}
+
+      - name: Run playbook
+        uses: dawidd6/action-ansible-playbook@v2
+        with:
+          playbook: build-arm64-playbook.yml
+          directory: devops/aws
+          requirements: requirements.yml
+          key: ${{ secrets.SSH_PRIVATE_KEY }}
+          inventory: |
+            [all]
+            ${{ steps.deploy_stack.outputs.PublicIp }}
+          options: |
+            --extra-vars "git_repo=https://github.com/${{ github.repository }} \
+                          branch_name=${{ steps.extract_branch.outputs.branch }} \
+                          docker_username=${{ secrets.DOCKERHUB_USERNAME }} \
+                          docker_password=${{ secrets.DOCKERHUB_PASSWORD }} \
+                          tag_name=${{ steps.compute_shasum.outputs.shasum }}-${{ matrix.platform_tag }} \
+                          repository=${{ env.REPOSITORY }} dockerfile=${{ matrix.file }} \
+                          platform=${{ matrix.platform }}"
+        if: ${{ steps.compute_image_exists.outputs.image_exists == 1 }}
+
+      - name: Delete CloudFormation Stack
+        if: always()
+        continue-on-error: true
+        run: |
+          echo "Deleting ${{ env.STACK_NAME }} stack"
+          aws cloudformation delete-stack --stack-name ${{ env.STACK_NAME }}
+          echo "Waiting for ${{ env.STACK_NAME }} to be deleted..."
+          aws cloudformation wait stack-delete-complete --stack-name ${{ env.STACK_NAME }}
+
+  push-manifest:
+    name: Create manifest using both the arch images
+    needs: [push-amd64, push-arm]
+    # Only run this job if the image does not exist with tag equal to the shasum
+    if: needs.push-amd64.outputs.image_exists == 1
+    runs-on: ubuntu-latest
+    env:
+      TAG_SHASUM: ${{ needs.push-amd64.outputs.tag_shasum }}
+    steps:
       - name: Login to DockerHub
         uses: docker/login-action@v1
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_PASSWORD }}
-        if: env.NEW_BUILD
 
-      - name: Publish new image to DockerHub
+      - name: Create manifest for multi-arch images
+        run: |
+          # get artifacts from previous steps
+          IMAGE=${{ env.REPOSITORY }}:${{ env.TAG_SHASUM }}
+          echo $IMAGE
+          docker pull $IMAGE-amd64
+          docker pull $IMAGE-arm64
+          docker pull $IMAGE-arm
+          docker manifest create $IMAGE $IMAGE-amd64 $IMAGE-arm64 $IMAGE-arm
+          docker manifest annotate $IMAGE $IMAGE-amd64 --arch amd64
+          docker manifest annotate $IMAGE $IMAGE-arm64 --arch arm64
+          docker manifest annotate $IMAGE $IMAGE-arm --arch arm
+          docker manifest push $IMAGE
+
+      - name: Create manifest with latest tag for master
+        if: github.ref == 'refs/heads/master'
         run: |
-          docker image tag joystream/node joystream/node:${{ steps.compute_shasum.outputs.shasum }}
-          docker push joystream/node:${{ steps.compute_shasum.outputs.shasum }}
-        if: env.NEW_BUILD
-  
+          IMAGE=${{ env.REPOSITORY }}:${{ env.TAG_SHASUM }}
+          LATEST_TAG=${{ env.REPOSITORY }}:latest
+          docker manifest create $LATEST_TAG $IMAGE-amd64 $IMAGE-arm64 $IMAGE-arm
+          docker manifest annotate $LATEST_TAG $IMAGE-amd64 --arch amd64
+          docker manifest annotate $LATEST_TAG $IMAGE-arm64 --arch arm64
+          docker manifest annotate $LATEST_TAG $IMAGE-arm --arch arm
+          docker manifest push $LATEST_TAG

+ 43 - 0
.github/workflows/query-node.yml

@@ -0,0 +1,43 @@
+name: query-node
+on: [pull_request, push]
+
+jobs:
+  query_node_build_ubuntu:
+    name: Ubuntu Checks
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        node-version: [14.x]
+    steps:
+    - uses: actions/checkout@v1
+    - name: Use Node.js ${{ matrix.node-version }}
+      uses: actions/setup-node@v1
+      with:
+        node-version: ${{ matrix.node-version }}
+    - name: checks
+      run: |
+        yarn install --frozen-lockfile
+        yarn workspace @joystream/types build
+        yarn workspace @joystream/metadata-protobuf build
+        yarn workspace query-node-root build
+        yarn workspace query-node-mappings checks --quiet
+
+  query_node_build_osx:
+    name: MacOS Checks
+    runs-on: macos-latest
+    strategy:
+      matrix:
+        node-version: [14.x]
+    steps:
+    - uses: actions/checkout@v1
+    - name: Use Node.js ${{ matrix.node-version }}
+      uses: actions/setup-node@v1
+      with:
+        node-version: ${{ matrix.node-version }}
+    - name: checks
+      run: |
+        yarn install --frozen-lockfile --network-timeout 120000
+        yarn workspace @joystream/types build
+        yarn workspace @joystream/metadata-protobuf build
+        yarn workspace query-node-root build
+        yarn workspace query-node-mappings checks --quiet

+ 176 - 0
.github/workflows/run-network-tests.yml

@@ -0,0 +1,176 @@
+name: run-network-tests
+on:
+  pull_request:
+    types: [opened, synchronize]
+
+  workflow_dispatch:
+    # TODO: add an input so dispatcher can specify a list of tests to run,
+    # composed of the job ids separated by `:`
+    # for eg.
+    #   'network_tests_1:network_tests_3'
+    #   'network_tests_2'
+    # inputs:
+    #   test_to_run:
+    #     description: 'Tests to run'
+    #     required: false
+    #     default: 'all'
+
+jobs:
+  build_images:
+    name: Build joystream/node
+    runs-on: ubuntu-latest
+    outputs:
+      use_artifact: ${{ steps.compute_shasum.outputs.shasum }}-joystream-node-docker-image.tar.gz
+    steps:
+      - uses: actions/checkout@v1
+      - uses: actions/setup-node@v1
+        with:
+          node-version: '14.x'
+
+      - id: compute_shasum
+        name: Compute runtime code shasum
+        run: |
+          export RUNTIME_CODE_SHASUM=`scripts/runtime-code-shasum.sh`
+          echo "::set-output name=shasum::${RUNTIME_CODE_SHASUM}"
+
+      - name: Setup cache directory
+        run: mkdir ~/docker-images
+
+      - name: Cache docker images
+        uses: actions/cache@v2
+        env:
+          cache-name: joystream-node-docker
+        with:
+          path: ~/docker-images
+          key: ${{ env.cache-name }}-${{ steps.compute_shasum.outputs.shasum }}
+
+      - name: Check if we have cached image
+        continue-on-error: true
+        run: |
+          if [ -f ~/docker-images/joystream-node-docker-image.tar.gz ]; then
+            docker load --input ~/docker-images/joystream-node-docker-image.tar.gz
+            cp ~/docker-images/joystream-node-docker-image.tar.gz .
+          fi
+
+      - name: Check if we have pre-built image on Dockerhub
+        continue-on-error: true
+        run: |
+          if ! [ -f joystream-node-docker-image.tar.gz ]; then
+            docker pull joystream/node:${{ steps.compute_shasum.outputs.shasum }}
+            docker image tag joystream/node:${{ steps.compute_shasum.outputs.shasum }} joystream/node:latest
+            docker save --output joystream-node-docker-image.tar joystream/node:latest
+            gzip joystream-node-docker-image.tar
+            cp joystream-node-docker-image.tar.gz ~/docker-images/
+          fi
+
+      - name: Build new joystream/node image
+        run: |
+          if ! [ -f joystream-node-docker-image.tar.gz ]; then
+            docker build . --file joystream-node.Dockerfile --tag joystream/node
+            docker save --output joystream-node-docker-image.tar joystream/node
+            gzip joystream-node-docker-image.tar
+            cp joystream-node-docker-image.tar.gz ~/docker-images/
+          fi
+
+      - name: Save joystream/node image to Artifacts
+        uses: actions/upload-artifact@v2
+        with:
+          name: ${{ steps.compute_shasum.outputs.shasum }}-joystream-node-docker-image.tar.gz
+          path: joystream-node-docker-image.tar.gz
+
+  basic_runtime_with_upgrade:
+    # if: ${{ false }}
+    name: Integration Tests (Runtime Upgrade)
+    needs: build_images
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v1
+      - uses: actions/setup-node@v1
+        with:
+          node-version: '14.x'
+      - name: Get artifacts
+        uses: actions/download-artifact@v2
+        with:
+          name: ${{ needs.build_images.outputs.use_artifact }}
+      - name: Install artifacts
+        run: |
+          docker load --input joystream-node-docker-image.tar.gz
+          docker images
+      - name: Install packages and dependencies
+        run: |
+          yarn install --frozen-lockfile
+          yarn workspace @joystream/types build
+          yarn workspace @joystream/metadata-protobuf build
+          yarn workspace @joystream/cli build
+      - name: Ensure tests are runnable
+        run: yarn workspace network-tests build
+      - name: Install joystream-cli @joystream/cli/0.5.1
+        run: npm -g install @joystream/cli
+      - name: Execute network tests
+        run: |
+          export HOME=${PWD}
+          mkdir -p ${HOME}/.local/share/joystream-cli
+          joystream-cli api:setUri ws://localhost:9944
+          export RUNTIME=sumer
+          tests/network-tests/run-migration-tests.sh
+
+  basic_runtime:
+    name: Integration Tests (New Chain)
+    needs: build_images
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v1
+      - uses: actions/setup-node@v1
+        with:
+          node-version: '14.x'
+      - name: Get artifacts
+        uses: actions/download-artifact@v2
+        with:
+          name: ${{ needs.build_images.outputs.use_artifact }}
+      - name: Install artifacts
+        run: |
+          docker load --input joystream-node-docker-image.tar.gz
+          docker images
+      - name: Install packages and dependencies
+        run: |
+          yarn install --frozen-lockfile
+          yarn workspace @joystream/types build
+          yarn workspace @joystream/metadata-protobuf build
+          yarn workspace @joystream/cli build
+      - name: Ensure tests are runnable
+        run: yarn workspace network-tests build
+      - name: Execute network tests
+        run: tests/network-tests/run-full-tests.sh
+
+  new_chain_setup:
+    name: Initialize new chain
+    needs: build_images
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v1
+      - uses: actions/setup-node@v1
+        with:
+          node-version: '14.x'
+      - name: Get artifacts
+        uses: actions/download-artifact@v2
+        with:
+          name: ${{ needs.build_images.outputs.use_artifact }}
+      - name: Install artifacts
+        run: |
+          docker load --input joystream-node-docker-image.tar.gz
+          docker images
+      - name: Install packages and dependencies
+        run: |
+          yarn install --frozen-lockfile
+          yarn workspace @joystream/types build
+          yarn workspace @joystream/metadata-protobuf build
+          yarn workspace @joystream/cli build
+      - name: Ensure query-node builds
+        run: yarn workspace query-node-root build
+      - name: Ensure tests are runnable
+        run: yarn workspace network-tests build
+      # Bring up hydra query-node development instance, then run content directory
+      # integration tests
+      - name: Execute Tests
+        run: tests/network-tests/test-setup-new-chain.sh
+

+ 5 - 3
.github/workflows/storage-node.yml

@@ -18,7 +18,8 @@ jobs:
       run: |
         yarn install --frozen-lockfile
         yarn workspace @joystream/types build
-        yarn workspace storage-node checks --quiet
+        yarn workspace @joystream/metadata-protobuf build
+        yarn workspace storage-node lint --quiet
         yarn workspace storage-node build
 
   storage_node_build_osx:
@@ -37,5 +38,6 @@ jobs:
       run: |
         yarn install --frozen-lockfile --network-timeout 120000
         yarn workspace @joystream/types build
-        yarn workspace storage-node checks --quiet
-        yarn workspace storage-node build
+        yarn workspace @joystream/metadata-protobuf build
+        yarn workspace storage-node lint --quiet
+        yarn workspace storage-node build

+ 4 - 0
.gitignore

@@ -39,3 +39,7 @@ yarn*
 # test data for local node tests
 test-data/
 tmp.*
+
+.my_setup
+
+devops/infrastructure

+ 9 - 2
Cargo.lock

@@ -3806,13 +3806,18 @@ dependencies = [
 
 [[package]]
 name = "pallet-content"
-version = "5.0.0"
+version = "3.2.0"
 dependencies = [
  "frame-support",
  "frame-system",
  "pallet-balances",
  "pallet-common",
+ "pallet-membership",
+ "pallet-randomness-collective-flip",
+ "pallet-staking-handler",
+ "pallet-storage",
  "pallet-timestamp",
+ "pallet-working-group",
  "parity-scale-codec",
  "serde",
  "sp-arithmetic",
@@ -4194,13 +4199,15 @@ dependencies = [
 
 [[package]]
 name = "pallet-storage"
-version = "5.0.0"
+version = "4.0.1"
 dependencies = [
+ "frame-benchmarking",
  "frame-support",
  "frame-system",
  "pallet-balances",
  "pallet-common",
  "pallet-membership",
+ "pallet-randomness-collective-flip",
  "pallet-staking-handler",
  "pallet-timestamp",
  "pallet-working-group",

+ 5 - 2
README.md

@@ -14,7 +14,7 @@ The following tools are required for building, testing and contributing to this
 - [Rust](https://www.rust-lang.org/tools/install) toolchain - _required_
 - [nodejs](https://nodejs.org/) v14.x - _required_
 - [yarn classic](https://classic.yarnpkg.com/en/docs/install) package manager v1.22.x- _required_
-- [docker](https://www.docker.com/get-started) and docker-compose - _optional_
+- [docker](https://www.docker.com/get-started) and docker-compose - _required_
 - [ansible](https://www.ansible.com/) - _optional_
 
 If you use VSCode as your code editor we recommend using the workspace [settings](devops/vscode/settings.json) for recommend eslint plugin to function properly.
@@ -25,9 +25,12 @@ After cloning the repo run the following initialization scripts:
 # Install rust toolchain
 ./setup.sh
 
-# Install npm package dependencies, build npm packages
+# build local npm packages
 yarn build:packages
 
+# Build joystream/node docker image
+yarn build:node:docker
+
 # start a local development network
 yarn start
 ```

+ 2 - 3
apps.Dockerfile

@@ -1,10 +1,9 @@
-FROM node:14 as builder
+FROM --platform=linux/x86-64 node:14 as builder
 
 WORKDIR /joystream
 COPY . /joystream
+
 RUN rm -fr /joystream/pioneer
-# Replaced by "integration-tests" on Olympia
-RUN rm -fr /joystream/tests/network-tests
 
 # Do not set NODE_ENV=production until after running yarn install
 # to ensure dev dependencies are installed.

+ 0 - 35
build-docker-images.sh

@@ -1,35 +0,0 @@
-#!/usr/bin/env bash
-
-set -e
-
-if ! command -v docker-compose &> /dev/null
-then
-  echo "docker-compose not found. Skipping docker image builds."
-  exit 0
-fi
-
-# Build or fetch cached joystream/node docker image
-if [[ "$SKIP_JOYSTREAM_NODE" = 1 || "$SKIP_JOYSTREAM_NODE" = "true" ]]; then
-  echo "Skipping build of joystream/node docker image."
-else
-  # Fetch a cached joystream/node image if one is found matching code shasum instead of building
-  CODE_HASH=`scripts/runtime-code-shasum.sh`
-  IMAGE=joystream/node:${CODE_HASH}
-  echo "Trying to fetch cached ${IMAGE} image"
-  docker pull ${IMAGE} || :
-
-  if ! docker inspect ${IMAGE} > /dev/null;
-  then
-    echo "Fetch failed, building image locally"
-    docker-compose build joystream-node
-  else
-    echo "Tagging cached image as 'latest'"
-    docker image tag ${IMAGE} joystream/node:latest
-  fi
-fi
-
-docker-compose up -d joystream-node
-
-# Build joystream/apps docker image
-echo "Building 'joystream/apps' docker image..."
-docker-compose build colossus

+ 24 - 0
build-node-docker.sh

@@ -0,0 +1,24 @@
+#!/usr/bin/env bash
+
+set -e
+
+if ! command -v docker-compose &> /dev/null
+then
+  echo "docker-compose not found. Skipping docker image builds."
+  exit 0
+fi
+
+# Fetch a cached joystream/node image if one is found matching code shasum instead of building
+CODE_HASH=`scripts/runtime-code-shasum.sh`
+IMAGE=joystream/node:${CODE_HASH}
+echo "Trying to fetch cached ${IMAGE} image"
+docker pull ${IMAGE} || :
+
+if ! docker inspect ${IMAGE} > /dev/null;
+then
+  echo "Fetch failed, building image locally"
+  docker-compose build joystream-node
+else
+  echo "Tagging cached image as 'latest'"
+  docker image tag ${IMAGE} joystream/node:latest
+fi

+ 2 - 1
build-npm-packages.sh → build-packages.sh

@@ -2,9 +2,10 @@
 
 set -e
 
-yarn
+yarn --frozen-lockfile
 yarn workspace @joystream/types build
 yarn workspace @joystream/metadata-protobuf build
 yarn workspace query-node-root build
 yarn workspace @joystream/cli build
 yarn workspace storage-node build
+yarn workspace @joystream/distributor-cli build

Файлын зөрүү хэтэрхий том тул дарагдсан байна
+ 0 - 0
chain-metadata.json


+ 1 - 1
cli/examples/content/CreateVideo.json

@@ -7,7 +7,7 @@
   "hasMarketing": false,
   "isPublic": true,
   "isExplicit": false,
-  "personsList": [],
+  "persons": [],
   "category": 1,
   "license": {
     "code": 1001,

+ 13 - 9
cli/package.json

@@ -12,14 +12,15 @@
     "cross-fetch": "^3.0.6",
     "@apidevtools/json-schema-ref-parser": "^9.0.6",
     "@ffprobe-installer/ffprobe": "^1.1.0",
-    "@joystream/types": "^0.17.0",
+    "@joystream/metadata-protobuf": "^2.0.0",
+    "@joystream/types": "^0.18.0",
     "@oclif/command": "^1.5.19",
     "@oclif/config": "^1.14.0",
     "@oclif/plugin-autocomplete": "^0.2.0",
     "@oclif/plugin-help": "^3.2.2",
     "@oclif/plugin-not-found": "^1.2.4",
     "@oclif/plugin-warn-if-update-available": "^1.7.0",
-    "@polkadot/api": "5.3.2",
+    "@polkadot/api": "5.9.1",
     "@types/cli-progress": "^3.9.1",
     "@types/fluent-ffmpeg": "^2.1.16",
     "@types/inquirer": "^6.5.0",
@@ -44,12 +45,15 @@
     "moment": "^2.24.0",
     "proper-lockfile": "^4.1.1",
     "slug": "^2.1.1",
-    "tslib": "^1.11.1"
+    "tslib": "^1.11.1",
+    "blake3-wasm": "^2.1.5",
+    "multihashes": "^4.0.3",
+    "form-data": "^4.0.0"
   },
   "devDependencies": {
     "@oclif/dev-cli": "^1.22.2",
     "@oclif/test": "^1.2.5",
-    "@polkadot/ts": "^0.4.4",
+    "@polkadot/ts": "^0.4.8",
     "@types/chai": "^4.2.11",
     "@types/mocha": "^5.2.7",
     "@types/node": "^10.17.18",
@@ -60,9 +64,9 @@
     "globby": "^10.0.2",
     "mocha": "^5.2.0",
     "nyc": "^14.1.1",
-    "ts-node": "^8.8.2",
-    "typescript": "^3.8.3",
-    "json-schema-to-typescript": "^9.1.1",
+    "ts-node": "^10.2.1",
+    "typescript": "^4.4.3",
+    "json-schema-to-typescript": "^10.1.4",
     "@graphql-codegen/cli": "^1.21.4",
     "@graphql-codegen/typescript": "^1.22.0",
     "@graphql-codegen/import-types-preset": "^1.18.1",
@@ -132,13 +136,13 @@
     "lint": "eslint ./src --ext .ts",
     "checks": "tsc --noEmit --pretty && prettier ./ --check && yarn lint",
     "format": "prettier ./ --write",
-    "generate:schema-typings": "rm -rf ./src/json-schemas/typings && json2ts -i ./src/json-schemas/ -o ./src/json-schemas/typings/ && yarn format",
+    "generate:schema-typings": "rm -rf ./src/schemas/typings && json2ts -i ./src/schemas/json/ -o ./src/schemas/typings/ && yarn format",
     "generate:graphql-typings": "graphql-codegen",
     "generate:all": "yarn generate:schema-typings && yarn generate:graphql-typings"
   },
   "types": "lib/index.d.ts",
   "volta": {
     "node": "14.16.1",
-    "yarn": "1.22.4"
+    "yarn": "1.22.15"
   }
 }

+ 8 - 0
cli/content-test.sh → cli/scripts/content-test.sh

@@ -52,3 +52,11 @@ yarn joystream-cli content:channels
 yarn joystream-cli content:channel 1
 yarn joystream-cli content:curatorGroups
 yarn joystream-cli content:curatorGroup 1
+# Remove videos/channels/assets
+yarn joystream-cli content:removeChannelAssets -c 1 -o 0
+yarn joystream-cli content:deleteVideo -v 1 -f
+yarn joystream-cli content:deleteVideo -v 2 -f
+yarn joystream-cli content:deleteVideo -v 3 -f
+yarn joystream-cli content:deleteChannel -c 1 -f
+yarn joystream-cli content:deleteChannel -c 2 -f
+yarn joystream-cli content:deleteChannel -c 3 -f

+ 40 - 130
cli/src/Api.ts

@@ -1,12 +1,12 @@
 import BN from 'bn.js'
-import { types } from '@joystream/types/'
+import { createType, types } from '@joystream/types/'
 import { ApiPromise, WsProvider } from '@polkadot/api'
 import { SubmittableExtrinsic, AugmentedQuery } from '@polkadot/api/types'
 import { formatBalance } from '@polkadot/util'
 import { Balance } from '@polkadot/types/interfaces'
 import { KeyringPair } from '@polkadot/keyring/types'
 import { Codec, Observable } from '@polkadot/types/types'
-import { UInt, Bytes } from '@polkadot/types'
+import { UInt } from '@polkadot/types'
 import {
   AccountSummary,
   WorkingGroups,
@@ -25,7 +25,6 @@ import {
   OpeningId,
   Application,
   ApplicationId,
-  StorageProviderId,
   Opening,
 } from '@joystream/types/working-group'
 import { Membership, StakingAccountMemberBinding } from '@joystream/types/members'
@@ -37,20 +36,9 @@ import {
   VideoId,
   CuratorGroupId,
   CuratorGroup,
-  ChannelCategory,
   VideoCategoryId,
-  VideoCategory,
 } from '@joystream/types/content'
-import { ContentId, DataObject } from '@joystream/types/storage'
-import { ApolloClient, InMemoryCache, HttpLink, NormalizedCacheObject, DocumentNode } from '@apollo/client/core'
-import fetch from 'cross-fetch'
-import { Maybe } from './graphql/generated/schema'
-import {
-  GetMemberById,
-  GetMemberByIdQuery,
-  GetMemberByIdQueryVariables,
-  MembershipFieldsFragment,
-} from './graphql/generated/queries'
+import { BagId, DataObject, DataObjectId } from '@joystream/types/storage'
 
 export const DEFAULT_API_URI = 'ws://localhost:9944/'
 
@@ -60,8 +48,11 @@ export const apiModuleByGroup = {
   [WorkingGroups.Curators]: 'contentDirectoryWorkingGroup',
   [WorkingGroups.Forum]: 'forumWorkingGroup',
   [WorkingGroups.Membership]: 'membershipWorkingGroup',
-  [WorkingGroups.Operations]: 'operationsWorkingGroup',
   [WorkingGroups.Gateway]: 'gatewayWorkingGroup',
+  [WorkingGroups.OperationsAlpha]: 'operationsWorkingGroupAlpha',
+  [WorkingGroups.OperationsBeta]: 'operationsWorkingGroupBeta',
+  [WorkingGroups.OperationsGamma]: 'operationsWorkingGroupGamma',
+  [WorkingGroups.Distribution]: 'distributionWorkingGroup',
 } as const
 
 export const lockIdByWorkingGroup: { [K in WorkingGroups]: string } = {
@@ -69,24 +60,18 @@ export const lockIdByWorkingGroup: { [K in WorkingGroups]: string } = {
   [WorkingGroups.Curators]: '0x0707070707070707',
   [WorkingGroups.Forum]: '0x0808080808080808',
   [WorkingGroups.Membership]: '0x0909090909090909',
-  [WorkingGroups.Operations]: '0x0d0d0d0d0d0d0d0d',
   [WorkingGroups.Gateway]: '0x0e0e0e0e0e0e0e0e',
+  // TODO: TBD. OperationsAlpha, OperationsBeta, OperationsGamma, Distribution
 }
 
 // Api wrapper for handling most common api calls and allowing easy API implementation switch in the future
 export default class Api {
   private _api: ApiPromise
-  private _queryNode?: ApolloClient<NormalizedCacheObject>
   public isDevelopment = false
 
-  private constructor(
-    originalApi: ApiPromise,
-    isDevelopment: boolean,
-    queryNodeClient?: ApolloClient<NormalizedCacheObject>
-  ) {
+  private constructor(originalApi: ApiPromise, isDevelopment: boolean) {
     this.isDevelopment = isDevelopment
     this._api = originalApi
-    this._queryNode = queryNodeClient
   }
 
   public getOriginalApi(): ApiPromise {
@@ -118,63 +103,9 @@ export default class Api {
     return { api, properties, chainType }
   }
 
-  private static async createQueryNodeClient(uri: string) {
-    return new ApolloClient({
-      link: new HttpLink({ uri, fetch }),
-      cache: new InMemoryCache(),
-      defaultOptions: { query: { fetchPolicy: 'no-cache', errorPolicy: 'all' } },
-    })
-  }
-
-  static async create(
-    apiUri = DEFAULT_API_URI,
-    metadataCache: Record<string, any>,
-    queryNodeUri?: string
-  ): Promise<Api> {
+  static async create(apiUri = DEFAULT_API_URI, metadataCache: Record<string, any>): Promise<Api> {
     const { api, chainType } = await Api.initApi(apiUri, metadataCache)
-    const queryNodeClient = queryNodeUri ? await this.createQueryNodeClient(queryNodeUri) : undefined
-    return new Api(api, chainType.isDevelopment || chainType.isLocal, queryNodeClient)
-  }
-
-  // Query-node: get entity by unique input
-  protected async uniqueEntityQuery<
-    QueryT extends { [k: string]: Maybe<Record<string, unknown>> | undefined },
-    VariablesT extends Record<string, unknown>
-  >(
-    query: DocumentNode,
-    variables: VariablesT,
-    resultKey: keyof QueryT
-  ): Promise<Required<QueryT>[keyof QueryT] | null | undefined> {
-    if (!this._queryNode) {
-      return
-    }
-    return (await this._queryNode.query<QueryT, VariablesT>({ query, variables })).data[resultKey] || null
-  }
-
-  // Query-node: get entities by "non-unique" input and return first result
-  protected async firstEntityQuery<
-    QueryT extends { [k: string]: unknown[] },
-    VariablesT extends Record<string, unknown>
-  >(
-    query: DocumentNode,
-    variables: VariablesT,
-    resultKey: keyof QueryT
-  ): Promise<QueryT[keyof QueryT][number] | null | undefined> {
-    if (!this._queryNode) {
-      return
-    }
-    return (await this._queryNode.query<QueryT, VariablesT>({ query, variables })).data[resultKey][0] || null
-  }
-
-  // Query-node: get multiple entities
-  protected async multipleEntitiesQuery<
-    QueryT extends { [k: string]: unknown[] },
-    VariablesT extends Record<string, unknown>
-  >(query: DocumentNode, variables: VariablesT, resultKey: keyof QueryT): Promise<QueryT[keyof QueryT] | undefined> {
-    if (!this._queryNode) {
-      return
-    }
-    return (await this._queryNode.query<QueryT, VariablesT>({ query, variables })).data[resultKey]
+    return new Api(api, chainType.isDevelopment || chainType.isLocal)
   }
 
   async bestNumber(): Promise<number> {
@@ -211,7 +142,7 @@ export default class Api {
   // TODO: This is a lot of repeated logic from "/pioneer/joy-utils/transport"
   // It will be refactored to "joystream-js" soon
   async entriesByIds<IDType extends UInt, ValueType extends Codec>(
-    apiMethod: AugmentedQuery<'promise', (key: IDType) => Observable<ValueType>>
+    apiMethod: AugmentedQuery<'promise', (key: IDType) => Observable<ValueType>, [IDType]>
   ): Promise<[IDType, ValueType][]> {
     const entries: [IDType, ValueType][] = (await apiMethod.entries()).map(([storageKey, value]) => [
       storageKey.args[0] as IDType,
@@ -238,15 +169,7 @@ export default class Api {
     return this._api.query[module]
   }
 
-  protected async fetchMemberQueryNodeData(memberId: MemberId): Promise<MembershipFieldsFragment | null | undefined> {
-    return this.uniqueEntityQuery<GetMemberByIdQuery, GetMemberByIdQueryVariables>(
-      GetMemberById,
-      {
-        id: memberId.toString(),
-      },
-      'membershipByUniqueInput'
-    )
-  }
+  // TODO: old fetchMemberQueryNodeData moved to QueryNodeApi and will be made available here
 
   async memberDetails(memberId: MemberId, membership: Membership): Promise<MemberDetails> {
     const memberData = await this.fetchMemberQueryNodeData(memberId)
@@ -326,14 +249,14 @@ export default class Api {
   }
 
   async workerByWorkerId(group: WorkingGroups, workerId: number): Promise<Worker> {
-    const nextId = await this.workingGroupApiQuery(group).nextWorkerId<WorkerId>()
+    const nextId = await this.workingGroupApiQuery(group).nextWorkerId()
 
     // This is chain specfic, but if next id is still 0, it means no workers have been added yet
     if (workerId < 0 || workerId >= nextId.toNumber()) {
       throw new CLIError('Invalid worker id!')
     }
 
-    const worker = await this.workingGroupApiQuery(group).workerById<Worker>(workerId)
+    const worker = await this.workingGroupApiQuery(group).workerById(workerId)
 
     if (worker.isEmpty) {
       throw new CLIError('This worker is not active anymore')
@@ -342,7 +265,7 @@ export default class Api {
     return worker
   }
 
-  async groupMember(group: WorkingGroups, workerId: number) {
+  async groupMember(group: WorkingGroups, workerId: number): Promise<GroupMember> {
     const worker = await this.workerByWorkerId(group, workerId)
     return await this.parseGroupMember(group, this._api.createType('WorkerId', workerId), worker)
   }
@@ -358,7 +281,7 @@ export default class Api {
   }
 
   groupWorkers(group: WorkingGroups): Promise<[WorkerId, Worker][]> {
-    return this.entriesByIds<WorkerId, Worker>(this.workingGroupApiQuery(group).workerById)
+    return this.entriesByIds(this.workingGroupApiQuery(group).workerById)
   }
 
   async openingsByGroup(group: WorkingGroups): Promise<OpeningDetails[]> {
@@ -460,15 +383,15 @@ export default class Api {
 
   // Content directory
   async availableChannels(): Promise<[ChannelId, Channel][]> {
-    return await this.entriesByIds<ChannelId, Channel>(this._api.query.content.channelById)
+    return await this.entriesByIds(this._api.query.content.channelById)
   }
 
   async availableVideos(): Promise<[VideoId, Video][]> {
-    return await this.entriesByIds<VideoId, Video>(this._api.query.content.videoById)
+    return await this.entriesByIds(this._api.query.content.videoById)
   }
 
   availableCuratorGroups(): Promise<[CuratorGroupId, CuratorGroup][]> {
-    return this.entriesByIds<CuratorGroupId, CuratorGroup>(this._api.query.content.curatorGroupById)
+    return this.entriesByIds(this._api.query.content.curatorGroupById)
   }
 
   async curatorGroupById(id: number): Promise<CuratorGroup | null> {
@@ -491,19 +414,6 @@ export default class Api {
     return channel
   }
 
-  async videosByChannelId(channelId: ChannelId | number | string): Promise<[VideoId, Video][]> {
-    const channel = await this.channelById(channelId)
-    if (channel) {
-      return Promise.all(
-        channel.videos.map(
-          async (videoId) => [videoId, await this._api.query.content.videoById(videoId)] as [VideoId, Video]
-        )
-      )
-    } else {
-      return []
-    }
-  }
-
   async videoById(videoId: VideoId | number | string): Promise<Video> {
     const video = await this._api.query.content.videoById(videoId)
     if (video.isEmpty) {
@@ -513,40 +423,40 @@ export default class Api {
     return video
   }
 
+  async dataObjectsByIds(bagId: BagId, ids: DataObjectId[]): Promise<DataObject[]> {
+    return this._api.query.storage.dataObjectsById.multi(ids.map((id) => [bagId, id]))
+  }
+
   async channelCategoryIds(): Promise<ChannelCategoryId[]> {
     // There is currently no way to differentiate between unexisting and existing category
     // other than fetching all existing category ids (event the .size() trick does not work, as the object is empty)
-    return (
-      await this.entriesByIds<ChannelCategoryId, ChannelCategory>(this._api.query.content.channelCategoryById)
-    ).map(([id]) => id)
+    return (await this.entriesByIds(this._api.query.content.channelCategoryById)).map(([id]) => id)
   }
 
   async videoCategoryIds(): Promise<VideoCategoryId[]> {
     // There is currently no way to differentiate between unexisting and existing category
     // other than fetching all existing category ids (event the .size() trick does not work, as the object is empty)
-    return (await this.entriesByIds<VideoCategoryId, VideoCategory>(this._api.query.content.videoCategoryById)).map(
-      ([id]) => id
-    )
+    return (await this.entriesByIds(this._api.query.content.videoCategoryById)).map(([id]) => id)
   }
 
-  async dataObjectsByContentIds(contentIds: ContentId[]): Promise<DataObject[]> {
-    const dataObjects = await this._api.query.dataDirectory.dataByContentId.multi<DataObject>(contentIds)
-    const notFoundIndex = dataObjects.findIndex((o) => o.isEmpty)
-    if (notFoundIndex !== -1) {
-      throw new CLIError(`DataObject not found by id ${contentIds[notFoundIndex].toString()}`)
-    }
-    return dataObjects
+  async dataObjectsInBag(bagId: BagId): Promise<[DataObjectId, DataObject][]> {
+    return (await this._api.query.storage.dataObjectsById.entries(bagId)).map(([{ args: [, dataObjectId] }, value]) => [
+      dataObjectId,
+      value,
+    ])
+  }
+
+  async getMembers(ids: MemberId[] | number[]): Promise<Membership[]> {
+    return this._api.query.members.membershipById.multi(ids)
   }
 
-  async storageProviderEndpoint(storageProviderId: StorageProviderId | number): Promise<string> {
-    const value = await this._api.query.storageWorkingGroup.workerStorage(storageProviderId)
-    return this._api.createType('Text', value).toString()
+  async memberEntriesByIds(ids: MemberId[] | number[]): Promise<[MemberId, Membership][]> {
+    const memberships = await this._api.query.members.membershipById.multi<Membership>(ids)
+    return ids.map((id, i) => [createType('MemberId', id), memberships[i]])
   }
 
-  async allStorageProviderEndpoints(): Promise<string[]> {
-    const workerIds = (await this.groupWorkers(WorkingGroups.StorageProviders)).map(([id]) => id)
-    const workerStorages = await this._api.query.storageWorkingGroup.workerStorage.multi<Bytes>(workerIds)
-    return workerStorages.map((storage) => this._api.createType('Text', storage).toString())
+  allMemberEntries(): Promise<[MemberId, Membership][]> {
+    return this.entriesByIds(this._api.query.members.membershipById)
   }
 
   async stakingAccountStatus(account: string): Promise<StakingAccountMemberBinding | null> {

+ 2 - 1
cli/src/ExitCodes.ts

@@ -11,7 +11,8 @@ enum ExitCodes {
   UnexpectedException = 500,
   FsOperationFailed = 501,
   ApiError = 502,
-  ExternalInfrastructureError = 503,
+  StorageNodeError = 503,
   ActionCurrentlyUnavailable = 504,
+  QueryNodeError = 505,
 }
 export = ExitCodes

+ 140 - 0
cli/src/QueryNodeApi.ts

@@ -0,0 +1,140 @@
+import { StorageNodeInfo } from './Types'
+import {
+  ApolloClient,
+  InMemoryCache,
+  HttpLink,
+  NormalizedCacheObject,
+  DocumentNode,
+  from,
+  ApolloLink,
+} from '@apollo/client/core'
+import { ErrorLink, onError } from '@apollo/client/link/error'
+import { Maybe } from './graphql/generated/schema'
+import {
+  GetStorageNodesInfoByBagId,
+  GetStorageNodesInfoByBagIdQuery,
+  GetStorageNodesInfoByBagIdQueryVariables,
+  DataObjectInfoFragment,
+  GetDataObjectsByBagId,
+  GetDataObjectsByBagIdQuery,
+  GetDataObjectsByBagIdQueryVariables,
+  GetDataObjectsByVideoId,
+  GetDataObjectsByVideoIdQuery,
+  GetDataObjectsByVideoIdQueryVariables,
+  GetDataObjectsByChannelId,
+  GetDataObjectsByChannelIdQuery,
+  GetDataObjectsByChannelIdQueryVariables,
+  GetMemberById,
+  GetMemberByIdQuery,
+  GetMemberByIdQueryVariables,
+  MembershipFieldsFragment,
+} from './graphql/generated/queries'
+import { URL } from 'url'
+import fetch from 'cross-fetch'
+import { MemberId } from '@joystream/types/common'
+
+export default class QueryNodeApi {
+  private _qnClient: ApolloClient<NormalizedCacheObject>
+
+  public constructor(uri?: string, errorHandler?: ErrorLink.ErrorHandler) {
+    const links: ApolloLink[] = []
+    if (errorHandler) {
+      links.push(onError(errorHandler))
+    }
+    links.push(new HttpLink({ uri, fetch }))
+    this._qnClient = new ApolloClient({
+      link: from(links),
+      cache: new InMemoryCache(),
+      defaultOptions: { query: { fetchPolicy: 'no-cache', errorPolicy: 'all' } },
+    })
+  }
+
+  // Get entity by unique input
+  protected async uniqueEntityQuery<
+    QueryT extends { [k: string]: Maybe<Record<string, unknown>> | undefined },
+    VariablesT extends Record<string, unknown>
+  >(
+    query: DocumentNode,
+    variables: VariablesT,
+    resultKey: keyof QueryT
+  ): Promise<Required<QueryT>[keyof QueryT] | null> {
+    return (await this._qnClient.query<QueryT, VariablesT>({ query, variables })).data[resultKey] || null
+  }
+
+  // Get entities by "non-unique" input and return first result
+  protected async firstEntityQuery<
+    QueryT extends { [k: string]: unknown[] },
+    VariablesT extends Record<string, unknown>
+  >(query: DocumentNode, variables: VariablesT, resultKey: keyof QueryT): Promise<QueryT[keyof QueryT][number] | null> {
+    return (await this._qnClient.query<QueryT, VariablesT>({ query, variables })).data[resultKey][0] || null
+  }
+
+  // Get multiple entities
+  protected async multipleEntitiesQuery<
+    QueryT extends { [k: string]: unknown[] },
+    VariablesT extends Record<string, unknown>
+  >(query: DocumentNode, variables: VariablesT, resultKey: keyof QueryT): Promise<QueryT[keyof QueryT]> {
+    return (await this._qnClient.query<QueryT, VariablesT>({ query, variables })).data[resultKey]
+  }
+
+  async dataObjectsByBagId(bagId: string): Promise<DataObjectInfoFragment[]> {
+    return this.multipleEntitiesQuery<GetDataObjectsByBagIdQuery, GetDataObjectsByBagIdQueryVariables>(
+      GetDataObjectsByBagId,
+      { bagId },
+      'storageDataObjects'
+    )
+  }
+
+  async dataObjectsByVideoId(videoId: string): Promise<DataObjectInfoFragment[]> {
+    return this.multipleEntitiesQuery<GetDataObjectsByVideoIdQuery, GetDataObjectsByVideoIdQueryVariables>(
+      GetDataObjectsByVideoId,
+      { videoId },
+      'storageDataObjects'
+    )
+  }
+
+  async dataObjectsByChannelId(channelId: string): Promise<DataObjectInfoFragment[]> {
+    return this.multipleEntitiesQuery<GetDataObjectsByChannelIdQuery, GetDataObjectsByChannelIdQueryVariables>(
+      GetDataObjectsByChannelId,
+      { channelId },
+      'storageDataObjects'
+    )
+  }
+
+  async storageNodesInfoByBagId(bagId: string): Promise<StorageNodeInfo[]> {
+    const result = await this.multipleEntitiesQuery<
+      GetStorageNodesInfoByBagIdQuery,
+      GetStorageNodesInfoByBagIdQueryVariables
+    >(GetStorageNodesInfoByBagId, { bagId }, 'storageBuckets')
+
+    const validNodesInfo: StorageNodeInfo[] = []
+    for (const { operatorMetadata, id } of result) {
+      if (operatorMetadata?.nodeEndpoint) {
+        try {
+          const rootEndpoint = operatorMetadata.nodeEndpoint
+          const apiEndpoint = new URL(
+            'api/v1',
+            rootEndpoint.endsWith('/') ? rootEndpoint : rootEndpoint + '/'
+          ).toString()
+          validNodesInfo.push({
+            apiEndpoint,
+            bucketId: parseInt(id),
+          })
+        } catch (e) {
+          continue
+        }
+      }
+    }
+    return validNodesInfo
+  }
+
+  async fetchMemberQueryNodeData(memberId: MemberId): Promise<MembershipFieldsFragment | null | undefined> {
+    return this.uniqueEntityQuery<GetMemberByIdQuery, GetMemberByIdQueryVariables>(
+      GetMemberById,
+      {
+        id: memberId.toString(),
+      },
+      'membershipByUniqueInput'
+    )
+  }
+}

+ 49 - 22
cli/src/Types.ts

@@ -1,3 +1,4 @@
+import BN from 'bn.js'
 import { Codec } from '@polkadot/types/types'
 import { Balance, AccountId } from '@polkadot/types/interfaces'
 import { DeriveBalancesAll } from '@polkadot/api-derive/types'
@@ -8,15 +9,14 @@ import { MemberId } from '@joystream/types/common'
 import { Validator } from 'inquirer'
 import { ApiPromise } from '@polkadot/api'
 import { SubmittableModuleExtrinsics, QueryableModuleStorage, QueryableModuleConsts } from '@polkadot/api/types'
-import { ContentId, ContentParameters } from '@joystream/types/storage'
-
-import { JSONSchema7, JSONSchema7Definition } from 'json-schema'
+import { JSONSchema4 } from 'json-schema'
 import {
   IChannelMetadata,
   IVideoMetadata,
   IVideoCategoryMetadata,
   IChannelCategoryMetadata,
 } from '@joystream/metadata-protobuf'
+import { DataObjectCreationParameters } from '@joystream/types/storage'
 
 // KeyringPair type extended with mandatory "meta.name"
 // It's used for accounts/keys management within CLI.
@@ -43,8 +43,11 @@ export enum WorkingGroups {
   Curators = 'curators',
   Forum = 'forum',
   Membership = 'membership',
-  Operations = 'operations',
+  OperationsAlpha = 'operationsAlpha',
+  OperationsBeta = 'operationsBeta',
+  OperationsGamma = 'operationsGamma',
   Gateway = 'gateway',
+  Distribution = 'distributors',
 }
 
 // In contrast to Pioneer, currently only StorageProviders group is available in CLI
@@ -53,8 +56,11 @@ export const AvailableGroups: readonly WorkingGroups[] = [
   WorkingGroups.Curators,
   WorkingGroups.Forum,
   WorkingGroups.Membership,
-  WorkingGroups.Operations,
   WorkingGroups.Gateway,
+  WorkingGroups.OperationsAlpha,
+  WorkingGroups.OperationsBeta,
+  WorkingGroups.OperationsGamma,
+  WorkingGroups.Distribution,
 ] as const
 
 export type Reward = {
@@ -134,18 +140,14 @@ export type UnaugmentedApiPromise = Omit<ApiPromise, 'query' | 'tx' | 'consts'>
   consts: { [key: string]: QueryableModuleConsts }
 }
 
-// Content-related
-export enum AssetType {
-  AnyAsset = 1,
-}
-
-export type InputAsset = {
+export type AssetToUpload = {
+  dataObjectId: BN
   path: string
-  contentId: ContentId
 }
 
-export type InputAssetDetails = InputAsset & {
-  parameters: ContentParameters
+export type ResolvedAsset = {
+  path: string
+  parameters: DataObjectCreationParameters
 }
 
 export type VideoFFProbeMetadata = {
@@ -171,6 +173,7 @@ export type ChannelInputParameters = Omit<IChannelMetadata, 'coverPhoto' | 'avat
   coverPhotoPath?: string
   avatarPhotoPath?: string
   rewardAccount?: string
+  collaborators?: number[]
 }
 
 export type ChannelCategoryInputParameters = IChannelCategoryMetadata
@@ -180,6 +183,14 @@ export type VideoCategoryInputParameters = IVideoCategoryMetadata
 type AnyNonObject = string | number | boolean | any[] | Long
 
 // JSONSchema utility types
+
+// Based on: https://stackoverflow.com/questions/51465182/how-to-remove-index-signature-using-mapped-types
+type RemoveIndex<T> = {
+  [K in keyof T as string extends K ? never : number extends K ? never : K]: T[K]
+}
+
+type AnyJSONSchema = RemoveIndex<JSONSchema4>
+
 export type JSONTypeName<T> = T extends string
   ? 'string' | ['string', 'null']
   : T extends number
@@ -192,19 +203,35 @@ export type JSONTypeName<T> = T extends string
   ? 'number' | ['number', 'null']
   : 'object' | ['object', 'null']
 
-export type PropertySchema<P> = Omit<
-  JSONSchema7Definition & {
-    type: JSONTypeName<P>
-    properties: P extends AnyNonObject ? never : JsonSchemaProperties<P>
-  },
-  P extends AnyNonObject ? 'properties' : ''
->
+export type PropertySchema<P> = Omit<AnyJSONSchema, 'type' | 'properties'> & {
+  type: JSONTypeName<P>
+} & (P extends AnyNonObject ? { properties?: never } : { properties: JsonSchemaProperties<P> })
 
 export type JsonSchemaProperties<T> = {
   [K in keyof Required<T>]: PropertySchema<Required<T>[K]>
 }
 
-export type JsonSchema<T> = JSONSchema7 & {
+export type JsonSchema<T> = Omit<AnyJSONSchema, 'type' | 'properties'> & {
   type: 'object'
   properties: JsonSchemaProperties<T>
 }
+
+// Storage node related types
+
+export type StorageNodeInfo = {
+  bucketId: number
+  apiEndpoint: string
+}
+
+export type TokenRequest = {
+  data: TokenRequestData
+  signature: string
+}
+
+export type TokenRequestData = {
+  memberId: number
+  accountId: string
+  dataObjectId: number
+  storageBucketId: number
+  bagId: string
+}

+ 56 - 42
cli/src/base/AccountsCommandBase.ts

@@ -1,4 +1,4 @@
-import fs from 'fs'
+import fs, { readdirSync } from 'fs'
 import path from 'path'
 import inquirer from 'inquirer'
 import ExitCodes from '../ExitCodes'
@@ -35,21 +35,20 @@ export const STAKING_ACCOUNT_CANDIDATE_STAKE = new BN(200)
  * Where: APP_DATA_PATH is provided by StateAwareCommandBase and ACCOUNTS_DIRNAME is a const (see above).
  */
 export default abstract class AccountsCommandBase extends ApiCommandBase {
-  private keyring: KeyringInstance | undefined
+  private selectedMember: [MemberId, Membership] | undefined
+  private _keyring: KeyringInstance | undefined
 
-  getKeyring(): KeyringInstance {
-    if (!this.keyring) {
+  private get keyring(): KeyringInstance {
+    if (!this._keyring) {
       this.error('Trying to access Keyring before AccountsCommandBase initialization', {
         exit: ExitCodes.UnexpectedException,
       })
     }
-    return this.keyring
+    return this._keyring
   }
 
   isKeyAvailable(key: AccountId | string): boolean {
-    return this.getKeyring()
-      .getPairs()
-      .some((p) => p.address === key.toString())
+    return this.keyring.getPairs().some((p) => p.address === key.toString())
   }
 
   getAccountsDirPath(): string {
@@ -65,7 +64,7 @@ export default abstract class AccountsCommandBase extends ApiCommandBase {
   }
 
   isAccountNameTaken(accountName: string): boolean {
-    return this.getPairs().some((p) => this.getAccountFileName(p.meta.name) === this.getAccountFileName(accountName))
+    return readdirSync(this.getAccountsDirPath()).some((filename) => filename === this.getAccountFileName(accountName))
   }
 
   private initAccountsFs(): void {
@@ -94,16 +93,15 @@ export default abstract class AccountsCommandBase extends ApiCommandBase {
       masterKey = keyring.getPairs()[0]
       this.log(chalk.magentaBright(`${chalk.bold('New account memonic: ')}${mnemonic}`))
     } else {
-      const existingAcc = this.getPairs().find((p) => p.address === masterKey!.address)
+      const { address } = masterKey
+      const existingAcc = this.getPairs().find((p) => p.address === address)
       if (existingAcc) {
         this.error(`Account with this key already exists (${chalk.magentaBright(existingAcc.meta.name)})`, {
           exit: ExitCodes.InvalidInput,
         })
       }
       await this.requestPairDecoding(masterKey, 'Current account password')
-      if (!masterKey.meta.name) {
-        masterKey.meta.name = name
-      }
+      masterKey.meta.name = name
     }
 
     while (password === undefined) {
@@ -122,9 +120,9 @@ export default abstract class AccountsCommandBase extends ApiCommandBase {
     const destPath = this.getAccountFilePath(name)
     fs.writeFileSync(destPath, JSON.stringify(masterKey.toJson(password)))
 
-    this.getKeyring().addPair(masterKey)
+    this.keyring.addPair(masterKey)
 
-    this.log(chalk.greenBright(`\nNew account succesfully created!`))
+    this.log(chalk.greenBright(`\nNew account successfully created!`))
 
     return masterKey as NamedKeyringPair
   }
@@ -148,7 +146,7 @@ export default abstract class AccountsCommandBase extends ApiCommandBase {
 
     if (!accountJsonObj.meta) accountJsonObj.meta = {}
     // Normalize the CLI account name based on file name
-    // (makes sure getFilePath(name) will always point to the correct file, preserving backward-compatibility
+    // (makes sure getAccountFilePath(name) will always point to the correct file, preserving backward-compatibility
     // with older CLI versions)
     accountJsonObj.meta.name = path.basename(jsonBackupFilePath, '.json')
 
@@ -159,7 +157,7 @@ export default abstract class AccountsCommandBase extends ApiCommandBase {
       keyring.addFromJson(accountJsonObj)
       account = keyring.getPair(accountJsonObj.address) as NamedKeyringPair // We can be sure it's named, because we forced it before
     } catch (e) {
-      throw new CLIError(`Provided backup file is not valid (${e.message})`, { exit: ExitCodes.InvalidFile })
+      throw new CLIError(`Provided backup file is not valid (${(e as Error).message})`, { exit: ExitCodes.InvalidFile })
     }
 
     return account
@@ -194,17 +192,15 @@ export default abstract class AccountsCommandBase extends ApiCommandBase {
   }
 
   getPairs(includeDevAccounts = true): NamedKeyringPair[] {
-    return this.getKeyring()
-      .getPairs()
-      .filter((p) => includeDevAccounts || !p.meta.isTesting) as NamedKeyringPair[]
+    return this.keyring.getPairs().filter((p) => includeDevAccounts || !p.meta.isTesting) as NamedKeyringPair[]
   }
 
   getPair(key: string): NamedKeyringPair {
-    return this.getKeyring().getPair(key) as NamedKeyringPair
+    return this.keyring.getPair(key) as NamedKeyringPair
   }
 
-  async getDecodedPair(key: string): Promise<NamedKeyringPair> {
-    const pair = this.getPair(key)
+  async getDecodedPair(key: string | AccountId): Promise<NamedKeyringPair> {
+    const pair = this.getPair(key.toString())
 
     return (await this.requestPairDecoding(pair)) as NamedKeyringPair
   }
@@ -240,9 +236,9 @@ export default abstract class AccountsCommandBase extends ApiCommandBase {
   }
 
   initKeyring(): void {
-    this.keyring = this.getApi().isDevelopment ? createTestKeyring(KEYRING_OPTIONS) : new Keyring(KEYRING_OPTIONS)
+    this._keyring = this.getApi().isDevelopment ? createTestKeyring(KEYRING_OPTIONS) : new Keyring(KEYRING_OPTIONS)
     const accounts = this.fetchAccounts()
-    accounts.forEach((a) => this.getKeyring().addPair(a))
+    accounts.forEach((a) => this.keyring.addPair(a))
   }
 
   async promptForPassword(message = "Your account's password"): Promise<string> {
@@ -325,35 +321,53 @@ export default abstract class AccountsCommandBase extends ApiCommandBase {
     }
   }
 
-  async getRequiredMemberContext(): Promise<MemberDetails> {
-    // TODO: Limit only to a set of members provided by the user?
-    const allMembers = await this.getApi().allMembers()
-    const availableMembers = await Promise.all(
-      allMembers
-        .filter(([, m]) => this.isKeyAvailable(m.controller_account.toString()))
-        .map(([id, m]) => this.getApi().memberDetails(id, m))
+  async getRequiredMemberContext(useSelected = false, allowedIds?: MemberId[]): Promise<[MemberId, Membership]> {
+    if (
+      useSelected &&
+      this.selectedMember &&
+      (!allowedIds || allowedIds.some((id) => id.eq(this.selectedMember?.[0])))
+    ) {
+      return this.selectedMember
+    }
+
+    const membersEntries = allowedIds
+      ? await this.getApi().memberEntriesByIds(allowedIds)
+      : await this.getApi().allMemberEntries()
+    const availableMemberships = await Promise.all(
+      membersEntries.filter(([, m]) => this.isKeyAvailable(m.controller_account.toString()))
     )
 
-    if (!availableMembers.length) {
-      this.error('No member controller key available!', { exit: ExitCodes.AccessDenied })
-    } else if (availableMembers.length === 1) {
-      return availableMembers[0]
+    if (!availableMemberships.length) {
+      this.error(
+        `No ${allowedIds ? 'allowed ' : ''}member controller key available!` +
+          (allowedIds ? ` Allowed members: ${allowedIds.join(', ')}.` : ''),
+        {
+          exit: ExitCodes.AccessDenied,
+        }
+      )
+    } else if (availableMemberships.length === 1) {
+      this.selectedMember = availableMemberships[0]
     } else {
-      return this.promptForMember(availableMembers, 'Choose member context')
+      this.selectedMember = await this.promptForMember(availableMemberships, 'Choose member context')
     }
+
+    return this.selectedMember
   }
 
-  async promptForMember(availableMembers: MemberDetails[], message = 'Choose a member'): Promise<MemberDetails> {
+  async promptForMember(
+    availableMemberships: [MemberId, Membership][],
+    message = 'Choose a member'
+  ): Promise<[MemberId, Membership]> {
     const memberIndex = await this.simplePrompt({
       type: 'list',
       message,
-      choices: availableMembers.map((m, i) => ({
-        name: memberHandle(m),
+      choices: availableMemberships.map(([, membership], i) => ({
+        name: membership.handle.toString(),
         value: i,
       })),
     })
 
-    return availableMembers[memberIndex]
+    return availableMemberships[memberIndex]
   }
 
   async promptForStakingAccount(stakeValue: BN, memberId: MemberId, member: Membership): Promise<string> {
@@ -449,7 +463,7 @@ export default abstract class AccountsCommandBase extends ApiCommandBase {
     return stakingAccount
   }
 
-  async init() {
+  async init(): Promise<void> {
     await super.init()
     try {
       this.initAccountsFs()

+ 103 - 73
cli/src/base/ApiCommandBase.ts

@@ -3,18 +3,19 @@ import { CLIError } from '@oclif/errors'
 import StateAwareCommandBase from './StateAwareCommandBase'
 import Api from '../Api'
 import { getTypeDef, Option, Tuple } from '@polkadot/types'
-import { Registry, Codec, TypeDef, TypeDefInfo, IEvent } from '@polkadot/types/types'
+import { Registry, Codec, TypeDef, TypeDefInfo, IEvent, DetectCodec } from '@polkadot/types/types'
 import { Vec, Struct, Enum } from '@polkadot/types/codec'
-import { SubmittableResult, WsProvider } from '@polkadot/api'
+import { SubmittableResult, WsProvider, ApiPromise } from '@polkadot/api'
 import { KeyringPair } from '@polkadot/keyring/types'
 import chalk from 'chalk'
 import { InterfaceTypes } from '@polkadot/types/types/registry'
-import { ApiMethodArg, ApiMethodNamedArgs, ApiParamsOptions, ApiParamOptions } from '../Types'
+import { ApiMethodArg, ApiMethodNamedArgs, ApiParamsOptions, ApiParamOptions, UnaugmentedApiPromise } from '../Types'
 import { createParamOptions } from '../helpers/promptOptions'
 import { AugmentedSubmittables, SubmittableExtrinsic, AugmentedEvents, AugmentedEvent } from '@polkadot/api/types'
 import { DistinctQuestion } from 'inquirer'
 import { BOOL_PROMPT_OPTIONS } from '../helpers/prompting'
 import { DispatchError } from '@polkadot/types/interfaces/system'
+import QueryNodeApi from '../QueryNodeApi'
 import { formatBalance } from '@polkadot/util'
 import BN from 'bn.js'
 import _ from 'lodash'
@@ -25,19 +26,38 @@ export class ExtrinsicFailedError extends Error {}
  * Abstract base class for commands that require access to the API.
  */
 export default abstract class ApiCommandBase extends StateAwareCommandBase {
-  private api: Api | null = null
+  private api: Api | undefined
+  private queryNodeApi: QueryNodeApi | null | undefined
+
+  // Command configuration
+  protected requiresApiConnection = true
+  protected requiresQueryNode = false
 
   getApi(): Api {
-    if (!this.api) throw new CLIError('Tried to get API before initialization.', { exit: ExitCodes.ApiError })
+    if (!this.api) {
+      throw new CLIError('Tried to access API before initialization.', { exit: ExitCodes.ApiError })
+    }
     return this.api
   }
 
+  getQNApi(): QueryNodeApi {
+    if (this.queryNodeApi === undefined) {
+      throw new CLIError('Tried to access QueryNodeApi before initialization.', { exit: ExitCodes.QueryNodeError })
+    }
+    if (this.queryNodeApi === null) {
+      throw new CLIError('Query node endpoint uri is required in order to run this command!', {
+        exit: ExitCodes.QueryNodeError,
+      })
+    }
+    return this.queryNodeApi
+  }
+
   // Shortcuts
-  getOriginalApi() {
+  getOriginalApi(): ApiPromise {
     return this.getApi().getOriginalApi()
   }
 
-  getUnaugmentedApi() {
+  getUnaugmentedApi(): UnaugmentedApiPromise {
     return this.getApi().getUnaugmentedApi()
   }
 
@@ -45,13 +65,18 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
     return this.getOriginalApi().registry
   }
 
-  createType<K extends keyof InterfaceTypes>(typeName: K, value?: unknown): InterfaceTypes[K] {
-    return this.getOriginalApi().createType(typeName, value)
+  createType<T extends Codec = Codec, TN extends string = string>(typeName: TN, value?: unknown): DetectCodec<T, TN> {
+    return this.getOriginalApi().createType<T, TN>(typeName, value)
+  }
+
+  isQueryNodeUriSet(): boolean {
+    const { queryNodeUri } = this.getPreservedState()
+    return !!queryNodeUri
   }
 
-  async init(skipConnection = false): Promise<void> {
+  async init(): Promise<void> {
     await super.init()
-    if (!skipConnection) {
+    if (this.requiresApiConnection) {
       let apiUri: string = this.getPreservedState().apiUri
 
       if (!apiUri) {
@@ -59,8 +84,12 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
         apiUri = await this.promptForApiUri()
       }
 
-      let queryNodeUri: string = this.getPreservedState().queryNodeUri
-      if (!queryNodeUri) {
+      let queryNodeUri: string | null | undefined = this.getPreservedState().queryNodeUri
+
+      if (this.requiresQueryNode && !queryNodeUri) {
+        this.warn('Query node endpoint uri is required in order to run this command!')
+        queryNodeUri = await this.promptForQueryNodeUri(true)
+      } else if (queryNodeUri === undefined) {
         this.warn("You haven't provided a Joystream query node uri for the CLI to connect to yet!")
         queryNodeUri = await this.promptForQueryNodeUri()
       }
@@ -75,6 +104,12 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
         metadataCache[metadataKey] = await this.getOriginalApi().runtimeMetadata.toJSON()
         await this.setPreservedState({ metadataCache })
       }
+
+      this.queryNodeApi = queryNodeUri
+        ? new QueryNodeApi(queryNodeUri, (err) => {
+            this.warn(`Query node error: ${err.networkError?.message || err.graphQLErrors?.join('\n')}`)
+          })
+        : null
     }
   }
 
@@ -115,28 +150,31 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
     return selectedNodeUri
   }
 
-  async promptForQueryNodeUri(): Promise<string> {
-    let selectedUri = await this.simplePrompt({
+  async promptForQueryNodeUri(isRequired = false): Promise<string | null> {
+    const choices = [
+      {
+        name: 'Local query node (http://localhost:8081/graphql)',
+        value: 'http://localhost:8081/graphql',
+      },
+      {
+        name: 'Jsgenesis-hosted query node (https://hydra.joystream.org/graphql)',
+        value: 'https://hydra.joystream.org/graphql',
+      },
+      {
+        name: 'Custom endpoint',
+        value: '',
+      },
+    ]
+    if (!isRequired) {
+      choices.push({
+        name: "No endpoint (if you don't use query node some features will not be available)",
+        value: 'none',
+      })
+    }
+    let selectedUri: string = await this.simplePrompt({
       type: 'list',
       message: 'Choose a query node endpoint:',
-      choices: [
-        {
-          name: 'Local query node (http://localhost:8081/graphql)',
-          value: 'http://localhost:8081/graphql',
-        },
-        {
-          name: 'Jsgenesis-hosted query node (https://hydra.joystream.org/graphql)',
-          value: 'https://hydra.joystream.org/graphql',
-        },
-        {
-          name: 'Custom endpoint',
-          value: '',
-        },
-        {
-          name: "No endpoint (if you don't use query node some features will not be available)",
-          value: 'none',
-        },
-      ],
+      choices,
     })
 
     if (!selectedUri) {
@@ -145,18 +183,20 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
           type: 'input',
           message: 'Provide a query node endpoint',
         })
-        if (!this.isApiUriValid(selectedUri)) {
+        if (!this.isQueryNodeUriValid(selectedUri)) {
           this.warn('Provided uri seems incorrect! Please try again...')
         }
-      } while (!this.isApiUriValid(selectedUri))
+      } while (!this.isQueryNodeUriValid(selectedUri))
     }
 
-    await this.setPreservedState({ queryNodeUri: selectedUri })
+    const queryNodeUri = selectedUri === 'none' ? null : selectedUri
 
-    return selectedUri
+    await this.setPreservedState({ queryNodeUri })
+
+    return queryNodeUri
   }
 
-  isApiUriValid(uri: string) {
+  isApiUriValid(uri: string): boolean {
     try {
       // eslint-disable-next-line no-new
       new WsProvider(uri)
@@ -166,7 +206,7 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
     return true
   }
 
-  isQueryNodeUriValid(uri: string) {
+  isQueryNodeUriValid(uri: string): boolean {
     let url: URL
     try {
       url = new URL(uri)
@@ -179,13 +219,13 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
 
   // This is needed to correctly handle some structs, enums etc.
   // Where the main typeDef doesn't provide enough information
-  protected getRawTypeDef(type: keyof InterfaceTypes) {
+  protected getRawTypeDef(type: keyof InterfaceTypes): TypeDef {
     const instance = this.createType(type)
     return getTypeDef(instance.toRawType())
   }
 
   // Prettifier for type names which are actually JSON strings
-  protected prettifyJsonTypeName(json: string) {
+  protected prettifyJsonTypeName(json: string): string {
     const obj = JSON.parse(json) as { [key: string]: string }
     return (
       '{\n' +
@@ -197,7 +237,7 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
   }
 
   // Get param name based on TypeDef object
-  protected paramName(typeDef: TypeDef) {
+  protected paramName(typeDef: TypeDef): string {
     return chalk.green(
       typeDef.displayName ||
         typeDef.name ||
@@ -244,10 +284,10 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
         createParamOptions(subtype.name, defaultValue?.unwrapOr(undefined))
       )
       this.closeIndentGroup()
-      return this.createType(`Option<${subtype.type}>` as any, value)
+      return this.createType<Option<Codec>>(`Option<${subtype.type}>`, value)
     }
 
-    return this.createType(`Option<${subtype.type}>` as any, null)
+    return this.createType<Option<Codec>>(`Option<${subtype.type}>`, null)
   }
 
   // Prompt for Tuple
@@ -268,7 +308,11 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
     }
     this.closeIndentGroup()
 
-    return new Tuple(this.getTypesRegistry(), subtypes.map((subtype) => subtype.type) as any, result)
+    return new Tuple(
+      this.getTypesRegistry(),
+      subtypes.map((subtype) => subtype.type),
+      result
+    )
   }
 
   // Prompt for Struct
@@ -295,7 +339,7 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
     }
     this.closeIndentGroup()
 
-    return this.createType(structType as any, structValues)
+    return this.createType(structType, structValues)
   }
 
   // Prompt for Vec
@@ -323,7 +367,7 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
     } while (addAnother)
     this.closeIndentGroup()
 
-    return this.createType(`Vec<${subtype.type}>` as any, entries)
+    return this.createType<Vec<Codec>>(`Vec<${subtype.type}>`, entries)
   }
 
   // Prompt for Enum
@@ -348,12 +392,12 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
 
     if (enumSubtype.type !== 'Null') {
       const subtypeOptions = createParamOptions(enumSubtype.name, defaultValue?.value)
-      return this.createType(enumType as any, {
+      return this.createType<Enum>(enumType, {
         [enumSubtype.name!]: await this.promptForParam(enumSubtype.type, subtypeOptions),
       })
     }
 
-    return this.createType(enumType as any, enumSubtype.name)
+    return this.createType<Enum>(enumType, enumSubtype.name)
   }
 
   // Prompt for param based on "paramType" string (ie. Option<MemeberId>)
@@ -389,7 +433,7 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
   }
 
   // More typesafe version
-  async promptForType(type: keyof InterfaceTypes, options?: ApiParamOptions) {
+  async promptForType(type: keyof InterfaceTypes, options?: ApiParamOptions): Promise<Codec> {
     return await this.promptForParam(type, options)
   }
 
@@ -458,11 +502,7 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
     })
   }
 
-  async sendAndFollowTx(
-    account: KeyringPair,
-    tx: SubmittableExtrinsic<'promise'>,
-    warnOnly = false // If specified - only warning will be displayed in case of failure (instead of error beeing thrown)
-  ): Promise<SubmittableResult | false> {
+  async sendAndFollowTx(account: KeyringPair, tx: SubmittableExtrinsic<'promise'>): Promise<SubmittableResult> {
     // Calculate fee and ask for confirmation
     const fee = await this.getApi().estimateFee(account, tx)
 
@@ -475,10 +515,7 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
       this.log(chalk.green(`Extrinsic successful!`))
       return res
     } catch (e) {
-      if (e instanceof ExtrinsicFailedError && warnOnly) {
-        this.warn(`Extrinsic failed! ${e.message}`)
-        return false
-      } else if (e instanceof ExtrinsicFailedError) {
+      if (e instanceof ExtrinsicFailedError) {
         throw new CLIError(`Extrinsic failed! ${e.message}`, { exit: ExitCodes.ApiError })
       } else {
         throw e
@@ -510,17 +547,16 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
     account: KeyringPair,
     module: Module,
     method: Method,
-    params: Submittable extends (...args: any[]) => any ? Parameters<Submittable> : [],
-    warnOnly = false
-  ): Promise<SubmittableResult | false> {
+    params: Submittable extends (...args: any[]) => any ? Parameters<Submittable> : []
+  ): Promise<SubmittableResult> {
     this.log(
       chalk.magentaBright(
         `\nSending ${module}.${method} extrinsic from ${account.meta.name ? account.meta.name : account.address}...`
       )
     )
-    console.log('Params:', this.humanize(params))
+    this.log('Tx params:', this.humanize(params))
     const tx = await this.getUnaugmentedApi().tx[module][method](...params)
-    return await this.sendAndFollowTx(account, tx, warnOnly)
+    return this.sendAndFollowTx(account, tx)
   }
 
   public findEvent<
@@ -534,15 +570,9 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
   async buildAndSendExtrinsic<
     Module extends keyof AugmentedSubmittables<'promise'>,
     Method extends keyof AugmentedSubmittables<'promise'>[Module] & string
-  >(
-    account: KeyringPair,
-    module: Module,
-    method: Method,
-    paramsOptions?: ApiParamsOptions,
-    warnOnly = false // If specified - only warning will be displayed (instead of error beeing thrown)
-  ): Promise<ApiMethodArg[]> {
+  >(account: KeyringPair, module: Module, method: Method, paramsOptions?: ApiParamsOptions): Promise<ApiMethodArg[]> {
     const params = await this.promptForExtrinsicParams(module, method, paramsOptions)
-    await this.sendAndFollowNamedTx(account, module, method, params as any, warnOnly)
+    await this.sendAndFollowNamedTx(account, module, method, params as any)
 
     return params
   }
@@ -564,7 +594,7 @@ export default abstract class ApiCommandBase extends StateAwareCommandBase {
       const argName = arg.name.toString()
       const argType = arg.type.toString()
       try {
-        parsedArgs.push({ name: argName, value: this.createType(argType as any, draftJSONObj[parseInt(index)]) })
+        parsedArgs.push({ name: argName, value: this.createType(argType, draftJSONObj[parseInt(index)]) })
       } catch (e) {
         throw new CLIError(`Couldn't parse ${argName} value from draft at ${draftFilePath}!`, {
           exit: ExitCodes.InvalidFile,

+ 107 - 43
cli/src/base/ContentDirectoryCommandBase.ts

@@ -6,57 +6,51 @@ import { CLIError } from '@oclif/errors'
 import { RolesCommandBase } from './WorkingGroupsCommandBase'
 import { flags } from '@oclif/command'
 import { memberHandle } from '../helpers/display'
+import { MemberId } from '@joystream/types/common'
+import { createType } from '@joystream/types'
 
-const CONTEXTS = ['Member', 'Curator', 'Lead'] as const
-const OWNER_CONTEXTS = ['Member', 'Curator'] as const
+const CHANNEL_CREATION_CONTEXTS = ['Member', 'Curator'] as const
 const CATEGORIES_CONTEXTS = ['Lead', 'Curator'] as const
+const CHANNEL_MANAGEMENT_CONTEXTS = ['Owner', 'Collaborator'] as const
 
-type Context = typeof CONTEXTS[number]
-type OwnerContext = typeof OWNER_CONTEXTS[number]
+type ChannelManagementContext = typeof CHANNEL_MANAGEMENT_CONTEXTS[number]
+type ChannelCreationContext = typeof CHANNEL_CREATION_CONTEXTS[number]
 type CategoriesContext = typeof CATEGORIES_CONTEXTS[number]
 
 /**
  * Abstract base class for commands related to content directory
  */
 export default abstract class ContentDirectoryCommandBase extends RolesCommandBase {
-  group = WorkingGroups.Curators // override group for RolesCommandBase
-
-  static contextFlag = flags.enum({
-    name: 'context',
+  static channelCreationContextFlag = flags.enum({
     required: false,
-    description: `Actor context to execute the command in (${CONTEXTS.join('/')})`,
-    options: [...CONTEXTS],
+    description: `Actor context to execute the command in (${CHANNEL_CREATION_CONTEXTS.join('/')})`,
+    options: [...CHANNEL_CREATION_CONTEXTS],
   })
 
-  static ownerContextFlag = flags.enum({
-    name: 'ownerContext',
+  static channelManagementContextFlag = flags.enum({
     required: false,
-    description: `Actor context to execute the command in (${OWNER_CONTEXTS.join('/')})`,
-    options: [...OWNER_CONTEXTS],
+    description: `Actor context to execute the command in (${CHANNEL_MANAGEMENT_CONTEXTS.join('/')})`,
+    options: [...CHANNEL_MANAGEMENT_CONTEXTS],
   })
 
   static categoriesContextFlag = flags.enum({
-    name: 'categoriesContext',
     required: false,
     description: `Actor context to execute the command in (${CATEGORIES_CONTEXTS.join('/')})`,
     options: [...CATEGORIES_CONTEXTS],
   })
 
-  async promptForContext(message = 'Choose in which context you wish to execute the command'): Promise<Context> {
-    return this.simplePrompt({
-      message,
-      type: 'list',
-      choices: CONTEXTS.map((c) => ({ name: c, value: c })),
-    })
+  async init(): Promise<void> {
+    await super.init()
+    this.group = WorkingGroups.Curators // override group for RolesCommandBase
   }
 
-  async promptForOwnerContext(
+  async promptForChannelCreationContext(
     message = 'Choose in which context you wish to execute the command'
-  ): Promise<OwnerContext> {
+  ): Promise<ChannelCreationContext> {
     return this.simplePrompt({
       message,
       type: 'list',
-      choices: OWNER_CONTEXTS.map((c) => ({ name: c, value: c })),
+      choices: CHANNEL_CREATION_CONTEXTS.map((c) => ({ name: c, value: c })),
     })
   }
 
@@ -75,30 +69,83 @@ export default abstract class ContentDirectoryCommandBase extends RolesCommandBa
     await this.getRequiredLeadContext()
   }
 
-  async getCurationActorByChannel(channel: Channel): Promise<[ContentActor, string]> {
-    return channel.owner.isOfType('Curators')
-      ? await this.getContentActor('Lead')
-      : await this.getContentActor('Curator')
+  getCurationActorByChannel(channel: Channel): Promise<[ContentActor, string]> {
+    return channel.owner.isOfType('Curators') ? this.getContentActor('Lead') : this.getContentActor('Curator')
   }
 
   async getChannelOwnerActor(channel: Channel): Promise<[ContentActor, string]> {
     if (channel.owner.isOfType('Curators')) {
       try {
-        return await this.getContentActor('Lead')
+        return this.getContentActor('Lead')
       } catch (e) {
-        return await this.getCuratorContext(channel.owner.asType('Curators'))
+        return this.getCuratorContext(channel.owner.asType('Curators'))
       }
     } else {
-      return await this.getContentActor('Member')
+      const [id, membership] = await this.getRequiredMemberContext(false, [channel.owner.asType('Member')])
+      return [
+        createType<ContentActor, 'ContentActor'>('ContentActor', { Member: id }),
+        membership.controller_account.toString(),
+      ]
     }
   }
 
+  async getChannelCollaboratorActor(channel: Channel): Promise<[ContentActor, string]> {
+    const [id, membership] = await this.getRequiredMemberContext(false, Array.from(channel.collaborators))
+    return [
+      createType<ContentActor, 'ContentActor'>('ContentActor', { Collaborator: id }),
+      membership.controller_account.toString(),
+    ]
+  }
+
+  async getChannelManagementActor(
+    channel: Channel,
+    context: ChannelManagementContext
+  ): Promise<[ContentActor, string]> {
+    if (context && context === 'Owner') {
+      return this.getChannelOwnerActor(channel)
+    }
+    if (context && context === 'Collaborator') {
+      return this.getChannelCollaboratorActor(channel)
+    }
+
+    // Context not set - derive
+
+    try {
+      const owner = await this.getChannelOwnerActor(channel)
+      this.log('Derived context: Channel owner')
+      return owner
+    } catch (e) {
+      // continue
+    }
+
+    try {
+      const collaborator = await this.getChannelCollaboratorActor(channel)
+      this.log('Derived context: Channel collaborator')
+      return collaborator
+    } catch (e) {
+      // continue
+    }
+
+    this.error('No account found with access to manage the provided channel', { exit: ExitCodes.AccessDenied })
+  }
+
   async getCategoryManagementActor(): Promise<[ContentActor, string]> {
     try {
-      return await this.getContentActor('Lead')
+      const lead = await this.getContentActor('Lead')
+      this.log('Derived context: Lead')
+      return lead
     } catch (e) {
-      return await this.getContentActor('Curator')
+      // continue
     }
+    try {
+      const curator = await this.getContentActor('Curator')
+      this.log('Derived context: Curator')
+      return curator
+    } catch (e) {
+      // continue
+    }
+
+    this.error('Lead / Curator Group member permissions are required for this action', { exit: ExitCodes.AccessDenied })
   }
 
   async getCuratorContext(requiredGroupId?: CuratorGroupId): Promise<[ContentActor, string]> {
@@ -135,7 +182,7 @@ export default abstract class ContentDirectoryCommandBase extends RolesCommandBa
     }
 
     return [
-      this.createType('ContentActor', { Curator: [groupId, curator.workerId.toNumber()] }),
+      createType<ContentActor, 'ContentActor'>('ContentActor', { Curator: [groupId, curator.workerId.toNumber()] }),
       curator.roleAccount.toString(),
     ]
   }
@@ -229,18 +276,35 @@ export default abstract class ContentDirectoryCommandBase extends RolesCommandBa
     return group
   }
 
-  async getContentActor(context: typeof CONTEXTS[number]): Promise<[ContentActor, string]> {
-    let contentActorContext: [ContentActor, string]
+  async getContentActor(
+    context: Exclude<keyof typeof ContentActor.typeDefinitions, 'Collaborator'>
+  ): Promise<[ContentActor, string]> {
     if (context === 'Member') {
-      const { id, membership } = await this.getRequiredMemberContext()
-      contentActorContext = [this.createType('ContentActor', { Member: id }), membership.controller_account.toString()]
-    } else if (context === 'Curator') {
-      contentActorContext = await this.getCuratorContext()
-    } else {
+      const [id, membership] = await this.getRequiredMemberContext()
+      return [
+        createType<ContentActor, 'ContentActor'>('ContentActor', { Member: id }),
+        membership.controller_account.toString(),
+      ]
+    }
+
+    if (context === 'Curator') {
+      return this.getCuratorContext()
+    }
+
+    if (context === 'Lead') {
       const lead = await this.getRequiredLeadContext()
-      contentActorContext = [this.createType('ContentActor', { Lead: null }), lead.roleAccount.toString()]
+      return [createType<ContentActor, 'ContentActor'>('ContentActor', { Lead: null }), lead.roleAccount.toString()]
     }
 
-    return contentActorContext
+    throw new Error(`Unrecognized context: ${context}`)
+  }
+
+  async validateCollaborators(collaborators: number[] | MemberId[]): Promise<void> {
+    const collaboratorMembers = await this.getApi().getMembers(collaborators)
+    if (collaboratorMembers.length < collaborators.length || collaboratorMembers.some((m) => m.isEmpty)) {
+      this.error(`Invalid collaborator set! All collaborators must be existing members.`, {
+        exit: ExitCodes.InvalidInput,
+      })
+    }
   }
 }

+ 3 - 0
cli/src/base/DefaultCommandBase.ts

@@ -115,6 +115,9 @@ export default abstract class DefaultCommandBase extends Command {
     // called after run and catch regardless of whether or not the command errored
     // We'll force exit here, in case there is no error, to prevent console.log from hanging the process
     if (!err) this.exit(ExitCodes.OK)
+    if (err && process.env.DEBUG === 'true') {
+      console.log(err)
+    }
     super.finally(err)
   }
 

+ 3 - 3
cli/src/base/StateAwareCommandBase.ts

@@ -11,7 +11,7 @@ import { WorkingGroups } from '../Types'
 // Type for the state object (which is preserved as json in the state file)
 type StateObject = {
   apiUri: string
-  queryNodeUri: string
+  queryNodeUri: string | null | undefined
   defaultWorkingGroup: WorkingGroups
   metadataCache: Record<string, any>
 }
@@ -19,7 +19,7 @@ type StateObject = {
 // State object default values
 const DEFAULT_STATE: StateObject = {
   apiUri: '',
-  queryNodeUri: '',
+  queryNodeUri: undefined,
   defaultWorkingGroup: WorkingGroups.StorageProviders,
   metadataCache: {},
 }
@@ -88,7 +88,7 @@ export default abstract class StateAwareCommandBase extends DefaultCommandBase {
 
   private initStateFs(): void {
     if (!fs.existsSync(this.getAppDataPath())) {
-      fs.mkdirSync(this.getAppDataPath())
+      fs.mkdirSync(this.getAppDataPath(), { recursive: true })
     }
     if (!fs.existsSync(this.getStateFilePath())) {
       fs.writeFileSync(this.getStateFilePath(), JSON.stringify(DEFAULT_STATE, null, 4))

+ 199 - 83
cli/src/base/UploadCommandBase.ts

@@ -1,18 +1,33 @@
 import ContentDirectoryCommandBase from './ContentDirectoryCommandBase'
-import { VideoFFProbeMetadata, VideoFileMetadata, AssetType, InputAsset, InputAssetDetails } from '../Types'
-import { ContentId, ContentParameters } from '@joystream/types/storage'
+import {
+  AssetToUpload,
+  ResolvedAsset,
+  StorageNodeInfo,
+  TokenRequest,
+  TokenRequestData,
+  VideoFFProbeMetadata,
+  VideoFileMetadata,
+} from '../Types'
 import { MultiBar, Options, SingleBar } from 'cli-progress'
-import { Assets } from '../json-schemas/typings/Assets.schema'
 import ExitCodes from '../ExitCodes'
-import ipfsHash from 'ipfs-only-hash'
 import fs from 'fs'
 import _ from 'lodash'
-import axios, { AxiosRequestConfig } from 'axios'
+import axios from 'axios'
 import ffprobeInstaller from '@ffprobe-installer/ffprobe'
 import ffmpeg from 'fluent-ffmpeg'
 import path from 'path'
-import chalk from 'chalk'
 import mimeTypes from 'mime-types'
+import { Assets } from '../schemas/typings/Assets.schema'
+import chalk from 'chalk'
+import { DataObjectCreationParameters } from '@joystream/types/storage'
+import { createHash } from 'blake3-wasm'
+import * as multihash from 'multihashes'
+import { u8aToHex, formatBalance } from '@polkadot/util'
+import { KeyringPair } from '@polkadot/keyring/types'
+import FormData from 'form-data'
+import BN from 'bn.js'
+import { createTypeFromConstructor } from '@joystream/types'
+import { StorageAssets } from '@joystream/types/content'
 
 ffmpeg.setFfprobePath(ffprobeInstaller.path)
 
@@ -21,19 +36,19 @@ ffmpeg.setFfprobePath(ffprobeInstaller.path)
  */
 export default abstract class UploadCommandBase extends ContentDirectoryCommandBase {
   private fileSizeCache: Map<string, number> = new Map<string, number>()
+  private maxFileSize: undefined | BN = undefined
   private progressBarOptions: Options = {
+    noTTYOutput: true,
     format: `{barTitle} | {bar} | {value}/{total} KB processed`,
   }
 
+  protected requiresQueryNode = true
+
   getFileSize(path: string): number {
     const cachedSize = this.fileSizeCache.get(path)
     return cachedSize !== undefined ? cachedSize : fs.statSync(path).size
   }
 
-  normalizeEndpoint(endpoint: string) {
-    return endpoint.endsWith('/') ? endpoint : endpoint + '/'
-  }
-
   createReadStreamWithProgressBar(
     filePath: string,
     barTitle: string,
@@ -49,9 +64,13 @@ export default abstract class UploadCommandBase extends ContentDirectoryCommandB
     let processedKB = 0
     const fileSizeKB = Math.ceil(fileSize / 1024)
     const progress = multiBar
-      ? multiBar.create(fileSizeKB, processedKB, { barTitle })
+      ? (multiBar.create(fileSizeKB, processedKB, { barTitle }) as SingleBar | undefined)
       : new SingleBar(this.progressBarOptions)
 
+    if (!progress) {
+      throw new Error('Provided multibar does not support noTTY mode!')
+    }
+
     progress.start(fileSizeKB, processedKB, { barTitle })
     return {
       fileStream: fs
@@ -103,7 +122,7 @@ export default abstract class UploadCommandBase extends ContentDirectoryCommandB
     try {
       ffProbeMetadata = await this.getVideoFFProbeMetadata(filePath)
     } catch (e) {
-      const message = e.message || e
+      const message = e instanceof Error ? e.message : e
       this.warn(`Failed to get video metadata via ffprobe (${message})`)
     }
 
@@ -118,114 +137,183 @@ export default abstract class UploadCommandBase extends ContentDirectoryCommandB
     }
   }
 
-  async calculateFileIpfsHash(filePath: string): Promise<string> {
+  async calculateFileHash(filePath: string): Promise<string> {
     const { fileStream } = this.createReadStreamWithProgressBar(filePath, 'Calculating file hash')
-    const hash: string = await ipfsHash.of(fileStream)
-
-    return hash
+    let blake3Hash: Uint8Array
+    return new Promise<string>((resolve, reject) => {
+      fileStream
+        .pipe(createHash())
+        .on('data', (data) => (blake3Hash = data))
+        .on('end', () => resolve(multihash.toB58String(multihash.encode(blake3Hash, 'blake3'))))
+        .on('error', (err) => reject(err))
+    })
   }
 
-  validateFile(filePath: string): void {
+  async validateFile(filePath: string): Promise<void> {
     // Basic file validation
     if (!fs.existsSync(filePath)) {
       this.error(`${filePath} - file does not exist under provided path!`, { exit: ExitCodes.FileNotFound })
     }
+    if (!this.maxFileSize) {
+      this.maxFileSize = await this.getOriginalApi().consts.storage.maxDataObjectSize
+    }
+    if (this.maxFileSize.ltn(this.getFileSize(filePath))) {
+      this.error(`${filePath} - file is too big. Max file size is ${this.maxFileSize.toString()} bytes`)
+    }
   }
 
-  assetUrl(endpointRoot: string, contentId: ContentId): string {
-    // This will also make sure the resulting url is a valid url
-    return new URL(`asset/v0/${contentId.encode()}`, this.normalizeEndpoint(endpointRoot)).toString()
-  }
-
-  async getRandomProviderEndpoint(): Promise<string | null> {
-    const endpoints = _.shuffle(await this.getApi().allStorageProviderEndpoints())
-    for (const endpoint of endpoints) {
-      try {
-        const url = new URL('swagger.json', this.normalizeEndpoint(endpoint)).toString()
-        await axios.head(url)
-        return endpoint
-      } catch (e) {
-        continue
+  async getRandomActiveStorageNodeInfo(bagId: string, retryTime = 6, retryCount = 5): Promise<StorageNodeInfo | null> {
+    for (let i = 0; i <= retryCount; ++i) {
+      const nodesInfo = _.shuffle(await this.getQNApi().storageNodesInfoByBagId(bagId))
+      for (const info of nodesInfo) {
+        try {
+          await axios.get(info.apiEndpoint + '/version', {
+            headers: {
+              connection: 'close',
+            },
+          })
+          return info
+        } catch (err) {
+          continue
+        }
+      }
+      if (i !== retryCount) {
+        this.log(`No storage provider can serve the request yet, retrying in ${retryTime}s (${i + 1}/${retryCount})...`)
+        await new Promise((resolve) => setTimeout(resolve, retryTime * 1000))
       }
     }
 
     return null
   }
 
-  async generateContentParameters(filePath: string, type: AssetType): Promise<ContentParameters> {
-    return this.createType('ContentParameters', {
-      content_id: ContentId.generate(this.getTypesRegistry()),
-      type_id: type,
+  async generateDataObjectParameters(filePath: string): Promise<DataObjectCreationParameters> {
+    return createTypeFromConstructor(DataObjectCreationParameters, {
       size: this.getFileSize(filePath),
-      ipfs_content_id: await this.calculateFileIpfsHash(filePath),
+      ipfsContentId: await this.calculateFileHash(filePath),
     })
   }
 
-  async prepareInputAssets(paths: string[], basePath?: string): Promise<InputAssetDetails[]> {
-    // Resolve assets
-    if (basePath) {
-      paths = paths.map((p) => basePath && path.resolve(path.dirname(basePath), p))
+  async resolveAndValidateAssets<T extends Record<string, string | null | undefined>>(
+    paths: T,
+    basePath: string
+  ): Promise<[ResolvedAsset[], { [K in keyof T]?: number }]> {
+    const assetIndices: { [K in keyof T]?: number } = {}
+    const resolvedAssets: ResolvedAsset[] = []
+    for (let [assetKey, assetPath] of Object.entries(paths)) {
+      const assetType = assetKey as keyof T
+      if (!assetPath) {
+        assetIndices[assetType] = undefined
+        continue
+      }
+      if (basePath) {
+        assetPath = path.resolve(path.dirname(basePath), assetPath)
+      }
+      await this.validateFile(assetPath)
+      const parameters = await this.generateDataObjectParameters(assetPath)
+      assetIndices[assetType] = resolvedAssets.length
+      resolvedAssets.push({
+        path: assetPath,
+        parameters,
+      })
+    }
+    return [resolvedAssets, assetIndices]
+  }
+
+  async getStorageNodeUploadToken(
+    storageNodeInfo: StorageNodeInfo,
+    account: KeyringPair,
+    memberId: number,
+    objectId: BN,
+    bagId: string
+  ): Promise<string> {
+    const data: TokenRequestData = {
+      storageBucketId: storageNodeInfo.bucketId,
+      accountId: account.address,
+      bagId,
+      memberId,
+      dataObjectId: objectId.toNumber(),
+    }
+    const message = JSON.stringify(data)
+    const signature = u8aToHex(account.sign(message))
+    const postData: TokenRequest = { data, signature }
+    const {
+      data: { token },
+    } = await axios.post(`${storageNodeInfo.apiEndpoint}/authToken`, postData)
+    if (!token) {
+      this.error('Recieved empty token from the storage node!', { exit: ExitCodes.StorageNodeError })
     }
-    // Validate assets
-    paths.forEach((p) => this.validateFile(p))
 
-    // Return data
-    return await Promise.all(
-      paths.map(async (path) => {
-        const parameters = await this.generateContentParameters(path, AssetType.AnyAsset)
-        return {
-          path,
-          contentId: parameters.content_id,
-          parameters,
-        }
-      })
-    )
+    return token
   }
 
-  async uploadAsset(contentId: ContentId, filePath: string, endpoint?: string, multiBar?: MultiBar): Promise<void> {
-    const providerEndpoint = endpoint || (await this.getRandomProviderEndpoint())
-    if (!providerEndpoint) {
-      this.error('No active provider found!', { exit: ExitCodes.ActionCurrentlyUnavailable })
+  async uploadAsset(
+    account: KeyringPair,
+    memberId: number,
+    objectId: BN,
+    bagId: string,
+    filePath: string,
+    storageNode?: StorageNodeInfo,
+    multiBar?: MultiBar
+  ): Promise<void> {
+    const storageNodeInfo = storageNode || (await this.getRandomActiveStorageNodeInfo(bagId))
+    if (!storageNodeInfo) {
+      this.error('No active storage node found!', { exit: ExitCodes.ActionCurrentlyUnavailable })
     }
-    const uploadUrl = this.assetUrl(providerEndpoint, contentId)
-    const fileSize = this.getFileSize(filePath)
+    this.log(`Chosen storage node endpoint: ${storageNodeInfo.apiEndpoint}`)
     const { fileStream, progressBar } = this.createReadStreamWithProgressBar(
       filePath,
-      `Uploading ${contentId.encode()}`,
+      `Uploading ${filePath}`,
       multiBar
     )
     fileStream.on('end', () => {
       // Temporarly disable because with Promise.all it breaks the UI
       // cli.action.start('Waiting for the file to be processed...')
     })
-
+    const formData = new FormData()
+    formData.append('dataObjectId', objectId.toString())
+    formData.append('storageBucketId', storageNodeInfo.bucketId)
+    formData.append('bagId', bagId)
+    formData.append('file', fileStream, {
+      filename: path.basename(filePath),
+      filepath: filePath,
+      knownLength: this.getFileSize(filePath),
+    })
+    this.log(`Uploading object ${objectId.toString()} (${filePath})`)
     try {
-      const config: AxiosRequestConfig = {
+      await axios.post(`${storageNodeInfo.apiEndpoint}/files`, formData, {
+        maxBodyLength: Infinity,
+        maxContentLength: Infinity,
         headers: {
-          'Content-Type': '', // https://github.com/Joystream/storage-node-joystream/issues/16
-          'Content-Length': fileSize.toString(),
+          'content-type': 'multipart/form-data',
+          ...formData.getHeaders(),
         },
-        maxBodyLength: fileSize,
-      }
-      await axios.put(uploadUrl, fileStream, config)
+      })
     } catch (e) {
       progressBar.stop()
-      const msg = (e.response && e.response.data && e.response.data.message) || e.message || e
-      this.error(`Unexpected error when trying to upload a file: ${msg}`, {
-        exit: ExitCodes.ExternalInfrastructureError,
-      })
+      if (axios.isAxiosError(e)) {
+        const msg = e.response && e.response.data ? JSON.stringify(e.response.data) : e.message
+        this.error(`Unexpected error when trying to upload a file: ${msg}`, {
+          exit: ExitCodes.StorageNodeError,
+        })
+      } else {
+        throw e
+      }
     }
   }
 
   async uploadAssets(
-    assets: InputAsset[],
+    account: KeyringPair,
+    memberId: number,
+    bagId: string,
+    assets: AssetToUpload[],
     inputFilePath: string,
     outputFilePostfix = '__rejectedContent'
   ): Promise<void> {
-    const endpoint = await this.getRandomProviderEndpoint()
-    if (!endpoint) {
+    const storageNodeInfo = await this.getRandomActiveStorageNodeInfo(bagId)
+    if (!storageNodeInfo) {
       this.warn('No storage provider is currently available!')
       this.handleRejectedUploads(
+        bagId,
         assets,
         assets.map(() => false),
         inputFilePath,
@@ -234,39 +322,67 @@ export default abstract class UploadCommandBase extends ContentDirectoryCommandB
       this.exit(ExitCodes.ActionCurrentlyUnavailable)
     }
     const multiBar = new MultiBar(this.progressBarOptions)
+    const errors: [string, string][] = []
     // Workaround replacement for Promise.allSettled (which is only available in ES2020)
     const results = await Promise.all(
       assets.map(async (a) => {
         try {
-          await this.uploadAsset(a.contentId, a.path, endpoint, multiBar)
+          await this.uploadAsset(account, memberId, a.dataObjectId, bagId, a.path, storageNodeInfo, multiBar)
           return true
         } catch (e) {
+          errors.push([a.dataObjectId.toString(), e instanceof Error ? e.message : 'Unknown error'])
           return false
         }
       })
     )
-    this.handleRejectedUploads(assets, results, inputFilePath, outputFilePostfix)
+    errors.forEach(([objectId, message]) => this.warn(`Upload of object ${objectId} failed: ${message}`))
+    this.handleRejectedUploads(bagId, assets, results, inputFilePath, outputFilePostfix)
     multiBar.stop()
   }
 
-  public assetsIndexes(originalPaths: (string | undefined)[], filteredPaths: string[]): (number | undefined)[] {
-    let lastIndex = -1
-    return originalPaths.map((path) => (filteredPaths.includes(path as string) ? ++lastIndex : undefined))
+  async prepareAssetsForExtrinsic(resolvedAssets: ResolvedAsset[]): Promise<StorageAssets | undefined> {
+    const feePerMB = await this.getOriginalApi().query.storage.dataObjectPerMegabyteFee()
+    const { dataObjectDeletionPrize } = this.getOriginalApi().consts.storage
+    if (resolvedAssets.length) {
+      const totalBytes = resolvedAssets
+        .reduce((a, b) => {
+          return a.add(b.parameters.getField('size'))
+        }, new BN(0))
+        .toNumber()
+      const totalStorageFee = feePerMB.muln(Math.ceil(totalBytes / 1024 / 1024))
+      const totalDeletionPrize = dataObjectDeletionPrize.muln(resolvedAssets.length)
+      await this.requireConfirmation(
+        `Some additional costs will be associated with this operation:\n` +
+          `Total data storage fee: ${chalk.cyan(formatBalance(totalStorageFee))}\n` +
+          `Total deletion prize: ${chalk.cyan(
+            formatBalance(totalDeletionPrize)
+          )} (recoverable on data object(s) removal)\n` +
+          `Are you sure you want to continue?`
+      )
+      return createTypeFromConstructor(StorageAssets, {
+        expected_data_size_fee: feePerMB,
+        object_creation_list: resolvedAssets.map((a) => a.parameters),
+      })
+    }
+
+    return undefined
   }
 
   private handleRejectedUploads(
-    assets: InputAsset[],
+    bagId: string,
+    assets: AssetToUpload[],
     results: boolean[],
     inputFilePath: string,
     outputFilePostfix: string
   ): void {
     // Try to save rejected contentIds and paths for reupload purposes
-    const rejectedAssetsOutput: Assets = []
+    const rejectedAssetsOutput: Assets = { bagId, assets: [] }
     results.forEach(
       (r, i) =>
-        r === false && rejectedAssetsOutput.push({ contentId: assets[i].contentId.encode(), path: assets[i].path })
+        r === false &&
+        rejectedAssetsOutput.assets.push({ objectId: assets[i].dataObjectId.toString(), path: assets[i].path })
     )
-    if (rejectedAssetsOutput.length) {
+    if (rejectedAssetsOutput.assets.length) {
       this.warn(
         `Some assets were not uploaded successfully. Try reuploading them with ${chalk.magentaBright(
           'content:reuploadAssets'

+ 6 - 15
cli/src/base/WorkingGroupsCommandBase.ts

@@ -4,18 +4,16 @@ import { flags } from '@oclif/command'
 import { WorkingGroups, AvailableGroups, GroupMember, OpeningDetails, ApplicationDetails } from '../Types'
 import _ from 'lodash'
 import chalk from 'chalk'
-import { IConfig } from '@oclif/config'
 import { memberHandle } from '../helpers/display'
 
 /**
  * Abstract base class for commands that need to use gates based on user's roles
  */
 export abstract class RolesCommandBase extends AccountsCommandBase {
-  group: WorkingGroups
+  group!: WorkingGroups
 
-  constructor(argv: string[], config: IConfig) {
-    super(argv, config)
-    // Can be modified by child class constructor
+  async init(): Promise<void> {
+    await super.init()
     this.group = this.getPreservedState().defaultWorkingGroup
   }
 
@@ -70,13 +68,6 @@ export abstract class RolesCommandBase extends AccountsCommandBase {
  * Abstract base class for commands directly related to working groups
  */
 export default abstract class WorkingGroupsCommandBase extends RolesCommandBase {
-  group: WorkingGroups
-
-  constructor(argv: string[], config: IConfig) {
-    super(argv, config)
-    this.group = this.getPreservedState().defaultWorkingGroup
-  }
-
   static flags = {
     group: flags.enum({
       char: 'g',
@@ -125,7 +116,7 @@ export default abstract class WorkingGroupsCommandBase extends RolesCommandBase
     return application
   }
 
-  async getWorkerForLeadAction(id: number, requireStakeProfile = false) {
+  async getWorkerForLeadAction(id: number, requireStakeProfile = false): Promise<GroupMember> {
     const groupMember = await this.getApi().groupMember(this.group, id)
     const groupLead = await this.getApi().groupLead(this.group)
 
@@ -142,11 +133,11 @@ export default abstract class WorkingGroupsCommandBase extends RolesCommandBase
 
   // Helper for better TS handling.
   // We could also use some magic with conditional types instead, but those don't seem be very well supported yet.
-  async getWorkerWithStakeForLeadAction(id: number) {
+  async getWorkerWithStakeForLeadAction(id: number): Promise<GroupMember & Required<Pick<GroupMember, 'stake'>>> {
     return (await this.getWorkerForLeadAction(id, true)) as GroupMember & Required<Pick<GroupMember, 'stake'>>
   }
 
-  async init() {
+  async init(): Promise<void> {
     await super.init()
     const { flags } = this.parse(this.constructor as typeof WorkingGroupsCommandBase)
     if (flags.group) {

+ 1 - 1
cli/src/commands/account/create.ts

@@ -17,7 +17,7 @@ export default class AccountCreate extends AccountsCommandBase {
     }),
   }
 
-  async run() {
+  async run(): Promise<void> {
     const { name, type } = this.parse(AccountCreate).flags
     await this.createAccount(name, undefined, undefined, type)
   }

+ 9 - 5
cli/src/commands/account/export.ts

@@ -48,7 +48,7 @@ export default class AccountExport extends AccountsCommandBase {
     return destFilePath
   }
 
-  async run() {
+  async run(): Promise<void> {
     const { destPath } = this.parse(AccountExport).args as AccountExportArgs
     let { name, all } = this.parse(AccountExport).flags
     const accounts = this.fetchAccounts()
@@ -56,20 +56,24 @@ export default class AccountExport extends AccountsCommandBase {
     if (all) {
       const exportPath: string = path.join(destPath, AccountExport.MULTI_EXPORT_FOLDER_NAME)
       try {
-        if (!fs.existsSync(exportPath)) fs.mkdirSync(exportPath)
+        if (!fs.existsSync(exportPath)) {
+          fs.mkdirSync(exportPath, { recursive: true })
+        }
       } catch (e) {
         this.error(`Failed to create the export folder (${exportPath})`, { exit: ExitCodes.FsOperationFailed })
       }
       for (const acc of accounts) {
         this.exportAccount(acc.meta.name, exportPath)
       }
-      this.log(chalk.greenBright(`All accounts succesfully exported to: ${chalk.magentaBright(exportPath)}!`))
+      this.log(chalk.greenBright(`All accounts successfully exported to: ${chalk.magentaBright(exportPath)}!`))
     } else {
       if (!name) {
-        name = await this.promptForAccount()
+        const key = await this.promptForAccount('Select an account to export', false, false)
+        const { meta } = this.getPair(key)
+        name = meta.name
       }
       const exportedFilePath: string = this.exportAccount(name, destPath)
-      this.log(chalk.greenBright(`Account succesfully exported to: ${chalk.magentaBright(exportedFilePath)}`))
+      this.log(chalk.greenBright(`Account successfully exported to: ${chalk.magentaBright(exportedFilePath)}`))
     }
   }
 }

+ 1 - 1
cli/src/commands/account/forget.ts

@@ -6,7 +6,7 @@ import AccountsCommandBase from '../../base/AccountsCommandBase'
 export default class AccountForget extends AccountsCommandBase {
   static description = 'Forget (remove) account from the list of available accounts'
 
-  async run() {
+  async run(): Promise<void> {
     const selecteKey = await this.promptForAccount('Select an account to forget', false, false)
     await this.requireConfirmation('Are you sure you want to PERMANENTLY FORGET this account?')
 

+ 7 - 3
cli/src/commands/account/import.ts

@@ -37,10 +37,14 @@ export default class AccountImport extends AccountsCommandBase {
       options: ['sr25519', 'ed25519'],
       exclusive: ['backupFilePath'],
     }),
+    password: flags.string({
+      required: false,
+      description: `Account password`,
+    }),
   }
 
-  async run() {
-    const { name, mnemonic, seed, backupFilePath, suri, type } = this.parse(AccountImport).flags
+  async run(): Promise<void> {
+    const { name, mnemonic, seed, backupFilePath, suri, type, password } = this.parse(AccountImport).flags
 
     const keyring = new Keyring(KEYRING_OPTIONS)
 
@@ -58,6 +62,6 @@ export default class AccountImport extends AccountsCommandBase {
       return
     }
 
-    await this.createAccount(name, keyring.getPairs()[0])
+    await this.createAccount(name, keyring.getPairs()[0], password)
   }
 }

+ 1 - 1
cli/src/commands/account/info.ts

@@ -13,7 +13,7 @@ export default class AccountInfo extends AccountsCommandBase {
     { name: 'address', required: false, description: 'An address to inspect (can also be provided interavtively)' },
   ]
 
-  async run() {
+  async run(): Promise<void> {
     let { address } = this.parse(AccountInfo).args
 
     if (!address) {

+ 14 - 10
cli/src/commands/account/list.ts

@@ -5,18 +5,22 @@ import { formatBalance } from '@polkadot/util'
 export default class AccountList extends AccountsCommandBase {
   static description = 'List all available accounts'
 
-  async run() {
+  async run(): Promise<void> {
     const pairs = this.getPairs()
     const balances = await this.getApi().getAccountsBalancesInfo(pairs.map((p) => p.address))
 
-    displayTable(
-      pairs.map((p, i) => ({
-        'Name': p.meta.name,
-        'Address': p.address,
-        'Available balance': formatBalance(balances[i].availableBalance),
-        'Total balance': formatBalance(balances[i].votingBalance),
-      })),
-      3
-    )
+    if (pairs.length) {
+      displayTable(
+        pairs.map((p, i) => ({
+          'Name': p.meta.name,
+          'Address': p.address,
+          'Available balance': formatBalance(balances[i].availableBalance),
+          'Total balance': formatBalance(balances[i].votingBalance),
+        })),
+        3
+      )
+    } else {
+      this.log('No accounts available!')
+    }
   }
 }

+ 1 - 1
cli/src/commands/account/transferTokens.ts

@@ -22,7 +22,7 @@ export default class AccountTransferTokens extends AccountsCommandBase {
     }),
   }
 
-  async run() {
+  async run(): Promise<void> {
     let { from, to, amount } = this.parse(AccountTransferTokens).flags
 
     if (!isValidBalance(amount)) {

+ 3 - 3
cli/src/commands/api/getQueryNodeEndpoint.ts

@@ -4,8 +4,8 @@ import chalk from 'chalk'
 export default class ApiGetQueryNodeEndpoint extends StateAwareCommandBase {
   static description = 'Get current query node endpoint'
 
-  async run() {
-    const currentEndpoint: string = this.getPreservedState().queryNodeUri
-    this.log(chalk.green(currentEndpoint))
+  async run(): Promise<void> {
+    const currentEndpoint: string | null | undefined = this.getPreservedState().queryNodeUri
+    this.log(chalk.green(JSON.stringify(currentEndpoint)))
   }
 }

+ 2 - 2
cli/src/commands/api/inspect.ts

@@ -2,11 +2,11 @@ import { flags } from '@oclif/command'
 import { CLIError } from '@oclif/errors'
 import { displayNameValueTable } from '../../helpers/display'
 import { Codec } from '@polkadot/types/types'
+import { ConstantCodec } from '@polkadot/types/metadata/decorate/types'
 import ExitCodes from '../../ExitCodes'
 import chalk from 'chalk'
 import { NameValueObj, ApiMethodArg, UnaugmentedApiPromise } from '../../Types'
 import ApiCommandBase from '../../base/ApiCommandBase'
-import { AugmentedConst } from '@polkadot/api/types'
 
 // Command flags type
 type ApiInspectFlags = {
@@ -80,7 +80,7 @@ export default class ApiInspect extends ApiCommandBase {
       return this.getUnaugmentedApi().query[apiModule][apiMethod].creator.meta
     } else {
       // Currently the only other optoin is api.consts
-      const method = (this.getUnaugmentedApi().consts[apiModule][apiMethod] as unknown) as AugmentedConst<'promise'>
+      const method = this.getUnaugmentedApi().consts[apiModule][apiMethod] as ConstantCodec
       return method.meta
     }
   }

+ 7 - 10
cli/src/commands/api/setQueryNodeEndpoint.ts

@@ -5,6 +5,8 @@ import ExitCodes from '../../ExitCodes'
 type ApiSetQueryNodeEndpointArgs = { endpoint: string }
 
 export default class ApiSetQueryNodeEndpoint extends ApiCommandBase {
+  protected requiresApiConnection = false
+
   static description = 'Set query node endpoint'
   static args = [
     {
@@ -14,24 +16,19 @@ export default class ApiSetQueryNodeEndpoint extends ApiCommandBase {
     },
   ]
 
-  async init() {
-    await super.init()
-  }
-
-  async run() {
+  async run(): Promise<void> {
     const { endpoint }: ApiSetQueryNodeEndpointArgs = this.parse(ApiSetQueryNodeEndpoint)
       .args as ApiSetQueryNodeEndpointArgs
-    let newEndpoint = ''
+    let newEndpoint: string | null = null
     if (endpoint) {
-      if (this.isQueryNodeUriValid(endpoint)) {
-        await this.setPreservedState({ queryNodeUri: endpoint })
-        newEndpoint = endpoint
-      } else {
+      if (!this.isQueryNodeUriValid(endpoint)) {
         this.error('Provided endpoint seems to be incorrect!', { exit: ExitCodes.InvalidInput })
       }
+      newEndpoint = endpoint
     } else {
       newEndpoint = await this.promptForQueryNodeUri()
     }
+    await this.setPreservedState({ queryNodeUri: newEndpoint })
     this.log(
       chalk.greenBright('Query node endpoint successfuly changed! New endpoint: ') + chalk.magentaBright(newEndpoint)
     )

+ 3 - 6
cli/src/commands/api/setUri.ts

@@ -5,6 +5,8 @@ import ExitCodes from '../../ExitCodes'
 type ApiSetUriArgs = { uri: string }
 
 export default class ApiSetUri extends ApiCommandBase {
+  protected requiresApiConnection = false
+
   static description = 'Set api WS provider uri'
   static args = [
     {
@@ -14,12 +16,7 @@ export default class ApiSetUri extends ApiCommandBase {
     },
   ]
 
-  async init() {
-    // Pass "skipConnection" arg to prevent command from exiting if current api uri is invalid
-    await super.init(true)
-  }
-
-  async run() {
+  async run(): Promise<void> {
     const args: ApiSetUriArgs = this.parse(ApiSetUri).args as ApiSetUriArgs
     let newUri = ''
     if (args.uri) {

+ 5 - 7
cli/src/commands/content/addCuratorToGroup.ts

@@ -16,7 +16,7 @@ export default class AddCuratorToGroupCommand extends ContentDirectoryCommandBas
     },
   ]
 
-  async run() {
+  async run(): Promise<void> {
     const lead = await this.getRequiredLeadContext()
 
     let { groupId, curatorId } = this.parse(AddCuratorToGroupCommand).args
@@ -33,12 +33,10 @@ export default class AddCuratorToGroupCommand extends ContentDirectoryCommandBas
       await this.getCurator(curatorId)
     }
 
-    await this.sendAndFollowNamedTx(
-      await this.getDecodedPair(lead.roleAccount.toString()),
-      'content',
-      'addCuratorToGroup',
-      [groupId, curatorId]
-    )
+    await this.sendAndFollowNamedTx(await this.getDecodedPair(lead.roleAccount), 'content', 'addCuratorToGroup', [
+      groupId,
+      curatorId,
+    ])
 
     console.log(
       chalk.green(

+ 7 - 13
cli/src/commands/content/channel.ts

@@ -11,7 +11,7 @@ export default class ChannelCommand extends ContentDirectoryCommandBase {
     },
   ]
 
-  async run() {
+  async run(): Promise<void> {
     const { channelId } = this.parse(ChannelCommand).args
     const channel = await this.getApi().channelById(channelId)
     if (channel) {
@@ -19,24 +19,18 @@ export default class ChannelCommand extends ContentDirectoryCommandBase {
         'ID': channelId.toString(),
         'Owner': JSON.stringify(channel.owner.toJSON()),
         'IsCensored': channel.is_censored.toString(),
-        'RewardAccount': channel.reward_account ? channel.reward_account.toString() : 'NONE',
+        'RewardAccount': channel.reward_account.unwrapOr('NONE').toString(),
       })
 
       displayHeader(`Media`)
-
       displayCollapsedRow({
-        'NumberOfVideos': channel.videos.length,
-        'NumberOfPlaylists': channel.playlists.length,
-        'NumberOfSeries': channel.series.length,
+        'NumberOfVideos': channel.num_videos.toNumber(),
       })
 
-      displayHeader(`MediaData`)
-
-      displayCollapsedRow({
-        'Videos': JSON.stringify(channel.videos.toJSON()),
-        'Playlists': JSON.stringify(channel.playlists.toJSON()),
-        'Series': JSON.stringify(channel.series.toJSON()),
-      })
+      displayHeader(`Collaborators`)
+      const collaboratorIds = Array.from(channel.collaborators)
+      const collaborators = await this.getApi().getMembers(collaboratorIds)
+      this.log(collaborators.map((c, i) => `${collaboratorIds[i].toString()} (${c.handle.toString()})`).join(', '))
     } else {
       this.error(`Channel not found by channel id: "${channelId}"!`)
     }

+ 4 - 3
cli/src/commands/content/channels.ts

@@ -1,11 +1,11 @@
 import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
 // import chalk from 'chalk'
-import { displayTable } from '../../helpers/display'
+import { displayTable, shortAddress } from '../../helpers/display'
 
 export default class ChannelsCommand extends ContentDirectoryCommandBase {
   static description = 'List existing content directory channels.'
 
-  async run() {
+  async run(): Promise<void> {
     const channels = await this.getApi().availableChannels()
 
     if (channels.length > 0) {
@@ -14,7 +14,8 @@ export default class ChannelsCommand extends ContentDirectoryCommandBase {
           'ID': id.toString(),
           'Owner': JSON.stringify(c.owner.toJSON()),
           'IsCensored': c.is_censored.toString(),
-          'RewardAccount': c.reward_account ? c.reward_account.toString() : 'NONE',
+          'RewardAccount': c.reward_account ? shortAddress(c.reward_account.toString()) : 'NONE',
+          'Collaborators': c.collaborators.size,
         })),
         3
       )

+ 45 - 24
cli/src/commands/content/createChannel.ts

@@ -2,9 +2,9 @@ import { getInputJson } from '../../helpers/InputOutput'
 import { ChannelInputParameters } from '../../Types'
 import { asValidatedMetadata, metadataToBytes } from '../../helpers/serialization'
 import { flags } from '@oclif/command'
-import { CreateInterface } from '@joystream/types'
+import { createType } from '@joystream/types'
 import { ChannelCreationParameters } from '@joystream/types/content'
-import { ChannelInputSchema } from '../../json-schemas/ContentDirectory'
+import { ChannelInputSchema } from '../../schemas/ContentDirectory'
 import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
 import UploadCommandBase from '../../base/UploadCommandBase'
 import chalk from 'chalk'
@@ -13,7 +13,7 @@ import { ChannelMetadata } from '@joystream/metadata-protobuf'
 export default class CreateChannelCommand extends UploadCommandBase {
   static description = 'Create channel inside content directory.'
   static flags = {
-    context: ContentDirectoryCommandBase.ownerContextFlag,
+    context: ContentDirectoryCommandBase.channelCreationContextFlag,
     input: flags.string({
       char: 'i',
       required: true,
@@ -21,46 +21,67 @@ export default class CreateChannelCommand extends UploadCommandBase {
     }),
   }
 
-  async run() {
+  async run(): Promise<void> {
     let { context, input } = this.parse(CreateChannelCommand).flags
 
     // Context
     if (!context) {
-      context = await this.promptForOwnerContext()
+      context = await this.promptForChannelCreationContext()
     }
     const [actor, address] = await this.getContentActor(context)
+    const [memberId] = await this.getRequiredMemberContext(true)
+    const keypair = await this.getDecodedPair(address)
 
     const channelInput = await getInputJson<ChannelInputParameters>(input, ChannelInputSchema)
     const meta = asValidatedMetadata(ChannelMetadata, channelInput)
 
+    if (channelInput.collaborators) {
+      await this.validateCollaborators(channelInput.collaborators)
+    }
+
     const { coverPhotoPath, avatarPhotoPath } = channelInput
-    const assetsPaths = [coverPhotoPath, avatarPhotoPath].filter((v) => v !== undefined) as string[]
-    const inputAssets = await this.prepareInputAssets(assetsPaths, input)
-    const assets = inputAssets.map(({ parameters }) => ({ Upload: parameters }))
-    // Set assets indexes in the metadata
-    const [coverPhotoIndex, avatarPhotoIndex] = this.assetsIndexes([coverPhotoPath, avatarPhotoPath], assetsPaths)
-    meta.coverPhoto = coverPhotoIndex
-    meta.avatarPhoto = avatarPhotoIndex
+    const [resolvedAssets, assetIndices] = await this.resolveAndValidateAssets(
+      { coverPhotoPath, avatarPhotoPath },
+      input
+    )
+    meta.coverPhoto = assetIndices.coverPhotoPath
+    meta.avatarPhoto = assetIndices.avatarPhotoPath
 
-    const channelCreationParameters: CreateInterface<ChannelCreationParameters> = {
-      assets,
-      meta: metadataToBytes(ChannelMetadata, meta),
-      reward_account: channelInput.rewardAccount,
-    }
+    // Prepare and send the extrinsic
+    const assets = await this.prepareAssetsForExtrinsic(resolvedAssets)
+    const channelCreationParameters = createType<ChannelCreationParameters, 'ChannelCreationParameters'>(
+      'ChannelCreationParameters',
+      {
+        assets,
+        meta: metadataToBytes(ChannelMetadata, meta),
+        collaborators: channelInput.collaborators,
+        reward_account: channelInput.rewardAccount,
+      }
+    )
 
-    this.jsonPrettyPrint(JSON.stringify({ assets, metadata: meta }))
+    this.jsonPrettyPrint(JSON.stringify({ assets: assets?.toJSON(), metadata: meta }))
 
     await this.requireConfirmation('Do you confirm the provided input?', true)
 
-    const result = await this.sendAndFollowNamedTx(await this.getDecodedPair(address), 'content', 'createChannel', [
+    const result = await this.sendAndFollowNamedTx(keypair, 'content', 'createChannel', [
       actor,
       channelCreationParameters,
     ])
-    if (result) {
-      const event = this.findEvent(result, 'content', 'ChannelCreated')
-      this.log(chalk.green(`Channel with id ${chalk.cyanBright(event?.data[1].toString())} successfully created!`))
-    }
 
-    await this.uploadAssets(inputAssets, input)
+    const channelCreatedEvent = this.findEvent(result, 'content', 'ChannelCreated')
+    const channelId = channelCreatedEvent!.data[1]
+    this.log(chalk.green(`Channel with id ${chalk.cyanBright(channelId.toString())} successfully created!`))
+
+    const dataObjectsUploadedEvent = this.findEvent(result, 'storage', 'DataObjectsUploaded')
+    if (dataObjectsUploadedEvent) {
+      const [objectIds] = dataObjectsUploadedEvent.data
+      await this.uploadAssets(
+        keypair,
+        memberId.toNumber(),
+        `dynamic:channel:${channelId.toString()}`,
+        objectIds.map((id, index) => ({ dataObjectId: id, path: resolvedAssets[index].path })),
+        input
+      )
+    }
   }
 }

+ 2 - 2
cli/src/commands/content/createChannelCategory.ts

@@ -5,7 +5,7 @@ import { asValidatedMetadata, metadataToBytes } from '../../helpers/serializatio
 import { flags } from '@oclif/command'
 import { CreateInterface } from '@joystream/types'
 import { ChannelCategoryCreationParameters } from '@joystream/types/content'
-import { ChannelCategoryInputSchema } from '../../json-schemas/ContentDirectory'
+import { ChannelCategoryInputSchema } from '../../schemas/ContentDirectory'
 import chalk from 'chalk'
 import { ChannelCategoryMetadata } from '@joystream/metadata-protobuf'
 
@@ -20,7 +20,7 @@ export default class CreateChannelCategoryCommand extends ContentDirectoryComman
     }),
   }
 
-  async run() {
+  async run(): Promise<void> {
     const { context, input } = this.parse(CreateChannelCategoryCommand).flags
 
     const [actor, address] = context ? await this.getContentActor(context) : await this.getCategoryManagementActor()

+ 3 - 7
cli/src/commands/content/createCuratorGroup.ts

@@ -5,16 +5,12 @@ export default class CreateCuratorGroupCommand extends ContentDirectoryCommandBa
   static description = 'Create new Curator Group.'
   static aliases = ['createCuratorGroup']
 
-  async run() {
+  async run(): Promise<void> {
     const lead = await this.getRequiredLeadContext()
 
-    await this.buildAndSendExtrinsic(
-      await this.getDecodedPair(lead.roleAccount.toString()),
-      'content',
-      'createCuratorGroup'
-    )
+    await this.buildAndSendExtrinsic(await this.getDecodedPair(lead.roleAccount), 'content', 'createCuratorGroup')
 
     // TODO: Get id from event?
-    console.log(chalk.green(`New group succesfully created!`))
+    console.log(chalk.green(`New group successfully created!`))
   }
 }

+ 48 - 38
cli/src/commands/content/createVideo.ts

@@ -2,12 +2,13 @@ import UploadCommandBase from '../../base/UploadCommandBase'
 import { getInputJson } from '../../helpers/InputOutput'
 import { asValidatedMetadata, metadataToBytes } from '../../helpers/serialization'
 import { VideoInputParameters, VideoFileMetadata } from '../../Types'
-import { CreateInterface } from '@joystream/types'
+import { createTypeFromConstructor } from '@joystream/types'
 import { flags } from '@oclif/command'
 import { VideoCreationParameters } from '@joystream/types/content'
 import { IVideoMetadata, VideoMetadata } from '@joystream/metadata-protobuf'
-import { integrateMeta } from '@joystream/metadata-protobuf/utils'
+import { VideoInputSchema } from '../../schemas/ContentDirectory'
 import chalk from 'chalk'
+import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
 
 export default class CreateVideoCommand extends UploadCommandBase {
   static description = 'Create video under specific channel inside content directory.'
@@ -22,73 +23,82 @@ export default class CreateVideoCommand extends UploadCommandBase {
       required: true,
       description: 'ID of the Channel',
     }),
+    context: ContentDirectoryCommandBase.channelManagementContextFlag,
   }
 
-  setVideoMetadataDefaults(metadata: IVideoMetadata, videoFileMetadata: VideoFileMetadata): void {
-    const videoMetaToIntegrate = {
+  setVideoMetadataDefaults(metadata: IVideoMetadata, videoFileMetadata: VideoFileMetadata): IVideoMetadata {
+    return {
       duration: videoFileMetadata.duration,
       mediaPixelWidth: videoFileMetadata.width,
       mediaPixelHeight: videoFileMetadata.height,
+      mediaType: {
+        codecName: videoFileMetadata.codecName,
+        container: videoFileMetadata.container,
+        mimeMediaType: videoFileMetadata.mimeType,
+      },
+      ...metadata,
     }
-    const mediaTypeMetaToIntegrate = {
-      codecName: videoFileMetadata.codecName,
-      container: videoFileMetadata.container,
-      mimeMediaType: videoFileMetadata.mimeType,
-    }
-    integrateMeta(metadata, videoMetaToIntegrate, ['duration', 'mediaPixelWidth', 'mediaPixelHeight'])
-    integrateMeta(metadata.mediaType || {}, mediaTypeMetaToIntegrate, ['codecName', 'container', 'mimeMediaType'])
   }
 
-  async run() {
-    const { input, channelId } = this.parse(CreateVideoCommand).flags
+  async run(): Promise<void> {
+    const { input, channelId, context } = this.parse(CreateVideoCommand).flags
 
     // Get context
     const channel = await this.getApi().channelById(channelId)
-    const [actor, address] = await this.getChannelOwnerActor(channel)
+    const [actor, address] = await this.getChannelManagementActor(channel, context)
+    const [memberId] = await this.getRequiredMemberContext(true)
+    const keypair = await this.getDecodedPair(address)
 
     // Get input from file
-    const videoCreationParametersInput = await getInputJson<VideoInputParameters>(input)
-    const meta = asValidatedMetadata(VideoMetadata, videoCreationParametersInput)
+    const videoCreationParametersInput = await getInputJson<VideoInputParameters>(input, VideoInputSchema)
+    let meta = asValidatedMetadata(VideoMetadata, videoCreationParametersInput)
 
     // Assets
     const { videoPath, thumbnailPhotoPath } = videoCreationParametersInput
-    const assetsPaths = [videoPath, thumbnailPhotoPath].filter((a) => a !== undefined) as string[]
-    const inputAssets = await this.prepareInputAssets(assetsPaths, input)
-    const assets = inputAssets.map(({ parameters }) => ({ Upload: parameters }))
-    // Set assets indexes in the metadata
-    const [videoIndex, thumbnailPhotoIndex] = this.assetsIndexes([videoPath, thumbnailPhotoPath], assetsPaths)
-    meta.video = videoIndex
-    meta.thumbnailPhoto = thumbnailPhotoIndex
+    const [resolvedAssets, assetIndices] = await this.resolveAndValidateAssets({ videoPath, thumbnailPhotoPath }, input)
+    // Set assets indices in the metadata
+    meta.video = assetIndices.videoPath
+    meta.thumbnailPhoto = assetIndices.thumbnailPhotoPath
 
     // Try to get video file metadata
-    if (videoIndex !== undefined) {
-      const videoFileMetadata = await this.getVideoFileMetadata(inputAssets[videoIndex].path)
+    if (assetIndices.videoPath !== undefined) {
+      const videoFileMetadata = await this.getVideoFileMetadata(resolvedAssets[assetIndices.videoPath].path)
       this.log('Video media file parameters established:', videoFileMetadata)
-      this.setVideoMetadataDefaults(meta, videoFileMetadata)
+      meta = this.setVideoMetadataDefaults(meta, videoFileMetadata)
     }
 
-    // Create final extrinsic params and send the extrinsic
-    const videoCreationParameters: CreateInterface<VideoCreationParameters> = {
+    // Prepare and send the extrinsic
+    const assets = await this.prepareAssetsForExtrinsic(resolvedAssets)
+    const videoCreationParameters = createTypeFromConstructor(VideoCreationParameters, {
       assets,
       meta: metadataToBytes(VideoMetadata, meta),
-    }
+    })
 
-    this.jsonPrettyPrint(JSON.stringify({ assets, metadata: meta }))
+    this.jsonPrettyPrint(JSON.stringify({ assets: assets?.toJSON(), metadata: meta }))
 
     await this.requireConfirmation('Do you confirm the provided input?', true)
 
-    const result = await this.sendAndFollowNamedTx(await this.getDecodedPair(address), 'content', 'createVideo', [
+    const result = await this.sendAndFollowNamedTx(keypair, 'content', 'createVideo', [
       actor,
       channelId,
       videoCreationParameters,
     ])
-    if (result) {
-      const event = this.findEvent(result, 'content', 'VideoCreated')
-      const videoId = event?.data[2]
-      this.log(chalk.green(`Video with id ${chalk.cyanBright(videoId?.toString())} successfully created!`))
-    }
 
-    // Upload assets
-    await this.uploadAssets(inputAssets, input)
+    const videoCreatedEvent = this.findEvent(result, 'content', 'VideoCreated')
+    this.log(
+      chalk.green(`Video with id ${chalk.cyanBright(videoCreatedEvent?.data[2].toString())} successfully created!`)
+    )
+
+    const dataObjectsUploadedEvent = this.findEvent(result, 'storage', 'DataObjectsUploaded')
+    if (dataObjectsUploadedEvent) {
+      const [objectIds] = dataObjectsUploadedEvent.data
+      await this.uploadAssets(
+        keypair,
+        memberId.toNumber(),
+        `dynamic:channel:${channelId.toString()}`,
+        objectIds.map((id, index) => ({ dataObjectId: id, path: resolvedAssets[index].path })),
+        input
+      )
+    }
   }
 }

+ 2 - 2
cli/src/commands/content/createVideoCategory.ts

@@ -5,7 +5,7 @@ import { asValidatedMetadata, metadataToBytes } from '../../helpers/serializatio
 import { flags } from '@oclif/command'
 import { CreateInterface } from '@joystream/types'
 import { VideoCategoryCreationParameters } from '@joystream/types/content'
-import { VideoCategoryInputSchema } from '../../json-schemas/ContentDirectory'
+import { VideoCategoryInputSchema } from '../../schemas/ContentDirectory'
 import chalk from 'chalk'
 import { VideoCategoryMetadata } from '@joystream/metadata-protobuf'
 
@@ -20,7 +20,7 @@ export default class CreateVideoCategoryCommand extends ContentDirectoryCommandB
     }),
   }
 
-  async run() {
+  async run(): Promise<void> {
     const { context, input } = this.parse(CreateVideoCategoryCommand).flags
 
     const [actor, address] = context ? await this.getContentActor(context) : await this.getCategoryManagementActor()

+ 1 - 1
cli/src/commands/content/curatorGroup.ts

@@ -13,7 +13,7 @@ export default class CuratorGroupCommand extends ContentDirectoryCommandBase {
     },
   ]
 
-  async run() {
+  async run(): Promise<void> {
     const { id } = this.parse(CuratorGroupCommand).args
     const group = await this.getCuratorGroup(id)
     const members = (await this.getApi().groupMembers(WorkingGroups.Curators)).filter((curator) =>

+ 101 - 0
cli/src/commands/content/deleteChannel.ts

@@ -0,0 +1,101 @@
+import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
+import { flags } from '@oclif/command'
+import chalk from 'chalk'
+import { createTypeFromConstructor } from '@joystream/types'
+import { BagId } from '@joystream/types/storage'
+import ExitCodes from '../../ExitCodes'
+import { formatBalance } from '@polkadot/util'
+import BN from 'bn.js'
+
+export default class DeleteChannelCommand extends ContentDirectoryCommandBase {
+  static description = 'Delete the channel and optionally all associated data objects.'
+
+  static flags = {
+    channelId: flags.integer({
+      char: 'c',
+      required: true,
+      description: 'ID of the Channel',
+    }),
+    force: flags.boolean({
+      char: 'f',
+      default: false,
+      description: 'Force-remove all associated channel data objects',
+    }),
+  }
+
+  async getDataObjectsInfoFromQueryNode(channelId: number): Promise<[string, BN][]> {
+    const dataObjects = await this.getQNApi().dataObjectsByBagId(`dynamic:channel:${channelId}`)
+
+    if (dataObjects.length) {
+      this.log('Following data objects are still associated with the channel:')
+      dataObjects.forEach((o) => {
+        let parentStr = ''
+        if ('video' in o.type && o.type.video) {
+          parentStr = ` (video: ${o.type.video.id})`
+        }
+        this.log(`- ${o.id} - ${o.type.__typename}${parentStr}`)
+      })
+    }
+
+    return dataObjects.map((o) => [o.id, new BN(o.deletionPrize)])
+  }
+
+  async getDataObjectsInfoFromChain(channelId: number): Promise<[string, BN][]> {
+    const dataObjects = await this.getApi().dataObjectsInBag(
+      createTypeFromConstructor(BagId, { Dynamic: { Channel: channelId } })
+    )
+
+    if (dataObjects.length) {
+      const dataObjectIds = dataObjects.map(([id]) => id.toString())
+      this.log(`Following data objects are still associated with the channel: ${dataObjectIds.join(', ')}`)
+    }
+
+    return dataObjects.map(([id, o]) => [id.toString(), o.deletion_prize])
+  }
+
+  async run(): Promise<void> {
+    const {
+      flags: { channelId, force },
+    } = this.parse(DeleteChannelCommand)
+    // Context
+    const channel = await this.getApi().channelById(channelId)
+    const [actor, address] = await this.getChannelOwnerActor(channel)
+
+    if (channel.num_videos.toNumber()) {
+      this.error(
+        `This channel still has ${channel.num_videos.toNumber()} associated video(s)!\n` +
+          `Delete the videos first using ${chalk.magentaBright('content:deleteVideo')} command`
+      )
+    }
+
+    const dataObjectsInfo = this.isQueryNodeUriSet()
+      ? await this.getDataObjectsInfoFromQueryNode(channelId)
+      : await this.getDataObjectsInfoFromChain(channelId)
+
+    if (dataObjectsInfo.length) {
+      if (!force) {
+        this.error(`Cannot remove associated data objects unless ${chalk.magentaBright('--force')} flag is used`, {
+          exit: ExitCodes.InvalidInput,
+        })
+      }
+      const deletionPrize = dataObjectsInfo.reduce((sum, [, prize]) => sum.add(prize), new BN(0))
+      this.log(
+        `Data objects deletion prize of ${chalk.cyanBright(
+          formatBalance(deletionPrize)
+        )} will be transferred to ${chalk.magentaBright(address)}`
+      )
+    }
+
+    await this.requireConfirmation(
+      `Are you sure you want to remove channel ${chalk.magentaBright(channelId.toString())}${
+        force ? ' and all associated data objects' : ''
+      }?`
+    )
+
+    await this.sendAndFollowNamedTx(await this.getDecodedPair(address), 'content', 'deleteChannel', [
+      actor,
+      channelId,
+      force ? dataObjectsInfo.length : 0,
+    ])
+  }
+}

+ 1 - 1
cli/src/commands/content/deleteChannelCategory.ts

@@ -14,7 +14,7 @@ export default class DeleteChannelCategoryCommand extends ContentDirectoryComman
     },
   ]
 
-  async run() {
+  async run(): Promise<void> {
     const { context } = this.parse(DeleteChannelCategoryCommand).flags
 
     const { channelCategoryId } = this.parse(DeleteChannelCategoryCommand).args

+ 80 - 0
cli/src/commands/content/deleteVideo.ts

@@ -0,0 +1,80 @@
+import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
+import { flags } from '@oclif/command'
+import BN from 'bn.js'
+import chalk from 'chalk'
+import { formatBalance } from '@polkadot/util'
+import { createType } from '@joystream/types'
+import ExitCodes from '../../ExitCodes'
+
+export default class DeleteVideoCommand extends ContentDirectoryCommandBase {
+  static description = 'Delete the video and optionally all associated data objects.'
+
+  protected requiresQueryNode = true
+
+  static flags = {
+    videoId: flags.integer({
+      char: 'v',
+      required: true,
+      description: 'ID of the Video',
+    }),
+    force: flags.boolean({
+      char: 'f',
+      default: false,
+      description: 'Force-remove all associated video data objects',
+    }),
+    context: ContentDirectoryCommandBase.channelManagementContextFlag,
+  }
+
+  async getDataObjectsInfo(videoId: number): Promise<[string, BN][]> {
+    const dataObjects = await this.getQNApi().dataObjectsByVideoId(videoId.toString())
+
+    if (dataObjects.length) {
+      this.log('Following data objects are still associated with the video:')
+      dataObjects.forEach((o) => {
+        this.log(`${o.id} - ${o.type.__typename}`)
+      })
+    }
+
+    return dataObjects.map((o) => [o.id, new BN(o.deletionPrize)])
+  }
+
+  async run(): Promise<void> {
+    const {
+      flags: { videoId, force, context },
+    } = this.parse(DeleteVideoCommand)
+    // Context
+    const video = await this.getApi().videoById(videoId)
+    const channel = await this.getApi().channelById(video.in_channel.toNumber())
+    const [actor, address] = await this.getChannelManagementActor(channel, context)
+
+    const dataObjectsInfo = await this.getDataObjectsInfo(videoId)
+    if (dataObjectsInfo.length) {
+      if (!force) {
+        this.error(`Cannot remove associated data objects unless ${chalk.magentaBright('--force')} flag is used`, {
+          exit: ExitCodes.InvalidInput,
+        })
+      }
+      const deletionPrize = dataObjectsInfo.reduce((sum, [, prize]) => sum.add(prize), new BN(0))
+      this.log(
+        `Data objects deletion prize of ${chalk.cyanBright(
+          formatBalance(deletionPrize)
+        )} will be transferred to ${chalk.magentaBright(address)}`
+      )
+    }
+
+    await this.requireConfirmation(
+      `Are you sure you want to remove video ${chalk.magentaBright(videoId)}${
+        force ? ' and all associated data objects' : ''
+      }?`
+    )
+
+    await this.sendAndFollowNamedTx(await this.getDecodedPair(address), 'content', 'deleteVideo', [
+      actor,
+      videoId,
+      createType(
+        'BTreeSet<DataObjectId>',
+        dataObjectsInfo.map(([id]) => id)
+      ),
+    ])
+  }
+}

+ 1 - 1
cli/src/commands/content/deleteVideoCategory.ts

@@ -14,7 +14,7 @@ export default class DeleteVideoCategoryCommand extends ContentDirectoryCommandB
     },
   ]
 
-  async run() {
+  async run(): Promise<void> {
     const { context } = this.parse(DeleteVideoCategoryCommand).flags
 
     const { videoCategoryId } = this.parse(DeleteVideoCategoryCommand).args

+ 40 - 0
cli/src/commands/content/removeChannelAssets.ts

@@ -0,0 +1,40 @@
+import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
+import { flags } from '@oclif/command'
+import { createType } from '@joystream/types'
+
+export default class RemoveChannelAssetsCommand extends ContentDirectoryCommandBase {
+  static description = 'Remove data objects associated with the channel or any of its videos.'
+
+  static flags = {
+    channelId: flags.integer({
+      char: 'c',
+      required: true,
+      description: 'ID of the Channel',
+    }),
+    objectId: flags.integer({
+      char: 'o',
+      required: true,
+      multiple: true,
+      description: 'ID of an object to remove',
+    }),
+    context: ContentDirectoryCommandBase.channelManagementContextFlag,
+  }
+
+  async run(): Promise<void> {
+    const {
+      flags: { channelId, objectId: objectIds, context },
+    } = this.parse(RemoveChannelAssetsCommand)
+    // Context
+    const channel = await this.getApi().channelById(channelId)
+    const [actor, address] = await this.getChannelManagementActor(channel, context)
+
+    this.jsonPrettyPrint(JSON.stringify({ channelId, assetsToRemove: objectIds }))
+    await this.requireConfirmation('Do you confirm the provided input?', true)
+
+    await this.sendAndFollowNamedTx(await this.getDecodedPair(address), 'content', 'updateChannel', [
+      actor,
+      channelId,
+      { assets_to_remove: createType('BTreeSet<DataObjectId>', objectIds) },
+    ])
+  }
+}

+ 5 - 7
cli/src/commands/content/removeCuratorFromGroup.ts

@@ -16,7 +16,7 @@ export default class RemoveCuratorFromGroupCommand extends ContentDirectoryComma
     },
   ]
 
-  async run() {
+  async run(): Promise<void> {
     const lead = await this.getRequiredLeadContext()
 
     let { groupId, curatorId } = this.parse(RemoveCuratorFromGroupCommand).args
@@ -37,12 +37,10 @@ export default class RemoveCuratorFromGroupCommand extends ContentDirectoryComma
       await this.getCurator(curatorId)
     }
 
-    await this.sendAndFollowNamedTx(
-      await this.getDecodedPair(lead.roleAccount.toString()),
-      'content',
-      'removeCuratorFromGroup',
-      [groupId, curatorId]
-    )
+    await this.sendAndFollowNamedTx(await this.getDecodedPair(lead.roleAccount), 'content', 'removeCuratorFromGroup', [
+      groupId,
+      curatorId,
+    ])
 
     this.log(
       chalk.green(

+ 18 - 7
cli/src/commands/content/reuploadAssets.ts

@@ -1,9 +1,9 @@
 import UploadCommandBase from '../../base/UploadCommandBase'
 import { getInputJson } from '../../helpers/InputOutput'
-import AssetsSchema from '../../json-schemas/Assets.schema.json'
-import { Assets as AssetsInput } from '../../json-schemas/typings/Assets.schema'
+import AssetsSchema from '../../schemas/json/Assets.schema.json'
+import { Assets as AssetsInput } from '../../schemas/typings/Assets.schema'
 import { flags } from '@oclif/command'
-import { ContentId } from '@joystream/types/storage'
+import BN from 'bn.js'
 
 export default class ReuploadVideoAssetsCommand extends UploadCommandBase {
   static description = 'Allows reuploading assets that were not successfully uploaded during channel/video creation'
@@ -16,17 +16,28 @@ export default class ReuploadVideoAssetsCommand extends UploadCommandBase {
     }),
   }
 
-  async run() {
+  async run(): Promise<void> {
     const { input } = this.parse(ReuploadVideoAssetsCommand).flags
 
+    // Get context
+    const [memberId, membership] = await this.getRequiredMemberContext()
+
     // Get input from file
     const inputData = await getInputJson<AssetsInput>(input, AssetsSchema)
-    const inputAssets = inputData.map(({ contentId, path }) => ({
-      contentId: ContentId.decode(this.getTypesRegistry(), contentId),
+    const { bagId } = inputData
+    const inputAssets = inputData.assets.map(({ objectId, path }) => ({
+      dataObjectId: new BN(objectId),
       path,
     }))
 
     // Upload assets
-    await this.uploadAssets(inputAssets, input, '')
+    await this.uploadAssets(
+      await this.getDecodedPair(membership.controller_account),
+      memberId.toNumber(),
+      bagId,
+      inputAssets,
+      input,
+      ''
+    )
   }
 }

+ 5 - 7
cli/src/commands/content/setCuratorGroupStatus.ts

@@ -17,7 +17,7 @@ export default class SetCuratorGroupStatusCommand extends ContentDirectoryComman
     },
   ]
 
-  async run() {
+  async run(): Promise<void> {
     const lead = await this.getRequiredLeadContext()
 
     let { id, status } = this.parse(SetCuratorGroupStatusCommand).args
@@ -46,12 +46,10 @@ export default class SetCuratorGroupStatusCommand extends ContentDirectoryComman
       status = !!parseInt(status)
     }
 
-    await this.sendAndFollowNamedTx(
-      await this.getDecodedPair(lead.roleAccount.toString()),
-      'content',
-      'setCuratorGroupStatus',
-      [id, status]
-    )
+    await this.sendAndFollowNamedTx(await this.getDecodedPair(lead.roleAccount), 'content', 'setCuratorGroupStatus', [
+      id,
+      status,
+    ])
 
     console.log(
       chalk.green(

+ 1 - 1
cli/src/commands/content/setFeaturedVideos.ts

@@ -11,7 +11,7 @@ export default class SetFeaturedVideosCommand extends ContentDirectoryCommandBas
     },
   ]
 
-  async run() {
+  async run(): Promise<void> {
     const { featuredVideoIds } = this.parse(SetFeaturedVideosCommand).args
 
     const [actor, address] = await this.getContentActor('Lead')

+ 88 - 17
cli/src/commands/content/updateChannel.ts

@@ -3,14 +3,21 @@ import { asValidatedMetadata, metadataToBytes } from '../../helpers/serializatio
 import { ChannelInputParameters } from '../../Types'
 import { flags } from '@oclif/command'
 import UploadCommandBase from '../../base/UploadCommandBase'
-import { CreateInterface } from '@joystream/types'
+import { CreateInterface, createType } from '@joystream/types'
 import { ChannelUpdateParameters } from '@joystream/types/content'
-import { ChannelInputSchema } from '../../json-schemas/ContentDirectory'
+import { ChannelInputSchema } from '../../schemas/ContentDirectory'
 import { ChannelMetadata } from '@joystream/metadata-protobuf'
+import { DataObjectInfoFragment } from '../../graphql/generated/queries'
+import BN from 'bn.js'
+import { formatBalance } from '@polkadot/util'
+import chalk from 'chalk'
+import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
+import ExitCodes from '../../ExitCodes'
 
 export default class UpdateChannelCommand extends UploadCommandBase {
   static description = 'Update existing content directory channel.'
   static flags = {
+    context: ContentDirectoryCommandBase.channelManagementContextFlag,
     input: flags.string({
       char: 'i',
       required: true,
@@ -39,44 +46,108 @@ export default class UpdateChannelCommand extends UploadCommandBase {
     }
   }
 
-  async run() {
+  async getAssetsToRemove(
+    channelId: number,
+    coverPhotoIndex: number | undefined,
+    avatarPhotoIndex: number | undefined
+  ): Promise<string[]> {
+    let assetsToRemove: DataObjectInfoFragment[] = []
+    if (coverPhotoIndex !== undefined || avatarPhotoIndex !== undefined) {
+      const currentAssets = await this.getQNApi().dataObjectsByChannelId(channelId.toString())
+      const currentCovers = currentAssets.filter((a) => a.type.__typename === 'DataObjectTypeChannelCoverPhoto')
+      const currentAvatars = currentAssets.filter((a) => a.type.__typename === 'DataObjectTypeChannelAvatar')
+      if (currentCovers.length && coverPhotoIndex !== undefined) {
+        assetsToRemove = assetsToRemove.concat(currentCovers)
+      }
+      if (currentAvatars.length && avatarPhotoIndex !== undefined) {
+        assetsToRemove = assetsToRemove.concat(currentAvatars)
+      }
+      if (assetsToRemove.length) {
+        this.log(`\nData objects to be removed due to replacement:`)
+        assetsToRemove.forEach((a) => this.log(`- ${a.id} (${a.type.__typename})`))
+        const totalPrize = assetsToRemove.reduce((sum, { deletionPrize }) => sum.add(new BN(deletionPrize)), new BN(0))
+        this.log(`Total deletion prize: ${chalk.cyanBright(formatBalance(totalPrize))}\n`)
+      }
+    }
+
+    return assetsToRemove.map((a) => a.id)
+  }
+
+  async run(): Promise<void> {
     const {
-      flags: { input },
+      flags: { input, context },
       args: { channelId },
     } = this.parse(UpdateChannelCommand)
 
     // Context
     const channel = await this.getApi().channelById(channelId)
-    const [actor, address] = await this.getChannelOwnerActor(channel)
+    const [actor, address] = await this.getChannelManagementActor(channel, context)
+    const [memberId] = await this.getRequiredMemberContext(true)
+    const keypair = await this.getDecodedPair(address)
 
     const channelInput = await getInputJson<ChannelInputParameters>(input, ChannelInputSchema)
     const meta = asValidatedMetadata(ChannelMetadata, channelInput)
 
+    if (channelInput.rewardAccount !== undefined && actor.type === 'Collaborator') {
+      this.error("Collaborators are not allowed to update channel's reward account!", { exit: ExitCodes.AccessDenied })
+    }
+
+    if (channelInput.collaborators !== undefined && actor.type === 'Collaborator') {
+      this.error("Collaborators are not allowed to update channel's collaborators!", { exit: ExitCodes.AccessDenied })
+    }
+
+    if (channelInput.collaborators) {
+      await this.validateCollaborators(channelInput.collaborators)
+    }
+
     const { coverPhotoPath, avatarPhotoPath, rewardAccount } = channelInput
-    const inputPaths = [coverPhotoPath, avatarPhotoPath].filter((p) => p !== undefined) as string[]
-    const inputAssets = await this.prepareInputAssets(inputPaths, input)
-    const assets = inputAssets.map(({ parameters }) => ({ Upload: parameters }))
-    // Set assets indexes in the metadata
-    const [coverPhotoIndex, avatarPhotoIndex] = this.assetsIndexes([coverPhotoPath, avatarPhotoPath], inputPaths)
-    meta.coverPhoto = coverPhotoIndex
-    meta.avatarPhoto = avatarPhotoIndex
+    const [resolvedAssets, assetIndices] = await this.resolveAndValidateAssets(
+      { coverPhotoPath, avatarPhotoPath },
+      input
+    )
+    // Set assets indices in the metadata
+    // "undefined" values will be omitted when the metadata is encoded. It's not possible to "unset" an asset this way.
+    meta.coverPhoto = assetIndices.coverPhotoPath
+    meta.avatarPhoto = assetIndices.avatarPhotoPath
 
+    // Prepare and send the extrinsic
+    const assetsToUpload = await this.prepareAssetsForExtrinsic(resolvedAssets)
+    const assetsToRemove = await this.getAssetsToRemove(
+      channelId,
+      assetIndices.coverPhotoPath,
+      assetIndices.avatarPhotoPath
+    )
+
+    const collaborators = createType('Option<BTreeSet<MemberId>>', channelInput.collaborators)
     const channelUpdateParameters: CreateInterface<ChannelUpdateParameters> = {
-      assets,
+      assets_to_upload: assetsToUpload,
+      assets_to_remove: createType('BTreeSet<DataObjectId>', assetsToRemove),
       new_meta: metadataToBytes(ChannelMetadata, meta),
       reward_account: this.parseRewardAccountInput(rewardAccount),
+      collaborators,
     }
 
-    this.jsonPrettyPrint(JSON.stringify({ assets, metadata: meta, rewardAccount }))
+    this.jsonPrettyPrint(
+      JSON.stringify({ assetsToUpload: assetsToUpload?.toJSON(), assetsToRemove, metadata: meta, rewardAccount })
+    )
 
     await this.requireConfirmation('Do you confirm the provided input?', true)
 
-    await this.sendAndFollowNamedTx(await this.getDecodedPair(address), 'content', 'updateChannel', [
+    const result = await this.sendAndFollowNamedTx(keypair, 'content', 'updateChannel', [
       actor,
       channelId,
       channelUpdateParameters,
     ])
-
-    await this.uploadAssets(inputAssets, input)
+    const dataObjectsUploadedEvent = this.findEvent(result, 'storage', 'DataObjectsUploaded')
+    if (dataObjectsUploadedEvent) {
+      const [objectIds] = dataObjectsUploadedEvent.data
+      await this.uploadAssets(
+        keypair,
+        memberId.toNumber(),
+        `dynamic:channel:${channelId.toString()}`,
+        objectIds.map((id, index) => ({ dataObjectId: id, path: resolvedAssets[index].path })),
+        input
+      )
+    }
   }
 }

+ 2 - 2
cli/src/commands/content/updateChannelCategory.ts

@@ -5,7 +5,7 @@ import { asValidatedMetadata, metadataToBytes } from '../../helpers/serializatio
 import { CreateInterface } from '@joystream/types'
 import { ChannelCategoryUpdateParameters } from '@joystream/types/content'
 import { flags } from '@oclif/command'
-import { ChannelCategoryInputSchema } from '../../json-schemas/ContentDirectory'
+import { ChannelCategoryInputSchema } from '../../schemas/ContentDirectory'
 import { ChannelCategoryMetadata } from '@joystream/metadata-protobuf'
 export default class UpdateChannelCategoryCommand extends ContentDirectoryCommandBase {
   static description = 'Update channel category inside content directory.'
@@ -26,7 +26,7 @@ export default class UpdateChannelCategoryCommand extends ContentDirectoryComman
     },
   ]
 
-  async run() {
+  async run(): Promise<void> {
     const { context, input } = this.parse(UpdateChannelCategoryCommand).flags
 
     const { channelCategoryId } = this.parse(UpdateChannelCategoryCommand).args

+ 1 - 1
cli/src/commands/content/updateChannelCensorshipStatus.ts

@@ -26,7 +26,7 @@ export default class UpdateChannelCensorshipStatusCommand extends ContentDirecto
     },
   ]
 
-  async run() {
+  async run(): Promise<void> {
     let {
       args: { id, status },
       flags: { rationale },

+ 69 - 17
cli/src/commands/content/updateVideo.ts

@@ -3,10 +3,15 @@ import { VideoInputParameters } from '../../Types'
 import { asValidatedMetadata, metadataToBytes } from '../../helpers/serialization'
 import UploadCommandBase from '../../base/UploadCommandBase'
 import { flags } from '@oclif/command'
-import { CreateInterface } from '@joystream/types'
+import { CreateInterface, createType } from '@joystream/types'
 import { VideoUpdateParameters } from '@joystream/types/content'
-import { VideoInputSchema } from '../../json-schemas/ContentDirectory'
+import { VideoInputSchema } from '../../schemas/ContentDirectory'
 import { VideoMetadata } from '@joystream/metadata-protobuf'
+import { DataObjectInfoFragment } from '../../graphql/generated/queries'
+import BN from 'bn.js'
+import { formatBalance } from '@polkadot/util'
+import chalk from 'chalk'
+import ContentDirectoryCommandBase from '../../base/ContentDirectoryCommandBase'
 
 export default class UpdateVideoCommand extends UploadCommandBase {
   static description = 'Update video under specific id.'
@@ -16,6 +21,7 @@ export default class UpdateVideoCommand extends UploadCommandBase {
       required: true,
       description: `Path to JSON file to use as input`,
     }),
+    context: ContentDirectoryCommandBase.channelManagementContextFlag,
   }
 
   static args = [
@@ -26,44 +32,90 @@ export default class UpdateVideoCommand extends UploadCommandBase {
     },
   ]
 
-  async run() {
+  async getAssetsToRemove(
+    videoId: number,
+    videoIndex: number | undefined,
+    thumbnailIndex: number | undefined
+  ): Promise<string[]> {
+    let assetsToRemove: DataObjectInfoFragment[] = []
+    if (videoIndex !== undefined || thumbnailIndex !== undefined) {
+      const currentAssets = await this.getQNApi().dataObjectsByVideoId(videoId.toString())
+      const currentThumbs = currentAssets.filter((a) => a.type.__typename === 'DataObjectTypeVideoThumbnail')
+      const currentMedias = currentAssets.filter((a) => a.type.__typename === 'DataObjectTypeVideoMedia')
+      if (currentThumbs.length && thumbnailIndex !== undefined) {
+        assetsToRemove = assetsToRemove.concat(currentThumbs)
+      }
+      if (currentMedias.length && videoIndex !== undefined) {
+        assetsToRemove = assetsToRemove.concat(currentMedias)
+      }
+      if (assetsToRemove.length) {
+        this.log(`\nData objects to be removed due to replacement:`)
+        assetsToRemove.forEach((a) => this.log(`- ${a.id} (${a.type.__typename})`))
+        const totalPrize = assetsToRemove.reduce((sum, { deletionPrize }) => sum.add(new BN(deletionPrize)), new BN(0))
+        this.log(`Total deletion prize: ${chalk.cyanBright(formatBalance(totalPrize))}\n`)
+      }
+    }
+
+    return assetsToRemove.map((a) => a.id)
+  }
+
+  async run(): Promise<void> {
     const {
-      flags: { input },
+      flags: { input, context },
       args: { videoId },
     } = this.parse(UpdateVideoCommand)
 
     // Context
     const video = await this.getApi().videoById(videoId)
     const channel = await this.getApi().channelById(video.in_channel.toNumber())
-    const [actor, address] = await this.getChannelOwnerActor(channel)
+    const [actor, address] = await this.getChannelManagementActor(channel, context)
+    const [memberId] = await this.getRequiredMemberContext(true)
+    const keypair = await this.getDecodedPair(address)
 
     const videoInput = await getInputJson<VideoInputParameters>(input, VideoInputSchema)
     const meta = asValidatedMetadata(VideoMetadata, videoInput)
 
     const { videoPath, thumbnailPhotoPath } = videoInput
-    const inputPaths = [videoPath, thumbnailPhotoPath].filter((p) => p !== undefined) as string[]
-    const inputAssets = await this.prepareInputAssets(inputPaths, input)
-    const assets = inputAssets.map(({ parameters }) => ({ Upload: parameters }))
-    // Set assets indexes in the metadata
-    const [videoIndex, thumbnailPhotoIndex] = this.assetsIndexes([videoPath, thumbnailPhotoPath], inputPaths)
-    meta.video = videoIndex
-    meta.thumbnailPhoto = thumbnailPhotoIndex
+    const [resolvedAssets, assetIndices] = await this.resolveAndValidateAssets({ videoPath, thumbnailPhotoPath }, input)
+    // Set assets indices in the metadata
+    // "undefined" values will be omitted when the metadata is encoded. It's not possible to "unset" an asset this way.
+    meta.video = assetIndices.videoPath
+    meta.thumbnailPhoto = assetIndices.thumbnailPhotoPath
 
+    // Prepare and send the extrinsic
+    const assetsToUpload = await this.prepareAssetsForExtrinsic(resolvedAssets)
+    const assetsToRemove = await this.getAssetsToRemove(
+      videoId,
+      assetIndices.videoPath,
+      assetIndices.thumbnailPhotoPath
+    )
     const videoUpdateParameters: CreateInterface<VideoUpdateParameters> = {
-      assets,
+      assets_to_upload: assetsToUpload,
       new_meta: metadataToBytes(VideoMetadata, meta),
+      assets_to_remove: createType('BTreeSet<DataObjectId>', assetsToRemove),
     }
 
-    this.jsonPrettyPrint(JSON.stringify({ assets, newMetadata: meta }))
+    this.jsonPrettyPrint(
+      JSON.stringify({ assetsToUpload: assetsToUpload?.toJSON(), newMetadata: meta, assetsToRemove })
+    )
 
     await this.requireConfirmation('Do you confirm the provided input?', true)
 
-    await this.sendAndFollowNamedTx(await this.getDecodedPair(address), 'content', 'updateVideo', [
+    const result = await this.sendAndFollowNamedTx(keypair, 'content', 'updateVideo', [
       actor,
       videoId,
       videoUpdateParameters,
     ])
-
-    await this.uploadAssets(inputAssets, input)
+    const dataObjectsUploadedEvent = this.findEvent(result, 'storage', 'DataObjectsUploaded')
+    if (dataObjectsUploadedEvent) {
+      const [objectIds] = dataObjectsUploadedEvent.data
+      await this.uploadAssets(
+        keypair,
+        memberId.toNumber(),
+        `dynamic:channel:${video.in_channel.toString()}`,
+        objectIds.map((id, index) => ({ dataObjectId: id, path: resolvedAssets[index].path })),
+        input
+      )
+    }
   }
 }

+ 2 - 2
cli/src/commands/content/updateVideoCategory.ts

@@ -5,7 +5,7 @@ import { asValidatedMetadata, metadataToBytes } from '../../helpers/serializatio
 import { flags } from '@oclif/command'
 import { CreateInterface } from '@joystream/types'
 import { VideoCategoryUpdateParameters } from '@joystream/types/content'
-import { VideoCategoryInputSchema } from '../../json-schemas/ContentDirectory'
+import { VideoCategoryInputSchema } from '../../schemas/ContentDirectory'
 import { VideoCategoryMetadata } from '@joystream/metadata-protobuf'
 
 export default class UpdateVideoCategoryCommand extends ContentDirectoryCommandBase {
@@ -27,7 +27,7 @@ export default class UpdateVideoCategoryCommand extends ContentDirectoryCommandB
     },
   ]
 
-  async run() {
+  async run(): Promise<void> {
     const { context, input } = this.parse(UpdateVideoCategoryCommand).flags
 
     const { videoCategoryId } = this.parse(UpdateVideoCategoryCommand).args

+ 1 - 1
cli/src/commands/content/updateVideoCensorshipStatus.ts

@@ -26,7 +26,7 @@ export default class UpdateVideoCensorshipStatusCommand extends ContentDirectory
     },
   ]
 
-  async run() {
+  async run(): Promise<void> {
     let {
       args: { id, status },
       flags: { rationale },

+ 2 - 2
cli/src/commands/content/video.ts

@@ -11,14 +11,14 @@ export default class VideoCommand extends ContentDirectoryCommandBase {
     },
   ]
 
-  async run() {
+  async run(): Promise<void> {
     const { videoId } = this.parse(VideoCommand).args
     const aVideo = await this.getApi().videoById(videoId)
     if (aVideo) {
       displayCollapsedRow({
         'ID': videoId.toString(),
         'InChannel': aVideo.in_channel.toString(),
-        'InSeries': aVideo.in_series.toString(),
+        'InSeries': aVideo.in_series.unwrapOr('NONE').toString(),
         'IsCensored': aVideo.is_censored.toString(),
       })
     } else {

+ 4 - 6
cli/src/commands/content/videos.ts

@@ -13,14 +13,12 @@ export default class VideosCommand extends ContentDirectoryCommandBase {
     },
   ]
 
-  async run() {
+  async run(): Promise<void> {
     const { channelId } = this.parse(VideosCommand).args
 
-    let videos: [VideoId, Video][]
+    let videos: [VideoId, Video][] = await this.getApi().availableVideos()
     if (channelId) {
-      videos = await this.getApi().videosByChannelId(channelId)
-    } else {
-      videos = await this.getApi().availableVideos()
+      videos = videos.filter(([, v]) => v.in_channel.eqn(parseInt(channelId)))
     }
 
     if (videos.length > 0) {
@@ -28,7 +26,7 @@ export default class VideosCommand extends ContentDirectoryCommandBase {
         videos.map(([id, v]) => ({
           'ID': id.toString(),
           'InChannel': v.in_channel.toString(),
-          'InSeries': v.in_series.toString(),
+          'InSeries': v.in_series.unwrapOr('NONE').toString(),
           'IsCensored': v.is_censored.toString(),
         })),
         3

+ 13 - 14
cli/src/commands/working-groups/createOpening.ts

@@ -4,14 +4,15 @@ import chalk from 'chalk'
 import { apiModuleByGroup } from '../../Api'
 import { JsonSchemaPrompter } from '../../helpers/JsonSchemaPrompt'
 import { JSONSchema } from '@apidevtools/json-schema-ref-parser'
-import OpeningParamsSchema from '../../json-schemas/WorkingGroupOpening.schema.json'
-import { WorkingGroupOpening as OpeningParamsJson } from '../../json-schemas/typings/WorkingGroupOpening.schema'
+import OpeningParamsSchema from '../../schemas/json/WorkingGroupOpening.schema.json'
+import { WorkingGroupOpening as OpeningParamsJson } from '../../schemas/typings/WorkingGroupOpening.schema'
 import { IOFlags, getInputJson, ensureOutputFileIsWriteable, saveOutputJsonToFile } from '../../helpers/InputOutput'
 import ExitCodes from '../../ExitCodes'
 import { flags } from '@oclif/command'
 import { AugmentedSubmittables } from '@polkadot/api/types'
 import { formatBalance } from '@polkadot/util'
 import BN from 'bn.js'
+import { CLIError } from '@oclif/errors'
 
 const OPENING_STAKE = new BN(2000)
 
@@ -92,7 +93,7 @@ export default class WorkingGroupsCreateOpening extends WorkingGroupsCommandBase
     }
   }
 
-  async run() {
+  async run(): Promise<void> {
     // lead-only gate
     const lead = await this.getRequiredLeadContext()
 
@@ -146,19 +147,17 @@ export default class WorkingGroupsCreateOpening extends WorkingGroupsCommandBase
       }
 
       // Send the tx
-      const result = await this.sendAndFollowNamedTx(
-        await this.getDecodedPair(lead.roleAccount.toString()),
-        apiModuleByGroup[this.group],
-        'addOpening',
-        txParams,
-        true // warnOnly
-      )
-
-      // Display a success message on success or ask to try again on error
-      if (result) {
+      try {
+        await this.sendAndFollowTx(
+          await this.getDecodedPair(lead.roleAccount),
+          this.getOriginalApi().tx[apiModuleByGroup[this.group]].addOpening(...txParams)
+        )
         this.log(chalk.green('Opening successfully created!'))
         tryAgain = false
-      } else {
+      } catch (e) {
+        if (e instanceof CLIError) {
+          this.warn(e.message)
+        }
         tryAgain = await this.simplePrompt({ type: 'confirm', message: 'Try again with remembered input?' })
       }
     } while (tryAgain)

+ 1 - 1
cli/src/commands/working-groups/decreaseWorkerStake.ts

@@ -28,7 +28,7 @@ export default class WorkingGroupsDecreaseWorkerStake extends WorkingGroupsComma
     ...WorkingGroupsCommandBase.flags,
   }
 
-  async run() {
+  async run(): Promise<void> {
     const {
       args: { workerId, amount },
     } = this.parse(WorkingGroupsDecreaseWorkerStake)

+ 2 - 2
cli/src/commands/working-groups/evictWorker.ts

@@ -29,9 +29,9 @@ export default class WorkingGroupsEvictWorker extends WorkingGroupsCommandBase {
     }),
   }
 
-  async run() {
+  async run(): Promise<void> {
     const {
-      args,
+      args,
       flags: { penalty, rationale },
     } = this.parse(WorkingGroupsEvictWorker)
 

+ 5 - 7
cli/src/commands/working-groups/fillOpening.ts

@@ -1,9 +1,7 @@
 import WorkingGroupsCommandBase from '../../base/WorkingGroupsCommandBase'
 import { apiModuleByGroup } from '../../Api'
 import chalk from 'chalk'
-import { ApplicationId } from '@joystream/types/working-group'
-import { BTreeSet } from '@polkadot/types'
-import { registry } from '@joystream/types'
+import { createType } from '@joystream/types'
 
 export default class WorkingGroupsFillOpening extends WorkingGroupsCommandBase {
   static description = "Allows filling working group opening that's currently in review. Requires lead access."
@@ -19,7 +17,7 @@ export default class WorkingGroupsFillOpening extends WorkingGroupsCommandBase {
     ...WorkingGroupsCommandBase.flags,
   }
 
-  async run() {
+  async run(): Promise<void> {
     const { args } = this.parse(WorkingGroupsFillOpening)
 
     // Lead-only gate
@@ -31,13 +29,13 @@ export default class WorkingGroupsFillOpening extends WorkingGroupsCommandBase {
     const applicationIds = await this.promptForApplicationsToAccept(opening)
 
     await this.sendAndFollowNamedTx(
-      await this.getDecodedPair(lead.roleAccount.toString()),
+      await this.getDecodedPair(lead.roleAccount),
       apiModuleByGroup[this.group],
       'fillOpening',
-      [openingId, new (BTreeSet.with(ApplicationId))(registry, applicationIds)]
+      [openingId, createType('BTreeSet<ApplicationId>', applicationIds)]
     )
 
-    this.log(chalk.green(`Opening ${chalk.magentaBright(openingId.toString())} succesfully filled!`))
+    this.log(chalk.green(`Opening ${chalk.magentaBright(openingId.toString())} successfully filled!`))
     this.log(
       chalk.green('Accepted working group application IDs: ') +
         chalk.magentaBright(applicationIds.length ? applicationIds.join(chalk.green(', ')) : 'NONE')

+ 1 - 1
cli/src/commands/working-groups/increaseStake.ts

@@ -19,7 +19,7 @@ export default class WorkingGroupsIncreaseStake extends WorkingGroupsCommandBase
     ...WorkingGroupsCommandBase.flags,
   }
 
-  async run() {
+  async run(): Promise<void> {
     // Worker-only gate
     const worker = await this.getRequiredWorkerContext()
 

+ 2 - 2
cli/src/commands/working-groups/leaveRole.ts

@@ -13,7 +13,7 @@ export default class WorkingGroupsLeaveRole extends WorkingGroupsCommandBase {
     }),
   }
 
-  async run() {
+  async run(): Promise<void> {
     // Worker-only gate
     const worker = await this.getRequiredWorkerContext()
 
@@ -28,6 +28,6 @@ export default class WorkingGroupsLeaveRole extends WorkingGroupsCommandBase {
       [worker.workerId, rationale || null]
     )
 
-    this.log(chalk.green(`Succesfully left the role! (worker id: ${chalk.magentaBright(worker.workerId.toString())})`))
+    this.log(chalk.green(`Successfully left the role! (worker id: ${chalk.magentaBright(worker.workerId.toString())})`))
   }
 }

+ 2 - 2
cli/src/commands/working-groups/slashWorker.ts

@@ -30,7 +30,7 @@ export default class WorkingGroupsSlashWorker extends WorkingGroupsCommandBase {
     }),
   }
 
-  async run() {
+  async run(): Promise<void> {
     const {
       args: { amount, workerId },
       flags: { rationale },
@@ -58,7 +58,7 @@ export default class WorkingGroupsSlashWorker extends WorkingGroupsCommandBase {
       chalk.green(
         `${chalk.magentaBright(formatBalance(amount))} from worker ${chalk.magentaBright(
           workerId.toString()
-        )} stake has been succesfully slashed!`
+        )} stake has been successfully slashed!`
       )
     )
   }

+ 4 - 4
cli/src/commands/working-groups/updateRewardAccount.ts

@@ -10,7 +10,7 @@ export default class WorkingGroupsUpdateRewardAccount extends WorkingGroupsComma
     {
       name: 'address',
       required: false,
-      description: 'New reward account address (if omitted, one of the existing CLI accounts can be selected)',
+      description: 'New reward account address (if omitted, can be provided interactively)',
     },
   ]
 
@@ -18,7 +18,7 @@ export default class WorkingGroupsUpdateRewardAccount extends WorkingGroupsComma
     ...WorkingGroupsCommandBase.flags,
   }
 
-  async run() {
+  async run(): Promise<void> {
     let { address } = this.parse(WorkingGroupsUpdateRewardAccount).args
 
     // Worker-only gate
@@ -35,12 +35,12 @@ export default class WorkingGroupsUpdateRewardAccount extends WorkingGroupsComma
     }
 
     await this.sendAndFollowNamedTx(
-      await this.getDecodedPair(worker.roleAccount.toString()),
+      await this.getDecodedPair(worker.roleAccount),
       apiModuleByGroup[this.group],
       'updateRewardAccount',
       [worker.workerId, address]
     )
 
-    this.log(chalk.green(`Succesfully updated the reward account to: ${chalk.magentaBright(address)})`))
+    this.log(chalk.green(`Successfully updated the reward account to: ${chalk.magentaBright(address)})`))
   }
 }

+ 3 - 3
cli/src/commands/working-groups/updateRoleAccount.ts

@@ -18,7 +18,7 @@ export default class WorkingGroupsUpdateRoleAccount extends WorkingGroupsCommand
     ...WorkingGroupsCommandBase.flags,
   }
 
-  async run() {
+  async run(): Promise<void> {
     let { address } = this.parse(WorkingGroupsUpdateRoleAccount).args
 
     const worker = await this.getRequiredWorkerContext('MemberController')
@@ -30,12 +30,12 @@ export default class WorkingGroupsUpdateRoleAccount extends WorkingGroupsCommand
     }
 
     await this.sendAndFollowNamedTx(
-      await this.getDecodedPair(worker.profile.membership.controller_account.toString()),
+      await this.getDecodedPair(worker.profile.membership.controller_account),
       apiModuleByGroup[this.group],
       'updateRoleAccount',
       [worker.workerId, address]
     )
 
-    this.log(chalk.green(`Succesfully updated the role account to: ${chalk.magentaBright(address)})`))
+    this.log(chalk.green(`Successfully updated the role account to: ${chalk.magentaBright(address)})`))
   }
 }

+ 2 - 2
cli/src/commands/working-groups/updateRoleStorage.ts

@@ -16,13 +16,13 @@ export default class WorkingGroupsUpdateRoleStorage extends WorkingGroupsCommand
     ...WorkingGroupsCommandBase.flags,
   }
 
-  async run() {
+  async run(): Promise<void> {
     const { storage } = this.parse(WorkingGroupsUpdateRoleStorage).args
 
     const worker = await this.getRequiredWorkerContext()
 
     await this.sendAndFollowNamedTx(
-      await this.getDecodedPair(worker.roleAccount.toString()),
+      await this.getDecodedPair(worker.roleAccount),
       apiModuleByGroup[this.group],
       'updateRoleStorage',
       [worker.workerId, storage]

+ 2 - 2
cli/src/commands/working-groups/updateWorkerReward.ts

@@ -29,7 +29,7 @@ export default class WorkingGroupsUpdateWorkerReward extends WorkingGroupsComman
     return reward ? formatBalance(reward.valuePerBlock) + ' / block' : 'NONE'
   }
 
-  async run() {
+  async run(): Promise<void> {
     const {
       args: { workerId, newReward },
     } = this.parse(WorkingGroupsUpdateWorkerReward)
@@ -48,7 +48,7 @@ export default class WorkingGroupsUpdateWorkerReward extends WorkingGroupsComman
     this.log(chalk.magentaBright(`Current worker reward: ${this.formatReward(reward)}`))
 
     await this.sendAndFollowNamedTx(
-      await this.getDecodedPair(lead.roleAccount.toString()),
+      await this.getDecodedPair(lead.roleAccount),
       apiModuleByGroup[this.group],
       'updateRewardAmount',
       [workerId, newReward]

+ 65 - 0
cli/src/graphql/queries/storage.graphql

@@ -0,0 +1,65 @@
+fragment StorageNodeInfo on StorageBucket {
+  id
+  operatorMetadata {
+    nodeEndpoint
+  }
+}
+
+query getStorageNodesInfoByBagId($bagId: ID) {
+  storageBuckets(
+    where: {
+      operatorStatus_json: { isTypeOf_eq: "StorageBucketOperatorStatusActive" }
+      bags_some: { id_eq: $bagId }
+      operatorMetadata: { nodeEndpoint_contains: "http" }
+    }
+  ) {
+    ...StorageNodeInfo
+  }
+}
+
+fragment DataObjectInfo on StorageDataObject {
+  id
+  size
+  deletionPrize
+  type {
+    __typename
+    ... on DataObjectTypeVideoMedia {
+      video {
+        id
+      }
+    }
+    ... on DataObjectTypeVideoThumbnail {
+      video {
+        id
+      }
+    }
+    ... on DataObjectTypeChannelAvatar {
+      channel {
+        id
+      }
+    }
+    ... on DataObjectTypeChannelCoverPhoto {
+      channel {
+        id
+      }
+    }
+  }
+}
+
+query getDataObjectsByBagId($bagId: ID) {
+  storageDataObjects(where: { storageBag: { id_eq: $bagId } }) {
+    ...DataObjectInfo
+  }
+}
+
+query getDataObjectsByChannelId($channelId: ID) {
+  storageDataObjects(where: { type_json: { channelId_eq: $channelId } }) {
+    ...DataObjectInfo
+  }
+}
+
+query getDataObjectsByVideoId($videoId: ID) {
+  storageDataObjects(where: { type_json: { videoId_eq: $videoId } }) {
+    ...DataObjectInfo
+  }
+}

+ 2 - 4
cli/src/helpers/JsonSchemaPrompt.ts

@@ -129,15 +129,13 @@ export class JsonSchemaPrompter<JsonResult> {
           confirmed = await this.inquirerSinglePrompt({
             message: `Do you want to provide optional ${chalk.greenBright(objectPropertyPath)}?`,
             type: 'confirm',
-            default:
-              _.get(this.filledObject, objectPropertyPath) !== undefined &&
-              _.get(this.filledObject, objectPropertyPath) !== null,
+            default: _.get(this.filledObject, objectPropertyPath) !== undefined,
           })
         }
         if (confirmed) {
           value[pName] = await this.prompt(pSchema, objectPropertyPath)
         } else {
-          _.set(this.filledObject, objectPropertyPath, null)
+          _.set(this.filledObject, objectPropertyPath, undefined)
         }
       }
       return value

+ 5 - 5
cli/src/helpers/display.ts

@@ -3,7 +3,7 @@ import chalk from 'chalk'
 import { MemberDetails, NameValueObj } from '../Types'
 import { AccountId } from '@polkadot/types/interfaces'
 
-export function displayHeader(caption: string, placeholderSign = '_', size = 50) {
+export function displayHeader(caption: string, placeholderSign = '_', size = 50): void {
   const singsPerSide: number = Math.floor((size - (caption.length + 2)) / 2)
   let finalStr = ''
   for (let i = 0; i < singsPerSide; ++i) finalStr += placeholderSign
@@ -13,7 +13,7 @@ export function displayHeader(caption: string, placeholderSign = '_', size = 50)
   process.stdout.write('\n' + chalk.bold.blueBright(finalStr) + '\n\n')
 }
 
-export function displayNameValueTable(rows: NameValueObj[]) {
+export function displayNameValueTable(rows: NameValueObj[]): void {
   cli.table(
     rows,
     {
@@ -24,7 +24,7 @@ export function displayNameValueTable(rows: NameValueObj[]) {
   )
 }
 
-export function displayCollapsedRow(row: { [k: string]: string | number }) {
+export function displayCollapsedRow(row: { [k: string]: string | number }): void {
   const collapsedRow: NameValueObj[] = Object.keys(row).map((name) => ({
     name,
     value: typeof row[name] === 'string' ? (row[name] as string) : row[name].toString(),
@@ -33,11 +33,11 @@ export function displayCollapsedRow(row: { [k: string]: string | number }) {
   displayNameValueTable(collapsedRow)
 }
 
-export function displayCollapsedTable(rows: { [k: string]: string | number }[]) {
+export function displayCollapsedTable(rows: { [k: string]: string | number }[]): void {
   for (const row of rows) displayCollapsedRow(row)
 }
 
-export function displayTable(rows: { [k: string]: string | number }[], cellHorizontalPadding = 0) {
+export function displayTable(rows: { [k: string]: string | number }[], cellHorizontalPadding = 0): void {
   if (!rows.length) {
     return
   }

+ 2 - 2
cli/src/helpers/validation.ts

@@ -20,7 +20,7 @@ export function checkBalance(accBalances: DeriveBalancesAll, requiredBalance: BN
   }
 }
 
-// We assume balance can be bigger than JavaScript integer
+// We assume balance to be u128, which is bigger than JavaScript integer
 export function isValidBalance(balance: string): boolean {
-  return /^[1-9][0-9]{0,38}$/.test(balance)
+  return /^[1-9][0-9]{0,37}$/.test(balance)
 }

+ 0 - 22
cli/src/json-schemas/Assets.schema.json

@@ -1,22 +0,0 @@
-{
-  "$schema": "http://json-schema.org/draft-07/schema",
-  "$id": "https://joystream.org/Assets.schema.json",
-  "title": "Assets",
-  "description": "List of assets to upload/reupload",
-  "type": "array",
-  "items": {
-    "type": "object",
-    "required": ["contentId", "path"],
-    "additionalProperties": false,
-    "properties": {
-      "contentId": {
-        "type": "string",
-        "description": "Already existing ContentID"
-      },
-      "path": {
-        "type": "string",
-        "description": "Path to the content file (relative to input json file)"
-      }
-    }
-  }
-}

+ 7 - 0
cli/src/json-schemas/ContentDirectory.ts → cli/src/schemas/ContentDirectory.ts

@@ -30,6 +30,13 @@ export const ChannelInputSchema: JsonSchema<ChannelInputParameters> = {
     coverPhotoPath: { type: 'string' },
     avatarPhotoPath: { type: 'string' },
     rewardAccount: { type: ['string', 'null'] },
+    collaborators: {
+      type: ['array', 'null'],
+      items: {
+        type: 'integer',
+        min: 0,
+      },
+    },
   },
 }
 

+ 34 - 0
cli/src/schemas/json/Assets.schema.json

@@ -0,0 +1,34 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema",
+  "$id": "https://joystream.org/Assets.schema.json",
+  "title": "Assets",
+  "description": "List of assets to upload/reupload",
+  "type": "object",
+  "required": ["bagId", "assets"],
+  "properties": {
+    "bagId": {
+      "type": "string",
+      "description": "Target bag id"
+    },
+    "assets": {
+      "type": "array",
+      "description": "List of assets to upload",
+      "items": {
+        "type": "object",
+        "required": ["objectId", "path"],
+        "additionalProperties": false,
+        "properties": {
+          "objectId": {
+            "type": "string",
+            "description": "Already existing data object ID",
+            "pattern": "[0-9]+"
+          },
+          "path": {
+            "type": "string",
+            "description": "Path to the content file (relative to input json file)"
+          }
+        }
+      }
+    }
+  }
+}

+ 0 - 0
cli/src/json-schemas/WorkingGroupOpening.schema.json → cli/src/schemas/json/WorkingGroupOpening.schema.json


+ 30 - 0
cli/src/schemas/typings/Assets.schema.d.ts

@@ -0,0 +1,30 @@
+/* tslint:disable */
+/**
+ * This file was automatically generated by json-schema-to-typescript.
+ * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file,
+ * and run json-schema-to-typescript to regenerate this file.
+ */
+
+/**
+ * List of assets to upload/reupload
+ */
+export interface Assets {
+  /**
+   * Target bag id
+   */
+  bagId: string
+  /**
+   * List of assets to upload
+   */
+  assets: {
+    /**
+     * Already existing data object ID
+     */
+    objectId: string
+    /**
+     * Path to the content file (relative to input json file)
+     */
+    path: string
+  }[]
+  [k: string]: unknown
+}

+ 0 - 0
cli/src/json-schemas/typings/WorkingGroupOpening.schema.d.ts → cli/src/schemas/typings/WorkingGroupOpening.schema.d.ts


+ 41 - 0
colossus.Dockerfile

@@ -0,0 +1,41 @@
+FROM --platform=linux/x86-64 node:14 as builder
+
+WORKDIR /joystream
+COPY . /joystream
+
+RUN yarn --frozen-lockfile
+
+RUN yarn workspace @joystream/types build
+RUN yarn workspace @joystream/metadata-protobuf build
+RUN yarn workspace storage-node build
+
+# Use these volumes to persist uploaded data and to pass the keyfile.
+VOLUME ["/data", "/keystore", "/logs"]
+
+# Required variables
+ENV WS_PROVIDER_ENDPOINT_URI=ws://not-set
+ENV COLOSSUS_PORT=3333
+ENV QUERY_NODE_ENDPOINT=http://not-set/graphql
+ENV WORKER_ID=not-set
+# - set external key file using the `/keystore` volume
+ENV ACCOUNT_KEYFILE=
+ENV ACCOUNT_PWD=
+# Optional variables
+ENV SYNC_INTERVAL=1
+ENV ELASTIC_SEARCH_ENDPOINT=
+# warn, error, debug, info
+ENV ELASTIC_LOG_LEVEL=debug
+# - overrides account key file
+ENV ACCOUNT_URI=
+
+# Colossus node port
+EXPOSE ${COLOSSUS_PORT}
+
+WORKDIR /joystream/storage-node
+ENTRYPOINT yarn storage-node server --queryNodeEndpoint ${QUERY_NODE_ENDPOINT} \
+    --port ${COLOSSUS_PORT} --uploads /data  \
+    --apiUrl ${WS_PROVIDER_ENDPOINT_URI} --sync --syncInterval=${SYNC_INTERVAL} \
+    --elasticSearchEndpoint=${ELASTIC_SEARCH_ENDPOINT} \
+    --accountUri=${ACCOUNT_URI} \
+    --worker ${WORKER_ID} \
+    --logFilePath=/logs

+ 0 - 0
devops/infrastructure/.gitignore → devops/aws/.gitignore


+ 9 - 3
devops/infrastructure/README.md → devops/aws/README.md

@@ -26,10 +26,16 @@ On Mac run the command:
 Follow [the official installation guide](https://docs.ansible.com/ansible/latest/installation_guide/intro_installation.html) for your system.
 
 # How to run
-Edit the file `bash-config.cfg` and update parameters like AWS_KEY_PAIR_NAME, KEY_PATH
+Copy and edit the file `deploy-infra.sample.cfg` and update parameters like AWS_KEY_PAIR_NAME, KEY_PATH
 Run the `deploy-infra.sh` script to deploy the infrastructure
 
 ```
-cd devops/infrastructure
-./deploy-infra.sh
+cd devops/aws
+./deploy-infra.sh your-deploy-config.cfg
+```
+
+# To tear down a network
+
+```
+./destroy-infra.sh your-deploy-config.cfg
 ```

+ 0 - 0
devops/infrastructure/ansible.cfg → devops/aws/ansible.cfg


+ 45 - 0
devops/aws/build-arm64-playbook.yml

@@ -0,0 +1,45 @@
+---
+# Setup joystream code, build docker image
+
+- name: Build image and push to docker hub
+  hosts: all
+
+  tasks:
+    - name: Get code from git repo
+      include_role:
+        name: common
+        tasks_from: get-code-git
+
+    - name: Install Docker Module for Python
+      pip:
+        name: docker
+
+    - name: Log into DockerHub
+      community.docker.docker_login:
+        username: '{{ docker_username }}'
+        password: '{{ docker_password }}'
+
+    - name: Build an image and push it to a private repo
+      community.docker.docker_image:
+        build:
+          path: ./joystream
+          dockerfile: '{{ dockerfile }}'
+          platform: '{{ platform }}'
+        name: '{{ repository }}'
+        tag: '{{ tag_name }}'
+        push: yes
+        source: build
+      # Run in async fashion for max duration of 2 hours
+      async: 7200
+      poll: 0
+      register: build_result
+
+    - name: Check on build async task
+      async_status:
+        jid: '{{ build_result.ansible_job_id }}'
+      register: job_result
+      until: job_result.finished
+      # Max number of times to check for status
+      retries: 72
+      # Check for the status every 100s
+      delay: 100

+ 0 - 0
devops/infrastructure/build-code.yml → devops/aws/build-code.yml


+ 0 - 0
devops/infrastructure/chain-spec-pioneer.yml → devops/aws/chain-spec-pioneer.yml


+ 13 - 0
devops/infrastructure/infrastructure.yml → devops/aws/cloudformation/infrastructure.yml

@@ -1,3 +1,9 @@
+# Deploy infrastructure required to run a new joystream chain.
+# This is comprised of:
+#   - N validators
+#   - One RPC node
+#   - s3 bucket with a build of Pioneer
+
 AWSTemplateFormatVersion: 2010-09-09
 
 Parameters:
@@ -73,6 +79,10 @@ Resources:
           FromPort: 443
           ToPort: 443
           CidrIp: 0.0.0.0/0
+        - IpProtocol: tcp
+          FromPort: 80
+          ToPort: 80
+          CidrIp: 0.0.0.0/0
         - IpProtocol: tcp
           FromPort: 22
           ToPort: 22
@@ -112,6 +122,9 @@ Resources:
             # Update all packages
             apt-get update -y
 
+            # Prevent interactive prompts that would interrupt the installation
+            export DEBIAN_FRONTEND=noninteractive
+
             # Install the updates
             apt-get upgrade -y
 

+ 134 - 0
devops/aws/cloudformation/single-instance-docker.yml

@@ -0,0 +1,134 @@
+# Deploys an EC2 node with docker tools suitable for
+# building joystream node docker images
+
+AWSTemplateFormatVersion: 2010-09-09
+
+Parameters:
+  EC2InstanceType:
+    Type: String
+    Default: t2.xlarge
+  EC2AMI:
+    Type: String
+    Default: 'ami-09e67e426f25ce0d7'
+  KeyName:
+    Description: Name of an existing EC2 KeyPair to enable SSH access to the instance
+    Type: 'AWS::EC2::KeyPair::KeyName'
+    Default: 'joystream-key'
+    ConstraintDescription: must be the name of an existing EC2 KeyPair.
+
+Resources:
+  SecurityGroup:
+    Type: AWS::EC2::SecurityGroup
+    Properties:
+      GroupDescription: !Sub 'Internal Security group for validator nodes ${AWS::StackName}'
+      SecurityGroupIngress:
+        - IpProtocol: tcp
+          FromPort: 22
+          ToPort: 22
+          CidrIp: 0.0.0.0/0
+        - IpProtocol: tcp
+          FromPort: 443
+          ToPort: 443
+          CidrIp: 0.0.0.0/0
+        - IpProtocol: tcp
+          FromPort: 80
+          ToPort: 80
+          CidrIp: 0.0.0.0/0
+      Tags:
+        - Key: Name
+          Value: !Sub '${AWS::StackName}_validator'
+
+  InstanceLaunchTemplate:
+    Type: AWS::EC2::LaunchTemplate
+    Metadata:
+      AWS::CloudFormation::Init:
+        config:
+          packages:
+            apt:
+              wget: []
+              unzip: []
+    Properties:
+      LaunchTemplateName: !Sub 'LaunchTemplate_${AWS::StackName}'
+      LaunchTemplateData:
+        ImageId: !Ref EC2AMI
+        InstanceType: !Ref EC2InstanceType
+        KeyName: !Ref KeyName
+        SecurityGroupIds:
+          - !GetAtt SecurityGroup.GroupId
+        BlockDeviceMappings:
+          - DeviceName: /dev/sda1
+            Ebs:
+              VolumeSize: '30'
+        UserData:
+          Fn::Base64: !Sub |
+            #!/bin/bash -xe
+
+            # send script output to /tmp so we can debug boot failures
+            exec > /tmp/userdata.log 2>&1
+
+            # Update all packages
+            apt-get update -y
+
+            # Prevent interactive prompts that would interrupt the installation
+            export DEBIAN_FRONTEND=noninteractive
+
+            # Install the updates
+            apt-get upgrade -y
+
+            apt-get install -y apt-transport-https ca-certificates curl gnupg lsb-release
+
+            curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
+
+            echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
+
+            apt-get update -y
+
+            apt-get install -y docker-ce docker-ce-cli containerd.io
+
+            usermod -aG docker ubuntu
+
+            # Update docker-compose to 1.28+
+            curl -L "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
+            chmod +x /usr/local/bin/docker-compose
+            ln -s /usr/local/bin/docker-compose /usr/bin/docker-compose
+
+            # Get latest cfn scripts and install them;
+            apt-get install -y python3-setuptools
+            mkdir -p /opt/aws/bin
+            wget https://s3.amazonaws.com/cloudformation-examples/aws-cfn-bootstrap-py3-latest.tar.gz
+            python3 -m easy_install --script-dir /opt/aws/bin aws-cfn-bootstrap-py3-latest.tar.gz
+
+            apt-get install -y python3-pip
+
+            /opt/aws/bin/cfn-signal -e $? -r "Instance Created" '${WaitHandle}'
+
+  Instance:
+    Type: AWS::EC2::Instance
+    Properties:
+      LaunchTemplate:
+        LaunchTemplateId: !Ref InstanceLaunchTemplate
+        Version: !GetAtt InstanceLaunchTemplate.LatestVersionNumber
+      Tags:
+        - Key: Name
+          Value: !Sub '${AWS::StackName}_1'
+
+  WaitHandle:
+    Type: AWS::CloudFormation::WaitConditionHandle
+
+  WaitCondition:
+    Type: AWS::CloudFormation::WaitCondition
+    Properties:
+      Handle: !Ref 'WaitHandle'
+      Timeout: '600'
+      Count: 1
+
+Outputs:
+  PublicIp:
+    Description: The DNS name for the created instance
+    Value: !Sub '${Instance.PublicIp}'
+    Export:
+      Name: !Sub '${AWS::StackName}PublicIp'
+
+  InstanceId:
+    Description: The Instance ID
+    Value: !Ref Instance

Энэ ялгаанд хэт олон файл өөрчлөгдсөн тул зарим файлыг харуулаагүй болно