Browse Source

storage-node-v2: Refactor the code.

- fix review comments
Shamil Gadelshin 3 years ago
parent
commit
5cfe15b8ba
54 changed files with 595 additions and 880 deletions
  1. 11 11
      runtime-modules/storage/src/lib.rs
  2. 5 5
      runtime-modules/storage/src/tests/fixtures.rs
  3. 0 11
      storage-node-v2/.editorconfig
  4. 0 1
      storage-node-v2/.eslintrc.js
  5. 39 21
      storage-node-v2/README.md
  6. 0 1
      storage-node-v2/package.json
  7. 1 1
      storage-node-v2/scripts/create-auth-request-signature.ts
  8. 0 39
      storage-node-v2/scripts/create-several-buckets.sh
  9. 11 6
      storage-node-v2/scripts/init-dev-bucket.sh
  10. 30 21
      storage-node-v2/scripts/run-all-commands.sh
  11. 18 2
      storage-node-v2/src/api-spec/openapi.yaml
  12. 7 17
      storage-node-v2/src/command-base/ApiCommandBase.ts
  13. 1 2
      storage-node-v2/src/commands/dev/init.ts
  14. 5 1
      storage-node-v2/src/commands/dev/multihash.ts
  15. 3 4
      storage-node-v2/src/commands/dev/upload.ts
  16. 1 2
      storage-node-v2/src/commands/dev/verify-bag-id.ts
  17. 1 5
      storage-node-v2/src/commands/leader/cancel-invite.ts
  18. 7 3
      storage-node-v2/src/commands/leader/create-bucket.ts
  19. 1 6
      storage-node-v2/src/commands/leader/invite-operator.ts
  20. 1 5
      storage-node-v2/src/commands/leader/remove-operator.ts
  21. 5 12
      storage-node-v2/src/commands/leader/set-bucket-limits.ts
  22. 10 12
      storage-node-v2/src/commands/leader/set-global-uploading-status.ts
  23. 2 3
      storage-node-v2/src/commands/leader/update-bag-limit.ts
  24. 31 17
      storage-node-v2/src/commands/leader/update-bag.ts
  25. 19 10
      storage-node-v2/src/commands/leader/update-blacklist.ts
  26. 10 9
      storage-node-v2/src/commands/leader/update-bucket-status.ts
  27. 16 22
      storage-node-v2/src/commands/leader/update-dynamic-bag-policy.ts
  28. 3 8
      storage-node-v2/src/commands/leader/update-voucher-limits.ts
  29. 3 8
      storage-node-v2/src/commands/operator/accept-invitation.ts
  30. 3 9
      storage-node-v2/src/commands/operator/set-metadata.ts
  31. 3 12
      storage-node-v2/src/services/helpers/auth.ts
  32. 4 19
      storage-node-v2/src/services/helpers/bagTypes.ts
  33. 44 11
      storage-node-v2/src/services/logger.ts
  34. 2 7
      storage-node-v2/src/services/runtime/accounts.ts
  35. 65 52
      storage-node-v2/src/services/runtime/api.ts
  36. 107 184
      storage-node-v2/src/services/runtime/extrinsics.ts
  37. 29 67
      storage-node-v2/src/services/runtime/hireLead.ts
  38. 1 4
      storage-node-v2/src/services/runtime/transactionNonceKeeper.ts
  39. 21 37
      storage-node-v2/src/services/webApi/app.ts
  40. 15 55
      storage-node-v2/src/services/webApi/controllers/publicApi.ts
  41. 0 70
      storage-node-v2/test/commands/leader/create-bucket.test.ts
  42. 0 5
      storage-node-v2/test/mocha.opts
  43. 0 9
      storage-node-v2/test/tsconfig.json
  44. 1 0
      storage-node-v2/tsconfig.json
  45. 2 2
      types/augment-codec/augment-api-events.ts
  46. 2 2
      types/augment-codec/augment-api-query.ts
  47. 2 2
      types/augment-codec/augment-api-tx.ts
  48. 6 6
      types/augment/all/defs.json
  49. 6 6
      types/augment/all/types.ts
  50. 2 2
      types/augment/augment-api-events.ts
  51. 2 2
      types/augment/augment-api-query.ts
  52. 2 2
      types/augment/augment-api-tx.ts
  53. 2 1
      types/src/common.ts
  54. 33 49
      types/src/storage.ts

+ 11 - 11
runtime-modules/storage/src/lib.rs

@@ -433,8 +433,8 @@ pub type DistributionBucketsPerBagValueConstraint = BoundedValueConstraint<u64>;
 /// Local module account handler.
 pub type StorageTreasury<T> = ModuleAccountHandler<T, <T as Trait>::ModuleId>;
 
-/// IPFS hash type alias.
-pub type ContentId = Vec<u8>;
+/// IPFS hash type alias (content ID).
+pub type Cid = Vec<u8>;
 
 // Alias for the Substrate balances pallet.
 type Balances<T> = balances::Module<T>;
@@ -912,7 +912,7 @@ decl_storage! {
             T::StorageBucketId => StorageBucket<WorkerId<T>>;
 
         /// Blacklisted data object hashes.
-        pub Blacklist get (fn blacklist): map hasher(blake2_128_concat) ContentId => ();
+        pub Blacklist get (fn blacklist): map hasher(blake2_128_concat) Cid => ();
 
         /// Blacklist collection counter.
         pub CurrentBlacklistSize get (fn current_blacklist_size): u64;
@@ -1085,7 +1085,7 @@ decl_event! {
         /// Params
         /// - hashes to remove from the blacklist
         /// - hashes to add to the blacklist
-        UpdateBlacklist(BTreeSet<ContentId>, BTreeSet<ContentId>),
+        UpdateBlacklist(BTreeSet<Cid>, BTreeSet<Cid>),
 
         /// Emits on deleting a dynamic bag.
         /// Params
@@ -1602,8 +1602,8 @@ decl_module! {
         #[weight = 10_000_000] // TODO: adjust weight
         pub fn update_blacklist(
             origin,
-            remove_hashes: BTreeSet<ContentId>,
-            add_hashes: BTreeSet<ContentId>
+            remove_hashes: BTreeSet<Cid>,
+            add_hashes: BTreeSet<Cid>
         ){
             T::ensure_storage_working_group_leader_origin(origin)?;
 
@@ -3032,20 +3032,20 @@ impl<T: Trait> Module<T> {
 
     // Returns only existing hashes in the blacklist from the original collection.
     #[allow(clippy::redundant_closure)] // doesn't work with Substrate storage functions.
-    fn get_existing_hashes(hashes: &BTreeSet<ContentId>) -> BTreeSet<ContentId> {
+    fn get_existing_hashes(hashes: &BTreeSet<Cid>) -> BTreeSet<Cid> {
         Self::get_hashes_by_predicate(hashes, |cid| Blacklist::contains_key(cid))
     }
 
     // Returns only nonexisting hashes in the blacklist from the original collection.
-    fn get_nonexisting_hashes(hashes: &BTreeSet<ContentId>) -> BTreeSet<ContentId> {
+    fn get_nonexisting_hashes(hashes: &BTreeSet<Cid>) -> BTreeSet<Cid> {
         Self::get_hashes_by_predicate(hashes, |cid| !Blacklist::contains_key(cid))
     }
 
     // Returns hashes from the original collection selected by predicate.
-    fn get_hashes_by_predicate<P: FnMut(&&ContentId) -> bool>(
-        hashes: &BTreeSet<ContentId>,
+    fn get_hashes_by_predicate<P: FnMut(&&Cid) -> bool>(
+        hashes: &BTreeSet<Cid>,
         predicate: P,
-    ) -> BTreeSet<ContentId> {
+    ) -> BTreeSet<Cid> {
         hashes
             .iter()
             .filter(predicate)

+ 5 - 5
runtime-modules/storage/src/tests/fixtures.rs

@@ -14,7 +14,7 @@ use crate::tests::mocks::{
     DEFAULT_DISTRIBUTION_PROVIDER_ACCOUNT_ID, DISTRIBUTION_WG_LEADER_ACCOUNT_ID,
 };
 use crate::{
-    BagId, ContentId, DataObjectCreationParameters, DataObjectStorage, DistributionBucketFamily,
+    BagId, Cid, DataObjectCreationParameters, DataObjectStorage, DistributionBucketFamily,
     DynamicBagDeletionPrize, DynamicBagId, DynamicBagType, RawEvent, StaticBagId,
     StorageBucketOperatorStatus, UploadParameters,
 };
@@ -720,8 +720,8 @@ impl UpdateStorageBucketStatusFixture {
 
 pub struct UpdateBlacklistFixture {
     origin: RawOrigin<u64>,
-    remove_hashes: BTreeSet<ContentId>,
-    add_hashes: BTreeSet<ContentId>,
+    remove_hashes: BTreeSet<Cid>,
+    add_hashes: BTreeSet<Cid>,
 }
 
 impl UpdateBlacklistFixture {
@@ -737,11 +737,11 @@ impl UpdateBlacklistFixture {
         Self { origin, ..self }
     }
 
-    pub fn with_add_hashes(self, add_hashes: BTreeSet<ContentId>) -> Self {
+    pub fn with_add_hashes(self, add_hashes: BTreeSet<Cid>) -> Self {
         Self { add_hashes, ..self }
     }
 
-    pub fn with_remove_hashes(self, remove_hashes: BTreeSet<ContentId>) -> Self {
+    pub fn with_remove_hashes(self, remove_hashes: BTreeSet<Cid>) -> Self {
         Self {
             remove_hashes,
             ..self

+ 0 - 11
storage-node-v2/.editorconfig

@@ -1,11 +0,0 @@
-root = true
-
-[*]
-indent_style = space
-indent_size = 2
-charset = utf-8
-trim_trailing_whitespace = true
-insert_final_newline = true
-
-[*.md]
-trim_trailing_whitespace = false

+ 0 - 1
storage-node-v2/.eslintrc.js

@@ -11,6 +11,5 @@ module.exports = {
     'no-unused-vars': 'off', // Required by the typescript rule below
     '@typescript-eslint/no-unused-vars': ['error'],
     '@typescript-eslint/no-floating-promises': 'error',
-    'prettier/prettier': 'off', // prettier-eslint conflicts inherited from @joystream/eslint-config
   },
 }

+ 39 - 21
storage-node-v2/README.md

@@ -32,13 +32,14 @@ USAGE
 * [`storage-node dev:multihash`](#storage-node-devmultihash)
 * [`storage-node dev:upload`](#storage-node-devupload)
 * [`storage-node dev:verify-bag-id`](#storage-node-devverify-bag-id)
+* [`storage-node help [COMMAND]`](#storage-node-help-command)
 * [`storage-node leader:cancel-invite`](#storage-node-leadercancel-invite)
 * [`storage-node leader:create-bucket`](#storage-node-leadercreate-bucket)
 * [`storage-node leader:delete-bucket`](#storage-node-leaderdelete-bucket)
 * [`storage-node leader:invite-operator`](#storage-node-leaderinvite-operator)
 * [`storage-node leader:remove-operator`](#storage-node-leaderremove-operator)
 * [`storage-node leader:set-bucket-limits`](#storage-node-leaderset-bucket-limits)
-* [`storage-node leader:set-uploading-block`](#storage-node-leaderset-uploading-block)
+* [`storage-node leader:set-global-uploading-status`](#storage-node-leaderset-global-uploading-status)
 * [`storage-node leader:update-bag`](#storage-node-leaderupdate-bag)
 * [`storage-node leader:update-bag-limit`](#storage-node-leaderupdate-bag-limit)
 * [`storage-node leader:update-blacklist`](#storage-node-leaderupdate-blacklist)
@@ -48,7 +49,7 @@ USAGE
 * [`storage-node leader:update-voucher-limits`](#storage-node-leaderupdate-voucher-limits)
 * [`storage-node operator:accept-invitation`](#storage-node-operatoraccept-invitation)
 * [`storage-node operator:set-metadata`](#storage-node-operatorset-metadata)
-* [`storage-node server`](#storage-node-server-file)
+* [`storage-node server`](#storage-node-server)
 
 ## `storage-node dev:init`
 
@@ -144,6 +145,23 @@ OPTIONS
 
 _See code: [src/commands/dev/verify-bag-id.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/dev/verify-bag-id.ts)_
 
+## `storage-node help [COMMAND]`
+
+display help for storage-node
+
+```
+USAGE
+  $ storage-node help [COMMAND]
+
+ARGUMENTS
+  COMMAND  command to show help for
+
+OPTIONS
+  --all  see all commands in CLI
+```
+
+_See code: [@oclif/plugin-help](https://github.com/oclif/plugin-help/blob/v3.0.1/src/commands/help.ts)_
+
 ## `storage-node leader:cancel-invite`
 
 Cancel a storage bucket operator invite. Requires storage working group leader permissions.
@@ -264,25 +282,24 @@ OPTIONS
 
 _See code: [src/commands/leader/set-bucket-limits.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/set-bucket-limits.ts)_
 
-## `storage-node leader:set-uploading-block`
+## `storage-node leader:set-global-uploading-status`
 
 Set global uploading block. Requires storage working group leader permissions.
 
 ```
 USAGE
-  $ storage-node leader:set-uploading-block
+  $ storage-node leader:set-global-uploading-status
 
 OPTIONS
-  -d, --disable            Disables global uploading block.
-  -e, --enable             Enables global uploading block (default).
   -h, --help               show CLI help
   -k, --keyfile=keyfile    Key file for the account. Mandatory in non-dev environment.
   -m, --dev                Use development mode
   -p, --password=password  Key file password (optional).
+  -s, --set=(on|off)       (required) Sets global uploading block (on/off).
   -u, --apiUrl=apiUrl      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
 ```
 
-_See code: [src/commands/leader/set-uploading-block.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/set-uploading-block.ts)_
+_See code: [src/commands/leader/set-global-uploading-status.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/set-global-uploading-status.ts)_
 
 ## `storage-node leader:update-bag`
 
@@ -293,8 +310,8 @@ USAGE
   $ storage-node leader:update-bag
 
 OPTIONS
-  -b, --bucket=bucket
-      (required) Storage bucket ID
+  -a, --add=add
+      [default: ] ID of a bucket to add to bag
 
   -h, --help
       show CLI help
@@ -322,8 +339,8 @@ OPTIONS
   -p, --password=password
       Key file password (optional).
 
-  -r, --remove
-      Remove a bucket from the bag
+  -r, --remove=remove
+      [default: ] ID of a bucket to remove from bag
 
   -u, --apiUrl=apiUrl
       Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
@@ -359,12 +376,12 @@ USAGE
   $ storage-node leader:update-blacklist
 
 OPTIONS
-  -c, --cid=cid            (required) Content ID
+  -a, --add=add            [default: ] Content ID to add
   -h, --help               show CLI help
   -k, --keyfile=keyfile    Key file for the account. Mandatory in non-dev environment.
   -m, --dev                Use development mode
   -p, --password=password  Key file password (optional).
-  -r, --remove             Remove a content ID from the blaclist
+  -r, --remove=remove      [default: ] Content ID to remove
   -u, --apiUrl=apiUrl      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
 ```
 
@@ -386,6 +403,7 @@ OPTIONS
   -k, --keyfile=keyfile    Key file for the account. Mandatory in non-dev environment.
   -m, --dev                Use development mode
   -p, --password=password  Key file password (optional).
+  -s, --set=(on|off)       (required) Sets 'accepting new bags' parameter for the bucket (on/off).
   -u, --apiUrl=apiUrl      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
 ```
 
@@ -419,14 +437,13 @@ USAGE
   $ storage-node leader:update-dynamic-bag-policy
 
 OPTIONS
-  -c, --channel            Channel dynamic bag type
-  -e, --member             Member dynamic bag type (default)
-  -h, --help               show CLI help
-  -k, --keyfile=keyfile    Key file for the account. Mandatory in non-dev environment.
-  -m, --dev                Use development mode
-  -n, --number=number      (required) New storage buckets number
-  -p, --password=password  Key file password (optional).
-  -u, --apiUrl=apiUrl      Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
+  -h, --help                      show CLI help
+  -k, --keyfile=keyfile           Key file for the account. Mandatory in non-dev environment.
+  -m, --dev                       Use development mode
+  -n, --number=number             (required) New storage buckets number
+  -p, --password=password         Key file password (optional).
+  -t, --bagType=(Channel|Member)  (required) Dynamic bag type (Channel, Member).
+  -u, --apiUrl=apiUrl             Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944
 ```
 
 _See code: [src/commands/leader/update-dynamic-bag-policy.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/leader/update-dynamic-bag-policy.ts)_
@@ -512,4 +529,5 @@ OPTIONS
 ```
 
 _See code: [src/commands/server.ts](https://github.com/Joystream/joystream/blob/v0.1.0/src/commands/server.ts)_
+
 <!-- commandsstop -->

+ 0 - 1
storage-node-v2/package.json

@@ -100,7 +100,6 @@
     "postpack": "rm -f oclif.manifest.json",
     "posttest": "yarn lint",
     "prepack": "rm -rf lib && tsc -b && oclif-dev manifest && oclif-dev readme",
-    "test": "mocha --forbid-only \"test/**/*.test.ts\"",
     "version": "oclif-dev readme && git add README.md",
     "build": "tsc --build tsconfig.json",
     "format": "prettier ./src --write",

+ 1 - 1
storage-node-v2/scripts/create-auth-request-signature.ts

@@ -1,7 +1,7 @@
 #!/usr/bin/env ts-node
 
-import { cryptoWaitReady } from '@polkadot/util-crypto'
 import { getAlicePair } from '../src/services/runtime/accounts'
+import { cryptoWaitReady } from '@polkadot/util-crypto'
 import { UploadTokenRequestBody, signTokenBody, UploadTokenRequest } from '../src/services/helpers/auth'
 import { exit } from 'process'
 

+ 0 - 39
storage-node-v2/scripts/create-several-buckets.sh

@@ -1,39 +0,0 @@
-#!/usr/bin/env bash
-
-yarn storage-node dev:init
-
-curl http://localhost:3333/test &
-curl http://localhost:3333/test &
-curl http://localhost:3333/test &
-curl http://localhost:3333/test &
-curl http://localhost:3333/test &
-
-curl http://localhost:3333/test &
-curl http://localhost:3333/test &
-curl http://localhost:3333/test &
-curl http://localhost:3333/test &
-curl http://localhost:3333/test &
-
-curl http://localhost:3333/test &
-curl http://localhost:3333/test &
-curl http://localhost:3333/test &
-curl http://localhost:3333/test &
-curl http://localhost:3333/test &
-
-curl http://localhost:3333/test &
-curl http://localhost:3333/test &
-curl http://localhost:3333/test &
-curl http://localhost:3333/test &
-curl http://localhost:3333/test &
-
-curl http://localhost:3333/test &
-curl http://localhost:3333/test &
-curl http://localhost:3333/test &
-curl http://localhost:3333/test &
-curl http://localhost:3333/test &
-
-curl http://localhost:3333/test &
-curl http://localhost:3333/test &
-curl http://localhost:3333/test &
-curl http://localhost:3333/test &
-curl http://localhost:3333/test &

+ 11 - 6
storage-node-v2/scripts/init-dev-bucket.sh

@@ -4,9 +4,14 @@
 # It prepares an environment, creates a storage bucket and links it to the
 # 'static council bag'.
 
-yarn storage-node dev:init
-yarn storage-node leader:update-bag-limit -l 7 --dev
-yarn storage-node leader:update-voucher-limits -o 100 -s 10000000 --dev
-yarn storage-node leader:create-bucket -i=0 -a -n=100 -s=10000000  --dev 
-yarn storage-node operator:accept-invitation -w=0 -i=0 --dev
-yarn storage-node leader:update-bag -b=0 -i static:council --dev 
+SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")"
+cd $SCRIPT_PATH
+
+CLI=../bin/run
+
+${CLI} dev:init
+${CLI} leader:update-bag-limit -l 7 --dev
+${CLI} leader:update-voucher-limits -o 100 -s 10000000 --dev
+BUCKET_ID=`${CLI} leader:create-bucket -i=0 -a -n=100 -s=10000000  --dev` 
+${CLI} operator:accept-invitation -w=0 -i=${BUCKET_ID} --dev
+${CLI} leader:update-bag -a=${BUCKET_ID} -i static:council --dev 

+ 30 - 21
storage-node-v2/scripts/run-all-commands.sh

@@ -4,35 +4,44 @@
 # Must be run on the clean development chain.
 # It tests all leader and operator commands.
 
+
+SCRIPT_PATH="$(dirname "${BASH_SOURCE[0]}")"
+cd $SCRIPT_PATH
+
+CLI=../bin/run
+
 # Set Alice as leader
-yarn storage-node dev:init # leader workerId = 0
+${CLI}  dev:init # leader workerId = 0
 
 # Update limits and constants
-yarn storage-node leader:update-bag-limit -l 7 --dev
-yarn storage-node leader:update-voucher-limits -o 100 -s 10000000 --dev
-yarn storage-node leader:update-data-fee -f 10000 --dev
-yarn storage-node leader:update-dynamic-bag-policy -n 10 -m
+${CLI} leader:update-bag-limit -l 7 --dev
+${CLI} leader:update-voucher-limits -o 100 -s 10000000 --dev
+${CLI} leader:update-data-fee -f 10000 --dev
+${CLI} leader:update-dynamic-bag-policy -n 10 -t Member --dev
 
 # Create and configure a bucket.
-yarn storage-node leader:create-bucket -i=0 --dev # bucketId = 0
-yarn storage-node operator:accept-invitation -w=0 -i=0 --dev
-yarn storage-node leader:set-bucket-limits -i=0 -o=100 -s=10000000 --dev
-yarn storage-node leader:update-bucket-status -i=0 --enable --dev
-yarn storage-node leader:update-bag -b=0 -i static:council --dev 
-yarn storage-node operator:set-metadata -w=0 -i=0 -m=http://google.com --dev
+BUCKET_ID=`${CLI} leader:create-bucket -i=0 --dev` # bucketId = 0
+${CLI} operator:accept-invitation -w=0 -i=${BUCKET_ID} --dev
+${CLI} leader:set-bucket-limits -i=${BUCKET_ID} -o=100 -s=10000000 --dev
+${CLI} leader:update-bucket-status -i=${BUCKET_ID} --set on --dev
+${CLI} leader:update-bag -a=${BUCKET_ID} -i static:council --dev 
+${CLI} operator:set-metadata -w=0 -i=${BUCKET_ID} -m=http://google.com --dev
 
 # Create and delete a bucket
-yarn storage-node leader:create-bucket -a -n=100 -s=10000000  --dev # bucketId = 1
-yarn storage-node leader:delete-bucket -i=1  --dev 
+BUCKET_ID=`${CLI} leader:create-bucket -a -n=100 -s=10000000  --dev` # bucketId = 1
+${CLI} leader:delete-bucket -i=${BUCKET_ID}  --dev 
 
 # Create a bucket, invite operator, cancel invite, accept invitation and remove operator.
-yarn storage-node leader:create-bucket -a -n=100 -s=10000000  --dev # bucketId = 2
-yarn storage-node leader:invite-operator -i=2 -w=0  --dev 
-yarn storage-node leader:cancel-invite -i=2   --dev 
-yarn storage-node leader:invite-operator -i=2 -w=0  --dev 
-yarn storage-node operator:accept-invitation -w=0 -i=2 --dev
-yarn storage-node leader:remove-operator -i=2   --dev 
+BUCKET_ID=`${CLI} leader:create-bucket -a -n=100 -s=10000000  --dev` # bucketId = 2
+${CLI} leader:invite-operator -i=${BUCKET_ID} -w=0  --dev 
+${CLI} leader:cancel-invite -i=${BUCKET_ID} --dev 
+${CLI} leader:invite-operator -i=${BUCKET_ID} -w=0  --dev 
+${CLI} operator:accept-invitation -i=${BUCKET_ID} -w=0 --dev
+${CLI} leader:remove-operator -i=${BUCKET_ID}   --dev 
 
 # Toggle uploading block.
-yarn storage-node leader:set-uploading-block --enable --dev 
-yarn storage-node leader:set-uploading-block --disable --dev 
+${CLI} leader:set-global-uploading-status --set on --dev 
+${CLI} leader:set-global-uploading-status --set off --dev 
+
+# Blacklist.
+${CLI} leader:update-blacklist -a BLACKLISTED_CID -r SOME_CID --dev

+ 18 - 2
storage-node-v2/src/api-spec/openapi.yaml

@@ -5,8 +5,8 @@ info:
   contact:
     email: info@joystream.org
   license:
-    name: MIT
-    url: https://opensource.org/licenses/MIT
+    name: GPL-3.0-only
+    url: https://opensource.org/licenses/GPL-3.0
   version: 0.1.0
 externalDocs:
   description: Storage node API
@@ -36,6 +36,18 @@ paths:
         200:
           description: Ok
           content:
+            video/*:
+              schema:
+                type: string
+                format: binary
+            audio/*:
+              schema:
+                type: string
+                format: binary
+            image/*:
+              schema:
+                type: string
+                format: binary
             application/octet-stream:
               schema:
                 type: string
@@ -52,6 +64,8 @@ paths:
             application/json:
               schema:
                 $ref: '#/components/schemas/ErrorResponse'
+        500:
+          description: Unknown server error
     head:
       operationId: publicApi.getFileHeaders
       description: Returns a media file headers.
@@ -71,6 +85,8 @@ paths:
           description: File not found
         410:
           description: Header request problem
+        500:
+          description: Unknown server error
   /files:
     post:
       security:

+ 7 - 17
storage-node-v2/src/command-base/ApiCommandBase.ts

@@ -1,9 +1,6 @@
 import { Command, flags } from '@oclif/command'
 import { createApi } from '../services/runtime/api'
-import {
-  getAccountFromJsonFile,
-  getAlicePair,
-} from '../services/runtime/accounts'
+import { getAccountFromJsonFile, getAlicePair } from '../services/runtime/accounts'
 import { KeyringPair } from '@polkadot/keyring/types'
 import { ApiPromise } from '@polkadot/api'
 import logger from '../services/logger'
@@ -22,13 +19,11 @@ export default abstract class ApiCommandBase extends Command {
     dev: flags.boolean({ char: 'm', description: 'Use development mode' }),
     apiUrl: flags.string({
       char: 'u',
-      description:
-        'Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944',
+      description: 'Runtime API URL. Mandatory in non-dev environment. Default is ws://localhost:9944',
     }),
     keyfile: flags.string({
       char: 'k',
-      description:
-        'Key file for the account. Mandatory in non-dev environment.',
+      description: 'Key file for the account. Mandatory in non-dev environment.',
     }),
     password: flags.string({
       char: 'p',
@@ -87,10 +82,9 @@ export default abstract class ApiCommandBase extends Command {
     const chainType = await api.rpc.system.chainType()
 
     if (!chainType.isDevelopment && !chainType.isLocal) {
-      throw new CLIError(
-        'This command should only be run on a Development chain.',
-        { exit: ExitCodes.DevelopmentModeOnly }
-      )
+      throw new CLIError('This command should only be run on a Development chain.', {
+        exit: ExitCodes.DevelopmentModeOnly,
+      })
     }
 
     logger.info('Development mode is ON.')
@@ -105,11 +99,7 @@ export default abstract class ApiCommandBase extends Command {
    * @param password - password for the key file (optional).
    * @returns KeyringPair instance.
    */
-  getAccount(flags: {
-    dev?: boolean
-    keyfile?: string
-    password?: string
-  }): KeyringPair {
+  getAccount(flags: { dev?: boolean; keyfile?: string; password?: string }): KeyringPair {
     const keyfile = flags.keyfile ?? ''
     const password = flags.password
 

+ 1 - 2
storage-node-v2/src/commands/dev/init.ts

@@ -10,8 +10,7 @@ import ApiCommandBase from '../../command-base/ApiCommandBase'
  * Shell command: "dev:init"
  */
 export default class DevInit extends ApiCommandBase {
-  static description =
-    'Initialize development environment. Sets Alice as storage working group leader.'
+  static description = 'Initialize development environment. Sets Alice as storage working group leader.'
 
   async run(): Promise<void> {
     await this.ensureDevelopmentChain()

+ 5 - 1
storage-node-v2/src/commands/dev/multihash.ts

@@ -1,6 +1,6 @@
 import { Command, flags } from '@oclif/command'
 import { hashFile } from '../../services/helpers/hashing'
-import logger from '../../services/logger'
+import logger, { createStdConsoleLogger } from '../../services/logger'
 
 /**
  * CLI command:
@@ -31,5 +31,9 @@ export default class DevMultihash extends Command {
     const multi = await hashFile(flags.file)
 
     logger.info(`Hash: ${multi}`)
+
+    const stdConsoleLogger = createStdConsoleLogger()
+
+    stdConsoleLogger.info(multi)
   }
 }

+ 3 - 4
storage-node-v2/src/commands/dev/upload.ts

@@ -34,16 +34,15 @@ export default class DevUpload extends ApiCommandBase {
 
     await this.ensureDevelopmentChain()
 
-    const objectSize = flags.size ?? 0
+    const objectSize = flags.size
     const objectCid = flags.cid
 
     logger.info('Uploading data objects...')
 
     const api = await this.getApi()
 
-    // Must be number.
-    const dataFee =
-      (await api.query.storage.dataObjectPerMegabyteFee()) as unknown as BN
+    // Must be a number.
+    const dataFee = (await api.query.storage.dataObjectPerMegabyteFee()) as BN
 
     logger.info(`Current data fee: ${dataFee}`)
 

+ 1 - 2
storage-node-v2/src/commands/dev/verify-bag-id.ts

@@ -12,8 +12,7 @@ import logger from '../../services/logger'
  * Shell command: "dev:verify-bag-id"
  */
 export default class DevVerifyBagId extends ApiCommandBase {
-  static description =
-    'The command verifies bag id supported by the storage node. Requires chain connection.'
+  static description = 'The command verifies bag id supported by the storage node. Requires chain connection.'
 
   static flags = {
     bagId: flags.string({

+ 1 - 5
storage-node-v2/src/commands/leader/cancel-invite.ts

@@ -36,11 +36,7 @@ export default class LeaderCancelInvite extends ApiCommandBase {
     const account = this.getAccount(flags)
     const api = await this.getApi()
 
-    const success = await cancelStorageBucketOperatorInvite(
-      api,
-      account,
-      storageBucketId
-    )
+    const success = await cancelStorageBucketOperatorInvite(api, account, storageBucketId)
 
     this.exitAfterRuntimeCall(success)
   }

+ 7 - 3
storage-node-v2/src/commands/leader/create-bucket.ts

@@ -1,7 +1,7 @@
 import { createStorageBucket } from '../../services/runtime/extrinsics'
 import { flags } from '@oclif/command'
 import ApiCommandBase from '../../command-base/ApiCommandBase'
-import logger from '../../services/logger'
+import logger, { createStdConsoleLogger } from '../../services/logger'
 
 /**
  * CLI command:
@@ -36,7 +36,7 @@ export default class LeaderCreateBucket extends ApiCommandBase {
 
     const objectSize = flags.size ?? 0
     const objectNumber = flags.number ?? 0
-    const allowNewBags = flags.allow ?? false
+    const allowNewBags = flags.allow
     const invitedWorker = flags.invited
 
     logger.info('Creating storage bucket...')
@@ -47,7 +47,7 @@ export default class LeaderCreateBucket extends ApiCommandBase {
     const account = this.getAccount(flags)
     const api = await this.getApi()
 
-    const success = await createStorageBucket(
+    const [success, bucketId] = await createStorageBucket(
       api,
       account,
       invitedWorker,
@@ -55,7 +55,11 @@ export default class LeaderCreateBucket extends ApiCommandBase {
       objectSize,
       objectNumber
     )
+    if (success) {
+      const stdConsoleLogger = createStdConsoleLogger()
 
+      stdConsoleLogger.info(bucketId)
+    }
     this.exitAfterRuntimeCall(success)
   }
 }

+ 1 - 6
storage-node-v2/src/commands/leader/invite-operator.ts

@@ -42,12 +42,7 @@ export default class LeaderInviteOperator extends ApiCommandBase {
     const account = this.getAccount(flags)
     const api = await this.getApi()
 
-    const success = await inviteStorageBucketOperator(
-      api,
-      account,
-      storageBucketId,
-      operatorId
-    )
+    const success = await inviteStorageBucketOperator(api, account, storageBucketId, operatorId)
 
     this.exitAfterRuntimeCall(success)
   }

+ 1 - 5
storage-node-v2/src/commands/leader/remove-operator.ts

@@ -36,11 +36,7 @@ export default class LeaderRemoveOperator extends ApiCommandBase {
     const account = this.getAccount(flags)
     const api = await this.getApi()
 
-    const success = await removeStorageBucketOperator(
-      api,
-      account,
-      storageBucketId
-    )
+    const success = await removeStorageBucketOperator(api, account, storageBucketId)
 
     this.exitAfterRuntimeCall(success)
   }

+ 5 - 12
storage-node-v2/src/commands/leader/set-bucket-limits.ts

@@ -12,8 +12,7 @@ import logger from '../../services/logger'
  * Shell command: "leader:set-bucket-limits"
  */
 export default class LeaderSetBucketLimits extends ApiCommandBase {
-  static description =
-    'Set VoucherObjectsSizeLimit and VoucherObjectsNumberLimit for the storage bucket.'
+  static description = 'Set VoucherObjectsSizeLimit and VoucherObjectsNumberLimit for the storage bucket.'
 
   static flags = {
     bucketId: flags.integer({
@@ -43,18 +42,12 @@ export default class LeaderSetBucketLimits extends ApiCommandBase {
     }
 
     const account = this.getAccount(flags)
-    const bucket = flags.bucketId ?? 0
-    const objectsLimit = flags.objects ?? 0
-    const sizeLimit = flags.size ?? 0
+    const bucket = flags.bucketId
+    const objectsLimit = flags.objects
+    const sizeLimit = flags.size
 
     const api = await this.getApi()
-    const success = await setStorageBucketVoucherLimits(
-      api,
-      account,
-      bucket,
-      sizeLimit,
-      objectsLimit
-    )
+    const success = await setStorageBucketVoucherLimits(api, account, bucket, sizeLimit, objectsLimit)
 
     this.exitAfterRuntimeCall(success)
   }

+ 10 - 12
storage-node-v2/src/commands/leader/set-uploading-block.ts → storage-node-v2/src/commands/leader/set-global-uploading-status.ts

@@ -9,28 +9,26 @@ import logger from '../../services/logger'
  *
  * @remarks
 * Storage working group leader command. Requires storage WG leader privileges.
- * Shell command: "leader:set-uploading-block"
+ * Shell command: "leader:set-global-uploading-status"
  */
-export default class LeaderSetUploadingBlock extends ApiCommandBase {
+export default class LeaderSetGlobalUploadingStatus extends ApiCommandBase {
   static description = `Set global uploading block. Requires storage working group leader permissions.`
 
   static flags = {
-    enable: flags.boolean({
-      char: 'e',
-      description: 'Enables global uploading block (default).',
-    }),
-    disable: flags.boolean({
-      char: 'd',
-      description: 'Disables global uploading block.',
+    set: flags.enum({
+      char: 's',
+      description: 'Sets global uploading block (on/off).',
+      options: ['on', 'off'],
+      required: true,
     }),
     ...ApiCommandBase.flags,
   }
 
   async run(): Promise<void> {
-    const { flags } = this.parse(LeaderSetUploadingBlock)
+    const { flags } = this.parse(LeaderSetGlobalUploadingStatus)
 
-    const disable = flags.disable
-    const newStatus = !disable
+    // Enable blocking?
+    const newStatus = flags.set === 'on'
 
     logger.info('Setting global uploading block...')
     if (flags.dev) {

+ 2 - 3
storage-node-v2/src/commands/leader/update-bag-limit.ts

@@ -12,8 +12,7 @@ import logger from '../../services/logger'
  * Shell command: "leader:update-bag-limit"
  */
 export default class LeaderUpdateBagLimit extends ApiCommandBase {
-  static description =
-    'Update StorageBucketsPerBagLimit variable in the Joystream node storage.'
+  static description = 'Update StorageBucketsPerBagLimit variable in the Joystream node storage.'
 
   static flags = {
     limit: flags.integer({
@@ -33,7 +32,7 @@ export default class LeaderUpdateBagLimit extends ApiCommandBase {
     }
 
     const account = this.getAccount(flags)
-    const limit = flags.limit ?? 0
+    const limit = flags.limit
 
     const api = await this.getApi()
     const success = await updateStorageBucketsPerBagLimit(api, account, limit)

+ 31 - 17
storage-node-v2/src/commands/leader/update-bag.ts

@@ -3,6 +3,23 @@ import { updateStorageBucketsForBag } from '../../services/runtime/extrinsics'
 import ApiCommandBase from '../../command-base/ApiCommandBase'
 import { parseBagId } from '../../services/helpers/bagTypes'
 import logger from '../../services/logger'
+import ExitCodes from '../../command-base/ExitCodes'
+import _ from 'lodash'
+
+// Custom 'integer array' oclif flag.
+const integerArrFlags = {
+  integerArr: flags.build({
+    parse: (value: string) => {
+      const arr: number[] = value.split(',').map((v) => {
+        if (!/^-?\d+$/.test(v)) {
+          throw new Error(`Expected comma-separated integers, but received: ${value}`)
+        }
+        return parseInt(v)
+      })
+      return arr
+    },
+  }),
+}
 
 /**
  * CLI command:
@@ -13,18 +30,18 @@ import logger from '../../services/logger'
  * Shell command: "leader:update-bag"
  */
 export default class LeaderUpdateBag extends ApiCommandBase {
-  static description =
-    'Add/remove a storage bucket from a bag (adds by default).'
+  static description = 'Add/remove a storage bucket from a bag (adds by default).'
 
   static flags = {
-    bucket: flags.integer({
-      char: 'b',
-      required: true,
-      description: 'Storage bucket ID',
+    add: integerArrFlags.integerArr({
+      char: 'a',
+      description: 'ID of a bucket to add to bag',
+      default: [],
     }),
-    remove: flags.boolean({
+    remove: integerArrFlags.integerArr({
       char: 'r',
-      description: 'Remove a bucket from the bag',
+      description: 'ID of a bucket to remove from bag',
+      default: [],
     }),
     bagId: flags.string({
       char: 'i',
@@ -49,24 +66,21 @@ export default class LeaderUpdateBag extends ApiCommandBase {
   async run(): Promise<void> {
     const { flags } = this.parse(LeaderUpdateBag)
 
-    const bucket = flags.bucket ?? 0
-
     logger.info('Updating the bag...')
     if (flags.dev) {
       await this.ensureDevelopmentChain()
     }
 
+    if (_.isEmpty(flags.add) && _.isEmpty(flags.remove)) {
+      logger.error('No bucket ID provided.')
+      this.exit(ExitCodes.InvalidParameters)
+    }
+
     const account = this.getAccount(flags)
     const api = await this.getApi()
     const bagId = parseBagId(api, flags.bagId)
 
-    const success = await updateStorageBucketsForBag(
-      api,
-      bagId,
-      account,
-      bucket,
-      flags.remove
-    )
+    const success = await updateStorageBucketsForBag(api, bagId, account, flags.add, flags.remove)
 
     this.exitAfterRuntimeCall(success)
   }

+ 19 - 10
storage-node-v2/src/commands/leader/update-blacklist.ts

@@ -1,7 +1,9 @@
 import { flags } from '@oclif/command'
 import { updateBlacklist } from '../../services/runtime/extrinsics'
 import ApiCommandBase from '../../command-base/ApiCommandBase'
+import ExitCodes from '../../command-base/ExitCodes'
 import logger from '../../services/logger'
+import _ from 'lodash'
 
 /**
  * CLI command:
@@ -13,18 +15,20 @@ import logger from '../../services/logger'
  * Shell command: "leader:update-blacklist"
  */
 export default class LeaderUpdateBlacklist extends ApiCommandBase {
-  static description =
-    'Add/remove a content ID from the blacklist (adds by default).'
+  static description = 'Add/remove a content ID from the blacklist (adds by default).'
 
   static flags = {
-    cid: flags.string({
-      char: 'c',
-      required: true,
-      description: 'Content ID',
+    add: flags.string({
+      char: 'a',
+      multiple: true,
+      description: 'Content ID to add',
+      default: [],
     }),
-    remove: flags.boolean({
+    remove: flags.string({
       char: 'r',
-      description: 'Remove a content ID from the blaclist',
+      description: 'Content ID to remove',
+      multiple: true,
+      default: [],
     }),
     ...ApiCommandBase.flags,
   }
@@ -32,15 +36,20 @@ export default class LeaderUpdateBlacklist extends ApiCommandBase {
   async run(): Promise<void> {
     const { flags } = this.parse(LeaderUpdateBlacklist)
 
-    logger.info('Updating blacklist...')
+    logger.info('Updating the blacklist...')
     if (flags.dev) {
       await this.ensureDevelopmentChain()
     }
 
+    if (_.isEmpty(flags.add) && _.isEmpty(flags.remove)) {
+      logger.error('No Content ID provided.')
+      this.exit(ExitCodes.InvalidParameters)
+    }
+
     const account = this.getAccount(flags)
     const api = await this.getApi()
 
-    const success = await updateBlacklist(api, account, flags.cid, flags.remove)
+    const success = await updateBlacklist(api, account, flags.add, flags.remove)
 
     this.exitAfterRuntimeCall(success)
   }

+ 10 - 9
storage-node-v2/src/commands/leader/update-bucket-status.ts

@@ -28,15 +28,21 @@ export default class LeaderUpdateStorageBucketStatus extends ApiCommandBase {
       char: 'd',
       description: 'Disables accepting new bags.',
     }),
+    set: flags.enum({
+      char: 's',
+      description: `Sets 'accepting new bags' parameter for the bucket (on/off).`,
+      options: ['on', 'off'],
+      required: true,
+    }),
     ...ApiCommandBase.flags,
   }
 
   async run(): Promise<void> {
     const { flags } = this.parse(LeaderUpdateStorageBucketStatus)
 
-    const bucket = flags.bucketId ?? 0
-    const disable = flags.disable
-    const newStatus = !disable
+    const bucket = flags.bucketId
+    // Accept new bags?
+    const newStatus = flags.set === 'on'
 
     logger.info('Updating the storage bucket status...')
     if (flags.dev) {
@@ -46,12 +52,7 @@ export default class LeaderUpdateStorageBucketStatus extends ApiCommandBase {
     const account = this.getAccount(flags)
 
     const api = await this.getApi()
-    const success = await updateStorageBucketStatus(
-      api,
-      account,
-      bucket,
-      newStatus
-    )
+    const success = await updateStorageBucketStatus(api, account, bucket, newStatus)
 
     this.exitAfterRuntimeCall(success)
   }

+ 16 - 22
storage-node-v2/src/commands/leader/update-dynamic-bag-policy.ts

@@ -14,8 +14,7 @@ import { parseDynamicBagType } from '../../services/helpers/bagTypes'
  * Shell command: "leader:update-dynamic-bag-policy"
  */
 export default class LeaderUpdateDynamicBagPolicy extends ApiCommandBase {
-  static description =
-    'Update number of storage buckets used in the dynamic bag creation policy.'
+  static description = 'Update number of storage buckets used in the dynamic bag creation policy.'
 
   static flags = {
     number: flags.integer({
@@ -23,13 +22,11 @@ export default class LeaderUpdateDynamicBagPolicy extends ApiCommandBase {
       required: true,
       description: 'New storage buckets number',
     }),
-    member: flags.boolean({
-      char: 'e',
-      description: 'Member dynamic bag type (default)',
-    }),
-    channel: flags.boolean({
-      char: 'c',
-      description: 'Channel dynamic bag type',
+    bagType: flags.enum({
+      char: 't',
+      description: 'Dynamic bag type (Channel, Member).',
+      options: ['Channel', 'Member'],
+      required: true,
     }),
     ...ApiCommandBase.flags,
   }
@@ -37,28 +34,25 @@ export default class LeaderUpdateDynamicBagPolicy extends ApiCommandBase {
   async run(): Promise<void> {
     const { flags } = this.parse(LeaderUpdateDynamicBagPolicy)
 
-    logger.info('Update "Storage buckets per bag" number limit....')
+    logger.info('Update dynamic bag creation policy....')
     if (flags.dev) {
       await this.ensureDevelopmentChain()
     }
 
     const account = this.getAccount(flags)
-    const newNumber = flags.number ?? 0
+    const newNumber = flags.number
 
-    let dynamicBagTypeString: 'Member' | 'Channel' = 'Member' // Default
-    if (flags.channel) {
-      dynamicBagTypeString = 'Channel'
-    }
+    // Verified by enum argument parser.
+    const dynamicBagTypeString = flags.bagType as 'Member' | 'Channel'
 
     const api = await this.getApi()
     const dynamicBagType = parseDynamicBagType(api, dynamicBagTypeString)
-    const success =
-      await updateNumberOfStorageBucketsInDynamicBagCreationPolicy(
-        api,
-        account,
-        dynamicBagType,
-        newNumber
-      )
+    const success = await updateNumberOfStorageBucketsInDynamicBagCreationPolicy(
+      api,
+      account,
+      dynamicBagType,
+      newNumber
+    )
 
     this.exitAfterRuntimeCall(success)
   }

+ 3 - 8
storage-node-v2/src/commands/leader/update-voucher-limits.ts

@@ -38,16 +38,11 @@ export default class LeaderUpdateVoucherLimits extends ApiCommandBase {
     }
 
     const account = this.getAccount(flags)
-    const objectsLimit = flags.objects ?? 0
-    const sizeLimit = flags.size ?? 0
+    const objectsLimit = flags.objects
+    const sizeLimit = flags.size
 
     const api = await this.getApi()
-    const success = await updateStorageBucketsVoucherMaxLimits(
-      api,
-      account,
-      sizeLimit,
-      objectsLimit
-    )
+    const success = await updateStorageBucketsVoucherMaxLimits(api, account, sizeLimit, objectsLimit)
 
     this.exitAfterRuntimeCall(success)
   }

+ 3 - 8
storage-node-v2/src/commands/operator/accept-invitation.ts

@@ -32,8 +32,8 @@ export default class OperatorAcceptInvitation extends ApiCommandBase {
   async run(): Promise<void> {
     const { flags } = this.parse(OperatorAcceptInvitation)
 
-    const worker = flags.workerId ?? 0
-    const bucket = flags.bucketId ?? 0
+    const worker = flags.workerId
+    const bucket = flags.bucketId
 
     logger.info('Accepting pending storage bucket invitation...')
     if (flags.dev) {
@@ -43,12 +43,7 @@ export default class OperatorAcceptInvitation extends ApiCommandBase {
     const account = this.getAccount(flags)
 
     const api = await this.getApi()
-    const success = await acceptStorageBucketInvitation(
-      api,
-      account,
-      worker,
-      bucket
-    )
+    const success = await acceptStorageBucketInvitation(api, account, worker, bucket)
 
     this.exitAfterRuntimeCall(success)
   }

+ 3 - 9
storage-node-v2/src/commands/operator/set-metadata.ts

@@ -36,8 +36,8 @@ export default class OperatorSetMetadata extends ApiCommandBase {
   async run(): Promise<void> {
     const { flags } = this.parse(OperatorSetMetadata)
 
-    const operator = flags.operatorId ?? 0
-    const bucket = flags.bucketId ?? 0
+    const operator = flags.operatorId
+    const bucket = flags.bucketId
     const metadata = flags.metadata ?? ''
 
     logger.info('Setting the storage operator metadata...')
@@ -48,13 +48,7 @@ export default class OperatorSetMetadata extends ApiCommandBase {
     const account = this.getAccount(flags)
 
     const api = await this.getApi()
-    const success = await setStorageOperatorMetadata(
-      api,
-      account,
-      operator,
-      bucket,
-      metadata
-    )
+    const success = await setStorageOperatorMetadata(api, account, operator, bucket, metadata)
 
     this.exitAfterRuntimeCall(success)
   }

+ 3 - 12
storage-node-v2/src/services/helpers/auth.ts

@@ -96,10 +96,7 @@ export function parseUploadToken(tokenString: string): UploadToken {
  * @param address - public key(account ID)
  * @returns The UploadToken instance.
  */
-export function verifyTokenSignature(
-  token: UploadToken | UploadTokenRequest,
-  address: string
-): boolean {
+export function verifyTokenSignature(token: UploadToken | UploadTokenRequest, address: string): boolean {
   const message = JSON.stringify(token.data)
   const { isValid } = signatureVerify(message, token.signature, address)
 
@@ -113,10 +110,7 @@ export function verifyTokenSignature(
  * @param account - KeyringPair instance
  * @returns object signature.
  */
-export function signTokenBody(
-  tokenBody: UploadTokenBody | UploadTokenRequestBody,
-  account: KeyringPair
-): string {
+export function signTokenBody(tokenBody: UploadTokenBody | UploadTokenRequestBody, account: KeyringPair): string {
   const message = JSON.stringify(tokenBody)
   const signature = u8aToHex(account.sign(message))
 
@@ -130,10 +124,7 @@ export function signTokenBody(
  * @param account - KeyringPair instance
  * @returns object signature.
  */
-export function createUploadToken(
-  tokenBody: UploadTokenBody,
-  account: KeyringPair
-): string {
+export function createUploadToken(tokenBody: UploadTokenBody, account: KeyringPair): string {
   const signature = signTokenBody(tokenBody, account)
 
   const token = {

+ 4 - 19
storage-node-v2/src/services/helpers/bagTypes.ts

@@ -1,10 +1,4 @@
-import {
-  BagId,
-  DynamicBagType,
-  DynamicBagTypeKey,
-  Static,
-  Dynamic,
-} from '@joystream/types/storage'
+import { BagId, DynamicBagType, DynamicBagTypeKey, Static, Dynamic } from '@joystream/types/storage'
 import { WorkingGroup } from '@joystream/types/common'
 import { ApiPromise } from '@polkadot/api'
 import ExitCodes from '../../command-base/ExitCodes'
@@ -20,10 +14,7 @@ import { CLIError } from '@oclif/errors'
  * @param bagType - dynamic bag type string
  * @returns The DynamicBagType instance.
  */
-export function parseDynamicBagType(
-  api: ApiPromise,
-  bagType: DynamicBagTypeKey
-): DynamicBagType {
+export function parseDynamicBagType(api: ApiPromise, bagType: DynamicBagTypeKey): DynamicBagType {
   return api.createType('DynamicBagType', bagType)
 }
 
@@ -107,10 +98,7 @@ class BagIdParser {
 
         for (const group of groups) {
           if (group.toLowerCase() === actualGroup) {
-            const workingGroup: WorkingGroup = this.api.createType(
-              'WorkingGroup',
-              group
-            )
+            const workingGroup: WorkingGroup = this.api.createType('WorkingGroup', group)
             const staticBagId: Static = this.api.createType('Static', {
               'WorkingGroup': workingGroup,
             })
@@ -148,10 +136,7 @@ class BagIdParser {
             const dynamic = {} as Record<DynamicBagTypeKey, number>
             dynamic[dynamicBagType as DynamicBagTypeKey] = parsedId
 
-            const dynamicBagId: Dynamic = this.api.createType(
-              'Dynamic',
-              dynamic
-            )
+            const dynamicBagId: Dynamic = this.api.createType('Dynamic', dynamic)
             const constructedBagId: BagId = this.api.createType('BagId', {
               'Dynamic': dynamicBagId,
             })

+ 44 - 11
storage-node-v2/src/services/logger.ts

@@ -1,9 +1,9 @@
 import winston from 'winston'
 import expressWinston from 'express-winston'
-import { Handler } from 'express'
+import { Handler, ErrorRequestHandler } from 'express'
 
 /**
- * Creates basic Winston logger.
+ * Creates basic Winston logger. Console output redirected to the stderr.
  *
  * @returns Winston logger
  *
@@ -36,12 +36,11 @@ function createDefaultLogger(): winston.Logger {
   const format = winston.format.combine(
     winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss:ms' }),
     winston.format.colorize({ all: true }),
-    winston.format.printf(
-      (info) => `${info.timestamp} ${info.level}: ${info.message}`
-    )
+    winston.format.printf((info) => `${info.timestamp} ${info.level}: ${info.message}`)
   )
 
-  const transports = [new winston.transports.Console()]
+  // Redirect all logs to the stderr
+  const transports = [new winston.transports.Console({ stderrLevels: Object.keys(levels) })]
 
   return winston.createLogger({
     level: level(),
@@ -54,7 +53,6 @@ function createDefaultLogger(): winston.Logger {
 const Logger = createDefaultLogger()
 
 export default Logger
-
 /**
  * Creates Express-Winston logger handler.
  *
@@ -64,10 +62,7 @@ export default Logger
 export function httpLogger(): Handler {
   const opts: expressWinston.LoggerOptions = {
     transports: [new winston.transports.Console()],
-    format: winston.format.combine(
-      winston.format.colorize(),
-      winston.format.json()
-    ),
+    format: winston.format.combine(winston.format.json()),
     meta: true,
     msg: 'HTTP {{req.method}} {{req.url}}',
     expressFormat: true,
@@ -76,3 +71,41 @@ export function httpLogger(): Handler {
 
   return expressWinston.logger(opts)
 }
+
+/**
+ * Creates Express-Winston error logger.
+ *
+ * @returns  Express-Winston error logger
+ *
+ */
+export function errorLogger(): ErrorRequestHandler {
+  return expressWinston.errorLogger({
+    transports: [new winston.transports.Console()],
+    format: winston.format.combine(winston.format.json()),
+  })
+}
+
+/**
+ * Creates clean Console Winston logger for standard output.
+ *
+ * @returns Winston logger
+ *
+ */
+export function createStdConsoleLogger(): winston.Logger {
+  const levels = {
+    error: 0,
+    warn: 1,
+    info: 2,
+    http: 3,
+    debug: 4,
+  }
+  const format = winston.format.printf((info) => `${info.message}`)
+
+  const transports = [new winston.transports.Console()]
+
+  return winston.createLogger({
+    levels,
+    format,
+    transports,
+  })
+}

+ 2 - 7
storage-node-v2/src/services/runtime/accounts.ts

@@ -11,9 +11,7 @@ import ExitCodes from '../../command-base/ExitCodes'
  * @param jsonBackupFilePath - JSON-file path
  * @returns KeyPair instance.
  */
-export function getAccountFromJsonFile(
-  jsonBackupFilePath: string
-): KeyringPair {
+export function getAccountFromJsonFile(jsonBackupFilePath: string): KeyringPair {
   if (!fs.existsSync(jsonBackupFilePath)) {
     throw new CLIError('Input file does not exist!', {
       exit: ExitCodes.FileError,
@@ -29,10 +27,7 @@ export function getAccountFromJsonFile(
     const accountJson = fs.readFileSync(jsonBackupFilePath)
     accountJsonObj = JSON.parse(accountJson.toString())
   } catch (e) {
-    throw new CLIError(
-      'Provided backup file is not valid or cannot be accessed',
-      { exit: ExitCodes.FileError }
-    )
+    throw new CLIError('Provided backup file is not valid or cannot be accessed', { exit: ExitCodes.FileError })
   }
   if (typeof accountJsonObj !== 'object' || accountJsonObj === null) {
     throw new CLIError('Provided backup file is not valid', {

+ 65 - 52
storage-node-v2/src/services/runtime/api.ts

@@ -1,14 +1,11 @@
-import { ApiPromise, WsProvider } from '@polkadot/api'
+import { ApiPromise, WsProvider, SubmittableResult } from '@polkadot/api'
 import type { Index } from '@polkadot/types/interfaces/runtime'
-import { CodecArg, ISubmittableResult } from '@polkadot/types/types'
+import { ISubmittableResult, IEvent } from '@polkadot/types/types'
 import { types } from '@joystream/types/'
 import { TypeRegistry } from '@polkadot/types'
 import { KeyringPair } from '@polkadot/keyring/types'
-import { SubmittableExtrinsic } from '@polkadot/api/types'
-import {
-  DispatchError,
-  DispatchResult,
-} from '@polkadot/types/interfaces/system'
+import { SubmittableExtrinsic, AugmentedEvent } from '@polkadot/api/types'
+import { DispatchError, DispatchResult } from '@polkadot/types/interfaces/system'
 import { getNonce } from './transactionNonceKeeper'
 import logger from '../../services/logger'
 import ExitCodes from '../../command-base/ExitCodes'
@@ -71,12 +68,9 @@ function sendExtrinsic(
                 }
               }
               reject(
-                new ExtrinsicFailedError(
-                  `Extrinsic execution error: ${errorMsg}`,
-                  {
-                    exit: ExitCodes.ApiError,
-                  }
-                )
+                new ExtrinsicFailedError(`Extrinsic execution error: ${errorMsg}`, {
+                  exit: ExitCodes.ApiError,
+                })
               )
             } else if (event.method === 'ExtrinsicSuccess') {
               const sudid = result.findRecord('sudo', 'Sudid')
@@ -86,17 +80,11 @@ function sendExtrinsic(
                 if (dispatchResult.isOk) {
                   resolve(result)
                 } else {
-                  const errorMsg = formatDispatchError(
-                    api,
-                    dispatchResult.asErr
-                  )
+                  const errorMsg = formatDispatchError(api, dispatchResult.asErr)
                   reject(
-                    new ExtrinsicFailedError(
-                      `Sudo extrinsic execution error! ${errorMsg}`,
-                      {
-                        exit: ExitCodes.ApiError,
-                      }
-                    )
+                    new ExtrinsicFailedError(`Sudo extrinsic execution error! ${errorMsg}`, {
+                      exit: ExitCodes.ApiError,
+                    })
                   )
                 }
               } else {
@@ -115,14 +103,9 @@ function sendExtrinsic(
       .then((unsubFunc) => (unsubscribe = unsubFunc))
       .catch((e) =>
         reject(
-          new ExtrinsicFailedError(
-            `Cannot send the extrinsic: ${
-              e.message ? e.message : JSON.stringify(e)
-            }`,
-            {
-              exit: ExitCodes.ApiError,
-            }
-          )
+          new ExtrinsicFailedError(`Cannot send the extrinsic: ${e.message ? e.message : JSON.stringify(e)}`, {
+            exit: ExitCodes.ApiError,
+          })
         )
       )
   })
@@ -151,30 +134,43 @@ function formatDispatchError(api: ApiPromise, error: DispatchError): string {
  *
  * @param api - API promise
  * @param account - KeyPair instance
- * @param module - runtime module name
- * @param method - runtime extrinsic name
- * @param params - extrinsic parameter
+ * @param tx - prepared extrinsic with arguments
  * @param sudoCall - defines whether the transaction call should be wrapped in
- * the sudo call.
- * @returns void promise.
+ * the sudo call (false by default).
+ * @param eventParser - defines event parsing function (null by default) for
+ * getting any information from the successful extrinsic events.
+ * @returns void or event parsing result promise.
  */
-export async function sendAndFollowNamedTx(
+export async function sendAndFollowNamedTx<T>(
   api: ApiPromise,
   account: KeyringPair,
-  module: string,
-  method: string,
-  params: CodecArg[],
-  sudoCall = false
-): Promise<void> {
-  logger.debug(`Sending ${module}.${method} extrinsic...`)
-  let tx = api.tx[module][method](...params)
+  tx: SubmittableExtrinsic<'promise'>,
+  sudoCall = false,
+  eventParser: ((result: ISubmittableResult) => T) | null = null
+): Promise<T | void> {
+  const description = tx.toHuman() as {
+    method: {
+      method: string
+      section: string
+    }
+  }
+
+  logger.debug(`Sending ${description?.method?.section}.${description?.method?.method} extrinsic...`)
+
   if (sudoCall) {
     tx = api.tx.sudo.sudo(tx)
   }
   const nonce = await getNonce(api, account)
 
-  await sendExtrinsic(api, account, tx, nonce)
+  const result = await sendExtrinsic(api, account, tx, nonce)
+
+  let eventResult: T | void
+  if (eventParser) {
+    eventResult = eventParser(result)
+  }
   logger.debug(`Extrinsic successful!`)
+
+  return eventResult
 }
 
 /**
@@ -183,17 +179,34 @@ export async function sendAndFollowNamedTx(
  *
  * @param api - API promise
  * @param account - KeyPair instance
- * @param module - runtime module name
- * @param method - runtime extrinsic name
- * @param params - extrinsic parameter
+ * @param tx - prepared extrinsic with arguments
  * @returns void promise.
  */
 export async function sendAndFollowSudoNamedTx(
   api: ApiPromise,
   account: KeyringPair,
-  module: string,
-  method: string,
-  params: CodecArg[]
+  tx: SubmittableExtrinsic<'promise'>
 ): Promise<void> {
-  return sendAndFollowNamedTx(api, account, module, method, params, true)
+  return sendAndFollowNamedTx(api, account, tx, true)
+}
+
+/**
+ * Helper function for extracting a specific event from a successful extrinsic result.
+ *
+ * @param result - extrinsic result
+ * @param section - pallet name
+ * @param eventName - event name
+ * @returns the matching event instance (throws an Error if the event is not found).
+ */
+export function getEvent<
+  S extends keyof ApiPromise['events'] & string,
+  M extends keyof ApiPromise['events'][S] & string,
+  EventType = ApiPromise['events'][S][M] extends AugmentedEvent<'promise', infer T> ? IEvent<T> : never
+>(result: SubmittableResult, section: S, eventName: M): EventType {
+  const event = result.findRecord(section, eventName)?.event as EventType | undefined
+
+  if (!event) {
+    throw new Error(`Cannot find expected ${section}.${eventName} event in result: ${result.toHuman()}`)
+  }
+  return event as EventType
 }

+ 107 - 184
storage-node-v2/src/services/runtime/extrinsics.ts

@@ -1,10 +1,10 @@
-import { sendAndFollowSudoNamedTx, sendAndFollowNamedTx } from './api'
+import { sendAndFollowSudoNamedTx, sendAndFollowNamedTx, getEvent } from './api'
 import { getAlicePair } from './accounts'
 import { KeyringPair } from '@polkadot/keyring/types'
-import { CodecArg } from '@polkadot/types/types'
 import { ApiPromise } from '@polkadot/api'
 import { BagId, DynamicBagType } from '@joystream/types/storage'
 import logger from '../../services/logger'
+import BN from 'bn.js'
 
 /**
  * Creates storage bucket.
@@ -19,7 +19,7 @@ import logger from '../../services/logger'
  * @param allowedNewBags - bucket allows new bag assignments
  * @param sizeLimit - size limit in bytes for the new bucket (default 0)
  * @param objectsLimit - object number limit for the new bucket (default 0)
- * @returns promise with a success flag.
+ * @returns promise with a success flag and the bucket id (on success).
  */
 export async function createStorageBucket(
   api: ApiPromise,
@@ -28,18 +28,21 @@ export async function createStorageBucket(
   allowedNewBags = true,
   sizeLimit = 0,
   objectsLimit = 0
-): Promise<boolean> {
-  return await extrinsicWrapper(() => {
+): Promise<[boolean, number | void]> {
+  let bucketId: number | void = 0
+  const success = await extrinsicWrapper(async () => {
     const invitedWorkerValue = api.createType('Option<WorkerId>', invitedWorker)
 
-    return sendAndFollowNamedTx(
-      api,
-      account,
-      'storage',
-      'createStorageBucket',
-      [invitedWorkerValue, allowedNewBags, sizeLimit, objectsLimit]
-    )
+    const tx = api.tx.storage.createStorageBucket(invitedWorkerValue, allowedNewBags, sizeLimit, objectsLimit)
+    bucketId = await sendAndFollowNamedTx(api, account, tx, false, (result) => {
+      const event = getEvent(result, 'storage', 'StorageBucketCreated')
+      const bucketId = event?.data[0] as BN
+
+      return bucketId.toNumber()
+    })
   })
+
+  return [success, bucketId]
 }
 
 /**
@@ -60,15 +63,11 @@ export async function acceptStorageBucketInvitation(
   workerId: number,
   storageBucketId: number
 ): Promise<boolean> {
-  return await extrinsicWrapper(() =>
-    sendAndFollowNamedTx(
-      api,
-      account,
-      'storage',
-      'acceptStorageBucketInvitation',
-      [workerId, storageBucketId]
-    )
-  )
+  return await extrinsicWrapper(() => {
+    const tx = api.tx.storage.acceptStorageBucketInvitation(workerId, storageBucketId)
+
+    return sendAndFollowNamedTx(api, account, tx)
+  })
 }
 
 /**
@@ -80,35 +79,24 @@ export async function acceptStorageBucketInvitation(
  * @param api - runtime API promise
  * @param bagId - BagId instance
  * @param account - KeyringPair instance
- * @param bucketId - runtime storage bucket ID
- * @param removeBucket - defines whether to remove bucket. If set to false
- * the bucket will be added instead.
+ * @param add - runtime storage bucket IDs to add
+ * @param remove - runtime storage bucket IDs to remove
  * @returns promise with a success flag.
  */
 export async function updateStorageBucketsForBag(
   api: ApiPromise,
   bagId: BagId,
   account: KeyringPair,
-  bucketId: number,
-  removeBucket: boolean
+  add: number[],
+  remove: number[]
 ): Promise<boolean> {
   return await extrinsicWrapper(() => {
-    let addBuckets: CodecArg
-    let removeBuckets: CodecArg
+    const removeBuckets = api.createType('StorageBucketIdSet', remove)
+    const addBuckets = api.createType('StorageBucketIdSet', add)
 
-    if (removeBucket) {
-      removeBuckets = api.createType('StorageBucketIdSet', [bucketId])
-    } else {
-      addBuckets = api.createType('StorageBucketIdSet', [bucketId])
-    }
+    const tx = api.tx.storage.updateStorageBucketsForBag(bagId, addBuckets, removeBuckets)
 
-    return sendAndFollowNamedTx(
-      api,
-      account,
-      'storage',
-      'updateStorageBucketsForBag',
-      [bagId, addBuckets, removeBuckets]
-    )
+    return sendAndFollowNamedTx(api, account, tx)
   })
 }
 
@@ -145,13 +133,9 @@ export async function uploadDataObjects(
       expectedDataSizeFee: dataFee,
     })
 
-    return sendAndFollowSudoNamedTx(
-      api,
-      alice,
-      'storage',
-      'sudoUploadDataObjects',
-      [data]
-    )
+    const tx = api.tx.storage.sudoUploadDataObjects(data)
+
+    return sendAndFollowSudoNamedTx(api, alice, tx)
   })
 }
 
@@ -178,18 +162,11 @@ export async function acceptPendingDataObjects(
   dataObjects: number[]
 ): Promise<boolean> {
   return await extrinsicWrapper(() => {
-    const dataObjectSet: CodecArg = api.createType(
-      'DataObjectIdSet',
-      dataObjects
-    )
-
-    return sendAndFollowNamedTx(
-      api,
-      account,
-      'storage',
-      'acceptPendingDataObjects',
-      [workerId, storageBucketId, bagId, dataObjectSet]
-    )
+    const dataObjectSet = api.createType('DataObjectIdSet', dataObjects)
+
+    const tx = api.tx.storage.acceptPendingDataObjects(workerId, storageBucketId, bagId, dataObjectSet)
+
+    return sendAndFollowNamedTx(api, account, tx)
   }, true)
 }
 
@@ -209,15 +186,11 @@ export async function updateStorageBucketsPerBagLimit(
   account: KeyringPair,
   newLimit: number
 ): Promise<boolean> {
-  return extrinsicWrapper(() =>
-    sendAndFollowNamedTx(
-      api,
-      account,
-      'storage',
-      'updateStorageBucketsPerBagLimit',
-      [newLimit]
-    )
-  )
+  return await extrinsicWrapper(() => {
+    const tx = api.tx.storage.updateStorageBucketsPerBagLimit(newLimit)
+
+    return sendAndFollowNamedTx(api, account, tx)
+  })
 }
 
 /**
@@ -238,15 +211,11 @@ export async function updateStorageBucketsVoucherMaxLimits(
   newSizeLimit: number,
   newObjectLimit: number
 ): Promise<boolean> {
-  return extrinsicWrapper(() =>
-    sendAndFollowNamedTx(
-      api,
-      account,
-      'storage',
-      'updateStorageBucketsVoucherMaxLimits',
-      [newSizeLimit, newObjectLimit]
-    )
-  )
+  return await extrinsicWrapper(() => {
+    const tx = api.tx.storage.updateStorageBucketsVoucherMaxLimits(newSizeLimit, newObjectLimit)
+
+    return sendAndFollowNamedTx(api, account, tx)
+  })
 }
 
 /**
@@ -260,16 +229,12 @@ export async function updateStorageBucketsVoucherMaxLimits(
  * @param bucketId - runtime storage bucket ID
  * @returns promise with a success flag.
  */
-export async function deleteStorageBucket(
-  api: ApiPromise,
-  account: KeyringPair,
-  bucketId: number
-): Promise<boolean> {
-  return extrinsicWrapper(() =>
-    sendAndFollowNamedTx(api, account, 'storage', 'deleteStorageBucket', [
-      bucketId,
-    ])
-  )
+export async function deleteStorageBucket(api: ApiPromise, account: KeyringPair, bucketId: number): Promise<boolean> {
+  return await extrinsicWrapper(() => {
+    const tx = api.tx.storage.deleteStorageBucket(bucketId)
+
+    return sendAndFollowNamedTx(api, account, tx)
+  })
 }
 
 /**
@@ -290,15 +255,11 @@ export async function inviteStorageBucketOperator(
   bucketId: number,
   operatorId: number
 ): Promise<boolean> {
-  return extrinsicWrapper(() =>
-    sendAndFollowNamedTx(
-      api,
-      account,
-      'storage',
-      'inviteStorageBucketOperator',
-      [bucketId, operatorId]
-    )
-  )
+  return await extrinsicWrapper(() => {
+    const tx = api.tx.storage.inviteStorageBucketOperator(bucketId, operatorId)
+
+    return sendAndFollowNamedTx(api, account, tx)
+  })
 }
 
 /**
@@ -309,10 +270,7 @@ export async function inviteStorageBucketOperator(
  * after logging.
  * @returns promise with a success flag.
  */
-async function extrinsicWrapper(
-  extrinsic: () => Promise<void>,
-  throwErr = false
-): Promise<boolean> {
+async function extrinsicWrapper(extrinsic: () => Promise<void>, throwErr = false): Promise<boolean> {
   try {
     await extrinsic()
   } catch (err) {
@@ -343,15 +301,11 @@ export async function cancelStorageBucketOperatorInvite(
   account: KeyringPair,
   bucketId: number
 ): Promise<boolean> {
-  return extrinsicWrapper(() =>
-    sendAndFollowNamedTx(
-      api,
-      account,
-      'storage',
-      'cancelStorageBucketOperatorInvite',
-      [bucketId]
-    )
-  )
+  return await extrinsicWrapper(() => {
+    const tx = api.tx.storage.cancelStorageBucketOperatorInvite(bucketId)
+
+    return sendAndFollowNamedTx(api, account, tx)
+  })
 }
 
 /**
@@ -370,15 +324,11 @@ export async function removeStorageBucketOperator(
   account: KeyringPair,
   bucketId: number
 ): Promise<boolean> {
-  return extrinsicWrapper(() =>
-    sendAndFollowNamedTx(
-      api,
-      account,
-      'storage',
-      'removeStorageBucketOperator',
-      [bucketId]
-    )
-  )
+  return await extrinsicWrapper(() => {
+    const tx = api.tx.storage.removeStorageBucketOperator(bucketId)
+
+    return sendAndFollowNamedTx(api, account, tx)
+  })
 }
 
 /**
@@ -392,14 +342,12 @@ export async function removeStorageBucketOperator(
  * @param fee - new fee
  * @returns promise with a success flag.
  */
-export async function updateDataSizeFee(
-  api: ApiPromise,
-  account: KeyringPair,
-  fee: number
-): Promise<boolean> {
-  return extrinsicWrapper(() =>
-    sendAndFollowNamedTx(api, account, 'storage', 'updateDataSizeFee', [fee])
-  )
+export async function updateDataSizeFee(api: ApiPromise, account: KeyringPair, fee: number): Promise<boolean> {
+  return await extrinsicWrapper(() => {
+    const tx = api.tx.storage.updateDataSizeFee(fee)
+
+    return sendAndFollowNamedTx(api, account, tx)
+  })
 }
 
 /**
@@ -422,15 +370,11 @@ export async function setStorageOperatorMetadata(
   bucketId: number,
   metadata: string
 ): Promise<boolean> {
-  return extrinsicWrapper(() =>
-    sendAndFollowNamedTx(
-      api,
-      account,
-      'storage',
-      'setStorageOperatorMetadata',
-      [operatorId, bucketId, metadata]
-    )
-  )
+  return await extrinsicWrapper(() => {
+    const tx = api.tx.storage.setStorageOperatorMetadata(operatorId, bucketId, metadata)
+
+    return sendAndFollowNamedTx(api, account, tx)
+  })
 }
 
 /**
@@ -449,15 +393,11 @@ export async function updateUploadingBlockedStatus(
   account: KeyringPair,
   newStatus: boolean
 ): Promise<boolean> {
-  return extrinsicWrapper(() =>
-    sendAndFollowNamedTx(
-      api,
-      account,
-      'storage',
-      'updateUploadingBlockedStatus',
-      [newStatus]
-    )
-  )
+  return await extrinsicWrapper(() => {
+    const tx = api.tx.storage.updateUploadingBlockedStatus(newStatus)
+
+    return sendAndFollowNamedTx(api, account, tx)
+  })
 }
 
 /**
@@ -478,12 +418,11 @@ export async function updateStorageBucketStatus(
   storageBucketId: number,
   newStatus: boolean
 ): Promise<boolean> {
-  return await extrinsicWrapper(() =>
-    sendAndFollowNamedTx(api, account, 'storage', 'updateStorageBucketStatus', [
-      storageBucketId,
-      newStatus,
-    ])
-  )
+  return await extrinsicWrapper(() => {
+    const tx = api.tx.storage.updateStorageBucketStatus(storageBucketId, newStatus)
+
+    return sendAndFollowNamedTx(api, account, tx)
+  })
 }
 
 /**
@@ -507,15 +446,11 @@ export async function setStorageBucketVoucherLimits(
   newSizeLimit: number,
   newObjectLimit: number
 ): Promise<boolean> {
-  return await extrinsicWrapper(() =>
-    sendAndFollowNamedTx(
-      api,
-      account,
-      'storage',
-      'setStorageBucketVoucherLimits',
-      [storageBucketId, newSizeLimit, newObjectLimit]
-    )
-  )
+  return await extrinsicWrapper(() => {
+    const tx = api.tx.storage.setStorageBucketVoucherLimits(storageBucketId, newSizeLimit, newObjectLimit)
+
+    return sendAndFollowNamedTx(api, account, tx)
+  })
 }
 
 /**
@@ -536,15 +471,11 @@ export async function updateNumberOfStorageBucketsInDynamicBagCreationPolicy(
   dynamicBagType: DynamicBagType,
   newNumber: number
 ): Promise<boolean> {
-  return await extrinsicWrapper(() =>
-    sendAndFollowNamedTx(
-      api,
-      account,
-      'storage',
-      'updateNumberOfStorageBucketsInDynamicBagCreationPolicy',
-      [dynamicBagType, newNumber]
-    )
-  )
+  return await extrinsicWrapper(() => {
+    const tx = api.tx.storage.updateNumberOfStorageBucketsInDynamicBagCreationPolicy(dynamicBagType, newNumber)
+
+    return sendAndFollowNamedTx(api, account, tx)
+  })
 }
 
 /**
@@ -555,30 +486,22 @@ export async function updateNumberOfStorageBucketsInDynamicBagCreationPolicy(
  *
  * @param api - runtime API promise
  * @param account - KeyringPair instance
- * @param cid - content ID (multihash)
- * @param removeCid - defines whether the cid should be removed from the
- * blacklist, cid is added when 'false'
+ * @param add - content IDs (multihash) to add
+ * @param remove - content IDs (multihash) to remove
  * @returns promise with a success flag.
  */
 export async function updateBlacklist(
   api: ApiPromise,
   account: KeyringPair,
-  cid: string,
-  removeCid: boolean
+  add: string[],
+  remove: string[]
 ): Promise<boolean> {
   return await extrinsicWrapper(() => {
-    let addHashes: CodecArg
-    let removeHashes: CodecArg
+    const removeHashes = api.createType('ContentIdSet', remove)
+    const addHashes = api.createType('ContentIdSet', add)
 
-    if (removeCid) {
-      removeHashes = api.createType('ContentIdSet', [cid])
-    } else {
-      addHashes = api.createType('ContentIdSet', [cid])
-    }
+    const tx = api.tx.storage.updateBlacklist(removeHashes, addHashes)
 
-    return sendAndFollowNamedTx(api, account, 'storage', 'updateBlacklist', [
-      removeHashes,
-      addHashes,
-    ])
+    return sendAndFollowNamedTx(api, account, tx)
   })
 }

+ 29 - 67
storage-node-v2/src/services/runtime/hireLead.ts

@@ -1,12 +1,7 @@
 import { sendAndFollowSudoNamedTx, sendAndFollowNamedTx } from './api'
 import { getAlicePair } from './accounts'
-import { CodecArg } from '@polkadot/types/types'
 import { Option, Vec } from '@polkadot/types'
-import {
-  WorkerId,
-  OpeningId,
-  ApplicationId,
-} from '@joystream/types/working-group'
+import { WorkerId, OpeningId, ApplicationId } from '@joystream/types/working-group'
 import { MemberId } from '@joystream/types/members'
 import { ApiPromise } from '@polkadot/api'
 import logger from '../../services/logger'
@@ -21,35 +16,24 @@ import logger from '../../services/logger'
  * @return void promise
  *
  */
-export async function hireStorageWorkingGroupLead(
-  api: ApiPromise
-): Promise<void> {
+export async function hireStorageWorkingGroupLead(api: ApiPromise): Promise<void> {
   const SudoKeyPair = getAlicePair()
   const LeadKeyPair = getAlicePair()
 
-  const nullValue = null as unknown as CodecArg
-
   // Create membership if not already created
-  const members = (await api.query.members.memberIdsByControllerAccountId(
-    LeadKeyPair.address
-  )) as Vec<MemberId>
+  const members = (await api.query.members.memberIdsByControllerAccountId(LeadKeyPair.address)) as Vec<MemberId>
 
   let memberId: MemberId | undefined = members.toArray()[0] as MemberId
 
   if (memberId === undefined) {
     logger.info('Preparing member account creation extrinsic...')
     memberId = (await api.query.members.nextMemberId()) as MemberId
-    await sendAndFollowNamedTx(api, LeadKeyPair, 'members', 'buyMembership', [
-      0,
-      'alice',
-      nullValue,
-      nullValue,
-    ])
+    const tx = api.tx.members.buyMembership(0, 'alice', null, null)
+    await sendAndFollowNamedTx(api, LeadKeyPair, tx)
   }
 
   // Create a new lead opening.
-  const currentLead =
-    (await api.query.storageWorkingGroup.currentLead()) as Option<WorkerId>
+  const currentLead = (await api.query.storageWorkingGroup.currentLead()) as Option<WorkerId>
   if (currentLead.isSome) {
     logger.info('Storage lead already exists, skipping...')
     return
@@ -57,64 +41,42 @@ export async function hireStorageWorkingGroupLead(
 
   logger.info(`Making member id: ${memberId} the content lead.`)
 
-  const newOpeningId =
-    (await api.query.storageWorkingGroup.nextOpeningId()) as OpeningId
-  const newApplicationId =
-    (await api.query.storageWorkingGroup.nextApplicationId()) as ApplicationId
+  const newOpeningId = (await api.query.storageWorkingGroup.nextOpeningId()) as OpeningId
+  const newApplicationId = (await api.query.storageWorkingGroup.nextApplicationId()) as ApplicationId
 
   // Create curator lead opening
   logger.info('Preparing Create Storage Lead Opening extrinsic...')
-  await sendAndFollowSudoNamedTx(
-    api,
-    SudoKeyPair,
-    'storageWorkingGroup',
-    'addOpening',
-    [
-      { CurrentBlock: nullValue }, // activate_at
-      { max_review_period_length: 9999 }, // OpeningPolicyCommitment
-      'storage opening', // human_readable_text
-      'Leader', // opening_type
-    ]
+  let tx = api.tx.storageWorkingGroup.addOpening(
+    { CurrentBlock: null }, // activate_at
+    { max_review_period_length: 9999 }, // OpeningPolicyCommitment
+    'storage opening', // human_readable_text
+    'Leader' // opening_type
   )
+  await sendAndFollowSudoNamedTx(api, SudoKeyPair, tx)
 
   // Apply to lead opening
   logger.info('Preparing Apply to Storage Lead Opening extrinsic...')
-  await sendAndFollowNamedTx(
-    api,
-    LeadKeyPair,
-    'storageWorkingGroup',
-    'applyOnOpening',
-    [
-      memberId, // member id
-      newOpeningId, // opening id
-      LeadKeyPair.address, // address
-      nullValue, // opt role stake
-      nullValue, // opt appl. stake
-      'bootstrap opening', // human_readable_text
-    ]
+  tx = api.tx.storageWorkingGroup.applyOnOpening(
+    memberId, // member id
+    newOpeningId, // opening id
+    LeadKeyPair.address, // address
+    null, // opt role stake
+    null, // opt appl. stake
+    'bootstrap opening' // human_readable_text
   )
+  await sendAndFollowNamedTx(api, LeadKeyPair, tx)
 
   // Begin review period
   logger.info('Preparing Begin Applicant Review extrinsic...')
-  await sendAndFollowSudoNamedTx(
-    api,
-    SudoKeyPair,
-    'storageWorkingGroup',
-    'beginApplicantReview',
-    [newOpeningId]
-  )
+  tx = api.tx.storageWorkingGroup.beginApplicantReview(newOpeningId)
+  await sendAndFollowSudoNamedTx(api, SudoKeyPair, tx)
 
   // Fill opening
   logger.info('Preparing Fill Opening extrinsic...')
-  await sendAndFollowSudoNamedTx(
-    api,
-    SudoKeyPair,
-    'storageWorkingGroup',
-    'fillOpening',
-    [
-      newOpeningId, // opening id
-      api.createType('ApplicationIdSet', [newApplicationId]), // successful applicants
-      nullValue, // reward policy
-    ]
+  tx = api.tx.storageWorkingGroup.fillOpening(
+    newOpeningId, // opening id
+    api.createType('ApplicationIdSet', [newApplicationId]), // successful applicants
+    null // reward policy
   )
+  await sendAndFollowSudoNamedTx(api, SudoKeyPair, tx)
 }

+ 1 - 4
storage-node-v2/src/services/runtime/transactionNonceKeeper.ts

@@ -16,10 +16,7 @@ const lock = new AwaitLock()
  * @returns promise with transaction nonce for a given account.
  *
  */
-export async function getNonce(
-  api: ApiPromise,
-  account: KeyringPair
-): Promise<Index> {
+export async function getNonce(api: ApiPromise, account: KeyringPair): Promise<Index> {
   await lock.acquireAsync()
   try {
     if (nonce === null) {

+ 21 - 37
storage-node-v2/src/services/webApi/app.ts

@@ -3,20 +3,12 @@ import path from 'path'
 import cors from 'cors'
 import { Express, NextFunction } from 'express-serve-static-core'
 import * as OpenApiValidator from 'express-openapi-validator'
-import {
-  HttpError,
-  OpenAPIV3,
-} from 'express-openapi-validator/dist/framework/types'
+import { HttpError, OpenAPIV3 } from 'express-openapi-validator/dist/framework/types'
 import { KeyringPair } from '@polkadot/keyring/types'
 import { ApiPromise } from '@polkadot/api'
-import {
-  RequestData,
-  verifyTokenSignature,
-  parseUploadToken,
-  UploadToken,
-} from '../helpers/auth'
+import { RequestData, verifyTokenSignature, parseUploadToken, UploadToken } from '../helpers/auth'
 import { checkRemoveNonce } from '../../services/helpers/tokenNonceKeeper'
-import { httpLogger } from '../../services/logger'
+import { httpLogger, errorLogger } from '../../services/logger'
 
 /**
  * Creates Express web application. Uses the OAS spec file for the API.
@@ -67,25 +59,28 @@ export async function createApp(
         },
       },
     })
-  )
+  ) // Required signature.
 
-  // Request validation error handler.
-  /* eslint-disable @typescript-eslint/no-unused-vars */ // Required signature.
-  app.use(
-    (
-      err: HttpError,
-      req: express.Request,
-      res: express.Response,
-      next: express.NextFunction
-    ) => {
+  app.use(errorLogger())
+
+  /* eslint-disable @typescript-eslint/no-unused-vars */
+  app.use((err: Error, req: express.Request, res: express.Response, next: express.NextFunction) => {
+    // Request validation error handler.
+    if (err instanceof HttpError) {
       res.status(err.status).json({
         type: 'request_validation',
         message: err.message,
         errors: err.errors,
       })
-      next(err)
+    } else {
+      res.status(500).json({
+        type: 'unknown_error',
+        message: err.message,
+      })
     }
-  )
+
+    next()
+  })
 
   return app
 }
@@ -104,17 +99,10 @@ type ValidateUploadFunction = (
  * @param account - KeyringPair instance
  * @returns ValidateUploadFunction.
  */
-function validateUpload(
-  api: ApiPromise,
-  account: KeyringPair
-): ValidateUploadFunction {
+function validateUpload(api: ApiPromise, account: KeyringPair): ValidateUploadFunction {
   // We don't use these variables yet.
   /* eslint-disable @typescript-eslint/no-unused-vars */
-  return (
-    req: express.Request,
-    scopes: string[],
-    schema: OpenAPIV3.SecuritySchemeObject
-  ) => {
+  return (req: express.Request, scopes: string[], schema: OpenAPIV3.SecuritySchemeObject) => {
     const tokenString = req.headers['x-api-key'] as string
     const token = parseUploadToken(tokenString)
 
@@ -137,11 +125,7 @@ function validateUpload(
  * @param token - token object
  * @param request - data from the request to validate token
  */
-function verifyUploadTokenData(
-  accountAddress: string,
-  token: UploadToken,
-  request: RequestData
-): void {
+function verifyUploadTokenData(accountAddress: string, token: UploadToken, request: RequestData): void {
   if (!verifyTokenSignature(token, accountAddress)) {
     throw new Error('Invalid signature')
   }

+ 15 - 55
storage-node-v2/src/services/webApi/controllers/publicApi.ts

@@ -7,16 +7,12 @@ import {
   verifyTokenSignature,
 } from '../../helpers/auth'
 import { hashFile } from '../../../services/helpers/hashing'
-import {
-  createNonce,
-  getTokenExpirationTime,
-} from '../../../services/helpers/tokenNonceKeeper'
+import { createNonce, getTokenExpirationTime } from '../../../services/helpers/tokenNonceKeeper'
 import { getFileInfo } from '../../../services/helpers/fileInfo'
 import { parseBagId } from '../../helpers/bagTypes'
 import logger from '../../../services/logger'
 import { KeyringPair } from '@polkadot/keyring/types'
 import { ApiPromise } from '@polkadot/api'
-import { Membership } from '@joystream/types/members'
 import * as express from 'express'
 import fs from 'fs'
 import path from 'path'
@@ -26,10 +22,7 @@ const fsPromises = fs.promises
 /**
  * A public endpoint: serves files by CID.
  */
-export async function getFile(
-  req: express.Request,
-  res: express.Response
-): Promise<void> {
+export async function getFile(req: express.Request, res: express.Response): Promise<void> {
   try {
     const cid = getCid(req)
     const uploadsDir = getUploadsDir(res)
@@ -60,10 +53,7 @@ export async function getFile(
 /**
  * A public endpoint: sends file headers by CID.
  */
-export async function getFileHeaders(
-  req: express.Request,
-  res: express.Response
-): Promise<void> {
+export async function getFileHeaders(req: express.Request, res: express.Response): Promise<void> {
   try {
     const cid = getCid(req)
     const uploadsDir = getUploadsDir(res)
@@ -88,10 +78,7 @@ export async function getFileHeaders(
 /**
  * A public endpoint: receives file.
  */
-export async function uploadFile(
-  req: express.Request,
-  res: express.Response
-): Promise<void> {
+export async function uploadFile(req: express.Request, res: express.Response): Promise<void> {
   const uploadRequest: RequestData = req.body
 
   // saved filename to delete on verification or extrinsic errors
@@ -114,14 +101,9 @@ export async function uploadFile(
 
     const api = getApi(res)
     const bagId = parseBagId(api, uploadRequest.bagId)
-    await acceptPendingDataObjects(
-      api,
-      bagId,
-      getAccount(res),
-      getWorkerId(res),
-      uploadRequest.storageBucketId,
-      [uploadRequest.dataObjectId]
-    )
+    await acceptPendingDataObjects(api, bagId, getAccount(res), getWorkerId(res), uploadRequest.storageBucketId, [
+      uploadRequest.dataObjectId,
+    ])
     res.status(201).json({
       id: hash,
     })
@@ -138,10 +120,7 @@ export async function uploadFile(
 /**
  * A public endpoint: creates auth token for file uploads.
  */
-export async function authTokenForUploading(
-  req: express.Request,
-  res: express.Response
-): Promise<void> {
+export async function authTokenForUploading(req: express.Request, res: express.Response): Promise<void> {
   try {
     const account = getAccount(res)
     const tokenRequest = getTokenRequest(req)
@@ -287,23 +266,15 @@ function getTokenRequest(req: express.Request): UploadTokenRequest {
  * @param tokenRequest - UploadTokenRequest instance
  * @returns void promise.
  */
-async function validateTokenRequest(
-  api: ApiPromise,
-  tokenRequest: UploadTokenRequest
-): Promise<void> {
+async function validateTokenRequest(api: ApiPromise, tokenRequest: UploadTokenRequest): Promise<void> {
   const result = verifyTokenSignature(tokenRequest, tokenRequest.data.accountId)
 
   if (!result) {
     throw new Error('Invalid upload token request signature.')
   }
 
-  const membership = (await api.query.members.membershipById(
-    tokenRequest.data.memberId
-  )) as Membership
-
-  if (
-    membership.controller_account.toString() !== tokenRequest.data.accountId
-  ) {
+  const membership = await api.query.members.membershipById(tokenRequest.data.memberId)
+  if (membership.controller_account.toString() !== tokenRequest.data.accountId) {
     throw new Error(`Provided controller account and member id don't match.`)
   }
 }
@@ -329,17 +300,12 @@ function verifyFileSize(fileSize: number) {
  * @param error - external error
  * @returns void promise.
  */
-async function cleanupFileOnError(
-  cleanupFileName: string,
-  error: string
-): Promise<void> {
+async function cleanupFileOnError(cleanupFileName: string, error: string): Promise<void> {
   if (cleanupFileName) {
     try {
       await fsPromises.unlink(cleanupFileName)
     } catch (err) {
-      logger.error(
-        `Cannot delete the file (${cleanupFileName}) on error: ${error}. IO error: ${err}`
-      )
+      logger.error(`Cannot delete the file (${cleanupFileName}) on error: ${error}. IO error: ${err}`)
     }
   }
 }
@@ -356,9 +322,7 @@ async function verifyFileMimeType(filePath: string): Promise<void> {
   const allowedMimeTypes = ['image/', 'video/', 'audio/']
 
   const fileInfo = await getFileInfo(filePath)
-  const correctMimeType = allowedMimeTypes.some((allowedType) =>
-    fileInfo.mimeType.startsWith(allowedType)
-  )
+  const correctMimeType = allowedMimeTypes.some((allowedType) => fileInfo.mimeType.startsWith(allowedType))
 
   if (!correctMimeType) {
     throw new Error(`Incorrect mime type detected: ${fileInfo.mimeType}`)
@@ -373,11 +337,7 @@ async function verifyFileMimeType(filePath: string): Promise<void> {
  * @param errorType - defines request type
  * @returns void promise.
  */
-function sendResponseWithError(
-  res: express.Response,
-  err: Error,
-  errorType: string
-): void {
+function sendResponseWithError(res: express.Response, err: Error, errorType: string): void {
   const errorString = err.toString()
   // Special case - file not found.
   if (isNofileError(err)) {

+ 0 - 70
storage-node-v2/test/commands/leader/create-bucket.test.ts

@@ -1,70 +0,0 @@
-import { assert } from 'chai'
-import * as child from 'child_process';
-
-class CommandBuilder {
-  private executable: string = 'yarn storage-node'
-  private command: string
-  private arguments: string
-
-  constructor (command: string = '', args: string = ''){
-    this.command = command
-    this.arguments = args
-  }
-
-  args(args: string): CommandBuilder {
-    this.arguments = args
-    return this
-  }
-
-  cmd(cmd: string): CommandBuilder {
-    this.command = cmd
-    return this
-  }
-
-  final(): string {
-    return [this.executable, this.command, this.arguments].join(' ')
-  }
-
-}
-
-describe('leader:create-bucket', () => {
-  before(function(done) {
-    this.timeout(60000)
-    const cmd =  new CommandBuilder().cmd('dev:init').final()
-    console.log('dev:init started.')
-    child.exec(cmd,(error: any, stdout: string, stderr: string) => {
-      console.log("Initialized.")
-      done()
-    });
-  });
-
-  const cmd =  new CommandBuilder().cmd('leader:create-bucket').args('').final()
-  describe(cmd, () => {
-    it("fails without keyfile", function(done) {
-      child.exec(cmd, {timeout: 5000}, (error: any, stdout: string, stderr: string) => {
-          assert(stderr.includes('Key'), 'Should contain an error.')
-
-          done()
-      });
-    });
-  })  
-  
-  const cmd2 =  new CommandBuilder().cmd('leader:create-bucket').args('--dev').final()
-  describe(cmd2, () => {
-
-    it("fails without keyfile", function(done) {
-      this.timeout(60000)
-      child.exec(cmd2, {timeout: 9000}, (error: any, stdout: string, stderr: string) => {
-          assert(stdout.includes('Extrinsic successful!'), 'Should be successful extrinsic')
-
-          done()
-      })
-    })
-  })
-})
-
-
-
-
-
-

+ 0 - 5
storage-node-v2/test/mocha.opts

@@ -1,5 +0,0 @@
---require ts-node/register
---watch-extensions ts
---recursive
---reporter spec
---timeout 5000

+ 0 - 9
storage-node-v2/test/tsconfig.json

@@ -1,9 +0,0 @@
-{
-  "extends": "../tsconfig",
-  "compilerOptions": {
-    "noEmit": true
-  },
-  "references": [
-    {"path": ".."}
-  ]
-}

+ 1 - 0
storage-node-v2/tsconfig.json

@@ -11,6 +11,7 @@
     "baseUrl": ".",
     "paths": {
       "@polkadot/types/augment": ["../types/augment-codec/augment-types.ts"],
+      "@polkadot/api/augment": ["../types/augment-codec/augment-api.ts"],
     },
     "esModuleInterop": true,
     "types" : [ "node", "mocha" ],

+ 2 - 2
types/augment-codec/augment-api-events.ts

@@ -2,7 +2,7 @@
 /* eslint-disable */
 
 import type { BTreeMap, BTreeSet, Bytes, Option, Vec, bool, u16, u32, u64 } from '@polkadot/types';
-import type { Actor, ApplicationId, ApplicationIdToWorkerIdMap, BagId, CategoryId, ChannelId, ClassId, ContentId, CuratorApplicationId, CuratorApplicationIdToCuratorIdMap, CuratorGroupId, CuratorId, CuratorOpeningId, DataObjectId, DistributionBucketFamilyId, DistributionBucketId, DynamicBagDeletionPrizeRecord, DynamicBagId, DynamicBagType, EntityController, EntityCreationVoucher, EntityId, FailedAt, LeadId, MemberId, MintBalanceOf, MintId, Nonce, OpeningId, PostId, PropertyId, ProposalId, ProposalStatus, RationaleText, SchemaId, SideEffect, SideEffects, Status, StorageBucketId, ThreadId, UploadParameters, VecMaxLength, VoteKind, Voucher, WorkerId } from './all';
+import type { Actor, ApplicationId, ApplicationIdToWorkerIdMap, BagId, CategoryId, ChannelId, Cid, ClassId, CuratorApplicationId, CuratorApplicationIdToCuratorIdMap, CuratorGroupId, CuratorId, CuratorOpeningId, DataObjectId, DistributionBucketFamilyId, DistributionBucketId, DynamicBagDeletionPrizeRecord, DynamicBagId, DynamicBagType, EntityController, EntityCreationVoucher, EntityId, FailedAt, LeadId, MemberId, MintBalanceOf, MintId, Nonce, OpeningId, PostId, PropertyId, ProposalId, ProposalStatus, RationaleText, SchemaId, SideEffect, SideEffects, Status, StorageBucketId, ThreadId, UploadParameters, VecMaxLength, VoteKind, Voucher, WorkerId } from './all';
 import type { BalanceStatus } from '@polkadot/types/interfaces/balances';
 import type { AuthorityId } from '@polkadot/types/interfaces/consensus';
 import type { AuthorityList } from '@polkadot/types/interfaces/grandpa';
@@ -814,7 +814,7 @@ declare module '@polkadot/api/types/events' {
        * - hashes to remove from the blacklist
        * - hashes to add to the blacklist
        **/
-      UpdateBlacklist: AugmentedEvent<ApiType, [BTreeSet<ContentId>, BTreeSet<ContentId>]>;
+      UpdateBlacklist: AugmentedEvent<ApiType, [BTreeSet<Cid>, BTreeSet<Cid>]>;
       /**
        * Emits on changing the size-based pricing of new objects uploaded.
        * Params

+ 2 - 2
types/augment-codec/augment-api-query.ts

@@ -3,7 +3,7 @@
 
 import type { Bytes, Option, Vec, bool, u32, u64 } from '@polkadot/types';
 import type { AnyNumber, ITuple, Observable } from '@polkadot/types/types';
-import type { Application, ApplicationId, ApplicationOf, Bag, BagId, Category, CategoryId, Channel, ChannelId, Class, ClassId, ClassOf, ClassPermissionsType, ContentId, Credential, Curator, CuratorApplication, CuratorApplicationId, CuratorGroup, CuratorGroupId, CuratorId, CuratorOpening, CuratorOpeningId, DataObject, DataObjectId, DiscussionPost, DiscussionThread, DistributionBucketFamily, DistributionBucketFamilyId, DistributionBucketId, DynamicBagCreationPolicy, DynamicBagType, ElectionStage, ElectionStake, Entity, EntityController, EntityCreationVoucher, EntityId, EntityOf, HiringApplicationId, InputValidationLengthConstraint, Lead, LeadId, MemberId, Membership, MemoText, Mint, MintId, Opening, OpeningId, OpeningOf, PaidMembershipTerms, PaidTermId, Post, PostId, Principal, PrincipalId, PropertyId, ProposalDetailsOf, ProposalId, ProposalOf, Recipient, RecipientId, RewardRelationship, RewardRelationshipId, SealedVote, Seats, Stake, StakeId, StorageBucket, StorageBucketId, Thread, ThreadCounter, ThreadId, TransferableStake, VoteKind, WorkerId, WorkerOf, WorkingGroupUnstaker } from './all';
+import type { Application, ApplicationId, ApplicationOf, Bag, BagId, Category, CategoryId, Channel, ChannelId, Cid, Class, ClassId, ClassOf, ClassPermissionsType, Credential, Curator, CuratorApplication, CuratorApplicationId, CuratorGroup, CuratorGroupId, CuratorId, CuratorOpening, CuratorOpeningId, DataObject, DataObjectId, DiscussionPost, DiscussionThread, DistributionBucketFamily, DistributionBucketFamilyId, DistributionBucketId, DynamicBagCreationPolicy, DynamicBagType, ElectionStage, ElectionStake, Entity, EntityController, EntityCreationVoucher, EntityId, EntityOf, HiringApplicationId, InputValidationLengthConstraint, Lead, LeadId, MemberId, Membership, MemoText, Mint, MintId, Opening, OpeningId, OpeningOf, PaidMembershipTerms, PaidTermId, Post, PostId, Principal, PrincipalId, PropertyId, ProposalDetailsOf, ProposalId, ProposalOf, Recipient, RecipientId, RewardRelationship, RewardRelationshipId, SealedVote, Seats, Stake, StakeId, StorageBucket, StorageBucketId, Thread, ThreadCounter, ThreadId, TransferableStake, VoteKind, WorkerId, WorkerOf, WorkingGroupUnstaker } from './all';
 import type { UncleEntryItem } from '@polkadot/types/interfaces/authorship';
 import type { BabeAuthorityWeight, MaybeRandomness, NextConfigDescriptor, Randomness } from '@polkadot/types/interfaces/babe';
 import type { AccountData, BalanceLock } from '@polkadot/types/interfaces/balances';
@@ -1019,7 +1019,7 @@ declare module '@polkadot/api/types/storage' {
       /**
        * Blacklisted data object hashes.
        **/
-      blacklist: AugmentedQuery<ApiType, (arg: ContentId | string | Uint8Array) => Observable<ITuple<[]>>, [ContentId]>;
+      blacklist: AugmentedQuery<ApiType, (arg: Cid | string | Uint8Array) => Observable<ITuple<[]>>, [Cid]>;
       /**
        * Blacklist collection counter.
        **/

+ 2 - 2
types/augment-codec/augment-api-tx.ts

@@ -3,7 +3,7 @@
 
 import type { BTreeMap, BTreeSet, Bytes, Compact, Option, Vec, bool, u16, u32, u64 } from '@polkadot/types';
 import type { AnyNumber, ITuple } from '@polkadot/types/types';
-import type { ActivateOpeningAt, Actor, AddOpeningParameters, ApplicationId, ApplicationIdSet, BagId, BalanceOfMint, CategoryId, ChannelContentType, ChannelCurationStatus, ChannelId, ChannelPublicationStatus, ClassId, ClassPermissions, ClassPermissionsType, ClassPropertyValue, ContentId, Credential, CredentialSet, CurationActor, CuratorApplicationId, CuratorApplicationIdSet, CuratorGroupId, CuratorId, CuratorOpeningId, DataObjectId, DistributionBucketFamilyId, DistributionBucketId, DynamicBagDeletionPrize, DynamicBagId, DynamicBagType, ElectionParameters, EntityController, EntityId, EntityPermissions, FillOpeningParameters, InputPropertyValue, InputValue, MemberId, MemoText, Nonce, OpeningId, OpeningPolicyCommitment, OpeningType, Operation, OperationType, OptionalText, PaidTermId, PostId, Property, PropertyId, ProposalId, ReferenceConstraint, RewardPolicy, SchemaId, StorageBucketId, TerminateRoleParameters, ThreadId, UploadParameters, VecMaxLength, VoteKind, WorkerId, WorkingGroup } from './all';
+import type { ActivateOpeningAt, Actor, AddOpeningParameters, ApplicationId, ApplicationIdSet, BagId, BalanceOfMint, CategoryId, ChannelContentType, ChannelCurationStatus, ChannelId, ChannelPublicationStatus, Cid, ClassId, ClassPermissions, ClassPermissionsType, ClassPropertyValue, Credential, CredentialSet, CurationActor, CuratorApplicationId, CuratorApplicationIdSet, CuratorGroupId, CuratorId, CuratorOpeningId, DataObjectId, DistributionBucketFamilyId, DistributionBucketId, DynamicBagDeletionPrize, DynamicBagId, DynamicBagType, ElectionParameters, EntityController, EntityId, EntityPermissions, FillOpeningParameters, InputPropertyValue, InputValue, MemberId, MemoText, Nonce, OpeningId, OpeningPolicyCommitment, OpeningType, Operation, OperationType, OptionalText, PaidTermId, PostId, Property, PropertyId, ProposalId, ReferenceConstraint, RewardPolicy, SchemaId, StorageBucketId, TerminateRoleParameters, ThreadId, UploadParameters, VecMaxLength, VoteKind, WorkerId, WorkingGroup } from './all';
 import type { BabeEquivocationProof } from '@polkadot/types/interfaces/babe';
 import type { Extrinsic, Signature } from '@polkadot/types/interfaces/extrinsics';
 import type { GrandpaEquivocationProof, KeyOwnerProof } from '@polkadot/types/interfaces/grandpa';
@@ -1325,7 +1325,7 @@ declare module '@polkadot/api/types/submittable' {
       /**
        * Add and remove hashes to the current blacklist.
        **/
-      updateBlacklist: AugmentedSubmittable<(removeHashes: BTreeSet<ContentId>, addHashes: BTreeSet<ContentId>) => SubmittableExtrinsic<ApiType>, [BTreeSet<ContentId>, BTreeSet<ContentId>]>;
+      updateBlacklist: AugmentedSubmittable<(removeHashes: BTreeSet<Cid>, addHashes: BTreeSet<Cid>) => SubmittableExtrinsic<ApiType>, [BTreeSet<Cid>, BTreeSet<Cid>]>;
       /**
        * Updates size-based pricing of new objects uploaded.
        **/

+ 6 - 6
types/augment/all/defs.json

@@ -1017,7 +1017,7 @@
         "operator_status": "StorageBucketOperatorStatus",
         "accepting_new_bags": "bool",
         "voucher": "Voucher",
-        "metadata": "Text"
+        "metadata": "Bytes"
     },
     "StaticBagId": {
         "_enum": {
@@ -1045,7 +1045,7 @@
     },
     "DataObjectCreationParameters": {
         "size": "u64",
-        "ipfsContentId": "Text"
+        "ipfsContentId": "Bytes"
     },
     "BagIdType": {
         "_enum": {
@@ -1054,7 +1054,7 @@
         }
     },
     "UploadParameters": {
-        "authenticationKey": "Text",
+        "authenticationKey": "Bytes",
         "bagId": "BagId",
         "objectCreationList": "Vec<DataObjectCreationParameters>",
         "deletionPrizeSourceAccountId": "GenericAccountId",
@@ -1063,12 +1063,12 @@
     "StorageBucketIdSet": "BTreeSet<StorageBucketId>",
     "DataObjectIdSet": "BTreeSet<DataObjectId>",
     "ContentIdSet": "BTreeSet<Cid>",
-    "Cid": "Text",
+    "Cid": "Bytes",
     "StorageBucketOperatorStatus": {
         "_enum": {
             "Missing": "Null",
-            "InvitedStorageWorker": "u64",
-            "StorageWorker": "u64"
+            "InvitedStorageWorker": "WorkerId",
+            "StorageWorker": "WorkerId"
         }
     },
     "StorageDataObject": {

+ 6 - 6
types/augment/all/types.ts

@@ -255,7 +255,7 @@ export interface ChildPositionInParentCategory extends Struct {
 }
 
 /** @name Cid */
-export interface Cid extends Text {}
+export interface Cid extends Bytes {}
 
 /** @name Class */
 export interface Class extends Struct {
@@ -420,7 +420,7 @@ export interface DataObject extends Struct {
 
 /** @name DataObjectCreationParameters */
 export interface DataObjectCreationParameters extends Struct {
-  readonly ipfsContentId: Text;
+  readonly ipfsContentId: Bytes;
 }
 
 /** @name DataObjectId */
@@ -1313,7 +1313,7 @@ export interface StorageBucket extends Struct {
   readonly operator_status: StorageBucketOperatorStatus;
   readonly accepting_new_bags: bool;
   readonly voucher: Voucher;
-  readonly metadata: Text;
+  readonly metadata: Bytes;
 }
 
 /** @name StorageBucketId */
@@ -1326,9 +1326,9 @@ export interface StorageBucketIdSet extends BTreeSet<StorageBucketId> {}
 export interface StorageBucketOperatorStatus extends Enum {
   readonly isMissing: boolean;
   readonly isInvitedStorageWorker: boolean;
-  readonly asInvitedStorageWorker: u64;
+  readonly asInvitedStorageWorker: WorkerId;
   readonly isStorageWorker: boolean;
-  readonly asStorageWorker: u64;
+  readonly asStorageWorker: WorkerId;
 }
 
 /** @name StorageBucketsPerBagValueConstraint */
@@ -1441,7 +1441,7 @@ export interface UpdatePropertyValuesOperation extends Struct {
 
 /** @name UploadParameters */
 export interface UploadParameters extends Struct {
-  readonly authenticationKey: Text;
+  readonly authenticationKey: Bytes;
   readonly bagId: BagId;
   readonly objectCreationList: Vec<DataObjectCreationParameters>;
   readonly deletionPrizeSourceAccountId: GenericAccountId;

+ 2 - 2
types/augment/augment-api-events.ts

@@ -2,7 +2,7 @@
 /* eslint-disable */
 
 import type { BTreeMap, BTreeSet, Bytes, Option, Vec, bool, u16, u32, u64 } from '@polkadot/types';
-import type { Actor, ApplicationId, ApplicationIdToWorkerIdMap, BagId, CategoryId, ChannelId, ClassId, ContentId, CuratorApplicationId, CuratorApplicationIdToCuratorIdMap, CuratorGroupId, CuratorId, CuratorOpeningId, DataObjectId, DistributionBucketFamilyId, DistributionBucketId, DynamicBagDeletionPrizeRecord, DynamicBagId, DynamicBagType, EntityController, EntityCreationVoucher, EntityId, FailedAt, LeadId, MemberId, MintBalanceOf, MintId, Nonce, OpeningId, PostId, PropertyId, ProposalId, ProposalStatus, RationaleText, SchemaId, SideEffect, SideEffects, Status, StorageBucketId, ThreadId, UploadParameters, VecMaxLength, VoteKind, Voucher, WorkerId } from './all';
+import type { Actor, ApplicationId, ApplicationIdToWorkerIdMap, BagId, CategoryId, ChannelId, Cid, ClassId, CuratorApplicationId, CuratorApplicationIdToCuratorIdMap, CuratorGroupId, CuratorId, CuratorOpeningId, DataObjectId, DistributionBucketFamilyId, DistributionBucketId, DynamicBagDeletionPrizeRecord, DynamicBagId, DynamicBagType, EntityController, EntityCreationVoucher, EntityId, FailedAt, LeadId, MemberId, MintBalanceOf, MintId, Nonce, OpeningId, PostId, PropertyId, ProposalId, ProposalStatus, RationaleText, SchemaId, SideEffect, SideEffects, Status, StorageBucketId, ThreadId, UploadParameters, VecMaxLength, VoteKind, Voucher, WorkerId } from './all';
 import type { BalanceStatus } from '@polkadot/types/interfaces/balances';
 import type { AuthorityId } from '@polkadot/types/interfaces/consensus';
 import type { AuthorityList } from '@polkadot/types/interfaces/grandpa';
@@ -814,7 +814,7 @@ declare module '@polkadot/api/types/events' {
        * - hashes to remove from the blacklist
        * - hashes to add to the blacklist
        **/
-      UpdateBlacklist: AugmentedEvent<ApiType, [BTreeSet<ContentId>, BTreeSet<ContentId>]>;
+      UpdateBlacklist: AugmentedEvent<ApiType, [BTreeSet<Cid>, BTreeSet<Cid>]>;
       /**
        * Emits on changing the size-based pricing of new objects uploaded.
        * Params

+ 2 - 2
types/augment/augment-api-query.ts

@@ -3,7 +3,7 @@
 
 import type { Bytes, Option, Vec, bool, u32, u64 } from '@polkadot/types';
 import type { AnyNumber, ITuple, Observable } from '@polkadot/types/types';
-import type { Application, ApplicationId, ApplicationOf, Bag, BagId, Category, CategoryId, Channel, ChannelId, Class, ClassId, ClassOf, ClassPermissionsType, ContentId, Credential, Curator, CuratorApplication, CuratorApplicationId, CuratorGroup, CuratorGroupId, CuratorId, CuratorOpening, CuratorOpeningId, DataObject, DataObjectId, DiscussionPost, DiscussionThread, DistributionBucketFamily, DistributionBucketFamilyId, DistributionBucketId, DynamicBagCreationPolicy, DynamicBagType, ElectionStage, ElectionStake, Entity, EntityController, EntityCreationVoucher, EntityId, EntityOf, HiringApplicationId, InputValidationLengthConstraint, Lead, LeadId, MemberId, Membership, MemoText, Mint, MintId, Opening, OpeningId, OpeningOf, PaidMembershipTerms, PaidTermId, Post, PostId, Principal, PrincipalId, PropertyId, ProposalDetailsOf, ProposalId, ProposalOf, Recipient, RecipientId, RewardRelationship, RewardRelationshipId, SealedVote, Seats, Stake, StakeId, StorageBucket, StorageBucketId, Thread, ThreadCounter, ThreadId, TransferableStake, VoteKind, WorkerId, WorkerOf, WorkingGroupUnstaker } from './all';
+import type { Application, ApplicationId, ApplicationOf, Bag, BagId, Category, CategoryId, Channel, ChannelId, Cid, Class, ClassId, ClassOf, ClassPermissionsType, Credential, Curator, CuratorApplication, CuratorApplicationId, CuratorGroup, CuratorGroupId, CuratorId, CuratorOpening, CuratorOpeningId, DataObject, DataObjectId, DiscussionPost, DiscussionThread, DistributionBucketFamily, DistributionBucketFamilyId, DistributionBucketId, DynamicBagCreationPolicy, DynamicBagType, ElectionStage, ElectionStake, Entity, EntityController, EntityCreationVoucher, EntityId, EntityOf, HiringApplicationId, InputValidationLengthConstraint, Lead, LeadId, MemberId, Membership, MemoText, Mint, MintId, Opening, OpeningId, OpeningOf, PaidMembershipTerms, PaidTermId, Post, PostId, Principal, PrincipalId, PropertyId, ProposalDetailsOf, ProposalId, ProposalOf, Recipient, RecipientId, RewardRelationship, RewardRelationshipId, SealedVote, Seats, Stake, StakeId, StorageBucket, StorageBucketId, Thread, ThreadCounter, ThreadId, TransferableStake, VoteKind, WorkerId, WorkerOf, WorkingGroupUnstaker } from './all';
 import type { UncleEntryItem } from '@polkadot/types/interfaces/authorship';
 import type { BabeAuthorityWeight, MaybeRandomness, NextConfigDescriptor, Randomness } from '@polkadot/types/interfaces/babe';
 import type { AccountData, BalanceLock } from '@polkadot/types/interfaces/balances';
@@ -1019,7 +1019,7 @@ declare module '@polkadot/api/types/storage' {
       /**
        * Blacklisted data object hashes.
        **/
-      blacklist: AugmentedQuery<ApiType, (arg: ContentId | string | Uint8Array) => Observable<ITuple<[]>>, [ContentId]>;
+      blacklist: AugmentedQuery<ApiType, (arg: Cid | string | Uint8Array) => Observable<ITuple<[]>>, [Cid]>;
       /**
        * Blacklist collection counter.
        **/

+ 2 - 2
types/augment/augment-api-tx.ts

@@ -3,7 +3,7 @@
 
 import type { BTreeMap, BTreeSet, Bytes, Compact, Option, Vec, bool, u16, u32, u64 } from '@polkadot/types';
 import type { AnyNumber, ITuple } from '@polkadot/types/types';
-import type { ActivateOpeningAt, Actor, AddOpeningParameters, ApplicationId, ApplicationIdSet, BagId, BalanceOfMint, CategoryId, ChannelContentType, ChannelCurationStatus, ChannelId, ChannelPublicationStatus, ClassId, ClassPermissions, ClassPermissionsType, ClassPropertyValue, ContentId, Credential, CredentialSet, CurationActor, CuratorApplicationId, CuratorApplicationIdSet, CuratorGroupId, CuratorId, CuratorOpeningId, DataObjectId, DistributionBucketFamilyId, DistributionBucketId, DynamicBagDeletionPrize, DynamicBagId, DynamicBagType, ElectionParameters, EntityController, EntityId, EntityPermissions, FillOpeningParameters, InputPropertyValue, InputValue, MemberId, MemoText, Nonce, OpeningId, OpeningPolicyCommitment, OpeningType, Operation, OperationType, OptionalText, PaidTermId, PostId, Property, PropertyId, ProposalId, ReferenceConstraint, RewardPolicy, SchemaId, StorageBucketId, TerminateRoleParameters, ThreadId, UploadParameters, VecMaxLength, VoteKind, WorkerId, WorkingGroup } from './all';
+import type { ActivateOpeningAt, Actor, AddOpeningParameters, ApplicationId, ApplicationIdSet, BagId, BalanceOfMint, CategoryId, ChannelContentType, ChannelCurationStatus, ChannelId, ChannelPublicationStatus, Cid, ClassId, ClassPermissions, ClassPermissionsType, ClassPropertyValue, Credential, CredentialSet, CurationActor, CuratorApplicationId, CuratorApplicationIdSet, CuratorGroupId, CuratorId, CuratorOpeningId, DataObjectId, DistributionBucketFamilyId, DistributionBucketId, DynamicBagDeletionPrize, DynamicBagId, DynamicBagType, ElectionParameters, EntityController, EntityId, EntityPermissions, FillOpeningParameters, InputPropertyValue, InputValue, MemberId, MemoText, Nonce, OpeningId, OpeningPolicyCommitment, OpeningType, Operation, OperationType, OptionalText, PaidTermId, PostId, Property, PropertyId, ProposalId, ReferenceConstraint, RewardPolicy, SchemaId, StorageBucketId, TerminateRoleParameters, ThreadId, UploadParameters, VecMaxLength, VoteKind, WorkerId, WorkingGroup } from './all';
 import type { BabeEquivocationProof } from '@polkadot/types/interfaces/babe';
 import type { Extrinsic, Signature } from '@polkadot/types/interfaces/extrinsics';
 import type { GrandpaEquivocationProof, KeyOwnerProof } from '@polkadot/types/interfaces/grandpa';
@@ -1325,7 +1325,7 @@ declare module '@polkadot/api/types/submittable' {
       /**
        * Add and remove hashes to the current blacklist.
        **/
-      updateBlacklist: AugmentedSubmittable<(removeHashes: BTreeSet<ContentId>, addHashes: BTreeSet<ContentId>) => SubmittableExtrinsic<ApiType>, [BTreeSet<ContentId>, BTreeSet<ContentId>]>;
+      updateBlacklist: AugmentedSubmittable<(removeHashes: BTreeSet<Cid>, addHashes: BTreeSet<Cid>) => SubmittableExtrinsic<ApiType>, [BTreeSet<Cid>, BTreeSet<Cid>]>;
       /**
        * Updates size-based pricing of new objects uploaded.
        **/

+ 2 - 1
types/src/common.ts

@@ -1,4 +1,4 @@
-import { Struct, Option, Text, bool, u16, u32, u64, Null, U8aFixed, BTreeSet, UInt } from '@polkadot/types'
+import { Struct, Option, Text, bool, u16, u32, u64, Null, U8aFixed, BTreeSet, UInt, u128 } from '@polkadot/types'
 import { BlockNumber, Hash as PolkadotHash, Moment } from '@polkadot/types/interfaces'
 import { Codec, Constructor, RegistryTypes } from '@polkadot/types/types'
 import { u8aConcat, u8aToHex, compactToU8a } from '@polkadot/util'
@@ -135,6 +135,7 @@ export class MemoText extends Text {}
 // see: https://polkadot.js.org/api/start/FAQ.html#the-node-returns-a-could-not-convert-error-on-send
 export class Address extends AccountId {}
 export class LookupSource extends AccountId {}
+export class BalanceOf extends u128 {}
 
 export const commonTypes: RegistryTypes = {
   Credential,

+ 33 - 49
types/src/storage.ts

@@ -1,25 +1,13 @@
-import {
-  Null,
-  u128,
-  u64,
-  Text,
-  Vec,
-  bool,
-  GenericAccountId as AccountId,
-  BTreeSet,
-  BTreeMap,
-  Option,
-} from '@polkadot/types'
+import { Null, u64, Bytes, Vec, bool, GenericAccountId as AccountId, BTreeSet, BTreeMap, Option } from '@polkadot/types'
 import { RegistryTypes } from '@polkadot/types/types'
-import { JoyBTreeSet, JoyEnum, JoyStructDecorated, WorkingGroup } from './common'
+import { JoyBTreeSet, JoyEnum, JoyStructDecorated, WorkingGroup, BalanceOf } from './common'
 import { MemberId } from './members'
 import { WorkerId } from './working-group'
 
-export class BalanceOf extends u128 {}
 export class DataObjectId extends u64 {}
 export class StorageBucketId extends u64 {}
 
-export type StorageBucketsPerBagValueConstraintType = {
+export type IStorageBucketsPerBagValueConstraint = {
   min: u64
   max_min_diff: u64
 }
@@ -29,9 +17,9 @@ export class StorageBucketsPerBagValueConstraint
     min: u64,
     max_min_diff: u64,
   })
-  implements StorageBucketsPerBagValueConstraintType {}
+  implements IStorageBucketsPerBagValueConstraint {}
 
-export type StorageDataObjectType = {
+export type IStorageDataObject = {
   accepted: bool
   deletion_prize: BalanceOf
   size: u64
@@ -44,7 +32,7 @@ export class StorageDataObject
     deletion_prize: BalanceOf,
     size: u64,
   })
-  implements StorageDataObjectType {}
+  implements IStorageDataObject {}
 
 export class DataObjectIdSet extends JoyBTreeSet(DataObjectId) {}
 export class DistributionBucketId extends u64 {}
@@ -52,7 +40,7 @@ export class DistributionBucketFamilyId extends u64 {}
 export class StorageBucketIdSet extends JoyBTreeSet(StorageBucketId) {}
 export class DistributionBucketSet extends JoyBTreeSet(DistributionBucketId) {}
 
-export type DynamicBagDeletionPrizeTypeDef = {
+export type IDynamicBagDeletionPrize = {
   account_id: AccountId
   prize: BalanceOf
 }
@@ -62,11 +50,11 @@ export class DynamicBagDeletionPrize
     account_id: AccountId,
     prize: BalanceOf,
   })
-  implements DynamicBagDeletionPrizeTypeDef {}
+  implements IDynamicBagDeletionPrize {}
 
 export class DynamicBagDeletionPrizeRecord extends DynamicBagDeletionPrize {}
 
-export type BagTypeDef = {
+export type IBag = {
   objects: BTreeMap<DataObjectId, StorageDataObject>
   stored_by: StorageBucketIdSet
   distributed_by: DistributionBucketSet
@@ -80,9 +68,9 @@ export class Bag
     distributed_by: DistributionBucketSet,
     deletion_prize: Option.with(BalanceOf),
   })
-  implements BagTypeDef {}
+  implements IBag {}
 
-export type DynamicBagCreationPolicyType = {
+export type IDynamicBagCreationPolicy = {
   numberOfStorageBuckets: u64
 }
 
@@ -90,7 +78,7 @@ export class DynamicBagCreationPolicy
   extends JoyStructDecorated({
     numberOfStorageBuckets: u64,
   })
-  implements DynamicBagCreationPolicyType {}
+  implements IDynamicBagCreationPolicy {}
 
 export const DynamicBagTypeDef = {
   Member: Null,
@@ -103,32 +91,29 @@ export const StaticBagIdDef = {
   Council: Null,
   WorkingGroup: WorkingGroup,
 } as const
-export type StaticBagIdKey = keyof typeof StaticBagIdDef
 export class StaticBagId extends JoyEnum(StaticBagIdDef) {}
-
 export class Static extends StaticBagId {}
 
+// This type should be imported from content-directory/common types once the Olympia release is merged.
 export class ChannelId extends u64 {}
+
 export const DynamicBagIdDef = {
   Member: MemberId,
   Channel: ChannelId,
 } as const
-export type DynamicBagIdKey = keyof typeof DynamicBagIdDef
 export class DynamicBagId extends JoyEnum(DynamicBagIdDef) {}
-
 export class Dynamic extends DynamicBagId {}
 
 export const BagIdDef = {
   Static,
   Dynamic,
 } as const
-export type BagIdKey = keyof typeof BagIdDef
 export class BagId extends JoyEnum(BagIdDef) {}
 
 // Alias
 export class BagIdType extends BagId {}
 
-export type VoucherType = {
+export type IVoucher = {
   sizeLimit: u64
   objectsLimit: u64
   sizeUsed: u64
@@ -142,21 +127,20 @@ export class Voucher
     sizeUsed: u64,
     objectsUsed: u64,
   })
-  implements VoucherType {}
+  implements IVoucher {}
 
 export const StorageBucketOperatorStatusDef = {
   Missing: Null,
   InvitedStorageWorker: WorkerId,
   StorageWorker: WorkerId,
 } as const
-export type StorageBucketOperatorStatusKey = keyof typeof StorageBucketOperatorStatusDef
 export class StorageBucketOperatorStatus extends JoyEnum(StorageBucketOperatorStatusDef) {}
 
-export type StorageBucketType = {
+export type IStorageBucket = {
   operator_status: StorageBucketOperatorStatus
   accepting_new_bags: bool
   voucher: Voucher
-  metadata: Text
+  metadata: Bytes
 }
 
 export class StorageBucket
@@ -164,24 +148,24 @@ export class StorageBucket
     operator_status: StorageBucketOperatorStatus,
     accepting_new_bags: bool,
     voucher: Voucher,
-    metadata: Text,
+    metadata: Bytes,
   })
-  implements StorageBucketType {}
+  implements IStorageBucket {}
 
-export type DataObjectCreationParametersType = {
+export type IDataObjectCreationParameters = {
   size: u64
-  ipfsContentId: Text
+  ipfsContentId: Bytes
 }
 
 export class DataObjectCreationParameters
   extends JoyStructDecorated({
     size: u64,
-    ipfsContentId: Text,
+    ipfsContentId: Bytes,
   })
-  implements DataObjectCreationParametersType {}
+  implements IDataObjectCreationParameters {}
 
-export type UploadParametersType = {
-  authenticationKey: Text
+export type IUploadParameters = {
+  authenticationKey: Bytes
   bagId: BagId
   objectCreationList: Vec<DataObjectCreationParameters>
   deletionPrizeSourceAccountId: AccountId
@@ -190,18 +174,18 @@ export type UploadParametersType = {
 
 export class UploadParameters
   extends JoyStructDecorated({
-    authenticationKey: Text,
+    authenticationKey: Bytes,
     bagId: BagId,
     objectCreationList: Vec.with(DataObjectCreationParameters),
     deletionPrizeSourceAccountId: AccountId,
     expectedDataSizeFee: BalanceOf,
   })
-  implements UploadParametersType {}
+  implements IUploadParameters {}
 
-export class Cid extends Text {}
+export class Cid extends Bytes {}
 export class ContentIdSet extends BTreeSet.with(Cid) {}
 
-export type DistributionBucketType = {
+export type IDistributionBucket = {
   accepting_new_bags: bool
   distributing: bool
   pending_invitations: BTreeSet<WorkerId>
@@ -217,9 +201,9 @@ export class DistributionBucket
     operators: BTreeSet.with(WorkerId),
     assigned_bags: u64,
   })
-  implements DistributionBucketType {}
+  implements IDistributionBucket {}
 
-export type DistributionBucketFamilyType = {
+export type IDistributionBucketFamily = {
   distribution_buckets: BTreeMap<DistributionBucketId, DistributionBucket>
 }
 
@@ -227,7 +211,7 @@ export class DistributionBucketFamily
   extends JoyStructDecorated({
     distribution_buckets: BTreeMap.with(DistributionBucketId, DistributionBucket),
   })
-  implements DistributionBucketFamilyType {}
+  implements IDistributionBucketFamily {}
 
 export const storageTypes: RegistryTypes = {
   StorageBucketId,