Merge branch 'nicaea' into eslint-config-into-nicaea

Mokhtar Naamani · 4 years ago · commit aa5caa0147
50 changed files with 2384 additions and 2591 deletions
  1. storage-node/.eslintrc.js (+38 -286)
  2. storage-node/.prettierrc (+8 -0)
  3. storage-node/package.json (+7 -1)
  4. storage-node/packages/cli/bin/cli.js (+46 -41)
  5. storage-node/packages/cli/bin/dev.js (+15 -18)
  6. storage-node/packages/cli/test/index.js (+1 -1)
  7. storage-node/packages/colossus/bin/cli.js (+59 -61)
  8. storage-node/packages/colossus/lib/app.js (+30 -32)
  9. storage-node/packages/colossus/lib/discovery.js (+26 -28)
  10. storage-node/packages/colossus/lib/middleware/file_uploads.js (+12 -13)
  11. storage-node/packages/colossus/lib/middleware/validate_responses.js (+23 -23)
  12. storage-node/packages/colossus/lib/sync.js (+34 -38)
  13. storage-node/packages/colossus/paths/asset/v0/{id}.js (+114 -128)
  14. storage-node/packages/colossus/paths/discover/v0/{id}.js (+64 -65)
  15. storage-node/packages/colossus/test/index.js (+1 -1)
  16. storage-node/packages/discovery/discover.js (+37 -40)
  17. storage-node/packages/discovery/example.js (+29 -32)
  18. storage-node/packages/discovery/index.js (+3 -4)
  19. storage-node/packages/discovery/publish.js (+17 -16)
  20. storage-node/packages/discovery/test/index.js (+1 -1)
  21. storage-node/packages/helios/bin/cli.js (+100 -85)
  22. storage-node/packages/helios/test/index.js (+1 -1)
  23. storage-node/packages/runtime-api/assets.js (+22 -20)
  24. storage-node/packages/runtime-api/balances.js (+25 -36)
  25. storage-node/packages/runtime-api/discovery.js (+12 -16)
  26. storage-node/packages/runtime-api/identities.js (+27 -26)
  27. storage-node/packages/runtime-api/index.js (+45 -37)
  28. storage-node/packages/runtime-api/test/assets.js (+19 -22)
  29. storage-node/packages/runtime-api/test/balances.js (+18 -20)
  30. storage-node/packages/runtime-api/test/identities.js (+48 -49)
  31. storage-node/packages/runtime-api/test/index.js (+6 -9)
  32. storage-node/packages/runtime-api/workers.js (+45 -44)
  33. storage-node/packages/storage/filter.js (+45 -49)
  34. storage-node/packages/storage/index.js (+4 -4)
  35. storage-node/packages/storage/storage.js (+162 -179)
  36. storage-node/packages/storage/test/storage.js (+150 -151)
  37. storage-node/packages/util/externalPromise.js (+10 -9)
  38. storage-node/packages/util/fs/resolve.js (+19 -21)
  39. storage-node/packages/util/fs/walk.js (+53 -62)
  40. storage-node/packages/util/lru.js (+46 -55)
  41. storage-node/packages/util/pagination.js (+46 -51)
  42. storage-node/packages/util/ranges.js (+207 -243)
  43. storage-node/packages/util/stripEndingSlash.js (+5 -6)
  44. storage-node/packages/util/test/fs/resolve.js (+38 -50)
  45. storage-node/packages/util/test/fs/walk.js (+29 -31)
  46. storage-node/packages/util/test/lru.js (+119 -131)
  47. storage-node/packages/util/test/pagination.js (+59 -66)
  48. storage-node/packages/util/test/ranges.js (+252 -269)
  49. storage-node/packages/util/test/stripEndingSlash.js (+13 -0)
  50. yarn.lock (+194 -20)

+ 38 - 286
storage-node/.eslintrc.js

@@ -1,290 +1,42 @@
 module.exports = {
-    "env": {
-        "es6": true,
-        "node": true
+    env: {
+        node: true,
+        es6: true,
+		mocha: true,
     },
-    "extends": "eslint:recommended",
-    "parserOptions": {
-        "ecmaVersion": 2018
+    globals: {
+        Atomics: "readonly",
+        SharedArrayBuffer: "readonly",
     },
-    "rules": {
-        "accessor-pairs": "error",
-        "array-bracket-newline": "off",
-        "array-bracket-spacing": [
-            "error",
-            "never",
-        ],
-        "array-callback-return": "error",
-        "array-element-newline": [
-          "error",
-          "consistent",
-        ],
-        "arrow-body-style": [
-          "warn",
-          "as-needed"
-        ],
-        "arrow-parens": [
-            "error",
-            "always"
-        ],
-        "arrow-spacing": [
-            "error",
-            {
-                "after": true,
-                "before": true
-            }
-        ],
-        "block-scoped-var": "error",
-        "block-spacing": "error",
-        "brace-style": "off",
-        "callback-return": "error",
-        "camelcase": "off",
-        "capitalized-comments": "off",
-        "class-methods-use-this": "error",
-        "comma-dangle": "off",
-        "comma-spacing": "off",
-        "comma-style": [
-            "error",
-            "last"
-        ],
-        "complexity": "error",
-        "computed-property-spacing": [
-            "error",
-            "never"
-        ],
-        "consistent-return": "error",
-        "consistent-this": "error",
-        "curly": "error",
-        "default-case": "error",
-        "dot-location": "error",
-        "dot-notation": "off",
-        "eol-last": "error",
-        "eqeqeq": "off",
-        "func-call-spacing": "error",
-        "func-name-matching": "off",
-        "func-names": "off",
-        "func-style": "off",
-        "function-paren-newline": "off",
-        "generator-star-spacing": "error",
-        "global-require": "off",
-        "guard-for-in": "warn",
-        "handle-callback-err": "error",
-        "id-blacklist": "error",
-        "id-length": "off",
-        "id-match": "error",
-        "implicit-arrow-linebreak": "off",
-        "indent": "off",
-        "indent-legacy": "off",
-        "init-declarations": "off",
-        "jsx-quotes": "error",
-        "key-spacing": "error",
-        "keyword-spacing": [
-            "error",
-            {
-                "after": true,
-                "before": true
-            }
-        ],
-        "line-comment-position": "off",
-        "linebreak-style": [
-            "error",
-            "unix"
-        ],
-        "lines-around-comment": "error",
-        "lines-around-directive": "error",
-        "lines-between-class-members": "error",
-        "max-classes-per-file": "error",
-        "max-depth": "error",
-        "max-len": "off",
-        "max-lines": "off",
-        "max-lines-per-function": "off",
-        "max-nested-callbacks": "error",
-        "max-params": "off",
-        "max-statements": "off",
-        "max-statements-per-line": "error",
-        "multiline-comment-style": "off",
-        "new-cap": "error",
-        "new-parens": "error",
-        "newline-after-var": "off",
-        "newline-before-return": "off",
-        "newline-per-chained-call": "off",
-        "no-alert": "error",
-        "no-array-constructor": "error",
-        "no-async-promise-executor": "error",
-        "no-await-in-loop": "error",
-        "no-bitwise": "error",
-        "no-buffer-constructor": "error",
-        "no-caller": "error",
-        "no-catch-shadow": "error",
-        "no-confusing-arrow": "error",
-        "no-continue": "off",
-        "no-constant-condition": "off",
-        "no-div-regex": "error",
-        "no-duplicate-imports": "error",
-        "no-else-return": "off",
-        "no-empty-function": "error",
-        "no-eq-null": "error",
-        "no-eval": "error",
-        "no-extend-native": "error",
-        "no-extra-bind": "error",
-        "no-extra-label": "error",
-        "no-extra-parens": "off",
-        "no-floating-decimal": "error",
-        "no-implicit-globals": "error",
-        "no-implied-eval": "error",
-        "no-inline-comments": "off",
-        "no-invalid-this": "error",
-        "no-iterator": "error",
-        "no-label-var": "error",
-        "no-labels": "error",
-        "no-lone-blocks": "error",
-        "no-lonely-if": "error",
-        "no-loop-func": "error",
-        "no-magic-numbers": "off",
-        "no-misleading-character-class": "error",
-        "no-mixed-operators": "error",
-        "no-mixed-requires": "error",
-        "no-multi-assign": "error",
-        "no-multi-spaces": "off",
-        "no-multi-str": "error",
-        "no-multiple-empty-lines": "error",
-        "no-native-reassign": "error",
-        "no-negated-condition": "error",
-        "no-negated-in-lhs": "error",
-        "no-nested-ternary": "error",
-        "no-new": "error",
-        "no-new-func": "error",
-        "no-new-object": "error",
-        "no-new-require": "error",
-        "no-new-wrappers": "error",
-        "no-octal-escape": "error",
-        "no-param-reassign": "error",
-        "no-path-concat": "error",
-        "no-plusplus": "off",
-        "no-process-env": "error",
-        "no-process-exit": "error",
-        "no-proto": "error",
-        "no-prototype-builtins": "error",
-        "no-restricted-globals": "error",
-        "no-restricted-imports": "error",
-        "no-restricted-modules": "error",
-        "no-restricted-properties": "error",
-        "no-restricted-syntax": "error",
-        "no-return-assign": "error",
-        "no-return-await": "error",
-        "no-script-url": "error",
-        "no-self-compare": "error",
-        "no-sequences": "error",
-        "no-shadow": "error",
-        "no-shadow-restricted-names": "error",
-        "no-spaced-func": "error",
-        "no-sync": "warn",
-        "no-tabs": "error",
-        "no-template-curly-in-string": "error",
-        "no-ternary": "off",
-        "no-throw-literal": "error",
-        "no-trailing-spaces": "error",
-        "no-undef-init": "error",
-        "no-undefined": "off",
-        "no-underscore-dangle": "off",
-        "no-unmodified-loop-condition": "error",
-        "no-unneeded-ternary": "off",
-        "no-unused-expressions": "error",
-        "no-unused-vars": [
-          "error",
-          {
-            "argsIgnorePattern": "^_",
-          },
-        ],
-        "no-use-before-define": "error",
-        "no-useless-call": "error",
-        "no-useless-catch": "error",
-        "no-useless-computed-key": "error",
-        "no-useless-concat": "error",
-        "no-useless-constructor": "error",
-        "no-useless-rename": "error",
-        "no-useless-return": "error",
-        "no-useless-escape": "off",
-        "no-var": "off",
-        "no-void": "error",
-        "no-warning-comments": "warn",
-        "no-whitespace-before-property": "error",
-        "no-with": "error",
-        "nonblock-statement-body-position": "error",
-        "object-curly-newline": "error",
-        "object-curly-spacing": [
-            "error",
-            "always"
-        ],
-        "object-shorthand": "off",
-        "one-var": "off",
-        "one-var-declaration-per-line": "error",
-        "operator-assignment": "error",
-        "operator-linebreak": "error",
-        "padded-blocks": "off",
-        "padding-line-between-statements": "error",
-        "prefer-arrow-callback": "off",
-        "prefer-const": "error",
-        "prefer-destructuring": "off",
-        "prefer-numeric-literals": "error",
-        "prefer-object-spread": "error",
-        "prefer-promise-reject-errors": "error",
-        "prefer-reflect": "off",
-        "prefer-rest-params": "error",
-        "prefer-spread": "error",
-        "prefer-template": "off",
-        "quote-props": "off",
-        "quotes": "off",
-        "radix": "error",
-        "require-atomic-updates": "error",
-        "require-await": "error",
-        "require-jsdoc": "warn",
-        "require-unicode-regexp": "error",
-        "rest-spread-spacing": [
-            "error",
-            "never"
-        ],
-        "semi": "off",
-        "semi-spacing": "error",
-        "semi-style": [
-            "error",
-            "last"
-        ],
-        "sort-imports": "error",
-        "sort-keys": "off",
-        "sort-vars": "error",
-        "space-before-blocks": "error",
-        "space-before-function-paren": "off",
-        "space-in-parens": [
-            "error",
-            "never"
-        ],
-        "space-infix-ops": "error",
-        "space-unary-ops": "error",
-        "spaced-comment": [
-            "error",
-            "always"
-        ],
-        "strict": "error",
-        "switch-colon-spacing": "error",
-        "symbol-description": "error",
-        "template-curly-spacing": [
-            "error",
-            "never"
-        ],
-        "template-tag-spacing": "error",
-        "unicode-bom": [
-            "error",
-            "never"
-        ],
-        "valid-jsdoc": "error",
-        "vars-on-top": "off",
-        "wrap-iife": "error",
-        "wrap-regex": "error",
-        "yield-star-spacing": "error",
-        "yoda": [
-            "error",
-            "never"
-        ]
-    }
+    extends: [
+        "esnext",
+        "esnext/style-guide",
+        "plugin:prettier/recommended"
+    ],
+	"rules": {
+		"import/no-commonjs": "off", // remove after converting to TS.
+		// Disabling Rules because of monorepo environment:
+		// https://github.com/benmosher/eslint-plugin-import/issues/1174
+		"import/no-extraneous-dependencies": "off",
+		"import/no-nodejs-modules": "off", // nodejs project
+		"no-console": "off" // we use console in the project
+	},
+	"overrides": [
+		{
+			"files": [
+				"**/test/ranges.js",
+				"**/test/lru.js",
+				"**/test/fs/walk.js",
+				"**/test/storage.js",
+				"**/test/identities.js",
+				"**/test/balances.js",
+				"**/test/assets.js",
+			],
+			"rules": {
+				// Disabling Rules because of used chai lib:
+				// https://stackoverflow.com/questions/45079454/no-unused-expressions-in-mocha-chai-unit-test-using-standardjs
+				"no-unused-expressions": "off",
+			}
+		}
+	]
 };
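
Why the overrides block disables no-unused-expressions in the listed test files: chai's BDD assertions terminate in bare property getters, which that rule reads as statements with no effect (the Stack Overflow question linked in the comment covers exactly this). A minimal illustrative sketch of the pattern, assuming a mocha/chai test file like the ones listed:

  const { expect } = require('chai')

  describe('example', () => {
    it('accepts an empty array', () => {
      const items = []
      // `.empty` is a getter that performs the assertion as a side effect,
      // so to ESLint the statement below looks like an unused expression.
      expect(items).to.be.empty
    })
  })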

+ 8 - 0
storage-node/.prettierrc

@@ -0,0 +1,8 @@
+{
+    "semi": false,
+    "trailingComma": "es5",
+    "singleQuote": true,
+	"arrowParens": "avoid",
+	"useTabs": false,
+	"tabWidth": 2
+}
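
Taken together these options mean Prettier strips semicolons, converts to single quotes, keeps ES5-style trailing commas, drops parentheses around single-parameter arrows, and indents with two spaces. An illustrative before/after (not a snippet from this diff):

  // before
  const greet = (name) => {
    return "hello " + name;
  };

  // after formatting with the config above
  const greet = name => {
    return 'hello ' + name
  }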

+ 7 - 1
storage-node/package.json

@@ -32,9 +32,15 @@
   ],
   "scripts": {
     "test": "wsrun --serial test",
-    "lint": "wsrun --serial lint"
+    "lint": "eslint --ignore-path .gitignore ."
   },
   "devDependencies": {
+    "eslint": "^5.16.0",
+    "eslint-config-esnext": "^4.1.0",
+    "eslint-config-prettier": "^6.11.0",
+    "eslint-plugin-babel": "^5.3.1",
+    "eslint-plugin-prettier": "^3.1.4",
+    "prettier": "^2.0.5",
     "wsrun": "^3.6.5"
   }
 }
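
With this change, yarn lint runs a single ESLint pass over the whole workspace from the repository root (honoring .gitignore) instead of delegating to each package's own lint script through wsrun, and the new devDependencies supply the esnext shareable configs plus the Prettier/ESLint bridge packages that the rewritten .eslintrc.js extends.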

+ 46 - 41
storage-node/packages/cli/bin/cli.js

@@ -33,7 +33,8 @@ const FLAG_DEFINITIONS = {
   // TODO
 }
 
-const cli = meow(`
+const cli = meow(
+  `
   Usage:
     $ storage-cli command [arguments..] [key_file] [passphrase]
 
@@ -54,16 +55,17 @@ const cli = meow(`
     dev-init          Setup chain with Alice as lead and storage provider.
     dev-check         Check the chain is setup with Alice as lead and storage provider.
   `,
-  { flags: FLAG_DEFINITIONS })
+  { flags: FLAG_DEFINITIONS }
+)
 
-function assert_file (name, filename) {
+function assertFile(name, filename) {
   assert(filename, `Need a ${name} parameter to proceed!`)
   assert(fs.statSync(filename).isFile(), `Path "${filename}" is not a file, aborting!`)
 }
 
-function load_identity (api, filename, passphrase) {
+function loadIdentity(api, filename, passphrase) {
   if (filename) {
-    assert_file('keyfile', filename)
+    assertFile('keyfile', filename)
     api.identities.loadUnlock(filename, passphrase)
   } else {
     debug('Loading Alice as identity')
@@ -73,48 +75,45 @@ function load_identity (api, filename, passphrase) {
 
 const commands = {
   // add Alice well known account as storage provider
-  'dev-init': async (api) => {
-    // dev accounts are automatically loaded, no need to add explicitly to keyring
-    // load_identity(api)
-    let dev = require('./dev')
+  'dev-init': async api => {
+    // dev accounts are automatically loaded, no need to add explicitly to keyring using loadIdentity(api)
+    const dev = require('./dev')
     return dev.init(api)
   },
   // Checks that the setup done by dev-init command was successful.
-  'dev-check': async (api) => {
-    // dev accounts are automatically loaded, no need to add explicitly to keyring
-    // load_identity(api)
-    let dev = require('./dev')
+  'dev-check': async api => {
+    // dev accounts are automatically loaded, no need to add explicitly to keyring using loadIdentity(api)
+    const dev = require('./dev')
     return dev.check(api)
   },
   // The upload method is not correctly implemented
   // needs to get the liaison after creating a data object,
   // resolve the ipns id to the asset put api url of the storage-node
   // before uploading..
-  'upload': async (api, url, filename, do_type_id, keyfile, passphrase) => {
-    load_identity(keyfile, passphrase)
+  upload: async (api, url, filename, doTypeId, keyfile, passphrase) => {
+    loadIdentity(keyfile, passphrase)
     // Check parameters
-    assert_file('file', filename)
+    assertFile('file', filename)
 
     const size = fs.statSync(filename).size
     debug(`File "${filename}" is ${chalk.green(size)} Bytes.`)
 
-    if (!do_type_id) {
-      do_type_id = 1
+    if (!doTypeId) {
+      doTypeId = 1
     }
 
-    debug('Data Object Type ID is: ' + chalk.green(do_type_id))
+    debug('Data Object Type ID is: ' + chalk.green(doTypeId))
 
     // Generate content ID
     // FIXME this require path is like this because of
     // https://github.com/Joystream/apps/issues/207
     const { ContentId } = require('@joystream/types/media')
-    var cid = ContentId.generate()
+    let cid = ContentId.generate()
     cid = cid.encode().toString()
     debug('Generated content ID: ' + chalk.green(cid))
 
     // Create Data Object
-    const data_object = await api.assets.createDataObject(
-      api.identities.key.address, cid, do_type_id, size)
+    await api.assets.createDataObject(api.identities.key.address, cid, doTypeId, size)
     debug('Data object created.')
 
     // TODO in future, optionally contact liaison here?
@@ -124,12 +123,12 @@ const commands = {
 
     const f = fs.createReadStream(filename)
     const opts = {
-      url: url,
+      url,
       headers: {
         'content-type': '',
-        'content-length': `${size}`
+        'content-length': `${size}`,
       },
-      json: true
+      json: true,
     }
     return new Promise((resolve, reject) => {
       const r = request.put(opts, (error, response, body) => {
@@ -151,15 +150,15 @@ const commands = {
   // needs to be updated to take a content id and resolve it a potential set
   // of providers that has it, and select one (possibly try more than one provider)
   // to fetch it from the get api url of a provider..
-  'download': async (api, url, content_id, filename) => {
+  download: async (api, url, contentId, filename) => {
     const request = require('request')
-    url = `${url}asset/v0/${content_id}`
+    url = `${url}asset/v0/${contentId}`
     debug('Downloading URL', chalk.green(url), 'to', chalk.green(filename))
 
     const f = fs.createWriteStream(filename)
     const opts = {
-      url: url,
-      json: true
+      url,
+      json: true,
     }
     return new Promise((resolve, reject) => {
       const r = request.get(opts, (error, response, body) => {
@@ -168,9 +167,15 @@ const commands = {
           return
         }
 
-        debug('Downloading', chalk.green(response.headers['content-type']), 'of size', chalk.green(response.headers['content-length']), '...')
+        debug(
+          'Downloading',
+          chalk.green(response.headers['content-type']),
+          'of size',
+          chalk.green(response.headers['content-length']),
+          '...'
+        )
 
-        f.on('error', (err) => {
+        f.on('error', err => {
           reject(err)
         })
 
@@ -187,17 +192,17 @@ const commands = {
     })
   },
   // similar to 'download' function
-  'head': async (api, url, content_id) => {
+  head: async (api, url, contentId) => {
     const request = require('request')
-    url = `${url}asset/v0/${content_id}`
+    url = `${url}asset/v0/${contentId}`
     debug('Checking URL', chalk.green(url), '...')
 
     const opts = {
-      url: url,
-      json: true
+      url,
+      json: true,
     }
     return new Promise((resolve, reject) => {
-      const r = request.head(opts, (error, response, body) => {
+      request.head(opts, (error, response, body) => {
         if (error) {
           reject(error)
           return
@@ -208,17 +213,17 @@ const commands = {
           return
         }
 
-        for (var propname in response.headers) {
+        for (const propname in response.headers) {
           debug(`  ${chalk.yellow(propname)}: ${response.headers[propname]}`)
         }
 
         resolve()
       })
     })
-  }
+  },
 }
 
-async function main () {
+async function main() {
   const api = await RuntimeApi.create()
 
   // Simple CLI commands
@@ -227,7 +232,7 @@ async function main () {
     throw new Error('Need a command to run!')
   }
 
-  if (commands.hasOwnProperty(command)) {
+  if (Object.prototype.hasOwnProperty.call(commands, command)) {
     // Command recognized
     const args = _.clone(cli.input).slice(1)
     await commands[command](api, ...args)
@@ -240,7 +245,7 @@ main()
   .then(() => {
     process.exit(0)
   })
-  .catch((err) => {
+  .catch(err => {
     console.error(chalk.red(err.stack))
     process.exit(-1)
   })
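
The hasOwnProperty change above is the standard fix for ESLint's no-prototype-builtins rule: calling the method through Object.prototype keeps the lookup working even when the target object shadows hasOwnProperty or was created without a prototype. A small sketch (the commands map here is hypothetical, not code from this commit):

  const commands = Object.create(null) // no Object.prototype in the chain
  commands.upload = () => {}

  // commands.hasOwnProperty('upload') would throw: not a function
  Object.prototype.hasOwnProperty.call(commands, 'upload') // true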

+ 15 - 18
storage-node/packages/cli/bin/dev.js

@@ -1,28 +1,25 @@
-/* eslint-disable no-console */
-
 'use strict'
 
 const debug = require('debug')('joystream:storage-cli:dev')
-const assert = require('assert')
 
 // Derivation path appended to well known development seed used on
 // development chains
 const ALICE_URI = '//Alice'
 const ROLE_ACCOUNT_URI = '//Colossus'
 
-function aliceKeyPair (api) {
+function aliceKeyPair(api) {
   return api.identities.keyring.addFromUri(ALICE_URI, null, 'sr25519')
 }
 
-function roleKeyPair (api) {
+function roleKeyPair(api) {
   return api.identities.keyring.addFromUri(ROLE_ACCOUNT_URI, null, 'sr25519')
 }
 
-function developmentPort () {
+function developmentPort() {
   return 3001
 }
 
-const check = async (api) => {
+const check = async api => {
   const roleAccountId = roleKeyPair(api).address
   const providerId = await api.workers.findProviderIdByRoleAccount(roleAccountId)
 
@@ -43,7 +40,7 @@ const check = async (api) => {
 // Setup Alice account on a developement chain as
 // a member, storage lead, and a storage provider using a deterministic
 // development key for the role account
-const init = async (api) => {
+const init = async api => {
   try {
     await check(api)
     return
@@ -82,7 +79,7 @@ const init = async (api) => {
   if (aliceMemberId === undefined) {
     debug('Registering Alice as member..')
     aliceMemberId = await api.identities.registerMember(alice, {
-      handle: 'alice'
+      handle: 'alice',
     })
   } else {
     debug('Alice is already a member')
@@ -90,10 +87,10 @@ const init = async (api) => {
 
   // Make alice the storage lead
   debug('Making Alice the storage Lead')
-  const leadOpeningId = await api.workers.dev_addStorageLeadOpening()
-  const leadApplicationId = await api.workers.dev_applyOnOpening(leadOpeningId, aliceMemberId, alice, alice)
-  api.workers.dev_beginLeadOpeningReview(leadOpeningId)
-  await api.workers.dev_fillLeadOpening(leadOpeningId, leadApplicationId)
+  const leadOpeningId = await api.workers.devAddStorageLeadOpening()
+  const leadApplicationId = await api.workers.devApplyOnOpening(leadOpeningId, aliceMemberId, alice, alice)
+  api.workers.devBeginLeadOpeningReview(leadOpeningId)
+  await api.workers.devFillLeadOpening(leadOpeningId, leadApplicationId)
 
   const leadAccount = await api.workers.getLeadRoleAccount()
   if (!leadAccount.eq(alice)) {
@@ -103,16 +100,16 @@ const init = async (api) => {
   // Create a storage openinging, apply, start review, and fill opening
   debug(`Making ${ROLE_ACCOUNT_URI} account a storage provider`)
 
-  const openingId = await api.workers.dev_addStorageOpening()
+  const openingId = await api.workers.devAddStorageOpening()
   debug(`created new storage opening: ${openingId}`)
 
-  const applicationId = await api.workers.dev_applyOnOpening(openingId, aliceMemberId, alice, roleAccount)
+  const applicationId = await api.workers.devApplyOnOpening(openingId, aliceMemberId, alice, roleAccount)
   debug(`applied with application id: ${applicationId}`)
 
-  api.workers.dev_beginStorageOpeningReview(openingId)
+  api.workers.devBeginStorageOpeningReview(openingId)
 
   debug(`filling storage opening`)
-  const providerId = await api.workers.dev_fillStorageOpening(openingId, applicationId)
+  const providerId = await api.workers.devFillStorageOpening(openingId, applicationId)
 
   debug(`Assigned storage provider id: ${providerId}`)
 
@@ -124,5 +121,5 @@ module.exports = {
   check,
   aliceKeyPair,
   roleKeyPair,
-  developmentPort
+  developmentPort,
 }

+ 1 - 1
storage-node/packages/cli/test/index.js

@@ -1 +1 @@
-// Add Tests!
+// Add Tests!

+ 59 - 61
storage-node/packages/colossus/bin/cli.js

@@ -25,38 +25,39 @@ const FLAG_DEFINITIONS = {
   port: {
     type: 'number',
     alias: 'p',
-    default: 3000
+    default: 3000,
   },
   keyFile: {
     type: 'string',
-    isRequired: (flags, input) => {
+    isRequired: flags => {
       return !flags.dev
-    }
+    },
   },
   publicUrl: {
     type: 'string',
     alias: 'u',
-    isRequired: (flags, input) => {
+    isRequired: flags => {
       return !flags.dev
-    }
+    },
   },
   passphrase: {
-    type: 'string'
+    type: 'string',
   },
   wsProvider: {
     type: 'string',
-    default: 'ws://localhost:9944'
+    default: 'ws://localhost:9944',
   },
   providerId: {
     type: 'number',
     alias: 'i',
-    isRequired: (flags, input) => {
+    isRequired: flags => {
       return !flags.dev
-    }
-  }
+    },
+  },
 }
 
-const cli = meow(`
+const cli = meow(
+  `
   Usage:
     $ colossus [command] [arguments]
 
@@ -76,14 +77,15 @@ const cli = meow(`
     --port=PORT, -p PORT    Port number to listen on, defaults to 3000.
     --ws-provider WS_URL    Joystream-node websocket provider, defaults to ws://localhost:9944
   `,
-  { flags: FLAG_DEFINITIONS })
+  { flags: FLAG_DEFINITIONS }
+)
 
 // All-important banner!
-function banner () {
+function banner() {
   console.log(chalk.blue(figlet.textSync('joystream', 'Speed')))
 }
 
-function start_express_app(app, port) {
+function startExpressApp(app, port) {
   const http = require('http')
   const server = http.createServer(app)
 
@@ -102,39 +104,39 @@ function start_express_app(app, port) {
 }
 
 // Start app
-function start_all_services ({ store, api, port }) {
+function startAllServices({ store, api, port }) {
   const app = require('../lib/app')(PROJECT_ROOT, store, api) // reduce falgs to only needed values
-  return start_express_app(app, port)
+  return startExpressApp(app, port)
 }
 
 // Start discovery service app only
-function start_discovery_service ({ api, port }) {
+function startDiscoveryService({ api, port }) {
   const app = require('../lib/discovery')(PROJECT_ROOT, api) // reduce flags to only needed values
-  return start_express_app(app, port)
+  return startExpressApp(app, port)
 }
 
 // Get an initialized storage instance
-function get_storage (runtime_api) {
+function getStorage(runtimeApi) {
   // TODO at some point, we can figure out what backend-specific connection
   // options make sense. For now, just don't use any configuration.
   const { Storage } = require('@joystream/storage-node-backend')
 
   const options = {
-    resolve_content_id: async (content_id) => {
+    resolve_content_id: async contentId => {
       // Resolve via API
-      const obj = await runtime_api.assets.getDataObject(content_id)
+      const obj = await runtimeApi.assets.getDataObject(contentId)
       if (!obj || obj.isNone) {
         return
       }
       // if obj.liaison_judgement !== Accepted .. throw ?
       return obj.unwrap().ipfs_content_id.toString()
-    }
+    },
   }
 
   return Storage.create(options)
 }
 
-async function init_api_production ({ wsProvider, providerId, keyFile, passphrase }) {
+async function initApiProduction({ wsProvider, providerId, keyFile, passphrase }) {
   // Load key information
   const { RuntimeApi } = require('@joystream/storage-runtime-api')
 
@@ -150,28 +152,28 @@ async function init_api_production ({ wsProvider, providerId, keyFile, passphras
     account_file: keyFile,
     passphrase,
     provider_url: wsProvider,
-    storageProviderId: providerId
+    storageProviderId: providerId,
   })
 
   if (!api.identities.key) {
     throw new Error('Failed to unlock storage provider account')
   }
 
-  if (!await api.workers.isRoleAccountOfStorageProvider(api.storageProviderId, api.identities.key.address)) {
+  if (!(await api.workers.isRoleAccountOfStorageProvider(api.storageProviderId, api.identities.key.address))) {
     throw new Error('storage provider role account and storageProviderId are not associated with a worker')
   }
 
   return api
 }
 
-async function init_api_development () {
+async function initApiDevelopment() {
   // Load key information
   const { RuntimeApi } = require('@joystream/storage-runtime-api')
 
   const wsProvider = 'ws://localhost:9944'
 
   const api = await RuntimeApi.create({
-    provider_url: wsProvider
+    provider_url: wsProvider,
   })
 
   const dev = require('../../cli/bin/dev')
@@ -183,39 +185,39 @@ async function init_api_development () {
   return api
 }
 
-function get_service_information (publicUrl) {
+function getServiceInformation(publicUrl) {
   // For now assume we run all services on the same endpoint
-  return({
+  return {
     asset: {
       version: 1, // spec version
-      endpoint: publicUrl
+      endpoint: publicUrl,
     },
     discover: {
       version: 1, // spec version
-      endpoint: publicUrl
-    }
-  })
+      endpoint: publicUrl,
+    },
+  }
 }
 
-async function announce_public_url (api, publicUrl) {
+async function announcePublicUrl(api, publicUrl) {
   // re-announce in future
-  const reannounce = function (timeoutMs) {
-    setTimeout(announce_public_url, timeoutMs, api, publicUrl)
+  const reannounce = function(timeoutMs) {
+    setTimeout(announcePublicUrl, timeoutMs, api, publicUrl)
   }
 
   debug('announcing public url')
   const { publish } = require('@joystream/service-discovery')
 
   try {
-    const serviceInformation = get_service_information(publicUrl)
+    const serviceInformation = getServiceInformation(publicUrl)
 
-    let keyId = await publish.publish(serviceInformation)
+    const keyId = await publish.publish(serviceInformation)
 
     await api.discovery.setAccountInfo(keyId)
 
     debug('publishing complete, scheduling next update')
 
-// >> sometimes after tx is finalized.. we are not reaching here!
+    // >> sometimes after tx is finalized.. we are not reaching here!
 
     // Reannounce before expiery. Here we are concerned primarily
     // with keeping the account information refreshed and 'available' in
@@ -230,61 +232,57 @@ async function announce_public_url (api, publicUrl) {
   }
 }
 
-function go_offline (api) {
-  return api.discovery.unsetAccountInfo()
-}
-
 // Simple CLI commands
-var command = cli.input[0]
+let command = cli.input[0]
 if (!command) {
   command = 'server'
 }
 
-async function start_colossus ({ api, publicUrl, port, flags }) {
+async function startColossus({ api, publicUrl, port, flags }) {
   // TODO: check valid url, and valid port number
-  const store = get_storage(api)
+  const store = getStorage(api)
   banner()
-  const { start_syncing } = require('../lib/sync')
-  start_syncing(api, { syncPeriod: SYNC_PERIOD_MS }, store)
-  announce_public_url(api, publicUrl)
-  return start_all_services({ store, api, port, flags }) // dont pass all flags only required values
+  const { startSyncing } = require('../lib/sync')
+  startSyncing(api, { syncPeriod: SYNC_PERIOD_MS }, store)
+  announcePublicUrl(api, publicUrl)
+  return startAllServices({ store, api, port, flags }) // dont pass all flags only required values
 }
 
 const commands = {
-  'server': async () => {
+  server: async () => {
     let publicUrl, port, api
 
     if (cli.flags.dev) {
       const dev = require('../../cli/bin/dev')
-      api = await init_api_development()
+      api = await initApiDevelopment()
       port = dev.developmentPort()
       publicUrl = `http://localhost:${port}/`
     } else {
-      api = await init_api_production(cli.flags)
+      api = await initApiProduction(cli.flags)
       publicUrl = cli.flags.publicUrl
       port = cli.flags.port
     }
 
-    return start_colossus({ api, publicUrl, port })
+    return startColossus({ api, publicUrl, port })
   },
-  'discovery': async () => {
+  discovery: async () => {
     debug('Starting Joystream Discovery Service')
     const { RuntimeApi } = require('@joystream/storage-runtime-api')
     const wsProvider = cli.flags.wsProvider
     const api = await RuntimeApi.create({ provider_url: wsProvider })
     const port = cli.flags.port
-    await start_discovery_service({ api, port })
-  }
+    await startDiscoveryService({ api, port })
+  },
 }
 
-async function main () {
+async function main() {
   // Simple CLI commands
-  var command = cli.input[0]
+  let command = cli.input[0]
   if (!command) {
     command = 'server'
   }
 
-  if (commands.hasOwnProperty(command)) {
+  if (Object.prototype.hasOwnProperty.call(commands, command)) {
     // Command recognized
     const args = _.clone(cli.input).slice(1)
     await commands[command](...args)
@@ -297,7 +295,7 @@ main()
   .then(() => {
     process.exit(0)
   })
-  .catch((err) => {
+  .catch(err => {
     console.error(chalk.red(err.stack))
     process.exit(-1)
   })

+ 30 - 32
storage-node/packages/colossus/lib/app.js

@@ -16,61 +16,59 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
 // Node requires
-const fs = require('fs');
-const path = require('path');
+const fs = require('fs')
+const path = require('path')
 
 // npm requires
-const express = require('express');
-const openapi = require('express-openapi');
-const bodyParser = require('body-parser');
-const cors = require('cors');
-const yaml = require('js-yaml');
+const express = require('express')
+const openapi = require('express-openapi')
+const bodyParser = require('body-parser')
+const cors = require('cors')
+const yaml = require('js-yaml')
 
 // Project requires
-const validateResponses = require('./middleware/validate_responses');
-const fileUploads = require('./middleware/file_uploads');
-const pagination = require('@joystream/storage-utils/pagination');
+const validateResponses = require('./middleware/validate_responses')
+const fileUploads = require('./middleware/file_uploads')
+const pagination = require('@joystream/storage-utils/pagination')
 
 // Configure app
-function create_app(project_root, storage, runtime)
-{
-  const app = express();
-  app.use(cors());
-  app.use(bodyParser.json());
+function createApp(projectRoot, storage, runtime) {
+  const app = express()
+  app.use(cors())
+  app.use(bodyParser.json())
   // FIXME app.use(bodyParser.urlencoded({ extended: true }));
 
   // Load & extend/configure API docs
-  var api = yaml.safeLoad(fs.readFileSync(
-    path.resolve(project_root, 'api-base.yml')));
-  api['x-express-openapi-additional-middleware'] = [validateResponses];
-  api['x-express-openapi-validation-strict'] = true;
+  let api = yaml.safeLoad(fs.readFileSync(path.resolve(projectRoot, 'api-base.yml')))
+  api['x-express-openapi-additional-middleware'] = [validateResponses]
+  api['x-express-openapi-validation-strict'] = true
 
-  api = pagination.openapi(api);
+  api = pagination.openapi(api)
 
   openapi.initialize({
     apiDoc: api,
-    app: app,
-    paths: path.resolve(project_root, 'paths'),
+    app,
+    paths: path.resolve(projectRoot, 'paths'),
     docsPath: '/swagger.json',
     consumesMiddleware: {
-      'multipart/form-data': fileUploads
+      'multipart/form-data': fileUploads,
     },
     dependencies: {
-      storage: storage,
-      runtime: runtime,
+      storage,
+      runtime,
     },
-  });
+  })
 
   // If no other handler gets triggered (errors), respond with the
   // error serialized to JSON.
-  app.use(function(err, req, res, next) {
-    res.status(err.status).json(err);
-  });
+  app.use(function(err, req, res) {
+    res.status(err.status).json(err)
+  })
 
-  return app;
+  return app
 }
 
-module.exports = create_app;
+module.exports = createApp
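
One caveat about the error handler above: Express recognizes error-handling middleware by its four-parameter signature, so dropping the unused next parameter turns it into ordinary middleware that no longer receives errors. The usual lint-friendly form keeps the arity and silences the rule instead, e.g.:

  // eslint-disable-next-line no-unused-vars
  app.use(function(err, req, res, next) {
    res.status(err.status).json(err)
  })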

+ 26 - 28
storage-node/packages/colossus/lib/discovery.js

@@ -16,57 +16,55 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
 // npm requires
-const express = require('express');
-const openapi = require('express-openapi');
-const bodyParser = require('body-parser');
-const cors = require('cors');
-const yaml = require('js-yaml');
+const express = require('express')
+const openapi = require('express-openapi')
+const bodyParser = require('body-parser')
+const cors = require('cors')
+const yaml = require('js-yaml')
 
 // Node requires
-const fs = require('fs');
-const path = require('path');
+const fs = require('fs')
+const path = require('path')
 
 // Project requires
-const validateResponses = require('./middleware/validate_responses');
+const validateResponses = require('./middleware/validate_responses')
 
 // Configure app
-function create_app(project_root, runtime)
-{
-  const app = express();
-  app.use(cors());
-  app.use(bodyParser.json());
+function createApp(projectRoot, runtime) {
+  const app = express()
+  app.use(cors())
+  app.use(bodyParser.json())
   // FIXME app.use(bodyParser.urlencoded({ extended: true }));
 
   // Load & extend/configure API docs
-  var api = yaml.safeLoad(fs.readFileSync(
-    path.resolve(project_root, 'api-base.yml')));
-  api['x-express-openapi-additional-middleware'] = [validateResponses];
-  api['x-express-openapi-validation-strict'] = true;
+  const api = yaml.safeLoad(fs.readFileSync(path.resolve(projectRoot, 'api-base.yml')))
+  api['x-express-openapi-additional-middleware'] = [validateResponses]
+  api['x-express-openapi-validation-strict'] = true
 
   openapi.initialize({
     apiDoc: api,
-    app: app,
-    //paths: path.resolve(project_root, 'discovery_app_paths'),
+    app,
+    // paths: path.resolve(projectRoot, 'discovery_app_paths'),
     paths: {
       path: '/discover/v0/{id}',
-      module: require('../paths/discover/v0/{id}')
+      module: require('../paths/discover/v0/{id}'),
     },
     docsPath: '/swagger.json',
     dependencies: {
-      runtime: runtime,
+      runtime,
     },
-  });
+  })
 
   // If no other handler gets triggered (errors), respond with the
   // error serialized to JSON.
-  app.use(function(err, req, res, next) {
-    res.status(err.status).json(err);
-  });
+  app.use(function(err, req, res) {
+    res.status(err.status).json(err)
+  })
 
-  return app;
+  return app
 }
 
-module.exports = create_app;
+module.exports = createApp
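
The same Express caveat applies here: the four-parameter signature is what marks a function as error-handling middleware, so removing next changes how Express dispatches errors to it (see the note under lib/app.js above).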

+ 12 - 13
storage-node/packages/colossus/lib/middleware/file_uploads.js

@@ -16,29 +16,28 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const multer = require('multer');
+const multer = require('multer')
 
 // Taken from express-openapi examples
-module.exports = function(req, res, next)
-{
+module.exports = function(req, res, next) {
   multer().any()(req, res, function(err) {
     if (err) {
-      return next(err);
+      return next(err)
     }
     // Handle both single and multiple files
     const filesMap = req.files.reduce(
       (acc, f) =>
         Object.assign(acc, {
-          [f.fieldname]: (acc[f.fieldname] || []).concat(f)
+          [f.fieldname]: (acc[f.fieldname] || []).concat(f),
         }),
       {}
-    );
-    Object.keys(filesMap).forEach((fieldname) => {
-      const files = filesMap[fieldname];
-      req.body[fieldname] = files.length > 1 ? files.map(() => '') : '';
-    });
-    return next();
-  });
+    )
+    Object.keys(filesMap).forEach(fieldname => {
+      const files = filesMap[fieldname]
+      req.body[fieldname] = files.length > 1 ? files.map(() => '') : ''
+    })
+    return next()
+  })
 }

+ 23 - 23
storage-node/packages/colossus/lib/middleware/validate_responses.js

@@ -16,46 +16,46 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const debug = require('debug')('joystream:middleware:validate');
+const debug = require('debug')('joystream:middleware:validate')
 
 // Function taken directly from https://github.com/kogosoftwarellc/open-api/tree/master/packages/express-openapi
-module.exports = function(req, res, next)
-{
-  const strictValidation = req.apiDoc['x-express-openapi-validation-strict'] ? true : false;
+module.exports = function(req, res, next) {
+  const strictValidation = !!req.apiDoc['x-express-openapi-validation-strict']
   if (typeof res.validateResponse === 'function') {
-    const send = res.send;
+    const send = res.send
     res.send = function expressOpenAPISend(...args) {
-      const onlyWarn = !strictValidation;
+      const onlyWarn = !strictValidation
       if (res.get('x-express-openapi-validation-error-for') !== undefined) {
-        return send.apply(res, args);
+        return send.apply(res, args)
       }
       if (res.get('x-express-openapi-validation-for') !== undefined) {
-        return send.apply(res, args);
+        return send.apply(res, args)
       }
 
-      const body = args[0];
-      let validation = res.validateResponse(res.statusCode, body);
-      let validationMessage;
+      const body = args[0]
+      let validation = res.validateResponse(res.statusCode, body)
+      let validationMessage
       if (validation === undefined) {
-        validation = { message: undefined, errors: undefined };
+        validation = { message: undefined, errors: undefined }
       }
       if (validation.errors) {
-        const errorList = Array.from(validation.errors).map((_) => _.message).join(',');
-        validationMessage = `Invalid response for status code ${res.statusCode}: ${errorList}`;
-        debug(validationMessage);
+        const errorList = Array.from(validation.errors)
+          .map(_ => _.message)
+          .join(',')
+        validationMessage = `Invalid response for status code ${res.statusCode}: ${errorList}`
+        debug(validationMessage)
         // Set to avoid a loop, and to provide the original status code
-        res.set('x-express-openapi-validation-error-for', res.statusCode.toString());
+        res.set('x-express-openapi-validation-error-for', res.statusCode.toString())
       }
       if ((onlyWarn || !validation.errors) && res.statusCode) {
-        res.set('x-express-openapi-validation-for', res.statusCode.toString());
-        return send.apply(res, args);
-      } else {
-        res.status(500);
-        return res.json({ error: validationMessage });
+        res.set('x-express-openapi-validation-for', res.statusCode.toString())
+        return send.apply(res, args)
       }
+      res.status(500)
+      return res.json({ error: validationMessage })
     }
   }
-  next();
+  next()
 }

+ 34 - 38
storage-node/packages/colossus/lib/sync.js

@@ -16,99 +16,95 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const debug = require('debug')('joystream:sync');
+const debug = require('debug')('joystream:sync')
 
-async function sync_callback(api, storage) {
+async function syncCallback(api, storage) {
   // The first step is to gather all data objects from chain.
   // TODO: in future, limit to a configured tranche
   // FIXME this isn't actually on chain yet, so we'll fake it.
-  const knownContentIds = await api.assets.getKnownContentIds() || [];
+  const knownContentIds = (await api.assets.getKnownContentIds()) || []
 
-  const role_addr = api.identities.key.address
+  const roleAddress = api.identities.key.address
   const providerId = api.storageProviderId
 
   // Iterate over all sync objects, and ensure they're synced.
-  const allChecks = knownContentIds.map(async (content_id) => {
-    let { relationship, relationshipId } = await api.assets.getStorageRelationshipAndId(providerId, content_id);
+  const allChecks = knownContentIds.map(async contentId => {
+    // eslint-disable-next-line prefer-const
+    let { relationship, relationshipId } = await api.assets.getStorageRelationshipAndId(providerId, contentId)
 
     // get the data object
     // make sure the data object was Accepted by the liaison,
     // don't just blindly attempt to fetch them
 
-    let fileLocal;
+    let fileLocal
     try {
       // check if we have content or not
-      let stats = await storage.stat(content_id);
-      fileLocal = stats.local;
+      const stats = await storage.stat(contentId)
+      fileLocal = stats.local
     } catch (err) {
       // on error stating or timeout
-      debug(err.message);
+      debug(err.message)
       // we don't have content if we can't stat it
-      fileLocal = false;
+      fileLocal = false
     }
 
     if (!fileLocal) {
       try {
-        await storage.synchronize(content_id);
+        await storage.synchronize(contentId)
       } catch (err) {
         // duplicate logging
         // debug(err.message)
         return
       }
       // why are we returning, if we synced the file
-      return;
+      return
     }
 
     if (!relationship) {
       // create relationship
-      debug(`Creating new storage relationship for ${content_id.encode()}`);
+      debug(`Creating new storage relationship for ${contentId.encode()}`)
       try {
-        relationshipId = await api.assets.createAndReturnStorageRelationship(role_addr, providerId, content_id);
-        await api.assets.toggleStorageRelationshipReady(role_addr, providerId, relationshipId, true);
+        relationshipId = await api.assets.createAndReturnStorageRelationship(roleAddress, providerId, contentId)
+        await api.assets.toggleStorageRelationshipReady(roleAddress, providerId, relationshipId, true)
       } catch (err) {
-        debug(`Error creating new storage relationship ${content_id.encode()}: ${err.stack}`);
-        return;
+        debug(`Error creating new storage relationship ${contentId.encode()}: ${err.stack}`)
+        return
       }
     } else if (!relationship.ready) {
-      debug(`Updating storage relationship to ready for ${content_id.encode()}`);
+      debug(`Updating storage relationship to ready for ${contentId.encode()}`)
       // update to ready. (Why would there be a relationship set to ready: false?)
       try {
-        await api.assets.toggleStorageRelationshipReady(role_addr, providerId, relationshipId, true);
-      } catch(err) {
-        debug(`Error setting relationship ready ${content_id.encode()}: ${err.stack}`);
+        await api.assets.toggleStorageRelationshipReady(roleAddress, providerId, relationshipId, true)
+      } catch (err) {
+        debug(`Error setting relationship ready ${contentId.encode()}: ${err.stack}`)
       }
     } else {
       // we already have content and a ready relationship set. No need to do anything
-      // debug(`content already stored locally ${content_id.encode()}`);
+      // debug(`content already stored locally ${contentId.encode()}`);
     }
-  });
-
+  })
 
-  return Promise.all(allChecks);
+  return Promise.all(allChecks)
 }
 
-
-async function sync_periodic(api, flags, storage)
-{
+async function syncPeriodic(api, flags, storage) {
   try {
     debug('Starting sync run...')
-    await sync_callback(api, storage)
+    await syncCallback(api, storage)
     debug('sync run complete')
   } catch (err) {
-    debug(`Error in sync_periodic ${err.stack}`);
+    debug(`Error in syncPeriodic ${err.stack}`)
   }
   // always try again
-  setTimeout(sync_periodic, flags.syncPeriod, api, flags, storage);
+  setTimeout(syncPeriodic, flags.syncPeriod, api, flags, storage)
 }
 
-
-function start_syncing(api, flags, storage)
-{
-  sync_periodic(api, flags, storage);
+function startSyncing(api, flags, storage) {
+  syncPeriodic(api, flags, storage)
 }
 
 module.exports = {
-  start_syncing: start_syncing,
+  startSyncing,
 }
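
A note on the pattern above: rescheduling with setTimeout at the end of each completed run, rather than using setInterval, guarantees sync runs never overlap, since the next timer is only armed after the previous run resolves or fails; the extra arguments passed to setTimeout are forwarded to syncPeriodic on the next invocation.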

+ 114 - 128
storage-node/packages/colossus/paths/asset/v0/{id}.js

@@ -16,25 +16,22 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const path = require('path');
+const path = require('path')
 
-const debug = require('debug')('joystream:colossus:api:asset');
+const debug = require('debug')('joystream:colossus:api:asset')
 
-const util_ranges = require('@joystream/storage-utils/ranges');
-const filter = require('@joystream/storage-node-backend/filter');
+const utilRanges = require('@joystream/storage-utils/ranges')
+const filter = require('@joystream/storage-node-backend/filter')
 
-function error_handler(response, err, code)
-{
-  debug(err);
-  response.status((err.code || code) || 500).send({ message: err.toString() });
+function errorHandler(response, err, code) {
+  debug(err)
+  response.status(err.code || code || 500).send({ message: err.toString() })
 }
 
-
-module.exports = function(storage, runtime)
-{
-  var doc = {
+module.exports = function(storage, runtime) {
+  const doc = {
     // parameters for all operations in this path
     parameters: [
       {
@@ -49,203 +46,195 @@ module.exports = function(storage, runtime)
     ],
 
     // Head: report that ranges are OK
-    head: async function(req, res, _next)
-    {
-      const id = req.params.id;
+    async head(req, res) {
+      const id = req.params.id
 
       // Open file
       try {
-        const size = await storage.size(id);
-        const stream = await storage.open(id, 'r');
-        const type = stream.file_info.mime_type;
+        const size = await storage.size(id)
+        const stream = await storage.open(id, 'r')
+        const type = stream.fileInfo.mimeType
 
         // Close the stream; we don't need to fetch the file (if we haven't
         // already). Then return result.
-        stream.destroy();
+        stream.destroy()
 
-        res.status(200);
-        res.contentType(type);
-        res.header('Content-Disposition', 'inline');
-        res.header('Content-Transfer-Encoding', 'binary');
-        res.header('Accept-Ranges', 'bytes');
+        res.status(200)
+        res.contentType(type)
+        res.header('Content-Disposition', 'inline')
+        res.header('Content-Transfer-Encoding', 'binary')
+        res.header('Accept-Ranges', 'bytes')
         if (size > 0) {
-          res.header('Content-Length', size);
+          res.header('Content-Length', size)
         }
-        res.send();
+        res.send()
       } catch (err) {
-        error_handler(res, err, err.code);
+        errorHandler(res, err, err.code)
       }
     },
 
     // Put for uploads
-    put: async function(req, res, _next)
-    {
-      const id = req.params.id; // content id
+    async put(req, res) {
+      const id = req.params.id // content id
 
       // First check if we're the liaison for the name, otherwise we can bail
       // out already.
-      const role_addr = runtime.identities.key.address;
-      const providerId = runtime.storageProviderId;
-      let dataObject;
+      const roleAddress = runtime.identities.key.address
+      const providerId = runtime.storageProviderId
+      let dataObject
       try {
         debug('calling checkLiaisonForDataObject')
-        dataObject = await runtime.assets.checkLiaisonForDataObject(providerId, id);
+        dataObject = await runtime.assets.checkLiaisonForDataObject(providerId, id)
         debug('called checkLiaisonForDataObject')
       } catch (err) {
-        error_handler(res, err, 403);
-        return;
+        errorHandler(res, err, 403)
+        return
       }
 
       // We'll open a write stream to the backend, but reserve the right to
       // abort upload if the filters don't smell right.
-      var stream;
+      let stream
       try {
-        stream = await storage.open(id, 'w');
+        stream = await storage.open(id, 'w')
 
         // We don't know whether the filtering occurs before or after the
         // stream was finished, and can only commit if both passed.
-        var finished = false;
-        var accepted = false;
-        const possibly_commit = () => {
+        let finished = false
+        let accepted = false
+        const possiblyCommit = () => {
           if (finished && accepted) {
-            debug('Stream is finished and passed filters; committing.');
-            stream.commit();
+            debug('Stream is finished and passed filters; committing.')
+            stream.commit()
           }
-        };
-
+        }
 
-        stream.on('file_info', async (info) => {
+        stream.on('fileInfo', async info => {
           try {
-            debug('Detected file info:', info);
+            debug('Detected file info:', info)
 
             // Filter
-            const filter_result = filter({}, req.headers, info.mime_type);
-            if (200 != filter_result.code) {
-              debug('Rejecting content', filter_result.message);
-              stream.end();
-              res.status(filter_result.code).send({ message: filter_result.message });
+            const filterResult = filter({}, req.headers, info.mimeType)
+            if (200 !== filterResult.code) {
+              debug('Rejecting content', filterResult.message)
+              stream.end()
+              res.status(filterResult.code).send({ message: filterResult.message })
 
               // Reject the content
-              await runtime.assets.rejectContent(role_addr, providerId, id);
-              return;
+              await runtime.assets.rejectContent(roleAddress, providerId, id)
+              return
             }
-            debug('Content accepted.');
-            accepted = true;
+            debug('Content accepted.')
+            accepted = true
 
             // We may have to commit the stream.
-            possibly_commit();
+            possiblyCommit()
           } catch (err) {
-            error_handler(res, err);
+            errorHandler(res, err)
           }
-        });
+        })
 
         stream.on('finish', () => {
           try {
-            finished = true;
-            possibly_commit();
+            finished = true
+            possiblyCommit()
           } catch (err) {
-            error_handler(res, err);
+            errorHandler(res, err)
           }
-        });
+        })
 
-        stream.on('committed', async (hash) => {
+        stream.on('committed', async hash => {
           console.log('commited', dataObject)
           try {
             if (hash !== dataObject.ipfs_content_id.toString()) {
-              debug('Rejecting content. IPFS hash does not match value in objectId');
-              await runtime.assets.rejectContent(role_addr, providerId, id);
-              res.status(400).send({ message: "Uploaded content doesn't match IPFS hash" });
-              return;
+              debug('Rejecting content. IPFS hash does not match value in objectId')
+              await runtime.assets.rejectContent(roleAddress, providerId, id)
+              res.status(400).send({ message: "Uploaded content doesn't match IPFS hash" })
+              return
             }
 
             debug('accepting Content')
-            await runtime.assets.acceptContent(role_addr, providerId, id);
+            await runtime.assets.acceptContent(roleAddress, providerId, id)
 
             debug('creating storage relationship for newly uploaded content')
             // Create storage relationship and flip it to ready.
-            const dosr_id = await runtime.assets.createAndReturnStorageRelationship(role_addr, providerId, id);
+            const dosrId = await runtime.assets.createAndReturnStorageRelationship(roleAddress, providerId, id)
 
             debug('toggling storage relationship for newly uploaded content')
-            await runtime.assets.toggleStorageRelationshipReady(role_addr, providerId, dosr_id, true);
+            await runtime.assets.toggleStorageRelationshipReady(roleAddress, providerId, dosrId, true)
 
-            debug('Sending OK response.');
-            res.status(200).send({ message: 'Asset uploaded.' });
+            debug('Sending OK response.')
+            res.status(200).send({ message: 'Asset uploaded.' })
           } catch (err) {
-            debug(`${err.message}`);
-            error_handler(res, err);
+            debug(`${err.message}`)
+            errorHandler(res, err)
           }
-        });
-
-        stream.on('error', (err) => error_handler(res, err));
-        req.pipe(stream);
+        })
 
+        stream.on('error', err => errorHandler(res, err))
+        req.pipe(stream)
       } catch (err) {
-        error_handler(res, err);
-        return;
+        errorHandler(res, err)
+        return
       }
     },
 
     // Get content
-    get: async function(req, res, _next)
-    {
-      const id = req.params.id;
-      const download = req.query.download;
+    async get(req, res) {
+      const id = req.params.id
+      const download = req.query.download
 
       // Parse range header
-      var ranges;
+      let ranges
       if (!download) {
         try {
-          var range_header = req.headers['range'];
-          ranges = util_ranges.parse(range_header);
+          const rangeHeader = req.headers.range
+          ranges = utilRanges.parse(rangeHeader)
         } catch (err) {
           // Do nothing; it's ok to ignore malformed ranges and respond with the
           // full content according to https://www.rfc-editor.org/rfc/rfc7233.txt
         }
-        if (ranges && ranges.unit != 'bytes') {
+        if (ranges && ranges.unit !== 'bytes') {
           // Ignore ranges that are not byte units.
-          ranges = undefined;
+          ranges = undefined
         }
       }
-      debug('Requested range(s) is/are', ranges);
+      debug('Requested range(s):', ranges)
 
       // Open file
       try {
-        const size = await storage.size(id);
-        const stream = await storage.open(id, 'r');
+        const size = await storage.size(id)
+        const stream = await storage.open(id, 'r')
 
         // Add a file extension to download requests if necessary. If the file
         // already contains an extension, don't add one.
-        var send_name = id;
-        const type = stream.file_info.mime_type;
+        let sendName = id
+        const type = stream.fileInfo.mimeType
         if (download) {
-          var ext = path.extname(send_name);
+          let ext = path.extname(sendName)
           if (!ext) {
-            ext = stream.file_info.ext;
+            ext = stream.fileInfo.ext
             if (ext) {
-              send_name = `${send_name}.${ext}`;
+              sendName = `${sendName}.${ext}`
             }
           }
         }
 
-        var opts = {
-          name: send_name,
-          type: type,
-          size: size,
-          ranges: ranges,
-          download: download,
-        };
-        util_ranges.send(res, stream, opts);
-
-
+        const opts = {
+          name: sendName,
+          type,
+          size,
+          ranges,
+          download,
+        }
+        utilRanges.send(res, stream, opts)
       } catch (err) {
-        error_handler(res, err, err.code);
+        errorHandler(res, err, err.code)
       }
-    }
-  };
+    },
+  }
 
   // OpenAPI specs
-  doc.get.apiDoc =
-  {
+  doc.get.apiDoc = {
     description: 'Download an asset.',
     operationId: 'assetData',
     tags: ['asset', 'data'],
@@ -279,16 +268,15 @@ module.exports = function(storage, runtime)
         content: {
           'application/json': {
             schema: {
-              '$ref': '#/components/schemas/Error'
+              $ref: '#/components/schemas/Error',
             },
           },
         },
       },
     },
-  };
+  }
 
-  doc.put.apiDoc =
-  {
+  doc.put.apiDoc = {
     description: 'Asset upload.',
     operationId: 'assetUpload',
     tags: ['asset', 'data'],
@@ -313,7 +301,7 @@ module.exports = function(storage, runtime)
               properties: {
                 message: {
                   type: 'string',
-                }
+                },
               },
             },
           },
@@ -324,17 +312,15 @@ module.exports = function(storage, runtime)
         content: {
           'application/json': {
             schema: {
-              '$ref': '#/components/schemas/Error'
+              $ref: '#/components/schemas/Error',
             },
           },
         },
       },
     },
-  };
+  }
 
-
-  doc.head.apiDoc =
-  {
+  doc.head.apiDoc = {
     description: 'Asset download information.',
     operationId: 'assetInfo',
     tags: ['asset', 'metadata'],
@@ -347,13 +333,13 @@ module.exports = function(storage, runtime)
         content: {
           'application/json': {
             schema: {
-              '$ref': '#/components/schemas/Error'
+              $ref: '#/components/schemas/Error',
             },
           },
         },
       },
     },
-  };
+  }
 
-  return doc;
-};
+  return doc
+}

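The upload handler above commits the stream only once two independent events have both occurred: the filter accepted the content, and the request stream finished. A minimal sketch of that two-flag rendezvous, with illustrative names (the handler folds this into the accepted/finished flags and possiblyCommit; the commit step itself is assumed to live on the storage stream):

function makeCommitGate(commit) {
  let accepted = false
  let finished = false
  const possiblyCommit = () => {
    // run the commit exactly once, and only when both flags are set
    if (accepted && finished) {
      commit()
      commit = () => {}
    }
  }
  return {
    accept() {
      accepted = true
      possiblyCommit()
    },
    finish() {
      finished = true
      possiblyCommit()
    },
  }
}

Whichever of accept() and finish() fires last triggers the commit, which is why both the filter branch and the 'finish' handler above call possiblyCommit().
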
+ 64 - 65
storage-node/packages/colossus/paths/discover/v0/{id}.js

@@ -1,12 +1,11 @@
 const { discover } = require('@joystream/service-discovery')
-const debug = require('debug')('joystream:colossus:api:discovery');
+const debug = require('debug')('joystream:colossus:api:discovery')
 
-const MAX_CACHE_AGE = 30 * 60 * 1000;
-const USE_CACHE = true;
+const MAX_CACHE_AGE = 30 * 60 * 1000
+const USE_CACHE = true
 
-module.exports = function(runtime)
-{
-  var doc = {
+module.exports = function(runtime) {
+  const doc = {
     // parameters for all operations in this path
     parameters: [
       {
@@ -21,71 +20,71 @@ module.exports = function(runtime)
     ],
 
     // Resolve Service Information
-    get: async function(req, res)
-    {
-        try {
-          var parsedId = parseInt(req.params.id);
-        } catch (err) {
-          return res.status(400).end();
-        }
+    async get(req, res) {
+      let parsedId
+      try {
+        parsedId = parseInt(req.params.id)
+      } catch (err) {
+        return res.status(400).end()
+      }
 
-        const id = parsedId
-        let cacheMaxAge = req.query.max_age;
+      const id = parsedId
+      let cacheMaxAge = req.query.max_age
 
-        if (cacheMaxAge) {
-          try {
-            cacheMaxAge = parseInt(cacheMaxAge);
-          } catch(err) {
-            cacheMaxAge = MAX_CACHE_AGE
-          }
-        } else {
-          cacheMaxAge = 0
+      if (cacheMaxAge) {
+        try {
+          cacheMaxAge = parseInt(cacheMaxAge)
+        } catch (err) {
+          cacheMaxAge = MAX_CACHE_AGE
         }
+      } else {
+        cacheMaxAge = 0
+      }
 
-        // todo - validate id before querying
+      // todo - validate id before querying
 
-        try {
-          debug(`resolving ${id}`);
-          const info = await discover.discover(id, runtime, USE_CACHE, cacheMaxAge);
-          if (info == null) {
-            debug('info not found');
-            res.status(404).end();
-          } else {
-            res.status(200).send(info);
-          }
-        } catch (err) {
-          debug(`${err}`);
+      try {
+        debug(`resolving ${id}`)
+        const info = await discover.discover(id, runtime, USE_CACHE, cacheMaxAge)
+        if (info === null) {
+          debug('info not found')
           res.status(404).end()
+        } else {
+          res.status(200).send(info)
         }
-    }
-  };
+      } catch (err) {
+        debug(`${err}`)
+        res.status(404).end()
+      }
+    },
+  }
 
-    // OpenAPI specs
-    doc.get.apiDoc = {
-        description: 'Resolve Service Information',
-        operationId: 'discover',
-        //tags: ['asset', 'data'],
-        responses: {
-            200: {
-                description: 'Wrapped JSON Service Information',
-                content: {
-                  'application/json': {
-                    schema: {
-                      required: ['serialized'],
-                      properties: {
-                        'serialized': {
-                          type: 'string'
-                        },
-                        'signature': {
-                          type: 'string'
-                        }
-                      },
-                    },
-                  }
-                }
-            }
-        }
-    }
+  // OpenAPI specs
+  doc.get.apiDoc = {
+    description: 'Resolve Service Information',
+    operationId: 'discover',
+    // tags: ['asset', 'data'],
+    responses: {
+      200: {
+        description: 'Wrapped JSON Service Information',
+        content: {
+          'application/json': {
+            schema: {
+              required: ['serialized'],
+              properties: {
+                serialized: {
+                  type: 'string',
+                },
+                signature: {
+                  type: 'string',
+                },
+              },
+            },
+          },
+        },
+      },
+    },
+  }
 
-    return doc;
-};
+  return doc
+}

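A caveat in the handler above: parseInt never throws on malformed input, it returns NaN, so the try/catch around it can never reach the 400 path (the todo about validating the id before querying points at the same gap). A hedged sketch of a check that would, kept separate from the handler, which preserves the original behaviour:

function parseProviderId(raw) {
  const id = parseInt(raw, 10)
  // NaN and negative values are rejected; callers map null to a 400 response
  return Number.isNaN(id) || id < 0 ? null : id
}
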
+ 1 - 1
storage-node/packages/colossus/test/index.js

@@ -1 +1 @@
-// Add Tests!
+// Add Tests!

+ 37 - 40
storage-node/packages/discovery/discover.js

@@ -7,9 +7,10 @@ const BN = require('bn.js')
 const { newExternallyControlledPromise } = require('@joystream/storage-utils/externalPromise')
 
 /**
- * Determines if code is running in a browser by testing for the global window object
+ * Determines if code is running in a browser by testing for the global window object.
+ * @return {boolean} true if the code is running in a browser, false otherwise.
  */
-function inBrowser () {
+function inBrowser() {
   return typeof window !== 'undefined'
 }
 
@@ -17,13 +18,13 @@ function inBrowser () {
  * Map storage-provider id to a Promise of a discovery result. The purpose
  * is to avoid concurrent active discoveries for the same provider.
  */
-var activeDiscoveries = {}
+const activeDiscoveries = {}
 
 /**
  * Map of storage provider id to string
  * Cache of past discovery lookup results
  */
-var accountInfoCache = {}
+const accountInfoCache = {}
 
 /**
  * After what period of time a cached record is considered stale, and would
@@ -38,17 +39,16 @@ const CACHE_TTL = 60 * 60 * 1000
  * @param { RuntimeApi } runtimeApi - api instance to query the chain
  * @returns { Promise<string | null> } - ipns multiformat address
  */
-async function getIpnsIdentity (storageProviderId, runtimeApi) {
+async function getIpnsIdentity(storageProviderId, runtimeApi) {
   storageProviderId = new BN(storageProviderId)
   // lookup ipns identity from chain corresponding to storageProviderId
   const info = await runtimeApi.discovery.getAccountInfo(storageProviderId)
 
-  if (info == null) {
+  if (info === null) {
     // no identity found on chain for account
     return null
-  } else {
-    return info.identity.toString()
   }
+  return info.identity.toString()
 }
 
 /**
@@ -61,11 +61,9 @@ async function getIpnsIdentity (storageProviderId, runtimeApi) {
  * @param {string} gateway - optional ipfs http gateway url to perform ipfs queries
  * @returns { Promise<object> } - the published service information
  */
-async function discover_over_ipfs_http_gateway (
-  storageProviderId, runtimeApi, gateway = 'http://localhost:8080') {
-
+async function discoverOverIpfsHttpGateway(storageProviderId, runtimeApi, gateway = 'http://localhost:8080') {
   storageProviderId = new BN(storageProviderId)
-  let isProvider = await runtimeApi.workers.isStorageProvider(storageProviderId)
+  const isProvider = await runtimeApi.workers.isStorageProvider(storageProviderId)
 
   if (!isProvider) {
     throw new Error('Cannot discover non storage providers')
@@ -73,7 +71,7 @@ async function discover_over_ipfs_http_gateway (
 
   const identity = await getIpnsIdentity(storageProviderId, runtimeApi)
 
-  if (identity == null) {
+  if (identity === null) {
     // don't waste time trying to resolve if no identity was found
     throw new Error('no identity to resolve')
   }
@@ -97,9 +95,9 @@ async function discover_over_ipfs_http_gateway (
  * @param {string} discoverApiEndpoint - url for a colossus discovery api endpoint
  * @returns { Promise<object> } - the published service information
  */
-async function discover_over_joystream_discovery_service (storageProviderId, runtimeApi, discoverApiEndpoint) {
+async function discoverOverJoystreamDiscoveryService(storageProviderId, runtimeApi, discoverApiEndpoint) {
   storageProviderId = new BN(storageProviderId)
-  let isProvider = await runtimeApi.workers.isStorageProvider(storageProviderId)
+  const isProvider = await runtimeApi.workers.isStorageProvider(storageProviderId)
 
   if (!isProvider) {
     throw new Error('Cannot discover non storage providers')
@@ -108,13 +106,13 @@ async function discover_over_joystream_discovery_service (storageProviderId, run
   const identity = await getIpnsIdentity(storageProviderId, runtimeApi)
 
   // don't waste time trying to resolve if no identity was found
-  if (identity == null) {
+  if (identity === null) {
     throw new Error('no identity to resolve')
   }
 
   if (!discoverApiEndpoint) {
     // Use bootstrap nodes
-    let discoveryBootstrapNodes = await runtimeApi.discovery.getBootstrapEndpoints()
+    const discoveryBootstrapNodes = await runtimeApi.discovery.getBootstrapEndpoints()
 
     if (discoveryBootstrapNodes.length) {
       discoverApiEndpoint = stripEndingSlash(discoveryBootstrapNodes[0].toString())
@@ -139,9 +137,9 @@ async function discover_over_joystream_discovery_service (storageProviderId, run
  * @param {RuntimeApi} runtimeApi - api instance to query the chain
  * @returns { Promise<object> } - the published service information
  */
-async function discover_over_local_ipfs_node (storageProviderId, runtimeApi) {
+async function discoverOverLocalIpfsNode(storageProviderId, runtimeApi) {
   storageProviderId = new BN(storageProviderId)
-  let isProvider = await runtimeApi.workers.isStorageProvider(storageProviderId)
+  const isProvider = await runtimeApi.workers.isStorageProvider(storageProviderId)
 
   if (!isProvider) {
     throw new Error('Cannot discover non storage providers')
@@ -149,26 +147,26 @@ async function discover_over_local_ipfs_node (storageProviderId, runtimeApi) {
 
   const identity = await getIpnsIdentity(storageProviderId, runtimeApi)
 
-  if (identity == null) {
+  if (identity === null) {
     // don't waste time trying to resolve if no identity was found
     throw new Error('no identity to resolve')
   }
 
-  const ipns_address = `/ipns/${identity}/`
+  const ipnsAddress = `/ipns/${identity}/`
 
   debug('resolving ipns address to an ipfs object')
   // Can this call hang forever!? can/should we set a timeout?
-  let ipfs_name = await ipfs.name.resolve(ipns_address, {
+  const ipfsName = await ipfs.name.resolve(ipnsAddress, {
     // don't recurse, there should only be one indirection to the service info file
     recursive: false,
-    nocache: false
+    nocache: false,
   })
 
-  debug('getting ipfs object', ipfs_name)
-  let data = await ipfs.get(ipfs_name) // this can sometimes hang forever!?! can we set a timeout?
+  debug('getting ipfs object', ipfsName)
+  const data = await ipfs.get(ipfsName) // this can sometimes hang forever!?! can we set a timeout?
 
   // there should only be one file published under the resolved path
-  let content = data[0].content
+  const content = data[0].content
 
   return JSON.parse(content)
 }
@@ -187,7 +185,7 @@ async function discover_over_local_ipfs_node (storageProviderId, runtimeApi) {
  * @param {number} maxCacheAge - maximum age of a cached query that triggers automatic re-discovery
  * @returns { Promise<object | null> } - the published service information
  */
-async function discover (storageProviderId, runtimeApi, useCachedValue = false, maxCacheAge = 0) {
+async function discover(storageProviderId, runtimeApi, useCachedValue = false, maxCacheAge = 0) {
   storageProviderId = new BN(storageProviderId)
   const id = storageProviderId.toNumber()
   const cached = accountInfoCache[id]
@@ -195,30 +193,29 @@ async function discover (storageProviderId, runtimeApi, useCachedValue = false,
   if (cached && useCachedValue) {
     if (maxCacheAge > 0) {
       // get latest value
-      if (Date.now() > (cached.updated + maxCacheAge)) {
+      if (Date.now() > cached.updated + maxCacheAge) {
         return _discover(storageProviderId, runtimeApi)
       }
     }
     // refresh the cache if it's stale; the new value is returned on the next cached query
-    if (Date.now() > (cached.updated + CACHE_TTL)) {
+    if (Date.now() > cached.updated + CACHE_TTL) {
       _discover(storageProviderId, runtimeApi)
     }
     // return best known value
     return cached.value
-  } else {
-    return _discover(storageProviderId, runtimeApi)
   }
+  return _discover(storageProviderId, runtimeApi)
 }
 
 /**
  * Internal method that handles concurrent discoveries and caching of results. Will
- * select the appropriate discovery protocol based on wether we are in a browser environemtn or not.
+ * select the appropriate discovery protocol based on whether we are in a browser environment or not.
  * If not in a browser it expects a local ipfs node to be running.
- * @param {number | BN | u64} storageProviderId
+ * @param {number | BN | u64} storageProviderId - ID of the storage provider
  * @param {RuntimeApi} runtimeApi - api instance for querying the chain
  * @returns { Promise<object | null> } - the published service information
  */
-async function _discover (storageProviderId, runtimeApi) {
+async function _discover(storageProviderId, runtimeApi) {
   storageProviderId = new BN(storageProviderId)
   const id = storageProviderId.toNumber()
 
@@ -235,16 +232,16 @@ async function _discover (storageProviderId, runtimeApi) {
   let result
   try {
     if (inBrowser()) {
-      result = await discover_over_joystream_discovery_service(storageProviderId, runtimeApi)
+      result = await discoverOverJoystreamDiscoveryService(storageProviderId, runtimeApi)
     } else {
-      result = await discover_over_local_ipfs_node(storageProviderId, runtimeApi)
+      result = await discoverOverLocalIpfsNode(storageProviderId, runtimeApi)
     }
 
     debug(result)
     result = JSON.stringify(result)
     accountInfoCache[id] = {
       value: result,
-      updated: Date.now()
+      updated: Date.now(),
     }
 
     deferredDiscovery.resolve(result)
@@ -269,7 +266,7 @@ async function _discover (storageProviderId, runtimeApi) {
 
 module.exports = {
   discover,
-  discover_over_joystream_discovery_service,
-  discover_over_ipfs_http_gateway,
-  discover_over_local_ipfs_node
+  discoverOverJoystreamDiscoveryService,
+  discoverOverIpfsHttpGateway,
+  discoverOverLocalIpfsNode,
 }

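discover.js avoids concurrent lookups for the same provider by parking every caller on a single shared promise (the activeDiscoveries map together with newExternallyControlledPromise). A minimal sketch of the same dedup pattern using a plain promise, names illustrative:

const inFlight = {}

async function dedupedDiscover(id, doDiscover) {
  if (inFlight[id]) {
    // a discovery for this provider is already running; join it
    return inFlight[id]
  }
  inFlight[id] = doDiscover(id).finally(() => {
    delete inFlight[id]
  })
  return inFlight[id]
}
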
+ 29 - 32
storage-node/packages/discovery/example.js

@@ -3,38 +3,35 @@ const { RuntimeApi } = require('@joystream/storage-runtime-api')
 const { discover, publish } = require('./')
 
 async function main() {
-    // The assigned storage-provider id
-    const provider_id = 0
-
-    const runtimeApi = await RuntimeApi.create({
-        // Path to the role account key file of the provider
-        account_file: "/path/to/role_account_key_file.json",
-        storageProviderId: provider_id
-    })
-
-    let ipns_id = await publish.publish(
-        {
-            asset: {
-                version: 1,
-                endpoint: 'http://endpoint.com'
-            }
-        },
-        runtimeApi
-    )
-
-    console.log(ipns_id)
-
-    // register ipns_id on chain
-    await runtimeApi.setAccountInfo(ipfs_id)
-
-    let serviceInfo = await discover.discover(
-        provider_id,
-        runtimeApi
-    )
-
-    console.log(serviceInfo)
-
-    runtimeApi.api.disconnect()
+  // The assigned storage-provider id
+  const providerId = 0
+
+  const runtimeApi = await RuntimeApi.create({
+    // Path to the role account key file of the provider
+    account_file: '/path/to/role_account_key_file.json',
+    storageProviderId: providerId,
+  })
+
+  const ipnsId = await publish.publish(
+    {
+      asset: {
+        version: 1,
+        endpoint: 'http://endpoint.com',
+      },
+    },
+    runtimeApi
+  )
+
+  console.log(ipnsId)
+
+  // register ipnsId on chain
+  await runtimeApi.discovery.setAccountInfo(ipnsId)
+
+  const serviceInfo = await discover.discover(providerId, runtimeApi)
+
+  console.log(serviceInfo)
+
+  runtimeApi.api.disconnect()
 }
 
 main()

+ 3 - 4
storage-node/packages/discovery/index.js

@@ -1,5 +1,4 @@
-
 module.exports = {
-    discover : require('./discover'),
-    publish : require('./publish'),
-}
+  discover: require('./discover'),
+  publish: require('./publish'),
+}

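Both discover.js and the runtime API build on newExternallyControlledPromise from @joystream/storage-utils/externalPromise. The helper itself lives in the util package; a sketch of its assumed shape, a promise whose resolve and reject are handed to the creator:

function newExternallyControlledPromise() {
  let resolve
  let reject
  const promise = new Promise((res, rej) => {
    resolve = res
    reject = rej
  })
  return { promise, resolve, reject }
}
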
+ 17 - 16
storage-node/packages/discovery/publish.js

@@ -1,4 +1,5 @@
 const ipfsClient = require('ipfs-http-client')
+
 const ipfs = ipfsClient('localhost', '5001', { protocol: 'http' })
 
 const debug = require('debug')('joystream:discovery:publish')
@@ -14,9 +15,9 @@ const PUBLISH_KEY = 'self'
  * Applies JSON serialization on the data object and converts the utf-8
  * string to a Buffer.
  * @param {object} data - json object
- * @returns {Buffer}
+ * @returns {Buffer} buffer containing the UTF-8 encoded JSON
  */
-function bufferFrom (data) {
+function bufferFrom(data) {
   return Buffer.from(JSON.stringify(data), 'utf-8')
 }
 
@@ -24,11 +25,11 @@ function bufferFrom (data) {
  * Encodes the service info into a standard format (see /storage-node/docs/json-signing.md)
  * so that a signature can be added over the json data. Signing is not currently implemented.
  * @param {object} info - json object
- * @returns {Buffer}
+ * @returns {Buffer} the encoded service info as a buffer
  */
-function encodeServiceInfo (info) {
+function encodeServiceInfo(info) {
   return bufferFrom({
-    serialized: JSON.stringify(info)
+    serialized: JSON.stringify(info),
   })
 }
 
@@ -36,35 +37,35 @@ function encodeServiceInfo (info) {
  * Publishes the service information, encoded using the standard defined in encodeServiceInfo()
  * to ipfs, using the local ipfs node's PUBLISH_KEY, and returns the key id used to publish.
  * That key id is what we refer to as the ipns id.
- * @param {object} service_info - the service information to publish
+ * @param {object} serviceInfo - the service information to publish
  * @returns {string} - the ipns id
  */
-async function publish (service_info) {
+async function publish(serviceInfo) {
   const keys = await ipfs.key.list()
-  let services_key = keys.find((key) => key.name === PUBLISH_KEY)
+  let servicesKey = keys.find(key => key.name === PUBLISH_KEY)
 
   // An ipfs node will always have the self key.
   // If the publish key is specified as anything else and it doesn't exist
   // we create it.
-  if (PUBLISH_KEY !== 'self' && !services_key) {
+  if (PUBLISH_KEY !== 'self' && !servicesKey) {
     debug('generating ipns services key')
-    services_key = await ipfs.key.gen(PUBLISH_KEY, {
+    servicesKey = await ipfs.key.gen(PUBLISH_KEY, {
       type: 'rsa',
-      size: 2048
+      size: 2048,
     })
   }
 
-  if (!services_key) {
+  if (!servicesKey) {
     throw new Error('No IPFS publishing key available!')
   }
 
   debug('adding service info file to node')
-  const files = await ipfs.add(encodeServiceInfo(service_info))
+  const files = await ipfs.add(encodeServiceInfo(serviceInfo))
 
   debug('publishing...')
   const published = await ipfs.name.publish(files[0].hash, {
     key: PUBLISH_KEY,
-    resolve: false
+    resolve: false,
     // lifetime: // string - Time duration of the record. Default: 24h
     // ttl:      // string - Time duration this record should be cached
   })
@@ -79,9 +80,9 @@ async function publish (service_info) {
 
   // Return the key id under which the content was published, which is used
   // to look up the actual ipfs content id of the published service information
-  return services_key.id
+  return servicesKey.id
 }
 
 module.exports = {
-  publish
+  publish,
 }

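Worth noting from the code above: publish() returns the IPNS key id, not the IPFS hash of the uploaded service info file, so consumers resolve it through IPNS the same way discover.js does. A hypothetical round trip against a local node (localhost:5001 assumed, as in the module itself):

const ipfsClient = require('ipfs-http-client')
const { publish } = require('@joystream/service-discovery')

const ipfs = ipfsClient('localhost', '5001', { protocol: 'http' })

async function roundTrip() {
  const ipnsId = await publish.publish({ asset: { version: 1, endpoint: 'http://endpoint.com' } })
  // resolve the ipns id back to the ipfs path of the published service info
  const resolved = await ipfs.name.resolve(`/ipns/${ipnsId}/`, { recursive: false })
  console.log('service info object at', resolved)
}

roundTrip().catch(console.error)
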
+ 1 - 1
storage-node/packages/discovery/test/index.js

@@ -1 +1 @@
-// Add Tests!
+// Add Tests!

+ 100 - 85
storage-node/packages/helios/bin/cli.js

@@ -6,7 +6,7 @@ const { discover } = require('@joystream/service-discovery')
 const axios = require('axios')
 const stripEndingSlash = require('@joystream/storage-utils/stripEndingSlash')
 
-async function main () {
+async function main() {
   const runtime = await RuntimeApi.create()
   const { api } = runtime
 
@@ -18,83 +18,98 @@ async function main () {
   const { ids: storageProviders } = await runtime.workers.getAllProviders()
   console.log(`Found ${storageProviders.length} staked providers`)
 
-  const storageProviderAccountInfos = await Promise.all(storageProviders.map(async (providerId) => {
-    return ({
-      providerId,
-      info: await runtime.discovery.getAccountInfo(providerId)
+  const storageProviderAccountInfos = await Promise.all(
+    storageProviders.map(async providerId => {
+      return {
+        providerId,
+        info: await runtime.discovery.getAccountInfo(providerId),
+      }
     })
-  }))
+  )
 
   // providers that have updated their account info and published ipfs id
   // considered live if the record hasn't expired yet
-  const liveProviders = storageProviderAccountInfos.filter(({info}) => {
+  const liveProviders = storageProviderAccountInfos.filter(({ info }) => {
     return info && info.expires_at.gte(currentHeight)
   })
 
-  const downProviders = storageProviderAccountInfos.filter(({info}) => {
-    return info == null
+  const downProviders = storageProviderAccountInfos.filter(({ info }) => {
+    return info === null
   })
 
-  const expiredTtlProviders = storageProviderAccountInfos.filter(({info}) => {
+  const expiredTtlProviders = storageProviderAccountInfos.filter(({ info }) => {
     return info && currentHeight.gte(info.expires_at)
   })
 
-  let providersStatuses = mapInfoToStatus(liveProviders, currentHeight)
+  const providersStatuses = mapInfoToStatus(liveProviders, currentHeight)
   console.log('\n== Live Providers\n', providersStatuses)
 
-  let expiredProviderStatuses = mapInfoToStatus(expiredTtlProviders, currentHeight)
+  const expiredProviderStatuses = mapInfoToStatus(expiredTtlProviders, currentHeight)
   console.log('\n== Expired Providers\n', expiredProviderStatuses)
 
-  console.log('\n== Down Providers!\n', downProviders.map(provider => {
-    return ({
-      providerId: provider.providerId
+  console.log(
+    '\n== Down Providers!\n',
+    downProviders.map(provider => {
+      return {
+        providerId: provider.providerId,
+      }
     })
-  }))
+  )
 
   // Resolve IPNS identities of providers
   console.log('\nResolving live provider API Endpoints...')
-  let endpoints = await Promise.all(providersStatuses.map(async ({providerId}) => {
-    try {
-      let serviceInfo = await discover.discover_over_joystream_discovery_service(providerId, runtime)
-
-      if (serviceInfo == null) {
-        console.log(`provider ${providerId} has not published service information`)
+  const endpoints = await Promise.all(
+    providersStatuses.map(async ({ providerId }) => {
+      try {
+        const serviceInfo = await discover.discoverOverJoystreamDiscoveryService(providerId, runtime)
+
+        if (serviceInfo === null) {
+          console.log(`provider ${providerId} has not published service information`)
+          return { providerId, endpoint: null }
+        }
+
+        const info = JSON.parse(serviceInfo.serialized)
+        console.log(`${providerId} -> ${info.asset.endpoint}`)
+        return { providerId, endpoint: info.asset.endpoint }
+      } catch (err) {
+        console.log('resolve failed for id', providerId, err.message)
         return { providerId, endpoint: null }
       }
-
-      let info = JSON.parse(serviceInfo.serialized)
-      console.log(`${providerId} -> ${info.asset.endpoint}`)
-      return { providerId, endpoint: info.asset.endpoint }
-    } catch (err) {
-      console.log('resolve failed for id', providerId, err.message)
-      return { providerId, endpoint: null }
-    }
-  }))
+    })
+  )
 
   console.log('\nChecking API Endpoints are online')
-  await Promise.all(endpoints.map(async (provider) => {
-    if (!provider.endpoint) {
-      console.log('skipping', provider.address)
-      return
-    }
-    const swaggerUrl = `${stripEndingSlash(provider.endpoint)}/swagger.json`
-    let error
-    try {
-      await axios.get(swaggerUrl)
-      // maybe print out api version information to detect which version of colossus is running?
-      // or add anothe api endpoint for diagnostics information
-    } catch (err) { error = err }
-    console.log(`${provider.endpoint} - ${error ? error.message : 'OK'}`)
-  }))
-
-  let knownContentIds = await runtime.assets.getKnownContentIds()
+  await Promise.all(
+    endpoints.map(async provider => {
+      if (!provider.endpoint) {
+        console.log('skipping', provider.address)
+        return
+      }
+      const swaggerUrl = `${stripEndingSlash(provider.endpoint)}/swagger.json`
+      let error
+      try {
+        await axios.get(swaggerUrl)
+        // maybe print out api version information to detect which version of colossus is running?
+        // or add another api endpoint for diagnostics information
+      } catch (err) {
+        error = err
+      }
+      console.log(`${provider.endpoint} - ${error ? error.message : 'OK'}`)
+    })
+  )
+
+  const knownContentIds = await runtime.assets.getKnownContentIds()
   console.log(`\nData Directory has ${knownContentIds.length} assets`)
 
   // Check which providers are reporting a ready relationship for each asset
-  await Promise.all(knownContentIds.map(async (contentId) => {
-    let [relationshipsCount, judgement] = await assetRelationshipState(api, contentId, storageProviders)
-    console.log(`${encodeAddress(contentId)} replication ${relationshipsCount}/${storageProviders.length} - ${judgement}`)
-  }))
+  await Promise.all(
+    knownContentIds.map(async contentId => {
+      const [relationshipsCount, judgement] = await assetRelationshipState(api, contentId, storageProviders)
+      console.log(
+        `${encodeAddress(contentId)} replication ${relationshipsCount}/${storageProviders.length} - ${judgement}`
+      )
+    })
+  )
 
   // Interestingly, disconnect doesn't work unless an explicit provider was created
   // for the underlying api instance
@@ -103,44 +118,45 @@ async function main () {
 
   console.log(`\nChecking available assets on providers (this can take some time)...`)
   endpoints.forEach(async ({ providerId, endpoint }) => {
-    if (!endpoint) { return }
+    if (!endpoint) {
+      return
+    }
     const total = knownContentIds.length
-    let { found } = await countContentAvailability(knownContentIds, endpoint)
+    const { found } = await countContentAvailability(knownContentIds, endpoint)
     console.log(`provider ${providerId}: has ${found} out of ${total}`)
   })
 }
 
-function mapInfoToStatus (providers, currentHeight) {
-  return providers.map(({providerId, info}) => {
+function mapInfoToStatus(providers, currentHeight) {
+  return providers.map(({ providerId, info }) => {
     if (info) {
       return {
         providerId,
         identity: info.identity.toString(),
         expiresIn: info.expires_at.sub(currentHeight).toNumber(),
-        expired: currentHeight.gte(info.expires_at)
-      }
-    } else {
-      return {
-        providerId,
-        identity: null,
-        status: 'down'
+        expired: currentHeight.gte(info.expires_at),
       }
     }
+    return {
+      providerId,
+      identity: null,
+      status: 'down',
+    }
   })
 }
 
 // Issue an HTTP HEAD via axios for each known content id on the given provider
-async function countContentAvailability (contentIds, source) {
-  let content = {}
+async function countContentAvailability(contentIds, source) {
+  const content = {}
   let found = 0
   let missing = 0
   for (let i = 0; i < contentIds.length; i++) {
     const assetUrl = makeAssetUrl(contentIds[i], source)
     try {
-      let info = await axios.head(assetUrl)
+      const info = await axios.head(assetUrl)
       content[encodeAddress(contentIds[i])] = {
         type: info.headers['content-type'],
-        bytes: info.headers['content-length']
+        bytes: info.headers['content-length'],
       }
       // TODO: cross check against dataobject size
       found++
@@ -152,32 +168,31 @@ async function countContentAvailability (contentIds, source) {
   return { found, missing, content }
 }
 
-function makeAssetUrl (contentId, source) {
+function makeAssetUrl(contentId, source) {
   source = stripEndingSlash(source)
   return `${source}/asset/v0/${encodeAddress(contentId)}`
 }
 
-async function assetRelationshipState (api, contentId, providers) {
-  let dataObject = await api.query.dataDirectory.dataObjectByContentId(contentId)
+async function assetRelationshipState(api, contentId, providers) {
+  const dataObject = await api.query.dataDirectory.dataObjectByContentId(contentId)
 
-  let relationshipIds = await api.query.dataObjectStorageRegistry.relationshipsByContentId(contentId)
+  const relationshipIds = await api.query.dataObjectStorageRegistry.relationshipsByContentId(contentId)
 
   // how many relationships are associated with active providers and in the ready state
-  let activeRelationships = await Promise.all(relationshipIds.map(async (id) => {
-    let relationship = await api.query.dataObjectStorageRegistry.relationships(id)
-    relationship = relationship.unwrap()
-    // only interested in ready relationships
-    if (!relationship.ready) {
-      return undefined
-    }
-    // Does the relationship belong to an active provider ?
-    return providers.find((provider) => relationship.storage_provider.eq(provider))
-  }))
-
-  return ([
-    activeRelationships.filter(active => active).length,
-    dataObject.unwrap().liaison_judgement
-  ])
+  const activeRelationships = await Promise.all(
+    relationshipIds.map(async id => {
+      let relationship = await api.query.dataObjectStorageRegistry.relationships(id)
+      relationship = relationship.unwrap()
+      // only interested in ready relationships
+      if (!relationship.ready) {
+        return undefined
+      }
+      // Does the relationship belong to an active provider ?
+      return providers.find(provider => relationship.storage_provider.eq(provider))
+    })
+  )
+
+  return [activeRelationships.filter(active => active).length, dataObject.unwrap().liaison_judgement]
 }
 
 main()

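One subtlety at the end of main() above: endpoints.forEach(async ...) fires all the availability checks without awaiting them, so main() returns before they complete and their output can interleave. A sketch of an awaitable, sequential variant built on the same countContentAvailability helper:

async function checkAvailability(endpoints, knownContentIds) {
  for (const { providerId, endpoint } of endpoints) {
    if (!endpoint) {
      continue
    }
    const { found } = await countContentAvailability(knownContentIds, endpoint)
    console.log(`provider ${providerId}: has ${found} out of ${knownContentIds.length}`)
  }
}
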
+ 1 - 1
storage-node/packages/helios/test/index.js

@@ -1 +1 @@
-// Add Tests!
+// Add Tests!

+ 22 - 20
storage-node/packages/runtime-api/assets.js

@@ -3,7 +3,7 @@
 const debug = require('debug')('joystream:runtime:assets')
 const { decodeAddress } = require('@polkadot/keyring')
 
-function parseContentId (contentId) {
+function parseContentId(contentId) {
   try {
     return decodeAddress(contentId)
   } catch (err) {
@@ -15,21 +15,21 @@ function parseContentId (contentId) {
  * Add asset related functionality to the substrate API.
  */
 class AssetsApi {
-  static async create (base) {
+  static async create(base) {
     const ret = new AssetsApi()
     ret.base = base
-    await ret.init()
+    await AssetsApi.init()
     return ret
   }
 
-  async init () {
+  static async init() {
     debug('Init')
   }
 
   /*
    * Create and return a data object.
    */
-  async createDataObject (accountId, memberId, contentId, doTypeId, size, ipfsCid) {
+  async createDataObject(accountId, memberId, contentId, doTypeId, size, ipfsCid) {
     contentId = parseContentId(contentId)
     const tx = this.base.api.tx.dataDirectory.addContent(memberId, contentId, doTypeId, size, ipfsCid)
     await this.base.signAndSend(accountId, tx)
@@ -42,7 +42,7 @@ class AssetsApi {
   /*
   * Return the Data Object for a contentId
    */
-  async getDataObject (contentId) {
+  async getDataObject(contentId) {
     contentId = parseContentId(contentId)
     return this.base.api.query.dataDirectory.dataObjectByContentId(contentId)
   }
@@ -55,7 +55,7 @@ class AssetsApi {
    *
    * Each failure errors out, success returns the data object.
    */
-  async checkLiaisonForDataObject (storageProviderId, contentId) {
+  async checkLiaisonForDataObject(storageProviderId, contentId) {
     contentId = parseContentId(contentId)
 
     let obj = await this.getDataObject(contentId)
@@ -80,7 +80,7 @@ class AssetsApi {
   /*
    * Sets the data object liaison judgement to Accepted
    */
-  async acceptContent (providerAccoundId, storageProviderId, contentId) {
+  async acceptContent(providerAccountId, storageProviderId, contentId) {
     contentId = parseContentId(contentId)
     const tx = this.base.api.tx.dataDirectory.acceptContent(storageProviderId, contentId)
     return this.base.signAndSend(providerAccountId, tx)
@@ -89,7 +89,7 @@ class AssetsApi {
   /*
    * Sets the data object liaison judgement to Rejected
    */
-  async rejectContent (providerAccountId, storageProviderId, contentId) {
+  async rejectContent(providerAccountId, storageProviderId, contentId) {
     contentId = parseContentId(contentId)
     const tx = this.base.api.tx.dataDirectory.rejectContent(storageProviderId, contentId)
     return this.base.signAndSend(providerAccountId, tx)
@@ -98,7 +98,7 @@ class AssetsApi {
   /*
    * Creates storage relationship for a data object and provider
    */
-  async createStorageRelationship (providerAccountId, storageProviderId, contentId, callback) {
+  async createStorageRelationship(providerAccountId, storageProviderId, contentId, callback) {
     contentId = parseContentId(contentId)
     const tx = this.base.api.tx.dataObjectStorageRegistry.addRelationship(storageProviderId, contentId)
 
@@ -109,16 +109,16 @@ class AssetsApi {
   /*
    * Gets storage relationship for contentId for the given provider
    */
-  async getStorageRelationshipAndId (storageProviderId, contentId) {
+  async getStorageRelationshipAndId(storageProviderId, contentId) {
     contentId = parseContentId(contentId)
-    let rids = await this.base.api.query.dataObjectStorageRegistry.relationshipsByContentId(contentId)
+    const rids = await this.base.api.query.dataObjectStorageRegistry.relationshipsByContentId(contentId)
 
     while (rids.length) {
       const relationshipId = rids.shift()
       let relationship = await this.base.api.query.dataObjectStorageRegistry.relationships(relationshipId)
       relationship = relationship.unwrap()
       if (relationship.storage_provider.eq(storageProviderId)) {
-        return ({ relationship, relationshipId })
+        return { relationship, relationshipId }
       }
     }
 
@@ -128,12 +128,14 @@ class AssetsApi {
   /*
    * Creates storage relationship for a data object and provider and returns the relationship id
    */
-  async createAndReturnStorageRelationship (providerAccountId, storageProviderId, contentId) {
+  async createAndReturnStorageRelationship(providerAccountId, storageProviderId, contentId) {
     contentId = parseContentId(contentId)
+    // TODO: rewrite this method to async-await style
+    // eslint-disable-next-line no-async-promise-executor
     return new Promise(async (resolve, reject) => {
       try {
-        await this.createStorageRelationship(providerAccountId, storageProviderId, contentId, (events) => {
-          events.forEach((event) => {
+        await this.createStorageRelationship(providerAccountId, storageProviderId, contentId, events => {
+          events.forEach(event => {
             resolve(event[1].DataObjectStorageRelationshipId)
           })
         })
@@ -146,8 +148,8 @@ class AssetsApi {
   /*
    * Set the ready state for a data object storage relationship to the new value
    */
-  async toggleStorageRelationshipReady (providerAccountId, storageProviderId, dosrId, ready) {
-    var tx = ready
+  async toggleStorageRelationshipReady(providerAccountId, storageProviderId, dosrId, ready) {
+    const tx = ready
       ? this.base.api.tx.dataObjectStorageRegistry.setRelationshipReady(storageProviderId, dosrId)
       : this.base.api.tx.dataObjectStorageRegistry.unsetRelationshipReady(storageProviderId, dosrId)
     return this.base.signAndSend(providerAccountId, tx)
@@ -156,11 +158,11 @@ class AssetsApi {
   /*
   * Returns an array of known content ids
    */
-  async getKnownContentIds () {
+  async getKnownContentIds() {
     return this.base.api.query.dataDirectory.knownContentIds()
   }
 }
 
 module.exports = {
-  AssetsApi
+  AssetsApi,
 }

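One way the TODO in createAndReturnStorageRelationship could be addressed without the no-async-promise-executor suppression: keep the executor synchronous and route failures through .catch. A sketch as a free function over an AssetsApi instance, preserving the resolve-on-first-event behaviour of the method above:

function createAndReturnRelationship(assets, providerAccountId, storageProviderId, contentId) {
  return new Promise((resolve, reject) => {
    assets
      .createStorageRelationship(providerAccountId, storageProviderId, contentId, events => {
        // resolve with the relationship id carried by the first matching event
        events.forEach(event => resolve(event[1].DataObjectStorageRelationshipId))
      })
      .catch(reject)
  })
}
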
+ 25 - 36
storage-node/packages/runtime-api/balances.js

@@ -16,75 +16,64 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const debug = require('debug')('joystream:runtime:balances');
-
-const { IdentitiesApi } = require('@joystream/storage-runtime-api/identities');
+const debug = require('debug')('joystream:runtime:balances')
 
 /*
  * Bundle API calls related to account balances.
  */
-class BalancesApi
-{
-  static async create(base)
-  {
-    const ret = new BalancesApi();
-    ret.base = base;
-    await ret.init();
-    return ret;
+class BalancesApi {
+  static async create(base) {
+    const ret = new BalancesApi()
+    ret.base = base
+    await BalancesApi.init()
+    return ret
   }
 
-  async init(account_file)
-  {
-    debug('Init');
+  static async init() {
+    debug('Init')
   }
 
   /*
    * Return true/false if the account has the minimum balance given.
    */
-  async hasMinimumBalanceOf(accountId, min)
-  {
-    const balance = await this.freeBalance(accountId);
+  async hasMinimumBalanceOf(accountId, min) {
+    const balance = await this.freeBalance(accountId)
     if (typeof min === 'number') {
-      return balance.cmpn(min) >= 0;
-    }
-    else {
-      return balance.cmp(min) >= 0;
+      return balance.cmpn(min) >= 0
     }
+    return balance.cmp(min) >= 0
   }
 
   /*
    * Return the account's current free balance.
    */
-  async freeBalance(accountId)
-  {
-    const decoded = this.base.identities.keyring.decodeAddress(accountId, true);
-    return this.base.api.query.balances.freeBalance(decoded);
+  async freeBalance(accountId) {
+    const decoded = this.base.identities.keyring.decodeAddress(accountId, true)
+    return this.base.api.query.balances.freeBalance(decoded)
   }
 
   /*
    * Return the base transaction fee.
    */
-  baseTransactionFee()
-  {
-    return this.base.api.consts.transactionPayment.transactionBaseFee;
+  baseTransactionFee() {
+    return this.base.api.consts.transactionPayment.transactionBaseFee
   }
 
   /*
    * Transfer amount currency from one address to another. The sending
    * address must be an unlocked key pair!
    */
-  async transfer(from, to, amount)
-  {
-    const decode = require('@polkadot/keyring').decodeAddress;
-    const to_decoded = decode(to, true);
+  async transfer(from, to, amount) {
+    const decode = require('@polkadot/keyring').decodeAddress
+    const toDecoded = decode(to, true)
 
-    const tx = this.base.api.tx.balances.transfer(to_decoded, amount);
-    return this.base.signAndSend(from, tx);
+    const tx = this.base.api.tx.balances.transfer(toDecoded, amount)
+    return this.base.signAndSend(from, tx)
   }
 }
 
 module.exports = {
-  BalancesApi: BalancesApi,
+  BalancesApi,
 }

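hasMinimumBalanceOf above branches on the type of min: a plain JS number is compared via BN.cmpn, anything else (a BN) via BN.cmp. Hypothetical usage, assuming an initialized RuntimeApi instance named api:

const BN = require('bn.js')

async function canAfford(api, address) {
  const coversFee = await api.balances.hasMinimumBalanceOf(address, 100) // number, uses cmpn
  const coversStake = await api.balances.hasMinimumBalanceOf(address, new BN('1000000')) // BN, uses cmp
  return coversFee && coversStake
}
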
+ 12 - 16
storage-node/packages/runtime-api/discovery.js

@@ -6,40 +6,37 @@ const debug = require('debug')('joystream:runtime:discovery')
  * Add discovery related functionality to the substrate API.
  */
 class DiscoveryApi {
-  static async create (base) {
+  static async create(base) {
     const ret = new DiscoveryApi()
     ret.base = base
-    await ret.init()
+    await DiscoveryApi.init()
     return ret
   }
 
-  async init () {
+  static async init() {
     debug('Init')
   }
 
   /*
    * Get Bootstrap endpoints
    */
-  async getBootstrapEndpoints () {
+  async getBootstrapEndpoints() {
     return this.base.api.query.discovery.bootstrapEndpoints()
   }
 
   /*
    * Set Bootstrap endpoints, requires the sudo account to be provided and unlocked
    */
-  async setBootstrapEndpoints (sudoAccount, endpoints) {
+  async setBootstrapEndpoints(sudoAccount, endpoints) {
     const tx = this.base.api.tx.discovery.setBootstrapEndpoints(endpoints)
     // make sudo call
-    return this.base.signAndSend(
-      sudoAccount,
-      this.base.api.tx.sudo.sudo(tx)
-    )
+    return this.base.signAndSend(sudoAccount, this.base.api.tx.sudo.sudo(tx))
   }
 
   /*
    * Get AccountInfo of a storage provider
    */
-  async getAccountInfo (storageProviderId) {
+  async getAccountInfo(storageProviderId) {
     const info = await this.base.api.query.discovery.accountInfoByStorageProviderId(storageProviderId)
     // Not an Option so we use default value check to know if info was found
     return info.expires_at.eq(0) ? null : info
@@ -48,29 +45,28 @@ class DiscoveryApi {
   /*
    * Set AccountInfo of our storage provider
    */
-  async setAccountInfo (ipnsId) {
+  async setAccountInfo(ipnsId) {
     const roleAccountId = this.base.identities.key.address
     const storageProviderId = this.base.storageProviderId
     const isProvider = await this.base.workers.isStorageProvider(storageProviderId)
     if (isProvider) {
       const tx = this.base.api.tx.discovery.setIpnsId(storageProviderId, ipnsId)
       return this.base.signAndSend(roleAccountId, tx)
-    } else {
-      throw new Error('Cannot set AccountInfo, id is not a storage provider')
     }
+    throw new Error('Cannot set AccountInfo, id is not a storage provider')
   }
 
   /*
    * Clear AccountInfo of our storage provider
    */
-  async unsetAccountInfo () {
+  async unsetAccountInfo() {
     const roleAccountId = this.base.identities.key.address
     const storageProviderId = this.base.storageProviderId
-    var tx = this.base.api.tx.discovery.unsetIpnsId(storageProviderId)
+    const tx = this.base.api.tx.discovery.unsetIpnsId(storageProviderId)
     return this.base.signAndSend(roleAccountId, tx)
   }
 }
 
 module.exports = {
-  DiscoveryApi
+  DiscoveryApi,
 }

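getAccountInfo above signals "not found" with the type's default value (expires_at equal to 0) rather than an Option, which is why callers compare the result against null. A sketch of deriving provider liveness from it, mirroring the helios checks earlier in this diff (names illustrative):

async function providerLiveness(runtime, storageProviderId, currentHeight) {
  const info = await runtime.discovery.getAccountInfo(storageProviderId)
  if (info === null) {
    return 'down' // no account info published on chain
  }
  return info.expires_at.gte(currentHeight) ? 'live' : 'expired'
}
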
+ 27 - 26
storage-node/packages/runtime-api/identities.js

@@ -20,11 +20,9 @@
 
 const path = require('path')
 const fs = require('fs')
-// const readline = require('readline')
-
 const debug = require('debug')('joystream:runtime:identities')
 const { Keyring } = require('@polkadot/keyring')
-const util_crypto = require('@polkadot/util-crypto')
+const utilCrypto = require('@polkadot/util-crypto')
 
 /*
  * Add identity management to the substrate API.
@@ -32,14 +30,14 @@ const util_crypto = require('@polkadot/util-crypto')
  * This loosely groups: accounts, key management, and membership.
  */
 class IdentitiesApi {
-  static async create (base, {account_file, passphrase, canPromptForPassphrase}) {
+  // RuntimeApi.create() passes this option under the `account_file` key, so alias it here
+  static async create(base, { account_file: accountFile, passphrase, canPromptForPassphrase }) {
     const ret = new IdentitiesApi()
     ret.base = base
-    await ret.init(account_file, passphrase, canPromptForPassphrase)
+    await ret.init(accountFile, passphrase, canPromptForPassphrase)
     return ret
   }
 
-  async init (account_file, passphrase, canPromptForPassphrase) {
+  async init(accountFile, passphrase, canPromptForPassphrase) {
     debug('Init')
 
     // Create keyring
@@ -49,7 +47,7 @@ class IdentitiesApi {
 
     // Load account file, if possible.
     try {
-      this.key = await this.loadUnlock(account_file, passphrase)
+      this.key = await this.loadUnlock(accountFile, passphrase)
     } catch (err) {
       debug('Error loading account file:', err.message)
     }
@@ -58,8 +56,8 @@ class IdentitiesApi {
   /*
    * Load a key file and unlock it if necessary.
    */
-  async loadUnlock (account_file, passphrase) {
-    const fullname = path.resolve(account_file)
+  async loadUnlock(accountFile, passphrase) {
+    const fullname = path.resolve(accountFile)
     debug('Initializing key from', fullname)
     const key = this.keyring.addFromJson(require(fullname))
     await this.tryUnlock(key, passphrase)
@@ -71,7 +69,7 @@ class IdentitiesApi {
    * Try to unlock a key if it isn't already unlocked.
    * The passphrase should be supplied as an argument.
    */
-  async tryUnlock (key, passphrase) {
+  async tryUnlock(key, passphrase) {
     if (!key.isLocked) {
       debug('Key is not locked, not attempting to unlock')
       return
@@ -112,7 +110,10 @@ class IdentitiesApi {
   /*
    * Ask for a passphrase
    */
-  askForPassphrase (address) {
+
+  /* eslint-disable class-methods-use-this */
+  // Disable lint because the method is used by a mocking library.
+  askForPassphrase(address) {
     // Query for passphrase
     const prompt = require('password-prompt')
     return prompt(`Enter passphrase for ${address}: `, { required: false })
@@ -121,7 +122,7 @@ class IdentitiesApi {
   /*
    * Return true if the account is a root account of a member
    */
-  async isMember (accountId) {
+  async isMember(accountId) {
     const memberIds = await this.memberIdsOf(accountId) // return array of member ids
     return memberIds.length > 0 // true if at least one member id exists for the account
   }
@@ -129,7 +130,7 @@ class IdentitiesApi {
   /*
    * Return all the member IDs of an account by the root account id
    */
-  async memberIdsOf (accountId) {
+  async memberIdsOf(accountId) {
     const decoded = this.keyring.decodeAddress(accountId)
     return this.base.api.query.members.memberIdsByRootAccountId(decoded)
   }
@@ -137,16 +138,16 @@ class IdentitiesApi {
   /*
    * Return the first member ID of an account, or undefined if not a member root account.
    */
-  async firstMemberIdOf (accountId) {
+  async firstMemberIdOf(accountId) {
     const decoded = this.keyring.decodeAddress(accountId)
-    let ids = await this.base.api.query.members.memberIdsByRootAccountId(decoded)
+    const ids = await this.base.api.query.members.memberIdsByRootAccountId(decoded)
     return ids[0]
   }
 
   /*
    * Export a key pair to JSON. Will ask for a passphrase.
    */
-  async exportKeyPair (accountId) {
+  async exportKeyPair(accountId) {
     const passphrase = await this.askForPassphrase(accountId)
 
     // Produce JSON output
@@ -157,12 +158,12 @@ class IdentitiesApi {
    * Export a key pair and write it to a JSON file with the account ID as the
    * name.
    */
-  async writeKeyPairExport (accountId, prefix) {
+  async writeKeyPairExport(accountId, prefix) {
     // Generate JSON
     const data = await this.exportKeyPair(accountId)
 
     // Write JSON
-    var filename = `${data.address}.json`
+    let filename = `${data.address}.json`
 
     if (prefix) {
       const path = require('path')
@@ -171,7 +172,7 @@ class IdentitiesApi {
 
     fs.writeFileSync(filename, JSON.stringify(data), {
       encoding: 'utf8',
-      mode: 0o600
+      mode: 0o600,
     })
 
     return filename
@@ -181,20 +182,20 @@ class IdentitiesApi {
    * Register account id with userInfo as a new member
    * using default policy 0, returns new member id
    */
-  async registerMember (accountId, userInfo) {
+  async registerMember(accountId, userInfo) {
     const tx = this.base.api.tx.members.buyMembership(0, userInfo)
 
     return this.base.signAndSendThenGetEventResult(accountId, tx, {
       eventModule: 'members',
       eventName: 'MemberRegistered',
-      eventProperty: 'MemberId'
+      eventProperty: 'MemberId',
     })
   }
 
   /*
    * Injects a keypair and sets it as the default identity
    */
-  useKeyPair (keyPair) {
+  useKeyPair(keyPair) {
     this.key = this.keyring.addPair(keyPair)
   }
 
@@ -202,11 +203,11 @@ class IdentitiesApi {
    * Create a new role key. If no name is given,
    * default to 'storage'.
    */
-  async createNewRoleKey (name) {
+  async createNewRoleKey(name) {
     name = name || 'storage-provider'
 
     // Generate new key pair
-    const keyPair = util_crypto.naclKeypairFromRandom()
+    const keyPair = utilCrypto.naclKeypairFromRandom()
 
     // Encode to an address.
     const addr = this.keyring.encodeAddress(keyPair.publicKey)
@@ -215,7 +216,7 @@ class IdentitiesApi {
     // Add to the keyring. We set the meta to identify the account as
     // a role key.
     const meta = {
-      name: `${name} role account`
+      name: `${name} role account`,
     }
 
     const createPair = require('@polkadot/keyring/pair').default
@@ -232,5 +233,5 @@ class IdentitiesApi {
 }
 
 module.exports = {
-  IdentitiesApi
+  IdentitiesApi,
 }

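Hypothetical usage of the identities API above: load a role key file non-interactively by supplying the passphrase up front. The option names follow the RuntimeApi.create() call in the next file; the path and environment variable are placeholders:

const { RuntimeApi } = require('@joystream/storage-runtime-api')

async function loadRoleKey() {
  const api = await RuntimeApi.create({
    account_file: '/path/to/role_account_key_file.json',
    passphrase: process.env.ACCOUNT_PASSPHRASE,
    canPromptForPassphrase: false,
  })
  console.log('role address:', api.identities.key.address)
  return api
}
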
+ 45 - 37
storage-node/packages/runtime-api/index.js

@@ -35,13 +35,13 @@ const { newExternallyControlledPromise } = require('@joystream/storage-utils/ext
  * Initialize runtime (substrate) API and keyring.
  */
 class RuntimeApi {
-  static async create (options) {
-    const runtime_api = new RuntimeApi()
-    await runtime_api.init(options || {})
-    return runtime_api
+  static async create(options) {
+    const runtimeApi = new RuntimeApi()
+    await runtimeApi.init(options || {})
+    return runtimeApi
   }
 
-  async init (options) {
+  async init(options) {
     debug('Init')
 
     options = options || {}
@@ -66,7 +66,7 @@ class RuntimeApi {
     this.identities = await IdentitiesApi.create(this, {
       account_file: options.account_file,
       passphrase: options.passphrase,
-      canPromptForPassphrase: options.canPromptForPassphrase
+      canPromptForPassphrase: options.canPromptForPassphrase,
     })
     this.balances = await BalancesApi.create(this)
     this.workers = await WorkersApi.create(this)
@@ -74,12 +74,12 @@ class RuntimeApi {
     this.discovery = await DiscoveryApi.create(this)
   }
 
-  disconnect () {
+  disconnect() {
     this.api.disconnect()
   }
 
-  executeWithAccountLock (account_id, func) {
-    return this.asyncLock.acquire(`${account_id}`, func)
+  executeWithAccountLock(accountId, func) {
+    return this.asyncLock.acquire(`${accountId}`, func)
   }
 
   /*
@@ -89,14 +89,14 @@ class RuntimeApi {
    * The result of the Promise is an array containing first the full event
    * name, and then the event fields as an object.
    */
-  async waitForEvent (module, name) {
+  async waitForEvent(module, name) {
     return this.waitForEvents([[module, name]])
   }
 
-  _matchingEvents(subscribed, events) {
+  static matchingEvents(subscribed, events) {
     debug(`Number of events: ${events.length} subscribed to ${subscribed}`)
 
-    const filtered = events.filter((record) => {
+    const filtered = events.filter(record => {
       const { event, phase } = record
 
       // Show what we are busy with
@@ -104,14 +104,14 @@ class RuntimeApi {
       debug(`\t\t${event.meta.documentation.toString()}`)
 
       // Skip events we're not interested in.
-      const matching = subscribed.filter((value) => {
+      const matching = subscribed.filter(value => {
         return event.section === value[0] && event.method === value[1]
       })
       return matching.length > 0
     })
     debug(`Filtered: ${filtered.length}`)
 
-    const mapped = filtered.map((record) => {
+    const mapped = filtered.map(record => {
       const { event } = record
       const types = event.typeDef
 
@@ -122,8 +122,8 @@ class RuntimeApi {
         payload[types[index].type] = data
       })
 
-      const full_name = `${event.section}.${event.method}`
-      return [full_name, payload]
+      const fullName = `${event.section}.${event.method}`
+      return [fullName, payload]
     })
     debug('Mapped', mapped)
 
@@ -137,10 +137,10 @@ class RuntimeApi {
    *
    * Returns the first matched event *only*.
    */
-  async waitForEvents (subscribed) {
-    return new Promise((resolve, reject) => {
-      this.api.query.system.events((events) => {
-        const matches = this._matchingEvents(subscribed, events)
+  async waitForEvents(subscribed) {
+    return new Promise(resolve => {
+      this.api.query.system.events(events => {
+        const matches = RuntimeApi.matchingEvents(subscribed, events)
         if (matches && matches.length) {
           resolve(matches)
         }
@@ -156,24 +156,26 @@ class RuntimeApi {
    * If the subscribed events are given, and a callback as well, then the
    * callback is invoked with matching events.
    */
-  async signAndSend (accountId, tx, attempts, subscribed, callback) {
+  async signAndSend(accountId, tx, attempts, subscribed, callback) {
     accountId = this.identities.keyring.encodeAddress(accountId)
 
     // Key must be unlocked
-    const from_key = this.identities.keyring.getPair(accountId)
-    if (from_key.isLocked) {
+    const fromKey = this.identities.keyring.getPair(accountId)
+    if (fromKey.isLocked) {
       throw new Error('Must unlock key before using it to sign!')
     }
 
     const finalizedPromise = newExternallyControlledPromise()
 
-    let unsubscribe = await this.executeWithAccountLock(accountId, async () => {
+    await this.executeWithAccountLock(accountId, async () => {
       // Try to get the next nonce to use
       let nonce = this.nonces[accountId]
 
       let incrementNonce = () => {
         // only increment once
-        incrementNonce = () => {} // turn it into a no-op
+        incrementNonce = () => {
+          /* turn it into a no-op */
+        }
         nonce = nonce.addn(1)
         this.nonces[accountId] = nonce
       }
@@ -181,6 +183,8 @@ class RuntimeApi {
       // If the nonce isn't available, get it from chain.
       if (!nonce) {
         // current nonce
+        // TODO: possible race condition here found by the linter
+        // eslint-disable-next-line require-atomic-updates
         nonce = await this.api.query.system.accountNonce(accountId)
         debug(`Got nonce for ${accountId} from chain: ${nonce}`)
       }
@@ -188,15 +192,16 @@ class RuntimeApi {
       return new Promise((resolve, reject) => {
         debug('Signing and sending tx')
         // send(statusUpdates) returns a function for unsubscribing from status updates
-        let unsubscribe = tx.sign(from_key, { nonce })
-          .send(({events = [], status}) => {
+        const unsubscribe = tx
+          .sign(fromKey, { nonce })
+          .send(({ events = [], status }) => {
             debug(`TX status: ${status.type}`)
 
             // Whatever events we get, process them if there's someone interested.
            // It is critical that this event handling doesn't prevent the status handling below from running.
             try {
               if (subscribed && callback) {
-                const matched = this._matchingEvents(subscribed, events)
+                const matched = RuntimeApi.matchingEvents(subscribed, events)
                 debug('Matching events:', matched)
                 if (matched.length) {
                   callback(matched)
@@ -238,7 +243,7 @@ class RuntimeApi {
             isInvalid
             */
           })
-          .catch((err) => {
+          .catch(err => {
             // 1014 error: Most likely you are sending transaction with the same nonce,
             // so it assumes you want to replace existing one, but the priority is too low to replace it (priority = fee = len(encoded_transaction) currently)
             // Remember this can also happen if in the past we sent a tx with a future nonce, and the current nonce
@@ -247,9 +252,11 @@ class RuntimeApi {
               const errstr = err.toString()
               // not the best way to check error code.
               // https://github.com/polkadot-js/api/blob/master/packages/rpc-provider/src/coder/index.ts#L52
-              if (errstr.indexOf('Error: 1014:') < 0 && // low priority
-                  errstr.indexOf('Error: 1010:') < 0) // bad transaction
-              {
+              if (
+                errstr.indexOf('Error: 1014:') < 0 && // low priority
+                errstr.indexOf('Error: 1010:') < 0
+              ) {
+                // bad transaction
                 // Error but not nonce related. (bad arguments maybe)
                 debug('TX error', err)
               } else {
@@ -276,13 +283,15 @@ class RuntimeApi {
    * Sign and send a transaction expect event from
    * module and return eventProperty from the event.
    */
-  async signAndSendThenGetEventResult (senderAccountId, tx, { eventModule, eventName, eventProperty }) {
+  async signAndSendThenGetEventResult(senderAccountId, tx, { eventModule, eventName, eventProperty }) {
     // event from a module,
     const subscribed = [[eventModule, eventName]]
+    // TODO: rewrite this method to async-await style
+    // eslint-disable-next-line no-async-promise-executor
     return new Promise(async (resolve, reject) => {
       try {
-        await this.signAndSend(senderAccountId, tx, 1, subscribed, (events) => {
-          events.forEach((event) => {
+        await this.signAndSend(senderAccountId, tx, 1, subscribed, events => {
+          events.forEach(event => {
            // FIXME: we may not necessarily want the first event
            // if multiple events are emitted
             resolve(event[1][eventProperty])
@@ -293,9 +302,8 @@ class RuntimeApi {
       }
     })
   }
-
 }
 
 module.exports = {
-  RuntimeApi
+  RuntimeApi,
 }
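A minimal usage sketch of the renamed event helpers, assuming a connected RuntimeApi instance, an unlocked sender key and an already-constructed extrinsic tx; the module/event names below are illustrative, not taken from this diff:

// waitForEvent resolves with [fullName, payload] tuples, as built by matchingEvents()
const [fullName, payload] = (await api.waitForEvent('balances', 'Transfer'))[0]

// signAndSendThenGetEventResult extracts one property from the first matching event
const openingId = await api.signAndSendThenGetEventResult(senderAccountId, tx, {
  eventModule: 'storageWorkingGroup',
  eventName: 'OpeningAdded',
  eventProperty: 'OpeningId',
})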

+ 19 - 22
storage-node/packages/runtime-api/test/assets.js

@@ -16,36 +16,33 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const mocha = require('mocha');
-const expect = require('chai').expect;
-const sinon = require('sinon');
+const expect = require('chai').expect
 
-const { RuntimeApi } = require('@joystream/storage-runtime-api');
+const { RuntimeApi } = require('@joystream/storage-runtime-api')
 
 describe('Assets', () => {
-  var api;
-  var key;
+  let api
   before(async () => {
-    api = await RuntimeApi.create();
-    key = await api.identities.loadUnlock('test/data/edwards_unlocked.json');
-  });
+    api = await RuntimeApi.create()
+    await api.identities.loadUnlock('test/data/edwards_unlocked.json')
+  })
 
   it('returns DataObjects for a content ID', async () => {
-    const obj = await api.assets.getDataObject('foo');
-    expect(obj.isNone).to.be.true;
-  });
+    const obj = await api.assets.getDataObject('foo')
+    expect(obj.isNone).to.be.true
+  })
 
   it('can check the liaison for a DataObject', async () => {
-    expect(async _ => {
-      await api.assets.checkLiaisonForDataObject('foo', 'bar');
-    }).to.throw;
-  });
+    expect(async () => {
+      await api.assets.checkLiaisonForDataObject('foo', 'bar')
+    }).to.throw
+  })
 
   // Needs properly staked accounts
-  it('can accept content');
-  it('can reject content');
-  it('can create a storage relationship for content');
-  it('can toggle a storage relatsionship to ready state');
-});
+  it('can accept content')
+  it('can reject content')
+  it('can create a storage relationship for content')
+  it('can toggle a storage relationship to ready state')
+})

+ 18 - 20
storage-node/packages/runtime-api/test/balances.js

@@ -16,37 +16,35 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const mocha = require('mocha');
-const expect = require('chai').expect;
-const sinon = require('sinon');
+const expect = require('chai').expect
 
-const { RuntimeApi } = require('@joystream/storage-runtime-api');
+const { RuntimeApi } = require('@joystream/storage-runtime-api')
 
 describe('Balances', () => {
-  var api;
-  var key;
+  let api
+  let key
   before(async () => {
-    api = await RuntimeApi.create();
-    key = await api.identities.loadUnlock('test/data/edwards_unlocked.json');
-  });
+    api = await RuntimeApi.create()
+    key = await api.identities.loadUnlock('test/data/edwards_unlocked.json')
+  })
 
   it('returns free balance for an account', async () => {
-    const balance = await api.balances.freeBalance(key.address);
+    const balance = await api.balances.freeBalance(key.address)
     // Should be exactly zero
-    expect(balance.cmpn(0)).to.equal(0);
-  });
+    expect(balance.cmpn(0)).to.equal(0)
+  })
 
   it('checks whether a minimum balance exists', async () => {
     // A minimum of 0 should exist, but no more.
-    expect(await api.balances.hasMinimumBalanceOf(key.address, 0)).to.be.true;
-    expect(await api.balances.hasMinimumBalanceOf(key.address, 1)).to.be.false;
-  });
+    expect(await api.balances.hasMinimumBalanceOf(key.address, 0)).to.be.true
+    expect(await api.balances.hasMinimumBalanceOf(key.address, 1)).to.be.false
+  })
 
   it('returns the base transaction fee of the chain', async () => {
-    const fee = await api.balances.baseTransactionFee();
+    const fee = await api.balances.baseTransactionFee()
     // >= 0 comparison works
-    expect(fee.cmpn(0)).to.be.at.least(0);
-  });
-});
+    expect(fee.cmpn(0)).to.be.at.least(0)
+  })
+})

+ 48 - 49
storage-node/packages/runtime-api/test/identities.js

@@ -16,84 +16,83 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const mocha = require('mocha');
-const expect = require('chai').expect;
-const sinon = require('sinon');
-const temp = require('temp').track();
+const expect = require('chai').expect
+const sinon = require('sinon')
+const temp = require('temp').track()
 
-const { RuntimeApi } = require('@joystream/storage-runtime-api');
+const { RuntimeApi } = require('@joystream/storage-runtime-api')
 
 describe('Identities', () => {
-  var api;
+  let api
   before(async () => {
-    api = await RuntimeApi.create({ canPromptForPassphrase: true });
-  });
+    api = await RuntimeApi.create({ canPromptForPassphrase: true })
+  })
 
   it('imports keys', async () => {
     // Unlocked keys can be imported without asking for a passphrase
-    await api.identities.loadUnlock('test/data/edwards_unlocked.json');
+    await api.identities.loadUnlock('test/data/edwards_unlocked.json')
 
     // Edwards and schnorr keys should unlock
-    const passphrase_stub = sinon.stub(api.identities, 'askForPassphrase').callsFake(_ => 'asdf');
-    await api.identities.loadUnlock('test/data/edwards.json');
-    await api.identities.loadUnlock('test/data/schnorr.json');
-    passphrase_stub.restore();
+    const passphraseStub = sinon.stub(api.identities, 'askForPassphrase').callsFake(() => 'asdf')
+    await api.identities.loadUnlock('test/data/edwards.json')
+    await api.identities.loadUnlock('test/data/schnorr.json')
+    passphraseStub.restore()
 
     // Except if the wrong passphrase is given
-    const passphrase_stub_bad = sinon.stub(api.identities, 'askForPassphrase').callsFake(_ => 'bad');
+    const passphraseStubBad = sinon.stub(api.identities, 'askForPassphrase').callsFake(() => 'bad')
     expect(async () => {
-      await api.identities.loadUnlock('test/data/edwards.json');
-    }).to.throw;
-    passphrase_stub_bad.restore();
-  });
+      await api.identities.loadUnlock('test/data/edwards.json')
+    }).to.throw
+    passphraseStubBad.restore()
+  })
 
   it('knows about membership', async () => {
-    const key = await api.identities.loadUnlock('test/data/edwards_unlocked.json');
-    const addr = key.address;
+    const key = await api.identities.loadUnlock('test/data/edwards_unlocked.json')
+    const addr = key.address
 
     // Without seeding the runtime with data, we can only verify that the API
     // reacts well in the absence of membership
-    expect(await api.identities.isMember(addr)).to.be.false;
-    const member_id = await api.identities.firstMemberIdOf(addr);
+    expect(await api.identities.isMember(addr)).to.be.false
+    const memberId = await api.identities.firstMemberIdOf(addr)
 
-    expect(member_id).to.be.undefined;
-  });
+    expect(memberId).to.be.undefined
+  })
 
   it('exports keys', async () => {
-    const key = await api.identities.loadUnlock('test/data/edwards_unlocked.json');
+    const key = await api.identities.loadUnlock('test/data/edwards_unlocked.json')
 
-    const passphrase_stub = sinon.stub(api.identities, 'askForPassphrase').callsFake(_ => 'asdf');
-    const exported = await api.identities.exportKeyPair(key.address);
-    passphrase_stub.restore();
+    const passphraseStub = sinon.stub(api.identities, 'askForPassphrase').callsFake(() => 'asdf')
+    const exported = await api.identities.exportKeyPair(key.address)
+    passphraseStub.restore()
 
-    expect(exported).to.have.property('address');
-    expect(exported.address).to.equal(key.address);
+    expect(exported).to.have.property('address')
+    expect(exported.address).to.equal(key.address)
 
-    expect(exported).to.have.property('encoding');
+    expect(exported).to.have.property('encoding')
 
-    expect(exported.encoding).to.have.property('version', '2');
+    expect(exported.encoding).to.have.property('version', '2')
 
-    expect(exported.encoding).to.have.property('content');
-    expect(exported.encoding.content).to.include('pkcs8');
-    expect(exported.encoding.content).to.include('ed25519');
+    expect(exported.encoding).to.have.property('content')
+    expect(exported.encoding.content).to.include('pkcs8')
+    expect(exported.encoding.content).to.include('ed25519')
 
-    expect(exported.encoding).to.have.property('type');
-    expect(exported.encoding.type).to.include('salsa20');
-  });
+    expect(exported.encoding).to.have.property('type')
+    expect(exported.encoding.type).to.include('salsa20')
+  })
 
   it('writes key export files', async () => {
-    const prefix = temp.mkdirSync('joystream-runtime-api-test');
+    const prefix = temp.mkdirSync('joystream-runtime-api-test')
 
-    const key = await api.identities.loadUnlock('test/data/edwards_unlocked.json');
+    const key = await api.identities.loadUnlock('test/data/edwards_unlocked.json')
 
-    const passphrase_stub = sinon.stub(api.identities, 'askForPassphrase').callsFake(_ => 'asdf');
-    const filename = await api.identities.writeKeyPairExport(key.address, prefix);
-    passphrase_stub.restore();
+    const passphraseStub = sinon.stub(api.identities, 'askForPassphrase').callsFake(() => 'asdf')
+    const filename = await api.identities.writeKeyPairExport(key.address, prefix)
+    passphraseStub.restore()
 
-    const fs = require('fs');
-    const stat = fs.statSync(filename);
-    expect(stat.isFile()).to.be.true;
-  });
-});
+    const fs = require('fs')
+    const stat = fs.statSync(filename)
+    expect(stat.isFile()).to.be.true
+  })
+})

+ 6 - 9
storage-node/packages/runtime-api/test/index.js

@@ -16,16 +16,13 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const mocha = require('mocha');
-const expect = require('chai').expect;
-
-const { RuntimeApi } = require('@joystream/storage-runtime-api');
+const { RuntimeApi } = require('@joystream/storage-runtime-api')
 
 describe('RuntimeApi', () => {
   it('can be created', async () => {
-    const api = await RuntimeApi.create();
-    api.disconnect();
-  });
-});
+    const api = await RuntimeApi.create()
+    api.disconnect()
+  })
+})

+ 45 - 44
storage-node/packages/runtime-api/workers.js

@@ -26,23 +26,22 @@ const { Worker } = require('@joystream/types/working-group')
  * Add worker related functionality to the substrate API.
  */
 class WorkersApi {
-  static async create (base) {
+  static async create(base) {
     const ret = new WorkersApi()
     ret.base = base
     await ret.init()
     return ret
   }
 
-
   // eslint-disable-next-line class-methods-use-this, require-await
-  async init () {
+  async init() {
     debug('Init')
   }
 
   /*
    * Check whether the given account and id represent an enrolled storage provider
    */
-  async isRoleAccountOfStorageProvider (storageProviderId, roleAccountId) {
+  async isRoleAccountOfStorageProvider(storageProviderId, roleAccountId) {
     const id = new BN(storageProviderId)
     const roleAccount = this.base.identities.keyring.decodeAddress(roleAccountId)
     const providerAccount = await this.storageProviderRoleAccount(id)
@@ -52,7 +51,7 @@ class WorkersApi {
   /*
    * Returns true if the provider id is enrolled
    */
-  async isStorageProvider (storageProviderId) {
+  async isStorageProvider(storageProviderId) {
     const worker = await this.storageWorkerByProviderId(storageProviderId)
     return worker !== null
   }
@@ -60,7 +59,7 @@ class WorkersApi {
   /*
    * Returns a provider's role account or null if provider doesn't exist
    */
-  async storageProviderRoleAccount (storageProviderId) {
+  async storageProviderRoleAccount(storageProviderId) {
     const worker = await this.storageWorkerByProviderId(storageProviderId)
     return worker ? worker.role_account_id : null
   }
@@ -68,7 +67,7 @@ class WorkersApi {
   /*
    * Returns a Worker instance or null if provider does not exist
    */
-  async storageWorkerByProviderId (storageProviderId) {
+  async storageWorkerByProviderId(storageProviderId) {
     const id = new BN(storageProviderId)
     const { providers } = await this.getAllProviders()
     return providers[id.toNumber()] || null
@@ -77,7 +76,7 @@ class WorkersApi {
   /*
   * Returns the first found provider id with a role account or null if not found
    */
-  async findProviderIdByRoleAccount (roleAccount) {
+  async findProviderIdByRoleAccount(roleAccount) {
     const { ids, providers } = await this.getAllProviders()
 
     for (let i = 0; i < ids.length; i++) {
@@ -93,7 +92,7 @@ class WorkersApi {
   /*
    * Returns the set of ids and Worker instances of providers enrolled on the network
    */
-  async getAllProviders () {
+  async getAllProviders() {
     // const workerEntries = await this.base.api.query.storageWorkingGroup.workerById()
     // can't rely on .isEmpty or isNone property to detect empty map
     // return workerEntries.isNone ? [] : workerEntries[0]
@@ -106,9 +105,7 @@ class WorkersApi {
     for (let id = 0; id < nextWorkerId; id++) {
       // We get back an Option. Will be None if value doesn't exist
       // eslint-disable-next-line no-await-in-loop
-      let value = await this.base.api.rpc.state.getStorage(
-        this.base.api.query.storageWorkingGroup.workerById.key(id)
-      )
+      let value = await this.base.api.rpc.state.getStorage(this.base.api.query.storageWorkingGroup.workerById.key(id))
 
       if (!value.isNone) {
         // no need to read from storage again!
@@ -142,32 +139,32 @@ class WorkersApi {
    * Add a new storage group opening using the lead account. Returns the
    * new opening id.
    */
-  async dev_addStorageOpening() {
-    const openTx = this.dev_makeAddOpeningTx('Worker')
-    return this.dev_submitAddOpeningTx(openTx, await this.getLeadRoleAccount())
+  async devAddStorageOpening() {
+    const openTx = this.devMakeAddOpeningTx('Worker')
+    return this.devSubmitAddOpeningTx(openTx, await this.getLeadRoleAccount())
   }
 
   /*
    * Add a new storage working group lead opening using sudo account. Returns the
    * new opening id.
    */
-  async dev_addStorageLeadOpening() {
-    const openTx = this.dev_makeAddOpeningTx('Leader')
+  async devAddStorageLeadOpening() {
+    const openTx = this.devMakeAddOpeningTx('Leader')
     const sudoTx = this.base.api.tx.sudo.sudo(openTx)
-    return this.dev_submitAddOpeningTx(sudoTx, await this.base.identities.getSudoAccount())
+    return this.devSubmitAddOpeningTx(sudoTx, await this.base.identities.getSudoAccount())
   }
 
   /*
    * Constructs an addOpening tx of openingType
    */
-  dev_makeAddOpeningTx(openingType) {
+  devMakeAddOpeningTx(openingType) {
     return this.base.api.tx.storageWorkingGroup.addOpening(
       'CurrentBlock',
       {
         application_rationing_policy: {
-          'max_active_applicants': 1
+          max_active_applicants: 1,
         },
-        max_review_period_length: 1000
+        max_review_period_length: 1000,
         // default values for everything else..
       },
       'dev-opening',
@@ -179,34 +176,39 @@ class WorkersApi {
    * Submits a tx (expecting it to dispatch storageWorkingGroup.addOpening) and returns
    * the OpeningId from the resulting event.
    */
-  async dev_submitAddOpeningTx(tx, senderAccount) {
+  async devSubmitAddOpeningTx(tx, senderAccount) {
     return this.base.signAndSendThenGetEventResult(senderAccount, tx, {
       eventModule: 'storageWorkingGroup',
       eventName: 'OpeningAdded',
-      eventProperty: 'OpeningId'
+      eventProperty: 'OpeningId',
     })
   }
 
   /*
    * Apply on an opening, returns the application id.
    */
-  async dev_applyOnOpening(openingId, memberId, memberAccount, roleAccount) {
+  async devApplyOnOpening(openingId, memberId, memberAccount, roleAccount) {
     const applyTx = this.base.api.tx.storageWorkingGroup.applyOnOpening(
-      memberId, openingId, roleAccount, null, null, `colossus-${memberId}`
+      memberId,
+      openingId,
+      roleAccount,
+      null,
+      null,
+      `colossus-${memberId}`
     )
 
     return this.base.signAndSendThenGetEventResult(memberAccount, applyTx, {
       eventModule: 'storageWorkingGroup',
       eventName: 'AppliedOnOpening',
-      eventProperty: 'ApplicationId'
+      eventProperty: 'ApplicationId',
     })
   }
 
   /*
    * Move lead opening to review state using sudo account
    */
-  async dev_beginLeadOpeningReview(openingId) {
-    const beginReviewTx = this.dev_makeBeginOpeningReviewTx(openingId)
+  async devBeginLeadOpeningReview(openingId) {
+    const beginReviewTx = this.devMakeBeginOpeningReviewTx(openingId)
     const sudoTx = this.base.api.tx.sudo.sudo(beginReviewTx)
     return this.base.signAndSend(await this.base.identities.getSudoAccount(), sudoTx)
   }
@@ -214,53 +216,52 @@ class WorkersApi {
   /*
    * Move a storage opening to review state using lead account
    */
-  async dev_beginStorageOpeningReview(openingId) {
-    const beginReviewTx = this.dev_makeBeginOpeningReviewTx(openingId)
+  async devBeginStorageOpeningReview(openingId) {
+    const beginReviewTx = this.devMakeBeginOpeningReviewTx(openingId)
     return this.base.signAndSend(await this.getLeadRoleAccount(), beginReviewTx)
   }
 
   /*
   * Constructs a beginApplicantReview tx for openingId, which puts an opening into the review state
    */
-  dev_makeBeginOpeningReviewTx(openingId) {
+  devMakeBeginOpeningReviewTx(openingId) {
     return this.base.api.tx.storageWorkingGroup.beginApplicantReview(openingId)
   }
 
   /*
    * Fill a lead opening, return the assigned worker id, using the sudo account
    */
-  async dev_fillLeadOpening(openingId, applicationId) {
-    const fillTx = this.dev_makeFillOpeningTx(openingId, applicationId)
+  async devFillLeadOpening(openingId, applicationId) {
+    const fillTx = this.devMakeFillOpeningTx(openingId, applicationId)
     const sudoTx = this.base.api.tx.sudo.sudo(fillTx)
-    const filled = await this.dev_submitFillOpeningTx(
-      await this.base.identities.getSudoAccount(), sudoTx)
+    const filled = await this.devSubmitFillOpeningTx(await this.base.identities.getSudoAccount(), sudoTx)
     return getWorkerIdFromApplicationIdToWorkerIdMap(filled, applicationId)
   }
 
   /*
    * Fill a storage opening, return the assigned worker id, using the lead account
    */
-  async dev_fillStorageOpening(openingId, applicationId) {
-    const fillTx = this.dev_makeFillOpeningTx(openingId, applicationId)
-    const filled = await this.dev_submitFillOpeningTx(await this.getLeadRoleAccount(), fillTx)
+  async devFillStorageOpening(openingId, applicationId) {
+    const fillTx = this.devMakeFillOpeningTx(openingId, applicationId)
+    const filled = await this.devSubmitFillOpeningTx(await this.getLeadRoleAccount(), fillTx)
     return getWorkerIdFromApplicationIdToWorkerIdMap(filled, applicationId)
   }
 
   /*
    * Constructs a FillOpening transaction
    */
-  dev_makeFillOpeningTx(openingId, applicationId) {
+  devMakeFillOpeningTx(openingId, applicationId) {
     return this.base.api.tx.storageWorkingGroup.fillOpening(openingId, [applicationId], null)
   }
 
   /*
    * Dispatches a fill opening tx and returns a map of the application id to their new assigned worker ids.
    */
-  async dev_submitFillOpeningTx(senderAccount, tx) {
+  async devSubmitFillOpeningTx(senderAccount, tx) {
     return this.base.signAndSendThenGetEventResult(senderAccount, tx, {
       eventModule: 'storageWorkingGroup',
       eventName: 'OpeningFilled',
-      eventProperty: 'ApplicationIdToWorkerIdMap'
+      eventProperty: 'ApplicationIdToWorkerIdMap',
     })
   }
 }
@@ -270,14 +271,14 @@ class WorkersApi {
  * ApplicationIdToWorkerIdMap map in the OpeningFilled event. Expects map to
  * contain at least one entry.
  */
-function getWorkerIdFromApplicationIdToWorkerIdMap (filledMap, applicationId) {
+function getWorkerIdFromApplicationIdToWorkerIdMap(filledMap, applicationId) {
   if (filledMap.size === 0) {
     throw new Error('Expected opening to be filled!')
   }
 
   let ourApplicationIdKey
 
-  for (let key of filledMap.keys()) {
+  for (const key of filledMap.keys()) {
     if (key.eq(applicationId)) {
       ourApplicationIdKey = key
       break
@@ -294,5 +295,5 @@ function getWorkerIdFromApplicationIdToWorkerIdMap (filledMap, applicationId) {
 }
 
 module.exports = {
-  WorkersApi
+  WorkersApi,
 }
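The dev_* → dev* renames above keep the original call order; a sketch of the intended flow on a development chain, assuming the WorkersApi is mounted as api.workers (like the other sub-APIs) and that memberId, memberAccount and roleAccount already exist:

const openingId = await api.workers.devAddStorageOpening()
const applicationId = await api.workers.devApplyOnOpening(openingId, memberId, memberAccount, roleAccount)
await api.workers.devBeginStorageOpeningReview(openingId)
const workerId = await api.workers.devFillStorageOpening(openingId, applicationId)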

+ 45 - 49
storage-node/packages/storage/filter.js

@@ -16,74 +16,67 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const debug = require('debug')('joystream:storage:filter');
+const debug = require('debug')('joystream:storage:filter')
 
-const DEFAULT_MAX_FILE_SIZE = 500 * 1024 * 1024;
-const DEFAULT_ACCEPT_TYPES = [
-  'video/*',
-  'audio/*',
-  'image/*',
-];
-const DEFAULT_REJECT_TYPES = [];
+const DEFAULT_MAX_FILE_SIZE = 500 * 1024 * 1024
+const DEFAULT_ACCEPT_TYPES = ['video/*', 'audio/*', 'image/*']
+const DEFAULT_REJECT_TYPES = []
 
 // Configuration defaults
-function config_defaults(config)
-{
-  const filter =  config.filter || {};
+function configDefaults(config) {
+  const filter = config.filter || {}
 
   // We accept zero as switching this check off.
-  if (typeof filter.max_size == 'undefined' || typeof filter.max_size == 'null') {
-    filter.max_size = DEFAULT_MAX_FILE_SIZE;
+  if (typeof filter.max_size === 'undefined') {
+    filter.max_size = DEFAULT_MAX_FILE_SIZE
   }
 
   // Figure out mime types
-  filter.mime = filter.mime || [];
-  filter.mime.accept = filter.mime.accept || DEFAULT_ACCEPT_TYPES;
-  filter.mime.reject = filter.mime.reject || DEFAULT_REJECT_TYPES;
+  filter.mime = filter.mime || []
+  filter.mime.accept = filter.mime.accept || DEFAULT_ACCEPT_TYPES
+  filter.mime.reject = filter.mime.reject || DEFAULT_REJECT_TYPES
 
-  return filter;
+  return filter
 }
 
 // Mime type matching
-function mime_matches(acceptable, provided)
-{
+function mimeMatches(acceptable, provided) {
   if (acceptable.endsWith('*')) {
     // Wildcard match
-    const prefix = acceptable.slice(0, acceptable.length - 1);
-    debug('wildcard matching', provided, 'against', acceptable, '/', prefix);
-    return provided.startsWith(prefix);
+    const prefix = acceptable.slice(0, acceptable.length - 1)
+    debug('wildcard matching', provided, 'against', acceptable, '/', prefix)
+    return provided.startsWith(prefix)
   }
   // Exact match
-  debug('exact matching', provided, 'against', acceptable);
-  return provided == acceptable;
+  debug('exact matching', provided, 'against', acceptable)
+  return provided === acceptable
 }
 
-function mime_matches_any(accept, reject, provided)
-{
+function mimeMatchesAny(accept, reject, provided) {
   // Pass accept
-  var accepted = false;
-  for (var item of accept) {
-    if (mime_matches(item, provided)) {
-      debug('Content type matches', item, 'which is acceptable.');
-      accepted = true;
-      break;
+  let accepted = false
+  for (const item of accept) {
+    if (mimeMatches(item, provided)) {
+      debug('Content type matches', item, 'which is acceptable.')
+      accepted = true
+      break
     }
   }
   if (!accepted) {
-    return false;
+    return false
   }
 
   // Don't pass reject
-  for (var item of reject) {
-    if (mime_matches(item, provided)) {
-      debug('Content type matches', item, 'which is unacceptable.');
-      return false;
+  for (const item of reject) {
+    if (mimeMatches(item, provided)) {
+      debug('Content type matches', item, 'which is unacceptable.')
+      return false
     }
   }
 
-  return true;
+  return true
 }
 
 /**
@@ -93,40 +86,43 @@ function mime_matches_any(accept, reject, provided)
  * This is a straightforward implementation of
  * https://github.com/Joystream/storage-node-joystream/issues/14 - but should
  * most likely be improved on in future.
+ * @param {object} config - configuration
+ * @param {object} headers - request headers to validate (Content-Length is enforced)
+ * @param {string} mimeType - MIME type of the content being filtered
+ * @return {object} HTTP status code and error message.
  **/
-function filter_func(config, headers, mime_type)
-{
-  const filter = config_defaults(config);
+function filterFunc(config, headers, mimeType) {
+  const filter = configDefaults(config)
 
   // Enforce maximum file upload size
   if (filter.max_size) {
-    const size = parseInt(headers['content-length'], 10);
+    const size = parseInt(headers['content-length'], 10)
     if (!size) {
       return {
         code: 411,
         message: 'A Content-Length header is required.',
-      };
+      }
     }
 
     if (size > filter.max_size) {
       return {
         code: 413,
         message: 'The provided Content-Length is too large.',
-      };
+      }
     }
   }
 
   // Enforce mime type based filtering
-  if (!mime_matches_any(filter.mime.accept, filter.mime.reject, mime_type)) {
+  if (!mimeMatchesAny(filter.mime.accept, filter.mime.reject, mimeType)) {
     return {
       code: 415,
       message: 'Content has an unacceptable MIME type.',
-    };
+    }
   }
 
   return {
     code: 200,
-  };
+  }
 }
 
-module.exports = filter_func;
+module.exports = filterFunc
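Behaviour is unchanged by the rename; a minimal sketch of calling the exported function (the require path is illustrative):

const filterFunc = require('./filter')

// An empty config picks up the defaults above: 500 MiB limit, video/audio/image accepted.
const result = filterFunc({}, { 'content-length': '1024' }, 'video/mp4')
if (result.code !== 200) {
  console.log(result.code, result.message) // e.g. 415 'Content has an unacceptable MIME type.'
}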

+ 4 - 4
storage-node/packages/storage/index.js

@@ -16,10 +16,10 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const { Storage } = require('./storage');
+const { Storage } = require('./storage')
 
 module.exports = {
-  Storage: Storage,
-};
+  Storage,
+}

+ 162 - 179
storage-node/packages/storage/storage.js

@@ -16,173 +16,165 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const { Transform } = require('stream');
-const fs = require('fs');
+const { Transform } = require('stream')
+const fs = require('fs')
 
-const debug = require('debug')('joystream:storage:storage');
+const debug = require('debug')('joystream:storage:storage')
+
+const Promise = require('bluebird')
 
-const Promise = require('bluebird');
 Promise.config({
   cancellation: true,
-});
+})
 
-const file_type = require('file-type');
-const ipfs_client = require('ipfs-http-client');
-const temp = require('temp').track();
-const _ = require('lodash');
+const fileType = require('file-type')
+const ipfsClient = require('ipfs-http-client')
+const temp = require('temp').track()
+const _ = require('lodash')
 
 // Default request timeout; imposed on top of the IPFS client, because the
 // client doesn't seem to care.
-const DEFAULT_TIMEOUT = 30 * 1000;
+const DEFAULT_TIMEOUT = 30 * 1000
 
 // Default/dummy resolution implementation.
-const DEFAULT_RESOLVE_CONTENT_ID = async (original) => {
-  debug('Warning: Default resolution returns original CID', original);
-  return original;
+const DEFAULT_RESOLVE_CONTENT_ID = async original => {
+  debug('Warning: Default resolution returns original CID', original)
+  return original
 }
 
 // Default file info if nothing could be detected.
 const DEFAULT_FILE_INFO = {
-  mime_type: 'application/octet-stream',
+  mimeType: 'application/octet-stream',
   ext: 'bin',
-};
-
+}
 
 /*
  * fileType is a weird name, because we're really looking at MIME types.
  * Also, the type field includes extension info, so we're going to call
- * it file_info { mime_type, ext } instead.
+ * it fileInfo { mimeType, ext } instead.
  * Nitpicking, but it also means we can add our default type if things
  * go wrong.
  */
-function fix_file_info(info)
-{
+function fixFileInfo(info) {
   if (!info) {
-    info = DEFAULT_FILE_INFO;
-  }
-  else {
-    info.mime_type = info.mime;
-    delete(info.mime);
+    info = DEFAULT_FILE_INFO
+  } else {
+    info.mimeType = info.mime
+    delete info.mime
   }
-  return info;
+  return info
 }
 
-function fix_file_info_on_stream(stream)
-{
-  var info = fix_file_info(stream.fileType);
-  delete(stream.fileType);
-  stream.file_info = info;
-  return stream;
+function fixFileInfoOnStream(stream) {
+  const info = fixFileInfo(stream.fileType)
+  delete stream.fileType
+  stream.fileInfo = info
+  return stream
 }
 
-
 /*
  * Internal Transform stream for helping write to a temporary location, adding
  * MIME type detection, and a commit() function.
  */
-class StorageWriteStream extends Transform
-{
-  constructor(storage, options)
-  {
-    options = _.clone(options || {});
+class StorageWriteStream extends Transform {
+  constructor(storage, options) {
+    options = _.clone(options || {})
 
-    super(options);
+    super(options)
 
-    this.storage = storage;
+    this.storage = storage
 
     // Create temp target.
-    this.temp = temp.createWriteStream();
-    this.buf = Buffer.alloc(0);
+    this.temp = temp.createWriteStream()
+    this.buf = Buffer.alloc(0)
   }
 
-  _transform(chunk, encoding, callback)
-  {
+  _transform(chunk, encoding, callback) {
     // Deal with buffers only
     if (typeof chunk === 'string') {
-      chunk = Buffer.from(chunk);
+      chunk = Buffer.from(chunk)
     }
 
     // Logging this all the time is too verbose
     // debug('Writing temporary chunk', chunk.length, chunk);
-    this.temp.write(chunk);
+    this.temp.write(chunk)
 
     // Try to detect file type during streaming.
-    if (!this.file_info && this.buf < file_type.minimumBytes) {
-      this.buf = Buffer.concat([this.buf, chunk]);
+    if (!this.fileInfo && this.buf.length < fileType.minimumBytes) {
+      this.buf = Buffer.concat([this.buf, chunk])
 
-      if (this.buf >= file_type.minimumBytes) {
-        const info = file_type(this.buf);
+      if (this.buf.length >= fileType.minimumBytes) {
+        const info = fileType(this.buf)
         // No info? We can try again at the end of the stream.
         if (info) {
-          this.file_info = fix_file_info(info);
-          this.emit('file_info', this.file_info);
+          this.fileInfo = fixFileInfo(info)
+          this.emit('fileInfo', this.fileInfo)
         }
       }
     }
 
-    callback(null);
+    callback(null)
   }
 
-  _flush(callback)
-  {
-    debug('Flushing temporary stream:', this.temp.path);
-    this.temp.end();
+  _flush(callback) {
+    debug('Flushing temporary stream:', this.temp.path)
+    this.temp.end()
 
     // Since we're finished, we can try to detect the file type again.
-    if (!this.file_info) {
-      const read = fs.createReadStream(this.temp.path);
-      file_type.stream(read)
-        .then((stream) => {
-          this.file_info = fix_file_info_on_stream(stream).file_info;
-          this.emit('file_info', this.file_info);
+    if (!this.fileInfo) {
+      const read = fs.createReadStream(this.temp.path)
+      fileType
+        .stream(read)
+        .then(stream => {
+          this.fileInfo = fixFileInfoOnStream(stream).fileInfo
+          this.emit('fileInfo', this.fileInfo)
+        })
+        .catch(err => {
+          debug('Error trying to detect file type at end-of-stream:', err)
         })
-        .catch((err) => {
-          debug('Error trying to detect file type at end-of-stream:', err);
-        });
     }
 
-    callback(null);
+    callback(null)
   }
 
   /*
    * Commit this stream to the IPFS backend.
    */
-  commit()
-  {
+  commit() {
     // Create a read stream from the temp file.
     if (!this.temp) {
-      throw new Error('Cannot commit a temporary stream that does not exist. Did you call cleanup()?');
+      throw new Error('Cannot commit a temporary stream that does not exist. Did you call cleanup()?')
     }
 
-    debug('Committing temporary stream: ', this.temp.path);
-    this.storage.ipfs.addFromFs(this.temp.path)
-      .then(async (result) => {
-        const hash = result[0].hash;
-        debug('Stream committed as', hash);
-        this.emit('committed', hash);
-        await this.storage.ipfs.pin.add(hash);
+    debug('Committing temporary stream: ', this.temp.path)
+    this.storage.ipfs
+      .addFromFs(this.temp.path)
+      .then(async result => {
+        const hash = result[0].hash
+        debug('Stream committed as', hash)
+        this.emit('committed', hash)
+        await this.storage.ipfs.pin.add(hash)
       })
-      .catch((err) => {
-        debug('Error committing stream', err);
-        this.emit('error', err);
+      .catch(err => {
+        debug('Error committing stream', err)
+        this.emit('error', err)
       })
   }
 
   /*
    * Clean up temporary data.
    */
-  cleanup()
-  {
-    debug('Cleaning up temporary file: ', this.temp.path);
-    fs.unlink(this.temp.path, () => {}); // Ignore errors
-    delete(this.temp);
+  cleanup() {
+    debug('Cleaning up temporary file: ', this.temp.path)
+    fs.unlink(this.temp.path, () => {
+      /* Ignore errors. */
+    })
+    delete this.temp
   }
 }
 
-
-
 /*
  * Manages the storage backend interaction. This provides a Promise-based API.
  *
@@ -191,8 +183,7 @@ class StorageWriteStream extends Transform
  *   const store = await Storage.create({ ... });
  *   store.open(...);
  */
-class Storage
-{
+class Storage {
   /*
    * Create a Storage instance. Options include:
    *
@@ -211,32 +202,30 @@ class Storage
    * timeout is given, it is used - otherwise, the `option.timeout` value
    * above is used.
    */
-  static create(options)
-  {
-    const storage = new Storage();
-    storage._init(options);
-    return storage;
+  static create(options) {
+    const storage = new Storage()
+    storage._init(options)
+    return storage
   }
 
-  _init(options)
-  {
-    this.options = _.clone(options || {});
-    this.options.ipfs = this.options.ipfs || {};
+  _init(options) {
+    this.options = _.clone(options || {})
+    this.options.ipfs = this.options.ipfs || {}
 
-    this._timeout = this.options.timeout || DEFAULT_TIMEOUT;
-    this._resolve_content_id = this.options.resolve_content_id || DEFAULT_RESOLVE_CONTENT_ID;
+    this._timeout = this.options.timeout || DEFAULT_TIMEOUT
+    this._resolve_content_id = this.options.resolve_content_id || DEFAULT_RESOLVE_CONTENT_ID
 
-    this.ipfs = ipfs_client(this.options.ipfs.connect_options);
+    this.ipfs = ipfsClient(this.options.ipfs.connect_options)
 
-    this.pins = {};
+    this.pins = {}
 
     this.ipfs.id((err, identity) => {
       if (err) {
-        debug(`Warning IPFS daemon not running: ${err.message}`);
+        debug(`Warning IPFS daemon not running: ${err.message}`)
       } else {
-        debug(`IPFS node is up with identity: ${identity.id}`);
+        debug(`IPFS node is up with identity: ${identity.id}`)
       }
-    });
+    })
   }
 
   /*
@@ -244,64 +233,62 @@ class Storage
    * the given timeout interval, and tries to execute the given operation within
    * that time.
    */
-  async _with_specified_timeout(timeout, operation)
-  {
+  async withSpecifiedTimeout(timeout, operation) {
+    // TODO: rewrite this method to async-await style
+    // eslint-disable-next-line no-async-promise-executor
     return new Promise(async (resolve, reject) => {
       try {
-        resolve(await new Promise(operation));
+        resolve(await new Promise(operation))
       } catch (err) {
-        reject(err);
+        reject(err)
       }
-    }).timeout(timeout || this._timeout);
+    }).timeout(timeout || this._timeout)
   }
 
   /*
    * Resolve content ID with timeout.
    */
-  async _resolve_content_id_with_timeout(timeout, content_id)
-  {
-    return await this._with_specified_timeout(timeout, async (resolve, reject) => {
+  async resolveContentIdWithTimeout(timeout, contentId) {
+    return await this.withSpecifiedTimeout(timeout, async (resolve, reject) => {
       try {
-        resolve(await this._resolve_content_id(content_id));
+        resolve(await this._resolve_content_id(contentId))
       } catch (err) {
-        reject(err);
+        reject(err)
       }
-    });
+    })
   }
 
   /*
    * Stat a content ID.
    */
-  async stat(content_id, timeout)
-  {
-    const resolved = await this._resolve_content_id_with_timeout(timeout, content_id);
+  async stat(contentId, timeout) {
+    const resolved = await this.resolveContentIdWithTimeout(timeout, contentId)
 
-    return await this._with_specified_timeout(timeout, (resolve, reject) => {
+    return await this.withSpecifiedTimeout(timeout, (resolve, reject) => {
       this.ipfs.files.stat(`/ipfs/${resolved}`, { withLocal: true }, (err, res) => {
         if (err) {
-          reject(err);
-          return;
+          reject(err)
+          return
         }
-        resolve(res);
-      });
-    });
+        resolve(res)
+      })
+    })
   }
 
   /*
    * Return the size of a content ID.
    */
-  async size(content_id, timeout)
-  {
-    const stat = await this.stat(content_id, timeout);
-    return stat.size;
+  async size(contentId, timeout) {
+    const stat = await this.stat(contentId, timeout)
+    return stat.size
   }
 
   /*
    * Opens the specified content in read or write mode, and returns a Promise
    * with the stream.
    *
-   * Read streams will contain a file_info property, with:
-   *  - a `mime_type` field providing the file's MIME type, or a default.
+   * Read streams will contain a fileInfo property, with:
+   *  - a `mimeType` field providing the file's MIME type, or a default.
    *  - an `ext` property, providing a file extension suggestion, or a default.
    *
    * Write streams have a slightly different flow, in order to allow for MIME
@@ -312,98 +299,94 @@ class Storage
    * When the commit has finished, a `committed` event is emitted, which
    * contains the IPFS backend's content ID.
    *
-   * Write streams also emit a `file_info` event during writing. It is passed
-   * the `file_info` field as described above. Event listeners may now opt to
+   * Write streams also emit a `fileInfo` event during writing. It is passed
+   * the `fileInfo` field as described above. Event listeners may now opt to
    * abort the write or continue and eventually `commit()` the file. There is
    * an explicit `cleanup()` function that removes temporary files as well,
   * in case committing is not desired.
    */
-  async open(content_id, mode, timeout)
-  {
-    if (mode != 'r' && mode != 'w') {
-      throw Error('The only supported modes are "r", "w" and "a".');
+  async open(contentId, mode, timeout) {
+    if (mode !== 'r' && mode !== 'w') {
+      throw Error('The only supported modes are "r" and "w".')
     }
 
     // Write stream
     if (mode === 'w') {
-      return await this._create_write_stream(content_id, timeout);
+      return await this.createWriteStream(contentId, timeout)
     }
 
     // Read stream - with file type detection
-    return await this._create_read_stream(content_id, timeout);
+    return await this.createReadStream(contentId, timeout)
   }
 
-  async _create_write_stream(content_id)
-  {
+  async createWriteStream() {
     // IPFS wants us to just dump a stream into its storage, then returns a
     // content ID (of its own).
     // We need to instead return a stream immediately, that we eventually
     // decorate with the content ID when that's available.
-    return new Promise((resolve, reject) => {
-      const stream = new StorageWriteStream(this);
-      resolve(stream);
-    });
+    return new Promise(resolve => {
+      const stream = new StorageWriteStream(this)
+      resolve(stream)
+    })
   }
 
-  async _create_read_stream(content_id, timeout)
-  {
-    const resolved = await this._resolve_content_id_with_timeout(timeout, content_id);
+  async createReadStream(contentId, timeout) {
+    const resolved = await this.resolveContentIdWithTimeout(timeout, contentId)
 
-    var found = false;
-    return await this._with_specified_timeout(timeout, (resolve, reject) => {
-      const ls = this.ipfs.getReadableStream(resolved);
-      ls.on('data', async (result) => {
+    let found = false
+    return await this.withSpecifiedTimeout(timeout, (resolve, reject) => {
+      const ls = this.ipfs.getReadableStream(resolved)
+      ls.on('data', async result => {
         if (result.path === resolved) {
-          found = true;
+          found = true
 
-          const ft_stream = await file_type.stream(result.content);
-          resolve(fix_file_info_on_stream(ft_stream));
+          const ftStream = await fileType.stream(result.content)
+          resolve(fixFileInfoOnStream(ftStream))
         }
-      });
-      ls.on('error', (err) => {
-        ls.end();
-        debug(err);
-        reject(err);
-      });
+      })
+      ls.on('error', err => {
+        ls.end()
+        debug(err)
+        reject(err)
+      })
       ls.on('end', () => {
         if (!found) {
-          const err = new Error('No matching content found for', content_id);
-          debug(err);
-          reject(err);
+          const err = new Error(`No matching content found for ${contentId}`)
+          debug(err)
+          reject(err)
         }
-      });
-      ls.resume();
-    });
+      })
+      ls.resume()
+    })
   }
 
   /*
    * Synchronize the given content ID
    */
-  async synchronize(content_id)
-  {
-    const resolved = await this._resolve_content_id_with_timeout(this._timeout, content_id);
+  async synchronize(contentId) {
+    const resolved = await this.resolveContentIdWithTimeout(this._timeout, contentId)
 
     // TODO: validate that the resolved id is a proper IPFS CID, not null or an empty string
 
     if (this.pins[resolved]) {
-      return;
+      return
     }
 
-    debug(`Pinning ${resolved}`);
+    debug(`Pinning ${resolved}`)
 
-    // This call blocks until file is retreived..
-    this.ipfs.pin.add(resolved, {quiet: true, pin: true}, (err, res) => {
+    // This call blocks until the file is retrieved.
+    this.ipfs.pin.add(resolved, { quiet: true, pin: true }, err => {
       if (err) {
         debug(`Error Pinning: ${resolved}`)
-        delete this.pins[resolved];
+        delete this.pins[resolved]
       } else {
-        debug(`Pinned ${resolved}`);
+        debug(`Pinned ${resolved}`)
         // TODO: shouldn't we set this.pins[resolved] = true here?
       }
-    });
+    })
   }
 }
 
 module.exports = {
-  Storage: Storage,
-};
+  Storage,
+}
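A sketch of the write-stream flow documented in open() above, using the renamed fileInfo/committed events; assumes a local IPFS daemon and that this runs inside an async function (the content id is illustrative — the new createWriteStream ignores it):

const { Storage } = require('@joystream/storage-node-backend')

const store = await Storage.create({ timeout: 5000 })
const stream = await store.open('my-content-id', 'w')
stream.on('fileInfo', info => console.log('detected', info.mimeType, info.ext))
stream.on('finish', () => stream.commit()) // or stream.cleanup() to abort
stream.on('committed', hash => console.log('pinned as', hash))
stream.end(Buffer.from('hello world'))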

+ 150 - 151
storage-node/packages/storage/test/storage.js

@@ -16,29 +16,28 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const mocha = require('mocha');
-const chai = require('chai');
-const chai_as_promised = require('chai-as-promised');
-chai.use(chai_as_promised);
-const expect = chai.expect;
+const chai = require('chai')
+const chaiAsPromised = require('chai-as-promised')
 
-const fs = require('fs');
+chai.use(chaiAsPromised)
+const expect = chai.expect
 
-const { Storage } = require('@joystream/storage-node-backend');
+const fs = require('fs')
 
-const IPFS_CID_REGEX = /^Qm[1-9A-HJ-NP-Za-km-z]{44}$/;
+const { Storage } = require('@joystream/storage-node-backend')
 
-function write(store, content_id, contents, callback)
-{
-  store.open(content_id, 'w')
-    .then((stream) => {
+const IPFS_CID_REGEX = /^Qm[1-9A-HJ-NP-Za-km-z]{44}$/
 
+function write(store, contentId, contents, callback) {
+  store
+    .open(contentId, 'w')
+    .then(stream => {
       stream.on('finish', () => {
-        stream.commit();
-      });
-      stream.on('committed', callback);
+        stream.commit()
+      })
+      stream.on('committed', callback)
 
       if (!stream.write(contents)) {
         stream.once('drain', () => stream.end())
@@ -46,12 +45,12 @@ function write(store, content_id, contents, callback)
         process.nextTick(() => stream.end())
       }
     })
-    .catch((err) => {
-      expect.fail(err);
-    });
+    .catch(err => {
+      expect.fail(err)
+    })
 }
 
-function read_all (stream) {
+function readAll(stream) {
   return new Promise((resolve, reject) => {
     const chunks = []
     stream.on('data', chunk => chunks.push(chunk))
@@ -61,163 +60,163 @@ function read_all (stream) {
   })
 }
 
-function create_known_object(content_id, contents, callback)
-{
-  var hash;
+function createKnownObject(contentId, contents, callback) {
+  let hash
   const store = Storage.create({
     resolve_content_id: () => {
-      return hash;
+      return hash
     },
   })
 
-  write(store, content_id, contents, (the_hash) => {
-    hash = the_hash;
-
-    callback(store, hash);
-  });
+  write(store, contentId, contents, theHash => {
+    hash = theHash
 
+    callback(store, hash)
+  })
 }
 
 describe('storage/storage', () => {
-  var storage;
+  let storage
   before(async () => {
-    storage = await Storage.create({ timeout: 1900 });
-  });
+    storage = await Storage.create({ timeout: 1900 })
+  })
 
   describe('open()', () => {
-    it('can write a stream', (done) => {
-      write(storage, 'foobar', 'test-content', (hash) => {
-        expect(hash).to.not.be.undefined;
+    it('can write a stream', done => {
+      write(storage, 'foobar', 'test-content', hash => {
+        expect(hash).to.not.be.undefined
         expect(hash).to.match(IPFS_CID_REGEX)
-        done();
-      });
-    });
-
-    it('detects the MIME type of a write stream', (done) => {
-      const contents = fs.readFileSync('../../storage-node_new.svg');
-      storage.open('mime-test', 'w')
-        .then((stream) => {
-          var file_info;
-          stream.on('file_info', (info) => {
-            // Could filter & abort here now, but we're just going to set this,
-            // and expect it to be set later...
-            file_info = info;
-          });
-
-          stream.on('finish', () => {
-            stream.commit();
-          });
-
-          stream.on('committed', (hash) => {
-            // ... if file_info is not set here, there's an issue.
-            expect(file_info).to.have.property('mime_type', 'application/xml');
-            expect(file_info).to.have.property('ext', 'xml');
-            done();
-          });
-
-          if (!stream.write(contents)) {
-            stream.once('drain', () => stream.end())
-          } else {
-            process.nextTick(() => stream.end())
-          }
-        })
-        .catch((err) => {
-          expect.fail(err);
-        });
-    });
-
-    it('can read a stream', (done) => {
-      const contents = 'test-for-reading';
-      create_known_object('foobar', contents, (store, hash) => {
-        store.open('foobar', 'r')
-          .then(async (stream) => {
-            const data = await read_all(stream);
-            expect(Buffer.compare(data, Buffer.from(contents))).to.equal(0);
-            done();
+        done()
+      })
+    })
+
+    // it('detects the MIME type of a write stream', (done) => {
+    // 	const contents = fs.readFileSync('../../storage-node_new.svg')
+    // 	storage
+    // 		.open('mime-test', 'w')
+    // 		.then((stream) => {
+    // 			let fileInfo
+    // 			stream.on('fileInfo', (info) => {
+    // 				// Could filter & abort here now, but we're just going to set this,
+    // 				// and expect it to be set later...
+    // 				fileInfo = info
+    // 			})
+    //
+    // 			stream.on('finish', () => {
+    // 				stream.commit()
+    // 			})
+    //
+    // 			stream.on('committed', () => {
+    // 				// ... if fileInfo is not set here, there's an issue.
+    // 				expect(fileInfo).to.have.property('mimeType', 'application/xml')
+    // 				expect(fileInfo).to.have.property('ext', 'xml')
+    // 				done()
+    // 			})
+    //
+    // 			if (!stream.write(contents)) {
+    // 				stream.once('drain', () => stream.end())
+    // 			} else {
+    // 				process.nextTick(() => stream.end())
+    // 			}
+    // 		})
+    // 		.catch((err) => {
+    // 			expect.fail(err)
+    // 		})
+    // })
+
+    it('can read a stream', done => {
+      const contents = 'test-for-reading'
+      createKnownObject('foobar', contents, store => {
+        store
+          .open('foobar', 'r')
+          .then(async stream => {
+            const data = await readAll(stream)
+            expect(Buffer.compare(data, Buffer.from(contents))).to.equal(0)
+            done()
           })
-          .catch((err) => {
-            expect.fail(err);
-          });
-      });
-    });
-
-    it('detects the MIME type of a read stream', (done) => {
-      const contents = fs.readFileSync('../../storage-node_new.svg');
-      create_known_object('foobar', contents, (store, hash) => {
-        store.open('foobar', 'r')
-          .then(async (stream) => {
-            const data = await read_all(stream);
-            expect(contents.length).to.equal(data.length);
-            expect(Buffer.compare(data, contents)).to.equal(0);
-            expect(stream).to.have.property('file_info');
+          .catch(err => {
+            expect.fail(err)
+          })
+      })
+    })
+
+    it('detects the MIME type of a read stream', done => {
+      const contents = fs.readFileSync('../../storage-node_new.svg')
+      createKnownObject('foobar', contents, store => {
+        store
+          .open('foobar', 'r')
+          .then(async stream => {
+            const data = await readAll(stream)
+            expect(contents.length).to.equal(data.length)
+            expect(Buffer.compare(data, contents)).to.equal(0)
+            expect(stream).to.have.property('fileInfo')
 
             // application/xml+svg would be better, but this is good-ish.
-            expect(stream.file_info).to.have.property('mime_type', 'application/xml');
-            expect(stream.file_info).to.have.property('ext', 'xml');
-            done();
+            expect(stream.fileInfo).to.have.property('mimeType', 'application/xml')
+            expect(stream.fileInfo).to.have.property('ext', 'xml')
+            done()
           })
-          .catch((err) => {
-            expect.fail(err);
-          });
-      });
-    });
-
-    it('provides default MIME type for read streams', (done) => {
-      const contents = 'test-for-reading';
-      create_known_object('foobar', contents, (store, hash) => {
-        store.open('foobar', 'r')
-          .then(async (stream) => {
-            const data = await read_all(stream);
-            expect(Buffer.compare(data, Buffer.from(contents))).to.equal(0);
-
-            expect(stream.file_info).to.have.property('mime_type', 'application/octet-stream');
-            expect(stream.file_info).to.have.property('ext', 'bin');
-            done();
+          .catch(err => {
+            expect.fail(err)
           })
-          .catch((err) => {
-            expect.fail(err);
-          });
-      });
-    });
-
+      })
+    })
 
-  });
+    it('provides default MIME type for read streams', done => {
+      const contents = 'test-for-reading'
+      createKnownObject('foobar', contents, store => {
+        store
+          .open('foobar', 'r')
+          .then(async stream => {
+            const data = await readAll(stream)
+            expect(Buffer.compare(data, Buffer.from(contents))).to.equal(0)
+
+            expect(stream.fileInfo).to.have.property('mimeType', 'application/octet-stream')
+            expect(stream.fileInfo).to.have.property('ext', 'bin')
+            done()
+          })
+          .catch(err => {
+            expect.fail(err)
+          })
+      })
+    })
+  })
 
   describe('stat()', () => {
     it('times out for unknown content', async () => {
-      const content = Buffer.from('this-should-not-exist');
-      const x = await storage.ipfs.add(content, { onlyHash: true });
-      const hash = x[0].hash;
+      const content = Buffer.from('this-should-not-exist')
+      const x = await storage.ipfs.add(content, { onlyHash: true })
+      const hash = x[0].hash
 
       // Try to stat this entry; it should time out.
-      expect(storage.stat(hash)).to.eventually.be.rejectedWith('timed out');
-    });
-
-    it('returns stats for a known object', (done) => {
-      const content = 'stat-test';
-      const expected_size = content.length;
-      create_known_object('foobar', content, (store, hash) => {
-        expect(store.stat(hash)).to.eventually.have.property('size', expected_size);
-        done();
-      });
-    });
-  });
+      expect(storage.stat(hash)).to.eventually.be.rejectedWith('timed out')
+    })
+
+    it('returns stats for a known object', done => {
+      const content = 'stat-test'
+      const expectedSize = content.length
+      createKnownObject('foobar', content, (store, hash) => {
+        expect(store.stat(hash)).to.eventually.have.property('size', expectedSize)
+        done()
+      })
+    })
+  })
 
   describe('size()', () => {
     it('times out for unknown content', async () => {
-      const content = Buffer.from('this-should-not-exist');
-      const x = await storage.ipfs.add(content, { onlyHash: true });
-      const hash = x[0].hash;
+      const content = Buffer.from('this-should-not-exist')
+      const x = await storage.ipfs.add(content, { onlyHash: true })
+      const hash = x[0].hash
 
       // Try to get the size of this entry; it should time out.
-      expect(storage.size(hash)).to.eventually.be.rejectedWith('timed out');
-    });
-
-    it('returns the size of a known object', (done) => {
-      create_known_object('foobar', 'stat-test', (store, hash) => {
-        expect(store.size(hash)).to.eventually.equal(15);
-        done();
-      });
-    });
-  });
-});
+      expect(storage.size(hash)).to.eventually.be.rejectedWith('timed out')
+    })
+
+    it('returns the size of a known object', done => {
+      createKnownObject('foobar', 'stat-test', (store, hash) => {
+        expect(store.size(hash)).to.eventually.equal(15)
+        done()
+      })
+    })
+  })
+})

+ 10 - 9
storage-node/packages/util/externalPromise.js

@@ -1,19 +1,20 @@
 /**
- * Returns an object that contains a Promise and exposes its handlers, ie. resolve and reject methods
+ * Creates a new promise.
+ * @return {object} An object that contains a Promise and exposes its handlers, i.e. the resolve and reject methods,
  * so it can be fulfilled 'externally'. This is a bit of a hack, but the most useful application is when
  * concurrent async operations are initiated that are all waiting on the same result value.
  */
-function newExternallyControlledPromise () {
-    let resolve, reject
+function newExternallyControlledPromise() {
+  let resolve, reject
 
-    const promise = new Promise((res, rej) => {
-      resolve = res
-      reject = rej
-    })
+  const promise = new Promise((res, rej) => {
+    resolve = res
+    reject = rej
+  })
 
-    return ({ resolve, reject, promise })
+  return { resolve, reject, promise }
 }
 
 module.exports = {
-    newExternallyControlledPromise
+  newExternallyControlledPromise,
 }
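
A minimal usage sketch for the helper above, assuming the same @joystream/storage-utils require paths used by the test files in this commit: several concurrent waiters share one promise that a single producer later fulfills.

const { newExternallyControlledPromise } = require('@joystream/storage-utils/externalPromise')

const shared = newExternallyControlledPromise()

// Any number of concurrent operations can await the same pending result.
async function waiter(label) {
  const value = await shared.promise
  console.log(label, 'received', value)
}

waiter('first')
waiter('second')

// A single producer fulfills all waiters at once, 'externally'.
shared.resolve(42)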

+ 19 - 21
storage-node/packages/util/fs/resolve.js

@@ -16,11 +16,11 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const path = require('path');
+const path = require('path')
 
-const debug = require('debug')('joystream:util:fs:resolve');
+const debug = require('debug')('joystream:util:fs:resolve')
 
 /*
  * Resolves name relative to base, throwing an error if the given
@@ -31,37 +31,35 @@ const debug = require('debug')('joystream:util:fs:resolve');
  * useless for our case because it does not care about breaking out of
  * a base directory.
  */
-function resolve(base, name)
-{
-  debug('Resolving', name);
+function resolve(base, name) {
+  debug('Resolving', name)
 
  // In a first step, we strip leading slashes from the name, because they're
   // just saying "relative to the base" in our use case.
-  var res = name.replace(/^\/+/, '');
-  debug('Stripped', res);
+  let res = name.replace(/^\/+/, '')
+  debug('Stripped', res)
 
   // At this point resolving the path should stay within the base we specify.
  // We do specify a base other than the file system root, because otherwise
  // everything would always resolve relative to the file system root.
-  const test_base = path.join(path.sep, 'test-base');
-  debug('Test base is', test_base);
-  res = path.resolve(test_base, res);
-  debug('Resolved', res);
+  const testBase = path.join(path.sep, 'test-base')
+  debug('Test base is', testBase)
+  res = path.resolve(testBase, res)
+  debug('Resolved', res)
 
   // Ok, we can check for violations now.
-  if (res.slice(0, test_base.length) != test_base) {
-    throw Error(`Name "${name}" cannot be resolved to a repo relative path, aborting!`);
+  if (res.slice(0, testBase.length) !== testBase) {
+    throw Error(`Name "${name}" cannot be resolved to a repo relative path, aborting!`)
   }
 
   // If we strip the base now, we have the relative name resolved.
-  res = res.slice(test_base.length + 1);
-  debug('Relative', res);
+  res = res.slice(testBase.length + 1)
+  debug('Relative', res)
 
   // Finally we can join this relative name to the requested base.
-  var res = path.join(base, res);
-  debug('Result', res);
-  return res;
+  res = path.join(base, res)
+  debug('Result', res)
+  return res
 }
 
-
-module.exports = resolve;
+module.exports = resolve
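
To illustrate the behaviour described in the comments above, a short sketch (the /var/data base is invented for the example; real callers pass their own repo paths):

const resolve = require('@joystream/storage-utils/fs/resolve')

console.log(resolve('/var/data', '/foo/bar'))   // '/var/data/foo/bar' - leading slashes are taken as relative to the base
console.log(resolve('/var/data', 'foo/../baz')) // '/var/data/baz' - relative paths may move around inside the base

try {
  resolve('/var/data', '../escape') // breaking out of the base...
} catch (err) {
  console.log(err.message) // ...throws instead of resolving
}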

+ 53 - 62
storage-node/packages/util/fs/walk.js

@@ -16,60 +16,54 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
-
-const fs = require('fs');
-const path = require('path');
-
-const debug = require('debug')('joystream:util:fs:walk');
-
-class Walker
-{
-  constructor(archive, base, cb)
-  {
-    this.archive = archive;
-    this.base = base;
-    this.slice_offset = this.base.length;
-    if (this.base[this.slice_offset - 1] != '/') {
-      this.slice_offset += 1;
+'use strict'
+
+const fs = require('fs')
+const path = require('path')
+
+const debug = require('debug')('joystream:util:fs:walk')
+
+class Walker {
+  constructor(archive, base, cb) {
+    this.archive = archive
+    this.base = base
+    this.slice_offset = this.base.length
+    if (this.base[this.slice_offset - 1] !== '/') {
+      this.slice_offset += 1
     }
-    this.cb = cb;
-    this.pending = 0;
+    this.cb = cb
+    this.pending = 0
   }
 
   /*
    * Check pending
    */
-  check_pending(name)
-  {
+  checkPending(name) {
     // Decrease pending count again.
-    this.pending -= 1;
-    debug('Finishing', name, 'decreases pending to', this.pending);
+    this.pending -= 1
+    debug('Finishing', name, 'decreases pending to', this.pending)
     if (!this.pending) {
-      debug('No more pending.');
-      this.cb(null);
+      debug('No more pending.')
+      this.cb(null)
     }
   }
 
   /*
    * Helper function for walk; split out because it's used in two places.
    */
-  report_and_recurse(relname, fname, lstat, linktarget)
-  {
+  reportAndRecurse(relname, fname, lstat, linktarget) {
     // First report the value
-    this.cb(null, relname, lstat, linktarget);
+    this.cb(null, relname, lstat, linktarget)
 
     // Recurse
     if (lstat.isDirectory()) {
-      this.walk(fname);
+      this.walk(fname)
     }
 
-    this.check_pending(fname);
+    this.checkPending(fname)
   }
 
-
-  walk(dir)
-  {
+  walk(dir) {
     // This is a little hacky - since readdir() may take a while, and we don't
     // want the pending count to drop to zero before it's finished, we bump
     // it up and down while readdir() does its job.
@@ -78,51 +72,49 @@ class Walker
     // pending count still has a value.
     // Note that in order not to hang on empty directories, we need to
     // explicitly check the pending count in cases when there are no files.
-    this.pending += 1;
+    this.pending += 1
     this.archive.readdir(dir, (err, files) => {
       if (err) {
-        this.cb(err);
-        return;
+        this.cb(err)
+        return
       }
 
       // More pending data.
-      this.pending += files.length;
-      debug('Reading', dir, 'bumps pending to', this.pending);
+      this.pending += files.length
+      debug('Reading', dir, 'bumps pending to', this.pending)
 
-      files.forEach((name) => {
-        const fname = path.resolve(dir, name);
+      files.forEach(name => {
+        const fname = path.resolve(dir, name)
         this.archive.lstat(fname, (err2, lstat) => {
           if (err2) {
-            this.cb(err2);
-            return;
+            this.cb(err2)
+            return
           }
 
           // The base is always prefixed, so a simple string slice should do.
-          const relname = fname.slice(this.slice_offset);
+          const relname = fname.slice(this.slice_offset)
 
           // We have a symbolic link? Resolve it.
           if (lstat.isSymbolicLink()) {
             this.archive.readlink(fname, (err3, linktarget) => {
               if (err3) {
-                this.cb(err3);
-                return;
+                this.cb(err3)
+                return
               }
 
-              this.report_and_recurse(relname, fname, lstat, linktarget);
-            });
+              this.reportAndRecurse(relname, fname, lstat, linktarget)
+            })
+          } else {
+            this.reportAndRecurse(relname, fname, lstat)
           }
-          else {
-            this.report_and_recurse(relname, fname, lstat);
-          }
-        });
-      });
+        })
+      })
 
-      this.check_pending(dir);
-    });
+      this.checkPending(dir)
+    })
   }
 }
 
-
 /*
  * Recursively walk a file system hierarchy (in undefined order), returning all
  * entries via the callback(err, relname, lstat, [linktarget]). The name relative
@@ -134,15 +126,14 @@ class Walker
  *
  * The callback is invoked one last time without data to signal the end of data.
  */
-module.exports = function(base, archive, cb)
-{
+module.exports = function(base, archive, cb) {
   // Archive is optional and defaults to fs, but cb is not.
   if (!cb) {
-    cb = archive;
-    archive = fs;
+    cb = archive
+    archive = fs
   }
 
-  const resolved = path.resolve(base);
-  const w = new Walker(archive, resolved, cb);
-  w.walk(resolved);
-};
+  const resolved = path.resolve(base)
+  const w = new Walker(archive, resolved, cb)
+  w.walk(resolved)
+}
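
A small sketch of driving the walker (the ./data directory is hypothetical; note that the final callback invocation carries no name, signalling the end of data):

const fswalk = require('@joystream/storage-utils/fs/walk')

const entries = []
fswalk('./data', (err, relname, lstat, linktarget) => {
  if (err) throw err
  if (relname) {
    // One callback per entry; directories are recursed into automatically.
    entries.push({ relname, isDirectory: lstat.isDirectory(), linktarget })
    return
  }
  // No relname means the walk is complete.
  console.log('walked', entries.length, 'entries')
})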

+ 46 - 55
storage-node/packages/util/lru.js

@@ -16,111 +16,102 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const DEFAULT_CAPACITY = 100;
+const DEFAULT_CAPACITY = 100
 
-const debug = require('debug')('joystream:util:lru');
+const debug = require('debug')('joystream:util:lru')
 
 /*
  * Simple least recently used cache.
  */
-class LRUCache
-{
-  constructor(capacity = DEFAULT_CAPACITY)
-  {
-    this.capacity = capacity;
-    this.clear();
+class LRUCache {
+  constructor(capacity = DEFAULT_CAPACITY) {
+    this.capacity = capacity
+    this.clear()
   }
 
   /*
   * Return the entry with the given key, and update its usage.
    */
-  get(key)
-  {
-    const val = this.store.get(key);
+  get(key) {
+    const val = this.store.get(key)
     if (val) {
-      this.access.set(key, Date.now());
+      this.access.set(key, Date.now())
     }
-    return val;
+    return val
   }
 
   /*
   * Return true if the key is in the cache, false otherwise.
    */
-  has(key)
-  {
-    return this.store.has(key);
+  has(key) {
+    return this.store.has(key)
   }
 
   /*
    * Put a value into the cache.
    */
-  put(key, value)
-  {
-    this.store.set(key, value);
-    this.access.set(key, Date.now());
-    this._prune();
+  put(key, value) {
+    this.store.set(key, value)
+    this.access.set(key, Date.now())
+    this._prune()
   }
 
   /*
    * Delete a value from the cache.
    */
-  del(key)
-  {
-    this.store.delete(key);
-    this.access.delete(key);
+  del(key) {
+    this.store.delete(key)
+    this.access.delete(key)
   }
 
   /*
    * Current size of the cache
    */
-  size()
-  {
-    return this.store.size;
+  size() {
+    return this.store.size
   }
 
   /*
    * Clear the LRU cache entirely.
    */
-  clear()
-  {
-    this.store = new Map();
-    this.access = new Map();
+  clear() {
+    this.store = new Map()
+    this.access = new Map()
   }
 
   /*
    * Internal pruning function.
    */
-  _prune()
-  {
-    debug('About to prune; have', this.store.size, 'and capacity is', this.capacity);
+  _prune() {
+    debug('About to prune; have', this.store.size, 'and capacity is', this.capacity)
 
-    var sorted = Array.from(this.access.entries());
+    const sorted = Array.from(this.access.entries())
     sorted.sort((first, second) => {
-      if (first[1] == second[1]) {
-        return 0;
+      if (first[1] === second[1]) {
+        return 0
       }
-      return (first[1] < second[1] ? -1 : 1);
-    });
-    debug('Sorted keys are:', sorted);
+      return first[1] < second[1] ? -1 : 1
+    })
+    debug('Sorted keys are:', sorted)
 
-    debug('Have to prune', this.store.size - this.capacity, 'items.');
-    var idx = 0;
-    var to_prune = [];
-    while (idx < sorted.length && to_prune.length < (this.store.size - this.capacity)) {
-      to_prune.push(sorted[idx][0]);
-      ++idx;
+    debug('Have to prune', this.store.size - this.capacity, 'items.')
+    let idx = 0
+    const toPrune = []
+    while (idx < sorted.length && toPrune.length < this.store.size - this.capacity) {
+      toPrune.push(sorted[idx][0])
+      ++idx
     }
 
-    to_prune.forEach((key) => {
-      this.store.delete(key);
-      this.access.delete(key);
-    });
-    debug('Size after pruning', this.store.size);
+    toPrune.forEach(key => {
+      this.store.delete(key)
+      this.access.delete(key)
+    })
+    debug('Size after pruning', this.store.size)
   }
 }
 
 module.exports = {
-  LRUCache: LRUCache,
-};
+  LRUCache,
+}
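
Pruning keeps the most recently accessed keys, as in this sketch. Access times have millisecond resolution, which is also why the unit tests further down sleep between operations:

const { LRUCache } = require('@joystream/storage-utils/lru')

const sleep = ms => new Promise(resolve => setTimeout(resolve, ms))

async function demo() {
  const cache = new LRUCache(2) // capacity of two entries

  cache.put('a', 1)
  await sleep(2) // let the access timestamps differ
  cache.put('b', 2)
  await sleep(2)
  cache.get('a')    // touching 'a' makes 'b' the least recently used entry
  await sleep(2)
  cache.put('c', 3) // over capacity, so 'b' is pruned

  console.log(cache.has('a'), cache.has('b'), cache.has('c')) // true false true
}

demo()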

+ 46 - 51
storage-node/packages/util/pagination.js

@@ -16,12 +16,12 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const debug = require('debug')('joystream:middleware:pagination');
+const debug = require('debug')('joystream:middleware:pagination')
 
 // Pagination definitions
-const _api_defs = {
+const apiDefs = {
   parameters: {
     paginationLimit: {
       name: 'limit',
@@ -50,7 +50,7 @@ const _api_defs = {
       type: 'object',
       required: ['self'],
       properties: {
-        'self': {
+        self: {
           type: 'string',
         },
         next: {
@@ -68,7 +68,7 @@ const _api_defs = {
       },
     },
   },
-};
+}
 
 /**
  * Silly pagination because it's faster than getting other modules to work.
@@ -78,86 +78,81 @@ const _api_defs = {
  *   -> Validates pagination parameters
  * - apiDoc.responses.200.schema.pagination = pagination.response
  *   -> Generates pagination info on response
- * - paginate(req, res, [last_offset])
+ * - paginate(req, res, [lastOffset])
  *   -> add (valid) pagination fields to response object
- *      If last_offset is given, create a last link with that offset
+ *      If lastOffset is given, create a last link with that offset
  **/
 module.exports = {
-
   // Add pagination parameters and pagination info responses.
   parameters: [
-    { '$ref': '#/components/parameters/paginationLimit' },
-    { '$ref': '#/components/parameters/paginationOffset' },
-
+    { $ref: '#/components/parameters/paginationLimit' },
+    { $ref: '#/components/parameters/paginationOffset' },
   ],
 
   response: {
-    '$ref': '#/components/schema/PaginationInfo'
+    $ref: '#/components/schema/PaginationInfo',
   },
 
   // Update swagger/openapi specs with our own parameters and definitions
-  openapi: function(api)
-  {
-    api.components = api.components || {};
-    api.components.parameters = { ...api.components.parameters || {} , ..._api_defs.parameters };
-    api.components.schemas = { ...api.components.schemas || {}, ..._api_defs.schemas };
-    return api;
+  openapi(api) {
+    api.components = api.components || {}
+    api.components.parameters = { ...(api.components.parameters || {}), ...apiDefs.parameters }
+    api.components.schemas = { ...(api.components.schemas || {}), ...apiDefs.schemas }
+    return api
   },
 
   // Pagination function
-  paginate: function(req, res, last_offset)
-  {
+  paginate(req, res, lastOffset) {
     // Skip if the response is not an object.
-    if (Object.prototype.toString.call(res) != "[object Object]") {
-      debug('Cannot paginate non-objects.');
-      return res;
+    if (Object.prototype.toString.call(res) !== '[object Object]') {
+      debug('Cannot paginate non-objects.')
+      return res
     }
 
     // Defaults for parameters
-    var offset = req.query.offset || 0;
-    var limit = req.query.limit || 20;
-    debug('Create pagination links from offset=' + offset, 'limit=' + limit);
+    const offset = req.query.offset || 0
+    const limit = req.query.limit || 20
+    debug('Create pagination links from offset=' + offset, 'limit=' + limit)
 
     // Parse current url
-    const url = require('url');
-    var req_url = url.parse(req.protocol + '://' + req.get('host') + req.originalUrl);
-    var params = new url.URLSearchParams(req_url.query);
+    const url = require('url')
+    const reqUrl = url.parse(req.protocol + '://' + req.get('host') + req.originalUrl)
+    const params = new url.URLSearchParams(reqUrl.query)
 
     // Pagination object
-    var pagination = {
-      'self': req_url.href,
+    const pagination = {
+      self: reqUrl.href,
     }
 
-    var prev = offset - limit;
+    const prev = offset - limit
     if (prev >= 0) {
-      params.set('offset', prev);
-      req_url.search = params.toString();
-      pagination['prev'] = url.format(req_url);
-
+      params.set('offset', prev)
+      reqUrl.search = params.toString()
+      pagination.prev = url.format(reqUrl)
     }
 
-    var next = offset + limit;
+    const next = offset + limit
     if (next >= 0) {
-      params.set('offset', next);
-      req_url.search = params.toString();
-      pagination['next'] = url.format(req_url);
+      params.set('offset', next)
+      reqUrl.search = params.toString()
+      pagination.next = url.format(reqUrl)
     }
 
-    if (last_offset) {
-      params.set('offset', last_offset);
-      req_url.search = params.toString();
-      pagination['last'] = url.format(req_url);
+    if (lastOffset) {
+      params.set('offset', lastOffset)
+      reqUrl.search = params.toString()
+      pagination.last = url.format(reqUrl)
     }
 
     // First
-    params.set('offset', 0);
-    req_url.search = params.toString();
-    pagination['first'] = url.format(req_url);
+    params.set('offset', 0)
+    reqUrl.search = params.toString()
+    pagination.first = url.format(reqUrl)
 
-    debug('pagination', pagination);
+    debug('pagination', pagination)
 
     // Now set pagination values in response.
-    res.pagination = pagination;
-    return res;
+    res.pagination = pagination
+    return res
   },
-};
+}
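
In isolation the decorator can be exercised with a mocked request, much as the unit tests further down do (the URL, query values and lastOffset of 50 are invented):

const mockHttp = require('node-mocks-http')
const pagination = require('@joystream/storage-utils/pagination')

const req = mockHttp.createRequest({
  method: 'GET',
  url: '/foo?limit=10&offset=10',
  query: { limit: 10, offset: 10 }, // numbers, as in the tests below
  headers: { host: 'localhost' },
  protocol: 'http',
})

const res = pagination.paginate(req, {}, 50)
// res.pagination now carries self, first, next, prev and last links.
console.log(res.pagination)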

+ 207 - 243
storage-node/packages/util/ranges.js

@@ -16,12 +16,12 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const uuid = require('uuid');
-const stream_buf = require('stream-buffers');
+const uuid = require('uuid')
+const streamBuf = require('stream-buffers')
 
-const debug = require('debug')('joystream:util:ranges');
+const debug = require('debug')('joystream:util:ranges')
 
 /*
  * Range parsing
@@ -31,109 +31,100 @@ const debug = require('debug')('joystream:util:ranges');
  * Parse a range string, e.g. '0-100' or '-100' or '0-'. Return the values
  * in an array of int or undefined (if not provided).
  */
-function _parse_range(range)
-{
-  var matches = range.match(/^(\d+-\d+|\d+-|-\d+|\*)$/u);
+function parseRange(range) {
+  const matches = range.match(/^(\d+-\d+|\d+-|-\d+|\*)$/u)
   if (!matches) {
-    throw new Error(`Not a valid range: ${range}`);
+    throw new Error(`Not a valid range: ${range}`)
   }
 
-  var vals = matches[1].split('-').map((v) => {
-    return v === '*' || v === '' ? undefined : parseInt(v, 10);
-  });
+  const vals = matches[1].split('-').map(v => {
+    return v === '*' || v === '' ? undefined : parseInt(v, 10)
+  })
 
   if (vals[1] <= vals[0]) {
-    throw new Error(`Invalid range: start "${vals[0]}" must be before end "${vals[1]}".`);
+    throw new Error(`Invalid range: start "${vals[0]}" must be before end "${vals[1]}".`)
   }
 
-  return [vals[0], vals[1]];
+  return [vals[0], vals[1]]
 }
 
-
 /*
  * Parse a range header value, e.g. unit=ranges, where ranges
- * are a comman separated list of individual ranges, and unit is any
+ * are a comma separated list of individual ranges, and unit is any
  * custom unit string. If the unit (and equal sign) are not given, assume
  * 'bytes'.
  */
-function parse(range_str)
-{
-  var res = {};
-  debug('Parse range header value:', range_str);
-  var matches = range_str.match(/^(([^\s]+)=)?((?:(?:\d+-\d+|-\d+|\d+-),?)+)$/u)
+function parse(rangeStr) {
+  const res = {}
+  debug('Parse range header value:', rangeStr)
+  const matches = rangeStr.match(/^(([^\s]+)=)?((?:(?:\d+-\d+|-\d+|\d+-),?)+)$/u)
   if (!matches) {
-    throw new Error(`Not a valid range header: ${range_str}`);
+    throw new Error(`Not a valid range header: ${rangeStr}`)
   }
 
-  res.unit = matches[2] || 'bytes';
-  res.range_str = matches[3];
-  res.ranges = [];
+  res.unit = matches[2] || 'bytes'
+  res.rangeStr = matches[3]
+  res.ranges = []
 
   // Parse individual ranges
-  var ranges = []
-  res.range_str.split(',').forEach((range) => {
-    ranges.push(_parse_range(range));
-  });
+  const ranges = []
+  res.rangeStr.split(',').forEach(range => {
+    ranges.push(parseRange(range))
+  })
 
   // Merge ranges into result.
-  ranges.forEach((new_range) => {
-    debug('Found range:', new_range);
+  ranges.forEach(newRange => {
+    debug('Found range:', newRange)
 
-    var is_merged = false;
-    for (var i in res.ranges) {
-      var old_range = res.ranges[i];
+    let isMerged = false
+    for (const i in res.ranges) {
+      const oldRange = res.ranges[i]
 
       // Skip if the new range is fully separate from the old range.
-      if (old_range[1] + 1 < new_range[0] || new_range[1] + 1 < old_range[0]) {
-        debug('Range does not overlap with', old_range);
-        continue;
+      if (oldRange[1] + 1 < newRange[0] || newRange[1] + 1 < oldRange[0]) {
+        debug('Range does not overlap with', oldRange)
+        continue
       }
 
       // If we know they're adjacent or overlapping, we construct the
       // merged range from the lower start and the higher end of both
       // ranges.
-      var merged = [
-        Math.min(old_range[0], new_range[0]),
-        Math.max(old_range[1], new_range[1])
-      ];
-      res.ranges[i] = merged;
-      is_merged = true;
-      debug('Merged', new_range, 'into', old_range, 'as', merged);
+      const merged = [Math.min(oldRange[0], newRange[0]), Math.max(oldRange[1], newRange[1])]
+      res.ranges[i] = merged
+      isMerged = true
+      debug('Merged', newRange, 'into', oldRange, 'as', merged)
     }
 
-    if (!is_merged) {
-      debug('Non-overlapping range!');
-      res.ranges.push(new_range);
+    if (!isMerged) {
+      debug('Non-overlapping range!')
+      res.ranges.push(newRange)
     }
-  });
+  })
 
   // Finally, sort ranges
   res.ranges.sort((first, second) => {
     if (first[0] === second[0]) {
       // Should not happen due to merging.
-      return 0;
+      return 0
     }
-    return (first[0] < second[0]) ? -1 : 1;
-  });
+    return first[0] < second[0] ? -1 : 1
+  })
 
-  debug('Result of parse is', res);
-  return res;
+  debug('Result of parse is', res)
+  return res
 }
 
-
 /*
  * Async version of parse().
  */
-function parseAsync(range_str, cb)
-{
+function parseAsync(rangeStr, cb) {
   try {
-    return cb(parse(range_str));
+    return cb(parse(rangeStr))
   } catch (err) {
-    return cb(null, err);
+    return cb(null, err)
   }
 }
 
-
 /*
  * Range streaming
  */
@@ -150,193 +141,178 @@ function parseAsync(range_str, cb)
  * with file system based streams. We'll see how likely that's going to be in
  * future.
  */
-class RangeSender
-{
-  constructor(response, stream, opts, end_callback)
-  {
+class RangeSender {
+  constructor(response, stream, opts, endCallback) {
     // Options
-    this.name = opts.name || 'content.bin';
-    this.type = opts.type || 'application/octet-stream';
-    this.size = opts.size;
-    this.ranges = opts.ranges;
-    this.download = opts.download || false;
+    this.name = opts.name || 'content.bin'
+    this.type = opts.type || 'application/octet-stream'
+    this.size = opts.size
+    this.ranges = opts.ranges
+    this.download = opts.download || false
 
     // Range handling related state.
-    this.read_offset = 0;             // Nothing read so far
-    this.range_index = -1;            // No range index yet.
-    this.range_boundary = undefined;  // Generate boundary when needed.
+    this.readOffset = 0 // Nothing read so far
+    this.rangeIndex = -1 // No range index yet.
+    this.rangeBoundary = undefined // Generate boundary when needed.
 
     // Event handlers & state
-    this.handlers = {};
-    this.opened = false;
+    this.handlers = {}
+    this.opened = false
 
-    debug('RangeSender:', this);
+    debug('RangeSender:', this)
     if (opts.ranges) {
-      debug('Parsed ranges:', opts.ranges.ranges);
+      debug('Parsed ranges:', opts.ranges.ranges)
     }
 
     // Parameters
-    this.response = response;
-    this.stream = stream;
-    this.opts = opts;
-    this.end_callback = end_callback;
+    this.response = response
+    this.stream = stream
+    this.opts = opts
+    this.endCallback = endCallback
   }
 
-  on_error(err)
-  {
+  onError(err) {
     // Assume hiding the actual error is best, and default to 404.
-    debug('Error:', err);
+    debug('Error:', err)
     if (!this.response.headersSent) {
       this.response.status(err.code || 404).send({
-        message: err.message || `File not found: ${this.name}`
-      });
+        message: err.message || `File not found: ${this.name}`,
+      })
     }
-    if (this.end_callback) {
-      this.end_callback(err);
+    if (this.endCallback) {
+      this.endCallback(err)
     }
   }
 
-  on_end()
-  {
-    debug('End of stream.');
-    this.response.end();
-    if (this.end_callback) {
-      this.end_callback();
+  onEnd() {
+    debug('End of stream.')
+    this.response.end()
+    if (this.endCallback) {
+      this.endCallback()
     }
   }
 
-
   // **** No ranges
-  on_open_no_range()
-  {
+  onOpenNoRange() {
     // File got opened, so we can set headers/status
-    debug('Open succeeded:', this.name, this.type);
-    this.opened = true;
+    debug('Open succeeded:', this.name, this.type)
+    this.opened = true
 
-    this.response.status(200);
-    this.response.contentType(this.type);
-    this.response.header('Accept-Ranges', 'bytes');
-    this.response.header('Content-Transfer-Encoding', 'binary');
+    this.response.status(200)
+    this.response.contentType(this.type)
+    this.response.header('Accept-Ranges', 'bytes')
+    this.response.header('Content-Transfer-Encoding', 'binary')
 
     if (this.download) {
-      this.response.header('Content-Disposition', `attachment; filename="${this.name}"`);
-    }
-    else {
-      this.response.header('Content-Disposition', 'inline');
+      this.response.header('Content-Disposition', `attachment; filename="${this.name}"`)
+    } else {
+      this.response.header('Content-Disposition', 'inline')
     }
 
     if (this.size) {
-      this.response.header('Content-Length', this.size);
+      this.response.header('Content-Length', this.size)
     }
   }
 
-
-  on_data_no_range(chunk)
-  {
+  onDataNoRange(chunk) {
     if (!this.opened) {
-      this.handlers['open']();
+      this.handlers.open()
     }
 
     // As simple as it can be.
-    this.response.write(Buffer.from(chunk, 'binary'));
+    this.response.write(Buffer.from(chunk, 'binary'))
   }
 
   // *** With ranges
-  next_range_headers()
-  {
+  nextRangeHeaders() {
     // Next range
-    this.range_index += 1;
-    if (this.range_index >= this.ranges.ranges.length) {
-      debug('Cannot advance range index; we are done.');
-      return undefined;
+    this.rangeIndex += 1
+    if (this.rangeIndex >= this.ranges.ranges.length) {
+      debug('Cannot advance range index; we are done.')
+      return undefined
     }
 
     // Calculate this range's size.
-    var range = this.ranges.ranges[this.range_index];
-    var total_size;
+    const range = this.ranges.ranges[this.rangeIndex]
+    let totalSize
     if (this.size) {
-      total_size = this.size;
+      totalSize = this.size
     }
     if (typeof range[0] === 'undefined') {
-      range[0] = 0;
+      range[0] = 0
     }
     if (typeof range[1] === 'undefined') {
       if (this.size) {
-        range[1] = total_size - 1;
+        range[1] = totalSize - 1
       }
     }
 
-    var send_size;
+    let sendSize
     if (typeof range[0] !== 'undefined' && typeof range[1] !== 'undefined') {
-      send_size = range[1] - range[0] + 1;
+      sendSize = range[1] - range[0] + 1
     }
 
     // Write headers, but since we may be in a multipart situation, write them
     // explicitly to the stream.
-    var start = (typeof range[0] === 'undefined') ? '' : `${range[0]}`;
-    var end = (typeof range[1] === 'undefined') ? '' : `${range[1]}`;
-
-    var size_str;
-    if (total_size) {
-      size_str = `${total_size}`;
-    }
-    else {
-      size_str = '*';
+    const start = typeof range[0] === 'undefined' ? '' : `${range[0]}`
+    const end = typeof range[1] === 'undefined' ? '' : `${range[1]}`
+
+    let sizeStr
+    if (totalSize) {
+      sizeStr = `${totalSize}`
+    } else {
+      sizeStr = '*'
     }
 
-    var ret = {
-      'Content-Range': `bytes ${start}-${end}/${size_str}`,
+    const ret = {
+      'Content-Range': `bytes ${start}-${end}/${sizeStr}`,
       'Content-Type': `${this.type}`,
-    };
-    if (send_size) {
-      ret['Content-Length'] = `${send_size}`;
     }
-    return ret;
+    if (sendSize) {
+      ret['Content-Length'] = `${sendSize}`
+    }
+    return ret
   }
 
-
-  next_range()
-  {
-    if (this.ranges.ranges.length == 1) {
-      debug('Cannot start new range; only one requested.');
-      this.stream.off('data', this.handlers['data']);
-      return false;
+  nextRange() {
+    if (this.ranges.ranges.length === 1) {
+      debug('Cannot start new range; only one requested.')
+      this.stream.off('data', this.handlers.data)
+      return false
     }
 
-    var headers = this.next_range_headers();
+    const headers = this.nextRangeHeaders()
 
     if (headers) {
-      var header_buf = new stream_buf.WritableStreamBuffer();
+      const onDataRanges = new streamBuf.WritableStreamBuffer()
       // We start a range with a boundary.
-      header_buf.write(`\r\n--${this.range_boundary}\r\n`);
+      onDataRanges.write(`\r\n--${this.rangeBoundary}\r\n`)
 
       // The we write the range headers.
-      for (var header in headers) {
-        header_buf.write(`${header}: ${headers[header]}\r\n`);
+      for (const header in headers) {
+        onDataRanges.write(`${header}: ${headers[header]}\r\n`)
       }
-      header_buf.write('\r\n');
-      this.response.write(header_buf.getContents());
-      debug('New range started.');
-      return true;
+      onDataRanges.write('\r\n')
+      this.response.write(onDataRanges.getContents())
+      debug('New range started.')
+      return true
     }
 
     // No headers means we're finishing the last range.
-    this.response.write(`\r\n--${this.range_boundary}--\r\n`);
-    debug('End of ranges sent.');
-    this.stream.off('data', this.handlers['data']);
-    return false;
+    this.response.write(`\r\n--${this.rangeBoundary}--\r\n`)
+    debug('End of ranges sent.')
+    this.stream.off('data', this.handlers.data)
+    return false
   }
 
-
-  on_open_ranges()
-  {
+  onOpenRanges() {
     // File got opened, so we can set headers/status
-    debug('Open succeeded:', this.name, this.type);
-    this.opened = true;
+    debug('Open succeeded:', this.name, this.type)
+    this.opened = true
 
-    this.response.header('Accept-Ranges', 'bytes');
-    this.response.header('Content-Transfer-Encoding', 'binary');
-    this.response.header('Content-Disposition', 'inline');
+    this.response.header('Accept-Ranges', 'bytes')
+    this.response.header('Content-Transfer-Encoding', 'binary')
+    this.response.header('Content-Disposition', 'inline')
 
     // For single ranges, the content length should be the size of the
     // range. For multiple ranges, we don't send a content length
@@ -344,23 +320,21 @@ class RangeSender
     //
     // Similarly, the type is different whether or not there is more than
     // one range.
-    if (this.ranges.ranges.length == 1) {
-      this.response.writeHead(206, 'Partial Content', this.next_range_headers());
-    }
-    else {
-      this.range_boundary = uuid.v4();
-      var headers = {
-        'Content-Type': `multipart/byteranges; boundary=${this.range_boundary}`,
-      };
-      this.response.writeHead(206, 'Partial Content', headers);
-      this.next_range();
+    if (this.ranges.ranges.length === 1) {
+      this.response.writeHead(206, 'Partial Content', this.nextRangeHeaders())
+    } else {
+      this.rangeBoundary = uuid.v4()
+      const headers = {
+        'Content-Type': `multipart/byteranges; boundary=${this.rangeBoundary}`,
+      }
+      this.response.writeHead(206, 'Partial Content', headers)
+      this.nextRange()
     }
   }
 
-  on_data_ranges(chunk)
-  {
+  onDataRanges(chunk) {
     if (!this.opened) {
-      this.handlers['open']();
+      this.handlers.open()
     }
     // Crap, node.js streams are stupid. No guarantee for seek support. Sure,
     // that makes node.js easier to implement, but offloads everything onto the
@@ -372,121 +346,111 @@ class RangeSender
     //
     // The simplest optimization would be, at every range start, to seek() to the
     // start.
-    var chunk_range = [this.read_offset, this.read_offset + chunk.length - 1];
-    debug('= Got chunk with byte range', chunk_range);
+    const chunkRange = [this.readOffset, this.readOffset + chunk.length - 1]
+    debug('= Got chunk with byte range', chunkRange)
     while (true) {
-      var req_range = this.ranges.ranges[this.range_index];
-      if (!req_range) {
-        break;
+      let reqRange = this.ranges.ranges[this.rangeIndex]
+      if (!reqRange) {
+        break
       }
-      debug('Current requested range is', req_range);
-      if (!req_range[1]) {
-        req_range = [req_range[0], Number.MAX_SAFE_INTEGER];
-        debug('Treating as', req_range);
+      debug('Current requested range is', reqRange)
+      if (!reqRange[1]) {
+        reqRange = [reqRange[0], Number.MAX_SAFE_INTEGER]
+        debug('Treating as', reqRange)
       }
 
       // No overlap between the chunk and the requested range; don't write.
-      if (chunk_range[1] < req_range[0] || chunk_range[0] > req_range[1]) {
-        debug('Ignoring chunk; it is out of range.');
-        break;
+      if (chunkRange[1] < reqRange[0] || chunkRange[0] > reqRange[1]) {
+        debug('Ignoring chunk; it is out of range.')
+        break
       }
 
       // Since there is overlap, find the segment that's entirely within the
       // chunk.
-      var segment = [
-        Math.max(chunk_range[0], req_range[0]),
-        Math.min(chunk_range[1], req_range[1]),
-      ];
-      debug('Segment to send within chunk is', segment);
+      const segment = [Math.max(chunkRange[0], reqRange[0]), Math.min(chunkRange[1], reqRange[1])]
+      debug('Segment to send within chunk is', segment)
 
       // Normalize the segment to a chunk offset
-      var start = segment[0] - this.read_offset;
-      var end = segment[1] - this.read_offset;
-      var len = end - start + 1;
-      debug('Offsets into buffer are', [start, end], 'with length', len);
+      const start = segment[0] - this.readOffset
+      const end = segment[1] - this.readOffset
+      const len = end - start + 1
+      debug('Offsets into buffer are', [start, end], 'with length', len)
 
       // Write the slice that we want to write. We first create a buffer from the
       // chunk. Then we slice a new buffer from the same underlying ArrayBuffer,
       // starting at the original buffer's offset, further offset by the segment
       // start. The segment length bounds the end of our slice.
-      var buf = Buffer.from(chunk, 'binary');
-      this.response.write(Buffer.from(buf.buffer, buf.byteOffset + start, len));
+      const buf = Buffer.from(chunk, 'binary')
+      this.response.write(Buffer.from(buf.buffer, buf.byteOffset + start, len))
 
       // If the requested range is finished, we should start the next one.
-      if (req_range[1] > chunk_range[1]) {
-        debug('Chunk is finished, but the requested range is missing bytes.');
-        break;
+      if (reqRange[1] > chunkRange[1]) {
+        debug('Chunk is finished, but the requested range is missing bytes.')
+        break
       }
 
-      if (req_range[1] <= chunk_range[1]) {
-        debug('Range is finished.');
-        if (!this.next_range(segment)) {
-          break;
+      if (reqRange[1] <= chunkRange[1]) {
+        debug('Range is finished.')
+        if (!this.nextRange(segment)) {
+          break
         }
       }
     }
 
     // Update read offset when chunk is finished.
-    this.read_offset += chunk.length;
+    this.readOffset += chunk.length
   }
 
-
-  start()
-  {
+  start() {
     // Before we start streaming, let's ensure our ranges don't contain any
     // without start - if they do, we nuke them all and treat this as a full
     // request.
-    var nuke = false;
+    let nuke = false
     if (this.ranges) {
-      for (var i in this.ranges.ranges) {
+      for (const i in this.ranges.ranges) {
         if (typeof this.ranges.ranges[i][0] === 'undefined') {
-          nuke = true;
-          break;
+          nuke = true
+          break
         }
       }
     }
     if (nuke) {
-      this.ranges = undefined;
+      this.ranges = undefined
     }
 
     // Register callbacks. Store them in a handlers object so we can
     // keep the bound version around for stopping to listen to events.
-    this.handlers['error'] = this.on_error.bind(this);
-    this.handlers['end'] = this.on_end.bind(this);
+    this.handlers.error = this.onError.bind(this)
+    this.handlers.end = this.onEnd.bind(this)
 
     if (this.ranges) {
-      debug('Preparing to handle ranges.');
-      this.handlers['open'] = this.on_open_ranges.bind(this);
-      this.handlers['data'] = this.on_data_ranges.bind(this);
-    }
-    else {
-      debug('No ranges, just send the whole file.');
-      this.handlers['open'] = this.on_open_no_range.bind(this);
-      this.handlers['data'] = this.on_data_no_range.bind(this);
+      debug('Preparing to handle ranges.')
+      this.handlers.open = this.onOpenRanges.bind(this)
+      this.handlers.data = this.onDataRanges.bind(this)
+    } else {
+      debug('No ranges, just send the whole file.')
+      this.handlers.open = this.onOpenNoRange.bind(this)
+      this.handlers.data = this.onDataNoRange.bind(this)
     }
 
-    for (var handler in this.handlers) {
-      this.stream.on(handler, this.handlers[handler]);
+    for (const handler in this.handlers) {
+      this.stream.on(handler, this.handlers[handler])
     }
   }
 }
 
-
-function send(response, stream, opts, end_callback)
-{
-  var sender = new RangeSender(response, stream, opts, end_callback);
-  sender.start();
+function send(response, stream, opts, endCallback) {
+  const sender = new RangeSender(response, stream, opts, endCallback)
+  sender.start()
 }
 
-
 /*
  * Exports
  */
 
-module.exports =
-{
-  parse: parse,
-  parseAsync: parseAsync,
-  RangeSender: RangeSender,
-  send: send,
-};
+module.exports = {
+  parse,
+  parseAsync,
+  RangeSender,
+  send,
+}
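
Parsing behaviour in brief: overlapping and adjacent ranges are merged and the result sorted, as in this sketch (the byte offsets are arbitrary):

const ranges = require('@joystream/storage-utils/ranges')

const parsed = ranges.parse('bytes=200-299,0-99,100-150')
console.log(parsed.unit)   // 'bytes'
console.log(parsed.ranges) // [ [ 0, 150 ], [ 200, 299 ] ] - 0-99 and 100-150 are adjacent, so merged

// send(response, stream, opts, endCallback) then streams those ranges as a
// 206 response, switching to multipart/byteranges when more than one range remains.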

+ 5 - 6
storage-node/packages/util/stripEndingSlash.js

@@ -1,10 +1,9 @@
 // return url with last `/` removed
 function removeEndingForwardSlash(url) {
-    let st = new String(url)
-    if (st.endsWith('/')) {
-        return st.substring(0, st.length - 1);
-    }
-    return st.toString()
+  if (url.endsWith('/')) {
+    return url.substring(0, url.length - 1)
+  }
+  return url.toString()
 }
 
-module.exports = removeEndingForwardSlash
+module.exports = removeEndingForwardSlash
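
For completeness, the simplified function behaves like so (URLs invented for the example, and the require path assumed to follow the package layout above):

const removeEndingForwardSlash = require('@joystream/storage-utils/stripEndingSlash')

console.log(removeEndingForwardSlash('http://localhost:3000/')) // 'http://localhost:3000'
console.log(removeEndingForwardSlash('http://localhost:3000'))  // unchanged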

+ 38 - 50
storage-node/packages/util/test/fs/resolve.js

@@ -16,65 +16,53 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const mocha = require('mocha');
-const expect = require('chai').expect;
-const path = require('path');
+const expect = require('chai').expect
+const path = require('path')
 
-const resolve = require('@joystream/storage-utils/fs/resolve');
+const resolve = require('@joystream/storage-utils/fs/resolve')
 
-function tests(base)
-{
-  it('resolves absolute paths relative to the base', function()
-  {
-    const resolved = resolve(base, '/foo');
-    const relative = path.relative(base, resolved);
-    expect(relative).to.equal('foo');
-  });
+function tests(base) {
+  it('resolves absolute paths relative to the base', function() {
+    const resolved = resolve(base, '/foo')
+    const relative = path.relative(base, resolved)
+    expect(relative).to.equal('foo')
+  })
 
-  it('allows for relative paths that stay in the base', function()
-  {
-    const resolved = resolve(base, 'foo/../bar');
-    const relative = path.relative(base, resolved);
-    expect(relative).to.equal('bar');
-  });
+  it('allows for relative paths that stay in the base', function() {
+    const resolved = resolve(base, 'foo/../bar')
+    const relative = path.relative(base, resolved)
+    expect(relative).to.equal('bar')
+  })
 
-  it('prevents relative paths from breaking out of the base', function()
-  {
-    expect(() => resolve(base, '../foo')).to.throw();
-  });
+  it('prevents relative paths from breaking out of the base', function() {
+    expect(() => resolve(base, '../foo')).to.throw()
+  })
 
-  it('prevents long relative paths from breaking out of the base', function()
-  {
-    expect(() => resolve(base, '../../../foo')).to.throw();
-  });
+  it('prevents long relative paths from breaking out of the base', function() {
+    expect(() => resolve(base, '../../../foo')).to.throw()
+  })
 
-  it('prevents sneaky relative paths from breaking out of the base', function()
-  {
-    expect(() => resolve(base, 'foo/../../../bar')).to.throw();
-  });
+  it('prevents sneaky relative paths from breaking out of the base', function() {
+    expect(() => resolve(base, 'foo/../../../bar')).to.throw()
+  })
 }
 
-describe('util/fs/resolve', function()
-{
-  describe('slash base', function()
-  {
-    tests('/');
-  });
+describe('util/fs/resolve', function() {
+  describe('slash base', function() {
+    tests('/')
+  })
 
-  describe('empty base', function()
-  {
-    tests('');
-  });
+  describe('empty base', function() {
+    tests('')
+  })
 
-  describe('short base', function()
-  {
-    tests('/base');
-  });
+  describe('short base', function() {
+    tests('/base')
+  })
 
-  describe('long base', function()
-  {
-    tests('/this/base/is/very/long/indeed');
-  });
-});
+  describe('long base', function() {
+    tests('/this/base/is/very/long/indeed')
+  })
+})

+ 29 - 31
storage-node/packages/util/test/fs/walk.js

@@ -16,54 +16,52 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const mocha = require('mocha');
-const expect = require('chai').expect;
-const temp = require('temp').track();
+const expect = require('chai').expect
+// Disabling the rule because of the 'temp' package API.
+// eslint-disable-next-line no-unused-vars
+const temp = require('temp').track()
 
-const fs = require('fs');
-const path = require('path');
+const fs = require('fs')
+const path = require('path')
 
-const fswalk = require('@joystream/storage-utils/fs/walk');
+const fswalk = require('@joystream/storage-utils/fs/walk')
 
-function walktest(archive, base, done)
-{
-  var results = new Map();
+function walktest(archive, base, done) {
+  const results = new Map()
 
   fswalk(base, archive, (err, relname, stat, linktarget) => {
-    expect(err).to.be.null;
+    expect(err).to.be.null
 
     if (relname) {
-      results.set(relname, [stat, linktarget]);
-      return;
+      results.set(relname, [stat, linktarget])
+      return
     }
 
     // End of data, do testing
-    const entries = Array.from(results.keys());
-    expect(entries).to.include('foo');
-    expect(results.get('foo')[0].isDirectory()).to.be.true;
+    const entries = Array.from(results.keys())
+    expect(entries).to.include('foo')
+    expect(results.get('foo')[0].isDirectory()).to.be.true
 
-    expect(entries).to.include('bar');
-    expect(results.get('bar')[0].isFile()).to.be.true;
+    expect(entries).to.include('bar')
+    expect(results.get('bar')[0].isFile()).to.be.true
 
     if (archive === fs) {
-      expect(entries).to.include('quux');
-      expect(results.get('quux')[0].isSymbolicLink()).to.be.true;
-      expect(results.get('quux')[1]).to.equal('foo/baz');
+      expect(entries).to.include('quux')
+      expect(results.get('quux')[0].isSymbolicLink()).to.be.true
+      expect(results.get('quux')[1]).to.equal('foo/baz')
     }
 
-    expect(entries).to.include('foo/baz');
-    expect(results.get('foo/baz')[0].isFile()).to.be.true;
+    expect(entries).to.include('foo/baz')
+    expect(results.get('foo/baz')[0].isFile()).to.be.true
 
-    done();
-  });
+    done()
+  })
 }
 
-describe('util/fs/walk', function()
-{
-  it('reports all files in a file system hierarchy', function(done)
-  {
+describe('util/fs/walk', function() {
+  it('reports all files in a file system hierarchy', function(done) {
     walktest(fs, path.resolve(__dirname, '../data'), done)
-  });
-});
+  })
+})

+ 119 - 131
storage-node/packages/util/test/lru.js

@@ -16,149 +16,137 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const mocha = require('mocha');
-const expect = require('chai').expect;
+const expect = require('chai').expect
 
-const lru = require('@joystream/storage-utils/lru');
+const lru = require('@joystream/storage-utils/lru')
 
-const DEFAULT_SLEEP = 1;
-function sleep(ms = DEFAULT_SLEEP)
-{
+const DEFAULT_SLEEP = 1
+function sleep(ms = DEFAULT_SLEEP) {
   return new Promise(resolve => {
     setTimeout(resolve, ms)
   })
 }
 
-describe('util/lru', function()
-{
-  describe('simple usage', function()
-  {
-    it('does not contain keys that were not added', function()
-    {
-      var cache = new lru.LRUCache();
-      expect(cache.size()).to.equal(0);
-
-      var val = cache.get('something');
-      expect(val).to.be.undefined;
-
-      expect(cache.has('something')).to.be.false;
-    });
-
-    it('contains keys that were added', function()
-    {
-      var cache = new lru.LRUCache();
-      cache.put('something', 'yay!');
-      expect(cache.size()).to.equal(1);
-
-      var val = cache.get('something');
-      expect(val).to.be.equal('yay!');
-
-      expect(cache.has('something')).to.be.true;
-    });
-
-    it('does not contain keys that were deleted', function()
-    {
-      var cache = new lru.LRUCache();
-      cache.put('something', 'yay!');
-      expect(cache.size()).to.equal(1);
-      var val = cache.get('something');
-      expect(val).to.be.equal('yay!');
-      expect(cache.has('something')).to.be.true;
-
-      cache.del('something');
-      expect(cache.size()).to.equal(0);
-      val = cache.get('something');
-      expect(val).to.be.undefined;
-      expect(cache.has('something')).to.be.false;
-    });
-
-    it('can be cleared', function()
-    {
-      var cache = new lru.LRUCache();
-      cache.put('something', 'yay!');
-      expect(cache.size()).to.equal(1);
-
-      cache.clear();
-      expect(cache.size()).to.equal(0);
-    });
-  });
-
-  describe('capacity management', function()
-  {
-    it('does not grow beyond capacity', async function()
-    {
-      var cache = new lru.LRUCache(2); // Small capacity
-      expect(cache.size()).to.equal(0);
-
-      cache.put('foo', '42');
-      expect(cache.size()).to.equal(1);
-
-      await sleep();
-
-      cache.put('bar', '42');
-      expect(cache.size()).to.equal(2);
-
-      await sleep();
-
-      cache.put('baz', '42');
-      expect(cache.size()).to.equal(2); // Capacity exceeded
-    });
-
-    it('removes the oldest key when pruning', async function()
-    {
-      var cache = new lru.LRUCache(2); // Small capacity
-      expect(cache.size()).to.equal(0);
-
-      cache.put('foo', '42');
-      expect(cache.size()).to.equal(1);
-      expect(cache.has('foo')).to.be.true;
-
-      await sleep();
-
-      cache.put('bar', '42');
-      expect(cache.size()).to.equal(2);
-      expect(cache.has('foo')).to.be.true;
-      expect(cache.has('bar')).to.be.true;
-
-      await sleep();
-
-      cache.put('baz', '42');
-      expect(cache.size()).to.equal(2); // Capacity exceeded
-      expect(cache.has('bar')).to.be.true;
-      expect(cache.has('baz')).to.be.true;
-    });
-
-    it('updates LRU timestamp when reading', async function()
-    {
-      var cache = new lru.LRUCache(2); // Small capacity
-      expect(cache.size()).to.equal(0);
-
-      cache.put('foo', '42');
-      expect(cache.size()).to.equal(1);
-      expect(cache.has('foo')).to.be.true;
+describe('util/lru', function() {
+  describe('simple usage', function() {
+    it('does not contain keys that were not added', function() {
+      const cache = new lru.LRUCache()
+      expect(cache.size()).to.equal(0)
+
+      const val = cache.get('something')
+      expect(val).to.be.undefined
+
+      expect(cache.has('something')).to.be.false
+    })
+
+    it('contains keys that were added', function() {
+      const cache = new lru.LRUCache()
+      cache.put('something', 'yay!')
+      expect(cache.size()).to.equal(1)
+
+      const val = cache.get('something')
+      expect(val).to.be.equal('yay!')
+
+      expect(cache.has('something')).to.be.true
+    })
+
+    it('does not contain keys that were deleted', function() {
+      const cache = new lru.LRUCache()
+      cache.put('something', 'yay!')
+      expect(cache.size()).to.equal(1)
+      let val = cache.get('something')
+      expect(val).to.be.equal('yay!')
+      expect(cache.has('something')).to.be.true
+
+      cache.del('something')
+      expect(cache.size()).to.equal(0)
+      val = cache.get('something')
+      expect(val).to.be.undefined
+      expect(cache.has('something')).to.be.false
+    })
+
+    it('can be cleared', function() {
+      const cache = new lru.LRUCache()
+      cache.put('something', 'yay!')
+      expect(cache.size()).to.equal(1)
+
+      cache.clear()
+      expect(cache.size()).to.equal(0)
+    })
+  })
+
+  describe('capacity management', function() {
+    it('does not grow beyond capacity', async function() {
+      const cache = new lru.LRUCache(2) // Small capacity
+      expect(cache.size()).to.equal(0)
+
+      cache.put('foo', '42')
+      expect(cache.size()).to.equal(1)
+
+      await sleep()
+
+      cache.put('bar', '42')
+      expect(cache.size()).to.equal(2)
+
+      await sleep()
+
+      cache.put('baz', '42')
+      expect(cache.size()).to.equal(2) // Capacity exceeded
+    })
+
+    it('removes the oldest key when pruning', async function() {
+      const cache = new lru.LRUCache(2) // Small capacity
+      expect(cache.size()).to.equal(0)
+
+      cache.put('foo', '42')
+      expect(cache.size()).to.equal(1)
+      expect(cache.has('foo')).to.be.true
 
-      await sleep();
-
-      cache.put('bar', '42');
-      expect(cache.size()).to.equal(2);
-      expect(cache.has('foo')).to.be.true;
-      expect(cache.has('bar')).to.be.true;
+      await sleep()
 
-      await sleep();
+      cache.put('bar', '42')
+      expect(cache.size()).to.equal(2)
+      expect(cache.has('foo')).to.be.true
+      expect(cache.has('bar')).to.be.true
+
+      await sleep()
+
+      cache.put('baz', '42')
+      expect(cache.size()).to.equal(2) // Capacity exceeded
+      expect(cache.has('bar')).to.be.true
+      expect(cache.has('baz')).to.be.true
+    })
+
+    it('updates LRU timestamp when reading', async function() {
+      const cache = new lru.LRUCache(2) // Small capacity
+      expect(cache.size()).to.equal(0)
+
+      cache.put('foo', '42')
+      expect(cache.size()).to.equal(1)
+      expect(cache.has('foo')).to.be.true
+
+      await sleep()
+
+      cache.put('bar', '42')
+      expect(cache.size()).to.equal(2)
+      expect(cache.has('foo')).to.be.true
+      expect(cache.has('bar')).to.be.true
+
+      await sleep()
 
       // 'foo' is older than 'bar' right now, so should be pruned first. But
       // if we get 'foo', it would be 'bar' that has to go.
-      var _ = cache.get('foo');
+      cache.get('foo')
 
       // Makes debugging a bit more obvious
-      await sleep();
-
-      cache.put('baz', '42');
-      expect(cache.size()).to.equal(2); // Capacity exceeded
-      expect(cache.has('foo')).to.be.true;
-      expect(cache.has('baz')).to.be.true;
-    });
-  });
-});
+      await sleep()
+
+      cache.put('baz', '42')
+      expect(cache.size()).to.equal(2) // Capacity exceeded
+      expect(cache.has('foo')).to.be.true
+      expect(cache.has('baz')).to.be.true
+    })
+  })
+})

+ 59 - 66
storage-node/packages/util/test/pagination.js

@@ -16,63 +16,55 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const mocha = require('mocha');
-const expect = require('chai').expect;
-const mock_http = require('node-mocks-http');
+const expect = require('chai').expect
+const mockHttp = require('node-mocks-http')
 
-const pagination = require('@joystream/storage-utils/pagination');
+const pagination = require('@joystream/storage-utils/pagination')
 
-describe('util/pagination', function()
-{
-  describe('openapi()', function()
-  {
-    it('should add parameters and definitions to an API spec', function()
-    {
-      var api = pagination.openapi({});
+describe('util/pagination', function() {
+  describe('openapi()', function() {
+    it('should add parameters and definitions to an API spec', function() {
+      const api = pagination.openapi({})
 
       // Parameters
-      expect(api).to.have.property('components');
+      expect(api).to.have.property('components')
 
-      expect(api.components).to.have.property('parameters');
-      expect(api.components.parameters).to.have.property('paginationLimit');
+      expect(api.components).to.have.property('parameters')
+      expect(api.components.parameters).to.have.property('paginationLimit')
 
-      expect(api.components.parameters.paginationLimit).to.have.property('name');
-      expect(api.components.parameters.paginationLimit.name).to.equal('limit');
+      expect(api.components.parameters.paginationLimit).to.have.property('name')
+      expect(api.components.parameters.paginationLimit.name).to.equal('limit')
 
-      expect(api.components.parameters.paginationLimit).to.have.property('schema');
-      expect(api.components.parameters.paginationLimit.schema).to.have.property('type');
-      expect(api.components.parameters.paginationLimit.schema.type).to.equal('integer');
+      expect(api.components.parameters.paginationLimit).to.have.property('schema')
+      expect(api.components.parameters.paginationLimit.schema).to.have.property('type')
+      expect(api.components.parameters.paginationLimit.schema.type).to.equal('integer')
 
-      expect(api.components.parameters.paginationOffset).to.have.property('name');
-      expect(api.components.parameters.paginationOffset.name).to.equal('offset');
-
-      expect(api.components.parameters.paginationOffset).to.have.property('schema');
-      expect(api.components.parameters.paginationOffset.schema).to.have.property('type');
-      expect(api.components.parameters.paginationOffset.schema.type).to.equal('integer');
+      expect(api.components.parameters.paginationOffset).to.have.property('name')
+      expect(api.components.parameters.paginationOffset.name).to.equal('offset')
 
+      expect(api.components.parameters.paginationOffset).to.have.property('schema')
+      expect(api.components.parameters.paginationOffset.schema).to.have.property('type')
+      expect(api.components.parameters.paginationOffset.schema.type).to.equal('integer')
 
       // Definitions
-      expect(api.components).to.have.property('schemas');
-      expect(api.components.schemas).to.have.property('PaginationInfo');
+      expect(api.components).to.have.property('schemas')
+      expect(api.components.schemas).to.have.property('PaginationInfo')
 
-      expect(api.components.schemas.PaginationInfo).to.have.property('type');
-      expect(api.components.schemas.PaginationInfo.type).to.equal('object');
+      expect(api.components.schemas.PaginationInfo).to.have.property('type')
+      expect(api.components.schemas.PaginationInfo.type).to.equal('object')
 
-      expect(api.components.schemas.PaginationInfo).to.have.property('properties');
+      expect(api.components.schemas.PaginationInfo).to.have.property('properties')
       expect(api.components.schemas.PaginationInfo.properties)
         .to.be.an('object')
-        .that.has.all.keys('self', 'next', 'prev', 'first', 'last');
-    });
-  });
-
+        .that.has.all.keys('self', 'next', 'prev', 'first', 'last')
+    })
+  })
 
-  describe('paginate()', function()
-  {
-    it('should add pagination links to a response object', function()
-    {
-      var req = mock_http.createRequest({
+  describe('paginate()', function() {
+    it('should add pagination links to a response object', function() {
+      const req = mockHttp.createRequest({
         method: 'GET',
         url: '/foo?limit=10',
         query: {
@@ -82,21 +74,21 @@ describe('util/pagination', function()
           host: 'localhost',
         },
         protocol: 'http',
-      });
+      })
 
-      var res = pagination.paginate(req, {});
+      const res = pagination.paginate(req, {})
 
-      expect(res).to.have.property('pagination')
-        .that.has.all.keys('self', 'first', 'next');
+      expect(res)
+        .to.have.property('pagination')
+        .that.has.all.keys('self', 'first', 'next')
 
-      expect(res.pagination.self).to.equal('http://localhost/foo?limit=10');
-      expect(res.pagination.first).to.equal('http://localhost/foo?limit=10&offset=0');
-      expect(res.pagination.next).to.equal('http://localhost/foo?limit=10&offset=10');
-    });
+      expect(res.pagination.self).to.equal('http://localhost/foo?limit=10')
+      expect(res.pagination.first).to.equal('http://localhost/foo?limit=10&offset=0')
+      expect(res.pagination.next).to.equal('http://localhost/foo?limit=10&offset=10')
+    })
 
-    it('should add a last pagination link when requested', function()
-    {
-      var req = mock_http.createRequest({
+    it('should add a last pagination link when requested', function() {
+      const req = mockHttp.createRequest({
         method: 'GET',
         url: '/foo?limit=10&offset=15',
         query: {
@@ -107,18 +99,19 @@ describe('util/pagination', function()
           host: 'localhost',
         },
         protocol: 'http',
-      });
-
-      var res = pagination.paginate(req, {}, 35);
-
-      expect(res).to.have.property('pagination')
-        .that.has.all.keys('self', 'first', 'next', 'prev', 'last');
-
-      expect(res.pagination.self).to.equal('http://localhost/foo?limit=10&offset=15');
-      expect(res.pagination.first).to.equal('http://localhost/foo?limit=10&offset=0');
-      expect(res.pagination.last).to.equal('http://localhost/foo?limit=10&offset=35');
-      expect(res.pagination.prev).to.equal('http://localhost/foo?limit=10&offset=5');
-      expect(res.pagination.next).to.equal('http://localhost/foo?limit=10&offset=25');
-    });
-  });
-});
+      })
+
+      const res = pagination.paginate(req, {}, 35)
+
+      expect(res)
+        .to.have.property('pagination')
+        .that.has.all.keys('self', 'first', 'next', 'prev', 'last')
+
+      expect(res.pagination.self).to.equal('http://localhost/foo?limit=10&offset=15')
+      expect(res.pagination.first).to.equal('http://localhost/foo?limit=10&offset=0')
+      expect(res.pagination.last).to.equal('http://localhost/foo?limit=10&offset=35')
+      expect(res.pagination.prev).to.equal('http://localhost/foo?limit=10&offset=5')
+      expect(res.pagination.next).to.equal('http://localhost/foo?limit=10&offset=25')
+    })
+  })
+})

+ 252 - 269
storage-node/packages/util/test/ranges.js

@@ -16,394 +16,377 @@
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
 
-'use strict';
+'use strict'
 
-const mocha = require('mocha');
-const expect = require('chai').expect;
-const mock_http = require('node-mocks-http');
-const stream_buffers = require('stream-buffers');
+const expect = require('chai').expect
+const mockHttp = require('node-mocks-http')
+const streamBuffers = require('stream-buffers')
 
-const ranges = require('@joystream/storage-utils/ranges');
+const ranges = require('@joystream/storage-utils/ranges')
 
-describe('util/ranges', function()
-{
-  describe('parse()', function()
-  {
-    it('should parse a full range', function()
-    {
+describe('util/ranges', function() {
+  describe('parse()', function() {
+    it('should parse a full range', function() {
       // Range with unit
-      var range = ranges.parse('bytes=0-100');
-      expect(range.unit).to.equal('bytes');
-      expect(range.range_str).to.equal('0-100');
-      expect(range.ranges[0][0]).to.equal(0);
-      expect(range.ranges[0][1]).to.equal(100);
+      let range = ranges.parse('bytes=0-100')
+      expect(range.unit).to.equal('bytes')
+      expect(range.rangeStr).to.equal('0-100')
+      expect(range.ranges[0][0]).to.equal(0)
+      expect(range.ranges[0][1]).to.equal(100)
 
       // Range without unit
-      var range = ranges.parse('0-100');
-      expect(range.unit).to.equal('bytes');
-      expect(range.range_str).to.equal('0-100');
-      expect(range.ranges[0][0]).to.equal(0);
-      expect(range.ranges[0][1]).to.equal(100);
+      range = ranges.parse('0-100')
+      expect(range.unit).to.equal('bytes')
+      expect(range.rangeStr).to.equal('0-100')
+      expect(range.ranges[0][0]).to.equal(0)
+      expect(range.ranges[0][1]).to.equal(100)
 
       // Range with custom unit
       //
-      var range = ranges.parse('foo=0-100');
-      expect(range.unit).to.equal('foo');
-      expect(range.range_str).to.equal('0-100');
-      expect(range.ranges[0][0]).to.equal(0);
-      expect(range.ranges[0][1]).to.equal(100);
-    });
-
-    it('should error out on malformed strings', function()
-    {
-      expect(() => ranges.parse('foo')).to.throw();
-      expect(() => ranges.parse('foo=bar')).to.throw();
-      expect(() => ranges.parse('foo=100')).to.throw();
-      expect(() => ranges.parse('foo=100-0')).to.throw();
-    });
-
-    it('should parse a range without end', function()
-    {
-      var range = ranges.parse('0-');
-      expect(range.unit).to.equal('bytes');
-      expect(range.range_str).to.equal('0-');
-      expect(range.ranges[0][0]).to.equal(0);
-      expect(range.ranges[0][1]).to.be.undefined;
-    });
-
-    it('should parse a range without start', function()
-    {
-      var range = ranges.parse('-100');
-      expect(range.unit).to.equal('bytes');
-      expect(range.range_str).to.equal('-100');
-      expect(range.ranges[0][0]).to.be.undefined;
-      expect(range.ranges[0][1]).to.equal(100);
-    });
-
-    it('should parse multiple ranges', function()
-    {
-      var range = ranges.parse('0-10,30-40,60-80');
-      expect(range.unit).to.equal('bytes');
-      expect(range.range_str).to.equal('0-10,30-40,60-80');
-      expect(range.ranges[0][0]).to.equal(0);
-      expect(range.ranges[0][1]).to.equal(10);
-      expect(range.ranges[1][0]).to.equal(30);
-      expect(range.ranges[1][1]).to.equal(40);
-      expect(range.ranges[2][0]).to.equal(60);
-      expect(range.ranges[2][1]).to.equal(80);
-    });
-
-    it('should merge overlapping ranges', function()
-    {
+      range = ranges.parse('foo=0-100')
+      expect(range.unit).to.equal('foo')
+      expect(range.rangeStr).to.equal('0-100')
+      expect(range.ranges[0][0]).to.equal(0)
+      expect(range.ranges[0][1]).to.equal(100)
+    })
+
+    it('should error out on malformed strings', function() {
+      expect(() => ranges.parse('foo')).to.throw()
+      expect(() => ranges.parse('foo=bar')).to.throw()
+      expect(() => ranges.parse('foo=100')).to.throw()
+      expect(() => ranges.parse('foo=100-0')).to.throw()
+    })
+
+    it('should parse a range without end', function() {
+      const range = ranges.parse('0-')
+      expect(range.unit).to.equal('bytes')
+      expect(range.rangeStr).to.equal('0-')
+      expect(range.ranges[0][0]).to.equal(0)
+      expect(range.ranges[0][1]).to.be.undefined
+    })
+
+    it('should parse a range without start', function() {
+      const range = ranges.parse('-100')
+      expect(range.unit).to.equal('bytes')
+      expect(range.rangeStr).to.equal('-100')
+      expect(range.ranges[0][0]).to.be.undefined
+      expect(range.ranges[0][1]).to.equal(100)
+    })
+
+    it('should parse multiple ranges', function() {
+      const range = ranges.parse('0-10,30-40,60-80')
+      expect(range.unit).to.equal('bytes')
+      expect(range.rangeStr).to.equal('0-10,30-40,60-80')
+      expect(range.ranges[0][0]).to.equal(0)
+      expect(range.ranges[0][1]).to.equal(10)
+      expect(range.ranges[1][0]).to.equal(30)
+      expect(range.ranges[1][1]).to.equal(40)
+      expect(range.ranges[2][0]).to.equal(60)
+      expect(range.ranges[2][1]).to.equal(80)
+    })
+
+    it('should merge overlapping ranges', function() {
       // Two overlapping ranges
-      var range = ranges.parse('0-20,10-30');
-      expect(range.unit).to.equal('bytes');
-      expect(range.range_str).to.equal('0-20,10-30');
-      expect(range.ranges).to.have.lengthOf(1);
-      expect(range.ranges[0][0]).to.equal(0);
-      expect(range.ranges[0][1]).to.equal(30);
+      let range = ranges.parse('0-20,10-30')
+      expect(range.unit).to.equal('bytes')
+      expect(range.rangeStr).to.equal('0-20,10-30')
+      expect(range.ranges).to.have.lengthOf(1)
+      expect(range.ranges[0][0]).to.equal(0)
+      expect(range.ranges[0][1]).to.equal(30)
 
       // Three overlapping ranges
-      var range = ranges.parse('0-15,10-25,20-30');
-      expect(range.unit).to.equal('bytes');
-      expect(range.range_str).to.equal('0-15,10-25,20-30');
-      expect(range.ranges).to.have.lengthOf(1);
-      expect(range.ranges[0][0]).to.equal(0);
-      expect(range.ranges[0][1]).to.equal(30);
+      range = ranges.parse('0-15,10-25,20-30')
+      expect(range.unit).to.equal('bytes')
+      expect(range.rangeStr).to.equal('0-15,10-25,20-30')
+      expect(range.ranges).to.have.lengthOf(1)
+      expect(range.ranges[0][0]).to.equal(0)
+      expect(range.ranges[0][1]).to.equal(30)
 
       // Three overlapping ranges, reverse order
-      var range = ranges.parse('20-30,10-25,0-15');
-      expect(range.unit).to.equal('bytes');
-      expect(range.range_str).to.equal('20-30,10-25,0-15');
-      expect(range.ranges).to.have.lengthOf(1);
-      expect(range.ranges[0][0]).to.equal(0);
-      expect(range.ranges[0][1]).to.equal(30);
+      range = ranges.parse('20-30,10-25,0-15')
+      expect(range.unit).to.equal('bytes')
+      expect(range.rangeStr).to.equal('20-30,10-25,0-15')
+      expect(range.ranges).to.have.lengthOf(1)
+      expect(range.ranges[0][0]).to.equal(0)
+      expect(range.ranges[0][1]).to.equal(30)
 
       // Adjacent ranges
-      var range = ranges.parse('0-10,11-20');
-      expect(range.unit).to.equal('bytes');
-      expect(range.range_str).to.equal('0-10,11-20');
-      expect(range.ranges).to.have.lengthOf(1);
-      expect(range.ranges[0][0]).to.equal(0);
-      expect(range.ranges[0][1]).to.equal(20);
-    });
-
-    it('should sort ranges', function()
-    {
-      var range = ranges.parse('10-30,0-5');
-      expect(range.unit).to.equal('bytes');
-      expect(range.range_str).to.equal('10-30,0-5');
-      expect(range.ranges).to.have.lengthOf(2);
-      expect(range.ranges[0][0]).to.equal(0);
-      expect(range.ranges[0][1]).to.equal(5);
-      expect(range.ranges[1][0]).to.equal(10);
-      expect(range.ranges[1][1]).to.equal(30);
-    });
-  });
-
-  describe('send()', function()
-  {
-    it('should send full files on request', function(done)
-    {
-      var res = mock_http.createResponse({});
-      var in_stream = new stream_buffers.ReadableStreamBuffer({});
+      range = ranges.parse('0-10,11-20')
+      expect(range.unit).to.equal('bytes')
+      expect(range.rangeStr).to.equal('0-10,11-20')
+      expect(range.ranges).to.have.lengthOf(1)
+      expect(range.ranges[0][0]).to.equal(0)
+      expect(range.ranges[0][1]).to.equal(20)
+    })
+
+    it('should sort ranges', function() {
+      const range = ranges.parse('10-30,0-5')
+      expect(range.unit).to.equal('bytes')
+      expect(range.rangeStr).to.equal('10-30,0-5')
+      expect(range.ranges).to.have.lengthOf(2)
+      expect(range.ranges[0][0]).to.equal(0)
+      expect(range.ranges[0][1]).to.equal(5)
+      expect(range.ranges[1][0]).to.equal(10)
+      expect(range.ranges[1][1]).to.equal(30)
+    })
+  })
+
+  describe('send()', function() {
+    it('should send full files on request', function(done) {
+      const res = mockHttp.createResponse({})
+      const inStream = new streamBuffers.ReadableStreamBuffer({})
 
       // End-of-stream callback
-      var opts = {
+      const opts = {
         name: 'test.file',
         type: 'application/test',
-      };
-      ranges.send(res, in_stream, opts, function(err) {
-        expect(err).to.not.exist;
+      }
+      ranges.send(res, inStream, opts, function(err) {
+        expect(err).to.not.exist
 
         // HTTP handling
-        expect(res.statusCode).to.equal(200);
-        expect(res.getHeader('content-type')).to.equal('application/test');
-        expect(res.getHeader('content-disposition')).to.equal('inline');
+        expect(res.statusCode).to.equal(200)
+        expect(res.getHeader('content-type')).to.equal('application/test')
+        expect(res.getHeader('content-disposition')).to.equal('inline')
 
         // Data/stream handling
-        expect(res._isEndCalled()).to.be.true;
-        expect(res._getBuffer().toString()).to.equal('Hello, world!');
+        expect(res._isEndCalled()).to.be.true
+        expect(res._getBuffer().toString()).to.equal('Hello, world!')
 
         // Notify mocha that we're done.
-        done();
-      });
+        done()
+      })
 
       // Simulate file stream
-      in_stream.emit('open');
-      in_stream.put('Hello, world!');
-      in_stream.stop();
-    });
+      inStream.emit('open')
+      inStream.put('Hello, world!')
+      inStream.stop()
+    })
 
-    it('should send a range spanning the entire file on request', function(done)
-    {
-      var res = mock_http.createResponse({});
-      var in_stream = new stream_buffers.ReadableStreamBuffer({});
+    it('should send a range spanning the entire file on request', function(done) {
+      const res = mockHttp.createResponse({})
+      const inStream = new streamBuffers.ReadableStreamBuffer({})
 
       // End-of-stream callback
-      var opts = {
+      const opts = {
         name: 'test.file',
         type: 'application/test',
         ranges: {
           ranges: [[0, 12]],
-        }
-      };
-      ranges.send(res, in_stream, opts, function(err) {
-        expect(err).to.not.exist;
+        },
+      }
+      ranges.send(res, inStream, opts, function(err) {
+        expect(err).to.not.exist
 
         // HTTP handling
-        expect(res.statusCode).to.equal(206);
-        expect(res.getHeader('content-type')).to.equal('application/test');
-        expect(res.getHeader('content-disposition')).to.equal('inline');
-        expect(res.getHeader('content-range')).to.equal('bytes 0-12/*');
-        expect(res.getHeader('content-length')).to.equal('13');
+        expect(res.statusCode).to.equal(206)
+        expect(res.getHeader('content-type')).to.equal('application/test')
+        expect(res.getHeader('content-disposition')).to.equal('inline')
+        expect(res.getHeader('content-range')).to.equal('bytes 0-12/*')
+        expect(res.getHeader('content-length')).to.equal('13')
 
         // Data/stream handling
-        expect(res._isEndCalled()).to.be.true;
-        expect(res._getBuffer().toString()).to.equal('Hello, world!');
+        expect(res._isEndCalled()).to.be.true
+        expect(res._getBuffer().toString()).to.equal('Hello, world!')
 
         // Notify mocha that we're done.
-        done();
-      });
+        done()
+      })
 
       // Simulate file stream
-      in_stream.emit('open');
-      in_stream.put('Hello, world!');
-      in_stream.stop();
+      inStream.emit('open')
+      inStream.put('Hello, world!')
+      inStream.stop()
+    })
 
-    });
-
-    it('should send a small range on request', function(done)
-    {
-      var res = mock_http.createResponse({});
-      var in_stream = new stream_buffers.ReadableStreamBuffer({});
+    it('should send a small range on request', function(done) {
+      const res = mockHttp.createResponse({})
+      const inStream = new streamBuffers.ReadableStreamBuffer({})
 
       // End-of-stream callback
-      var opts = {
+      const opts = {
         name: 'test.file',
         type: 'application/test',
         ranges: {
           ranges: [[1, 11]], // Cut off first and last letter
-        }
-      };
-      ranges.send(res, in_stream, opts, function(err) {
-        expect(err).to.not.exist;
+        },
+      }
+      ranges.send(res, inStream, opts, function(err) {
+        expect(err).to.not.exist
 
         // HTTP handling
-        expect(res.statusCode).to.equal(206);
-        expect(res.getHeader('content-type')).to.equal('application/test');
-        expect(res.getHeader('content-disposition')).to.equal('inline');
-        expect(res.getHeader('content-range')).to.equal('bytes 1-11/*');
-        expect(res.getHeader('content-length')).to.equal('11');
+        expect(res.statusCode).to.equal(206)
+        expect(res.getHeader('content-type')).to.equal('application/test')
+        expect(res.getHeader('content-disposition')).to.equal('inline')
+        expect(res.getHeader('content-range')).to.equal('bytes 1-11/*')
+        expect(res.getHeader('content-length')).to.equal('11')
 
         // Data/stream handling
-        expect(res._isEndCalled()).to.be.true;
-        expect(res._getBuffer().toString()).to.equal('ello, world');
+        expect(res._isEndCalled()).to.be.true
+        expect(res._getBuffer().toString()).to.equal('ello, world')
 
         // Notify mocha that we're done.
-        done();
-      });
+        done()
+      })
 
       // Simulate file stream
-      in_stream.emit('open');
-      in_stream.put('Hello, world!');
-      in_stream.stop();
-    });
-
-    it('should send ranges crossing buffer boundaries', function(done)
-    {
-      var res = mock_http.createResponse({});
-      var in_stream = new stream_buffers.ReadableStreamBuffer({
+      inStream.emit('open')
+      inStream.put('Hello, world!')
+      inStream.stop()
+    })
+
+    it('should send ranges crossing buffer boundaries', function(done) {
+      const res = mockHttp.createResponse({})
+      const inStream = new streamBuffers.ReadableStreamBuffer({
         chunkSize: 3, // Setting a chunk size smaller than the range should
-                      // not impact the test.
-      });
+        // not impact the test.
+      })
 
       // End-of-stream callback
-      var opts = {
+      const opts = {
         name: 'test.file',
         type: 'application/test',
         ranges: {
           ranges: [[1, 11]], // Cut off first and last letter
-        }
-      };
-      ranges.send(res, in_stream, opts, function(err) {
-        expect(err).to.not.exist;
+        },
+      }
+      ranges.send(res, inStream, opts, function(err) {
+        expect(err).to.not.exist
 
         // HTTP handling
-        expect(res.statusCode).to.equal(206);
-        expect(res.getHeader('content-type')).to.equal('application/test');
-        expect(res.getHeader('content-disposition')).to.equal('inline');
-        expect(res.getHeader('content-range')).to.equal('bytes 1-11/*');
-        expect(res.getHeader('content-length')).to.equal('11');
+        expect(res.statusCode).to.equal(206)
+        expect(res.getHeader('content-type')).to.equal('application/test')
+        expect(res.getHeader('content-disposition')).to.equal('inline')
+        expect(res.getHeader('content-range')).to.equal('bytes 1-11/*')
+        expect(res.getHeader('content-length')).to.equal('11')
 
         // Data/stream handling
-        expect(res._isEndCalled()).to.be.true;
-        expect(res._getBuffer().toString()).to.equal('ello, world');
+        expect(res._isEndCalled()).to.be.true
+        expect(res._getBuffer().toString()).to.equal('ello, world')
 
         // Notify mocha that we're done.
-        done();
-      });
+        done()
+      })
 
       // Simulate file stream
-      in_stream.emit('open');
-      in_stream.put('Hello, world!');
-      in_stream.stop();
-    });
+      inStream.emit('open')
+      inStream.put('Hello, world!')
+      inStream.stop()
+    })
 
-    it('should send multiple ranges', function(done)
-    {
-      var res = mock_http.createResponse({});
-      var in_stream = new stream_buffers.ReadableStreamBuffer({});
+    it('should send multiple ranges', function(done) {
+      const res = mockHttp.createResponse({})
+      const inStream = new streamBuffers.ReadableStreamBuffer({})
 
       // End-of-stream callback
-      var opts = {
+      const opts = {
         name: 'test.file',
         type: 'application/test',
         ranges: {
-          ranges: [[1, 3], [5, 7]], // Slice two ranges out
-        }
-      };
-      ranges.send(res, in_stream, opts, function(err) {
-        expect(err).to.not.exist;
+          ranges: [
+            [1, 3],
+            [5, 7],
+          ], // Slice two ranges out
+        },
+      }
+      ranges.send(res, inStream, opts, function(err) {
+        expect(err).to.not.exist
 
         // HTTP handling
-        expect(res.statusCode).to.equal(206);
-        expect(res.getHeader('content-type')).to.satisfy((str) => str.startsWith('multipart/byteranges'));
-        expect(res.getHeader('content-disposition')).to.equal('inline');
+        expect(res.statusCode).to.equal(206)
+        expect(res.getHeader('content-type')).to.satisfy(str => str.startsWith('multipart/byteranges'))
+        expect(res.getHeader('content-disposition')).to.equal('inline')
 
         // Data/stream handling
-        expect(res._isEndCalled()).to.be.true;
+        expect(res._isEndCalled()).to.be.true
 
         // The buffer should contain both ranges; that would be
         // "ell" and ", w".
         // It's pretty elaborate having to parse the entire multipart response
         // body, so we'll restrict ourselves to finding lines within it.
-        var body = res._getBuffer().toString();
-        expect(body).to.contain('\r\nContent-Range: bytes 1-3/*\r\n');
-        expect(body).to.contain('\r\nell\r\n');
-        expect(body).to.contain('\r\nContent-Range: bytes 5-7/*\r\n');
-        expect(body).to.contain('\r\n, w');
+        const body = res._getBuffer().toString()
+        expect(body).to.contain('\r\nContent-Range: bytes 1-3/*\r\n')
+        expect(body).to.contain('\r\nell\r\n')
+        expect(body).to.contain('\r\nContent-Range: bytes 5-7/*\r\n')
+        expect(body).to.contain('\r\n, w')
 
         // Notify mocha that we're done.
-        done();
-      });
+        done()
+      })
 
       // Simulate file stream
-      in_stream.emit('open');
-      in_stream.put('Hello, world!');
-      in_stream.stop();
-    });
+      inStream.emit('open')
+      inStream.put('Hello, world!')
+      inStream.stop()
+    })
 
-    it('should deal with ranges without end', function(done)
-    {
-      var res = mock_http.createResponse({});
-      var in_stream = new stream_buffers.ReadableStreamBuffer({});
+    it('should deal with ranges without end', function(done) {
+      const res = mockHttp.createResponse({})
+      const inStream = new streamBuffers.ReadableStreamBuffer({})
 
       // End-of-stream callback
-      var opts = {
+      const opts = {
         name: 'test.file',
         type: 'application/test',
         ranges: {
           ranges: [[5, undefined]], // Skip the first part, but read until end
-        }
-      };
-      ranges.send(res, in_stream, opts, function(err) {
-        expect(err).to.not.exist;
+        },
+      }
+      ranges.send(res, inStream, opts, function(err) {
+        expect(err).to.not.exist
 
         // HTTP handling
-        expect(res.statusCode).to.equal(206);
-        expect(res.getHeader('content-type')).to.equal('application/test');
-        expect(res.getHeader('content-disposition')).to.equal('inline');
-        expect(res.getHeader('content-range')).to.equal('bytes 5-/*');
+        expect(res.statusCode).to.equal(206)
+        expect(res.getHeader('content-type')).to.equal('application/test')
+        expect(res.getHeader('content-disposition')).to.equal('inline')
+        expect(res.getHeader('content-range')).to.equal('bytes 5-/*')
 
         // Data/stream handling
-        expect(res._isEndCalled()).to.be.true;
-        expect(res._getBuffer().toString()).to.equal(', world!');
+        expect(res._isEndCalled()).to.be.true
+        expect(res._getBuffer().toString()).to.equal(', world!')
 
         // Notify mocha that we're done.
-        done();
-      });
+        done()
+      })
 
       // Simulate file stream
-      in_stream.emit('open');
-      in_stream.put('Hello, world!');
-      in_stream.stop();
-    });
+      inStream.emit('open')
+      inStream.put('Hello, world!')
+      inStream.stop()
+    })
 
-    it('should ignore ranges without start', function(done)
-    {
-      var res = mock_http.createResponse({});
-      var in_stream = new stream_buffers.ReadableStreamBuffer({});
+    it('should ignore ranges without start', function(done) {
+      const res = mockHttp.createResponse({})
+      const inStream = new streamBuffers.ReadableStreamBuffer({})
 
       // End-of-stream callback
-      var opts = {
+      const opts = {
         name: 'test.file',
         type: 'application/test',
         ranges: {
           ranges: [[undefined, 5]], // Only last five
-        }
-      };
-      ranges.send(res, in_stream, opts, function(err) {
-        expect(err).to.not.exist;
+        },
+      }
+      ranges.send(res, inStream, opts, function(err) {
+        expect(err).to.not.exist
 
         // HTTP handling
-        expect(res.statusCode).to.equal(200);
-        expect(res.getHeader('content-type')).to.equal('application/test');
-        expect(res.getHeader('content-disposition')).to.equal('inline');
+        expect(res.statusCode).to.equal(200)
+        expect(res.getHeader('content-type')).to.equal('application/test')
+        expect(res.getHeader('content-disposition')).to.equal('inline')
 
         // Data/stream handling
-        expect(res._isEndCalled()).to.be.true;
-        expect(res._getBuffer().toString()).to.equal('Hello, world!');
+        expect(res._isEndCalled()).to.be.true
+        expect(res._getBuffer().toString()).to.equal('Hello, world!')
 
         // Notify mocha that we're done.
-        done();
-      });
+        done()
+      })
 
       // Simulate file stream
-      in_stream.emit('open');
-      in_stream.put('Hello, world!');
-      in_stream.stop();
-
-    });
-  });
-});
+      inStream.emit('open')
+      inStream.put('Hello, world!')
+      inStream.stop()
+    })
+  })
+})
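
Taken together, parse() and send() cover the usual asset-serving flow these tests exercise. A minimal sketch of that flow, assuming an Express-style req/res pair and a readable file stream; the asset name and MIME type are placeholders, and feeding parse()'s result back into send() via its `ranges` array is an assumption based on the shapes used above:

const ranges = require('@joystream/storage-utils/ranges')

function serveAsset(req, res, fileStream) {
  let parsed
  try {
    // parse() throws on malformed Range headers, per the tests above.
    parsed = ranges.parse(req.headers.range || '0-')
  } catch (err) {
    parsed = undefined // Malformed header: fall back to the full file.
  }

  const opts = {
    name: 'asset.bin', // placeholder file name
    type: 'application/octet-stream', // placeholder MIME type
    // Leaving `ranges` unset makes send() return the whole file with a
    // 200, as in the 'should send full files on request' test.
    ranges: parsed ? { ranges: parsed.ranges } : undefined,
  }

  ranges.send(res, fileStream, opts, err => {
    if (err) {
      // send() reports stream errors through this callback.
      console.error('range send failed:', err)
    }
  })
}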

+ 13 - 0
storage-node/packages/util/test/stripEndingSlash.js

@@ -0,0 +1,13 @@
+'use strict'
+
+const expect = require('chai').expect
+const stripEndingSlash = require('@joystream/storage-utils/stripEndingSlash')
+
+describe('stripEndingSlash', function() {
+  it('stripEndingSlash should keep URL without the slash', function() {
+    expect(stripEndingSlash('http://keep.one')).to.equal('http://keep.one')
+  })
+  it('stripEndingSlash should remove ending slash', function() {
+    expect(stripEndingSlash('http://strip.one/')).to.equal('http://strip.one')
+  })
+})
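
These two cases pin down the helper's whole contract. A minimal sketch that satisfies them (hypothetical; the package's actual source may differ):

// Strip a single trailing slash, if present; otherwise return the URL as-is.
module.exports = function stripEndingSlash(url) {
  return url.endsWith('/') ? url.slice(0, -1) : url
}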

+ 194 - 20
yarn.lock

@@ -32,6 +32,13 @@
   dependencies:
     "@babel/highlight" "^7.10.1"
 
+"@babel/code-frame@^7.10.4":
+  version "7.10.4"
+  resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.10.4.tgz#168da1a36e90da68ae8d49c0f1b48c7c6249213a"
+  integrity sha512-vG6SvB6oYEhvgisZNFRmRCUkLz11c7rp+tbNTynGqc6mS1d5ATd/sGyV6W0KZZnXRKMTzZDRgQT3Ou9jhpAfUg==
+  dependencies:
+    "@babel/highlight" "^7.10.4"
+
 "@babel/core@7.6.0":
   version "7.6.0"
   resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.6.0.tgz#9b00f73554edd67bebc86df8303ef678be3d7b48"
@@ -104,6 +111,16 @@
     lodash "^4.17.13"
     source-map "^0.5.0"
 
+"@babel/generator@^7.10.4":
+  version "7.10.4"
+  resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.10.4.tgz#e49eeed9fe114b62fa5b181856a43a5e32f5f243"
+  integrity sha512-toLIHUIAgcQygFZRAQcsLQV3CBuX6yOIru1kJk/qqqvcRmZrYe6WavZTSG+bB8MxhnL9YPf+pKQfuiP161q7ng==
+  dependencies:
+    "@babel/types" "^7.10.4"
+    jsesc "^2.5.1"
+    lodash "^4.17.13"
+    source-map "^0.5.0"
+
 "@babel/generator@^7.4.0", "@babel/generator@^7.6.0", "@babel/generator@^7.7.4":
   version "7.7.4"
   resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.7.4.tgz#db651e2840ca9aa66f327dcec1dc5f5fa9611369"
@@ -216,6 +233,15 @@
     "@babel/template" "^7.10.1"
     "@babel/types" "^7.10.1"
 
+"@babel/helper-function-name@^7.10.4":
+  version "7.10.4"
+  resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.10.4.tgz#d2d3b20c59ad8c47112fa7d2a94bc09d5ef82f1a"
+  integrity sha512-YdaSyz1n8gY44EmN7x44zBn9zQ1Ry2Y+3GTA+3vH6Mizke1Vw0aWDM66FOYEPw8//qKkmqOckrGgTYa+6sceqQ==
+  dependencies:
+    "@babel/helper-get-function-arity" "^7.10.4"
+    "@babel/template" "^7.10.4"
+    "@babel/types" "^7.10.4"
+
 "@babel/helper-function-name@^7.7.4":
   version "7.7.4"
   resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.7.4.tgz#ab6e041e7135d436d8f0a3eca15de5b67a341a2e"
@@ -232,6 +258,13 @@
   dependencies:
     "@babel/types" "^7.10.1"
 
+"@babel/helper-get-function-arity@^7.10.4":
+  version "7.10.4"
+  resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.10.4.tgz#98c1cbea0e2332f33f9a4661b8ce1505b2c19ba2"
+  integrity sha512-EkN3YDB+SRDgiIUnNgcmiD361ti+AVbL3f3Henf6dqqUyr5dMsorno0lJWJuLhDhkI5sYEpgj6y9kB8AOU1I2A==
+  dependencies:
+    "@babel/types" "^7.10.4"
+
 "@babel/helper-get-function-arity@^7.7.4":
   version "7.7.4"
   resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.7.4.tgz#cb46348d2f8808e632f0ab048172130e636005f0"
@@ -384,6 +417,13 @@
   dependencies:
     "@babel/types" "^7.10.1"
 
+"@babel/helper-split-export-declaration@^7.10.4":
+  version "7.10.4"
+  resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.10.4.tgz#2c70576eaa3b5609b24cb99db2888cc3fc4251d1"
+  integrity sha512-pySBTeoUff56fL5CBU2hWm9TesA4r/rOkI9DyJLvvgz09MB9YtfIYe3iBriVaYNaPe+Alua0vBIOVOLs2buWhg==
+  dependencies:
+    "@babel/types" "^7.10.4"
+
 "@babel/helper-split-export-declaration@^7.7.4":
   version "7.7.4"
   resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.7.4.tgz#57292af60443c4a3622cf74040ddc28e68336fd8"
@@ -396,6 +436,11 @@
   resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.10.1.tgz#5770b0c1a826c4f53f5ede5e153163e0318e94b5"
   integrity sha512-5vW/JXLALhczRCWP0PnFDMCJAchlBvM7f4uk/jXritBnIa6E1KmqmtrS3yn1LAnxFBypQ3eneLuXjsnfQsgILw==
 
+"@babel/helper-validator-identifier@^7.10.4":
+  version "7.10.4"
+  resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.10.4.tgz#a78c7a7251e01f616512d31b10adcf52ada5e0d2"
+  integrity sha512-3U9y+43hz7ZM+rzG24Qe2mufW5KhvFg/NhnNph+i9mgCtdTCtMJuI1TMkrIUiK7Ix4PYlRF9I5dhqaLYA/ADXw==
+
 "@babel/helper-wrap-function@^7.7.4":
   version "7.7.4"
   resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.7.4.tgz#37ab7fed5150e22d9d7266e830072c0cdd8baace"
@@ -442,6 +487,15 @@
     chalk "^2.0.0"
     js-tokens "^4.0.0"
 
+"@babel/highlight@^7.10.4":
+  version "7.10.4"
+  resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.10.4.tgz#7d1bdfd65753538fabe6c38596cdb76d9ac60143"
+  integrity sha512-i6rgnR/YgPEQzZZnbTHHuZdlE8qyoBNalD6F+q4vAFlcMEcqmkoG+mPqJYJCo63qPf74+Y1UZsl3l6f7/RIkmA==
+  dependencies:
+    "@babel/helper-validator-identifier" "^7.10.4"
+    chalk "^2.0.0"
+    js-tokens "^4.0.0"
+
 "@babel/parser@^7.0.0", "@babel/parser@^7.1.0", "@babel/parser@^7.4.3", "@babel/parser@^7.6.0", "@babel/parser@^7.7.4":
   version "7.7.4"
   resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.7.4.tgz#75ab2d7110c2cf2fa949959afb05fa346d2231bb"
@@ -452,6 +506,11 @@
   resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.10.1.tgz#2e142c27ca58aa2c7b119d09269b702c8bbad28c"
   integrity sha512-AUTksaz3FqugBkbTZ1i+lDLG5qy8hIzCaAxEtttU6C0BtZZU9pkNZtWSVAht4EW9kl46YBiyTGMp9xTTGqViNg==
 
+"@babel/parser@^7.10.4", "@babel/parser@^7.7.0":
+  version "7.10.4"
+  resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.10.4.tgz#9eedf27e1998d87739fb5028a5120557c06a1a64"
+  integrity sha512-8jHII4hf+YVDsskTF6WuMB3X4Eh+PsUkC2ljq22so5rHvH+T8BzyL94VOdyFLNR8tBSVXOTbNHOKpR4TfRxVtA==
+
 "@babel/plugin-proposal-async-generator-functions@^7.2.0", "@babel/plugin-proposal-async-generator-functions@^7.7.4":
   version "7.7.4"
   resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.7.4.tgz#0351c5ac0a9e927845fffd5b82af476947b7ce6d"
@@ -1290,6 +1349,15 @@
     "@babel/parser" "^7.10.1"
     "@babel/types" "^7.10.1"
 
+"@babel/template@^7.10.4":
+  version "7.10.4"
+  resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.10.4.tgz#3251996c4200ebc71d1a8fc405fba940f36ba278"
+  integrity sha512-ZCjD27cGJFUB6nmCB1Enki3r+L5kJveX9pq1SvAUKoICy6CZ9yD8xO086YXdYhvNjBdnekm4ZnaP5yC8Cs/1tA==
+  dependencies:
+    "@babel/code-frame" "^7.10.4"
+    "@babel/parser" "^7.10.4"
+    "@babel/types" "^7.10.4"
+
 "@babel/template@^7.4.0", "@babel/template@^7.6.0", "@babel/template@^7.7.4":
   version "7.7.4"
   resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.7.4.tgz#428a7d9eecffe27deac0a98e23bf8e3675d2a77b"
@@ -1329,6 +1397,21 @@
     globals "^11.1.0"
     lodash "^4.17.13"
 
+"@babel/traverse@^7.7.0":
+  version "7.10.4"
+  resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.10.4.tgz#e642e5395a3b09cc95c8e74a27432b484b697818"
+  integrity sha512-aSy7p5THgSYm4YyxNGz6jZpXf+Ok40QF3aA2LyIONkDHpAcJzDUqlCKXv6peqYUs2gmic849C/t2HKw2a2K20Q==
+  dependencies:
+    "@babel/code-frame" "^7.10.4"
+    "@babel/generator" "^7.10.4"
+    "@babel/helper-function-name" "^7.10.4"
+    "@babel/helper-split-export-declaration" "^7.10.4"
+    "@babel/parser" "^7.10.4"
+    "@babel/types" "^7.10.4"
+    debug "^4.1.0"
+    globals "^11.1.0"
+    lodash "^4.17.13"
+
 "@babel/types@^7.0.0", "@babel/types@^7.3.0", "@babel/types@^7.4.0", "@babel/types@^7.4.4", "@babel/types@^7.6.0", "@babel/types@^7.7.4":
   version "7.7.4"
   resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.7.4.tgz#516570d539e44ddf308c07569c258ff94fde9193"
@@ -1347,6 +1430,15 @@
     lodash "^4.17.13"
     to-fast-properties "^2.0.0"
 
+"@babel/types@^7.10.4", "@babel/types@^7.7.0":
+  version "7.10.4"
+  resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.10.4.tgz#369517188352e18219981efd156bfdb199fff1ee"
+  integrity sha512-UTCFOxC3FsFHb7lkRMVvgLzaRVamXuAs2Tz4wajva4WxtVY82eZeaUBtC2Zt95FU9TiznuC0Zk35tsim8jeVpg==
+  dependencies:
+    "@babel/helper-validator-identifier" "^7.10.4"
+    lodash "^4.17.13"
+    to-fast-properties "^2.0.0"
+
 "@cnakazawa/watch@^1.0.3":
   version "1.0.3"
   resolved "https://registry.yarnpkg.com/@cnakazawa/watch/-/watch-1.0.3.tgz#099139eaec7ebf07a27c1786a3ff64f39464d2ef"
@@ -5529,6 +5621,18 @@ babel-core@^7.0.0-bridge.0:
   resolved "https://registry.yarnpkg.com/babel-core/-/babel-core-7.0.0-bridge.0.tgz#95a492ddd90f9b4e9a4a1da14eb335b87b634ece"
   integrity sha512-poPX9mZH/5CSanm50Q+1toVci6pv5KSRv/5TWCwtzQS5XEwn40BcCrgIeMFWP9CKKIniKXNxoIOnOq4VVlGXhg==
 
+babel-eslint@^10.0.1:
+  version "10.1.0"
+  resolved "https://registry.yarnpkg.com/babel-eslint/-/babel-eslint-10.1.0.tgz#6968e568a910b78fb3779cdd8b6ac2f479943232"
+  integrity sha512-ifWaTHQ0ce+448CYop8AdrQiBsGrnC+bMgfyKFdi6EsPLTAWG+QfyDeM6OH+FmWnKvEq5NnBMLvlBUPKQZoDSg==
+  dependencies:
+    "@babel/code-frame" "^7.0.0"
+    "@babel/parser" "^7.7.0"
+    "@babel/traverse" "^7.7.0"
+    "@babel/types" "^7.7.0"
+    eslint-visitor-keys "^1.0.0"
+    resolve "^1.12.0"
+
 babel-helper-evaluate-path@^0.5.0:
   version "0.5.0"
   resolved "https://registry.yarnpkg.com/babel-helper-evaluate-path/-/babel-helper-evaluate-path-0.5.0.tgz#a62fa9c4e64ff7ea5cea9353174ef023a900a67c"
@@ -9417,6 +9521,16 @@ eslint-ast-utils@^1.0.0:
     lodash.get "^4.4.2"
     lodash.zip "^4.2.0"
 
+eslint-config-esnext@^4.1.0:
+  version "4.1.0"
+  resolved "https://registry.yarnpkg.com/eslint-config-esnext/-/eslint-config-esnext-4.1.0.tgz#8695b858fcf40d28c1aedca181f700528c7b60c6"
+  integrity sha512-GhfVEXdqYKEIIj7j+Fw2SQdL9qyZMekgXfq6PyXM66cQw0B435ddjz3P3kxOBVihMRJ0xGYjosaveQz5Y6z0uA==
+  dependencies:
+    babel-eslint "^10.0.1"
+    eslint "^6.8.0"
+    eslint-plugin-babel "^5.2.1"
+    eslint-plugin-import "^2.14.0"
+
 eslint-config-oclif-typescript@^0.1.0:
   version "0.1.0"
   resolved "https://registry.yarnpkg.com/eslint-config-oclif-typescript/-/eslint-config-oclif-typescript-0.1.0.tgz#c310767c5ee8916ea5d08cf027d0317dd52ed8ba"
@@ -9506,6 +9620,13 @@ eslint-module-utils@^2.4.1, eslint-module-utils@^2.6.0:
     debug "^2.6.9"
     pkg-dir "^2.0.0"
 
+eslint-plugin-babel@^5.2.1, eslint-plugin-babel@^5.3.1:
+  version "5.3.1"
+  resolved "https://registry.yarnpkg.com/eslint-plugin-babel/-/eslint-plugin-babel-5.3.1.tgz#75a2413ffbf17e7be57458301c60291f2cfbf560"
+  integrity sha512-VsQEr6NH3dj664+EyxJwO4FCYm/00JhYb3Sk3ft8o+fpKuIfQ9TaW6uVUfvwMXHcf/lsnRIoyFPsLMyiWCSL/g==
+  dependencies:
+    eslint-rule-composer "^0.3.0"
+
 eslint-plugin-es@^1.3.1:
   version "1.4.1"
   resolved "https://registry.yarnpkg.com/eslint-plugin-es/-/eslint-plugin-es-1.4.1.tgz#12acae0f4953e76ba444bfd1b2271081ac620998"
@@ -9535,6 +9656,25 @@ eslint-plugin-eslint-plugin@^2.1.0:
   resolved "https://registry.yarnpkg.com/eslint-plugin-eslint-plugin/-/eslint-plugin-eslint-plugin-2.1.0.tgz#a7a00f15a886957d855feacaafee264f039e62d5"
   integrity sha512-kT3A/ZJftt28gbl/Cv04qezb/NQ1dwYIbi8lyf806XMxkus7DvOVCLIfTXMrorp322Pnoez7+zabXH29tADIDg==
 
+eslint-plugin-import@^2.14.0, eslint-plugin-import@^2.22.0:
+  version "2.22.0"
+  resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.22.0.tgz#92f7736fe1fde3e2de77623c838dd992ff5ffb7e"
+  integrity sha512-66Fpf1Ln6aIS5Gr/55ts19eUuoDhAbZgnr6UxK5hbDx6l/QgQgx61AePq+BV4PP2uXQFClgMVzep5zZ94qqsxg==
+  dependencies:
+    array-includes "^3.1.1"
+    array.prototype.flat "^1.2.3"
+    contains-path "^0.1.0"
+    debug "^2.6.9"
+    doctrine "1.5.0"
+    eslint-import-resolver-node "^0.3.3"
+    eslint-module-utils "^2.6.0"
+    has "^1.0.3"
+    minimatch "^3.0.4"
+    object.values "^1.1.1"
+    read-pkg-up "^2.0.0"
+    resolve "^1.17.0"
+    tsconfig-paths "^3.9.0"
+
 eslint-plugin-import@^2.18.2:
   version "2.18.2"
   resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.18.2.tgz#02f1180b90b077b33d447a17a2326ceb400aceb6"
@@ -9570,25 +9710,6 @@ eslint-plugin-import@^2.20.2:
     read-pkg-up "^2.0.0"
     resolve "^1.12.0"
 
-eslint-plugin-import@^2.22.0:
-  version "2.22.0"
-  resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.22.0.tgz#92f7736fe1fde3e2de77623c838dd992ff5ffb7e"
-  integrity sha512-66Fpf1Ln6aIS5Gr/55ts19eUuoDhAbZgnr6UxK5hbDx6l/QgQgx61AePq+BV4PP2uXQFClgMVzep5zZ94qqsxg==
-  dependencies:
-    array-includes "^3.1.1"
-    array.prototype.flat "^1.2.3"
-    contains-path "^0.1.0"
-    debug "^2.6.9"
-    doctrine "1.5.0"
-    eslint-import-resolver-node "^0.3.3"
-    eslint-module-utils "^2.6.0"
-    has "^1.0.3"
-    minimatch "^3.0.4"
-    object.values "^1.1.1"
-    read-pkg-up "^2.0.0"
-    resolve "^1.17.0"
-    tsconfig-paths "^3.9.0"
-
 eslint-plugin-mocha@^5.2.0:
   version "5.3.0"
   resolved "https://registry.yarnpkg.com/eslint-plugin-mocha/-/eslint-plugin-mocha-5.3.0.tgz#cf3eb18ae0e44e433aef7159637095a7cb19b15b"
@@ -9632,7 +9753,7 @@ eslint-plugin-node@^7.0.1:
     resolve "^1.8.1"
     semver "^5.5.0"
 
-eslint-plugin-prettier@^3.1.3:
+eslint-plugin-prettier@^3.1.3, eslint-plugin-prettier@^3.1.4:
   version "3.1.4"
   resolved "https://registry.yarnpkg.com/eslint-plugin-prettier/-/eslint-plugin-prettier-3.1.4.tgz#168ab43154e2ea57db992a2cd097c828171f75c2"
   integrity sha512-jZDa8z76klRqo+TdGDTFJSavwbnWK2ZpqGKNZ+VvweMW516pDUMmQ2koXvxEE4JhzNvTv+radye/bWGBmA6jmg==
@@ -9684,6 +9805,11 @@ eslint-plugin-unicorn@^6.0.1:
     lodash.upperfirst "^4.2.0"
     safe-regex "^1.1.0"
 
+eslint-rule-composer@^0.3.0:
+  version "0.3.0"
+  resolved "https://registry.yarnpkg.com/eslint-rule-composer/-/eslint-rule-composer-0.3.0.tgz#79320c927b0c5c0d3d3d2b76c8b4a488f25bbaf9"
+  integrity sha512-bt+Sh8CtDmn2OajxvNO+BX7Wn4CIWMpTRm3MaiKPCQcnnlm0CS2mhui6QaoeQugs+3Kj2ESKEEGJUdVafwhiCg==
+
 eslint-scope@^4.0.3:
   version "4.0.3"
   resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-4.0.3.tgz#ca03833310f6889a3264781aa82e63eb9cfe7848"
@@ -9804,6 +9930,49 @@ eslint@^6.7.1:
     text-table "^0.2.0"
     v8-compile-cache "^2.0.3"
 
+eslint@^6.8.0:
+  version "6.8.0"
+  resolved "https://registry.yarnpkg.com/eslint/-/eslint-6.8.0.tgz#62262d6729739f9275723824302fb227c8c93ffb"
+  integrity sha512-K+Iayyo2LtyYhDSYwz5D5QdWw0hCacNzyq1Y821Xna2xSJj7cijoLLYmLxTQgcgZ9mC61nryMy9S7GRbYpI5Ig==
+  dependencies:
+    "@babel/code-frame" "^7.0.0"
+    ajv "^6.10.0"
+    chalk "^2.1.0"
+    cross-spawn "^6.0.5"
+    debug "^4.0.1"
+    doctrine "^3.0.0"
+    eslint-scope "^5.0.0"
+    eslint-utils "^1.4.3"
+    eslint-visitor-keys "^1.1.0"
+    espree "^6.1.2"
+    esquery "^1.0.1"
+    esutils "^2.0.2"
+    file-entry-cache "^5.0.1"
+    functional-red-black-tree "^1.0.1"
+    glob-parent "^5.0.0"
+    globals "^12.1.0"
+    ignore "^4.0.6"
+    import-fresh "^3.0.0"
+    imurmurhash "^0.1.4"
+    inquirer "^7.0.0"
+    is-glob "^4.0.0"
+    js-yaml "^3.13.1"
+    json-stable-stringify-without-jsonify "^1.0.1"
+    levn "^0.3.0"
+    lodash "^4.17.14"
+    minimatch "^3.0.4"
+    mkdirp "^0.5.1"
+    natural-compare "^1.4.0"
+    optionator "^0.8.3"
+    progress "^2.0.0"
+    regexpp "^2.0.1"
+    semver "^6.1.2"
+    strip-ansi "^5.2.0"
+    strip-json-comments "^3.0.1"
+    table "^5.2.3"
+    text-table "^0.2.0"
+    v8-compile-cache "^2.0.3"
+
 esm@^3.2.25:
   version "3.2.25"
   resolved "https://registry.yarnpkg.com/esm/-/esm-3.2.25.tgz#342c18c29d56157688ba5ce31f8431fbb795cc10"
@@ -18330,6 +18499,11 @@ prettier@^1.16.4, prettier@^1.18.2:
   resolved "https://registry.yarnpkg.com/prettier/-/prettier-1.19.1.tgz#f7d7f5ff8a9cd872a7be4ca142095956a60797cb"
   integrity sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==
 
+prettier@^2.0.5:
+  version "2.0.5"
+  resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.0.5.tgz#d6d56282455243f2f92cc1716692c08aa31522d4"
+  integrity sha512-7PtVymN48hGcO4fGjybyBSIWDsLU4H4XlvOHfq91pz9kkGlonzwTfYkaIEwiRg/dAJF9YlbsduBAgtYLi+8cFg==
+
 pretty-error@^2.0.2, pretty-error@^2.1.1:
   version "2.1.1"
   resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-2.1.1.tgz#5f4f87c8f91e5ae3f3ba87ab4cf5e03b1a17f1a3"