
storage-node: Autofix linter errors.

Shamil Gadelshin 4 years ago
parent
commit
cab7971c6f
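
The rewrites below are consistent with Prettier's default output (parentheses around single arrow-function parameters, and a space after the `function` keyword on anonymous function expressions). As a rough sketch only, assuming Prettier 2.x is wired into the lint step, a `.prettierrc.js` along these lines would reproduce this formatting; the repository's actual config file and option values may differ:

// .prettierrc.js — hypothetical sketch, not the repo's verified config
module.exports = {
  semi: false, // matches the semicolon-free style in these files
  singleQuote: true, // matches the quoting style in these files
  arrowParens: 'always', // turns `err => {}` into `(err) => {}`
  printWidth: 120, // assumption; the joined chai expectations below fit on one line
}

The anonymous-function spacing (`function (err)`) is Prettier's fixed behavior rather than an option. Running something like `prettier --write .` or `eslint --fix .` (whichever the package scripts actually wire up) would apply these changes mechanically.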

+ 1 - 1
storage-node/packages/cli/bin/cli.js

@@ -7,7 +7,7 @@ main()
   .then(() => {
     process.exit(0)
   })
-  .catch(err => {
+  .catch((err) => {
     console.error(chalk.red(err.stack))
     process.exit(-1)
   })

+ 6 - 6
storage-node/packages/colossus/bin/cli.js

@@ -29,14 +29,14 @@ const FLAG_DEFINITIONS = {
   },
   keyFile: {
     type: 'string',
-    isRequired: flags => {
+    isRequired: (flags) => {
       return !flags.dev
     },
   },
   publicUrl: {
     type: 'string',
     alias: 'u',
-    isRequired: flags => {
+    isRequired: (flags) => {
       return !flags.dev
     },
   },
@@ -50,7 +50,7 @@ const FLAG_DEFINITIONS = {
   providerId: {
     type: 'number',
     alias: 'i',
-    isRequired: flags => {
+    isRequired: (flags) => {
       return !flags.dev
     },
   },
@@ -122,7 +122,7 @@ function getStorage(runtimeApi) {
   const { Storage } = require('@joystream/storage-node-backend')
 
   const options = {
-    resolve_content_id: async contentId => {
+    resolve_content_id: async (contentId) => {
       // Resolve via API
       const obj = await runtimeApi.assets.getDataObject(contentId)
       if (!obj || obj.isNone) {
@@ -201,7 +201,7 @@ function getServiceInformation(publicUrl) {
 
 async function announcePublicUrl(api, publicUrl) {
   // re-announce in future
-  const reannounce = function(timeoutMs) {
+  const reannounce = function (timeoutMs) {
     setTimeout(announcePublicUrl, timeoutMs, api, publicUrl)
   }
 
@@ -295,7 +295,7 @@ main()
   .then(() => {
     process.exit(0)
   })
-  .catch(err => {
+  .catch((err) => {
     console.error(chalk.red(err.stack))
     process.exit(-1)
   })

+ 1 - 1
storage-node/packages/colossus/lib/app.js

@@ -66,7 +66,7 @@ function createApp(projectRoot, storage, runtime) {
   // error serialized to JSON.
   // Disable lint because we need such function signature.
   // eslint-disable-next-line no-unused-vars
-  app.use(function(err, req, res, next) {
+  app.use(function (err, req, res, next) {
     res.status(err.status).json(err)
   })
 

+ 1 - 1
storage-node/packages/colossus/lib/discovery.js

@@ -60,7 +60,7 @@ function createApp(projectRoot, runtime) {
 
   // If no other handler gets triggered (errors), respond with the
   // error serialized to JSON.
-  app.use(function(err, req, res) {
+  app.use(function (err, req, res) {
     res.status(err.status).json(err)
   })
 

+ 3 - 3
storage-node/packages/colossus/lib/middleware/file_uploads.js

@@ -21,8 +21,8 @@
 const multer = require('multer')
 
 // Taken from express-openapi examples
-module.exports = function(req, res, next) {
-  multer().any()(req, res, function(err) {
+module.exports = function (req, res, next) {
+  multer().any()(req, res, function (err) {
     if (err) {
       return next(err)
     }
@@ -34,7 +34,7 @@ module.exports = function(req, res, next) {
         }),
       {}
     )
-    Object.keys(filesMap).forEach(fieldname => {
+    Object.keys(filesMap).forEach((fieldname) => {
       const files = filesMap[fieldname]
       req.body[fieldname] = files.length > 1 ? files.map(() => '') : ''
     })

+ 2 - 2
storage-node/packages/colossus/lib/middleware/validate_responses.js

@@ -21,7 +21,7 @@
 const debug = require('debug')('joystream:middleware:validate')
 
 // Function taken directly from https://github.com/kogosoftwarellc/open-api/tree/master/packages/express-openapi
-module.exports = function(req, res, next) {
+module.exports = function (req, res, next) {
   const strictValidation = !!req.apiDoc['x-express-openapi-validation-strict']
   if (typeof res.validateResponse === 'function') {
     const send = res.send
@@ -42,7 +42,7 @@ module.exports = function(req, res, next) {
       }
       if (validation.errors) {
         const errorList = Array.from(validation.errors)
-          .map(_ => _.message)
+          .map((_) => _.message)
           .join(',')
         validationMessage = `Invalid response for status code ${res.statusCode}: ${errorList}`
         debug(validationMessage)

+ 1 - 1
storage-node/packages/colossus/lib/sync.js

@@ -30,7 +30,7 @@ async function syncCallback(api, storage) {
   const providerId = api.storageProviderId
 
   // Iterate over all sync objects, and ensure they're synced.
-  const allChecks = knownContentIds.map(async contentId => {
+  const allChecks = knownContentIds.map(async (contentId) => {
     // eslint-disable-next-line prefer-const
     let { relationship, relationshipId } = await api.assets.getStorageRelationshipAndId(providerId, contentId)
 

+ 4 - 4
storage-node/packages/colossus/paths/asset/v0/{id}.js

@@ -30,7 +30,7 @@ function errorHandler(response, err, code) {
   response.status(err.code || code || 500).send({ message: err.toString() })
 }
 
-module.exports = function(storage, runtime) {
+module.exports = function (storage, runtime) {
   const doc = {
     // parameters for all operations in this path
     parameters: [
@@ -108,7 +108,7 @@ module.exports = function(storage, runtime) {
           }
         }
 
-        stream.on('fileInfo', async info => {
+        stream.on('fileInfo', async (info) => {
           try {
             debug('Detected file info:', info)
 
@@ -142,7 +142,7 @@ module.exports = function(storage, runtime) {
           }
         })
 
-        stream.on('committed', async hash => {
+        stream.on('committed', async (hash) => {
           console.log('commited', dataObject)
           try {
             if (hash !== dataObject.ipfs_content_id.toString()) {
@@ -170,7 +170,7 @@ module.exports = function(storage, runtime) {
           }
         })
 
-        stream.on('error', err => errorHandler(res, err))
+        stream.on('error', (err) => errorHandler(res, err))
         req.pipe(stream)
       } catch (err) {
         errorHandler(res, err)

+ 1 - 1
storage-node/packages/colossus/paths/discover/v0/{id}.js

@@ -4,7 +4,7 @@ const debug = require('debug')('joystream:colossus:api:discovery')
 const MAX_CACHE_AGE = 30 * 60 * 1000
 const USE_CACHE = true
 
-module.exports = function(runtime) {
+module.exports = function (runtime) {
   const doc = {
     // parameters for all operations in this path
     parameters: [

+ 1 - 1
storage-node/packages/discovery/publish.js

@@ -42,7 +42,7 @@ function encodeServiceInfo(info) {
  */
 async function publish(serviceInfo) {
   const keys = await ipfs.key.list()
-  let servicesKey = keys.find(key => key.name === PUBLISH_KEY)
+  let servicesKey = keys.find((key) => key.name === PUBLISH_KEY)
 
   // An ipfs node will always have the self key.
   // If the publish key is specified as anything else and it doesn't exist

+ 7 - 7
storage-node/packages/helios/bin/cli.js

@@ -19,7 +19,7 @@ async function main() {
   console.log(`Found ${storageProviders.length} staked providers`)
 
   const storageProviderAccountInfos = await Promise.all(
-    storageProviders.map(async providerId => {
+    storageProviders.map(async (providerId) => {
       return {
         providerId,
         info: await runtime.discovery.getAccountInfo(providerId),
@@ -49,7 +49,7 @@ async function main() {
 
   console.log(
     '\n== Down Providers!\n',
-    downProviders.map(provider => {
+    downProviders.map((provider) => {
       return {
         providerId: provider.providerId,
       }
@@ -80,7 +80,7 @@ async function main() {
 
   console.log('\nChecking API Endpoints are online')
   await Promise.all(
-    endpoints.map(async provider => {
+    endpoints.map(async (provider) => {
       if (!provider.endpoint) {
         console.log('skipping', provider.address)
         return
@@ -103,7 +103,7 @@ async function main() {
 
   // Check which providers are reporting a ready relationship for each asset
   await Promise.all(
-    knownContentIds.map(async contentId => {
+    knownContentIds.map(async (contentId) => {
       const [relationshipsCount, judgement] = await assetRelationshipState(api, contentId, storageProviders)
       console.log(
         `${encodeAddress(contentId)} replication ${relationshipsCount}/${storageProviders.length} - ${judgement}`
@@ -180,7 +180,7 @@ async function assetRelationshipState(api, contentId, providers) {
 
   // how many relationships associated with active providers and in ready state
   const activeRelationships = await Promise.all(
-    relationshipIds.map(async id => {
+    relationshipIds.map(async (id) => {
       let relationship = await api.query.dataObjectStorageRegistry.relationships(id)
       relationship = relationship.unwrap()
       // only interested in ready relationships
@@ -188,11 +188,11 @@ async function assetRelationshipState(api, contentId, providers) {
         return undefined
       }
       // Does the relationship belong to an active provider ?
-      return providers.find(provider => relationship.storage_provider.eq(provider))
+      return providers.find((provider) => relationship.storage_provider.eq(provider))
     })
   )
 
-  return [activeRelationships.filter(active => active).length, dataObject.unwrap().liaison_judgement]
+  return [activeRelationships.filter((active) => active).length, dataObject.unwrap().liaison_judgement]
 }
 
 main()

+ 2 - 2
storage-node/packages/runtime-api/assets.js

@@ -134,8 +134,8 @@ class AssetsApi {
     // eslint-disable-next-line  no-async-promise-executor
     return new Promise(async (resolve, reject) => {
       try {
-        await this.createStorageRelationship(providerAccountId, storageProviderId, contentId, events => {
-          events.forEach(event => {
+        await this.createStorageRelationship(providerAccountId, storageProviderId, contentId, (events) => {
+          events.forEach((event) => {
             resolve(event[1].DataObjectStorageRelationshipId)
           })
         })

+ 8 - 8
storage-node/packages/runtime-api/index.js

@@ -96,7 +96,7 @@ class RuntimeApi {
   static matchingEvents(subscribed, events) {
     debug(`Number of events: ${events.length} subscribed to ${subscribed}`)
 
-    const filtered = events.filter(record => {
+    const filtered = events.filter((record) => {
       const { event, phase } = record
 
       // Show what we are busy with
@@ -104,14 +104,14 @@ class RuntimeApi {
       debug(`\t\t${event.meta.documentation.toString()}`)
 
       // Skip events we're not interested in.
-      const matching = subscribed.filter(value => {
+      const matching = subscribed.filter((value) => {
         return event.section === value[0] && event.method === value[1]
       })
       return matching.length > 0
     })
     debug(`Filtered: ${filtered.length}`)
 
-    const mapped = filtered.map(record => {
+    const mapped = filtered.map((record) => {
       const { event } = record
       const types = event.typeDef
 
@@ -138,8 +138,8 @@ class RuntimeApi {
    * Returns the first matched event *only*.
    */
   async waitForEvents(subscribed) {
-    return new Promise(resolve => {
-      this.api.query.system.events(events => {
+    return new Promise((resolve) => {
+      this.api.query.system.events((events) => {
         const matches = RuntimeApi.matchingEvents(subscribed, events)
         if (matches && matches.length) {
           resolve(matches)
@@ -243,7 +243,7 @@ class RuntimeApi {
             isInvalid
             */
           })
-          .catch(err => {
+          .catch((err) => {
             // 1014 error: Most likely you are sending transaction with the same nonce,
             // so it assumes you want to replace existing one, but the priority is too low to replace it (priority = fee = len(encoded_transaction) currently)
             // Remember this can also happen if in the past we sent a tx with a future nonce, and the current nonce
@@ -290,8 +290,8 @@ class RuntimeApi {
     // eslint-disable-next-line  no-async-promise-executor
     return new Promise(async (resolve, reject) => {
       try {
-        await this.signAndSend(senderAccountId, tx, 1, subscribed, events => {
-          events.forEach(event => {
+        await this.signAndSend(senderAccountId, tx, 1, subscribed, (events) => {
+          events.forEach((event) => {
             // fix - we may not necessarily want the first event
             // if there are multiple events emitted,
             resolve(event[1][eventProperty])

+ 9 - 9
storage-node/packages/storage/storage.js

@@ -39,7 +39,7 @@ const _ = require('lodash')
 const DEFAULT_TIMEOUT = 30 * 1000
 
 // Default/dummy resolution implementation.
-const DEFAULT_RESOLVE_CONTENT_ID = async original => {
+const DEFAULT_RESOLVE_CONTENT_ID = async (original) => {
   debug('Warning: Default resolution returns original CID', original)
   return original
 }
@@ -127,11 +127,11 @@ class StorageWriteStream extends Transform {
       const read = fs.createReadStream(this.temp.path)
       fileType
         .stream(read)
-        .then(stream => {
+        .then((stream) => {
           this.fileInfo = fixFileInfoOnStream(stream).fileInfo
           this.emit('fileInfo', this.fileInfo)
         })
-        .catch(err => {
+        .catch((err) => {
           debug('Error trying to detect file type at end-of-stream:', err)
         })
     }
@@ -151,13 +151,13 @@ class StorageWriteStream extends Transform {
     debug('Committing temporary stream: ', this.temp.path)
     this.storage.ipfs
       .addFromFs(this.temp.path)
-      .then(async result => {
+      .then(async (result) => {
         const hash = result[0].hash
         debug('Stream committed as', hash)
         this.emit('committed', hash)
         await this.storage.ipfs.pin.add(hash)
       })
-      .catch(err => {
+      .catch((err) => {
         debug('Error committing stream', err)
         this.emit('error', err)
       })
@@ -324,7 +324,7 @@ class Storage {
     // content ID (of its own).
     // We need to instead return a stream immediately, that we eventually
     // decorate with the content ID when that's available.
-    return new Promise(resolve => {
+    return new Promise((resolve) => {
       const stream = new StorageWriteStream(this)
       resolve(stream)
     })
@@ -336,7 +336,7 @@ class Storage {
     let found = false
     return await this.withSpecifiedTimeout(timeout, (resolve, reject) => {
       const ls = this.ipfs.getReadableStream(resolved)
-      ls.on('data', async result => {
+      ls.on('data', async (result) => {
         if (result.path === resolved) {
           found = true
 
@@ -344,7 +344,7 @@ class Storage {
           resolve(fixFileInfoOnStream(ftStream))
         }
       })
-      ls.on('error', err => {
+      ls.on('error', (err) => {
         ls.end()
         debug(err)
         reject(err)
@@ -375,7 +375,7 @@ class Storage {
     debug(`Pinning ${resolved}`)
 
     // This call blocks until file is retrieved..
-    this.ipfs.pin.add(resolved, { quiet: true, pin: true }, err => {
+    this.ipfs.pin.add(resolved, { quiet: true, pin: true }, (err) => {
       if (err) {
         debug(`Error Pinning: ${resolved}`)
         delete this.pins[resolved]

+ 21 - 21
storage-node/packages/storage/test/storage.js

@@ -33,7 +33,7 @@ const IPFS_CID_REGEX = /^Qm[1-9A-HJ-NP-Za-km-z]{44}$/
 function write(store, contentId, contents, callback) {
   store
     .open(contentId, 'w')
-    .then(stream => {
+    .then((stream) => {
       stream.on('finish', () => {
         stream.commit()
       })
@@ -45,7 +45,7 @@ function write(store, contentId, contents, callback) {
         process.nextTick(() => stream.end())
       }
     })
-    .catch(err => {
+    .catch((err) => {
       expect.fail(err)
     })
 }
@@ -53,9 +53,9 @@ function write(store, contentId, contents, callback) {
 function readAll(stream) {
   return new Promise((resolve, reject) => {
     const chunks = []
-    stream.on('data', chunk => chunks.push(chunk))
+    stream.on('data', (chunk) => chunks.push(chunk))
     stream.on('end', () => resolve(Buffer.concat(chunks)))
-    stream.on('error', err => reject(err))
+    stream.on('error', (err) => reject(err))
     stream.resume()
   })
 }
@@ -68,7 +68,7 @@ function createKnownObject(contentId, contents, callback) {
     },
   })
 
-  write(store, contentId, contents, theHash => {
+  write(store, contentId, contents, (theHash) => {
     hash = theHash
 
     callback(store, hash)
@@ -82,8 +82,8 @@ describe('storage/storage', () => {
   })
 
   describe('open()', () => {
-    it('can write a stream', done => {
-      write(storage, 'foobar', 'test-content', hash => {
+    it('can write a stream', (done) => {
+      write(storage, 'foobar', 'test-content', (hash) => {
         expect(hash).to.not.be.undefined
         expect(hash).to.match(IPFS_CID_REGEX)
         done()
@@ -124,28 +124,28 @@ describe('storage/storage', () => {
     // 		})
     // })
 
-    it('can read a stream', done => {
+    it('can read a stream', (done) => {
       const contents = 'test-for-reading'
-      createKnownObject('foobar', contents, store => {
+      createKnownObject('foobar', contents, (store) => {
         store
           .open('foobar', 'r')
-          .then(async stream => {
+          .then(async (stream) => {
             const data = await readAll(stream)
             expect(Buffer.compare(data, Buffer.from(contents))).to.equal(0)
             done()
           })
-          .catch(err => {
+          .catch((err) => {
             expect.fail(err)
           })
       })
     })
 
-    it('detects the MIME type of a read stream', done => {
+    it('detects the MIME type of a read stream', (done) => {
       const contents = fs.readFileSync('../../storage-node_new.svg')
-      createKnownObject('foobar', contents, store => {
+      createKnownObject('foobar', contents, (store) => {
         store
           .open('foobar', 'r')
-          .then(async stream => {
+          .then(async (stream) => {
             const data = await readAll(stream)
             expect(contents.length).to.equal(data.length)
             expect(Buffer.compare(data, contents)).to.equal(0)
@@ -156,18 +156,18 @@ describe('storage/storage', () => {
             expect(stream.fileInfo).to.have.property('ext', 'xml')
             done()
           })
-          .catch(err => {
+          .catch((err) => {
             expect.fail(err)
           })
       })
     })
 
-    it('provides default MIME type for read streams', done => {
+    it('provides default MIME type for read streams', (done) => {
       const contents = 'test-for-reading'
-      createKnownObject('foobar', contents, store => {
+      createKnownObject('foobar', contents, (store) => {
         store
           .open('foobar', 'r')
-          .then(async stream => {
+          .then(async (stream) => {
             const data = await readAll(stream)
             expect(Buffer.compare(data, Buffer.from(contents))).to.equal(0)
 
@@ -175,7 +175,7 @@ describe('storage/storage', () => {
             expect(stream.fileInfo).to.have.property('ext', 'bin')
             done()
           })
-          .catch(err => {
+          .catch((err) => {
             expect.fail(err)
           })
       })
@@ -192,7 +192,7 @@ describe('storage/storage', () => {
       expect(storage.stat(hash)).to.eventually.be.rejectedWith('timed out')
     })
 
-    it('returns stats for a known object', done => {
+    it('returns stats for a known object', (done) => {
       const content = 'stat-test'
       const expectedSize = content.length
       createKnownObject('foobar', content, (store, hash) => {
@@ -212,7 +212,7 @@ describe('storage/storage', () => {
       expect(storage.size(hash)).to.eventually.be.rejectedWith('timed out')
     })
 
-    it('returns the size of a known object', done => {
+    it('returns the size of a known object', (done) => {
       createKnownObject('foobar', 'stat-test', (store, hash) => {
         expect(store.size(hash)).to.eventually.equal(15)
         done()

+ 2 - 2
storage-node/packages/util/fs/walk.js

@@ -83,7 +83,7 @@ class Walker {
       this.pending += files.length
       debug('Reading', dir, 'bumps pending to', this.pending)
 
-      files.forEach(name => {
+      files.forEach((name) => {
         const fname = path.resolve(dir, name)
         this.archive.lstat(fname, (err2, lstat) => {
           if (err2) {
@@ -126,7 +126,7 @@ class Walker {
  *
  * The callback is invoked one last time without data to signal the end of data.
  */
-module.exports = function(base, archive, cb) {
+module.exports = function (base, archive, cb) {
   // Archive is optional and defaults to fs, but cb is not.
   if (!cb) {
     cb = archive

+ 1 - 1
storage-node/packages/util/lru.js

@@ -104,7 +104,7 @@ class LRUCache {
       ++idx
     }
 
-    toPrune.forEach(key => {
+    toPrune.forEach((key) => {
       this.store.delete(key)
       this.access.delete(key)
     })

+ 3 - 3
storage-node/packages/util/ranges.js

@@ -37,7 +37,7 @@ function parseRange(range) {
     throw new Error(`Not a valid range: ${range}`)
   }
 
-  const vals = matches[1].split('-').map(v => {
+  const vals = matches[1].split('-').map((v) => {
     return v === '*' || v === '' ? undefined : parseInt(v, 10)
   })
 
@@ -68,12 +68,12 @@ function parse(rangeStr) {
 
   // Parse individual ranges
   const ranges = []
-  res.rangeStr.split(',').forEach(range => {
+  res.rangeStr.split(',').forEach((range) => {
     ranges.push(parseRange(range))
   })
 
   // Merge ranges into result.
-  ranges.forEach(newRange => {
+  ranges.forEach((newRange) => {
     debug('Found range:', newRange)
 
     let isMerged = false

+ 10 - 10
storage-node/packages/util/test/fs/resolve.js

@@ -24,45 +24,45 @@ const path = require('path')
 const resolve = require('@joystream/storage-utils/fs/resolve')
 
 function tests(base) {
-  it('resolves absolute paths relative to the base', function() {
+  it('resolves absolute paths relative to the base', function () {
     const resolved = resolve(base, '/foo')
     const relative = path.relative(base, resolved)
     expect(relative).to.equal('foo')
   })
 
-  it('allows for relative paths that stay in the base', function() {
+  it('allows for relative paths that stay in the base', function () {
     const resolved = resolve(base, 'foo/../bar')
     const relative = path.relative(base, resolved)
     expect(relative).to.equal('bar')
   })
 
-  it('prevents relative paths from breaking out of the base', function() {
+  it('prevents relative paths from breaking out of the base', function () {
     expect(() => resolve(base, '../foo')).to.throw()
   })
 
-  it('prevents long relative paths from breaking out of the base', function() {
+  it('prevents long relative paths from breaking out of the base', function () {
     expect(() => resolve(base, '../../../foo')).to.throw()
   })
 
-  it('prevents sneaky relative paths from breaking out of the base', function() {
+  it('prevents sneaky relative paths from breaking out of the base', function () {
     expect(() => resolve(base, 'foo/../../../bar')).to.throw()
   })
 }
 
-describe('util/fs/resolve', function() {
-  describe('slash base', function() {
+describe('util/fs/resolve', function () {
+  describe('slash base', function () {
     tests('/')
   })
 
-  describe('empty base', function() {
+  describe('empty base', function () {
     tests('')
   })
 
-  describe('short base', function() {
+  describe('short base', function () {
     tests('/base')
   })
 
-  describe('long base', function() {
+  describe('long base', function () {
     tests('/this/base/is/very/long/indeed')
   })
 })

+ 2 - 2
storage-node/packages/util/test/fs/walk.js

@@ -60,8 +60,8 @@ function walktest(archive, base, done) {
   })
 }
 
-describe('util/fs/walk', function() {
-  it('reports all files in a file system hierarchy', function(done) {
+describe('util/fs/walk', function () {
+  it('reports all files in a file system hierarchy', function (done) {
     walktest(fs, path.resolve(__dirname, '../data'), done)
   })
 })

+ 11 - 11
storage-node/packages/util/test/lru.js

@@ -24,14 +24,14 @@ const lru = require('@joystream/storage-utils/lru')
 
 const DEFAULT_SLEEP = 1
 function sleep(ms = DEFAULT_SLEEP) {
-  return new Promise(resolve => {
+  return new Promise((resolve) => {
     setTimeout(resolve, ms)
   })
 }
 
-describe('util/lru', function() {
-  describe('simple usage', function() {
-    it('does not contain keys that were not added', function() {
+describe('util/lru', function () {
+  describe('simple usage', function () {
+    it('does not contain keys that were not added', function () {
       const cache = new lru.LRUCache()
       expect(cache.size()).to.equal(0)
 
@@ -41,7 +41,7 @@ describe('util/lru', function() {
       expect(cache.has('something')).to.be.false
     })
 
-    it('contains keys that were added', function() {
+    it('contains keys that were added', function () {
       const cache = new lru.LRUCache()
       cache.put('something', 'yay!')
       expect(cache.size()).to.equal(1)
@@ -52,7 +52,7 @@ describe('util/lru', function() {
       expect(cache.has('something')).to.be.true
     })
 
-    it('does not contain keys that were deleted', function() {
+    it('does not contain keys that were deleted', function () {
       const cache = new lru.LRUCache()
       cache.put('something', 'yay!')
       expect(cache.size()).to.equal(1)
@@ -67,7 +67,7 @@ describe('util/lru', function() {
       expect(cache.has('something')).to.be.false
     })
 
-    it('can be cleared', function() {
+    it('can be cleared', function () {
       const cache = new lru.LRUCache()
       cache.put('something', 'yay!')
       expect(cache.size()).to.equal(1)
@@ -77,8 +77,8 @@ describe('util/lru', function() {
     })
   })
 
-  describe('capacity management', function() {
-    it('does not grow beyond capacity', async function() {
+  describe('capacity management', function () {
+    it('does not grow beyond capacity', async function () {
       const cache = new lru.LRUCache(2) // Small capacity
       expect(cache.size()).to.equal(0)
 
@@ -96,7 +96,7 @@ describe('util/lru', function() {
       expect(cache.size()).to.equal(2) // Capacity exceeded
     })
 
-    it('removes the oldest key when pruning', async function() {
+    it('removes the oldest key when pruning', async function () {
       const cache = new lru.LRUCache(2) // Small capacity
       expect(cache.size()).to.equal(0)
 
@@ -119,7 +119,7 @@ describe('util/lru', function() {
       expect(cache.has('baz')).to.be.true
     })
 
-    it('updates LRU timestamp when reading', async function() {
+    it('updates LRU timestamp when reading', async function () {
       const cache = new lru.LRUCache(2) // Small capacity
       expect(cache.size()).to.equal(0)
 

+ 8 - 12
storage-node/packages/util/test/pagination.js

@@ -23,9 +23,9 @@ const mockHttp = require('node-mocks-http')
 
 const pagination = require('@joystream/storage-utils/pagination')
 
-describe('util/pagination', function() {
-  describe('openapi()', function() {
-    it('should add parameters and definitions to an API spec', function() {
+describe('util/pagination', function () {
+  describe('openapi()', function () {
+    it('should add parameters and definitions to an API spec', function () {
       const api = pagination.openapi({})
 
       // Parameters
@@ -62,8 +62,8 @@ describe('util/pagination', function() {
     })
   })
 
-  describe('paginate()', function() {
-    it('should add pagination links to a response object', function() {
+  describe('paginate()', function () {
+    it('should add pagination links to a response object', function () {
       const req = mockHttp.createRequest({
         method: 'GET',
         url: '/foo?limit=10',
@@ -78,16 +78,14 @@ describe('util/pagination', function() {
 
       const res = pagination.paginate(req, {})
 
-      expect(res)
-        .to.have.property('pagination')
-        .that.has.all.keys('self', 'first', 'next')
+      expect(res).to.have.property('pagination').that.has.all.keys('self', 'first', 'next')
 
       expect(res.pagination.self).to.equal('http://localhost/foo?limit=10')
       expect(res.pagination.first).to.equal('http://localhost/foo?limit=10&offset=0')
       expect(res.pagination.next).to.equal('http://localhost/foo?limit=10&offset=10')
     })
 
-    it('should add a last pagination link when requested', function() {
+    it('should add a last pagination link when requested', function () {
       const req = mockHttp.createRequest({
         method: 'GET',
         url: '/foo?limit=10&offset=15',
@@ -103,9 +101,7 @@ describe('util/pagination', function() {
 
       const res = pagination.paginate(req, {}, 35)
 
-      expect(res)
-        .to.have.property('pagination')
-        .that.has.all.keys('self', 'first', 'next', 'prev', 'last')
+      expect(res).to.have.property('pagination').that.has.all.keys('self', 'first', 'next', 'prev', 'last')
 
       expect(res.pagination.self).to.equal('http://localhost/foo?limit=10&offset=15')
       expect(res.pagination.first).to.equal('http://localhost/foo?limit=10&offset=0')

+ 25 - 25
storage-node/packages/util/test/ranges.js

@@ -24,9 +24,9 @@ const streamBuffers = require('stream-buffers')
 
 const ranges = require('@joystream/storage-utils/ranges')
 
-describe('util/ranges', function() {
-  describe('parse()', function() {
-    it('should parse a full range', function() {
+describe('util/ranges', function () {
+  describe('parse()', function () {
+    it('should parse a full range', function () {
       // Range with unit
       let range = ranges.parse('bytes=0-100')
       expect(range.unit).to.equal('bytes')
@@ -50,14 +50,14 @@ describe('util/ranges', function() {
       expect(range.ranges[0][1]).to.equal(100)
     })
 
-    it('should error out on malformed strings', function() {
+    it('should error out on malformed strings', function () {
       expect(() => ranges.parse('foo')).to.throw()
       expect(() => ranges.parse('foo=bar')).to.throw()
       expect(() => ranges.parse('foo=100')).to.throw()
       expect(() => ranges.parse('foo=100-0')).to.throw()
     })
 
-    it('should parse a range without end', function() {
+    it('should parse a range without end', function () {
       const range = ranges.parse('0-')
       expect(range.unit).to.equal('bytes')
       expect(range.rangeStr).to.equal('0-')
@@ -65,7 +65,7 @@ describe('util/ranges', function() {
       expect(range.ranges[0][1]).to.be.undefined
     })
 
-    it('should parse a range without start', function() {
+    it('should parse a range without start', function () {
       const range = ranges.parse('-100')
       expect(range.unit).to.equal('bytes')
       expect(range.rangeStr).to.equal('-100')
@@ -73,7 +73,7 @@ describe('util/ranges', function() {
       expect(range.ranges[0][1]).to.equal(100)
     })
 
-    it('should parse multiple ranges', function() {
+    it('should parse multiple ranges', function () {
       const range = ranges.parse('0-10,30-40,60-80')
       expect(range.unit).to.equal('bytes')
       expect(range.rangeStr).to.equal('0-10,30-40,60-80')
@@ -85,7 +85,7 @@ describe('util/ranges', function() {
       expect(range.ranges[2][1]).to.equal(80)
     })
 
-    it('should merge overlapping ranges', function() {
+    it('should merge overlapping ranges', function () {
       // Two overlapping ranges
       let range = ranges.parse('0-20,10-30')
       expect(range.unit).to.equal('bytes')
@@ -119,7 +119,7 @@ describe('util/ranges', function() {
       expect(range.ranges[0][1]).to.equal(20)
     })
 
-    it('should sort ranges', function() {
+    it('should sort ranges', function () {
       const range = ranges.parse('10-30,0-5')
       expect(range.unit).to.equal('bytes')
       expect(range.rangeStr).to.equal('10-30,0-5')
@@ -131,8 +131,8 @@ describe('util/ranges', function() {
     })
   })
 
-  describe('send()', function() {
-    it('should send full files on request', function(done) {
+  describe('send()', function () {
+    it('should send full files on request', function (done) {
       const res = mockHttp.createResponse({})
       const inStream = new streamBuffers.ReadableStreamBuffer({})
 
@@ -141,7 +141,7 @@ describe('util/ranges', function() {
         name: 'test.file',
         type: 'application/test',
       }
-      ranges.send(res, inStream, opts, function(err) {
+      ranges.send(res, inStream, opts, function (err) {
         expect(err).to.not.exist
 
         // HTTP handling
@@ -163,7 +163,7 @@ describe('util/ranges', function() {
       inStream.stop()
     })
 
-    it('should send a range spanning the entire file on request', function(done) {
+    it('should send a range spanning the entire file on request', function (done) {
       const res = mockHttp.createResponse({})
       const inStream = new streamBuffers.ReadableStreamBuffer({})
 
@@ -175,7 +175,7 @@ describe('util/ranges', function() {
           ranges: [[0, 12]],
         },
       }
-      ranges.send(res, inStream, opts, function(err) {
+      ranges.send(res, inStream, opts, function (err) {
         expect(err).to.not.exist
 
         // HTTP handling
@@ -199,7 +199,7 @@ describe('util/ranges', function() {
       inStream.stop()
     })
 
-    it('should send a small range on request', function(done) {
+    it('should send a small range on request', function (done) {
       const res = mockHttp.createResponse({})
       const inStream = new streamBuffers.ReadableStreamBuffer({})
 
@@ -211,7 +211,7 @@ describe('util/ranges', function() {
           ranges: [[1, 11]], // Cut off first and last letter
         },
       }
-      ranges.send(res, inStream, opts, function(err) {
+      ranges.send(res, inStream, opts, function (err) {
         expect(err).to.not.exist
 
         // HTTP handling
@@ -235,7 +235,7 @@ describe('util/ranges', function() {
       inStream.stop()
     })
 
-    it('should send ranges crossing buffer boundaries', function(done) {
+    it('should send ranges crossing buffer boundaries', function (done) {
       const res = mockHttp.createResponse({})
       const inStream = new streamBuffers.ReadableStreamBuffer({
         chunkSize: 3, // Setting a chunk size smaller than the range should
@@ -250,7 +250,7 @@ describe('util/ranges', function() {
           ranges: [[1, 11]], // Cut off first and last letter
         },
       }
-      ranges.send(res, inStream, opts, function(err) {
+      ranges.send(res, inStream, opts, function (err) {
         expect(err).to.not.exist
 
         // HTTP handling
@@ -274,7 +274,7 @@ describe('util/ranges', function() {
       inStream.stop()
     })
 
-    it('should send multiple ranges', function(done) {
+    it('should send multiple ranges', function (done) {
       const res = mockHttp.createResponse({})
       const inStream = new streamBuffers.ReadableStreamBuffer({})
 
@@ -289,12 +289,12 @@ describe('util/ranges', function() {
           ], // Slice two ranges out
         },
       }
-      ranges.send(res, inStream, opts, function(err) {
+      ranges.send(res, inStream, opts, function (err) {
         expect(err).to.not.exist
 
         // HTTP handling
         expect(res.statusCode).to.equal(206)
-        expect(res.getHeader('content-type')).to.satisfy(str => str.startsWith('multipart/byteranges'))
+        expect(res.getHeader('content-type')).to.satisfy((str) => str.startsWith('multipart/byteranges'))
         expect(res.getHeader('content-disposition')).to.equal('inline')
 
         // Data/stream handling
@@ -320,7 +320,7 @@ describe('util/ranges', function() {
       inStream.stop()
     })
 
-    it('should deal with ranges without end', function(done) {
+    it('should deal with ranges without end', function (done) {
       const res = mockHttp.createResponse({})
       const inStream = new streamBuffers.ReadableStreamBuffer({})
 
@@ -332,7 +332,7 @@ describe('util/ranges', function() {
           ranges: [[5, undefined]], // Skip the first part, but read until end
         },
       }
-      ranges.send(res, inStream, opts, function(err) {
+      ranges.send(res, inStream, opts, function (err) {
         expect(err).to.not.exist
 
         // HTTP handling
@@ -355,7 +355,7 @@ describe('util/ranges', function() {
       inStream.stop()
     })
 
-    it('should ignore ranges without start', function(done) {
+    it('should ignore ranges without start', function (done) {
       const res = mockHttp.createResponse({})
       const inStream = new streamBuffers.ReadableStreamBuffer({})
 
@@ -367,7 +367,7 @@ describe('util/ranges', function() {
           ranges: [[undefined, 5]], // Only last five
         },
       }
-      ranges.send(res, inStream, opts, function(err) {
+      ranges.send(res, inStream, opts, function (err) {
         expect(err).to.not.exist
 
         // HTTP handling

+ 3 - 3
storage-node/packages/util/test/stripEndingSlash.js

@@ -3,11 +3,11 @@
 const expect = require('chai').expect
 const stripEndingSlash = require('@joystream/storage-utils/stripEndingSlash')
 
-describe('stripEndingSlash', function() {
-  it('stripEndingSlash should keep URL without the slash', function() {
+describe('stripEndingSlash', function () {
+  it('stripEndingSlash should keep URL without the slash', function () {
     expect(stripEndingSlash('http://keep.one')).to.equal('http://keep.one')
   })
-  it('stripEndingSlash should remove ending slash', function() {
+  it('stripEndingSlash should remove ending slash', function () {
     expect(stripEndingSlash('http://strip.one/')).to.equal('http://strip.one')
   })
 })