cli.js
#!/usr/bin/env node
const { ContentId } = require('@joystream/types/storage')
const { ApiPromise, WsProvider } = require('@polkadot/api')
const { types } = require('@joystream/types')
const fsPromises = require('fs/promises')
const fs = require('fs')
const Path = require('path')
const {
  getActiveWorkersIds,
  getWorkerEndpoint,
  generateListOfDataObjectsToDownload,
  makeAssetUrl,
  downloadFile,
  clearFolder,
  computeMedian
} = require('./utils')
const { program } = require('commander')
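// Downloaded files are written to TEMP_FOLDER and wiped between workers; the
// default RPC endpoint below can be overridden with the PROVIDER_URL env variable.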
const TEMP_FOLDER = Path.resolve(__dirname, '../', 'temp')
const PROVIDER_URL = 'wss://rome-rpc-endpoint.joystream.org:9944'
const NR_DEFAULT_SMALL_ASSETS = 9
const NR_BIG_ASSETS = 1
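/**
 * Entry point: parses the CLI options, connects to the Joystream node, builds the
 * list of data objects to benchmark (either from --asset-file or by sampling the
 * chain), then runs the download test against every selected storage worker.
 */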
async function main() {
  program
    .option('-w, --workers <workerId>', 'comma-separated list of worker ids to test. Ex: 4,5,6')
    .option('-f, --asset-file [path]', 'path to a file of asset ids, so the same files are downloaded from every provider')
    .option('-s, --nr-small-assets [number]', 'the number of small files to download', NR_DEFAULT_SMALL_ASSETS)
    .option('-b, --nr-big-assets [number]', 'the number of big files to download', NR_BIG_ASSETS)
    .parse()

  const provider = new WsProvider(process.env.PROVIDER_URL || PROVIDER_URL)
  const api = await ApiPromise.create({ provider, types })
  await api.isReady

  const args = program.opts()
  let dataObjectsIds = []
  const assetsFilePath = args.assetFile
  if (assetsFilePath) {
    // Read the list of content ids from the provided file, one id per line.
    try {
      await fsPromises.access(assetsFilePath, fs.constants.R_OK)
    } catch {
      console.error('Unable to read ' + assetsFilePath)
      process.exit(1)
    }
    const data = await fsPromises.readFile(assetsFilePath, 'utf8')
    dataObjectsIds = data
      .split('\n')
      .filter((line) => line)
      .map((line) => ContentId.decode(api.registry, line))
  } else {
    // No asset file given: sample data objects from the chain and save their ids
    // to assets.txt so the same set can be reused in later runs.
    dataObjectsIds = await generateListOfDataObjectsToDownload(api, args.nrSmallAssets, args.nrBigAssets)
    const writeStream = fs.createWriteStream(Path.join(__dirname, '..', 'assets.txt'))
    for (const id of dataObjectsIds) {
      writeStream.write(id.encode() + '\n')
    }
    writeStream.close()
  }

  const dataObjects = await Promise.all(
    dataObjectsIds.map(async (id) => {
      const dataObject = await api.query.dataDirectory.dataByContentId(id)
      dataObject.contentId = id
      return dataObject
    })
  )

  let workerIds = args.workers?.split(',')
  if (!workerIds) {
    workerIds = await getActiveWorkersIds(api)
  }

  let success = true
  try {
    for (const workerId of workerIds) {
      try {
        await testWorker(api, workerId, dataObjects)
      } catch (e) {
        console.error(e)
      }
      // Remove the downloaded files before benchmarking the next worker.
      await clearFolder(TEMP_FOLDER)
    }
  } catch {
    success = false
  } finally {
    await api.disconnect()
  }

  if (!success) {
    process.exit(2)
  }
}
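/**
 * Downloads every data object from the given worker's storage endpoint in
 * parallel and prints a single JSON line with the timing statistics
 * (average, median, min/max, total time and average throughput).
 */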
async function testWorker(api, workerId, dataObjects) {
  const endpoint = await getWorkerEndpoint(api, workerId)
  if (!endpoint) {
    throw new Error(`Worker ${workerId} doesn't have an endpoint defined`)
  }

  // Start all downloads in parallel and time the whole batch.
  const promises = []
  const startRequests = process.hrtime.bigint()
  for (const dataObject of dataObjects) {
    const url = makeAssetUrl(dataObject.contentId, endpoint)
    promises.push(downloadFile(url, TEMP_FOLDER, dataObject.contentId.toString()))
  }

  try {
    const times = await Promise.all(promises)
    const endRequests = process.hrtime.bigint()
    // hrtime is in nanoseconds; convert the elapsed time to milliseconds.
    const totalTime = Number((endRequests - startRequests) / BigInt(1000000))
    const totalSizeInMegas =
      dataObjects.reduce((accumulator, dataObject) => accumulator + Number(dataObject.size_in_bytes), 0) / 1024 / 1024
    const average = times.reduce((accumulator, time) => accumulator + time, 0) / times.length
    console.log(
      JSON.stringify({
        averageMs: Number(average.toFixed(2)),
        medianMs: Number(computeMedian(times).toFixed(2)),
        maxMs: Number(Math.max(...times).toFixed(2)),
        minMs: Number(Math.min(...times).toFixed(2)),
        totalTimeMs: totalTime,
        // totalTime is in milliseconds, so divide by 1000 to get MB per second.
        averageSpeedMBpS: Number((totalSizeInMegas / (totalTime / 1000)).toFixed(3)),
        nrFilesDownloaded: times.length,
        workerId: workerId,
      })
    )
  } catch (e) {
    throw new Error(`Failed to download files from worker ${workerId}: ${e.message}`)
  }
}
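// For reference, a minimal sketch of what the computeMedian helper is assumed to do
// (the actual implementation lives in ./utils and may differ): sort a copy of the
// samples and return the middle value, or the mean of the two middle values.
// const computeMedian = (values) => {
//   const sorted = [...values].sort((a, b) => a - b)
//   const mid = Math.floor(sorted.length / 2)
//   return sorted.length % 2 ? sorted[mid] : (sorted[mid - 1] + sorted[mid]) / 2
// }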
main().catch((error) => {
  console.error(error)
  process.exit(1)
})
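// Example invocations (the worker ids, node URL and asset file path are placeholders):
//   node cli.js --workers 4,5 --nr-small-assets 5 --nr-big-assets 1
//   PROVIDER_URL=wss://your-node:9944 node cli.js --asset-file ../assets.txt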