Browse Source

Merge pull request #679 from traumschule/test-providers

Test content providers script
l1.media 1 year ago
parent
commit
7a55321626

+ 17 - 0
scripts/test-providers/README.md

@@ -0,0 +1,17 @@
+# Simple Storage and Distribution Provider Assets Test
+
+Results are posted to `resultsUrl` (defined in `channels.js`), an endpoint served by jsstats.
+
+See results: https://joystreamstats.live/storage
+
+## Setup
+
+```
+cd community-repo/scripts/test-providers
+yarn
+```
+
+## Usage
+
+- `yarn test` - run rapid test for all bags on all providers
+- `yarn logs` - count asset requests from logs (TBD)

+ 117 - 0
scripts/test-providers/channels.js

@@ -0,0 +1,117 @@
+const fs = require("fs");
+const axios = require("axios");
+const moment = require("moment");
+const {
+  ch,
+  time,
+  sleep,
+  loadResults,
+  headAsset,
+  sendResult,
+  printFailed,
+  writeTable,
+  query,
+} = require("./util");
+
// config: query-node endpoint, result sink, and local results cache file
const QN = "https://ipfs.joystreamstats.live/graphql";
const resultsUrl = "https://joystreamstats.live/api/v1/bags/status";
const resultsFile = "./bag_availability.json";
// Extract a provider's node endpoint from its on-chain metadata
// (undefined when the operator has not published one).
const getUrl = (metadata) => {
  return metadata?.nodeEndpoint;
};
/**
 * HEAD-check one data object against every storage and distribution provider
 * serving its bag.
 * @param {string} channelId - channel id (kept for interface compatibility; unused here)
 * @param {{id: string}} object - data object to probe
 * @param {(string|undefined)[]} sUrls - storage operator base URLs
 * @param {(string|undefined)[]} dUrls - distribution operator base URLs
 * @returns {Promise<[object[], object[]]>} per-provider probe results as [storage, distribution]
 */
const testProviders = async (channelId, object, sUrls, dUrls) => {
  // Skip operators without a published endpoint. FIX: previously only storage
  // URLs were filtered, so a distributor with missing metadata produced a
  // bogus "undefinedapi/v1/assets/..." request URL.
  // Both provider groups are independent, so probe them in parallel.
  const [storage, distribution] = await Promise.all([
    Promise.all(
      sUrls
        .filter((url) => url)
        .map((url) =>
          headAsset(object.id, url, url + `api/v1/files/${object.id}`)
        )
    ),
    Promise.all(
      dUrls
        .filter((url) => url)
        .map((url) =>
          headAsset(object.id, url, url + `api/v1/assets/${object.id}`)
        )
    ),
  ]);
  return [storage, distribution];
};
/**
 * Test availability of all objects in the given bags on all their providers.
 * Each result is POSTed to `resultsUrl`; full runs are also cached in
 * `resultsFile` so previously seen assets can be retried next invocation.
 * @param {object[]} bags - storage bags with objects and bucket metadata
 * @param {string[]} [objects] - when non-empty, only test these object ids
 * @param {boolean} [rapid] - when true, skip the random pause between bags
 * @returns {Promise<object[]>} collected per-object results
 */
const testBags = async (bags, objects = [], rapid = false) => {
  console.debug(
    time(),
    `Starting rapid test`,
    // FIX: `objects.length && ...` logged a bare 0 when the list was empty.
    objects.length ? `for ${objects}` : ``
  );
  const start = new Date();
  const results = [];
  // Sort a copy so the caller's array is not mutated; highest channel id first.
  const sorted = [...bags].sort((a, b) => ch(b.id) - ch(a.id));
  for (const bag of sorted) {
    if (!bag.objects) continue;
    const channelId = ch(bag.id);
    const sUrls = bag.storageBuckets.map((sb) => getUrl(sb.operatorMetadata));
    const dUrls = bag.distributionBuckets.map((db) =>
      getUrl(db.operators[0]?.metadata)
    );
    for (const object of bag.objects) {
      if (objects.length && !objects.includes(object.id)) continue;
      const [storage, distribution] = await testProviders(
        channelId,
        object,
        sUrls,
        dUrls
      );
      const sFailed = storage.filter((b) => b.status !== `success`);
      const dFailed = distribution.filter((b) => b.status !== `success`);
      const SP =
        `${storage.length - sFailed.length}/${storage.length} ` +
        (sFailed.length ? `( ` + printFailed(sFailed) + ` )` : "OK");
      const DP =
        `${distribution.length - dFailed.length}/${distribution.length} ` +
        (dFailed.length ? `( ` + printFailed(dFailed) + ` )` : "OK");
      console.log(`${time()} ${channelId} ${object.id} SP:${SP} DP:${DP}`);
      const result = { channelId, storage, distribution };
      // Fire-and-forget: sendResult logs its own errors and never rejects.
      sendResult(result, resultsUrl);
      results.push(result);
    }
    // BUG FIX: original used `Math.randon` (undefined -> NaN, i.e. no pause).
    // NOTE(review): 0-10 ms is a very short pause — seconds may have been
    // intended; confirm before changing the magnitude.
    if (!rapid) await sleep(Math.random() * 10.0);
  }
  const duration = (moment().diff(start) / 1000).toFixed(3) + `s`;
  console.log(time(), `Finished rapid test in `, duration);
  if (!objects.length) {
    fs.writeFileSync(resultsFile, JSON.stringify(results));
    console.log(time(), `Wrote results to ${resultsFile} `);
  }
  return results;
};
+
// start: fetch all bags from the query node, optionally narrow to the bag ids
// given on the command line, then run the availability test.
const bagIds = process.argv.slice(2);
if (bagIds.length) console.log(`selected bags`, bagIds);
// Results cached by a previous full run ([] on first run).
const old = loadResults(resultsFile);
console.debug(time(), `Fetching bags with buckets and objects\n`, query);
axios
  .post(QN, { query })
  .then(({ data }) => {
    const bags = data.data.storageBags;
    if (!bags.length)
      // NOTE(review): GraphQL errors usually arrive as `data.errors` — confirm
      // `data.error` is what this query node returns.
      return console.error(time(), `No bags received.`, data.error);
    console.log(time(), `Received list with ${bags.length} bags.`);
    let selected = bags;
    if (bagIds.length) {
      console.log(time(), `Selecting bags with id`, bagIds);
      // FIX: dropped a redundant `!bagIds.length ||` — this branch already
      // implies bagIds is non-empty.
      selected = bags.filter((b) => bagIds.includes(ch(b.id)));
    }
    if (old.length) {
      console.log(time(), `Testing formerly failed assets`);
      // Flatten every object id recorded in the previous run.
      const ids = old.reduce(
        (ids, bag) =>
          ids.concat(
            ...bag.storage.map((r) => r.objectId),
            ...bag.distribution.map((r) => r.objectId)
          ),
        []
      );
      console.log(ids);
      // FIX: return the promise so a rejection inside testBags reaches the
      // outer .catch instead of floating unhandled.
      return testBags(selected, ids);
    }
    return testBags(selected);
  })
  .catch((e) => console.error(e.message, e.response?.data));

+ 4 - 0
scripts/test-providers/getChannels.sh

@@ -0,0 +1,4 @@
#!/bin/bash
# Dump all channels (id, language, title, description, per-video media sizes)
# from the query node into channels.json for later size analysis.
# FIX: added the missing shebang so the script runs under bash when executed directly.
set -e
curl 'https://ipfs.joystreamstats.live/graphql' -H 'Accept-Encoding: gzip, deflate, br' -H 'Content-Type: application/json' -H 'Accept: application/json' -H 'Connection: keep-alive' -H 'DNT: 1' -H 'Origin: https://ipfs.joystreamstats.live' --data-binary '{"query":"query { channels {  \n  id  language{iso} title description videos { description media {size} id language{iso}  } \n}}"}' --compressed > channels.json
echo Wrote channels.json

+ 1 - 0
scripts/test-providers/logs.js

@@ -0,0 +1 @@
+console.log(`Not yet implemented.`)

+ 13 - 0
scripts/test-providers/package.json

@@ -0,0 +1,13 @@
+{
+  "name": "joystreams-provider-test",
+  "version": "0.1.0",
+  "license": "MIT",
+  "scripts": {
+    "test": "node channels.js",
+    "logs": "node logs.js"
+  },
+  "dependencies": {
+    "axios": "^0.26.0",
+    "moment": "^2.29.1"
+  }
+}

+ 93 - 0
scripts/test-providers/util.js

@@ -0,0 +1,93 @@
+const fs = require("fs");
+const axios = require("axios");
+const moment = require("moment");
+
// GraphQL query: every storage bag with its objects plus the endpoints of all
// storage / distribution bucket operators serving it.
const query = `query { storageBags(limit:10000){ id objects{id} storageBuckets {id operatorMetadata{nodeEndpoint}} distributionBuckets{id operators{metadata{nodeEndpoint}}} }}`;

// Bytes -> gigabytes, rendered with one decimal place.
const gb = (bytes) => {
  const gigabytes = bytes / (1024 * 1024 * 1024);
  return gigabytes.toFixed(1);
};
// Channel id from a bag id like "dynamic:channel:123" (third colon-separated field).
const ch = (id) => {
  const [, , channelId] = id.split(":");
  return channelId;
};
// Timestamp prefix for log lines, e.g. "[12:34:56]".
const time = () => `[${moment().format(`HH:mm:ss`)}]`;
// Promise-based delay in milliseconds.
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
// Log which storage and distribution buckets serve the given bag.
const printBagBuckets = ({ id, storageBuckets, distributionBuckets }) => {
  const listIds = (buckets) => buckets.map((bucket) => bucket.id).join(", ");
  const sb = listIds(storageBuckets);
  const db = listIds(distributionBuckets);
  console.log(time(), `${ch(id)} storage: [ ${sb} ] distribution: [ ${db} ]`);
};
+
// One-line channel summary: id, total size in GB, title and description.
const logChannel = (channel) => {
  const { id, size, title, description } = channel;
  console.log(`${id} ${gb(size)}gb [${title}] ${description}`);
};
+
// POST one test result to the collector; log (but never throw) on failure.
const sendResult = (result, url) => {
  const logApiError = ({ data }) => data.error && console.error(time(), data.error);
  const logNetworkError = (e) => console.error(time(), e.message);
  return axios.post(url, result).then(logApiError).catch(logNetworkError);
};
+
/**
 * Load cached results from a previous run.
 * @param {string} file - path to the JSON results file
 * @returns {object[]} parsed results, or [] when the file is missing or unreadable
 *
 * BUG FIX: the original called async `fs.stat` without a callback (which
 * throws) and never returned the loaded data, so it always yielded [] or
 * undefined. It also used `require(file)`, which caches the result and
 * resolves the path relative to this module instead of the working directory.
 */
const loadResults = (file) => {
  try {
    return JSON.parse(fs.readFileSync(file, "utf8"));
  } catch {
    return []; // first run, or unreadable/corrupt cache — start fresh
  }
};
+
/**
 * HEAD-request one asset URL and measure latency.
 * @param {string} objectId - data object id being probed
 * @param {string} endpoint - provider base endpoint (recorded for reporting)
 * @param {string} url - full asset URL to request
 * @returns {Promise<{objectId, endpoint, url, timestamp, latency, status}>}
 *   `status` is the string `success` on a 2xx response (HEAD bodies are
 *   empty), otherwise an error description. Never rejects.
 */
const headAsset = (objectId, endpoint, url) => {
  const start = new Date();
  // Build one result record; shared by the success and failure paths.
  const report = (status) => {
    const timestamp = new Date();
    const latency = moment(timestamp).diff(start);
    return { objectId, endpoint, url, timestamp, latency, status };
  };
  return axios
    .head(url)
    .then(({ data }) => report(data || `success`))
    // BUG FIX: previously `e.message + e.response?.data` appended the literal
    // string "undefined" whenever there was no HTTP response (timeout, DNS...).
    .catch((e) => report(e.message + (e.response?.data ?? "")));
};
+
// channel sizes

// Render the URLs of failed probes as a space-separated list.
const printFailed = (list) => {
  const urls = list.map(({ url }) => url);
  return urls.join(` `);
};
+
/**
 * Format one channel as a markdown table row: id, size in GB, title, and the
 * first 60 characters of the description with newlines collapsed to spaces.
 * BUG FIX: a missing description previously rendered as the string
 * "undefined" in the table (`description?.split(...)` yields undefined).
 */
const addRow = ({ id, size, title, description }) =>
  `| ${id} | ${gb(size)} | ${title} | ${(description ?? "")
    .split("\n")
    .join(" ")
    .slice(0, 60)} |`;
+
/**
 * Write a markdown table of channels larger than 0.1 GB, sorted by total
 * video size (descending), to channels.md.
 * @param {object[]} channels - channels with optional `videos[].media.size`
 */
const writeTable = (channels) => {
  const withSize = channels.map((channel) => {
    // BUG FIX: `+video.media?.size` turned a single video with missing media
    // into NaN, poisoning the whole channel's size total.
    const size =
      channel.videos?.reduce(
        (sum, video) => sum + (Number(video.media?.size) || 0),
        0
      ) || 0;
    return { ...channel, size };
  });
  const rows = withSize
    .filter(({ size }) => size > 0.1 * 1024 ** 3)
    .sort((a, b) => b.size - a.size)
    .map((c) => addRow(c));

  const table =
    `| # | GB | Title | Description |\n|---|---|---|---|\n` + rows.join(`\n`);
  fs.writeFileSync("channels.md", table);
  console.log(`wrote channels.md`);
};
+
// Public API consumed by channels.js; gb, printBagBuckets, logChannel and
// addRow remain internal to this module.
module.exports = {
  query,
  ch,
  sleep,
  time,
  loadResults,
  headAsset,
  sendResult,
  printFailed,
  writeTable,
};

+ 20 - 0
scripts/test-providers/yarn.lock

@@ -0,0 +1,20 @@
+# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+# yarn lockfile v1
+
+
+axios@^0.26.0:
+  version "0.26.0"
+  resolved "https://registry.yarnpkg.com/axios/-/axios-0.26.0.tgz#9a318f1c69ec108f8cd5f3c3d390366635e13928"
+  integrity sha512-lKoGLMYtHvFrPVt3r+RBMp9nh34N0M8zEfCWqdWZx6phynIEhQqAdydpyBAAG211zlhX9Rgu08cOamy6XjE5Og==
+  dependencies:
+    follow-redirects "^1.14.8"
+
+follow-redirects@^1.14.8:
+  version "1.14.8"
+  resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.14.8.tgz#016996fb9a11a100566398b1c6839337d7bfa8fc"
+  integrity sha512-1x0S9UVJHsQprFcEC/qnNzBLcIxsjAV905f/UkQxbclCsoTWlacCNOpQa/anodLl2uaEKFhfWOvM2Qg77+15zA==
+
+moment@^2.29.1:
+  version "2.29.1"
+  resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.1.tgz#b2be769fa31940be9eeea6469c075e35006fa3d3"
+  integrity sha512-kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ==