Browse Source

Merge pull request #2944 from mnaamani/giza-staging-fix-runtime-upgrade-checks

Giza staging fix runtime upgrade checks
Mokhtar Naamani 3 years ago
parent
commit
decf3eff85

+ 19 - 23
tests/network-tests/run-migration-tests.sh

@@ -9,12 +9,9 @@ TARGET_RUNTIME_TAG=${TARGET_RUNTIME_TAG:=latest}
 # The joystream/node docker image tag to start the chain with
 RUNTIME_TAG=${RUNTIME_TAG:=sumer}
 # Post migration assertions by means of typescript scenarios required
-POST_MIGRATION_ASYNC_ASSERTIONS=${POST_MIGRATION_ASYNC_ASSERTIONS=$true}
-# The joystream/node docker image tag to start the chain with
-RUNTIME_TAG=${RUNTIME_TAG:=sumer}
+POST_MIGRATION_ASYNC_ASSERTIONS=${POST_MIGRATION_ASYNC_ASSERTIONS:=$true}
 # source common function used for node setup
 source ./node-utils.sh
-source ./.env
 
 #######################################
 # use fork-off to generate a chainspec file with the current s
@@ -27,32 +24,31 @@ function fork_off_init() {
     # chain-spec-raw already existing
 
     if ! [[ -f ${DATA_PATH}/storage.json ]]; then
-	curl http://testnet-rpc-3-uk.joystream.org:9933 -H \
-	     "Content-type: application/json" -d \
-	     '{"jsonrpc":"2.0","id":1,"method":"state_getPairs","params":["0x"]}' \
-	     > ${DATA_PATH}/storage.json	
+        curl http://testnet-rpc-3-uk.joystream.org:9933 -H \
+            "Content-type: application/json" -d \
+            '{"jsonrpc":"2.0","id":1,"method":"state_getPairs","params":["0x"]}' \
+            > ${DATA_PATH}/storage.json
     fi
 
     if ! [[ -f ${DATA_PATH}/schema.json ]]; then
-	cp $SCRIPT_PATH/../../types/augment/all/defs.json ${DATA_PATH}/schema.json
+        cp $SCRIPT_PATH/../../types/augment/all/defs.json ${DATA_PATH}/schema.json
     fi
 
     id=$(docker create joystream/node:${TARGET_RUNTIME_TAG})
     docker cp $id:/joystream/runtime.compact.wasm ${DATA_PATH}/runtime.wasm
 
     # RPC endpoint for live RUNTIME testnet 
-    export WS_RPC_ENDPOINT="wss://testnet-rpc-3-uk.joystream.org"
-    yarn workspace api-scripts tsnode-strict src/fork-off.ts
+    WS_RPC_ENDPOINT="wss://testnet-rpc-3-uk.joystream.org" \
+        yarn workspace api-scripts tsnode-strict src/fork-off.ts
 }
 
 function export_chainspec_file_to_disk() {
-    if ! [[ -f ${DATA_PATH}/exported-state.json ]]; then
-    # write the initial genesis state to db, in order to aviod waiting for an arbitrary amount of time 
+    echo "**** Initializing node database by exporting state ****"
+    # write the initial genesis state to db, in order to avoid waiting for an arbitrary amount of time 
     docker-compose -f ../../docker-compose.yml run \
 		   -v ${DATA_PATH}:/spec joystream-node export-state \
 		   --chain /spec/chain-spec-raw.json \
 		   --base-path /data --pruning archive > ${DATA_PATH}/exported-state.json
-    fi
 }
 
 # entrypoint
@@ -68,22 +64,22 @@ function main {
     # use forkoff to update chainspec with the live state + update runtime code
     fork_off_init
 
+    export JOYSTREAM_NODE_TAG=$RUNTIME_TAG
+
     # export chain-spec BEFORE starting the node
-#    export_chainspec_file_to_disk
+    export_chainspec_file_to_disk
     
     echo "***** STARTING NODE WITH FORKED STATE *****"
-    export JOYSTREAM_NODE_TAG=$RUNTIME_TAG
     CONTAINER_ID=$(start_node)
-    
-    sleep 120
-    
+
     if ( $POST_MIGRATION_ASYNC_ASSERTIONS ); then
-	# verify assertion using typsecript
-	echo "***** POST MIGRATION TYPESCRIPT *****"	
-	yarn workspace network-tests node-ts-strict src/scenarios/post-migration.ts
+        sleep 120
+        # verify assertion using typescript
+        echo "***** POST MIGRATION TYPESCRIPT *****"
+        yarn workspace network-tests node-ts-strict src/scenarios/post-migration.ts
     fi
 }
 
 # main entrypoint
-main
+main || :
 cleanup

+ 46 - 47
tests/network-tests/src/misc/postMigrationAssertionsFlow.ts

@@ -4,66 +4,65 @@ import { extendDebug } from '../Debugger'
 import { Utils } from '../utils'
 
 export default async function postMigrationAssertions({ api }: FlowProps): Promise<void> {
-    const debug = extendDebug('flow:postMigrationAssertions')
-    debug('Started')
+  const debug = extendDebug('flow:postMigrationAssertions')
+  debug('Started')
 
-    debug('Ensure migration is done')
+  debug('Ensure migration is done')
 
-    let channelMigration = await api.query.content.channelMigration()
-    let videoMigration = await api.query.content.videoMigration()
+  let channelMigration = await api.query.content.channelMigration()
+  let videoMigration = await api.query.content.videoMigration()
 
-    // wait for migration to be done and checking that index do actually change
-    while (
-        channelMigration.current_id.toNumber() < channelMigration.final_id.toNumber() ||
-        videoMigration.current_id.toNumber() < videoMigration.final_id.toNumber()
+  // wait for migration to be done and check that the indexes actually change
+  while (
+    channelMigration.current_id.toNumber() < channelMigration.final_id.toNumber() ||
+    videoMigration.current_id.toNumber() < videoMigration.final_id.toNumber()
+  ) {
+    // wait 6 seconds until next block is produced
+    await Utils.wait(6000)
+
+    const channelMigrationNew = await api.query.content.channelMigration()
+    const videoMigrationNew = await api.query.content.videoMigration()
+
+    // check invariant in order to prevent infinite loop
+    if (
+      channelMigrationNew.current_id.toNumber() > channelMigration.current_id.toNumber() ||
+      videoMigrationNew.current_id.toNumber() > videoMigration.current_id.toNumber()
     ) {
-        // wait 6 seconds until next block is produced
-        await Utils.wait(6000)
-
-        const channelMigrationNew = await api.query.content.channelMigration()
-        const videoMigrationNew = await api.query.content.videoMigration()
-
-        // check invariant in order to prevent infinite loop
-        if (
-            channelMigrationNew.current_id.toNumber() > channelMigration.current_id.toNumber() ||
-            videoMigrationNew.current_id.toNumber() > videoMigration.current_id.toNumber()
-        ) {
-            // update migration variables
-            channelMigration = channelMigrationNew
-            videoMigration = videoMigrationNew
-
-        } else {
-            throw new Error('Migration status not changing')
-        }
+      // update migration variables
+      channelMigration = channelMigrationNew
+      videoMigration = videoMigrationNew
+    } else {
+      throw new Error('Migration status not changing')
     }
+  }
 
-    debug('Check all new  working groups have been correctly initialized')
+  debug('Check all new  working groups have been correctly initialized')
 
-    const wgBeta = await api.query.operationsWorkingGroupBeta.activeWorkerCount()
-    const wgGamma = await api.query.operationsWorkingGroupGamma.activeWorkerCount()
-    const wgGateway = await api.query.gatewayWorkingGroup.activeWorkerCount()
+  const wgBeta = await api.query.operationsWorkingGroupBeta.activeWorkerCount()
+  const wgGamma = await api.query.operationsWorkingGroupGamma.activeWorkerCount()
+  const wgGateway = await api.query.gatewayWorkingGroup.activeWorkerCount()
 
-    assert.equal(wgBeta.toNumber(), 0)
-    assert.equal(wgGamma.toNumber(), 0)
-    assert.equal(wgGateway.toNumber(), 0)
+  assert.equal(wgBeta.toNumber(), 0)
+  assert.equal(wgGamma.toNumber(), 0)
+  assert.equal(wgGateway.toNumber(), 0)
 
-    debug('Checking that Video, Channel, Categories  counters have not been re-set')
+  debug('Checking that Video, Channel, Categories  counters have not been re-set')
 
-    const nextVideoCategoryId = await api.query.content.nextVideoCategoryId()
-    const nextVideoId = await api.query.content.nextVideoId()
-    const nextChannelId = await api.query.content.nextChannelId()
+  const nextVideoCategoryId = await api.query.content.nextVideoCategoryId()
+  const nextVideoId = await api.query.content.nextVideoId()
+  const nextChannelId = await api.query.content.nextChannelId()
 
-    assert(nextVideoCategoryId.toNumber() > 1)
-    assert(nextVideoId.toNumber() > 1)
-    assert(nextChannelId.toNumber() > 1)
+  assert(nextVideoCategoryId.toNumber() > 1)
+  assert(nextVideoId.toNumber() > 1)
+  assert(nextChannelId.toNumber() > 1)
 
-    debug('Checking that number of outstanding channels & videos == 0')
+  debug('Checking that number of outstanding channels & videos == 0')
 
-    const numChannels = await api.getNumberOfOutstandingChannels()
-    const numVideos = await api.getNumberOfOutstandingVideos()
+  const numChannels = await api.getNumberOfOutstandingChannels()
+  const numVideos = await api.getNumberOfOutstandingVideos()
 
-    assert.equal(numChannels, 0)
-    assert.equal(numVideos, 0)
+  assert.equal(numChannels, 0)
+  assert.equal(numVideos, 0)
 
-    debug('Done')
+  debug('Done')
 }

+ 77 - 73
utils/api-scripts/src/fork-off.ts

@@ -1,20 +1,20 @@
-import fs = require("fs");
-import path = require("path");
-import { xxhashAsHex } from '@polkadot/util-crypto';
-import { ApiPromise, WsProvider } from '@polkadot/api';
-const execSync = require('child_process').execSync;
+import fs = require('fs')
+import path = require('path')
+import { xxhashAsHex } from '@polkadot/util-crypto'
+import { ApiPromise, WsProvider } from '@polkadot/api'
+const execSync = require('child_process').execSync
 
 // paths & env variables
 let alice = process.env.SUDO_ACCOUNT
 // bad error handling TODO: fix process.env
-let schemaPath = path.join(process.env.DATA_PATH || "", 'schema.json');
-let wasmPath = path.join(process.env.DATA_PATH || "", 'runtime.wasm') || "";
-let hexPath = path.join(process.env.DATA_PATH || "", 'runtime.hex') || "";
-let specPath = path.join(process.env.DATA_PATH || "", 'chain-spec-raw.json');
-let storagePath = path.join(process.env.DATA_PATH || "", 'storage.json');
+let schemaPath = path.join(process.env.DATA_PATH || '', 'schema.json')
+let wasmPath = path.join(process.env.DATA_PATH || '', 'runtime.wasm') || ''
+let hexPath = path.join(process.env.DATA_PATH || '', 'runtime.hex') || ''
+let specPath = path.join(process.env.DATA_PATH || '', 'chain-spec-raw.json')
+let storagePath = path.join(process.env.DATA_PATH || '', 'storage.json')
 
 // this might not be of much use
-const provider = new WsProvider(process.env.WS_RPC_ENDPOINT || 'http://localhost:9944')
+const provider = new WsProvider(process.env.WS_RPC_ENDPOINT || 'ws://localhost:9944')
 /**
  * All module prefixes except those mentioned in the skippedModulesPrefix will be added to this by the script.
  * If you want to add any past module or part of a skipped module, add the prefix here manually.
@@ -28,8 +28,16 @@ const provider = new WsProvider(process.env.WS_RPC_ENDPOINT || 'http://localhost
  * For module hashing, do it via xxhashAsHex,
  * e.g. console.log(xxhashAsHex('System', 128)).
  */
-let prefixes = ['0x26aa394eea5630e07c48ae0c9558cef7b99d880ec681799c0cf30e8886371da9' /* System.Account */];
-const skippedModulesPrefix = ['System', 'Session', 'Babe', 'Grandpa', 'GrandpaFinality', 'FinalityTracker', 'Authorship'];
+let prefixes = ['0x26aa394eea5630e07c48ae0c9558cef7b99d880ec681799c0cf30e8886371da9' /* System.Account */]
+const skippedModulesPrefix = [
+  'System',
+  'Session',
+  'Babe',
+  'Grandpa',
+  'GrandpaFinality',
+  'FinalityTracker',
+  'Authorship',
+]
 
 // Apparently not needed: To review
 // async function fixParachinStates(api: ApiPromise, chainSpec: any) {
@@ -42,80 +50,76 @@ const skippedModulesPrefix = ['System', 'Session', 'Babe', 'Grandpa', 'GrandpaFi
 // }
 
 async function main() {
-
-    // hexdump of runtime wasm binary, running it from the shell gives bad format error
-    execSync('cat ' + wasmPath + ' | hexdump -ve \'/1 "%02x"\' > ' + hexPath);
-
-    let api;
-    if (!fs.existsSync(schemaPath)) {
-        console.log(('Custom Schema missing, using default schema.'));
-        api = await ApiPromise.create({ provider });
-    } else {
-        const types = JSON.parse(fs.readFileSync(schemaPath, 'utf8'));
-        api = await ApiPromise.create({
-            provider,
-            types,
-        });
+  // hexdump of runtime wasm binary, running it from the shell gives bad format error
+  execSync('cat ' + wasmPath + ' | hexdump -ve \'/1 "%02x"\' > ' + hexPath)
+
+  let api
+  if (!fs.existsSync(schemaPath)) {
+    console.log('Custom Schema missing, using default schema.')
+    api = await ApiPromise.create({ provider })
+  } else {
+    const types = JSON.parse(fs.readFileSync(schemaPath, 'utf8'))
+    api = await ApiPromise.create({
+      provider,
+      types,
+    })
+  }
+
+  // storage.json is guaranteed to exist
+
+  let metadata = await api.rpc.state.getMetadata()
+  // Populate the prefixes array
+  let modules = metadata.asLatest.modules
+  modules.forEach((module) => {
+    if (module.storage) {
+      if (!skippedModulesPrefix.includes(module.name.toString())) {
+        prefixes.push(xxhashAsHex(module.name.toString(), 128))
+      }
     }
+  })
 
-    // storage.json is guaranteed to exists
-
-    let metadata = await api.rpc.state.getMetadata();
-    // Populate the prefixes array
-    let modules = metadata.asLatest.modules;
-    modules.forEach((module) => {
-        if (module.storage) {
-            if (!skippedModulesPrefix.includes(module.name.toString())) {
-                prefixes.push(xxhashAsHex(module.name.toString(), 128));
-            }
-        }
-    });
+  // blank starting chainspec guaranteed to exist
 
-    // blank starting chainspec guaranteed to exist
+  let storage: Storage = JSON.parse(fs.readFileSync(storagePath, 'utf8'))
+  let chainSpec = JSON.parse(fs.readFileSync(specPath, 'utf8'))
 
-    let storage: Storage = JSON.parse(fs.readFileSync(storagePath, 'utf8'));
-    let chainSpec = JSON.parse(fs.readFileSync(specPath, 'utf8'));
+  // Modify chain name and id
+  chainSpec.name = chainSpec.name + '-fork'
+  chainSpec.id = chainSpec.id + '-fork'
+  chainSpec.protocolId = chainSpec.protocolId
 
-    // Modify chain name and id
-    chainSpec.name = chainSpec.name + '-fork';
-    chainSpec.id = chainSpec.id + '-fork';
-    chainSpec.protocolId = chainSpec.protocolId;
+  // Grab the items to be moved, then iterate through and insert into storage
+  storage.result
+    .filter((i) => prefixes.some((prefix) => i[0].startsWith(prefix)))
+    .forEach(([key, value]) => (chainSpec.genesis.raw.top[key] = value))
 
-    // Grab the items to be moved, then iterate through and insert into storage
-    storage
-        .result
-        .filter((i) => prefixes.some((prefix) => i[0].startsWith(prefix)))
-        .forEach(([key, value]) => (chainSpec.genesis.raw.top[key] = value));
+  // Delete System.LastRuntimeUpgrade to ensure that the on_runtime_upgrade event is triggered
+  delete chainSpec.genesis.raw.top['0x26aa394eea5630e07c48ae0c9558cef7f9cce9c888469bb1a0dceaa129672ef8']
 
-    // Delete System.LastRuntimeUpgrade to ensure that the on_runtime_upgrade event is triggered
-    delete chainSpec.genesis.raw.top['0x26aa394eea5630e07c48ae0c9558cef7f9cce9c888469bb1a0dceaa129672ef8'];
+  //    fixParachinStates(api, chainSpec);
 
-    //    fixParachinStates(api, chainSpec);
+  // Set the code to the current runtime code: this replaces the set code transaction
+  chainSpec.genesis.raw.top['0x3a636f6465'] = '0x' + fs.readFileSync(hexPath, 'utf8').trim()
 
-    // Set the code to the current runtime code: this replaces the set code transaction
-    chainSpec.genesis.raw.top['0x3a636f6465'] = '0x' + fs.readFileSync(hexPath, 'utf8').trim();
+  // To prevent the validator set from changing mid-test, set Staking.ForceEra to ForceNone ('0x02')
+  chainSpec.genesis.raw.top['0x5f3e4907f716ac89b6347d15ececedcaf7dad0317324aecae8744b87fc95f2f3'] = '0x02'
 
-    // To prevent the validator set from changing mid-test, set Staking.ForceEra to ForceNone ('0x02')
-    chainSpec.genesis.raw.top['0x5f3e4907f716ac89b6347d15ececedcaf7dad0317324aecae8744b87fc95f2f3'] = '0x02';
-
-    if (alice !== '') {
-        // Set sudo key to //Alice
-        chainSpec.genesis.raw.top['0x5c0d1176a568c1f92944340dbfed9e9c530ebca703c85910e7164cb7d1c9e47b'] = '0xd43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d';
-    }
+  if (alice !== '') {
+    // Set sudo key to //Alice
+    chainSpec.genesis.raw.top['0x5c0d1176a568c1f92944340dbfed9e9c530ebca703c85910e7164cb7d1c9e47b'] =
+      '0xd43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d'
+  }
 
-    fs.writeFileSync(specPath, JSON.stringify(chainSpec, null, 4));
+  fs.writeFileSync(specPath, JSON.stringify(chainSpec, null, 4))
 
-    console.log('****** INITIAL CHAINSPEC UPDATED TO REFLECT LIVE STATE ******');
-    process.exit();
+  console.log('****** INITIAL CHAINSPEC UPDATED TO REFLECT LIVE STATE ******')
+  process.exit()
 }
 
-main();
-
-
+main()
 
 interface Storage {
-    "jsonrpc": string,
-    "result": Array<[string, string]>,
-    "id": string,
+  'jsonrpc': string
+  'result': Array<[string, string]>
+  'id': string
 }
-