
migration script fixes

ignazio · 3 years ago · commit 87d500ca37
2 changed files with 37 additions and 30 deletions
  1. tests/network-tests/run-migration-tests.sh (+5 −10)
  2. utils/api-scripts/src/fork-off.ts (+32 −20)

+ 5 - 10
tests/network-tests/run-migration-tests.sh

@@ -231,19 +231,14 @@ function main {
     convert_chainspec
     echo "**** EMPTY CHAINSPEC CREATED SUCCESSFULLY ****"
 
-    # use forkoff
     if ( $CLONE_LIVE_STATE ); then
+	# use fork-off to update the chainspec with the live state and the new runtime code
 	fork_off_init
+    else
+	# use upgrade_runtime to upgrade the runtime code
+	upgrade_runtime
     fi
-
-    # node has to be started because upgrade runtime uses signAndSend to update code on storage
-    JOYSTREAM_NODE_TAG=${TARGET_RUNTIME_TAG}
-    echo "******* STARTING ${JOYSTREAM_NODE_TAG} ********"	
-    CONTAINER_ID=$(start_node)
-    echo "******* JS BINARY STARTED CONTAINER_ID: $CONTAINER_ID ********"	
-
-    # Section B: migration
-    upgrade_runtime
+    
 
     # Section C: assertions
     # verify that the number of outstanding channels & videos == 0
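
Note on the removed block: the node only had to be started because upgrade_runtime pushes the new code on chain via signAndSend. A minimal sketch of that pattern with @polkadot/api is below; the endpoint, the //Alice key and the wasm path are illustrative assumptions, not the repository's actual upgrade_runtime implementation.

    // Sketch only: submit a runtime upgrade through sudo with signAndSend (assumed setup)
    import { ApiPromise, WsProvider, Keyring } from '@polkadot/api';
    import * as fs from 'fs';

    async function upgradeRuntimeSketch() {
        const api = await ApiPromise.create({ provider: new WsProvider('ws://localhost:9944') });
        const sudoKey = new Keyring({ type: 'sr25519' }).addFromUri('//Alice'); // assumed sudo account
        const code = '0x' + fs.readFileSync('runtime.wasm').toString('hex');
        // setCode stores the wasm under the :code key; large blobs may need sudoUncheckedWeight instead
        await api.tx.sudo.sudo(api.tx.system.setCode(code)).signAndSend(sudoKey);
    }

In the CLONE_LIVE_STATE branch no running node is needed, because fork-off.ts (second file below) writes the new code straight into the chainspec genesis under the ':code' key ('0x3a636f6465').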

+ 32 - 20
utils/api-scripts/src/fork-off.ts

@@ -1,18 +1,18 @@
-const fs = require('fs');
-const path = require('path');
-require("dotenv").config();
-const { xxhashAsHex } = require('@polkadot/util-crypto');
-const { ApiPromise, WsProvider } = require('@polkadot/api');
-const execFileSync = require('child_process').execFileSync;
+import * as fs from 'fs';
+import * as path from 'path';
+import { xxhashAsHex } from '@polkadot/util-crypto';
+import { ApiPromise, WsProvider } from '@polkadot/api';
 const execSync = require('child_process').execSync;
 
 // paths & env variables
-const alice = process.env.SUDO_ACCOUNT
-const schemaPath = path.join(process.env.DATA_PATH, 'schema.json');
-const wasmPath = path.join(process.env.DATA_PATH, 'runtime.wasm');
-const hexPath = path.join(process.env.DATA_PATH, 'runtime.hex');
-const specPath = path.join(process.env.DATA_PATH, 'chain-spec-raw.json');
-const storagePath = path.join(process.env.DATA_PATH, 'storage.json');
+let alice = process.env.SUDO_ACCOUNT
+
+// TODO: fail early with a clear error if DATA_PATH is not set, instead of defaulting to ""
+let schemaPath = path.join(process.env.DATA_PATH || "", 'schema.json');
+let wasmPath = path.join(process.env.DATA_PATH || "", 'runtime.wasm');
+let hexPath = path.join(process.env.DATA_PATH || "", 'runtime.hex');
+let specPath = path.join(process.env.DATA_PATH || "", 'chain-spec-raw.json');
+let storagePath = path.join(process.env.DATA_PATH || "", 'storage.json');
 
 // this might not be of much use
 const provider = new WsProvider(process.env.WS_RPC_ENDPOINT || 'ws://localhost:9944')
@@ -32,18 +32,18 @@ const provider = new WsProvider(process.env.WS_RPC_ENDPOINT || 'http://localhost
 let prefixes = ['0x26aa394eea5630e07c48ae0c9558cef7b99d880ec681799c0cf30e8886371da9' /* System.Account */];
 const skippedModulesPrefix = ['System', 'Session', 'Babe', 'Grandpa', 'GrandpaFinality', 'FinalityTracker', 'Authorship'];
 
-async function fixParachinStates(api, specPath) {
+async function fixParachinStates(api: ApiPromise, chainSpec: any) {
     const skippedKeys = [
         api.query.parasScheduler.sessionStartBlock.key()
     ];
     for (const k of skippedKeys) {
-        delete specPath.genesis.raw.top[k];
+        delete chainSpec.genesis.raw.top[k];
     }
 }
 
 async function main() {
 
-    // hexdump of runtime wasm binary
+    // hexdump of the runtime wasm binary (running the same pipeline directly from the shell gives a bad format error)
     execSync('cat ' + wasmPath + ' | hexdump -ve \'/1 "%02x"\' > ' + hexPath);
 
     let api;
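
Side note on the hexdump comment above: the same conversion can be done natively with the fs handle and the wasmPath/hexPath variables already defined in this file, avoiding the fragile subshell. A sketch, not part of this commit:

    // Equivalent of `cat runtime.wasm | hexdump -ve '/1 "%02x"'`, done in-process
    const wasmHex = fs.readFileSync(wasmPath).toString('hex');
    fs.writeFileSync(hexPath, wasmHex);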
@@ -60,9 +60,9 @@ async function main() {
 
 // storage.json is guaranteed to exist
 
-    const metadata = await api.rpc.state.getMetadata();
+    let metadata = await api.rpc.state.getMetadata();
     // Populate the prefixes array
-    const modules = metadata.asLatest.pallets;
+    let modules: Array<Module> = metadata.asLatest.pallets;
     modules.forEach((module) => {
         if (module.storage) {
             if (!skippedModulesPrefix.includes(module.name)) {
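
The hunk above cuts off just before the prefixes array is filled. Given the xxhashAsHex import, the elided body most likely pushes the 128-bit xxhash of each remaining module name; a sketch of the full loop under that assumption:

    // Assumed shape of the loop: module name -> twox128 storage key prefix
    modules.forEach((module) => {
        if (module.storage && !skippedModulesPrefix.includes(module.name)) {
            prefixes.push(xxhashAsHex(module.name, 128));
        }
    });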
@@ -73,7 +73,7 @@ async function main() {
 
     // blank starting chainspec guaranteed to exist
 
-    let storage = JSON.parse(fs.readFileSync(storagePath, 'utf8'));
+    let storage: Storage = JSON.parse(fs.readFileSync(storagePath, 'utf8'));
     let chainSpec = JSON.parse(fs.readFileSync(specPath, 'utf8'));
 
     // Modify chain name and id
@@ -83,7 +83,7 @@ async function main() {
 
     // Grab the items to be moved, then iterate through and insert into storage
     storage
-        .results
+        .result
         .filter((i) => prefixes.some((prefix) => i[0].startsWith(prefix)))
         .forEach(([key, value]) => (chainSpec.genesis.raw.top[key] = value));
 
@@ -92,7 +92,7 @@ async function main() {
 
     fixParachinStates(api, chainSpec);
 
-    // Set the code to the current runtime code
+    // Set the code to the current runtime code: this replaces the set code transaction
     chainSpec.genesis.raw.top['0x3a636f6465'] = '0x' + fs.readFileSync(hexPath, 'utf8').trim();
 
     // To prevent the validator set from changing mid-test, set Staking.ForceEra to ForceNone ('0x02')
@@ -111,3 +111,15 @@ async function main() {
 
 main();
 
+
+
+interface Storage {
+    jsonrpc: string,
+    result: Array<[string, string]>,
+    id: string,
+}
+
+interface Module {
+    name: string,
+    storage: string,
+}
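
The new Storage interface mirrors a raw JSON-RPC envelope (jsonrpc / result / id), which matches a dump of the state_getPairs RPC. A hypothetical way such a storage.json could be produced with the provider already defined in this file; an assumption for illustration, not something this commit adds:

    // Sketch: dump every key/value pair from the live chain into the Storage shape above
    // (state_getPairs is an unsafe RPC and must be enabled on the target node)
    async function dumpStorageSketch() {
        const pairs: Array<[string, string]> = await provider.send('state_getPairs', ['0x']);
        fs.writeFileSync(storagePath, JSON.stringify({ jsonrpc: '2.0', result: pairs, id: '1' }));
    }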