
forkoff integration finished - draft1

ignazio 3 years ago
parent
commit
1bb1e208ec
3 changed files with 37 additions and 68 deletions
  1. tests/network-tests/.env (+ 2 - 1)
  2. tests/network-tests/index.js (+ 19 - 65)
  3. tests/network-tests/run-migration-tests.sh (+ 16 - 2)

+ 2 - 1
tests/network-tests/.env

@@ -56,4 +56,5 @@ STAKE_DECREMENT=3
 MINT_CAPACITY_INCREMENT=1000
 # Storage node address to download content from
 STORAGE_NODE_URL=http://localhost:3001/asset/v0
-
+# Whether or not to clone chain live state using fork-off
+CLONE_LIVE_STATE=false

+ 19 - 65
tests/network-tests/index.js

@@ -8,21 +8,12 @@ const { HttpProvider } = require('@polkadot/rpc-provider');
 const { xxhashAsHex } = require('@polkadot/util-crypto');
 const execFileSync = require('child_process').execFileSync;
 const execSync = require('child_process').execSync;
-const binaryPath = path.join(__dirname, 'data', 'binary');
-const wasmPath = path.join(__dirname, 'data', 'runtime.wasm');
-const schemaPath = path.join(__dirname, 'data', 'schema.json');
-const hexPath = path.join(__dirname, 'data', 'runtime.hex');
-const originalSpecPath = path.join(__dirname, 'data', 'genesis.json');
-const forkedSpecPath = path.join(process.env.DATA_PATH, 'fork.json');
-const storagePath = path.join(process.env.DATA_PATH, 'storage.json');
-
-const alice = process.env.ALICE || ''
-const originalChain = process.env.ORIG_CHAIN || '';
-const forkChain = process.env.FORK_CHAIN || '';
 
-let chunksFetched = 0;
-let separator = false;
-const progressBar = new cliProgress.SingleBar({}, cliProgress.Presets.shades_classic);
+// paths
+const schemaPath = path.join(process.env.DATA_PATH, 'schema.json');
+const hexPath = path.join(process.env.DATA_PATH, 'runtime.hex');
+const specPath = path.join(process.env.DATA_PATH, 'chain-spec-raw.json');
+const storagePath = path.join(process.env.DATA_PATH, 'storage.json');
+
+// keep alice: it is still referenced below when re-keying sudo to //Alice
+const alice = process.env.ALICE || '';
 
 /**
  * All module prefixes except those mentioned in the skippedModulesPrefix will be added to this by the script.
@@ -50,46 +41,20 @@ async function fixParachinStates (api, forkedSpec) {
 }
 
 async function main() {
-    if (!fs.existsSync(binaryPath)) {
-	console.log(chalk.red('Binary missing. Please copy the binary of your substrate node to the data folder and rename the binary to "binary"'));
-	process.exit(1);
-    }
-    execFileSync('chmod', ['+x', binaryPath]);
-
-    if (!fs.existsSync(wasmPath)) {
-	console.log(chalk.red('WASM missing. Please copy the WASM blob of your substrate node to the data folder and rename it to "runtime.wasm"'));
-	process.exit(1);
-    }
-    execSync('cat ' + wasmPath + ' | hexdump -ve \'/1 "%02x"\' > ' + hexPath);
-
     let api;
     console.log(chalk.green('We are intentionally using the HTTP endpoint. If you see any warnings about that, please ignore them.'));
     if (!fs.existsSync(schemaPath)) {
 	console.log(chalk.yellow('Custom Schema missing, using default schema.'));
 	api = await ApiPromise.create({ provider });
     } else {
-	const { types, rpc } = JSON.parse(fs.readFileSync(schemaPath, 'utf8'));
+	const types = JSON.parse(fs.readFileSync(schemaPath, 'utf8'));
 	api = await ApiPromise.create({
 	    provider,
 	    types,
-	    rpc,
 	});
     }
 
-    if (fs.existsSync(storagePath)) {
-	console.log(chalk.yellow('Reusing cached storage. Delete ./data/storage.json and rerun the script if you want to fetch latest storage'));
-    } else {
-	// Download state of original chain
-	console.log(chalk.green('Fetching current state of the live chain. Please wait, it can take a while depending on the size of your chain.'));
-	let at = (await api.rpc.chain.getBlockHash()).toString();
-	progressBar.start(totalChunks, 0);
-	const stream = fs.createWriteStream(storagePath, { flags: 'a' });
-	stream.write("[");
-	await fetchChunks("0x", chunksLevel, stream, at);
-	stream.write("]");
-	stream.end();
-	progressBar.stop();
-    }
+    // storage.json is guaranteed to exist at this point
 
     const metadata = await api.rpc.state.getMetadata();
     // Populate the prefixes array
@@ -102,50 +67,39 @@ async function main() {
 	}
     });
 
-    // Generate chain spec for original and forked chains
-    if (originalChain == '') {
-	execSync(binaryPath + ` build-spec --raw > ` + originalSpecPath);
-    } else {
-	execSync(binaryPath + ` build-spec --chain ${originalChain} --raw > ` + originalSpecPath);
-    }
-    if (forkChain == '') {
-	execSync(binaryPath + ` build-spec --dev --raw > ` + forkedSpecPath);
-    } else {
-	execSync(binaryPath + ` build-spec --chain ${forkChain} --raw > ` + forkedSpecPath);
-    }
+    // a blank starting chain spec is guaranteed to exist at specPath
 
     let storage = JSON.parse(fs.readFileSync(storagePath, 'utf8'));
-    let originalSpec = JSON.parse(fs.readFileSync(originalSpecPath, 'utf8'));
-    let forkedSpec = JSON.parse(fs.readFileSync(forkedSpecPath, 'utf8'));
+    let chainSpec = JSON.parse(fs.readFileSync(specPath, 'utf8'));
 
     // Modify chain name and id
-    forkedSpec.name = originalSpec.name + '-fork';
-    forkedSpec.id = originalSpec.id + '-fork';
-    forkedSpec.protocolId = originalSpec.protocolId;
+    chainSpec.name = chainSpec.name + '-fork';
+    chainSpec.id = chainSpec.id + '-fork';
+    chainSpec.protocolId = chainSpec.protocolId;
 
     // Grab the items to be moved, then iterate through and insert into storage
     storage
 	.results
 	.filter((i) => prefixes.some((prefix) => i[0].startsWith(prefix)))
-	.forEach(([key, value]) => (forkedSpec.genesis.raw.top[key] = value));
+	.forEach(([key, value]) => (chainSpec.genesis.raw.top[key] = value));
 
     // Delete System.LastRuntimeUpgrade to ensure that the on_runtime_upgrade event is triggered
-    delete forkedSpec.genesis.raw.top['0x26aa394eea5630e07c48ae0c9558cef7f9cce9c888469bb1a0dceaa129672ef8'];
+    delete chainSpec.genesis.raw.top['0x26aa394eea5630e07c48ae0c9558cef7f9cce9c888469bb1a0dceaa129672ef8'];
 
-    fixParachinStates(api, forkedSpec);
+    fixParachinStates(api, chainSpec);
 
     // Set the code to the current runtime code
-    forkedSpec.genesis.raw.top['0x3a636f6465'] = '0x' + fs.readFileSync(hexPath, 'utf8').trim();
+    chainSpec.genesis.raw.top['0x3a636f6465'] = '0x' + fs.readFileSync(hexPath, 'utf8').trim();
 
     // To prevent the validator set from changing mid-test, set Staking.ForceEra to ForceNone ('0x02')
-    forkedSpec.genesis.raw.top['0x5f3e4907f716ac89b6347d15ececedcaf7dad0317324aecae8744b87fc95f2f3'] = '0x02';
+    chainSpec.genesis.raw.top['0x5f3e4907f716ac89b6347d15ececedcaf7dad0317324aecae8744b87fc95f2f3'] = '0x02';
 
     if (alice !== '') {
 	// Set sudo key to //Alice
-	forkedSpec.genesis.raw.top['0x5c0d1176a568c1f92944340dbfed9e9c530ebca703c85910e7164cb7d1c9e47b'] = '0xd43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d';
+	chainSpec.genesis.raw.top['0x5c0d1176a568c1f92944340dbfed9e9c530ebca703c85910e7164cb7d1c9e47b'] = '0xd43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d';
     }
 
-    fs.writeFileSync(forkedSpecPath, JSON.stringify(forkedSpec, null, 4));
+    fs.writeFileSync(specPath, JSON.stringify(chainSpec, null, 4));
 
-    console.log('Forked genesis generated successfully. Find it at ./data/fork.json');
+    console.log('Forked genesis generated successfully. Find it at ' + specPath);
     process.exit();
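
A note on the hard-coded hex keys in the hunk above (System.LastRuntimeUpgrade, the ':code' key, Staking.ForceEra and the sudo key): they are standard Substrate storage keys. The following minimal sketch is not part of this commit; it only shows how those keys can be re-derived with the xxhashAsHex helper that index.js already imports, and the printed values match the ones used in the script.

    // sketch: derive the well-known storage keys used in index.js above
    const { xxhashAsHex } = require('@polkadot/util-crypto');

    // a Substrate storage key prefix is twox128(pallet) ++ twox128(item)
    const storageKey = (pallet, item) =>
        xxhashAsHex(pallet, 128) + xxhashAsHex(item, 128).slice(2);

    console.log(storageKey('System', 'LastRuntimeUpgrade'));
    // 0x26aa394eea5630e07c48ae0c9558cef7f9cce9c888469bb1a0dceaa129672ef8
    console.log(storageKey('Staking', 'ForceEra'));
    // 0x5f3e4907f716ac89b6347d15ececedcaf7dad0317324aecae8744b87fc95f2f3
    console.log(storageKey('Sudo', 'Key'));
    // 0x5c0d1176a568c1f92944340dbfed9e9c530ebca703c85910e7164cb7d1c9e47b

    // ':code' is a well-known raw key: simply the ASCII bytes of ":code"
    console.log('0x' + Buffer.from(':code').toString('hex')); // 0x3a636f6465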

+ 16 - 2
tests/network-tests/run-migration-tests.sh

@@ -31,10 +31,22 @@ source ./.env
 #   None
 #######################################
 function fork_off_init() {
+    # chain-spec-raw.json already exists at this point
+    
-    if [[ -z ${DATA_PATH}/storage.json ]]; then
+    if [[ ! -f ${DATA_PATH}/storage.json ]]; then
 	sudo scp ignazio@testnet-rpc-3-uk.joystream.org:/home/ignazio/storage.json \
 	     ${DATA_PATH}/storage.json
     fi
+    
+    if [[ ! -f ${DATA_PATH}/schema.json ]]; then
+	cp ../../types/augment/all/defs.json \
+	     ${DATA_PATH}/schema.json
+    fi
+
+    id=$(docker create joystream/node:${TARGET_RUNTIME_TAG})
+    docker cp $id:/joystream/runtime.compact.wasm ${DATA_PATH}/runtime.wasm
+    cat ${DATA_PATH}/runtime.wasm | hexdump -ve '/1 "%02x"' > ${DATA_PATH}/runtime.hex
+    
     npm start
 }
 #######################################
@@ -135,8 +147,11 @@ function main {
     echo "**** EMPTY CHAINSPEC CREATED SUCCESSFULLY ****"
 
     # use forkoff
-    fork_off_init
+    if [[ "$CLONE_LIVE_STATE" == "true" ]]; then
+	fork_off_init
+    fi
 
+    # the node has to be started because the runtime upgrade uses signAndSend to update the code in storage (see the sketch after this diff)
     JOYSTREAM_NODE_TAG=${TARGET_RUNTIME_TAG}
     echo "******* STARTING ${JOYSTREAM_NODE_TAG} ********"	
     CONTAINER_ID=$(start_node)
@@ -161,4 +176,3 @@ function main {
 # main entrypoint
 main
 cleanup
-
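
For context on the signAndSend comment in the hunk above: the actual upgrade script is not part of this diff, but with @polkadot/api a sudo-driven runtime upgrade typically looks roughly like the sketch below. The endpoint, WASM path and the sudoUncheckedWeight call are assumptions for illustration, not taken from this commit; //Alice is used only because index.js re-keys sudo to //Alice in the forked spec.

    // sketch (assumption, not this repo's upgrade script): push new runtime code
    // to a running node via a sudo-wrapped system.setCode extrinsic
    const { ApiPromise, WsProvider, Keyring } = require('@polkadot/api');
    const fs = require('fs');

    async function upgradeRuntime() {
        const api = await ApiPromise.create({ provider: new WsProvider('ws://localhost:9944') });
        const sudo = new Keyring({ type: 'sr25519' }).addFromUri('//Alice');
        const code = '0x' + fs.readFileSync(`${process.env.DATA_PATH}/runtime.wasm`).toString('hex');
        // sudoUncheckedWeight skips the weight check so the large setCode call fits in a block;
        // the expected weight argument format depends on the runtime version
        const tx = api.tx.sudo.sudoUncheckedWeight(api.tx.system.setCode(code), 1);
        await new Promise((resolve, reject) =>
            tx.signAndSend(sudo, ({ status, dispatchError }) => {
                if (dispatchError) return reject(new Error(dispatchError.toString()));
                if (status.isInBlock) resolve(status.asInBlock.toString());
            })
        );
        await api.disconnect();
    }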