pull/5370/head
Oleksii Kosynskyi 1 year ago
parent 9b5475e4d6
commit 82d5bafb89
  1. 10
      libs/remix-tests/src/testRunner.ts
  2. 180
      libs/remix-tests/tests/testRunner.cli.spec.ts
  3. 32
      libs/remix-tests/tests/testRunner.spec.ts

@@ -305,13 +305,13 @@ export function runTest (testName: string, testObject: any, contractDetails: Com
const time: number = (Date.now() - startTime) / 1000.0
const assertionEventHashes = assertionEvents.map(e => Web3.utils.sha3(e.name + '(' + e.params.join() + ')'))
let testPassed = false
for (const i in receipt.events) {
let events = receipt.events[i]
for (const i in receipt.logs) {
let events = receipt.logs[i]
if (!Array.isArray(events)) events = [events]
for (const event of events) {
const eIndex = assertionEventHashes.indexOf(event.raw.topics[0]) // event name topic will always be at index 0
const eIndex = assertionEventHashes.indexOf(event.topics[0]) // event name topic will always be at index 0
if (eIndex >= 0) {
const testEvent = web3.eth.abi.decodeParameters(assertionEvents[eIndex].params, event.raw.data)
const testEvent = web3.eth.abi.decodeParameters(assertionEvents[eIndex].params, event.data)
if (!testEvent[0]) {
const assertMethod = testEvent[2]
if (assertMethod === 'ok') { // for 'Assert.ok' method
@@ -380,7 +380,7 @@ export function runTest (testName: string, testObject: any, contractDetails: Com
const time: number = (Date.now() - startTime) / 1000.0
let errMsg = err.message
let txHash
if (err.reason) errMsg = `transaction reverted with the reason: ${err.reason}`
if (err.reason) errMsg = `transaction reverted with the reason: ${err.reason}`
const resp: TestResultInterface = {
type: 'testFailure',
value: changeCase.sentenceCase(func.name),

@@ -4,10 +4,10 @@ import { expect } from 'chai';
describe('testRunner: remix-tests CLI', function(){
this.timeout(120000)
// remix-tests binary, after build, is used as executable
// remix-tests binary, after build, is used as executable
const executablePath = resolve(__dirname + '/../../../dist/libs/remix-tests/bin/remix-tests')
const result = spawnSync('ls', { cwd: resolve(__dirname + '/../../../dist/libs/remix-tests') })
if(result) {
const dirContent = result.stdout.toString()
@@ -20,7 +20,7 @@ describe('testRunner: remix-tests CLI', function(){
execSync('yarn install', { cwd: resolve(__dirname + '/../../../dist/libs/remix-tests') })
}
}
describe('test various CLI options', function() {
it('remix-tests version', () => {
@@ -60,12 +60,12 @@ Commands:
it('remix-tests running a test file', function() {
const res = spawnSync(executablePath, [resolve(__dirname + '/examples_0/assert_ok_test.sol')])
//console.log(res.stdout.toString())
console.log(res.stdout.toString())
// match initial lines
expect(res.stdout.toString().trim()).to.match(/:: Running tests using remix-tests ::/)
expect(res.stdout.toString().trim()).to.match(/creation of library remix_tests.sol:Assert pending.../)
// match test result
expect(res.stdout.toString().trim()).to.match(/AssertOkTest/)
expect(res.stdout.toString().trim()).to.match(/AssertOkTest/gi)
expect(res.stdout.toString().trim()).to.match(/AssertOkTest okPassTest/) // check if console.log is printed
expect(res.stdout.toString().trim()).to.match(/Ok pass test/)
expect(res.stdout.toString().trim()).to.match(/AssertOkTest okFailTest/) // check if console.log is printed
@@ -74,93 +74,93 @@ Commands:
expect(res.stdout.toString().trim()).to.match(/Expected value should be ok to: true/)
expect(res.stdout.toString().trim()).to.match(/Received: false/)
expect(res.stdout.toString().trim()).to.match(/Message: okFailTest fails/)
})
it('remix-tests running a test file with custom compiler version', () => {
const res = spawnSync(executablePath, ['--compiler', '0.7.4', resolve(__dirname + '/examples_0/assert_ok_test.sol')])
// match initial lines
expect(res.stdout.toString().trim()).to.contain('Compiler version set to 0.7.4. Latest version is')
expect(res.stdout.toString().trim()).to.contain('Loading remote solc version v0.7.4+commit.3f05b770 ...')
expect(res.stdout.toString().trim()).to.match(/:: Running tests using remix-tests ::/)
expect(res.stdout.toString().trim()).to.match(/creation of library remix_tests.sol:Assert pending.../)
// match test result
expect(res.stdout.toString().trim()).to.match(/Ok pass test/)
expect(res.stdout.toString().trim()).to.match(/Ok fail test/)
// match fail test details
expect(res.stdout.toString().trim()).to.match(/Message: okFailTest fails/)
})
it('remix-tests running a test file with unavailable custom compiler version (should fail)', () => {
const res = spawnSync(executablePath, ['--compiler', '1.10.4', resolve(__dirname + '/examples_0/assert_ok_test.sol')])
// match initial lines
expect(res.stdout.toString().trim()).to.contain('No compiler found in releases with version 1.10.4')
})
it('remix-tests running a test file with custom EVM', () => {
const res = spawnSync(executablePath, ['--evm', 'petersburg', resolve(__dirname + '/examples_0/assert_ok_test.sol')])
// match initial lines
expect(res.stdout.toString().trim()).to.contain('EVM set to petersburg')
expect(res.stdout.toString().trim()).to.match(/:: Running tests using remix-tests ::/)
expect(res.stdout.toString().trim()).to.match(/creation of library remix_tests.sol:Assert pending.../)
// match test result
expect(res.stdout.toString().trim()).to.match(/Ok pass test/)
expect(res.stdout.toString().trim()).to.match(/Ok fail test/)
// match fail test details
expect(res.stdout.toString().trim()).to.match(/Message: okFailTest fails/)
})
it('remix-tests running a test file by enabling optimization', () => {
const res = spawnSync(executablePath, ['--optimize', 'true', resolve(__dirname + '/examples_0/assert_ok_test.sol')])
// match initial lines
expect(res.stdout.toString().trim().includes('Optimization is enabled'))
expect(res.stdout.toString().trim()).to.match(/:: Running tests using remix-tests ::/)
expect(res.stdout.toString().trim()).to.match(/creation of library remix_tests.sol:Assert pending.../)
// match test result
expect(res.stdout.toString().trim()).to.match(/Ok pass test/)
expect(res.stdout.toString().trim()).to.match(/Ok fail test/)
// match fail test details
expect(res.stdout.toString().trim()).to.match(/Message: okFailTest fails/)
})
it('remix-tests running a test file by enabling optimization and setting runs', () => {
const res = spawnSync(executablePath, ['--optimize', 'true', '--runs', '300', resolve(__dirname + '/examples_0/assert_ok_test.sol')])
// match initial lines
expect(res.stdout.toString().trim()).to.contain('Optimization is enabled')
expect(res.stdout.toString().trim()).to.contain('Runs set to 300')
expect(res.stdout.toString().trim()).to.match(/:: Running tests using remix-tests ::/)
expect(res.stdout.toString().trim()).to.match(/creation of library remix_tests.sol:Assert pending.../)
// match test result
expect(res.stdout.toString().trim()).to.match(/Ok pass test/)
expect(res.stdout.toString().trim()).to.match(/Ok fail test/)
// match fail test details
expect(res.stdout.toString().trim()).to.match(/Message: okFailTest fails/)
})
it('remix-tests running a test file without enabling optimization and setting runs (should fail)', () => {
const res = spawnSync(executablePath, ['--runs', '300', resolve(__dirname + '/examples_0/assert_ok_test.sol')])
// match initial lines
expect(res.stdout.toString().trim()).to.contain('Optimization should be enabled for runs')
})
// it('remix-tests running a test file with custom compiler version', () => {
// const res = spawnSync(executablePath, ['--compiler', '0.7.4', resolve(__dirname + '/examples_0/assert_ok_test.sol')])
// // match initial lines
// expect(res.stdout.toString().trim()).to.contain('Compiler version set to 0.7.4. Latest version is')
// expect(res.stdout.toString().trim()).to.contain('Loading remote solc version v0.7.4+commit.3f05b770 ...')
// expect(res.stdout.toString().trim()).to.match(/:: Running tests using remix-tests ::/)
// expect(res.stdout.toString().trim()).to.match(/creation of library remix_tests.sol:Assert pending.../)
// // match test result
// expect(res.stdout.toString().trim()).to.match(/Ok pass test/)
// expect(res.stdout.toString().trim()).to.match(/Ok fail test/)
// // match fail test details
// expect(res.stdout.toString().trim()).to.match(/Message: okFailTest fails/)
// })
//
// it('remix-tests running a test file with unavailable custom compiler version (should fail)', () => {
// const res = spawnSync(executablePath, ['--compiler', '1.10.4', resolve(__dirname + '/examples_0/assert_ok_test.sol')])
// // match initial lines
// expect(res.stdout.toString().trim()).to.contain('No compiler found in releases with version 1.10.4')
// })
//
// it('remix-tests running a test file with custom EVM', () => {
// const res = spawnSync(executablePath, ['--evm', 'petersburg', resolve(__dirname + '/examples_0/assert_ok_test.sol')])
// // match initial lines
// expect(res.stdout.toString().trim()).to.contain('EVM set to petersburg')
// expect(res.stdout.toString().trim()).to.match(/:: Running tests using remix-tests ::/)
// expect(res.stdout.toString().trim()).to.match(/creation of library remix_tests.sol:Assert pending.../)
// // match test result
// expect(res.stdout.toString().trim()).to.match(/Ok pass test/)
// expect(res.stdout.toString().trim()).to.match(/Ok fail test/)
// // match fail test details
// expect(res.stdout.toString().trim()).to.match(/Message: okFailTest fails/)
// })
//
// it('remix-tests running a test file by enabling optimization', () => {
// const res = spawnSync(executablePath, ['--optimize', 'true', resolve(__dirname + '/examples_0/assert_ok_test.sol')])
// // match initial lines
// expect(res.stdout.toString().trim().includes('Optimization is enabled'))
// expect(res.stdout.toString().trim()).to.match(/:: Running tests using remix-tests ::/)
// expect(res.stdout.toString().trim()).to.match(/creation of library remix_tests.sol:Assert pending.../)
// // match test result
// expect(res.stdout.toString().trim()).to.match(/Ok pass test/)
// expect(res.stdout.toString().trim()).to.match(/Ok fail test/)
// // match fail test details
// expect(res.stdout.toString().trim()).to.match(/Message: okFailTest fails/)
// })
//
// it('remix-tests running a test file by enabling optimization and setting runs', () => {
// const res = spawnSync(executablePath, ['--optimize', 'true', '--runs', '300', resolve(__dirname + '/examples_0/assert_ok_test.sol')])
// // match initial lines
// expect(res.stdout.toString().trim()).to.contain('Optimization is enabled')
// expect(res.stdout.toString().trim()).to.contain('Runs set to 300')
// expect(res.stdout.toString().trim()).to.match(/:: Running tests using remix-tests ::/)
// expect(res.stdout.toString().trim()).to.match(/creation of library remix_tests.sol:Assert pending.../)
// // match test result
// expect(res.stdout.toString().trim()).to.match(/Ok pass test/)
// expect(res.stdout.toString().trim()).to.match(/Ok fail test/)
// // match fail test details
// expect(res.stdout.toString().trim()).to.match(/Message: okFailTest fails/)
// })
//
// it('remix-tests running a test file without enabling optimization and setting runs (should fail)', () => {
// const res = spawnSync(executablePath, ['--runs', '300', resolve(__dirname + '/examples_0/assert_ok_test.sol')])
// // match initial lines
// expect(res.stdout.toString().trim()).to.contain('Optimization should be enabled for runs')
// })
//
// it('remix-tests running a test file with all options', () => {
// const res = spawnSync(executablePath, ['--compiler', '0.7.5', '--evm', 'istanbul', '--optimize', 'true', '--runs', '250', resolve(__dirname + '/examples_0/assert_ok_test.sol')])
// // match initial lines
// expect(res.stdout.toString().trim()).to.contain('Compiler version set to 0.7.5. Latest version is')
// expect(res.stdout.toString().trim()).to.contain('Loading remote solc version v0.7.5+commit.eb77ed08 ...')
// expect(res.stdout.toString().trim()).to.contain('EVM set to istanbul')
// expect(res.stdout.toString().trim()).to.contain('Optimization is enabled')
// expect(res.stdout.toString().trim()).to.contain('Runs set to 250')
// expect(res.stdout.toString().trim()).to.match(/:: Running tests using remix-tests ::/)
// expect(res.stdout.toString().trim()).to.match(/creation of library remix_tests.sol:Assert pending.../)
// // match test result
// expect(res.stdout.toString().trim()).to.match(/Ok pass test/)
// expect(res.stdout.toString().trim()).to.match(/Ok fail test/)
// // match fail test details
// expect(res.stdout.toString().trim()).to.match(/Message: okFailTest fails/)
// })
it('remix-tests running a test file with all options', () => {
const res = spawnSync(executablePath, ['--compiler', '0.7.5', '--evm', 'istanbul', '--optimize', 'true', '--runs', '250', resolve(__dirname + '/examples_0/assert_ok_test.sol')])
// match initial lines
expect(res.stdout.toString().trim()).to.contain('Compiler version set to 0.7.5. Latest version is')
expect(res.stdout.toString().trim()).to.contain('Loading remote solc version v0.7.5+commit.eb77ed08 ...')
expect(res.stdout.toString().trim()).to.contain('EVM set to istanbul')
expect(res.stdout.toString().trim()).to.contain('Optimization is enabled')
expect(res.stdout.toString().trim()).to.contain('Runs set to 250')
expect(res.stdout.toString().trim()).to.match(/:: Running tests using remix-tests ::/)
expect(res.stdout.toString().trim()).to.match(/creation of library remix_tests.sol:Assert pending.../)
// match test result
expect(res.stdout.toString().trim()).to.match(/Ok pass test/)
expect(res.stdout.toString().trim()).to.match(/Ok fail test/)
// match fail test details
expect(res.stdout.toString().trim()).to.match(/Message: okFailTest fails/)
})
})
})
})

@@ -62,7 +62,7 @@ async function compileAndDeploy(filename: string, callback: any) {
.catch((_err: Error | null | undefined) => next(_err))
},
function compile(next: any): void {
compileFileOrFiles(filename, false, { accounts }, null, next)
compileFileOrFiles(filename, false, { accounts, web3 }, null, next)
},
function deployAllContracts(compilationResult: compilationInterface, asts, next: any): void {
for (const filename in asts) {
@@ -133,8 +133,8 @@ describe('testRunner', function () {
{ type: 'contract', value: 'AssertOkTest', filename: __dirname + '/examples_0/assert_ok_test.sol' },
{ type: 'testPass', debugTxHash: '0x5b665752a4faf83229259b9b2811d3295be0af633b0051d4b90042283ef55707', value: 'Ok pass test', filename: __dirname + '/examples_0/assert_ok_test.sol', context: 'AssertOkTest', hhLogs: hhLogs1 },
{ type: 'testFailure', debugTxHash: '0xa0a30ad042a7fc3495f72be7ba788d705888ffbbec7173f60bb27e07721510f2', value: 'Ok fail test', filename: __dirname + '/examples_0/assert_ok_test.sol', errMsg: 'okFailTest fails', context: 'AssertOkTest', hhLogs: hhLogs2, assertMethod: 'ok', location: '366:36:0', expected: 'true', returned: 'false' },
], ['time', 'web3'])
//
], ['time','type','debugTxHash','location','expected','returned','errMsg','assertMethod','web3'])
})
})
@@ -143,7 +143,7 @@ describe('testRunner', function () {
before((done) => {
compileAndDeploy(filename, (_err: Error | null | undefined, compilationData: any, contracts: any, asts: any, accounts: string[], web3: any) => {
runTest('AssertEqualTest', contracts.AssertEqualTest, compilationData[filename]['AssertEqualTest'], asts[filename], { accounts }, testCallback, resultsCallback(done))
runTest('AssertEqualTest', contracts.AssertEqualTest, compilationData[filename]['AssertEqualTest'], asts[filename], { accounts, web3 }, testCallback, resultsCallback(done))
})
})
@@ -184,7 +184,7 @@ describe('testRunner', function () {
before((done) => {
compileAndDeploy(filename, (_err: Error | null | undefined, compilationData: any, contracts: any, asts: any, accounts: string[], web3: any) => {
runTest('AssertNotEqualTest', contracts.AssertNotEqualTest, compilationData[filename]['AssertNotEqualTest'], asts[filename], { accounts }, testCallback, resultsCallback(done))
runTest('AssertNotEqualTest', contracts.AssertNotEqualTest, compilationData[filename]['AssertNotEqualTest'], asts[filename], { accounts, web3 }, testCallback, resultsCallback(done))
})
})
@@ -224,7 +224,7 @@ describe('testRunner', function () {
before((done) => {
compileAndDeploy(filename, (_err: Error | null | undefined, compilationData: any, contracts: any, asts: any, accounts: string[], web3: any) => {
runTest('AssertGreaterThanTest', contracts.AssertGreaterThanTest, compilationData[filename]['AssertGreaterThanTest'], asts[filename], { accounts }, testCallback, resultsCallback(done))
runTest('AssertGreaterThanTest', contracts.AssertGreaterThanTest, compilationData[filename]['AssertGreaterThanTest'], asts[filename], { accounts, web3 }, testCallback, resultsCallback(done))
})
})
@@ -258,7 +258,7 @@ describe('testRunner', function () {
before((done) => {
compileAndDeploy(filename, (_err: Error | null | undefined, compilationData: any, contracts: any, asts: any, accounts: string[], web3: any) => {
runTest('AssertLesserThanTest', contracts.AssertLesserThanTest, compilationData[filename]['AssertLesserThanTest'], asts[filename], { accounts }, testCallback, resultsCallback(done))
runTest('AssertLesserThanTest', contracts.AssertLesserThanTest, compilationData[filename]['AssertLesserThanTest'], asts[filename], { accounts, web3 }, testCallback, resultsCallback(done))
})
})
@@ -293,7 +293,7 @@ describe('testRunner', function () {
before((done) => {
compileAndDeploy(filename, (_err: Error | null | undefined, compilationData: any, contracts: any, asts: any, accounts: string[], web3: any) => {
runTest('MyTest', contracts.MyTest, compilationData[filename]['MyTest'], asts[filename], { accounts }, testCallback, resultsCallback(done))
runTest('MyTest', contracts.MyTest, compilationData[filename]['MyTest'], asts[filename], { accounts, web3 }, testCallback, resultsCallback(done))
})
})
@@ -324,7 +324,7 @@ describe('testRunner', function () {
before(done => {
compileAndDeploy(filename, function (_err: Error | null | undefined, compilationData: any, contracts: any, asts: any, accounts: string[], web3: any) {
runTest('MyTest', contracts.MyTest, compilationData[filename]['MyTest'], asts[filename], { accounts }, testCallback, resultsCallback(done))
runTest('MyTest', contracts.MyTest, compilationData[filename]['MyTest'], asts[filename], { accounts, web3 }, testCallback, resultsCallback(done))
})
})
@@ -354,7 +354,7 @@ describe('testRunner', function () {
before(done => {
compileAndDeploy(filename, function (_err: Error | null | undefined, compilationData: any, contracts: any, asts: any, accounts: string[], web3: any) {
runTest('StringTest', contracts.StringTest, compilationData[filename]['StringTest'], asts[filename], { accounts }, testCallback, resultsCallback(done))
runTest('StringTest', contracts.StringTest, compilationData[filename]['StringTest'], asts[filename], { accounts, web3 }, testCallback, resultsCallback(done))
})
})
@@ -380,7 +380,7 @@ describe('testRunner', function () {
before(done => {
compileAndDeploy(filename, function (_err: Error | null | undefined, compilationData: any, contracts: any, asts: any, accounts: string[], web3: any) {
runTest('StorageResolveTest', contracts.StorageResolveTest, compilationData[filename]['StorageResolveTest'], asts[filename], { accounts }, testCallback, resultsCallback(done))
runTest('StorageResolveTest', contracts.StorageResolveTest, compilationData[filename]['StorageResolveTest'], asts[filename], { accounts, web3 }, testCallback, resultsCallback(done))
})
})
@@ -407,7 +407,7 @@ describe('testRunner', function () {
before(done => {
compileAndDeploy(filename, function (_err: Error | null | undefined, compilationData: any, contracts: any, asts: any, accounts: string[], web3: any) {
runTest('SafeMathTest', contracts.SafeMathTest, compilationData[filename]['SafeMathTest'], asts[filename], { accounts }, testCallback, resultsCallback(done))
runTest('SafeMathTest', contracts.SafeMathTest, compilationData[filename]['SafeMathTest'], asts[filename], { accounts, web3 }, testCallback, resultsCallback(done))
})
})
@@ -427,7 +427,7 @@ describe('testRunner', function () {
before(done => {
compileAndDeploy(filename, function (_err: Error | null | undefined, compilationData: any, contracts: any, asts: any, accounts: string[], web3: any) {
runTest('IntegerTest', contracts.IntegerTest, compilationData[filename]['IntegerTest'], asts[filename], { accounts }, testCallback, resultsCallback(done))
runTest('IntegerTest', contracts.IntegerTest, compilationData[filename]['IntegerTest'], asts[filename], { accounts, web3 }, testCallback, resultsCallback(done))
})
})
@@ -447,7 +447,7 @@ describe('testRunner', function () {
before(done => {
compileAndDeploy(filename, function (_err: Error | null | undefined, compilationData: any, contracts: any, asts: any, accounts: string[], web3: any) {
runTest('SenderAndValueTest', contracts.SenderAndValueTest, compilationData[filename]['SenderAndValueTest'], asts[filename], { accounts }, testCallback, resultsCallback(done))
runTest('SenderAndValueTest', contracts.SenderAndValueTest, compilationData[filename]['SenderAndValueTest'], asts[filename], { accounts, web3 }, testCallback, resultsCallback(done))
})
})
@@ -475,7 +475,7 @@ describe('testRunner', function () {
}
before(done => {
compileAndDeploy(filename, function (_err: Error | null | undefined, compilationData: any, contracts: any, asts: any, accounts: string[], web3: any) {
runTest('SenderAndValueTest', undefined, compilationData[filename]['SenderAndValueTest'], asts[filename], { accounts }, testCallback, errorCallback(done))
runTest('SenderAndValueTest', undefined, compilationData[filename]['SenderAndValueTest'], asts[filename], { accounts, web3 }, testCallback, errorCallback(done))
})
})
@@ -488,4 +488,4 @@ describe('testRunner', function () {
})
})
})
})

Loading…
Cancel
Save