Merge branch 'master' into searchfixes

Branch: pull/2170/head
bunsenstraat authored 3 years ago; merge committed via GitHub
commit 2f611afbce
23 changed files (changed line count in parentheses):

  1. apps/debugger/src/app/debugger-api.ts (5)
  2. apps/remix-ide-e2e/src/tests/gist.test.ts (12)
  3. apps/remix-ide-e2e/src/tests/terminal.test.ts (168)
  4. apps/remix-ide/src/app/plugins/storage.ts (2)
  5. apps/remix-ide/src/app/tabs/test-tab.js (2)
  6. apps/solidity-compiler/src/app/compiler-api.ts (19)
  7. libs/remix-core-plugin/src/lib/compiler-fetch-and-compile.ts (22)
  8. libs/remix-core-plugin/src/lib/gist-handler.ts (2)
  9. libs/remix-core-plugin/src/lib/helpers/fetch-etherscan.ts (4)
 10. libs/remix-core-plugin/src/lib/helpers/fetch-sourcify.ts (4)
 11. libs/remix-debug/src/Ethdebugger.ts (1)
 12. libs/remix-debug/src/code/breakpointManager.ts (7)
 13. libs/remix-debug/src/debugger/debugger.ts (13)
 14. libs/remix-debug/test/debugger.ts (9)
 15. libs/remix-lib/src/execution/eventsDecoder.ts (50)
 16. libs/remix-solidity/src/compiler/compiler-abstract.ts (6)
 17. libs/remix-solidity/src/compiler/compiler.ts (2)
 18. libs/remix-solidity/src/compiler/helper.ts (23)
 19. libs/remix-solidity/src/index.ts (1)
 20. libs/remix-ui/debugger-ui/src/lib/debugger-ui.tsx (12)
 21. libs/remix-ui/editor/src/lib/remix-ui-editor.tsx (9)
 22. libs/remix-ui/renderer/src/lib/renderer.tsx (35)
 23. libs/remix-ui/workspace/src/lib/reducers/workspace.ts (4)

apps/debugger/src/app/debugger-api.ts

@@ -127,6 +127,11 @@ export const DebuggerApiMixin = (Base) => class extends Base {
  }
  debug (hash, web3?) {
+   try {
+     this.call('fetchAndCompile', 'clearCache')
+   } catch (e) {
+     console.error(e)
+   }
    this.debugHash = hash
    if (web3) this._web3 = web3
    else this._web3 = this.initialWeb3

apps/remix-ide-e2e/src/tests/gist.test.ts

@@ -36,9 +36,9 @@ module.exports = {
    .addFile('File.sol', { content: '' })
    .executeScript(`remix.loadgist('${gistid}')`)
    // .perform((done) => { if (runtimeBrowser === 'chrome') { browser.openFile('gists') } done() })
-   .waitForElementVisible(`[data-id="treeViewLitreeViewItem${gistid}"]`)
-   .click(`[data-id="treeViewLitreeViewItem${gistid}"]`)
-   .openFile(`${gistid}/README.txt`)
+   .waitForElementVisible(`[data-id="treeViewLitreeViewItemgist-${gistid}"]`)
+   .click(`[data-id="treeViewLitreeViewItemgist-${gistid}"]`)
+   .openFile(`gist-${gistid}/README.txt`)
    // Remix publish to gist
    /* .click('*[data-id="fileExplorerNewFilepublishToGist"]')
    .pause(2000)

@@ -140,9 +140,9 @@ module.exports = {
    })
    .setValue('*[data-id="gisthandlerModalDialogModalBody-react"] input[data-id="modalDialogCustomPromp"]', testData.validGistId)
    .modalFooterOKClick('gisthandler')
-   .openFile(`${testData.validGistId}/README.txt`)
-   .waitForElementVisible(`div[title='default_workspace/${testData.validGistId}/README.txt']`)
-   .assert.containsText(`div[title='default_workspace/${testData.validGistId}/README.txt'] > span`, 'README.txt')
+   .openFile(`gist-${testData.validGistId}/README.txt`)
+   .waitForElementVisible(`div[title='default_workspace/gist-${testData.validGistId}/README.txt']`)
+   .assert.containsText(`div[title='default_workspace/gist-${testData.validGistId}/README.txt'] > span`, 'README.txt')
    .end()
  }
}

apps/remix-ide-e2e/src/tests/terminal.test.ts

@@ -119,7 +119,47 @@ module.exports = {
    .waitForElementContainsText('*[data-id="terminalJournal"]', 'newOwner', 60000)
    .waitForElementContainsText('*[data-id="terminalJournal"]', '0xd9145CCE52D386f254917e481eB44e9943F39138', 60000)
  },
'Run tests using Mocha script and check result logging in the terminal #group4': function (browser: NightwatchBrowser) {
browser
.addFile('scripts/storage.test.js', { content: storageMochaTests })
.pause(1000)
.openFile('contracts/1_Storage.sol')
.clickLaunchIcon('solidity')
.click('*[data-id="compilerContainerCompileBtn"]')
.pause(1000) // compile Storage
.executeScript('remix.execute(\'scripts/storage.test.js\')')
.pause(1000)
.waitForElementContainsText('*[data-id="terminalJournal"]', 'Running tests....')
.waitForElementContainsText('*[data-id="terminalJournal"]', 'storage contract Address:')
.waitForElementContainsText('*[data-id="terminalJournal"]', '✓ test initial value')
.waitForElementContainsText('*[data-id="terminalJournal"]', '✓ test updating and retrieving updated value')
.waitForElementContainsText('*[data-id="terminalJournal"]', '✘ fail test updating and retrieving updated value')
.waitForElementContainsText('*[data-id="terminalJournal"]', 'Expected: 55')
.waitForElementContainsText('*[data-id="terminalJournal"]', 'Actual: 56')
.waitForElementContainsText('*[data-id="terminalJournal"]', 'Message: incorrect number: expected 56 to equal 55')
.waitForElementContainsText('*[data-id="terminalJournal"]', '2 passing, 1 failing')
},
'Run tests using Mocha for a contract with library deployment and check result logging in the terminal #group4': function (browser: NightwatchBrowser) {
browser
.addFile('scripts/storageWithLib.test.js', { content: storageWithLibMochaTests })
.pause(1000)
.click('[data-id="treeViewDivtreeViewItemcontracts"]')
.addFile('contracts/StorageWithLib.sol', { content: storageWithLibContract })
.openFile('contracts/StorageWithLib.sol')
.clickLaunchIcon('solidity')
.click('*[data-id="compilerContainerCompileBtn"]')
.pause(1000) // compile StorageWithLib
.executeScript('remix.execute(\'scripts/storageWithLib.test.js\')')
.pause(1000)
.waitForElementContainsText('*[data-id="terminalJournal"]', 'Running tests....')
.waitForElementContainsText('*[data-id="terminalJournal"]', 'Storage with lib')
.waitForElementContainsText('*[data-id="terminalJournal"]', 'deploying lib:')
.waitForElementContainsText('*[data-id="terminalJournal"]', '✘ test library integration by calling a lib method')
.waitForElementContainsText('*[data-id="terminalJournal"]', 'Expected: 34')
.waitForElementContainsText('*[data-id="terminalJournal"]', 'Actual: 14')
.waitForElementContainsText('*[data-id="terminalJournal"]', 'Message: expected \'14\' to equal \'34\'')
.waitForElementContainsText('*[data-id="terminalJournal"]', '0 passing, 1 failing')
},
  'Should print hardhat logs #group4': function (browser: NightwatchBrowser) {
    browser
      .click('*[data-id="terminalClearConsole"]') // clear the terminal

@@ -261,6 +301,132 @@ const deployWithEthersJs = `
  }
})()`
const storageMochaTests = `
const { expect } = require("chai");
describe("Storage with lib", function () {
it("test initial value", async function () {
// Make sure contract is compiled and artifacts are generated
const metadata = JSON.parse(await remix.call('fileManager', 'getFile', 'contracts/artifacts/Storage.json'))
const signer = (new ethers.providers.Web3Provider(web3Provider)).getSigner()
let Storage = new ethers.ContractFactory(metadata.abi, metadata.data.bytecode.object, signer);
let storage = await Storage.deploy();
console.log('storage contract Address: ' + storage.address);
await storage.deployed()
expect((await storage.retrieve()).toNumber()).to.equal(0);
});
it("test updating and retrieving updated value", async function () {
const metadata = JSON.parse(await remix.call('fileManager', 'getFile', 'contracts/artifacts/Storage.json'))
const signer = (new ethers.providers.Web3Provider(web3Provider)).getSigner()
let Storage = new ethers.ContractFactory(metadata.abi, metadata.data.bytecode.object, signer);
let storage = await Storage.deploy();
await storage.deployed()
const setValue = await storage.store(56);
await setValue.wait();
expect((await storage.retrieve()).toNumber()).to.equal(56);
});
it("fail test updating and retrieving updated value", async function () {
const metadata = JSON.parse(await remix.call('fileManager', 'getFile', 'contracts/artifacts/Storage.json'))
const signer = (new ethers.providers.Web3Provider(web3Provider)).getSigner()
let Storage = new ethers.ContractFactory(metadata.abi, metadata.data.bytecode.object, signer);
let storage = await Storage.deploy();
await storage.deployed()
const setValue = await storage.store(56);
await setValue.wait();
expect((await storage.retrieve()).toNumber(), 'incorrect number').to.equal(55);
});
});`
const storageWithLibContract = `
// SPDX-License-Identifier: GPL-3.0
pragma solidity >=0.7.0 <0.9.0;
library Lib {
function test () public view returns (uint) {
return 14;
}
}
/**
* @title Storage
* @dev Store & retrieve value inr a variable
*/
contract StorageWithLib {
uint256 number;
/**
* @dev Store valrue in variable
* @param num value to store
*/
function store(uint256 num) public {
number = num;
}
/**
* @dev Return value
* @return value of 'number'
*/
function retrieve() public view returns (uint256){
return number;
}
function getFromLib() public view returns (uint) {
return Lib.test();
}
}
`
const storageWithLibMochaTests = `
const { expect } = require("chai");
const { ethers } = require("hardhat");
describe("Storage", function () {
it("test library integration by calling a lib method", async function () {
const metadataLib = JSON.parse(await remix.call('fileManager', 'readFile', 'contracts/artifacts/Lib.json'))
console.log('deploying lib:')
const artifactLib = {
contractName: 'Lib',
sourceName: 'contracts/StorageWithLib.sol',
abi: metadataLib.abi,
bytecode: '0x' + metadataLib.data.bytecode.object,
deployedBytecode: '0x' + metadataLib.data.deployedBytecode.object,
linkReferences: metadataLib.data.bytecode.linkReferences,
deployedLinkReferences: metadataLib.data.deployedBytecode.linkReferences,
}
const optionsLib = {}
const factoryLib = await ethers.getContractFactoryFromArtifact(artifactLib, optionsLib)
const lib = await factoryLib.deploy();
await lib.deployed()
const metadata = JSON.parse(await remix.call('fileManager', 'readFile', 'contracts/artifacts/StorageWithLib.json'))
const artifact = {
contractName: 'StorageWithLib',
sourceName: 'contracts/StorageWithLib.sol',
abi: metadata.abi,
bytecode: '0x' + metadata.data.bytecode.object,
deployedBytecode: '0x' + metadata.data.deployedBytecode.object,
linkReferences: metadata.data.bytecode.linkReferences,
deployedLinkReferences: metadata.data.deployedBytecode.linkReferences,
}
const options = {
libraries: {
'Lib': lib.address
}
}
const factory = await ethers.getContractFactoryFromArtifact(artifact, options)
const storage = await factory.deploy();
await storage.deployed()
const storeValue = await storage.store(333);
await storeValue.wait();
expect((await storage.getFromLib()).toString()).to.equal('34');
});
});`
const hardhatLog = `
// SPDX-License-Identifier: GPL-3.0

apps/remix-ide/src/app/plugins/storage.ts

@@ -39,7 +39,7 @@ export class StoragePlugin extends Plugin {
      if (!localStorage.hasOwnProperty(_x)) {
        continue
      }
-     _xLen = ((localStorage[_x].length + _x.length) * 2)
+     _xLen = ((localStorage[_x].length + _x.length))
      _lsTotal += _xLen
    }
    return (_lsTotal / 1024).toFixed(2)
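A standalone sketch (not part of this commit) of the calculation above: summing key and value lengths gives a character count, and multiplying by 2, as the removed line did, approximates the UTF-16 byte count before dividing by 1024.

```ts
// Illustration only: estimate localStorage usage the way the plugin does.
// countBytes = true reproduces the old "* 2" behaviour (bytes);
// countBytes = false reproduces the new behaviour (characters).
export function localStorageUsedKB (countBytes = false): string {
  let total = 0
  for (const key in localStorage) {
    if (!Object.prototype.hasOwnProperty.call(localStorage, key)) continue
    const entryLength = localStorage[key].length + key.length
    total += countBytes ? entryLength * 2 : entryLength
  }
  return (total / 1024).toFixed(2)
}
```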

apps/remix-ide/src/app/tabs/test-tab.js

@@ -146,7 +146,7 @@ module.exports = class TestTab extends ViewPlugin {
  renderComponent (testDirPath) {
    this.dispatch({
      testTab: this,
-     helper: this.helper,
+     helper: helper,
      testDirPath: testDirPath
    })
  }

apps/solidity-compiler/src/app/compiler-api.ts

@@ -1,4 +1,4 @@
- import { compile } from '@remix-project/remix-solidity'
+ import { compile, helper } from '@remix-project/remix-solidity'
  import { CompileTabLogic, parseContracts } from '@remix-ui/solidity-compiler' // eslint-disable-line
  import type { ConfigurationSettings } from '@remix-project/remix-lib-ts'

@@ -261,7 +261,7 @@ export const CompilerApiMixin = (Base) => class extends Base {
    this.on('fileManager', 'fileClosed', this.data.eventHandlers.onFileClosed)
-   this.data.eventHandlers.onCompilationFinished = (success, data, source, input, version) => {
+   this.data.eventHandlers.onCompilationFinished = async (success, data, source, input, version) => {
      this.compileErrors = data
      if (success) {
        // forwarding the event to the appManager infra

@@ -291,6 +291,21 @@ export const CompilerApiMixin = (Base) => class extends Base {
      if (success) this.compiler.visitContracts((contract) => { this.compilationDetails.contractMap[contract.name] = contract })
      this.compilationDetails.target = source.target
      if (this.onCompilationFinished) this.onCompilationFinished(this.compilationDetails)
+     // set annotations
+     if (data.errors) {
+       for (const error of data.errors) {
+         let pos = helper.getPositionDetails(error.formattedMessage)
+         if (pos.errFile) {
+           pos = {
+             row: pos.errLine,
+             column: pos.errCol,
+             text: error.formattedMessage,
+             type: error.severity
+           }
+           await this.call('editor', 'addAnnotation', pos, pos.errFile)
+         }
+       }
+     }
    }
    this.compiler.event.register('compilationFinished', this.data.eventHandlers.onCompilationFinished)
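A hedged sketch of what the new annotation step does: each compiler diagnostic's formattedMessage is parsed into a position and grouped per file, ready for an editor addAnnotation-style call. The error shape and the injected getPositionDetails parameter are simplified stand-ins, not the commit's exact types.

```ts
// Illustration only; CompilationErrorLike approximates the compiler's error objects.
interface CompilationErrorLike { formattedMessage: string; severity: string }
interface Annotation { row: number; column: number; text: string; type: string }
type PositionParser = (msg: string) => { errLine: number; errCol: number; errFile: string }

function toAnnotations (errors: CompilationErrorLike[], getPositionDetails: PositionParser): Map<string, Annotation[]> {
  const byFile = new Map<string, Annotation[]>()
  for (const error of errors) {
    const pos = getPositionDetails(error.formattedMessage)
    if (!pos.errFile) continue // e.g. SPDX license warnings carry no file location
    const list = byFile.get(pos.errFile) || []
    list.push({ row: pos.errLine, column: pos.errCol, text: error.formattedMessage, type: error.severity })
    byFile.set(pos.errFile, list)
  }
  return byFile
}
```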

libs/remix-core-plugin/src/lib/compiler-fetch-and-compile.ts

@@ -1,4 +1,3 @@
  import { Plugin } from '@remixproject/engine'
  import { compile } from '@remix-project/remix-solidity'
  import { util } from '@remix-project/remix-lib'

@@ -8,7 +7,7 @@ import { fetchContractFromSourcify } from './helpers/fetch-sourcify'
  const profile = {
    name: 'fetchAndCompile',
-   methods: ['resolve'],
+   methods: ['resolve', 'clearCache'],
    version: '0.0.1'
  }

@@ -21,6 +20,14 @@ export class FetchAndCompile extends Plugin {
    this.sourceVerifierNetWork = ['Main', 'Rinkeby', 'Ropsten', 'Goerli']
  }
+ /**
+  * Clear the cache
+  *
+  */
+ async clearCache () {
+   this.unresolvedAddresses = []
+ }
  /**
   * Fetch compiliation metadata from source-Verify from a given @arg contractAddress - https://github.com/ethereum/source-verify
   * Put the artifacts in the file explorer

@@ -68,6 +75,7 @@ export class FetchAndCompile extends Plugin {
      }
    }
+   targetPath = `${targetPath}/${network.id}/${contractAddress}`
    let data
    try {
      data = await fetchContractFromSourcify(this, network, contractAddress, targetPath)

@@ -100,7 +108,15 @@
      const compData = await compile(
        compilationTargets,
        settings,
-       async (url, cb) => await this.call('contentImport', 'resolveAndSave', url).then((result) => cb(null, result)).catch((error) => cb(error.message)))
+       async (url, cb) => {
+         // we first try to resolve the content from the compilation target using a more appropiate path
+         const path = `${targetPath}/${url}`
+         if (compilationTargets[path] && compilationTargets[path].content) {
+           return cb(null, compilationTargets[path].content)
+         } else {
+           await this.call('contentImport', 'resolveAndSave', url).then((result) => cb(null, result)).catch((error) => cb(error.message))
+         }
+       })
      await this.call('compilerArtefacts', 'addResolvedContract', contractAddress, compData)
      return compData
    } catch (e) {
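A sketch of the import-resolution strategy introduced in the last hunk: sources already present in the fetched compilation targets (keyed under targetPath) are served locally, and only unknown imports fall through to a remote resolver. fetchRemote stands in for the contentImport call and is an assumption, not the plugin API.

```ts
type Sources = Record<string, { content: string }>

// Illustrative resolver; mirrors the "check compilationTargets first" logic above.
async function resolveImport (
  url: string,
  targetPath: string,
  compilationTargets: Sources,
  fetchRemote: (url: string) => Promise<string> // stand-in for contentImport.resolveAndSave
): Promise<string> {
  const localPath = `${targetPath}/${url}`
  if (compilationTargets[localPath]?.content) return compilationTargets[localPath].content
  return fetchRemote(url)
}
```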

libs/remix-core-plugin/src/lib/gist-handler.ts

@@ -116,7 +116,7 @@ export class GistHandler extends Plugin {
  const obj: StringByString = {}
  Object.keys(data.files).forEach((element) => {
    const path = element.replace(/\.\.\./g, '/')
-   obj['/' + gistId + '/' + path] = data.files[element]
+   obj['/gist-' + gistId + '/' + path] = data.files[element]
  })
  this.call('fileManager', 'setBatchFiles', obj, 'workspace', true, async (errorSavingFiles: any) => {
    if (errorSavingFiles) {
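Minimal sketch of the resulting path layout: a gist file named with '...' separators lands under a 'gist-<id>' folder in the workspace, which is what the updated selectors in gist.test.ts above expect.

```ts
// Illustration of the mapping performed above.
function gistFilesToWorkspacePaths (gistId: string, files: Record<string, unknown>): Record<string, unknown> {
  const batch: Record<string, unknown> = {}
  for (const name of Object.keys(files)) {
    // 'folder...Contract.sol' becomes '/gist-<id>/folder/Contract.sol'
    batch['/gist-' + gistId + '/' + name.replace(/\.\.\./g, '/')] = files[name]
  }
  return batch
}
```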

libs/remix-core-plugin/src/lib/helpers/fetch-etherscan.ts

@@ -23,7 +23,7 @@ export const fetchContractFromEtherscan = async (plugin, network, contractAddres
  }
  if (typeof data.result[0].SourceCode === 'string') {
-   const fileName = `${targetPath}/${network.id}/${contractAddress}/${data.result[0].ContractName}.sol`
+   const fileName = `${targetPath}/${data.result[0].ContractName}.sol`
    await plugin.call('fileManager', 'setFile', fileName , data.result[0].SourceCode)
    compilationTargets[fileName] = { content: data.result[0].SourceCode }
  } else if (data.result[0].SourceCode && typeof data.result[0].SourceCode == 'object') {

@@ -34,7 +34,7 @@ export const fetchContractFromEtherscan = async (plugin, network, contractAddres
  if (await plugin.call('contentImport', 'isExternalUrl', file)) {
    // nothing to do, the compiler callback will handle those
  } else {
-   const path = `${targetPath}/${network.id}/${contractAddress}/${file}`
+   const path = `${targetPath}/${file}`
    const content = (source as any).content
    await plugin.call('fileManager', 'setFile', path, content)
    compilationTargets[path] = { content }

libs/remix-core-plugin/src/lib/helpers/fetch-sourcify.ts

@@ -13,7 +13,7 @@ export const fetchContractFromSourcify = async (plugin, network, contractAddress
  }
  // set the solidity contract code using metadata
- await plugin.call('fileManager', 'setFile', `${targetPath}/${network.id}/${contractAddress}/metadata.json`, JSON.stringify(data.metadata, null, '\t'))
+ await plugin.call('fileManager', 'setFile', `${targetPath}/metadata.json`, JSON.stringify(data.metadata, null, '\t'))
  for (let file in data.metadata.sources) {
    const urls = data.metadata.sources[file].urls
    for (const url of urls) {

@@ -24,7 +24,7 @@ export const fetchContractFromSourcify = async (plugin, network, contractAddress
  if (await plugin.call('contentImport', 'isExternalUrl', file)) {
    // nothing to do, the compiler callback will handle those
  } else {
-   const path = `${targetPath}/${network.id}/${contractAddress}/${file}`
+   const path = `${targetPath}/${file}`
    await plugin.call('fileManager', 'setFile', path, source.content)
    compilationTargets[path] = { content: source.content }
  }

libs/remix-debug/src/Ethdebugger.ts

@@ -64,7 +64,6 @@ export class Ethdebugger {
    this.solidityProxy,
    this.codeManager,
    { ...this.opts, includeLocalVariables })
- this.event.trigger('managersChanged')
  }
  resolveStep (index) {

libs/remix-debug/src/code/breakpointManager.ts

@@ -87,6 +87,7 @@ export class BreakpointManager {
   *
   */
  async jump (fromStep, direction, defaultToLimit, trace) {
+   this.event.trigger('locatingBreakpoint', [])
    let sourceLocation
    let previousSourceLocation
    let currentStep = fromStep + direction

@@ -113,14 +114,14 @@
        }
        if (this.hasBreakpointAtLine(sourceLocation.file, lineColumn.start.line)) {
          lineHadBreakpoint = true
-         if (direction === 1 && this.hitLine(currentStep, sourceLocation, previousSourceLocation, trace)) {
+         if (this.hitLine(currentStep, sourceLocation, previousSourceLocation, trace)) {
            return
          }
        }
      }
      currentStep += direction
    }
-   this.event.trigger('NoBreakpointHit', [])
+   this.event.trigger('noBreakpointHit', [])
    if (!defaultToLimit) {
      return
    }

@@ -172,6 +173,7 @@
   * @param {Object} sourceLocation - position of the breakpoint { file: '<file index>', row: '<line number' }
   */
  add (sourceLocation) {
+   sourceLocation.row -= 1
    if (!this.breakpoints[sourceLocation.fileName]) {
      this.breakpoints[sourceLocation.fileName] = []
    }

@@ -185,6 +187,7 @@
   * @param {Object} sourceLocation - position of the breakpoint { file: '<file index>', row: '<line number' }
   */
  remove (sourceLocation) {
+   sourceLocation.row -= 1
    const sources = this.breakpoints[sourceLocation.fileName]
    if (!sources) {
      return
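A sketch of the row convention these two one-line additions establish: the UI passes 1-based editor lines, while the manager stores 0-based rows, so add() and remove() now decrement on the way in (which is also why the test in libs/remix-debug/test/debugger.ts further down moves from row 38 to row 39). This is a standalone illustration, not the real BreakpointManager.

```ts
interface BreakpointLocation { fileName: string; row: number }

// Standalone illustration of the 1-based (editor) to 0-based (internal) conversion.
class BreakpointStore {
  private byFile: Record<string, number[]> = {}

  add (loc: BreakpointLocation): void {
    const row = loc.row - 1 // editor line -> internal row
    const rows = this.byFile[loc.fileName] || (this.byFile[loc.fileName] = [])
    if (!rows.includes(row)) rows.push(row)
  }

  remove (loc: BreakpointLocation): void {
    const row = loc.row - 1
    const rows = this.byFile[loc.fileName]
    if (rows) this.byFile[loc.fileName] = rows.filter(r => r !== row)
  }

  hasBreakpointAtLine (fileName: string, internalRow: number): boolean {
    return (this.byFile[fileName] || []).includes(internalRow)
  }
}
```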

libs/remix-debug/src/debugger/debugger.ts

@@ -41,15 +41,18 @@ export class Debugger {
      }
    })
-   this.breakPointManager.event.register('managersChanged', () => {
-     const { traceManager, callTree, solidityProxy } = this.debugger
-     this.breakPointManager.setManagers({ traceManager, callTree, solidityProxy })
-   })
    this.breakPointManager.event.register('breakpointStep', (step) => {
      this.step_manager.jumpTo(step)
    })
+   this.breakPointManager.event.register('noBreakpointHit', (step) => {
+     this.event.trigger('noBreakpointHit', [])
+   })
+   this.breakPointManager.event.register('locatingBreakpoint', () => {
+     this.event.trigger('locatingBreakpoint', [])
+   })
    this.debugger.setBreakpointManager(this.breakPointManager)
    this.debugger.event.register('newTraceLoaded', this, () => {
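Sketch of the event forwarding added above: the Debugger re-emits the breakpoint manager's progress events so a UI (like the status line added in debugger-ui.tsx below) can subscribe in one place. The tiny emitter is a stand-in for remix's EventManager, not its actual API.

```ts
type Listener = (...args: any[]) => void

// Minimal stand-in emitter with EventManager-like register/trigger.
class TinyEmitter {
  private listeners: Record<string, Listener[]> = {}
  register (name: string, fn: Listener): void { (this.listeners[name] = this.listeners[name] || []).push(fn) }
  trigger (name: string, args: any[] = []): void { for (const fn of this.listeners[name] || []) fn(...args) }
}

// Forward breakpoint progress events from the manager to the debugger's own bus.
function forwardBreakpointEvents (breakPointManager: TinyEmitter, debuggerEvents: TinyEmitter): void {
  breakPointManager.register('locatingBreakpoint', () => debuggerEvents.trigger('locatingBreakpoint', []))
  breakPointManager.register('noBreakpointHit', () => debuggerEvents.trigger('noBreakpointHit', []))
}
```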

libs/remix-debug/test/debugger.ts

@@ -277,22 +277,15 @@ function testDebugging (debugManager) {
    return sourceMappingDecoder.convertOffsetToLineColumn(rawLocation, sourceMappingDecoder.getLinebreakPositions(ballot))
  }})
- breakPointManager.event.register('managersChanged', () => {
-   const {traceManager, callTree, solidityProxy} = debugManager
-   breakPointManager.setManagers({traceManager, callTree, solidityProxy})
- })
- breakPointManager.add({fileName: 'test.sol', row: 38})
+ breakPointManager.add({fileName: 'test.sol', row: 39})
  breakPointManager.event.register('breakpointHit', function (sourceLocation, step) {
-   console.log('breakpointHit')
    t.equal(JSON.stringify(sourceLocation), JSON.stringify({ start: 1153, length: 6, file: 0, jump: '-' }))
    t.equal(step, 212)
  })
  breakPointManager.event.register('noBreakpointHit', function () {
    t.end('noBreakpointHit')
-   console.log('noBreakpointHit')
  })
  breakPointManager.jumpNextBreakpoint(0, true)
})

libs/remix-lib/src/execution/eventsDecoder.ts

@@ -56,21 +56,22 @@ export class EventsDecoder {
    return eventsABI
  }
- _event (hash: string, eventsABI: Record<string, unknown>, contractName: string) {
-   const events = eventsABI[contractName]
-   if (!events) return null
-   if (events[hash]) {
-     const event = events[hash]
+ _event (hash, eventsABI) {
+   // get all the events responding to that hash.
+   const contracts = []
+   for (const k in eventsABI) {
+     if (eventsABI[k][hash]) {
+       const event = eventsABI[k][hash]
        for (const input of event.inputs) {
          if (input.type === 'function') {
            input.type = 'bytes24'
            input.baseType = 'bytes24'
          }
        }
-     return event
+       contracts.push(event)
+     }
    }
-   return null
+   return contracts
  }
  _stringifyBigNumber (value): string {

@@ -95,16 +96,23 @@
      // [address, topics, mem]
      const log = logs[i]
      const topicId = log.topics[0]
-     const eventAbi = this._event(topicId.replace('0x', ''), eventsABI, contractName)
-     if (eventAbi) {
-       const decodedlog = eventAbi.abi.parseLog(log)
-       const decoded = {}
-       for (const v in decodedlog.args) {
-         decoded[v] = this._stringifyEvent(decodedlog.args[v])
-       }
-       events.push({ from: log.address, topic: topicId, event: eventAbi.event, args: decoded })
-     } else {
-       events.push({ from: log.address, data: log.data, topics: log.topics })
-     }
+     const eventAbis = this._event(topicId.replace('0x', ''), eventsABI)
+     for (const eventAbi of eventAbis) {
+       try {
+         if (eventAbi) {
+           const decodedlog = eventAbi.abi.parseLog(log)
+           const decoded = {}
+           for (const v in decodedlog.args) {
+             decoded[v] = this._stringifyEvent(decodedlog.args[v])
+           }
+           events.push({ from: log.address, topic: topicId, event: eventAbi.event, args: decoded })
+         } else {
+           events.push({ from: log.address, data: log.data, topics: log.topics })
+         }
+         break // if one of the iteration is successful
+       } catch (e) {
+         continue
+       }
+     }
    }
    cb(null, { decoded: events, raw: logs })
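A sketch of the decoding strategy the two hunks above introduce: a topic hash may now match events declared in several compiled contracts, so every candidate ABI is tried and the first one that parses the log wins. ethers' Interface is used here as a stand-in for the abi object carried by each candidate.

```ts
import { Interface, LogDescription } from '@ethersproject/abi'

interface RawLog { address: string; data: string; topics: string[] }

// Try each candidate ABI; parseLog throws when the ABI does not match, so the
// first successful parse is returned and the rest are skipped.
function decodeWithCandidates (log: RawLog, candidates: Interface[]): LogDescription | null {
  for (const iface of candidates) {
    try {
      return iface.parseLog({ topics: log.topics, data: log.data })
    } catch (e) {
      continue // wrong candidate, try the next one
    }
  }
  return null
}
```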

libs/remix-solidity/src/compiler/compiler-abstract.ts

@@ -1,5 +1,5 @@
  'use strict'
- import txHelper from './txHelper'
+ import helper from './helper'
  export class CompilerAbstract {
    languageversion: any

@@ -18,11 +18,11 @@
  }
  getContract (name) {
-   return txHelper.getContract(name, this.data.contracts)
+   return helper.getContract(name, this.data.contracts)
  }
  visitContracts (calllback) {
-   return txHelper.visitContracts(this.data.contracts, calllback)
+   return helper.visitContracts(this.data.contracts, calllback)
  }
  getData () {

libs/remix-solidity/src/compiler/compiler.ts

@@ -4,7 +4,7 @@ import { update } from 'solc/abi'
  import * as webworkify from 'webworkify-webpack'
  import compilerInput from './compiler-input'
  import EventManager from '../lib/eventManager'
- import txHelper from './txHelper'
+ import txHelper from './helper'
  import {
    Source, SourceWithTarget, MessageFromWorker, CompilerState, CompilationResult,
    visitContractsCallbackParam, visitContractsCallbackInterface, CompilationError,

libs/remix-solidity/src/compiler/helper.ts

@@ -35,6 +35,27 @@ export default {
        if (cb(param)) return
      }
    }
- }
+ },
+ // ^ e.g:
+ // browser/gm.sol: Warning: Source file does not specify required compiler version! Consider adding "pragma solidity ^0.6.12
+ // https://github.com/OpenZeppelin/openzeppelin-contracts/blob/release-v3.2.0/contracts/introspection/IERC1820Registry.sol:3:1: ParserError: Source file requires different compiler version (current compiler is 0.7.4+commit.3f05b770.Emscripten.clang) - note that nightly builds are considered to be strictly less than the released version
+ getPositionDetails: (msg: string) => {
+   const result = { } as Record<string, number | string>
+   // To handle some compiler warning without location like SPDX license warning etc
+   if (!msg.includes(':')) return { errLine: -1, errCol: -1, errFile: '' }
+   if (msg.includes('-->')) msg = msg.split('-->')[1].trim()
+   // extract line / column
+   let pos = msg.match(/^(.*?):([0-9]*?):([0-9]*?)?/)
+   result.errLine = pos ? parseInt(pos[2]) - 1 : -1
+   result.errCol = pos ? parseInt(pos[3]) : -1
+   // extract file
+   pos = msg.match(/^(https:.*?|http:.*?|.*?):/)
+   result.errFile = pos ? pos[1] : msg
+   return result
+ }
}
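Usage sketch for the helper added above, with a made-up compiler message in the older 'file:line:col:' shape shown in the comment: errLine comes back zero-based and errFile is the leading path segment; messages using the newer '-->' layout are split on the arrow first.

```ts
import { helper } from '@remix-project/remix-solidity' // re-exported below in index.ts

// Hypothetical message, shaped like the examples in the comment above.
const msg = 'contracts/Storage.sol:7:5: DeclarationError: Undeclared identifier.'
const pos = helper.getPositionDetails(msg)
// pos.errFile === 'contracts/Storage.sol', pos.errLine === 6 (zero-based)

// Messages without any ':' (e.g. the SPDX license warning) yield the placeholders:
// { errLine: -1, errCol: -1, errFile: '' }
```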

libs/remix-solidity/src/index.ts

@@ -4,3 +4,4 @@ export { default as CompilerInput, getValidLanguage } from './compiler/compiler-
  export { CompilerAbstract } from './compiler/compiler-abstract'
  export * from './compiler/types'
  export { promisedMiniXhr, pathToURL, baseURLBin, baseURLWasm, canUseWorker, urlFromVersion } from './compiler/compiler-utils'
+ export { default as helper } from './compiler/helper'

libs/remix-ui/debugger-ui/src/lib/debugger-ui.tsx

@@ -87,6 +87,18 @@ export const DebuggerUI = (props: DebuggerUIProps) => {
      })
    })
+   debuggerInstance.event.register('locatingBreakpoint', async (isActive) => {
+     setState(prevState => {
+       return { ...prevState, sourceLocationStatus: 'Locating breakpoint, this might take a while...' }
+     })
+   })
+   debuggerInstance.event.register('noBreakpointHit', async (isActive) => {
+     setState(prevState => {
+       return { ...prevState, sourceLocationStatus: '' }
+     })
+   })
    debuggerInstance.event.register('newSourceLocation', async (lineColumnPos, rawLocation, generatedSources, address) => {
      if (!lineColumnPos) {
        await debuggerModule.discardHighlight()

libs/remix-ui/editor/src/lib/remix-ui-editor.tsx

@@ -271,7 +271,10 @@ export const EditorUI = (props: EditorUIProps) => {
  props.editorAPI.clearDecorationsByPlugin = (filePath: string, plugin: string, typeOfDecoration: string, registeredDecorations: any, currentDecorations: any) => {
    const model = editorModelsState[filePath]?.model
-   if (!model) return
+   if (!model) return {
+     currentDecorations: [],
+     registeredDecorations: []
+   }
    const decorations = []
    const newRegisteredDecorations = []
    if (registeredDecorations) {

@@ -290,7 +293,9 @@
  props.editorAPI.keepDecorationsFor = (filePath: string, plugin: string, typeOfDecoration: string, registeredDecorations: any, currentDecorations: any) => {
    const model = editorModelsState[filePath]?.model
-   if (!model) return
+   if (!model) return {
+     currentDecorations: []
+   }
    const decorations = []
    if (registeredDecorations) {
      for (const decoration of registeredDecorations) {

libs/remix-ui/renderer/src/lib/renderer.tsx

@@ -1,4 +1,5 @@
  import React, { useEffect, useState } from 'react' //eslint-disable-line
+ import { helper } from '@remix-project/remix-solidity'
  import './renderer.css'
  interface RendererProps {
    message: any;

@@ -29,51 +30,19 @@
    // ^ e.g:
    // browser/gm.sol: Warning: Source file does not specify required compiler version! Consider adding "pragma solidity ^0.6.12
    // https://github.com/OpenZeppelin/openzeppelin-contracts/blob/release-v3.2.0/contracts/introspection/IERC1820Registry.sol:3:1: ParserError: Source file requires different compiler version (current compiler is 0.7.4+commit.3f05b770.Emscripten.clang) - note that nightly builds are considered to be strictly less than the released version
-   const positionDetails = getPositionDetails(text)
+   const positionDetails = helper.getPositionDetails(text)
    opt.errLine = positionDetails.errLine
    opt.errCol = positionDetails.errCol
    opt.errFile = positionDetails.errFile ? (positionDetails.errFile as string).trim() : ''
-   if (!opt.noAnnotations && opt.errFile && opt.errFile !== '') {
-     addAnnotation(opt.errFile, {
-       row: opt.errLine,
-       column: opt.errCol,
-       text: text,
-       type: opt.type
-     })
-   }
    setMessageText(text)
    setEditorOptions(opt)
    setClose(false)
    setClassList(opt.type === 'error' ? 'alert alert-danger' : 'alert alert-warning')
  }, [message, opt])
- const getPositionDetails = (msg: string) => {
-   const result = { } as Record<string, number | string>
-   // To handle some compiler warning without location like SPDX license warning etc
-   if (!msg.includes(':')) return { errLine: -1, errCol: -1, errFile: '' }
-   if (msg.includes('-->')) msg = msg.split('-->')[1].trim()
-   // extract line / column
-   let pos = msg.match(/^(.*?):([0-9]*?):([0-9]*?)?/)
-   result.errLine = pos ? parseInt(pos[2]) - 1 : -1
-   result.errCol = pos ? parseInt(pos[3]) : -1
-   // extract file
-   pos = msg.match(/^(https:.*?|http:.*?|.*?):/)
-   result.errFile = pos ? pos[1] : msg
-   return result
- }
- const addAnnotation = async (file, error) => {
-   if (file === await plugin.call('config', 'getAppParameter', 'currentFile')) {
-     await plugin.call('editor', 'addAnnotation', error, file)
-   }
- }
  const handleErrorClick = (opt) => {
    if (opt.click) {

libs/remix-ui/workspace/src/lib/reducers/workspace.ts

@@ -641,7 +641,7 @@ const removeInputField = (state: BrowserState, path: string): { [x: string]: Rec
  files = _.setWith(files, _path, {
    isDirectory: true,
    path,
-   name: extractNameFromKey(path).indexOf('gist-') === 0 ? extractNameFromKey(path).split('-')[1] : extractNameFromKey(path),
+   name: extractNameFromKey(path),
    type: extractNameFromKey(path).indexOf('gist-') === 0 ? 'gist' : 'folder',
    child: prevFiles ? prevFiles.child : {}
  }, Object)

@@ -742,7 +742,7 @@ const normalize = (filesList, directory?: string, newInputType?: 'folder' | 'fil
  if (filesList[key].isDirectory) {
    folders[extractNameFromKey(key)] = {
      path,
-     name: extractNameFromKey(path).indexOf('gist-') === 0 ? extractNameFromKey(path).split('-')[1] : extractNameFromKey(path),
+     name: extractNameFromKey(path),
      isDirectory: filesList[key].isDirectory,
      type: extractNameFromKey(path).indexOf('gist-') === 0 ? 'gist' : 'folder'
    }
