Merge branch 'master' into searchfixes

pull/5370/head
bunsenstraat authored 3 years ago, committed by GitHub
commit e3e8f3e99f
  1. apps/debugger/src/app/debugger-api.ts (5 changes)
  2. apps/remix-ide-e2e/src/tests/gist.test.ts (12 changes)
  3. apps/remix-ide-e2e/src/tests/terminal.test.ts (168 changes)
  4. apps/remix-ide/src/app/plugins/storage.ts (2 changes)
  5. apps/remix-ide/src/app/tabs/test-tab.js (2 changes)
  6. apps/solidity-compiler/src/app/compiler-api.ts (19 changes)
  7. libs/remix-core-plugin/src/lib/compiler-fetch-and-compile.ts (22 changes)
  8. libs/remix-core-plugin/src/lib/gist-handler.ts (2 changes)
  9. libs/remix-core-plugin/src/lib/helpers/fetch-etherscan.ts (4 changes)
  10. libs/remix-core-plugin/src/lib/helpers/fetch-sourcify.ts (4 changes)
  11. libs/remix-debug/src/Ethdebugger.ts (1 change)
  12. libs/remix-debug/src/code/breakpointManager.ts (7 changes)
  13. libs/remix-debug/src/debugger/debugger.ts (13 changes)
  14. libs/remix-debug/test/debugger.ts (9 changes)
  15. libs/remix-lib/src/execution/eventsDecoder.ts (50 changes)
  16. libs/remix-solidity/src/compiler/compiler-abstract.ts (6 changes)
  17. libs/remix-solidity/src/compiler/compiler.ts (2 changes)
  18. libs/remix-solidity/src/compiler/helper.ts (23 changes)
  19. libs/remix-solidity/src/index.ts (1 change)
  20. libs/remix-ui/debugger-ui/src/lib/debugger-ui.tsx (12 changes)
  21. libs/remix-ui/editor/src/lib/remix-ui-editor.tsx (9 changes)
  22. libs/remix-ui/renderer/src/lib/renderer.tsx (35 changes)
  23. libs/remix-ui/workspace/src/lib/reducers/workspace.ts (4 changes)

@@ -127,6 +127,11 @@ export const DebuggerApiMixin = (Base) => class extends Base {
}
debug (hash, web3?) {
try {
this.call('fetchAndCompile', 'clearCache')
} catch (e) {
console.error(e)
}
this.debugHash = hash
if (web3) this._web3 = web3
else this._web3 = this.initialWeb3

@@ -36,9 +36,9 @@ module.exports = {
.addFile('File.sol', { content: '' })
.executeScript(`remix.loadgist('${gistid}')`)
// .perform((done) => { if (runtimeBrowser === 'chrome') { browser.openFile('gists') } done() })
.waitForElementVisible(`[data-id="treeViewLitreeViewItem${gistid}"]`)
.click(`[data-id="treeViewLitreeViewItem${gistid}"]`)
.openFile(`${gistid}/README.txt`)
.waitForElementVisible(`[data-id="treeViewLitreeViewItemgist-${gistid}"]`)
.click(`[data-id="treeViewLitreeViewItemgist-${gistid}"]`)
.openFile(`gist-${gistid}/README.txt`)
// Remix publish to gist
/* .click('*[data-id="fileExplorerNewFilepublishToGist"]')
.pause(2000)
@@ -140,9 +140,9 @@ module.exports = {
})
.setValue('*[data-id="gisthandlerModalDialogModalBody-react"] input[data-id="modalDialogCustomPromp"]', testData.validGistId)
.modalFooterOKClick('gisthandler')
.openFile(`${testData.validGistId}/README.txt`)
.waitForElementVisible(`div[title='default_workspace/${testData.validGistId}/README.txt']`)
.assert.containsText(`div[title='default_workspace/${testData.validGistId}/README.txt'] > span`, 'README.txt')
.openFile(`gist-${testData.validGistId}/README.txt`)
.waitForElementVisible(`div[title='default_workspace/gist-${testData.validGistId}/README.txt']`)
.assert.containsText(`div[title='default_workspace/gist-${testData.validGistId}/README.txt'] > span`, 'README.txt')
.end()
}
}

@@ -119,7 +119,47 @@ module.exports = {
.waitForElementContainsText('*[data-id="terminalJournal"]', 'newOwner', 60000)
.waitForElementContainsText('*[data-id="terminalJournal"]', '0xd9145CCE52D386f254917e481eB44e9943F39138', 60000)
},
'Run tests using Mocha script and check result logging in the terminal #group4': function (browser: NightwatchBrowser) {
browser
.addFile('scripts/storage.test.js', { content: storageMochaTests })
.pause(1000)
.openFile('contracts/1_Storage.sol')
.clickLaunchIcon('solidity')
.click('*[data-id="compilerContainerCompileBtn"]')
.pause(1000) // compile Storage
.executeScript('remix.execute(\'scripts/storage.test.js\')')
.pause(1000)
.waitForElementContainsText('*[data-id="terminalJournal"]', 'Running tests....')
.waitForElementContainsText('*[data-id="terminalJournal"]', 'storage contract Address:')
.waitForElementContainsText('*[data-id="terminalJournal"]', '✓ test initial value')
.waitForElementContainsText('*[data-id="terminalJournal"]', '✓ test updating and retrieving updated value')
.waitForElementContainsText('*[data-id="terminalJournal"]', '✘ fail test updating and retrieving updated value')
.waitForElementContainsText('*[data-id="terminalJournal"]', 'Expected: 55')
.waitForElementContainsText('*[data-id="terminalJournal"]', 'Actual: 56')
.waitForElementContainsText('*[data-id="terminalJournal"]', 'Message: incorrect number: expected 56 to equal 55')
.waitForElementContainsText('*[data-id="terminalJournal"]', '2 passing, 1 failing')
},
'Run tests using Mocha for a contract with library deployment and check result logging in the terminal #group4': function (browser: NightwatchBrowser) {
browser
.addFile('scripts/storageWithLib.test.js', { content: storageWithLibMochaTests })
.pause(1000)
.click('[data-id="treeViewDivtreeViewItemcontracts"]')
.addFile('contracts/StorageWithLib.sol', { content: storageWithLibContract })
.openFile('contracts/StorageWithLib.sol')
.clickLaunchIcon('solidity')
.click('*[data-id="compilerContainerCompileBtn"]')
.pause(1000) // compile StorageWithLib
.executeScript('remix.execute(\'scripts/storageWithLib.test.js\')')
.pause(1000)
.waitForElementContainsText('*[data-id="terminalJournal"]', 'Running tests....')
.waitForElementContainsText('*[data-id="terminalJournal"]', 'Storage with lib')
.waitForElementContainsText('*[data-id="terminalJournal"]', 'deploying lib:')
.waitForElementContainsText('*[data-id="terminalJournal"]', '✘ test library integration by calling a lib method')
.waitForElementContainsText('*[data-id="terminalJournal"]', 'Expected: 34')
.waitForElementContainsText('*[data-id="terminalJournal"]', 'Actual: 14')
.waitForElementContainsText('*[data-id="terminalJournal"]', 'Message: expected \'14\' to equal \'34\'')
.waitForElementContainsText('*[data-id="terminalJournal"]', '0 passing, 1 failing')
},
'Should print hardhat logs #group4': function (browser: NightwatchBrowser) {
browser
.click('*[data-id="terminalClearConsole"]') // clear the terminal
@@ -261,6 +301,132 @@ const deployWithEthersJs = `
}
})()`
const storageMochaTests = `
const { expect } = require("chai");
describe("Storage with lib", function () {
it("test initial value", async function () {
// Make sure contract is compiled and artifacts are generated
const metadata = JSON.parse(await remix.call('fileManager', 'getFile', 'contracts/artifacts/Storage.json'))
const signer = (new ethers.providers.Web3Provider(web3Provider)).getSigner()
let Storage = new ethers.ContractFactory(metadata.abi, metadata.data.bytecode.object, signer);
let storage = await Storage.deploy();
console.log('storage contract Address: ' + storage.address);
await storage.deployed()
expect((await storage.retrieve()).toNumber()).to.equal(0);
});
it("test updating and retrieving updated value", async function () {
const metadata = JSON.parse(await remix.call('fileManager', 'getFile', 'contracts/artifacts/Storage.json'))
const signer = (new ethers.providers.Web3Provider(web3Provider)).getSigner()
let Storage = new ethers.ContractFactory(metadata.abi, metadata.data.bytecode.object, signer);
let storage = await Storage.deploy();
await storage.deployed()
const setValue = await storage.store(56);
await setValue.wait();
expect((await storage.retrieve()).toNumber()).to.equal(56);
});
it("fail test updating and retrieving updated value", async function () {
const metadata = JSON.parse(await remix.call('fileManager', 'getFile', 'contracts/artifacts/Storage.json'))
const signer = (new ethers.providers.Web3Provider(web3Provider)).getSigner()
let Storage = new ethers.ContractFactory(metadata.abi, metadata.data.bytecode.object, signer);
let storage = await Storage.deploy();
await storage.deployed()
const setValue = await storage.store(56);
await setValue.wait();
expect((await storage.retrieve()).toNumber(), 'incorrect number').to.equal(55);
});
});`
const storageWithLibContract = `
// SPDX-License-Identifier: GPL-3.0
pragma solidity >=0.7.0 <0.9.0;
library Lib {
function test () public view returns (uint) {
return 14;
}
}
/**
* @title Storage
* @dev Store & retrieve value in a variable
*/
contract StorageWithLib {
uint256 number;
/**
* @dev Store value in variable
* @param num value to store
*/
function store(uint256 num) public {
number = num;
}
/**
* @dev Return value
* @return value of 'number'
*/
function retrieve() public view returns (uint256){
return number;
}
function getFromLib() public view returns (uint) {
return Lib.test();
}
}
`
const storageWithLibMochaTests = `
const { expect } = require("chai");
const { ethers } = require("hardhat");
describe("Storage", function () {
it("test library integration by calling a lib method", async function () {
const metadataLib = JSON.parse(await remix.call('fileManager', 'readFile', 'contracts/artifacts/Lib.json'))
console.log('deploying lib:')
const artifactLib = {
contractName: 'Lib',
sourceName: 'contracts/StorageWithLib.sol',
abi: metadataLib.abi,
bytecode: '0x' + metadataLib.data.bytecode.object,
deployedBytecode: '0x' + metadataLib.data.deployedBytecode.object,
linkReferences: metadataLib.data.bytecode.linkReferences,
deployedLinkReferences: metadataLib.data.deployedBytecode.linkReferences,
}
const optionsLib = {}
const factoryLib = await ethers.getContractFactoryFromArtifact(artifactLib, optionsLib)
const lib = await factoryLib.deploy();
await lib.deployed()
const metadata = JSON.parse(await remix.call('fileManager', 'readFile', 'contracts/artifacts/StorageWithLib.json'))
const artifact = {
contractName: 'StorageWithLib',
sourceName: 'contracts/StorageWithLib.sol',
abi: metadata.abi,
bytecode: '0x' + metadata.data.bytecode.object,
deployedBytecode: '0x' + metadata.data.deployedBytecode.object,
linkReferences: metadata.data.bytecode.linkReferences,
deployedLinkReferences: metadata.data.deployedBytecode.linkReferences,
}
const options = {
libraries: {
'Lib': lib.address
}
}
const factory = await ethers.getContractFactoryFromArtifact(artifact, options)
const storage = await factory.deploy();
await storage.deployed()
const storeValue = await storage.store(333);
await storeValue.wait();
expect((await storage.getFromLib()).toString()).to.equal('34');
});
});`
const hardhatLog = `
// SPDX-License-Identifier: GPL-3.0

@@ -39,7 +39,7 @@ export class StoragePlugin extends Plugin {
if (!localStorage.hasOwnProperty(_x)) {
continue
}
_xLen = ((localStorage[_x].length + _x.length) * 2)
_xLen = ((localStorage[_x].length + _x.length))
_lsTotal += _xLen
}
return (_lsTotal / 1024).toFixed(2)

@@ -146,7 +146,7 @@ module.exports = class TestTab extends ViewPlugin {
renderComponent (testDirPath) {
this.dispatch({
testTab: this,
helper: this.helper,
helper: helper,
testDirPath: testDirPath
})
}

@@ -1,4 +1,4 @@
import { compile } from '@remix-project/remix-solidity'
import { compile, helper } from '@remix-project/remix-solidity'
import { CompileTabLogic, parseContracts } from '@remix-ui/solidity-compiler' // eslint-disable-line
import type { ConfigurationSettings } from '@remix-project/remix-lib-ts'
@@ -261,7 +261,7 @@ export const CompilerApiMixin = (Base) => class extends Base {
this.on('fileManager', 'fileClosed', this.data.eventHandlers.onFileClosed)
this.data.eventHandlers.onCompilationFinished = (success, data, source, input, version) => {
this.data.eventHandlers.onCompilationFinished = async (success, data, source, input, version) => {
this.compileErrors = data
if (success) {
// forwarding the event to the appManager infra
@@ -291,6 +291,21 @@ export const CompilerApiMixin = (Base) => class extends Base {
if (success) this.compiler.visitContracts((contract) => { this.compilationDetails.contractMap[contract.name] = contract })
this.compilationDetails.target = source.target
if (this.onCompilationFinished) this.onCompilationFinished(this.compilationDetails)
// set annotations
if (data.errors) {
for (const error of data.errors) {
let pos = helper.getPositionDetails(error.formattedMessage)
if (pos.errFile) {
pos = {
row: pos.errLine,
column: pos.errCol,
text: error.formattedMessage,
type: error.severity
}
await this.call('editor', 'addAnnotation', pos, pos.errFile)
}
}
}
}
this.compiler.event.register('compilationFinished', this.data.eventHandlers.onCompilationFinished)

@@ -1,4 +1,3 @@
import { Plugin } from '@remixproject/engine'
import { compile } from '@remix-project/remix-solidity'
import { util } from '@remix-project/remix-lib'
@@ -8,7 +7,7 @@ import { fetchContractFromSourcify } from './helpers/fetch-sourcify'
const profile = {
name: 'fetchAndCompile',
methods: ['resolve'],
methods: ['resolve', 'clearCache'],
version: '0.0.1'
}
@@ -21,6 +20,14 @@ export class FetchAndCompile extends Plugin {
this.sourceVerifierNetWork = ['Main', 'Rinkeby', 'Ropsten', 'Goerli']
}
/**
* Clear the cache
*
*/
async clearCache () {
this.unresolvedAddresses = []
}
/**
* Fetch compilation metadata from source-Verify from a given @arg contractAddress - https://github.com/ethereum/source-verify
* Put the artifacts in the file explorer
@@ -68,6 +75,7 @@
}
}
targetPath = `${targetPath}/${network.id}/${contractAddress}`
let data
try {
data = await fetchContractFromSourcify(this, network, contractAddress, targetPath)
@@ -100,7 +108,15 @@
const compData = await compile(
compilationTargets,
settings,
async (url, cb) => await this.call('contentImport', 'resolveAndSave', url).then((result) => cb(null, result)).catch((error) => cb(error.message)))
async (url, cb) => {
// we first try to resolve the content from the compilation target using a more appropriate path
const path = `${targetPath}/${url}`
if (compilationTargets[path] && compilationTargets[path].content) {
return cb(null, compilationTargets[path].content)
} else {
await this.call('contentImport', 'resolveAndSave', url).then((result) => cb(null, result)).catch((error) => cb(error.message))
}
})
await this.call('compilerArtefacts', 'addResolvedContract', contractAddress, compData)
return compData
} catch (e) {

@@ -116,7 +116,7 @@ export class GistHandler extends Plugin {
const obj: StringByString = {}
Object.keys(data.files).forEach((element) => {
const path = element.replace(/\.\.\./g, '/')
obj['/' + gistId + '/' + path] = data.files[element]
obj['/gist-' + gistId + '/' + path] = data.files[element]
})
this.call('fileManager', 'setBatchFiles', obj, 'workspace', true, async (errorSavingFiles: any) => {
if (errorSavingFiles) {

@@ -23,7 +23,7 @@ export const fetchContractFromEtherscan = async (plugin, network, contractAddres
}
if (typeof data.result[0].SourceCode === 'string') {
const fileName = `${targetPath}/${network.id}/${contractAddress}/${data.result[0].ContractName}.sol`
const fileName = `${targetPath}/${data.result[0].ContractName}.sol`
await plugin.call('fileManager', 'setFile', fileName , data.result[0].SourceCode)
compilationTargets[fileName] = { content: data.result[0].SourceCode }
} else if (data.result[0].SourceCode && typeof data.result[0].SourceCode == 'object') {
@@ -34,7 +34,7 @@ export const fetchContractFromEtherscan = async (plugin, network, contractAddres
if (await plugin.call('contentImport', 'isExternalUrl', file)) {
// nothing to do, the compiler callback will handle those
} else {
const path = `${targetPath}/${network.id}/${contractAddress}/${file}`
const path = `${targetPath}/${file}`
const content = (source as any).content
await plugin.call('fileManager', 'setFile', path, content)
compilationTargets[path] = { content }

@@ -13,7 +13,7 @@ export const fetchContractFromSourcify = async (plugin, network, contractAddress
}
// set the solidity contract code using metadata
await plugin.call('fileManager', 'setFile', `${targetPath}/${network.id}/${contractAddress}/metadata.json`, JSON.stringify(data.metadata, null, '\t'))
await plugin.call('fileManager', 'setFile', `${targetPath}/metadata.json`, JSON.stringify(data.metadata, null, '\t'))
for (let file in data.metadata.sources) {
const urls = data.metadata.sources[file].urls
for (const url of urls) {
@@ -24,7 +24,7 @@ export const fetchContractFromSourcify = async (plugin, network, contractAddress
if (await plugin.call('contentImport', 'isExternalUrl', file)) {
// nothing to do, the compiler callback will handle those
} else {
const path = `${targetPath}/${network.id}/${contractAddress}/${file}`
const path = `${targetPath}/${file}`
await plugin.call('fileManager', 'setFile', path, source.content)
compilationTargets[path] = { content: source.content }
}

@@ -64,7 +64,6 @@ export class Ethdebugger {
this.solidityProxy,
this.codeManager,
{ ...this.opts, includeLocalVariables })
this.event.trigger('managersChanged')
}
resolveStep (index) {

@@ -87,6 +87,7 @@ export class BreakpointManager {
*
*/
async jump (fromStep, direction, defaultToLimit, trace) {
this.event.trigger('locatingBreakpoint', [])
let sourceLocation
let previousSourceLocation
let currentStep = fromStep + direction
@@ -113,14 +114,14 @@
}
if (this.hasBreakpointAtLine(sourceLocation.file, lineColumn.start.line)) {
lineHadBreakpoint = true
if (direction === 1 && this.hitLine(currentStep, sourceLocation, previousSourceLocation, trace)) {
if (this.hitLine(currentStep, sourceLocation, previousSourceLocation, trace)) {
return
}
}
}
currentStep += direction
}
this.event.trigger('NoBreakpointHit', [])
this.event.trigger('noBreakpointHit', [])
if (!defaultToLimit) {
return
}
@@ -172,6 +173,7 @@
* @param {Object} sourceLocation - position of the breakpoint { file: '<file index>', row: '<line number>' }
*/
add (sourceLocation) {
sourceLocation.row -= 1
if (!this.breakpoints[sourceLocation.fileName]) {
this.breakpoints[sourceLocation.fileName] = []
}
@@ -185,6 +187,7 @@
* @param {Object} sourceLocation - position of the breakpoint { file: '<file index>', row: '<line number>' }
*/
remove (sourceLocation) {
sourceLocation.row -= 1
const sources = this.breakpoints[sourceLocation.fileName]
if (!sources) {
return

@@ -41,15 +41,18 @@ export class Debugger {
}
})
this.breakPointManager.event.register('managersChanged', () => {
const { traceManager, callTree, solidityProxy } = this.debugger
this.breakPointManager.setManagers({ traceManager, callTree, solidityProxy })
})
this.breakPointManager.event.register('breakpointStep', (step) => {
this.step_manager.jumpTo(step)
})
this.breakPointManager.event.register('noBreakpointHit', (step) => {
this.event.trigger('noBreakpointHit', [])
})
this.breakPointManager.event.register('locatingBreakpoint', () => {
this.event.trigger('locatingBreakpoint', [])
})
this.debugger.setBreakpointManager(this.breakPointManager)
this.debugger.event.register('newTraceLoaded', this, () => {

@@ -277,22 +277,15 @@ function testDebugging (debugManager) {
return sourceMappingDecoder.convertOffsetToLineColumn(rawLocation, sourceMappingDecoder.getLinebreakPositions(ballot))
}})
breakPointManager.event.register('managersChanged', () => {
const {traceManager, callTree, solidityProxy} = debugManager
breakPointManager.setManagers({traceManager, callTree, solidityProxy})
})
breakPointManager.add({fileName: 'test.sol', row: 38})
breakPointManager.add({fileName: 'test.sol', row: 39})
breakPointManager.event.register('breakpointHit', function (sourceLocation, step) {
console.log('breakpointHit')
t.equal(JSON.stringify(sourceLocation), JSON.stringify({ start: 1153, length: 6, file: 0, jump: '-' }))
t.equal(step, 212)
})
breakPointManager.event.register('noBreakpointHit', function () {
t.end('noBreakpointHit')
console.log('noBreakpointHit')
})
breakPointManager.jumpNextBreakpoint(0, true)
})

@@ -56,21 +56,22 @@ export class EventsDecoder {
return eventsABI
}
_event (hash: string, eventsABI: Record<string, unknown>, contractName: string) {
const events = eventsABI[contractName]
if (!events) return null
if (events[hash]) {
const event = events[hash]
for (const input of event.inputs) {
if (input.type === 'function') {
input.type = 'bytes24'
input.baseType = 'bytes24'
_event (hash, eventsABI) {
// get all the events responding to that hash.
const contracts = []
for (const k in eventsABI) {
if (eventsABI[k][hash]) {
const event = eventsABI[k][hash]
for (const input of event.inputs) {
if (input.type === 'function') {
input.type = 'bytes24'
input.baseType = 'bytes24'
}
}
contracts.push(event)
}
return event
}
return null
return contracts
}
_stringifyBigNumber (value): string {
@@ -95,16 +96,23 @@
// [address, topics, mem]
const log = logs[i]
const topicId = log.topics[0]
const eventAbi = this._event(topicId.replace('0x', ''), eventsABI, contractName)
if (eventAbi) {
const decodedlog = eventAbi.abi.parseLog(log)
const decoded = {}
for (const v in decodedlog.args) {
decoded[v] = this._stringifyEvent(decodedlog.args[v])
const eventAbis = this._event(topicId.replace('0x', ''), eventsABI)
for (const eventAbi of eventAbis) {
try {
if (eventAbi) {
const decodedlog = eventAbi.abi.parseLog(log)
const decoded = {}
for (const v in decodedlog.args) {
decoded[v] = this._stringifyEvent(decodedlog.args[v])
}
events.push({ from: log.address, topic: topicId, event: eventAbi.event, args: decoded })
} else {
events.push({ from: log.address, data: log.data, topics: log.topics })
}
break // if one of the iteration is successful
} catch (e) {
continue
}
events.push({ from: log.address, topic: topicId, event: eventAbi.event, args: decoded })
} else {
events.push({ from: log.address, data: log.data, topics: log.topics })
}
}
cb(null, { decoded: events, raw: logs })
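
For context on why _event now returns a list: log.topics[0] is derived from the event signature alone, so two unrelated contracts that declare the same event share the same topic hash, and decoding has to try each candidate ABI until parseLog succeeds. A minimal sketch of that collision, assuming ethers v5 (already used by the e2e scripts above); the event signatures are illustrative:

import { ethers } from 'ethers'

// Two unrelated contracts can declare the same event. The log's topics[0] is
// keccak256 of the signature, so the hash alone cannot tell the contracts apart
// and each candidate ABI has to be tried with parseLog.
const ifaceA = new ethers.utils.Interface(['event Transfer(address indexed from, address indexed to, uint256 value)'])
const ifaceB = new ethers.utils.Interface(['event Transfer(address indexed src, address indexed dst, uint256 wad)'])
console.log(ifaceA.getEventTopic('Transfer') === ifaceB.getEventTopic('Transfer')) // true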

@@ -1,5 +1,5 @@
'use strict'
import txHelper from './txHelper'
import helper from './helper'
export class CompilerAbstract {
languageversion: any
@@ -18,11 +18,11 @@ export class CompilerAbstract {
}
getContract (name) {
return txHelper.getContract(name, this.data.contracts)
return helper.getContract(name, this.data.contracts)
}
visitContracts (calllback) {
return txHelper.visitContracts(this.data.contracts, calllback)
return helper.visitContracts(this.data.contracts, calllback)
}
getData () {

@@ -4,7 +4,7 @@ import { update } from 'solc/abi'
import * as webworkify from 'webworkify-webpack'
import compilerInput from './compiler-input'
import EventManager from '../lib/eventManager'
import txHelper from './txHelper'
import txHelper from './helper'
import {
Source, SourceWithTarget, MessageFromWorker, CompilerState, CompilationResult,
visitContractsCallbackParam, visitContractsCallbackInterface, CompilationError,

@@ -35,6 +35,27 @@ export default {
if (cb(param)) return
}
}
}
},
// ^ e.g:
// browser/gm.sol: Warning: Source file does not specify required compiler version! Consider adding "pragma solidity ^0.6.12
// https://github.com/OpenZeppelin/openzeppelin-contracts/blob/release-v3.2.0/contracts/introspection/IERC1820Registry.sol:3:1: ParserError: Source file requires different compiler version (current compiler is 0.7.4+commit.3f05b770.Emscripten.clang) - note that nightly builds are considered to be strictly less than the released version
getPositionDetails: (msg: string) => {
const result = { } as Record<string, number | string>
// To handle some compiler warning without location like SPDX license warning etc
if (!msg.includes(':')) return { errLine: -1, errCol: -1, errFile: '' }
if (msg.includes('-->')) msg = msg.split('-->')[1].trim()
// extract line / column
let pos = msg.match(/^(.*?):([0-9]*?):([0-9]*?)?/)
result.errLine = pos ? parseInt(pos[2]) - 1 : -1
result.errCol = pos ? parseInt(pos[3]) : -1
// extract file
pos = msg.match(/^(https:.*?|http:.*?|.*?):/)
result.errFile = pos ? pos[1] : msg
return result
}
}
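
A minimal usage sketch of the newly exported helper, mirroring how the compiler-api.ts and renderer.tsx hunks in this diff consume it; the sample message string and variable names are illustrative:

import { helper } from '@remix-project/remix-solidity'

// e.g. a solc formattedMessage (exact wording illustrative)
const msg = 'contracts/Storage.sol:5:8: ParserError: Expected identifier but got reserved keyword'
const pos = helper.getPositionDetails(msg)
// pos.errFile -> 'contracts/Storage.sol', pos.errLine -> 4 (zero-based)
if (pos.errFile) {
  // forward to the editor as an annotation, as the compiler-api.ts hunk does above
  // await plugin.call('editor', 'addAnnotation', { row: pos.errLine, column: pos.errCol, text: msg, type: 'error' }, pos.errFile)
}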

@@ -4,3 +4,4 @@ export { default as CompilerInput, getValidLanguage } from './compiler/compiler-
export { CompilerAbstract } from './compiler/compiler-abstract'
export * from './compiler/types'
export { promisedMiniXhr, pathToURL, baseURLBin, baseURLWasm, canUseWorker, urlFromVersion } from './compiler/compiler-utils'
export { default as helper } from './compiler/helper'

@@ -87,6 +87,18 @@ export const DebuggerUI = (props: DebuggerUIProps) => {
})
})
debuggerInstance.event.register('locatingBreakpoint', async (isActive) => {
setState(prevState => {
return { ...prevState, sourceLocationStatus: 'Locating breakpoint, this might take a while...' }
})
})
debuggerInstance.event.register('noBreakpointHit', async (isActive) => {
setState(prevState => {
return { ...prevState, sourceLocationStatus: '' }
})
})
debuggerInstance.event.register('newSourceLocation', async (lineColumnPos, rawLocation, generatedSources, address) => {
if (!lineColumnPos) {
await debuggerModule.discardHighlight()

@@ -271,7 +271,10 @@ export const EditorUI = (props: EditorUIProps) => {
props.editorAPI.clearDecorationsByPlugin = (filePath: string, plugin: string, typeOfDecoration: string, registeredDecorations: any, currentDecorations: any) => {
const model = editorModelsState[filePath]?.model
if (!model) return
if (!model) return {
currentDecorations: [],
registeredDecorations: []
}
const decorations = []
const newRegisteredDecorations = []
if (registeredDecorations) {
@@ -290,7 +293,9 @@
props.editorAPI.keepDecorationsFor = (filePath: string, plugin: string, typeOfDecoration: string, registeredDecorations: any, currentDecorations: any) => {
const model = editorModelsState[filePath]?.model
if (!model) return
if (!model) return {
currentDecorations: []
}
const decorations = []
if (registeredDecorations) {
for (const decoration of registeredDecorations) {

@@ -1,4 +1,5 @@
import React, { useEffect, useState } from 'react' //eslint-disable-line
import { helper } from '@remix-project/remix-solidity'
import './renderer.css'
interface RendererProps {
message: any;
@@ -29,51 +30,19 @@ export const Renderer = ({ message, opt = {}, plugin }: RendererProps) => {
// ^ e.g:
// browser/gm.sol: Warning: Source file does not specify required compiler version! Consider adding "pragma solidity ^0.6.12
// https://github.com/OpenZeppelin/openzeppelin-contracts/blob/release-v3.2.0/contracts/introspection/IERC1820Registry.sol:3:1: ParserError: Source file requires different compiler version (current compiler is 0.7.4+commit.3f05b770.Emscripten.clang) - note that nightly builds are considered to be strictly less than the released version
const positionDetails = getPositionDetails(text)
const positionDetails = helper.getPositionDetails(text)
opt.errLine = positionDetails.errLine
opt.errCol = positionDetails.errCol
opt.errFile = positionDetails.errFile ? (positionDetails.errFile as string).trim() : ''
if (!opt.noAnnotations && opt.errFile && opt.errFile !== '') {
addAnnotation(opt.errFile, {
row: opt.errLine,
column: opt.errCol,
text: text,
type: opt.type
})
}
setMessageText(text)
setEditorOptions(opt)
setClose(false)
setClassList(opt.type === 'error' ? 'alert alert-danger' : 'alert alert-warning')
}, [message, opt])
const getPositionDetails = (msg: string) => {
const result = { } as Record<string, number | string>
// To handle some compiler warning without location like SPDX license warning etc
if (!msg.includes(':')) return { errLine: -1, errCol: -1, errFile: '' }
if (msg.includes('-->')) msg = msg.split('-->')[1].trim()
// extract line / column
let pos = msg.match(/^(.*?):([0-9]*?):([0-9]*?)?/)
result.errLine = pos ? parseInt(pos[2]) - 1 : -1
result.errCol = pos ? parseInt(pos[3]) : -1
// extract file
pos = msg.match(/^(https:.*?|http:.*?|.*?):/)
result.errFile = pos ? pos[1] : msg
return result
}
const addAnnotation = async (file, error) => {
if (file === await plugin.call('config', 'getAppParameter', 'currentFile')) {
await plugin.call('editor', 'addAnnotation', error, file)
}
}
const handleErrorClick = (opt) => {
if (opt.click) {

@@ -641,7 +641,7 @@ const removeInputField = (state: BrowserState, path: string): { [x: string]: Rec
files = _.setWith(files, _path, {
isDirectory: true,
path,
name: extractNameFromKey(path).indexOf('gist-') === 0 ? extractNameFromKey(path).split('-')[1] : extractNameFromKey(path),
name: extractNameFromKey(path),
type: extractNameFromKey(path).indexOf('gist-') === 0 ? 'gist' : 'folder',
child: prevFiles ? prevFiles.child : {}
}, Object)
@@ -742,7 +742,7 @@ const normalize = (filesList, directory?: string, newInputType?: 'folder' | 'fil
if (filesList[key].isDirectory) {
folders[extractNameFromKey(key)] = {
path,
name: extractNameFromKey(path).indexOf('gist-') === 0 ? extractNameFromKey(path).split('-')[1] : extractNameFromKey(path),
name: extractNameFromKey(path),
isDirectory: filesList[key].isDirectory,
type: extractNameFromKey(path).indexOf('gist-') === 0 ? 'gist' : 'folder'
}
