Merge branch 'master' into fix_solidity_warning

pull/2163/head
Rob 3 years ago committed by GitHub
commit 66673241db
13 changed files:
  1. apps/debugger/src/app/debugger-api.ts (5 changes)
  2. apps/remix-ide-e2e/src/tests/terminal.test.ts (168 changes)
  3. apps/remix-ide/src/app/plugins/storage.ts (2 changes)
  4. libs/remix-core-plugin/src/lib/compiler-fetch-and-compile.ts (22 changes)
  5. libs/remix-core-plugin/src/lib/helpers/fetch-etherscan.ts (4 changes)
  6. libs/remix-core-plugin/src/lib/helpers/fetch-sourcify.ts (4 changes)
  7. libs/remix-debug/src/Ethdebugger.ts (1 change)
  8. libs/remix-debug/src/code/breakpointManager.ts (7 changes)
  9. libs/remix-debug/src/debugger/debugger.ts (13 changes)
  10. libs/remix-debug/test/debugger.ts (9 changes)
  11. libs/remix-lib/src/execution/eventsDecoder.ts (50 changes)
  12. libs/remix-ui/debugger-ui/src/lib/debugger-ui.tsx (12 changes)
  13. libs/remix-ui/search/src/lib/context/context.tsx (18 changes)

@@ -127,6 +127,11 @@ export const DebuggerApiMixin = (Base) => class extends Base {
}
debug (hash, web3?) {
try {
this.call('fetchAndCompile', 'clearCache')
} catch (e) {
console.error(e)
}
this.debugHash = hash
if (web3) this._web3 = web3
else this._web3 = this.initialWeb3

@@ -119,7 +119,47 @@ module.exports = {
.waitForElementContainsText('*[data-id="terminalJournal"]', 'newOwner', 60000)
.waitForElementContainsText('*[data-id="terminalJournal"]', '0xd9145CCE52D386f254917e481eB44e9943F39138', 60000)
},
'Run tests using Mocha script and check result logging in the terminal #group4': function (browser: NightwatchBrowser) {
browser
.addFile('scripts/storage.test.js', { content: storageMochaTests })
.pause(1000)
.openFile('contracts/1_Storage.sol')
.clickLaunchIcon('solidity')
.click('*[data-id="compilerContainerCompileBtn"]')
.pause(1000) // compile Storage
.executeScript('remix.execute(\'scripts/storage.test.js\')')
.pause(1000)
.waitForElementContainsText('*[data-id="terminalJournal"]', 'Running tests....')
.waitForElementContainsText('*[data-id="terminalJournal"]', 'storage contract Address:')
.waitForElementContainsText('*[data-id="terminalJournal"]', '✓ test initial value')
.waitForElementContainsText('*[data-id="terminalJournal"]', '✓ test updating and retrieving updated value')
.waitForElementContainsText('*[data-id="terminalJournal"]', '✘ fail test updating and retrieving updated value')
.waitForElementContainsText('*[data-id="terminalJournal"]', 'Expected: 55')
.waitForElementContainsText('*[data-id="terminalJournal"]', 'Actual: 56')
.waitForElementContainsText('*[data-id="terminalJournal"]', 'Message: incorrect number: expected 56 to equal 55')
.waitForElementContainsText('*[data-id="terminalJournal"]', '2 passing, 1 failing')
},
'Run tests using Mocha for a contract with library deployment and check result logging in the terminal #group4': function (browser: NightwatchBrowser) {
browser
.addFile('scripts/storageWithLib.test.js', { content: storageWithLibMochaTests })
.pause(1000)
.click('[data-id="treeViewDivtreeViewItemcontracts"]')
.addFile('contracts/StorageWithLib.sol', { content: storageWithLibContract })
.openFile('contracts/StorageWithLib.sol')
.clickLaunchIcon('solidity')
.click('*[data-id="compilerContainerCompileBtn"]')
.pause(1000) // compile StorageWithLib
.executeScript('remix.execute(\'scripts/storageWithLib.test.js\')')
.pause(1000)
.waitForElementContainsText('*[data-id="terminalJournal"]', 'Running tests....')
.waitForElementContainsText('*[data-id="terminalJournal"]', 'Storage with lib')
.waitForElementContainsText('*[data-id="terminalJournal"]', 'deploying lib:')
.waitForElementContainsText('*[data-id="terminalJournal"]', '✘ test library integration by calling a lib method')
.waitForElementContainsText('*[data-id="terminalJournal"]', 'Expected: 34')
.waitForElementContainsText('*[data-id="terminalJournal"]', 'Actual: 14')
.waitForElementContainsText('*[data-id="terminalJournal"]', 'Message: expected \'14\' to equal \'34\'')
.waitForElementContainsText('*[data-id="terminalJournal"]', '0 passing, 1 failing')
},
'Should print hardhat logs #group4': function (browser: NightwatchBrowser) {
browser
.click('*[data-id="terminalClearConsole"]') // clear the terminal
@@ -261,6 +301,132 @@ const deployWithEthersJs = `
}
})()`
const storageMochaTests = `
const { expect } = require("chai");
describe("Storage with lib", function () {
it("test initial value", async function () {
// Make sure contract is compiled and artifacts are generated
const metadata = JSON.parse(await remix.call('fileManager', 'getFile', 'contracts/artifacts/Storage.json'))
const signer = (new ethers.providers.Web3Provider(web3Provider)).getSigner()
let Storage = new ethers.ContractFactory(metadata.abi, metadata.data.bytecode.object, signer);
let storage = await Storage.deploy();
console.log('storage contract Address: ' + storage.address);
await storage.deployed()
expect((await storage.retrieve()).toNumber()).to.equal(0);
});
it("test updating and retrieving updated value", async function () {
const metadata = JSON.parse(await remix.call('fileManager', 'getFile', 'contracts/artifacts/Storage.json'))
const signer = (new ethers.providers.Web3Provider(web3Provider)).getSigner()
let Storage = new ethers.ContractFactory(metadata.abi, metadata.data.bytecode.object, signer);
let storage = await Storage.deploy();
await storage.deployed()
const setValue = await storage.store(56);
await setValue.wait();
expect((await storage.retrieve()).toNumber()).to.equal(56);
});
it("fail test updating and retrieving updated value", async function () {
const metadata = JSON.parse(await remix.call('fileManager', 'getFile', 'contracts/artifacts/Storage.json'))
const signer = (new ethers.providers.Web3Provider(web3Provider)).getSigner()
let Storage = new ethers.ContractFactory(metadata.abi, metadata.data.bytecode.object, signer);
let storage = await Storage.deploy();
await storage.deployed()
const setValue = await storage.store(56);
await setValue.wait();
expect((await storage.retrieve()).toNumber(), 'incorrect number').to.equal(55);
});
});`
const storageWithLibContract = `
// SPDX-License-Identifier: GPL-3.0
pragma solidity >=0.7.0 <0.9.0;
library Lib {
function test () public view returns (uint) {
return 14;
}
}
/**
* @title Storage
* @dev Store & retrieve value in a variable
*/
contract StorageWithLib {
uint256 number;
/**
* @dev Store value in a variable
* @param num value to store
*/
function store(uint256 num) public {
number = num;
}
/**
* @dev Return value
* @return value of 'number'
*/
function retrieve() public view returns (uint256){
return number;
}
function getFromLib() public view returns (uint) {
return Lib.test();
}
}
`
const storageWithLibMochaTests = `
const { expect } = require("chai");
const { ethers } = require("hardhat");
describe("Storage", function () {
it("test library integration by calling a lib method", async function () {
const metadataLib = JSON.parse(await remix.call('fileManager', 'readFile', 'contracts/artifacts/Lib.json'))
console.log('deploying lib:')
const artifactLib = {
contractName: 'Lib',
sourceName: 'contracts/StorageWithLib.sol',
abi: metadataLib.abi,
bytecode: '0x' + metadataLib.data.bytecode.object,
deployedBytecode: '0x' + metadataLib.data.deployedBytecode.object,
linkReferences: metadataLib.data.bytecode.linkReferences,
deployedLinkReferences: metadataLib.data.deployedBytecode.linkReferences,
}
const optionsLib = {}
const factoryLib = await ethers.getContractFactoryFromArtifact(artifactLib, optionsLib)
const lib = await factoryLib.deploy();
await lib.deployed()
const metadata = JSON.parse(await remix.call('fileManager', 'readFile', 'contracts/artifacts/StorageWithLib.json'))
const artifact = {
contractName: 'StorageWithLib',
sourceName: 'contracts/StorageWithLib.sol',
abi: metadata.abi,
bytecode: '0x' + metadata.data.bytecode.object,
deployedBytecode: '0x' + metadata.data.deployedBytecode.object,
linkReferences: metadata.data.bytecode.linkReferences,
deployedLinkReferences: metadata.data.deployedBytecode.linkReferences,
}
const options = {
libraries: {
'Lib': lib.address
}
}
const factory = await ethers.getContractFactoryFromArtifact(artifact, options)
const storage = await factory.deploy();
await storage.deployed()
const storeValue = await storage.store(333);
await storeValue.wait();
expect((await storage.getFromLib()).toString()).to.equal('34');
});
});`
const hardhatLog = `
// SPDX-License-Identifier: GPL-3.0

@@ -39,7 +39,7 @@ export class StoragePlugin extends Plugin {
if (!localStorage.hasOwnProperty(_x)) {
continue
}
_xLen = ((localStorage[_x].length + _x.length) * 2)
_xLen = ((localStorage[_x].length + _x.length))
_lsTotal += _xLen
}
return (_lsTotal / 1024).toFixed(2)
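
The change above drops the UTF-16 doubling from the localStorage usage estimate, so the figure now reflects the raw character count of keys plus values. A minimal standalone sketch of both variants, assuming a browser environment; the helper name is hypothetical and not part of this commit:

// Hypothetical helper: estimates localStorage usage in KB.
// doubleForUtf16 = true reproduces the old estimate (2 bytes per UTF-16 code unit),
// false matches the new code and counts characters only.
function estimateLocalStorageKB (doubleForUtf16: boolean): string {
  let total = 0
  for (const key in localStorage) {
    if (!Object.prototype.hasOwnProperty.call(localStorage, key)) continue
    total += (localStorage[key].length + key.length) * (doubleForUtf16 ? 2 : 1)
  }
  return (total / 1024).toFixed(2)
}
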

@@ -1,4 +1,3 @@
import { Plugin } from '@remixproject/engine'
import { compile } from '@remix-project/remix-solidity'
import { util } from '@remix-project/remix-lib'
@@ -8,7 +7,7 @@ import { fetchContractFromSourcify } from './helpers/fetch-sourcify'
const profile = {
name: 'fetchAndCompile',
methods: ['resolve'],
methods: ['resolve', 'clearCache'],
version: '0.0.1'
}
@@ -21,6 +20,14 @@ export class FetchAndCompile extends Plugin {
this.sourceVerifierNetWork = ['Main', 'Rinkeby', 'Ropsten', 'Goerli']
}
/**
* Clear the cache
*
*/
async clearCache () {
this.unresolvedAddresses = []
}
/**
* Fetch compilation metadata from source-Verify for a given @arg contractAddress - https://github.com/ethereum/source-verify
* Put the artifacts in the file explorer
@@ -68,6 +75,7 @@ export class FetchAndCompile extends Plugin {
}
}
targetPath = `${targetPath}/${network.id}/${contractAddress}`
let data
try {
data = await fetchContractFromSourcify(this, network, contractAddress, targetPath)
@@ -100,7 +108,15 @@ export class FetchAndCompile extends Plugin {
const compData = await compile(
compilationTargets,
settings,
async (url, cb) => await this.call('contentImport', 'resolveAndSave', url).then((result) => cb(null, result)).catch((error) => cb(error.message)))
async (url, cb) => {
// we first try to resolve the content from the compilation target using a more appropriate path
const path = `${targetPath}/${url}`
if (compilationTargets[path] && compilationTargets[path].content) {
return cb(null, compilationTargets[path].content)
} else {
await this.call('contentImport', 'resolveAndSave', url).then((result) => cb(null, result)).catch((error) => cb(error.message))
}
})
await this.call('compilerArtefacts', 'addResolvedContract', contractAddress, compData)
return compData
} catch (e) {
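
The (url, cb) resolver above now checks the sources that were just fetched before falling back to the contentImport plugin. A self-contained sketch of that resolution order; the base path and the stub are made-up examples, not values from this commit:

// Illustration only: prefer already-fetched sources under targetPath, otherwise
// delegate to a stub standing in for this.call('contentImport', 'resolveAndSave', url).
type Targets = Record<string, { content: string }>
const targetPath = 'fetchedContracts/3/0x0000000000000000000000000000000000000001' // example value
const compilationTargets: Targets = {
  [`${targetPath}/contracts/Storage.sol`]: { content: '// source fetched from Sourcify or Etherscan' }
}
async function resolveImport (url: string, resolveAndSave: (u: string) => Promise<string>): Promise<string> {
  const path = `${targetPath}/${url}`
  if (compilationTargets[path] && compilationTargets[path].content) return compilationTargets[path].content
  return resolveAndSave(url) // e.g. http/ipfs/github imports still go through contentImport
}
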

@@ -23,7 +23,7 @@ export const fetchContractFromEtherscan = async (plugin, network, contractAddres
}
if (typeof data.result[0].SourceCode === 'string') {
const fileName = `${targetPath}/${network.id}/${contractAddress}/${data.result[0].ContractName}.sol`
const fileName = `${targetPath}/${data.result[0].ContractName}.sol`
await plugin.call('fileManager', 'setFile', fileName , data.result[0].SourceCode)
compilationTargets[fileName] = { content: data.result[0].SourceCode }
} else if (data.result[0].SourceCode && typeof data.result[0].SourceCode == 'object') {
@@ -34,7 +34,7 @@ export const fetchContractFromEtherscan = async (plugin, network, contractAddres
if (await plugin.call('contentImport', 'isExternalUrl', file)) {
// nothing to do, the compiler callback will handle those
} else {
const path = `${targetPath}/${network.id}/${contractAddress}/${file}`
const path = `${targetPath}/${file}`
const content = (source as any).content
await plugin.call('fileManager', 'setFile', path, content)
compilationTargets[path] = { content }

@@ -13,7 +13,7 @@ export const fetchContractFromSourcify = async (plugin, network, contractAddress
}
// set the solidity contract code using metadata
await plugin.call('fileManager', 'setFile', `${targetPath}/${network.id}/${contractAddress}/metadata.json`, JSON.stringify(data.metadata, null, '\t'))
await plugin.call('fileManager', 'setFile', `${targetPath}/metadata.json`, JSON.stringify(data.metadata, null, '\t'))
for (let file in data.metadata.sources) {
const urls = data.metadata.sources[file].urls
for (const url of urls) {
@@ -24,7 +24,7 @@ export const fetchContractFromSourcify = async (plugin, network, contractAddress
if (await plugin.call('contentImport', 'isExternalUrl', file)) {
// nothing to do, the compiler callback will handle those
} else {
const path = `${targetPath}/${network.id}/${contractAddress}/${file}`
const path = `${targetPath}/${file}`
await plugin.call('fileManager', 'setFile', path, source.content)
compilationTargets[path] = { content: source.content }
}
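
Both fetch helpers drop the network id and contract address from the paths they write because the caller in compiler-fetch-and-compile now appends them to targetPath once, before the helpers run, so the on-disk layout should stay the same. A short worked example with made-up values:

// Example values only; the base folder, network id and address are illustrative.
const base = 'fetchedContracts'
const targetPath = `${base}/3/0x0000000000000000000000000000000000000001` // built once by the caller
const metadataPath = `${targetPath}/metadata.json`                        // written by fetch-sourcify
console.log(metadataPath)
// fetchedContracts/3/0x0000000000000000000000000000000000000001/metadata.json
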

@@ -64,7 +64,6 @@ export class Ethdebugger {
this.solidityProxy,
this.codeManager,
{ ...this.opts, includeLocalVariables })
this.event.trigger('managersChanged')
}
resolveStep (index) {

@@ -87,6 +87,7 @@ export class BreakpointManager {
*
*/
async jump (fromStep, direction, defaultToLimit, trace) {
this.event.trigger('locatingBreakpoint', [])
let sourceLocation
let previousSourceLocation
let currentStep = fromStep + direction
@@ -113,14 +114,14 @@
}
if (this.hasBreakpointAtLine(sourceLocation.file, lineColumn.start.line)) {
lineHadBreakpoint = true
if (direction === 1 && this.hitLine(currentStep, sourceLocation, previousSourceLocation, trace)) {
if (this.hitLine(currentStep, sourceLocation, previousSourceLocation, trace)) {
return
}
}
}
currentStep += direction
}
this.event.trigger('NoBreakpointHit', [])
this.event.trigger('noBreakpointHit', [])
if (!defaultToLimit) {
return
}
@@ -172,6 +173,7 @@
* @param {Object} sourceLocation - position of the breakpoint { file: '<file index>', row: '<line number' }
*/
add (sourceLocation) {
sourceLocation.row -= 1
if (!this.breakpoints[sourceLocation.fileName]) {
this.breakpoints[sourceLocation.fileName] = []
}
@@ -185,6 +187,7 @@
* @param {Object} sourceLocation - position of the breakpoint { file: '<file index>', row: '<line number' }
*/
remove (sourceLocation) {
sourceLocation.row -= 1
const sources = this.breakpoints[sourceLocation.fileName]
if (!sources) {
return
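
The new row -= 1 in add() and remove() reads like a 1-based to 0-based line conversion: presumably the UI passes the editor's 1-based line, while lineColumn.start.line from the source map is 0-based. That reading is an assumption, not stated in the commit; a tiny sketch of it:

// Assumption for illustration: the breakpoint arrives with a 1-based editor line.
const fromUi = { fileName: 'test.sol', row: 39 }   // line as displayed in the editor gutter
const stored = { ...fromUi, row: fromUi.row - 1 }  // what add() keeps internally
console.log(stored.row)                            // 38, comparable with lineColumn.start.line
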

@@ -41,15 +41,18 @@ export class Debugger {
}
})
this.breakPointManager.event.register('managersChanged', () => {
const { traceManager, callTree, solidityProxy } = this.debugger
this.breakPointManager.setManagers({ traceManager, callTree, solidityProxy })
})
this.breakPointManager.event.register('breakpointStep', (step) => {
this.step_manager.jumpTo(step)
})
this.breakPointManager.event.register('noBreakpointHit', (step) => {
this.event.trigger('noBreakpointHit', [])
})
this.breakPointManager.event.register('locatingBreakpoint', () => {
this.event.trigger('locatingBreakpoint', [])
})
this.debugger.setBreakpointManager(this.breakPointManager)
this.debugger.event.register('newTraceLoaded', this, () => {

@@ -277,22 +277,15 @@ function testDebugging (debugManager) {
return sourceMappingDecoder.convertOffsetToLineColumn(rawLocation, sourceMappingDecoder.getLinebreakPositions(ballot))
}})
breakPointManager.event.register('managersChanged', () => {
const {traceManager, callTree, solidityProxy} = debugManager
breakPointManager.setManagers({traceManager, callTree, solidityProxy})
})
breakPointManager.add({fileName: 'test.sol', row: 38})
breakPointManager.add({fileName: 'test.sol', row: 39})
breakPointManager.event.register('breakpointHit', function (sourceLocation, step) {
console.log('breakpointHit')
t.equal(JSON.stringify(sourceLocation), JSON.stringify({ start: 1153, length: 6, file: 0, jump: '-' }))
t.equal(step, 212)
})
breakPointManager.event.register('noBreakpointHit', function () {
t.end('noBreakpointHit')
console.log('noBreakpointHit')
})
breakPointManager.jumpNextBreakpoint(0, true)
})

@@ -56,21 +56,22 @@ export class EventsDecoder {
return eventsABI
}
_event (hash: string, eventsABI: Record<string, unknown>, contractName: string) {
const events = eventsABI[contractName]
if (!events) return null
if (events[hash]) {
const event = events[hash]
for (const input of event.inputs) {
if (input.type === 'function') {
input.type = 'bytes24'
input.baseType = 'bytes24'
_event (hash, eventsABI) {
// get all the events responding to that hash.
const contracts = []
for (const k in eventsABI) {
if (eventsABI[k][hash]) {
const event = eventsABI[k][hash]
for (const input of event.inputs) {
if (input.type === 'function') {
input.type = 'bytes24'
input.baseType = 'bytes24'
}
}
contracts.push(event)
}
return event
}
return null
return contracts
}
_stringifyBigNumber (value): string {
@@ -95,16 +96,23 @@
// [address, topics, mem]
const log = logs[i]
const topicId = log.topics[0]
const eventAbi = this._event(topicId.replace('0x', ''), eventsABI, contractName)
if (eventAbi) {
const decodedlog = eventAbi.abi.parseLog(log)
const decoded = {}
for (const v in decodedlog.args) {
decoded[v] = this._stringifyEvent(decodedlog.args[v])
const eventAbis = this._event(topicId.replace('0x', ''), eventsABI)
for (const eventAbi of eventAbis) {
try {
if (eventAbi) {
const decodedlog = eventAbi.abi.parseLog(log)
const decoded = {}
for (const v in decodedlog.args) {
decoded[v] = this._stringifyEvent(decodedlog.args[v])
}
events.push({ from: log.address, topic: topicId, event: eventAbi.event, args: decoded })
} else {
events.push({ from: log.address, data: log.data, topics: log.topics })
}
break // if one of the iteration is successful
} catch (e) {
continue
}
events.push({ from: log.address, topic: topicId, event: eventAbi.event, args: decoded })
} else {
events.push({ from: log.address, data: log.data, topics: log.topics })
}
}
cb(null, { decoded: events, raw: logs })
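
The decode loop above no longer assumes a single contract per topic: topic0 is the keccak256 hash of the event signature, so unrelated contracts declaring the same event share the same topic, and each candidate ABI is tried until one parses. A small illustration, assuming ethers v5 as already used by the test fixtures in this commit:

import { ethers } from 'ethers'

// Two contracts with the same event signature produce the same topic0,
// which is why decoding cannot be keyed to one contract name.
const fromContractA = ethers.utils.id('OwnershipTransferred(address,address)')
const fromContractB = ethers.utils.id('OwnershipTransferred(address,address)')
console.log(fromContractA === fromContractB) // true
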

@@ -87,6 +87,18 @@ export const DebuggerUI = (props: DebuggerUIProps) => {
})
})
debuggerInstance.event.register('locatingBreakpoint', async (isActive) => {
setState(prevState => {
return { ...prevState, sourceLocationStatus: 'Locating breakpoint, this might take a while...' }
})
})
debuggerInstance.event.register('noBreakpointHit', async (isActive) => {
setState(prevState => {
return { ...prevState, sourceLocationStatus: '' }
})
})
debuggerInstance.event.register('newSourceLocation', async (lineColumnPos, rawLocation, generatedSources, address) => {
if (!lineColumnPos) {
await debuggerModule.discardHighlight()

@@ -202,18 +202,28 @@ export const SearchProvider = ({
}
}, [])
//*.sol, **/*.txt, contracts/*
const setGlobalExpression = (paths: string) => {
const results = []
paths.split(',').forEach(path => {
path = path.trim()
if(path.startsWith('*.')) path = path.replace(/(\*\.)/g, '**/*.')
if(path.endsWith('/*') && !path.endsWith('/**/*')) path = path.replace(/(\*)/g, '**/*.*')
results.push(path)
})
return results
}
useEffect(() => {
if (state.find) {
(async () => {
const files = await getDirectory('/', plugin)
const pathFilter: any = {}
if (state.include){
const includeWithGlobalExpression = state.include.replaceAll(/(?<!\/)(\*\.)/g, '**/*.')
pathFilter.include = includeWithGlobalExpression.split(',').map(i => i.trim())
pathFilter.include = setGlobalExpression(state.include)
}
if (state.exclude){
const excludeWithGlobalExpression = state.exclude.replaceAll(/(?<!\/)(\*\.)/g, '**/*.')
pathFilter.exclude = excludeWithGlobalExpression.split(',').map(i => i.trim())
pathFilter.exclude = setGlobalExpression(state.exclude)
}
const filteredFiles = files.filter(filePathFilter(pathFilter)).map(file => {
const r: SearchResult = {
