publish on ipfs

pull/1/head
LianaHus 5 years ago committed by yann300
parent 4fb8453295
commit de2ec3087e
  1. package-lock.json (13113 changed lines)
  2. package.json (3 changed lines)
  3. src/app.js (1 changed line)
  4. src/app/tabs/compile-tab.js (67 changed lines)
  5. src/app/tabs/styles/compile-tab-styles.js (2 changed lines)
  6. src/lib/publishOnIpfs.js (100 changed lines)

package-lock.json (generated, 13113 changed lines)

File diff suppressed because it is too large.

package.json

@@ -37,6 +37,7 @@
"fast-async": "^7.0.6",
"fast-levenshtein": "^2.0.6",
"gists": "^1.0.1",
+ "ipfs-mini": "^1.1.5",
"is-electron": "^2.2.0",
"javascript-serialize": "^1.6.1",
"jquery": "^3.3.1",

@@ -76,6 +77,8 @@
"dependencies": {
"@remixproject/engine": "^0.1.14",
"http-server": "^0.11.1",
+ "ipfs": "^0.37.1",
+ "ipfs-http-client": "^35.1.0",
"remixd": "0.1.8-alpha.7",
"standard": "^8.5.0"
},
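ipfs-mini is the client the new publisher module (src/lib/publishOnIpfs.js below) uses to upload content through Infura's public IPFS gateway. As a minimal sketch of that client, assuming the same Infura host and port configured in the new module, an upload looks like this:

const IpfsClient = require('ipfs-mini')
const ipfs = new IpfsClient({ host: 'ipfs.infura.io', port: 5001, protocol: 'https' })

// add() resolves to the IPFS hash (a base58 multihash string) of the uploaded content
ipfs.add('pragma solidity ^0.5.0;')
  .then((hash) => console.log('dweb:/ipfs/' + hash))
  .catch((error) => console.error(error))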

src/app.js

@@ -298,6 +298,7 @@ Please make a backup of your contracts and start using http://remix.ethereum.org
registry.get('config').api,
new Renderer(),
registry.get('fileproviders/swarm').api,
+ registry.get('fileproviders/ipfs').api,
registry.get('filemanager').api,
registry.get('fileproviders').api,
)

src/app/tabs/compile-tab.js

File diff suppressed because one or more lines are too long.

src/app/tabs/styles/compile-tab-styles.js

@@ -156,7 +156,7 @@ const css = csjs`
padding-right: 5px;
word-break: break-all;
}
- .swarmLogo {
+ .storageLogo {
width: 20px;
height: 20px;
}

src/lib/publishOnIpfs.js (new file)

@@ -0,0 +1,100 @@
'use strict'
const async = require('async')
const IpfsClient = require('ipfs-mini')
const ipfs = new IpfsClient({ host: 'ipfs.infura.io', port: 5001, protocol: 'https' })

module.exports = (contract, fileManager, cb, ipfsVerifiedPublishCallBack) => {
  // gather the list of source files to publish from the compiler metadata
  var sources = []
  var metadata
  try {
    metadata = JSON.parse(contract.metadata)
  } catch (e) {
    return cb(e)
  }
  if (metadata === undefined) {
    return cb('No metadata')
  }
  async.eachSeries(Object.keys(metadata.sources), function (fileName, cb) {
    // find the IPFS hash the compiler recorded for this source file
    var hash
    try {
      hash = metadata.sources[fileName].urls[1].match('dweb:/ipfs/(.+)')[1]
    } catch (e) {
      return cb('Metadata inconsistency')
    }
    fileManager.fileProviderOf(fileName).get(fileName, (error, content) => {
      if (error) {
        console.log(error)
      } else {
        sources.push({
          content: content,
          hash: hash,
          filename: fileName
        })
      }
      cb()
    })
  }, function (error) {
    if (error) {
      cb(error)
    } else {
      // publish the list of sources in order, fail if any failed
      var uploaded = []
      async.eachSeries(sources, function (item, cb) {
        ipfsVerifiedPublish(item.content, item.hash, (error, result) => {
          try {
            item.hash = result.url.match('dweb:/ipfs/(.+)')[1]
          } catch (e) {
            item.hash = '<Metadata inconsistency> - ' + item.filename
          }
          if (!error && ipfsVerifiedPublishCallBack) ipfsVerifiedPublishCallBack(item)
          item.output = result
          uploaded.push(item)
          cb(error)
        })
      }, () => {
        // finally publish the metadata file itself and record its hash on the contract
        const metadataContent = JSON.stringify(metadata)
        ipfsVerifiedPublish(metadataContent, '', (error, result) => {
          try {
            contract.metadataHash = result.url.match('dweb:/ipfs/(.+)')[1]
          } catch (e) {
            contract.metadataHash = '<Metadata inconsistency> - metadata.json'
          }
          if (!error && ipfsVerifiedPublishCallBack) {
            ipfsVerifiedPublishCallBack({
              content: metadataContent,
              hash: contract.metadataHash
            })
          }
          uploaded.push({
            content: contract.metadata,
            hash: contract.metadataHash,
            filename: 'metadata.json',
            output: result
          })
          cb(error, uploaded)
        })
      })
    }
  })
}

// upload content to IPFS and report whether the resulting hash matches the one expected from the metadata
async function ipfsVerifiedPublish (content, expectedHash, cb) {
  try {
    const results = await ipfs.add(content)
    if (results !== expectedHash) {
      cb(null, { message: 'hash mismatch between solidity bytecode and uploaded content.', url: 'dweb:/ipfs/' + results, hash: results })
    } else {
      cb(null, { message: 'ok', url: 'dweb:/ipfs/' + results, hash: results })
    }
  } catch (error) {
    cb(error)
  }
}
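For context, here is a minimal sketch of how a caller such as the compile tab might drive this module. It is illustrative only: the require path and the surrounding logging are assumptions, and the actual compile-tab.js changes are in the suppressed diff above.

const publishOnIpfs = require('./publishOnIpfs') // path is illustrative

// 'contract' is a compiled contract object carrying its metadata JSON string,
// 'fileManager' is Remix's file manager instance
publishOnIpfs(contract, fileManager, (error, uploaded) => {
  if (error) return console.error(error)
  // each uploaded item carries { content, hash, filename, output }
  uploaded.forEach((item) => console.log(item.filename, '->', item.hash))
}, (item) => {
  // optional callback fired after each file is published and verified
  console.log('published', item.hash)
})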