databatcher

flattentree
bunsenstraat 11 months ago
parent c5cfb5cb27
commit 653ff6aaf6
  1. apps/remix-ide/src/app/files/electronProvider.ts (14 lines changed)
  2. apps/remixdesktop/src/plugins/fsPlugin.ts (17 lines changed)
  3. apps/remixdesktop/src/utils/pluginEventDataBatcher.ts (62 lines changed)

@@ -17,6 +17,18 @@ export class ElectronProvider extends FileProvider {
  async init() {
    this._appManager.on('fs', 'change', async (event, path) => {
      this.handleEvent(event, path)
    })
    this._appManager.on('fs', 'eventGroup', async (data) => {
      console.log('eventGroup', data)
      for (const event of data) {
        console.log('event', event)
        this.handleEvent(event.payload[0], event.payload[1])
      }
    })
  }
  handleEvent = (event, path) => {
    switch (event) {
      case 'add':
        this.event.emit('fileAdded', path)
@@ -38,9 +50,9 @@ export class ElectronProvider extends FileProvider {
      case 'unlinkDir':
        this.event.emit('fileRemoved', path)
      }
    })
  }
  // isDirectory is already included
  // this is a more efficient version of the default implementation
  async resolveDirectory(path, cb) {

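For context, the 'eventGroup' handler above expects each batched entry to be a {key, payload} object as produced by the batcher's write calls in the fs plugin. Below is a minimal sketch of that shape, assuming two-string payloads; the BatchedEvent type name, the handleEventGroup helper and the example path are hypothetical and used only for illustration:

// Hypothetical type for one batched entry; matches what the batcher pushes in
// write(key, ...payload), e.g. { key: 'change', payload: ['add', 'contracts/Ballot.sol'] }.
type BatchedEvent = {
  key: string
  payload: [string, string]
}

// Mirrors the loop in ElectronProvider.init(): each entry is unpacked back into
// the same (event, path) pair that the single 'change' listener receives.
function handleEventGroup(data: BatchedEvent[], handleEvent: (event: string, path: string) => void) {
  for (const event of data) {
    handleEvent(event.payload[0], event.payload[1])
  }
}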
@@ -7,6 +7,9 @@ import {createWindow, isPackaged} from '../main'
import {writeConfig} from '../utils/config'
import path from 'path'
import {customAction} from '@remixproject/plugin-api'
import { PluginEventDataBatcher } from '../utils/pluginEventDataBatcher'
const profile: Profile = {
  displayName: 'fs',
@@ -87,6 +90,7 @@ class FSPluginClient extends ElectronBasePluginClient {
  workingDir: string = ''
  trackDownStreamUpdate: Record<string, string> = {}
  expandedPaths: string[] = ['.']
  dataBatcher: PluginEventDataBatcher
  constructor(webContentsId: number, profile: Profile) {
    super(webContentsId, profile)
@@ -99,6 +103,11 @@ class FSPluginClient extends ElectronBasePluginClient {
        await this.closeWatch()
      })
    })
    this.dataBatcher = new PluginEventDataBatcher(webContentsId)
    this.dataBatcher.on('flush', (data: any) => {
      console.log('flush', data)
      this.emit('eventGroup', data)
    })
  }
  // best for non recursive
@@ -201,7 +210,7 @@ class FSPluginClient extends ElectronBasePluginClient {
    this.expandedPaths = ['.', ...paths] // add root
    console.log(Object.keys(this.watchers))
    paths = paths.map((path) => this.fixPath(path))
    for (let path of paths) {
    for (const path of paths) {
      if (!Object.keys(this.watchers).includes(path)) {
        this.watchers[path] = await this.watcherInit(path)
        console.log('added watcher', path)
@@ -263,9 +272,8 @@ class FSPluginClient extends ElectronBasePluginClient {
      const dirname = path.dirname(pathWithoutPrefix)
      if (this.expandedPaths.includes(dirname) || this.expandedPaths.includes(pathWithoutPrefix)) {
        console.log('emitting', eventName, pathWithoutPrefix, this.expandedPaths)
        this.emit('change', eventName, pathWithoutPrefix)
        this.dataBatcher.write('change', eventName, pathWithoutPrefix)
      }
      this.emit('change', eventName, pathWithoutPrefix)
    } catch (e) {
      console.log('error emitting change', e)
    }
@@ -276,7 +284,8 @@ class FSPluginClient extends ElectronBasePluginClient {
      console.log('check emitting', eventName, pathWithoutPrefix, this.expandedPaths, dirname)
      if (this.expandedPaths.includes(dirname) || this.expandedPaths.includes(pathWithoutPrefix)) {
        console.log('emitting', eventName, pathWithoutPrefix, this.expandedPaths)
        this.emit('change', eventName, pathWithoutPrefix)
        //this.emit('change', eventName, pathWithoutPrefix)
        this.dataBatcher.write('change', eventName, pathWithoutPrefix)
      }
    } catch (e) {
      console.log('error emitting change', e)

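The wiring above can be read end to end as: watcher event, then dataBatcher.write('change', ...), then a timed flush, then this.emit('eventGroup', data) over IPC to the renderer. Below is a rough stand-alone sketch of that flow, assuming the batcher from the new file that follows; emitToRenderer, the uid value and the example paths are placeholders for the plugin's real emit, webContentsId and real files:

import { PluginEventDataBatcher } from '../utils/pluginEventDataBatcher'

// Placeholder for FSPluginClient.emit(), which forwards the event to the renderer.
function emitToRenderer(event: string, data: any) {
  console.log('emit', event, data)
}

const batcher = new PluginEventDataBatcher(1) // 1 stands in for webContentsId

// Each flush becomes a single 'eventGroup' message instead of one message per event.
batcher.on('flush', (data: any[]) => emitToRenderer('eventGroup', data))

// Watcher events arriving in quick succession are queued...
batcher.write('change', 'add', 'contracts/Ballot.sol')
batcher.write('change', 'change', 'contracts/Ballot.sol')
batcher.write('change', 'unlink', 'contracts/old.sol')
// ...and delivered together roughly BATCH_DURATION_MS (16 ms) later.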
@@ -0,0 +1,62 @@
import {EventEmitter} from 'events';
import { StringDecoder } from 'string_decoder';

// Max duration to batch event data before sending it to the renderer process.
const BATCH_DURATION_MS = 16;
// Max number of queued events; once reached, the pending batch is flushed early.
const BATCH_MAX_SIZE = 200 * 1024;
// File-system events raised by the fs plugin are forwarded to the renderer
// process for handling. This class batches those events to minimize the number
// of IPC calls and the associated per-message overhead: instead of one message
// per watcher event, queued events are flushed together as a single group.
export class PluginEventDataBatcher extends EventEmitter {
  uid: number;
  decoder: StringDecoder;
  data!: any[]
  timeout!: NodeJS.Timeout | null;

  constructor(uid: number) {
    super();
    this.uid = uid;
    this.decoder = new StringDecoder('utf8');
    this.reset();
  }

  reset() {
    this.data = [];
    this.timeout = null;
  }

  write(key: string, ...payload: any): void {
    if (this.data.length >= BATCH_MAX_SIZE) {
      // We've reached the max batch size. Flush it and start another one
      if (this.timeout) {
        clearTimeout(this.timeout);
        this.timeout = null;
      }
      this.flush();
    }

    this.data.push({
      key,
      payload,
    });
    console.log('data', this.data)

    if (!this.timeout) {
      this.timeout = setTimeout(() => this.flush(), BATCH_DURATION_MS);
    }
  }

  flush() {
    // Reset before emitting to allow for potential reentrancy
    const data = this.data;
    this.reset();
    this.emit('flush', data);
  }
}
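As a rough illustration of the flush semantics implemented above (a sketch, not part of the commit): the timer is armed on the first write of a batch, everything written before it fires is delivered in one 'flush', and a later write starts a fresh batch. The uid value and file names below are arbitrary.

import { PluginEventDataBatcher } from './pluginEventDataBatcher'

const batcher = new PluginEventDataBatcher(42) // uid is only stored, not interpreted

batcher.on('flush', (data: any[]) => {
  // First flush (~16 ms after the first write): two entries for 'a.txt'.
  // Second flush (~16 ms after the delayed write): one 'unlink' entry.
  console.log('flush of', data.length, 'events', data)
})

batcher.write('change', 'add', 'a.txt')
batcher.write('change', 'change', 'a.txt') // joins the pending batch, no new timer

setTimeout(() => {
  // Written after the first flush, so this starts a second batch.
  batcher.write('change', 'unlink', 'a.txt')
}, 50)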