Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
} else if (type === 'ofs-delta') {
offsetToObject[offset] = {
type,
offset
}
} else if (type === 'ref-delta') {
offsetToObject[offset] = {
type,
offset
}
}
})
times['offsets'] = Math.floor(marky.stop('offsets').duration)
log('Computing CRCs')
marky.mark('crcs')
// We need to know the lengths of the slices to compute the CRCs.
const offsetArray = Object.keys(offsetToObject).map(Number)
for (const [i, start] of offsetArray.entries()) {
const end =
i + 1 === offsetArray.length ? pack.byteLength - 20 : offsetArray[i + 1]
const o = offsetToObject[start]
const crc = crc32.buf(pack.slice(start, end)) >>> 0
o.end = end
o.crc = crc
}
times['crcs'] = Math.floor(marky.stop('crcs').duration)
// We don't have the hashes yet. But we can generate them using the .readSlice function!
const p = new GitPackIndex({
pack: Promise.resolve(pack),
packfileSha,
descriptor.value = async function(this: void, ...args: any[]) {
  // Start a timer for this method only when profiling is switched on.
  if (ProfilingEnabled) mark(propertyKey)

  const result = await functionToMeasure.apply(this, args)

  // Re-check the flag after the await in case profiling was toggled mid-call.
  if (ProfilingEnabled) {
    const measurement = stop(propertyKey)
    console.log(`method '${measurement.name}' took ${measurement.duration.toFixed(2)} ms`)
  }
  return result
}
}
lastPercent = percent
const o = offsetToObject[offset]
if (o.oid) continue
try {
p.readDepth = 0
p.externalReadDepth = 0
marky.mark('readSlice')
const { type, object } = await p.readSlice({ start: offset })
const time = marky.stop('readSlice').duration
times.readSlice += time
callsToReadSlice += p.readDepth
callsToGetExternal += p.externalReadDepth
timeByDepth[p.readDepth] += time
objectsByDepth[p.readDepth] += 1
marky.mark('hash')
const oid = await shasum(GitObject.wrap({ type, object }))
times.hash += marky.stop('hash').duration
o.oid = oid
hashes.push(oid)
offsets.set(oid, offset)
crcs[oid] = o.crc
} catch (err) {
log('ERROR', err)
continue
}
}
marky.mark('sort')
hashes.sort()
times['sort'] = Math.floor(marky.stop('sort').duration)
const totalElapsedTime = marky.stop('total').duration
async read (filepath, options = {}) {
try {
marky.mark(filepath)
let buffer = await this._readFile(filepath, options)
readFileLog(`${filepath} ${marky.stop(filepath).duration}`)
return buffer
} catch (err) {
return null
}
}
/**
/**
 * Parse a JSON patch string and apply it to the monsters list,
 * timing the deserialization and patch phases separately with marky.
 */
function doApplyPatch(patchString) {
  // Phase 1: deserialize the incoming patch.
  marky.mark('JSON.parse()');
  const parsedPatch = JSON.parse(patchString);
  marky.stop('JSON.parse()');

  // Phase 2: apply the patch to the live list element.
  marky.mark('patchElement()');
  applyPatch(monstersList, parsedPatch);
  marky.stop('patchElement()');

  progress.end();
}
// NOTE(review): byte-identical duplicate of the doApplyPatch defined above —
// in a single file the later declaration silently wins; presumably a
// copy/paste artifact. Confirm which copy is intended and remove the other.
// Parses a JSON patch string and applies it to the monsters list,
// timing the parse and patch phases with marky.
function doApplyPatch(patchString) {
marky.mark('JSON.parse()');
var patch = JSON.parse(patchString);
marky.stop('JSON.parse()');
marky.mark('patchElement()');
applyPatch(monstersList, patch);
marky.stop('patchElement()');
progress.end();
}
/**
 * Recompute which list children are visible after a scroll/resize and
 * notify the worker of the new visible range (padded by PLACEHOLDER_OFFSET).
 * No-op until the monsters list exists.
 */
function onViewportChange() {
  if (!monstersList) return;

  // Locate the visible window via two binary searches over the children.
  marky.mark('binarySearch');
  const children = monstersList.children;
  const firstVisibleIndex = binarySearchForFirstVisibleChild(children);
  const firstInvisibleIndex = binarySearchForFirstInvisibleChild(firstVisibleIndex, children);
  marky.stop('binarySearch');

  // NOTE(review): the 'worker' mark has no matching stop in this function —
  // presumably it is stopped when the worker replies; confirm.
  marky.mark('worker');
  worker.postMessage({
    type: 'listStateChanged',
    start: Math.max(0, firstVisibleIndex - PLACEHOLDER_OFFSET),
    end: firstInvisibleIndex + PLACEHOLDER_OFFSET
  });
}
async retrieveChunk(transactionHash, index) {
Marky.mark('readFromTangle');
try {
this.chunkBundle[index] = Tanglestash.buildChunkBundleEntry(null, index);
this.chunkBundle[index]["hash"] = transactionHash;
let failedChunkIndex = this.failedChunks.indexOf(index);
if (failedChunkIndex !== -1) {
this.failedChunks.splice(failedChunkIndex, 1);
}
let chunk = await this.retrieveJSONFromTransaction(transactionHash);
Marky.stop('readFromTangle');
this.chunkBundle[index]["content"] = chunk[this.ChunkContentKey];
this.chunkBundle[index]["retrieved"] = true;