Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
// Serve an incoming HTTP request from the local cacache store when a fresh
// entry exists; otherwise forward the request upstream.
// NOTE(review): fragment is truncated — the method's closing brace is not
// visible in this chunk.
private async _handleAsync(
  request: IncomingMessage,
  response: ServerResponse,
) {
  debug(
    'Incoming request %s %s',
    request.method,
    request.url,
    request.headers,
  );
  // Cache key derived from the request — see _getCacheKey for the scheme.
  const cacheKey = this._getCacheKey(request);
  try {
    const entry = await cacache.get(this._options.cachePath, cacheKey);
    // Entry is fresh while createdAt + ttl lies in the future.
    if (entry.metadata.createdAt + this._options.ttl > Date.now()) {
      debug('Sending cached response for %s', cacheKey);
      this._sendCachedEntry(entry.data, entry.metadata, response);
      return;
    }
    debug('Cache entry expired for %s', cacheKey);
    // (continue to forward the request)
  } catch (error) {
    // cacache rejects with ENOENT on a plain cache miss; anything else is a
    // real failure worth logging — either way we fall through and forward.
    if (error.code !== 'ENOENT') {
      console.warn('Cannot load cached entry.', error);
    }
    debug('Cache miss for %s', cacheKey);
    // (continue to forward the request)
  }
  await this._forwardRequest(request, response);
//
//
// Much slower way to get cache with pacote
//
// const promise = pacote
// .packument(
// pkgName,
// this.getPacoteOpts({
// offline: true,
// "full-metadata": true,
// "fetch-retries": 3
// })
// )
// Try the packument from cacache first; on a cache miss fall back to a
// network metadata fetch. `foundCache` distinguishes a miss (expected, fall
// back) from an error thrown while handling a hit (unexpected, rethrow).
// NOTE(review): fragment is truncated — the final .then body is cut off.
const promise = cacache
  .get(this._cacheDir, cacheKey, { memoize: true })
  .then(cached => {
    foundCache = true;
    // cached.data holds the raw JSON packument bytes.
    const packument = JSON.parse(cached.data);
    logger.debug("found", pkgName, "packument cache");
    return queueMetaFetchRequest(packument);
  })
  .catch(err => {
    if (foundCache) {
      // the .then above threw an error - not expected
      throw err;
    }
    // Plain cache miss: fetch metadata without a cached packument.
    return queueMetaFetchRequest();
  })
  .then(meta => {
    this._metaStat.done++;
// NOTE(review): fragment begins mid promise-chain; the preceding .then is
// not visible in this chunk.
.then(() => {
  // With caching enabled, prefer previously optimized bytes from cacache;
  // on a miss run `imagemin` and persist the result for the next build.
  if (options.cache) {
    return cacache.get(cacheDir, cacheKey).then(
      ({ data }) => data,
      // The rejection handler doubles as the cache-miss path.
      () =>
        runImagemin(
          result.input,
          imageminOptions
        ).then(optimizedSource =>
          cacache
            .put(cacheDir, cacheKey, optimizedSource)
            .then(() => optimizedSource)
        )
    );
  }
  // Caching disabled: always optimize from scratch.
  return runImagemin(result.input, imageminOptions);
})
// Materialize the git package into a tmp dir and hand that dir to `handler`.
// When the spec resolves to a hosted tarball URL we download the tarball via
// RemoteFetcher instead of running git; `tarballOk = false` forces a clone.
// NOTE(review): fragment is truncated — the non-tarball (actual git clone)
// branch and the method's closing brace lie outside this chunk.
[_clone] (handler, tarballOk = true) {
  const o = { tmpPrefix: 'git-clone' }
  const ref = this.resolvedSha || this.spec.gitCommittish
  const h = this.spec.hosted
  const resolved = this.resolved
  // can be set manually to false to fall back to actual git clone
  tarballOk = tarballOk &&
    h && resolved === repoUrl(h, { noCommittish: false }) && h.tarball
  return cacache.tmp.withTmp(this.cache, o, tmp => {
    // if we're resolved, and have a tarball url, shell out to RemoteFetcher
    if (tarballOk) {
      const nameat = this.spec.name ? `${this.spec.name}@` : ''
      return new RemoteFetcher(h.tarball({ noCommittish: false }), {
        ...this.opts,
        pkgid: `git:${nameat}${this.resolved}`,
        resolved: this.resolved,
        integrity: null, // it'll always be different, if we have one
      }).extract(tmp).then(() => handler(tmp), er => {
        // fall back to ssh download if tarball fails
        if (er.constructor.name.match(/^Http/))
          return this[_clone](handler, false)
        else
          throw er
      })
    }
// NOTE(review): this fragment is spliced from a different file (npm's pack
// code); it begins mid promise-chain and is truncated after packlist().
}).then((pkg) => {
  return cacache.tmp.withTmp(npm.tmp, {tmpPrefix: 'packing'}, (tmp) => {
    const tmpTarget = path.join(tmp, path.basename(target))
    // tar options chosen for a reproducible, portable archive rooted at
    // the conventional package/ prefix.
    const tarOpt = {
      file: tmpTarget,
      cwd: dir,
      prefix: 'package/',
      portable: true,
      // Provide a specific date in the 1980s for the benefit of zip,
      // which is confounded by files dated at the Unix epoch 0.
      mtime: new Date('1985-10-26T08:15:00.000Z'),
      gzip: true
    }
    return BB.resolve(packlist({ path: dir }))
    // NOTE: node-tar does some Magic Stuff depending on prefixes for files
    // specifically with @ signs, so we just neutralize that one
// Run `cb` with a temporary working directory that is cleaned up afterwards.
// With a configured cache dir we delegate to cacache's managed tmp helper;
// otherwise we build a Bluebird disposer around a unique dir under the OS
// tmpdir so rimraf reclaims it when `cb` settles.
function withTmp (opts, cb) {
  if (!opts.cache) {
    const tmpDir = path.join(osenv.tmpdir(), 'pacote-git-tmp')
    const tmpName = uniqueFilename(tmpDir, 'git-clone')
    const tmp = mkdirp(tmpName).then(() => tmpName).disposer(rimraf)
    return BB.using(tmp, cb)
  }
  // cacache has a special facility for working in a tmp dir
  return cacache.tmp.withTmp(opts.cache, { tmpPrefix: 'git-clone' }, cb)
}
manifest () {
if (this.package)
return Promise.resolve(this.package)
// have to unpack the tarball for this.
return cacache.tmp.withTmp(this.cache, this.opts, dir =>
this.extract(dir)
.then(() => readPackageJson(dir + '/package.json'))
.then(mani => this.package = {
...mani,
_integrity: String(this.integrity),
_resolved: this.resolved,
_from: this.from,
}))
}
})
// NOTE(review): fragment starts inside a try block whose opening is not
// visible in this chunk.
log.trace(integrity)
// Compare the computed digest for hashAlgorithm against the expected value.
if (integrity[hashAlgorithm][0].source !== integrityDigest) {
  throw new Error('Integrity check failed')
} else {
  log.trace('integrity digest ok')
}
} catch (err) {
  // Do not throw yet, only display the error.
  log.info(err)
  if (os.platform() === 'win32') {
    log.info('If you have an aggressive antivirus, try to' +
      ' reconfigure it, or temporarily disable it.')
  }
  // Remove from the index.
  await cacache.rm.entry(cachePath, cacheKey)
  throw new CliError('Download failed.', CliExitCodes.ERROR.INPUT)
}
// Update the cache info after downloading the file.
cacheInfo = await cacache.get.info(cachePath, cacheKey)
if (!cacheInfo) {
  throw new CliError('Download failed.', CliExitCodes.ERROR.INPUT)
}
}
// The number of initial folder levels to skip.
let skip = 0
if (json.xpack.binaries.skip) {
  try {
    // NOTE(review): parseInt() never throws — it returns NaN on bad input —
    // so this catch is dead; consider a Number.isNaN() check instead.
    skip = parseInt(json.xpack.binaries.skip)
  } catch (err) {
  }
'delete' (req, opts) {
opts = opts || {}
if (typeof opts.memoize === 'object') {
if (opts.memoize.reset) {
opts.memoize.reset()
} else if (opts.memoize.clear) {
opts.memoize.clear()
} else {
Object.keys(opts.memoize).forEach(k => {
opts.memoize[k] = null
})
}
}
return cacache.rm.entry(
this._path,
cacheKey(req)
// TODO - true/false
).then(() => false)
}
}
// Return stat-like info for a file addressed through the package map,
// fetching the package into the cacache store when its content is absent.
// NOTE(review): fragment is truncated — the final catch body runs past the
// end of this chunk.
async function stat ({ cache, hash, pkg, resolvedPath, isDir }, verify) {
  // Directories and the map file itself get a synthetic read-only stat
  // based on the cache dir's own lstat.
  if (isDir || path.basename(resolvedPath) === '.package-map.json') {
    return Object.assign(fs.lstatSync.orig(process.tink.cache), {
      mode: 16676, // read-only
      size: 64
    })
  }
  if (!cache || !hash) {
    throw new Error('stat() requires a fully-resolved pkgmap file address')
  }
  let info
  try {
    info = await ccGet.hasContent(cache, hash)
  } catch (err) {
    // Content lookup failed: fetch the package, then re-check.
    await fetchPackage(cache, hash, pkg)
    info = await ccGet.hasContent(cache, hash)
  }
  if (!info) {
    return false
  }
  const cpath = ccPath(cache, info.sri)
  if (verify) {
    try {
      // Confirm the on-disk content still matches its subresource integrity.
      await ssri.checkStream(
        fs.createReadStream.orig(cpath),
        info.sri
      )
    } catch (err) {
      // NOTE(review): argument order (cache, pkg, hash) differs from the
      // fetchPackage(cache, hash, pkg) call above — confirm which matches
      // the real signature.
      const newResolved = await fetchPackage(cache, pkg, hash)