aboutsummaryrefslogtreecommitdiff
path: root/sandbox/testAppNevena/Front/node_modules/cacache/lib/content
diff options
context:
space:
mode:
Diffstat (limited to 'sandbox/testAppNevena/Front/node_modules/cacache/lib/content')
-rw-r--r--sandbox/testAppNevena/Front/node_modules/cacache/lib/content/path.js29
-rw-r--r--sandbox/testAppNevena/Front/node_modules/cacache/lib/content/read.js244
-rw-r--r--sandbox/testAppNevena/Front/node_modules/cacache/lib/content/rm.js19
-rw-r--r--sandbox/testAppNevena/Front/node_modules/cacache/lib/content/write.js189
4 files changed, 481 insertions, 0 deletions
diff --git a/sandbox/testAppNevena/Front/node_modules/cacache/lib/content/path.js b/sandbox/testAppNevena/Front/node_modules/cacache/lib/content/path.js
new file mode 100644
index 00000000..ad5a76a4
--- /dev/null
+++ b/sandbox/testAppNevena/Front/node_modules/cacache/lib/content/path.js
@@ -0,0 +1,29 @@
+'use strict'
+
+const contentVer = require('../../package.json')['cache-version'].content
+const hashToSegments = require('../util/hash-to-segments')
+const path = require('path')
+const ssri = require('ssri')
+
+// Current format of content file path:
+//
+// sha512-BaSE64Hex= ->
+// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
+//
module.exports = contentPath

// Resolve the on-disk location of a piece of cached content, addressed
// by its subresource-integrity string. Only the strongest algorithm in
// the given integrity is used to build the path.
function contentPath (cache, integrity) {
  const parsed = ssri.parse(integrity, { single: true })
  const segments = hashToSegments(parsed.hexDigest())
  return path.join(contentDir(cache), parsed.algorithm, ...segments)
}
+
module.exports.contentDir = contentDir

// Root directory for all content files in this cache. The directory is
// versioned (content-v<N>) so layout changes can coexist with old data.
function contentDir (cache) {
  return path.join(cache, 'content-v' + contentVer)
}
diff --git a/sandbox/testAppNevena/Front/node_modules/cacache/lib/content/read.js b/sandbox/testAppNevena/Front/node_modules/cacache/lib/content/read.js
new file mode 100644
index 00000000..034e8eee
--- /dev/null
+++ b/sandbox/testAppNevena/Front/node_modules/cacache/lib/content/read.js
@@ -0,0 +1,244 @@
+'use strict'
+
+const util = require('util')
+
+const fs = require('fs')
+const fsm = require('fs-minipass')
+const ssri = require('ssri')
+const contentPath = require('./path')
+const Pipeline = require('minipass-pipeline')
+
+const lstat = util.promisify(fs.lstat)
+const readFile = util.promisify(fs.readFile)
+
module.exports = read

// Files larger than this are streamed and verified on the fly rather
// than buffered in a single readFile call.
const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024

// Read the full content for `integrity` out of the cache and verify its
// checksum. Resolves with a Buffer; rejects with EBADSIZE when
// opts.size doesn't match, or EINTEGRITY on checksum failure.
function read (cache, integrity, opts = {}) {
  const { size } = opts
  return withContentSri(cache, integrity, (cpath, sri) => {
    // stat first so both the size check and the read strategy can use it
    return lstat(cpath).then((stat) => ({ stat, cpath, sri }))
  }).then(({ stat, cpath, sri }) => {
    if (typeof size === 'number' && stat.size !== size)
      throw sizeError(size, stat.size)

    if (stat.size > MAX_SINGLE_READ_SIZE) {
      // too large to buffer comfortably: verify while streaming
      return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
    }

    return readFile(cpath, null).then((data) => {
      if (ssri.checkData(data, sri))
        return data

      throw integrityError(sri, cpath)
    })
  })
}
+
// Wire a verified read of `cpath` into `stream`: a file-read stage feeds
// an ssri verification stage, and the combined pipeline is returned.
const readPipeline = (cpath, size, sri, stream) => {
  const source = new fsm.ReadStream(cpath, {
    size,
    readSize: MAX_SINGLE_READ_SIZE,
  })
  const verifier = ssri.integrityStream({
    integrity: sri,
    size,
  })
  stream.push(source, verifier)
  return stream
}
+
module.exports.sync = readSync

// Synchronous variant of read(): buffers the whole file, checks the
// optional expected size, then verifies the checksum before returning.
function readSync (cache, integrity, opts = {}) {
  const { size } = opts
  return withContentSriSync(cache, integrity, (cpath, sri) => {
    const data = fs.readFileSync(cpath)
    if (typeof size === 'number' && size !== data.length)
      throw sizeError(size, data.length)

    if (!ssri.checkData(data, sri))
      throw integrityError(sri, cpath)

    return data
  })
}
+
module.exports.stream = readStream
module.exports.readStream = readStream

// Return a stream of verified cached content. The Pipeline is handed
// back synchronously; the async lookup wires it up (or emits an error)
// once the stat completes.
function readStream (cache, integrity, opts = {}) {
  const { size } = opts
  const stream = new Pipeline()
  withContentSri(cache, integrity, (cpath, sri) => {
    // lstat only to confirm the entry exists and learn its size
    return lstat(cpath).then((stat) => ({ stat, cpath, sri }))
  }).then(({ stat, cpath, sri }) => {
    if (typeof size === 'number' && size !== stat.size) {
      stream.emit('error', sizeError(size, stat.size))
      return
    }

    readPipeline(cpath, stat.size, sri, stream)
  }, (er) => stream.emit('error', er))

  return stream
}
+
// Only expose copy()/copySync() when fs.copyFile exists on this runtime.
let copyFile
if (fs.copyFile) {
  module.exports.copy = copy
  module.exports.copy.sync = copySync
  copyFile = util.promisify(fs.copyFile)
}

// Copy the cached content for `integrity` to `dest` (async). Resolves
// once the file has been copied; no checksum re-verification is done.
function copy (cache, integrity, dest) {
  return withContentSri(cache, integrity, (cpath, sri) => {
    return copyFile(cpath, dest)
  })
}

// Synchronous variant of copy().
function copySync (cache, integrity, dest) {
  return withContentSriSync(cache, integrity, (cpath, sri) => {
    return fs.copyFileSync(cpath, dest)
  })
}
+
module.exports.hasContent = hasContent

// Check whether content for `integrity` exists in the cache.
// Resolves to { size, sri, stat } when present, false when missing.
function hasContent (cache, integrity) {
  if (!integrity)
    return Promise.resolve(false)

  return withContentSri(cache, integrity, (cpath, sri) => {
    return lstat(cpath).then((stat) => ({ size: stat.size, sri, stat }))
  }).catch((err) => {
    if (err.code === 'ENOENT')
      return false

    if (err.code === 'EPERM') {
      // Windows can report EPERM for files that are merely busy; treat
      // that as "not available" there instead of failing outright.
      /* istanbul ignore else */
      if (process.platform === 'win32')
        return false
      else
        throw err
    }
  })
}
+
module.exports.hasContent.sync = hasContentSync

// Synchronous variant of hasContent(): returns { size, sri, stat } when
// the content file exists, false when it is missing (or busy on win32).
function hasContentSync (cache, integrity) {
  if (!integrity)
    return false

  return withContentSriSync(cache, integrity, (cpath, sri) => {
    try {
      const stat = fs.lstatSync(cpath)
      return { size: stat.size, sri, stat }
    } catch (err) {
      if (err.code === 'ENOENT')
        return false

      if (err.code === 'EPERM') {
        /* istanbul ignore else */
        if (process.platform === 'win32')
          return false
        else
          throw err
      }
    }
  })
}
+
// Resolve `integrity` to one or more concrete digests and invoke
// `fn(contentPath, digest)` for the first digest with usable local data.
// Always returns a Promise: the outer wrapper converts synchronous
// throws (from ssri.parse or fn) into rejections.
function withContentSri (cache, integrity, fn) {
  const tryFn = () => {
    const sri = ssri.parse(integrity)
    // If `integrity` has multiple entries, pick the first digest
    // with available local data.
    const algo = sri.pickAlgorithm()
    const digests = sri[algo]

    if (digests.length <= 1) {
      // single digest: call fn directly with its content path
      const cpath = contentPath(cache, digests[0])
      return fn(cpath, digests[0])
    } else {
      // Can't use race here because a generic error can happen before
      // a ENOENT error, and can happen before a valid result
      return Promise
        .all(digests.map((meta) => {
          // recurse per digest; errors are captured as values so a
          // single failure doesn't abort the whole Promise.all
          return withContentSri(cache, meta, fn)
            .catch((err) => {
              if (err.code === 'ENOENT') {
                return Object.assign(
                  new Error('No matching content found for ' + sri.toString()),
                  { code: 'ENOENT' }
                )
              }
              return err
            })
        }))
        .then((results) => {
          // Return the first non error if it is found
          const result = results.find((r) => !(r instanceof Error))
          if (result)
            return result

          // Throw the No matching content found error
          const enoentError = results.find((r) => r.code === 'ENOENT')
          if (enoentError)
            throw enoentError

          // Throw generic error
          throw results.find((r) => r instanceof Error)
        })
    }
  }

  return new Promise((resolve, reject) => {
    try {
      tryFn()
        .then(resolve)
        .catch(reject)
    } catch (err) {
      // synchronous failures surface as a rejection, not a throw
      reject(err)
    }
  })
}
+
// Synchronous variant of withContentSri(): resolve `integrity` to a
// concrete digest, compute its content path, and invoke `fn(cpath, sri)`.
// With multiple digests, each is tried in order; if all fail, the last
// error is rethrown.
function withContentSriSync (cache, integrity, fn) {
  const sri = ssri.parse(integrity)
  // If `integrity` has multiple entries, pick the first digest
  // with available local data.
  const algo = sri.pickAlgorithm()
  const digests = sri[algo]

  if (digests.length <= 1)
    return fn(contentPath(cache, digests[0]), digests[0])

  let lastErr = null
  for (const meta of digests) {
    try {
      return withContentSriSync(cache, meta, fn)
    } catch (err) {
      lastErr = err
    }
  }
  throw lastErr
}
+
// Build an EBADSIZE error describing a mismatch between the size the
// caller expected and the size actually found.
function sizeError (expected, found) {
  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
  Object.assign(err, {
    expected,
    found,
    code: 'EBADSIZE',
  })
  return err
}
+
// Build an EINTEGRITY error for a content file whose data does not match
// its expected digest.
function integrityError (sri, path) {
  const err = new Error(`Integrity verification failed for ${sri} (${path})`)
  Object.assign(err, {
    code: 'EINTEGRITY',
    sri,
    path,
  })
  return err
}
diff --git a/sandbox/testAppNevena/Front/node_modules/cacache/lib/content/rm.js b/sandbox/testAppNevena/Front/node_modules/cacache/lib/content/rm.js
new file mode 100644
index 00000000..6a3d1a3d
--- /dev/null
+++ b/sandbox/testAppNevena/Front/node_modules/cacache/lib/content/rm.js
@@ -0,0 +1,19 @@
+'use strict'
+
+const util = require('util')
+
+const contentPath = require('./path')
+const { hasContent } = require('./read')
+const rimraf = util.promisify(require('rimraf'))
+
module.exports = rm

// Remove the content file for `integrity` from the cache, if present.
// Resolves to true when something was removed, false otherwise.
function rm (cache, integrity) {
  return hasContent(cache, integrity).then((content) => {
    // ~pretty~ sure we can't end up with a content lacking sri, but be safe
    if (!(content && content.sri))
      return false

    return rimraf(contentPath(cache, content.sri)).then(() => true)
  })
}
diff --git a/sandbox/testAppNevena/Front/node_modules/cacache/lib/content/write.js b/sandbox/testAppNevena/Front/node_modules/cacache/lib/content/write.js
new file mode 100644
index 00000000..dde1bd1d
--- /dev/null
+++ b/sandbox/testAppNevena/Front/node_modules/cacache/lib/content/write.js
@@ -0,0 +1,189 @@
+'use strict'
+
+const util = require('util')
+
+const contentPath = require('./path')
+const fixOwner = require('../util/fix-owner')
+const fs = require('fs')
+const moveFile = require('../util/move-file')
+const Minipass = require('minipass')
+const Pipeline = require('minipass-pipeline')
+const Flush = require('minipass-flush')
+const path = require('path')
+const rimraf = util.promisify(require('rimraf'))
+const ssri = require('ssri')
+const uniqueFilename = require('unique-filename')
+const { disposer } = require('./../util/disposer')
+const fsm = require('fs-minipass')
+
+const writeFile = util.promisify(fs.writeFile)
+
module.exports = write

// Write `data` (a Buffer/string) into the cache, verifying it against
// `opts.integrity` and `opts.size` when given. Resolves with
// { integrity, size }. Note: an unsupported algorithms list is reported
// with a synchronous throw; size/integrity mismatches reject.
function write (cache, data, opts = {}) {
  const { algorithms, size, integrity } = opts
  if (algorithms && algorithms.length > 1)
    throw new Error('opts.algorithms only supports a single algorithm for now')

  if (typeof size === 'number' && data.length !== size)
    return Promise.reject(sizeError(size, data.length))

  const digest = ssri.fromData(data, algorithms ? { algorithms } : {})
  if (integrity && !ssri.checkData(data, integrity, opts))
    return Promise.reject(checksumError(integrity, digest))

  // Write to a tmp file first, then move into place; the disposer makes
  // sure the tmp file is cleaned up even on failure.
  return disposer(makeTmp(cache, opts), makeTmpDisposer, (tmp) => {
    return writeFile(tmp.target, data, { flag: 'wx' })
      .then(() => moveToDestination(tmp, cache, digest, opts))
  }).then(() => ({ integrity: digest, size: data.length }))
}
+
module.exports.stream = writeStream

// writes proxied to the 'inputStream' that is passed to the Promise
// 'end' is deferred until content is handled.
class CacacheWriteStream extends Flush {
  // cache: root cache directory; opts: forwarded to handleContent()
  constructor (cache, opts) {
    super()
    this.opts = opts
    this.cache = cache
    // internal stream that actually receives the data; errors and
    // backpressure ('drain') are re-emitted on this wrapper
    this.inputStream = new Minipass()
    this.inputStream.on('error', er => this.emit('error', er))
    this.inputStream.on('drain', () => this.emit('drain'))
    // promise for the cache-write pipeline; created lazily on the first
    // write so that a fully-empty stream can be detected in flush()
    this.handleContentP = null
  }

  write (chunk, encoding, cb) {
    if (!this.handleContentP) {
      // first chunk: start draining inputStream into the cache
      this.handleContentP = handleContent(
        this.inputStream,
        this.cache,
        this.opts
      )
    }
    return this.inputStream.write(chunk, encoding, cb)
  }

  flush (cb) {
    this.inputStream.end(() => {
      if (!this.handleContentP) {
        // no write() ever happened, so nothing was cached
        const e = new Error('Cache input stream was empty')
        e.code = 'ENODATA'
        // empty streams are probably emitting end right away.
        // defer this one tick by rejecting a promise on it.
        return Promise.reject(e).catch(cb)
      }
      this.handleContentP.then(
        (res) => {
          // surface the results so callers can learn what was stored
          res.integrity && this.emit('integrity', res.integrity)
          res.size !== null && this.emit('size', res.size)
          cb()
        },
        (er) => cb(er)
      )
    })
  }
}
+
// Create a write stream that caches everything written to it; the
// stream emits 'integrity' and 'size' once the content is stored.
function writeStream (cache, opts = {}) {
  const stream = new CacacheWriteStream(cache, opts)
  return stream
}
+
// Drain `inputStream` into a tmp file, then move the finished file into
// its content-addressed location. Resolves with { integrity, size };
// the disposer removes the tmp file if anything goes wrong.
function handleContent (inputStream, cache, opts) {
  return disposer(makeTmp(cache, opts), makeTmpDisposer, (tmp) => {
    return pipeToTmp(inputStream, cache, tmp.target, opts).then((res) =>
      moveToDestination(tmp, cache, res.integrity, opts).then(() => res)
    )
  })
}
+
// Stream `inputStream` through an ssri hasher into `tmpTarget`.
// Resolves with the computed { integrity, size }; on failure the
// partial tmp file is removed before the error is rethrown.
function pipeToTmp (inputStream, cache, tmpTarget, opts) {
  let integrity
  let size
  const hashStream = ssri.integrityStream({
    integrity: opts.integrity,
    algorithms: opts.algorithms,
    size: opts.size,
  })
  // capture results as the hasher reports them
  hashStream.on('integrity', (i) => {
    integrity = i
  })
  hashStream.on('size', (s) => {
    size = s
  })

  const outStream = new fsm.WriteStream(tmpTarget, { flags: 'wx' })

  // NB: this can throw if the hashStream has a problem with
  // it, and the data is fully written. but pipeToTmp is only
  // called in promisory contexts where that is handled.
  const pipeline = new Pipeline(inputStream, hashStream, outStream)

  return pipeline
    .promise()
    .then(() => ({ integrity, size }))
    .catch((er) =>
      rimraf(tmpTarget).then(() => {
        throw er
      })
    )
}
+
// Reserve a unique tmp file path under `<cache>/tmp` (creating the
// directory with fixed ownership) and return a handle; `moved` tells the
// disposer whether the file still needs cleanup.
function makeTmp (cache, opts) {
  const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
  return fixOwner
    .mkdirfix(cache, path.dirname(target))
    .then(() => ({ target, moved: false }))
}
+
// Cleanup for makeTmp handles: once the tmp file has been moved into
// place there is nothing to remove; otherwise delete it.
function makeTmpDisposer (tmp) {
  return tmp.moved ? Promise.resolve() : rimraf(tmp.target)
}
+
// Move a finished tmp file into its final content-addressed location,
// then mark the tmp handle as moved so the disposer skips cleanup and
// fix ownership of the destination.
function moveToDestination (tmp, cache, sri, opts) {
  const destination = contentPath(cache, sri)

  return fixOwner
    .mkdirfix(cache, path.dirname(destination))
    .then(() => moveFile(tmp.target, destination))
    .then(() => {
      tmp.moved = true
      return fixOwner.chownr(cache, destination)
    })
}
+
// Build an EBADSIZE error describing a mismatch between the size the
// caller expected and the size actually written.
function sizeError (expected, found) {
  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
  Object.assign(err, {
    expected,
    found,
    code: 'EBADSIZE',
  })
  return err
}
+
// Build an EINTEGRITY error for data whose computed digest does not
// match the integrity the caller asked for.
function checksumError (expected, found) {
  const err = new Error(`Integrity check failed:
  Wanted: ${expected}
   Found: ${found}`)
  Object.assign(err, {
    code: 'EINTEGRITY',
    expected,
    found,
  })
  return err
}