/*!
 * Mygra v0.2.1
 * (c) Blujedi LLC
 * Released under the MIT License.
 */

'use strict';

Object.defineProperty(exports, '__esModule', { value: true });

var require$$0$2 = require('fs');
var require$$0 = require('constants');
var require$$0$1 = require('stream');
var require$$4 = require('util');
var require$$5 = require('assert');
var require$$1 = require('path');
var os = require('os');
var symbols = require('log-symbols');
var colors = require('ansi-colors');
var flatCache = require('flat-cache');
var events = require('events');
var glob = require('fast-glob');

function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }

function _interopNamespace(e) {
  if (e && e.__esModule) return e;
  var n = Object.create(null);
  if (e) {
    Object.keys(e).forEach(function (k) {
      if (k !== 'default') {
        var d = Object.getOwnPropertyDescriptor(e, k);
        Object.defineProperty(n, k, d.get ? d : {
          enumerable: true,
          get: function () { return e[k]; }
        });
      }
    });
  }
  n['default'] = e;
  return Object.freeze(n);
}

var require$$0__default$2 = /*#__PURE__*/_interopDefaultLegacy(require$$0$2);
var require$$0__default = /*#__PURE__*/_interopDefaultLegacy(require$$0);
var require$$0__default$1 = /*#__PURE__*/_interopDefaultLegacy(require$$0$1);
var require$$4__default = /*#__PURE__*/_interopDefaultLegacy(require$$4);
var require$$5__default = /*#__PURE__*/_interopDefaultLegacy(require$$5);
var require$$1__default = /*#__PURE__*/_interopDefaultLegacy(require$$1);
var symbols__default = /*#__PURE__*/_interopDefaultLegacy(symbols);
var colors__default = /*#__PURE__*/_interopDefaultLegacy(colors);
var flatCache__default = /*#__PURE__*/_interopDefaultLegacy(flatCache);
var glob__default = /*#__PURE__*/_interopDefaultLegacy(glob);

// Events:
//
// created    { name, filename }
// filtered   { filenames, direction, name, levels }
// loaded     { filenames, migrations }
// migration  { type, message, ok, count }
// up         { name }
// down       { name }
// active     [migration_name, direction];
// revert     { names }
const EVENTS = ['created', 'filtered', 'loaded', 'migration', 'up', 'down', 'active', 'revert'];

const _default = ({ name, up, down, description }) => `
const name = '${name}';
const description = '${description || ''}';

async function up(conn, cb) {
  ${up || ''}
}

async function down(conn, cb) {
  ${down || ''}
}

module.exports = { name, description, up, down };
`;

var defaultTemplates = {
  default: _default
};

var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};

var fs$i = {};

var universalify$1 = {};

universalify$1.fromCallback = function (fn) {
  return Object.defineProperty(function (...args) {
    if (typeof args[args.length - 1] === 'function') fn.apply(this, args);
    else {
      return new Promise((resolve, reject) => {
        fn.call(
          this,
          ...args,
          (err, res) => (err != null) ?
            reject(err) : resolve(res)
        );
      })
    }
  }, 'name', { value: fn.name })
};

universalify$1.fromPromise = function (fn) {
  return Object.defineProperty(function (...args) {
    const cb = args[args.length - 1];
    if (typeof cb !== 'function') return fn.apply(this, args)
    else fn.apply(this, args.slice(0, -1)).then(r => cb(null, r), cb);
  }, 'name', { value: fn.name })
};

var constants = require$$0__default['default'];

var origCwd = process.cwd;
var cwd = null;

var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform;

process.cwd = function() {
  if (!cwd) cwd = origCwd.call(process);
  return cwd
};
try {
  process.cwd();
} catch (er) {}

// This check is needed until node.js 12 is required
if (typeof process.chdir === 'function') {
  var chdir = process.chdir;
  process.chdir = function (d) {
    cwd = null;
    chdir.call(process, d);
  };
  if (Object.setPrototypeOf) Object.setPrototypeOf(process.chdir, chdir);
}

var polyfills$1 = patch$1;

function patch$1 (fs) {
  // (re-)implement some things that are known busted or missing.

  // lchmod, broken prior to 0.6.2
  // back-port the fix here.
  if (constants.hasOwnProperty('O_SYMLINK') &&
      process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
    patchLchmod(fs);
  }

  // lutimes implementation, or no-op
  if (!fs.lutimes) {
    patchLutimes(fs);
  }

  // https://github.com/isaacs/node-graceful-fs/issues/4
  // Chown should not fail on einval or eperm if non-root.
  // It should not fail on enosys ever, as this just indicates
  // that a fs doesn't support the intended operation.
  fs.chown = chownFix(fs.chown);
  fs.fchown = chownFix(fs.fchown);
  fs.lchown = chownFix(fs.lchown);

  fs.chmod = chmodFix(fs.chmod);
  fs.fchmod = chmodFix(fs.fchmod);
  fs.lchmod = chmodFix(fs.lchmod);

  fs.chownSync = chownFixSync(fs.chownSync);
  fs.fchownSync = chownFixSync(fs.fchownSync);
  fs.lchownSync = chownFixSync(fs.lchownSync);

  fs.chmodSync = chmodFixSync(fs.chmodSync);
  fs.fchmodSync = chmodFixSync(fs.fchmodSync);
  fs.lchmodSync = chmodFixSync(fs.lchmodSync);

  fs.stat = statFix(fs.stat);
  fs.fstat = statFix(fs.fstat);
  fs.lstat = statFix(fs.lstat);

  fs.statSync = statFixSync(fs.statSync);
  fs.fstatSync = statFixSync(fs.fstatSync);
  fs.lstatSync = statFixSync(fs.lstatSync);

  // if lchmod/lchown do not exist, then make them no-ops
  if (!fs.lchmod) {
    fs.lchmod = function (path, mode, cb) {
      if (cb) process.nextTick(cb);
    };
    fs.lchmodSync = function () {};
  }
  if (!fs.lchown) {
    fs.lchown = function (path, uid, gid, cb) {
      if (cb) process.nextTick(cb);
    };
    fs.lchownSync = function () {};
  }

  // on Windows, A/V software can lock the directory, causing this
  // to fail with an EACCES or EPERM if the directory contains newly
  // created files. Try again on failure, for up to 60 seconds.
  // Set the timeout this long because some Windows Anti-Virus, such as Parity
  // bit9, may lock files for up to a minute, causing npm package install
  // failures. Also, take care to yield the scheduler. Windows scheduling gives
  // CPU to a busy looping process, which can cause the program causing the lock
  // contention to be starved of CPU by node, so the contention doesn't resolve.
if (platform === "win32") { fs.rename = (function (fs$rename) { return function (from, to, cb) { var start = Date.now(); var backoff = 0; fs$rename(from, to, function CB (er) { if (er && (er.code === "EACCES" || er.code === "EPERM") && Date.now() - start < 60000) { setTimeout(function() { fs.stat(to, function (stater, st) { if (stater && stater.code === "ENOENT") fs$rename(from, to, CB); else cb(er); }); }, backoff); if (backoff < 100) backoff += 10; return; } if (cb) cb(er); }); }})(fs.rename); } // if read() returns EAGAIN, then just try it again. fs.read = (function (fs$read) { function read (fd, buffer, offset, length, position, callback_) { var callback; if (callback_ && typeof callback_ === 'function') { var eagCounter = 0; callback = function (er, _, __) { if (er && er.code === 'EAGAIN' && eagCounter < 10) { eagCounter ++; return fs$read.call(fs, fd, buffer, offset, length, position, callback) } callback_.apply(this, arguments); }; } return fs$read.call(fs, fd, buffer, offset, length, position, callback) } // This ensures `util.promisify` works as it does for native `fs.read`. if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read); return read })(fs.read); fs.readSync = (function (fs$readSync) { return function (fd, buffer, offset, length, position) { var eagCounter = 0; while (true) { try { return fs$readSync.call(fs, fd, buffer, offset, length, position) } catch (er) { if (er.code === 'EAGAIN' && eagCounter < 10) { eagCounter ++; continue } throw er } } }})(fs.readSync); function patchLchmod (fs) { fs.lchmod = function (path, mode, callback) { fs.open( path , constants.O_WRONLY | constants.O_SYMLINK , mode , function (err, fd) { if (err) { if (callback) callback(err); return } // prefer to return the chmod error, if one occurs, // but still try to close, and report closing errors if they occur. fs.fchmod(fd, mode, function (err) { fs.close(fd, function(err2) { if (callback) callback(err || err2); }); }); }); }; fs.lchmodSync = function (path, mode) { var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode); // prefer to return the chmod error, if one occurs, // but still try to close, and report closing errors if they occur. 
var threw = true; var ret; try { ret = fs.fchmodSync(fd, mode); threw = false; } finally { if (threw) { try { fs.closeSync(fd); } catch (er) {} } else { fs.closeSync(fd); } } return ret }; } function patchLutimes (fs) { if (constants.hasOwnProperty("O_SYMLINK")) { fs.lutimes = function (path, at, mt, cb) { fs.open(path, constants.O_SYMLINK, function (er, fd) { if (er) { if (cb) cb(er); return } fs.futimes(fd, at, mt, function (er) { fs.close(fd, function (er2) { if (cb) cb(er || er2); }); }); }); }; fs.lutimesSync = function (path, at, mt) { var fd = fs.openSync(path, constants.O_SYMLINK); var ret; var threw = true; try { ret = fs.futimesSync(fd, at, mt); threw = false; } finally { if (threw) { try { fs.closeSync(fd); } catch (er) {} } else { fs.closeSync(fd); } } return ret }; } else { fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb); }; fs.lutimesSync = function () {}; } } function chmodFix (orig) { if (!orig) return orig return function (target, mode, cb) { return orig.call(fs, target, mode, function (er) { if (chownErOk(er)) er = null; if (cb) cb.apply(this, arguments); }) } } function chmodFixSync (orig) { if (!orig) return orig return function (target, mode) { try { return orig.call(fs, target, mode) } catch (er) { if (!chownErOk(er)) throw er } } } function chownFix (orig) { if (!orig) return orig return function (target, uid, gid, cb) { return orig.call(fs, target, uid, gid, function (er) { if (chownErOk(er)) er = null; if (cb) cb.apply(this, arguments); }) } } function chownFixSync (orig) { if (!orig) return orig return function (target, uid, gid) { try { return orig.call(fs, target, uid, gid) } catch (er) { if (!chownErOk(er)) throw er } } } function statFix (orig) { if (!orig) return orig // Older versions of Node erroneously returned signed integers for // uid + gid. return function (target, options, cb) { if (typeof options === 'function') { cb = options; options = null; } function callback (er, stats) { if (stats) { if (stats.uid < 0) stats.uid += 0x100000000; if (stats.gid < 0) stats.gid += 0x100000000; } if (cb) cb.apply(this, arguments); } return options ? orig.call(fs, target, options, callback) : orig.call(fs, target, callback) } } function statFixSync (orig) { if (!orig) return orig // Older versions of Node erroneously returned signed integers for // uid + gid. return function (target, options) { var stats = options ? orig.call(fs, target, options) : orig.call(fs, target); if (stats.uid < 0) stats.uid += 0x100000000; if (stats.gid < 0) stats.gid += 0x100000000; return stats; } } // ENOSYS means that the fs doesn't support the op. Just ignore // that, because it doesn't matter. // // if there's no getuid, or if getuid() is something other // than 0, and the error is EINVAL or EPERM, then just ignore // it. // // This specific case is a silent failure in cp, install, tar, // and most other unix tools that manage permissions. // // When running as root, or if other types of errors are // encountered, then it's strict. 
function chownErOk (er) { if (!er) return true if (er.code === "ENOSYS") return true var nonroot = !process.getuid || process.getuid() !== 0; if (nonroot) { if (er.code === "EINVAL" || er.code === "EPERM") return true } return false } } var Stream = require$$0__default$1['default'].Stream; var legacyStreams = legacy$1; function legacy$1 (fs) { return { ReadStream: ReadStream, WriteStream: WriteStream } function ReadStream (path, options) { if (!(this instanceof ReadStream)) return new ReadStream(path, options); Stream.call(this); var self = this; this.path = path; this.fd = null; this.readable = true; this.paused = false; this.flags = 'r'; this.mode = 438; /*=0666*/ this.bufferSize = 64 * 1024; options = options || {}; // Mixin options into this var keys = Object.keys(options); for (var index = 0, length = keys.length; index < length; index++) { var key = keys[index]; this[key] = options[key]; } if (this.encoding) this.setEncoding(this.encoding); if (this.start !== undefined) { if ('number' !== typeof this.start) { throw TypeError('start must be a Number'); } if (this.end === undefined) { this.end = Infinity; } else if ('number' !== typeof this.end) { throw TypeError('end must be a Number'); } if (this.start > this.end) { throw new Error('start must be <= end'); } this.pos = this.start; } if (this.fd !== null) { process.nextTick(function() { self._read(); }); return; } fs.open(this.path, this.flags, this.mode, function (err, fd) { if (err) { self.emit('error', err); self.readable = false; return; } self.fd = fd; self.emit('open', fd); self._read(); }); } function WriteStream (path, options) { if (!(this instanceof WriteStream)) return new WriteStream(path, options); Stream.call(this); this.path = path; this.fd = null; this.writable = true; this.flags = 'w'; this.encoding = 'binary'; this.mode = 438; /*=0666*/ this.bytesWritten = 0; options = options || {}; // Mixin options into this var keys = Object.keys(options); for (var index = 0, length = keys.length; index < length; index++) { var key = keys[index]; this[key] = options[key]; } if (this.start !== undefined) { if ('number' !== typeof this.start) { throw TypeError('start must be a Number'); } if (this.start < 0) { throw new Error('start must be >= zero'); } this.pos = this.start; } this.busy = false; this._queue = []; if (this.fd === null) { this._open = fs.open; this._queue.push([this._open, this.path, this.flags, this.mode, undefined]); this.flush(); } } } var clone_1 = clone$1; var getPrototypeOf = Object.getPrototypeOf || function (obj) { return obj.__proto__ }; function clone$1 (obj) { if (obj === null || typeof obj !== 'object') return obj if (obj instanceof Object) var copy = { __proto__: getPrototypeOf(obj) }; else var copy = Object.create(null); Object.getOwnPropertyNames(obj).forEach(function (key) { Object.defineProperty(copy, key, Object.getOwnPropertyDescriptor(obj, key)); }); return copy } var fs$h = require$$0__default$2['default']; var polyfills = polyfills$1; var legacy = legacyStreams; var clone = clone_1; var util$1 = require$$4__default['default']; /* istanbul ignore next - node 0.x polyfill */ var gracefulQueue; var previousSymbol; /* istanbul ignore else - node 0.x polyfill */ if (typeof Symbol === 'function' && typeof Symbol.for === 'function') { gracefulQueue = Symbol.for('graceful-fs.queue'); // This is used in testing by future versions previousSymbol = Symbol.for('graceful-fs.previous'); } else { gracefulQueue = '___graceful-fs.queue'; previousSymbol = '___graceful-fs.previous'; } function noop () {} function 
publishQueue(context, queue) { Object.defineProperty(context, gracefulQueue, { get: function() { return queue } }); } var debug = noop; if (util$1.debuglog) debug = util$1.debuglog('gfs4'); else if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) debug = function() { var m = util$1.format.apply(util$1, arguments); m = 'GFS4: ' + m.split(/\n/).join('\nGFS4: '); console.error(m); }; // Once time initialization if (!fs$h[gracefulQueue]) { // This queue can be shared by multiple loaded instances var queue = commonjsGlobal[gracefulQueue] || []; publishQueue(fs$h, queue); // Patch fs.close/closeSync to shared queue version, because we need // to retry() whenever a close happens *anywhere* in the program. // This is essential when multiple graceful-fs instances are // in play at the same time. fs$h.close = (function (fs$close) { function close (fd, cb) { return fs$close.call(fs$h, fd, function (err) { // This function uses the graceful-fs shared queue if (!err) { resetQueue(); } if (typeof cb === 'function') cb.apply(this, arguments); }) } Object.defineProperty(close, previousSymbol, { value: fs$close }); return close })(fs$h.close); fs$h.closeSync = (function (fs$closeSync) { function closeSync (fd) { // This function uses the graceful-fs shared queue fs$closeSync.apply(fs$h, arguments); resetQueue(); } Object.defineProperty(closeSync, previousSymbol, { value: fs$closeSync }); return closeSync })(fs$h.closeSync); if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) { process.on('exit', function() { debug(fs$h[gracefulQueue]); require$$5__default['default'].equal(fs$h[gracefulQueue].length, 0); }); } } if (!commonjsGlobal[gracefulQueue]) { publishQueue(commonjsGlobal, fs$h[gracefulQueue]); } var gracefulFs = patch(clone(fs$h)); if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs$h.__patched) { gracefulFs = patch(fs$h); fs$h.__patched = true; } function patch (fs) { // Everything that references the open() function needs to be in here polyfills(fs); fs.gracefulify = patch; fs.createReadStream = createReadStream; fs.createWriteStream = createWriteStream; var fs$readFile = fs.readFile; fs.readFile = readFile; function readFile (path, options, cb) { if (typeof options === 'function') cb = options, options = null; return go$readFile(path, options, cb) function go$readFile (path, options, cb, startTime) { return fs$readFile(path, options, function (err) { if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) enqueue([go$readFile, [path, options, cb], err, startTime || Date.now(), Date.now()]); else { if (typeof cb === 'function') cb.apply(this, arguments); } }) } } var fs$writeFile = fs.writeFile; fs.writeFile = writeFile; function writeFile (path, data, options, cb) { if (typeof options === 'function') cb = options, options = null; return go$writeFile(path, data, options, cb) function go$writeFile (path, data, options, cb, startTime) { return fs$writeFile(path, data, options, function (err) { if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) enqueue([go$writeFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()]); else { if (typeof cb === 'function') cb.apply(this, arguments); } }) } } var fs$appendFile = fs.appendFile; if (fs$appendFile) fs.appendFile = appendFile; function appendFile (path, data, options, cb) { if (typeof options === 'function') cb = options, options = null; return go$appendFile(path, data, options, cb) function go$appendFile (path, data, options, cb, startTime) { return fs$appendFile(path, data, options, function (err) { if (err && (err.code 
=== 'EMFILE' || err.code === 'ENFILE')) enqueue([go$appendFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()]); else { if (typeof cb === 'function') cb.apply(this, arguments); } }) } } var fs$copyFile = fs.copyFile; if (fs$copyFile) fs.copyFile = copyFile; function copyFile (src, dest, flags, cb) { if (typeof flags === 'function') { cb = flags; flags = 0; } return go$copyFile(src, dest, flags, cb) function go$copyFile (src, dest, flags, cb, startTime) { return fs$copyFile(src, dest, flags, function (err) { if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) enqueue([go$copyFile, [src, dest, flags, cb], err, startTime || Date.now(), Date.now()]); else { if (typeof cb === 'function') cb.apply(this, arguments); } }) } } var fs$readdir = fs.readdir; fs.readdir = readdir; function readdir (path, options, cb) { if (typeof options === 'function') cb = options, options = null; return go$readdir(path, options, cb) function go$readdir (path, options, cb, startTime) { return fs$readdir(path, options, function (err, files) { if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) enqueue([go$readdir, [path, options, cb], err, startTime || Date.now(), Date.now()]); else { if (files && files.sort) files.sort(); if (typeof cb === 'function') cb.call(this, err, files); } }) } } if (process.version.substr(0, 4) === 'v0.8') { var legStreams = legacy(fs); ReadStream = legStreams.ReadStream; WriteStream = legStreams.WriteStream; } var fs$ReadStream = fs.ReadStream; if (fs$ReadStream) { ReadStream.prototype = Object.create(fs$ReadStream.prototype); ReadStream.prototype.open = ReadStream$open; } var fs$WriteStream = fs.WriteStream; if (fs$WriteStream) { WriteStream.prototype = Object.create(fs$WriteStream.prototype); WriteStream.prototype.open = WriteStream$open; } Object.defineProperty(fs, 'ReadStream', { get: function () { return ReadStream }, set: function (val) { ReadStream = val; }, enumerable: true, configurable: true }); Object.defineProperty(fs, 'WriteStream', { get: function () { return WriteStream }, set: function (val) { WriteStream = val; }, enumerable: true, configurable: true }); // legacy names var FileReadStream = ReadStream; Object.defineProperty(fs, 'FileReadStream', { get: function () { return FileReadStream }, set: function (val) { FileReadStream = val; }, enumerable: true, configurable: true }); var FileWriteStream = WriteStream; Object.defineProperty(fs, 'FileWriteStream', { get: function () { return FileWriteStream }, set: function (val) { FileWriteStream = val; }, enumerable: true, configurable: true }); function ReadStream (path, options) { if (this instanceof ReadStream) return fs$ReadStream.apply(this, arguments), this else return ReadStream.apply(Object.create(ReadStream.prototype), arguments) } function ReadStream$open () { var that = this; open(that.path, that.flags, that.mode, function (err, fd) { if (err) { if (that.autoClose) that.destroy(); that.emit('error', err); } else { that.fd = fd; that.emit('open', fd); that.read(); } }); } function WriteStream (path, options) { if (this instanceof WriteStream) return fs$WriteStream.apply(this, arguments), this else return WriteStream.apply(Object.create(WriteStream.prototype), arguments) } function WriteStream$open () { var that = this; open(that.path, that.flags, that.mode, function (err, fd) { if (err) { that.destroy(); that.emit('error', err); } else { that.fd = fd; that.emit('open', fd); } }); } function createReadStream (path, options) { return new fs.ReadStream(path, options) } function 
createWriteStream (path, options) { return new fs.WriteStream(path, options) } var fs$open = fs.open; fs.open = open; function open (path, flags, mode, cb) { if (typeof mode === 'function') cb = mode, mode = null; return go$open(path, flags, mode, cb) function go$open (path, flags, mode, cb, startTime) { return fs$open(path, flags, mode, function (err, fd) { if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) enqueue([go$open, [path, flags, mode, cb], err, startTime || Date.now(), Date.now()]); else { if (typeof cb === 'function') cb.apply(this, arguments); } }) } } return fs } function enqueue (elem) { debug('ENQUEUE', elem[0].name, elem[1]); fs$h[gracefulQueue].push(elem); retry(); } // keep track of the timeout between retry() calls var retryTimer; // reset the startTime and lastTime to now // this resets the start of the 60 second overall timeout as well as the // delay between attempts so that we'll retry these jobs sooner function resetQueue () { var now = Date.now(); for (var i = 0; i < fs$h[gracefulQueue].length; ++i) { // entries that are only a length of 2 are from an older version, don't // bother modifying those since they'll be retried anyway. if (fs$h[gracefulQueue][i].length > 2) { fs$h[gracefulQueue][i][3] = now; // startTime fs$h[gracefulQueue][i][4] = now; // lastTime } } // call retry to make sure we're actively processing the queue retry(); } function retry () { // clear the timer and remove it to help prevent unintended concurrency clearTimeout(retryTimer); retryTimer = undefined; if (fs$h[gracefulQueue].length === 0) return var elem = fs$h[gracefulQueue].shift(); var fn = elem[0]; var args = elem[1]; // these items may be unset if they were added by an older graceful-fs var err = elem[2]; var startTime = elem[3]; var lastTime = elem[4]; // if we don't have a startTime we have no way of knowing if we've waited // long enough, so go ahead and retry this item now if (startTime === undefined) { debug('RETRY', fn.name, args); fn.apply(null, args); } else if (Date.now() - startTime >= 60000) { // it's been more than 60 seconds total, bail now debug('TIMEOUT', fn.name, args); var cb = args.pop(); if (typeof cb === 'function') cb.call(null, err); } else { // the amount of time between the last attempt and right now var sinceAttempt = Date.now() - lastTime; // the amount of time between when we first tried, and when we last tried // rounded up to at least 1 var sinceStart = Math.max(lastTime - startTime, 1); // backoff. 
wait longer than the total time we've been retrying, but only // up to a maximum of 100ms var desiredDelay = Math.min(sinceStart * 1.2, 100); // it's been long enough since the last retry, do it again if (sinceAttempt >= desiredDelay) { debug('RETRY', fn.name, args); fn.apply(null, args.concat([startTime])); } else { // if we can't do this job yet, push it to the end of the queue // and let the next iteration check again fs$h[gracefulQueue].push(elem); } } // schedule our next run if one isn't already scheduled if (retryTimer === undefined) { retryTimer = setTimeout(retry, 0); } } (function (exports) { // This is adapted from https://github.com/normalize/mz // Copyright (c) 2014-2016 Jonathan Ong me@jongleberry.com and Contributors const u = universalify$1.fromCallback; const fs = gracefulFs; const api = [ 'access', 'appendFile', 'chmod', 'chown', 'close', 'copyFile', 'fchmod', 'fchown', 'fdatasync', 'fstat', 'fsync', 'ftruncate', 'futimes', 'lchmod', 'lchown', 'link', 'lstat', 'mkdir', 'mkdtemp', 'open', 'opendir', 'readdir', 'readFile', 'readlink', 'realpath', 'rename', 'rm', 'rmdir', 'stat', 'symlink', 'truncate', 'unlink', 'utimes', 'writeFile' ].filter(key => { // Some commands are not available on some systems. Ex: // fs.opendir was added in Node.js v12.12.0 // fs.rm was added in Node.js v14.14.0 // fs.lchown is not available on at least some Linux return typeof fs[key] === 'function' }); // Export cloned fs: Object.assign(exports, fs); // Universalify async methods: api.forEach(method => { exports[method] = u(fs[method]); }); exports.realpath.native = u(fs.realpath.native); // We differ from mz/fs in that we still ship the old, broken, fs.exists() // since we are a drop-in replacement for the native module exports.exists = function (filename, callback) { if (typeof callback === 'function') { return fs.exists(filename, callback) } return new Promise(resolve => { return fs.exists(filename, resolve) }) }; // fs.read(), fs.write(), & fs.writev() need special treatment due to multiple callback args exports.read = function (fd, buffer, offset, length, position, callback) { if (typeof callback === 'function') { return fs.read(fd, buffer, offset, length, position, callback) } return new Promise((resolve, reject) => { fs.read(fd, buffer, offset, length, position, (err, bytesRead, buffer) => { if (err) return reject(err) resolve({ bytesRead, buffer }); }); }) }; // Function signature can be // fs.write(fd, buffer[, offset[, length[, position]]], callback) // OR // fs.write(fd, string[, position[, encoding]], callback) // We need to handle both cases, so we use ...args exports.write = function (fd, buffer, ...args) { if (typeof args[args.length - 1] === 'function') { return fs.write(fd, buffer, ...args) } return new Promise((resolve, reject) => { fs.write(fd, buffer, ...args, (err, bytesWritten, buffer) => { if (err) return reject(err) resolve({ bytesWritten, buffer }); }); }) }; // fs.writev only available in Node v12.9.0+ if (typeof fs.writev === 'function') { // Function signature is // s.writev(fd, buffers[, position], callback) // We need to handle the optional arg, so we use ...args exports.writev = function (fd, buffers, ...args) { if (typeof args[args.length - 1] === 'function') { return fs.writev(fd, buffers, ...args) } return new Promise((resolve, reject) => { fs.writev(fd, buffers, ...args, (err, bytesWritten, buffers) => { if (err) return reject(err) resolve({ bytesWritten, buffers }); }); }) }; } }(fs$i)); var makeDir$1 = {}; var utils$1 = {}; const path$c = 
require$$1__default['default']; // https://github.com/nodejs/node/issues/8987 // https://github.com/libuv/libuv/pull/1088 utils$1.checkPath = function checkPath (pth) { if (process.platform === 'win32') { const pathHasInvalidWinCharacters = /[<>:"|?*]/.test(pth.replace(path$c.parse(pth).root, '')); if (pathHasInvalidWinCharacters) { const error = new Error(`Path contains invalid characters: ${pth}`); error.code = 'EINVAL'; throw error } } }; const fs$g = fs$i; const { checkPath } = utils$1; const getMode = options => { const defaults = { mode: 0o777 }; if (typeof options === 'number') return options return ({ ...defaults, ...options }).mode }; makeDir$1.makeDir = async (dir, options) => { checkPath(dir); return fs$g.mkdir(dir, { mode: getMode(options), recursive: true }) }; makeDir$1.makeDirSync = (dir, options) => { checkPath(dir); return fs$g.mkdirSync(dir, { mode: getMode(options), recursive: true }) }; const u$a = universalify$1.fromPromise; const { makeDir: _makeDir, makeDirSync } = makeDir$1; const makeDir = u$a(_makeDir); var mkdirs$2 = { mkdirs: makeDir, mkdirsSync: makeDirSync, // alias mkdirp: makeDir, mkdirpSync: makeDirSync, ensureDir: makeDir, ensureDirSync: makeDirSync }; const fs$f = gracefulFs; function utimesMillis$1 (path, atime, mtime, callback) { // if (!HAS_MILLIS_RES) return fs.utimes(path, atime, mtime, callback) fs$f.open(path, 'r+', (err, fd) => { if (err) return callback(err) fs$f.futimes(fd, atime, mtime, futimesErr => { fs$f.close(fd, closeErr => { if (callback) callback(futimesErr || closeErr); }); }); }); } function utimesMillisSync$1 (path, atime, mtime) { const fd = fs$f.openSync(path, 'r+'); fs$f.futimesSync(fd, atime, mtime); return fs$f.closeSync(fd) } var utimes = { utimesMillis: utimesMillis$1, utimesMillisSync: utimesMillisSync$1 }; const fs$e = fs$i; const path$b = require$$1__default['default']; const util = require$$4__default['default']; function getStats$2 (src, dest, opts) { const statFunc = opts.dereference ? (file) => fs$e.stat(file, { bigint: true }) : (file) => fs$e.lstat(file, { bigint: true }); return Promise.all([ statFunc(src), statFunc(dest).catch(err => { if (err.code === 'ENOENT') return null throw err }) ]).then(([srcStat, destStat]) => ({ srcStat, destStat })) } function getStatsSync (src, dest, opts) { let destStat; const statFunc = opts.dereference ? 
(file) => fs$e.statSync(file, { bigint: true }) : (file) => fs$e.lstatSync(file, { bigint: true }); const srcStat = statFunc(src); try { destStat = statFunc(dest); } catch (err) { if (err.code === 'ENOENT') return { srcStat, destStat: null } throw err } return { srcStat, destStat } } function checkPaths (src, dest, funcName, opts, cb) { util.callbackify(getStats$2)(src, dest, opts, (err, stats) => { if (err) return cb(err) const { srcStat, destStat } = stats; if (destStat) { if (areIdentical$2(srcStat, destStat)) { const srcBaseName = path$b.basename(src); const destBaseName = path$b.basename(dest); if (funcName === 'move' && srcBaseName !== destBaseName && srcBaseName.toLowerCase() === destBaseName.toLowerCase()) { return cb(null, { srcStat, destStat, isChangingCase: true }) } return cb(new Error('Source and destination must not be the same.')) } if (srcStat.isDirectory() && !destStat.isDirectory()) { return cb(new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`)) } if (!srcStat.isDirectory() && destStat.isDirectory()) { return cb(new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`)) } } if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { return cb(new Error(errMsg(src, dest, funcName))) } return cb(null, { srcStat, destStat }) }); } function checkPathsSync (src, dest, funcName, opts) { const { srcStat, destStat } = getStatsSync(src, dest, opts); if (destStat) { if (areIdentical$2(srcStat, destStat)) { const srcBaseName = path$b.basename(src); const destBaseName = path$b.basename(dest); if (funcName === 'move' && srcBaseName !== destBaseName && srcBaseName.toLowerCase() === destBaseName.toLowerCase()) { return { srcStat, destStat, isChangingCase: true } } throw new Error('Source and destination must not be the same.') } if (srcStat.isDirectory() && !destStat.isDirectory()) { throw new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`) } if (!srcStat.isDirectory() && destStat.isDirectory()) { throw new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`) } } if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { throw new Error(errMsg(src, dest, funcName)) } return { srcStat, destStat } } // recursively check if dest parent is a subdirectory of src. // It works for all file types including symlinks since it // checks the src and dest inodes. It starts from the deepest // parent and stops once it reaches the src parent or the root path. 
function checkParentPaths (src, srcStat, dest, funcName, cb) { const srcParent = path$b.resolve(path$b.dirname(src)); const destParent = path$b.resolve(path$b.dirname(dest)); if (destParent === srcParent || destParent === path$b.parse(destParent).root) return cb() fs$e.stat(destParent, { bigint: true }, (err, destStat) => { if (err) { if (err.code === 'ENOENT') return cb() return cb(err) } if (areIdentical$2(srcStat, destStat)) { return cb(new Error(errMsg(src, dest, funcName))) } return checkParentPaths(src, srcStat, destParent, funcName, cb) }); } function checkParentPathsSync (src, srcStat, dest, funcName) { const srcParent = path$b.resolve(path$b.dirname(src)); const destParent = path$b.resolve(path$b.dirname(dest)); if (destParent === srcParent || destParent === path$b.parse(destParent).root) return let destStat; try { destStat = fs$e.statSync(destParent, { bigint: true }); } catch (err) { if (err.code === 'ENOENT') return throw err } if (areIdentical$2(srcStat, destStat)) { throw new Error(errMsg(src, dest, funcName)) } return checkParentPathsSync(src, srcStat, destParent, funcName) } function areIdentical$2 (srcStat, destStat) { return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev } // return true if dest is a subdir of src, otherwise false. // It only checks the path strings. function isSrcSubdir (src, dest) { const srcArr = path$b.resolve(src).split(path$b.sep).filter(i => i); const destArr = path$b.resolve(dest).split(path$b.sep).filter(i => i); return srcArr.reduce((acc, cur, i) => acc && destArr[i] === cur, true) } function errMsg (src, dest, funcName) { return `Cannot ${funcName} '${src}' to a subdirectory of itself, '${dest}'.` } var stat$4 = { checkPaths, checkPathsSync, checkParentPaths, checkParentPathsSync, isSrcSubdir, areIdentical: areIdentical$2 }; const fs$d = gracefulFs; const path$a = require$$1__default['default']; const mkdirsSync$1 = mkdirs$2.mkdirsSync; const utimesMillisSync = utimes.utimesMillisSync; const stat$3 = stat$4; function copySync$2 (src, dest, opts) { if (typeof opts === 'function') { opts = { filter: opts }; } opts = opts || {}; opts.clobber = 'clobber' in opts ? !!opts.clobber : true; // default to true for now opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber; // overwrite falls back to clobber // Warn about using preserveTimestamps on 32-bit node if (opts.preserveTimestamps && process.arch === 'ia32') { console.warn(`fs-extra: Using the preserveTimestamps option in 32-bit node is not recommended;\n see https://github.com/jprichardson/node-fs-extra/issues/269`); } const { srcStat, destStat } = stat$3.checkPathsSync(src, dest, 'copy', opts); stat$3.checkParentPathsSync(src, srcStat, dest, 'copy'); return handleFilterAndCopy(destStat, src, dest, opts) } function handleFilterAndCopy (destStat, src, dest, opts) { if (opts.filter && !opts.filter(src, dest)) return const destParent = path$a.dirname(dest); if (!fs$d.existsSync(destParent)) mkdirsSync$1(destParent); return getStats$1(destStat, src, dest, opts) } function startCopy$1 (destStat, src, dest, opts) { if (opts.filter && !opts.filter(src, dest)) return return getStats$1(destStat, src, dest, opts) } function getStats$1 (destStat, src, dest, opts) { const statSync = opts.dereference ? 
fs$d.statSync : fs$d.lstatSync; const srcStat = statSync(src); if (srcStat.isDirectory()) return onDir$1(srcStat, destStat, src, dest, opts) else if (srcStat.isFile() || srcStat.isCharacterDevice() || srcStat.isBlockDevice()) return onFile$1(srcStat, destStat, src, dest, opts) else if (srcStat.isSymbolicLink()) return onLink$1(destStat, src, dest, opts) else if (srcStat.isSocket()) throw new Error(`Cannot copy a socket file: ${src}`) else if (srcStat.isFIFO()) throw new Error(`Cannot copy a FIFO pipe: ${src}`) throw new Error(`Unknown file: ${src}`) } function onFile$1 (srcStat, destStat, src, dest, opts) { if (!destStat) return copyFile$1(srcStat, src, dest, opts) return mayCopyFile$1(srcStat, src, dest, opts) } function mayCopyFile$1 (srcStat, src, dest, opts) { if (opts.overwrite) { fs$d.unlinkSync(dest); return copyFile$1(srcStat, src, dest, opts) } else if (opts.errorOnExist) { throw new Error(`'${dest}' already exists`) } } function copyFile$1 (srcStat, src, dest, opts) { fs$d.copyFileSync(src, dest); if (opts.preserveTimestamps) handleTimestamps(srcStat.mode, src, dest); return setDestMode$1(dest, srcStat.mode) } function handleTimestamps (srcMode, src, dest) { // Make sure the file is writable before setting the timestamp // otherwise open fails with EPERM when invoked with 'r+' // (through utimes call) if (fileIsNotWritable$1(srcMode)) makeFileWritable$1(dest, srcMode); return setDestTimestamps$1(src, dest) } function fileIsNotWritable$1 (srcMode) { return (srcMode & 0o200) === 0 } function makeFileWritable$1 (dest, srcMode) { return setDestMode$1(dest, srcMode | 0o200) } function setDestMode$1 (dest, srcMode) { return fs$d.chmodSync(dest, srcMode) } function setDestTimestamps$1 (src, dest) { // The initial srcStat.atime cannot be trusted // because it is modified by the read(2) system call // (See https://nodejs.org/api/fs.html#fs_stat_time_values) const updatedSrcStat = fs$d.statSync(src); return utimesMillisSync(dest, updatedSrcStat.atime, updatedSrcStat.mtime) } function onDir$1 (srcStat, destStat, src, dest, opts) { if (!destStat) return mkDirAndCopy$1(srcStat.mode, src, dest, opts) return copyDir$1(src, dest, opts) } function mkDirAndCopy$1 (srcMode, src, dest, opts) { fs$d.mkdirSync(dest); copyDir$1(src, dest, opts); return setDestMode$1(dest, srcMode) } function copyDir$1 (src, dest, opts) { fs$d.readdirSync(src).forEach(item => copyDirItem$1(item, src, dest, opts)); } function copyDirItem$1 (item, src, dest, opts) { const srcItem = path$a.join(src, item); const destItem = path$a.join(dest, item); const { destStat } = stat$3.checkPathsSync(srcItem, destItem, 'copy', opts); return startCopy$1(destStat, srcItem, destItem, opts) } function onLink$1 (destStat, src, dest, opts) { let resolvedSrc = fs$d.readlinkSync(src); if (opts.dereference) { resolvedSrc = path$a.resolve(process.cwd(), resolvedSrc); } if (!destStat) { return fs$d.symlinkSync(resolvedSrc, dest) } else { let resolvedDest; try { resolvedDest = fs$d.readlinkSync(dest); } catch (err) { // dest exists and is a regular file or directory, // Windows may throw UNKNOWN error. If dest already exists, // fs throws error anyway, so no need to guard against it here. 
if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs$d.symlinkSync(resolvedSrc, dest) throw err } if (opts.dereference) { resolvedDest = path$a.resolve(process.cwd(), resolvedDest); } if (stat$3.isSrcSubdir(resolvedSrc, resolvedDest)) { throw new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`) } // prevent copy if src is a subdir of dest since unlinking // dest in this case would result in removing src contents // and therefore a broken symlink would be created. if (fs$d.statSync(dest).isDirectory() && stat$3.isSrcSubdir(resolvedDest, resolvedSrc)) { throw new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`) } return copyLink$1(resolvedSrc, dest) } } function copyLink$1 (resolvedSrc, dest) { fs$d.unlinkSync(dest); return fs$d.symlinkSync(resolvedSrc, dest) } var copySync_1 = copySync$2; var copySync$1 = { copySync: copySync_1 }; const u$9 = universalify$1.fromPromise; const fs$c = fs$i; function pathExists$6 (path) { return fs$c.access(path).then(() => true).catch(() => false) } var pathExists_1 = { pathExists: u$9(pathExists$6), pathExistsSync: fs$c.existsSync }; const fs$b = gracefulFs; const path$9 = require$$1__default['default']; const mkdirs$1 = mkdirs$2.mkdirs; const pathExists$5 = pathExists_1.pathExists; const utimesMillis = utimes.utimesMillis; const stat$2 = stat$4; function copy$2 (src, dest, opts, cb) { if (typeof opts === 'function' && !cb) { cb = opts; opts = {}; } else if (typeof opts === 'function') { opts = { filter: opts }; } cb = cb || function () {}; opts = opts || {}; opts.clobber = 'clobber' in opts ? !!opts.clobber : true; // default to true for now opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber; // overwrite falls back to clobber // Warn about using preserveTimestamps on 32-bit node if (opts.preserveTimestamps && process.arch === 'ia32') { console.warn(`fs-extra: Using the preserveTimestamps option in 32-bit node is not recommended;\n see https://github.com/jprichardson/node-fs-extra/issues/269`); } stat$2.checkPaths(src, dest, 'copy', opts, (err, stats) => { if (err) return cb(err) const { srcStat, destStat } = stats; stat$2.checkParentPaths(src, srcStat, dest, 'copy', err => { if (err) return cb(err) if (opts.filter) return handleFilter(checkParentDir, destStat, src, dest, opts, cb) return checkParentDir(destStat, src, dest, opts, cb) }); }); } function checkParentDir (destStat, src, dest, opts, cb) { const destParent = path$9.dirname(dest); pathExists$5(destParent, (err, dirExists) => { if (err) return cb(err) if (dirExists) return getStats(destStat, src, dest, opts, cb) mkdirs$1(destParent, err => { if (err) return cb(err) return getStats(destStat, src, dest, opts, cb) }); }); } function handleFilter (onInclude, destStat, src, dest, opts, cb) { Promise.resolve(opts.filter(src, dest)).then(include => { if (include) return onInclude(destStat, src, dest, opts, cb) return cb() }, error => cb(error)); } function startCopy (destStat, src, dest, opts, cb) { if (opts.filter) return handleFilter(getStats, destStat, src, dest, opts, cb) return getStats(destStat, src, dest, opts, cb) } function getStats (destStat, src, dest, opts, cb) { const stat = opts.dereference ? 
fs$b.stat : fs$b.lstat; stat(src, (err, srcStat) => { if (err) return cb(err) if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts, cb) else if (srcStat.isFile() || srcStat.isCharacterDevice() || srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts, cb) else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts, cb) else if (srcStat.isSocket()) return cb(new Error(`Cannot copy a socket file: ${src}`)) else if (srcStat.isFIFO()) return cb(new Error(`Cannot copy a FIFO pipe: ${src}`)) return cb(new Error(`Unknown file: ${src}`)) }); } function onFile (srcStat, destStat, src, dest, opts, cb) { if (!destStat) return copyFile(srcStat, src, dest, opts, cb) return mayCopyFile(srcStat, src, dest, opts, cb) } function mayCopyFile (srcStat, src, dest, opts, cb) { if (opts.overwrite) { fs$b.unlink(dest, err => { if (err) return cb(err) return copyFile(srcStat, src, dest, opts, cb) }); } else if (opts.errorOnExist) { return cb(new Error(`'${dest}' already exists`)) } else return cb() } function copyFile (srcStat, src, dest, opts, cb) { fs$b.copyFile(src, dest, err => { if (err) return cb(err) if (opts.preserveTimestamps) return handleTimestampsAndMode(srcStat.mode, src, dest, cb) return setDestMode(dest, srcStat.mode, cb) }); } function handleTimestampsAndMode (srcMode, src, dest, cb) { // Make sure the file is writable before setting the timestamp // otherwise open fails with EPERM when invoked with 'r+' // (through utimes call) if (fileIsNotWritable(srcMode)) { return makeFileWritable(dest, srcMode, err => { if (err) return cb(err) return setDestTimestampsAndMode(srcMode, src, dest, cb) }) } return setDestTimestampsAndMode(srcMode, src, dest, cb) } function fileIsNotWritable (srcMode) { return (srcMode & 0o200) === 0 } function makeFileWritable (dest, srcMode, cb) { return setDestMode(dest, srcMode | 0o200, cb) } function setDestTimestampsAndMode (srcMode, src, dest, cb) { setDestTimestamps(src, dest, err => { if (err) return cb(err) return setDestMode(dest, srcMode, cb) }); } function setDestMode (dest, srcMode, cb) { return fs$b.chmod(dest, srcMode, cb) } function setDestTimestamps (src, dest, cb) { // The initial srcStat.atime cannot be trusted // because it is modified by the read(2) system call // (See https://nodejs.org/api/fs.html#fs_stat_time_values) fs$b.stat(src, (err, updatedSrcStat) => { if (err) return cb(err) return utimesMillis(dest, updatedSrcStat.atime, updatedSrcStat.mtime, cb) }); } function onDir (srcStat, destStat, src, dest, opts, cb) { if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts, cb) return copyDir(src, dest, opts, cb) } function mkDirAndCopy (srcMode, src, dest, opts, cb) { fs$b.mkdir(dest, err => { if (err) return cb(err) copyDir(src, dest, opts, err => { if (err) return cb(err) return setDestMode(dest, srcMode, cb) }); }); } function copyDir (src, dest, opts, cb) { fs$b.readdir(src, (err, items) => { if (err) return cb(err) return copyDirItems(items, src, dest, opts, cb) }); } function copyDirItems (items, src, dest, opts, cb) { const item = items.pop(); if (!item) return cb() return copyDirItem(items, item, src, dest, opts, cb) } function copyDirItem (items, item, src, dest, opts, cb) { const srcItem = path$9.join(src, item); const destItem = path$9.join(dest, item); stat$2.checkPaths(srcItem, destItem, 'copy', opts, (err, stats) => { if (err) return cb(err) const { destStat } = stats; startCopy(destStat, srcItem, destItem, opts, err => { if (err) return cb(err) return copyDirItems(items, src, 
dest, opts, cb) }); }); } function onLink (destStat, src, dest, opts, cb) { fs$b.readlink(src, (err, resolvedSrc) => { if (err) return cb(err) if (opts.dereference) { resolvedSrc = path$9.resolve(process.cwd(), resolvedSrc); } if (!destStat) { return fs$b.symlink(resolvedSrc, dest, cb) } else { fs$b.readlink(dest, (err, resolvedDest) => { if (err) { // dest exists and is a regular file or directory, // Windows may throw UNKNOWN error. If dest already exists, // fs throws error anyway, so no need to guard against it here. if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs$b.symlink(resolvedSrc, dest, cb) return cb(err) } if (opts.dereference) { resolvedDest = path$9.resolve(process.cwd(), resolvedDest); } if (stat$2.isSrcSubdir(resolvedSrc, resolvedDest)) { return cb(new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`)) } // do not copy if src is a subdir of dest since unlinking // dest in this case would result in removing src contents // and therefore a broken symlink would be created. if (destStat.isDirectory() && stat$2.isSrcSubdir(resolvedDest, resolvedSrc)) { return cb(new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`)) } return copyLink(resolvedSrc, dest, cb) }); } }); } function copyLink (resolvedSrc, dest, cb) { fs$b.unlink(dest, err => { if (err) return cb(err) return fs$b.symlink(resolvedSrc, dest, cb) }); } var copy_1 = copy$2; const u$8 = universalify$1.fromCallback; var copy$1 = { copy: u$8(copy_1) }; const fs$a = gracefulFs; const path$8 = require$$1__default['default']; const assert = require$$5__default['default']; const isWindows = (process.platform === 'win32'); function defaults (options) { const methods = [ 'unlink', 'chmod', 'stat', 'lstat', 'rmdir', 'readdir' ]; methods.forEach(m => { options[m] = options[m] || fs$a[m]; m = m + 'Sync'; options[m] = options[m] || fs$a[m]; }); options.maxBusyTries = options.maxBusyTries || 3; } function rimraf$1 (p, options, cb) { let busyTries = 0; if (typeof options === 'function') { cb = options; options = {}; } assert(p, 'rimraf: missing path'); assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string'); assert.strictEqual(typeof cb, 'function', 'rimraf: callback function required'); assert(options, 'rimraf: invalid options argument provided'); assert.strictEqual(typeof options, 'object', 'rimraf: options should be object'); defaults(options); rimraf_(p, options, function CB (er) { if (er) { if ((er.code === 'EBUSY' || er.code === 'ENOTEMPTY' || er.code === 'EPERM') && busyTries < options.maxBusyTries) { busyTries++; const time = busyTries * 100; // try again, with the same exact callback as this one. return setTimeout(() => rimraf_(p, options, CB), time) } // already gone if (er.code === 'ENOENT') er = null; } cb(er); }); } // Two possible strategies. // 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR // 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR // // Both result in an extra syscall when you guess wrong. However, there // are likely far more normal files in the world than directories. This // is based on the assumption that a the average number of files per // directory is >= 1. // // If anyone ever complains about this, then I guess the strategy could // be made configurable somehow. But until then, YAGNI. function rimraf_ (p, options, cb) { assert(p); assert(options); assert(typeof cb === 'function'); // sunos lets the root user unlink directories, which is... weird. 
// so we have to lstat here and make sure it's not a dir. options.lstat(p, (er, st) => { if (er && er.code === 'ENOENT') { return cb(null) } // Windows can EPERM on stat. Life is suffering. if (er && er.code === 'EPERM' && isWindows) { return fixWinEPERM(p, options, er, cb) } if (st && st.isDirectory()) { return rmdir(p, options, er, cb) } options.unlink(p, er => { if (er) { if (er.code === 'ENOENT') { return cb(null) } if (er.code === 'EPERM') { return (isWindows) ? fixWinEPERM(p, options, er, cb) : rmdir(p, options, er, cb) } if (er.code === 'EISDIR') { return rmdir(p, options, er, cb) } } return cb(er) }); }); } function fixWinEPERM (p, options, er, cb) { assert(p); assert(options); assert(typeof cb === 'function'); options.chmod(p, 0o666, er2 => { if (er2) { cb(er2.code === 'ENOENT' ? null : er); } else { options.stat(p, (er3, stats) => { if (er3) { cb(er3.code === 'ENOENT' ? null : er); } else if (stats.isDirectory()) { rmdir(p, options, er, cb); } else { options.unlink(p, cb); } }); } }); } function fixWinEPERMSync (p, options, er) { let stats; assert(p); assert(options); try { options.chmodSync(p, 0o666); } catch (er2) { if (er2.code === 'ENOENT') { return } else { throw er } } try { stats = options.statSync(p); } catch (er3) { if (er3.code === 'ENOENT') { return } else { throw er } } if (stats.isDirectory()) { rmdirSync(p, options, er); } else { options.unlinkSync(p); } } function rmdir (p, options, originalEr, cb) { assert(p); assert(options); assert(typeof cb === 'function'); // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) // if we guessed wrong, and it's not a directory, then // raise the original error. options.rmdir(p, er => { if (er && (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM')) { rmkids(p, options, cb); } else if (er && er.code === 'ENOTDIR') { cb(originalEr); } else { cb(er); } }); } function rmkids (p, options, cb) { assert(p); assert(options); assert(typeof cb === 'function'); options.readdir(p, (er, files) => { if (er) return cb(er) let n = files.length; let errState; if (n === 0) return options.rmdir(p, cb) files.forEach(f => { rimraf$1(path$8.join(p, f), options, er => { if (errState) { return } if (er) return cb(errState = er) if (--n === 0) { options.rmdir(p, cb); } }); }); }); } // this looks simpler, and is strictly *faster*, but will // tie up the JavaScript thread and fail on excessively // deep directory trees. function rimrafSync (p, options) { let st; options = options || {}; defaults(options); assert(p, 'rimraf: missing path'); assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string'); assert(options, 'rimraf: missing options'); assert.strictEqual(typeof options, 'object', 'rimraf: options should be object'); try { st = options.lstatSync(p); } catch (er) { if (er.code === 'ENOENT') { return } // Windows can EPERM on stat. Life is suffering. if (er.code === 'EPERM' && isWindows) { fixWinEPERMSync(p, options, er); } } try { // sunos lets the root user unlink directories, which is... weird. if (st && st.isDirectory()) { rmdirSync(p, options, null); } else { options.unlinkSync(p); } } catch (er) { if (er.code === 'ENOENT') { return } else if (er.code === 'EPERM') { return isWindows ? 
fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) } else if (er.code !== 'EISDIR') { throw er } rmdirSync(p, options, er); } } function rmdirSync (p, options, originalEr) { assert(p); assert(options); try { options.rmdirSync(p); } catch (er) { if (er.code === 'ENOTDIR') { throw originalEr } else if (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM') { rmkidsSync(p, options); } else if (er.code !== 'ENOENT') { throw er } } } function rmkidsSync (p, options) { assert(p); assert(options); options.readdirSync(p).forEach(f => rimrafSync(path$8.join(p, f), options)); if (isWindows) { // We only end up here once we got ENOTEMPTY at least once, and // at this point, we are guaranteed to have removed all the kids. // So, we know that it won't be ENOENT or ENOTDIR or anything else. // try really hard to delete stuff on windows, because it has a // PROFOUNDLY annoying habit of not closing handles promptly when // files are deleted, resulting in spurious ENOTEMPTY errors. const startTime = Date.now(); do { try { const ret = options.rmdirSync(p, options); return ret } catch {} } while (Date.now() - startTime < 500) // give up after 500ms } else { const ret = options.rmdirSync(p, options); return ret } } var rimraf_1 = rimraf$1; rimraf$1.sync = rimrafSync; const fs$9 = gracefulFs; const u$7 = universalify$1.fromCallback; const rimraf = rimraf_1; function remove$2 (path, callback) { // Node 14.14.0+ if (fs$9.rm) return fs$9.rm(path, { recursive: true, force: true }, callback) rimraf(path, callback); } function removeSync$1 (path) { // Node 14.14.0+ if (fs$9.rmSync) return fs$9.rmSync(path, { recursive: true, force: true }) rimraf.sync(path); } var remove_1 = { remove: u$7(remove$2), removeSync: removeSync$1 }; const u$6 = universalify$1.fromPromise; const fs$8 = fs$i; const path$7 = require$$1__default['default']; const mkdir$3 = mkdirs$2; const remove$1 = remove_1; const emptyDir = u$6(async function emptyDir (dir) { let items; try { items = await fs$8.readdir(dir); } catch { return mkdir$3.mkdirs(dir) } return Promise.all(items.map(item => remove$1.remove(path$7.join(dir, item)))) }); function emptyDirSync (dir) { let items; try { items = fs$8.readdirSync(dir); } catch { return mkdir$3.mkdirsSync(dir) } items.forEach(item => { item = path$7.join(dir, item); remove$1.removeSync(item); }); } var empty = { emptyDirSync, emptydirSync: emptyDirSync, emptyDir, emptydir: emptyDir }; const u$5 = universalify$1.fromCallback; const path$6 = require$$1__default['default']; const fs$7 = gracefulFs; const mkdir$2 = mkdirs$2; function createFile (file, callback) { function makeFile () { fs$7.writeFile(file, '', err => { if (err) return callback(err) callback(); }); } fs$7.stat(file, (err, stats) => { // eslint-disable-line handle-callback-err if (!err && stats.isFile()) return callback() const dir = path$6.dirname(file); fs$7.stat(dir, (err, stats) => { if (err) { // if the directory doesn't exist, make it if (err.code === 'ENOENT') { return mkdir$2.mkdirs(dir, err => { if (err) return callback(err) makeFile(); }) } return callback(err) } if (stats.isDirectory()) makeFile(); else { // parent is not a directory // This is just to cause an internal ENOTDIR error to be thrown fs$7.readdir(dir, err => { if (err) return callback(err) }); } }); }); } function createFileSync (file) { let stats; try { stats = fs$7.statSync(file); } catch {} if (stats && stats.isFile()) return const dir = path$6.dirname(file); try { if (!fs$7.statSync(dir).isDirectory()) { // parent is not a directory // This 
is just to cause an internal ENOTDIR error to be thrown fs$7.readdirSync(dir); } } catch (err) { // If the stat call above failed because the directory doesn't exist, create it if (err && err.code === 'ENOENT') mkdir$2.mkdirsSync(dir); else throw err } fs$7.writeFileSync(file, ''); } var file$1 = { createFile: u$5(createFile), createFileSync }; const u$4 = universalify$1.fromCallback; const path$5 = require$$1__default['default']; const fs$6 = gracefulFs; const mkdir$1 = mkdirs$2; const pathExists$4 = pathExists_1.pathExists; const { areIdentical: areIdentical$1 } = stat$4; function createLink (srcpath, dstpath, callback) { function makeLink (srcpath, dstpath) { fs$6.link(srcpath, dstpath, err => { if (err) return callback(err) callback(null); }); } fs$6.lstat(dstpath, (_, dstStat) => { fs$6.lstat(srcpath, (err, srcStat) => { if (err) { err.message = err.message.replace('lstat', 'ensureLink'); return callback(err) } if (dstStat && areIdentical$1(srcStat, dstStat)) return callback(null) const dir = path$5.dirname(dstpath); pathExists$4(dir, (err, dirExists) => { if (err) return callback(err) if (dirExists) return makeLink(srcpath, dstpath) mkdir$1.mkdirs(dir, err => { if (err) return callback(err) makeLink(srcpath, dstpath); }); }); }); }); } function createLinkSync (srcpath, dstpath) { let dstStat; try { dstStat = fs$6.lstatSync(dstpath); } catch {} try { const srcStat = fs$6.lstatSync(srcpath); if (dstStat && areIdentical$1(srcStat, dstStat)) return } catch (err) { err.message = err.message.replace('lstat', 'ensureLink'); throw err } const dir = path$5.dirname(dstpath); const dirExists = fs$6.existsSync(dir); if (dirExists) return fs$6.linkSync(srcpath, dstpath) mkdir$1.mkdirsSync(dir); return fs$6.linkSync(srcpath, dstpath) } var link$1 = { createLink: u$4(createLink), createLinkSync }; const path$4 = require$$1__default['default']; const fs$5 = gracefulFs; const pathExists$3 = pathExists_1.pathExists; /** * Function that returns two types of paths, one relative to symlink, and one * relative to the current working directory. Checks if path is absolute or * relative. If the path is relative, this function checks if the path is * relative to symlink or relative to current working directory. This is an * initiative to find a smarter `srcpath` to supply when building symlinks. * This allows you to determine which path to use out of one of three possible * types of source paths. The first is an absolute path. This is detected by * `path.isAbsolute()`. When an absolute path is provided, it is checked to * see if it exists. If it does it's used, if not an error is returned * (callback)/ thrown (sync). The other two options for `srcpath` are a * relative url. By default Node's `fs.symlink` works by creating a symlink * using `dstpath` and expects the `srcpath` to be relative to the newly * created symlink. If you provide a `srcpath` that does not exist on the file * system it results in a broken symlink. To minimize this, the function * checks to see if the 'relative to symlink' source file exists, and if it * does it will use it. If it does not, it checks if there's a file that * exists that is relative to the current working directory, if does its used. * This preserves the expectations of the original fs.symlink spec and adds * the ability to pass in `relative to current working direcotry` paths. 
*/ function symlinkPaths$1 (srcpath, dstpath, callback) { if (path$4.isAbsolute(srcpath)) { return fs$5.lstat(srcpath, (err) => { if (err) { err.message = err.message.replace('lstat', 'ensureSymlink'); return callback(err) } return callback(null, { toCwd: srcpath, toDst: srcpath }) }) } else { const dstdir = path$4.dirname(dstpath); const relativeToDst = path$4.join(dstdir, srcpath); return pathExists$3(relativeToDst, (err, exists) => { if (err) return callback(err) if (exists) { return callback(null, { toCwd: relativeToDst, toDst: srcpath }) } else { return fs$5.lstat(srcpath, (err) => { if (err) { err.message = err.message.replace('lstat', 'ensureSymlink'); return callback(err) } return callback(null, { toCwd: srcpath, toDst: path$4.relative(dstdir, srcpath) }) }) } }) } } function symlinkPathsSync$1 (srcpath, dstpath) { let exists; if (path$4.isAbsolute(srcpath)) { exists = fs$5.existsSync(srcpath); if (!exists) throw new Error('absolute srcpath does not exist') return { toCwd: srcpath, toDst: srcpath } } else { const dstdir = path$4.dirname(dstpath); const relativeToDst = path$4.join(dstdir, srcpath); exists = fs$5.existsSync(relativeToDst); if (exists) { return { toCwd: relativeToDst, toDst: srcpath } } else { exists = fs$5.existsSync(srcpath); if (!exists) throw new Error('relative srcpath does not exist') return { toCwd: srcpath, toDst: path$4.relative(dstdir, srcpath) } } } } var symlinkPaths_1 = { symlinkPaths: symlinkPaths$1, symlinkPathsSync: symlinkPathsSync$1 }; const fs$4 = gracefulFs; function symlinkType$1 (srcpath, type, callback) { callback = (typeof type === 'function') ? type : callback; type = (typeof type === 'function') ? false : type; if (type) return callback(null, type) fs$4.lstat(srcpath, (err, stats) => { if (err) return callback(null, 'file') type = (stats && stats.isDirectory()) ? 'dir' : 'file'; callback(null, type); }); } function symlinkTypeSync$1 (srcpath, type) { let stats; if (type) return type try { stats = fs$4.lstatSync(srcpath); } catch { return 'file' } return (stats && stats.isDirectory()) ? 'dir' : 'file' } var symlinkType_1 = { symlinkType: symlinkType$1, symlinkTypeSync: symlinkTypeSync$1 }; const u$3 = universalify$1.fromCallback; const path$3 = require$$1__default['default']; const fs$3 = fs$i; const _mkdirs = mkdirs$2; const mkdirs = _mkdirs.mkdirs; const mkdirsSync = _mkdirs.mkdirsSync; const _symlinkPaths = symlinkPaths_1; const symlinkPaths = _symlinkPaths.symlinkPaths; const symlinkPathsSync = _symlinkPaths.symlinkPathsSync; const _symlinkType = symlinkType_1; const symlinkType = _symlinkType.symlinkType; const symlinkTypeSync = _symlinkType.symlinkTypeSync; const pathExists$2 = pathExists_1.pathExists; const { areIdentical } = stat$4; function createSymlink (srcpath, dstpath, type, callback) { callback = (typeof type === 'function') ? type : callback; type = (typeof type === 'function') ? 
false : type; fs$3.lstat(dstpath, (err, stats) => { if (!err && stats.isSymbolicLink()) { Promise.all([ fs$3.stat(srcpath), fs$3.stat(dstpath) ]).then(([srcStat, dstStat]) => { if (areIdentical(srcStat, dstStat)) return callback(null) _createSymlink(srcpath, dstpath, type, callback); }); } else _createSymlink(srcpath, dstpath, type, callback); }); } function _createSymlink (srcpath, dstpath, type, callback) { symlinkPaths(srcpath, dstpath, (err, relative) => { if (err) return callback(err) srcpath = relative.toDst; symlinkType(relative.toCwd, type, (err, type) => { if (err) return callback(err) const dir = path$3.dirname(dstpath); pathExists$2(dir, (err, dirExists) => { if (err) return callback(err) if (dirExists) return fs$3.symlink(srcpath, dstpath, type, callback) mkdirs(dir, err => { if (err) return callback(err) fs$3.symlink(srcpath, dstpath, type, callback); }); }); }); }); } function createSymlinkSync (srcpath, dstpath, type) { let stats; try { stats = fs$3.lstatSync(dstpath); } catch {} if (stats && stats.isSymbolicLink()) { const srcStat = fs$3.statSync(srcpath); const dstStat = fs$3.statSync(dstpath); if (areIdentical(srcStat, dstStat)) return } const relative = symlinkPathsSync(srcpath, dstpath); srcpath = relative.toDst; type = symlinkTypeSync(relative.toCwd, type); const dir = path$3.dirname(dstpath); const exists = fs$3.existsSync(dir); if (exists) return fs$3.symlinkSync(srcpath, dstpath, type) mkdirsSync(dir); return fs$3.symlinkSync(srcpath, dstpath, type) } var symlink$1 = { createSymlink: u$3(createSymlink), createSymlinkSync }; const file = file$1; const link = link$1; const symlink = symlink$1; var ensure = { // file createFile: file.createFile, createFileSync: file.createFileSync, ensureFile: file.createFile, ensureFileSync: file.createFileSync, // link createLink: link.createLink, createLinkSync: link.createLinkSync, ensureLink: link.createLink, ensureLinkSync: link.createLinkSync, // symlink createSymlink: symlink.createSymlink, createSymlinkSync: symlink.createSymlinkSync, ensureSymlink: symlink.createSymlink, ensureSymlinkSync: symlink.createSymlinkSync }; function stringify$3 (obj, { EOL = '\n', finalEOL = true, replacer = null, spaces } = {}) { const EOF = finalEOL ? EOL : ''; const str = JSON.stringify(obj, replacer, spaces); return str.replace(/\n/g, EOL) + EOF } function stripBom$1 (content) { // we do this because JSON.parse would convert it to a utf8 string if encoding wasn't specified if (Buffer.isBuffer(content)) content = content.toString('utf8'); return content.replace(/^\uFEFF/, '') } var utils = { stringify: stringify$3, stripBom: stripBom$1 }; let _fs; try { _fs = gracefulFs; } catch (_) { _fs = require$$0__default$2['default']; } const universalify = universalify$1; const { stringify: stringify$2, stripBom } = utils; async function _readFile (file, options = {}) { if (typeof options === 'string') { options = { encoding: options }; } const fs = options.fs || _fs; const shouldThrow = 'throws' in options ? options.throws : true; let data = await universalify.fromCallback(fs.readFile)(file, options); data = stripBom(data); let obj; try { obj = JSON.parse(data, options ? options.reviver : null); } catch (err) { if (shouldThrow) { err.message = `${file}: ${err.message}`; throw err } else { return null } } return obj } const readFile = universalify.fromPromise(_readFile); function readFileSync (file, options = {}) { if (typeof options === 'string') { options = { encoding: options }; } const fs = options.fs || _fs; const shouldThrow = 'throws' in options ? 
options.throws : true; try { let content = fs.readFileSync(file, options); content = stripBom(content); return JSON.parse(content, options.reviver) } catch (err) { if (shouldThrow) { err.message = `${file}: ${err.message}`; throw err } else { return null } } } async function _writeFile (file, obj, options = {}) { const fs = options.fs || _fs; const str = stringify$2(obj, options); await universalify.fromCallback(fs.writeFile)(file, str, options); } const writeFile = universalify.fromPromise(_writeFile); function writeFileSync (file, obj, options = {}) { const fs = options.fs || _fs; const str = stringify$2(obj, options); // not sure if fs.writeFileSync returns anything, but just in case return fs.writeFileSync(file, str, options) } const jsonfile$1 = { readFile, readFileSync, writeFile, writeFileSync }; var jsonfile_1 = jsonfile$1; const jsonFile$1 = jsonfile_1; var jsonfile = { // jsonfile exports readJson: jsonFile$1.readFile, readJsonSync: jsonFile$1.readFileSync, writeJson: jsonFile$1.writeFile, writeJsonSync: jsonFile$1.writeFileSync }; const u$2 = universalify$1.fromCallback; const fs$2 = gracefulFs; const path$2 = require$$1__default['default']; const mkdir = mkdirs$2; const pathExists$1 = pathExists_1.pathExists; function outputFile$1 (file, data, encoding, callback) { if (typeof encoding === 'function') { callback = encoding; encoding = 'utf8'; } const dir = path$2.dirname(file); pathExists$1(dir, (err, itDoes) => { if (err) return callback(err) if (itDoes) return fs$2.writeFile(file, data, encoding, callback) mkdir.mkdirs(dir, err => { if (err) return callback(err) fs$2.writeFile(file, data, encoding, callback); }); }); } function outputFileSync$1 (file, ...args) { const dir = path$2.dirname(file); if (fs$2.existsSync(dir)) { return fs$2.writeFileSync(file, ...args) } mkdir.mkdirsSync(dir); fs$2.writeFileSync(file, ...args); } var output = { outputFile: u$2(outputFile$1), outputFileSync: outputFileSync$1 }; const { stringify: stringify$1 } = utils; const { outputFile } = output; async function outputJson (file, data, options = {}) { const str = stringify$1(data, options); await outputFile(file, str, options); } var outputJson_1 = outputJson; const { stringify } = utils; const { outputFileSync } = output; function outputJsonSync (file, data, options) { const str = stringify(data, options); outputFileSync(file, str, options); } var outputJsonSync_1 = outputJsonSync; const u$1 = universalify$1.fromPromise; const jsonFile = jsonfile; jsonFile.outputJson = u$1(outputJson_1); jsonFile.outputJsonSync = outputJsonSync_1; // aliases jsonFile.outputJSON = jsonFile.outputJson; jsonFile.outputJSONSync = jsonFile.outputJsonSync; jsonFile.writeJSON = jsonFile.writeJson; jsonFile.writeJSONSync = jsonFile.writeJsonSync; jsonFile.readJSON = jsonFile.readJson; jsonFile.readJSONSync = jsonFile.readJsonSync; var json = jsonFile; const fs$1 = gracefulFs; const path$1 = require$$1__default['default']; const copySync = copySync$1.copySync; const removeSync = remove_1.removeSync; const mkdirpSync = mkdirs$2.mkdirpSync; const stat$1 = stat$4; function moveSync$1 (src, dest, opts) { opts = opts || {}; const overwrite = opts.overwrite || opts.clobber || false; const { srcStat, isChangingCase = false } = stat$1.checkPathsSync(src, dest, 'move', opts); stat$1.checkParentPathsSync(src, srcStat, dest, 'move'); if (!isParentRoot$1(dest)) mkdirpSync(path$1.dirname(dest)); return doRename$1(src, dest, overwrite, isChangingCase) } function isParentRoot$1 (dest) { const parent = path$1.dirname(dest); const 
parsedPath = path$1.parse(parent); return parsedPath.root === parent } function doRename$1 (src, dest, overwrite, isChangingCase) { if (isChangingCase) return rename$1(src, dest, overwrite) if (overwrite) { removeSync(dest); return rename$1(src, dest, overwrite) } if (fs$1.existsSync(dest)) throw new Error('dest already exists.') return rename$1(src, dest, overwrite) } function rename$1 (src, dest, overwrite) { try { fs$1.renameSync(src, dest); } catch (err) { if (err.code !== 'EXDEV') throw err return moveAcrossDevice$1(src, dest, overwrite) } } function moveAcrossDevice$1 (src, dest, overwrite) { const opts = { overwrite, errorOnExist: true }; copySync(src, dest, opts); return removeSync(src) } var moveSync_1 = moveSync$1; var moveSync = { moveSync: moveSync_1 }; const fs = gracefulFs; const path = require$$1__default['default']; const copy = copy$1.copy; const remove = remove_1.remove; const mkdirp = mkdirs$2.mkdirp; const pathExists = pathExists_1.pathExists; const stat = stat$4; function move$1 (src, dest, opts, cb) { if (typeof opts === 'function') { cb = opts; opts = {}; } const overwrite = opts.overwrite || opts.clobber || false; stat.checkPaths(src, dest, 'move', opts, (err, stats) => { if (err) return cb(err) const { srcStat, isChangingCase = false } = stats; stat.checkParentPaths(src, srcStat, dest, 'move', err => { if (err) return cb(err) if (isParentRoot(dest)) return doRename(src, dest, overwrite, isChangingCase, cb) mkdirp(path.dirname(dest), err => { if (err) return cb(err) return doRename(src, dest, overwrite, isChangingCase, cb) }); }); }); } function isParentRoot (dest) { const parent = path.dirname(dest); const parsedPath = path.parse(parent); return parsedPath.root === parent } function doRename (src, dest, overwrite, isChangingCase, cb) { if (isChangingCase) return rename(src, dest, overwrite, cb) if (overwrite) { return remove(dest, err => { if (err) return cb(err) return rename(src, dest, overwrite, cb) }) } pathExists(dest, (err, destExists) => { if (err) return cb(err) if (destExists) return cb(new Error('dest already exists.')) return rename(src, dest, overwrite, cb) }); } function rename (src, dest, overwrite, cb) { fs.rename(src, dest, err => { if (!err) return cb() if (err.code !== 'EXDEV') return cb(err) return moveAcrossDevice(src, dest, overwrite, cb) }); } function moveAcrossDevice (src, dest, overwrite, cb) { const opts = { overwrite, errorOnExist: true }; copy(src, dest, opts, err => { if (err) return cb(err) return remove(src, cb) }); } var move_1 = move$1; const u = universalify$1.fromCallback; var move = { move: u(move_1) }; var lib = { // Export promiseified graceful-fs: ...fs$i, // Export extra methods: ...copySync$1, ...copy$1, ...empty, ...ensure, ...json, ...mkdirs$2, ...moveSync, ...move, ...output, ...pathExists_1, ...remove_1 }; const pkgPath = __dirname.indexOf('cli') !== -1 ? require$$1.join(__dirname, '../../package.json') : require$$1.join(__dirname, '../../package.json'); const PKG = readJSONSync(pkgPath); const APP_PKG = readJSONSync(require$$1.join(process.cwd(), 'package.json')); const MYGRA_CONFIG_DIR = require$$1.join(os.homedir(), '.mygra'); const MYGRA_DEFAULT_PATH = require$$1.join(process.cwd(), 'mygra'); const MYGRA_CONFIG_PATH = require$$1.join(os.homedir(), '.mygra'); const MYGRA_DEFAULTS = { initialized: true, directory: MYGRA_DEFAULT_PATH, active: [], reverts: [], extension: '.js', templatePrefix: true }; /** * Inspects a string matching by supplied pattern. * * @param value the value to inspect. 
 * @param pattern a pattern to check for matching. * @returns a boolean indicating if the value matches pattern. */ function isMatch(value, pattern) { if (typeof pattern === 'string') return value.indexOf(pattern) !== -1; return pattern.test(value); }
/** * Finds the index of a matched element in an array. * * @param values the string values to inspect. * @param pattern the pattern used for matching. * @returns the index of a matched element. */ function findIndex(values, pattern) { if (typeof pattern === 'undefined' || pattern === '') return -1; return values.findIndex(currentFile => isMatch(currentFile, pattern)); }
/** * Reads a file asynchronously. * * @param path the path to be read. * @returns the file contents as a string. */ function readFileAsync(path) { return new Promise((res, rej) => { lib.readFile(path, 'utf8', (err, data) => { if (err) rej(err); return res(data); }); }); }
/** * Writes a file asynchronously. * * @param path the path to write to. * @param data the data to be written. * @returns a boolean indicating a successful write. */ function writeFileAsync(path, data) { data = typeof data === 'object' ? JSON.stringify(data, null, 2) : data; return new Promise((res, rej) => { lib.writeFile(path, data, (err) => { if (err) { console.error(err.name + ': ' + err.message); rej(err); } res(true); }); }); }
/** * Reads a JSON file returning a parsed object of the specified type. * * @param path the path to be read. * @param defaults defaults to use when not found. * @returns a JSON parsed object. */ function readJSONSync(path, defaults = {}) { if (!lib.existsSync(path)) return defaults; return JSON.parse(lib.readFileSync(path, 'utf-8')); }
/** * Colorizes a string with ansi colors. * * @param str the string to be colorized. * @param styles the styles to be applied. * @returns an ansi colorized string. */ function colorize(str, ...styles) { return styles.reduce((a, c) => { if (!colors__default['default'][c]) return a; return colors__default['default'][c](a); }, str); }
/** * Creates or loads the configuration store for the given name. * * @param name the name of the config to create. * @param directory the directory where the config is to be stored. * @returns an api for reading, updating and persisting the config. */ function initConfig(name = APP_PKG.name, directory = MYGRA_CONFIG_DIR) { const filename = `${name}.config.json`; const fullpath = require$$1.join(directory, filename); const config = flatCache__default['default'].load(`${name}.config.json`, directory); const api = { get props() { return config.all(); }, directory, filename, fullpath, defaults, get, set, update, save: config.save };
/** * Initialize the configuration merging defaults. * * @param defaults optional defaults. */ function defaults(initDefaults = {}) { set({ ...initDefaults, ...config.all() }); }
/** * Gets a key's value from config store. * * @param key the key to get. */ function get(key) { return config.getKey(key); }
/** * Sets a key/value or an object of key/values in the config store. * * @param keyOrObject the key to set or an object of key/values. * @param value the value to set when a single key is provided. */ function set(keyOrObject, value) { let obj = keyOrObject; if (arguments.length > 1) obj = { [keyOrObject]: value }; for (const k in obj) { config.setKey(k, obj[k]); } config.save(true); }
/** * Updates config only for known keys. * * @param obj the config object to update from. */ function update(obj) { const keys = Object.keys(MYGRA_DEFAULTS); for (const k in obj) { if (keys.includes(k)) config.setKey(k, obj[k]); } config.save(); } if (!config.getKey('initialized')) defaults(MYGRA_DEFAULTS); return api; }
/** * Checks if the value is a promise. * * @param value the value to inspect as a promise.
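 * @example
 * // Illustrative sketch only: the identity check treats only native promises as promises.
 * isPromise(Promise.resolve(1)); // true, Promise.resolve returns the same instance
 * isPromise({ then() {} });      // false, thenables are re-wrapped by Promise.resolve
 * isPromise(42);                 // false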
*/ function isPromise(value) { return Promise.resolve(value) === value; }
/** * Helper to format the last or active migration for storage. * * @param migrations the array of successful migrations. * @param dir the migration direction. * @returns a tuple containing last migration and direction. */ function defineActive(migrations, dir) { const last = migrations[migrations.length - 1].filename; return [getBaseName(last), dir]; }
/** * Helper to format the last migrations for reverting. * * @param migrations the array of successful migrations. * @param dir the migration direction. * @returns a tuple containing last migrations and direction. */ function defineReverts(migrations, dir) { const clone = [...migrations].reverse(); // reverse so we traverse in opposite order.
const names = clone.map(file => getBaseName(file.filename));
// Don't flip direction here as mygra.revert() will do that automatically.
return [names, dir]; }
/** * Makes errors more readable. * * @param err the error to add colorization to. */ function colorizeError(err) { const _err = err; _err.colorizedMessage = symbols__default['default'].error + ' ' + colorize((err.name || 'Error') + ': ' + err.message || 'Unknown', 'redBright'); _err.colorizedStack = colorize((err.stack || '').split('\n').slice(1).join('\n'), 'dim'); return _err; }
/** * Gets the base name of a file path with or without file extension. * * @param filepath the full path to the file. * @param includeExt when true the file extension is retained. * @returns the filename only from the specified path. */ function getBaseName(filepath, includeExt = false) { filepath = require$$1.basename(filepath); if (includeExt) return filepath; return filepath.replace(require$$1.extname(filepath), ''); }
/** * Wraps a migration handler so it can be awaited whether it uses the * callback or returns a promise. * * @param fn the migration up/down handler to promisify. * @returns a function accepting the connection and returning a promise. */ function promisifyMigration(fn) { return (conn) => { return new Promise((res, rej) => { const prom = fn(conn, (err, data) => { if (err) return rej(err); return res(data); }); if (!isPromise(prom)) return prom; return prom.then(res).catch(rej); }); }; } const config = initConfig();
class Mygra extends events.EventEmitter { constructor(options = {}) { super(); options.templates = { ...defaultTemplates, ...options.templates }; this.options = { ...config.props, events: {}, connection: (..._args) => { throw new Error(`Database connection NOT implemented`); }, ...options }; this.bindEvents(this.options.events); config.update({ ...this.options }); this.duplicateNames().then(dupes => { if (dupes.length) { const err = colorizeError(Error(`Please remove duplicate migration names:`)); const stack = err.colorizedMessage + '\n' + dupes.join('\n') + '\n' + err.colorizedStack; throw stack; } }); }
/** * Binds event listeners provided via the options events map. * * @param events a map of event names to arrays of listeners. */ bindEvents(events) { if (!events) return; for (const event in events) { const listeners = events[event] || []; for (const listener of listeners) { this.on(event, listener); } } }
/** * Gets the directory where migrations are stored. */ get directory() { return this.options.directory; } /** * Gets the stored revert state. */ get reverts() { return this.options.reverts; } /** * Gets the active migration. */ get active() { return this.options.active || []; } /** * The extension to use for filenames. */ get extension() { return this.options.extension; } /** * Gets the configured templates. */ get templates() { return (this.options.templates || {}); } /** * Gets the database connection. */ get connection() { return this.options.connection; } /** * Sets the active migration.
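 * @example
 * // Illustrative sketch, assuming a Mygra instance named `mygra`; the migration
 * // name below is hypothetical. The active state is a [name, direction] tuple
 * // that is emitted and persisted to the config store.
 * mygra.active = ['1650000000000_create_users_table', 'up'];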
*/ set active(state) { if (!state || !state.length || typeof state[0] === 'undefined' || state[0] === '' || state[0] === null) return; this.emit('active', state); this.options.active = state; config.set('active', state); }
/** * Sets the revert state (the previously run migrations and their direction). */ set reverts(state) { if (!state || !state.length || typeof state[0] === 'undefined' || state[0] === null) return; this.emit('reverts', state); this.options.reverts = state; config.set('reverts', state); }
/** * Loads templates. * * @returns an object containing templates. */ async getTemplates(templates = this.templates) { const templatesDir = require$$1.join(this.directory, 'templates'); const hasTemplates = require$$0$2.existsSync(templatesDir); const _templates = templates;
// If a user templates directory exists, load them.
if (hasTemplates) { const files = glob__default['default'].sync(`${templatesDir}/*${this.extension}`, { onlyFiles: true }); for (const file of files) { const base = getBaseName(file); const result = await Promise.resolve().then(function () { return /*#__PURE__*/_interopNamespace(require(file)); }); _templates[base] = result.default || result; } } return _templates; }
/** * Loads migration filenames. * * @returns an array of migration file names. */ async getFilenames() { let files = await glob__default['default'](`${this.directory}/migrations/**/*${this.extension}`, { onlyFiles: true }); files = files.sort().reverse().map(f => require$$1.resolve(f)); return files; }
/** * Gets a create migration handler template. * * @param name the name of the template to get. * @returns a create migration template handler. */ async getTemplate(name = 'default') { const templates = await this.getTemplates(); return templates[name]; }
/** * Verifies that the provided migration name is unique. * * @param name the name to be inspected. * @returns a boolean indicating if the name is unique. */ async isUniqueName(name) { const filenames = await this.getFilenames(); const stripped = filenames.map(v => require$$1.parse(v).name.replace(/^\d+_/, '')); const found = stripped.findIndex(v => { return v.indexOf(name) !== -1; }); return found === -1; }
/** * Gets list of duplicate migration names. * * @returns an array of duplicate migration names. */ async duplicateNames() { let filenames = await this.getFilenames(); filenames = filenames.map(name => { const base = require$$1.basename(name); return base.replace(/^\d+_/, '').replace(require$$1.extname(name), ''); }); if (filenames.length === 1) return []; const found = []; return filenames.filter(v => { const isDupe = found.includes(v); found.push(v); return isDupe; }); }
/** * Checks if the current active migration is the first migration * and has a current migration direction of down. * * @param files the filenames to be inspected. * @param active the active migration. * @returns a boolean indicating if it is the first migration.
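 * @example
 * // Rough sketch with hypothetical paths, assuming a Mygra instance named `mygra`.
 * // Returns true only when the active migration is the oldest file and was last run 'down'.
 * mygra.isFirstMigration(
 *   ['/app/mygra/migrations/1000_create_users.js', '/app/mygra/migrations/2000_create_orders.js'],
 *   ['1000_create_users', 'down']
 * ); // true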
*/ isFirstMigration(files, active) { const clone = [...files].sort(); const [name, dir] = active; const idx = clone.findIndex(v => v.indexOf(name) !== -1); return idx === 0 && dir === 'down'; }
/** * Creates a new migration file from a template. * * @param nameOrOptions the migration name or an options object. * @param options options used to create the migration. * @returns status, name and message indicating if the migration was created. */ async create(nameOrOptions, options) { let name = nameOrOptions; if (typeof nameOrOptions === 'object') { options = nameOrOptions; name = undefined; } options = { up: '', down: '', ...options };
// Prefer options.name but fall back to the positional name argument.
options.name = options.name || name; if (!options.name) throw new Error(`Cannot create migration with name of undefined.`); let baseName = options.name.replace(/\s/g, '_').toLowerCase(); options.table = options.table || baseName;
// Check if we should prefix the name with the template name,
// for ex: generate:create user_table will become create_user_table.
baseName = this.options.templatePrefix ? options.template + '_' + baseName : baseName; const isUnique = await this.isUniqueName(baseName); if (!isUnique) return { ok: false, name, message: `Cannot create duplicate migration name: "${baseName}"` }; name = Date.now() + '_' + baseName; const filename = require$$1.join(this.directory, 'migrations', name + this.extension); try { const template = await this.getTemplate(options.template); options.up = options.up?.length ? "`" + options.up + "`" : "`" + "`"; options.down = options.down?.length ? "`" + options.down + "`" : "`" + "`"; const writeResult = await writeFileAsync(filename, template({ ...options, name: baseName })); this.emit('created', { name, filename, ok: writeResult || false }); return { ok: writeResult || false, name, message: writeResult === true ? ` Migration "${name}" successfully created` : ` Migration "${name}" was NOT created`, filename }; } catch (err) { return { ok: false, name, message: err, filename }; } }
/** * Filters migration filenames by name or by direction and level count. * * @param nameOrDir a migration name or the migration direction. * @param dirOrLevels the direction when a name is passed, otherwise the number of levels. * @returns the filtered migration filenames. */ async filter(nameOrDir, dirOrLevels) { let name = nameOrDir; let dir = dirOrLevels; let levels; if (['up', 'down'].includes(nameOrDir)) { levels = (dirOrLevels || 0); if (dirOrLevels === '*') levels = 999999; dir = nameOrDir; name = undefined; }
// Levels are only used when a name is not present.
// Default to one step or level; the user can define more if they want.
levels = levels || 1; const [active, activeDir] = this.active; const files = await this.getFilenames(); const isFirst = this.isFirstMigration(files, this.active); const idx = findIndex(files, name);
// Used to shift the index depending on the last active direction of migration.
const offset = activeDir === 'down' ? 1 : 0; let lastIdx = findIndex(files, active); lastIdx = !active || isFirst ? files.length : lastIdx; let filtered = files; if (name && idx === -1) throw new Error(`Migration ${name} required but not found.`); if (name) { dir = dir || 'up'; if (dir === 'up') { filtered = files.slice(idx, lastIdx + offset); } else { filtered = files.slice(lastIdx + offset, idx + 1 + offset); } } else if (dir) { if (dir === 'up') { filtered = files.slice(0, lastIdx + offset); filtered = levels ? filtered.slice(-levels) : filtered; } else { levels = levels || 1; return files.slice(lastIdx + offset, lastIdx + offset + levels); } } this.emit('filtered', { name, direction: dir, filenames: filtered, levels }); return filtered; }
/** * Loads a single migration from file. * * @param filename the filename to be loaded. * @returns an IMigration instance from file. */ async import(filename) { const file = await Promise.resolve().then(function () { return /*#__PURE__*/_interopNamespace(require(filename)); }); Object.defineProperty(file, 'filename', { value: filename }); return file; }
/** * Imports an array of migrations by filename. * * @param filenames the file names to import/load.
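 * @example
 * // Hypothetical usage; filenames normally come from getFilenames() or filter().
 * const migrations = await mygra.load([
 *   '/app/mygra/migrations/1650000000000_create_users_table.js'
 * ]);
 * // Each entry exposes { name, description, up, down } plus a non-enumerable `filename`.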
 * @returns a promise resolving to the loaded migrations. */ async load(filenames) { const proms = filenames.map(filename => this.import(filename)); const migrations = await Promise.all(proms); this.emit('loaded', { filenames, migrations }); return migrations; }
/** * Checks if this is a preview or the filtered files are out of scope. * * @param dir the direction of the migration. * @param migrations the loaded migrations to be checked. * @param preview indicates preview mode requested. * @returns IMigrationResult. */ checkPreviewAndScope(dir, migrations, preview = false) { const message = preview ? 'Migration Preview' : `Migration out of scope, no files match request`; const names = preview ? migrations.map(m => require$$1.parse(m.filename).name) : []; const count = migrations.length; const ok = preview || !!migrations.length; return { type: dir, ok, message, count, success: 0, failed: 0, names, isPreview: preview }; }
// /**
// * Iterates in series the migrations.
// *
// * @param dir the direction of the migration
// * @param files the file list being migration.
// * @param migrations the loaded migrations.
// * @returns IMigrationResult
// */
// async run(dir: MigrateDirection, migrations: IMigration[]) {
// const migrated = [] as IMigration[];
// let count = 0;
// for (const [, file] of migrations.entries()) {
// this.emit(dir, file);
// const fn = promisifyMigration(file[dir]);
// await fn(this.connection);
// count += 1;
// migrated.push(file);
// }
// return {
// names: migrated.map(m => m.filename),
// type: dir,
// ok: count === migrated.length,
// count,
// migrated,
// } as IMigrationResult;
// }
/** * Migrates up automatically or by level count or name of migration. * * @param nameOrLevels the name or level count to migrate up. * @param preview returns a dry run. * @returns status, count affected and message. */ async up(nameOrLevels, preview = false) { let files; let result = { type: 'up', ok: false, message: 'Unknown', count: 0, success: 0, failed: 0, names: [] }; let count = 0; let success = 0; const migrated = []; try { if (typeof nameOrLevels === 'number' || nameOrLevels === '*') files = await this.filter('up', nameOrLevels); else files = await this.filter(nameOrLevels, 'up'); files.sort(); // ascending order.
const migrations = await this.load(files); count = migrations.length; result = this.checkPreviewAndScope('up', migrations, preview); if (!result.ok || result.isPreview) return result; for (const [, file] of migrations.entries()) { this.emit('up', file); const fn = promisifyMigration(file['up']); await fn(this.connection) .then(_ => { success += 1; migrated.push(file); }); } result.ok = count === success; } catch (err) { if (migrated.length) await (this.revert(migrated, 'up')); result = { ...result, ok: false, message: err, names: migrated.map(m => m.filename) }; } result.count = count; result.success = success; result.failed = Math.max(0, count - success); if (result.ok) { result.message = `Migration up successful`; result.names = migrated.map(m => m.filename); }
// Update the active migration when the result status is ok, storing the last migration run.
if (result.ok && migrated.length) { this.reverts = defineReverts(migrated, 'up'); this.active = defineActive(migrated, 'up'); } this.emit('migration', result); return result; }
/** * Migrates down automatically or by level count or name of migration. * * @param nameOrLevels the name or level count to migrate down. * @param preview returns a dry run. * @returns status, count affected and message.
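 * @example
 * // Sketch, assuming a Mygra instance named `mygra` (up() works the same way in reverse):
 * const result = await mygra.down(1);          // roll back one level
 * const dryRun = await mygra.down('*', true);  // preview rolling back everything
 * if (!result.ok) console.error(result.message);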
*/ async down(nameOrLevels, preview = false) { let files; let result = { type: 'down', ok: false, message: 'Unknown', count: 0, success: 0, failed: 0, names: [] }; let count = 0; let success = 0; const migrated = []; try { if (typeof nameOrLevels === 'number' || nameOrLevels === '*') files = await this.filter('down', nameOrLevels); else files = await this.filter(nameOrLevels, 'down'); const migrations = await this.load(files); count = migrations.length; result = this.checkPreviewAndScope('down', migrations, preview); if (!result.ok || result.isPreview) return result; for (const [, file] of migrations.entries()) { this.emit('down', file); const fn = promisifyMigration(file['down']); await fn(this.connection) .then(_ => { success += 1; migrated.push(file); }); } result.ok = count === success; } catch (err) { if (migrated.length) await (this.revert(migrated, 'down')); result = { ...result, ok: false, message: err, names: migrated.map(m => m.filename) }; } result.count = count; result.success = success; result.failed = Math.max(0, count - success); if (result.ok) { result.message = `Migration down successful`; result.names = migrated.map(m => m.filename); }
// Update the active migration when the result status is ok, storing the last migration run.
if (result.ok && migrated.length) { this.reverts = defineReverts(migrated, 'down'); this.active = defineActive(migrated, 'down'); } this.emit('migration', result); return result; }
/** * Reverts a list of migrations by calling the opposite direction * on the previously migrated files. * * @param migrations an array of migrations to revert. * @param dir the original direction of the migration. * @param preview shows a dry run of the revert. * @returns status, count affected and message. */ async revert(migrations, dir, preview = false) { const clone = [...migrations]; const newDir = dir === 'up' ? 'down' : 'up'; let result = { type: newDir, ok: false, message: 'Unknown', count: 0, success: 0, failed: 0, names: [] }; let revertNames = []; let last; let name = ''; const count = migrations.length; let success = 0; const migrated = []; try {
// Ensure migrations are in correct order using timestamped filenames.
if (dir === 'up') {
// We'll be descending so we need last first.
clone.sort().reverse(); } else {
// We'll be ascending as we are reverting a down; sort ensuring that the oldest is first.
clone.sort(); }
// The revert migration names will now be the opposite of whatever the clone order is.
revertNames = [...clone].reverse().map(file => getBaseName(file.filename));
// Get last to store as active migration.
last = clone[clone.length - 1]; name = getBaseName(last.filename); result = this.checkPreviewAndScope(newDir, clone, preview); if (!result.count || result.isPreview) return result; for (const [, file] of migrations.entries()) { this.emit(newDir, file); const fn = promisifyMigration(file[newDir]); await fn(this.connection) .then(_ => { success += 1; migrated.push(file); }); } result.ok = count === success; } catch (err) { result = { ...result, ok: false, message: err, names: migrated.map(m => m.filename) }; } result.count = count; result.success = success; result.failed = Math.max(0, count - success); if (result.ok) { result.message = `Revert Migration ${newDir} successful`; result.names = migrated.map(m => m.filename); } if (result.ok && migrated.length) { this.reverts = [revertNames, newDir]; this.active = [name, newDir]; } this.emit('migration', result); return result; }
/** * Reverts all migrations. * * @param preview when true returns a dry run without running the migrations. * @returns status, count affected and message.
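 * @example
 * // Sketch, assuming a Mygra instance named `mygra`:
 * const result = await mygra.reset();      // revert every applied migration
 * const dryRun = await mygra.reset(true);  // preview only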
*/ async reset(preview = false) { let result = { type: 'down', ok: false, message: 'Unknown', count: 0, success: 0, failed: 0, names: [] }; let count = 0; let success = 0; const migrated = []; try { const files = await this.filter('down', '*'); const migrations = await this.load(files); count = migrations.length; if (!files.length) { return { ...result, message: `Migration out of scope, no files match request`, count, names: migrated.map(m => m.filename) }; } else if (preview) { return { ...result, ok: true, message: 'Migration preview', count: files.length, names: files }; } else { for (const [i, file] of migrations.entries()) { const name = require$$1.parse(files[i]).name; this.emit('down', { name, revert: true }); await require$$4.promisify(file.down)(this.connection) .then(_ => { migrated.push(file); success += 1; }); } result.ok = count === success; } } catch (err) { if (migrated.length) await (this.revert(migrated, 'down')); result = { ...result, ok: false, message: err, count }; } result.count = count; result.success = success; result.failed = Math.max(0, count - success); result.names = migrated.map(m => m.filename); if (result.ok) result.message = `Migration reset successful`; // Update the active migration when // result status is ok store last migration run. if (result.ok && migrated.length) { this.reverts = defineReverts(migrated, 'down'); this.active = defineActive(migrated, 'down'); } this.emit('migration', result); return result; } } exports.APP_PKG = APP_PKG; exports.EVENTS = EVENTS; exports.MYGRA_CONFIG_DIR = MYGRA_CONFIG_DIR; exports.MYGRA_CONFIG_PATH = MYGRA_CONFIG_PATH; exports.MYGRA_DEFAULTS = MYGRA_DEFAULTS; exports.MYGRA_DEFAULT_PATH = MYGRA_DEFAULT_PATH; exports.Mygra = Mygra; exports.PKG = PKG; exports.colorize = colorize; exports.colorizeError = colorizeError; exports.defineActive = defineActive; exports.defineReverts = defineReverts; exports.findIndex = findIndex; exports.getBaseName = getBaseName; exports.initConfig = initConfig; exports.isMatch = isMatch; exports.isPromise = isPromise; exports.promisifyMigration = promisifyMigration; exports.readFileAsync = readFileAsync; exports.readJSONSync = readJSONSync; exports.writeFileAsync = writeFileAsync;
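// ---------------------------------------------------------------------------
// Usage sketch (illustrative only, not executed by this bundle). The package
// name, directory, connection object and migration name below are assumptions
// for the example and should be replaced with your own values.
//
//   const { Mygra } = require('mygra');
//
//   const mygra = new Mygra({
//     directory: '/path/to/project/mygra',
//     extension: '.js',
//     // whatever your migrations' up/down handlers expect as `conn`:
//     connection: { query: async (sql) => { /* ... */ } },
//     events: {
//       migration: [result => console.log(result.message)]
//     }
//   });
//
//   // Inside an async function:
//   await mygra.create({ name: 'users_table', template: 'default' });
//   await mygra.up('*');   // run all pending migrations
//   await mygra.down(1);   // roll the most recent one back
// ---------------------------------------------------------------------------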