'use strict'; Object.defineProperty(exports, '__esModule', { value: true }); function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } var AsyncLock = _interopDefault(require('async-lock')); var Hash = _interopDefault(require('sha.js/sha1.js')); var crc32 = _interopDefault(require('crc-32')); var pako = _interopDefault(require('pako')); var ignore = _interopDefault(require('ignore')); var pify = _interopDefault(require('pify')); var cleanGitRef = _interopDefault(require('clean-git-ref')); var diff3Merge = _interopDefault(require('diff3')); /** * @typedef {Object} GitProgressEvent * @property {string} phase * @property {number} loaded * @property {number} total */ /** * @callback ProgressCallback * @param {GitProgressEvent} progress * @returns {void | Promise<void>} */ /** * @typedef {Object} GitHttpRequest * @property {string} url - The URL to request * @property {string} [method='GET'] - The HTTP method to use * @property {Object} [headers={}] - Headers to include in the HTTP request * @property {Object} [agent] - An HTTP or HTTPS agent that manages connections for the HTTP client (Node.js only) * @property {AsyncIterableIterator<Uint8Array>} [body] - An async iterator of Uint8Arrays that make up the body of POST requests * @property {ProgressCallback} [onProgress] - Reserved for future use (emitting `GitProgressEvent`s) * @property {object} [signal] - Reserved for future use (canceling a request) */ /** * @typedef {Object} GitHttpResponse * @property {string} url - The final URL that was fetched after any redirects * @property {string} [method] - The HTTP method that was used * @property {Object} [headers] - HTTP response headers * @property {AsyncIterableIterator<Uint8Array>} [body] - An async iterator of Uint8Arrays that make up the body of the response * @property {number} statusCode - The HTTP status code * @property {string} statusMessage - The HTTP status message */ /** * @callback HttpFetch * @param {GitHttpRequest} request * @returns {Promise<GitHttpResponse>} */ /** * @typedef {Object} HttpClient * @property {HttpFetch} request */ /** * A git commit object. * * @typedef {Object} CommitObject * @property {string} message Commit message * @property {string} tree SHA-1 object id of corresponding file tree * @property {string[]} parent an array of zero or more SHA-1 object ids * @property {Object} author * @property {string} author.name The author's name * @property {string} author.email The author's email * @property {number} author.timestamp UTC Unix timestamp in seconds * @property {number} author.timezoneOffset Timezone difference from UTC in minutes * @property {Object} committer * @property {string} committer.name The committer's name * @property {string} committer.email The committer's email * @property {number} committer.timestamp UTC Unix timestamp in seconds * @property {number} committer.timezoneOffset Timezone difference from UTC in minutes * @property {string} [gpgsig] PGP signature (if present) */ /** * An entry from a git tree object. Files are called 'blobs' and directories are called 'trees'. * * @typedef {Object} TreeEntry * @property {string} mode the 6 digit octal mode * @property {string} path the name of the file or directory * @property {string} oid the SHA-1 object id of the blob or tree * @property {'commit'|'blob'|'tree'} type the type of object */ /** * A git tree object. Trees represent a directory snapshot. * * @typedef {TreeEntry[]} TreeObject */ /** * A git annotated tag object. 
* * @typedef {Object} TagObject * @property {string} object SHA-1 object id of object being tagged * @property {'blob' | 'tree' | 'commit' | 'tag'} type the type of the object being tagged * @property {string} tag the tag name * @property {Object} tagger * @property {string} tagger.name the tagger's name * @property {string} tagger.email the tagger's email * @property {number} tagger.timestamp UTC Unix timestamp in seconds * @property {number} tagger.timezoneOffset timezone difference from UTC in minutes * @property {string} message tag message * @property {string} [gpgsig] PGP signature (if present) */ /** * @typedef {Object} ReadCommitResult * @property {string} oid - SHA-1 object id of this commit * @property {CommitObject} commit - the parsed commit object * @property {string} payload - PGP signing payload */ /** * @typedef {Object} ServerRef - This object has the following schema: * @property {string} ref - The name of the ref * @property {string} oid - The SHA-1 object id the ref points to * @property {string} [target] - The target ref pointed to by a symbolic ref * @property {string} [peeled] - If the oid is the SHA-1 object id of an annotated tag, this is the SHA-1 object id that the annotated tag points to */ /** * @typedef Walker * @property {Symbol} Symbol('GitWalkerSymbol') */ /** * Normalized subset of filesystem `stat` data: * * @typedef {Object} Stat * @property {number} ctimeSeconds * @property {number} ctimeNanoseconds * @property {number} mtimeSeconds * @property {number} mtimeNanoseconds * @property {number} dev * @property {number} ino * @property {number} mode * @property {number} uid * @property {number} gid * @property {number} size */ /** * The `WalkerEntry` is an interface that abstracts computing many common tree / blob stats. 
* * @typedef {Object} WalkerEntry * @property {function(): Promise<'tree'|'blob'|'special'|'commit'>} type * @property {function(): Promise<number>} mode * @property {function(): Promise<string>} oid * @property {function(): Promise<Uint8Array|void>} content * @property {function(): Promise<Stat>} stat */ /** * @typedef {Object} CallbackFsClient * @property {function} readFile - https://nodejs.org/api/fs.html#fs_fs_readfile_path_options_callback * @property {function} writeFile - https://nodejs.org/api/fs.html#fs_fs_writefile_file_data_options_callback * @property {function} unlink - https://nodejs.org/api/fs.html#fs_fs_unlink_path_callback * @property {function} readdir - https://nodejs.org/api/fs.html#fs_fs_readdir_path_options_callback * @property {function} mkdir - https://nodejs.org/api/fs.html#fs_fs_mkdir_path_mode_callback * @property {function} rmdir - https://nodejs.org/api/fs.html#fs_fs_rmdir_path_callback * @property {function} stat - https://nodejs.org/api/fs.html#fs_fs_stat_path_options_callback * @property {function} lstat - https://nodejs.org/api/fs.html#fs_fs_lstat_path_options_callback * @property {function} [readlink] - https://nodejs.org/api/fs.html#fs_fs_readlink_path_options_callback * @property {function} [symlink] - https://nodejs.org/api/fs.html#fs_fs_symlink_target_path_type_callback * @property {function} [chmod] - https://nodejs.org/api/fs.html#fs_fs_chmod_path_mode_callback */ /** * @typedef {Object} PromiseFsClient * @property {Object} promises * @property {function} promises.readFile - https://nodejs.org/api/fs.html#fs_fspromises_readfile_path_options * @property {function} promises.writeFile - https://nodejs.org/api/fs.html#fs_fspromises_writefile_file_data_options * @property {function} promises.unlink - https://nodejs.org/api/fs.html#fs_fspromises_unlink_path * @property {function} promises.readdir - https://nodejs.org/api/fs.html#fs_fspromises_readdir_path_options * @property {function} promises.mkdir - https://nodejs.org/api/fs.html#fs_fspromises_mkdir_path_options * @property {function} promises.rmdir - https://nodejs.org/api/fs.html#fs_fspromises_rmdir_path * @property {function} promises.stat - https://nodejs.org/api/fs.html#fs_fspromises_stat_path_options * @property {function} promises.lstat - https://nodejs.org/api/fs.html#fs_fspromises_lstat_path_options * @property {function} [promises.readlink] - https://nodejs.org/api/fs.html#fs_fspromises_readlink_path_options * @property {function} [promises.symlink] - https://nodejs.org/api/fs.html#fs_fspromises_symlink_target_path_type * @property {function} [promises.chmod] - https://nodejs.org/api/fs.html#fs_fspromises_chmod_path_mode */ /** * @typedef {CallbackFsClient | PromiseFsClient} FsClient */ /** * @callback MessageCallback * @param {string} message * @returns {void | Promise<void>} */ /** * @typedef {Object} GitAuth * @property {string} [username] * @property {string} [password] * @property {Object} [headers] * @property {boolean} [cancel] Tells git to throw a `UserCanceledError` (instead of an `HttpError`). */ /** * @callback AuthCallback * @param {string} url * @param {GitAuth} auth Might have some values if the URL itself originally contained a username or password. 
* @returns {GitAuth | void | Promise<GitAuth | void>} */ /** * @callback AuthFailureCallback * @param {string} url * @param {GitAuth} auth The credentials that failed * @returns {GitAuth | void | Promise<GitAuth | void>} */ /** * @callback AuthSuccessCallback * @param {string} url * @param {GitAuth} auth * @returns {void | Promise<void>} */ /** * @typedef {Object} SignParams * @property {string} payload - a plaintext message * @property {string} secretKey - an 'ASCII armor' encoded PGP key (technically can actually contain _multiple_ keys) */ /** * @callback SignCallback * @param {SignParams} args * @return {{signature: string} | Promise<{signature: string}>} - an 'ASCII armor' encoded "detached" signature */ /** * @callback WalkerMap * @param {string} filename * @param {WalkerEntry[]} entries * @returns {Promise<any>} */ /** * @callback WalkerReduce * @param {any} parent * @param {any[]} children * @returns {Promise<any>} */ /** * @callback WalkerIterateCallback * @param {WalkerEntry[]} entries * @returns {Promise} */ /** * @callback WalkerIterate * @param {WalkerIterateCallback} walk * @param {IterableIterator} children * @returns {Promise} */ /** * @typedef {Object} RefUpdateStatus * @property {boolean} ok * @property {string} error */ /** * @typedef {Object} PushResult * @property {boolean} ok * @property {?string} error * @property {Object} refs * @property {Object} [headers] */ /** * @typedef {0|1} HeadStatus */ /** * @typedef {0|1|2} WorkdirStatus */ /** * @typedef {0|1|2|3} StageStatus */ /** * @typedef {[string, HeadStatus, WorkdirStatus, StageStatus]} StatusRow */ class BaseError extends Error { constructor(message) { super(message); // Setting this here allows TS to infer that all git errors have a `caller` property and // that its type is string. this.caller = ''; } toJSON() { // Error objects aren't normally serializable. So we do something about that. return { code: this.code, data: this.data, caller: this.caller, message: this.message, stack: this.stack, } } fromJSON(json) { const e = new BaseError(json.message); e.code = json.code; e.data = json.data; e.caller = json.caller; e.stack = json.stack; return e } get isIsomorphicGitError() { return true } } class InternalError extends BaseError { /** * @param {string} message */ constructor(message) { super( `An internal error caused this command to fail. Please file a bug report at https://github.com/isomorphic-git/isomorphic-git/issues with this error message: ${message}` ); this.code = this.name = InternalError.code; this.data = { message }; } } /** @type {'InternalError'} */ InternalError.code = 'InternalError'; class UnsafeFilepathError extends BaseError { /** * @param {string} filepath */ constructor(filepath) { super(`The filepath "${filepath}" contains unsafe character sequences`); this.code = this.name = UnsafeFilepathError.code; this.data = { filepath }; } } /** @type {'UnsafeFilepathError'} */ UnsafeFilepathError.code = 'UnsafeFilepathError'; // Modeled after https://github.com/tjfontaine/node-buffercursor // but with the goal of being much lighter weight. 
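// A minimal usage sketch of the BufferCursor class below (an editorial example, not part of the library code): the cursor wraps a Buffer and advances an internal offset on every read or write, which is how the index and packfile parsers later in this file consume binary data sequentially.
//
//   const buf = Buffer.alloc(8)
//   const writer = new BufferCursor(buf)
//   writer.writeUInt32BE(0xdeadbeef)
//   writer.writeUInt32BE(42)
//   const reader = new BufferCursor(buf)
//   reader.readUInt32BE().toString(16) // => 'deadbeef'
//   reader.readUInt32BE()              // => 42
//   reader.eof()                       // => true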
class BufferCursor { constructor(buffer) { this.buffer = buffer; this._start = 0; } eof() { return this._start >= this.buffer.length } tell() { return this._start } seek(n) { this._start = n; } slice(n) { const r = this.buffer.slice(this._start, this._start + n); this._start += n; return r } toString(enc, length) { const r = this.buffer.toString(enc, this._start, this._start + length); this._start += length; return r } write(value, length, enc) { const r = this.buffer.write(value, this._start, length, enc); this._start += length; return r } copy(source, start, end) { const r = source.copy(this.buffer, this._start, start, end); this._start += r; return r } readUInt8() { const r = this.buffer.readUInt8(this._start); this._start += 1; return r } writeUInt8(value) { const r = this.buffer.writeUInt8(value, this._start); this._start += 1; return r } readUInt16BE() { const r = this.buffer.readUInt16BE(this._start); this._start += 2; return r } writeUInt16BE(value) { const r = this.buffer.writeUInt16BE(value, this._start); this._start += 2; return r } readUInt32BE() { const r = this.buffer.readUInt32BE(this._start); this._start += 4; return r } writeUInt32BE(value) { const r = this.buffer.writeUInt32BE(value, this._start); this._start += 4; return r } } function compareStrings(a, b) { // https://stackoverflow.com/a/40355107/2168416 return -(a < b) || +(a > b) } function comparePath(a, b) { // https://stackoverflow.com/a/40355107/2168416 return compareStrings(a.path, b.path) } /** * From https://github.com/git/git/blob/master/Documentation/technical/index-format.txt * * 32-bit mode, split into (high to low bits) * * 4-bit object type * valid values in binary are 1000 (regular file), 1010 (symbolic link) * and 1110 (gitlink) * * 3-bit unused * * 9-bit unix permission. Only 0755 and 0644 are valid for regular files. * Symbolic links and gitlinks have value 0 in this field. */ function normalizeMode(mode) { // Note: BrowserFS will use -1 for "unknown" // I need to make it non-negative for these bitshifts to work. let type = mode > 0 ? mode >> 12 : 0; // If it isn't valid, assume it as a "regular file" // 0100 = directory // 1000 = regular file // 1010 = symlink // 1110 = gitlink if ( type !== 0b0100 && type !== 0b1000 && type !== 0b1010 && type !== 0b1110 ) { type = 0b1000; } let permissions = mode & 0o777; // Is the file executable? then 755. Else 644. 
if (permissions & 0b001001001) { permissions = 0o755; } else { permissions = 0o644; } // If it's not a regular file, scrub all permissions if (type !== 0b1000) permissions = 0; return (type << 12) + permissions } const MAX_UINT32 = 2 ** 32; function SecondsNanoseconds( givenSeconds, givenNanoseconds, milliseconds, date ) { if (givenSeconds !== undefined && givenNanoseconds !== undefined) { return [givenSeconds, givenNanoseconds] } if (milliseconds === undefined) { milliseconds = date.valueOf(); } const seconds = Math.floor(milliseconds / 1000); const nanoseconds = (milliseconds - seconds * 1000) * 1000000; return [seconds, nanoseconds] } function normalizeStats(e) { const [ctimeSeconds, ctimeNanoseconds] = SecondsNanoseconds( e.ctimeSeconds, e.ctimeNanoseconds, e.ctimeMs, e.ctime ); const [mtimeSeconds, mtimeNanoseconds] = SecondsNanoseconds( e.mtimeSeconds, e.mtimeNanoseconds, e.mtimeMs, e.mtime ); return { ctimeSeconds: ctimeSeconds % MAX_UINT32, ctimeNanoseconds: ctimeNanoseconds % MAX_UINT32, mtimeSeconds: mtimeSeconds % MAX_UINT32, mtimeNanoseconds: mtimeNanoseconds % MAX_UINT32, dev: e.dev % MAX_UINT32, ino: e.ino % MAX_UINT32, mode: normalizeMode(e.mode % MAX_UINT32), uid: e.uid % MAX_UINT32, gid: e.gid % MAX_UINT32, // size of -1 happens over a BrowserFS HTTP Backend that doesn't serve Content-Length headers // (like the Karma webserver) because BrowserFS HTTP Backend uses HTTP HEAD requests to do fs.stat size: e.size > -1 ? e.size % MAX_UINT32 : 0, } } function toHex(buffer) { let hex = ''; for (const byte of new Uint8Array(buffer)) { if (byte < 16) hex += '0'; hex += byte.toString(16); } return hex } /* eslint-env node, browser */ let supportsSubtleSHA1 = null; async function shasum(buffer) { if (supportsSubtleSHA1 === null) { supportsSubtleSHA1 = await testSubtleSHA1(); } return supportsSubtleSHA1 ? subtleSHA1(buffer) : shasumSync(buffer) } // This is modeled after @dominictarr's "shasum" module, // but without the 'json-stable-stringify' dependency and // extra type-casting features. function shasumSync(buffer) { return new Hash().update(buffer).digest('hex') } async function subtleSHA1(buffer) { const hash = await crypto.subtle.digest('SHA-1', buffer); return toHex(hash) } async function testSubtleSHA1() { // I'm using a rather crude method of progressive enhancement, because // some browsers that have crypto.subtle.digest don't actually implement SHA-1. try { const hash = await subtleSHA1(new Uint8Array([])); if (hash === 'da39a3ee5e6b4b0d3255bfef95601890afd80709') return true } catch (_) { // no bother } return false } // Extract 1-bit assume-valid, 1-bit extended flag, 2-bit merge state flag, 12-bit path length flag function parseCacheEntryFlags(bits) { return { assumeValid: Boolean(bits & 0b1000000000000000), extended: Boolean(bits & 0b0100000000000000), stage: (bits & 0b0011000000000000) >> 12, nameLength: bits & 0b0000111111111111, } } function renderCacheEntryFlags(entry) { const flags = entry.flags; // 1-bit extended flag (must be zero in version 2) flags.extended = false; // 12-bit name length if the length is less than 0xFFF; otherwise 0xFFF // is stored in this field. flags.nameLength = Math.min(Buffer.from(entry.path).length, 0xfff); return ( (flags.assumeValid ? 0b1000000000000000 : 0) + (flags.extended ? 
0b0100000000000000 : 0) + ((flags.stage & 0b11) << 12) + (flags.nameLength & 0b111111111111) ) } class GitIndex { /*:: _entries: Map _dirty: boolean // Used to determine if index needs to be saved to filesystem */ constructor(entries) { this._dirty = false; this._entries = entries || new Map(); } static async from(buffer) { if (Buffer.isBuffer(buffer)) { return GitIndex.fromBuffer(buffer) } else if (buffer === null) { return new GitIndex(null) } else { throw new InternalError('invalid type passed to GitIndex.from') } } static async fromBuffer(buffer) { // Verify shasum const shaComputed = await shasum(buffer.slice(0, -20)); const shaClaimed = buffer.slice(-20).toString('hex'); if (shaClaimed !== shaComputed) { throw new InternalError( `Invalid checksum in GitIndex buffer: expected ${shaClaimed} but saw ${shaComputed}` ) } const reader = new BufferCursor(buffer); const _entries = new Map(); const magic = reader.toString('utf8', 4); if (magic !== 'DIRC') { throw new InternalError(`Invalid dircache magic file number: ${magic}`) } const version = reader.readUInt32BE(); if (version !== 2) { throw new InternalError(`Unsupported dircache version: ${version}`) } const numEntries = reader.readUInt32BE(); let i = 0; while (!reader.eof() && i < numEntries) { const entry = {}; entry.ctimeSeconds = reader.readUInt32BE(); entry.ctimeNanoseconds = reader.readUInt32BE(); entry.mtimeSeconds = reader.readUInt32BE(); entry.mtimeNanoseconds = reader.readUInt32BE(); entry.dev = reader.readUInt32BE(); entry.ino = reader.readUInt32BE(); entry.mode = reader.readUInt32BE(); entry.uid = reader.readUInt32BE(); entry.gid = reader.readUInt32BE(); entry.size = reader.readUInt32BE(); entry.oid = reader.slice(20).toString('hex'); const flags = reader.readUInt16BE(); entry.flags = parseCacheEntryFlags(flags); // TODO: handle if (version === 3 && entry.flags.extended) const pathlength = buffer.indexOf(0, reader.tell() + 1) - reader.tell(); if (pathlength < 1) { throw new InternalError(`Got a path length of: ${pathlength}`) } // TODO: handle pathnames larger than 12 bits entry.path = reader.toString('utf8', pathlength); // Prevent malicious paths like "..\foo" if (entry.path.includes('..\\') || entry.path.includes('../')) { throw new UnsafeFilepathError(entry.path) } // The next bit is awkward. We expect 1 to 8 null characters // such that the total size of the entry is a multiple of 8 bytes. // (Hence subtract 12 bytes for the header.) let padding = 8 - ((reader.tell() - 12) % 8); if (padding === 0) padding = 8; while (padding--) { const tmp = reader.readUInt8(); if (tmp !== 0) { throw new InternalError( `Expected 1-8 null characters but got '${tmp}' after ${entry.path}` ) } else if (reader.eof()) { throw new InternalError('Unexpected end of file') } } // end of awkward part _entries.set(entry.path, entry); i++; } return new GitIndex(_entries) } get entries() { return [...this._entries.values()].sort(comparePath) } get entriesMap() { return this._entries } *[Symbol.iterator]() { for (const entry of this.entries) { yield entry; } } insert({ filepath, stats, oid }) { stats = normalizeStats(stats); const bfilepath = Buffer.from(filepath); const entry = { ctimeSeconds: stats.ctimeSeconds, ctimeNanoseconds: stats.ctimeNanoseconds, mtimeSeconds: stats.mtimeSeconds, mtimeNanoseconds: stats.mtimeNanoseconds, dev: stats.dev, ino: stats.ino, // We provide a fallback value for `mode` here because not all fs // implementations assign it, but we use it in GitTree. 
// '100644' is for a "regular non-executable file" mode: stats.mode || 0o100644, uid: stats.uid, gid: stats.gid, size: stats.size, path: filepath, oid: oid, flags: { assumeValid: false, extended: false, stage: 0, nameLength: bfilepath.length < 0xfff ? bfilepath.length : 0xfff, }, }; this._entries.set(entry.path, entry); this._dirty = true; } delete({ filepath }) { if (this._entries.has(filepath)) { this._entries.delete(filepath); } else { for (const key of this._entries.keys()) { if (key.startsWith(filepath + '/')) { this._entries.delete(key); } } } this._dirty = true; } clear() { this._entries.clear(); this._dirty = true; } has({ filepath }) { return this._entries.has(filepath) } render() { return this.entries .map(entry => `${entry.mode.toString(8)} ${entry.oid} ${entry.path}`) .join('\n') } async toObject() { const header = Buffer.alloc(12); const writer = new BufferCursor(header); writer.write('DIRC', 4, 'utf8'); writer.writeUInt32BE(2); writer.writeUInt32BE(this.entries.length); const body = Buffer.concat( this.entries.map(entry => { const bpath = Buffer.from(entry.path); // the fixed length + the filename + at least one null char => align by 8 const length = Math.ceil((62 + bpath.length + 1) / 8) * 8; const written = Buffer.alloc(length); const writer = new BufferCursor(written); const stat = normalizeStats(entry); writer.writeUInt32BE(stat.ctimeSeconds); writer.writeUInt32BE(stat.ctimeNanoseconds); writer.writeUInt32BE(stat.mtimeSeconds); writer.writeUInt32BE(stat.mtimeNanoseconds); writer.writeUInt32BE(stat.dev); writer.writeUInt32BE(stat.ino); writer.writeUInt32BE(stat.mode); writer.writeUInt32BE(stat.uid); writer.writeUInt32BE(stat.gid); writer.writeUInt32BE(stat.size); writer.write(entry.oid, 20, 'hex'); writer.writeUInt16BE(renderCacheEntryFlags(entry)); writer.write(entry.path, bpath.length, 'utf8'); return written }) ); const main = Buffer.concat([header, body]); const sum = await shasum(main); return Buffer.concat([main, Buffer.from(sum, 'hex')]) } } function compareStats(entry, stats) { // Comparison based on the description in Paragraph 4 of // https://www.kernel.org/pub/software/scm/git/docs/technical/racy-git.txt const e = normalizeStats(entry); const s = normalizeStats(stats); const staleness = e.mode !== s.mode || e.mtimeSeconds !== s.mtimeSeconds || e.ctimeSeconds !== s.ctimeSeconds || e.uid !== s.uid || e.gid !== s.gid || e.ino !== s.ino || e.size !== s.size; return staleness } // import LockManager from 'travix-lock-manager' // import Lock from '../utils.js' // const lm = new LockManager() let lock = null; const IndexCache = Symbol('IndexCache'); function createCache() { return { map: new Map(), stats: new Map(), } } async function updateCachedIndexFile(fs, filepath, cache) { const stat = await fs.lstat(filepath); const rawIndexFile = await fs.read(filepath); const index = await GitIndex.from(rawIndexFile); // cache the GitIndex object so we don't need to re-read it every time. cache.map.set(filepath, index); // Save the stat data for the index so we know whether the cached file is stale (modified by an outside process). 
cache.stats.set(filepath, stat); } // Determine whether our copy of the index file is stale async function isIndexStale(fs, filepath, cache) { const savedStats = cache.stats.get(filepath); if (savedStats === undefined) return true const currStats = await fs.lstat(filepath); if (savedStats === null) return false if (currStats === null) return false return compareStats(savedStats, currStats) } class GitIndexManager { /** * * @param {object} opts * @param {import('../models/FileSystem.js').FileSystem} opts.fs * @param {string} opts.gitdir * @param {object} opts.cache * @param {function(GitIndex): any} closure */ static async acquire({ fs, gitdir, cache }, closure) { if (!cache[IndexCache]) cache[IndexCache] = createCache(); const filepath = `${gitdir}/index`; if (lock === null) lock = new AsyncLock({ maxPending: Infinity }); let result; await lock.acquire(filepath, async () => { // Acquire a file lock while we're reading the index // to make sure other processes aren't writing to it // simultaneously, which could result in a corrupted index. // const fileLock = await Lock(filepath) if (await isIndexStale(fs, filepath, cache[IndexCache])) { await updateCachedIndexFile(fs, filepath, cache[IndexCache]); } const index = cache[IndexCache].map.get(filepath); result = await closure(index); if (index._dirty) { // Acquire a file lock while we're writing the index file // let fileLock = await Lock(filepath) const buffer = await index.toObject(); await fs.write(filepath, buffer); // Update cached stat value cache[IndexCache].stats.set(filepath, await fs.lstat(filepath)); index._dirty = false; } }); return result } } function basename(path) { const last = Math.max(path.lastIndexOf('/'), path.lastIndexOf('\\')); if (last > -1) { path = path.slice(last + 1); } return path } function dirname(path) { const last = Math.max(path.lastIndexOf('/'), path.lastIndexOf('\\')); if (last === -1) return '.' if (last === 0) return '/' return path.slice(0, last) } /*:: type Node = { type: string, fullpath: string, basename: string, metadata: Object, // mode, oid parent?: Node, children: Array } */ function flatFileListToDirectoryStructure(files) { const inodes = new Map(); const mkdir = function(name) { if (!inodes.has(name)) { const dir = { type: 'tree', fullpath: name, basename: basename(name), metadata: {}, children: [], }; inodes.set(name, dir); // This recursively generates any missing parent folders. // We do it after we've added the inode to the set so that // we don't recurse infinitely trying to create the root '.' dirname. dir.parent = mkdir(dirname(name)); if (dir.parent && dir.parent !== dir) dir.parent.children.push(dir); } return inodes.get(name) }; const mkfile = function(name, metadata) { if (!inodes.has(name)) { const file = { type: 'blob', fullpath: name, basename: basename(name), metadata: metadata, // This recursively generates any missing parent folders. 
parent: mkdir(dirname(name)), children: [], }; if (file.parent) file.parent.children.push(file); inodes.set(name, file); } return inodes.get(name) }; mkdir('.'); for (const file of files) { mkfile(file.path, file); } return inodes } /** * * @param {number} mode */ function mode2type(mode) { // prettier-ignore switch (mode) { case 0o040000: return 'tree' case 0o100644: return 'blob' case 0o100755: return 'blob' case 0o120000: return 'blob' case 0o160000: return 'commit' } throw new InternalError(`Unexpected GitTree entry mode: ${mode.toString(8)}`) } class GitWalkerIndex { constructor({ fs, gitdir, cache }) { this.treePromise = GitIndexManager.acquire( { fs, gitdir, cache }, async function(index) { return flatFileListToDirectoryStructure(index.entries) } ); const walker = this; this.ConstructEntry = class StageEntry { constructor(fullpath) { this._fullpath = fullpath; this._type = false; this._mode = false; this._stat = false; this._oid = false; } async type() { return walker.type(this) } async mode() { return walker.mode(this) } async stat() { return walker.stat(this) } async content() { return walker.content(this) } async oid() { return walker.oid(this) } }; } async readdir(entry) { const filepath = entry._fullpath; const tree = await this.treePromise; const inode = tree.get(filepath); if (!inode) return null if (inode.type === 'blob') return null if (inode.type !== 'tree') { throw new Error(`ENOTDIR: not a directory, scandir '${filepath}'`) } const names = inode.children.map(inode => inode.fullpath); names.sort(compareStrings); return names } async type(entry) { if (entry._type === false) { await entry.stat(); } return entry._type } async mode(entry) { if (entry._mode === false) { await entry.stat(); } return entry._mode } async stat(entry) { if (entry._stat === false) { const tree = await this.treePromise; const inode = tree.get(entry._fullpath); if (!inode) { throw new Error( `ENOENT: no such file or directory, lstat '${entry._fullpath}'` ) } const stats = inode.type === 'tree' ? {} : normalizeStats(inode.metadata); entry._type = inode.type === 'tree' ? 'tree' : mode2type(stats.mode); entry._mode = stats.mode; if (inode.type === 'tree') { entry._stat = undefined; } else { entry._stat = stats; } } return entry._stat } async content(_entry) { // Cannot get content for an index entry } async oid(entry) { if (entry._oid === false) { const tree = await this.treePromise; const inode = tree.get(entry._fullpath); entry._oid = inode.metadata.oid; } return entry._oid } } // This is part of an elaborate system to facilitate code-splitting / tree-shaking. // commands/walk.js can depend on only this, and the actual Walker classes exported // can be opaque - only having a single property (this symbol) that is not enumerable, // and thus the constructor can be passed as an argument to walk while being "unusable" // outside of it. 
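// A sketch of how that opacity works in practice (an editorial example, not part of the library code). STAGE(), defined below, returns a frozen object whose only property is keyed by the non-enumerable GitWalkSymbol, so callers see an empty-looking token while internal code holding the symbol can recover the walker factory:
//
//   const walker = STAGE()
//   Object.keys(walker) // => [] — the symbol-keyed property is hidden
//   const stage = walker[GitWalkSymbol]({ fs, gitdir, cache }) // => a GitWalkerIndex
//
// (Here `fs`, `gitdir`, and `cache` stand for the usual filesystem client, '.git' directory path, and cache object used throughout this file.)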
const GitWalkSymbol = Symbol('GitWalkSymbol'); // @ts-check /** * @returns {Walker} */ function STAGE() { const o = Object.create(null); Object.defineProperty(o, GitWalkSymbol, { value: function({ fs, gitdir, cache }) { return new GitWalkerIndex({ fs, gitdir, cache }) }, }); Object.freeze(o); return o } // @ts-check class NotFoundError extends BaseError { /** * @param {string} what */ constructor(what) { super(`Could not find ${what}.`); this.code = this.name = NotFoundError.code; this.data = { what }; } } /** @type {'NotFoundError'} */ NotFoundError.code = 'NotFoundError'; class ObjectTypeError extends BaseError { /** * @param {string} oid * @param {'blob'|'commit'|'tag'|'tree'} actual * @param {'blob'|'commit'|'tag'|'tree'} expected * @param {string} [filepath] */ constructor(oid, actual, expected, filepath) { super( `Object ${oid} ${ filepath ? `at ${filepath} ` : '' }was anticipated to be a ${expected} but it is a ${actual}.` ); this.code = this.name = ObjectTypeError.code; this.data = { oid, actual, expected, filepath }; } } /** @type {'ObjectTypeError'} */ ObjectTypeError.code = 'ObjectTypeError'; class InvalidOidError extends BaseError { /** * @param {string} value */ constructor(value) { super(`Expected a 40-char hex object id but saw "${value}".`); this.code = this.name = InvalidOidError.code; this.data = { value }; } } /** @type {'InvalidOidError'} */ InvalidOidError.code = 'InvalidOidError'; class NoRefspecError extends BaseError { /** * @param {string} remote */ constructor(remote) { super(`Could not find a fetch refspec for remote "${remote}". Make sure the config file has an entry like the following: [remote "${remote}"] \tfetch = +refs/heads/*:refs/remotes/origin/* `); this.code = this.name = NoRefspecError.code; this.data = { remote }; } } /** @type {'NoRefspecError'} */ NoRefspecError.code = 'NoRefspecError'; class GitPackedRefs { constructor(text) { this.refs = new Map(); this.parsedConfig = []; if (text) { let key = null; this.parsedConfig = text .trim() .split('\n') .map(line => { if (/^\s*#/.test(line)) { return { line, comment: true } } const i = line.indexOf(' '); if (line.startsWith('^')) { // This is an oid for the commit associated with the annotated tag immediately preceding this line. 
// Trim off the '^' const value = line.slice(1); // The tagname^{} syntax is based on the output of `git show-ref --tags -d` this.refs.set(key + '^{}', value); return { line, ref: key, peeled: value } } else { // This is an oid followed by the ref name const value = line.slice(0, i); key = line.slice(i + 1); this.refs.set(key, value); return { line, ref: key, oid: value } } }); } return this } static from(text) { return new GitPackedRefs(text) } delete(ref) { this.parsedConfig = this.parsedConfig.filter(entry => entry.ref !== ref); this.refs.delete(ref); } toString() { return this.parsedConfig.map(({ line }) => line).join('\n') + '\n' } } class GitRefSpec { constructor({ remotePath, localPath, force, matchPrefix }) { Object.assign(this, { remotePath, localPath, force, matchPrefix, }); } static from(refspec) { const [ forceMatch, remotePath, remoteGlobMatch, localPath, localGlobMatch, ] = refspec.match(/^(\+?)(.*?)(\*?):(.*?)(\*?)$/).slice(1); const force = forceMatch === '+'; const remoteIsGlob = remoteGlobMatch === '*'; const localIsGlob = localGlobMatch === '*'; // validate // TODO: Make this check more nuanced, and depend on whether this is a fetch refspec or a push refspec if (remoteIsGlob !== localIsGlob) { throw new InternalError('Invalid refspec') } return new GitRefSpec({ remotePath, localPath, force, matchPrefix: remoteIsGlob, }) // TODO: We need to run resolveRef on both paths to expand them to their full name. } translate(remoteBranch) { if (this.matchPrefix) { if (remoteBranch.startsWith(this.remotePath)) { return this.localPath + remoteBranch.replace(this.remotePath, '') } } else { if (remoteBranch === this.remotePath) return this.localPath } return null } reverseTranslate(localBranch) { if (this.matchPrefix) { if (localBranch.startsWith(this.localPath)) { return this.remotePath + localBranch.replace(this.localPath, '') } } else { if (localBranch === this.localPath) return this.remotePath } return null } } class GitRefSpecSet { constructor(rules = []) { this.rules = rules; } static from(refspecs) { const rules = []; for (const refspec of refspecs) { rules.push(GitRefSpec.from(refspec)); // might throw } return new GitRefSpecSet(rules) } add(refspec) { const rule = GitRefSpec.from(refspec); // might throw this.rules.push(rule); } translate(remoteRefs) { const result = []; for (const rule of this.rules) { for (const remoteRef of remoteRefs) { const localRef = rule.translate(remoteRef); if (localRef) { result.push([remoteRef, localRef]); } } } return result } translateOne(remoteRef) { let result = null; for (const rule of this.rules) { const localRef = rule.translate(remoteRef); if (localRef) { result = localRef; } } return result } localNamespaces() { return this.rules .filter(rule => rule.matchPrefix) .map(rule => rule.localPath.replace(/\/$/, '')) } } function compareRefNames(a, b) { // https://stackoverflow.com/a/40355107/2168416 const _a = a.replace(/\^\{\}$/, ''); const _b = b.replace(/\^\{\}$/, ''); const tmp = -(_a < _b) || +(_a > _b); if (tmp === 0) { return a.endsWith('^{}') ? 1 : -1 } return tmp } function normalizePath(path) { return path .replace(/\/\.\//g, '/') // Replace '/./' with '/' .replace(/\/{2,}/g, '/') // Replace consecutive '/' .replace(/^\/\.$/, '/') // if path === '/.' return '/' .replace(/^\.\/$/, '.') // if path === './' return '.' .replace(/^\.\//, '') // Remove leading './' .replace(/\/\.$/, '') // Remove trailing '/.' .replace(/(.+)\/$/, '$1') // Remove trailing '/' .replace(/^$/, '.') // if path === '' return '.' 
} // For some reason path.posix.join is undefined in webpack function join(...parts) { return normalizePath(parts.map(normalizePath).join('/')) } // This is straight from parse_unit_factor in config.c of canonical git const num = val => { val = val.toLowerCase(); let n = parseInt(val); if (val.endsWith('k')) n *= 1024; if (val.endsWith('m')) n *= 1024 * 1024; if (val.endsWith('g')) n *= 1024 * 1024 * 1024; return n }; // This is straight from git_parse_maybe_bool_text in config.c of canonical git const bool = val => { val = val.trim().toLowerCase(); if (val === 'true' || val === 'yes' || val === 'on') return true if (val === 'false' || val === 'no' || val === 'off') return false throw Error( `Expected 'true', 'false', 'yes', 'no', 'on', or 'off', but got ${val}` ) }; const schema = { core: { filemode: bool, bare: bool, logallrefupdates: bool, symlinks: bool, ignorecase: bool, bigFileThreshold: num, }, }; // https://git-scm.com/docs/git-config#_syntax // section starts with [ and ends with ] // section is alphanumeric (ASCII) with - and . // section is case insensitive // subsection is optional // subsection is specified after section and one or more spaces // subsection is specified between double quotes const SECTION_LINE_REGEX = /^\[([A-Za-z0-9-.]+)(?: "(.*)")?\]$/; const SECTION_REGEX = /^[A-Za-z0-9-.]+$/; // variable lines contain a name, an equal sign and then a value // variable lines can also only contain a name (the implicit value is a boolean true) // variable name is alphanumeric (ASCII) with - // variable name starts with an alphabetic character // variable name is case insensitive const VARIABLE_LINE_REGEX = /^([A-Za-z][A-Za-z-]*)(?: *= *(.*))?$/; const VARIABLE_NAME_REGEX = /^[A-Za-z][A-Za-z-]*$/; const VARIABLE_VALUE_COMMENT_REGEX = /^(.*?)( *[#;].*)$/; const extractSectionLine = line => { const matches = SECTION_LINE_REGEX.exec(line); if (matches != null) { const [section, subsection] = matches.slice(1); return [section, subsection] } return null }; const extractVariableLine = line => { const matches = VARIABLE_LINE_REGEX.exec(line); if (matches != null) { const [name, rawValue = 'true'] = matches.slice(1); const valueWithoutComments = removeComments(rawValue); const valueWithoutQuotes = removeQuotes(valueWithoutComments); return [name, valueWithoutQuotes] } return null }; const removeComments = rawValue => { const commentMatches = VARIABLE_VALUE_COMMENT_REGEX.exec(rawValue); if (commentMatches == null) { return rawValue } const [valueWithoutComment, comment] = commentMatches.slice(1); // if odd number of quotes before and after comment => comment is escaped if ( hasOddNumberOfQuotes(valueWithoutComment) && hasOddNumberOfQuotes(comment) ) { return `${valueWithoutComment}${comment}` } return valueWithoutComment }; const hasOddNumberOfQuotes = text => { const numberOfQuotes = (text.match(/(?:^|[^\\])"/g) || []).length; return numberOfQuotes % 2 !== 0 }; const removeQuotes = text => { return text.split('').reduce((newText, c, idx, text) => { const isQuote = c === '"' && text[idx - 1] !== '\\'; const isEscapeForQuote = c === '\\' && text[idx + 1] === '"'; if (isQuote || isEscapeForQuote) { return newText } return newText + c }, '') }; const lower = text => { return text != null ? text.toLowerCase() : null }; const getPath = (section, subsection, name) => { return [lower(section), subsection, lower(name)] .filter(a => a != null) .join('.') }; const findLastIndex = (array, callback) => { return array.reduce((lastIndex, item, index) => { return callback(item) ? 
index : lastIndex }, -1) }; // Note: there are a LOT of edge cases that aren't covered (e.g. keys in sections that also // have subsections, [include] directives, etc.) class GitConfig { constructor(text) { let section = null; let subsection = null; this.parsedConfig = text.split('\n').map(line => { let name = null; let value = null; const trimmedLine = line.trim(); const extractedSection = extractSectionLine(trimmedLine); const isSection = extractedSection != null; if (isSection) { ;[section, subsection] = extractedSection; } else { const extractedVariable = extractVariableLine(trimmedLine); const isVariable = extractedVariable != null; if (isVariable) { ;[name, value] = extractedVariable; } } const path = getPath(section, subsection, name); return { line, isSection, section, subsection, name, value, path } }); } static from(text) { return new GitConfig(text) } async get(path, getall = false) { const allValues = this.parsedConfig .filter(config => config.path === path.toLowerCase()) .map(({ section, name, value }) => { const fn = schema[section] && schema[section][name]; return fn ? fn(value) : value }); return getall ? allValues : allValues.pop() } async getall(path) { return this.get(path, true) } async getSubsections(section) { return this.parsedConfig .filter(config => config.section === section && config.isSection) .map(config => config.subsection) } async deleteSection(section, subsection) { this.parsedConfig = this.parsedConfig.filter( config => !(config.section === section && config.subsection === subsection) ); } async append(path, value) { return this.set(path, value, true) } async set(path, value, append = false) { const configIndex = findLastIndex( this.parsedConfig, config => config.path === path.toLowerCase() ); if (value == null) { if (configIndex !== -1) { this.parsedConfig.splice(configIndex, 1); } } else { if (configIndex !== -1) { const config = this.parsedConfig[configIndex]; const modifiedConfig = Object.assign({}, config, { value, modified: true, }); if (append) { this.parsedConfig.splice(configIndex + 1, 0, modifiedConfig); } else { this.parsedConfig[configIndex] = modifiedConfig; } } else { const pathSegments = path.split('.'); const section = pathSegments.shift().toLowerCase(); const name = pathSegments.pop(); const subsection = pathSegments.length ? pathSegments.join('.').toLowerCase() : undefined; const sectionPath = subsection ? section + '.' + subsection : section; const sectionIndex = this.parsedConfig.findIndex( config => config.path === sectionPath ); const newConfig = { section, subsection, name, value, modified: true, path: getPath(section, subsection, name), }; if (SECTION_REGEX.test(section) && VARIABLE_NAME_REGEX.test(name)) { if (sectionIndex >= 0) { // Reuse existing section this.parsedConfig.splice(sectionIndex + 1, 0, newConfig); } else { // Add a new section const newSection = { section, subsection, modified: true, path: getPath(section, subsection, null), }; this.parsedConfig.push(newSection, newConfig); } } } } } toString() { return this.parsedConfig .map(({ line, section, subsection, name, value, modified = false }) => { if (!modified) { return line } if (name != null && value != null) { return `\t${name} = ${value}` } if (subsection != null) { return `[${section} "${subsection}"]` } return `[${section}]` }) .join('\n') } } class GitConfigManager { static async get({ fs, gitdir }) { // We can improve efficiency later if needed. 
// TODO: read from full list of git config files const text = await fs.read(`${gitdir}/config`, { encoding: 'utf8' }); return GitConfig.from(text) } static async save({ fs, gitdir, config }) { // We can improve efficiency later if needed. // TODO: handle saving to the correct global/user/repo location await fs.write(`${gitdir}/config`, config.toString(), { encoding: 'utf8', }); } } // This is a convenience wrapper for reading and writing files in the 'refs' directory. // @see https://git-scm.com/docs/git-rev-parse.html#_specifying_revisions const refpaths = ref => [ `${ref}`, `refs/${ref}`, `refs/tags/${ref}`, `refs/heads/${ref}`, `refs/remotes/${ref}`, `refs/remotes/${ref}/HEAD`, ]; // @see https://git-scm.com/docs/gitrepository-layout const GIT_FILES = ['config', 'description', 'index', 'shallow', 'commondir']; class GitRefManager { static async updateRemoteRefs({ fs, gitdir, remote, refs, symrefs, tags, refspecs = undefined, prune = false, pruneTags = false, }) { // Validate input for (const value of refs.values()) { if (!value.match(/[0-9a-f]{40}/)) { throw new InvalidOidError(value) } } const config = await GitConfigManager.get({ fs, gitdir }); if (!refspecs) { refspecs = await config.getall(`remote.${remote}.fetch`); if (refspecs.length === 0) { throw new NoRefspecError(remote) } // There's some interesting behavior with HEAD that doesn't follow the refspec. refspecs.unshift(`+HEAD:refs/remotes/${remote}/HEAD`); } const refspec = GitRefSpecSet.from(refspecs); const actualRefsToWrite = new Map(); // Delete all current tags if the pruneTags argument is true. if (pruneTags) { const tags = await GitRefManager.listRefs({ fs, gitdir, filepath: 'refs/tags', }); await GitRefManager.deleteRefs({ fs, gitdir, refs: tags.map(tag => `refs/tags/${tag}`), }); } // Add all tags if the fetch tags argument is true. if (tags) { for (const serverRef of refs.keys()) { if (serverRef.startsWith('refs/tags') && !serverRef.endsWith('^{}')) { // Git's behavior is to only fetch tags that do not conflict with tags already present. if (!(await GitRefManager.exists({ fs, gitdir, ref: serverRef }))) { // Always use the object id of the tag itself, and not the peeled object id. const oid = refs.get(serverRef); actualRefsToWrite.set(serverRef, oid); } } } } // Combine refs and symrefs giving symrefs priority const refTranslations = refspec.translate([...refs.keys()]); for (const [serverRef, translatedRef] of refTranslations) { const value = refs.get(serverRef); actualRefsToWrite.set(translatedRef, value); } const symrefTranslations = refspec.translate([...symrefs.keys()]); for (const [serverRef, translatedRef] of symrefTranslations) { const value = symrefs.get(serverRef); const symtarget = refspec.translateOne(value); if (symtarget) { actualRefsToWrite.set(translatedRef, `ref: ${symtarget}`); } } // If `prune` argument is true, clear out the existing local refspec roots const pruned = []; if (prune) { for (const filepath of refspec.localNamespaces()) { const refs = ( await GitRefManager.listRefs({ fs, gitdir, filepath, }) ).map(file => `${filepath}/${file}`); for (const ref of refs) { if (!actualRefsToWrite.has(ref)) { pruned.push(ref); } } } if (pruned.length > 0) { await GitRefManager.deleteRefs({ fs, gitdir, refs: pruned }); } } // Update files // TODO: For large repos with a history of thousands of pull requests // (i.e. gitlab-ce) it would be vastly more efficient to write them // to .git/packed-refs. 
// The trick is to make sure we a) don't write a packed ref that is // already shadowed by a loose ref and b) don't lose any refs already // in packed-refs. Doing this efficiently may be difficult. A // solution that might work is // a) load the current packed-refs file // b) add actualRefsToWrite, overriding the existing values if present // c) enumerate all the loose refs currently in .git/refs/remotes/${remote} // d) overwrite their value with the new value. // Examples of refs we need to avoid writing in loose format for efficiency's sake // are .git/refs/remotes/origin/refs/remotes/remote_mirror_3059 // and .git/refs/remotes/origin/refs/merge-requests for (const [key, value] of actualRefsToWrite) { await fs.write(join(gitdir, key), `${value.trim()}\n`, 'utf8'); } return { pruned } } // TODO: make this less crude? static async writeRef({ fs, gitdir, ref, value }) { // Validate input if (!value.match(/[0-9a-f]{40}/)) { throw new InvalidOidError(value) } await fs.write(join(gitdir, ref), `${value.trim()}\n`, 'utf8'); } static async writeSymbolicRef({ fs, gitdir, ref, value }) { await fs.write(join(gitdir, ref), 'ref: ' + `${value.trim()}\n`, 'utf8'); } static async deleteRef({ fs, gitdir, ref }) { return GitRefManager.deleteRefs({ fs, gitdir, refs: [ref] }) } static async deleteRefs({ fs, gitdir, refs }) { // Delete regular ref await Promise.all(refs.map(ref => fs.rm(join(gitdir, ref)))); // Delete any packed ref let text = await fs.read(`${gitdir}/packed-refs`, { encoding: 'utf8' }); const packed = GitPackedRefs.from(text); const beforeSize = packed.refs.size; for (const ref of refs) { if (packed.refs.has(ref)) { packed.delete(ref); } } if (packed.refs.size < beforeSize) { text = packed.toString(); await fs.write(`${gitdir}/packed-refs`, text, { encoding: 'utf8' }); } } /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {string} args.gitdir * @param {string} args.ref * @param {number} [args.depth] * @returns {Promise<string>} */ static async resolve({ fs, gitdir, ref, depth = undefined }) { if (depth !== undefined) { depth--; if (depth === -1) { return ref } } let sha; // Is it a ref pointer? if (ref.startsWith('ref: ')) { ref = ref.slice('ref: '.length); return GitRefManager.resolve({ fs, gitdir, ref, depth }) } // Is it a complete and valid SHA? if (ref.length === 40 && /[0-9a-f]{40}/.test(ref)) { return ref } // We need to alternate between the file system and the packed-refs const packedMap = await GitRefManager.packedRefs({ fs, gitdir }); // Look in all the proper paths, in this order const allpaths = refpaths(ref).filter(p => !GIT_FILES.includes(p)); // exclude git system files (#709) for (const ref of allpaths) { sha = (await fs.read(`${gitdir}/${ref}`, { encoding: 'utf8' })) || packedMap.get(ref); if (sha) { return GitRefManager.resolve({ fs, gitdir, ref: sha.trim(), depth }) } } // Do we give up? throw new NotFoundError(ref) } static async exists({ fs, gitdir, ref }) { try { await GitRefManager.expand({ fs, gitdir, ref }); return true } catch (err) { return false } } static async expand({ fs, gitdir, ref }) { // Is it a complete and valid SHA? 
if (ref.length === 40 && /[0-9a-f]{40}/.test(ref)) { return ref } // We need to alternate between the file system and the packed-refs const packedMap = await GitRefManager.packedRefs({ fs, gitdir }); // Look in all the proper paths, in this order const allpaths = refpaths(ref); for (const ref of allpaths) { if (await fs.exists(`${gitdir}/${ref}`)) return ref if (packedMap.has(ref)) return ref } // Do we give up? throw new NotFoundError(ref) } static async expandAgainstMap({ ref, map }) { // Look in all the proper paths, in this order const allpaths = refpaths(ref); for (const ref of allpaths) { if (await map.has(ref)) return ref } // Do we give up? throw new NotFoundError(ref) } static resolveAgainstMap({ ref, fullref = ref, depth = undefined, map }) { if (depth !== undefined) { depth--; if (depth === -1) { return { fullref, oid: ref } } } // Is it a ref pointer? if (ref.startsWith('ref: ')) { ref = ref.slice('ref: '.length); return GitRefManager.resolveAgainstMap({ ref, fullref, depth, map }) } // Is it a complete and valid SHA? if (ref.length === 40 && /[0-9a-f]{40}/.test(ref)) { return { fullref, oid: ref } } // Look in all the proper paths, in this order const allpaths = refpaths(ref); for (const ref of allpaths) { const sha = map.get(ref); if (sha) { return GitRefManager.resolveAgainstMap({ ref: sha.trim(), fullref: ref, depth, map, }) } } // Do we give up? throw new NotFoundError(ref) } static async packedRefs({ fs, gitdir }) { const text = await fs.read(`${gitdir}/packed-refs`, { encoding: 'utf8' }); const packed = GitPackedRefs.from(text); return packed.refs } // List all the refs that match the `filepath` prefix static async listRefs({ fs, gitdir, filepath }) { const packedMap = GitRefManager.packedRefs({ fs, gitdir }); let files = null; try { files = await fs.readdirDeep(`${gitdir}/${filepath}`); files = files.map(x => x.replace(`${gitdir}/${filepath}/`, '')); } catch (err) { files = []; } for (let key of (await packedMap).keys()) { // filter by prefix if (key.startsWith(filepath)) { // remove prefix key = key.replace(filepath + '/', ''); // Don't include duplicates; the loose files have precedence anyway if (!files.includes(key)) { files.push(key); } } } // since we just appended things onto an array, we need to sort them now files.sort(compareRefNames); return files } static async listBranches({ fs, gitdir, remote }) { if (remote) { return GitRefManager.listRefs({ fs, gitdir, filepath: `refs/remotes/${remote}`, }) } else { return GitRefManager.listRefs({ fs, gitdir, filepath: `refs/heads` }) } } static async listTags({ fs, gitdir }) { const tags = await GitRefManager.listRefs({ fs, gitdir, filepath: `refs/tags`, }); return tags.filter(x => !x.endsWith('^{}')) } } function compareTreeEntryPath(a, b) { // Git sorts tree entries as if there is a trailing slash on directory names. return compareStrings(appendSlashIfDir(a), appendSlashIfDir(b)) } function appendSlashIfDir(entry) { return entry.mode === '040000' ? 
entry.path + '/' : entry.path } /** * * @typedef {Object} TreeEntry * @property {string} mode - the 6 digit octal mode * @property {string} path - the name of the file or directory * @property {string} oid - the SHA-1 object id of the blob or tree * @property {'commit'|'blob'|'tree'} type - the type of object */ function mode2type$1(mode) { // prettier-ignore switch (mode) { case '040000': return 'tree' case '100644': return 'blob' case '100755': return 'blob' case '120000': return 'blob' case '160000': return 'commit' } throw new InternalError(`Unexpected GitTree entry mode: ${mode}`) } function parseBuffer(buffer) { const _entries = []; let cursor = 0; while (cursor < buffer.length) { const space = buffer.indexOf(32, cursor); if (space === -1) { throw new InternalError( `GitTree: Error parsing buffer at byte location ${cursor}: Could not find the next space character.` ) } const nullchar = buffer.indexOf(0, cursor); if (nullchar === -1) { throw new InternalError( `GitTree: Error parsing buffer at byte location ${cursor}: Could not find the next null character.` ) } let mode = buffer.slice(cursor, space).toString('utf8'); if (mode === '40000') mode = '040000'; // makes it line up neater in printed output const type = mode2type$1(mode); const path = buffer.slice(space + 1, nullchar).toString('utf8'); // Prevent malicious git repos from writing to "..\foo" on clone etc if (path.includes('\\') || path.includes('/')) { throw new UnsafeFilepathError(path) } const oid = buffer.slice(nullchar + 1, nullchar + 21).toString('hex'); cursor = nullchar + 21; _entries.push({ mode, path, oid, type }); } return _entries } function limitModeToAllowed(mode) { if (typeof mode === 'number') { mode = mode.toString(8); } // tree if (mode.match(/^0?4.*/)) return '040000' // Directory if (mode.match(/^1006.*/)) return '100644' // Regular non-executable file if (mode.match(/^1007.*/)) return '100755' // Regular executable file if (mode.match(/^120.*/)) return '120000' // Symbolic link if (mode.match(/^160.*/)) return '160000' // Commit (git submodule reference) throw new InternalError(`Could not understand file mode: ${mode}`) } function nudgeIntoShape(entry) { if (!entry.oid && entry.sha) { entry.oid = entry.sha; // Github } entry.mode = limitModeToAllowed(entry.mode); // index if (!entry.type) { entry.type = mode2type$1(entry.mode); // index } return entry } class GitTree { constructor(entries) { if (Buffer.isBuffer(entries)) { this._entries = parseBuffer(entries); } else if (Array.isArray(entries)) { this._entries = entries.map(nudgeIntoShape); } else { throw new InternalError('invalid type passed to GitTree constructor') } // Tree entries are not sorted alphabetically in the usual sense (see `compareTreeEntryPath`) // but it is important later on that these be sorted in the same order as they would be returned from readdir. 
this._entries.sort(comparePath); } static from(tree) { return new GitTree(tree) } render() { return this._entries .map(entry => `${entry.mode} ${entry.type} ${entry.oid} ${entry.path}`) .join('\n') } toObject() { // Adjust the sort order to match git's const entries = [...this._entries]; entries.sort(compareTreeEntryPath); return Buffer.concat( entries.map(entry => { const mode = Buffer.from(entry.mode.replace(/^0/, '')); const space = Buffer.from(' '); const path = Buffer.from(entry.path, 'utf8'); const nullchar = Buffer.from([0]); const oid = Buffer.from(entry.oid, 'hex'); return Buffer.concat([mode, space, path, nullchar, oid]) }) ) } /** * @returns {TreeEntry[]} */ entries() { return this._entries } *[Symbol.iterator]() { for (const entry of this._entries) { yield entry; } } } class GitObject { static wrap({ type, object }) { return Buffer.concat([ Buffer.from(`${type} ${object.byteLength.toString()}\x00`), Buffer.from(object), ]) } static unwrap(buffer) { const s = buffer.indexOf(32); // first space const i = buffer.indexOf(0); // first null value const type = buffer.slice(0, s).toString('utf8'); // get type of object const length = buffer.slice(s + 1, i).toString('utf8'); // get length of object const actualLength = buffer.length - (i + 1); // verify length if (parseInt(length) !== actualLength) { throw new InternalError( `Length mismatch: expected ${length} bytes but got ${actualLength} instead.` ) } return { type, object: Buffer.from(buffer.slice(i + 1)), } } } async function readObjectLoose({ fs, gitdir, oid }) { const source = `objects/${oid.slice(0, 2)}/${oid.slice(2)}`; const file = await fs.read(`${gitdir}/${source}`); if (!file) { return null } return { object: file, format: 'deflated', source } } /** * @param {Buffer} delta * @param {Buffer} source * @returns {Buffer} */ function applyDelta(delta, source) { const reader = new BufferCursor(delta); const sourceSize = readVarIntLE(reader); if (sourceSize !== source.byteLength) { throw new InternalError( `applyDelta expected source buffer to be ${sourceSize} bytes but the provided buffer was ${source.length} bytes` ) } const targetSize = readVarIntLE(reader); let target; const firstOp = readOp(reader, source); // Speed optimization - return raw buffer if it's just a single simple copy if (firstOp.byteLength === targetSize) { target = firstOp; } else { // Otherwise, allocate a fresh buffer and copy the slices into it target = Buffer.alloc(targetSize); const writer = new BufferCursor(target); writer.copy(firstOp); while (!reader.eof()) { writer.copy(readOp(reader, source)); } const tell = writer.tell(); if (targetSize !== tell) { throw new InternalError( `applyDelta expected target buffer to be ${targetSize} bytes but the resulting buffer was ${tell} bytes` ) } } return target } function readVarIntLE(reader) { let result = 0; let shift = 0; let byte = null; do { byte = reader.readUInt8(); result |= (byte & 0b01111111) << shift; shift += 7; } while (byte & 0b10000000) return result } function readCompactLE(reader, flags, size) { let result = 0; let shift = 0; while (size--) { if (flags & 0b00000001) { result |= reader.readUInt8() << shift; } flags >>= 1; shift += 8; } return result } function readOp(reader, source) { /** @type {number} */ const byte = reader.readUInt8(); const COPY = 0b10000000; const OFFS = 0b00001111; const SIZE = 0b01110000; if (byte & COPY) { // copy consists of 4 byte offset, 3 byte size (in LE order) const offset = readCompactLE(reader, byte & OFFS, 4); let size = readCompactLE(reader, (byte & SIZE) >> 4, 3); // Yup. They really did this optimization.
if (size === 0) size = 0x10000; return source.slice(offset, offset + size) } else { // insert return reader.slice(byte) } } // Convert a value to an Async Iterator // This will be easier with async generator functions. function fromValue(value) { let queue = [value]; return { next() { return Promise.resolve({ done: queue.length === 0, value: queue.pop() }) }, return() { queue = []; return {} }, [Symbol.asyncIterator]() { return this }, } } function getIterator(iterable) { if (iterable[Symbol.asyncIterator]) { return iterable[Symbol.asyncIterator]() } if (iterable[Symbol.iterator]) { return iterable[Symbol.iterator]() } if (iterable.next) { return iterable } return fromValue(iterable) } // inspired by 'gartal' but lighter-weight and more battle-tested. class StreamReader { constructor(stream) { this.stream = getIterator(stream); this.buffer = null; this.cursor = 0; this.undoCursor = 0; this.started = false; this._ended = false; this._discardedBytes = 0; } eof() { return this._ended && this.cursor === this.buffer.length } tell() { return this._discardedBytes + this.cursor } async byte() { if (this.eof()) return if (!this.started) await this._init(); if (this.cursor === this.buffer.length) { await this._loadnext(); if (this._ended) return } this._moveCursor(1); return this.buffer[this.undoCursor] } async chunk() { if (this.eof()) return if (!this.started) await this._init(); if (this.cursor === this.buffer.length) { await this._loadnext(); if (this._ended) return } this._moveCursor(this.buffer.length); return this.buffer.slice(this.undoCursor, this.cursor) } async read(n) { if (this.eof()) return if (!this.started) await this._init(); if (this.cursor + n > this.buffer.length) { this._trim(); await this._accumulate(n); } this._moveCursor(n); return this.buffer.slice(this.undoCursor, this.cursor) } async skip(n) { if (this.eof()) return if (!this.started) await this._init(); if (this.cursor + n > this.buffer.length) { this._trim(); await this._accumulate(n); } this._moveCursor(n); } async undo() { this.cursor = this.undoCursor; } async _next() { this.started = true; let { done, value } = await this.stream.next(); if (done) { this._ended = true; } if (value) { value = Buffer.from(value); } return value } _trim() { // Throw away parts of the buffer we don't need anymore // assert(this.cursor <= this.buffer.length) this.buffer = this.buffer.slice(this.undoCursor); this.cursor -= this.undoCursor; this._discardedBytes += this.undoCursor; this.undoCursor = 0; } _moveCursor(n) { this.undoCursor = this.cursor; this.cursor += n; if (this.cursor > this.buffer.length) { this.cursor = this.buffer.length; } } async _accumulate(n) { if (this._ended) return // Expand the buffer until we have N bytes of data // or we've reached the end of the stream const buffers = [this.buffer]; while (this.cursor + n > lengthBuffers(buffers)) { const nextbuffer = await this._next(); if (this._ended) break buffers.push(nextbuffer); } this.buffer = Buffer.concat(buffers); } async _loadnext() { this._discardedBytes += this.buffer.length; this.undoCursor = 0; this.cursor = 0; this.buffer = await this._next(); } async _init() { this.buffer = await this._next(); } } // This helper function helps us postpone concatenating buffers, which // would create intermediate buffer objects. function lengthBuffers(buffers) { return buffers.reduce((acc, buffer) => acc + buffer.length, 0) } // My version of git-list-pack - roughly 15x faster than the original async function listpack(stream, onData) { const 
reader = new StreamReader(stream); let PACK = await reader.read(4); PACK = PACK.toString('utf8'); if (PACK !== 'PACK') { throw new InternalError(`Invalid PACK header '${PACK}'`) } let version = await reader.read(4); version = version.readUInt32BE(0); if (version !== 2) { throw new InternalError(`Invalid packfile version: ${version}`) } let numObjects = await reader.read(4); numObjects = numObjects.readUInt32BE(0); // If (for some godforsaken reason) this is an empty packfile, abort now. if (numObjects < 1) return while (!reader.eof() && numObjects--) { const offset = reader.tell(); const { type, length, ofs, reference } = await parseHeader(reader); const inflator = new pako.Inflate(); while (!inflator.result) { const chunk = await reader.chunk(); if (!chunk) break inflator.push(chunk, false); if (inflator.err) { throw new InternalError(`Pako error: ${inflator.msg}`) } if (inflator.result) { if (inflator.result.length !== length) { throw new InternalError( `Inflated object size is different from that stated in packfile.` ) } // Backtrack parser to where deflated data ends await reader.undo(); await reader.read(chunk.length - inflator.strm.avail_in); const end = reader.tell(); await onData({ data: inflator.result, type, num: numObjects, offset, end, reference, ofs, }); } } } } async function parseHeader(reader) { // Object type is encoded in bits 654 let byte = await reader.byte(); const type = (byte >> 4) & 0b111; // The length encoding gets complicated. // Last four bits of length is encoded in bits 3210 let length = byte & 0b1111; // Whether the next byte is part of the variable-length encoded number // is encoded in bit 7 if (byte & 0b10000000) { let shift = 4; do { byte = await reader.byte(); length |= (byte & 0b01111111) << shift; shift += 7; } while (byte & 0b10000000) } // Handle deltified objects let ofs; let reference; if (type === 6) { let shift = 0; ofs = 0; const bytes = []; do { byte = await reader.byte(); ofs |= (byte & 0b01111111) << shift; shift += 7; bytes.push(byte); } while (byte & 0b10000000) reference = Buffer.from(bytes); } if (type === 7) { const buf = await reader.read(20); reference = buf; } return { type, length, ofs, reference } } /* eslint-env node, browser */ let supportsDecompressionStream = null; async function inflate(buffer) { if (supportsDecompressionStream === null) { supportsDecompressionStream = testDecompressionStream(); } return supportsDecompressionStream ? browserInflate(buffer) : pako.inflate(buffer) } async function browserInflate(buffer) { const ds = new DecompressionStream('deflate'); const d = new Blob([buffer]).stream().pipeThrough(ds); return new Uint8Array(await new Response(d).arrayBuffer()) } function testDecompressionStream() { try { const ds = new DecompressionStream('deflate'); if (ds) return true } catch (_) { // no bother } return false } function decodeVarInt(reader) { const bytes = []; let byte = 0; let multibyte = 0; do { byte = reader.readUInt8(); // We keep bits 6543210 const lastSeven = byte & 0b01111111; bytes.push(lastSeven); // Whether the next byte is part of the variable-length encoded number // is encoded in bit 7 multibyte = byte & 0b10000000; } while (multibyte) // Now that all the bytes are in big-endian order, // alternate shifting the bits left by 7 and OR-ing the next byte. // And... do a weird increment-by-one thing that I don't quite understand. return bytes.reduce((a, b) => ((a + 1) << 7) | b, -1) } // I'm pretty much copying this one from the git C source code, // because it makes no sense.
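// (Two notes of my own, since upstream leaves these unexplained: the
// increment-by-one in decodeVarInt above implements git's offset encoding, which
// adds 1 per continuation byte so multi-byte encodings never overlap shorter ones,
// e.g. bytes [0x81, 0x00] decode to 256 rather than 128. And otherVarIntDecode
// below just finishes a little-endian varint whose low four bits arrived in the
// object header byte: startWith = 0b0101 followed by byte 0b00000011 yields
// 0b0101 | (0b0000011 << 4) = 53.)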
function otherVarIntDecode(reader, startWith) { let result = startWith; let shift = 4; let byte = null; do { byte = reader.readUInt8(); result |= (byte & 0b01111111) << shift; shift += 7; } while (byte & 0b10000000) return result } class GitPackIndex { constructor(stuff) { Object.assign(this, stuff); this.offsetCache = {}; } static async fromIdx({ idx, getExternalRefDelta }) { const reader = new BufferCursor(idx); const magic = reader.slice(4).toString('hex'); // Check for IDX v2 magic number if (magic !== 'ff744f63') { return // undefined } const version = reader.readUInt32BE(); if (version !== 2) { throw new InternalError( `Unable to read version ${version} packfile IDX. (Only version 2 supported)` ) } if (idx.byteLength > 2048 * 1024 * 1024) { throw new InternalError( `To keep implementation simple, I haven't implemented the layer 5 feature needed to support packfiles > 2GB in size.` ) } // Skip over fanout table reader.seek(reader.tell() + 4 * 255); // Get hashes const size = reader.readUInt32BE(); const hashes = []; for (let i = 0; i < size; i++) { const hash = reader.slice(20).toString('hex'); hashes[i] = hash; } reader.seek(reader.tell() + 4 * size); // Skip over CRCs // Get offsets const offsets = new Map(); for (let i = 0; i < size; i++) { offsets.set(hashes[i], reader.readUInt32BE()); } const packfileSha = reader.slice(20).toString('hex'); return new GitPackIndex({ hashes, crcs: {}, offsets, packfileSha, getExternalRefDelta, }) } static async fromPack({ pack, getExternalRefDelta, onProgress }) { const listpackTypes = { 1: 'commit', 2: 'tree', 3: 'blob', 4: 'tag', 6: 'ofs-delta', 7: 'ref-delta', }; const offsetToObject = {}; // Older packfiles do NOT use the shasum of the pack itself, // so it is recommended to just use whatever bytes are in the trailer. // Source: https://github.com/git/git/commit/1190a1acf800acdcfd7569f87ac1560e2d077414 const packfileSha = pack.slice(-20).toString('hex'); const hashes = []; const crcs = {}; const offsets = new Map(); let totalObjectCount = null; let lastPercent = null; await listpack([pack], async ({ data, type, reference, offset, num }) => { if (totalObjectCount === null) totalObjectCount = num; const percent = Math.floor( ((totalObjectCount - num) * 100) / totalObjectCount ); if (percent !== lastPercent) { if (onProgress) { await onProgress({ phase: 'Receiving objects', loaded: totalObjectCount - num, total: totalObjectCount, }); } } lastPercent = percent; // Change type from a number to a meaningful string type = listpackTypes[type]; if (['commit', 'tree', 'blob', 'tag'].includes(type)) { offsetToObject[offset] = { type, offset, }; } else if (type === 'ofs-delta') { offsetToObject[offset] = { type, offset, }; } else if (type === 'ref-delta') { offsetToObject[offset] = { type, offset, }; } }); // We need to know the lengths of the slices to compute the CRCs. const offsetArray = Object.keys(offsetToObject).map(Number); for (const [i, start] of offsetArray.entries()) { const end = i + 1 === offsetArray.length ? pack.byteLength - 20 : offsetArray[i + 1]; const o = offsetToObject[start]; const crc = crc32.buf(pack.slice(start, end)) >>> 0; o.end = end; o.crc = crc; } // We don't have the hashes yet. But we can generate them using the .readSlice function! 
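// (Standard git object hashing, noted here for reference: an oid is the SHA-1 of
// `${type} ${byteLength}\x00` followed by the content, e.g. a blob containing
// "hello\n" hashes as sha1('blob 6\x00hello\n') =
// 'ce013625030ba8dba906f756967f9e9ca394464a'.)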
const p = new GitPackIndex({ pack: Promise.resolve(pack), packfileSha, crcs, hashes, offsets, getExternalRefDelta, }); // Resolve deltas and compute the oids lastPercent = null; let count = 0; const objectsByDepth = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; for (let offset in offsetToObject) { offset = Number(offset); const percent = Math.floor((count++ * 100) / totalObjectCount); if (percent !== lastPercent) { if (onProgress) { await onProgress({ phase: 'Resolving deltas', loaded: count, total: totalObjectCount, }); } } lastPercent = percent; const o = offsetToObject[offset]; if (o.oid) continue try { p.readDepth = 0; p.externalReadDepth = 0; const { type, object } = await p.readSlice({ start: offset }); objectsByDepth[p.readDepth] += 1; const oid = await shasum(GitObject.wrap({ type, object })); o.oid = oid; hashes.push(oid); offsets.set(oid, offset); crcs[oid] = o.crc; } catch (err) { continue } } hashes.sort(); return p } async toBuffer() { const buffers = []; const write = (str, encoding) => { buffers.push(Buffer.from(str, encoding)); }; // Write out IDX v2 magic number write('ff744f63', 'hex'); // Write out version number 2 write('00000002', 'hex'); // Write fanout table const fanoutBuffer = new BufferCursor(Buffer.alloc(256 * 4)); for (let i = 0; i < 256; i++) { let count = 0; for (const hash of this.hashes) { if (parseInt(hash.slice(0, 2), 16) <= i) count++; } fanoutBuffer.writeUInt32BE(count); } buffers.push(fanoutBuffer.buffer); // Write out hashes for (const hash of this.hashes) { write(hash, 'hex'); } // Write out crcs const crcsBuffer = new BufferCursor(Buffer.alloc(this.hashes.length * 4)); for (const hash of this.hashes) { crcsBuffer.writeUInt32BE(this.crcs[hash]); } buffers.push(crcsBuffer.buffer); // Write out offsets const offsetsBuffer = new BufferCursor(Buffer.alloc(this.hashes.length * 4)); for (const hash of this.hashes) { offsetsBuffer.writeUInt32BE(this.offsets.get(hash)); } buffers.push(offsetsBuffer.buffer); // Write out packfile checksum write(this.packfileSha, 'hex'); // Write out shasum const totalBuffer = Buffer.concat(buffers); const sha = await shasum(totalBuffer); const shaBuffer = Buffer.alloc(20); shaBuffer.write(sha, 'hex'); return Buffer.concat([totalBuffer, shaBuffer]) } async load({ pack }) { this.pack = pack; } async unload() { this.pack = null; } async read({ oid }) { if (!this.offsets.get(oid)) { if (this.getExternalRefDelta) { this.externalReadDepth++; return this.getExternalRefDelta(oid) } else { throw new InternalError(`Could not read object ${oid} from packfile`) } } const start = this.offsets.get(oid); return this.readSlice({ start }) } async readSlice({ start }) { if (this.offsetCache[start]) { return Object.assign({}, this.offsetCache[start]) } this.readDepth++; const types = { 0b0010000: 'commit', 0b0100000: 'tree', 0b0110000: 'blob', 0b1000000: 'tag', 0b1100000: 'ofs_delta', 0b1110000: 'ref_delta', }; if (!this.pack) { throw new InternalError( 'Tried to read from a GitPackIndex with no packfile loaded into memory' ) } const raw = (await this.pack).slice(start); const reader = new BufferCursor(raw); const byte = reader.readUInt8(); // Object type is encoded in bits 654 const btype = byte & 0b1110000; let type = types[btype]; if (type === undefined) { throw new InternalError('Unrecognized type: 0b' + btype.toString(2)) } // The length encoding gets complicated.
// Last four bits of length is encoded in bits 3210 const lastFour = byte & 0b1111; let length = lastFour; // Whether the next byte is part of the variable-length encoded number // is encoded in bit 7 const multibyte = byte & 0b10000000; if (multibyte) { length = otherVarIntDecode(reader, lastFour); } let base = null; let object = null; // Handle deltified objects if (type === 'ofs_delta') { const offset = decodeVarInt(reader); const baseOffset = start - offset ;({ object: base, type } = await this.readSlice({ start: baseOffset })); } if (type === 'ref_delta') { const oid = reader.slice(20).toString('hex') ;({ object: base, type } = await this.read({ oid })); } // Handle undeltified objects const buffer = raw.slice(reader.tell()); object = Buffer.from(await inflate(buffer)); // Assert that the object length is as expected. if (object.byteLength !== length) { throw new InternalError( `Packfile told us object would have length ${length} but it had length ${object.byteLength}` ) } if (base) { object = Buffer.from(applyDelta(object, base)); } // Cache the result based on depth. if (this.readDepth > 3) { // hand tuned for speed / memory usage tradeoff this.offsetCache[start] = { type, object }; } return { type, format: 'content', object } } } const PackfileCache = Symbol('PackfileCache'); async function loadPackIndex({ fs, filename, getExternalRefDelta, emitter, emitterPrefix, }) { const idx = await fs.read(filename); return GitPackIndex.fromIdx({ idx, getExternalRefDelta }) } function readPackIndex({ fs, cache, filename, getExternalRefDelta, emitter, emitterPrefix, }) { // Try to get the packfile index from the in-memory cache if (!cache[PackfileCache]) cache[PackfileCache] = new Map(); let p = cache[PackfileCache].get(filename); if (!p) { p = loadPackIndex({ fs, filename, getExternalRefDelta, emitter, emitterPrefix, }); cache[PackfileCache].set(filename, p); } return p } async function readObjectPacked({ fs, cache, gitdir, oid, format = 'content', getExternalRefDelta, }) { // Check to see if it's in a packfile. // Iterate through all the .idx files let list = await fs.readdir(join(gitdir, 'objects/pack')); list = list.filter(x => x.endsWith('.idx')); for (const filename of list) { const indexFile = `${gitdir}/objects/pack/${filename}`; const p = await readPackIndex({ fs, cache, filename: indexFile, getExternalRefDelta, }); if (p.error) throw new InternalError(p.error) // If the packfile DOES have the oid we're looking for... if (p.offsets.has(oid)) { // Get the resolved git object from the packfile if (!p.pack) { const packFile = indexFile.replace(/idx$/, 'pack'); p.pack = fs.read(packFile); } const result = await p.read({ oid, getExternalRefDelta }); result.format = 'content'; result.source = `objects/pack/${filename.replace(/idx$/, 'pack')}`; return result } } // Failed to find it return null } /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {any} args.cache * @param {string} args.gitdir * @param {string} args.oid * @param {string} [args.format] */ async function _readObject({ fs, cache, gitdir, oid, format = 'content', }) { // Curry the current read method so that the packfile un-deltification // process can acquire external ref-deltas. const getExternalRefDelta = oid => _readObject({ fs, cache, gitdir, oid }); let result; // Empty tree - hard-coded so we can use it as a shorthand. 
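// (That oid is not arbitrary: 4b825dc642cb6eb9a060e54bf8d69288fbee4904 is
// sha1('tree 0\x00'), the well-known hash of the zero-entry tree.)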
// Note: I think the canonical git implementation must do this too because // `git cat-file -t 4b825dc642cb6eb9a060e54bf8d69288fbee4904` prints "tree" even in empty repos. if (oid === '4b825dc642cb6eb9a060e54bf8d69288fbee4904') { result = { format: 'wrapped', object: Buffer.from(`tree 0\x00`) }; } // Look for it in the loose object directory. if (!result) { result = await readObjectLoose({ fs, gitdir, oid }); } // Check to see if it's in a packfile. if (!result) { result = await readObjectPacked({ fs, cache, gitdir, oid, getExternalRefDelta, }); } // Finally if (!result) { throw new NotFoundError(oid) } if (format === 'deflated') { return result } if (result.format === 'deflated') { result.object = Buffer.from(await inflate(result.object)); result.format = 'wrapped'; } if (result.format === 'wrapped') { if (format === 'wrapped' && result.format === 'wrapped') { return result } const sha = await shasum(result.object); if (sha !== oid) { throw new InternalError( `SHA check failed! Expected ${oid}, computed ${sha}` ) } const { object, type } = GitObject.unwrap(result.object); result.type = type; result.object = object; result.format = 'content'; } if (result.format === 'content') { if (format === 'content') return result return } throw new InternalError(`invalid format "${result.format}"`) } class AlreadyExistsError extends BaseError { /** * @param {'note'|'remote'|'tag'|'branch'} noun * @param {string} where * @param {boolean} canForce */ constructor(noun, where, canForce = true) { super( `Failed to create ${noun} at ${where} because it already exists.${ canForce ? ` (Hint: use 'force: true' parameter to overwrite existing ${noun}.)` : '' }` ); this.code = this.name = AlreadyExistsError.code; this.data = { noun, where, canForce }; } } /** @type {'AlreadyExistsError'} */ AlreadyExistsError.code = 'AlreadyExistsError'; class AmbiguousError extends BaseError { /** * @param {'oids'|'refs'} nouns * @param {string} short * @param {string[]} matches */ constructor(nouns, short, matches) { super( `Found multiple ${nouns} matching "${short}" (${matches.join( ', ' )}). Use a longer abbreviation length to disambiguate them.` ); this.code = this.name = AmbiguousError.code; this.data = { nouns, short, matches }; } } /** @type {'AmbiguousError'} */ AmbiguousError.code = 'AmbiguousError'; class CheckoutConflictError extends BaseError { /** * @param {string[]} filepaths */ constructor(filepaths) { super( `Your local changes to the following files would be overwritten by checkout: ${filepaths.join( ', ' )}` ); this.code = this.name = CheckoutConflictError.code; this.data = { filepaths }; } } /** @type {'CheckoutConflictError'} */ CheckoutConflictError.code = 'CheckoutConflictError'; class CommitNotFetchedError extends BaseError { /** * @param {string} ref * @param {string} oid */ constructor(ref, oid) { super( `Failed to checkout "${ref}" because commit ${oid} is not available locally. 
Do a git fetch to make the branch available locally.` ); this.code = this.name = CommitNotFetchedError.code; this.data = { ref, oid }; } } /** @type {'CommitNotFetchedError'} */ CommitNotFetchedError.code = 'CommitNotFetchedError'; class EmptyServerResponseError extends BaseError { constructor() { super(`Empty response from git server.`); this.code = this.name = EmptyServerResponseError.code; this.data = {}; } } /** @type {'EmptyServerResponseError'} */ EmptyServerResponseError.code = 'EmptyServerResponseError'; class FastForwardError extends BaseError { constructor() { super(`A simple fast-forward merge was not possible.`); this.code = this.name = FastForwardError.code; this.data = {}; } } /** @type {'FastForwardError'} */ FastForwardError.code = 'FastForwardError'; class GitPushError extends BaseError { /** * @param {string} prettyDetails * @param {PushResult} result */ constructor(prettyDetails, result) { super(`One or more branches were not updated: ${prettyDetails}`); this.code = this.name = GitPushError.code; this.data = { prettyDetails, result }; } } /** @type {'GitPushError'} */ GitPushError.code = 'GitPushError'; class HttpError extends BaseError { /** * @param {number} statusCode * @param {string} statusMessage * @param {string} response */ constructor(statusCode, statusMessage, response) { super(`HTTP Error: ${statusCode} ${statusMessage}`); this.code = this.name = HttpError.code; this.data = { statusCode, statusMessage, response }; } } /** @type {'HttpError'} */ HttpError.code = 'HttpError'; class InvalidFilepathError extends BaseError { /** * @param {'leading-slash'|'trailing-slash'|'directory'} [reason] */ constructor(reason) { let message = 'invalid filepath'; if (reason === 'leading-slash' || reason === 'trailing-slash') { message = `"filepath" parameter should not include leading or trailing directory separators because these can cause problems on some platforms.`; } else if (reason === 'directory') { message = `"filepath" should not be a directory.`; } super(message); this.code = this.name = InvalidFilepathError.code; this.data = { reason }; } } /** @type {'InvalidFilepathError'} */ InvalidFilepathError.code = 'InvalidFilepathError'; class InvalidRefNameError extends BaseError { /** * @param {string} ref * @param {string} suggestion * @param {boolean} canForce */ constructor(ref, suggestion) { super( `"${ref}" would be an invalid git reference. 
(Hint: a valid alternative would be "${suggestion}".)` ); this.code = this.name = InvalidRefNameError.code; this.data = { ref, suggestion }; } } /** @type {'InvalidRefNameError'} */ InvalidRefNameError.code = 'InvalidRefNameError'; class MaxDepthError extends BaseError { /** * @param {number} depth */ constructor(depth) { super(`Maximum search depth of ${depth} exceeded.`); this.code = this.name = MaxDepthError.code; this.data = { depth }; } } /** @type {'MaxDepthError'} */ MaxDepthError.code = 'MaxDepthError'; class MergeNotSupportedError extends BaseError { constructor() { super(`Merges with conflicts are not supported yet.`); this.code = this.name = MergeNotSupportedError.code; this.data = {}; } } /** @type {'MergeNotSupportedError'} */ MergeNotSupportedError.code = 'MergeNotSupportedError'; class MissingNameError extends BaseError { /** * @param {'author'|'committer'|'tagger'} role */ constructor(role) { super( `No name was provided for ${role} in the argument or in the .git/config file.` ); this.code = this.name = MissingNameError.code; this.data = { role }; } } /** @type {'MissingNameError'} */ MissingNameError.code = 'MissingNameError'; class MissingParameterError extends BaseError { /** * @param {string} parameter */ constructor(parameter) { super( `The function requires a "${parameter}" parameter but none was provided.` ); this.code = this.name = MissingParameterError.code; this.data = { parameter }; } } /** @type {'MissingParameterError'} */ MissingParameterError.code = 'MissingParameterError'; class MultipleGitError extends BaseError { /** * @param {Error[]} errors * @param {string} message */ constructor(errors) { super( `There are multiple errors that were thrown by the method. Please refer to the "errors" property to see more` ); this.code = this.name = MultipleGitError.code; this.data = { errors }; this.errors = errors; } } /** @type {'MultipleGitError'} */ MultipleGitError.code = 'MultipleGitError'; class ParseError extends BaseError { /** * @param {string} expected * @param {string} actual */ constructor(expected, actual) { super(`Expected "${expected}" but received "${actual}".`); this.code = this.name = ParseError.code; this.data = { expected, actual }; } } /** @type {'ParseError'} */ ParseError.code = 'ParseError'; class PushRejectedError extends BaseError { /** * @param {'not-fast-forward'|'tag-exists'} reason */ constructor(reason) { let message = ''; if (reason === 'not-fast-forward') { message = ' because it was not a simple fast-forward'; } else if (reason === 'tag-exists') { message = ' because tag already exists'; } super(`Push rejected${message}. Use "force: true" to override.`); this.code = this.name = PushRejectedError.code; this.data = { reason }; } } /** @type {'PushRejectedError'} */ PushRejectedError.code = 'PushRejectedError'; class RemoteCapabilityError extends BaseError { /** * @param {'shallow'|'deepen-since'|'deepen-not'|'deepen-relative'} capability * @param {'depth'|'since'|'exclude'|'relative'} parameter */ constructor(capability, parameter) { super( `Remote does not support the "${capability}" so the "${parameter}" parameter cannot be used.` ); this.code = this.name = RemoteCapabilityError.code; this.data = { capability, parameter }; } } /** @type {'RemoteCapabilityError'} */ RemoteCapabilityError.code = 'RemoteCapabilityError'; class SmartHttpError extends BaseError { /** * @param {string} preview * @param {string} response */ constructor(preview, response) { super( `Remote did not reply using the "smart" HTTP protocol. 
Expected "001e# service=git-upload-pack" but received: ${preview}` ); this.code = this.name = SmartHttpError.code; this.data = { preview, response }; } } /** @type {'SmartHttpError'} */ SmartHttpError.code = 'SmartHttpError'; class UnknownTransportError extends BaseError { /** * @param {string} url * @param {string} transport * @param {string} [suggestion] */ constructor(url, transport, suggestion) { super( `Git remote "${url}" uses an unrecognized transport protocol: "${transport}"` ); this.code = this.name = UnknownTransportError.code; this.data = { url, transport, suggestion }; } } /** @type {'UnknownTransportError'} */ UnknownTransportError.code = 'UnknownTransportError'; class UrlParseError extends BaseError { /** * @param {string} url */ constructor(url) { super(`Cannot parse remote URL: "${url}"`); this.code = this.name = UrlParseError.code; this.data = { url }; } } /** @type {'UrlParseError'} */ UrlParseError.code = 'UrlParseError'; class UserCanceledError extends BaseError { constructor() { super(`The operation was canceled.`); this.code = this.name = UserCanceledError.code; this.data = {}; } } /** @type {'UserCanceledError'} */ UserCanceledError.code = 'UserCanceledError'; var Errors = /*#__PURE__*/Object.freeze({ __proto__: null, AlreadyExistsError: AlreadyExistsError, AmbiguousError: AmbiguousError, CheckoutConflictError: CheckoutConflictError, CommitNotFetchedError: CommitNotFetchedError, EmptyServerResponseError: EmptyServerResponseError, FastForwardError: FastForwardError, GitPushError: GitPushError, HttpError: HttpError, InternalError: InternalError, InvalidFilepathError: InvalidFilepathError, InvalidOidError: InvalidOidError, InvalidRefNameError: InvalidRefNameError, MaxDepthError: MaxDepthError, MergeNotSupportedError: MergeNotSupportedError, MissingNameError: MissingNameError, MissingParameterError: MissingParameterError, MultipleGitError: MultipleGitError, NoRefspecError: NoRefspecError, NotFoundError: NotFoundError, ObjectTypeError: ObjectTypeError, ParseError: ParseError, PushRejectedError: PushRejectedError, RemoteCapabilityError: RemoteCapabilityError, SmartHttpError: SmartHttpError, UnknownTransportError: UnknownTransportError, UnsafeFilepathError: UnsafeFilepathError, UrlParseError: UrlParseError, UserCanceledError: UserCanceledError }); function formatAuthor({ name, email, timestamp, timezoneOffset }) { timezoneOffset = formatTimezoneOffset(timezoneOffset); return `${name} <${email}> ${timestamp} ${timezoneOffset}` } // The amount of effort that went into crafting these cases to handle // -0 (just so we don't lose that information when parsing and reconstructing) // but can also default to +0 was extraordinary. function formatTimezoneOffset(minutes) { const sign = simpleSign(negateExceptForZero(minutes)); minutes = Math.abs(minutes); const hours = Math.floor(minutes / 60); minutes -= hours * 60; let strHours = String(hours); let strMinutes = String(minutes); if (strHours.length < 2) strHours = '0' + strHours; if (strMinutes.length < 2) strMinutes = '0' + strMinutes; return (sign === -1 ? '-' : '+') + strHours + strMinutes } function simpleSign(n) { return Math.sign(n) || (Object.is(n, -0) ? -1 : 1) } function negateExceptForZero(n) { return n === 0 ? 
n : -n } function normalizeNewlines(str) { // remove all str = str.replace(/\r/g, ''); // no extra newlines up front str = str.replace(/^\n+/, ''); // and a single newline at the end str = str.replace(/\n+$/, '') + '\n'; return str } function parseAuthor(author) { const [, name, email, timestamp, offset] = author.match( /^(.*) <(.*)> (.*) (.*)$/ ); return { name: name, email: email, timestamp: Number(timestamp), timezoneOffset: parseTimezoneOffset(offset), } } // The amount of effort that went into crafting these cases to handle // -0 (just so we don't lose that information when parsing and reconstructing) // but can also default to +0 was extraordinary. function parseTimezoneOffset(offset) { let [, sign, hours, minutes] = offset.match(/(\+|-)(\d\d)(\d\d)/); minutes = (sign === '+' ? 1 : -1) * (Number(hours) * 60 + Number(minutes)); return negateExceptForZero$1(minutes) } function negateExceptForZero$1(n) { return n === 0 ? n : -n } class GitAnnotatedTag { constructor(tag) { if (typeof tag === 'string') { this._tag = tag; } else if (Buffer.isBuffer(tag)) { this._tag = tag.toString('utf8'); } else if (typeof tag === 'object') { this._tag = GitAnnotatedTag.render(tag); } else { throw new InternalError( 'invalid type passed to GitAnnotatedTag constructor' ) } } static from(tag) { return new GitAnnotatedTag(tag) } static render(obj) { return `object ${obj.object} type ${obj.type} tag ${obj.tag} tagger ${formatAuthor(obj.tagger)} ${obj.message} ${obj.gpgsig ? obj.gpgsig : ''}` } justHeaders() { return this._tag.slice(0, this._tag.indexOf('\n\n')) } message() { const tag = this.withoutSignature(); return tag.slice(tag.indexOf('\n\n') + 2) } parse() { return Object.assign(this.headers(), { message: this.message(), gpgsig: this.gpgsig(), }) } render() { return this._tag } headers() { const headers = this.justHeaders().split('\n'); const hs = []; for (const h of headers) { if (h[0] === ' ') { // combine with previous header (without space indent) hs[hs.length - 1] += '\n' + h.slice(1); } else { hs.push(h); } } const obj = {}; for (const h of hs) { const key = h.slice(0, h.indexOf(' ')); const value = h.slice(h.indexOf(' ') + 1); if (Array.isArray(obj[key])) { obj[key].push(value); } else { obj[key] = value; } } if (obj.tagger) { obj.tagger = parseAuthor(obj.tagger); } if (obj.committer) { obj.committer = parseAuthor(obj.committer); } return obj } withoutSignature() { const tag = normalizeNewlines(this._tag); if (tag.indexOf('\n-----BEGIN PGP SIGNATURE-----') === -1) return tag return tag.slice(0, tag.lastIndexOf('\n-----BEGIN PGP SIGNATURE-----')) } gpgsig() { if (this._tag.indexOf('\n-----BEGIN PGP SIGNATURE-----') === -1) return const signature = this._tag.slice( this._tag.indexOf('-----BEGIN PGP SIGNATURE-----'), this._tag.indexOf('-----END PGP SIGNATURE-----') + '-----END PGP SIGNATURE-----'.length ); return normalizeNewlines(signature) } payload() { return this.withoutSignature() + '\n' } toObject() { return Buffer.from(this._tag, 'utf8') } static async sign(tag, sign, secretKey) { const payload = tag.payload(); let { signature } = await sign({ payload, secretKey }); // renormalize the line endings to the one true line-ending signature = normalizeNewlines(signature); const signedTag = payload + signature; // return a new tag object return GitAnnotatedTag.from(signedTag) } } function indent(str) { return ( str .trim() .split('\n') .map(x => ' ' + x) .join('\n') + '\n' ) } function outdent(str) { return str .split('\n') .map(x => x.replace(/^ /, '')) .join('\n') } class GitCommit { 
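// Sketch of the text format this class round-trips (illustrative; see
// renderHeaders() below for the authoritative field order):
//
//   tree 4b825dc642cb6eb9a060e54bf8d69288fbee4904
//   parent <oid of each parent, zero or more lines>
//   author An Author <author@example.com> 1591857974 -0400
//   committer A Committer <committer@example.com> 1591857974 -0400
//   gpgsig <optional; continuation lines indented by one space>
//
//   commit message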
constructor(commit) { if (typeof commit === 'string') { this._commit = commit; } else if (Buffer.isBuffer(commit)) { this._commit = commit.toString('utf8'); } else if (typeof commit === 'object') { this._commit = GitCommit.render(commit); } else { throw new InternalError('invalid type passed to GitCommit constructor') } } static fromPayloadSignature({ payload, signature }) { const headers = GitCommit.justHeaders(payload); const message = GitCommit.justMessage(payload); const commit = normalizeNewlines( headers + '\ngpgsig' + indent(signature) + '\n' + message ); return new GitCommit(commit) } static from(commit) { return new GitCommit(commit) } toObject() { return Buffer.from(this._commit, 'utf8') } // Todo: allow setting the headers and message headers() { return this.parseHeaders() } // Todo: allow setting the headers and message message() { return GitCommit.justMessage(this._commit) } parse() { return Object.assign({ message: this.message() }, this.headers()) } static justMessage(commit) { return normalizeNewlines(commit.slice(commit.indexOf('\n\n') + 2)) } static justHeaders(commit) { return commit.slice(0, commit.indexOf('\n\n')) } parseHeaders() { const headers = GitCommit.justHeaders(this._commit).split('\n'); const hs = []; for (const h of headers) { if (h[0] === ' ') { // combine with previous header (without space indent) hs[hs.length - 1] += '\n' + h.slice(1); } else { hs.push(h); } } const obj = { parent: [], }; for (const h of hs) { const key = h.slice(0, h.indexOf(' ')); const value = h.slice(h.indexOf(' ') + 1); if (Array.isArray(obj[key])) { obj[key].push(value); } else { obj[key] = value; } } if (obj.author) { obj.author = parseAuthor(obj.author); } if (obj.committer) { obj.committer = parseAuthor(obj.committer); } return obj } static renderHeaders(obj) { let headers = ''; if (obj.tree) { headers += `tree ${obj.tree}\n`; } else { headers += `tree 4b825dc642cb6eb9a060e54bf8d69288fbee4904\n`; // the null tree } if (obj.parent) { if (obj.parent.length === undefined) { throw new InternalError(`commit 'parent' property should be an array`) } for (const p of obj.parent) { headers += `parent ${p}\n`; } } const author = obj.author; headers += `author ${formatAuthor(author)}\n`; const committer = obj.committer || obj.author; headers += `committer ${formatAuthor(committer)}\n`; if (obj.gpgsig) { headers += 'gpgsig' + indent(obj.gpgsig); } return headers } static render(obj) { return GitCommit.renderHeaders(obj) + '\n' + normalizeNewlines(obj.message) } render() { return this._commit } withoutSignature() { const commit = normalizeNewlines(this._commit); if (commit.indexOf('\ngpgsig') === -1) return commit const headers = commit.slice(0, commit.indexOf('\ngpgsig')); const message = commit.slice( commit.indexOf('-----END PGP SIGNATURE-----\n') + '-----END PGP SIGNATURE-----\n'.length ); return normalizeNewlines(headers + '\n' + message) } isolateSignature() { const signature = this._commit.slice( this._commit.indexOf('-----BEGIN PGP SIGNATURE-----'), this._commit.indexOf('-----END PGP SIGNATURE-----') + '-----END PGP SIGNATURE-----'.length ); return outdent(signature) } static async sign(commit, sign, secretKey) { const payload = commit.withoutSignature(); const message = GitCommit.justMessage(commit._commit); let { signature } = await sign({ payload, secretKey }); // renormalize the line endings to the one true line-ending signature = normalizeNewlines(signature); const headers = GitCommit.justHeaders(commit._commit); const signedCommit = headers + '\n' + 'gpgsig' + indent(signature) 
+ '\n' + message; // return a new commit object return GitCommit.from(signedCommit) } } async function resolveTree({ fs, cache, gitdir, oid }) { // Empty tree - bypass `readObject` if (oid === '4b825dc642cb6eb9a060e54bf8d69288fbee4904') { return { tree: GitTree.from([]), oid } } const { type, object } = await _readObject({ fs, cache, gitdir, oid }); // Resolve annotated tag objects to whatever if (type === 'tag') { oid = GitAnnotatedTag.from(object).parse().object; return resolveTree({ fs, cache, gitdir, oid }) } // Resolve commits to trees if (type === 'commit') { oid = GitCommit.from(object).parse().tree; return resolveTree({ fs, cache, gitdir, oid }) } if (type !== 'tree') { throw new ObjectTypeError(oid, type, 'tree') } return { tree: GitTree.from(object), oid } } class GitWalkerRepo { constructor({ fs, gitdir, ref, cache }) { this.fs = fs; this.cache = cache; this.gitdir = gitdir; this.mapPromise = (async () => { const map = new Map(); let oid; try { oid = await GitRefManager.resolve({ fs, gitdir, ref }); } catch (e) { if (e instanceof NotFoundError) { // Handle fresh branches with no commits oid = '4b825dc642cb6eb9a060e54bf8d69288fbee4904'; } } const tree = await resolveTree({ fs, cache: this.cache, gitdir, oid }); tree.type = 'tree'; tree.mode = '40000'; map.set('.', tree); return map })(); const walker = this; this.ConstructEntry = class TreeEntry { constructor(fullpath) { this._fullpath = fullpath; this._type = false; this._mode = false; this._stat = false; this._content = false; this._oid = false; } async type() { return walker.type(this) } async mode() { return walker.mode(this) } async stat() { return walker.stat(this) } async content() { return walker.content(this) } async oid() { return walker.oid(this) } }; } async readdir(entry) { const filepath = entry._fullpath; const { fs, cache, gitdir } = this; const map = await this.mapPromise; const obj = map.get(filepath); if (!obj) throw new Error(`No obj for ${filepath}`) const oid = obj.oid; if (!oid) throw new Error(`No oid for obj ${JSON.stringify(obj)}`) if (obj.type !== 'tree') { // TODO: support submodules (type === 'commit') return null } const { type, object } = await _readObject({ fs, cache, gitdir, oid }); if (type !== obj.type) { throw new ObjectTypeError(oid, type, obj.type) } const tree = GitTree.from(object); // cache all entries for (const entry of tree) { map.set(join(filepath, entry.path), entry); } return tree.entries().map(entry => join(filepath, entry.path)) } async type(entry) { if (entry._type === false) { const map = await this.mapPromise; const { type } = map.get(entry._fullpath); entry._type = type; } return entry._type } async mode(entry) { if (entry._mode === false) { const map = await this.mapPromise; const { mode } = map.get(entry._fullpath); entry._mode = normalizeMode(parseInt(mode, 8)); } return entry._mode } async stat(_entry) {} async content(entry) { if (entry._content === false) { const map = await this.mapPromise; const { fs, cache, gitdir } = this; const obj = map.get(entry._fullpath); const oid = obj.oid; const { type, object } = await _readObject({ fs, cache, gitdir, oid }); if (type !== 'blob') { entry._content = undefined; } else { entry._content = new Uint8Array(object); } } return entry._content } async oid(entry) { if (entry._oid === false) { const map = await this.mapPromise; const obj = map.get(entry._fullpath); entry._oid = obj.oid; } return entry._oid } } // @ts-check /** * @param {object} args * @param {string} [args.ref='HEAD'] * @returns {Walker} */ function TREE({ ref = 'HEAD' 
}) { const o = Object.create(null); Object.defineProperty(o, GitWalkSymbol, { value: function({ fs, gitdir, cache }) { return new GitWalkerRepo({ fs, gitdir, ref, cache }) }, }); Object.freeze(o); return o } // @ts-check class GitWalkerFs { constructor({ fs, dir, gitdir, cache }) { this.fs = fs; this.cache = cache; this.dir = dir; this.gitdir = gitdir; const walker = this; this.ConstructEntry = class WorkdirEntry { constructor(fullpath) { this._fullpath = fullpath; this._type = false; this._mode = false; this._stat = false; this._content = false; this._oid = false; } async type() { return walker.type(this) } async mode() { return walker.mode(this) } async stat() { return walker.stat(this) } async content() { return walker.content(this) } async oid() { return walker.oid(this) } }; } async readdir(entry) { const filepath = entry._fullpath; const { fs, dir } = this; const names = await fs.readdir(join(dir, filepath)); if (names === null) return null return names.map(name => join(filepath, name)) } async type(entry) { if (entry._type === false) { await entry.stat(); } return entry._type } async mode(entry) { if (entry._mode === false) { await entry.stat(); } return entry._mode } async stat(entry) { if (entry._stat === false) { const { fs, dir } = this; let stat = await fs.lstat(`${dir}/${entry._fullpath}`); if (!stat) { throw new Error( `ENOENT: no such file or directory, lstat '${entry._fullpath}'` ) } let type = stat.isDirectory() ? 'tree' : 'blob'; if (type === 'blob' && !stat.isFile() && !stat.isSymbolicLink()) { type = 'special'; } entry._type = type; stat = normalizeStats(stat); entry._mode = stat.mode; // workaround for a BrowserFS edge case if (stat.size === -1 && entry._actualSize) { stat.size = entry._actualSize; } entry._stat = stat; } return entry._stat } async content(entry) { if (entry._content === false) { const { fs, dir } = this; if ((await entry.type()) === 'tree') { entry._content = undefined; } else { const content = await fs.read(`${dir}/${entry._fullpath}`); // workaround for a BrowserFS edge case entry._actualSize = content.length; if (entry._stat && entry._stat.size === -1) { entry._stat.size = entry._actualSize; } entry._content = new Uint8Array(content); } } return entry._content } async oid(entry) { if (entry._oid === false) { const { fs, gitdir, cache } = this; let oid; // See if we can use the SHA1 hash in the index. 
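// (How the shortcut works: if the index holds a stage entry for this path and
// compareStats() reports no difference between its cached stats and the file's
// current stats, the oid recorded at `git add` time is reused instead of
// re-reading and re-hashing the file contents.)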
await GitIndexManager.acquire({ fs, gitdir, cache }, async function( index ) { const stage = index.entriesMap.get(entry._fullpath); const stats = await entry.stat(); if (!stage || compareStats(stats, stage)) { const content = await entry.content(); if (content === undefined) { oid = undefined; } else { oid = await shasum( GitObject.wrap({ type: 'blob', object: await entry.content() }) ); // Update the stats in the index so we will get a "cache hit" next time // 1) if we can (because the oid and mode are the same) // 2) and only if we need to (because other stats differ) if ( stage && oid === stage.oid && stats.mode === stage.mode && compareStats(stats, stage) ) { index.insert({ filepath: entry._fullpath, stats, oid: oid, }); } } } else { // Use the index SHA1 rather than compute it oid = stage.oid; } }); entry._oid = oid; } return entry._oid } } // @ts-check /** * @returns {Walker} */ function WORKDIR() { const o = Object.create(null); Object.defineProperty(o, GitWalkSymbol, { value: function({ fs, dir, gitdir, cache }) { return new GitWalkerFs({ fs, dir, gitdir, cache }) }, }); Object.freeze(o); return o } // @ts-check // I'm putting this in a Manager because I reckon it could benefit // from a LOT of caching. class GitIgnoreManager { static async isIgnored({ fs, dir, gitdir = join(dir, '.git'), filepath }) { // ALWAYS ignore ".git" folders. if (basename(filepath) === '.git') return true // '.' is not a valid gitignore entry, so '.' is never ignored if (filepath === '.') return false // Check and load exclusion rules from project exclude file (.git/info/exclude) let excludes = ''; const excludesFile = join(gitdir, 'info', 'exclude'); if (await fs.exists(excludesFile)) { excludes = await fs.read(excludesFile, 'utf8'); } // Find all the .gitignore files that could affect this file const pairs = [ { gitignore: join(dir, '.gitignore'), filepath, }, ]; const pieces = filepath.split('/').filter(Boolean); for (let i = 1; i < pieces.length; i++) { const folder = pieces.slice(0, i).join('/'); const file = pieces.slice(i).join('/'); pairs.push({ gitignore: join(dir, folder, '.gitignore'), filepath: file, }); } let ignoredStatus = false; for (const p of pairs) { let file; try { file = await fs.read(p.gitignore, 'utf8'); } catch (err) { if (err.code === 'ENOENT') continue } const ign = ignore().add(excludes); ign.add(file); // If the parent directory is excluded, we are done. // "It is not possible to re-include a file if a parent directory of that file is excluded. Git doesn’t list excluded directories for performance reasons, so any patterns on contained files have no effect, no matter where they are defined." // source: https://git-scm.com/docs/gitignore const parentdir = dirname(p.filepath); if (parentdir !== '.' && ign.ignores(parentdir)) return true // If the file is currently ignored, test for UNignoring. if (ignoredStatus) { ignoredStatus = !ign.test(p.filepath).unignored; } else { ignoredStatus = ign.test(p.filepath).ignored; } } return ignoredStatus } } /** * Removes the directory at the specified filepath recursively. Used internally to replicate the behavior of * fs.promises.rm(path, { recursive: true, force: true }) from Node.js 14 and above when it is not available. If the provided * filepath resolves to a file, it will be removed. * * @param {import('../models/FileSystem.js').FileSystem} fs * @param {string} filepath - The file or directory to remove.
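* @example
* // illustrative usage (mine): removes '/tutorial/stale-dir' and everything in it
* // await rmRecursive(fs, '/tutorial/stale-dir')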
*/ async function rmRecursive(fs, filepath) { const entries = await fs.readdir(filepath); if (entries == null) { await fs.rm(filepath); } else if (entries.length) { await Promise.all( entries.map(entry => { const subpath = join(filepath, entry); return fs.lstat(subpath).then(stat => { if (!stat) return return stat.isDirectory() ? rmRecursive(fs, subpath) : fs.rm(subpath) }) }) ).then(() => fs.rmdir(filepath)); } else { await fs.rmdir(filepath); } } /** * This is just a collection of helper functions really. At least that's how it started. */ class FileSystem { constructor(fs) { if (typeof fs._original_unwrapped_fs !== 'undefined') return fs const promises = Object.getOwnPropertyDescriptor(fs, 'promises'); if (promises && promises.enumerable) { this._readFile = fs.promises.readFile.bind(fs.promises); this._writeFile = fs.promises.writeFile.bind(fs.promises); this._mkdir = fs.promises.mkdir.bind(fs.promises); if (fs.promises.rm) { this._rm = fs.promises.rm.bind(fs.promises); } else if (fs.promises.rmdir.length > 1) { this._rm = fs.promises.rmdir.bind(fs.promises); } else { this._rm = rmRecursive.bind(null, this); } this._rmdir = fs.promises.rmdir.bind(fs.promises); this._unlink = fs.promises.unlink.bind(fs.promises); this._stat = fs.promises.stat.bind(fs.promises); this._lstat = fs.promises.lstat.bind(fs.promises); this._readdir = fs.promises.readdir.bind(fs.promises); this._readlink = fs.promises.readlink.bind(fs.promises); this._symlink = fs.promises.symlink.bind(fs.promises); } else { this._readFile = pify(fs.readFile.bind(fs)); this._writeFile = pify(fs.writeFile.bind(fs)); this._mkdir = pify(fs.mkdir.bind(fs)); if (fs.rm) { this._rm = pify(fs.rm.bind(fs)); } else if (fs.rmdir.length > 2) { this._rm = pify(fs.rmdir.bind(fs)); } else { this._rm = rmRecursive.bind(null, this); } this._rmdir = pify(fs.rmdir.bind(fs)); this._unlink = pify(fs.unlink.bind(fs)); this._stat = pify(fs.stat.bind(fs)); this._lstat = pify(fs.lstat.bind(fs)); this._readdir = pify(fs.readdir.bind(fs)); this._readlink = pify(fs.readlink.bind(fs)); this._symlink = pify(fs.symlink.bind(fs)); } this._original_unwrapped_fs = fs; } /** * Return true if a file exists, false if it doesn't exist. * Rethrows errors that aren't related to file existence. */ async exists(filepath, options = {}) { try { await this._stat(filepath); return true } catch (err) { if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { return false } else { console.log('Unhandled error in "FileSystem.exists()" function', err); throw err } } } /** * Return the contents of a file if it exists, otherwise returns null. * * @param {string} filepath * @param {object} [options] * * @returns {Promise} */ async read(filepath, options = {}) { try { let buffer = await this._readFile(filepath, options); // Convert plain ArrayBuffers to Buffers if (typeof buffer !== 'string') { buffer = Buffer.from(buffer); } return buffer } catch (err) { return null } } /** * Write a file (creating missing directories if need be) without throwing errors. * * @param {string} filepath * @param {Buffer|Uint8Array|string} contents * @param {object|string} [options] */ async write(filepath, contents, options = {}) { try { await this._writeFile(filepath, contents, options); return } catch (err) { // Hmm. Let's try mkdirp and try again. await this.mkdir(dirname(filepath)); await this._writeFile(filepath, contents, options); } } /** * Make a directory (or series of nested directories) without throwing an error if it already exists.
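* Illustrative trace (my example, not upstream docs): mkdir('/a/b/c') with only
* '/a' present fails with ENOENT, recurses into mkdir('/a/b'), then retries
* mkdir('/a/b/c', true); the _selfCall flag makes a second failure propagate
* instead of looping forever.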
*/ async mkdir(filepath, _selfCall = false) { try { await this._mkdir(filepath); return } catch (err) { // If err is null then operation succeeded! if (err === null) return // If the directory already exists, that's OK! if (err.code === 'EEXIST') return // Avoid infinite loops of failure if (_selfCall) throw err // If we got a "no such file or directory" error, back up and try again. if (err.code === 'ENOENT') { const parent = dirname(filepath); // Check to see if we've gone too far if (parent === '.' || parent === '/' || parent === filepath) throw err // Infinite recursion, what could go wrong? await this.mkdir(parent); await this.mkdir(filepath, true); } } } /** * Delete a file without throwing an error if it is already deleted. */ async rm(filepath) { try { await this._unlink(filepath); } catch (err) { if (err.code !== 'ENOENT') throw err } } /** * Delete a directory without throwing an error if it is already deleted. */ async rmdir(filepath, opts) { try { if (opts && opts.recursive) { await this._rm(filepath, opts); } else { await this._rmdir(filepath); } } catch (err) { if (err.code !== 'ENOENT') throw err } } /** * Read a directory without throwing an error if the directory doesn't exist */ async readdir(filepath) { try { const names = await this._readdir(filepath); // Ordering is not guaranteed, and system specific (Windows vs Unix) // so we must sort them ourselves. names.sort(compareStrings); return names } catch (err) { if (err.code === 'ENOTDIR') return null return [] } } /** * Return a flat list of all the files nested inside a directory * * Based on an elegant concurrent recursive solution from SO * https://stackoverflow.com/a/45130990/2168416 */ async readdirDeep(dir) { const subdirs = await this._readdir(dir); const files = await Promise.all( subdirs.map(async subdir => { const res = dir + '/' + subdir; return (await this._stat(res)).isDirectory() ? this.readdirDeep(res) : res }) ); return files.reduce((a, f) => a.concat(f), []) } /** * Return the Stats of a file/symlink if it exists, otherwise returns null. * Rethrows errors that aren't related to file existence. */ async lstat(filename) { try { const stats = await this._lstat(filename); return stats } catch (err) { if (err.code === 'ENOENT') { return null } throw err } } /** * Reads the contents of a symlink if it exists, otherwise returns null. * Rethrows errors that aren't related to file existence. */ async readlink(filename, opts = { encoding: 'buffer' }) { // Note: FileSystem.readlink returns a buffer by default // so we can dump it into GitObject.write just like any other file. try { const link = await this._readlink(filename, opts); return Buffer.isBuffer(link) ? link : Buffer.from(link) } catch (err) { if (err.code === 'ENOENT') { return null } throw err } } /** * Write the contents of buffer to a symlink. */ async writelink(filename, buffer) { return this._symlink(buffer.toString('utf8'), filename) } } async function writeObjectLoose({ fs, gitdir, object, format, oid }) { if (format !== 'deflated') { throw new InternalError( 'GitObjectStoreLoose expects objects to write to be in deflated format' ) } const source = `objects/${oid.slice(0, 2)}/${oid.slice(2)}`; const filepath = `${gitdir}/${source}`; // Don't overwrite existing git objects - this helps avoid EPERM errors. // Although I don't know how we'd fix corrupted objects then. Perhaps delete them // on read?
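// (Skipping the write below is safe in the normal case: loose objects are
// content-addressed, so any file already present at this path should contain the
// same deflated object, barring the corruption scenario mentioned above.)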
if (!(await fs.exists(filepath))) await fs.write(filepath, object); } /* eslint-env node, browser */ let supportsCompressionStream = null; async function deflate(buffer) { if (supportsCompressionStream === null) { supportsCompressionStream = testCompressionStream(); } return supportsCompressionStream ? browserDeflate(buffer) : pako.deflate(buffer) } async function browserDeflate(buffer) { const cs = new CompressionStream('deflate'); const c = new Blob([buffer]).stream().pipeThrough(cs); return new Uint8Array(await new Response(c).arrayBuffer()) } function testCompressionStream() { try { const cs = new CompressionStream('deflate'); // Test if `Blob.stream` is present. React Native does not have the `stream` method new Blob([]).stream(); if (cs) return true } catch (_) { // no bother } return false } async function _writeObject({ fs, gitdir, type, object, format = 'content', oid = undefined, dryRun = false, }) { if (format !== 'deflated') { if (format !== 'wrapped') { object = GitObject.wrap({ type, object }); } oid = await shasum(object); object = Buffer.from(await deflate(object)); } if (!dryRun) { await writeObjectLoose({ fs, gitdir, object, format: 'deflated', oid }); } return oid } function assertParameter(name, value) { if (value === undefined) { throw new MissingParameterError(name) } } // @ts-check /** * Add a file to the git index (aka staging area) * * @param {object} args * @param {FsClient} args.fs - a file system implementation * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string|string[]} args.filepath - The path to the file to add to the index * @param {object} [args.cache] - a [cache](cache.md) object * @param {boolean} [args.force=false] - add to index even if it matches gitignore. Think `git add --force` * * @returns {Promise} Resolves successfully once the git index has been updated * * @example * await fs.promises.writeFile('/tutorial/README.md', `# TEST`) * await git.add({ fs, dir: '/tutorial', filepath: 'README.md' }) * console.log('done') * */ async function add({ fs: _fs, dir, gitdir = join(dir, '.git'), filepath, cache = {}, force = false, }) { try { assertParameter('fs', _fs); assertParameter('dir', dir); assertParameter('gitdir', gitdir); assertParameter('filepath', filepath); const fs = new FileSystem(_fs); await GitIndexManager.acquire({ fs, gitdir, cache }, async index => { return addToIndex({ dir, gitdir, fs, filepath, index, force }) }); } catch (err) { err.caller = 'git.add'; throw err } } async function addToIndex({ dir, gitdir, fs, filepath, index, force }) { // TODO: Should ignore UNLESS it's already in the index. filepath = Array.isArray(filepath) ? filepath : [filepath]; const promises = filepath.map(async currentFilepath => { if (!force) { const ignored = await GitIgnoreManager.isIgnored({ fs, dir, gitdir, filepath: currentFilepath, }); if (ignored) return } const stats = await fs.lstat(join(dir, currentFilepath)); if (!stats) throw new NotFoundError(currentFilepath) if (stats.isDirectory()) { const children = await fs.readdir(join(dir, currentFilepath)); const promises = children.map(child => addToIndex({ dir, gitdir, fs, filepath: [join(currentFilepath, child)], index, force, }) ); await Promise.all(promises); } else { const object = stats.isSymbolicLink() ?
await fs.readlink(join(dir, currentFilepath)) : await fs.read(join(dir, currentFilepath)); if (object === null) throw new NotFoundError(currentFilepath) const oid = await _writeObject({ fs, gitdir, type: 'blob', object }); index.insert({ filepath: currentFilepath, stats, oid }); } }); const settledPromises = await Promise.allSettled(promises); const rejectedPromises = settledPromises .filter(settle => settle.status === 'rejected') .map(settle => settle.reason); if (rejectedPromises.length > 1) { throw new MultipleGitError(rejectedPromises) } if (rejectedPromises.length === 1) { throw rejectedPromises[0] } const fulfilledPromises = settledPromises .filter(settle => settle.status === 'fulfilled' && settle.value) .map(settle => settle.value); return fulfilledPromises } // @ts-check /** * * @param {Object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {object} args.cache * @param {SignCallback} [args.onSign] * @param {string} args.gitdir * @param {string} args.message * @param {Object} args.author * @param {string} args.author.name * @param {string} args.author.email * @param {number} args.author.timestamp * @param {number} args.author.timezoneOffset * @param {Object} args.committer * @param {string} args.committer.name * @param {string} args.committer.email * @param {number} args.committer.timestamp * @param {number} args.committer.timezoneOffset * @param {string} [args.signingKey] * @param {boolean} [args.dryRun = false] * @param {boolean} [args.noUpdateBranch = false] * @param {string} [args.ref] * @param {string[]} [args.parent] * @param {string} [args.tree] * * @returns {Promise} Resolves successfully with the SHA-1 object id of the newly created commit. */ async function _commit({ fs, cache, onSign, gitdir, message, author, committer, signingKey, dryRun = false, noUpdateBranch = false, ref, parent, tree, }) { if (!ref) { ref = await GitRefManager.resolve({ fs, gitdir, ref: 'HEAD', depth: 2, }); } return GitIndexManager.acquire({ fs, gitdir, cache }, async function(index) { const inodes = flatFileListToDirectoryStructure(index.entries); const inode = inodes.get('.'); if (!tree) { tree = await constructTree({ fs, gitdir, inode, dryRun }); } if (!parent) { try { parent = [ await GitRefManager.resolve({ fs, gitdir, ref, }), ]; } catch (err) { // Probably an initial commit parent = []; } } let comm = GitCommit.from({ tree, parent, author, committer, message, }); if (signingKey) { comm = await GitCommit.sign(comm, onSign, signingKey); } const oid = await _writeObject({ fs, gitdir, type: 'commit', object: comm.toObject(), dryRun, }); if (!noUpdateBranch && !dryRun) { // Update branch pointer await GitRefManager.writeRef({ fs, gitdir, ref, value: oid, }); } return oid }) } async function constructTree({ fs, gitdir, inode, dryRun }) { // use depth first traversal const children = inode.children; for (const inode of children) { if (inode.type === 'tree') { inode.metadata.mode = '040000'; inode.metadata.oid = await constructTree({ fs, gitdir, inode, dryRun }); } } const entries = children.map(inode => ({ mode: inode.metadata.mode, path: inode.basename, oid: inode.metadata.oid, type: inode.type, })); const tree = GitTree.from(entries); const oid = await _writeObject({ fs, gitdir, type: 'tree', object: tree.toObject(), dryRun, }); return oid } // @ts-check async function resolveFilepath({ fs, cache, gitdir, oid, filepath }) { // Ensure there are no leading or trailing directory separators. 
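// For example (mine): filepath 'src/utils' is accepted, while '/src/utils' throws
// InvalidFilepathError('leading-slash') and 'src/utils/' throws
// InvalidFilepathError('trailing-slash') below.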
// I was going to do this automatically, but then found that the Git Terminal for Windows // auto-expands --filepath=/src/utils to --filepath=C:/Users/Will/AppData/Local/Programs/Git/src/utils // so I figured it would be wise to promote the behavior in the application layer not just the library layer. if (filepath.startsWith('/')) { throw new InvalidFilepathError('leading-slash') } else if (filepath.endsWith('/')) { throw new InvalidFilepathError('trailing-slash') } const _oid = oid; const result = await resolveTree({ fs, cache, gitdir, oid }); const tree = result.tree; if (filepath === '') { oid = result.oid; } else { const pathArray = filepath.split('/'); oid = await _resolveFilepath({ fs, cache, gitdir, tree, pathArray, oid: _oid, filepath, }); } return oid } async function _resolveFilepath({ fs, cache, gitdir, tree, pathArray, oid, filepath, }) { const name = pathArray.shift(); for (const entry of tree) { if (entry.path === name) { if (pathArray.length === 0) { return entry.oid } else { const { type, object } = await _readObject({ fs, cache, gitdir, oid: entry.oid, }); if (type !== 'tree') { throw new ObjectTypeError(oid, type, 'blob', filepath) } tree = GitTree.from(object); return _resolveFilepath({ fs, cache, gitdir, tree, pathArray, oid, filepath, }) } } } throw new NotFoundError(`file or directory found at "${oid}:${filepath}"`) } // @ts-check /** * * @typedef {Object} ReadTreeResult - The object returned has the following schema: * @property {string} oid - SHA-1 object id of this tree * @property {TreeObject} tree - the parsed tree object */ /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {any} args.cache * @param {string} args.gitdir * @param {string} args.oid * @param {string} [args.filepath] * * @returns {Promise} */ async function _readTree({ fs, cache, gitdir, oid, filepath = undefined, }) { if (filepath !== undefined) { oid = await resolveFilepath({ fs, cache, gitdir, oid, filepath }); } const { tree, oid: treeOid } = await resolveTree({ fs, cache, gitdir, oid }); const result = { oid: treeOid, tree: tree.entries(), }; return result } // @ts-check /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {string} args.gitdir * @param {TreeObject} args.tree * * @returns {Promise} */ async function _writeTree({ fs, gitdir, tree }) { // Convert object to buffer const object = GitTree.from(tree).toObject(); const oid = await _writeObject({ fs, gitdir, type: 'tree', object, format: 'content', }); return oid } // @ts-check /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {object} args.cache * @param {SignCallback} [args.onSign] * @param {string} args.gitdir * @param {string} args.ref * @param {string} args.oid * @param {string|Uint8Array} args.note * @param {boolean} [args.force] * @param {Object} args.author * @param {string} args.author.name * @param {string} args.author.email * @param {number} args.author.timestamp * @param {number} args.author.timezoneOffset * @param {Object} args.committer * @param {string} args.committer.name * @param {string} args.committer.email * @param {number} args.committer.timestamp * @param {number} args.committer.timezoneOffset * @param {string} [args.signingKey] * * @returns {Promise} */ async function _addNote({ fs, cache, onSign, gitdir, ref, oid, note, force, author, committer, signingKey, }) { // Get the current note commit let parent; try { parent = await GitRefManager.resolve({ gitdir, fs, ref }); } 
catch (err) { if (!(err instanceof NotFoundError)) { throw err } } // I'm using the "empty tree" magic number here for brevity const result = await _readTree({ fs, cache, gitdir, oid: parent || '4b825dc642cb6eb9a060e54bf8d69288fbee4904', }); let tree = result.tree; // Handle the case where a note already exists if (force) { tree = tree.filter(entry => entry.path !== oid); } else { for (const entry of tree) { if (entry.path === oid) { throw new AlreadyExistsError('note', oid) } } } // Create the note blob if (typeof note === 'string') { note = Buffer.from(note, 'utf8'); } const noteOid = await _writeObject({ fs, gitdir, type: 'blob', object: note, format: 'content', }); // Create the new note tree tree.push({ mode: '100644', path: oid, oid: noteOid, type: 'blob' }); const treeOid = await _writeTree({ fs, gitdir, tree, }); // Create the new note commit const commitOid = await _commit({ fs, cache, onSign, gitdir, ref, tree: treeOid, parent: parent && [parent], message: `Note added by 'isomorphic-git addNote'\n`, author, committer, signingKey, }); return commitOid } // @ts-check /** * @param {Object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {string} args.gitdir * @param {string} args.path * * @returns {Promise} Resolves with the config value * * @example * // Read config value * let value = await git.getConfig({ * dir: '$input((/))', * path: '$input((user.name))' * }) * console.log(value) * */ async function _getConfig({ fs, gitdir, path }) { const config = await GitConfigManager.get({ fs, gitdir }); return config.get(path) } /** * * @returns {Promise} */ async function normalizeAuthorObject({ fs, gitdir, author = {} }) { let { name, email, timestamp, timezoneOffset } = author; name = name || (await _getConfig({ fs, gitdir, path: 'user.name' })); email = email || (await _getConfig({ fs, gitdir, path: 'user.email' })) || ''; if (name === undefined) { return undefined } timestamp = timestamp != null ? timestamp : Math.floor(Date.now() / 1000); timezoneOffset = timezoneOffset != null ? timezoneOffset : new Date(timestamp * 1000).getTimezoneOffset(); return { name, email, timestamp, timezoneOffset } } /** * * @returns {Promise} */ async function normalizeCommitterObject({ fs, gitdir, author, committer, }) { committer = Object.assign({}, committer || author); // Match committer's date to author's one, if omitted if (author) { committer.timestamp = committer.timestamp || author.timestamp; committer.timezoneOffset = committer.timezoneOffset || author.timezoneOffset; } committer = await normalizeAuthorObject({ fs, gitdir, author: committer }); return committer } // @ts-check /** * Add or update an object note * * @param {object} args * @param {FsClient} args.fs - a file system implementation * @param {SignCallback} [args.onSign] - a PGP signing implementation * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} [args.ref] - The notes ref to look under * @param {string} args.oid - The SHA-1 object id of the object to add the note to. * @param {string|Uint8Array} args.note - The note to add * @param {boolean} [args.force] - Over-write note if it already exists. * @param {Object} [args.author] - The details about the author. * @param {string} [args.author.name] - Default is `user.name` config. * @param {string} [args.author.email] - Default is `user.email` config. 
* @param {number} [args.author.timestamp=Math.floor(Date.now()/1000)] - Set the author timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00). * @param {number} [args.author.timezoneOffset] - Set the author timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`. * @param {Object} [args.committer = author] - The details about the note committer, in the same format as the author parameter. If not specified, the author details are used. * @param {string} [args.committer.name] - Default is `user.name` config. * @param {string} [args.committer.email] - Default is `user.email` config. * @param {number} [args.committer.timestamp=Math.floor(Date.now()/1000)] - Set the committer timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00). * @param {number} [args.committer.timezoneOffset] - Set the committer timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`. * @param {string} [args.signingKey] - Sign the note commit using this private PGP key. * @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise} Resolves successfully with the SHA-1 object id of the commit object for the added note. */ async function addNote({ fs: _fs, onSign, dir, gitdir = join(dir, '.git'), ref = 'refs/notes/commits', oid, note, force, author: _author, committer: _committer, signingKey, cache = {}, }) { try { assertParameter('fs', _fs); assertParameter('gitdir', gitdir); assertParameter('oid', oid); assertParameter('note', note); if (signingKey) { assertParameter('onSign', onSign); } const fs = new FileSystem(_fs); const author = await normalizeAuthorObject({ fs, gitdir, author: _author }); if (!author) throw new MissingNameError('author') const committer = await normalizeCommitterObject({ fs, gitdir, author, committer: _committer, }); if (!committer) throw new MissingNameError('committer') return await _addNote({ fs, cache, onSign, gitdir, ref, oid, note, force, author, committer, signingKey, }) } catch (err) { err.caller = 'git.addNote'; throw err } } // @ts-check /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {string} args.gitdir * @param {string} args.remote * @param {string} args.url * @param {boolean} args.force * * @returns {Promise} * */ async function _addRemote({ fs, gitdir, remote, url, force }) { if (remote !== cleanGitRef.clean(remote)) { throw new InvalidRefNameError(remote, cleanGitRef.clean(remote)) } const config = await GitConfigManager.get({ fs, gitdir }); if (!force) { // Check that setting it wouldn't overwrite. const remoteNames = await config.getSubsections('remote'); if (remoteNames.includes(remote)) { // Throw an error if it would overwrite an existing remote, // but not if it's simply setting the same value again.
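// (hedged illustration: calling addRemote twice with the same url is a no-op,
//   await git.addRemote({ fs, dir, remote: 'origin', url })
//   await git.addRemote({ fs, dir, remote: 'origin', url }) // ok, same value
// but changing the url without `force: true` throws AlreadyExistsError)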
if (url !== (await config.get(`remote.${remote}.url`))) { throw new AlreadyExistsError('remote', remote) } } } await config.set(`remote.${remote}.url`, url); await config.set( `remote.${remote}.fetch`, `+refs/heads/*:refs/remotes/${remote}/*` ); await GitConfigManager.save({ fs, gitdir, config }); } // @ts-check /** * Add or update a remote * * @param {object} args * @param {FsClient} args.fs - a file system implementation * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.remote - The name of the remote * @param {string} args.url - The URL of the remote * @param {boolean} [args.force = false] - Instead of throwing an error if a remote named `remote` already exists, overwrite the existing remote. * * @returns {Promise} Resolves successfully when filesystem operations are complete * * @example * await git.addRemote({ * fs, * dir: '/tutorial', * remote: 'upstream', * url: 'https://github.com/isomorphic-git/isomorphic-git' * }) * console.log('done') * */ async function addRemote({ fs, dir, gitdir = join(dir, '.git'), remote, url, force = false, }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); assertParameter('remote', remote); assertParameter('url', url); return await _addRemote({ fs: new FileSystem(fs), gitdir, remote, url, force, }) } catch (err) { err.caller = 'git.addRemote'; throw err } } // @ts-check /** * Create an annotated tag. * * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {any} args.cache * @param {SignCallback} [args.onSign] * @param {string} args.gitdir * @param {string} args.ref * @param {string} [args.message = ref] * @param {string} [args.object = 'HEAD'] * @param {object} [args.tagger] * @param {string} args.tagger.name * @param {string} args.tagger.email * @param {number} args.tagger.timestamp * @param {number} args.tagger.timezoneOffset * @param {string} [args.gpgsig] * @param {string} [args.signingKey] * @param {boolean} [args.force = false] * * @returns {Promise} Resolves successfully when filesystem operations are complete * * @example * await git.annotatedTag({ * dir: '$input((/))', * ref: '$input((test-tag))', * message: '$input((This commit is awesome))', * tagger: { * name: '$input((Mr. Test))', * email: '$input((mrtest@example.com))' * } * }) * console.log('done') * */ async function _annotatedTag({ fs, cache, onSign, gitdir, ref, tagger, message = ref, gpgsig, object, signingKey, force = false, }) { ref = ref.startsWith('refs/tags/') ? ref : `refs/tags/${ref}`; if (!force && (await GitRefManager.exists({ fs, gitdir, ref }))) { throw new AlreadyExistsError('tag', ref) } // Resolve passed value const oid = await GitRefManager.resolve({ fs, gitdir, ref: object || 'HEAD', }); const { type } = await _readObject({ fs, cache, gitdir, oid }); let tagObject = GitAnnotatedTag.from({ object: oid, type, tag: ref.replace('refs/tags/', ''), tagger, message, gpgsig, }); if (signingKey) { tagObject = await GitAnnotatedTag.sign(tagObject, onSign, signingKey); } const value = await _writeObject({ fs, gitdir, type: 'tag', object: tagObject.toObject(), }); await GitRefManager.writeRef({ fs, gitdir, ref, value }); } // @ts-check /** * Create an annotated tag. 
* * @param {object} args * @param {FsClient} args.fs - a file system implementation * @param {SignCallback} [args.onSign] - a PGP signing implementation * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.ref - What to name the tag * @param {string} [args.message = ref] - The tag message to use. * @param {string} [args.object = 'HEAD'] - The SHA-1 object id the tag points to. (Will resolve to a SHA-1 object id if value is a ref.) By default, the commit object referred to by the current `HEAD` is used. * @param {object} [args.tagger] - The details about the tagger. * @param {string} [args.tagger.name] - Default is `user.name` config. * @param {string} [args.tagger.email] - Default is `user.email` config. * @param {number} [args.tagger.timestamp=Math.floor(Date.now()/1000)] - Set the tagger timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00). * @param {number} [args.tagger.timezoneOffset] - Set the tagger timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`. * @param {string} [args.gpgsig] - The gpgsig attached to the tag object. (Mutually exclusive with the `signingKey` option.) * @param {string} [args.signingKey] - Sign the tag object using this private PGP key. (Mutually exclusive with the `gpgsig` option.) * @param {boolean} [args.force = false] - Instead of throwing an error if a tag named `ref` already exists, overwrite the existing tag. Note that this option does not modify the original tag object itself. * @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise} Resolves successfully when filesystem operations are complete * * @example * await git.annotatedTag({ * fs, * dir: '/tutorial', * ref: 'test-tag', * message: 'This commit is awesome', * tagger: { * name: 'Mr.
Test', * email: 'mrtest@example.com' * } * }) * console.log('done') * */ async function annotatedTag({ fs: _fs, onSign, dir, gitdir = join(dir, '.git'), ref, tagger: _tagger, message = ref, gpgsig, object, signingKey, force = false, cache = {}, }) { try { assertParameter('fs', _fs); assertParameter('gitdir', gitdir); assertParameter('ref', ref); if (signingKey) { assertParameter('onSign', onSign); } const fs = new FileSystem(_fs); // Fill in missing arguments with default values const tagger = await normalizeAuthorObject({ fs, gitdir, author: _tagger }); if (!tagger) throw new MissingNameError('tagger') return await _annotatedTag({ fs, cache, onSign, gitdir, ref, tagger, message, gpgsig, object, signingKey, force, }) } catch (err) { err.caller = 'git.annotatedTag'; throw err } } // @ts-check /** * Create a branch * * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {string} args.gitdir * @param {string} args.ref * @param {boolean} [args.checkout = false] * * @returns {Promise} Resolves successfully when filesystem operations are complete * * @example * await git.branch({ dir: '$input((/))', ref: '$input((develop))' }) * console.log('done') * */ async function _branch({ fs, gitdir, ref, checkout = false }) { if (ref !== cleanGitRef.clean(ref)) { throw new InvalidRefNameError(ref, cleanGitRef.clean(ref)) } const fullref = `refs/heads/${ref}`; const exist = await GitRefManager.exists({ fs, gitdir, ref: fullref }); if (exist) { throw new AlreadyExistsError('branch', ref, false) } // Get current HEAD tree oid let oid; try { oid = await GitRefManager.resolve({ fs, gitdir, ref: 'HEAD' }); } catch (e) { // Probably an empty repo } // Create a new ref that points at the current commit if (oid) { await GitRefManager.writeRef({ fs, gitdir, ref: fullref, value: oid }); } if (checkout) { // Update HEAD await GitRefManager.writeSymbolicRef({ fs, gitdir, ref: 'HEAD', value: fullref, }); } } // @ts-check /** * Create a branch * * @param {object} args * @param {FsClient} args.fs - a file system implementation * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.ref - What to name the branch * @param {boolean} [args.checkout = false] - Update `HEAD` to point at the newly created branch * * @returns {Promise} Resolves successfully when filesystem operations are complete * * @example * await git.branch({ fs, dir: '/tutorial', ref: 'develop' }) * console.log('done') * */ async function branch({ fs, dir, gitdir = join(dir, '.git'), ref, checkout = false, }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); assertParameter('ref', ref); return await _branch({ fs: new FileSystem(fs), gitdir, ref, checkout, }) } catch (err) { err.caller = 'git.branch'; throw err } } // https://dev.to/namirsab/comment/2050 function arrayRange(start, end) { const length = end - start; return Array.from({ length }, (_, i) => start + i) } // TODO: Should I just polyfill Array.flat? const flat = typeof Array.prototype.flat === 'undefined' ? entries => entries.reduce((acc, x) => acc.concat(x), []) : entries => entries.flat(); // This is convenient for computing unions/joins of sorted lists. class RunningMinimum { constructor() { // Using a getter for 'value' would just bloat the code. // You know better than to set it directly right? 
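// (usage sketch: a fresh RunningMinimum starts with value === null; after
// consider('b') then consider('a'), value === 'a'; reset() returns it to
// null so the next round of comparisons starts clean)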
this.value = null; } consider(value) { if (value === null || value === undefined) return if (this.value === null) { this.value = value; } else if (value < this.value) { this.value = value; } } reset() { this.value = null; } } // Take an array of length N of // iterators of length Q_n // of strings // and return an iterator of length max(Q_n) for all n // of arrays of length N // of string|null who all have the same string value function* unionOfIterators(sets) { /* NOTE: We can assume all arrays are sorted. * Indexes are sorted because they are defined that way: * * > Index entries are sorted in ascending order on the name field, * > interpreted as a string of unsigned bytes (i.e. memcmp() order, no * > localization, no special casing of directory separator '/'). Entries * > with the same name are sorted by their stage field. * * Trees should be sorted because they are created directly from indexes. * They definitely should be sorted, or else they wouldn't have a unique SHA1. * So that would be very naughty on the part of the tree-creator. * * Lastly, the working dir entries are sorted because I choose to sort them * in my FileSystem.readdir() implementation. */ // Init const min = new RunningMinimum(); let minimum; const heads = []; const numsets = sets.length; for (let i = 0; i < numsets; i++) { // Abuse the fact that iterators continue to return 'undefined' for value // once they are done heads[i] = sets[i].next().value; if (heads[i] !== undefined) { min.consider(heads[i]); } } if (min.value === null) return // Iterate while (true) { const result = []; minimum = min.value; min.reset(); for (let i = 0; i < numsets; i++) { if (heads[i] !== undefined && heads[i] === minimum) { result[i] = heads[i]; heads[i] = sets[i].next().value; } else { // A little hacky, but eh result[i] = null; } if (heads[i] !== undefined) { min.consider(heads[i]); } } yield result; if (min.value === null) return } } // @ts-check /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {object} args.cache * @param {string} [args.dir] * @param {string} [args.gitdir=join(dir,'.git')] * @param {Walker[]} args.trees * @param {WalkerMap} [args.map] * @param {WalkerReduce} [args.reduce] * @param {WalkerIterate} [args.iterate] * * @returns {Promise} The finished tree-walking result * * @see {WalkerMap} * */ async function _walk({ fs, cache, dir, gitdir, trees, // @ts-ignore map = async (_, entry) => entry, // The default reducer is a flatmap that filters out undefineds. reduce = async (parent, children) => { const flatten = flat(children); if (parent !== undefined) flatten.unshift(parent); return flatten }, // The default iterate function walks all children concurrently iterate = (walk, children) => Promise.all([...children].map(walk)), }) { const walkers = trees.map(proxy => proxy[GitWalkSymbol]({ fs, dir, gitdir, cache }) ); const root = new Array(walkers.length).fill('.'); const range = arrayRange(0, walkers.length); const unionWalkerFromReaddir = async entries => { range.map(i => { entries[i] = entries[i] && new walkers[i].ConstructEntry(entries[i]); }); const subdirs = await Promise.all( range.map(i => (entries[i] ? walkers[i].readdir(entries[i]) : [])) ); // Now process child directories const iterators = subdirs .map(array => (array === null ? 
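// (a null readdir result means the entry had no children to iterate over,
// e.g. it was a blob rather than a tree, so substitute an empty list)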
[] : array)) .map(array => array[Symbol.iterator]()); return { entries, children: unionOfIterators(iterators), } }; const walk = async root => { const { entries, children } = await unionWalkerFromReaddir(root); const fullpath = entries.find(entry => entry && entry._fullpath)._fullpath; const parent = await map(fullpath, entries); if (parent !== null) { let walkedChildren = await iterate(walk, children); walkedChildren = walkedChildren.filter(x => x !== undefined); return reduce(parent, walkedChildren) } }; return walk(root) } const worthWalking = (filepath, root) => { if (filepath === '.' || root == null || root.length === 0 || root === '.') { return true } if (root.length >= filepath.length) { return root.startsWith(filepath) } else { return filepath.startsWith(root) } }; // @ts-check /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {any} args.cache * @param {ProgressCallback} [args.onProgress] * @param {string} args.dir * @param {string} args.gitdir * @param {string} args.ref * @param {string[]} [args.filepaths] * @param {string} args.remote * @param {boolean} args.noCheckout * @param {boolean} [args.noUpdateHead] * @param {boolean} [args.dryRun] * @param {boolean} [args.force] * @param {boolean} [args.track] * * @returns {Promise} Resolves successfully when filesystem operations are complete * */ async function _checkout({ fs, cache, onProgress, dir, gitdir, remote, ref, filepaths, noCheckout, noUpdateHead, dryRun, force, track = true, }) { // Get tree oid let oid; try { oid = await GitRefManager.resolve({ fs, gitdir, ref }); // TODO: Figure out what to do if both 'ref' and 'remote' are specified, ref already exists, // and is configured to track a different remote. } catch (err) { if (ref === 'HEAD') throw err // If `ref` doesn't exist, create a new remote tracking branch // Figure out the commit to checkout const remoteRef = `${remote}/${ref}`; oid = await GitRefManager.resolve({ fs, gitdir, ref: remoteRef, }); if (track) { // Set up remote tracking branch const config = await GitConfigManager.get({ fs, gitdir }); await config.set(`branch.${ref}.remote`, remote); await config.set(`branch.${ref}.merge`, `refs/heads/${ref}`); await GitConfigManager.save({ fs, gitdir, config }); } // Create a new branch that points at that same commit await GitRefManager.writeRef({ fs, gitdir, ref: `refs/heads/${ref}`, value: oid, }); } // Update working dir if (!noCheckout) { let ops; // First pass - just analyze files (not directories) and figure out what needs to be done try { ops = await analyze({ fs, cache, onProgress, dir, gitdir, ref, force, filepaths, }); } catch (err) { // Throw a more helpful error message for this common mistake. 
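// (the common mistake being a checkout of a commit that was never actually
// fetched, e.g. from a shallow or single-branch clone that lacks the target
// oid, which would otherwise surface as an unhelpful bare NotFoundError)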
if (err instanceof NotFoundError && err.data.what === oid) { throw new CommitNotFetchedError(ref, oid) } else { throw err } } // Report conflicts const conflicts = ops .filter(([method]) => method === 'conflict') .map(([method, fullpath]) => fullpath); if (conflicts.length > 0) { throw new CheckoutConflictError(conflicts) } // Collect errors const errors = ops .filter(([method]) => method === 'error') .map(([method, fullpath]) => fullpath); if (errors.length > 0) { throw new InternalError(errors.join(', ')) } if (dryRun) { // Since the format of 'ops' is in flux, I really would rather folk besides myself not start relying on it // return ops return } // Second pass - execute planned changes // The cheapest semi-parallel solution without computing a full dependency graph will be // to just do ops in 4 dumb phases: delete files, delete dirs, create dirs, write files let count = 0; const total = ops.length; await GitIndexManager.acquire({ fs, gitdir, cache }, async function(index) { await Promise.all( ops .filter( ([method]) => method === 'delete' || method === 'delete-index' ) .map(async function([method, fullpath]) { const filepath = `${dir}/${fullpath}`; if (method === 'delete') { await fs.rm(filepath); } index.delete({ filepath: fullpath }); if (onProgress) { await onProgress({ phase: 'Updating workdir', loaded: ++count, total, }); } }) ); }); // Note: this cannot be done naively in parallel await GitIndexManager.acquire({ fs, gitdir, cache }, async function(index) { for (const [method, fullpath] of ops) { if (method === 'rmdir' || method === 'rmdir-index') { const filepath = `${dir}/${fullpath}`; try { if (method === 'rmdir-index') { index.delete({ filepath: fullpath }); } await fs.rmdir(filepath); if (onProgress) { await onProgress({ phase: 'Updating workdir', loaded: ++count, total, }); } } catch (e) { if (e.code === 'ENOTEMPTY') { console.log( `Did not delete ${fullpath} because directory is not empty` ); } else { throw e } } } } }); await Promise.all( ops .filter(([method]) => method === 'mkdir' || method === 'mkdir-index') .map(async function([_, fullpath]) { const filepath = `${dir}/${fullpath}`; await fs.mkdir(filepath); if (onProgress) { await onProgress({ phase: 'Updating workdir', loaded: ++count, total, }); } }) ); await GitIndexManager.acquire({ fs, gitdir, cache }, async function(index) { await Promise.all( ops .filter( ([method]) => method === 'create' || method === 'create-index' || method === 'update' || method === 'mkdir-index' ) .map(async function([method, fullpath, oid, mode, chmod]) { const filepath = `${dir}/${fullpath}`; try { if (method !== 'create-index' && method !== 'mkdir-index') { const { object } = await _readObject({ fs, cache, gitdir, oid }); if (chmod) { // Note: the mode option of fs.write only works when creating files, // not updating them. Since the `fs` plugin doesn't expose `chmod` this // is our only option. await fs.rm(filepath); } if (mode === 0o100644) { // regular file await fs.write(filepath, object); } else if (mode === 0o100755) { // executable file await fs.write(filepath, object, { mode: 0o777 }); } else if (mode === 0o120000) { // symlink await fs.writelink(filepath, object); } else { throw new InternalError( `Invalid mode 0o${mode.toString(8)} detected in blob ${oid}` ) } } const stats = await fs.lstat(filepath); // We can't trust the executable bit returned by lstat on Windows, // so we need to preserve this value from the TREE. // TODO: Figure out how git handles this internally.
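// (concretely: if the tree says the blob mode is 0o100755 but lstat on
// Windows reports a non-executable mode, force the index entry's mode to
// 0o755 so the executable bit survives the round-trip)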
if (mode === 0o100755) { stats.mode = 0o755; } // Submodules are present in the git index but use a unique mode different from trees if (method === 'mkdir-index') { stats.mode = 0o160000; } index.insert({ filepath: fullpath, stats, oid, }); if (onProgress) { await onProgress({ phase: 'Updating workdir', loaded: ++count, total, }); } } catch (e) { console.log(e); } }) ); }); } // Update HEAD if (!noUpdateHead) { const fullRef = await GitRefManager.expand({ fs, gitdir, ref }); if (fullRef.startsWith('refs/heads')) { await GitRefManager.writeSymbolicRef({ fs, gitdir, ref: 'HEAD', value: fullRef, }); } else { // detached head await GitRefManager.writeRef({ fs, gitdir, ref: 'HEAD', value: oid }); } } } async function analyze({ fs, cache, onProgress, dir, gitdir, ref, force, filepaths, }) { let count = 0; return _walk({ fs, cache, dir, gitdir, trees: [TREE({ ref }), WORKDIR(), STAGE()], map: async function(fullpath, [commit, workdir, stage]) { if (fullpath === '.') return // match against base paths if (filepaths && !filepaths.some(base => worthWalking(fullpath, base))) { return null } // Emit progress event if (onProgress) { await onProgress({ phase: 'Analyzing workdir', loaded: ++count }); } // This is a kind of silly pattern but it worked so well for me in the past // and it makes intuitively demonstrating exhaustiveness so *easy*. // This checks for the presence and/or absence of each of the 3 entries, // converts that to a 3-bit binary representation, and then handles // every possible combination (2^3 or 8 cases) with a lookup table. const key = [!!stage, !!commit, !!workdir].map(Number).join(''); switch (key) { // Impossible case. case '000': return // Ignore workdir files that are not tracked and not part of the new commit. case '001': // OK, make an exception for explicitly named files. if (force && filepaths && filepaths.includes(fullpath)) { return ['delete', fullpath] } return // New entries case '010': { switch (await commit.type()) { case 'tree': { return ['mkdir', fullpath] } case 'blob': { return [ 'create', fullpath, await commit.oid(), await commit.mode(), ] } case 'commit': { return [ 'mkdir-index', fullpath, await commit.oid(), await commit.mode(), ] } default: { return [ 'error', `new entry Unhandled type ${await commit.type()}`, ] } } } // New entries but there is already something in the workdir there. case '011': { switch (`${await commit.type()}-${await workdir.type()}`) { case 'tree-tree': { return // noop } case 'tree-blob': case 'blob-tree': { return ['conflict', fullpath] } case 'blob-blob': { // Is the incoming file different? if ((await commit.oid()) !== (await workdir.oid())) { if (force) { return [ 'update', fullpath, await commit.oid(), await commit.mode(), (await commit.mode()) !== (await workdir.mode()), ] } else { return ['conflict', fullpath] } } else { // Is the incoming file a different mode? if ((await commit.mode()) !== (await workdir.mode())) { if (force) { return [ 'update', fullpath, await commit.oid(), await commit.mode(), true, ] } else { return ['conflict', fullpath] } } else { return [ 'create-index', fullpath, await commit.oid(), await commit.mode(), ] } } } case 'commit-tree': { // TODO: submodule // We'll ignore submodule directories for now. // Users prefer we not throw an error for lack of submodule support. // gitlinks return } case 'commit-blob': { // TODO: submodule // But... we'll complain if there is a *file* where we would // put a submodule if we had submodule support.
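// (i.e. the incoming commit has a gitlink here but the workdir has a plain
// file; reporting a conflict is safer than silently overwriting it)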
return ['conflict', fullpath] } default: { return ['error', `new entry Unhandled type ${await commit.type()}`] } } } // Something in stage but not in the commit OR the workdir. // Note: I verified this behavior against canonical git. case '100': { return ['delete-index', fullpath] } // Deleted entries // TODO: How to handle if stage type and workdir type mismatch? case '101': { switch (await stage.type()) { case 'tree': { return ['rmdir', fullpath] } case 'blob': { // Git checks that the workdir.oid === stage.oid before deleting file if ((await stage.oid()) !== (await workdir.oid())) { if (force) { return ['delete', fullpath] } else { return ['conflict', fullpath] } } else { return ['delete', fullpath] } } case 'commit': { return ['rmdir-index', fullpath] } default: { return [ 'error', `delete entry Unhandled type ${await stage.type()}`, ] } } } /* eslint-disable no-fallthrough */ // File missing from workdir case '110': // Possibly modified entries case '111': { /* eslint-enable no-fallthrough */ switch (`${await stage.type()}-${await commit.type()}`) { case 'tree-tree': { return } case 'blob-blob': { // If the file hasn't changed, there is no need to do anything. // Existing file modifications in the workdir can be left as is. if ( (await stage.oid()) === (await commit.oid()) && (await stage.mode()) === (await commit.mode()) && !force ) { return } // Check for local changes that would be lost if (workdir) { // Note: canonical git only compares with the stage. But we're smart enough // to compare to the stage AND the incoming commit. if ( (await workdir.oid()) !== (await stage.oid()) && (await workdir.oid()) !== (await commit.oid()) ) { if (force) { return [ 'update', fullpath, await commit.oid(), await commit.mode(), (await commit.mode()) !== (await workdir.mode()), ] } else { return ['conflict', fullpath] } } } else if (force) { return [ 'update', fullpath, await commit.oid(), await commit.mode(), (await commit.mode()) !== (await stage.mode()), ] } // Has file mode changed? if ((await commit.mode()) !== (await stage.mode())) { return [ 'update', fullpath, await commit.oid(), await commit.mode(), true, ] } // TODO: HANDLE SYMLINKS // Has the file content changed? if ((await commit.oid()) !== (await stage.oid())) { return [ 'update', fullpath, await commit.oid(), await commit.mode(), false, ] } else { return } } case 'tree-blob': { return ['update-dir-to-blob', fullpath, await commit.oid()] } case 'blob-tree': { return ['update-blob-to-tree', fullpath] } case 'commit-commit': { return [ 'mkdir-index', fullpath, await commit.oid(), await commit.mode(), ] } default: { return [ 'error', `update entry Unhandled type ${await stage.type()}-${await commit.type()}`, ] } } } } }, // Modify the default flat mapping reduce: async function(parent, children) { children = flat(children); if (!parent) { return children } else if (parent && parent[0] === 'rmdir') { children.push(parent); return children } else { children.unshift(parent); return children } }, }) } // @ts-check /** * Checkout a branch * * If the branch already exists it will check out that branch. Otherwise, it will create a new remote tracking branch set to track the remote branch of that name.
* * @param {object} args * @param {FsClient} args.fs - a file system implementation * @param {ProgressCallback} [args.onProgress] - optional progress event callback * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} [args.ref = 'HEAD'] - Source to checkout files from * @param {string[]} [args.filepaths] - Limit the checkout to the given files and directories * @param {string} [args.remote = 'origin'] - Which remote repository to use * @param {boolean} [args.noCheckout = false] - If true, will update HEAD but won't update the working directory * @param {boolean} [args.noUpdateHead] - If true, will update the working directory but won't update HEAD. Defaults to `false` when `ref` is provided, and `true` if `ref` is not provided. * @param {boolean} [args.dryRun = false] - If true, simulates a checkout so you can test whether it would succeed. * @param {boolean} [args.force = false] - If true, conflicts will be ignored and files will be overwritten regardless of local changes. * @param {boolean} [args.track = true] - If false, will not set the remote branch tracking information. Defaults to true. * @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise} Resolves successfully when filesystem operations are complete * * @example * // switch to the main branch * await git.checkout({ * fs, * dir: '/tutorial', * ref: 'main' * }) * console.log('done') * * @example * // restore the 'docs' and 'src/docs' folders to the way they were, overwriting any changes * await git.checkout({ * fs, * dir: '/tutorial', * force: true, * filepaths: ['docs', 'src/docs'] * }) * console.log('done') * * @example * // restore the 'docs' and 'src/docs' folders to the way they are in the 'develop' branch, overwriting any changes * await git.checkout({ * fs, * dir: '/tutorial', * ref: 'develop', * noUpdateHead: true, * force: true, * filepaths: ['docs', 'src/docs'] * }) * console.log('done') */ async function checkout({ fs, onProgress, dir, gitdir = join(dir, '.git'), remote = 'origin', ref: _ref, filepaths, noCheckout = false, noUpdateHead = _ref === undefined, dryRun = false, force = false, track = true, cache = {}, }) { try { assertParameter('fs', fs); assertParameter('dir', dir); assertParameter('gitdir', gitdir); const ref = _ref || 'HEAD'; return await _checkout({ fs: new FileSystem(fs), cache, onProgress, dir, gitdir, remote, ref, filepaths, noCheckout, noUpdateHead, dryRun, force, track, }) } catch (err) { err.caller = 'git.checkout'; throw err } } // @see https://git-scm.com/docs/git-rev-parse.html#_specifying_revisions const abbreviateRx = new RegExp('^refs/(heads/|tags/|remotes/)?(.*)'); function abbreviateRef(ref) { const match = abbreviateRx.exec(ref); if (match) { if (match[1] === 'remotes/' && ref.endsWith('/HEAD')) { return match[2].slice(0, -5) } else { return match[2] } } return ref } // @ts-check /** * @param {Object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {string} args.gitdir * @param {boolean} [args.fullname = false] - Return the full path (e.g. "refs/heads/main") instead of the abbreviated form. * @param {boolean} [args.test = false] - If the current branch doesn't actually exist (such as right after git init) then return `undefined`. * * @returns {Promise} The name of the current branch or undefined if the HEAD is detached. 
* */ async function _currentBranch({ fs, gitdir, fullname = false, test = false, }) { const ref = await GitRefManager.resolve({ fs, gitdir, ref: 'HEAD', depth: 2, }); if (test) { try { await GitRefManager.resolve({ fs, gitdir, ref }); } catch (_) { return } } // Return `undefined` for detached HEAD if (!ref.startsWith('refs/')) return return fullname ? ref : abbreviateRef(ref) } function translateSSHtoHTTP(url) { // handle "shorter scp-like syntax" url = url.replace(/^git@([^:]+):/, 'https://$1/'); // handle proper SSH URLs url = url.replace(/^ssh:\/\//, 'https://'); return url } function calculateBasicAuthHeader({ username = '', password = '' }) { return `Basic ${Buffer.from(`${username}:${password}`).toString('base64')}` } // Currently 'for await' upsets my linters. async function forAwait(iterable, cb) { const iter = getIterator(iterable); while (true) { const { value, done } = await iter.next(); if (value) await cb(value); if (done) break } if (iter.return) iter.return(); } async function collect(iterable) { let size = 0; const buffers = []; // This will be easier once `for await ... of` loops are available. await forAwait(iterable, value => { buffers.push(value); size += value.byteLength; }); const result = new Uint8Array(size); let nextIndex = 0; for (const buffer of buffers) { result.set(buffer, nextIndex); nextIndex += buffer.byteLength; } return result } function extractAuthFromUrl(url) { // For whatever reason, the `fetch` API does not convert credentials embedded in the URL // into Basic Authentication headers automatically. Instead it throws an error! // So we must manually parse the URL, rip out the user:password portion if it is present // and compute the Authorization header. // Note: I tried using new URL(url) but that throws a security exception in Edge. :rolleyes: let userpass = url.match(/^https?:\/\/([^/]+)@/); // No credentials, return the url unmodified and an empty auth object if (userpass == null) return { url, auth: {} } userpass = userpass[1]; const [username, password] = userpass.split(':'); // Remove credentials from URL url = url.replace(`${userpass}@`, ''); // Has credentials, return the fetch-safe URL and the parsed credentials return { url, auth: { username, password } } } function padHex(b, n) { const s = n.toString(16); return '0'.repeat(b - s.length) + s } /** pkt-line Format --------------- Much (but not all) of the payload is described around pkt-lines. A pkt-line is a variable length binary string. The first four bytes of the line, the pkt-len, indicates the total length of the line, in hexadecimal. The pkt-len includes the 4 bytes used to contain the length's hexadecimal representation. A pkt-line MAY contain binary data, so implementors MUST ensure pkt-line parsing/formatting routines are 8-bit clean. A non-binary line SHOULD BE terminated by an LF, which if present MUST be included in the total length. Receivers MUST treat pkt-lines with non-binary data the same whether or not they contain the trailing LF (stripping the LF if present, and not complaining when it is missing). The maximum length of a pkt-line's data component is 65516 bytes. Implementations MUST NOT send pkt-line whose length exceeds 65520 (65516 bytes of payload + 4 bytes of length data). Implementations SHOULD NOT send an empty pkt-line ("0004"). A pkt-line with a length field of 0 ("0000"), called a flush-pkt, is a special case and MUST be handled differently than an empty pkt-line ("0004"). 
---- pkt-line = data-pkt / flush-pkt data-pkt = pkt-len pkt-payload pkt-len = 4*(HEXDIG) pkt-payload = (pkt-len - 4)*(OCTET) flush-pkt = "0000" ---- Examples (as C-style strings): ---- pkt-line actual value --------------------------------- "0006a\n" "a\n" "0005a" "a" "000bfoobar\n" "foobar\n" "0004" "" ---- */ // I'm really using this more as a namespace. // There's not a lot of "state" in a pkt-line class GitPktLine { static flush() { return Buffer.from('0000', 'utf8') } static delim() { return Buffer.from('0001', 'utf8') } static encode(line) { if (typeof line === 'string') { line = Buffer.from(line); } const length = line.length + 4; const hexlength = padHex(4, length); return Buffer.concat([Buffer.from(hexlength, 'utf8'), line]) } static streamReader(stream) { const reader = new StreamReader(stream); return async function read() { try { let length = await reader.read(4); if (length == null) return true length = parseInt(length.toString('utf8'), 16); if (length === 0) return null if (length === 1) return null // delim packets const buffer = await reader.read(length - 4); if (buffer == null) return true return buffer } catch (err) { console.log('error', err); return true } } } } // @ts-check /** * @param {function} read */ async function parseCapabilitiesV2(read) { /** @type {Object} */ const capabilities2 = {}; let line; while (true) { line = await read(); if (line === true) break if (line === null) continue line = line.toString('utf8').replace(/\n$/, ''); const i = line.indexOf('='); if (i > -1) { const key = line.slice(0, i); const value = line.slice(i + 1); capabilities2[key] = value; } else { capabilities2[line] = true; } } return { protocolVersion: 2, capabilities2 } } async function parseRefsAdResponse(stream, { service }) { const capabilities = new Set(); const refs = new Map(); const symrefs = new Map(); // There is probably a better way to do this, but for now // let's just throw the result parser inline here. const read = GitPktLine.streamReader(stream); let lineOne = await read(); // skip past any flushes while (lineOne === null) lineOne = await read(); if (lineOne === true) throw new EmptyServerResponseError() // Handle protocol v2 responses (Bitbucket Server doesn't include a `# service=` line) if (lineOne.includes('version 2')) { return parseCapabilitiesV2(read) } // Clients MUST ignore an LF at the end of the line. if (lineOne.toString('utf8').replace(/\n$/, '') !== `# service=${service}`) { throw new ParseError(`# service=${service}\\n`, lineOne.toString('utf8')) } let lineTwo = await read(); // skip past any flushes while (lineTwo === null) lineTwo = await read(); // In the edge case of a brand new repo, zero refs (and zero capabilities) // are returned. if (lineTwo === true) return { capabilities, refs, symrefs } lineTwo = lineTwo.toString('utf8'); // Handle protocol v2 responses if (lineTwo.includes('version 2')) { return parseCapabilitiesV2(read) } const [firstRef, capabilitiesLine] = splitAndAssert(lineTwo, '\x00', '\\x00'); capabilitiesLine.split(' ').map(x => capabilities.add(x)); const [ref, name] = splitAndAssert(firstRef, ' ', ' '); refs.set(name, ref); while (true) { const line = await read(); if (line === true) break if (line !== null) { const [ref, name] = splitAndAssert(line.toString('utf8'), ' ', ' '); refs.set(name, ref); } } // Symrefs are thrown into the "capabilities" unfortunately. 
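// (hedged illustration: a server line like "symref=HEAD:refs/heads/main"
// arrives as a capability; the match below yields m[1] === 'HEAD' and
// m[2] === 'refs/heads/main', so symrefs maps HEAD to its target branch)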
for (const cap of capabilities) { if (cap.startsWith('symref=')) { const m = cap.match(/symref=([^:]+):(.*)/); if (m && m.length === 3) { symrefs.set(m[1], m[2]); } } } return { protocolVersion: 1, capabilities, refs, symrefs } } function splitAndAssert(line, sep, expected) { const split = line.trim().split(sep); if (split.length !== 2) { throw new ParseError( `Two strings separated by '${expected}'`, line.toString('utf8') ) } return split } // Try to accommodate known CORS proxy implementations: // - https://jcubic.pl/proxy.php? <-- uses query string // - https://cors.isomorphic-git.org <-- uses path const corsProxify = (corsProxy, url) => corsProxy.endsWith('?') ? `${corsProxy}${url}` : `${corsProxy}/${url.replace(/^https?:\/\//, '')}`; const updateHeaders = (headers, auth) => { // Update the basic auth header if (auth.username || auth.password) { headers.Authorization = calculateBasicAuthHeader(auth); } // but any manually provided headers take precedence if (auth.headers) { Object.assign(headers, auth.headers); } }; /** * @param {GitHttpResponse} res * * @returns {{ preview: string, response: string, data: Buffer }} */ const stringifyBody = async res => { try { // Some services provide a meaningful error message in the body of 403s like "token lacks the scopes necessary to perform this action" const data = Buffer.from(await collect(res.body)); const response = data.toString('utf8'); const preview = response.length < 256 ? response : response.slice(0, 256) + '...'; return { preview, response, data } } catch (e) { return {} } }; class GitRemoteHTTP { static async capabilities() { return ['discover', 'connect'] } /** * @param {Object} args * @param {HttpClient} args.http * @param {ProgressCallback} [args.onProgress] * @param {AuthCallback} [args.onAuth] * @param {AuthFailureCallback} [args.onAuthFailure] * @param {AuthSuccessCallback} [args.onAuthSuccess] * @param {string} [args.corsProxy] * @param {string} args.service * @param {string} args.url * @param {Object} args.headers * @param {1 | 2} args.protocolVersion - Git Protocol Version */ static async discover({ http, onProgress, onAuth, onAuthSuccess, onAuthFailure, corsProxy, service, url: _origUrl, headers, protocolVersion, }) { let { url, auth } = extractAuthFromUrl(_origUrl); const proxifiedURL = corsProxy ? corsProxify(corsProxy, url) : url; if (auth.username || auth.password) { headers.Authorization = calculateBasicAuthHeader(auth); } if (protocolVersion === 2) { headers['Git-Protocol'] = 'version=2'; } let res; let tryAgain; let providedAuthBefore = false; do { res = await http.request({ onProgress, method: 'GET', url: `${proxifiedURL}/info/refs?service=${service}`, headers, }); // the default loop behavior tryAgain = false; // 401 is the "correct" response for access denied. 203 is Non-Authoritative Information and comes from Azure DevOps, which // apparently doesn't realize this is a git request and is returning the HTML for the "Azure DevOps Services | Sign In" page. if (res.statusCode === 401 || res.statusCode === 203) { // On subsequent 401s, call `onAuthFailure` instead of `onAuth`. // This is so that naive `onAuth` callbacks that return a fixed value don't create an infinite loop of retrying. const getAuth = providedAuthBefore ? onAuthFailure : onAuth; if (getAuth) { // Acquire credentials and try again // TODO: read `useHttpPath` value from git config and pass along?
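// (a minimal sketch of an onAuth callback a caller might supply; the
// { username, password } and { cancel } shapes follow the AuthCallback
// contract, while the token value is purely illustrative:
//   const onAuth = () => ({ username: 'x-access-token', password: token })
// returning { cancel: true } instead aborts with a UserCanceledError)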
auth = await getAuth(url, { ...auth, headers: { ...headers }, }); if (auth && auth.cancel) { throw new UserCanceledError() } else if (auth) { updateHeaders(headers, auth); providedAuthBefore = true; tryAgain = true; } } } else if ( res.statusCode === 200 && providedAuthBefore && onAuthSuccess ) { await onAuthSuccess(url, auth); } } while (tryAgain) if (res.statusCode !== 200) { const { response } = await stringifyBody(res); throw new HttpError(res.statusCode, res.statusMessage, response) } // Git "smart" HTTP servers should respond with the correct Content-Type header. if ( res.headers['content-type'] === `application/x-${service}-advertisement` ) { const remoteHTTP = await parseRefsAdResponse(res.body, { service }); remoteHTTP.auth = auth; return remoteHTTP } else { // If they don't send the correct content-type header, that's a good indicator it is either a "dumb" HTTP // server, or the user specified an incorrect remote URL and the response is actually an HTML page. // In this case, we save the response as plain text so we can generate a better error message if needed. const { preview, response, data } = await stringifyBody(res); // For backwards compatibility, try to parse it anyway. // TODO: maybe just throw instead of trying? try { const remoteHTTP = await parseRefsAdResponse([data], { service }); remoteHTTP.auth = auth; return remoteHTTP } catch (e) { throw new SmartHttpError(preview, response) } } } /** * @param {Object} args * @param {HttpClient} args.http * @param {ProgressCallback} [args.onProgress] * @param {string} [args.corsProxy] * @param {string} args.service * @param {string} args.url * @param {Object} [args.headers] * @param {any} args.body * @param {any} args.auth */ static async connect({ http, onProgress, corsProxy, service, url, auth, body, headers, }) { // We already have the "correct" auth value at this point, but // we need to strip out the username/password from the URL yet again. const urlAuth = extractAuthFromUrl(url); if (urlAuth) url = urlAuth.url; if (corsProxy) url = corsProxify(corsProxy, url); headers['content-type'] = `application/x-${service}-request`; headers.accept = `application/x-${service}-result`; updateHeaders(headers, auth); const res = await http.request({ onProgress, method: 'POST', url: `${url}/${service}`, body, headers, }); if (res.statusCode !== 200) { const { response } = await stringifyBody(res); throw new HttpError(res.statusCode, res.statusMessage, response) } return res } } function parseRemoteUrl({ url }) { // the stupid "shorter scp-like syntax" if (url.startsWith('git@')) { return { transport: 'ssh', address: url, } } const matches = url.match(/(\w+)(:\/\/|::)(.*)/); if (matches === null) return /* * When git encounters a URL of the form <transport>://<address>, where <transport> is * a protocol that it cannot handle natively, it automatically invokes git remote-<transport> * with the full URL as the second argument. * * @see https://git-scm.com/docs/git-remote-helpers */ if (matches[2] === '://') { return { transport: matches[1], address: matches[0], } } /* * A URL of the form <transport>::<address> explicitly instructs git to invoke * git remote-<transport> with <address>
as the second argument. * * @see https://git-scm.com/docs/git-remote-helpers */ if (matches[2] === '::') { return { transport: matches[1], address: matches[3], } } } class GitRemoteManager { static getRemoteHelperFor({ url }) { // TODO: clean up the remoteHelper API and move into PluginCore const remoteHelpers = new Map(); remoteHelpers.set('http', GitRemoteHTTP); remoteHelpers.set('https', GitRemoteHTTP); const parts = parseRemoteUrl({ url }); if (!parts) { throw new UrlParseError(url) } if (remoteHelpers.has(parts.transport)) { return remoteHelpers.get(parts.transport) } throw new UnknownTransportError( url, parts.transport, parts.transport === 'ssh' ? translateSSHtoHTTP(url) : undefined ) } } let lock$1 = null; class GitShallowManager { static async read({ fs, gitdir }) { if (lock$1 === null) lock$1 = new AsyncLock(); const filepath = join(gitdir, 'shallow'); const oids = new Set(); await lock$1.acquire(filepath, async function() { const text = await fs.read(filepath, { encoding: 'utf8' }); if (text === null) return oids // no file if (text.trim() === '') return oids // empty file text .trim() .split('\n') .map(oid => oids.add(oid)); }); return oids } static async write({ fs, gitdir, oids }) { if (lock$1 === null) lock$1 = new AsyncLock(); const filepath = join(gitdir, 'shallow'); if (oids.size > 0) { const text = [...oids].join('\n') + '\n'; await lock$1.acquire(filepath, async function() { await fs.write(filepath, text, { encoding: 'utf8', }); }); } else { // No shallows await lock$1.acquire(filepath, async function() { await fs.rm(filepath); }); } } } async function hasObjectLoose({ fs, gitdir, oid }) { const source = `objects/${oid.slice(0, 2)}/${oid.slice(2)}`; return fs.exists(`${gitdir}/${source}`) } async function hasObjectPacked({ fs, cache, gitdir, oid, getExternalRefDelta, }) { // Check to see if it's in a packfile. // Iterate through all the .idx files let list = await fs.readdir(join(gitdir, 'objects/pack')); list = list.filter(x => x.endsWith('.idx')); for (const filename of list) { const indexFile = `${gitdir}/objects/pack/${filename}`; const p = await readPackIndex({ fs, cache, filename: indexFile, getExternalRefDelta, }); if (p.error) throw new InternalError(p.error) // If the packfile DOES have the oid we're looking for... if (p.offsets.has(oid)) { return true } } // Failed to find it return false } async function hasObject({ fs, cache, gitdir, oid, format = 'content', }) { // Curry the current read method so that the packfile un-deltification // process can acquire external ref-deltas. const getExternalRefDelta = oid => _readObject({ fs, cache, gitdir, oid }); // Look for it in the loose object directory. let result = await hasObjectLoose({ fs, gitdir, oid }); // Check to see if it's in a packfile. if (!result) { result = await hasObjectPacked({ fs, cache, gitdir, oid, getExternalRefDelta, }); } // Finally return result } // TODO: make a function that just returns obCount. 
then emptyPackfile = () => sizePack(pack) === 0 function emptyPackfile(pack) { const pheader = '5041434b'; const version = '00000002'; const obCount = '00000000'; const header = pheader + version + obCount; return pack.slice(0, 12).toString('hex') === header } function filterCapabilities(server, client) { const serverNames = server.map(cap => cap.split('=', 1)[0]); return client.filter(cap => { const name = cap.split('=', 1)[0]; return serverNames.includes(name) }) } const pkg = { name: 'isomorphic-git', version: '1.17.0', agent: 'git/isomorphic-git@1.17.0', }; class FIFO { constructor() { this._queue = []; } write(chunk) { if (this._ended) { throw Error('You cannot write to a FIFO that has already been ended!') } if (this._waiting) { const resolve = this._waiting; this._waiting = null; resolve({ value: chunk }); } else { this._queue.push(chunk); } } end() { this._ended = true; if (this._waiting) { const resolve = this._waiting; this._waiting = null; resolve({ done: true }); } } destroy(err) { this._ended = true; this.error = err; } async next() { if (this._queue.length > 0) { return { value: this._queue.shift() } } if (this._ended) { return { done: true } } if (this._waiting) { throw Error( 'You cannot call read until the previous call to read has returned!' ) } return new Promise(resolve => { this._waiting = resolve; }) } } // Note: progress messages are designed to be written directly to the terminal, // so they are often sent with just a carriage return to overwrite the last line of output. // But there are also messages delimited with newlines. // I also include CRLF just in case. function findSplit(str) { const r = str.indexOf('\r'); const n = str.indexOf('\n'); if (r === -1 && n === -1) return -1 if (r === -1) return n + 1 // \n if (n === -1) return r + 1 // \r if (n === r + 1) return n + 1 // \r\n return Math.min(r, n) + 1 // \r or \n } function splitLines(input) { const output = new FIFO(); let tmp = '' ;(async () => { await forAwait(input, chunk => { chunk = chunk.toString('utf8'); tmp += chunk; while (true) { const i = findSplit(tmp); if (i === -1) break output.write(tmp.slice(0, i)); tmp = tmp.slice(i); } }); if (tmp.length > 0) { output.write(tmp); } output.end(); })(); return output } /* If 'side-band' or 'side-band-64k' capabilities have been specified by the client, the server will send the packfile data multiplexed. Each packet starting with the packet-line length of the amount of data that follows, followed by a single byte specifying the sideband the following data is coming in on. In 'side-band' mode, it will send up to 999 data bytes plus 1 control code, for a total of up to 1000 bytes in a pkt-line. In 'side-band-64k' mode it will send up to 65519 data bytes plus 1 control code, for a total of up to 65520 bytes in a pkt-line. The sideband byte will be a '1', '2' or a '3'. Sideband '1' will contain packfile data, sideband '2' will be used for progress information that the client will generally print to stderr and sideband '3' is used for error information. If no 'side-band' capability was specified, the server will stream the entire packfile without multiplexing. */ class GitSideBand { static demux(input) { const read = GitPktLine.streamReader(input); // And now for the ridiculous side-band or side-band-64k protocol const packetlines = new FIFO(); const packfile = new FIFO(); const progress = new FIFO(); // TODO: Use a proper through stream? 
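// (worked example, as a hedged illustration: the pkt-line "0009\x01PACK" has
// a 5-byte payload [0x01, 'P', 'A', 'C', 'K']; the leading 0x01 routes the
// remaining bytes to `packfile`, 0x02 would route them to `progress`, and
// 0x03 carries a fatal error message)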
const nextBit = async function() { const line = await read(); // Skip over flush packets if (line === null) return nextBit() // A made up convention to signal there's no more to read. if (line === true) { packetlines.end(); progress.end(); packfile.end(); return } // Examine first byte to determine which output "stream" to use switch (line[0]) { case 1: { // pack data packfile.write(line.slice(1)); break } case 2: { // progress message progress.write(line.slice(1)); break } case 3: { // fatal error message just before stream aborts const error = line.slice(1); progress.write(error); packfile.destroy(new Error(error.toString('utf8'))); return } default: { // Not part of the side-band-64k protocol packetlines.write(line.slice(0)); } } // Careful not to blow up the stack. // I think Promises in a tail-call position should be OK. nextBit(); }; nextBit(); return { packetlines, packfile, progress, } } // static mux ({ // protocol, // 'side-band' or 'side-band-64k' // packetlines, // packfile, // progress, // error // }) { // const MAX_PACKET_LENGTH = protocol === 'side-band-64k' ? 65519 : 999 // let output = new PassThrough() // packetlines.on('data', data => { // if (data === null) { // output.write(GitPktLine.flush()) // } else { // output.write(GitPktLine.encode(data)) // } // }) // let packfileWasEmpty = true // let packfileEnded = false // let progressEnded = false // let errorEnded = false // let goodbye = Buffer.concat([ // GitPktLine.encode(Buffer.from('010A', 'hex')), // GitPktLine.flush() // ]) // packfile // .on('data', data => { // packfileWasEmpty = false // const buffers = splitBuffer(data, MAX_PACKET_LENGTH) // for (const buffer of buffers) { // output.write( // GitPktLine.encode(Buffer.concat([Buffer.from('01', 'hex'), buffer])) // ) // } // }) // .on('end', () => { // packfileEnded = true // if (!packfileWasEmpty) output.write(goodbye) // if (progressEnded && errorEnded) output.end() // }) // progress // .on('data', data => { // const buffers = splitBuffer(data, MAX_PACKET_LENGTH) // for (const buffer of buffers) { // output.write( // GitPktLine.encode(Buffer.concat([Buffer.from('02', 'hex'), buffer])) // ) // } // }) // .on('end', () => { // progressEnded = true // if (packfileEnded && errorEnded) output.end() // }) // error // .on('data', data => { // const buffers = splitBuffer(data, MAX_PACKET_LENGTH) // for (const buffer of buffers) { // output.write( // GitPktLine.encode(Buffer.concat([Buffer.from('03', 'hex'), buffer])) // ) // } // }) // .on('end', () => { // errorEnded = true // if (progressEnded && packfileEnded) output.end() // }) // return output // } } async function parseUploadPackResponse(stream) { const { packetlines, packfile, progress } = GitSideBand.demux(stream); const shallows = []; const unshallows = []; const acks = []; let nak = false; let done = false; return new Promise((resolve, reject) => { // Parse the response forAwait(packetlines, data => { const line = data.toString('utf8').trim(); if (line.startsWith('shallow')) { const oid = line.slice(-41).trim(); if (oid.length !== 40) { reject(new InvalidOidError(oid)); } shallows.push(oid); } else if (line.startsWith('unshallow')) { const oid = line.slice(-41).trim(); if (oid.length !== 40) { reject(new InvalidOidError(oid)); } unshallows.push(oid); } else if (line.startsWith('ACK')) { const [, oid, status] = line.split(' '); acks.push({ oid, status }); if (!status) done = true; } else if (line.startsWith('NAK')) { nak = true; done = true; } if (done) { resolve({ shallows, unshallows, acks, nak, packfile,
progress }); } }); }) } function writeUploadPackRequest({ capabilities = [], wants = [], haves = [], shallows = [], depth = null, since = null, exclude = [], }) { const packstream = []; wants = [...new Set(wants)]; // remove duplicates let firstLineCapabilities = ` ${capabilities.join(' ')}`; for (const oid of wants) { packstream.push(GitPktLine.encode(`want ${oid}${firstLineCapabilities}\n`)); firstLineCapabilities = ''; } for (const oid of shallows) { packstream.push(GitPktLine.encode(`shallow ${oid}\n`)); } if (depth !== null) { packstream.push(GitPktLine.encode(`deepen ${depth}\n`)); } if (since !== null) { packstream.push( GitPktLine.encode(`deepen-since ${Math.floor(since.valueOf() / 1000)}\n`) ); } for (const oid of exclude) { packstream.push(GitPktLine.encode(`deepen-not ${oid}\n`)); } packstream.push(GitPktLine.flush()); for (const oid of haves) { packstream.push(GitPktLine.encode(`have ${oid}\n`)); } packstream.push(GitPktLine.encode(`done\n`)); return packstream } // @ts-check /** * * @typedef {object} FetchResult - The object returned has the following schema: * @property {string | null} defaultBranch - The branch that is cloned if no branch is specified * @property {string | null} fetchHead - The SHA-1 object id of the fetched head commit * @property {string | null} fetchHeadDescription - a textual description of the branch that was fetched * @property {Object} [headers] - The HTTP response headers returned by the git server * @property {string[]} [pruned] - A list of branches that were pruned, if you provided the `prune` parameter * */ /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {any} args.cache * @param {HttpClient} args.http * @param {ProgressCallback} [args.onProgress] * @param {MessageCallback} [args.onMessage] * @param {AuthCallback} [args.onAuth] * @param {AuthFailureCallback} [args.onAuthFailure] * @param {AuthSuccessCallback} [args.onAuthSuccess] * @param {string} args.gitdir * @param {string|void} [args.url] * @param {string} [args.corsProxy] * @param {string} [args.ref] * @param {string} [args.remoteRef] * @param {string} [args.remote] * @param {boolean} [args.singleBranch = false] * @param {boolean} [args.tags = false] * @param {number} [args.depth] * @param {Date} [args.since] * @param {string[]} [args.exclude = []] * @param {boolean} [args.relative = false] * @param {Object} [args.headers] * @param {boolean} [args.prune] * @param {boolean} [args.pruneTags] * * @returns {Promise} * @see FetchResult */ async function _fetch({ fs, cache, http, onProgress, onMessage, onAuth, onAuthSuccess, onAuthFailure, gitdir, ref: _ref, remoteRef: _remoteRef, remote: _remote, url: _url, corsProxy, depth = null, since = null, exclude = [], relative = false, tags = false, singleBranch = false, headers = {}, prune = false, pruneTags = false, }) { const ref = _ref || (await _currentBranch({ fs, gitdir, test: true })); const config = await GitConfigManager.get({ fs, gitdir }); // Figure out what remote to use. const remote = _remote || (ref && (await config.get(`branch.${ref}.remote`))) || 'origin'; // Lookup the URL for the given remote. const url = _url || (await config.get(`remote.${remote}.url`)); if (typeof url === 'undefined') { throw new MissingParameterError('remote OR url') } // Figure out what remote ref to use. 
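// Precedence below (first match wins): an explicit `remoteRef` argument, the
// `branch.<ref>.merge` value from config, the raw `ref` argument, then 'HEAD'.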
const remoteRef = _remoteRef || (ref && (await config.get(`branch.${ref}.merge`))) || _ref || 'HEAD'; if (corsProxy === undefined) { corsProxy = await config.get('http.corsProxy'); } const GitRemoteHTTP = GitRemoteManager.getRemoteHelperFor({ url }); const remoteHTTP = await GitRemoteHTTP.discover({ http, onAuth, onAuthSuccess, onAuthFailure, corsProxy, service: 'git-upload-pack', url, headers, protocolVersion: 1, }); const auth = remoteHTTP.auth; // hack to get new credentials from CredentialManager API const remoteRefs = remoteHTTP.refs; // For the special case of an empty repository with no refs, return null. if (remoteRefs.size === 0) { return { defaultBranch: null, fetchHead: null, fetchHeadDescription: null, } } // Check that the remote supports the requested features if (depth !== null && !remoteHTTP.capabilities.has('shallow')) { throw new RemoteCapabilityError('shallow', 'depth') } if (since !== null && !remoteHTTP.capabilities.has('deepen-since')) { throw new RemoteCapabilityError('deepen-since', 'since') } if (exclude.length > 0 && !remoteHTTP.capabilities.has('deepen-not')) { throw new RemoteCapabilityError('deepen-not', 'exclude') } if (relative === true && !remoteHTTP.capabilities.has('deepen-relative')) { throw new RemoteCapabilityError('deepen-relative', 'relative') } // Figure out the SHA for the requested ref const { oid, fullref } = GitRefManager.resolveAgainstMap({ ref: remoteRef, map: remoteRefs, }); // Filter out refs we want to ignore: only keep ref we're cloning, HEAD, branches, and tags (if we're keeping them) for (const remoteRef of remoteRefs.keys()) { if ( remoteRef === fullref || remoteRef === 'HEAD' || remoteRef.startsWith('refs/heads/') || (tags && remoteRef.startsWith('refs/tags/')) ) { continue } remoteRefs.delete(remoteRef); } // Assemble the application/x-git-upload-pack-request const capabilities = filterCapabilities( [...remoteHTTP.capabilities], [ 'multi_ack_detailed', 'no-done', 'side-band-64k', // Note: I removed 'thin-pack' option since our code doesn't "fatten" packfiles, // which is necessary for compatibility with git. It was the cause of mysterious // 'fatal: pack has [x] unresolved deltas' errors that plagued us for some time. // isomorphic-git is perfectly happy with thin packfiles in .git/objects/pack but // canonical git it turns out is NOT. 'ofs-delta', `agent=${pkg.agent}`, ] ); if (relative) capabilities.push('deepen-relative'); // Start figuring out which oids from the remote we want to request const wants = singleBranch ? [oid] : remoteRefs.values(); // Come up with a reasonable list of oids to tell the remote we already have // (preferably oids that are close ancestors of the branch heads we're fetching) const haveRefs = singleBranch ? [ref] : await GitRefManager.listRefs({ fs, gitdir, filepath: `refs`, }); let haves = []; for (let ref of haveRefs) { try { ref = await GitRefManager.expand({ fs, gitdir, ref }); const oid = await GitRefManager.resolve({ fs, gitdir, ref }); if (await hasObject({ fs, cache, gitdir, oid })) { haves.push(oid); } } catch (err) {} } haves = [...new Set(haves)]; const oids = await GitShallowManager.read({ fs, gitdir }); const shallows = remoteHTTP.capabilities.has('shallow') ? [...oids] : []; const packstream = writeUploadPackRequest({ capabilities, wants, haves, shallows, depth, since, exclude, }); // CodeCommit will hang up if we don't send a Content-Length header // so we can't stream the body. 
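// For orientation, the pkt-line request assembled above looks roughly like this
// on the wire (the oids are placeholders and the first line's length varies with
// the capability list, so these lengths are illustrative):
//
//   00a8want <40-char oid> multi_ack_detailed no-done side-band-64k ofs-delta agent=git/isomorphic-git@1.17.0
//   0000
//   0032have <40-char oid>
//   0009done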
const packbuffer = Buffer.from(await collect(packstream)); const raw = await GitRemoteHTTP.connect({ http, onProgress, corsProxy, service: 'git-upload-pack', url, auth, body: [packbuffer], headers, }); const response = await parseUploadPackResponse(raw.body); if (raw.headers) { response.headers = raw.headers; } // Apply all the 'shallow' and 'unshallow' commands for (const oid of response.shallows) { if (!oids.has(oid)) { // this is in a try/catch mostly because my old test fixtures are missing objects try { // server says it's shallow, but do we have the parents? const { object } = await _readObject({ fs, cache, gitdir, oid }); const commit = new GitCommit(object); const hasParents = await Promise.all( commit .headers() .parent.map(oid => hasObject({ fs, cache, gitdir, oid })) ); const haveAllParents = hasParents.length === 0 || hasParents.every(has => has); if (!haveAllParents) { oids.add(oid); } } catch (err) { oids.add(oid); } } } for (const oid of response.unshallows) { oids.delete(oid); } await GitShallowManager.write({ fs, gitdir, oids }); // Update local remote refs if (singleBranch) { const refs = new Map([[fullref, oid]]); // But wait, maybe it was a symref, like 'HEAD'! // We need to save all the refs in the symref chain (sigh). const symrefs = new Map(); let bail = 10; let key = fullref; while (bail--) { const value = remoteHTTP.symrefs.get(key); if (value === undefined) break symrefs.set(key, value); key = value; } // final value must not be a symref but a real ref const realRef = remoteRefs.get(key); // There may be no ref at all if we've fetched a specific commit hash if (realRef) { refs.set(key, realRef); } const { pruned } = await GitRefManager.updateRemoteRefs({ fs, gitdir, remote, refs, symrefs, tags, prune, }); if (prune) { response.pruned = pruned; } } else { const { pruned } = await GitRefManager.updateRemoteRefs({ fs, gitdir, remote, refs: remoteRefs, symrefs: remoteHTTP.symrefs, tags, prune, pruneTags, }); if (prune) { response.pruned = pruned; } } // We need this value later for the `clone` command. response.HEAD = remoteHTTP.symrefs.get('HEAD'); // AWS CodeCommit doesn't list HEAD as a symref, but we can reverse engineer it // Find the SHA of the branch called HEAD if (response.HEAD === undefined) { const { oid } = GitRefManager.resolveAgainstMap({ ref: 'HEAD', map: remoteRefs, }); // Use the name of the first branch that's not called HEAD that has // the same SHA as the branch called HEAD. for (const [key, value] of remoteRefs.entries()) { if (key !== 'HEAD' && value === oid) { response.HEAD = key; break } } } const noun = fullref.startsWith('refs/tags') ? 
'tag' : 'branch'; response.FETCH_HEAD = { oid, description: `${noun} '${abbreviateRef(fullref)}' of ${url}`, }; if (onProgress || onMessage) { const lines = splitLines(response.progress); forAwait(lines, async line => { if (onMessage) await onMessage(line); if (onProgress) { const matches = line.match(/([^:]*).*\((\d+?)\/(\d+?)\)/); if (matches) { await onProgress({ phase: matches[1].trim(), loaded: parseInt(matches[2], 10), total: parseInt(matches[3], 10), }); } } }); } const packfile = Buffer.from(await collect(response.packfile)); const packfileSha = packfile.slice(-20).toString('hex'); const res = { defaultBranch: response.HEAD, fetchHead: response.FETCH_HEAD.oid, fetchHeadDescription: response.FETCH_HEAD.description, }; if (response.headers) { res.headers = response.headers; } if (prune) { res.pruned = response.pruned; } // This is a quick fix for the empty .git/objects/pack/pack-.pack file error, // which due to the way `git-list-pack` works causes the program to hang when it tries to read it. // TODO: Longer term, we should actually: // a) NOT concatenate the entire packfile into memory (line 78), // b) compute the SHA of the stream except for the last 20 bytes, using the same library used in push.js, and // c) compare the computed SHA with the last 20 bytes of the stream before saving to disk, and throwing a "packfile got corrupted during download" error if the SHA doesn't match. if (packfileSha !== '' && !emptyPackfile(packfile)) { res.packfile = `objects/pack/pack-${packfileSha}.pack`; const fullpath = join(gitdir, res.packfile); await fs.write(fullpath, packfile); const getExternalRefDelta = oid => _readObject({ fs, cache, gitdir, oid }); const idx = await GitPackIndex.fromPack({ pack: packfile, getExternalRefDelta, onProgress, }); await fs.write(fullpath.replace(/\.pack$/, '.idx'), await idx.toBuffer()); } return res } // @ts-check /** * Initialize a new repository * * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {string} [args.dir] * @param {string} [args.gitdir] * @param {boolean} [args.bare = false] * @param {string} [args.defaultBranch = 'master'] * @returns {Promise} */ async function _init({ fs, bare = false, dir, gitdir = bare ? dir : join(dir, '.git'), defaultBranch = 'master', }) { // Don't overwrite an existing config if (await fs.exists(gitdir + '/config')) return let folders = [ 'hooks', 'info', 'objects/info', 'objects/pack', 'refs/heads', 'refs/tags', ]; folders = folders.map(dir => gitdir + '/' + dir); for (const folder of folders) { await fs.mkdir(folder); } await fs.write( gitdir + '/config', '[core]\n' + '\trepositoryformatversion = 0\n' + '\tfilemode = false\n' + `\tbare = ${bare}\n` + (bare ? 
'' : '\tlogallrefupdates = true\n') + '\tsymlinks = false\n' + '\tignorecase = true\n' ); await fs.write(gitdir + '/HEAD', `ref: refs/heads/${defaultBranch}\n`); } // @ts-check /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {object} args.cache * @param {HttpClient} args.http * @param {ProgressCallback} [args.onProgress] * @param {MessageCallback} [args.onMessage] * @param {AuthCallback} [args.onAuth] * @param {AuthFailureCallback} [args.onAuthFailure] * @param {AuthSuccessCallback} [args.onAuthSuccess] * @param {string} [args.dir] * @param {string} args.gitdir * @param {string} args.url * @param {string} args.corsProxy * @param {string} args.ref * @param {boolean} args.singleBranch * @param {boolean} args.noCheckout * @param {boolean} args.noTags * @param {string} args.remote * @param {number} args.depth * @param {Date} args.since * @param {string[]} args.exclude * @param {boolean} args.relative * @param {Object} args.headers * * @returns {Promise} Resolves successfully when clone completes * */ async function _clone({ fs, cache, http, onProgress, onMessage, onAuth, onAuthSuccess, onAuthFailure, dir, gitdir, url, corsProxy, ref, remote, depth, since, exclude, relative, singleBranch, noCheckout, noTags, headers, }) { try { await _init({ fs, gitdir }); await _addRemote({ fs, gitdir, remote, url, force: false }); if (corsProxy) { const config = await GitConfigManager.get({ fs, gitdir }); await config.set(`http.corsProxy`, corsProxy); await GitConfigManager.save({ fs, gitdir, config }); } const { defaultBranch, fetchHead } = await _fetch({ fs, cache, http, onProgress, onMessage, onAuth, onAuthSuccess, onAuthFailure, gitdir, ref, remote, corsProxy, depth, since, exclude, relative, singleBranch, headers, tags: !noTags, }); if (fetchHead === null) return ref = ref || defaultBranch; ref = ref.replace('refs/heads/', ''); // Checkout that branch await _checkout({ fs, cache, onProgress, dir, gitdir, ref, remote, noCheckout, }); } catch (err) { // Remove partial local repository, see #1283 // Ignore any error as we are already failing. // The catch is necessary so the original error is not masked. await fs .rmdir(gitdir, { recursive: true, maxRetries: 10 }) .catch(() => undefined); throw err } } // @ts-check /** * Clone a repository * * @param {object} args * @param {FsClient} args.fs - a file system implementation * @param {HttpClient} args.http - an HTTP client * @param {ProgressCallback} [args.onProgress] - optional progress event callback * @param {MessageCallback} [args.onMessage] - optional message event callback * @param {AuthCallback} [args.onAuth] - optional auth fill callback * @param {AuthFailureCallback} [args.onAuthFailure] - optional auth rejected callback * @param {AuthSuccessCallback} [args.onAuthSuccess] - optional auth approved callback * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.url - The URL of the remote repository * @param {string} [args.corsProxy] - Optional [CORS proxy](https://www.npmjs.com/%40isomorphic-git/cors-proxy). Value is stored in the git config file for that repo. * @param {string} [args.ref] - Which branch to checkout. By default this is the designated "main branch" of the repository. * @param {boolean} [args.singleBranch = false] - Instead of the default behavior of fetching all the branches, only fetch a single branch. 
* @param {boolean} [args.noCheckout = false] - If true, clone will only fetch the repo, not check out a branch. Skipping checkout can save a lot of time normally spent writing files to disk. * @param {boolean} [args.noTags = false] - By default clone will fetch all tags. `noTags` disables that behavior. * @param {string} [args.remote = 'origin'] - What to name the remote that is created. * @param {number} [args.depth] - Integer. Determines how much of the git repository's history to retrieve * @param {Date} [args.since] - Only fetch commits created after the given date. Mutually exclusive with `depth`. * @param {string[]} [args.exclude = []] - A list of branches or tags. Instructs the remote server not to send us any commits reachable from these refs. * @param {boolean} [args.relative = false] - Changes the meaning of `depth` to be measured from the current shallow depth rather than from the branch tip. * @param {Object} [args.headers = {}] - Additional headers to include in HTTP requests, similar to git's `extraHeader` config * @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise} Resolves successfully when clone completes * * @example * await git.clone({ * fs, * http, * dir: '/tutorial', * corsProxy: 'https://cors.isomorphic-git.org', * url: 'https://github.com/isomorphic-git/isomorphic-git', * singleBranch: true, * depth: 1 * }) * console.log('done') * */ async function clone({ fs, http, onProgress, onMessage, onAuth, onAuthSuccess, onAuthFailure, dir, gitdir = join(dir, '.git'), url, corsProxy = undefined, ref = undefined, remote = 'origin', depth = undefined, since = undefined, exclude = [], relative = false, singleBranch = false, noCheckout = false, noTags = false, headers = {}, cache = {}, }) { try { assertParameter('fs', fs); assertParameter('http', http); assertParameter('gitdir', gitdir); if (!noCheckout) { assertParameter('dir', dir); } assertParameter('url', url); return await _clone({ fs: new FileSystem(fs), cache, http, onProgress, onMessage, onAuth, onAuthSuccess, onAuthFailure, dir, gitdir, url, corsProxy, ref, remote, depth, since, exclude, relative, singleBranch, noCheckout, noTags, headers, }) } catch (err) { err.caller = 'git.clone'; throw err } } // @ts-check /** * Create a new commit * * @param {Object} args * @param {FsClient} args.fs - a file system implementation * @param {SignCallback} [args.onSign] - a PGP signing implementation * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.message - The commit message to use. * @param {Object} [args.author] - The details about the author. * @param {string} [args.author.name] - Default is `user.name` config. * @param {string} [args.author.email] - Default is `user.email` config. * @param {number} [args.author.timestamp=Math.floor(Date.now()/1000)] - Set the author timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00). * @param {number} [args.author.timezoneOffset] - Set the author timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`. * @param {Object} [args.committer = author] - The details about the commit committer, in the same format as the author parameter. If not specified, the author details are used. * @param {string} [args.committer.name] - Default is `user.name` config. 
 * @param {string} [args.committer.email] - Default is `user.email` config. * @param {number} [args.committer.timestamp=Math.floor(Date.now()/1000)] - Set the committer timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00). * @param {number} [args.committer.timezoneOffset] - Set the committer timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`. * @param {string} [args.signingKey] - Sign the commit object using this private PGP key. * @param {boolean} [args.dryRun = false] - If true, simulates making a commit so you can test whether it would succeed. Implies `noUpdateBranch`. * @param {boolean} [args.noUpdateBranch = false] - If true, does not update the branch pointer after creating the commit. * @param {string} [args.ref] - The fully expanded name of the branch to commit to. Default is the current branch pointed to by HEAD. (TODO: fix it so it can expand branch names without throwing if the branch doesn't exist yet.) * @param {string[]} [args.parent] - The SHA-1 object ids of the commits to use as parents. If not specified, the commit pointed to by `ref` is used. * @param {string} [args.tree] - The SHA-1 object id of the tree to use. If not specified, a new tree object is created from the current git index. * @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise} Resolves successfully with the SHA-1 object id of the newly created commit. * * @example * let sha = await git.commit({ * fs, * dir: '/tutorial', * author: { * name: 'Mr. Test', * email: 'mrtest@example.com', * }, * message: 'Added the a.txt file' * }) * console.log(sha) * */ async function commit({ fs: _fs, onSign, dir, gitdir = join(dir, '.git'), message, author: _author, committer: _committer, signingKey, dryRun = false, noUpdateBranch = false, ref, parent, tree, cache = {}, }) { try { assertParameter('fs', _fs); assertParameter('message', message); if (signingKey) { assertParameter('onSign', onSign); } const fs = new FileSystem(_fs); const author = await normalizeAuthorObject({ fs, gitdir, author: _author }); if (!author) throw new MissingNameError('author') const committer = await normalizeCommitterObject({ fs, gitdir, author, committer: _committer, }); if (!committer) throw new MissingNameError('committer') return await _commit({ fs, cache, onSign, gitdir, message, author, committer, signingKey, dryRun, noUpdateBranch, ref, parent, tree, }) } catch (err) { err.caller = 'git.commit'; throw err } } // @ts-check /** * Get the name of the branch currently pointed to by .git/HEAD * * @param {Object} args * @param {FsClient} args.fs - a file system implementation * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {boolean} [args.fullname = false] - Return the full path (e.g. "refs/heads/main") instead of the abbreviated form. * @param {boolean} [args.test = false] - If the current branch doesn't actually exist (such as right after git init) then return `undefined`. * * @returns {Promise} The name of the current branch or undefined if the HEAD is detached.
* * @example * // Get the current branch name * let branch = await git.currentBranch({ * fs, * dir: '/tutorial', * fullname: false * }) * console.log(branch) * */ async function currentBranch({ fs, dir, gitdir = join(dir, '.git'), fullname = false, test = false, }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); return await _currentBranch({ fs: new FileSystem(fs), gitdir, fullname, test, }) } catch (err) { err.caller = 'git.currentBranch'; throw err } } // @ts-check /** * @param {Object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {string} args.gitdir * @param {string} args.ref * * @returns {Promise} */ async function _deleteBranch({ fs, gitdir, ref }) { const exist = await GitRefManager.exists({ fs, gitdir, ref }); if (!exist) { throw new NotFoundError(ref) } const fullRef = await GitRefManager.expand({ fs, gitdir, ref }); const currentRef = await _currentBranch({ fs, gitdir, fullname: true }); if (fullRef === currentRef) { // detach HEAD const value = await GitRefManager.resolve({ fs, gitdir, ref: fullRef }); await GitRefManager.writeRef({ fs, gitdir, ref: 'HEAD', value }); } // Delete a specified branch await GitRefManager.deleteRef({ fs, gitdir, ref: fullRef }); } // @ts-check /** * Delete a local branch * * > Note: This only deletes loose branches - it should be fixed in the future to delete packed branches as well. * * @param {Object} args * @param {FsClient} args.fs - a file system implementation * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.ref - The branch to delete * * @returns {Promise} Resolves successfully when filesystem operations are complete * * @example * await git.deleteBranch({ fs, dir: '/tutorial', ref: 'local-branch' }) * console.log('done') * */ async function deleteBranch({ fs, dir, gitdir = join(dir, '.git'), ref, }) { try { assertParameter('fs', fs); assertParameter('ref', ref); return await _deleteBranch({ fs: new FileSystem(fs), gitdir, ref, }) } catch (err) { err.caller = 'git.deleteBranch'; throw err } } // @ts-check /** * Delete a local ref * * @param {Object} args * @param {FsClient} args.fs - a file system implementation * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.ref - The ref to delete * * @returns {Promise} Resolves successfully when filesystem operations are complete * * @example * await git.deleteRef({ fs, dir: '/tutorial', ref: 'refs/tags/test-tag' }) * console.log('done') * */ async function deleteRef({ fs, dir, gitdir = join(dir, '.git'), ref }) { try { assertParameter('fs', fs); assertParameter('ref', ref); await GitRefManager.deleteRef({ fs: new FileSystem(fs), gitdir, ref }); } catch (err) { err.caller = 'git.deleteRef'; throw err } } // @ts-check /** * @param {Object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {string} args.gitdir * @param {string} args.remote * * @returns {Promise} */ async function _deleteRemote({ fs, gitdir, remote }) { const config = await GitConfigManager.get({ fs, gitdir }); await config.deleteSection('remote', remote); await GitConfigManager.save({ fs, gitdir, config }); } // @ts-check /** * Removes the local config entry for a given remote * * @param {Object} args * @param {FsClient} args.fs - a file 
system implementation * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.remote - The name of the remote to delete * * @returns {Promise} Resolves successfully when filesystem operations are complete * * @example * await git.deleteRemote({ fs, dir: '/tutorial', remote: 'upstream' }) * console.log('done') * */ async function deleteRemote({ fs, dir, gitdir = join(dir, '.git'), remote, }) { try { assertParameter('fs', fs); assertParameter('remote', remote); return await _deleteRemote({ fs: new FileSystem(fs), gitdir, remote, }) } catch (err) { err.caller = 'git.deleteRemote'; throw err } } // @ts-check /** * Delete a local tag ref * * @param {Object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {string} args.gitdir * @param {string} args.ref - The tag to delete * * @returns {Promise} Resolves successfully when filesystem operations are complete * * @example * await git.deleteTag({ dir: '/', ref: 'test-tag' }) * console.log('done') * */ async function _deleteTag({ fs, gitdir, ref }) { ref = ref.startsWith('refs/tags/') ? ref : `refs/tags/${ref}`; await GitRefManager.deleteRef({ fs, gitdir, ref }); } // @ts-check /** * Delete a local tag ref * * @param {Object} args * @param {FsClient} args.fs - a file system implementation * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.ref - The tag to delete * * @returns {Promise} Resolves successfully when filesystem operations are complete * * @example * await git.deleteTag({ fs, dir: '/tutorial', ref: 'test-tag' }) * console.log('done') * */ async function deleteTag({ fs, dir, gitdir = join(dir, '.git'), ref }) { try { assertParameter('fs', fs); assertParameter('ref', ref); return await _deleteTag({ fs: new FileSystem(fs), gitdir, ref, }) } catch (err) { err.caller = 'git.deleteTag'; throw err } } async function expandOidLoose({ fs, gitdir, oid: short }) { const prefix = short.slice(0, 2); const objectsSuffixes = await fs.readdir(`${gitdir}/objects/${prefix}`); return objectsSuffixes .map(suffix => `${prefix}${suffix}`) .filter(_oid => _oid.startsWith(short)) } async function expandOidPacked({ fs, cache, gitdir, oid: short, getExternalRefDelta, }) { // Iterate through all the .pack files const results = []; let list = await fs.readdir(join(gitdir, 'objects/pack')); list = list.filter(x => x.endsWith('.idx')); for (const filename of list) { const indexFile = `${gitdir}/objects/pack/${filename}`; const p = await readPackIndex({ fs, cache, filename: indexFile, getExternalRefDelta, }); if (p.error) throw new InternalError(p.error) // Search through the list of oids in the packfile for (const oid of p.offsets.keys()) { if (oid.startsWith(short)) results.push(oid); } } return results } async function _expandOid({ fs, cache, gitdir, oid: short }) { // Curry the current read method so that the packfile un-deltification // process can acquire external ref-deltas.
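// Expansion consults both object stores below: loose objects (a readdir of
// objects/<2-char-prefix>/) and every .idx in objects/pack. Exactly one
// candidate resolves; zero raises NotFoundError, several raise AmbiguousError.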
const getExternalRefDelta = oid => _readObject({ fs, cache, gitdir, oid }); const results1 = await expandOidLoose({ fs, gitdir, oid: short }); const results2 = await expandOidPacked({ fs, cache, gitdir, oid: short, getExternalRefDelta, }); const results = results1.concat(results2); if (results.length === 1) { return results[0] } if (results.length > 1) { throw new AmbiguousError('oids', short, results) } throw new NotFoundError(`an object matching "${short}"`) } // @ts-check /** * Expand and resolve a short oid into a full oid * * @param {Object} args * @param {FsClient} args.fs - a file system implementation * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.oid - The shortened oid prefix to expand (like "0414d2a") * @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise} Resolves successfully with the full oid (like "0414d2a286d7bbc7a4a326a61c1f9f888a8ab87f") * * @example * let oid = await git.expandOid({ fs, dir: '/tutorial', oid: '0414d2a'}) * console.log(oid) * */ async function expandOid({ fs, dir, gitdir = join(dir, '.git'), oid, cache = {}, }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); assertParameter('oid', oid); return await _expandOid({ fs: new FileSystem(fs), cache, gitdir, oid, }) } catch (err) { err.caller = 'git.expandOid'; throw err } } // @ts-check /** * Expand an abbreviated ref to its full name * * @param {Object} args * @param {FsClient} args.fs - a file system implementation * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.ref - The ref to expand (like "v1.0.0") * * @returns {Promise} Resolves successfully with a full ref name ("refs/tags/v1.0.0") * * @example * let fullRef = await git.expandRef({ fs, dir: '/tutorial', ref: 'main'}) * console.log(fullRef) * */ async function expandRef({ fs, dir, gitdir = join(dir, '.git'), ref }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); assertParameter('ref', ref); return await GitRefManager.expand({ fs: new FileSystem(fs), gitdir, ref, }) } catch (err) { err.caller = 'git.expandRef'; throw err } } // @ts-check /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {any} args.cache * @param {string} args.gitdir * @param {string[]} args.oids * */ async function _findMergeBase({ fs, cache, gitdir, oids }) { // Note: right now, the tests are geared so that the output should match that of // `git merge-base --all --octopus` // because without the --octopus flag, git's output seems to depend on the ORDER of the oids, // and computing virtual merge bases is just too much for me to fathom right now. // If we start N independent walkers, one at each of the given `oids`, and walk backwards // through ancestors, eventually we'll discover a commit where each one of these N walkers // has passed through. So we just need to keep track of which walkers have visited each commit // until we find a commit that N distinct walkers have visited.
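// Worked example (hypothetical oids): given oids [A, B] where A's parent is P
// and B's parents are [P, Q], pass 1 records visits A:{0} and B:{1}; pass 2
// records P:{0,1} and Q:{1}. P has been visited by all 2 walkers, so [P] is returned.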
const visits = {}; const passes = oids.length; let heads = oids.map((oid, index) => ({ index, oid })); while (heads.length) { // Count how many times we've passed each commit const result = new Set(); for (const { oid, index } of heads) { if (!visits[oid]) visits[oid] = new Set(); visits[oid].add(index); if (visits[oid].size === passes) { result.add(oid); } } if (result.size > 0) { return [...result] } // We haven't found a common ancestor yet const newheads = new Map(); for (const { oid, index } of heads) { try { const { object } = await _readObject({ fs, cache, gitdir, oid }); const commit = GitCommit.from(object); const { parent } = commit.parseHeaders(); for (const oid of parent) { if (!visits[oid] || !visits[oid].has(index)) { newheads.set(oid + ':' + index, { oid, index }); } } } catch (err) { // do nothing } } heads = Array.from(newheads.values()); } return [] } const LINEBREAKS = /^.*(\r?\n|$)/gm; function mergeFile({ ourContent, baseContent, theirContent, ourName = 'ours', baseName = 'base', theirName = 'theirs', format = 'diff', markerSize = 7, }) { const ours = ourContent.match(LINEBREAKS); const base = baseContent.match(LINEBREAKS); const theirs = theirContent.match(LINEBREAKS); // Here we let the diff3 library do the heavy lifting. const result = diff3Merge(ours, base, theirs); // Here we note whether there are conflicts and format the results let mergedText = ''; let cleanMerge = true; for (const item of result) { if (item.ok) { mergedText += item.ok.join(''); } if (item.conflict) { cleanMerge = false; mergedText += `${'<'.repeat(markerSize)} ${ourName}\n`; mergedText += item.conflict.a.join(''); if (format === 'diff3') { mergedText += `${'|'.repeat(markerSize)} ${baseName}\n`; mergedText += item.conflict.o.join(''); } mergedText += `${'='.repeat(markerSize)}\n`; mergedText += item.conflict.b.join(''); mergedText += `${'>'.repeat(markerSize)} ${theirName}\n`; } } return { cleanMerge, mergedText } } // @ts-check /** * Create a merged tree * * @param {Object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {object} args.cache * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.ourOid - The SHA-1 object id of our tree * @param {string} args.baseOid - The SHA-1 object id of the base tree * @param {string} args.theirOid - The SHA-1 object id of their tree * @param {string} [args.ourName='ours'] - The name to use in conflicted files for our hunks * @param {string} [args.baseName='base'] - The name to use in conflicted files (in diff3 format) for the base hunks * @param {string} [args.theirName='theirs'] - The name to use in conflicted files for their hunks * @param {boolean} [args.dryRun=false] * * @returns {Promise} - The SHA-1 object id of the merged tree * */ async function mergeTree({ fs, cache, dir, gitdir = join(dir, '.git'), ourOid, baseOid, theirOid, ourName = 'ours', baseName = 'base', theirName = 'theirs', dryRun = false, }) { const ourTree = TREE({ ref: ourOid }); const baseTree = TREE({ ref: baseOid }); const theirTree = TREE({ ref: theirOid }); const results = await _walk({ fs, cache, dir, gitdir, trees: [ourTree, baseTree, theirTree], map: async function(filepath, [ours, base, theirs]) { const path = basename(filepath); // What we did, what they did const ourChange = await modified(ours, base); const theirChange = await modified(theirs, base); switch (`${ourChange}-${theirChange}`) 
{ case 'false-false': { return { mode: await base.mode(), path, oid: await base.oid(), type: await base.type(), } } case 'false-true': { return theirs ? { mode: await theirs.mode(), path, oid: await theirs.oid(), type: await theirs.type(), } : undefined } case 'true-false': { return ours ? { mode: await ours.mode(), path, oid: await ours.oid(), type: await ours.type(), } : undefined } case 'true-true': { // Modifications if ( ours && base && theirs && (await ours.type()) === 'blob' && (await base.type()) === 'blob' && (await theirs.type()) === 'blob' ) { return mergeBlobs({ fs, gitdir, path, ours, base, theirs, ourName, baseName, theirName, }) } // all other types of conflicts fail throw new MergeNotSupportedError() } } }, /** * @param {TreeEntry} [parent] * @param {Array} children */ reduce: async (parent, children) => { const entries = children.filter(Boolean); // remove undefineds // if the parent was deleted, the children have to go if (!parent) return // automatically delete directories if they have been emptied if (parent && parent.type === 'tree' && entries.length === 0) return if (entries.length > 0) { const tree = new GitTree(entries); const object = tree.toObject(); const oid = await _writeObject({ fs, gitdir, type: 'tree', object, dryRun, }); parent.oid = oid; } return parent }, }); return results.oid } /** * * @param {WalkerEntry} entry * @param {WalkerEntry} base * */ async function modified(entry, base) { if (!entry && !base) return false if (entry && !base) return true if (!entry && base) return true if ((await entry.type()) === 'tree' && (await base.type()) === 'tree') { return false } if ( (await entry.type()) === (await base.type()) && (await entry.mode()) === (await base.mode()) && (await entry.oid()) === (await base.oid()) ) { return false } return true } /** * * @param {Object} args * @param {import('../models/FileSystem').FileSystem} args.fs * @param {string} args.gitdir * @param {string} args.path * @param {WalkerEntry} args.ours * @param {WalkerEntry} args.base * @param {WalkerEntry} args.theirs * @param {string} [args.ourName] * @param {string} [args.baseName] * @param {string} [args.theirName] * @param {string} [args.format] * @param {number} [args.markerSize] * @param {boolean} [args.dryRun = false] * */ async function mergeBlobs({ fs, gitdir, path, ours, base, theirs, ourName, theirName, baseName, format, markerSize, dryRun, }) { const type = 'blob'; // Compute the new mode. // Since there are ONLY two valid blob modes ('100755' and '100644') it boils down to this const mode = (await base.mode()) === (await ours.mode()) ? 
await theirs.mode() : await ours.mode(); // The trivial case: nothing to merge except maybe mode if ((await ours.oid()) === (await theirs.oid())) { return { mode, path, oid: await ours.oid(), type } } // if only one side made oid changes, return that side's oid if ((await ours.oid()) === (await base.oid())) { return { mode, path, oid: await theirs.oid(), type } } if ((await theirs.oid()) === (await base.oid())) { return { mode, path, oid: await ours.oid(), type } } // if both sides made changes do a merge const { mergedText, cleanMerge } = mergeFile({ ourContent: Buffer.from(await ours.content()).toString('utf8'), baseContent: Buffer.from(await base.content()).toString('utf8'), theirContent: Buffer.from(await theirs.content()).toString('utf8'), ourName, theirName, baseName, format, markerSize, }); if (!cleanMerge) { // all other types of conflicts fail throw new MergeNotSupportedError() } const oid = await _writeObject({ fs, gitdir, type: 'blob', object: Buffer.from(mergedText, 'utf8'), dryRun, }); return { mode, path, oid, type } } // @ts-check // import diff3 from 'node-diff3' /** * * @typedef {Object} MergeResult - Returns an object with a schema like this: * @property {string} [oid] - The SHA-1 object id that is now at the head of the branch. Absent only if `dryRun` was specified and `mergeCommit` is true. * @property {boolean} [alreadyMerged] - True if the branch was already merged so no changes were made * @property {boolean} [fastForward] - True if it was a fast-forward merge * @property {boolean} [mergeCommit] - True if merge resulted in a merge commit * @property {string} [tree] - The SHA-1 object id of the tree resulting from a merge commit * */ /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {object} args.cache * @param {string} args.gitdir * @param {string} [args.ours] * @param {string} args.theirs * @param {boolean} args.fastForward * @param {boolean} args.fastForwardOnly * @param {boolean} args.dryRun * @param {boolean} args.noUpdateBranch * @param {string} [args.message] * @param {Object} args.author * @param {string} args.author.name * @param {string} args.author.email * @param {number} args.author.timestamp * @param {number} args.author.timezoneOffset * @param {Object} args.committer * @param {string} args.committer.name * @param {string} args.committer.email * @param {number} args.committer.timestamp * @param {number} args.committer.timezoneOffset * @param {string} [args.signingKey] * @param {SignCallback} [args.onSign] - a PGP signing implementation * * @returns {Promise} Resolves to a description of the merge operation * */ async function _merge({ fs, cache, gitdir, ours, theirs, fastForward = true, fastForwardOnly = false, dryRun = false, noUpdateBranch = false, message, author, committer, signingKey, onSign, }) { if (ours === undefined) { ours = await _currentBranch({ fs, gitdir, fullname: true }); } ours = await GitRefManager.expand({ fs, gitdir, ref: ours, }); theirs = await GitRefManager.expand({ fs, gitdir, ref: theirs, }); const ourOid = await GitRefManager.resolve({ fs, gitdir, ref: ours, }); const theirOid = await GitRefManager.resolve({ fs, gitdir, ref: theirs, }); // find most recent common ancestor of ref a and ref b const baseOids = await _findMergeBase({ fs, cache, gitdir, oids: [ourOid, theirOid], }); if (baseOids.length !== 1) { throw new MergeNotSupportedError() } const baseOid = baseOids[0]; // handle fast-forward case if (baseOid === theirOid) { return { oid: ourOid, alreadyMerged: true, } } if 
(fastForward && baseOid === ourOid) { if (!dryRun && !noUpdateBranch) { await GitRefManager.writeRef({ fs, gitdir, ref: ours, value: theirOid }); } return { oid: theirOid, fastForward: true, } } else { // not a simple fast-forward if (fastForwardOnly) { throw new FastForwardError() } // try a fancier merge const tree = await mergeTree({ fs, cache, gitdir, ourOid, theirOid, baseOid, ourName: ours, baseName: 'base', theirName: theirs, dryRun, }); if (!message) { message = `Merge branch '${abbreviateRef(theirs)}' into ${abbreviateRef( ours )}`; } const oid = await _commit({ fs, cache, gitdir, message, ref: ours, tree, parent: [ourOid, theirOid], author, committer, signingKey, onSign, dryRun, noUpdateBranch, }); return { oid, tree, mergeCommit: true, } } } // @ts-check /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {object} args.cache * @param {HttpClient} args.http * @param {ProgressCallback} [args.onProgress] * @param {MessageCallback} [args.onMessage] * @param {AuthCallback} [args.onAuth] * @param {AuthFailureCallback} [args.onAuthFailure] * @param {AuthSuccessCallback} [args.onAuthSuccess] * @param {string} args.dir * @param {string} args.gitdir * @param {string} args.ref * @param {string} [args.url] * @param {string} [args.remote] * @param {string} [args.remoteRef] * @param {string} [args.corsProxy] * @param {boolean} args.singleBranch * @param {boolean} args.fastForward * @param {boolean} args.fastForwardOnly * @param {Object} [args.headers] * @param {Object} args.author * @param {string} args.author.name * @param {string} args.author.email * @param {number} args.author.timestamp * @param {number} args.author.timezoneOffset * @param {Object} args.committer * @param {string} args.committer.name * @param {string} args.committer.email * @param {number} args.committer.timestamp * @param {number} args.committer.timezoneOffset * @param {string} [args.signingKey] * * @returns {Promise} Resolves successfully when pull operation completes * */ async function _pull({ fs, cache, http, onProgress, onMessage, onAuth, onAuthSuccess, onAuthFailure, dir, gitdir, ref, url, remote, remoteRef, fastForward, fastForwardOnly, corsProxy, singleBranch, headers, author, committer, signingKey, }) { try { // If ref is undefined, use 'HEAD' if (!ref) { const head = await _currentBranch({ fs, gitdir }); // TODO: use a better error. if (!head) { throw new MissingParameterError('ref') } ref = head; } const { fetchHead, fetchHeadDescription } = await _fetch({ fs, cache, http, onProgress, onMessage, onAuth, onAuthSuccess, onAuthFailure, gitdir, corsProxy, ref, url, remote, remoteRef, singleBranch, headers, }); // Merge the remote tracking branch into the local one. await _merge({ fs, cache, gitdir, ours: ref, theirs: fetchHead, fastForward, fastForwardOnly, message: `Merge ${fetchHeadDescription}`, author, committer, signingKey, dryRun: false, noUpdateBranch: false, }); await _checkout({ fs, cache, onProgress, dir, gitdir, ref, remote, noCheckout: false, }); } catch (err) { err.caller = 'git.pull'; throw err } } // @ts-check /** * Like `pull`, but hard-coded with `fastForward: true` so there is no need for an `author` parameter. 
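 *
 * Internally it delegates to `pull` with `fastForwardOnly: true`, passing
 * placeholder author/committer details that a pure fast-forward never records.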
 * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {HttpClient} args.http - an HTTP client * @param {ProgressCallback} [args.onProgress] - optional progress event callback * @param {MessageCallback} [args.onMessage] - optional message event callback * @param {AuthCallback} [args.onAuth] - optional auth fill callback * @param {AuthFailureCallback} [args.onAuthFailure] - optional auth rejected callback * @param {AuthSuccessCallback} [args.onAuthSuccess] - optional auth approved callback * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} [args.ref] - Which branch to merge into. By default this is the currently checked out branch. * @param {string} [args.url] - (Added in 1.1.0) The URL of the remote repository. The default is the value set in the git config for that remote. * @param {string} [args.remote] - (Added in 1.1.0) If URL is not specified, determines which remote to use. * @param {string} [args.remoteRef] - (Added in 1.1.0) The name of the branch on the remote to fetch. By default this is the configured remote tracking branch. * @param {string} [args.corsProxy] - Optional [CORS proxy](https://www.npmjs.com/%40isomorphic-git/cors-proxy). Overrides value in repo config. * @param {boolean} [args.singleBranch = false] - Instead of the default behavior of fetching all the branches, only fetch a single branch. * @param {Object} [args.headers] - Additional headers to include in HTTP requests, similar to git's `extraHeader` config * @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise} Resolves successfully when pull operation completes * * @example * await git.fastForward({ * fs, * http, * dir: '/tutorial', * ref: 'main', * singleBranch: true * }) * console.log('done') * */ async function fastForward({ fs, http, onProgress, onMessage, onAuth, onAuthSuccess, onAuthFailure, dir, gitdir = join(dir, '.git'), ref, url, remote, remoteRef, corsProxy, singleBranch, headers = {}, cache = {}, }) { try { assertParameter('fs', fs); assertParameter('http', http); assertParameter('gitdir', gitdir); const thisWillNotBeUsed = { name: '', email: '', timestamp: Date.now(), timezoneOffset: 0, }; return await _pull({ fs: new FileSystem(fs), cache, http, onProgress, onMessage, onAuth, onAuthSuccess, onAuthFailure, dir, gitdir, ref, url, remote, remoteRef, fastForwardOnly: true, corsProxy, singleBranch, headers, author: thisWillNotBeUsed, committer: thisWillNotBeUsed, }) } catch (err) { err.caller = 'git.fastForward'; throw err } } // @ts-check /** * * @typedef {object} FetchResult - The object returned has the following schema: * @property {string | null} defaultBranch - The branch that is cloned if no branch is specified * @property {string | null} fetchHead - The SHA-1 object id of the fetched head commit * @property {string | null} fetchHeadDescription - a textual description of the branch that was fetched * @property {Object} [headers] - The HTTP response headers returned by the git server * @property {string[]} [pruned] - A list of branches that were pruned, if you provided the `prune` parameter * */ /** * Fetch commits from a remote repository * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {HttpClient} args.http - an HTTP client * @param {ProgressCallback} [args.onProgress] - optional progress event callback * @param {MessageCallback} [args.onMessage] - 
optional message event callback * @param {AuthCallback} [args.onAuth] - optional auth fill callback * @param {AuthFailureCallback} [args.onAuthFailure] - optional auth rejected callback * @param {AuthSuccessCallback} [args.onAuthSuccess] - optional auth approved callback * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} [args.url] - The URL of the remote repository. The default is the value set in the git config for that remote. * @param {string} [args.remote] - If URL is not specified, determines which remote to use. * @param {boolean} [args.singleBranch = false] - Instead of the default behavior of fetching all the branches, only fetch a single branch. * @param {string} [args.ref] - Which branch to fetch if `singleBranch` is true. By default this is the current branch or the remote's default branch. * @param {string} [args.remoteRef] - The name of the branch on the remote to fetch if `singleBranch` is true. By default this is the configured remote tracking branch. * @param {boolean} [args.tags = false] - Also fetch tags * @param {number} [args.depth] - Integer. Determines how much of the git repository's history to retrieve * @param {boolean} [args.relative = false] - Changes the meaning of `depth` to be measured from the current shallow depth rather than from the branch tip. * @param {Date} [args.since] - Only fetch commits created after the given date. Mutually exclusive with `depth`. * @param {string[]} [args.exclude = []] - A list of branches or tags. Instructs the remote server not to send us any commits reachable from these refs. * @param {boolean} [args.prune] - Delete local remote-tracking branches that are not present on the remote * @param {boolean} [args.pruneTags] - Prune local tags that don’t exist on the remote, and force-update those tags that differ * @param {string} [args.corsProxy] - Optional [CORS proxy](https://www.npmjs.com/%40isomorphic-git/cors-proxy). Overrides value in repo config. 
* @param {Object} [args.headers] - Additional headers to include in HTTP requests, similar to git's `extraHeader` config * @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise} Resolves successfully when fetch completes * @see FetchResult * * @example * let result = await git.fetch({ * fs, * http, * dir: '/tutorial', * corsProxy: 'https://cors.isomorphic-git.org', * url: 'https://github.com/isomorphic-git/isomorphic-git', * ref: 'main', * depth: 1, * singleBranch: true, * tags: false * }) * console.log(result) * */ async function fetch({ fs, http, onProgress, onMessage, onAuth, onAuthSuccess, onAuthFailure, dir, gitdir = join(dir, '.git'), ref, remote, remoteRef, url, corsProxy, depth = null, since = null, exclude = [], relative = false, tags = false, singleBranch = false, headers = {}, prune = false, pruneTags = false, cache = {}, }) { try { assertParameter('fs', fs); assertParameter('http', http); assertParameter('gitdir', gitdir); return await _fetch({ fs: new FileSystem(fs), cache, http, onProgress, onMessage, onAuth, onAuthSuccess, onAuthFailure, gitdir, ref, remote, remoteRef, url, corsProxy, depth, since, exclude, relative, tags, singleBranch, headers, prune, pruneTags, }) } catch (err) { err.caller = 'git.fetch'; throw err } } // @ts-check /** * Find the merge base for a set of commits * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string[]} args.oids - Which commits * @param {object} [args.cache] - a [cache](cache.md) object * */ async function findMergeBase({ fs, dir, gitdir = join(dir, '.git'), oids, cache = {}, }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); assertParameter('oids', oids); return await _findMergeBase({ fs: new FileSystem(fs), cache, gitdir, oids, }) } catch (err) { err.caller = 'git.findMergeBase'; throw err } } // @ts-check /** * Find the root git directory * * Starting at `filepath`, walks upward until it finds a directory that contains a subdirectory called '.git'. * * @param {Object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {string} args.filepath * * @returns {Promise} Resolves successfully with a root git directory path */ async function _findRoot({ fs, filepath }) { if (await fs.exists(join(filepath, '.git'))) { return filepath } else { const parent = dirname(filepath); if (parent === filepath) { throw new NotFoundError(`git root for ${filepath}`) } return _findRoot({ fs, filepath: parent }) } } // @ts-check /** * Find the root git directory * * Starting at `filepath`, walks upward until it finds a directory that contains a subdirectory called '.git'. * * @param {Object} args * @param {FsClient} args.fs - a file system client * @param {string} args.filepath - The file directory to start searching in. * * @returns {Promise} Resolves successfully with a root git directory path * @throws {NotFoundError} * * @example * let gitroot = await git.findRoot({ * fs, * filepath: '/tutorial/src/utils' * }) * console.log(gitroot) * */ async function findRoot({ fs, filepath }) { try { assertParameter('fs', fs); assertParameter('filepath', filepath); return await _findRoot({ fs: new FileSystem(fs), filepath }) } catch (err) { err.caller = 'git.findRoot'; throw err } } // @ts-check /** * Read an entry from the git config files. 
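 *
 * The `path` uses the same dotted form as `git config`, i.e.
 * `section[.subsection].name`, for example `'user.name'` or `'remote.origin.url'`.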
* * *Caveats:* * - Currently only the local `$GIT_DIR/config` file can be read or written. However, support for the global `~/.gitconfig` and system `$(prefix)/etc/gitconfig` will be added in the future. * - The current parser does not support the more exotic features of the git-config file format such as `[include]` and `[includeIf]`. * * @param {Object} args * @param {FsClient} args.fs - a file system implementation * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.path - The key of the git config entry * * @returns {Promise} Resolves with the config value * * @example * // Read config value * let value = await git.getConfig({ * fs, * dir: '/tutorial', * path: 'remote.origin.url' * }) * console.log(value) * */ async function getConfig({ fs, dir, gitdir = join(dir, '.git'), path }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); assertParameter('path', path); return await _getConfig({ fs: new FileSystem(fs), gitdir, path, }) } catch (err) { err.caller = 'git.getConfig'; throw err } } // @ts-check /** * @param {Object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {string} args.gitdir * @param {string} args.path * * @returns {Promise<Array<any>>} Resolves with an array of the config values * */ async function _getConfigAll({ fs, gitdir, path }) { const config = await GitConfigManager.get({ fs, gitdir }); return config.getall(path) } // @ts-check /** * Read a multi-valued entry from the git config files. * * *Caveats:* * - Currently only the local `$GIT_DIR/config` file can be read or written. However, support for the global `~/.gitconfig` and system `$(prefix)/etc/gitconfig` will be added in the future. * - The current parser does not support the more exotic features of the git-config file format such as `[include]` and `[includeIf]`. * * @param {Object} args * @param {FsClient} args.fs - a file system implementation * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.path - The key of the git config entry * * @returns {Promise<Array<any>>} Resolves with an array of the config values * */ async function getConfigAll({ fs, dir, gitdir = join(dir, '.git'), path, }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); assertParameter('path', path); return await _getConfigAll({ fs: new FileSystem(fs), gitdir, path, }) } catch (err) { err.caller = 'git.getConfigAll'; throw err } } // @ts-check /** * * @typedef {Object} GetRemoteInfoResult - The object returned has the following schema: * @property {string[]} capabilities - The list of capabilities returned by the server (part of the Git protocol) * @property {Object} [refs] * @property {string} [HEAD] - The default branch of the remote * @property {Object} [refs.heads] - The branches on the remote * @property {Object} [refs.pull] - The special branches representing pull requests (non-standard) * @property {Object} [refs.tags] - The tags on the remote * */ /** * List a remote server's branches, tags, and capabilities. * * This is a rare command that doesn't require an `fs`, `dir`, or even `gitdir` argument. * It just communicates to a remote git server, using the first step of the `git-upload-pack` handshake, but stopping short of fetching the packfile.
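* * (Editor's note: a sketch of the returned shape, given the ref-nesting logic in the implementation below; the oids and capability names are hypothetical:) * * { * capabilities: ['multi_ack', 'side-band-64k'], * HEAD: 'refs/heads/main', * refs: { heads: { main: '8b3f...' }, tags: { 'v1.0.0': '4c9a...' } } * }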
* * @param {object} args * @param {HttpClient} args.http - an HTTP client * @param {AuthCallback} [args.onAuth] - optional auth fill callback * @param {AuthFailureCallback} [args.onAuthFailure] - optional auth rejected callback * @param {AuthSuccessCallback} [args.onAuthSuccess] - optional auth approved callback * @param {string} args.url - The URL of the remote repository. Will be gotten from gitconfig if absent. * @param {string} [args.corsProxy] - Optional [CORS proxy](https://www.npmjs.com/%40isomorphic-git/cors-proxy). Overrides value in repo config. * @param {boolean} [args.forPush = false] - By default, the command queries the 'fetch' capabilities. If true, it will ask for the 'push' capabilities. * @param {Object} [args.headers] - Additional headers to include in HTTP requests, similar to git's `extraHeader` config * * @returns {Promise} Resolves successfully with an object listing the branches, tags, and capabilities of the remote. * @see GetRemoteInfoResult * * @example * let info = await git.getRemoteInfo({ * http, * url: * "https://cors.isomorphic-git.org/github.com/isomorphic-git/isomorphic-git.git" * }); * console.log(info); * */ async function getRemoteInfo({ http, onAuth, onAuthSuccess, onAuthFailure, corsProxy, url, headers = {}, forPush = false, }) { try { assertParameter('http', http); assertParameter('url', url); const GitRemoteHTTP = GitRemoteManager.getRemoteHelperFor({ url }); const remote = await GitRemoteHTTP.discover({ http, onAuth, onAuthSuccess, onAuthFailure, corsProxy, service: forPush ? 'git-receive-pack' : 'git-upload-pack', url, headers, protocolVersion: 1, }); // Note: remote.capabilities, remote.refs, and remote.symrefs are Set and Map objects, // but one of the objectives of the public API is to always return JSON-compatible objects // so we must JSONify them. const result = { capabilities: [...remote.capabilities], }; // Convert the flat list into an object tree, because I figure 99% of the time // that will be easier to use. for (const [ref, oid] of remote.refs) { const parts = ref.split('/'); const last = parts.pop(); let o = result; for (const part of parts) { o[part] = o[part] || {}; o = o[part]; } o[last] = oid; } // Merge symrefs on top of refs to more closely match actual git repo layouts for (const [symref, ref] of remote.symrefs) { const parts = symref.split('/'); const last = parts.pop(); let o = result; for (const part of parts) { o[part] = o[part] || {}; o = o[part]; } o[last] = ref; } return result } catch (err) { err.caller = 'git.getRemoteInfo'; throw err } } // @ts-check /** * @param {any} remote * @param {string} prefix * @param {boolean} symrefs * @param {boolean} peelTags * @returns {ServerRef[]} */ function formatInfoRefs(remote, prefix, symrefs, peelTags) { const refs = []; for (const [key, value] of remote.refs) { if (prefix && !key.startsWith(prefix)) continue if (key.endsWith('^{}')) { if (peelTags) { const _key = key.replace('^{}', ''); // Peeled tags are almost always listed immediately after the original tag const last = refs[refs.length - 1]; const r = last.ref === _key ? 
last : refs.find(x => x.ref === _key); if (r === undefined) { throw new Error('I did not expect this to happen') } r.peeled = value; } continue } /** @type ServerRef */ const ref = { ref: key, oid: value }; if (symrefs) { if (remote.symrefs.has(key)) { ref.target = remote.symrefs.get(key); } } refs.push(ref); } return refs } // @ts-check /** * @typedef {Object} GetRemoteInfo2Result - This object has the following schema: * @property {1 | 2} protocolVersion - Git protocol version the server supports * @property {Object} capabilities - An object of capabilities represented as keys and values * @property {ServerRef[]} [refs] - Server refs (they get returned by protocol version 1 whether you want them or not) */ /** * List a remote server's capabilities. * * This is a rare command that doesn't require an `fs`, `dir`, or even `gitdir` argument. * It just communicates to a remote git server, determining what protocol version, commands, and features it supports. * * > The successor to [`getRemoteInfo`](./getRemoteInfo.md), this command supports Git Wire Protocol Version 2. * > Therefore its return type is more complicated as either: * > * > - v1 capabilities (and refs) or * > - v2 capabilities (and no refs) * > * > are returned. * > If you just care about refs, use [`listServerRefs`](./listServerRefs.md) * * @param {object} args * @param {HttpClient} args.http - an HTTP client * @param {AuthCallback} [args.onAuth] - optional auth fill callback * @param {AuthFailureCallback} [args.onAuthFailure] - optional auth rejected callback * @param {AuthSuccessCallback} [args.onAuthSuccess] - optional auth approved callback * @param {string} args.url - The URL of the remote repository. Will be gotten from gitconfig if absent. * @param {string} [args.corsProxy] - Optional [CORS proxy](https://www.npmjs.com/%40isomorphic-git/cors-proxy). Overrides value in repo config. * @param {boolean} [args.forPush = false] - By default, the command queries the 'fetch' capabilities. If true, it will ask for the 'push' capabilities. * @param {Object} [args.headers] - Additional headers to include in HTTP requests, similar to git's `extraHeader` config * @param {1 | 2} [args.protocolVersion = 2] - Which version of the Git Protocol to use. * * @returns {Promise} Resolves successfully with an object listing the capabilities of the remote. * @see GetRemoteInfo2Result * @see ServerRef * * @example * let info = await git.getRemoteInfo2({ * http, * corsProxy: "https://cors.isomorphic-git.org", * url: "https://github.com/isomorphic-git/isomorphic-git.git" * }); * console.log(info); * */ async function getRemoteInfo2({ http, onAuth, onAuthSuccess, onAuthFailure, corsProxy, url, headers = {}, forPush = false, protocolVersion = 2, }) { try { assertParameter('http', http); assertParameter('url', url); const GitRemoteHTTP = GitRemoteManager.getRemoteHelperFor({ url }); const remote = await GitRemoteHTTP.discover({ http, onAuth, onAuthSuccess, onAuthFailure, corsProxy, service: forPush ? 'git-receive-pack' : 'git-upload-pack', url, headers, protocolVersion, }); if (remote.protocolVersion === 2) { /** @type GetRemoteInfo2Result */ return { protocolVersion: remote.protocolVersion, capabilities: remote.capabilities2, } } // Note: remote.capabilities, remote.refs, and remote.symrefs are Set and Map objects, // but one of the objectives of the public API is to always return JSON-compatible objects // so we must JSONify them. 
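// (Editor's note) Protocol v1 capability strings are either bare flags such as 'side-band-64k' or key=value pairs such as 'agent=git/isomorphic-git' (hypothetical examples); the loop below maps bare flags to `true` and keeps the value for key=value pairs.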
/** @type Object */ const capabilities = {}; for (const cap of remote.capabilities) { const [key, value] = cap.split('='); if (value) { capabilities[key] = value; } else { capabilities[key] = true; } } /** @type GetRemoteInfo2Result */ return { protocolVersion: 1, capabilities, refs: formatInfoRefs(remote, undefined, true, true), } } catch (err) { err.caller = 'git.getRemoteInfo2'; throw err } } async function hashObject({ type, object, format = 'content', oid = undefined, }) { if (format !== 'deflated') { if (format !== 'wrapped') { object = GitObject.wrap({ type, object }); } oid = await shasum(object); } return { oid, object } } // @ts-check /** * * @typedef {object} HashBlobResult - The object returned has the following schema: * @property {string} oid - The SHA-1 object id * @property {'blob'} type - The type of the object * @property {Uint8Array} object - The wrapped git object (the thing that is hashed) * @property {'wrapped'} format - The format of the object * */ /** * Compute what the SHA-1 object id of a file would be * * @param {object} args * @param {Uint8Array|string} args.object - The object to write. If `object` is a String then it will be converted to a Uint8Array using UTF-8 encoding. * * @returns {Promise} Resolves successfully with the SHA-1 object id and the wrapped object Uint8Array. * @see HashBlobResult * * @example * let { oid, type, object, format } = await git.hashBlob({ * object: 'Hello world!', * }) * * console.log('oid', oid) * console.log('type', type) * console.log('object', object) * console.log('format', format) * */ async function hashBlob({ object }) { try { assertParameter('object', object); // Convert object to buffer if (typeof object === 'string') { object = Buffer.from(object, 'utf8'); } else { object = Buffer.from(object); } const type = 'blob'; const { oid, object: _object } = await hashObject({ type: 'blob', format: 'content', object, }); return { oid, type, object: new Uint8Array(_object), format: 'wrapped' } } catch (err) { err.caller = 'git.hashBlob'; throw err } } // @ts-check /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {any} args.cache * @param {ProgressCallback} [args.onProgress] * @param {string} args.dir * @param {string} args.gitdir * @param {string} args.filepath * * @returns {Promise<{oids: string[]}>} */ async function _indexPack({ fs, cache, onProgress, dir, gitdir, filepath, }) { try { filepath = join(dir, filepath); const pack = await fs.read(filepath); const getExternalRefDelta = oid => _readObject({ fs, cache, gitdir, oid }); const idx = await GitPackIndex.fromPack({ pack, getExternalRefDelta, onProgress, }); await fs.write(filepath.replace(/\.pack$/, '.idx'), await idx.toBuffer()); return { oids: [...idx.hashes], } } catch (err) { err.caller = 'git.indexPack'; throw err } } // @ts-check /** * Create the .idx file for a given .pack file * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {ProgressCallback} [args.onProgress] - optional progress event callback * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.filepath - The path to the .pack file to index * @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise<{oids: string[]}>} Resolves with a list of the SHA-1 object ids contained in the packfile * * @example * let packfiles = await 
fs.promises.readdir('/tutorial/.git/objects/pack') * packfiles = packfiles.filter(name => name.endsWith('.pack')) * console.log('packfiles', packfiles) * * const { oids } = await git.indexPack({ * fs, * dir: '/tutorial', * filepath: `.git/objects/pack/${packfiles[0]}`, * async onProgress (evt) { * console.log(`${evt.phase}: ${evt.loaded} / ${evt.total}`) * } * }) * console.log(oids) * */ async function indexPack({ fs, onProgress, dir, gitdir = join(dir, '.git'), filepath, cache = {}, }) { try { assertParameter('fs', fs); assertParameter('dir', dir); assertParameter('gitdir', gitdir); assertParameter('filepath', filepath); return await _indexPack({ fs: new FileSystem(fs), cache, onProgress, dir, gitdir, filepath, }) } catch (err) { err.caller = 'git.indexPack'; throw err } } // @ts-check /** * Initialize a new repository * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {boolean} [args.bare = false] - Initialize a bare repository * @param {string} [args.defaultBranch = 'master'] - The name of the default branch (might be changed to a required argument in 2.0.0) * @returns {Promise} Resolves successfully when filesystem operations are complete * * @example * await git.init({ fs, dir: '/tutorial' }) * console.log('done') * */ async function init({ fs, bare = false, dir, gitdir = bare ? dir : join(dir, '.git'), defaultBranch = 'master', }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); if (!bare) { assertParameter('dir', dir); } return await _init({ fs: new FileSystem(fs), bare, dir, gitdir, defaultBranch, }) } catch (err) { err.caller = 'git.init'; throw err } } // @ts-check /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {any} args.cache * @param {string} args.gitdir * @param {string} args.oid * @param {string} args.ancestor * @param {number} args.depth - Maximum depth to search before giving up. -1 means no maximum depth. * * @returns {Promise} */ async function _isDescendent({ fs, cache, gitdir, oid, ancestor, depth, }) { const shallows = await GitShallowManager.read({ fs, gitdir }); if (!oid) { throw new MissingParameterError('oid') } if (!ancestor) { throw new MissingParameterError('ancestor') } // If you don't like this behavior, add your own check. // Edge cases make it hard to define a perfect solution. if (oid === ancestor) return false // We do not use recursion here, because that would lead to depth-first traversal, // and we want to maintain a breadth-first traversal to avoid hitting shallow clone depth cutoffs. const queue = [oid]; const visited = new Set(); let searchdepth = 0; while (queue.length) { if (searchdepth++ === depth) { throw new MaxDepthError(depth) } const oid = queue.shift(); const { type, object } = await _readObject({ fs, cache, gitdir, oid, }); if (type !== 'commit') { throw new ObjectTypeError(oid, type, 'commit') } const commit = GitCommit.from(object).parse(); // Are any of the parents the sought-after ancestor?
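// (Editor's note) For example, given the linear history A <- B <- C (hypothetical oids), calling this with oid=C and ancestor=A dequeues C, finds only B among its parents, queues B, then finds A among B's parents on the next iteration and returns true.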
for (const parent of commit.parent) { if (parent === ancestor) return true } // If not, add them to the queue (unless we know this is a shallow commit) if (!shallows.has(oid)) { for (const parent of commit.parent) { if (!visited.has(parent)) { queue.push(parent); visited.add(parent); } } } // Eventually, we'll traverse the entire tree to the roots, where all the parents are empty arrays, // or hit the shallow depth and throw an error. Excluding the possibility of grafts, or // different branches cloned to different depths, you would hit this error at the same time // for all parents, so trying to continue is futile. } return false } // @ts-check /** * Check whether a git commit is descended from another * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.oid - The descendent commit * @param {string} args.ancestor - The (proposed) ancestor commit * @param {number} [args.depth = -1] - Maximum depth to search before giving up. -1 means no maximum depth. * @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise} Resolves to true if `oid` is a descendent of `ancestor` * * @example * let oid = await git.resolveRef({ fs, dir: '/tutorial', ref: 'main' }) * let ancestor = await git.resolveRef({ fs, dir: '/tutorial', ref: 'v0.20.0' }) * console.log(oid, ancestor) * await git.isDescendent({ fs, dir: '/tutorial', oid, ancestor, depth: -1 }) * */ async function isDescendent({ fs, dir, gitdir = join(dir, '.git'), oid, ancestor, depth = -1, cache = {}, }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); assertParameter('oid', oid); assertParameter('ancestor', ancestor); return await _isDescendent({ fs: new FileSystem(fs), cache, gitdir, oid, ancestor, depth, }) } catch (err) { err.caller = 'git.isDescendent'; throw err } } // @ts-check /** * Test whether a filepath should be ignored (because of .gitignore or .git/exclude) * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.filepath - The filepath to test * * @returns {Promise} Resolves to true if the file should be ignored * * @example * await git.isIgnored({ fs, dir: '/tutorial', filepath: 'docs/add.md' }) * */ async function isIgnored({ fs, dir, gitdir = join(dir, '.git'), filepath, }) { try { assertParameter('fs', fs); assertParameter('dir', dir); assertParameter('gitdir', gitdir); assertParameter('filepath', filepath); return GitIgnoreManager.isIgnored({ fs: new FileSystem(fs), dir, gitdir, filepath, }) } catch (err) { err.caller = 'git.isIgnored'; throw err } } // @ts-check /** * List branches * * By default it lists local branches. If a 'remote' is specified, it lists the remote's branches. When listing remote branches, the HEAD branch is not filtered out, so it may be included in the list of results. * * Note that specifying a remote does not actually contact the server and update the list of branches. * If you want an up-to-date list, first do a `fetch` to that remote. * (Which branch you fetch doesn't matter - the list of branches available on the remote is updated during the fetch handshake.)
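* * (Editor's note: a sketch of refreshing the remote list before reading it, reusing the `fetch` command documented earlier in this file; the '/tutorial' repo and 'origin' remote are the same hypothetical setup as the other examples:) * * await git.fetch({ fs, http, dir: '/tutorial', remote: 'origin', depth: 1, singleBranch: true }) * let remoteBranches = await git.listBranches({ fs, dir: '/tutorial', remote: 'origin' })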
* * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} [args.remote] - Instead of the branches in `refs/heads`, list the branches in `refs/remotes/${remote}`. * * @returns {Promise<Array<string>>} Resolves successfully with an array of branch names * * @example * let branches = await git.listBranches({ fs, dir: '/tutorial' }) * console.log(branches) * let remoteBranches = await git.listBranches({ fs, dir: '/tutorial', remote: 'origin' }) * console.log(remoteBranches) * */ async function listBranches({ fs, dir, gitdir = join(dir, '.git'), remote, }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); return GitRefManager.listBranches({ fs: new FileSystem(fs), gitdir, remote, }) } catch (err) { err.caller = 'git.listBranches'; throw err } } // @ts-check /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {object} args.cache * @param {string} args.gitdir * @param {string} [args.ref] * * @returns {Promise<Array<string>>} */ async function _listFiles({ fs, gitdir, ref, cache }) { if (ref) { const oid = await GitRefManager.resolve({ gitdir, fs, ref }); const filenames = []; await accumulateFilesFromOid({ fs, cache, gitdir, oid, filenames, prefix: '', }); return filenames } else { return GitIndexManager.acquire({ fs, gitdir, cache }, async function( index ) { return index.entries.map(x => x.path) }) } } async function accumulateFilesFromOid({ fs, cache, gitdir, oid, filenames, prefix, }) { const { tree } = await _readTree({ fs, cache, gitdir, oid }); // TODO: Use `walk` to do this. Should be faster. for (const entry of tree) { if (entry.type === 'tree') { await accumulateFilesFromOid({ fs, cache, gitdir, oid: entry.oid, filenames, prefix: join(prefix, entry.path), }); } else { filenames.push(join(prefix, entry.path)); } } } // @ts-check /** * List all the files in the git index or a commit * * > Note: This function is efficient for listing the files in the staging area, but listing all the files in a commit requires recursively walking through the git object store. * > If you do not require a complete list of every file, better performance can be achieved by using [walk](./walk) and ignoring subdirectories you don't care about.
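* * (Editor's note: a minimal sketch of that `walk`-based alternative, assuming this library's `walk`/`TREE` API and its convention that returning `null` from `map` skips an entry and its subtree; the 'node_modules' filter is illustrative:) * * let files = await git.walk({ * fs, * dir: '/tutorial', * trees: [git.TREE({ ref: 'HEAD' })], * map: async (filepath, [entry]) => { * if (filepath === 'node_modules') return null * if ((await entry.type()) === 'blob') return filepath * }, * })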
* * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} [args.ref] - Return a list of all the files in the commit at `ref` instead of the files currently in the git index (aka staging area) * @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise<Array<string>>} Resolves successfully with an array of filepaths * * @example * // All the files in the previous commit * let files = await git.listFiles({ fs, dir: '/tutorial', ref: 'HEAD' }) * console.log(files) * // All the files in the current staging area * files = await git.listFiles({ fs, dir: '/tutorial' }) * console.log(files) * */ async function listFiles({ fs, dir, gitdir = join(dir, '.git'), ref, cache = {}, }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); return await _listFiles({ fs: new FileSystem(fs), cache, gitdir, ref, }) } catch (err) { err.caller = 'git.listFiles'; throw err } } // @ts-check /** * List all the object notes * * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {any} args.cache * @param {string} args.gitdir * @param {string} args.ref * * @returns {Promise<Array<{target: string, note: string}>>} */ async function _listNotes({ fs, cache, gitdir, ref }) { // Get the current note commit let parent; try { parent = await GitRefManager.resolve({ gitdir, fs, ref }); } catch (err) { if (err instanceof NotFoundError) { return [] } } // Create the current note tree const result = await _readTree({ fs, cache, gitdir, oid: parent, }); // Format the tree entries const notes = result.tree.map(entry => ({ target: entry.path, note: entry.oid, })); return notes } // @ts-check /** * List all the object notes * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} [args.ref] - The notes ref to look under * @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise<Array<{target: string, note: string}>>} Resolves successfully with an array of entries containing SHA-1 object ids of the note and the object the note targets */ async function listNotes({ fs, dir, gitdir = join(dir, '.git'), ref = 'refs/notes/commits', cache = {}, }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); assertParameter('ref', ref); return await _listNotes({ fs: new FileSystem(fs), cache, gitdir, ref, }) } catch (err) { err.caller = 'git.listNotes'; throw err } } // @ts-check /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {string} args.gitdir * * @returns {Promise<Array<{remote: string, url: string}>>} */ async function _listRemotes({ fs, gitdir }) { const config = await GitConfigManager.get({ fs, gitdir }); const remoteNames = await config.getSubsections('remote'); const remotes = await Promise.all( remoteNames.map(async remote => { const url = await config.get(`remote.${remote}.url`); return { remote, url } }) ); return remotes } // @ts-check /** * List remotes * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * * @returns {Promise<Array<{remote: string, url: string}>>} Resolves
successfully with an array of `{remote, url}` objects * * @example * let remotes = await git.listRemotes({ fs, dir: '/tutorial' }) * console.log(remotes) * */ async function listRemotes({ fs, dir, gitdir = join(dir, '.git') }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); return await _listRemotes({ fs: new FileSystem(fs), gitdir, }) } catch (err) { err.caller = 'git.listRemotes'; throw err } } /** * @typedef {Object} ServerRef - This object has the following schema: * @property {string} ref - The name of the ref * @property {string} oid - The SHA-1 object id the ref points to * @property {string} [target] - The target ref pointed to by a symbolic ref * @property {string} [peeled] - If the oid is the SHA-1 object id of an annotated tag, this is the SHA-1 object id that the annotated tag points to */ async function parseListRefsResponse(stream) { const read = GitPktLine.streamReader(stream); // TODO: when we re-write everything to minimize memory usage, // we could make this a generator const refs = []; let line; while (true) { line = await read(); if (line === true) break if (line === null) continue line = line.toString('utf8').replace(/\n$/, ''); const [oid, ref, ...attrs] = line.split(' '); const r = { ref, oid }; for (const attr of attrs) { const [name, value] = attr.split(':'); if (name === 'symref-target') { r.target = value; } else if (name === 'peeled') { r.peeled = value; } } refs.push(r); } return refs } /** * @param {object} args * @param {string} [args.prefix] - Only list refs that start with this prefix * @param {boolean} [args.symrefs = false] - Include symbolic ref targets * @param {boolean} [args.peelTags = false] - Include peeled tag values * @returns {Promise<Uint8Array[]>} */ async function writeListRefsRequest({ prefix, symrefs, peelTags }) { const packstream = []; // command packstream.push(GitPktLine.encode('command=ls-refs\n')); // capability-list packstream.push(GitPktLine.encode(`agent=${pkg.agent}\n`)); // [command-args] if (peelTags || symrefs || prefix) { packstream.push(GitPktLine.delim()); } if (peelTags) packstream.push(GitPktLine.encode('peel')); if (symrefs) packstream.push(GitPktLine.encode('symrefs')); if (prefix) packstream.push(GitPktLine.encode(`ref-prefix ${prefix}`)); packstream.push(GitPktLine.flush()); return packstream } // @ts-check /** * Fetch a list of refs (branches, tags, etc) from a server. * * This is a rare command that doesn't require an `fs`, `dir`, or even `gitdir` argument. * It just requires an `http` argument. * * ### About `protocolVersion` * * There's a rather fun trade-off between Git Protocol Version 1 and Git Protocol Version 2. * Version 2 actually requires 2 HTTP requests instead of 1, making it similar to fetch or push in that regard. * However, version 2 supports server-side filtering by prefix, whereas that filtering is done client-side in version 1. * Which protocol is most efficient therefore depends on the number of refs on the remote, the latency of the server, and the speed of the network connection. * For small repos (or fast Internet connections), the requirement to make two trips to the server makes protocol 2 slower. * But for large repos (or slow Internet connections), the decreased payload size of the second request makes up for the additional request.
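* * (Editor's note: a rough worked example under assumed numbers: at 200ms per round trip and 1 MB/s of bandwidth, 9 MB of protocol 1 refs costs roughly 0.2s + 9s ≈ 9.2s, while protocol 2's two round trips plus ~0.4 MB cost roughly 0.4s + 0.4s ≈ 0.8s.)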
* * Hard numbers vary by situation, but here are some numbers from my machine: * * Using isomorphic-git in a browser, with a CORS proxy, listing only the branches (refs/heads) of https://github.com/isomorphic-git/isomorphic-git * - Protocol Version 1 took ~300ms and transferred 84 KB. * - Protocol Version 2 took ~500ms and transferred 4.1 KB. * * Using isomorphic-git in a browser, with a CORS proxy, listing only the branches (refs/heads) of https://gitlab.com/gitlab-org/gitlab * - Protocol Version 1 took ~4900ms and transferred 9.41 MB. * - Protocol Version 2 took ~1280ms and transferred 433 KB. * * Finally, there is a fun quirk regarding the `symrefs` parameter. * Protocol Version 1 will generally only return the `HEAD` symref and not others. * Historically, this has meant that servers don't use symbolic refs except for `HEAD`, which is used to point at the "default branch". * However Protocol Version 2 can return *all* the symbolic refs on the server. * So if you are running your own git server, you could take advantage of that, I guess. * * #### TL;DR * If you are _not_ taking advantage of `prefix`, I would recommend `protocolVersion: 1`. * Otherwise, I recommend using the default, which is `protocolVersion: 2`. * * @param {object} args * @param {HttpClient} args.http - an HTTP client * @param {AuthCallback} [args.onAuth] - optional auth fill callback * @param {AuthFailureCallback} [args.onAuthFailure] - optional auth rejected callback * @param {AuthSuccessCallback} [args.onAuthSuccess] - optional auth approved callback * @param {string} args.url - The URL of the remote repository. Will be gotten from gitconfig if absent. * @param {string} [args.corsProxy] - Optional [CORS proxy](https://www.npmjs.com/%40isomorphic-git/cors-proxy). Overrides value in repo config. * @param {boolean} [args.forPush = false] - By default, the command queries the 'fetch' capabilities. If true, it will ask for the 'push' capabilities. * @param {Object} [args.headers] - Additional headers to include in HTTP requests, similar to git's `extraHeader` config * @param {1 | 2} [args.protocolVersion = 2] - Which version of the Git Protocol to use.
* @param {string} [args.prefix] - Only list refs that start with this prefix * @param {boolean} [args.symrefs = false] - Include symbolic ref targets * @param {boolean} [args.peelTags = false] - Include annotated tag peeled targets * * @returns {Promise} Resolves successfully with an array of ServerRef objects * @see ServerRef * * @example * // List all the branches on a repo * let refs = await git.listServerRefs({ * http, * corsProxy: "https://cors.isomorphic-git.org", * url: "https://github.com/isomorphic-git/isomorphic-git.git", * prefix: "refs/heads/", * }); * console.log(refs); * * @example * // Get the default branch on a repo * let refs = await git.listServerRefs({ * http, * corsProxy: "https://cors.isomorphic-git.org", * url: "https://github.com/isomorphic-git/isomorphic-git.git", * prefix: "HEAD", * symrefs: true, * }); * console.log(refs); * * @example * // List all the tags on a repo * let refs = await git.listServerRefs({ * http, * corsProxy: "https://cors.isomorphic-git.org", * url: "https://github.com/isomorphic-git/isomorphic-git.git", * prefix: "refs/tags/", * peelTags: true, * }); * console.log(refs); * * @example * // List all the pull requests on a repo * let refs = await git.listServerRefs({ * http, * corsProxy: "https://cors.isomorphic-git.org", * url: "https://github.com/isomorphic-git/isomorphic-git.git", * prefix: "refs/pull/", * }); * console.log(refs); * */ async function listServerRefs({ http, onAuth, onAuthSuccess, onAuthFailure, corsProxy, url, headers = {}, forPush = false, protocolVersion = 2, prefix, symrefs, peelTags, }) { try { assertParameter('http', http); assertParameter('url', url); const remote = await GitRemoteHTTP.discover({ http, onAuth, onAuthSuccess, onAuthFailure, corsProxy, service: forPush ? 'git-receive-pack' : 'git-upload-pack', url, headers, protocolVersion, }); if (remote.protocolVersion === 1) { return formatInfoRefs(remote, prefix, symrefs, peelTags) } // Protocol Version 2 const body = await writeListRefsRequest({ prefix, symrefs, peelTags }); const res = await GitRemoteHTTP.connect({ http, auth: remote.auth, headers, corsProxy, service: forPush ? 
'git-receive-pack' : 'git-upload-pack', url, body, }); return parseListRefsResponse(res.body) } catch (err) { err.caller = 'git.listServerRefs'; throw err } } // @ts-check /** * List tags * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * * @returns {Promise<Array<string>>} Resolves successfully with an array of tag names * * @example * let tags = await git.listTags({ fs, dir: '/tutorial' }) * console.log(tags) * */ async function listTags({ fs, dir, gitdir = join(dir, '.git') }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); return GitRefManager.listTags({ fs: new FileSystem(fs), gitdir }) } catch (err) { err.caller = 'git.listTags'; throw err } } async function resolveCommit({ fs, cache, gitdir, oid }) { const { type, object } = await _readObject({ fs, cache, gitdir, oid }); // Resolve annotated tag objects to the commit they (eventually) point to if (type === 'tag') { oid = GitAnnotatedTag.from(object).parse().object; return resolveCommit({ fs, cache, gitdir, oid }) } if (type !== 'commit') { throw new ObjectTypeError(oid, type, 'commit') } return { commit: GitCommit.from(object), oid } } // @ts-check /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {any} args.cache * @param {string} args.gitdir * @param {string} args.oid * * @returns {Promise} Resolves successfully with a git commit object * @see ReadCommitResult * @see CommitObject * */ async function _readCommit({ fs, cache, gitdir, oid }) { const { commit, oid: commitOid } = await resolveCommit({ fs, cache, gitdir, oid, }); const result = { oid: commitOid, commit: commit.parse(), payload: commit.withoutSignature(), }; // @ts-ignore return result } function compareAge(a, b) { return a.committer.timestamp - b.committer.timestamp } // @ts-check // the empty file content object id const EMPTY_OID = 'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391'; async function resolveFileIdInTree({ fs, cache, gitdir, oid, fileId }) { if (fileId === EMPTY_OID) return const _oid = oid; let filepath; const result = await resolveTree({ fs, cache, gitdir, oid }); const tree = result.tree; if (fileId === result.oid) { filepath = result.path; } else { filepath = await _resolveFileId({ fs, cache, gitdir, tree, fileId, oid: _oid, }); if (Array.isArray(filepath)) { if (filepath.length === 0) filepath = undefined; else if (filepath.length === 1) filepath = filepath[0]; } } return filepath } async function _resolveFileId({ fs, cache, gitdir, tree, fileId, oid, filepaths = [], parentPath = '', }) { const walks = tree.entries().map(function(entry) { let result; if (entry.oid === fileId) { result = join(parentPath, entry.path); filepaths.push(result); } else if (entry.type === 'tree') { result = _readObject({ fs, cache, gitdir, oid: entry.oid, }).then(function({ object }) { return _resolveFileId({ fs, cache, gitdir, tree: GitTree.from(object), fileId, oid, filepaths, parentPath: join(parentPath, entry.path), }) }); } return result }); await Promise.all(walks); return filepaths } // @ts-check /** * Get commit descriptions from the git history * * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {any} args.cache * @param {string} args.gitdir * @param {string=} args.filepath optional; only return commits in which this filepath changed * @param {string} args.ref * @param {number|void} args.depth * @param {Date|void} args.since * @param 
{boolean=} [args.force=false] do not throw an error if the filepath does not exist (works only for a single file). defaults to false * @param {boolean=} [args.follow=false] Continue listing the history of a file beyond renames (works only for a single file). defaults to false * * @returns {Promise<Array<ReadCommitResult>>} Resolves to an array of ReadCommitResult objects * @see ReadCommitResult * @see CommitObject * * @example * let commits = await git.log({ fs, dir: '/', depth: 5, ref: 'master' }) * console.log(commits) * */ async function _log({ fs, cache, gitdir, filepath, ref, depth, since, force, follow, }) { const sinceTimestamp = typeof since === 'undefined' ? undefined : Math.floor(since.valueOf() / 1000); // TODO: In the future, we may want to have an API where we return an // async iterator that emits commits. const commits = []; const shallowCommits = await GitShallowManager.read({ fs, gitdir }); const oid = await GitRefManager.resolve({ fs, gitdir, ref }); const tips = [await _readCommit({ fs, cache, gitdir, oid })]; let lastFileOid; let lastCommit; let isOk; function endCommit(commit) { if (isOk && filepath) commits.push(commit); } while (tips.length > 0) { const commit = tips.pop(); // Stop the log if we've hit the age limit if ( sinceTimestamp !== undefined && commit.commit.committer.timestamp <= sinceTimestamp ) { break } if (filepath) { let vFileOid; try { vFileOid = await resolveFilepath({ fs, cache, gitdir, oid: commit.commit.tree, filepath, }); if (lastCommit && lastFileOid !== vFileOid) { commits.push(lastCommit); } lastFileOid = vFileOid; lastCommit = commit; isOk = true; } catch (e) { if (e instanceof NotFoundError) { let found = follow && lastFileOid; if (found) { found = await resolveFileIdInTree({ fs, cache, gitdir, oid: commit.commit.tree, fileId: lastFileOid, }); if (found) { if (Array.isArray(found)) { if (lastCommit) { const lastFound = await resolveFileIdInTree({ fs, cache, gitdir, oid: lastCommit.commit.tree, fileId: lastFileOid, }); if (Array.isArray(lastFound)) { found = found.filter(p => lastFound.indexOf(p) === -1); if (found.length === 1) { found = found[0]; filepath = found; if (lastCommit) commits.push(lastCommit); } else { found = false; if (lastCommit) commits.push(lastCommit); break } } } } else { filepath = found; if (lastCommit) commits.push(lastCommit); } } } if (!found) { if (!force && !follow) throw e if (isOk && lastFileOid) { commits.push(lastCommit); // break } } lastCommit = commit; isOk = false; } else throw e } } else { commits.push(commit); } // Stop the loop if we have enough commits now. if (depth !== undefined && commits.length === depth) { endCommit(commit); break } // If this is not a shallow commit... if (!shallowCommits.has(commit.oid)) { // Add the parents of this commit to the queue // Note: for the case of a commit with no parents, the loop below adds nothing, having no net effect.
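// (Editor's note) A sketch of the traversal order: given the merge history A <- B, A <- C, B,C <- D (hypothetical oids), D is processed first, then B and C are queued; the sort at the bottom of the loop keeps `tips` ordered by committer timestamp so `tips.pop()` always yields the newest pending commit.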
for (const oid of commit.commit.parent) { const commit = await _readCommit({ fs, cache, gitdir, oid }); if (!tips.map(commit => commit.oid).includes(commit.oid)) { tips.push(commit); } } } // Stop the loop if there are no more commit parents if (tips.length === 0) { endCommit(commit); } // Process tips in order by age tips.sort((a, b) => compareAge(a.commit, b.commit)); } return commits } // @ts-check /** * Get commit descriptions from the git history * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string=} args.filepath optional; only return commits in which this filepath changed * @param {string} [args.ref = 'HEAD'] - The commit to begin walking backwards through the history from * @param {number=} [args.depth] - Limit the number of commits returned. No limit by default. * @param {Date} [args.since] - Return history newer than the given date. Can be combined with `depth` to get whichever is shorter. * @param {boolean=} [args.force=false] do not throw an error if the filepath does not exist (works only for a single file). defaults to false * @param {boolean=} [args.follow=false] Continue listing the history of a file beyond renames (works only for a single file). defaults to false * @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise<Array<ReadCommitResult>>} Resolves to an array of ReadCommitResult objects * @see ReadCommitResult * @see CommitObject * * @example * let commits = await git.log({ * fs, * dir: '/tutorial', * depth: 5, * ref: 'main' * }) * console.log(commits) * */ async function log({ fs, dir, gitdir = join(dir, '.git'), filepath, ref = 'HEAD', depth, since, // Date force, follow, cache = {}, }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); assertParameter('ref', ref); return await _log({ fs: new FileSystem(fs), cache, gitdir, filepath, ref, depth, since, force, follow, }) } catch (err) { err.caller = 'git.log'; throw err } } // @ts-check /** * * @typedef {Object} MergeResult - Returns an object with a schema like this: * @property {string} [oid] - The SHA-1 object id that is now at the head of the branch. Absent only if `dryRun` was specified and `mergeCommit` is true. * @property {boolean} [alreadyMerged] - True if the branch was already merged so no changes were made * @property {boolean} [fastForward] - True if it was a fast-forward merge * @property {boolean} [mergeCommit] - True if merge resulted in a merge commit * @property {string} [tree] - The SHA-1 object id of the tree resulting from a merge commit * */ /** * Merge two branches * * ## Limitations * * Currently it does not support incomplete merges. That is, if there are merge conflicts that it cannot solve * with the built-in diff3 algorithm, it will not modify the working dir and will throw a [`MergeNotSupportedError`](./errors.md#mergenotsupportederror) error. * * Currently it will fail if multiple candidate merge bases are found. (It doesn't yet implement the recursive merge strategy.) * * Currently it does not support selecting alternative merge strategies.
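* * (Editor's note: a sketch of guarding against that unsupported-conflict case; the `code` property on the thrown error is assumed from this library's error classes:) * * try { * await git.merge({ fs, dir: '/tutorial', ours: 'main', theirs: 'remotes/origin/main' }) * } catch (e) { * if (e.code === 'MergeNotSupportedError') { * console.log('conflicts - merge by hand') * } else throw e * }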
* * @param {object} args * @param {FsClient} args.fs - a file system client * @param {SignCallback} [args.onSign] - a PGP signing implementation * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} [args.ours] - The branch receiving the merge. If undefined, defaults to the current branch. * @param {string} args.theirs - The branch to be merged * @param {boolean} [args.fastForward = true] - If false, create a merge commit in all cases. * @param {boolean} [args.fastForwardOnly = false] - If true, then non-fast-forward merges will throw an Error instead of performing a merge. * @param {boolean} [args.dryRun = false] - If true, simulates a merge so you can test whether it would succeed. * @param {boolean} [args.noUpdateBranch = false] - If true, does not update the branch pointer after creating the commit. * @param {string} [args.message] - Overrides the default auto-generated merge commit message * @param {Object} [args.author] - passed to [commit](commit.md) when creating a merge commit * @param {string} [args.author.name] - Default is `user.name` config. * @param {string} [args.author.email] - Default is `user.email` config. * @param {number} [args.author.timestamp=Math.floor(Date.now()/1000)] - Set the author timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00). * @param {number} [args.author.timezoneOffset] - Set the author timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`. * @param {Object} [args.committer] - passed to [commit](commit.md) when creating a merge commit * @param {string} [args.committer.name] - Default is `user.name` config. * @param {string} [args.committer.email] - Default is `user.email` config. * @param {number} [args.committer.timestamp=Math.floor(Date.now()/1000)] - Set the committer timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00). * @param {number} [args.committer.timezoneOffset] - Set the committer timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`. 
* @param {string} [args.signingKey] - passed to [commit](commit.md) when creating a merge commit * @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise} Resolves to a description of the merge operation * @see MergeResult * * @example * let m = await git.merge({ * fs, * dir: '/tutorial', * ours: 'main', * theirs: 'remotes/origin/main' * }) * console.log(m) * */ async function merge({ fs: _fs, onSign, dir, gitdir = join(dir, '.git'), ours, theirs, fastForward = true, fastForwardOnly = false, dryRun = false, noUpdateBranch = false, message, author: _author, committer: _committer, signingKey, cache = {}, }) { try { assertParameter('fs', _fs); if (signingKey) { assertParameter('onSign', onSign); } const fs = new FileSystem(_fs); const author = await normalizeAuthorObject({ fs, gitdir, author: _author }); if (!author && (!fastForwardOnly || !fastForward)) { throw new MissingNameError('author') } const committer = await normalizeCommitterObject({ fs, gitdir, author, committer: _committer, }); if (!committer && (!fastForwardOnly || !fastForward)) { throw new MissingNameError('committer') } return await _merge({ fs, cache, gitdir, ours, theirs, fastForward, fastForwardOnly, dryRun, noUpdateBranch, message, author, committer, signingKey, onSign, }) } catch (err) { err.caller = 'git.merge'; throw err } } /** * @enum {number} */ const types = { commit: 0b0010000, tree: 0b0100000, blob: 0b0110000, tag: 0b1000000, ofs_delta: 0b1100000, ref_delta: 0b1110000, }; /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {any} args.cache * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string[]} args.oids */ async function _pack({ fs, cache, dir, gitdir = join(dir, '.git'), oids, }) { const hash = new Hash(); const outputStream = []; function write(chunk, enc) { const buff = Buffer.from(chunk, enc); outputStream.push(buff); hash.update(buff); } async function writeObject({ stype, object }) { // Object type is encoded in bits 654 const type = types[stype]; // The length encoding gets complicated. let length = object.length; // Whether the next byte is part of the variable-length encoded number // is encoded in bit 7 let multibyte = length > 0b1111 ? 0b10000000 : 0b0; // Last four bits of length are encoded in bits 3210 const lastFour = length & 0b1111; // Discard those bits length = length >>> 4; // The first byte is then (1-bit multibyte?), (3-bit type), (4 least significant bits of length) let byte = (multibyte | type | lastFour).toString(16); write(byte, 'hex'); // Now we keep chopping away at length 7-bits at a time until it's zero, // writing out the bytes in what amounts to little-endian order. while (multibyte) { multibyte = length > 0b01111111 ? 0b10000000 : 0b0; byte = multibyte | (length & 0b01111111); write(padHex(2, byte), 'hex'); length = length >>> 7; } // Lastly, we can compress and write the object.
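// (Editor's note) Worked example of the header bytes above, for a 300-byte blob: 300 = 0b100101100, so the first byte is 0b10111100 (multibyte flag set, blob type 0b011, low four bits 0b1100) and the remaining bits 0b10010 fit in a single final byte, 0x12.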
write(Buffer.from(await deflate(object))); } write('PACK'); write('00000002', 'hex'); // Write a 4 byte (32-bit) int write(padHex(8, oids.length), 'hex'); for (const oid of oids) { const { type, object } = await _readObject({ fs, cache, gitdir, oid }); await writeObject({ write, object, stype: type }); } // Write SHA1 checksum const digest = hash.digest(); outputStream.push(digest); return outputStream } // @ts-check /** * * @typedef {Object} PackObjectsResult The packObjects command returns an object with two properties: * @property {string} filename - The suggested filename for the packfile if you want to save it to disk somewhere. It includes the packfile SHA. * @property {Uint8Array} [packfile] - The packfile contents. Not present if `write` parameter was true, in which case the packfile was written straight to disk. */ /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {any} args.cache * @param {string} args.gitdir * @param {string[]} args.oids * @param {boolean} args.write * * @returns {Promise} * @see PackObjectsResult */ async function _packObjects({ fs, cache, gitdir, oids, write }) { const buffers = await _pack({ fs, cache, gitdir, oids }); const packfile = Buffer.from(await collect(buffers)); const packfileSha = packfile.slice(-20).toString('hex'); const filename = `pack-${packfileSha}.pack`; if (write) { await fs.write(join(gitdir, `objects/pack/${filename}`), packfile); return { filename } } return { filename, packfile: new Uint8Array(packfile), } } // @ts-check /** * * @typedef {Object} PackObjectsResult The packObjects command returns an object with two properties: * @property {string} filename - The suggested filename for the packfile if you want to save it to disk somewhere. It includes the packfile SHA. * @property {Uint8Array} [packfile] - The packfile contents. Not present if `write` parameter was true, in which case the packfile was written straight to disk. 
*/ /** * Create a packfile from an array of SHA-1 object ids * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string[]} args.oids - An array of SHA-1 object ids to be included in the packfile * @param {boolean} [args.write = false] - Whether to save the packfile to disk or not * @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise} Resolves successfully when the packfile is ready with the filename and buffer * @see PackObjectsResult * * @example * // Create a packfile containing only an empty tree * let { packfile } = await git.packObjects({ * fs, * dir: '/tutorial', * oids: ['4b825dc642cb6eb9a060e54bf8d69288fbee4904'] * }) * console.log(packfile) * */ async function packObjects({ fs, dir, gitdir = join(dir, '.git'), oids, write = false, cache = {}, }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); assertParameter('oids', oids); return await _packObjects({ fs: new FileSystem(fs), cache, gitdir, oids, write, }) } catch (err) { err.caller = 'git.packObjects'; throw err } } // @ts-check /** * Fetch and merge commits from a remote repository * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {HttpClient} args.http - an HTTP client * @param {ProgressCallback} [args.onProgress] - optional progress event callback * @param {MessageCallback} [args.onMessage] - optional message event callback * @param {AuthCallback} [args.onAuth] - optional auth fill callback * @param {AuthFailureCallback} [args.onAuthFailure] - optional auth rejected callback * @param {AuthSuccessCallback} [args.onAuthSuccess] - optional auth approved callback * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} [args.ref] - Which branch to merge into. By default this is the currently checked out branch. * @param {string} [args.url] - (Added in 1.1.0) The URL of the remote repository. The default is the value set in the git config for that remote. * @param {string} [args.remote] - (Added in 1.1.0) If URL is not specified, determines which remote to use. * @param {string} [args.remoteRef] - (Added in 1.1.0) The name of the branch on the remote to fetch. By default this is the configured remote tracking branch. * @param {string} [args.corsProxy] - Optional [CORS proxy](https://www.npmjs.com/%40isomorphic-git/cors-proxy). Overrides value in repo config. * @param {boolean} [args.singleBranch = false] - Instead of the default behavior of fetching all the branches, only fetch a single branch. * @param {boolean} [args.fastForward = true] - If false, only create merge commits. * @param {boolean} [args.fastForwardOnly = false] - Only perform simple fast-forward merges. (Don't create merge commits.) * @param {Object} [args.headers] - Additional headers to include in HTTP requests, similar to git's `extraHeader` config * @param {Object} [args.author] - The details about the author. * @param {string} [args.author.name] - Default is `user.name` config. * @param {string} [args.author.email] - Default is `user.email` config. * @param {number} [args.author.timestamp=Math.floor(Date.now()/1000)] - Set the author timestamp field.
This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00). * @param {number} [args.author.timezoneOffset] - Set the author timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`. * @param {Object} [args.committer = author] - The details about the commit committer, in the same format as the author parameter. If not specified, the author details are used. * @param {string} [args.committer.name] - Default is `user.name` config. * @param {string} [args.committer.email] - Default is `user.email` config. * @param {number} [args.committer.timestamp=Math.floor(Date.now()/1000)] - Set the committer timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00). * @param {number} [args.committer.timezoneOffset] - Set the committer timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`. * @param {string} [args.signingKey] - passed to [commit](commit.md) when creating a merge commit * @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise} Resolves successfully when pull operation completes * * @example * await git.pull({ * fs, * http, * dir: '/tutorial', * ref: 'main', * singleBranch: true * }) * console.log('done') * */ async function pull({ fs: _fs, http, onProgress, onMessage, onAuth, onAuthSuccess, onAuthFailure, dir, gitdir = join(dir, '.git'), ref, url, remote, remoteRef, fastForward = true, fastForwardOnly = false, corsProxy, singleBranch, headers = {}, author: _author, committer: _committer, signingKey, cache = {}, }) { try { assertParameter('fs', _fs); assertParameter('gitdir', gitdir); const fs = new FileSystem(_fs); const author = await normalizeAuthorObject({ fs, gitdir, author: _author }); if (!author) throw new MissingNameError('author') const committer = await normalizeCommitterObject({ fs, gitdir, author, committer: _committer, }); if (!committer) throw new MissingNameError('committer') return await _pull({ fs, cache, http, onProgress, onMessage, onAuth, onAuthSuccess, onAuthFailure, dir, gitdir, ref, url, remote, remoteRef, fastForward, fastForwardOnly, corsProxy, singleBranch, headers, author, committer, signingKey, }) } catch (err) { err.caller = 'git.pull'; throw err } } /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {any} args.cache * @param {string} [args.dir] * @param {string} args.gitdir * @param {Iterable<string>} args.start * @param {Iterable<string>} args.finish * @returns {Promise<Set<string>>} */ async function listCommitsAndTags({ fs, cache, dir, gitdir = join(dir, '.git'), start, finish, }) { const shallows = await GitShallowManager.read({ fs, gitdir }); const startingSet = new Set(); const finishingSet = new Set(); for (const ref of start) { startingSet.add(await GitRefManager.resolve({ fs, gitdir, ref })); } for (const ref of finish) { // We may not have these refs locally so we must try/catch try { const oid = await GitRefManager.resolve({ fs, gitdir, ref }); finishingSet.add(oid); } catch (err) {} } const visited = new Set(); // Because git commits are named by their hash, there is no // way to construct a cycle. Therefore we won't worry about // setting a default recursion limit.
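// (Editor's note) A sketch of the intent: `start` holds refs whose history is wanted (for a push, the branch being sent) and `finish` holds refs the other side already has, so the walk below collects every commit (and annotated tag) reachable from `start` that is not behind `finish`.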
async function walk(oid) { visited.add(oid); const { type, object } = await _readObject({ fs, cache, gitdir, oid }); // Recursively resolve annotated tags if (type === 'tag') { const tag = GitAnnotatedTag.from(object); const commit = tag.headers().object; return walk(commit) } if (type !== 'commit') { throw new ObjectTypeError(oid, type, 'commit') } if (!shallows.has(oid)) { const commit = GitCommit.from(object); const parents = commit.headers().parent; for (oid of parents) { if (!finishingSet.has(oid) && !visited.has(oid)) { await walk(oid); } } } } // Let's go walking! for (const oid of startingSet) { await walk(oid); } return visited } /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {any} args.cache * @param {string} [args.dir] * @param {string} args.gitdir * @param {Iterable<string>} args.oids * @returns {Promise<Set<string>>} */ async function listObjects({ fs, cache, dir, gitdir = join(dir, '.git'), oids, }) { const visited = new Set(); // We don't do the purest, simplest recursion, because we can // avoid reading Blob objects entirely since the Tree objects // tell us which oids are Blobs and which are Trees. async function walk(oid) { if (visited.has(oid)) return visited.add(oid); const { type, object } = await _readObject({ fs, cache, gitdir, oid }); if (type === 'tag') { const tag = GitAnnotatedTag.from(object); const obj = tag.headers().object; await walk(obj); } else if (type === 'commit') { const commit = GitCommit.from(object); const tree = commit.headers().tree; await walk(tree); } else if (type === 'tree') { const tree = GitTree.from(object); for (const entry of tree) { // add blobs to the set // skip over submodules whose type is 'commit' if (entry.type === 'blob') { visited.add(entry.oid); } // recurse for trees if (entry.type === 'tree') { await walk(entry.oid); } } } } // Let's go walking!
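// For example, walking a single commit oid yields that commit, its root tree,
// every nested tree, and every blob those trees reference: the full object
// closure that _push uses to decide what goes into a packfile (a sketch;
// `commitOid` is hypothetical):
//
//   const objects = await listObjects({ fs, cache, gitdir, oids: [commitOid] })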
for (const oid of oids) { await walk(oid); } return visited } async function parseReceivePackResponse(packfile) { /** @type PushResult */ const result = {}; let response = ''; const read = GitPktLine.streamReader(packfile); let line = await read(); while (line !== true) { if (line !== null) response += line.toString('utf8') + '\n'; line = await read(); } const lines = response.split('\n'); // We're expecting "unpack {unpack-result}" line = lines.shift(); if (!line.startsWith('unpack ')) { throw new ParseError('unpack ok" or "unpack [error message]', line) } result.ok = line === 'unpack ok'; if (!result.ok) { result.error = line.slice('unpack '.length); } result.refs = {}; for (const line of lines) { if (line.trim() === '') continue const status = line.slice(0, 2); const refAndMessage = line.slice(3); let space = refAndMessage.indexOf(' '); if (space === -1) space = refAndMessage.length; const ref = refAndMessage.slice(0, space); const error = refAndMessage.slice(space + 1); result.refs[ref] = { ok: status === 'ok', error, }; } return result } async function writeReceivePackRequest({ capabilities = [], triplets = [], }) { const packstream = []; let capsFirstLine = `\x00 ${capabilities.join(' ')}`; for (const trip of triplets) { packstream.push( GitPktLine.encode( `${trip.oldoid} ${trip.oid} ${trip.fullRef}${capsFirstLine}\n` ) ); capsFirstLine = ''; } packstream.push(GitPktLine.flush()); return packstream } // @ts-check /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {any} args.cache * @param {HttpClient} args.http * @param {ProgressCallback} [args.onProgress] * @param {MessageCallback} [args.onMessage] * @param {AuthCallback} [args.onAuth] * @param {AuthFailureCallback} [args.onAuthFailure] * @param {AuthSuccessCallback} [args.onAuthSuccess] * @param {string} args.gitdir * @param {string} [args.ref] * @param {string} [args.remoteRef] * @param {string} [args.remote] * @param {boolean} [args.force = false] * @param {boolean} [args.delete = false] * @param {string} [args.url] * @param {string} [args.corsProxy] * @param {Object} [args.headers] * * @returns {Promise} */ async function _push({ fs, cache, http, onProgress, onMessage, onAuth, onAuthSuccess, onAuthFailure, gitdir, ref: _ref, remoteRef: _remoteRef, remote, url: _url, force = false, delete: _delete = false, corsProxy, headers = {}, }) { const ref = _ref || (await _currentBranch({ fs, gitdir })); if (typeof ref === 'undefined') { throw new MissingParameterError('ref') } const config = await GitConfigManager.get({ fs, gitdir }); // Figure out what remote to use. remote = remote || (await config.get(`branch.${ref}.pushRemote`)) || (await config.get('remote.pushDefault')) || (await config.get(`branch.${ref}.remote`)) || 'origin'; // Lookup the URL for the given remote. const url = _url || (await config.get(`remote.${remote}.pushurl`)) || (await config.get(`remote.${remote}.url`)); if (typeof url === 'undefined') { throw new MissingParameterError('remote OR url') } // Figure out what remote ref to use. It is fine for this to be undefined: // in that case the full local ref name is reused below. const remoteRef = _remoteRef || (await config.get(`branch.${ref}.merge`)); if (corsProxy === undefined) { corsProxy = await config.get('http.corsProxy'); } const fullRef = await GitRefManager.expand({ fs, gitdir, ref }); const oid = _delete ?
'0000000000000000000000000000000000000000' : await GitRefManager.resolve({ fs, gitdir, ref: fullRef }); /** @type typeof import("../managers/GitRemoteHTTP").GitRemoteHTTP */ const GitRemoteHTTP = GitRemoteManager.getRemoteHelperFor({ url }); const httpRemote = await GitRemoteHTTP.discover({ http, onAuth, onAuthSuccess, onAuthFailure, corsProxy, service: 'git-receive-pack', url, headers, protocolVersion: 1, }); const auth = httpRemote.auth; // hack to get new credentials from CredentialManager API let fullRemoteRef; if (!remoteRef) { fullRemoteRef = fullRef; } else { try { fullRemoteRef = await GitRefManager.expandAgainstMap({ ref: remoteRef, map: httpRemote.refs, }); } catch (err) { if (err instanceof NotFoundError) { // The remote reference doesn't exist yet. // If it is fully specified, use that value. Otherwise, treat it as a branch. fullRemoteRef = remoteRef.startsWith('refs/') ? remoteRef : `refs/heads/${remoteRef}`; } else { throw err } } } const oldoid = httpRemote.refs.get(fullRemoteRef) || '0000000000000000000000000000000000000000'; // Remotes can always accept thin-packs UNLESS they specify the 'no-thin' capability const thinPack = !httpRemote.capabilities.has('no-thin'); let objects = new Set(); if (!_delete) { const finish = [...httpRemote.refs.values()]; let skipObjects = new Set(); // If remote branch is present, look for a common merge base. if (oldoid !== '0000000000000000000000000000000000000000') { // trick to speed up common force push scenarios const mergebase = await _findMergeBase({ fs, cache, gitdir, oids: [oid, oldoid], }); for (const oid of mergebase) finish.push(oid); if (thinPack) { skipObjects = await listObjects({ fs, cache, gitdir, oids: mergebase }); } } // If remote does not have the commit, figure out the objects to send if (!finish.includes(oid)) { const commits = await listCommitsAndTags({ fs, cache, gitdir, start: [oid], finish, }); objects = await listObjects({ fs, cache, gitdir, oids: commits }); } if (thinPack) { // If there's a default branch for the remote lets skip those objects too. // Since this is an optional optimization, we just catch and continue if there is // an error (because we can't find a default branch, or can't find a commit, etc) try { // Sadly, the discovery phase with 'forPush' doesn't return symrefs, so we have to // rely on existing ones. const ref = await GitRefManager.resolve({ fs, gitdir, ref: `refs/remotes/${remote}/HEAD`, depth: 2, }); const { oid } = await GitRefManager.resolveAgainstMap({ ref: ref.replace(`refs/remotes/${remote}/`, ''), fullref: ref, map: httpRemote.refs, }); const oids = [oid]; for (const oid of await listObjects({ fs, cache, gitdir, oids })) { skipObjects.add(oid); } } catch (e) {} // Remove objects that we know the remote already has for (const oid of skipObjects) { objects.delete(oid); } } if (!force) { // Is it a tag that already exists? if ( fullRef.startsWith('refs/tags') && oldoid !== '0000000000000000000000000000000000000000' ) { throw new PushRejectedError('tag-exists') } // Is it a non-fast-forward commit? if ( oid !== '0000000000000000000000000000000000000000' && oldoid !== '0000000000000000000000000000000000000000' && !(await _isDescendent({ fs, cache, gitdir, oid, ancestor: oldoid, depth: -1, })) ) { throw new PushRejectedError('not-fast-forward') } } } // We can only safely use capabilities that the server also understands. // For instance, AWS CodeCommit aborts a push if you include the `agent`!!! 
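// The request body assembled below is a sequence of pkt-lines followed by the
// packfile, roughly like this (oids abbreviated, values illustrative):
//
//   <oldoid> <oid> refs/heads/main\x00 report-status side-band-64k agent=...
//   0000    (flush-pkt, from GitPktLine.flush())
//   PACK... (the packfile itself; omitted when deleting a ref)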
const capabilities = filterCapabilities( [...httpRemote.capabilities], ['report-status', 'side-band-64k', `agent=${pkg.agent}`] ); const packstream1 = await writeReceivePackRequest({ capabilities, triplets: [{ oldoid, oid, fullRef: fullRemoteRef }], }); const packstream2 = _delete ? [] : await _pack({ fs, cache, gitdir, oids: [...objects], }); const res = await GitRemoteHTTP.connect({ http, onProgress, corsProxy, service: 'git-receive-pack', url, auth, headers, body: [...packstream1, ...packstream2], }); const { packfile, progress } = await GitSideBand.demux(res.body); if (onMessage) { const lines = splitLines(progress); forAwait(lines, async line => { await onMessage(line); }); } // Parse the response! const result = await parseReceivePackResponse(packfile); if (res.headers) { result.headers = res.headers; } // Update the local copy of the remote ref if (remote && result.ok && result.refs[fullRemoteRef].ok) { // TODO: I think this should actually be using a refspec transform rather than assuming 'refs/remotes/{remote}' const ref = `refs/remotes/${remote}/${fullRemoteRef.replace( 'refs/heads/', '' )}`; if (_delete) { await GitRefManager.deleteRef({ fs, gitdir, ref }); } else { await GitRefManager.writeRef({ fs, gitdir, ref, value: oid }); } } if (result.ok && Object.values(result.refs).every(result => result.ok)) { return result } else { const prettyDetails = Object.entries(result.refs) .filter(([k, v]) => !v.ok) .map(([k, v]) => `\n - ${k}: ${v.error}`) .join(''); throw new GitPushError(prettyDetails, result) } } // @ts-check /** * Push a branch or tag * * The push command returns an object that describes the result of the attempted push operation. * *Notes:* If there were no errors, then there will be no `errors` property. There can be a mix of `ok` messages and `errors` messages. * * | param | type [= default] | description | * | ------ | ---------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | * | ok | Array\<string\> | The first item is "unpack" if the overall operation was successful. The remaining items are the names of refs that were updated successfully. | * | errors | Array\<string\> | If the overall operation threw an error, the first item will be "unpack {Overall error message}". The remaining items are individual refs that failed to be updated in the format "{ref name} {error message}". | * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {HttpClient} args.http - an HTTP client * @param {ProgressCallback} [args.onProgress] - optional progress event callback * @param {MessageCallback} [args.onMessage] - optional message event callback * @param {AuthCallback} [args.onAuth] - optional auth fill callback * @param {AuthFailureCallback} [args.onAuthFailure] - optional auth rejected callback * @param {AuthSuccessCallback} [args.onAuthSuccess] - optional auth approved callback * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} [args.ref] - Which branch to push. By default this is the currently checked out branch. * @param {string} [args.url] - The URL of the remote repository. The default is the value set in the git config for that remote. * @param {string} [args.remote] - If URL is not specified, determines which remote to use.
* @param {string} [args.remoteRef] - The name of the receiving branch on the remote. By default this is the configured remote tracking branch. * @param {boolean} [args.force = false] - If true, behaves the same as `git push --force` * @param {boolean} [args.delete = false] - If true, delete the remote ref * @param {string} [args.corsProxy] - Optional [CORS proxy](https://www.npmjs.com/%40isomorphic-git/cors-proxy). Overrides value in repo config. * @param {Object} [args.headers] - Additional headers to include in HTTP requests, similar to git's `extraHeader` config * @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise} Resolves successfully when push completes with a detailed description of the operation from the server. * @see PushResult * @see RefUpdateStatus * * @example * let pushResult = await git.push({ * fs, * http, * dir: '/tutorial', * remote: 'origin', * ref: 'main', * onAuth: () => ({ username: process.env.GITHUB_TOKEN }), * }) * console.log(pushResult) * */ async function push({ fs, http, onProgress, onMessage, onAuth, onAuthSuccess, onAuthFailure, dir, gitdir = join(dir, '.git'), ref, remoteRef, remote = 'origin', url, force = false, delete: _delete = false, corsProxy, headers = {}, cache = {}, }) { try { assertParameter('fs', fs); assertParameter('http', http); assertParameter('gitdir', gitdir); return await _push({ fs: new FileSystem(fs), cache, http, onProgress, onMessage, onAuth, onAuthSuccess, onAuthFailure, gitdir, ref, remoteRef, remote, url, force, delete: _delete, corsProxy, headers, }) } catch (err) { err.caller = 'git.push'; throw err } } async function resolveBlob({ fs, cache, gitdir, oid }) { const { type, object } = await _readObject({ fs, cache, gitdir, oid }); // Resolve annotated tag objects to whatever if (type === 'tag') { oid = GitAnnotatedTag.from(object).parse().object; return resolveBlob({ fs, cache, gitdir, oid }) } if (type !== 'blob') { throw new ObjectTypeError(oid, type, 'blob') } return { oid, blob: new Uint8Array(object) } } // @ts-check /** * * @typedef {Object} ReadBlobResult - The object returned has the following schema: * @property {string} oid * @property {Uint8Array} blob * */ /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {any} args.cache * @param {string} args.gitdir * @param {string} args.oid * @param {string} [args.filepath] * * @returns {Promise} Resolves successfully with a blob object description * @see ReadBlobResult */ async function _readBlob({ fs, cache, gitdir, oid, filepath = undefined, }) { if (filepath !== undefined) { oid = await resolveFilepath({ fs, cache, gitdir, oid, filepath }); } const blob = await resolveBlob({ fs, cache, gitdir, oid, }); return blob } // @ts-check /** * * @typedef {Object} ReadBlobResult - The object returned has the following schema: * @property {string} oid * @property {Uint8Array} blob * */ /** * Read a blob object directly * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.oid - The SHA-1 object id to get. Annotated tags, commits, and trees are peeled. * @param {string} [args.filepath] - Don't return the object with `oid` itself, but resolve `oid` to a tree and then return the blob object at that filepath. 
* @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise} Resolves successfully with a blob object description * @see ReadBlobResult * * @example * // Get the contents of 'README.md' in the main branch. * let commitOid = await git.resolveRef({ fs, dir: '/tutorial', ref: 'main' }) * console.log(commitOid) * let { blob } = await git.readBlob({ * fs, * dir: '/tutorial', * oid: commitOid, * filepath: 'README.md' * }) * console.log(Buffer.from(blob).toString('utf8')) * */ async function readBlob({ fs, dir, gitdir = join(dir, '.git'), oid, filepath, cache = {}, }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); assertParameter('oid', oid); return await _readBlob({ fs: new FileSystem(fs), cache, gitdir, oid, filepath, }) } catch (err) { err.caller = 'git.readBlob'; throw err } } // @ts-check /** * Read a commit object directly * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.oid - The SHA-1 object id to get. Annotated tags are peeled. * @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise} Resolves successfully with a git commit object * @see ReadCommitResult * @see CommitObject * * @example * // Read a commit object * let sha = await git.resolveRef({ fs, dir: '/tutorial', ref: 'main' }) * console.log(sha) * let commit = await git.readCommit({ fs, dir: '/tutorial', oid: sha }) * console.log(commit) * */ async function readCommit({ fs, dir, gitdir = join(dir, '.git'), oid, cache = {}, }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); assertParameter('oid', oid); return await _readCommit({ fs: new FileSystem(fs), cache, gitdir, oid, }) } catch (err) { err.caller = 'git.readCommit'; throw err } } // @ts-check /** * Read the contents of a note * * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {any} args.cache * @param {string} args.gitdir * @param {string} [args.ref] - The notes ref to look under * @param {string} args.oid * * @returns {Promise} Resolves successfully with note contents as a Buffer. */ async function _readNote({ fs, cache, gitdir, ref = 'refs/notes/commits', oid, }) { const parent = await GitRefManager.resolve({ gitdir, fs, ref }); const { blob } = await _readBlob({ fs, cache, gitdir, oid: parent, filepath: oid, }); return blob } // @ts-check /** * Read the contents of a note * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} [args.ref] - The notes ref to look under * @param {string} args.oid - The SHA-1 object id of the object to get the note for. * @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise} Resolves successfully with note contents as a Buffer. 
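 *
 * @example
 * // A sketch: read the note attached to the current commit
 * // (assumes a note exists under the default 'refs/notes/commits' ref)
 * let sha = await git.resolveRef({ fs, dir: '/tutorial', ref: 'HEAD' })
 * let note = await git.readNote({ fs, dir: '/tutorial', oid: sha })
 * console.log(Buffer.from(note).toString('utf8'))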
*/ async function readNote({ fs, dir, gitdir = join(dir, '.git'), ref = 'refs/notes/commits', oid, cache = {}, }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); assertParameter('ref', ref); assertParameter('oid', oid); return await _readNote({ fs: new FileSystem(fs), cache, gitdir, ref, oid, }) } catch (err) { err.caller = 'git.readNote'; throw err } } // @ts-check /** * * @typedef {Object} DeflatedObject * @property {string} oid * @property {'deflated'} type * @property {'deflated'} format * @property {Uint8Array} object * @property {string} [source] * */ /** * * @typedef {Object} WrappedObject * @property {string} oid * @property {'wrapped'} type * @property {'wrapped'} format * @property {Uint8Array} object * @property {string} [source] * */ /** * * @typedef {Object} RawObject * @property {string} oid * @property {'blob'|'commit'|'tree'|'tag'} type * @property {'content'} format * @property {Uint8Array} object * @property {string} [source] * */ /** * * @typedef {Object} ParsedBlobObject * @property {string} oid * @property {'blob'} type * @property {'parsed'} format * @property {string} object * @property {string} [source] * */ /** * * @typedef {Object} ParsedCommitObject * @property {string} oid * @property {'commit'} type * @property {'parsed'} format * @property {CommitObject} object * @property {string} [source] * */ /** * * @typedef {Object} ParsedTreeObject * @property {string} oid * @property {'tree'} type * @property {'parsed'} format * @property {TreeObject} object * @property {string} [source] * */ /** * * @typedef {Object} ParsedTagObject * @property {string} oid * @property {'tag'} type * @property {'parsed'} format * @property {TagObject} object * @property {string} [source] * */ /** * * @typedef {ParsedBlobObject | ParsedCommitObject | ParsedTreeObject | ParsedTagObject} ParsedObject */ /** * * @typedef {DeflatedObject | WrappedObject | RawObject | ParsedObject } ReadObjectResult */ /** * Read a git object directly by its SHA-1 object id * * Regarding `ReadObjectResult`: * * - `oid` will be the same as the `oid` argument unless the `filepath` argument is provided, in which case it will be the oid of the tree or blob being returned. * - `type` of deflated objects is `'deflated'`, and `type` of wrapped objects is `'wrapped'` * - `format` is usually, but not always, the format you requested. Packfiles do not store each object individually compressed so if you end up reading the object from a packfile it will be returned in format 'content' even if you requested 'deflated' or 'wrapped'. * - `object` will be an actual Object if format is 'parsed' and the object is a commit, tree, or annotated tag. Blobs are still formatted as Buffers unless an encoding is provided in which case they'll be strings. If format is anything other than 'parsed', object will be a Buffer. * - `source` is the name of the packfile or loose object file where the object was found. * * The `format` parameter can have the following values: * * | param | description | * | ---------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | * | 'deflated' | Return the raw deflate-compressed buffer for an object if possible. Useful for efficiently shuffling around loose objects when you don't care about the contents and can save time by not inflating them. 
| * | 'wrapped' | Return the inflated object buffer wrapped in the git object header if possible. This is the raw data used when calculating the SHA-1 object id of a git object. | * | 'content' | Return the object buffer without the git header. | * | 'parsed' | Returns a parsed representation of the object. | * * The result will be in one of the following schemas: * * ## `'deflated'` format * * {@link DeflatedObject typedef} * * ## `'wrapped'` format * * {@link WrappedObject typedef} * * ## `'content'` format * * {@link RawObject typedef} * * ## `'parsed'` format * * ### parsed `'blob'` type * * {@link ParsedBlobObject typedef} * * ### parsed `'commit'` type * * {@link ParsedCommitObject typedef} * {@link CommitObject typedef} * * ### parsed `'tree'` type * * {@link ParsedTreeObject typedef} * {@link TreeObject typedef} * {@link TreeEntry typedef} * * ### parsed `'tag'` type * * {@link ParsedTagObject typedef} * {@link TagObject typedef} * * @deprecated * > This command is overly complicated. * > * > If you know the type of object you are reading, use [`readBlob`](./readBlob.md), [`readCommit`](./readCommit.md), [`readTag`](./readTag.md), or [`readTree`](./readTree.md). * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.oid - The SHA-1 object id to get * @param {'deflated' | 'wrapped' | 'content' | 'parsed'} [args.format = 'parsed'] - What format to return the object in. The choices are described in more detail below. * @param {string} [args.filepath] - Don't return the object with `oid` itself, but resolve `oid` to a tree and then return the object at that filepath. To return the root directory of a tree, set filepath to `''` * @param {string} [args.encoding] - A convenience argument that only affects blobs. Instead of returning `object` as a buffer, it returns a string parsed using the given encoding. * @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise} Resolves successfully with a git object description * @see ReadObjectResult * * @example * // Given a random SHA-1 object id, figure out what it is * let { type, object } = await git.readObject({ * fs, * dir: '/tutorial', * oid: '0698a781a02264a6f37ba3ff41d78067eaf0f075' * }) * switch (type) { * case 'commit': { * console.log(object) * break * } * case 'tree': { * console.log(object) * break * } * case 'blob': { * console.log(object) * break * } * case 'tag': { * console.log(object) * break * } * } * */ async function readObject({ fs: _fs, dir, gitdir = join(dir, '.git'), oid, format = 'parsed', filepath = undefined, encoding = undefined, cache = {}, }) { try { assertParameter('fs', _fs); assertParameter('gitdir', gitdir); assertParameter('oid', oid); const fs = new FileSystem(_fs); if (filepath !== undefined) { oid = await resolveFilepath({ fs, cache, gitdir, oid, filepath, }); } // GitObjectManager does not know how to parse content, so we tweak that parameter before passing it. const _format = format === 'parsed' ?
'content' : format; const result = await _readObject({ fs, cache, gitdir, oid, format: _format, }); result.oid = oid; if (format === 'parsed') { result.format = 'parsed'; switch (result.type) { case 'commit': result.object = GitCommit.from(result.object).parse(); break case 'tree': result.object = GitTree.from(result.object).entries(); break case 'blob': // Here we consider returning a raw Buffer as the 'content' format // and returning a string as the 'parsed' format if (encoding) { result.object = result.object.toString(encoding); } else { result.object = new Uint8Array(result.object); result.format = 'content'; } break case 'tag': result.object = GitAnnotatedTag.from(result.object).parse(); break default: throw new ObjectTypeError( result.oid, result.type, 'blob|commit|tag|tree' ) } } else if (result.format === 'deflated' || result.format === 'wrapped') { result.type = result.format; } return result } catch (err) { err.caller = 'git.readObject'; throw err } } // @ts-check /** * * @typedef {Object} ReadTagResult - The object returned has the following schema: * @property {string} oid - SHA-1 object id of this tag * @property {TagObject} tag - the parsed tag object * @property {string} payload - PGP signing payload */ /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {any} args.cache * @param {string} args.gitdir * @param {string} args.oid * * @returns {Promise} */ async function _readTag({ fs, cache, gitdir, oid }) { const { type, object } = await _readObject({ fs, cache, gitdir, oid, format: 'content', }); if (type !== 'tag') { throw new ObjectTypeError(oid, type, 'tag') } const tag = GitAnnotatedTag.from(object); const result = { oid, tag: tag.parse(), payload: tag.payload(), }; // @ts-ignore return result } /** * * @typedef {Object} ReadTagResult - The object returned has the following schema: * @property {string} oid - SHA-1 object id of this tag * @property {TagObject} tag - the parsed tag object * @property {string} payload - PGP signing payload */ /** * Read an annotated tag object directly * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.oid - The SHA-1 object id to get * @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise} Resolves successfully with a git object description * @see ReadTagResult * @see TagObject * */ async function readTag({ fs, dir, gitdir = join(dir, '.git'), oid, cache = {}, }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); assertParameter('oid', oid); return await _readTag({ fs: new FileSystem(fs), cache, gitdir, oid, }) } catch (err) { err.caller = 'git.readTag'; throw err } } // @ts-check /** * * @typedef {Object} ReadTreeResult - The object returned has the following schema: * @property {string} oid - SHA-1 object id of this tree * @property {TreeObject} tree - the parsed tree object */ /** * Read a tree object directly * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.oid - The SHA-1 object id to get. Annotated tags and commits are peeled. 
* @param {string} [args.filepath] - Don't return the object with `oid` itself, but resolve `oid` to a tree and then return the tree object at that filepath. * @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise} Resolves successfully with a git tree object * @see ReadTreeResult * @see TreeObject * @see TreeEntry * */ async function readTree({ fs, dir, gitdir = join(dir, '.git'), oid, filepath = undefined, cache = {}, }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); assertParameter('oid', oid); return await _readTree({ fs: new FileSystem(fs), cache, gitdir, oid, filepath, }) } catch (err) { err.caller = 'git.readTree'; throw err } } // @ts-check /** * Remove a file from the git index (aka staging area) * * Note that this does NOT delete the file in the working directory. * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.filepath - The path to the file to remove from the index * @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise} Resolves successfully once the git index has been updated * * @example * await git.remove({ fs, dir: '/tutorial', filepath: 'README.md' }) * console.log('done') * */ async function remove({ fs: _fs, dir, gitdir = join(dir, '.git'), filepath, cache = {}, }) { try { assertParameter('fs', _fs); assertParameter('gitdir', gitdir); assertParameter('filepath', filepath); await GitIndexManager.acquire( { fs: new FileSystem(_fs), gitdir, cache }, async function(index) { index.delete({ filepath }); } ); } catch (err) { err.caller = 'git.remove'; throw err } } // @ts-check /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {object} args.cache * @param {SignCallback} [args.onSign] * @param {string} [args.dir] * @param {string} [args.gitdir=join(dir,'.git')] * @param {string} [args.ref] * @param {string} args.oid * @param {Object} args.author * @param {string} args.author.name * @param {string} args.author.email * @param {number} args.author.timestamp * @param {number} args.author.timezoneOffset * @param {Object} args.committer * @param {string} args.committer.name * @param {string} args.committer.email * @param {number} args.committer.timestamp * @param {number} args.committer.timezoneOffset * @param {string} [args.signingKey] * * @returns {Promise} */ async function _removeNote({ fs, cache, onSign, gitdir, ref = 'refs/notes/commits', oid, author, committer, signingKey, }) { // Get the current note commit let parent; try { parent = await GitRefManager.resolve({ gitdir, fs, ref }); } catch (err) { if (!(err instanceof NotFoundError)) { throw err } } // I'm using the "empty tree" magic number here for brevity const result = await _readTree({ fs, gitdir, oid: parent || '4b825dc642cb6eb9a060e54bf8d69288fbee4904', }); let tree = result.tree; // Remove the note blob entry from the tree tree = tree.filter(entry => entry.path !== oid); // Create the new note tree const treeOid = await _writeTree({ fs, gitdir, tree, }); // Create the new note commit const commitOid = await _commit({ fs, cache, onSign, gitdir, ref, tree: treeOid, parent: parent && [parent], message: `Note removed by 'isomorphic-git removeNote'\n`, author, committer, signingKey, }); return commitOid } // @ts-check /** * Remove an object note * * @param {object} 
args * @param {FsClient} args.fs - a file system client * @param {SignCallback} [args.onSign] - a PGP signing implementation * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} [args.ref] - The notes ref to look under * @param {string} args.oid - The SHA-1 object id of the object to remove the note from. * @param {Object} [args.author] - The details about the author. * @param {string} [args.author.name] - Default is `user.name` config. * @param {string} [args.author.email] - Default is `user.email` config. * @param {number} [args.author.timestamp=Math.floor(Date.now()/1000)] - Set the author timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00). * @param {number} [args.author.timezoneOffset] - Set the author timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`. * @param {Object} [args.committer = author] - The details about the note committer, in the same format as the author parameter. If not specified, the author details are used. * @param {string} [args.committer.name] - Default is `user.name` config. * @param {string} [args.committer.email] - Default is `user.email` config. * @param {number} [args.committer.timestamp=Math.floor(Date.now()/1000)] - Set the committer timestamp field. This is the integer number of seconds since the Unix epoch (1970-01-01 00:00:00). * @param {number} [args.committer.timezoneOffset] - Set the committer timezone offset field. This is the difference, in minutes, from the current timezone to UTC. Default is `(new Date()).getTimezoneOffset()`. * @param {string} [args.signingKey] - Sign the note-removal commit using this private PGP key. * @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise} Resolves successfully with the SHA-1 object id of the commit object for the note removal.
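 *
 * @example
 * // A sketch: remove the note attached to the current commit
 * // (assumes such a note exists and that `user.name` / `user.email` are set in config)
 * let sha = await git.resolveRef({ fs, dir: '/tutorial', ref: 'HEAD' })
 * let noteCommit = await git.removeNote({ fs, dir: '/tutorial', oid: sha })
 * console.log(noteCommit)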
*/ async function removeNote({ fs: _fs, onSign, dir, gitdir = join(dir, '.git'), ref = 'refs/notes/commits', oid, author: _author, committer: _committer, signingKey, cache = {}, }) { try { assertParameter('fs', _fs); assertParameter('gitdir', gitdir); assertParameter('oid', oid); const fs = new FileSystem(_fs); const author = await normalizeAuthorObject({ fs, gitdir, author: _author }); if (!author) throw new MissingNameError('author') const committer = await normalizeCommitterObject({ fs, gitdir, author, committer: _committer, }); if (!committer) throw new MissingNameError('committer') return await _removeNote({ fs, cache, onSign, gitdir, ref, oid, author, committer, signingKey, }) } catch (err) { err.caller = 'git.removeNote'; throw err } } // @ts-check /** * Rename a branch * * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {string} args.gitdir * @param {string} args.ref - The name of the new branch * @param {string} args.oldref - The name of the old branch * @param {boolean} [args.checkout = false] * * @returns {Promise} Resolves successfully when filesystem operations are complete */ async function _renameBranch({ fs, gitdir, oldref, ref, checkout = false, }) { if (ref !== cleanGitRef.clean(ref)) { throw new InvalidRefNameError(ref, cleanGitRef.clean(ref)) } if (oldref !== cleanGitRef.clean(oldref)) { throw new InvalidRefNameError(oldref, cleanGitRef.clean(oldref)) } const fulloldref = `refs/heads/${oldref}`; const fullnewref = `refs/heads/${ref}`; const newexist = await GitRefManager.exists({ fs, gitdir, ref: fullnewref }); if (newexist) { throw new AlreadyExistsError('branch', ref, false) } const value = await GitRefManager.resolve({ fs, gitdir, ref: fulloldref, depth: 1, }); await GitRefManager.writeRef({ fs, gitdir, ref: fullnewref, value }); await GitRefManager.deleteRef({ fs, gitdir, ref: fulloldref }); if (checkout) { // Update HEAD await GitRefManager.writeSymbolicRef({ fs, gitdir, ref: 'HEAD', value: fullnewref, }); } } // @ts-check /** * Rename a branch * * @param {object} args * @param {FsClient} args.fs - a file system implementation * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.ref - What to name the branch * @param {string} args.oldref - What the name of the branch was * @param {boolean} [args.checkout = false] - Update `HEAD` to point at the newly created branch * * @returns {Promise} Resolves successfully when filesystem operations are complete * * @example * await git.renameBranch({ fs, dir: '/tutorial', ref: 'main', oldref: 'master' }) * console.log('done') * */ async function renameBranch({ fs, dir, gitdir = join(dir, '.git'), ref, oldref, checkout = false, }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); assertParameter('ref', ref); assertParameter('oldref', oldref); return await _renameBranch({ fs: new FileSystem(fs), gitdir, ref, oldref, checkout, }) } catch (err) { err.caller = 'git.renameBranch'; throw err } } async function hashObject$1({ gitdir, type, object }) { return shasum(GitObject.wrap({ type, object })) } // @ts-check /** * Reset a file in the git index (aka staging area) * * Note that this does NOT modify the file in the working directory. 
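 *
 * This is comparable to `git reset HEAD -- <filepath>`: the file's index entry is
 * restored to its state in the commit that `ref` resolves to (a sketch, using the
 * repo from the example below):
 *
 * ```js
 * await git.add({ fs, dir: '/tutorial', filepath: 'README.md' })        // stage a change
 * await git.resetIndex({ fs, dir: '/tutorial', filepath: 'README.md' }) // un-stage it
 * ```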
* * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.filepath - The path to the file to reset in the index * @param {string} [args.ref = 'HEAD'] - A ref to the commit to use * @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise} Resolves successfully once the git index has been updated * * @example * await git.resetIndex({ fs, dir: '/tutorial', filepath: 'README.md' }) * console.log('done') * */ async function resetIndex({ fs: _fs, dir, gitdir = join(dir, '.git'), filepath, ref, cache = {}, }) { try { assertParameter('fs', _fs); assertParameter('gitdir', gitdir); assertParameter('filepath', filepath); const fs = new FileSystem(_fs); let oid; let workdirOid; try { // Resolve commit oid = await GitRefManager.resolve({ fs, gitdir, ref: ref || 'HEAD' }); } catch (e) { if (ref) { // Only throw the error if a ref is explicitly provided throw e } } // Not having an oid at this point means `resetIndex()` was called without explicit `ref` on a new git // repository. If that happens, we can skip resolving the file path. if (oid) { try { // Resolve blob oid = await resolveFilepath({ fs, cache, gitdir, oid, filepath, }); } catch (e) { // This means we're resetting the file to a "deleted" state oid = null; } } // For files that aren't in the workdir use zeros let stats = { ctime: new Date(0), mtime: new Date(0), dev: 0, ino: 0, mode: 0, uid: 0, gid: 0, size: 0, }; // If the file exists in the workdir... const object = dir && (await fs.read(join(dir, filepath))); if (object) { // ... and has the same hash as the desired state... workdirOid = await hashObject$1({ gitdir, type: 'blob', object, }); if (oid === workdirOid) { // ... use the workdir Stats object stats = await fs.lstat(join(dir, filepath)); } } await GitIndexManager.acquire({ fs, gitdir, cache }, async function(index) { index.delete({ filepath }); if (oid) { index.insert({ filepath, stats, oid }); } }); } catch (err) { err.caller = 'git.reset'; throw err } } // @ts-check /** * Get the value of a symbolic ref or resolve a ref to its SHA-1 object id * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.ref - The ref to resolve * @param {number} [args.depth = undefined] - How many symbolic references to follow before returning * * @returns {Promise} Resolves successfully with a SHA-1 object id or the value of a symbolic ref * * @example * let currentCommit = await git.resolveRef({ fs, dir: '/tutorial', ref: 'HEAD' }) * console.log(currentCommit) * let currentBranch = await git.resolveRef({ fs, dir: '/tutorial', ref: 'HEAD', depth: 2 }) * console.log(currentBranch) * */ async function resolveRef({ fs, dir, gitdir = join(dir, '.git'), ref, depth, }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); assertParameter('ref', ref); const oid = await GitRefManager.resolve({ fs: new FileSystem(fs), gitdir, ref, depth, }); return oid } catch (err) { err.caller = 'git.resolveRef'; throw err } } // @ts-check /** * Write an entry to the git config files. * * *Caveats:* * - Currently only the local `$GIT_DIR/config` file can be read or written. 
However support for the global `~/.gitconfig` and system `$(prefix)/etc/gitconfig` will be added in the future. * - The current parser does not support the more exotic features of the git-config file format such as `[include]` and `[includeIf]`. * * @param {Object} args * @param {FsClient} args.fs - a file system implementation * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.path - The key of the git config entry * @param {string | boolean | number | void} args.value - A value to store at that path. (Use `undefined` as the value to delete a config entry.) * @param {boolean} [args.append = false] - If true, will append rather than replace when setting (use with multi-valued config options). * * @returns {Promise} Resolves successfully when operation completed * * @example * // Write config value * await git.setConfig({ * fs, * dir: '/tutorial', * path: 'user.name', * value: 'Mr. Test' * }) * * // Print out config file * let file = await fs.promises.readFile('/tutorial/.git/config', 'utf8') * console.log(file) * * // Delete a config entry * await git.setConfig({ * fs, * dir: '/tutorial', * path: 'user.name', * value: undefined * }) * * // Print out config file * file = await fs.promises.readFile('/tutorial/.git/config', 'utf8') * console.log(file) */ async function setConfig({ fs: _fs, dir, gitdir = join(dir, '.git'), path, value, append = false, }) { try { assertParameter('fs', _fs); assertParameter('gitdir', gitdir); assertParameter('path', path); // assertParameter('value', value) // We actually allow 'undefined' as a value to unset/delete const fs = new FileSystem(_fs); const config = await GitConfigManager.get({ fs, gitdir }); if (append) { await config.append(path, value); } else { await config.set(path, value); } await GitConfigManager.save({ fs, gitdir, config }); } catch (err) { err.caller = 'git.setConfig'; throw err } } // @ts-check /** * Tell whether a file has been changed * * The possible resolve values are: * * | status | description | * | --------------------- | ------------------------------------------------------------------------------------- | * | `"ignored"` | file ignored by a .gitignore rule | * | `"unmodified"` | file unchanged from HEAD commit | * | `"*modified"` | file has modifications, not yet staged | * | `"*deleted"` | file has been removed, but the removal is not yet staged | * | `"*added"` | file is untracked, not yet staged | * | `"absent"` | file not present in HEAD commit, staging area, or working dir | * | `"modified"` | file has modifications, staged | * | `"deleted"` | file has been removed, staged | * | `"added"` | previously untracked file, staged | * | `"*unmodified"` | working dir and HEAD commit match, but index differs | * | `"*absent"` | file not present in working dir or HEAD commit, but present in the index | * | `"*undeleted"` | file was deleted from the index, but is still in the working dir | * | `"*undeletemodified"` | file was deleted from the index, but is present with modifications in the working dir | * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.filepath - The path to the file to query * @param {object} [args.cache] - a [cache](cache.md) 
object * * @returns {Promise<'ignored'|'unmodified'|'*modified'|'*deleted'|'*added'|'absent'|'modified'|'deleted'|'added'|'*unmodified'|'*absent'|'*undeleted'|'*undeletemodified'>} Resolves successfully with the file's git status * * @example * let status = await git.status({ fs, dir: '/tutorial', filepath: 'README.md' }) * console.log(status) * */ async function status({ fs: _fs, dir, gitdir = join(dir, '.git'), filepath, cache = {}, }) { try { assertParameter('fs', _fs); assertParameter('gitdir', gitdir); assertParameter('filepath', filepath); const fs = new FileSystem(_fs); const ignored = await GitIgnoreManager.isIgnored({ fs, gitdir, dir, filepath, }); if (ignored) { return 'ignored' } const headTree = await getHeadTree({ fs, cache, gitdir }); const treeOid = await getOidAtPath({ fs, cache, gitdir, tree: headTree, path: filepath, }); const indexEntry = await GitIndexManager.acquire( { fs, gitdir, cache }, async function(index) { for (const entry of index) { if (entry.path === filepath) return entry } return null } ); const stats = await fs.lstat(join(dir, filepath)); const H = treeOid !== null; // head const I = indexEntry !== null; // index const W = stats !== null; // working dir const getWorkdirOid = async () => { if (I && !compareStats(indexEntry, stats)) { return indexEntry.oid } else { const object = await fs.read(join(dir, filepath)); const workdirOid = await hashObject$1({ gitdir, type: 'blob', object, }); // If the oid in the index === working dir oid but stats differed, update the cache if (I && indexEntry.oid === workdirOid) { // and as long as our fs.stats aren't bad. // size of -1 happens over a BrowserFS HTTP Backend that doesn't serve Content-Length headers // (like the Karma webserver) because BrowserFS HTTP Backend uses HTTP HEAD requests to do fs.stat if (stats.size !== -1) { // We don't await this so we can return faster for one-off cases. GitIndexManager.acquire({ fs, gitdir, cache }, async function( index ) { index.insert({ filepath, stats, oid: workdirOid }); }); } } return workdirOid } }; if (!H && !W && !I) return 'absent' // --- if (!H && !W && I) return '*absent' // -A- if (!H && W && !I) return '*added' // --A if (!H && W && I) { const workdirOid = await getWorkdirOid(); // @ts-ignore return workdirOid === indexEntry.oid ? 'added' : '*added' // -AA : -AB } if (H && !W && !I) return 'deleted' // A-- if (H && !W && I) { return '*deleted' // AA- or AB- (both cases map to the same status) } if (H && W && !I) { const workdirOid = await getWorkdirOid(); return workdirOid === treeOid ? '*undeleted' : '*undeletemodified' // A-A : A-B } if (H && W && I) { const workdirOid = await getWorkdirOid(); if (workdirOid === treeOid) { // @ts-ignore return workdirOid === indexEntry.oid ? 'unmodified' : '*unmodified' // AAA : ABA } else { // @ts-ignore return workdirOid === indexEntry.oid ?
'modified' : '*modified' // ABB : AAB } } /* --- -A- --A -AA -AB A-- AA- AB- A-A A-B AAA ABA ABB AAB */ } catch (err) { err.caller = 'git.status'; throw err } } async function getOidAtPath({ fs, cache, gitdir, tree, path }) { if (typeof path === 'string') path = path.split('/'); const dirname = path.shift(); for (const entry of tree) { if (entry.path === dirname) { if (path.length === 0) { return entry.oid } const { type, object } = await _readObject({ fs, cache, gitdir, oid: entry.oid, }); if (type === 'tree') { const tree = GitTree.from(object); return getOidAtPath({ fs, cache, gitdir, tree, path }) } if (type === 'blob') { throw new ObjectTypeError(entry.oid, type, 'blob', path.join('/')) } } } return null } async function getHeadTree({ fs, cache, gitdir }) { // Get the tree from the HEAD commit. let oid; try { oid = await GitRefManager.resolve({ fs, gitdir, ref: 'HEAD' }); } catch (e) { // Handle fresh branches with no commits if (e instanceof NotFoundError) { return [] } throw e } const { tree } = await _readTree({ fs, cache, gitdir, oid }); return tree } // @ts-check /** * Efficiently get the status of multiple files at once. * * The returned `StatusMatrix` is admittedly not the easiest format to read. * However it conveys a large amount of information in a dense format that should make it easy to create reports about the current state of the repository, * without having to do multiple, time-consuming isomorphic-git calls. * My hope is that the speed and flexibility of the function will make up for the learning curve of interpreting the return value. * * ```js live * // get the status of all the files in 'src' * let status = await git.statusMatrix({ * fs, * dir: '/tutorial', * filter: f => f.startsWith('src/') * }) * console.log(status) * ``` * * ```js live * // get the status of all the JSON and Markdown files * let status = await git.statusMatrix({ * fs, * dir: '/tutorial', * filter: f => f.endsWith('.json') || f.endsWith('.md') * }) * console.log(status) * ``` * * The result is returned as a 2D array. * The outer array represents the files and/or blobs in the repo, in alphabetical order. * The inner arrays describe the status of the file: * the first value is the filepath, and the next three are integers * representing the HEAD status, WORKDIR status, and STAGE status of the entry. * * ```js * // example StatusMatrix * [ * ["a.txt", 0, 2, 0], // new, untracked * ["b.txt", 0, 2, 2], // added, staged * ["c.txt", 0, 2, 3], // added, staged, with unstaged changes * ["d.txt", 1, 1, 1], // unmodified * ["e.txt", 1, 2, 1], // modified, unstaged * ["f.txt", 1, 2, 2], // modified, staged * ["g.txt", 1, 2, 3], // modified, staged, with unstaged changes * ["h.txt", 1, 0, 1], // deleted, unstaged * ["i.txt", 1, 0, 0], // deleted, staged * ] * ``` * * - The HEAD status is either absent (0) or present (1). * - The WORKDIR status is either absent (0), identical to HEAD (1), or different from HEAD (2). * - The STAGE status is either absent (0), identical to HEAD (1), identical to WORKDIR (2), or different from WORKDIR (3). * * ```ts * type Filename = string * type HeadStatus = 0 | 1 * type WorkdirStatus = 0 | 1 | 2 * type StageStatus = 0 | 1 | 2 | 3 * * type StatusRow = [Filename, HeadStatus, WorkdirStatus, StageStatus] * * type StatusMatrix = StatusRow[] * ``` * * > Think of the natural progression of file modifications as being from HEAD (previous) -> WORKDIR (current) -> STAGE (next). * > Then HEAD is "version 1", WORKDIR is "version 2", and STAGE is "version 3".
* > Then, imagine a "version 0" which is before the file was created. * > Then the status value in each column corresponds to the oldest version of the file it is identical to. * > (For a file to be identical to "version 0" means the file is deleted.) * * Here are some examples of queries you can answer using the result: * * #### Q: What files have been deleted? * ```js * const FILE = 0, WORKDIR = 2 * * const filenames = (await statusMatrix({ dir })) * .filter(row => row[WORKDIR] === 0) * .map(row => row[FILE]) * ``` * * #### Q: What files have unstaged changes? * ```js * const FILE = 0, WORKDIR = 2, STAGE = 3 * * const filenames = (await statusMatrix({ dir })) * .filter(row => row[WORKDIR] !== row[STAGE]) * .map(row => row[FILE]) * ``` * * #### Q: What files have been modified since the last commit? * ```js * const FILE = 0, HEAD = 1, WORKDIR = 2 * * const filenames = (await statusMatrix({ dir })) * .filter(row => row[HEAD] !== row[WORKDIR]) * .map(row => row[FILE]) * ``` * * #### Q: What files will NOT be changed if I commit right now? * ```js * const FILE = 0, HEAD = 1, STAGE = 3 * * const filenames = (await statusMatrix({ dir })) * .filter(row => row[HEAD] === row[STAGE]) * .map(row => row[FILE]) * ``` * * For reference, here are all possible combinations: * * | HEAD | WORKDIR | STAGE | `git status --short` equivalent | * | ---- | ------- | ----- | ------------------------------- | * | 0 | 0 | 0 | `` | * | 0 | 0 | 3 | `AD` | * | 0 | 2 | 0 | `??` | * | 0 | 2 | 2 | `A ` | * | 0 | 2 | 3 | `AM` | * | 1 | 0 | 0 | `D ` | * | 1 | 0 | 1 | ` D` | * | 1 | 0 | 3 | `MD` | * | 1 | 1 | 0 | `D ` + `??` | * | 1 | 1 | 1 | `` | * | 1 | 1 | 3 | `MM` | * | 1 | 2 | 0 | `D ` + `??` | * | 1 | 2 | 1 | ` M` | * | 1 | 2 | 2 | `M ` | * | 1 | 2 | 3 | `MM` | * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} [args.ref = 'HEAD'] - Optionally specify a different commit to compare against the workdir and stage instead of the HEAD * @param {string[]} [args.filepaths = ['.']] - Limit the query to the given files and directories * @param {function(string): boolean} [args.filter] - Filter the results to only those whose filepath matches a function. * @param {object} [args.cache] - a [cache](cache.md) object * @param {boolean} [args.ignored = false] - include ignored files in the result * * @returns {Promise<Array<StatusRow>>} Resolves with a status matrix, described below. * @see StatusRow */ async function statusMatrix({ fs: _fs, dir, gitdir = join(dir, '.git'), ref = 'HEAD', filepaths = ['.'], filter, cache = {}, ignored: shouldIgnore = false, }) { try { assertParameter('fs', _fs); assertParameter('gitdir', gitdir); assertParameter('ref', ref); const fs = new FileSystem(_fs); return await _walk({ fs, cache, dir, gitdir, trees: [TREE({ ref }), WORKDIR(), STAGE()], map: async function(filepath, [head, workdir, stage]) { // Ignore ignored files, but only if they are not already tracked.
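// (A file matched by .gitignore that was committed or staged earlier still
// appears in the matrix, since it is present in HEAD and/or the index.)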
if (!head && !stage && workdir) { if (!shouldIgnore) { const isIgnored = await GitIgnoreManager.isIgnored({ fs, dir, filepath, }); if (isIgnored) { return null } } } // match against base paths if (!filepaths.some(base => worthWalking(filepath, base))) { return null } // Late filter against file names if (filter) { if (!filter(filepath)) return } const [headType, workdirType, stageType] = await Promise.all([ head && head.type(), workdir && workdir.type(), stage && stage.type(), ]); const isBlob = [headType, workdirType, stageType].includes('blob'); // For now, bail on directories unless the file is also a blob in another tree if ((headType === 'tree' || headType === 'special') && !isBlob) return if (headType === 'commit') return null if ((workdirType === 'tree' || workdirType === 'special') && !isBlob) return if (stageType === 'commit') return null if ((stageType === 'tree' || stageType === 'special') && !isBlob) return // Figure out the oids for files, using the staged oid for the working dir oid if the stats match. const headOid = headType === 'blob' ? await head.oid() : undefined; const stageOid = stageType === 'blob' ? await stage.oid() : undefined; let workdirOid; if ( headType !== 'blob' && workdirType === 'blob' && stageType !== 'blob' ) { // We don't actually NEED the sha. Any sha will do // TODO: update this logic to handle N trees instead of just 3. workdirOid = '42'; } else if (workdirType === 'blob') { workdirOid = await workdir.oid(); } const entry = [undefined, headOid, workdirOid, stageOid]; const result = entry.map(value => entry.indexOf(value)); result.shift(); // remove leading undefined entry return [filepath, ...result] }, }) } catch (err) { err.caller = 'git.statusMatrix'; throw err } } // @ts-check /** * Create a lightweight tag * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.ref - What to name the tag * @param {string} [args.object = 'HEAD'] - What oid the tag refers to. (Will resolve to oid if value is a ref.) By default, the commit object which is referred by the current `HEAD` is used. * @param {boolean} [args.force = false] - Instead of throwing an error if a tag named `ref` already exists, overwrite the existing tag. * * @returns {Promise} Resolves successfully when filesystem operations are complete * * @example * await git.tag({ fs, dir: '/tutorial', ref: 'test-tag' }) * console.log('done') * */ async function tag({ fs: _fs, dir, gitdir = join(dir, '.git'), ref, object, force = false, }) { try { assertParameter('fs', _fs); assertParameter('gitdir', gitdir); assertParameter('ref', ref); const fs = new FileSystem(_fs); if (ref === undefined) { throw new MissingParameterError('ref') } ref = ref.startsWith('refs/tags/') ? ref : `refs/tags/${ref}`; // Resolve passed object const value = await GitRefManager.resolve({ fs, gitdir, ref: object || 'HEAD', }); if (!force && (await GitRefManager.exists({ fs, gitdir, ref }))) { throw new AlreadyExistsError('tag', ref) } await GitRefManager.writeRef({ fs, gitdir, ref, value }); } catch (err) { err.caller = 'git.tag'; throw err } } // @ts-check /** * Register file contents in the working tree or object database to the git index (aka staging area). 
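 *
 * For instance (a sketch): `await git.updateIndex({ fs, dir, filepath: 'readme.md' })`
 * stages the current on-disk contents of the file, while
 * `await git.updateIndex({ fs, dir, filepath: 'readme.md', remove: true })` drops the
 * index entry if the file no longer exists in the working tree.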
* * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} args.dir - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.filepath - File to act upon. * @param {string} [args.oid] - OID of the object in the object database to add to the index with the specified filepath. * @param {number} [args.mode = 100644] - The file mode to use when adding the file to the index. * @param {boolean} [args.add] - Adds the specified file to the index if it does not yet exist in the index. * @param {boolean} [args.remove] - Remove the specified file from the index if it does not exist in the workspace anymore. * @param {boolean} [args.force] - Remove the specified file from the index, even if it still exists in the workspace. * @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise} Resolves successfully with the SHA-1 object id of the object written or updated in the index, or nothing if the file was removed. * * @example * await git.updateIndex({ * fs, * dir: '/tutorial', * filepath: 'readme.md' * }) * * @example * // Manually create a blob in the object database. * let oid = await git.writeBlob({ * fs, * dir: '/tutorial', * blob: new Uint8Array([]) * }) * * // Write the object in the object database to the index. * await git.updateIndex({ * fs, * dir: '/tutorial', * add: true, * filepath: 'readme.md', * oid * }) */ async function updateIndex({ fs: _fs, dir, gitdir = join(dir, '.git'), cache = {}, filepath, oid, mode, add, remove, force, }) { try { assertParameter('fs', _fs); assertParameter('gitdir', gitdir); assertParameter('filepath', filepath); const fs = new FileSystem(_fs); if (remove) { return await GitIndexManager.acquire( { fs, gitdir, cache }, async function(index) { let fileStats; if (!force) { // Check if the file is still present in the working directory fileStats = await fs.lstat(join(dir, filepath)); if (fileStats) { if (fileStats.isDirectory()) { // Removing directories should not work throw new InvalidFilepathError('directory') } // Do nothing if we don't force and the file still exists in the workdir return } } // Directories are not allowed, so we make sure the provided filepath exists in the index if (index.has({ filepath })) { index.delete({ filepath, }); } } ) } // If `remove` is not set, check that the filepath is a file that exists on disk, but only if no oid was provided let fileStats; if (!oid) { fileStats = await fs.lstat(join(dir, filepath)); if (!fileStats) { throw new NotFoundError( `file at "${filepath}" on disk and "remove" not set` ) } if (fileStats.isDirectory()) { throw new InvalidFilepathError('directory') } } return await GitIndexManager.acquire({ fs, gitdir, cache }, async function( index ) { if (!add && !index.has({ filepath })) { // If the index does not contain the filepath yet and `add` is not set, we should throw throw new NotFoundError( `file at "${filepath}" in index and "add" not set` ) } // By default we use 0 for the stats of the index file let stats = { ctime: new Date(0), mtime: new Date(0), dev: 0, ino: 0, mode, uid: 0, gid: 0, size: 0, }; if (!oid) { stats = fileStats; // Write the file to the object database const object = stats.isSymbolicLink() ?
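// For symlinks, git stores the link target path as the blob content, so read the link itself rather than the file it points to.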
await fs.readlink(join(dir, filepath)) : await fs.read(join(dir, filepath)); oid = await _writeObject({ fs, gitdir, type: 'blob', format: 'content', object, }); } index.insert({ filepath, oid: oid, stats, }); return oid }) } catch (err) { err.caller = 'git.updateIndex'; throw err } } // @ts-check /** * Return the version number of isomorphic-git * * I don't know why you might need this. I added it just so I could check that I was getting * the correct version of the library and not a cached version. * * @returns {string} the version string taken from package.json at publication time * * @example * console.log(git.version()) * */ function version() { try { return pkg.version } catch (err) { err.caller = 'git.version'; throw err } } // @ts-check /** * @callback WalkerMap * @param {string} filename * @param {Array} entries * @returns {Promise} */ /** * @callback WalkerReduce * @param {any} parent * @param {any[]} children * @returns {Promise} */ /** * @callback WalkerIterateCallback * @param {WalkerEntry[]} entries * @returns {Promise} */ /** * @callback WalkerIterate * @param {WalkerIterateCallback} walk * @param {IterableIterator} children * @returns {Promise} */ /** * A powerful recursive tree-walking utility. * * The `walk` API simplifies gathering detailed information about a tree or comparing all the filepaths in two or more trees. * Trees can be git commits, the working directory, or the git index (staging area). * As long as a file or directory is present in at least one of the trees, it will be traversed. * Entries are traversed in alphabetical order. * * The arguments to `walk` are the `trees` you want to traverse, and 3 optional transform functions: * `map`, `reduce`, and `iterate`. * * ## `TREE`, `WORKDIR`, and `STAGE` * * Tree walkers are represented by three separate functions that can be imported: * * ```js * import { TREE, WORKDIR, STAGE } from 'isomorphic-git' * ``` * * These functions return opaque handles called `Walker`s. * The only thing that `Walker` objects are good for is passing into `walk`. * For example, here are the three `Walker`s passed into `walk` by the `statusMatrix` command: * * ```js * let ref = 'HEAD' * * let trees = [TREE({ ref }), WORKDIR(), STAGE()] * ``` * * For the arguments, see the doc pages for [TREE](./TREE.md), [WORKDIR](./WORKDIR.md), and [STAGE](./STAGE.md). * * `map`, `reduce`, and `iterate` allow you to control the recursive walk by pruning and transforming `WalkerEntry`s into the desired result. * * ## WalkerEntry * * {@link WalkerEntry typedef} * * `map` receives an array of `WalkerEntry[]` as its main argument, one `WalkerEntry` for each `Walker` in the `trees` argument. * The methods are memoized per `WalkerEntry` so calling them multiple times in a `map` function does not adversely impact performance. * By only computing these values if needed, you can build lean, mean, efficient walking machines. * * ### WalkerEntry#type() * * Returns the kind as a string. This is normally either `tree` or `blob`. * * `TREE`, `STAGE`, and `WORKDIR` walkers all return a string. * * Possible values: * * - `'tree'` directory * - `'blob'` file * - `'special'` used by `WORKDIR` to represent irregular files like sockets and FIFOs * - `'commit'` used by `TREE` to represent submodules * * ```js * await entry.type() * ``` * * ### WalkerEntry#mode() * * Returns the file mode as a number. Use this to distinguish between regular files, symlinks, and executable files. * * `TREE`, `STAGE`, and `WORKDIR` walkers all return a number for all `type`s of entries.
* * It has been normalized to one of the 4 values that are allowed in git commits: * * - `0o40000` directory * - `0o100644` file * - `0o100755` file (executable) * - `0o120000` symlink * * Tip: to make modes more readable, you can convert them to octal strings using `.toString(8)`. * * ```js * await entry.mode() * ``` * * ### WalkerEntry#oid() * * Returns the SHA-1 object id for blobs and trees. * * `TREE` walkers return a string for `blob` and `tree` entries. * * `STAGE` and `WORKDIR` walkers return a string for `blob` entries and `undefined` for `tree` entries. * * ```js * await entry.oid() * ``` * * ### WalkerEntry#content() * * Returns the file contents as a Buffer. * * `TREE` and `WORKDIR` walkers return a Buffer for `blob` entries and `undefined` for `tree` entries. * * `STAGE` walkers always return `undefined` since the file contents are never stored in the stage. * * ```js * await entry.content() * ``` * * ### WalkerEntry#stat() * * Returns a normalized subset of filesystem Stat data. * * `WORKDIR` walkers return a `Stat` for `blob` and `tree` entries. * * `STAGE` walkers return a `Stat` for `blob` entries and `undefined` for `tree` entries. * * `TREE` walkers return `undefined` for all entry types. * * ```js * await entry.stat() * ``` * * {@link Stat typedef} * * ## map(string, Array) => Promise * * {@link WalkerMap typedef} * * This is the function that is called once per entry BEFORE visiting the children of that node. * * If you return `null` for a `tree` entry, then none of the children of that `tree` entry will be walked. * * This is a good place for query logic, such as examining the contents of a file. * Compare all the entries and return any values you are interested in. * If you do not return a value (or return undefined), that entry will be filtered from the results. * * Example 1: Find all the files containing the word 'foo'. * ```js * async function map(filepath, [head, workdir]) { * let content = (await workdir.content()).toString('utf8') * if (content.includes('foo')) { * return { * filepath, * content * } * } * } * ``` * * Example 2: Return the difference between the working directory and the HEAD commit * ```js * const diff = require('diff-lines') * async function map(filepath, [head, workdir]) { * return { * filepath, * oid: await head.oid(), * diff: diff((await head.content()).toString('utf8'), (await workdir.content()).toString('utf8')) * } * } * ``` * * Example 3: * ```js * let path = require('path') * // Only examine files in the directory `cwd` * let cwd = 'src/app' * async function map (filepath, [head, workdir, stage]) { * if ( * // don't skip the root directory * filepath !== '.' && * // return true for 'src' and 'src/app' * !cwd.startsWith(filepath) && * // return true for 'src/app/*' * path.dirname(filepath) !== cwd * ) { * return null * } else { * return filepath * } * } * ``` * * ## reduce(parent, children) * * {@link WalkerReduce typedef} * * This is the function that is called once per entry AFTER visiting the children of that node. * * Default: `async (parent, children) => parent === undefined ? children.flat() : [parent, children].flat()` * * The default implementation of this function returns all directories and children in a giant flat array. * You can define a different accumulation method though.
* * Example: Return a hierarchical structure * ```js * async function reduce (parent, children) { * return Object.assign(parent, { children }) * } * ``` * * ## iterate(walk, children) * * {@link WalkerIterate typedef} * * {@link WalkerIterateCallback typedef} * * Default: `(walk, children) => Promise.all([...children].map(walk))` * * The default implementation recurses all children concurrently using Promise.all. * However, you could use a custom function to traverse children serially or use a global queue to throttle recursion. * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {Walker[]} args.trees - The trees you want to traverse * @param {WalkerMap} [args.map] - Transform `WalkerEntry`s into a result form * @param {WalkerReduce} [args.reduce] - Control how mapped entries are combined with their parent result * @param {WalkerIterate} [args.iterate] - Fine-tune how entries within a tree are iterated over * @param {object} [args.cache] - a [cache](cache.md) object * * @returns {Promise} The finished tree-walking result */ async function walk({ fs, dir, gitdir = join(dir, '.git'), trees, map, reduce, iterate, cache = {}, }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); assertParameter('trees', trees); return await _walk({ fs: new FileSystem(fs), cache, dir, gitdir, trees, map, reduce, iterate, }) } catch (err) { err.caller = 'git.walk'; throw err } } // @ts-check /** * Write a blob object directly * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {Uint8Array} args.blob - The blob object to write * * @returns {Promise} Resolves successfully with the SHA-1 object id of the newly written object * * @example * // Manually create a blob.
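* // (Note: an empty Uint8Array is git's empty blob, which always hashes to the well-known oid shown below.)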
* let oid = await git.writeBlob({ * fs, * dir: '/tutorial', * blob: new Uint8Array([]) * }) * * console.log('oid', oid) // should be 'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391' * */ async function writeBlob({ fs, dir, gitdir = join(dir, '.git'), blob }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); assertParameter('blob', blob); return await _writeObject({ fs: new FileSystem(fs), gitdir, type: 'blob', object: blob, format: 'content', }) } catch (err) { err.caller = 'git.writeBlob'; throw err } } // @ts-check /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {string} args.gitdir * @param {CommitObject} args.commit * * @returns {Promise} * @see CommitObject * */ async function _writeCommit({ fs, gitdir, commit }) { // Convert object to buffer const object = GitCommit.from(commit).toObject(); const oid = await _writeObject({ fs, gitdir, type: 'commit', object, format: 'content', }); return oid } // @ts-check /** * Write a commit object directly * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {CommitObject} args.commit - The object to write * * @returns {Promise} Resolves successfully with the SHA-1 object id of the newly written object * @see CommitObject * */ async function writeCommit({ fs, dir, gitdir = join(dir, '.git'), commit, }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); assertParameter('commit', commit); return await _writeCommit({ fs: new FileSystem(fs), gitdir, commit, }) } catch (err) { err.caller = 'git.writeCommit'; throw err } } // @ts-check /** * Write a git object directly * * `format` can have the following values: * * | param | description | * | ---------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | * | 'deflated' | Treat `object` as the raw deflate-compressed buffer for an object, meaning it can be written to `.git/objects/**` as-is. | * | 'wrapped' | Treat `object` as the inflated object buffer wrapped in the git object header. This is the raw buffer used when calculating the SHA-1 object id of a git object. | * | 'content' | Treat `object` as the object buffer without the git header. | * | 'parsed' | Treat `object` as a parsed representation of the object. | * * If `format` is `'parsed'`, then `object` must match one of the schemas for `CommitObject`, `TreeObject`, `TagObject`, or a `string` (for blobs). * * {@link CommitObject typedef} * * {@link TreeObject typedef} * * {@link TagObject typedef} * * If `format` is `'content'`, `'wrapped'`, or `'deflated'`, `object` should be a `Uint8Array`. * * @deprecated * > This command is overly complicated. * > * > If you know the type of object you are writing, use [`writeBlob`](./writeBlob.md), [`writeCommit`](./writeCommit.md), [`writeTag`](./writeTag.md), or [`writeTree`](./writeTree.md). * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string | Uint8Array | CommitObject | TreeObject | TagObject} args.object - The object to write.
* @param {'blob'|'tree'|'commit'|'tag'} [args.type] - The kind of object to write. * @param {'deflated' | 'wrapped' | 'content' | 'parsed'} [args.format = 'parsed'] - What format the object is in. The possible choices are listed below. * @param {string} [args.oid] - If `format` is `'deflated'` then this param is required. Otherwise it is calculated. * @param {string} [args.encoding] - If `type` is `'blob'` then `object` will be converted to a Uint8Array using `encoding`. * * @returns {Promise} Resolves successfully with the SHA-1 object id of the newly written object. * * @example * // Manually create an annotated tag. * let sha = await git.resolveRef({ fs, dir: '/tutorial', ref: 'HEAD' }) * console.log('commit', sha) * * let oid = await git.writeObject({ * fs, * dir: '/tutorial', * type: 'tag', * object: { * object: sha, * type: 'commit', * tag: 'my-tag', * tagger: { * name: 'your name', * email: 'email@example.com', * timestamp: Math.floor(Date.now()/1000), * timezoneOffset: new Date().getTimezoneOffset() * }, * message: 'Optional message' * } * }) * * console.log('tag', oid) * */ async function writeObject({ fs: _fs, dir, gitdir = join(dir, '.git'), type, object, format = 'parsed', oid, encoding = undefined, }) { try { const fs = new FileSystem(_fs); // Convert object to buffer if (format === 'parsed') { switch (type) { case 'commit': object = GitCommit.from(object).toObject(); break case 'tree': object = GitTree.from(object).toObject(); break case 'blob': object = Buffer.from(object, encoding); break case 'tag': object = GitAnnotatedTag.from(object).toObject(); break default: throw new ObjectTypeError(oid || '', type, 'blob|commit|tag|tree') } // GitObjectManager does not know how to serialize content, so we tweak that parameter before passing it. format = 'content'; } oid = await _writeObject({ fs, gitdir, type, object, oid, format, }); return oid } catch (err) { err.caller = 'git.writeObject'; throw err } } // @ts-check /** * Write a ref which refers to the specified SHA-1 object id, or a symbolic ref which refers to the specified ref. * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {string} args.ref - The name of the ref to write * @param {string} args.value - When `symbolic` is false, a ref or an SHA-1 object id. When true, a ref starting with `refs/`. * @param {boolean} [args.force = false] - Instead of throwing an error if a ref named `ref` already exists, overwrite the existing ref. * @param {boolean} [args.symbolic = false] - Whether the ref is symbolic or not. 
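* (For context: a symbolic ref stores the name of another ref rather than an object id; an ordinary `HEAD`, for example, contains `ref: refs/heads/main`. The second example below writes exactly this kind of ref.)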
* * @returns {Promise} Resolves successfully when filesystem operations are complete * * @example * await git.writeRef({ * fs, * dir: '/tutorial', * ref: 'refs/heads/another-branch', * value: 'HEAD' * }) * await git.writeRef({ * fs, * dir: '/tutorial', * ref: 'HEAD', * value: 'refs/heads/another-branch', * force: true, * symbolic: true * }) * console.log('done') * */ async function writeRef({ fs: _fs, dir, gitdir = join(dir, '.git'), ref, value, force = false, symbolic = false, }) { try { assertParameter('fs', _fs); assertParameter('gitdir', gitdir); assertParameter('ref', ref); assertParameter('value', value); const fs = new FileSystem(_fs); if (ref !== cleanGitRef.clean(ref)) { throw new InvalidRefNameError(ref, cleanGitRef.clean(ref)) } if (!force && (await GitRefManager.exists({ fs, gitdir, ref }))) { throw new AlreadyExistsError('ref', ref) } if (symbolic) { await GitRefManager.writeSymbolicRef({ fs, gitdir, ref, value, }); } else { value = await GitRefManager.resolve({ fs, gitdir, ref: value, }); await GitRefManager.writeRef({ fs, gitdir, ref, value, }); } } catch (err) { err.caller = 'git.writeRef'; throw err } } // @ts-check /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs * @param {string} args.gitdir * @param {TagObject} args.tag * * @returns {Promise} */ async function _writeTag({ fs, gitdir, tag }) { // Convert object to buffer const object = GitAnnotatedTag.from(tag).toObject(); const oid = await _writeObject({ fs, gitdir, type: 'tag', object, format: 'content', }); return oid } // @ts-check /** * Write an annotated tag object directly * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {TagObject} args.tag - The object to write * * @returns {Promise} Resolves successfully with the SHA-1 object id of the newly written object * @see TagObject * * @example * // Manually create an annotated tag. * let sha = await git.resolveRef({ fs, dir: '/tutorial', ref: 'HEAD' }) * console.log('commit', sha) * * let oid = await git.writeTag({ * fs, * dir: '/tutorial', * tag: { * object: sha, * type: 'commit', * tag: 'my-tag', * tagger: { * name: 'your name', * email: 'email@example.com', * timestamp: Math.floor(Date.now()/1000), * timezoneOffset: new Date().getTimezoneOffset() * }, * message: 'Optional message' * } * }) * * console.log('tag', oid) * */ async function writeTag({ fs, dir, gitdir = join(dir, '.git'), tag }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); assertParameter('tag', tag); return await _writeTag({ fs: new FileSystem(fs), gitdir, tag, }) } catch (err) { err.caller = 'git.writeTag'; throw err } } // @ts-check /** * Write a tree object directly * * @param {object} args * @param {FsClient} args.fs - a file system client * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path * @param {TreeObject} args.tree - The object to write * * @returns {Promise} Resolves successfully with the SHA-1 object id of the newly written object. 
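* * @example * // (Illustrative sketch, not from the original docs: writes a tree containing one empty file. * // The oid below is git's well-known empty-blob id, e.g. as returned by writeBlob for an empty Uint8Array.) * let treeOid = await git.writeTree({ * fs, * dir: '/tutorial', * tree: [ * { mode: '100644', path: 'readme.md', oid: 'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391', type: 'blob' }, * ], * }) * console.log('tree', treeOid) *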
* @see TreeObject * @see TreeEntry * */ async function writeTree({ fs, dir, gitdir = join(dir, '.git'), tree }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); assertParameter('tree', tree); return await _writeTree({ fs: new FileSystem(fs), gitdir, tree, }) } catch (err) { err.caller = 'git.writeTree'; throw err } } // default export var index = { Errors, STAGE, TREE, WORKDIR, add, addNote, addRemote, annotatedTag, branch, checkout, clone, commit, getConfig, getConfigAll, setConfig, currentBranch, deleteBranch, deleteRef, deleteRemote, deleteTag, expandOid, expandRef, fastForward, fetch, findMergeBase, findRoot, getRemoteInfo, getRemoteInfo2, hashBlob, indexPack, init, isDescendent, isIgnored, listBranches, listFiles, listNotes, listRemotes, listServerRefs, listTags, log, merge, packObjects, pull, push, readBlob, readCommit, readNote, readObject, readTag, readTree, remove, removeNote, renameBranch, resetIndex, updateIndex, resolveRef, status, statusMatrix, tag, version, walk, writeBlob, writeCommit, writeObject, writeRef, writeTag, writeTree, }; exports.Errors = Errors; exports.STAGE = STAGE; exports.TREE = TREE; exports.WORKDIR = WORKDIR; exports.add = add; exports.addNote = addNote; exports.addRemote = addRemote; exports.annotatedTag = annotatedTag; exports.branch = branch; exports.checkout = checkout; exports.clone = clone; exports.commit = commit; exports.currentBranch = currentBranch; exports.default = index; exports.deleteBranch = deleteBranch; exports.deleteRef = deleteRef; exports.deleteRemote = deleteRemote; exports.deleteTag = deleteTag; exports.expandOid = expandOid; exports.expandRef = expandRef; exports.fastForward = fastForward; exports.fetch = fetch; exports.findMergeBase = findMergeBase; exports.findRoot = findRoot; exports.getConfig = getConfig; exports.getConfigAll = getConfigAll; exports.getRemoteInfo = getRemoteInfo; exports.getRemoteInfo2 = getRemoteInfo2; exports.hashBlob = hashBlob; exports.indexPack = indexPack; exports.init = init; exports.isDescendent = isDescendent; exports.isIgnored = isIgnored; exports.listBranches = listBranches; exports.listFiles = listFiles; exports.listNotes = listNotes; exports.listRemotes = listRemotes; exports.listServerRefs = listServerRefs; exports.listTags = listTags; exports.log = log; exports.merge = merge; exports.packObjects = packObjects; exports.pull = pull; exports.push = push; exports.readBlob = readBlob; exports.readCommit = readCommit; exports.readNote = readNote; exports.readObject = readObject; exports.readTag = readTag; exports.readTree = readTree; exports.remove = remove; exports.removeNote = removeNote; exports.renameBranch = renameBranch; exports.resetIndex = resetIndex; exports.resolveRef = resolveRef; exports.setConfig = setConfig; exports.status = status; exports.statusMatrix = statusMatrix; exports.tag = tag; exports.updateIndex = updateIndex; exports.version = version; exports.walk = walk; exports.writeBlob = writeBlob; exports.writeCommit = writeCommit; exports.writeObject = writeObject; exports.writeRef = writeRef; exports.writeTag = writeTag; exports.writeTree = writeTree;